Apache Beam Java Lib and ANTLR #2207
223 failed, 1 skipped, 11 passed in 1m 51s
235 tests · 37 suites · 37 files: 11 ✅ passed, 1 💤 skipped, 223 ❌ failed (1m 51s ⏱️)
Results for commit 9baa7ba.
Annotations
Check warning on line 0 in org.apache.beam.sdk.transforms.CombineTest$BasicTests
github-actions / Test Results
testBinaryCombinePerKeyInstanceMethodReference (org.apache.beam.sdk.transforms.CombineTest$BasicTests) failed
runners/spark/3/build/test-results/validatesStructuredStreamingRunnerBatch/TEST-org.apache.beam.sdk.transforms.CombineTest$BasicTests.xml [took 0s]
Raw output
java.lang.RuntimeException: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.runtimeExceptionFrom(SparkStructuredStreamingPipelineResult.java:56)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.unwrapCause(SparkStructuredStreamingPipelineResult.java:70)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:100)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:88)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:168)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:82)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:325)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:404)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:343)
at org.apache.beam.sdk.transforms.CombineTest$BasicTests.testBinaryCombinePerKeyInstanceMethodReference(CombineTest.java:951)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:288)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:282)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.lang.Thread.run(Thread.java:829)
Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.spark.sql.catalyst.analysis.Analyzer.<init>(Analyzer.scala:1753)
at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anon$1.<init>(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.analyzer(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.$anonfun$build$2(BaseSessionStateBuilder.scala:354)
at org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:87)
at org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:87)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:75)
at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:183)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:183)
at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:75)
at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:73)
at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:65)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:205)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:211)
at org.apache.spark.sql.Dataset$.apply(Dataset.scala:75)
at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:492)
at org.apache.beam.runners.spark.structuredstreaming.io.BoundedDatasetFactory.createDatasetFromRDD(BoundedDatasetFactory.java:103)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:57)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:38)
at org.apache.beam.runners.spark.structuredstreaming.translation.TransformTranslator.translate(TransformTranslator.java:79)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$TranslatingVisitor.visit(PipelineTranslator.java:282)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$PTransformVisitor.visitPrimitiveTransform(PipelineTranslator.java:477)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:593)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.access$500(TransformHierarchy.java:240)
at org.apache.beam.sdk.runners.TransformHierarchy.visit(TransformHierarchy.java:214)
at org.apache.beam.sdk.Pipeline.traverseTopologically(Pipeline.java:477)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator.translate(PipelineTranslator.java:129)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.translatePipeline(SparkStructuredStreamingRunner.java:190)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.lambda$run$0(SparkStructuredStreamingRunner.java:154)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
... 1 more
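Note: the remaining failures below share this same root cause; only the failing test method differs. A NoClassDefFoundError whose message reads "Could not initialize class" means the class's static initializer already failed earlier in the same JVM, so the original ExceptionInInitializerError is only visible in the very first test that touched org.apache.spark.sql.catalyst.expressions.VirtualColumn$. Since this PR involves ANTLR, one plausible suspect (an assumption, not confirmed by these logs) is a conflicting ANTLR 4 runtime on the test classpath, because Spark Catalyst's SQL parser is generated with ANTLR 4. Below is a minimal, hypothetical diagnostic sketch using only standard ANTLR and JDK APIs (the class name AntlrRuntimeProbe is invented for illustration) that could be run on the same classpath to confirm which ANTLR runtime actually wins resolution:

```java
import org.antlr.v4.runtime.RuntimeMetaData;

/**
 * Hypothetical diagnostic helper: prints the ANTLR 4 runtime version the JVM
 * resolved and the jar it was loaded from. Useful when a NoClassDefFoundError
 * inside Spark Catalyst is suspected to stem from an ANTLR version conflict.
 */
public class AntlrRuntimeProbe {
  public static void main(String[] args) {
    // RuntimeMetaData.VERSION is the version string compiled into the ANTLR 4 runtime jar.
    System.out.println("ANTLR runtime version: " + RuntimeMetaData.VERSION);
    // Location of the jar that actually supplied the runtime class on this classpath.
    System.out.println("Loaded from: "
        + RuntimeMetaData.class.getProtectionDomain().getCodeSource().getLocation());
  }
}
```

If the printed version or jar differs from the ANTLR runtime Spark 3 was built against, aligning or shading that dependency would be the first thing to try; otherwise the VirtualColumn$ initializer failure has some other cause and the first failing test's full log should be inspected.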
Check warning on line 0 in org.apache.beam.sdk.transforms.CombineTest$BasicTests
github-actions / Test Results
testCombinePerKeyInstanceMethodReference (org.apache.beam.sdk.transforms.CombineTest$BasicTests) failed
runners/spark/3/build/test-results/validatesStructuredStreamingRunnerBatch/TEST-org.apache.beam.sdk.transforms.CombineTest$BasicTests.xml [took 0s]
Raw output
java.lang.RuntimeException: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.runtimeExceptionFrom(SparkStructuredStreamingPipelineResult.java:56)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.unwrapCause(SparkStructuredStreamingPipelineResult.java:70)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:100)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:88)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:168)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:82)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:325)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:404)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:343)
at org.apache.beam.sdk.transforms.CombineTest$BasicTests.testCombinePerKeyInstanceMethodReference(CombineTest.java:937)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:288)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:282)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.lang.Thread.run(Thread.java:829)
Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.spark.sql.catalyst.analysis.Analyzer.<init>(Analyzer.scala:1753)
at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anon$1.<init>(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.analyzer(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.$anonfun$build$2(BaseSessionStateBuilder.scala:354)
at org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:87)
at org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:87)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:75)
at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:183)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:183)
at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:75)
at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:73)
at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:65)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:205)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:211)
at org.apache.spark.sql.Dataset$.apply(Dataset.scala:75)
at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:492)
at org.apache.beam.runners.spark.structuredstreaming.io.BoundedDatasetFactory.createDatasetFromRDD(BoundedDatasetFactory.java:103)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:57)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:38)
at org.apache.beam.runners.spark.structuredstreaming.translation.TransformTranslator.translate(TransformTranslator.java:79)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$TranslatingVisitor.visit(PipelineTranslator.java:282)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$PTransformVisitor.visitPrimitiveTransform(PipelineTranslator.java:477)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:593)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.access$500(TransformHierarchy.java:240)
at org.apache.beam.sdk.runners.TransformHierarchy.visit(TransformHierarchy.java:214)
at org.apache.beam.sdk.Pipeline.traverseTopologically(Pipeline.java:477)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator.translate(PipelineTranslator.java:129)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.translatePipeline(SparkStructuredStreamingRunner.java:190)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.lambda$run$0(SparkStructuredStreamingRunner.java:154)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
... 1 more
Check warning on line 0 in org.apache.beam.sdk.transforms.CombineTest$BasicTests
github-actions / Test Results
testBasicCombine (org.apache.beam.sdk.transforms.CombineTest$BasicTests) failed
runners/spark/3/build/test-results/validatesStructuredStreamingRunnerBatch/TEST-org.apache.beam.sdk.transforms.CombineTest$BasicTests.xml [took 0s]
Raw output
java.lang.RuntimeException: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.runtimeExceptionFrom(SparkStructuredStreamingPipelineResult.java:56)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.unwrapCause(SparkStructuredStreamingPipelineResult.java:70)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:100)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:88)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:168)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:82)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:325)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:404)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:343)
at org.apache.beam.sdk.transforms.CombineTest$SharedTestBase.runTestBasicCombine(CombineTest.java:141)
at org.apache.beam.sdk.transforms.CombineTest$BasicTests.testBasicCombine(CombineTest.java:678)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:288)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:282)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.lang.Thread.run(Thread.java:829)
Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.spark.sql.catalyst.analysis.Analyzer.<init>(Analyzer.scala:1753)
at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anon$1.<init>(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.analyzer(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.$anonfun$build$2(BaseSessionStateBuilder.scala:354)
at org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:87)
at org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:87)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:75)
at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:183)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:183)
at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:75)
at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:73)
at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:65)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:205)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:211)
at org.apache.spark.sql.Dataset$.apply(Dataset.scala:75)
at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:492)
at org.apache.beam.runners.spark.structuredstreaming.io.BoundedDatasetFactory.createDatasetFromRDD(BoundedDatasetFactory.java:103)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:57)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:38)
at org.apache.beam.runners.spark.structuredstreaming.translation.TransformTranslator.translate(TransformTranslator.java:79)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$TranslatingVisitor.visit(PipelineTranslator.java:282)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$PTransformVisitor.visitPrimitiveTransform(PipelineTranslator.java:477)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:593)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.access$500(TransformHierarchy.java:240)
at org.apache.beam.sdk.runners.TransformHierarchy.visit(TransformHierarchy.java:214)
at org.apache.beam.sdk.Pipeline.traverseTopologically(Pipeline.java:477)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator.translate(PipelineTranslator.java:129)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.translatePipeline(SparkStructuredStreamingRunner.java:190)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.lambda$run$0(SparkStructuredStreamingRunner.java:154)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
... 1 more
Check warning on line 0 in org.apache.beam.sdk.transforms.CombineTest$BasicTests
github-actions / Test Results
testCombinePerKeyLambda (org.apache.beam.sdk.transforms.CombineTest$BasicTests) failed
runners/spark/3/build/test-results/validatesStructuredStreamingRunnerBatch/TEST-org.apache.beam.sdk.transforms.CombineTest$BasicTests.xml [took 0s]
Raw output
java.lang.RuntimeException: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.runtimeExceptionFrom(SparkStructuredStreamingPipelineResult.java:56)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.unwrapCause(SparkStructuredStreamingPipelineResult.java:70)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:100)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:88)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:168)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:82)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:325)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:404)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:343)
at org.apache.beam.sdk.transforms.CombineTest$BasicTests.testCombinePerKeyLambda(CombineTest.java:909)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:288)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:282)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.lang.Thread.run(Thread.java:829)
Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.spark.sql.catalyst.analysis.Analyzer.<init>(Analyzer.scala:1753)
at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anon$1.<init>(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.analyzer(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.$anonfun$build$2(BaseSessionStateBuilder.scala:354)
at org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:87)
at org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:87)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:75)
at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:183)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:183)
at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:75)
at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:73)
at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:65)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:205)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:211)
at org.apache.spark.sql.Dataset$.apply(Dataset.scala:75)
at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:492)
at org.apache.beam.runners.spark.structuredstreaming.io.BoundedDatasetFactory.createDatasetFromRDD(BoundedDatasetFactory.java:103)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:57)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:38)
at org.apache.beam.runners.spark.structuredstreaming.translation.TransformTranslator.translate(TransformTranslator.java:79)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$TranslatingVisitor.visit(PipelineTranslator.java:282)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$PTransformVisitor.visitPrimitiveTransform(PipelineTranslator.java:477)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:593)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.access$500(TransformHierarchy.java:240)
at org.apache.beam.sdk.runners.TransformHierarchy.visit(TransformHierarchy.java:214)
at org.apache.beam.sdk.Pipeline.traverseTopologically(Pipeline.java:477)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator.translate(PipelineTranslator.java:129)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.translatePipeline(SparkStructuredStreamingRunner.java:190)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.lambda$run$0(SparkStructuredStreamingRunner.java:154)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
... 1 more
Check warning on line 0 in org.apache.beam.sdk.transforms.CombineTest$BasicTests
github-actions / Test Results
testHotKeyCombining (org.apache.beam.sdk.transforms.CombineTest$BasicTests) failed
runners/spark/3/build/test-results/validatesStructuredStreamingRunnerBatch/TEST-org.apache.beam.sdk.transforms.CombineTest$BasicTests.xml [took 0s]
Raw output
java.lang.RuntimeException: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.runtimeExceptionFrom(SparkStructuredStreamingPipelineResult.java:56)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.unwrapCause(SparkStructuredStreamingPipelineResult.java:70)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:100)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:88)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:168)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:82)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:325)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:404)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:343)
at org.apache.beam.sdk.transforms.CombineTest$BasicTests.testHotKeyCombining(CombineTest.java:738)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:288)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:282)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.lang.Thread.run(Thread.java:829)
Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.spark.sql.catalyst.analysis.Analyzer.<init>(Analyzer.scala:1753)
at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anon$1.<init>(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.analyzer(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.$anonfun$build$2(BaseSessionStateBuilder.scala:354)
at org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:87)
at org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:87)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:75)
at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:183)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:183)
at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:75)
at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:73)
at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:65)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:205)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:211)
at org.apache.spark.sql.Dataset$.apply(Dataset.scala:75)
at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:492)
at org.apache.beam.runners.spark.structuredstreaming.io.BoundedDatasetFactory.createDatasetFromRDD(BoundedDatasetFactory.java:103)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:57)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:38)
at org.apache.beam.runners.spark.structuredstreaming.translation.TransformTranslator.translate(TransformTranslator.java:79)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$TranslatingVisitor.visit(PipelineTranslator.java:282)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$PTransformVisitor.visitPrimitiveTransform(PipelineTranslator.java:477)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:593)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.access$500(TransformHierarchy.java:240)
at org.apache.beam.sdk.runners.TransformHierarchy.visit(TransformHierarchy.java:214)
at org.apache.beam.sdk.Pipeline.traverseTopologically(Pipeline.java:477)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator.translate(PipelineTranslator.java:129)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.translatePipeline(SparkStructuredStreamingRunner.java:190)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.lambda$run$0(SparkStructuredStreamingRunner.java:154)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
... 1 more
Check warning on line 0 in org.apache.beam.sdk.transforms.CombineTest$BasicTests
github-actions / Test Results
testSimpleCombineEmpty (org.apache.beam.sdk.transforms.CombineTest$BasicTests) failed
runners/spark/3/build/test-results/validatesStructuredStreamingRunnerBatch/TEST-org.apache.beam.sdk.transforms.CombineTest$BasicTests.xml [took 1s]
Raw output
java.lang.RuntimeException: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.runtimeExceptionFrom(SparkStructuredStreamingPipelineResult.java:56)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.unwrapCause(SparkStructuredStreamingPipelineResult.java:70)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:100)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:88)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:168)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:82)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:325)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:404)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:343)
at org.apache.beam.sdk.transforms.CombineTest$SharedTestBase.runTestSimpleCombine(CombineTest.java:122)
at org.apache.beam.sdk.transforms.CombineTest$BasicTests.testSimpleCombineEmpty(CombineTest.java:672)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:288)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:282)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.lang.Thread.run(Thread.java:829)
Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.spark.sql.catalyst.analysis.Analyzer.<init>(Analyzer.scala:1753)
at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anon$1.<init>(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.analyzer(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.$anonfun$build$2(BaseSessionStateBuilder.scala:354)
at org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:87)
at org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:87)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:75)
at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:183)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:183)
at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:75)
at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:73)
at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:65)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:205)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:211)
at org.apache.spark.sql.Dataset$.apply(Dataset.scala:75)
at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:480)
at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:511)
at org.apache.beam.runners.spark.structuredstreaming.translation.TransformTranslator$Context.createDataset(TransformTranslator.java:187)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ImpulseTranslatorBatch.translate(ImpulseTranslatorBatch.java:41)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ImpulseTranslatorBatch.translate(ImpulseTranslatorBatch.java:32)
at org.apache.beam.runners.spark.structuredstreaming.translation.TransformTranslator.translate(TransformTranslator.java:79)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$TranslatingVisitor.visit(PipelineTranslator.java:282)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$PTransformVisitor.visitPrimitiveTransform(PipelineTranslator.java:477)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:593)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.access$500(TransformHierarchy.java:240)
at org.apache.beam.sdk.runners.TransformHierarchy.visit(TransformHierarchy.java:214)
at org.apache.beam.sdk.Pipeline.traverseTopologically(Pipeline.java:477)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator.translate(PipelineTranslator.java:129)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.translatePipeline(SparkStructuredStreamingRunner.java:190)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.lambda$run$0(SparkStructuredStreamingRunner.java:154)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
... 1 more
Check warning on line 0 in org.apache.beam.sdk.transforms.CombineTest$BasicTests
github-actions / Test Results
testSimpleCombine (org.apache.beam.sdk.transforms.CombineTest$BasicTests) failed
runners/spark/3/build/test-results/validatesStructuredStreamingRunnerBatch/TEST-org.apache.beam.sdk.transforms.CombineTest$BasicTests.xml [took 0s]
Raw output
java.lang.RuntimeException: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.runtimeExceptionFrom(SparkStructuredStreamingPipelineResult.java:56)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.unwrapCause(SparkStructuredStreamingPipelineResult.java:70)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:100)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:88)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:168)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:82)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:325)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:404)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:343)
at org.apache.beam.sdk.transforms.CombineTest$SharedTestBase.runTestSimpleCombine(CombineTest.java:122)
at org.apache.beam.sdk.transforms.CombineTest$BasicTests.testSimpleCombine(CombineTest.java:663)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:288)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:282)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.lang.Thread.run(Thread.java:829)
Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.spark.sql.catalyst.analysis.Analyzer.<init>(Analyzer.scala:1753)
at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anon$1.<init>(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.analyzer(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.$anonfun$build$2(BaseSessionStateBuilder.scala:354)
at org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:87)
at org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:87)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:75)
at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:183)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:183)
at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:75)
at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:73)
at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:65)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:205)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:211)
at org.apache.spark.sql.Dataset$.apply(Dataset.scala:75)
at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:492)
at org.apache.beam.runners.spark.structuredstreaming.io.BoundedDatasetFactory.createDatasetFromRDD(BoundedDatasetFactory.java:103)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:57)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:38)
at org.apache.beam.runners.spark.structuredstreaming.translation.TransformTranslator.translate(TransformTranslator.java:79)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$TranslatingVisitor.visit(PipelineTranslator.java:282)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$PTransformVisitor.visitPrimitiveTransform(PipelineTranslator.java:477)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:593)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.access$500(TransformHierarchy.java:240)
at org.apache.beam.sdk.runners.TransformHierarchy.visit(TransformHierarchy.java:214)
at org.apache.beam.sdk.Pipeline.traverseTopologically(Pipeline.java:477)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator.translate(PipelineTranslator.java:129)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.translatePipeline(SparkStructuredStreamingRunner.java:190)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.lambda$run$0(SparkStructuredStreamingRunner.java:154)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
... 1 more
Check warning on line 0 in org.apache.beam.sdk.transforms.CombineTest$BasicTests
github-actions / Test Results
testBinaryCombinePerKeyLambda (org.apache.beam.sdk.transforms.CombineTest$BasicTests) failed
runners/spark/3/build/test-results/validatesStructuredStreamingRunnerBatch/TEST-org.apache.beam.sdk.transforms.CombineTest$BasicTests.xml [took 0s]
Raw output
java.lang.RuntimeException: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.runtimeExceptionFrom(SparkStructuredStreamingPipelineResult.java:56)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.unwrapCause(SparkStructuredStreamingPipelineResult.java:70)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:100)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:88)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:168)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:82)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:325)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:404)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:343)
at org.apache.beam.sdk.transforms.CombineTest$BasicTests.testBinaryCombinePerKeyLambda(CombineTest.java:923)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:288)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:282)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.lang.Thread.run(Thread.java:829)
Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.spark.sql.catalyst.analysis.Analyzer.<init>(Analyzer.scala:1753)
at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anon$1.<init>(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.analyzer(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.$anonfun$build$2(BaseSessionStateBuilder.scala:354)
at org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:87)
at org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:87)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:75)
at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:183)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:183)
at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:75)
at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:73)
at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:65)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:205)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:211)
at org.apache.spark.sql.Dataset$.apply(Dataset.scala:75)
at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:492)
at org.apache.beam.runners.spark.structuredstreaming.io.BoundedDatasetFactory.createDatasetFromRDD(BoundedDatasetFactory.java:103)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:57)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:38)
at org.apache.beam.runners.spark.structuredstreaming.translation.TransformTranslator.translate(TransformTranslator.java:79)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$TranslatingVisitor.visit(PipelineTranslator.java:282)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$PTransformVisitor.visitPrimitiveTransform(PipelineTranslator.java:477)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:593)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.access$500(TransformHierarchy.java:240)
at org.apache.beam.sdk.runners.TransformHierarchy.visit(TransformHierarchy.java:214)
at org.apache.beam.sdk.Pipeline.traverseTopologically(Pipeline.java:477)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator.translate(PipelineTranslator.java:129)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.translatePipeline(SparkStructuredStreamingRunner.java:190)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.lambda$run$0(SparkStructuredStreamingRunner.java:154)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
... 1 more
Check warning on line 0 in org.apache.beam.sdk.transforms.CombineTest$BasicTests
github-actions / Test Results
testHotKeyCombiningWithAccumulationMode (org.apache.beam.sdk.transforms.CombineTest$BasicTests) failed
runners/spark/3/build/test-results/validatesStructuredStreamingRunnerBatch/TEST-org.apache.beam.sdk.transforms.CombineTest$BasicTests.xml [took 0s]
Raw output
java.lang.RuntimeException: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.runtimeExceptionFrom(SparkStructuredStreamingPipelineResult.java:56)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.unwrapCause(SparkStructuredStreamingPipelineResult.java:70)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:100)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:88)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:168)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:82)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:325)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:404)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:343)
at org.apache.beam.sdk.transforms.CombineTest$BasicTests.testHotKeyCombiningWithAccumulationMode(CombineTest.java:763)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:288)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:282)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.lang.Thread.run(Thread.java:829)
Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.spark.sql.catalyst.analysis.Analyzer.<init>(Analyzer.scala:1753)
at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anon$1.<init>(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.analyzer(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.$anonfun$build$2(BaseSessionStateBuilder.scala:354)
at org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:87)
at org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:87)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:75)
at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:183)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:183)
at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:75)
at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:73)
at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:65)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:205)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:211)
at org.apache.spark.sql.Dataset$.apply(Dataset.scala:75)
at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:492)
at org.apache.beam.runners.spark.structuredstreaming.io.BoundedDatasetFactory.createDatasetFromRDD(BoundedDatasetFactory.java:103)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:57)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:38)
at org.apache.beam.runners.spark.structuredstreaming.translation.TransformTranslator.translate(TransformTranslator.java:79)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$TranslatingVisitor.visit(PipelineTranslator.java:282)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$PTransformVisitor.visitPrimitiveTransform(PipelineTranslator.java:477)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:593)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.access$500(TransformHierarchy.java:240)
at org.apache.beam.sdk.runners.TransformHierarchy.visit(TransformHierarchy.java:214)
at org.apache.beam.sdk.Pipeline.traverseTopologically(Pipeline.java:477)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator.translate(PipelineTranslator.java:129)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.translatePipeline(SparkStructuredStreamingRunner.java:190)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.lambda$run$0(SparkStructuredStreamingRunner.java:154)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
... 1 more
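Editor's note (a sketch of mine, not part of the CI output): every trace in this run reports "java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$". That wording is the JVM's symptom for a class whose static initializer already failed once: the first test in the worker that touched the class would normally have seen an ExceptionInInitializerError carrying the real cause, and every later test only gets this bare NoClassDefFoundError with no cause attached. The minimal, self-contained Java sketch below (all names are mine, purely illustrative) reproduces that behavior:

public class StaticInitFailureDemo {
  static class Unlucky {
    static {
      // Simulate whatever first broke VirtualColumn$'s initialization.
      if (true) {
        throw new IllegalStateException("original root cause, only reported on first use");
      }
    }
    static void touch() {}
  }

  public static void main(String[] args) {
    try {
      Unlucky.touch(); // first use: ExceptionInInitializerError with the real cause attached
    } catch (Throwable t) {
      System.out.println("first use:  " + t + " <- caused by " + t.getCause());
    }
    try {
      Unlucky.touch(); // every later use: bare "Could not initialize class ...$Unlucky"
    } catch (Throwable t) {
      System.out.println("later uses: " + t);
    }
  }
}

So the informative stack trace is the very first failure in each test worker's log, not the identical repeats collected in these annotations.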
Check warning on line 0 in org.apache.beam.sdk.transforms.CombineTest$BasicTests
github-actions / Test Results
testBasicCombineEmpty (org.apache.beam.sdk.transforms.CombineTest$BasicTests) failed
runners/spark/3/build/test-results/validatesStructuredStreamingRunnerBatch/TEST-org.apache.beam.sdk.transforms.CombineTest$BasicTests.xml [took 0s]
Raw output
java.lang.RuntimeException: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.runtimeExceptionFrom(SparkStructuredStreamingPipelineResult.java:56)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.unwrapCause(SparkStructuredStreamingPipelineResult.java:70)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:100)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:88)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:168)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:82)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:325)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:404)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:343)
at org.apache.beam.sdk.transforms.CombineTest$SharedTestBase.runTestBasicCombine(CombineTest.java:141)
at org.apache.beam.sdk.transforms.CombineTest$BasicTests.testBasicCombineEmpty(CombineTest.java:689)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:288)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:282)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.lang.Thread.run(Thread.java:829)
Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.spark.sql.catalyst.analysis.Analyzer.<init>(Analyzer.scala:1753)
at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anon$1.<init>(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.analyzer(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.$anonfun$build$2(BaseSessionStateBuilder.scala:354)
at org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:87)
at org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:87)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:75)
at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:183)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:183)
at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:75)
at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:73)
at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:65)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:205)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:211)
at org.apache.spark.sql.Dataset$.apply(Dataset.scala:75)
at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:480)
at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:511)
at org.apache.beam.runners.spark.structuredstreaming.translation.TransformTranslator$Context.createDataset(TransformTranslator.java:187)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ImpulseTranslatorBatch.translate(ImpulseTranslatorBatch.java:41)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ImpulseTranslatorBatch.translate(ImpulseTranslatorBatch.java:32)
at org.apache.beam.runners.spark.structuredstreaming.translation.TransformTranslator.translate(TransformTranslator.java:79)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$TranslatingVisitor.visit(PipelineTranslator.java:282)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$PTransformVisitor.visitPrimitiveTransform(PipelineTranslator.java:477)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:593)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.access$500(TransformHierarchy.java:240)
at org.apache.beam.sdk.runners.TransformHierarchy.visit(TransformHierarchy.java:214)
at org.apache.beam.sdk.Pipeline.traverseTopologically(Pipeline.java:477)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator.translate(PipelineTranslator.java:129)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.translatePipeline(SparkStructuredStreamingRunner.java:190)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.lambda$run$0(SparkStructuredStreamingRunner.java:154)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
... 1 more
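Editor's note: the two traces above fail from different translation paths (BoundedDatasetFactory.createDatasetFromRDD for the Read translation versus SparkSession.createDataset inside ImpulseTranslatorBatch), but both die while Spark constructs its Analyzer, so the session state itself is broken rather than any particular Beam transform. A quick way to surface the hidden root cause is to force-initialize the failing class in isolation on the same test classpath. This is a hedged diagnostic sketch of mine (VirtualColumnInitProbe is not an existing class in the repo), not something the build currently does:

public class VirtualColumnInitProbe {
  public static void main(String[] args) throws Exception {
    try {
      // true => run static initializers; class name copied verbatim from the traces above.
      Class.forName(
          "org.apache.spark.sql.catalyst.expressions.VirtualColumn$",
          true,
          VirtualColumnInitProbe.class.getClassLoader());
      System.out.println("VirtualColumn$ initialized cleanly on this classpath");
    } catch (Throwable t) {
      // On the broken classpath this should be an ExceptionInInitializerError; its getCause()
      // names the real problem that the repeated NoClassDefFoundErrors no longer carry.
      t.printStackTrace();
    }
  }
}

Run against the same runtime classpath as the runners/spark/3 validatesStructuredStreamingRunnerBatch task; on a healthy classpath it prints the success line instead.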
Check warning on line 0 in org.apache.beam.sdk.transforms.ReifyTimestampsTest
github-actions / Test Results
extractFromValuesWhenValueTimestampedLaterSucceeds (org.apache.beam.sdk.transforms.ReifyTimestampsTest) failed
runners/spark/3/build/test-results/validatesStructuredStreamingRunnerBatch/TEST-org.apache.beam.sdk.transforms.ReifyTimestampsTest.xml [took 0s]
Raw output
java.lang.RuntimeException: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.runtimeExceptionFrom(SparkStructuredStreamingPipelineResult.java:56)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.unwrapCause(SparkStructuredStreamingPipelineResult.java:70)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:100)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:88)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:168)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:82)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:325)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:404)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:343)
at org.apache.beam.sdk.transforms.ReifyTimestampsTest.extractFromValuesWhenValueTimestampedLaterSucceeds(ReifyTimestampsTest.java:128)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:331)
at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100)
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63)
at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329)
at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293)
at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
at org.junit.runners.ParentRunner.run(ParentRunner.java:413)
at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecutor.runTestClass(JUnitTestClassExecutor.java:112)
at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecutor.execute(JUnitTestClassExecutor.java:58)
at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecutor.execute(JUnitTestClassExecutor.java:40)
at org.gradle.api.internal.tasks.testing.junit.AbstractJUnitTestClassProcessor.processTestClass(AbstractJUnitTestClassProcessor.java:60)
at org.gradle.api.internal.tasks.testing.SuiteTestClassProcessor.processTestClass(SuiteTestClassProcessor.java:52)
at jdk.internal.reflect.GeneratedMethodAccessor13.invoke(Unknown Source)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:36)
at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24)
at org.gradle.internal.dispatch.ContextClassLoaderDispatch.dispatch(ContextClassLoaderDispatch.java:33)
at org.gradle.internal.dispatch.ProxyDispatchAdapter$DispatchingInvocationHandler.invoke(ProxyDispatchAdapter.java:94)
at com.sun.proxy.$Proxy2.processTestClass(Unknown Source)
at org.gradle.api.internal.tasks.testing.worker.TestWorker$2.run(TestWorker.java:176)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.executeAndMaintainThreadName(TestWorker.java:129)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:100)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:60)
at org.gradle.process.internal.worker.child.ActionExecutionWorker.execute(ActionExecutionWorker.java:56)
at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:113)
at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:65)
at worker.org.gradle.process.internal.worker.GradleWorkerMain.run(GradleWorkerMain.java:69)
at worker.org.gradle.process.internal.worker.GradleWorkerMain.main(GradleWorkerMain.java:74)
Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.spark.sql.catalyst.analysis.Analyzer.<init>(Analyzer.scala:1753)
at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anon$1.<init>(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.analyzer(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.$anonfun$build$2(BaseSessionStateBuilder.scala:354)
at org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:87)
at org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:87)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:75)
at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:183)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:183)
at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:75)
at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:73)
at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:65)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:205)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:211)
at org.apache.spark.sql.Dataset$.apply(Dataset.scala:75)
at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:492)
at org.apache.beam.runners.spark.structuredstreaming.io.BoundedDatasetFactory.createDatasetFromRDD(BoundedDatasetFactory.java:103)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:57)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:38)
at org.apache.beam.runners.spark.structuredstreaming.translation.TransformTranslator.translate(TransformTranslator.java:79)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$TranslatingVisitor.visit(PipelineTranslator.java:282)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$PTransformVisitor.visitPrimitiveTransform(PipelineTranslator.java:477)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:593)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.access$500(TransformHierarchy.java:240)
at org.apache.beam.sdk.runners.TransformHierarchy.visit(TransformHierarchy.java:214)
at org.apache.beam.sdk.Pipeline.traverseTopologically(Pipeline.java:477)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator.translate(PipelineTranslator.java:129)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.translatePipeline(SparkStructuredStreamingRunner.java:190)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.lambda$run$0(SparkStructuredStreamingRunner.java:154)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
at java.base/java.lang.Thread.run(Thread.java:829)
Check warning on line 0 in org.apache.beam.sdk.transforms.ReifyTimestampsTest
github-actions / Test Results
extractFromValuesSucceeds (org.apache.beam.sdk.transforms.ReifyTimestampsTest) failed
runners/spark/3/build/test-results/validatesStructuredStreamingRunnerBatch/TEST-org.apache.beam.sdk.transforms.ReifyTimestampsTest.xml [took 0s]
Raw output
java.lang.RuntimeException: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.runtimeExceptionFrom(SparkStructuredStreamingPipelineResult.java:56)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.unwrapCause(SparkStructuredStreamingPipelineResult.java:70)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:100)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:88)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:168)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:82)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:325)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:404)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:343)
at org.apache.beam.sdk.transforms.ReifyTimestampsTest.extractFromValuesSucceeds(ReifyTimestampsTest.java:92)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:331)
at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100)
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63)
at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329)
at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293)
at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
at org.junit.runners.ParentRunner.run(ParentRunner.java:413)
at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecutor.runTestClass(JUnitTestClassExecutor.java:112)
at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecutor.execute(JUnitTestClassExecutor.java:58)
at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecutor.execute(JUnitTestClassExecutor.java:40)
at org.gradle.api.internal.tasks.testing.junit.AbstractJUnitTestClassProcessor.processTestClass(AbstractJUnitTestClassProcessor.java:60)
at org.gradle.api.internal.tasks.testing.SuiteTestClassProcessor.processTestClass(SuiteTestClassProcessor.java:52)
at jdk.internal.reflect.GeneratedMethodAccessor13.invoke(Unknown Source)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:36)
at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24)
at org.gradle.internal.dispatch.ContextClassLoaderDispatch.dispatch(ContextClassLoaderDispatch.java:33)
at org.gradle.internal.dispatch.ProxyDispatchAdapter$DispatchingInvocationHandler.invoke(ProxyDispatchAdapter.java:94)
at com.sun.proxy.$Proxy2.processTestClass(Unknown Source)
at org.gradle.api.internal.tasks.testing.worker.TestWorker$2.run(TestWorker.java:176)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.executeAndMaintainThreadName(TestWorker.java:129)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:100)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:60)
at org.gradle.process.internal.worker.child.ActionExecutionWorker.execute(ActionExecutionWorker.java:56)
at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:113)
at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:65)
at worker.org.gradle.process.internal.worker.GradleWorkerMain.run(GradleWorkerMain.java:69)
at worker.org.gradle.process.internal.worker.GradleWorkerMain.main(GradleWorkerMain.java:74)
Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.spark.sql.catalyst.analysis.Analyzer.<init>(Analyzer.scala:1753)
at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anon$1.<init>(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.analyzer(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.$anonfun$build$2(BaseSessionStateBuilder.scala:354)
at org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:87)
at org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:87)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:75)
at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:183)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:183)
at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:75)
at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:73)
at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:65)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:205)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:211)
at org.apache.spark.sql.Dataset$.apply(Dataset.scala:75)
at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:492)
at org.apache.beam.runners.spark.structuredstreaming.io.BoundedDatasetFactory.createDatasetFromRDD(BoundedDatasetFactory.java:103)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:57)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:38)
at org.apache.beam.runners.spark.structuredstreaming.translation.TransformTranslator.translate(TransformTranslator.java:79)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$TranslatingVisitor.visit(PipelineTranslator.java:282)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$PTransformVisitor.visitPrimitiveTransform(PipelineTranslator.java:477)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:593)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.access$500(TransformHierarchy.java:240)
at org.apache.beam.sdk.runners.TransformHierarchy.visit(TransformHierarchy.java:214)
at org.apache.beam.sdk.Pipeline.traverseTopologically(Pipeline.java:477)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator.translate(PipelineTranslator.java:129)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.translatePipeline(SparkStructuredStreamingRunner.java:190)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.lambda$run$0(SparkStructuredStreamingRunner.java:154)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
at java.base/java.lang.Thread.run(Thread.java:829)
Check warning on line 0 in org.apache.beam.sdk.transforms.ReifyTimestampsTest
github-actions / Test Results
inValuesSucceeds (org.apache.beam.sdk.transforms.ReifyTimestampsTest) failed
runners/spark/3/build/test-results/validatesStructuredStreamingRunnerBatch/TEST-org.apache.beam.sdk.transforms.ReifyTimestampsTest.xml [took 0s]
Raw output
java.lang.RuntimeException: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.runtimeExceptionFrom(SparkStructuredStreamingPipelineResult.java:56)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.unwrapCause(SparkStructuredStreamingPipelineResult.java:70)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:100)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:88)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:168)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:82)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:325)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:404)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:343)
at org.apache.beam.sdk.transforms.ReifyTimestampsTest.inValuesSucceeds(ReifyTimestampsTest.java:60)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:331)
at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100)
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63)
at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329)
at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293)
at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
at org.junit.runners.ParentRunner.run(ParentRunner.java:413)
at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecutor.runTestClass(JUnitTestClassExecutor.java:112)
at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecutor.execute(JUnitTestClassExecutor.java:58)
at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecutor.execute(JUnitTestClassExecutor.java:40)
at org.gradle.api.internal.tasks.testing.junit.AbstractJUnitTestClassProcessor.processTestClass(AbstractJUnitTestClassProcessor.java:60)
at org.gradle.api.internal.tasks.testing.SuiteTestClassProcessor.processTestClass(SuiteTestClassProcessor.java:52)
at jdk.internal.reflect.GeneratedMethodAccessor13.invoke(Unknown Source)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:36)
at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24)
at org.gradle.internal.dispatch.ContextClassLoaderDispatch.dispatch(ContextClassLoaderDispatch.java:33)
at org.gradle.internal.dispatch.ProxyDispatchAdapter$DispatchingInvocationHandler.invoke(ProxyDispatchAdapter.java:94)
at com.sun.proxy.$Proxy2.processTestClass(Unknown Source)
at org.gradle.api.internal.tasks.testing.worker.TestWorker$2.run(TestWorker.java:176)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.executeAndMaintainThreadName(TestWorker.java:129)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:100)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:60)
at org.gradle.process.internal.worker.child.ActionExecutionWorker.execute(ActionExecutionWorker.java:56)
at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:113)
at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:65)
at worker.org.gradle.process.internal.worker.GradleWorkerMain.run(GradleWorkerMain.java:69)
at worker.org.gradle.process.internal.worker.GradleWorkerMain.main(GradleWorkerMain.java:74)
Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.spark.sql.catalyst.analysis.Analyzer.<init>(Analyzer.scala:1753)
at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anon$1.<init>(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.analyzer(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.$anonfun$build$2(BaseSessionStateBuilder.scala:354)
at org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:87)
at org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:87)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:75)
at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:183)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:183)
at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:75)
at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:73)
at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:65)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:205)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:211)
at org.apache.spark.sql.Dataset$.apply(Dataset.scala:75)
at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:492)
at org.apache.beam.runners.spark.structuredstreaming.io.BoundedDatasetFactory.createDatasetFromRDD(BoundedDatasetFactory.java:103)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:57)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:38)
at org.apache.beam.runners.spark.structuredstreaming.translation.TransformTranslator.translate(TransformTranslator.java:79)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$TranslatingVisitor.visit(PipelineTranslator.java:282)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$PTransformVisitor.visitPrimitiveTransform(PipelineTranslator.java:477)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:593)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.access$500(TransformHierarchy.java:240)
at org.apache.beam.sdk.runners.TransformHierarchy.visit(TransformHierarchy.java:214)
at org.apache.beam.sdk.Pipeline.traverseTopologically(Pipeline.java:477)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator.translate(PipelineTranslator.java:129)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.translatePipeline(SparkStructuredStreamingRunner.java:190)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.lambda$run$0(SparkStructuredStreamingRunner.java:154)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
at java.base/java.lang.Thread.run(Thread.java:829)
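Editor's note: since this PR's subject is an ANTLR change, one plausible suspect, not proven by these traces, is an org.antlr:antlr4-runtime version on the Spark 3 test classpath that differs from the one spark-catalyst was built against, which can make catalyst classes fail static initialization. The small sketch below (ClasspathReport is my own name, purely for illustration) prints which ANTLR runtime and which catalyst jar the tests actually load, so that conflict can be confirmed or ruled out:

import org.antlr.v4.runtime.RuntimeMetaData;

public class ClasspathReport {
  // Best-effort jar location of a class; bootstrap classes have no CodeSource.
  private static String jarOf(Class<?> c) {
    java.security.CodeSource src = c.getProtectionDomain().getCodeSource();
    return src == null ? "<bootstrap/unknown>" : src.getLocation().toString();
  }

  public static void main(String[] args) throws Exception {
    System.out.println("antlr4-runtime version: " + RuntimeMetaData.VERSION);
    System.out.println("antlr4-runtime jar:     " + jarOf(RuntimeMetaData.class));
    // Loaded reflectively so the snippet only needs Spark on the runtime classpath.
    Class<?> analyzer = Class.forName("org.apache.spark.sql.catalyst.analysis.Analyzer");
    System.out.println("spark-catalyst jar:     " + jarOf(analyzer));
  }
}

If the reported ANTLR runtime version does not match the one Spark 3 expects, pinning or shading the runtime for the Spark runner's test configuration would be the natural follow-up; that remedy is an assumption, not something these results demonstrate.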
Check warning on line 0 in org.apache.beam.sdk.transforms.GroupByKeyTest$WindowTests
github-actions / Test Results
testRewindowWithTimestampCombiner (org.apache.beam.sdk.transforms.GroupByKeyTest$WindowTests) failed
runners/spark/3/build/test-results/validatesStructuredStreamingRunnerBatch/TEST-org.apache.beam.sdk.transforms.GroupByKeyTest$WindowTests.xml [took 0s]
Raw output
java.lang.RuntimeException: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.runtimeExceptionFrom(SparkStructuredStreamingPipelineResult.java:56)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.unwrapCause(SparkStructuredStreamingPipelineResult.java:70)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:100)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:88)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:168)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:82)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:325)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:404)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:343)
at org.apache.beam.sdk.transforms.GroupByKeyTest$WindowTests.testRewindowWithTimestampCombiner(GroupByKeyTest.java:752)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:331)
at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100)
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63)
at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329)
at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293)
at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
at org.junit.runners.ParentRunner.run(ParentRunner.java:413)
at org.junit.runners.Suite.runChild(Suite.java:128)
at org.junit.runners.Suite.runChild(Suite.java:27)
at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329)
at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293)
at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
at org.junit.runners.ParentRunner.run(ParentRunner.java:413)
at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecutor.runTestClass(JUnitTestClassExecutor.java:112)
at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecutor.execute(JUnitTestClassExecutor.java:58)
at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecutor.execute(JUnitTestClassExecutor.java:40)
at org.gradle.api.internal.tasks.testing.junit.AbstractJUnitTestClassProcessor.processTestClass(AbstractJUnitTestClassProcessor.java:60)
at org.gradle.api.internal.tasks.testing.SuiteTestClassProcessor.processTestClass(SuiteTestClassProcessor.java:52)
at jdk.internal.reflect.GeneratedMethodAccessor2.invoke(Unknown Source)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:36)
at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24)
at org.gradle.internal.dispatch.ContextClassLoaderDispatch.dispatch(ContextClassLoaderDispatch.java:33)
at org.gradle.internal.dispatch.ProxyDispatchAdapter$DispatchingInvocationHandler.invoke(ProxyDispatchAdapter.java:94)
at com.sun.proxy.$Proxy2.processTestClass(Unknown Source)
at org.gradle.api.internal.tasks.testing.worker.TestWorker$2.run(TestWorker.java:176)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.executeAndMaintainThreadName(TestWorker.java:129)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:100)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:60)
at org.gradle.process.internal.worker.child.ActionExecutionWorker.execute(ActionExecutionWorker.java:56)
at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:113)
at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:65)
at worker.org.gradle.process.internal.worker.GradleWorkerMain.run(GradleWorkerMain.java:69)
at worker.org.gradle.process.internal.worker.GradleWorkerMain.main(GradleWorkerMain.java:74)
Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.spark.sql.catalyst.analysis.Analyzer.<init>(Analyzer.scala:1753)
at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anon$1.<init>(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.analyzer(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.$anonfun$build$2(BaseSessionStateBuilder.scala:354)
at org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:87)
at org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:87)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:75)
at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:183)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:183)
at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:75)
at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:73)
at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:65)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:205)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:211)
at org.apache.spark.sql.Dataset$.apply(Dataset.scala:75)
at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:492)
at org.apache.beam.runners.spark.structuredstreaming.io.BoundedDatasetFactory.createDatasetFromRDD(BoundedDatasetFactory.java:103)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:57)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:38)
at org.apache.beam.runners.spark.structuredstreaming.translation.TransformTranslator.translate(TransformTranslator.java:79)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$TranslatingVisitor.visit(PipelineTranslator.java:282)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$PTransformVisitor.visitPrimitiveTransform(PipelineTranslator.java:477)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:593)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.access$500(TransformHierarchy.java:240)
at org.apache.beam.sdk.runners.TransformHierarchy.visit(TransformHierarchy.java:214)
at org.apache.beam.sdk.Pipeline.traverseTopologically(Pipeline.java:477)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator.translate(PipelineTranslator.java:129)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.translatePipeline(SparkStructuredStreamingRunner.java:190)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.lambda$run$0(SparkStructuredStreamingRunner.java:154)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
at java.base/java.lang.Thread.run(Thread.java:829)
Check warning on line 0 in org.apache.beam.sdk.transforms.GroupByKeyTest$WindowTests
github-actions / Test Results
testGroupByKeyMergingWindows (org.apache.beam.sdk.transforms.GroupByKeyTest$WindowTests) failed
runners/spark/3/build/test-results/validatesStructuredStreamingRunnerBatch/TEST-org.apache.beam.sdk.transforms.GroupByKeyTest$WindowTests.xml [took 0s]
Raw output
java.lang.RuntimeException: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.runtimeExceptionFrom(SparkStructuredStreamingPipelineResult.java:56)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.unwrapCause(SparkStructuredStreamingPipelineResult.java:70)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:100)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:88)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:168)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:82)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:325)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:404)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:343)
at org.apache.beam.sdk.transforms.GroupByKeyTest$WindowTests.testGroupByKeyMergingWindows(GroupByKeyTest.java:718)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:331)
at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100)
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63)
at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329)
at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293)
at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
at org.junit.runners.ParentRunner.run(ParentRunner.java:413)
at org.junit.runners.Suite.runChild(Suite.java:128)
at org.junit.runners.Suite.runChild(Suite.java:27)
at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329)
at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293)
at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
at org.junit.runners.ParentRunner.run(ParentRunner.java:413)
at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecutor.runTestClass(JUnitTestClassExecutor.java:112)
at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecutor.execute(JUnitTestClassExecutor.java:58)
at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecutor.execute(JUnitTestClassExecutor.java:40)
at org.gradle.api.internal.tasks.testing.junit.AbstractJUnitTestClassProcessor.processTestClass(AbstractJUnitTestClassProcessor.java:60)
at org.gradle.api.internal.tasks.testing.SuiteTestClassProcessor.processTestClass(SuiteTestClassProcessor.java:52)
at jdk.internal.reflect.GeneratedMethodAccessor2.invoke(Unknown Source)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:36)
at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24)
at org.gradle.internal.dispatch.ContextClassLoaderDispatch.dispatch(ContextClassLoaderDispatch.java:33)
at org.gradle.internal.dispatch.ProxyDispatchAdapter$DispatchingInvocationHandler.invoke(ProxyDispatchAdapter.java:94)
at com.sun.proxy.$Proxy2.processTestClass(Unknown Source)
at org.gradle.api.internal.tasks.testing.worker.TestWorker$2.run(TestWorker.java:176)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.executeAndMaintainThreadName(TestWorker.java:129)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:100)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:60)
at org.gradle.process.internal.worker.child.ActionExecutionWorker.execute(ActionExecutionWorker.java:56)
at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:113)
at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:65)
at worker.org.gradle.process.internal.worker.GradleWorkerMain.run(GradleWorkerMain.java:69)
at worker.org.gradle.process.internal.worker.GradleWorkerMain.main(GradleWorkerMain.java:74)
Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.spark.sql.catalyst.analysis.Analyzer.<init>(Analyzer.scala:1753)
at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anon$1.<init>(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.analyzer(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.$anonfun$build$2(BaseSessionStateBuilder.scala:354)
at org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:87)
at org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:87)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:75)
at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:183)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:183)
at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:75)
at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:73)
at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:65)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:205)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:211)
at org.apache.spark.sql.Dataset$.apply(Dataset.scala:75)
at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:492)
at org.apache.beam.runners.spark.structuredstreaming.io.BoundedDatasetFactory.createDatasetFromRDD(BoundedDatasetFactory.java:103)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:57)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:38)
at org.apache.beam.runners.spark.structuredstreaming.translation.TransformTranslator.translate(TransformTranslator.java:79)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$TranslatingVisitor.visit(PipelineTranslator.java:282)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$PTransformVisitor.visitPrimitiveTransform(PipelineTranslator.java:477)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:593)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.access$500(TransformHierarchy.java:240)
at org.apache.beam.sdk.runners.TransformHierarchy.visit(TransformHierarchy.java:214)
at org.apache.beam.sdk.Pipeline.traverseTopologically(Pipeline.java:477)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator.translate(PipelineTranslator.java:129)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.translatePipeline(SparkStructuredStreamingRunner.java:190)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.lambda$run$0(SparkStructuredStreamingRunner.java:154)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
at java.base/java.lang.Thread.run(Thread.java:829)
Check warning on line 0 in org.apache.beam.sdk.transforms.GroupByKeyTest$WindowTests
github-actions / Test Results
testGroupByKeyMultipleWindows (org.apache.beam.sdk.transforms.GroupByKeyTest$WindowTests) failed
runners/spark/3/build/test-results/validatesStructuredStreamingRunnerBatch/TEST-org.apache.beam.sdk.transforms.GroupByKeyTest$WindowTests.xml [took 0s]
Raw output
java.lang.RuntimeException: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.runtimeExceptionFrom(SparkStructuredStreamingPipelineResult.java:56)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.unwrapCause(SparkStructuredStreamingPipelineResult.java:70)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:100)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:88)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:168)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:82)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:325)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:404)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:343)
at org.apache.beam.sdk.transforms.GroupByKeyTest$WindowTests.testGroupByKeyMultipleWindows(GroupByKeyTest.java:690)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:331)
at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100)
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63)
at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329)
at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293)
at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
at org.junit.runners.ParentRunner.run(ParentRunner.java:413)
at org.junit.runners.Suite.runChild(Suite.java:128)
at org.junit.runners.Suite.runChild(Suite.java:27)
at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329)
at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293)
at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
at org.junit.runners.ParentRunner.run(ParentRunner.java:413)
at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecutor.runTestClass(JUnitTestClassExecutor.java:112)
at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecutor.execute(JUnitTestClassExecutor.java:58)
at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecutor.execute(JUnitTestClassExecutor.java:40)
at org.gradle.api.internal.tasks.testing.junit.AbstractJUnitTestClassProcessor.processTestClass(AbstractJUnitTestClassProcessor.java:60)
at org.gradle.api.internal.tasks.testing.SuiteTestClassProcessor.processTestClass(SuiteTestClassProcessor.java:52)
at jdk.internal.reflect.GeneratedMethodAccessor2.invoke(Unknown Source)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:36)
at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24)
at org.gradle.internal.dispatch.ContextClassLoaderDispatch.dispatch(ContextClassLoaderDispatch.java:33)
at org.gradle.internal.dispatch.ProxyDispatchAdapter$DispatchingInvocationHandler.invoke(ProxyDispatchAdapter.java:94)
at com.sun.proxy.$Proxy2.processTestClass(Unknown Source)
at org.gradle.api.internal.tasks.testing.worker.TestWorker$2.run(TestWorker.java:176)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.executeAndMaintainThreadName(TestWorker.java:129)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:100)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:60)
at org.gradle.process.internal.worker.child.ActionExecutionWorker.execute(ActionExecutionWorker.java:56)
at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:113)
at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:65)
at worker.org.gradle.process.internal.worker.GradleWorkerMain.run(GradleWorkerMain.java:69)
at worker.org.gradle.process.internal.worker.GradleWorkerMain.main(GradleWorkerMain.java:74)
Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.spark.sql.catalyst.analysis.Analyzer.<init>(Analyzer.scala:1753)
at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anon$1.<init>(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.analyzer(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.$anonfun$build$2(BaseSessionStateBuilder.scala:354)
at org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:87)
at org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:87)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:75)
at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:183)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:183)
at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:75)
at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:73)
at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:65)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:205)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:211)
at org.apache.spark.sql.Dataset$.apply(Dataset.scala:75)
at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:492)
at org.apache.beam.runners.spark.structuredstreaming.io.BoundedDatasetFactory.createDatasetFromRDD(BoundedDatasetFactory.java:103)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:57)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:38)
at org.apache.beam.runners.spark.structuredstreaming.translation.TransformTranslator.translate(TransformTranslator.java:79)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$TranslatingVisitor.visit(PipelineTranslator.java:282)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$PTransformVisitor.visitPrimitiveTransform(PipelineTranslator.java:477)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:593)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.access$500(TransformHierarchy.java:240)
at org.apache.beam.sdk.runners.TransformHierarchy.visit(TransformHierarchy.java:214)
at org.apache.beam.sdk.Pipeline.traverseTopologically(Pipeline.java:477)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator.translate(PipelineTranslator.java:129)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.translatePipeline(SparkStructuredStreamingRunner.java:190)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.lambda$run$0(SparkStructuredStreamingRunner.java:154)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
at java.base/java.lang.Thread.run(Thread.java:829)
Check warning on line 0 in org.apache.beam.sdk.transforms.GroupByKeyTest$WindowTests
github-actions / Test Results
testGroupByKeyAndWindows (org.apache.beam.sdk.transforms.GroupByKeyTest$WindowTests) failed
runners/spark/3/build/test-results/validatesStructuredStreamingRunnerBatch/TEST-org.apache.beam.sdk.transforms.GroupByKeyTest$WindowTests.xml [took 0s]
Raw output
java.lang.RuntimeException: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.runtimeExceptionFrom(SparkStructuredStreamingPipelineResult.java:56)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.unwrapCause(SparkStructuredStreamingPipelineResult.java:70)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:100)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:88)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:168)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:82)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:325)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:404)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:343)
at org.apache.beam.sdk.transforms.GroupByKeyTest$WindowTests.testGroupByKeyAndWindows(GroupByKeyTest.java:660)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:331)
at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100)
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63)
at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329)
at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293)
at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
at org.junit.runners.ParentRunner.run(ParentRunner.java:413)
at org.junit.runners.Suite.runChild(Suite.java:128)
at org.junit.runners.Suite.runChild(Suite.java:27)
at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329)
at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293)
at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
at org.junit.runners.ParentRunner.run(ParentRunner.java:413)
at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecutor.runTestClass(JUnitTestClassExecutor.java:112)
at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecutor.execute(JUnitTestClassExecutor.java:58)
at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecutor.execute(JUnitTestClassExecutor.java:40)
at org.gradle.api.internal.tasks.testing.junit.AbstractJUnitTestClassProcessor.processTestClass(AbstractJUnitTestClassProcessor.java:60)
at org.gradle.api.internal.tasks.testing.SuiteTestClassProcessor.processTestClass(SuiteTestClassProcessor.java:52)
at jdk.internal.reflect.GeneratedMethodAccessor2.invoke(Unknown Source)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:36)
at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24)
at org.gradle.internal.dispatch.ContextClassLoaderDispatch.dispatch(ContextClassLoaderDispatch.java:33)
at org.gradle.internal.dispatch.ProxyDispatchAdapter$DispatchingInvocationHandler.invoke(ProxyDispatchAdapter.java:94)
at com.sun.proxy.$Proxy2.processTestClass(Unknown Source)
at org.gradle.api.internal.tasks.testing.worker.TestWorker$2.run(TestWorker.java:176)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.executeAndMaintainThreadName(TestWorker.java:129)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:100)
at org.gradle.api.internal.tasks.testing.worker.TestWorker.execute(TestWorker.java:60)
at org.gradle.process.internal.worker.child.ActionExecutionWorker.execute(ActionExecutionWorker.java:56)
at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:113)
at org.gradle.process.internal.worker.child.SystemApplicationClassLoaderWorker.call(SystemApplicationClassLoaderWorker.java:65)
at worker.org.gradle.process.internal.worker.GradleWorkerMain.run(GradleWorkerMain.java:69)
at worker.org.gradle.process.internal.worker.GradleWorkerMain.main(GradleWorkerMain.java:74)
Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.spark.sql.catalyst.analysis.Analyzer.<init>(Analyzer.scala:1753)
at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anon$1.<init>(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.analyzer(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.$anonfun$build$2(BaseSessionStateBuilder.scala:354)
at org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:87)
at org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:87)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:75)
at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:183)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:183)
at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:75)
at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:73)
at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:65)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:205)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:211)
at org.apache.spark.sql.Dataset$.apply(Dataset.scala:75)
at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:492)
at org.apache.beam.runners.spark.structuredstreaming.io.BoundedDatasetFactory.createDatasetFromRDD(BoundedDatasetFactory.java:103)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:57)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:38)
at org.apache.beam.runners.spark.structuredstreaming.translation.TransformTranslator.translate(TransformTranslator.java:79)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$TranslatingVisitor.visit(PipelineTranslator.java:282)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$PTransformVisitor.visitPrimitiveTransform(PipelineTranslator.java:477)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:593)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.access$500(TransformHierarchy.java:240)
at org.apache.beam.sdk.runners.TransformHierarchy.visit(TransformHierarchy.java:214)
at org.apache.beam.sdk.Pipeline.traverseTopologically(Pipeline.java:477)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator.translate(PipelineTranslator.java:129)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.translatePipeline(SparkStructuredStreamingRunner.java:190)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.lambda$run$0(SparkStructuredStreamingRunner.java:154)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
at java.base/java.lang.Thread.run(Thread.java:829)
Check warning on line 0 in org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests
github-actions / Test Results
testSameSideInputReadTwice (org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests) failed
runners/spark/3/build/test-results/validatesStructuredStreamingRunnerBatch/TEST-org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests.xml [took 0s]
Raw output
java.lang.RuntimeException: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.runtimeExceptionFrom(SparkStructuredStreamingPipelineResult.java:56)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.unwrapCause(SparkStructuredStreamingPipelineResult.java:70)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:100)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:88)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:168)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:82)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:325)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:404)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:343)
at org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests.testSameSideInputReadTwice(ParDoTest.java:905)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:288)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:282)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.lang.Thread.run(Thread.java:829)
Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.spark.sql.catalyst.analysis.Analyzer.<init>(Analyzer.scala:1753)
at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anon$1.<init>(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.analyzer(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.$anonfun$build$2(BaseSessionStateBuilder.scala:354)
at org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:87)
at org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:87)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:75)
at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:183)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:183)
at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:75)
at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:73)
at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:65)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:205)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:211)
at org.apache.spark.sql.Dataset$.apply(Dataset.scala:75)
at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:492)
at org.apache.beam.runners.spark.structuredstreaming.io.BoundedDatasetFactory.createDatasetFromRDD(BoundedDatasetFactory.java:103)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:57)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:38)
at org.apache.beam.runners.spark.structuredstreaming.translation.TransformTranslator.translate(TransformTranslator.java:79)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$TranslatingVisitor.visit(PipelineTranslator.java:282)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$PTransformVisitor.visitPrimitiveTransform(PipelineTranslator.java:477)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:593)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.access$500(TransformHierarchy.java:240)
at org.apache.beam.sdk.runners.TransformHierarchy.visit(TransformHierarchy.java:214)
at org.apache.beam.sdk.Pipeline.traverseTopologically(Pipeline.java:477)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator.translate(PipelineTranslator.java:129)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.translatePipeline(SparkStructuredStreamingRunner.java:190)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.lambda$run$0(SparkStructuredStreamingRunner.java:154)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
... 1 more
Check warning on line 0 in org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests
github-actions / Test Results
testParDoWithOnlyTaggedOutput (org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests) failed
runners/spark/3/build/test-results/validatesStructuredStreamingRunnerBatch/TEST-org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests.xml [took 0s]
Raw output
java.lang.RuntimeException: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.runtimeExceptionFrom(SparkStructuredStreamingPipelineResult.java:56)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.unwrapCause(SparkStructuredStreamingPipelineResult.java:70)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:100)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:88)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:168)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:82)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:325)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:404)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:343)
at org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests.testParDoWithOnlyTaggedOutput(ParDoTest.java:822)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:288)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:282)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.lang.Thread.run(Thread.java:829)
Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.spark.sql.catalyst.analysis.Analyzer.<init>(Analyzer.scala:1753)
at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anon$1.<init>(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.analyzer(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.$anonfun$build$2(BaseSessionStateBuilder.scala:354)
at org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:87)
at org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:87)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:75)
at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:183)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:183)
at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:75)
at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:73)
at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:65)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:205)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:211)
at org.apache.spark.sql.Dataset$.apply(Dataset.scala:75)
at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:492)
at org.apache.beam.runners.spark.structuredstreaming.io.BoundedDatasetFactory.createDatasetFromRDD(BoundedDatasetFactory.java:103)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:57)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:38)
at org.apache.beam.runners.spark.structuredstreaming.translation.TransformTranslator.translate(TransformTranslator.java:79)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$TranslatingVisitor.visit(PipelineTranslator.java:282)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$PTransformVisitor.visitPrimitiveTransform(PipelineTranslator.java:477)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:593)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.access$500(TransformHierarchy.java:240)
at org.apache.beam.sdk.runners.TransformHierarchy.visit(TransformHierarchy.java:214)
at org.apache.beam.sdk.Pipeline.traverseTopologically(Pipeline.java:477)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator.translate(PipelineTranslator.java:129)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.translatePipeline(SparkStructuredStreamingRunner.java:190)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.lambda$run$0(SparkStructuredStreamingRunner.java:154)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
... 1 more
Check warning on line 0 in org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests
github-actions / Test Results
testMultiOutputChaining (org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests) failed
runners/spark/3/build/test-results/validatesStructuredStreamingRunnerBatch/TEST-org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests.xml [took 0s]
Raw output
java.lang.RuntimeException: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.runtimeExceptionFrom(SparkStructuredStreamingPipelineResult.java:56)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.unwrapCause(SparkStructuredStreamingPipelineResult.java:70)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:100)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:88)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:168)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:82)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:325)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:404)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:343)
at org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests.testMultiOutputChaining(ParDoTest.java:1462)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:288)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:282)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.lang.Thread.run(Thread.java:829)
Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.spark.sql.catalyst.analysis.Analyzer.<init>(Analyzer.scala:1753)
at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anon$1.<init>(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.analyzer(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.$anonfun$build$2(BaseSessionStateBuilder.scala:354)
at org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:87)
at org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:87)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:75)
at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:183)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:183)
at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:75)
at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:73)
at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:65)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:205)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:211)
at org.apache.spark.sql.Dataset$.apply(Dataset.scala:75)
at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:492)
at org.apache.beam.runners.spark.structuredstreaming.io.BoundedDatasetFactory.createDatasetFromRDD(BoundedDatasetFactory.java:103)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:57)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:38)
at org.apache.beam.runners.spark.structuredstreaming.translation.TransformTranslator.translate(TransformTranslator.java:79)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$TranslatingVisitor.visit(PipelineTranslator.java:282)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$PTransformVisitor.visitPrimitiveTransform(PipelineTranslator.java:477)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:593)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.access$500(TransformHierarchy.java:240)
at org.apache.beam.sdk.runners.TransformHierarchy.visit(TransformHierarchy.java:214)
at org.apache.beam.sdk.Pipeline.traverseTopologically(Pipeline.java:477)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator.translate(PipelineTranslator.java:129)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.translatePipeline(SparkStructuredStreamingRunner.java:190)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.lambda$run$0(SparkStructuredStreamingRunner.java:154)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
... 1 more
Check warning on line 0 in org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests
github-actions / Test Results
testMultiOutputParDoWithSideInputs (org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests) failed
runners/spark/3/build/test-results/validatesStructuredStreamingRunnerBatch/TEST-org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests.xml [took 0s]
Raw output
java.lang.RuntimeException: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.runtimeExceptionFrom(SparkStructuredStreamingPipelineResult.java:56)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.unwrapCause(SparkStructuredStreamingPipelineResult.java:70)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:100)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:88)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:168)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:82)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:325)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:404)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:343)
at org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests.testMultiOutputParDoWithSideInputs(ParDoTest.java:1255)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:288)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:282)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.lang.Thread.run(Thread.java:829)
Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.spark.sql.catalyst.analysis.Analyzer.<init>(Analyzer.scala:1753)
at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anon$1.<init>(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.analyzer(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.$anonfun$build$2(BaseSessionStateBuilder.scala:354)
at org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:87)
at org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:87)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:75)
at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:183)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:183)
at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:75)
at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:73)
at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:65)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:205)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:211)
at org.apache.spark.sql.Dataset$.apply(Dataset.scala:75)
at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:480)
at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:511)
at org.apache.beam.runners.spark.structuredstreaming.translation.TransformTranslator$Context.createDataset(TransformTranslator.java:187)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ImpulseTranslatorBatch.translate(ImpulseTranslatorBatch.java:41)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ImpulseTranslatorBatch.translate(ImpulseTranslatorBatch.java:32)
at org.apache.beam.runners.spark.structuredstreaming.translation.TransformTranslator.translate(TransformTranslator.java:79)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$TranslatingVisitor.visit(PipelineTranslator.java:282)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$PTransformVisitor.visitPrimitiveTransform(PipelineTranslator.java:477)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:593)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.access$500(TransformHierarchy.java:240)
at org.apache.beam.sdk.runners.TransformHierarchy.visit(TransformHierarchy.java:214)
at org.apache.beam.sdk.Pipeline.traverseTopologically(Pipeline.java:477)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator.translate(PipelineTranslator.java:129)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.translatePipeline(SparkStructuredStreamingRunner.java:190)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.lambda$run$0(SparkStructuredStreamingRunner.java:154)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
... 1 more
Check warning on line 0 in org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests
github-actions / Test Results
testSideInputsWithMultipleWindows (org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests) failed
runners/spark/3/build/test-results/validatesStructuredStreamingRunnerBatch/TEST-org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests.xml [took 0s]
Raw output
java.lang.RuntimeException: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.runtimeExceptionFrom(SparkStructuredStreamingPipelineResult.java:56)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.unwrapCause(SparkStructuredStreamingPipelineResult.java:70)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:100)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:88)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:168)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:82)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:325)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:404)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:343)
at org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests.testSideInputsWithMultipleWindows(ParDoTest.java:1341)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:288)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:282)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.lang.Thread.run(Thread.java:829)
Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.spark.sql.catalyst.analysis.Analyzer.<init>(Analyzer.scala:1753)
at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anon$1.<init>(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.analyzer(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.$anonfun$build$2(BaseSessionStateBuilder.scala:354)
at org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:87)
at org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:87)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:75)
at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:183)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:183)
at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:75)
at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:73)
at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:65)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:205)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:211)
at org.apache.spark.sql.Dataset$.apply(Dataset.scala:75)
at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:480)
at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:511)
at org.apache.beam.runners.spark.structuredstreaming.translation.TransformTranslator$Context.createDataset(TransformTranslator.java:187)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ImpulseTranslatorBatch.translate(ImpulseTranslatorBatch.java:41)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ImpulseTranslatorBatch.translate(ImpulseTranslatorBatch.java:32)
at org.apache.beam.runners.spark.structuredstreaming.translation.TransformTranslator.translate(TransformTranslator.java:79)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$TranslatingVisitor.visit(PipelineTranslator.java:282)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$PTransformVisitor.visitPrimitiveTransform(PipelineTranslator.java:477)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:593)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.access$500(TransformHierarchy.java:240)
at org.apache.beam.sdk.runners.TransformHierarchy.visit(TransformHierarchy.java:214)
at org.apache.beam.sdk.Pipeline.traverseTopologically(Pipeline.java:477)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator.translate(PipelineTranslator.java:129)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.translatePipeline(SparkStructuredStreamingRunner.java:190)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.lambda$run$0(SparkStructuredStreamingRunner.java:154)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
... 1 more
Check warning on line 0 in org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests
github-actions / Test Results
testParDoEmptyWithTaggedOutput (org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests) failed
runners/spark/3/build/test-results/validatesStructuredStreamingRunnerBatch/TEST-org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests.xml [took 0s]
Raw output
java.lang.RuntimeException: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.runtimeExceptionFrom(SparkStructuredStreamingPipelineResult.java:56)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.unwrapCause(SparkStructuredStreamingPipelineResult.java:70)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:100)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:88)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:168)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:82)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:325)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:404)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:343)
at org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests.testParDoEmptyWithTaggedOutput(ParDoTest.java:769)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:288)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:282)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.lang.Thread.run(Thread.java:829)
Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.spark.sql.catalyst.analysis.Analyzer.<init>(Analyzer.scala:1753)
at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anon$1.<init>(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.analyzer(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.$anonfun$build$2(BaseSessionStateBuilder.scala:354)
at org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:87)
at org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:87)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:75)
at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:183)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:183)
at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:75)
at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:73)
at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:65)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:205)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:211)
at org.apache.spark.sql.Dataset$.apply(Dataset.scala:75)
at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:480)
at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:511)
at org.apache.beam.runners.spark.structuredstreaming.translation.TransformTranslator$Context.createDataset(TransformTranslator.java:187)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ImpulseTranslatorBatch.translate(ImpulseTranslatorBatch.java:41)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ImpulseTranslatorBatch.translate(ImpulseTranslatorBatch.java:32)
at org.apache.beam.runners.spark.structuredstreaming.translation.TransformTranslator.translate(TransformTranslator.java:79)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$TranslatingVisitor.visit(PipelineTranslator.java:282)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$PTransformVisitor.visitPrimitiveTransform(PipelineTranslator.java:477)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:593)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.access$500(TransformHierarchy.java:240)
at org.apache.beam.sdk.runners.TransformHierarchy.visit(TransformHierarchy.java:214)
at org.apache.beam.sdk.Pipeline.traverseTopologically(Pipeline.java:477)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator.translate(PipelineTranslator.java:129)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.translatePipeline(SparkStructuredStreamingRunner.java:190)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.lambda$run$0(SparkStructuredStreamingRunner.java:154)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
... 1 more
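Each of the failures above bottoms out in the same place: Spark cannot initialize org.apache.spark.sql.catalyst.expressions.VirtualColumn$ while constructing its Catalyst Analyzer, so in these runs any pipeline that reaches SparkSession.createDataset fails during translation on the structured streaming runner. As a rough sketch only, a pipeline of the shape below would walk the same ImpulseTranslatorBatch / createDataset path seen in these traces; the SparkStructuredStreamingPipelineOptions type and the setSparkMaster setter are recalled from the Beam Spark runner API, not taken from this build, and should be checked against the version under test.

```java
// Minimal sketch of a pipeline exercising the translation path in the traces above.
// Assumptions (not from this CI output): SparkStructuredStreamingPipelineOptions and
// setSparkMaster exist as named in the Beam version being tested.
import org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineOptions;
import org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Create;

public class VirtualColumnInitSketch {
  public static void main(String[] args) {
    SparkStructuredStreamingPipelineOptions options =
        PipelineOptionsFactory.fromArgs(args).as(SparkStructuredStreamingPipelineOptions.class);
    options.setRunner(SparkStructuredStreamingRunner.class);
    options.setSparkMaster("local[2]"); // assumption: local master, as in the batch ValidatesRunner suite

    Pipeline pipeline = Pipeline.create(options);
    // Translating this source calls SparkSession.createDataset, which builds the
    // Catalyst Analyzer and hits the VirtualColumn$ static initializer that fails above.
    pipeline.apply(Create.of(1, 2, 3));
    pipeline.run().waitUntilFinish();
  }
}
```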
Check warning on line 0 in org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests
github-actions / Test Results
testParDoWithTaggedOutput (org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests) failed
runners/spark/3/build/test-results/validatesStructuredStreamingRunnerBatch/TEST-org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests.xml [took 0s]
Raw output
java.lang.RuntimeException: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.runtimeExceptionFrom(SparkStructuredStreamingPipelineResult.java:56)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.unwrapCause(SparkStructuredStreamingPipelineResult.java:70)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:100)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:88)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:168)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:82)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:325)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:404)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:343)
at org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests.testParDoWithTaggedOutput(ParDoTest.java:727)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:288)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:282)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.lang.Thread.run(Thread.java:829)
Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.spark.sql.catalyst.analysis.Analyzer.<init>(Analyzer.scala:1753)
at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anon$1.<init>(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.analyzer(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.$anonfun$build$2(BaseSessionStateBuilder.scala:354)
at org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:87)
at org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:87)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:75)
at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:183)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:183)
at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:75)
at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:73)
at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:65)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:205)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:211)
at org.apache.spark.sql.Dataset$.apply(Dataset.scala:75)
at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:492)
at org.apache.beam.runners.spark.structuredstreaming.io.BoundedDatasetFactory.createDatasetFromRDD(BoundedDatasetFactory.java:103)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:57)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ReadSourceTranslatorBatch.translate(ReadSourceTranslatorBatch.java:38)
at org.apache.beam.runners.spark.structuredstreaming.translation.TransformTranslator.translate(TransformTranslator.java:79)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$TranslatingVisitor.visit(PipelineTranslator.java:282)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$PTransformVisitor.visitPrimitiveTransform(PipelineTranslator.java:477)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:593)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.access$500(TransformHierarchy.java:240)
at org.apache.beam.sdk.runners.TransformHierarchy.visit(TransformHierarchy.java:214)
at org.apache.beam.sdk.Pipeline.traverseTopologically(Pipeline.java:477)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator.translate(PipelineTranslator.java:129)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.translatePipeline(SparkStructuredStreamingRunner.java:190)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.lambda$run$0(SparkStructuredStreamingRunner.java:154)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
... 1 more
Check warning on line 0 in org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests
github-actions / Test Results
testMultiOutputParDoWithSideInputsIsCumulative (org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests) failed
runners/spark/3/build/test-results/validatesStructuredStreamingRunnerBatch/TEST-org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests.xml [took 0s]
Raw output
java.lang.RuntimeException: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.runtimeExceptionFrom(SparkStructuredStreamingPipelineResult.java:56)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.unwrapCause(SparkStructuredStreamingPipelineResult.java:70)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:100)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingPipelineResult.waitUntilFinish(SparkStructuredStreamingPipelineResult.java:88)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:168)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.run(SparkStructuredStreamingRunner.java:82)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:325)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:404)
at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:343)
at org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests.testMultiOutputParDoWithSideInputsIsCumulative(ParDoTest.java:1293)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:288)
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:282)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.lang.Thread.run(Thread.java:829)
Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.apache.spark.sql.catalyst.expressions.VirtualColumn$
at org.apache.spark.sql.catalyst.analysis.Analyzer.<init>(Analyzer.scala:1753)
at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anon$1.<init>(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.analyzer(BaseSessionStateBuilder.scala:178)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.$anonfun$build$2(BaseSessionStateBuilder.scala:354)
at org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:87)
at org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:87)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:75)
at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:183)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:183)
at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:75)
at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:73)
at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:65)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:205)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:211)
at org.apache.spark.sql.Dataset$.apply(Dataset.scala:75)
at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:480)
at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:511)
at org.apache.beam.runners.spark.structuredstreaming.translation.TransformTranslator$Context.createDataset(TransformTranslator.java:187)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ImpulseTranslatorBatch.translate(ImpulseTranslatorBatch.java:41)
at org.apache.beam.runners.spark.structuredstreaming.translation.batch.ImpulseTranslatorBatch.translate(ImpulseTranslatorBatch.java:32)
at org.apache.beam.runners.spark.structuredstreaming.translation.TransformTranslator.translate(TransformTranslator.java:79)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$TranslatingVisitor.visit(PipelineTranslator.java:282)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator$PTransformVisitor.visitPrimitiveTransform(PipelineTranslator.java:477)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:593)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.access$500(TransformHierarchy.java:240)
at org.apache.beam.sdk.runners.TransformHierarchy.visit(TransformHierarchy.java:214)
at org.apache.beam.sdk.Pipeline.traverseTopologically(Pipeline.java:477)
at org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator.translate(PipelineTranslator.java:129)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.translatePipeline(SparkStructuredStreamingRunner.java:190)
at org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner.lambda$run$0(SparkStructuredStreamingRunner.java:154)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
... 1 more
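Note that "Could not initialize class ... VirtualColumn$" with no further cause is the JVM's follow-up error: the real exception from the class's static initializer is reported only on the first use, as ExceptionInInitializerError, and that first failure likely happened in an earlier test in this run. The self-contained illustration below (generic Java, not from this build) shows the two-phase behavior, so the trace with the actual root cause is the earliest VirtualColumn$ failure in the report, not the ones shown here.

```java
// Generic demonstration: a static initializer fails once with
// ExceptionInInitializerError (carrying the real cause); every later use of the
// class gets NoClassDefFoundError "Could not initialize class ..." with no cause,
// which is the pattern in the stack traces above.
public class StaticInitDemo {
  static class Fragile {
    static final int VALUE = Integer.parseInt("not a number"); // throws in <clinit>
  }

  public static void main(String[] args) {
    try {
      System.out.println(Fragile.VALUE);
    } catch (Throwable t) {
      System.out.println("first use:  " + t); // ExceptionInInitializerError with NumberFormatException cause
    }
    try {
      System.out.println(Fragile.VALUE);
    } catch (Throwable t) {
      System.out.println("second use: " + t); // NoClassDefFoundError: Could not initialize class StaticInitDemo$Fragile
    }
  }
}
```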