Facebook
From Arunima, 1 year ago, written in Plain Text.
Embed
Download Paste or View Raw
Hits: 361
  1. Py4JJavaError: An error occurred while calling o197.showString.
  2. : java.lang.NoClassDefFoundError: org/apache/spark/sql/catalyst/expressions/IfNull
  3.  at org.apache.ignite.spark.impl.optimization.SystemExpressions$.apply(SystemExpressions.scala:35)
  4.  at org.apache.ignite.spark.impl.optimization.package$.$anonfun$exprsAllowed$2(package.scala:100)
  5.  at org.apache.ignite.spark.impl.optimization.package$.$anonfun$exprsAllowed$2$adapted(package.scala:100)
  6.  at scala.collection.LinearSeqOptimized.exists(LinearSeqOptimized.scala:95)
  7.  at scala.collection.LinearSeqOptimized.exists$(LinearSeqOptimized.scala:92)
  8.  at scala.collection.immutable.List.exists(List.scala:91)
  9.  at org.apache.ignite.spark.impl.optimization.package$.exprsAllowed(package.scala:100)
  10.  at org.apache.spark.sql.ignite.IgniteOptimization$$anonfun$pushDownOperators$1.applyOrElse(IgniteOptimization.scala:109)
  11.  at org.apache.spark.sql.ignite.IgniteOptimization$$anonfun$pushDownOperators$1.applyOrElse(IgniteOptimization.scala:63)
  12.  at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUpWithPruning$4(TreeNode.scala:642)
  13.  at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:176)
  14.  at org.apache.spark.sql.catalyst.trees.TreeNode.transformUpWithPruning(TreeNode.scala:642)
  15.  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformUpWithPruning(LogicalPlan.scala:30)
  16.  at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformUpWithPruning(AnalysisHelper.scala:279)
  17.  at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformUpWithPruning$(AnalysisHelper.scala:275)
  18.  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformUpWithPruning(LogicalPlan.scala:30)
  19.  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformUpWithPruning(LogicalPlan.scala:30)
  20.  at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUpWithPruning$1(TreeNode.scala:635)
  21.  at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren(TreeNode.scala:1228)
  22.  at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren$(TreeNode.scala:1227)
  23.  at org.apache.spark.sql.catalyst.plans.logical.OrderPreservingUnaryNode.mapChildren(LogicalPlan.scala:208)
  24.  at org.apache.spark.sql.catalyst.trees.TreeNode.transformUpWithPruning(TreeNode.scala:635)
  25.  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformUpWithPruning(LogicalPlan.scala:30)
  26.  at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformUpWithPruning(AnalysisHelper.scala:279)
  27.  at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformUpWithPruning$(AnalysisHelper.scala:275)
  28.  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformUpWithPruning(LogicalPlan.scala:30)
  29.  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformUpWithPruning(LogicalPlan.scala:30)
  30.  at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUpWithPruning$1(TreeNode.scala:635)
  31.  at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren(TreeNode.scala:1228)
  32.  at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren$(TreeNode.scala:1227)
  33.  at org.apache.spark.sql.catalyst.plans.logical.OrderPreservingUnaryNode.mapChildren(LogicalPlan.scala:208)
  34.  at org.apache.spark.sql.catalyst.trees.TreeNode.transformUpWithPruning(TreeNode.scala:635)
  35.  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformUpWithPruning(LogicalPlan.scala:30)
  36.  at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformUpWithPruning(AnalysisHelper.scala:279)
  37.  at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformUpWithPruning$(AnalysisHelper.scala:275)
  38.  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformUpWithPruning(LogicalPlan.scala:30)
  39.  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformUpWithPruning(LogicalPlan.scala:30)
  40.  at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUpWithPruning$1(TreeNode.scala:635)
  41.  at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren(TreeNode.scala:1228)
  42.  at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren$(TreeNode.scala:1227)
  43.  at org.apache.spark.sql.catalyst.plans.logical.GlobalLimit.mapChildren(basicLogicalOperators.scala:1258)
  44.  at org.apache.spark.sql.catalyst.trees.TreeNode.transformUpWithPruning(TreeNode.scala:635)
  45.  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformUpWithPruning(LogicalPlan.scala:30)
  46.  at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformUpWithPruning(AnalysisHelper.scala:279)
  47.  at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformUpWithPruning$(AnalysisHelper.scala:275)
  48.  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformUpWithPruning(LogicalPlan.scala:30)
  49.  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformUpWithPruning(LogicalPlan.scala:30)
  50.  at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:611)
  51.  at org.apache.spark.sql.ignite.IgniteOptimization$.pushDownOperators(IgniteOptimization.scala:63)
  52.  at org.apache.spark.sql.ignite.IgniteOptimization$.apply(IgniteOptimization.scala:40)
  53.  at org.apache.spark.sql.ignite.IgniteOptimization$.apply(IgniteOptimization.scala:33)
  54.  at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$2(RuleExecutor.scala:211)
  55.  at scala.collection.LinearSeqOptimized.foldLeft(LinearSeqOptimized.scala:126)
  56.  at scala.collection.LinearSeqOptimized.foldLeft$(LinearSeqOptimized.scala:122)
  57.  at scala.collection.immutable.List.foldLeft(List.scala:91)
  58.  at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$1(RuleExecutor.scala:208)
  59.  at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$1$adapted(RuleExecutor.scala:200)
  60.  at scala.collection.immutable.List.foreach(List.scala:431)
  61.  at org.apache.spark.sql.catalyst.rules.RuleExecutor.execute(RuleExecutor.scala:200)
  62.  at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$executeAndTrack$1(RuleExecutor.scala:179)
  63.  at org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:88)
  64.  at org.apache.spark.sql.catalyst.rules.RuleExecutor.executeAndTrack(RuleExecutor.scala:179)
  65.  at org.apache.spark.sql.execution.QueryExecution.$anonfun$optimizedPlan$1(QueryExecution.scala:126)
  66.  at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
  67.  at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$2(QueryExecution.scala:185)
  68.  at org.apache.spark.sql.execution.QueryExecution$.withInternalError(QueryExecution.scala:510)
  69.  at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:185)
  70.  at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
  71.  at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:184)
  72.  at org.apache.spark.sql.execution.QueryExecution.optimizedPlan$lzycompute(QueryExecution.scala:122)
  73.  at org.apache.spark.sql.execution.QueryExecution.optimizedPlan(QueryExecution.scala:118)
  74.  at org.apache.spark.sql.execution.QueryExecution.assertOptimized(QueryExecution.scala:136)
  75.  at org.apache.spark.sql.execution.QueryExecution.executedPlan$lzycompute(QueryExecution.scala:154)
  76.  at org.apache.spark.sql.execution.QueryExecution.executedPlan(QueryExecution.scala:151)
  77.  at org.apache.spark.sql.execution.QueryExecution.simpleString(QueryExecution.scala:204)
  78.  at org.apache.spark.sql.execution.QueryExecution.org$apache$spark$sql$execution$QueryExecution$$explainString(QueryExecution.scala:249)
  79.  at org.apache.spark.sql.execution.QueryExecution.explainString(QueryExecution.scala:218)
  80.  at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:103)
  81.  at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:169)
  82.  at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:95)
  83.  at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
  84.  at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
  85.  at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3856)
  86.  at org.apache.spark.sql.Dataset.head(Dataset.scala:2863)
  87.  at org.apache.spark.sql.Dataset.take(Dataset.scala:3084)
  88.  at org.apache.spark.sql.Dataset.getRows(Dataset.scala:288)
  89.  at org.apache.spark.sql.Dataset.showString(Dataset.scala:327)
  90.  at jdk.internal.reflect.GeneratedMethodAccessor72.invoke(Unknown Source)
  91.  at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  92.  at java.base/java.lang.reflect.Method.invoke(Method.java:568)
  93.  at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
  94.  at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
  95.  at py4j.Gateway.invoke(Gateway.java:282)
  96.  at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
  97.  at py4j.commands.CallCommand.execute(CallCommand.java:79)
  98.  at py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:182)
  99.  at py4j.ClientServerConnection.run(ClientServerConnection.java:106)
  100.  at java.base/java.lang.Thread.run(Thread.java:833)