
Mapping JSON to a case class in Spark (spaces in field names)

I'm trying to read a JSON file with the Spark Dataset API, but some of the field names in this JSON contain spaces.

A JSON row looks like this:

{"Field Name" : "value"} 

My case class needs to look like this:

case class MyType(`Field Name`: String) 

Then I can read the file into a DataFrame, and it loads the correct schema:

val dataframe = spark.read.json(path) 

The problem comes when I try to convert the DataFrame to a Dataset[MyType]:

dataframe.as[MyType] 

The StructSchema loaded by the Encoder[MyType] is wrong: it introduces $u0020 in place of the space, and I get the following error:

cannot resolve '`Field$u0020Name`' given input columns: [Field Name]; 
org.apache.spark.sql.AnalysisException: cannot resolve '`Field$u0020Name`' given input columns: [Field Name]; 
    at org.apache.spark.sql.catalyst.analysis.package$AnalysisErrorAt.failAnalysis(package.scala:42) 
    at org.apache.spark.sql.catalyst.analysis.CheckAnalysis$$anonfun$checkAnalysis$1$$anonfun$apply$2.applyOrElse(CheckAnalysis.scala:88) 
    at org.apache.spark.sql.catalyst.analysis.CheckAnalysis$$anonfun$checkAnalysis$1$$anonfun$apply$2.applyOrElse(CheckAnalysis.scala:85) 
    at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformUp$1.apply(TreeNode.scala:289) 
    at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformUp$1.apply(TreeNode.scala:289) 
    at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:70) 
    at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:288) 
    at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:286) 
    at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:286) 
    at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:306) 
    at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:187) 
    at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:304) 
    at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:286) 
    at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:286) 
    at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:286) 
    at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:306) 
    at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:187) 
    at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:304) 
    at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:286) 
    at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:286) 
    at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:286) 
    at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4$$anonfun$apply$11.apply(TreeNode.scala:335) 
    at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234) 
    at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234) 
    at scala.collection.immutable.List.foreach(List.scala:381) 
    at scala.collection.TraversableLike$class.map(TraversableLike.scala:234) 
    at scala.collection.immutable.List.map(List.scala:285) 
    at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:333) 
    at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:187) 
    at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:304) 
    at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:286) 
    at org.apache.spark.sql.catalyst.plans.QueryPlan$$anonfun$transformExpressionsUp$1.apply(QueryPlan.scala:268) 
    at org.apache.spark.sql.catalyst.plans.QueryPlan$$anonfun$transformExpressionsUp$1.apply(QueryPlan.scala:268) 
    at org.apache.spark.sql.catalyst.plans.QueryPlan.transformExpression$1(QueryPlan.scala:279) 
    at org.apache.spark.sql.catalyst.plans.QueryPlan.org$apache$spark$sql$catalyst$plans$QueryPlan$$recursiveTransform$1(QueryPlan.scala:289) 
    at org.apache.spark.sql.catalyst.plans.QueryPlan$$anonfun$6.apply(QueryPlan.scala:298) 
    at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:187) 
    at org.apache.spark.sql.catalyst.plans.QueryPlan.mapExpressions(QueryPlan.scala:298) 
    at org.apache.spark.sql.catalyst.plans.QueryPlan.transformExpressionsUp(QueryPlan.scala:268) 
    at org.apache.spark.sql.catalyst.analysis.CheckAnalysis$$anonfun$checkAnalysis$1.apply(CheckAnalysis.scala:85) 
    at org.apache.spark.sql.catalyst.analysis.CheckAnalysis$$anonfun$checkAnalysis$1.apply(CheckAnalysis.scala:78) 
    at org.apache.spark.sql.catalyst.trees.TreeNode.foreachUp(TreeNode.scala:127) 
    at org.apache.spark.sql.catalyst.analysis.CheckAnalysis$class.checkAnalysis(CheckAnalysis.scala:78) 
    at org.apache.spark.sql.catalyst.analysis.Analyzer.checkAnalysis(Analyzer.scala:91) 
    at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.resolveAndBind(ExpressionEncoder.scala:256) 
    at org.apache.spark.sql.Dataset.<init>(Dataset.scala:206) 
    at org.apache.spark.sql.Dataset.<init>(Dataset.scala:170) 
    at org.apache.spark.sql.Dataset$.apply(Dataset.scala:61) 
    at org.apache.spark.sql.Dataset.as(Dataset.scala:380) 
    at com.radius.floodgate.preprocess.BomboraSuite$$anonfun$5.apply$mcV$sp(BomboraSuite.scala:151) 
    at com.radius.floodgate.preprocess.BomboraSuite$$anonfun$5.apply(BomboraSuite.scala:141) 
    at com.radius.floodgate.preprocess.BomboraSuite$$anonfun$5.apply(BomboraSuite.scala:141) 
    at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85) 
    at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104) 
    at org.scalatest.Transformer.apply(Transformer.scala:22) 
    at org.scalatest.Transformer.apply(Transformer.scala:20) 
    at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186) 
    at org.scalatest.TestSuite$class.withFixture(TestSuite.scala:196) 
    at org.scalatest.FunSuite.withFixture(FunSuite.scala:1560) 
    at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:183) 
    at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196) 
    at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196) 
    at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289) 
    at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:196) 
    at org.scalatest.FunSuite.runTest(FunSuite.scala:1560) 
    at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229) 
    at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229) 
    at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:396) 
    at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:384) 
    at scala.collection.immutable.List.foreach(List.scala:381) 
    at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384) 
    at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:379) 
    at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461) 
    at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:229) 
    at org.scalatest.FunSuite.runTests(FunSuite.scala:1560) 
    at org.scalatest.Suite$class.run(Suite.scala:1147) 
    at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560) 
    at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233) 
    at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233) 
    at org.scalatest.SuperEngine.runImpl(Engine.scala:521) 
    at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:233) 
    at com.radius.floodgate.preprocess.BomboraSuite.org$scalatest$BeforeAndAfterAll$$super$run(BomboraSuite.scala:18) 
    at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:213) 
    at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:210) 
    at com.radius.floodgate.preprocess.BomboraSuite.run(BomboraSuite.scala:18) 
    at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:45) 
    at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$1.apply(Runner.scala:1340) 
    at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$1.apply(Runner.scala:1334) 
    at scala.collection.immutable.List.foreach(List.scala:381) 
    at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1334) 
    at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1011) 
    at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1010) 
    at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1500) 
    at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1010) 
    at org.scalatest.tools.Runner$.run(Runner.scala:850) 
    at org.scalatest.tools.Runner.run(Runner.scala) 
    at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.runScalaTest2(ScalaTestRunner.java:138) 
    at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.main(ScalaTestRunner.java:28) 

Is there any workaround for this problem?

Answer


As a workaround, define the case class with a field name that contains no spaces (use an underscore instead), and rename the DataFrame column to match the case class field:

case class MyType(Field_Name: String) 

dataframe.withColumnRenamed("Field Name", "Field_Name").as[MyType] 
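For completeness, here is a minimal end-to-end sketch of that workaround, assuming a local SparkSession and a placeholder input path ("data.json" is hypothetical):

import org.apache.spark.sql.SparkSession

// Field name without spaces, so the schema derived by Encoder[MyType] matches the renamed column
case class MyType(Field_Name: String)

object FieldNameWorkaround {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("field-name-space-workaround")
      .master("local[*]") // assumption: run locally for the example
      .getOrCreate()
    import spark.implicits._ // provides the Encoder for the case class

    // each input line looks like {"Field Name": "value"}
    val dataframe = spark.read.json("data.json")

    // rename the column so it matches the case class field, then convert to a Dataset
    val dataset = dataframe
      .withColumnRenamed("Field Name", "Field_Name")
      .as[MyType]

    dataset.show()
    spark.stop()
  }
}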