The target raster frame has spatial_index, spatial_key, and five tile columns; it has no temporal key. Writing it to Parquet fails with the stack trace below (the legacy schema parser expects a TimestampType but encounters the struct JSON).
org.apache.spark.SparkException: Job aborted.
at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$write$1.apply$mcV$sp(FileFormatWriter.scala:213)
at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$write$1.apply(FileFormatWriter.scala:166)
at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$write$1.apply(FileFormatWriter.scala:166)
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:65)
at org.apache.spark.sql.execution.datasources.FileFormatWriter$.write(FileFormatWriter.scala:166)
at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelationCommand.run(InsertIntoHadoopFsRelationCommand.scala:145)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:58)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:56)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:138)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:135)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:116)
at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:92)
at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:92)
at org.apache.spark.sql.execution.datasources.DataSource.writeInFileFormat(DataSource.scala:435)
at org.apache.spark.sql.execution.datasources.DataSource.write(DataSource.scala:471)
at org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand.run(SaveIntoDataSourceCommand.scala:50)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:58)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:56)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:138)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:135)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:116)
at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:92)
at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:92)
at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:609)
at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:233)
at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:217)
at org.apache.spark.sql.DataFrameWriter.parquet(DataFrameWriter.scala:508)
... 96 elided
Caused by: org.apache.spark.SparkException: Job aborted due to stage failure: Task 13 in stage 319.0 failed 4 times, most recent failure: Lost task 13.3 in stage 319.0 (TID 16809, 10.0.4.150, executor 4): org.apache.spark.SparkException: Task failed while writing rows
at org.apache.spark.sql.execution.datasources.FileFormatWriter$.org$apache$spark$sql$execution$datasources$FileFormatWriter$$executeTask(FileFormatWriter.scala:270)
at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$write$1$$anonfun$apply$mcV$sp$1.apply(FileFormatWriter.scala:189)
at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$write$1$$anonfun$apply$mcV$sp$1.apply(FileFormatWriter.scala:188)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
at org.apache.spark.scheduler.Task.run(Task.scala:108)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:338)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.IllegalArgumentException: Unsupported dataType: {"type":"struct","fields":[{"name":"spatial_key","type":{"type":"struct","fields":[{"name":"col","type":"integer","nullable":false,"metadata":{}},{"name":"row","type":"integer","nullable":false,"metadata":{}}]},"nullable":false,"metadata":{"_context":{"extent":{"xmax":752414.0000034694,"ymin":4151951.9999965937,"ymax":4243599.999999758,"xmin":658974.0000002789},"layoutDefinition":{"tileLayout":{"layoutRows":65602,"tileRows":100,"layoutCols":6106,"tileCols":100},"extent":{"xmax":805300.0,"ymin":2657400.0,"ymax":9217600.0,"xmin":194700.0}},"bounds":{"minKey":{"col":4642,"row":49740},"maxKey":{"col":5577,"row":50656}},"cellType":"int32","crs":"proj=utm +zone=17 +ellps=GRS80 +datum=NAD83 +units=m +no_defs "},"_stRole":"spatial_key"}},{"name":"bounds","type":{"type":"udt","class":"org.apache.spark.sql.jts.PolygonUDT$","pyClass":null,"sqlType":{"type":"struct","fields":[{"name":"wkb","type":"binary","nullable":true,"metadata":{}}]}},"nullable":false,"metadata":{}},{"name":"naip_band1","type":{"type":"udt","class":"org.apache.spark.sql.gt.types.TileUDT$","pyClass":"pyrasterframes.TileUDT","sqlType":{"type":"struct","fields":[{"name":"cellType","type":"string","nullable":false,"metadata":{}},{"name":"cols","type":"short","nullable":false,"metadata":{}},{"name":"rows","type":"short","nullable":false,"metadata":{}},{"name":"data","type":"binary","nullable":false,"metadata":{}}]}},"nullable":true,"metadata":{}},{"name":"naip_band2","type":{"type":"udt","class":"org.apache.spark.sql.gt.types.TileUDT$","pyClass":"pyrasterframes.TileUDT","sqlType":{"type":"struct","fields":[{"name":"cellType","type":"string","nullable":false,"metadata":{}},{"name":"cols","type":"short","nullable":false,"metadata":{}},{"name":"rows","type":"short","nullable":false,"metadata":{}},{"name":"data","type":"binary","nullable":false,"metadata":{}}]}},"nullable":true,"metadata":{}},{"name":"naip_band3","type":{"type":"udt","class":"org.a
pache.spark.sql.gt.types.TileUDT$","pyClass":"pyrasterframes.TileUDT","sqlType":{"type":"struct","fields":[{"name":"cellType","type":"string","nullable":false,"metadata":{}},{"name":"cols","type":"short","nullable":false,"metadata":{}},{"name":"rows","type":"short","nullable":false,"metadata":{}},{"name":"data","type":"binary","nullable":false,"metadata":{}}]}},"nullable":true,"metadata":{}},{"name":"naip_band4","type":{"type":"udt","class":"org.apache.spark.sql.gt.types.TileUDT$","pyClass":"pyrasterframes.TileUDT","sqlType":{"type":"struct","fields":[{"name":"cellType","type":"string","nullable":false,"metadata":{}},{"name":"cols","type":"short","nullable":false,"metadata":{}},{"name":"rows","type":"short","nullable":false,"metadata":{}},{"name":"data","type":"binary","nullable":false,"metadata":{}}]}},"nullable":true,"metadata":{}}]}, [1.1] failure: `TimestampType' expected but `{' found
{"type":"struct","fields":[{"name":"spatial_key","type":{"type":"struct","fields":[{"name":"col","type":"integer","nullable":false,"metadata":{}},{"name":"row","type":"integer","nullable":false,"metadata":{}}]},"nullable":false,"metadata":{"_context":{"extent":{"xmax":752414.0000034694,"ymin":4151951.9999965937,"ymax":4243599.999999758,"xmin":658974.0000002789},"layoutDefinition":{"tileLayout":{"layoutRows":65602,"tileRows":100,"layoutCols":6106,"tileCols":100},"extent":{"xmax":805300.0,"ymin":2657400.0,"ymax":9217600.0,"xmin":194700.0}},"bounds":{"minKey":{"col":4642,"row":49740},"maxKey":{"col":5577,"row":50656}},"cellType":"int32","crs":"proj=utm +zone=17 +ellps=GRS80 +datum=NAD83 +units=m +no_defs "},"_stRole":"spatial_key"}},{"name":"bounds","type":{"type":"udt","class":"org.apache.spark.sql.jts.PolygonUDT$","pyClass":null,"sqlType":{"type":"struct","fields":[{"name":"wkb","type":"binary","nullable":true,"metadata":{}}]}},"nullable":false,"metadata":{}},{"name":"naip_band1","type":{"type":"udt","class":"org.apache.spark.sql.gt.types.TileUDT$","pyClass":"pyrasterframes.TileUDT","sqlType":{"type":"struct","fields":[{"name":"cellType","type":"string","nullable":false,"metadata":{}},{"name":"cols","type":"short","nullable":false,"metadata":{}},{"name":"rows","type":"short","nullable":false,"metadata":{}},{"name":"data","type":"binary","nullable":false,"metadata":{}}]}},"nullable":true,"metadata":{}},{"name":"naip_band2","type":{"type":"udt","class":"org.apache.spark.sql.gt.types.TileUDT$","pyClass":"pyrasterframes.TileUDT","sqlType":{"type":"struct","fields":[{"name":"cellType","type":"string","nullable":false,"metadata":{}},{"name":"cols","type":"short","nullable":false,"metadata":{}},{"name":"rows","type":"short","nullable":false,"metadata":{}},{"name":"data","type":"binary","nullable":false,"metadata":{}}]}},"nullable":true,"metadata":{}},{"name":"naip_band3","type":{"type":"udt","class":"org.apache.spark.sql.gt.types.TileUDT$","pyClass":"pyrasterframes.TileUDT"
,"sqlType":{"type":"struct","fields":[{"name":"cellType","type":"string","nullable":false,"metadata":{}},{"name":"cols","type":"short","nullable":false,"metadata":{}},{"name":"rows","type":"short","nullable":false,"metadata":{}},{"name":"data","type":"binary","nullable":false,"metadata":{}}]}},"nullable":true,"metadata":{}},{"name":"naip_band4","type":{"type":"udt","class":"org.apache.spark.sql.gt.types.TileUDT$","pyClass":"pyrasterframes.TileUDT","sqlType":{"type":"struct","fields":[{"name":"cellType","type":"string","nullable":false,"metadata":{}},{"name":"cols","type":"short","nullable":false,"metadata":{}},{"name":"rows","type":"short","nullable":false,"metadata":{}},{"name":"data","type":"binary","nullable":false,"metadata":{}}]}},"nullable":true,"metadata":{}}]}
^
at org.apache.spark.sql.catalyst.parser.LegacyTypeStringParser$.parse(LegacyTypeStringParser.scala:90)
at org.apache.spark.sql.types.StructType$$anonfun$7.apply(StructType.scala:414)
at org.apache.spark.sql.types.StructType$$anonfun$7.apply(StructType.scala:414)
at scala.util.Try.getOrElse(Try.scala:79)
at org.apache.spark.sql.types.StructType$.fromString(StructType.scala:414)
at org.apache.spark.sql.execution.datasources.parquet.ParquetWriteSupport.init(ParquetWriteSupport.scala:80)
at org.apache.parquet.hadoop.ParquetOutputFormat.getRecordWriter(ParquetOutputFormat.java:341)
at org.apache.parquet.hadoop.ParquetOutputFormat.getRecordWriter(ParquetOutputFormat.java:302)
at org.apache.spark.sql.execution.datasources.parquet.ParquetOutputWriter.<init>(ParquetOutputWriter.scala:37)
at org.apache.spark.sql.execution.datasources.parquet.ParquetFileFormat$$anon$1.newInstance(ParquetFileFormat.scala:159)
at org.apache.spark.sql.execution.datasources.FileFormatWriter$DynamicPartitionWriteTask.org$apache$spark$sql$execution$datasources$FileFormatWriter$DynamicPartitionWriteTask$$newOutputWriter(FileFormatWriter.scala:416)
at org.apache.spark.sql.execution.datasources.FileFormatWriter$DynamicPartitionWriteTask$$anonfun$execute$2.apply(FileFormatWriter.scala:449)
at org.apache.spark.sql.execution.datasources.FileFormatWriter$DynamicPartitionWriteTask$$anonfun$execute$2.apply(FileFormatWriter.scala:438)
at scala.collection.Iterator$class.foreach(Iterator.scala:893)
at org.apache.spark.sql.catalyst.util.AbstractScalaRowIterator.foreach(AbstractScalaRowIterator.scala:26)
at org.apache.spark.sql.execution.datasources.FileFormatWriter$DynamicPartitionWriteTask.execute(FileFormatWriter.scala:438)
at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$org$apache$spark$sql$execution$datasources$FileFormatWriter$$executeTask$3.apply(FileFormatWriter.scala:256)
at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$org$apache$spark$sql$execution$datasources$FileFormatWriter$$executeTask$3.apply(FileFormatWriter.scala:254)
at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1371)
at org.apache.spark.sql.execution.datasources.FileFormatWriter$.org$apache$spark$sql$execution$datasources$FileFormatWriter$$executeTask(FileFormatWriter.scala:259)
... 8 more
Driver stacktrace:
at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1517)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1505)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1504)
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1504)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:814)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:814)
at scala.Option.foreach(Option.scala:257)
at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:814)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1732)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1687)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1676)
at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:630)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:2029)
at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$write$1.apply$mcV$sp(FileFormatWriter.scala:186)
... 130 more
Caused by: org.apache.spark.SparkException: Task failed while writing rows
at org.apache.spark.sql.execution.datasources.FileFormatWriter$.org$apache$spark$sql$execution$datasources$FileFormatWriter$$executeTask(FileFormatWriter.scala:270)
at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$write$1$$anonfun$apply$mcV$sp$1.apply(FileFormatWriter.scala:189)
at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$write$1$$anonfun$apply$mcV$sp$1.apply(FileFormatWriter.scala:188)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
at org.apache.spark.scheduler.Task.run(Task.scala:108)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:338)
... 3 more
Caused by: java.lang.IllegalArgumentException: Unsupported dataType: {"type":"struct","fields":[{"name":"spatial_key","type":{"type":"struct","fields":[{"name":"col","type":"integer","nullable":false,"metadata":{}},{"name":"row","type":"integer","nullable":false,"metadata":{}}]},"nullable":false,"metadata":{"_context":{"extent":{"xmax":752414.0000034694,"ymin":4151951.9999965937,"ymax":4243599.999999758,"xmin":658974.0000002789},"layoutDefinition":{"tileLayout":{"layoutRows":65602,"tileRows":100,"layoutCols":6106,"tileCols":100},"extent":{"xmax":805300.0,"ymin":2657400.0,"ymax":9217600.0,"xmin":194700.0}},"bounds":{"minKey":{"col":4642,"row":49740},"maxKey":{"col":5577,"row":50656}},"cellType":"int32","crs":"proj=utm +zone=17 +ellps=GRS80 +datum=NAD83 +units=m +no_defs "},"_stRole":"spatial_key"}},{"name":"bounds","type":{"type":"udt","class":"org.apache.spark.sql.jts.PolygonUDT$","pyClass":null,"sqlType":{"type":"struct","fields":[{"name":"wkb","type":"binary","nullable":true,"metadata":{}}]}},"nullable":false,"metadata":{}},{"name":"naip_band1","type":{"type":"udt","class":"org.apache.spark.sql.gt.types.TileUDT$","pyClass":"pyrasterframes.TileUDT","sqlType":{"type":"struct","fields":[{"name":"cellType","type":"string","nullable":false,"metadata":{}},{"name":"cols","type":"short","nullable":false,"metadata":{}},{"name":"rows","type":"short","nullable":false,"metadata":{}},{"name":"data","type":"binary","nullable":false,"metadata":{}}]}},"nullable":true,"metadata":{}},{"name":"naip_band2","type":{"type":"udt","class":"org.apache.spark.sql.gt.types.TileUDT$","pyClass":"pyrasterframes.TileUDT","sqlType":{"type":"struct","fields":[{"name":"cellType","type":"string","nullable":false,"metadata":{}},{"name":"cols","type":"short","nullable":false,"metadata":{}},{"name":"rows","type":"short","nullable":false,"metadata":{}},{"name":"data","type":"binary","nullable":false,"metadata":{}}]}},"nullable":true,"metadata":{}},{"name":"naip_band3","type":{"type":"udt","class":"org.a
pache.spark.sql.gt.types.TileUDT$","pyClass":"pyrasterframes.TileUDT","sqlType":{"type":"struct","fields":[{"name":"cellType","type":"string","nullable":false,"metadata":{}},{"name":"cols","type":"short","nullable":false,"metadata":{}},{"name":"rows","type":"short","nullable":false,"metadata":{}},{"name":"data","type":"binary","nullable":false,"metadata":{}}]}},"nullable":true,"metadata":{}},{"name":"naip_band4","type":{"type":"udt","class":"org.apache.spark.sql.gt.types.TileUDT$","pyClass":"pyrasterframes.TileUDT","sqlType":{"type":"struct","fields":[{"name":"cellType","type":"string","nullable":false,"metadata":{}},{"name":"cols","type":"short","nullable":false,"metadata":{}},{"name":"rows","type":"short","nullable":false,"metadata":{}},{"name":"data","type":"binary","nullable":false,"metadata":{}}]}},"nullable":true,"metadata":{}}]}, [1.1] failure: `TimestampType' expected but `{' found
{"type":"struct","fields":[{"name":"spatial_key","type":{"type":"struct","fields":[{"name":"col","type":"integer","nullable":false,"metadata":{}},{"name":"row","type":"integer","nullable":false,"metadata":{}}]},"nullable":false,"metadata":{"_context":{"extent":{"xmax":752414.0000034694,"ymin":4151951.9999965937,"ymax":4243599.999999758,"xmin":658974.0000002789},"layoutDefinition":{"tileLayout":{"layoutRows":65602,"tileRows":100,"layoutCols":6106,"tileCols":100},"extent":{"xmax":805300.0,"ymin":2657400.0,"ymax":9217600.0,"xmin":194700.0}},"bounds":{"minKey":{"col":4642,"row":49740},"maxKey":{"col":5577,"row":50656}},"cellType":"int32","crs":"proj=utm +zone=17 +ellps=GRS80 +datum=NAD83 +units=m +no_defs "},"_stRole":"spatial_key"}},{"name":"bounds","type":{"type":"udt","class":"org.apache.spark.sql.jts.PolygonUDT$","pyClass":null,"sqlType":{"type":"struct","fields":[{"name":"wkb","type":"binary","nullable":true,"metadata":{}}]}},"nullable":false,"metadata":{}},{"name":"naip_band1","type":{"type":"udt","class":"org.apache.spark.sql.gt.types.TileUDT$","pyClass":"pyrasterframes.TileUDT","sqlType":{"type":"struct","fields":[{"name":"cellType","type":"string","nullable":false,"metadata":{}},{"name":"cols","type":"short","nullable":false,"metadata":{}},{"name":"rows","type":"short","nullable":false,"metadata":{}},{"name":"data","type":"binary","nullable":false,"metadata":{}}]}},"nullable":true,"metadata":{}},{"name":"naip_band2","type":{"type":"udt","class":"org.apache.spark.sql.gt.types.TileUDT$","pyClass":"pyrasterframes.TileUDT","sqlType":{"type":"struct","fields":[{"name":"cellType","type":"string","nullable":false,"metadata":{}},{"name":"cols","type":"short","nullable":false,"metadata":{}},{"name":"rows","type":"short","nullable":false,"metadata":{}},{"name":"data","type":"binary","nullable":false,"metadata":{}}]}},"nullable":true,"metadata":{}},{"name":"naip_band3","type":{"type":"udt","class":"org.apache.spark.sql.gt.types.TileUDT$","pyClass":"pyrasterframes.TileUDT"
,"sqlType":{"type":"struct","fields":[{"name":"cellType","type":"string","nullable":false,"metadata":{}},{"name":"cols","type":"short","nullable":false,"metadata":{}},{"name":"rows","type":"short","nullable":false,"metadata":{}},{"name":"data","type":"binary","nullable":false,"metadata":{}}]}},"nullable":true,"metadata":{}},{"name":"naip_band4","type":{"type":"udt","class":"org.apache.spark.sql.gt.types.TileUDT$","pyClass":"pyrasterframes.TileUDT","sqlType":{"type":"struct","fields":[{"name":"cellType","type":"string","nullable":false,"metadata":{}},{"name":"cols","type":"short","nullable":false,"metadata":{}},{"name":"rows","type":"short","nullable":false,"metadata":{}},{"name":"data","type":"binary","nullable":false,"metadata":{}}]}},"nullable":true,"metadata":{}}]}
^
at org.apache.spark.sql.catalyst.parser.LegacyTypeStringParser$.parse(LegacyTypeStringParser.scala:90)
at org.apache.spark.sql.types.StructType$$anonfun$7.apply(StructType.scala:414)
at org.apache.spark.sql.types.StructType$$anonfun$7.apply(StructType.scala:414)
at scala.util.Try.getOrElse(Try.scala:79)
at org.apache.spark.sql.types.StructType$.fromString(StructType.scala:414)
at org.apache.spark.sql.execution.datasources.parquet.ParquetWriteSupport.init(ParquetWriteSupport.scala:80)
at org.apache.parquet.hadoop.ParquetOutputFormat.getRecordWriter(ParquetOutputFormat.java:341)
at org.apache.parquet.hadoop.ParquetOutputFormat.getRecordWriter(ParquetOutputFormat.java:302)
at org.apache.spark.sql.execution.datasources.parquet.ParquetOutputWriter.<init>(ParquetOutputWriter.scala:37)
at org.apache.spark.sql.execution.datasources.parquet.ParquetFileFormat$$anon$1.newInstance(ParquetFileFormat.scala:159)
at org.apache.spark.sql.execution.datasources.FileFormatWriter$DynamicPartitionWriteTask.org$apache$spark$sql$execution$datasources$FileFormatWriter$DynamicPartitionWriteTask$$newOutputWriter(FileFormatWriter.scala:416)
at org.apache.spark.sql.execution.datasources.FileFormatWriter$DynamicPartitionWriteTask$$anonfun$execute$2.apply(FileFormatWriter.scala:449)
at org.apache.spark.sql.execution.datasources.FileFormatWriter$DynamicPartitionWriteTask$$anonfun$execute$2.apply(FileFormatWriter.scala:438)
at scala.collection.Iterator$class.foreach(Iterator.scala:893)
at org.apache.spark.sql.catalyst.util.AbstractScalaRowIterator.foreach(AbstractScalaRowIterator.scala:26)
at org.apache.spark.sql.execution.datasources.FileFormatWriter$DynamicPartitionWriteTask.execute(FileFormatWriter.scala:438)
at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$org$apache$spark$sql$execution$datasources$FileFormatWriter$$executeTask$3.apply(FileFormatWriter.scala:256)
at org.apache.spark.sql.execution.datasources.FileFormatWriter$$anonfun$org$apache$spark$sql$execution$datasources$FileFormatWriter$$executeTask$3.apply(FileFormatWriter.scala:254)
at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1371)
at org.apache.spark.sql.execution.datasources.FileFormatWriter$.org$apache$spark$sql$execution$datasources$FileFormatWriter$$executeTask(FileFormatWriter.scala:259)
... 8 more