You signed in with another tab or window. Reload to refresh your session. You signed out in another tab or window. Reload to refresh your session. You switched accounts on another tab or window. Reload to refresh your session. Dismiss alert
2014-06-13 14:30:00,665 [spark-akka.actor.default-dispatcher-18] INFO org.apache.spark.streaming.scheduler.JobScheduler - Starting job streaming job 1402650000000 ms.3 from job set of time 1402650000000 ms
2014-06-13 14:30:00,665 [spark-akka.actor.default-dispatcher-18] ERROR org.apache.spark.streaming.scheduler.JobScheduler - Error running job streaming job 1402650000000 ms.2
java.lang.ClassCastException: java.lang.Integer cannot be cast to java.lang.String
at org.apache.pig.data.DataType.mapToString(DataType.java:1587)
at org.apache.pig.impl.util.TupleFormat.format(TupleFormat.java:55)
at org.apache.pig.data.AbstractTuple.toString(AbstractTuple.java:39)
at java.lang.String.valueOf(String.java:2854)
at scala.collection.mutable.StringBuilder.append(StringBuilder.scala:197)
at scala.Tuple2.toString(Tuple2.scala:22)
at java.lang.String.valueOf(String.java:2854)
at java.io.PrintStream.println(PrintStream.java:821)
at scala.Console$.println(Console.scala:240)
at scala.Predef$.println(Predef.scala:287)
at org.apache.spark.streaming.dstream.DStream$$anonfun$foreachFunc$2$1$$anonfun$apply$1.apply(DStream.scala:590)
at org.apache.spark.streaming.dstream.DStream$$anonfun$foreachFunc$2$1$$anonfun$apply$1.apply(DStream.scala:590)
at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:108)
at org.apache.spark.streaming.dstream.DStream$$anonfun$foreachFunc$2$1.apply(DStream.scala:590)
at org.apache.spark.streaming.dstream.DStream$$anonfun$foreachFunc$2$1.apply(DStream.scala:585)
at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply$mcV$sp(ForEachDStream.scala:41)
at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply(ForEachDStream.scala:40)
at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply(ForEachDStream.scala:40)
at scala.util.Try$.apply(Try.scala:161)
at org.apache.spark.streaming.scheduler.Job.run(Job.scala:32)
at org.apache.spark.streaming.scheduler.JobScheduler$JobHandler.run(JobScheduler.scala:155)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:744)
The text was updated successfully, but these errors were encountered:
2014-06-13 14:30:00,665 [spark-akka.actor.default-dispatcher-18] INFO org.apache.spark.streaming.scheduler.JobScheduler - Starting job streaming job 1402650000000 ms.3 from job set of time 1402650000000 ms
2014-06-13 14:30:00,665 [spark-akka.actor.default-dispatcher-18] ERROR org.apache.spark.streaming.scheduler.JobScheduler - Error running job streaming job 1402650000000 ms.2
java.lang.ClassCastException: java.lang.Integer cannot be cast to java.lang.String
at org.apache.pig.data.DataType.mapToString(DataType.java:1587)
at org.apache.pig.impl.util.TupleFormat.format(TupleFormat.java:55)
at org.apache.pig.data.AbstractTuple.toString(AbstractTuple.java:39)
at java.lang.String.valueOf(String.java:2854)
at scala.collection.mutable.StringBuilder.append(StringBuilder.scala:197)
at scala.Tuple2.toString(Tuple2.scala:22)
at java.lang.String.valueOf(String.java:2854)
at java.io.PrintStream.println(PrintStream.java:821)
at scala.Console$.println(Console.scala:240)
at scala.Predef$.println(Predef.scala:287)
at org.apache.spark.streaming.dstream.DStream$$anonfun$foreachFunc$2$1$$anonfun$apply$1.apply(DStream.scala:590)
at org.apache.spark.streaming.dstream.DStream$$anonfun$foreachFunc$2$1$$anonfun$apply$1.apply(DStream.scala:590)
at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:108)
at org.apache.spark.streaming.dstream.DStream$$anonfun$foreachFunc$2$1.apply(DStream.scala:590)
at org.apache.spark.streaming.dstream.DStream$$anonfun$foreachFunc$2$1.apply(DStream.scala:585)
at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply$mcV$sp(ForEachDStream.scala:41)
at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply(ForEachDStream.scala:40)
at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply(ForEachDStream.scala:40)
at scala.util.Try$.apply(Try.scala:161)
at org.apache.spark.streaming.scheduler.Job.run(Job.scala:32)
at org.apache.spark.streaming.scheduler.JobScheduler$JobHandler.run(JobScheduler.scala:155)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:744)
The text was updated successfully, but these errors were encountered: