运行时json4s NoSuchMethodError

时间:2015-07-29 19:07:43

标签: json scala json4s

我正在使用json4s来解析json字符串,代码在本地运行良好,但是当我将它放入生产服务器时,它会抛出NoSuchMethodError。我的代码如下:

import scala.util.parsing.json.JSON
import scala.util.matching.Regex
import org.json4s._
import org.json4s.JsonDSL._
import org.json4s.native.JsonMethods._
import org.json4s.JsonInput

/** Parses a JSON string into a `Map[String, Any]`.
  *
  * Fix: the original body ended with `var json = ...`, an assignment whose
  * type is `Unit`, so the method could never return the declared
  * `Map[String, Any]` (it would not even compile). We now extract the map
  * from the parsed `JValue` and return it.
  *
  * NOTE(review): the `NoSuchMethodError: scala.runtime.ObjectRef.create`
  * in the stack trace is a Scala binary-version mismatch, not a code bug —
  * `ObjectRef.create` exists only in the Scala 2.11 runtime, so a json4s
  * jar built for 2.11 is running against a 2.10 runtime (Spark's). Align
  * the json4s artifact's Scala suffix (_2.10 vs _2.11) with the cluster's
  * Scala version.
  *
  * @param vtStr a JSON document as a string
  * @return the top-level JSON object as a Scala map
  */
def getIndexMap(vtStr: String): Map[String, Any] = {
  // Needed by `extract`; DefaultFormats covers the primitive conversions.
  implicit val formats: Formats = DefaultFormats
  // JsonMethods.parse is already in scope from the top-of-file import of
  // org.json4s.native.JsonMethods._ — no need to re-import here.
  val json = parse(
    StringInput(vtStr),
    useBigDecimalForDouble = false,
    useBigIntForLong = true
  )
  json.extract[Map[String, Any]]
}

堆栈跟踪如下:

WARN scheduler.TaskSetManager: Lost task 0.0 in stage 3.0 (TID 79, avh007.av.pan.local): java.lang.NoSuchMethodError: scala.runtime.ObjectRef.create(Ljava/lang/Object;)Lscala/runtime/ObjectRef;
    at org.json4s.native.JsonParser$$anonfun$1.apply(JsonParser.scala:145)
    at org.json4s.native.JsonParser$$anonfun$1.apply(JsonParser.scala:143)
    at org.json4s.native.JsonParser$.parse(JsonParser.scala:131)
    at org.json4s.native.JsonParser$.parse(JsonParser.scala:71)
    at org.json4s.native.JsonParser$.parse(JsonParser.scala:66)
    at org.json4s.native.JsonMethods$class.parse(JsonMethods.scala:11)
    at org.json4s.native.JsonMethods$.parse(JsonMethods.scala:63)
    at com.panw.spark.PanavParser$.getIndexMap(PanavParser.scala:84)
    at com.panw.spark.PanavStreamScala2$$anonfun$main$2$$anonfun$apply$1$$anonfun$apply$2.apply(PanavStreamScala2.scala:184)
    at com.panw.spark.PanavStreamScala2$$anonfun$main$2$$anonfun$apply$1$$anonfun$apply$2.apply(PanavStreamScala2.scala:163)
    at scala.collection.Iterator$class.foreach(Iterator.scala:727)
    at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
    at com.panw.spark.PanavStreamScala2$$anonfun$main$2$$anonfun$apply$1.apply(PanavStreamScala2.scala:163)
    at com.panw.spark.PanavStreamScala2$$anonfun$main$2$$anonfun$apply$1.apply(PanavStreamScala2.scala:140)
    at org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:806)
    at org.apache.spark.rdd.RDD$$anonfun$foreachPartition$1.apply(RDD.scala:806)
    at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1498)
    at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1498)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:61)
    at org.apache.spark.scheduler.Task.run(Task.scala:64)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:203)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
    at java.lang.Thread.run(Thread.java:745)

0 个答案:

没有答案