java.lang.IllegalArgumentException: requirement failed: Can not write to a closed ByteBufferOutputStream

Asked: 2019-05-26 02:56:09

Tags: spark-streaming-kafka

This question may be a duplicate of: KafkaConsumer is not safe for multi-threaded access (4 answers)

Kafka is the data source of a Spark Streaming job and Redis is the sink. However, the job fails with "java.lang.IllegalArgumentException: requirement failed: Can not write to a closed ByteBufferOutputStream". How can this be fixed?

import org.apache.kafka.common.serialization.StringDeserializer

def initKafkaParams(bootstrap_servers: String, groupId: String, duration: String = "5000"): Map[String, Object] = {
  // Consumer settings for the Kafka 0.10 direct stream; offsets are
  // auto-committed every `duration` milliseconds.
  val kafkaParams = Map[String, Object](
    "bootstrap.servers" -> bootstrap_servers,
    "key.deserializer" -> classOf[StringDeserializer],
    "value.deserializer" -> classOf[StringDeserializer],
    "group.id" -> groupId,
    "auto.offset.reset" -> "latest",
    "enable.auto.commit" -> (true: java.lang.Boolean),
    "auto.commit.interval.ms" -> duration
  )
  kafkaParams
}

import org.apache.spark.streaming.kafka010.KafkaUtils
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe

val kafkaParams = KafkaUtil.initKafkaParams(Configuration.bootstrap_servers_log, groupId, duration)
val topics = Array(topic)
val stream = KafkaUtils.createDirectStream(ssc, PreferConsistent, Subscribe[String, String](topics, kafkaParams))
val cachedStream = stream.cache()
val closed_uids = cachedStream.map(record => parseJson(record.value)).filter(record => record != null)
closed_uids.foreachRDD(rdd =>
  rdd.foreachPartition { rows =>
    // One Redis connection per partition, released in finally even on failure
    val rc = RedisClient("recall")
    val redis = rc.getRedisClient()
    try {
      val pipe = redis.pipelined()
      val redisKey = "zyf_radio"
      rows.foreach(r => pipe.sadd(redisKey, r))
      pipe.sync()
    } catch {
      case e: Exception => println(s"redis error! ${e.getMessage}")
    } finally {
      redis.close()
    }
  }
)
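
Since the question is flagged as a possible duplicate of "KafkaConsumer is not safe for multi-threaded access", note that calling cache() on the raw direct stream is one way several threads can end up touching the same underlying KafkaConsumer. A minimal sketch of a variation worth trying, assuming the same parseJson and surrounding setup as above: map the records to plain values first and cache only the mapped stream, so the cached partitions no longer reference ConsumerRecord objects or the consumer itself.

val stream = KafkaUtils.createDirectStream(ssc, PreferConsistent, Subscribe[String, String](topics, kafkaParams))

// Extract plain String values before caching; the cached data then holds
// ordinary objects instead of ConsumerRecords tied to the KafkaConsumer.
val closed_uids = stream
  .map(record => parseJson(record.value))
  .filter(record => record != null)
  .cache()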

java.lang.IllegalArgumentException: requirement failed: Can not write to a closed ByteBufferOutputStream
    at scala.Predef$.require(Predef.scala:224)
    at org.apache.spark.util.ByteBufferOutputStream.write(ByteBufferOutputStream.scala:40)
    at java.io.ObjectOutputStream$BlockDataOutputStream.drain(ObjectOutputStream.java:1877)
    at java.io.ObjectOutputStream$BlockDataOutputStream.setBlockDataMode(ObjectOutputStream.java:1786)
    at java.io.ObjectOutputStream.writeFatalException(ObjectOutputStream.java:1580)
    at java.io.ObjectOutputStream.writeObject(ObjectOutputStream.java:351)
    at org.apache.spark.serializer.JavaSerializationStream.writeObject(JavaSerializer.scala:43)
    at org.apache.spark.serializer.JavaSerializerInstance.serialize(JavaSerializer.scala:100)
    at org.apache.spark.rpc.netty.NettyRpcEnv.serialize(NettyRpcEnv.scala:253)
    at org.apache.spark.rpc.netty.NettyRpcEnv.send(NettyRpcEnv.scala:192)
    at org.apache.spark.rpc.netty.NettyRpcEndpointRef.send(NettyRpcEnv.scala:512)
    at org.apache.spark.executor.CoarseGrainedExecutorBackend.statusUpdate(CoarseGrainedExecutorBackend.scala:142)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:412)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    at java.lang.Thread.run(Thread.java:748)

Corresponding task row from the Spark UI:
74631  1 (speculative)  SUCCESS  RACK_LOCAL  5 / c1-dsj-hadoop040.bj  stdout  stderr  2019/05/26 10:37:40  38 ms  27.0 KB / 20
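
Note that the task attempt in the UI row above is marked as speculative, and the exception is thrown while the executor serializes a status update in CoarseGrainedExecutorBackend.statusUpdate. That combination can occur when a task attempt is killed (for example, because its speculative twin finished first) and its RPC stream has already been closed. If that is the trigger here, disabling speculation is a quick way to test it; a minimal sketch, assuming a standard SparkConf/StreamingContext setup (the app name is illustrative, not from the original post):

import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}

val conf = new SparkConf()
  .setAppName("kafka-to-redis")       // illustrative name, not from the original post
  .set("spark.speculation", "false")  // turn off speculative task attempts

val ssc = new StreamingContext(conf, Seconds(5))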

0 Answers:
