object H extends App {
  // Spark Streaming word-count over a Kafka topic: reads messages, splits them
  // into words, and maintains a running per-word count across batches with
  // updateStateByKey. State survives batches via the HDFS checkpoint below.
  // NOTE(review): `extends App` has initialization-order pitfalls for non-trivial
  // entry points; kept here to preserve the existing interface.

  val conf = new SparkConf().setMaster("local[2]").setAppName("hello")
  // 5-second micro-batch interval.
  val ss = new StreamingContext(conf, Seconds(5))
  // Kafka 0.8 direct-stream parameters: broker list only (no ZooKeeper needed).
  val kafkaParams = Map[String, String]("metadata.broker.list" -> "myhadoop1:9092")
  // Checkpointing is REQUIRED for stateful transformations like updateStateByKey.
  ss.checkpoint("hdfs://myhadoop1:8020/data")
  val topic = Set[String]("wordcount1")

  // Direct stream from Kafka: each record is (key, message); only the message
  // payload (_._2) is used below.
  val lines = KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](ss, kafkaParams, topic)

  lines
    .flatMap(_._2.split(" "))
    .map((_, 1))
    // Fold this batch's counts (seqs) into the previous running total (state).
    // `state.getOrElse(0) + seqs.sum` replaces the original mutable var +
    // imperative for-loop; returning Some keeps the key's state alive.
    .updateStateByKey((seqs: Seq[Int], state: Option[Int]) => Some(state.getOrElse(0) + seqs.sum))
    .print()

  ss.start()
  ss.awaitTermination()
}
分享文章:Spark Streaming与Kafka的updateStateByKey()
标题URL:
http://cxhlcq.com/article/gcojeh.html