import kafka.serializer.StringDecoder
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.kafka.KafkaUtils

object H extends App {
  val conf = new SparkConf().setMaster("local[2]").setAppName("hello")
  val ssc  = new StreamingContext(conf, Seconds(5))

  // Kafka 0.8 direct-stream parameters: broker list only, no receivers or ZooKeeper
  val kafkaParams = Map[String, String]("metadata.broker.list" -> "myhadoop1:9092")

  // updateStateByKey needs a checkpoint directory to persist state between batches
  ssc.checkpoint("hdfs://myhadoop1:8020/data")

  val topics = Set[String]("wordcount1")

  // Read (key, value) records directly from the Kafka topic
  val lines = KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](ssc, kafkaParams, topics)

  // Running word count: fold each batch's counts for a word into its accumulated state
  lines.flatMap(_._2.split(" "))
    .map((_, 1))
    .updateStateByKey((newCounts: Seq[Int], state: Option[Int]) => Some(state.getOrElse(0) + newCounts.sum))
    .print()

  ssc.start()
  ssc.awaitTermination()
}
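The createDirectStream call with StringDecoder above is the old Kafka 0.8 direct-stream integration, so the build needs the spark-streaming-kafka artifact alongside Spark core and streaming. A minimal build.sbt sketch, assuming Scala 2.10 and Spark 1.6.3 (the exact versions are assumptions; the original does not state them):

// build.sbt — dependency sketch for the example above (versions are assumptions)
scalaVersion := "2.10.6"

libraryDependencies ++= Seq(
  "org.apache.spark" %% "spark-core"            % "1.6.3",
  "org.apache.spark" %% "spark-streaming"       % "1.6.3",
  // Kafka 0.8 direct-stream integration that provides KafkaUtils.createDirectStream
  "org.apache.spark" %% "spark-streaming-kafka" % "1.6.3"
)

With these dependencies in place, the job can be run locally (master "local[2]") once the Kafka broker and HDFS checkpoint path referenced in the code are reachable.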