带状态的更新使用 updateStateByKey 方法：需要传入一个自定义的状态更新函数，并且必须设置 checkpoint 目录，否则运行时会报错。

import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
  * 需要设置checkpoint
  * 有状态的计算
  */
/**
 * Empty placeholder class; all behaviour lives in the companion
 * object `UpdataByKey` below.
 */
class UpdataByKey
object UpdataByKey {

  /**
   * State-update function passed to `updateStateByKey`.
   *
   * @param currValue the new counts observed for a key in the current batch
   * @param point     the previously accumulated count for the key, if any
   * @return the new running total wrapped in `Some` (state is never dropped)
   */
  def addFunc(currValue: Seq[Int], point: Option[Int]): Option[Int] = {
    Some(currValue.sum + point.getOrElse(0))
  }

  /**
   * Stateful word count over a Kafka topic: each 10-second batch is merged
   * into a running per-word total via `updateStateByKey`.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("UpdataByKey").setMaster("local[*]")
    val sc = new SparkContext(conf)
    val ssc = new StreamingContext(sc, Seconds(10))

    // FIX: updateStateByKey requires a checkpoint directory; without this
    // call the job fails at runtime ("The checkpoint directory has not been
    // set"). Use a durable path (e.g. HDFS) in production.
    ssc.checkpoint("checkpoint")

    val topics = "xiaopeng"
    val topicMap = topics.split(",").map((_, 2)).toMap
    // Receiver-based Kafka stream: (ZooKeeper quorum, consumer group, topic -> threads).
    val lines = KafkaUtils.createStream(ssc, "192.168.10.219:2181", "han", topicMap)
    val words = lines.flatMap(line => line._2.split(" ")).map(word => (word, 1))

    // FIX: the original discarded the stateful stream and printed the raw
    // per-batch pairs instead; keep the result and print the running totals.
    val stateDstream = words.updateStateByKey[Int](addFunc _)
    stateDstream.print()

    ssc.start()
    ssc.awaitTermination()
  }
}

 

相关文章:

  • 2022-12-23
  • 2021-07-21
  • 2022-01-24
  • 2021-07-18
  • 2021-06-10
  • 2021-08-16
  • 2021-09-15
  • 2021-06-24
猜你喜欢
  • 2021-12-19
  • 2021-08-16
  • 2021-10-26
  • 2022-12-23
  • 2021-09-03
  • 2022-01-07
  • 2021-05-15
相关资源
相似解决方案