zoukankan      html  css  js  c++  java
  • Spark Streaming + Kafka 整合向导之createDirectStream

    启动zk: zkServer.sh start

    启动kafka:kafka-server-start.sh $KAFKA_HOME/config/server.properties

    创建一个topic:kafka-topics.sh --create --zookeeper node1:2181 --replication-factor 1 --partitions 1 --topic test

    启动一个生产者:kafka-console-producer.sh --broker-list node1:9092 --topic test

    运行代码测试:

    package com.lin.spark
    
    import org.apache.kafka.common.serialization.StringDeserializer
    import org.apache.spark.SparkConf
    import org.apache.spark.rdd.RDD
    import org.apache.spark.streaming.{Seconds, StreamingContext}
    import org.apache.spark.streaming.kafka010._
    import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
    import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe
    
    /**
      * Created by Administrator on 2019/6/7.
      */
    /**
      * Spark Streaming + Kafka 0-10 direct-stream example.
      *
      * Reads from topic "test", prints each (key, value) pair along with the
      * per-partition offset ranges of every micro-batch, and commits offsets
      * back to Kafka manually after processing (at-least-once delivery).
      */
    object Halo {
      def main(args: Array[String]): Unit = {
        val kafkaParams = Map[String, Object](
          "bootstrap.servers" -> "node1:9092",
          "key.deserializer" -> classOf[StringDeserializer],
          "value.deserializer" -> classOf[StringDeserializer],
          "group.id" -> "use_a_separate_group_id_for_each_stream",
          "auto.offset.reset" -> "latest",
          // Disable auto-commit: offsets are committed explicitly below, only
          // AFTER a batch has been processed. With auto-commit enabled the
          // consumer commits on its own timer, which defeats the manual
          // offset-range tracking done in foreachRDD and can lose data on
          // failure (offsets committed before processing completed).
          "enable.auto.commit" -> (false: java.lang.Boolean)
        )

        // local[2]: at least two threads — one for the receiver-less direct
        // stream's tasks, one for the driver-side batch scheduling.
        val conf = new SparkConf().setAppName("Halo").setMaster("local[2]")
        val ssc = new StreamingContext(conf, Seconds(5))

        val topics = Array("test")
        val stream = KafkaUtils.createDirectStream[String, String](
          ssc,
          PreferConsistent,
          Subscribe[String, String](topics, kafkaParams)
        )

        stream.foreachRDD { rdd =>
          // Must capture offset ranges on the driver, from the ORIGINAL
          // KafkaRDD, before any transformation changes the RDD type.
          val offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
          val mapped: RDD[(String, String)] = rdd.map(record => (record.key, record.value))
          // processing logic
          mapped.foreach(println)
          // print the offset range of each partition in this batch
          for (o <- offsetRanges) {
            println(s"${o.topic}  ${o.partition} ${o.fromOffset} ${o.untilOffset}")
          }
          // Commit offsets back to Kafka only after processing succeeded.
          // commitAsync is safe here: the cast target is the input DStream,
          // and CanCommitOffsets is implemented by the direct stream.
          stream.asInstanceOf[CanCommitOffsets].commitAsync(offsetRanges)
        }

        ssc.start()
        ssc.awaitTermination()
      }
    }

    参考:

    http://spark.apache.org/docs/2.2.0/streaming-kafka-0-10-integration.html

    https://cloud.tencent.com/developer/article/1355430

  • 相关阅读:
    Solr环境配置
    SolrJ解析MoreLikeThis查询结果
    思维导图软件PersonalBrain 6.0.6.4破解版使用
    离散对数-详解
    转:pptp和l2tp的区别
    DiffieHellman Key Exchange (DH)源代码
    磁盘IOPS计算
    转:TCP/IP Network Performance Benchmarks and Tools
    转:弄清楚你的业务类型——OLTP or OLAP
    U8软件的端口
  • 原文地址:https://www.cnblogs.com/linkmust/p/10990848.html
Copyright © 2011-2022 走看看