zoukankan      html  css  js  c++  java
  • kafka Consumer2Local

    public class Consumer2Local {

        private static final Logger log = LoggerFactory.getLogger(Consumer2Local.class);

        /**
         * Consumes a Kafka topic and appends each record's value as one line to a local file.
         *
         * <p>Expected arguments (exactly 6):
         * bootstrap.servers, topic, groupName, needSecurity ("need" enables SASL_PLAINTEXT),
         * earliest ("earliest" sets auto.offset.reset), output file path.
         *
         * <p>Runs until the JVM is terminated; a shutdown hook flushes and closes the
         * output file so buffered records are not lost.
         *
         * @param args command-line arguments as described above
         * @throws IOException if the output file cannot be opened
         */
        public static void main(String[] args) throws IOException {

            if (args.length != 6) {
                throw new RuntimeException("参数列表:bootstrap.servers、topic、groupName,needSecurity、earliest、output " +
                        "example: java -jar -Djava.security.auth.login.config=kafka_client_jass.conf kafka_consumer_util.jar bbrd-kafka-133-11:19092 ocenter_xes_order_infos bdc_rt_1 need earliest /home/hadoop/wangxin/kafka-client/data/ocenter_xes_order_infos"
                );
            }

            final KafkaConsumer<String, String> consumer = new KafkaConsumer<>(buildConsumerProps(args));
            consumer.subscribe(Arrays.asList(args[1]));

            // Shared between the poll loop and the shutdown hook; both synchronize on it
            // so the hook never closes the writer in the middle of a write.
            final BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(new File(args[5])));

            // When the JVM exits, flush buffered data and close the file.
            // BufferedWriter.close() flushes and then closes the underlying FileWriter,
            // so a single close() is sufficient (the original code closed the FileWriter
            // first and the BufferedWriter afterwards, which is the wrong order).
            Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
                @Override
                public void run() {
                    try {
                        synchronized (bufferedWriter) {
                            bufferedWriter.close();
                        }
                        log.info("安全退出!");
                    } catch (IOException e) {
                        // Log through SLF4J instead of printStackTrace().
                        log.error("failed to flush/close output file on shutdown", e);
                    }
                }
            }));

            // Poll forever; auto-commit (enabled above) persists offsets every second.
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(100);
                synchronized (bufferedWriter) {
                    for (ConsumerRecord<String, String> record : records) {
                        bufferedWriter.write(record.value());
                        bufferedWriter.newLine();
                    }
                }
            }
        }

        /** Builds the Kafka consumer configuration from the command-line arguments. */
        private static Properties buildConsumerProps(String[] args) {
            Properties props = new Properties();
            props.setProperty("bootstrap.servers", args[0]);
            props.setProperty("group.id", args[2]);
            props.setProperty("enable.auto.commit", "true");
            props.setProperty("auto.commit.interval.ms", "1000");
            if ("need".equalsIgnoreCase(args[3])) {
                props.setProperty("security.protocol", "SASL_PLAINTEXT");
                props.setProperty("sasl.mechanism", "PLAIN");
                // NOTE(review): "sasl.username"/"sasl.password" are not recognized Kafka
                // client configs — SASL/PLAIN credentials normally come from the JAAS file
                // passed via -Djava.security.auth.login.config (see the usage example) or
                // from "sasl.jaas.config". Kept for behavioral compatibility, but these
                // hard-coded credentials should be removed from source control.
                props.setProperty("sasl.username", "xes_data");
                props.setProperty("sasl.password", "jrUHKOJJHm6VIQCG");
            }
            if ("earliest".equalsIgnoreCase(args[4])) {
                props.setProperty("auto.offset.reset", "earliest");
            }
            props.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            props.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            return props;
        }
    }




    java -jar kafka2local.jar public-common-kafka-1:9092 $1 $1 notNeed  earliest /home/ads/maoxiangyi/kafka_util/data/$1


  • 相关阅读:
    【教程】模拟登陆百度之Java代码版
    Redis错误配置详解
    Redis内存存储结构分析
    Notepad++安装插件
    hadoop2.x 常用端口及定义方法
    微信订阅号可以开通微信支付吗?
    CDH 的Cloudera Manager免费与收费版的对比表
    Hadoop调度框架
    再谈spark部署搭建和企业级项目接轨的入门经验(博主推荐)
    Hive环境的安装部署(完美安装)(集群内或集群外都适用)(含卸载自带mysql安装指定版本)
  • 原文地址:https://www.cnblogs.com/maoxiangyi/p/11027573.html
Copyright © 2011-2022 走看看