
    Big Data Technology: Flume Configuration Examples (1 & 3)

    This post collects four Flume agent configurations: a netcat source logged to the console (test1), a spooling-directory source forwarded over Avro (test2), an Avro source that writes to HDFS (test3), and an exec source that tails an Nginx access log into HDFS (getnginxlog).


    [root@bigdatacloud conf]# cat test1
    a1.sources = r1
    a1.sinks = k1
    a1.channels = c1

    # Describe/configure the source
    a1.sources.r1.type = netcat
    a1.sources.r1.bind = 0.0.0.0
    a1.sources.r1.port = 44444

    # Describe the sink
    a1.sinks.k1.type = logger

    # Use a channel which buffers events in memory
    a1.channels.c1.type = memory
    a1.channels.c1.capacity = 1000
    a1.channels.c1.transactionCapacity = 100

    # Bind the source and sink to the channel
    a1.sources.r1.channels = c1
    a1.sinks.k1.channel = c1
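
    test1 listens on TCP port 44444 with a netcat source and prints every received line through the logger sink. A minimal way to launch and exercise it, assuming flume-ng is on the PATH and the agent is started from the conf directory shown in the prompts (paths and hostnames are illustrative):

    [root@bigdatacloud conf]# flume-ng agent --conf . --conf-file test1 --name a1 -Dflume.root.logger=INFO,console
    # from another terminal, send a test event; the source replies "OK" and the event appears in the agent log
    [root@bigdatacloud ~]# echo "hello flume" | nc localhost 44444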


    ====================

    [root@bigdatacloud conf]# cat test2
    a1.sources=r1
    a1.sinks=k1
    a1.channels=c1

    # Describe/configure the source
    a1.sources.r1.type=spooldir
    a1.sources.r1.spoolDir=/opt/sqooldir

    # Describe the sink
    a1.sinks.k1.type=avro
    a1.sinks.k1.hostname=bigdatastorm
    a1.sinks.k1.port=44444

    # Use a channel which buffers events in memory
    a1.channels.c1.type=memory
    a1.channels.c1.capacity=1000
    a1.channels.c1.transactionCapacity=100

    # Bind the source and sink to the channel
    a1.sources.r1.channels=c1
    a1.sinks.k1.channel=c1
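
    test2 picks up files dropped into /opt/sqooldir with a spooling-directory source and forwards the events to an Avro endpoint at bigdatastorm:44444, so a matching Avro source (for example the test3 agent below) must already be listening there. Files must not be modified after they land in the spooling directory; once fully ingested they are renamed with the default .COMPLETED suffix. A sketch of starting the agent and feeding it a file (the copied file is only an example):

    [root@bigdatacloud conf]# flume-ng agent --conf . --conf-file test2 --name a1 -Dflume.root.logger=INFO,console
    # drop a finished file into the spooling directory
    [root@bigdatacloud conf]# cp /var/log/messages /opt/sqooldir/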

    =========================

    [root@bigdatacloud conf]# cat test3
    a1.sources=r1
    a1.sinks=k1
    a1.channels=c1

    # Describe/configure the source
    a1.sources.r1.type=avro
    a1.sources.r1.bind=0.0.0.0
    a1.sources.r1.port=44444

    # Describe the sink
    a1.sinks.k1.type=hdfs
    a1.sinks.k1.hdfs.path=hdfs://mycluster/flume/data/%y-%m-%d
    a1.sinks.k1.hdfs.rollInterval=0
    a1.sinks.k1.hdfs.rollCount=0
    a1.sinks.k1.hdfs.rollSize=10240000
    a1.sinks.k1.hdfs.fileType=DataStream
    a1.sinks.k1.hdfs.idleTimeout=5
    a1.sinks.k1.hdfs.useLocalTimeStamp=true
    a1.sinks.k1.hdfs.callTimeout=10000

    # Use a channel which buffers events in memory
    a1.channels.c1.type=memory
    a1.channels.c1.capacity=1000
    a1.channels.c1.transactionCapacity=100

    # Bind the source and sink to the channel
    a1.sources.r1.channels=c1
    a1.sinks.k1.channel=c1
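
    test3 receives Avro RPC events on port 44444 and writes them to HDFS under a date-partitioned path. With rollInterval=0 and rollCount=0, files roll only on size (rollSize=10240000 bytes, about 10 MB) or after 5 seconds of inactivity (idleTimeout=5); fileType=DataStream writes plain text instead of the default SequenceFile wrapper, and useLocalTimeStamp=true resolves the %y-%m-%d escapes from the agent's clock rather than an event header. The sketch below assumes the agent runs on the host that the test2 sink points at (bigdatastorm in this example):

    [root@bigdatastorm conf]# flume-ng agent --conf . --conf-file test3 --name a1 -Dflume.root.logger=INFO,console
    # verify output under the configured path
    [root@bigdatastorm conf]# hdfs dfs -ls /flume/data/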

    ====================

    [root@bigdatacloud conf]# cat getnginxlog
    a1.sources=r1
    a1.sinks=k1
    a1.channels=c1

    # Describe/configure the source
    #a1.sources.r1.type=avro
    a1.sources.r1.type=exec
    #a1.sources.r1.bind=0.0.0.0
    #a1.sources.r1.port=44444
    a1.sources.r1.command=tail -F /opt/first_project/data/access.log

    # Describe the sink
    a1.sinks.k1.type=hdfs
    a1.sinks.k1.hdfs.path=hdfs://mycluster/flume/data1/%y-%m-%d
    a1.sinks.k1.hdfs.rollInterval=0
    a1.sinks.k1.hdfs.rollCount=0
    a1.sinks.k1.hdfs.rollSize=10240000
    a1.sinks.k1.hdfs.fileType=DataStream
    a1.sinks.k1.hdfs.idleTimeout=5
    a1.sinks.k1.hdfs.useLocalTimeStamp=true
    a1.sinks.k1.hdfs.callTimeout=10000

    # Use a channel which buffers events in memory
    a1.channels.c1.type=memory
    a1.channels.c1.capacity=1000
    a1.channels.c1.transactionCapacity=100

    # Bind the source and sink to the channel
    a1.sources.r1.channels=c1
    a1.sinks.k1.channel=c1
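
    getnginxlog tails an Nginx access log with an exec source and ships it to HDFS using the same sink settings as test3. Note that tail -F follows the file across log rotation, but the exec source makes no delivery guarantee: events produced while the channel is full or the agent is down are lost. A sketch of running it, assuming Flume and the HDFS client are configured on this host:

    [root@bigdatacloud conf]# flume-ng agent --conf . --conf-file getnginxlog --name a1 -Dflume.root.logger=INFO,console
    [root@bigdatacloud conf]# hdfs dfs -ls /flume/data1/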


  • Original article: https://www.cnblogs.com/TendToBigData/p/10501492.html