Flume configuration for monitoring log directories and sending the data to Kafka

#logser is the name of this Flume agent; every agent is made up of sources, channels and sinks
#sources are where the data comes from, channels are the buffer in between, and sinks are where the data goes
#(example commands for launching and verifying the agent follow the config below)
logser.sources = src_study_info src_new_pen
logser.sinks = kfk_study_info kfk_new_pen
logser.channels = ch_study_info ch_new_pen

#source
#the TAILDIR source type tails log files in near real time as new lines are appended to them
logser.sources.src_study_info.type = TAILDIR
logser.sources.src_new_pen.type = TAILDIR
#positionFile records the read offset of every monitored file so tailing can resume after a restart
logser.sources.src_study_info.positionFile = /data/flume/position/study_info/taildir_position.json
logser.sources.src_new_pen.positionFile = /data/flume/position/new_pen/taildir_position.json
#file groups to monitor
logser.sources.src_study_info.filegroups = f1
logser.sources.src_new_pen.filegroups = f1
#the concrete files in each group, i.e. the files we actually monitor (the file name part is a regex)
logser.sources.src_study_info.filegroups.f1 = /data/flume/study_info_log/.*.log
logser.sources.src_new_pen.filegroups.f1 = /data/flume/new_pen_log/.*.log

#interceptor
#static interceptors attach fixed headers to every event; the Kafka sink routes each event to the topic named in its 'topic' header, so no topic needs to be configured on the sink itself
logser.sources.src_study_info.interceptors = i1 i2
logser.sources.src_study_info.interceptors.i1.type=static
logser.sources.src_study_info.interceptors.i1.key = type
logser.sources.src_study_info.interceptors.i1.value = study_info
logser.sources.src_study_info.interceptors.i2.type=static
logser.sources.src_study_info.interceptors.i2.key = topic
logser.sources.src_study_info.interceptors.i2.value = study_info

#newpen
logser.sources.src_new_pen.interceptors = i1 i2
logser.sources.src_new_pen.interceptors.i1.type=static
logser.sources.src_new_pen.interceptors.i1.key = type
logser.sources.src_new_pen.interceptors.i1.value = new_pen
logser.sources.src_new_pen.interceptors.i2.type=static
logser.sources.src_new_pen.interceptors.i2.key = topic
logser.sources.src_new_pen.interceptors.i2.value = new_pen

#channel
logser.channels.ch_study_info.type = memory
logser.channels.ch_study_info.capacity = 10000
logser.channels.ch_study_info.transactionCapacity = 1000

logser.channels.ch_new_pen.type = memory
logser.channels.ch_new_pen.capacity = 10000
logser.channels.ch_new_pen.transactionCapacity = 1000

#kfk sink
#the sink type is KafkaSink, i.e. the log events are ultimately delivered to Kafka
logser.sinks.kfk_study_info.type = org.apache.flume.sink.kafka.KafkaSink

logser.sinks.kfk_new_pen.type = org.apache.flume.sink.kafka.KafkaSink

#Kafka broker list (brokerList is the legacy property name; on Flume 1.7+ kafka.bootstrap.servers can be used instead)
logser.sinks.kfk_study_info.brokerList = 192.168.48.201:9092

logser.sinks.kfk_new_pen.brokerList = 192.168.48.201:9092

#Bind the source and sink to the channel
logser.sources.src_study_info.channels = ch_study_info
logser.sinks.kfk_study_info.channel = ch_study_info

logser.sources.src_new_pen.channels = ch_new_pen
logser.sinks.kfk_new_pen.channel = ch_new_pen
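
To run the agent, point flume-ng at this file and pass the agent name logser, which must match the property prefix used above. A minimal launch sketch, assuming the config is saved as conf/logser.conf under the Flume installation directory; the paths and the console logger option are assumptions, not part of the original config:

# start the agent defined above; --name must match the "logser" prefix in the config
bin/flume-ng agent --conf conf --conf-file conf/logser.conf --name logser -Dflume.root.logger=INFO,console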
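
Since the TAILDIR source only picks up lines appended to files matching .*.log, writing a line into such a file is enough to generate an event. The file name and payload below are made-up test values:

# hypothetical test line; any file matching .*.log in the monitored directory works
echo "test study_info event" >> /data/flume/study_info_log/test.log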
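
To confirm that events land in the right topic, attach a console consumer to study_info. This assumes the topics already exist or broker auto-creation is enabled; very old Kafka releases take --zookeeper instead of --bootstrap-server:

# read everything written to the study_info topic so far
bin/kafka-console-consumer.sh --bootstrap-server 192.168.48.201:9092 --topic study_info --from-beginning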
