[[email protected] data]#tar -zxvf apache-flume-1.7.0-bin.tar.gz
[[email protected] conf]# cp flume-env.sh.template flume-env.sh
修改 flume-env.sh,设置 JAVA_HOME:
[[email protected] conf]# vi flume-env.sh
export JAVA_HOME=/data/jdk
export JAVA_OPTS="-Xms100m -Xmx2000m -Dcom.sun.management.jmxremote"
a4.conf(与下文启动命令 -f ../conf/a4.conf 一致)
# Name this agent's source, channel and sink components
a4.sources = r1
a4.channels = c1
a4.sinks = k1

# Source: spooling-directory source, ingests files dropped into /root/logs
a4.sources.r1.type = spooldir
a4.sources.r1.spoolDir = /root/logs

# Channel: in-memory event buffer
a4.channels.c1.type = memory
a4.channels.c1.capacity = 10000
a4.channels.c1.transactionCapacity = 100

# Interceptor: stamp each event with a "timestamp" header.
# Required because hdfs.path below uses %Y%m%d escapes; without a timestamp
# header the HDFS sink throws a NullPointerException.
# NOTE: the key must be "...interceptors.i1.type" (not "...interceptors.i1").
# Alternative: leave interceptors off and set a4.sinks.k1.hdfs.useLocalTimeStamp = true
a4.sources.r1.interceptors = i1
a4.sources.r1.interceptors.i1.type = org.apache.flume.interceptor.TimestampInterceptor$Builder

# Sink: write events to HDFS, bucketed into one directory per day
a4.sinks.k1.type = hdfs
a4.sinks.k1.hdfs.path = hdfs://ns1/flume/%Y%m%d
a4.sinks.k1.hdfs.filePrefix = events-
a4.sinks.k1.hdfs.fileType = DataStream
# Do not roll files by event count
a4.sinks.k1.hdfs.rollCount = 0
# Roll the HDFS file when it reaches 128 MB
a4.sinks.k1.hdfs.rollSize = 134217728
# Roll the HDFS file every 60 seconds
a4.sinks.k1.hdfs.rollInterval = 60

# Wire source -> channel -> sink.
# Sources take the plural "channels"; sinks take the SINGULAR "channel" —
# "a4.sinks.k1.channels" is silently ignored and the agent fails to start.
a4.sources.r1.channels = c1
a4.sinks.k1.channel = c1
[[email protected] lib]# scp namenode:/data/hadoop/etc/hadoop/{core-site.xml,hdfs-site.xml} /data/apache-flume-1.7.0-bin/conf
[[email protected] bin]# ./flume-ng agent -n a4 -c ../conf -f ../conf/a4.conf -Dflume.root.logger=INFO,console
报错1:
java.lang.NoClassDefFoundError: org/apache/hadoop/io/SequenceFile$CompressionType
[[email protected] lib]# scp 192.168.20.184:/data/hadoop//share/hadoop/common/hadoop-common-2.7.3.jar ./
报错2:
java.lang.NoClassDefFoundError: org/apache/commons/configuration/Configuration
[[email protected] lib]# scp 192.168.20.184:/data/hadoop//share/hadoop/common/lib/commons-configuration-1.6.jar ./
[[email protected] lib]# scp 192.168.20.184:/data/hadoop//share/hadoop/common/lib/hadoop-auth-2.7.3.jar ./
[[email protected] lib]# scp 192.168.20.184:/data/hadoop/share/hadoop/common/lib/htrace-core-3.1.0-incubating.jar ./
报错3
Caused by: java.lang.NoClassDefFoundError: org/apache/commons/io/Charsets
[[email protected] lib]# scp 192.168.20.184:/data/hadoop/share/hadoop/common/lib/commons-io-2.4.jar ./
[[email protected] bin]$ ./hdfs dfs -chown -R root /flume