FLUME 1.8.0 설치 (로그를 HADOOP에 저장)
========플럼 설치 (수집 서버, 저장 서버)
# Download and unpack Flume 1.8.0 as the hadoop user
su - hadoop
cd /home
wget http://mirror.navercorp.com/apache/flume/1.8.0/apache-flume-1.8.0-bin.tar.gz
tar zxvf ./apache-flume-1.8.0-bin.tar.gz
# Rename the EXTRACTED DIRECTORY to /home/flume (the original note mistakenly
# renamed the .tar.gz archive, leaving FLUME_HOME pointing at a gzip file)
mv ./apache-flume-1.8.0-bin ./flume
===========환경파일 (수집 서버, 저장 서버)
# Add Flume environment variables to /etc/profile (as root).
su - root
vi /etc/profile
#################FLUME
export FLUME_HOME=/home/flume
# NOTE: the original note fused the next two exports onto one line with no
# separator, which is a syntax error when /etc/profile is sourced.
export PATH="$FLUME_HOME/bin:$PATH"
export FLUME_CONF_DIR=/home/flume/conf
===========설정 (수집 서버, 저장 서버 각각)
# Switch back to the hadoop user and work in Flume's conf directory
su - hadoop
cd /home/flume/conf
===============수집서버의 환경 설정
[hadoop@hadoop01 conf]$ vi ./agent-systemlog.conf
# Collector agent (agent2): tail a local log file and forward to the storage server.
# Alternative: enable a second source r2 alongside r1
#agent2.sources = r1 r2
agent2.sources = r1
agent2.channels = c1
agent2.sinks = k1
# Read log lines from a file (exec source streaming new entries via tail -F)
agent2.sources.r1.type = exec
agent2.sources.r1.command = tail -F /var/log/test
agent2.sources.r1.channels = c1
#agent2.sources.r2.type = exec
#agent2.sources.r2.command = tail -F /logs/info.log
#agent2.sources.r2.channels = c1
# Buffer events in memory: up to 10000 events, 1000 per transaction
agent2.channels.c1.type = memory
agent2.channels.c1.capacity = 10000
agent2.channels.c1.transactionCapacity = 1000
# Forward events to the remote storage server over Avro RPC (host "hadoop", port 4545)
agent2.sinks.k1.type = avro
agent2.sinks.k1.channel = c1
agent2.sinks.k1.hostname = hadoop
agent2.sinks.k1.port = 4545
========================저장 서버 설정파일 수정
[hadoop@hadoop conf]$ vi ./hdfs-save.conf
# Storage agent (agent1): receive events over Avro and write them to HDFS.
agent1.sources = r1
agent1.channels = c1
agent1.sinks = k1
# Receive data over Avro RPC on port 4545 (sent by the collector agent)
agent1.sources.r1.type = avro
agent1.sources.r1.bind = 0.0.0.0
agent1.sources.r1.port = 4545
agent1.sources.r1.channels = c1
# In-memory channel buffer
agent1.channels.c1.type = memory
agent1.channels.c1.capacity = 10000
agent1.channels.c1.transactionCapacity = 1000
# Write events to HDFS
agent1.sinks.k1.type = hdfs
agent1.sinks.k1.hdfs.path = hdfs://hadoop/flume/test
# Roll to a new HDFS file every 30 seconds (hdfs.rollInterval is in SECONDS;
# the original comment claimed "24 hours", which does not match the value 30)
agent1.sinks.k1.hdfs.rollInterval = 30
agent1.sinks.k1.channel = c1
==== 저장 서버에 플럼 실행
flume-ng agent --conf-file $FLUME_HOME/conf/hdfs-save.conf --name agent1
==== 수집 서버에 플럼 실행flume-ng agent --conf-file $FLUME_HOME/conf/agent-systemlog.conf --name agent2
# Download and unpack Flume 1.8.0 as the hadoop user
su - hadoop
cd /home
wget http://mirror.navercorp.com/apache/flume/1.8.0/apache-flume-1.8.0-bin.tar.gz
tar zxvf ./apache-flume-1.8.0-bin.tar.gz
# Rename the EXTRACTED DIRECTORY to /home/flume (the original note mistakenly
# renamed the .tar.gz archive, leaving FLUME_HOME pointing at a gzip file)
mv ./apache-flume-1.8.0-bin ./flume
===========환경파일 (수집 서버, 저장 서버)
# Add Flume environment variables to /etc/profile (as root).
su - root
vi /etc/profile
#################FLUME
export FLUME_HOME=/home/flume
# NOTE: the original note fused the next two exports onto one line with no
# separator, which is a syntax error when /etc/profile is sourced.
export PATH="$FLUME_HOME/bin:$PATH"
export FLUME_CONF_DIR=/home/flume/conf
===========설정 (수집 서버, 저장 서버 각각)
# Switch back to the hadoop user and work in Flume's conf directory
su - hadoop
cd /home/flume/conf
===============수집서버의 환경 설정
[hadoop@hadoop01 conf]$ vi ./agent-systemlog.conf
# Collector agent (agent2): tail a local log file and forward to the storage server.
# Alternative: enable a second source r2 alongside r1
#agent2.sources = r1 r2
agent2.sources = r1
agent2.channels = c1
agent2.sinks = k1
# Read log lines from a file (exec source streaming new entries via tail -F)
agent2.sources.r1.type = exec
agent2.sources.r1.command = tail -F /var/log/test
agent2.sources.r1.channels = c1
#agent2.sources.r2.type = exec
#agent2.sources.r2.command = tail -F /logs/info.log
#agent2.sources.r2.channels = c1
# Buffer events in memory: up to 10000 events, 1000 per transaction
agent2.channels.c1.type = memory
agent2.channels.c1.capacity = 10000
agent2.channels.c1.transactionCapacity = 1000
# Forward events to the remote storage server over Avro RPC (host "hadoop", port 4545)
agent2.sinks.k1.type = avro
agent2.sinks.k1.channel = c1
agent2.sinks.k1.hostname = hadoop
agent2.sinks.k1.port = 4545
========================저장 서버 설정파일 수정
[hadoop@hadoop conf]$ vi ./hdfs-save.conf
# Storage agent (agent1): receive events over Avro and write them to HDFS.
agent1.sources = r1
agent1.channels = c1
agent1.sinks = k1
# Receive data over Avro RPC on port 4545 (sent by the collector agent)
agent1.sources.r1.type = avro
agent1.sources.r1.bind = 0.0.0.0
agent1.sources.r1.port = 4545
agent1.sources.r1.channels = c1
# In-memory channel buffer
agent1.channels.c1.type = memory
agent1.channels.c1.capacity = 10000
agent1.channels.c1.transactionCapacity = 1000
# Write events to HDFS
agent1.sinks.k1.type = hdfs
agent1.sinks.k1.hdfs.path = hdfs://hadoop/flume/test
# Roll to a new HDFS file every 30 seconds (hdfs.rollInterval is in SECONDS;
# the original comment claimed "24 hours", which does not match the value 30)
agent1.sinks.k1.hdfs.rollInterval = 30
agent1.sinks.k1.channel = c1
==== 저장 서버에 플럼 실행
flume-ng agent --conf-file $FLUME_HOME/conf/hdfs-save.conf --name agent1
==== 수집 서버에 플럼 실행flume-ng agent --conf-file $FLUME_HOME/conf/agent-systemlog.conf --name agent2
댓글
댓글 쓰기