#!/bin/bash
# flume/Flume.sh — kafka + flume setup (2016-06-14)
# http://www.aqzt.com
# email: ppabc@qq.com
# robert yu
# Target: CentOS 6
#
# Sets up Apache Flume feeding into Kafka. Since Flume 1.6.0 the official
# Kafka sink is bundled, so data collected by Flume can be pushed straight
# to a Kafka topic.
#
# Recovered from patch 0c869183818bf7dd2281ca154beb20d2cdff36a7
# ("Flume", ppabc, Tue 14 Jun 2016).
#
# NOTE(review): the original patch text was damaged in transit — every
# here-doc body (<<EOF ... EOF) was stripped out. The here-doc contents
# below are reconstructed, plausible examples and MUST be verified against
# the original script before this is run anywhere.

set -e

# Base packages used throughout the setup.
yum install -y curl openssh-server openssh-clients postfix cronie git nmap \
  unzip wget lsof xz gcc make vim gcc-c++ libtool

# NOTE: adjust the host name / IP below for your environment
# (original comment: "注意修改主机名" = "remember to change the hostname").
# NOTE(review): hosts entry reconstructed — original body lost.
cat >>/etc/hosts <<'EOF'
192.168.142.136 test_kafka.flume.com
EOF

# Fetch and unpack Flume 1.6.0.
# NOTE(review): download URL and install dir reconstructed — confirm mirror.
cd /opt
wget http://archive.apache.org/dist/flume/1.6.0/apache-flume-1.6.0-bin.tar.gz
tar zxf apache-flume-1.6.0-bin.tar.gz

# Expose FLUME_HOME system-wide.
# NOTE(review): profile exports reconstructed — original body lost.
cat >>/etc/profile <<'EOF'
export FLUME_HOME=/opt/apache-flume-1.6.0-bin
export PATH=$PATH:$FLUME_HOME/bin
EOF
source /etc/profile

# Point Flume at the JDK.
# NOTE(review): JAVA_HOME value reconstructed — verify the installed JDK path.
cat >/opt/apache-flume-1.6.0-bin/conf/flume-env.sh <<'EOF'
export JAVA_HOME=/usr/local/jdk1.8.0_72
EOF

# Flume agent config: tail the nginx access log into Kafka topic "mykafka".
# The topic name and broker host are taken from the surviving consumer
# command below (zookeeper 192.168.142.136:2181, topic mykafka).
# NOTE(review): channel sizing and log path reconstructed — original body lost.
cat >/opt/apache-flume-1.6.0-bin/conf/tail_kafka.conf <<'EOF'
agent.sources = s1
agent.channels = c1
agent.sinks = k1

agent.sources.s1.type = exec
agent.sources.s1.command = tail -F /usr/local/nginx/logs/access.log
agent.sources.s1.channels = c1

agent.channels.c1.type = memory
agent.channels.c1.capacity = 10000
agent.channels.c1.transactionCapacity = 100

agent.sinks.k1.type = org.apache.flume.sink.kafka.KafkaSink
agent.sinks.k1.brokerList = 192.168.142.136:9092
agent.sinks.k1.topic = mykafka
agent.sinks.k1.channel = c1
EOF

# Start the Flume agent in the background, discarding its console output.
# (The surviving fragment ">/dev/null 2>&1 &" belongs to this command.)
/opt/apache-flume-1.6.0-bin/bin/flume-ng agent \
  -n agent \
  -c /opt/apache-flume-1.6.0-bin/conf \
  -f /opt/apache-flume-1.6.0-bin/conf/tail_kafka.conf \
  >/dev/null 2>&1 &

# In a SEPARATE terminal, start a consumer with the command below.
# It is commented out here because --from-beginning blocks forever and
# would prevent the rest of this script from running.
# bin/kafka-console-consumer.sh --zookeeper 192.168.142.136:2181 --topic mykafka --from-beginning

# Hit the nginx vhost; the generated access-log lines should appear
# in the consumer terminal.
curl test_kafka.flume.com

# Stop command:
# ps ax | grep -i 'kafka-rest' | grep -v grep | awk '{print $1}' | xargs kill