Created
March 5, 2015 20:18
-
-
Save maxgarvey/b33e1bd33ec9cdb5335d to your computer and use it in GitHub Desktop.
local heka demo
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
[LogstreamerInput]
hostname = "127.0.0.1:9999"
log_directory = "/usr/local/Cellar/nginx/1.6.2/logs/"
file_match = 'error\.log'

[AMQPOutput]
url = "amqp://guest:guest@127.0.0.1:5672"
exchange = "HEKA"
exchange_type = "direct"
routing_key = "heka"
message_matcher = "TRUE"

[ElasticSearchOutput]
message_matcher = "TRUE"
encoder = "ESJsonEncoder"

[LogOutput]
message_matcher = "TRUE"
encoder = "PayloadEncoder"

[ESJsonEncoder]

[PayloadEncoder]
append_newlines = false
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
[AMQPInput]
url = "amqp://guest:guest@127.0.0.1:5672"
exchange = "HEKA"
exchange_type = "direct"
routing_key = "heka"
queue = "heka_queue"
queue_auto_delete = false
queue_durability = true

[FileOutput]
message_matcher = "TRUE"
path = "/tmp/heka_log.log"
encoder = "PayloadEncoder"

[LogOutput]
message_matcher = "TRUE"
encoder = "PayloadEncoder"

[PayloadEncoder]
append_newlines = false
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env bash
#
# Local Heka demo (macOS / Homebrew).
#
# All running locally: nginx logs to a file watched by Heka. Heka reads the
# lines and prints them to standard out, puts them on a RabbitMQ queue,
# and sends them to Elasticsearch, which is plugged into Kibana.
#
# Requires: Homebrew, sudo access, network access for brew/git/wget.

set -euo pipefail

# Install the moving parts.
brew install elasticsearch rabbitmq nginx

# Start Elasticsearch and RabbitMQ in the background.
elasticsearch &
sudo /usr/local/sbin/rabbitmq-server &

# Write a basic nginx config, keeping a backup of the original.
# NOTE(review): the original did `chmod 777` on the config and used plain
# redirection; `sudo tee` keeps the file root-owned with sane permissions.
sudo mv /usr/local/etc/nginx/nginx.conf /usr/local/etc/nginx/nginx.conf.orig
sudo tee /usr/local/etc/nginx/nginx.conf >/dev/null <<EOF
worker_processes 1;

error_log logs/error.log;

events {
  worker_connections 1024;
}

http {
  include mime.types;
  default_type application/octet-stream;
  access_log logs/access.log;
  sendfile on;
  keepalive_timeout 65;
  server {
    listen 8080;
    server_name localhost;
    location / {
      root /Users/$USER/nginx_stuff;
      index index.html index.htm;
    }
  }
}
EOF

# Document root with a trivial page to serve.
mkdir -p "$HOME/nginx_stuff"
echo 'blahblahblah' > "$HOME/nginx_stuff/index.html"

# Run nginx (unprivileged port 8080, so no sudo needed).
nginx

# Fetch and build Heka.
mkdir -p "$HOME/heka_stuff"
cd "$HOME/heka_stuff"
git clone git@github.com:mozilla-services/heka.git
cd "$HOME/heka_stuff/heka"
./build.sh

# Heka config #1: tail the nginx error log; fan the lines out to stdout,
# RabbitMQ, and Elasticsearch.
# NOTE(review): the AMQP credentials/host were obfuscated in the scraped
# source; guest:guest@127.0.0.1 is the RabbitMQ default — confirm.
mkdir -p "$HOME/heka_stuff/examples"
cat > "$HOME/heka_stuff/examples/example.toml" <<'EOF'
[LogstreamerInput]
hostname = "127.0.0.1:9999"
log_directory = "/usr/local/Cellar/nginx/1.6.2/logs/"
file_match = 'error\.log'

[AMQPOutput]
url = "amqp://guest:guest@127.0.0.1:5672"
exchange = "HEKA"
exchange_type = "direct"
routing_key = "heka"
message_matcher = "TRUE"

[ElasticSearchOutput]
message_matcher = "TRUE"
encoder = "ESJsonEncoder"

[LogOutput]
message_matcher = "TRUE"
encoder = "PayloadEncoder"

[ESJsonEncoder]

[PayloadEncoder]
append_newlines = false
EOF

# Heka config #2: read the lines back off the RabbitMQ queue; write them to
# stdout and to /tmp/heka_log.log.
cat > "$HOME/heka_stuff/examples/example_reader.toml" <<'EOF'
[AMQPInput]
url = "amqp://guest:guest@127.0.0.1:5672"
exchange = "HEKA"
exchange_type = "direct"
routing_key = "heka"
queue = "heka_queue"
queue_auto_delete = false
queue_durability = true

[FileOutput]
message_matcher = "TRUE"
path = "/tmp/heka_log.log"
encoder = "PayloadEncoder"

[LogOutput]
message_matcher = "TRUE"
encoder = "PayloadEncoder"

[PayloadEncoder]
append_newlines = false
EOF

# Run both Heka instances.
sudo "$HOME/heka_stuff/heka/build/heka/bin/hekad" -config="$HOME/heka_stuff/examples/example.toml" &
sudo "$HOME/heka_stuff/heka/build/heka/bin/hekad" -config="$HOME/heka_stuff/examples/example_reader.toml" &

# Install and run Kibana.
mkdir -p "$HOME/kibana_stuff"
cd "$HOME/kibana_stuff"
wget https://download.elasticsearch.org/kibana/kibana/kibana-4.0.1-darwin-x64.tar.gz
tar -zxvf kibana-4.0.1-darwin-x64.tar.gz
mv kibana-4.0.1-darwin-x64 kibana
cd kibana
./bin/kibana &

# Enable the RabbitMQ management GUI (http://127.0.0.1:15672/).
sudo /usr/local/sbin/rabbitmq-plugins enable rabbitmq_management
# Manual step: in RabbitMQ's GUI, bind the HEKA exchange to the heka_queue
# queue on routing key "heka".
#
# Curling http://127.0.0.1:8080/ yields a favicon error that gets written to
# the nginx error log and flows to Elasticsearch and RabbitMQ; the second
# Heka instance reads the lines off RabbitMQ and prints them to stdout.
# The icing on the cake: Kibana is up at http://127.0.0.1:5601/
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
worker_processes 1;

error_log logs/error.log;

events {
  worker_connections 1024;
}

http {
  include mime.types;
  default_type application/octet-stream;
  access_log logs/access.log;
  sendfile on;
  keepalive_timeout 65;
  server {
    listen 8080;
    server_name localhost;
    location / {
      root /Users/mgarvey/nginx_stuff;
      index index.html index.htm;
    }
  }
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
here's a script that will setup a local running heka system.
There are 2 running instances of the heka software:
one watches an nginx logfile, and when it gets a new line, writes that line to standard out, to Elasticsearch, and to a RabbitMQ queue
the other reads from the RabbitMQ queue, and then writes the lines to standard out as well as to the /tmp/heka_log.log logfile.
I believe the only outstanding configuration detail missing here is that I had to manually create the queue via RabbitMQ's GUI, and I had to create a binding between the HEKA exchange and the heka_queue queue (the one I created) on the routing key "heka".