- Install Java (Oracle JDK 7)
rpm -ivh https://dl.dropboxusercontent.com/u/5756075/jdk-7u45-linux-x64.rpm
- Install Elasticsearch 1.3 from the official yum repository
rpm --import http://packages.elasticsearch.org/GPG-KEY-elasticsearch
cat > /etc/yum.repos.d/elasticsearch.repo <<EOF
[elasticsearch-1.3]
name=Elasticsearch repository for 1.3.x packages
baseurl=http://packages.elasticsearch.org/elasticsearch/1.3/centos
gpgcheck=1
gpgkey=http://packages.elasticsearch.org/GPG-KEY-elasticsearch
enabled=1
EOF
yum -y install elasticsearch
- Increase the open-file limits for the elasticsearch user:
echo 'elasticsearch soft nofile 32000' >> /etc/security/limits.conf
echo 'elasticsearch hard nofile 32000' >> /etc/security/limits.conf
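- Optionally verify the new limits apply to the elasticsearch user (a quick sanity check; assumes the user was created by the package install):
su -s /bin/bash elasticsearch -c 'ulimit -Sn; ulimit -Hn'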
- Configure the Elasticsearch data storage path
echo 'path.data: /data/es/logs' >> /etc/elasticsearch/elasticsearch.yml
mkdir -p /data/es/logs
chown -R elasticsearch:elasticsearch /data/es/logs
- Disallow the Elasticsearch process from swapping (try to lock the process address space into RAM)
sed -i "s|^# bootstrap.mlockall:.*$|bootstrap.mlockall: true|" /etc/elasticsearch/elasticsearch.yml
- Set the JVM heap size (a common rule of thumb is about half of the machine's RAM)
sed -i "s|^#ES_HEAP_SIZE=.*$|ES_HEAP_SIZE=4g|" /etc/sysconfig/elasticsearch
- Start Elasticsearch
service elasticsearch start
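- Verify that Elasticsearch is up and that the memory lock took effect (give it a few seconds to start):
curl -s http://localhost:9200/
curl -s 'http://localhost:9200/_nodes/process?pretty' | grep mlockall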
- Download Kibana
cd /opt
wget https://download.elasticsearch.org/kibana/kibana/kibana-3.1.0.tar.gz
tar xzf kibana-3.1.0.tar.gz
ln -s kibana-3.1.0 kibana
- Install Nginx
rpm -Uvh http://download.fedoraproject.org/pub/epel/6/i386/epel-release-6-8.noarch.rpm
yum -y install nginx
- Configure Nginx to serve Kibana
mkdir -p /usr/share/nginx/kibana3
cp -R /opt/kibana/* /usr/share/nginx/kibana3/
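- The sample nginx config below proxies the Elasticsearch endpoints Kibana needs through port 80, so you can point Kibana's config.js at port 80 instead of 9200 (a hedged sketch; verify the exact elasticsearch: line in your config.js before running):
sed -i 's|+":9200"|+":80"|' /usr/share/nginx/kibana3/config.js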
- Download sample nginx config:
cd ~; curl -OL https://github.com/elasticsearch/kibana/raw/master/sample/nginx.conf
sed -i "s|server_name kibana.myhost.org;|server_name $(hostname -f);|" nginx.conf
sed -i "s|root /usr/share/kibana3;|root /usr/share/nginx/kibana3;|" nginx.conf
cp ~/nginx.conf /etc/nginx/conf.d/default.conf
If you don't find the sample nginx.conf at that URL, try the kibana3 branch: https://github.com/elasticsearch/kibana/blob/kibana3/sample/nginx.conf; it generally lives in some branch of the Kibana repository.
- Install httpd-tools (CentOS's equivalent of apache2-utils) to generate a username and password pair
yum -y install httpd-tools-2.2.15
htpasswd -c /etc/nginx/conf.d/kibana.myhost.org.htpasswd admin
- Start Nginx to serve Kibana
service nginx start
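- Check the config and confirm Kibana answers on port 80 (expect an HTTP 200):
nginx -t
curl -sI http://$(hostname -f)/ | head -n 1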
- Install Logstash 1.4 from the official yum repository
cat > /etc/yum.repos.d/logstash.repo <<EOF
[logstash-1.4]
name=logstash repository for 1.4.x packages
baseurl=http://packages.elasticsearch.org/logstash/1.4/centos
gpgcheck=1
gpgkey=http://packages.elasticsearch.org/GPG-KEY-elasticsearch
enabled=1
EOF
yum -y install logstash logstash-contrib
Since we are going to use Logstash Forwarder to ship logs from our servers to our Logstash server, we need to create an SSL certificate and key pair. The certificate is used by Logstash Forwarder to verify the identity of the Logstash server.
Generate the SSL certificate and private key in the appropriate locations (/etc/pki/tls/...). The forwarder checks the server's hostname against the certificate, so set the CN to the address your forwarders will connect to:
cd /etc/pki/tls; sudo openssl req -x509 -batch -nodes -days 3650 -newkey rsa:2048 -subj "/CN=$(hostname -f)" -keyout private/logstash-forwarder.key -out certs/logstash-forwarder.crt
The logstash-forwarder.crt file will be copied to all of the servers that will send logs to Logstash, but we will do that a little later. Let's complete our Logstash configuration first.
cat > /etc/logstash/conf.d/01-lumberjack-input.conf <<EOF
input {
  lumberjack {
    port => 5000
    type => "logs"
    ssl_certificate => "/etc/pki/tls/certs/logstash-forwarder.crt"
    ssl_key => "/etc/pki/tls/private/logstash-forwarder.key"
  }
}
EOF
This specifies a lumberjack input that listens on TCP port 5000 and uses the SSL certificate and private key we created earlier.
Now let's create another config file with a filter for syslog messages:
cat > /etc/logstash/conf.d/10-syslog.conf <<EOF
filter {
  if [type] == "syslog" {
    grok {
      match => { "message" => "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" }
      add_field => [ "received_at", "%{@timestamp}" ]
      add_field => [ "received_from", "%{host}" ]
    }
    syslog_pri { }
    date {
      # syslog pads single-digit days with a space, hence the "MMM  d" pattern
      match => [ "syslog_timestamp", "MMM  d HH:mm:ss", "MMM dd HH:mm:ss" ]
    }
  }
}
EOF
This filter looks for logs labeled with the "syslog" type (by a Logstash Forwarder) and uses grok to parse incoming syslog lines into structured, queryable fields.
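For example, a syslog line like this (a made-up sample):
Dec 10 06:51:04 web01 sshd[1234]: Failed password for invalid user admin from 10.0.0.5
would come out with fields roughly like:
syslog_timestamp => "Dec 10 06:51:04"
syslog_hostname  => "web01"
syslog_program   => "sshd"
syslog_pid       => "1234"
syslog_message   => "Failed password for invalid user admin from 10.0.0.5"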
Now let's create one more config file that tells Logstash to store the logs in Elasticsearch:
cat > /etc/logstash/conf.d/30-lumberjack-output.conf <<EOF
output {
  elasticsearch { host => localhost }
  stdout { codec => rubydebug }
}
EOF
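- Optionally test the combined configuration before starting (Logstash 1.4's agent supports a --configtest flag; the RPM installs under /opt/logstash):
/opt/logstash/bin/logstash agent --configtest -f /etc/logstash/conf.d/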
- Start Logstash
service logstash start
Note: Perform the following steps on each server that should ship logs to your Logstash server.
- Copy the SSL certificate to the client (run this from the Logstash server):
scp /etc/pki/tls/certs/logstash-forwarder.crt user@server:/tmp
- Install the Logstash Forwarder package and an init script for it:
rpm -ivh http://packages.elasticsearch.org/logstashforwarder/centos/logstash-forwarder-0.3.1-1.x86_64.rpm
cd /etc/init.d/; sudo curl -o logstash-forwarder http://logstashbook.com/code/4/logstash_forwarder_redhat_init
chmod +x logstash-forwarder
- Configure the forwarder's startup options:
cat > /etc/sysconfig/logstash-forwarder <<EOF
LOGSTASH_FORWARDER_OPTIONS="-config /etc/logstash-forwarder -spool-size 100"
EOF
- Move the copied certificate into place:
cp /tmp/logstash-forwarder.crt /etc/pki/tls/certs
- Point the forwarder at your Logstash server. LS_SERVER must be the Logstash server's address (matching the certificate's CN), not this client's own hostname:
LS_SERVER=logstash.myhost.org
cat > /etc/logstash-forwarder <<EOF
{
  "network": {
    "servers": [ "${LS_SERVER}:5000" ],
    "timeout": 15,
    "ssl ca": "/etc/pki/tls/certs/logstash-forwarder.crt"
  },
  "files": [
    {
      "paths": [
        "/var/log/messages",
        "/var/log/secure"
      ],
      "fields": { "type": "syslog" }
    }
  ]
}
EOF
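- Register and start the forwarder (assuming the init script downloaded above registers the service as logstash-forwarder):
chkconfig --add logstash-forwarder
service logstash-forwarder start
- Back on the Logstash server, confirm events are arriving in Elasticsearch:
curl 'http://localhost:9200/_search?q=type:syslog&pretty'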