ELK6 Server Install Script - Verified on Ubuntu 17.10 Desktop
#!/usr/bin/env bash
# How to install:
# curl -L https://gist.githubusercontent.com/kimsyversen/e51286fcfa39a01aed42682347414e9c/raw/5bb25bbcefd665ed8d71f43308b70c83f2c710e6/elk6.sh | sudo bash
# Src https://logz.io/learn/complete-guide-elk-stack
# For client see https://gist.github.com/kimsyversen/758c7a6104ce8ec5e407769c9c27a3b3
# Check for sudo
if [[ $UID != 0 ]]; then
    echo "Please run this script with sudo:"
    echo "sudo $0 $*"
    exit 1
fi
wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | sudo apt-key add -
echo "deb https://artifacts.elastic.co/packages/6.x/apt stable main" | sudo tee -a /etc/apt/sources.list.d/elastic-6.x.list
apt update
# Install useful tools
apt install -y net-tools curl default-jre apt-transport-https openssh-server
apt install -y elasticsearch
cat << EOF >> /etc/elasticsearch/elasticsearch.yml
network.host: "localhost"
http.port: 9200
EOF
service elasticsearch start
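# Optional sanity check (not part of the original flow): wait until Elasticsearch
# answers on the port configured above before continuing, then print cluster health.
#until curl -s http://localhost:9200 >/dev/null; do sleep 2; done
#curl -s 'http://localhost:9200/_cluster/health?pretty'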
# Generate certificate for TLS connection between filebeat and logstash
# Note: enp0s5 is the interface on the machine this was verified on; adjust it to
# your own interface name (see `ip addr`) so the SAN gets the right IP.
sudo sed -i "/\[ v3_ca \]/a subjectAltName = IP:$(ifconfig enp0s5 | grep broadcast | awk '{print $2}')" /etc/ssl/openssl.cnf
cd /etc/ssl
openssl req -x509 -days 365 -batch -nodes -newkey rsa:2048 -keyout logstash-forwarder.key -out logstash-forwarder.crt
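# Optional: inspect the generated certificate and confirm the subjectAltName picked
# up the expected IP (a quick check, not part of the original flow).
#openssl x509 -in /etc/ssl/logstash-forwarder.crt -noout -text | grep -A1 "Subject Alternative Name"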
sudo apt -y install logstash kibana
cat << EOF >> /etc/kibana/kibana.yml
server.port: 5601
elasticsearch.url: "http://localhost:9200"
EOF
# Add Logstash pipeline config for SSH login events (syslog lines shipped by filebeat)
# Src: https://www.elastic.co/guide/en/logstash/current/logstash-config-for-filebeat-modules.html
cat << EOF > /etc/logstash/conf.d/10-ssh.conf
input {
  beats {
    port => 5044
    host => "0.0.0.0"
    ssl => true
    ssl_certificate => "/etc/ssl/logstash-forwarder.crt"
    ssl_key => "/etc/ssl/logstash-forwarder.key"
  }
}
filter {
  grok {
    match => { "message" => ["%{SYSLOGTIMESTAMP:timestamp} %{HOSTNAME:host_target} sshd\[%{BASE10NUM}\]: Failed password for %{USERNAME:username} from %{IP:src_ip} port %{BASE10NUM:port} ssh2"] }
    add_tag => "[ssh_failed_login_valid_user]"
  }
  grok {
    match => { "message" => ["%{SYSLOGTIMESTAMP:timestamp} %{HOSTNAME:host_target} sshd\[%{BASE10NUM}\]: Failed password for invalid user %{USERNAME:username} from %{IP:src_ip} port %{BASE10NUM:port} ssh2"] }
    add_tag => "[ssh_failed_login_invalid_user]"
  }
  grok {
    match => { "message" => ["%{SYSLOGTIMESTAMP:timestamp} %{HOSTNAME:host_target} sshd\[%{BASE10NUM}\]: Accepted password for %{USERNAME:username} from %{IP:src_ip} port %{BASE10NUM:port} ssh2"] }
    add_tag => "[ssh_successful_login]"
  }
  geoip {
    source => "src_ip"
  }
}
output {
  elasticsearch {
    hosts => ["localhost:9200"]
    manage_template => false
    index => "%{[@metadata][beat]}-%{+YYYY.MM.dd}"
    document_type => "%{[@metadata][type]}"
  }
}
EOF
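# Optional sanity check (assumes the default paths of the logstash .deb package):
# validate the pipeline syntax before the services are restarted further down.
#/usr/share/logstash/bin/logstash --path.settings /etc/logstash -t -f /etc/logstash/conf.d/10-ssh.conf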
# Set up reverse proxy and authentication
# Src: https://www.digitalocean.com/community/tutorials/how-to-install-elasticsearch-logstash-and-kibana-elk-stack-on-ubuntu-16-04
sudo apt-get -y install nginx
#echo -e "Set a password for accessing Kibana:\n"
#echo "admin:`openssl passwd -apr1`" | sudo tee -a /etc/nginx/htpasswd.users
cat << EOF > /etc/nginx/sites-available/default
server {
    listen 80;
    server_name example.com;
    #auth_basic "Restricted Access";
    #auth_basic_user_file /etc/nginx/htpasswd.users;
    location / {
        proxy_pass http://localhost:5601;
        proxy_http_version 1.1;
        proxy_set_header Upgrade \$http_upgrade;
        proxy_set_header Connection 'upgrade';
        proxy_set_header Host \$host;
        proxy_cache_bypass \$http_upgrade;
    }
}
EOF
sudo nginx -t
sudo systemctl restart nginx
#Todo: Remember to configure firewall
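# A minimal ufw sketch for the todo above (commented out; adjust to your network
# before enabling): open the nginx proxy and the beats input from 10-ssh.conf.
#ufw allow 80/tcp
#ufw allow 5044/tcp
#ufw enable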
#wget https://logz.io/sample-data -O /home/kim/apache-daily-access.log
sudo systemctl daemon-reload
sudo systemctl enable elasticsearch
sudo systemctl enable logstash
sudo systemctl enable kibana
sudo service kibana restart
sudo service logstash restart
sudo service elasticsearch restart
ln -s /usr/share/logstash/bin/logstash /bin/logstash
ln -s /usr/share/elasticsearch/bin/elasticsearch /bin/elasticsearch
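# Optional: confirm the logstash symlink resolves (prints the installed version).
#logstash --version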
# Install Geoip plugin
#sudo /usr/share/elasticsearch/bin/elasticsearch-plugin install ingest-geoip
# Since logs are sent through logstash and not directly to elasticsearch, load the elastic index templates manually
#sudo apt install -y filebeat
#sudo filebeat setup --template -E output.logstash.enabled=false -E 'output.elasticsearch.hosts=["localhost:9200"]'
#
## Install template to elastic
#sudo filebeat export template > filebeat.template.json
#curl -XPUT -H 'Content-Type: application/json' http://localhost:9200/_template/filebeat-6.2.4 -d@filebeat.template.json
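## Optional: confirm the template was accepted (uses the template name from the line above)
#curl -s 'http://localhost:9200/_template/filebeat-6.2.4?pretty' | head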
#
## Set up dashboards in Kibana
#sudo filebeat setup --dashboards
#
## Force kibana to look at new documents
#curl -XDELETE 'http://localhost:9200/filebeat-*'
sudo service kibana restart
sudo service logstash restart
sudo service elasticsearch restart
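# Optional end-to-end check (not part of the original script): once a client ships
# logs, filebeat-* indices should appear here and Kibana should answer through the
# nginx proxy on port 80.
#curl -s 'http://localhost:9200/_cat/indices?v'
#curl -sI http://localhost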