Logstash sample configuration files
filter {
  mutate {
    # Build a [lon, lat] array in "location" (e.g. for a geo_point mapping):
    # rename lon to location, then merge lat into it
    rename => { "lon" => "location" }
    merge => { "location" => "lat" }
  }
  mutate {
    convert => { "location" => "float" }
    remove_field => ["lat"]
  }
  date {
    locale => "en"
    timezone => "UTC"
    match => [ "created_at", "YYYY-MM-dd HH:mm:ss" ]
    target => "@timestamp"
  }
}
filter {
  fingerprint {
    # Store the hash under [@metadata] so it is not indexed but can still be
    # referenced as the Elasticsearch document_id below
    source => ["id", "start_time", "host"]
    target => "[@metadata][fingerprint]"
    method => "MURMUR3"
  }
}
output {
  elasticsearch {
    hosts => "example.com"
    document_id => "%{[@metadata][fingerprint]}"
  }
}
#################################
# Grok Patterns #
# Logstash Magento Log #
# Exception Log #
%{TIMESTAMP_ISO8601:timestamp}] %{USERNAME:action}.%{LOGLEVEL:logLevel}: %{GREEDYDATA:exceptionMessage} \{%{GREEDYDATA:exception_details}\}
# Debug Log #
%{TIMESTAMP_ISO8601:timestamp}] %{USERNAME:action}.%{LOGLEVEL:logLevel}: %{GREEDYDATA:debugMessage}: %{GREEDYDATA:debug_details} \[\]
# System Log #
%{TIMESTAMP_ISO8601:timestamp}] %{USERNAME:action}.%{LOGLEVEL:logLevel}: %{GREEDYDATA:systemMessage} \[\] \[\]
# Update Log #
%{TIMESTAMP_ISO8601:timestamp}] %{USERNAME:action}.%{LOGLEVEL:logLevel}: %{GREEDYDATA:updateMessage} \[\] \[\]
# Setup Cron Log #
%{USERNAME:action}: %{GREEDYDATA:setupCronMessage}
# Update Cron Log #
%{USERNAME:action}: %{GREEDYDATA:updateCronMessage}
# Magento Cron Log #
%{USERNAME:action}: %{GREEDYDATA:magentoCronMessage}
# Connector Log #
%{TIMESTAMP_ISO8601:timestamp} %{WORD:logLevel} %{GREEDYDATA:connectorMessage}
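# Example usage: the Exception Log pattern applied in a grok filter (a minimal sketch,
# assuming the raw log line arrives in the default "message" field, e.g. from a file input)
filter {
  grok {
    match => { "message" => "%{TIMESTAMP_ISO8601:timestamp}] %{USERNAME:action}.%{LOGLEVEL:logLevel}: %{GREEDYDATA:exceptionMessage} \{%{GREEDYDATA:exception_details}\}" }
  }
}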
# In MySQL 5.7 the mysql.slow_log table defines the sql_text column as mediumblob
# (it was mediumtext in 5.6), so we convert it to text before indexing it into Elasticsearch.
input {
  jdbc {
    jdbc_driver_library => "/etc/logstash/connectors/mysql-connector-java-5.1.44-bin.jar"
    jdbc_driver_class => "com.mysql.jdbc.Driver"
    jdbc_connection_string => "jdbc:mysql://mysql_host:3306/mysql"
    jdbc_user => "mysql_user"
    jdbc_password => "mysql_password"
    schedule => "*/5 * * * *"
    tracking_column => "start_time"
    statement => "SELECT CONVERT(sql_text USING utf8 ) AS sql_text, DATE_FORMAT(start_time, '%Y-%m-%d %T') AS start_time, user_host, TIME_TO_SEC(query_time) AS query_time, TIME_TO_SEC(lock_time) AS lock_time, rows_sent, rows_examined, db, last_insert_id, insert_id, server_id, thread_id FROM mysql.slow_log"
    type => "mysql-slow"
  }
}
input {
  # Need to install mysql-connector (https://dev.mysql.com/downloads/connector/j/5.1.html)
  jdbc {
    jdbc_driver_library => "/etc/logstash/connectors/mysql-connector-java-5.1.44-bin.jar"
    jdbc_driver_class => "com.mysql.jdbc.Driver"
    jdbc_connection_string => "jdbc:mysql://mysql_host:3306/db_name"
    jdbc_user => "mysql_user"
    jdbc_password => "mysql_password"
    schedule => "29 16 * * *"
    statement => "SELECT DATE_FORMAT(start_time, '%Y-%m-%d %T') AS start_time, id FROM table_name limit 5"
  }
}
filter {
  date {
    locale => "en"
    timezone => "UTC"
    match => [ "start_time", "YYYY-MM-dd HH:mm:ss" ]
    target => "@timestamp"
  }
  fingerprint {
    source => ["id", "start_time"]
    concatenate_sources => true
    method => "SHA256"
    key => "Log analytics"
    base64encode => true
  }
}
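# The fingerprint filter above sets no target, so the hash is written to the default
# "fingerprint" field. A minimal sketch of wiring it into an output (host and index name
# are placeholders), so re-running the JDBC query updates documents instead of duplicating them:
output {
  elasticsearch {
    hosts => "127.0.0.1:9200"
    index => "mysql-logs-%{+YYYY.MM.dd}"
    document_id => "%{fingerprint}"
  }
}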
input {
  # Need to install mysql-connector (https://dev.mysql.com/downloads/connector/j/5.1.html)
  jdbc {
    jdbc_driver_library => "/etc/logstash/connectors/mysql-connector-java-5.1.41-bin.jar"
    jdbc_driver_class => "com.mysql.jdbc.Driver"
    jdbc_connection_string => "jdbc:mysql://mysql_host:3306/db_name"
    jdbc_user => "mysql_user"
    jdbc_password => "mysql_password"
    schedule => "55 15 * * *"
    statement => "SELECT id FROM entity e WHERE password IS NOT NULL"
    type => "mysql"
  }
}
input {
  jdbc {
    jdbc_driver_library => "/Downloads/NetSuiteJDBCDrivers.linux64bit/NQjc.jar"
    jdbc_driver_class => "com.netsuite.jdbc.openaccess.OpenAccessDriver"
    jdbc_connection_string => "jdbc:ns://service-host:service-port;ServerDataSource=service-data-source;encrypted=1;Ciphersuites=TLS_RSA_WITH_AES_128_CBC_SHA;CustomProperties=(AccountID=account-id;RoleID=role-id)"
    jdbc_user => "username"
    jdbc_password => "password"
    schedule => "* * * * *"
    statement => "SELECT * FROM LOCATIONS"
  }
}
output {
  stdout {
    codec => rubydebug
  }
}
output {
  # Need to install logstash-output-amazon_es (https://github.com/awslabs/logstash-output-amazon_es)
  if [type] == "mysql" {
    amazon_es {
      hosts => ["foo.us-east-1.es.amazonaws.com"]
      region => "us-east-1"
      # aws_access_key_id and aws_secret_access_key are optional if an instance profile is configured
      aws_access_key_id => "ACCESS_KEY"
      aws_secret_access_key => "SECRET_KEY"
      index => "mysql-logs-%{+YYYY.MM.dd}"
      document_type => "%{type}"
      document_id => "%{id}"
    }
  }
}
output {
  if [type] == "mysql" {
    elasticsearch {
      hosts => "127.0.0.1:9200"
      index => "prefix-%{country}-%{+YYYY.MM.dd}"
      document_type => "%{type}"
      document_id => "%{id}"
      manage_template => false
    }
  }
}
# Config test: check a pipeline file for syntax errors before (re)starting Logstash (-t exits after the test)
conf_file='/etc/logstash/avai-conf/magento_log_amazon_es.conf'
sudo -u logstash /usr/share/logstash/bin/logstash --path.settings /etc/logstash/ -f "$conf_file" -t