Example 1
===================== | |
input { | |
file { | |
path => [ "/usr/local/var/log/suricata/eve.json", "/var/log/ulogd.json" ] | |
codec => json | |
type => "json-log" | |
} | |
} | |
filter { | |
if [type] == "json-log" { | |
date { | |
match => [ "timestamp", "ISO8601" ] | |
} | |
} | |
} | |
output { | |
stdout { codec => rubydebug } | |
elasticsearch { embedded => true } | |
} | |
Example 2
=============================== | |
input { | |
file { | |
type => "linux-syslog" | |
path => [ "/var/log/daemon.log", "/var/log/auth.log", "/var/log/mail.info" ] | |
} | |
}
filter {
if [type] == "linux-syslog" { | |
grok { | |
match => { "message" => "Accepted %{WORD:auth_method} for %{USER:username} from %{IP:src_ip} port %{INT:src_port} ssh2" } | |
} | |
grok { | |
match => { "message" => "Invalid user %{USER:username} from %{IP:src_ip}" } | |
} | |
} | |
} | |
Example 3
================================ | |
input {
file {
path => [ "/var/log/apache2/*access.log" ] | |
type => "apache-access" | |
} | |
file { | |
type => "apache-error" | |
path => "/var/log/apache2/error.log" | |
} | |
} | |
filter { | |
if [type] == "apache-access" { | |
grok { | |
match => { "message" => "%{COMBINEDAPACHELOG}" } | |
} | |
} | |
if [type] == "apache-error" { | |
grok { | |
match => { "message" => "%{APACHEERRORLOG}" } | |
patterns_dir => ["/var/lib/logstash/etc/grok"] | |
} | |
} | |
} | |
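# Custom patterns referenced by patterns_dir above; save these two lines in a file under /var/lib/logstash/etc/grok: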
HTTPERRORDATE %{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{YEAR} | |
APACHEERRORLOG \[%{HTTPERRORDATE:timestamp}\] \[%{WORD:severity}\] \[client %{IPORHOST:clientip}\] %{GREEDYDATA:message_remainder} | |
Example 4
================================== | |
input { | |
file { | |
type => "kern-log" | |
path => "/var/log/kern.log" | |
} | |
} | |
filter { | |
if [type] == "kern-log" { | |
grok { | |
match => { "message" => "%{IPTABLES}"} | |
patterns_dir => ["/var/lib/logstash/etc/grok"] | |
} | |
} | |
} | |
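# Custom iptables patterns referenced by patterns_dir above; save these lines in a file under /var/lib/logstash/etc/grok: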
NETFILTERMAC %{COMMONMAC:dst_mac}:%{COMMONMAC:src_mac}:%{ETHTYPE:ethtype} | |
ETHTYPE (?:(?:[A-Fa-f0-9]{2}):(?:[A-Fa-f0-9]{2})) | |
IPTABLES1 (?:IN=%{WORD:in_device} OUT=(%{WORD:out_device})? MAC=%{NETFILTERMAC} SRC=%{IP:src_ip} DST=%{IP:dst_ip}.*(TTL=%{INT:ttl})?.*PROTO=%{WORD:proto}?.*SPT=%{INT:src_port}?.*DPT=%{INT:dst_port}?.*) | |
IPTABLES2 (?:IN=%{WORD:in_device} OUT=(%{WORD:out_device})? MAC=%{NETFILTERMAC} SRC=%{IP:src_ip} DST=%{IP:dst_ip}.*(TTL=%{INT:ttl})?.*PROTO=%{INT:proto}?.*) | |
IPTABLES (?:%{IPTABLES1}|%{IPTABLES2}) | |
Example 5
=========================================== | |
input { | |
file { | |
type => "exim-log" | |
path => "/var/log/exim4/mainlog" | |
} | |
} | |
filter { | |
if [type] == "exim-log" { | |
multiline { | |
pattern => "%{DATE} %{TIME} %{HOSTNAME:msgid} (=>|Completed)" | |
what => "previous" | |
} | |
grok { | |
break_on_match => false | |
match => [ | |
"message", "<= %{NOTSPACE:from} H=%{NOTSPACE:server} \[%{IP:src_ip}\]" | |
] | |
} | |
grok { | |
break_on_match => false | |
match => [ | |
"message", "=> %{USERNAME:username} <%{NOTSPACE:dest}> R=%{WORD:transport}" | |
] | |
} | |
grok { | |
break_on_match => false | |
match => [ | |
"message", "=> %{NOTSPACE:dest} R=%{WORD:transport}" | |
] | |
} | |
grok { | |
break_on_match => false | |
match => [ | |
"message", "%{DATE} %{TIME} H=%{NOTSPACE:server}%{GREEDYDATA} \[%{IP:src_ip}\] F=<%{NOTSPACE:mail_to}> temporarily rejected RCPT <%{NOTSPACE:dest}>: greylisted" | |
] | |
} | |
} | |
} | |
Example 6
========================================= | |
input { | |
file { | |
type => "linux-syslog" | |
path => [ "/var/log/daemon.log", "/var/log/auth.log", "/var/log/mail.info" ] | |
} | |
file { | |
path => [ "/var/log/apache2/*access.log" ] | |
type => "apache-access" | |
} | |
file { | |
type => "apache-error" | |
path => "/var/log/apache2/error.log" | |
} | |
file { | |
type => "exim-log" | |
path => "/var/log/exim4/mainlog" | |
} | |
file { | |
type => "kern-log" | |
path => "/var/log/kern.log" | |
} | |
file { | |
path => ["/var/log/suricata/eve.json" ] | |
codec => json | |
} | |
} | |
filter { | |
if [type] == "apache-access" { | |
grok { | |
match => { "message" => "%{COMBINEDAPACHELOG}" } | |
} | |
} | |
if [type] == "linux-syslog" { | |
grok { | |
match => { "message" => "Accepted %{WORD:auth_method} for %{USER:username} from %{IP:src_ip} port %{INT:src_port} ssh2" } | |
} | |
} | |
if [type] == "apache-error" { | |
grok { | |
match => { "message" => "%{APACHEERRORLOG}" } | |
patterns_dir => ["/var/lib/logstash/etc/grok"] | |
} | |
} | |
if [type] == "exim-log" { | |
multiline { | |
pattern => "%{DATE} %{TIME} %{HOSTNAME:msgid} (=>|Completed)" | |
what => "previous" | |
} | |
grok { | |
break_on_match => false | |
match => [ | |
"message", "<= %{NOTSPACE:from} H=%{NOTSPACE:server} \[%{IP:src_ip}\]" | |
] | |
} | |
grok { | |
break_on_match => false | |
match => [ | |
"message", "=> %{USERNAME:username} <%{NOTSPACE:dest}> R=%{WORD:transport}" | |
] | |
} | |
grok { | |
break_on_match => false | |
match => [ | |
"message", "=> %{NOTSPACE:dest} R=%{WORD:transport}" | |
] | |
} | |
grok { | |
break_on_match => false | |
match => [ | |
"message", "%{DATE} %{TIME} H=%{NOTSPACE:server}%{GREEDYDATA} \[%{IP:src_ip}\] F=<%{NOTSPACE:mail_to}> temporarily rejected RCPT <%{NOTSPACE:dest}>: greylisted" | |
] | |
} | |
} | |
if [type] == "kern-log" { | |
grok { | |
match => { "message" => "%{IPTABLES}"} | |
patterns_dir => ["/var/lib/logstash/etc/grok"] | |
} | |
} | |
if [src_ip] { | |
geoip { | |
source => "src_ip" | |
target => "geoip" | |
add_field => [ "[geoip][coordinates]", "%{[geoip][longitude]}" ] | |
add_field => [ "[geoip][coordinates]", "%{[geoip][latitude]}" ] | |
} | |
mutate { | |
convert => [ "[geoip][coordinates]", "float" ] | |
} | |
} | |
if [clientip] { | |
geoip { | |
source => "clientip" | |
target => "geoip" | |
add_field => [ "[geoip][coordinates]", "%{[geoip][longitude]}" ] | |
add_field => [ "[geoip][coordinates]", "%{[geoip][latitude]}" ] | |
} | |
mutate { | |
convert => [ "[geoip][coordinates]", "float" ] | |
} | |
} | |
if [srcip] { | |
geoip { | |
source => "srcip" | |
target => "geoip" | |
add_field => [ "[geoip][coordinates]", "%{[geoip][longitude]}" ] | |
add_field => [ "[geoip][coordinates]", "%{[geoip][latitude]}" ] | |
} | |
mutate { | |
convert => [ "[geoip][coordinates]", "float" ] | |
} | |
} | |
} | |
output { | |
stdout { codec => rubydebug } | |
elasticsearch { embedded => true } | |
} | |
================================================= | |
input { | |
file { | |
path => "/var/opt/teradata/viewpoint/dcs/logs/error.log" | |
type => "apache" | |
start_position => "beginning" | |
} | |
} | |
filter { | |
date { | |
match => ["logdate", "YYYY-MM-DD HH:mm:ss", "ISO8601"] | |
} | |
multiline { | |
pattern => "^\s" | |
what => "previous" | |
} | |
grep { | |
match =>["message", "-w ERROR\|EXCEPTION"] | |
} | |
} | |
output { | |
stdout {} | |
} | |
===================================================================== | |
grok { | |
type => "linux-syslog" | |
pattern => "%{SYSLOGTIMESTAMP:timestamp} %{HOSTNAME:host_target} sshd\[%{BASE10NUM}\]: Failed password for invalid user %{USERNAME:username} from %{IP:src_ip} port %{BASE10NUM:port} ssh2" | |
add_tag => "ssh_brute_force_attack" | |
} | |
grok { | |
type => "linux-syslog" | |
pattern => "%{SYSLOGTIMESTAMP:timestamp} %{HOSTNAME:host_target} sudo: pam_unix\(sudo:auth\): authentication failure; logname=%{USERNAME:logname} uid=%{BASE10NUM:uid} euid=%{BASE10NUM:euid} tty=%{TTY:tty} ruser=%{USERNAME:ruser} rhost=(?:%{HOSTNAME:remote_host}|\s*) user=%{USERNAME:user}" | |
add_tag => "sudo_auth_failure" | |
} | |
grok { | |
type => "linux-syslog" | |
pattern => "%{SYSLOGTIMESTAMP:timestamp} %{HOSTNAME:host_target} sshd\[%{BASE10NUM}\]: Failed password for %{USERNAME:username} from %{IP:src_ip} port %{BASE10NUM:port} ssh2" | |
add_tag => "ssh_failed_login" | |
} | |
grok { | |
type => "linux-syslog" | |
pattern => "%{SYSLOGTIMESTAMP:timestamp} %{HOSTNAME:host_target} sshd\[%{BASE10NUM}\]: Accepted password for %{USERNAME:username} from %{IP:src_ip} port %{BASE10NUM:port} ssh2" | |
add_tag => "ssh_sucessful_login" | |
} | |
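The four blocks above use the pre-1.2 type/pattern/add_tag options. A minimal equivalent of the first block in the conditional plus match syntax of Logstash 1.2 and later (a sketch, not part of the original gist):
filter {
  if [type] == "linux-syslog" {
    grok {
      # Same pattern and tag as the first block above
      match => { "message" => "%{SYSLOGTIMESTAMP:timestamp} %{HOSTNAME:host_target} sshd\[%{BASE10NUM}\]: Failed password for invalid user %{USERNAME:username} from %{IP:src_ip} port %{BASE10NUM:port} ssh2" }
      add_tag => [ "ssh_brute_force_attack" ]
    }
  }
}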
===================================================================================== | |
input { | |
stdin { | |
type => "stdin-type" | |
} | |
file { | |
type => "apache" | |
path => ["/var/log/apache2/access_log", "/var/log/apache2/error_log", "/var/log/apache2/ssl_request_log"] | |
} | |
file { | |
type => "pazpar2" | |
path => "/var/log/pazpar2/pazpar2.log" | |
} | |
file { | |
type => "metaproxy" | |
path => "/var/log/metaproxy/metaproxy.log" | |
} | |
file { | |
type => "couchdb" | |
path => "/var/log/couchdb/couch.log" | |
} | |
file { | |
type => "mysql" | |
path => "/var/log/mysqld.log" | |
} | |
file { | |
type => "nagios" | |
path => "/var/log/nagios/nagios.log" | |
} | |
file { | |
type => "tivoli" | |
path => ["/var/log/tivoli/dsmerror.log", "/var/log/tivoli/dsmsched.log"] | |
} | |
} | |
filter { | |
grok { | |
type => "apache" | |
# See the following URL for a complete list of named patterns | |
# logstash/grok ships with by default: | |
# https://github.com/logstash/logstash/tree/master/patterns | |
# | |
# The grok filter will use the below pattern and on successful match use | |
# any captured values as new fields in the event. | |
pattern => "%{COMBINEDAPACHELOG}" | |
} | |
date { | |
type => "apache" | |
# Try to pull the timestamp from the 'timestamp' field (parsed above with | |
# grok). The apache time format looks like: "18/Aug/2011:05:44:34 -0700" | |
locale => en | |
match => ["timestamp", "dd/MMM/yyyy:HH:mm:ss Z"] | |
} | |
grok { | |
type => "pazpar2" | |
# Try to match pazpar2's log format, e.g.: | |
# 2013-05-15 23:19:27 pazpar2 [log] Request: GET /search.pz2?command=server-status | |
# 2013-05-16 00:05:50 pazpar2 [log] Request: GET /search.pz2?command=ping&session=335780868&windowid= | |
break_on_match => false | |
pattern => "%{DATESTAMP:timestamp} pazpar2 \[%{WORD:loglevel}\] %{GREEDYDATA:logmessage}" | |
pattern => "%{GREEDYDATA} Session %{NOTSPACE:session}: %{GREEDYDATA:sessionmessage}" | |
pattern => "%{GREEDYDATA} PQF for Client %{NOTSPACE:target}: %{GREEDYDATA:pqf}" | |
pattern => "%{GREEDYDATA} Client %{NOTSPACE:target}: Search CQL: %{GREEDYDATA:cql}" | |
pattern => "%{GREEDYDATA} Request: %{NOTSPACE:method} %{GREEDYDATA:request}" | |
pattern => "%{GREEDYDATA} Request: %{GREEDYDATA}session=%{WORD:session}" | |
pattern => "%{GREEDYDATA} Request: %{GREEDYDATA}command=%{WORD:command}" | |
pattern => "%{GREEDYDATA} Request: %{GREEDYDATA}block=%{WORD:block}" | |
} | |
date { | |
type => "pazpar2" | |
# Try to pull the timestamp from the 'timestamp' field (parsed above with | |
# grok). The time format looks like: "2013-05-15 23:19:27" | |
locale => en | |
match => ["timestamp", "yyyy-MM-dd HH:mm:ss"] | |
} | |
grok { | |
type => "metaproxy" | |
# Try to match pazpar2's log format, e.g.: | |
# 2013-05-16--00:24:32 10868 [log] metaproxy start 1.3.55 3e8ea72503dab6e988c622e647b7bbd3abb338e6 | |
# 2013-05-16--00:26:07 10868 [log] FN 134.76.22.139:2 2 0.000000 HTTP_Request POST /dspace/geo-leo | |
break_on_match => false | |
pattern => "%{WORD:date} %{NUMBER:pid} \[%{WORD:loglevel}\] %{GREEDYDATA:logmessage}" | |
pattern => "%{WORD:date} %{NUMBER:pid} \[%{WORD:loglevel}\] %{GREEDYDATA:filtermessage} %{IP:ip}:%{GREEDYDATA:commandmessage}" | |
} | |
date { | |
type => "metaproxy" | |
# Try to pull the timestamp from the 'timestamp' field (parsed above with | |
# grok). The time format looks like: "2013-05-15--23:19:27" | |
locale => en | |
match => ["timestamp", "yyyy-MM-dd--HH:mm:ss"] | |
} | |
grok { | |
type => couchdb | |
pattern => "\[%{DATA:timestamp}\] \[%{DATA:loglevel}\] \[%{DATA:pid}\] %{IP:clientip} %{USER:ident} %{USER:auth} %{WORD:verb} %{NOTSPACE:path} %{NUMBER:response}" | |
} | |
date { | |
type => "couchdb" | |
# Try to pull the timestamp from the 'timestamp' field (parsed above with | |
# grok). The time format looks like: "Wed, 15 May 2013 22:16:16 GMT" | |
locale => en | |
match => ["timestamp", "EEE, dd MMM yyyy HH:mm:ss z"] | |
} | |
grok { | |
type => nagios | |
pattern => "%{NAGIOSLOGLINE}" | |
} | |
date { | |
type => "nagios" | |
# Try to pull the timestamp from the 'nagios_epoch' field (parsed above with
# grok). It is a UNIX epoch value.
locale => en | |
match => ["nagios_epoch", "UNIX"] | |
} | |
grok { | |
type => "tivoli" | |
# 05/15/13 02:11:26 ANS1802E Incremental backup of '/' finished with 4 failure | |
pattern => "(?<tivoli_time>.{19}) %{GREEDYDATA:message}" | |
} | |
date { | |
type => "tivoli" | |
# Try to pull the timestamp from the 'timestamp' field (parsed above with | |
# grok). The time format looks like: "05/15/13 02:11:26" | |
locale => en | |
match => ["tivoli_time", "MM/dd/yy HH:mm:ss"] | |
} | |
} | |
output { | |
stdout { | |
debug => true | |
debug_format => "json" | |
} | |
elasticsearch { | |
embedded => true | |
} | |
} | |
==================================================================================================== | |
input { | |
file { | |
type => "rodslog" | |
start_position => beginning | |
debug => true | |
sincedb_path => "/root/logstash/.sincedb" | |
path => [ "/root/logstash/logs/eu/var/log/irods/rodsLog.*" ] | |
} | |
} | |
filter { | |
# Drop verbose and non-informative events | |
if [message] =~ /environment variable set/ {drop { }} | |
if [message] =~ /Authenticated/ {drop { }} | |
grok { | |
match => { "message" => "%{SYSLOGTIMESTAMP:log_timestamp}\spid:%{POSINT:pid}\s%{LOGLEVEL:loglevel}:\s+%{GREEDYDATA:rods_msg}" } | |
add_field => [ "event_timestamp", "%{@timestamp}" ] | |
} | |
if [loglevel] == "DEBUG" { drop { } }
# Get client IPs | |
if [rods_msg] =~ /^Agent/ { | |
grok { | |
match => { "rods_msg" => "Agent\sprocess\s%{POSINT:agent_pid}\sstarted\sfor\spuser=%{USER:puser}\sand\scuser=%{USER:cuser}\sfrom\s%{IPV4:irods_client_addr}" } | |
remove_field => "rods_msg" | |
} | |
} | |
# Year is not present in the log date format, getting it from the filename | |
grok { | |
match => { "path" => "%{YEAR:log_year}" } | |
} | |
mutate { | |
replace => [ "log_timestamp", "%{log_timestamp} %{log_year}" ] | |
replace => [ "host", "ids-eu.incf.net" ] | |
remove_field => "log_year" | |
} | |
date { match => [ "log_timestamp", "MMM dd HH:mm:ss yyyy", "MMM d HH:mm:ss yyyy" ] } | |
# GeoIP | |
if [irods_client_addr] { | |
geoip { | |
source => "irods_client_addr" | |
target => "geoip" | |
add_field => [ "[geoip][coordinates]", "%{[geoip][longitude]}" ] | |
add_field => [ "[geoip][coordinates]", "%{[geoip][latitude]}" ] | |
add_tag => ["geoip"] | |
} | |
mutate { | |
convert => [ "[geoip][coordinates]", "float" ] | |
} | |
} | |
} | |
output { | |
stdout { codec => rubydebug } | |
elasticsearch_http { | |
index => "logstash-%{+YYYY.MM}" | |
workers => 4 | |
} | |
} | |
================================================================================ | |
input { | |
stdin { } | |
file { | |
type => "postfix" | |
path => ["/var/log/mail.*"] | |
} | |
} | |
filter { | |
if [type] == "postfix" { | |
grok { | |
patterns_dir => ["/etc/logstash/patterns"] | |
match => [ "message", "%{POSTFIX}" ] | |
add_tag => [ "postfix", "grokked", "%{component}" ] | |
} | |
date { | |
match => [ "timestamp", "MMM dd YYY HH:mm:ss", "MMM d YYY HH:mm:ss", "ISO8601" ] | |
add_tag => [ "dated" ] | |
} | |
if "qmgr" in [tags] { | |
grok { | |
patterns_dir => ["/etc/logstash/patterns"] | |
match => [ "message", "%{POSTFIXQMGR}" ] | |
named_captures_only => true | |
} | |
} | |
if "bounce" in [tags] { | |
grok { | |
patterns_dir => ["/etc/logstash/patterns"] | |
match => [ "message", "%{POSTFIXBOUNCE}" ] | |
named_captures_only => true | |
} | |
} | |
if "cleanup" in [tags] { | |
grok { | |
patterns_dir => ["/etc/logstash/patterns"] | |
match => [ "message", "%{POSTFIXCLEANUP}" ] | |
named_captures_only => true | |
} | |
} | |
if "smtpd" in [tags] { | |
grok { | |
patterns_dir => ["/etc/logstash/patterns"] | |
match => [ "message", "%{POSTFIXSMTPD}"] | |
named_captures_only => true | |
} | |
} | |
} | |
} | |
output { | |
stdout { } | |
redis { | |
host => "10.80.1.203" | |
data_type => "list" | |
key => "logstash" | |
} | |
} | |
=================================================================================== | |
input { | |
redis { | |
host => "192.168.1.32" | |
data_type => "list" | |
key => "logstash" | |
debug => true | |
} | |
} | |
filter { | |
if [type] == "syslog" { | |
grok { | |
match => [ "message", "(?:%{SYSLOGLINE}|%{SYSLOGREMOTELINE})" ] | |
patterns_dir => "/etc/logstash/grok" | |
} | |
date { | |
match => [ "logdate", | |
"MMM d HH:mm:ss", # syslog 'day' value can be space-leading | |
"MMM dd HH:mm:ss", | |
"ISO8601" # Some syslogs use ISO8601 time format | |
] | |
} | |
} | |
} | |
output { | |
elasticsearch { | |
host => "192.168.1.32" | |
cluster => "logstash" | |
} | |
} | |
=================================================================================================== | |
#logstash config: | |
filter { | |
grok { | |
type => "jboss" | |
patterns_dir => "/as/conf/logstash/patterns" | |
pattern => "%{JBOSSLOG}" | |
} | |
date { | |
timestamp => "yyyy-MM-dd HH:mm:ss,SSS" | |
} | |
} | |
#cat /as/conf/logstash/patterns | |
#JBOSSLOG %{TIMESTAMP_ISO8601:timestamp} %{LOGLEVEL:loglevel} \[\s?%{JAVACLASS:class}\s?\] | |
========================================================================================================== | |
input { | |
tcp { | |
type => "haproxy" | |
port => 3333 | |
} | |
} | |
filter { | |
grok { | |
type => "haproxy" | |
# See the following URL for a complete list of named patterns | |
# logstash/grok ships with by default: | |
# https://github.com/logstash/logstash/tree/master/patterns | |
# | |
# The grok filter will use the below pattern and on successful match use | |
# any captured values as new fields in the event. | |
pattern => "%{HAPROXYHTTP}" | |
} | |
date { | |
type => "haproxy" | |
# Try to pull the timestamp from the 'syslog_timestamp' field (parsed above
# with grok). The syslog time format looks like: "Aug 18 05:44:34"
syslog_timestamp => ["MMM d HH:mm:ss", "MMM dd HH:mm:ss"] | |
} | |
} | |
output { | |
elasticsearch { | |
# Setting 'embedded' will run a real elasticsearch server inside logstash. | |
# This option below saves you from having to run a separate process just | |
# for ElasticSearch, so you can get started quicker! | |
embedded => true | |
} | |
} | |
================================================================================================================= | |
input { | |
file { | |
type => "nginx-access" | |
path => ["/var/log/nginx/access.log"] | |
} | |
} | |
filter { | |
if [type] == "nginx-access" { | |
grok { | |
# https://github.com/logstash/logstash/tree/master/patterns/grok-patterns | |
match => { "message" => "%{COMBINEDAPACHELOG}" } | |
} | |
} | |
} | |
output { | |
statsd { | |
host => "localhost" | |
port => 8125 | |
increment => "%{host}.nginx.response.%{response}" | |
} | |
elasticsearch { | |
embedded => true | |
} | |
# will log debug to the stdout | |
#stdout { codec => rubydebug } | |
} | |
======================================================================================================================== | |
#I have the following input: | |
09:36:01 03/13/2014 INFO PerfLogger UNKNOWN,QUERY,1299 | |
#Grok filter | |
grok { | |
'patterns_dir' => '/opt/logstash/server/etc/patterns' | |
'pattern' => '%{TIME} %{DATE} INFO PerfLogger %{DATA},QUERY,%{NUMBER:query}' | |
'type' => 'wbc' | |
} | |
The data is not being parsed properly; instead I get the following tag: _grokparsefailure
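One possible rewrite in the match syntax of Logstash 1.2 and later (a sketch assuming the deprecated pattern/type options are the problem; the original thread does not confirm the fix):
filter {
  if [type] == "wbc" {
    grok {
      patterns_dir => ["/opt/logstash/server/etc/patterns"]
      match => { "message" => "%{TIME} %{DATE} INFO PerfLogger %{DATA},QUERY,%{NUMBER:query}" }
    }
  }
}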
============================================================================================================================ | |
# This file was created for logstash01 | |
# by Chef | |
# Manual changes will be lost | |
filter { | |
grok { | |
'match' => ['message', '%{TIME} %{DATE} INFO PerfLogger %{JAVACLASS:target_metric},%{DATA:category_metric},%{NUMBER:value_metric}'] | |
'match' => ['message', '%{TIME} %{DATE},%{JAVACLASS:target_metric},%{DATA:category_metric},%{NUMBER:value_metric}'] | |
'match' => ['message', '%{TIME} %{DATE},%{GREEDYDATA:target_metric},%{DATA:category_metric},%{NUMBER:value_metric}'] | |
'match' => ['message', '%{TIME} %{DATE} INFO %{GREEDYDATA:target_metric},%{DATA:category_metric},%{NUMBER:value_metric}'] | |
'match' => ['message', '%{TIME} %{DATE} INFO %{GREEDYDATA:target_metric},%{DATA:category_metric},%{NUMBER:value_metric}'] | |
'match' => ['message', '%{TIME} %{DATE} INFO %{GREEDYDATA:target_metric},%{DATA:category_metric},%{NUMBER:value_metric}'] | |
'match' => ['message', '%{TIME} %{DATE} INFO PerfLogger %{JAVACLASS:target_metric},%{DATA:category_metric},%{NUMBER:value_metric}'] | |
'match' => ['message', '%{TIME} %{DATE} INFO PerfLogger %{DATA:target_metric},%{DATA:category_metric},%{NUMBER:value_metric}'] | |
'match' => ['message', '%{TIME} %{DATE} INFO PerfLogger %{GREEDYDATA:data1} from %{GREEDYDATA:target_metric} where %{GREEDYDATA:data2},%{DATA:category_metric},%{NUMBER:value_metric}'] | |
'match' => ['message', '%{TIME} %{DATE} INFO PerfLogger %{GREEDYDATA:data1} FROM %{GREEDYDATA:target_metric} WHERE %{GREEDYDATA:data2},%{DATA:category_metric},%{NUMBER:value_metric}'] | |
'match' => ['message', '%{TIME} %{DATE} INFO PerfLogger %{GREEDYDATA:data1} FROM %{GREEDYDATA:target_metric} where %{GREEDYDATA:data2},%{DATA:category_metric},%{NUMBER:value_metric}'] | |
'match' => ['message', '%{TIME} %{DATE} INFO PerfLogger %{GREEDYDATA:data1} from %{GREEDYDATA:target_metric} WHERE %{GREEDYDATA:data2},%{DATA:category_metric},%{NUMBER:value_metric}'] | |
'match' => ['message', '%{TIME} %{DATE},%{GREEDYDATA:data1} from %{GREEDYDATA:target_metric} where %{GREEDYDATA:data2},%{DATA:category_metric},%{NUMBER:value_metric}'] | |
'match' => ['message', '%{TIME} %{DATE},%{GREEDYDATA:data1} FROM %{GREEDYDATA:target_metric} WHERE %{GREEDYDATA:data2},%{DATA:category_metric},%{NUMBER:value_metric}'] | |
'match' => ['message', '%{TIME} %{DATE},%{GREEDYDATA:data1} FROM %{GREEDYDATA:target_metric} where %{GREEDYDATA:data2},%{DATA:category_metric},%{NUMBER:value_metric}'] | |
'match' => ['message', '%{TIME} %{DATE},%{GREEDYDATA:data1} from %{GREEDYDATA:target_metric} WHERE %{GREEDYDATA:data2},%{DATA:category_metric},%{NUMBER:value_metric}'] | |
'match' => ['message', '%{TIME} %{DATE},select %{GREEDYDATA:data1}, %{GREEDYDATA:target_metric},%{DATA:category_metric},%{NUMBER:value_metric}'] | |
add_tag => ["graphite_metrics"] | |
named_captures_only => true | |
tags => "application" | |
break_on_match => false | |
} | |
metrics { | |
tags => "graphite_metrics" | |
add_tag => ["metrics"] | |
timer => ["%{environment}.%{host}.%{category_metric}.%{target_metric}.metrics", "%{value_metric}"] | |
clear_interval => 300 | |
meter => "%{environment}.%{host}.%{category_metric}.%{target_metric}" | |
} | |
} | |
output { | |
graphite { | |
tags => "metrics" | |
host =>'graphiteui01' | |
include_metrics => ["\S*"] | |
fields_are_metrics => true | |
} | |
#On the logs, the latency is defined with a numeric value at the end of each line. | |
#Output to send the latency metrics. | |
graphite { | |
tags => 'graphite_metrics' | |
host => 'graphiteui01' | |
metrics => ["%{environment}.%{host}.%{category_metric}.%{target_metric}.latency","%{value_metric}"] | |
} | |
} | |
=================================================================================================================== | |
filter { | |
grok { | |
'match' => ['message', '%{TIME} %{DATE} INFO PerfLogger %{JAVACLASS:target_metric},%{DATA:category_metric},%{NUMBER:value_metric}'] | |
add_tag => ["graphite_metrics"] | |
named_captures_only => true | |
tags => "application" | |
} | |
grok { | |
'match' => ['message', '%{TIME} %{DATE},%{JAVACLASS:target_metric},%{DATA:category_metric},%{NUMBER:value_metric}'] | |
add_tag => ["graphite_metrics"] | |
named_captures_only => true | |
tags => "application" | |
} | |
grok { | |
'match' => ['message', '%{TIME} %{DATE},%{GREEDYDATA:target_metric},%{DATA:category_metric},%{NUMBER:value_metric}'] | |
add_tag => ["graphite_metrics"] | |
named_captures_only => true | |
tags => "application" | |
} | |
grok { | |
'match' => ['message', '%{TIME} %{DATE} INFO %{GREEDYDATA:target_metric},%{DATA:category_metric},%{NUMBER:value_metric}'] | |
add_tag => ["graphite_metrics"] | |
named_captures_only => true | |
tags => "application" | |
} | |
grok { | |
'match' => ['message', '%{TIME} %{DATE} INFO %{GREEDYDATA:target_metric},%{DATA:category_metric},%{NUMBER:value_metric}'] | |
add_tag => ["graphite_metrics"] | |
named_captures_only => true | |
tags => "application" | |
} | |
metrics { | |
tags => "graphite_metrics" | |
add_tag => ["metrics"] | |
timer => ["%{environment}.%{host}.%{category_metric}.%{target_metric}.metrics", "%{value_metric}"] | |
clear_interval => 300 | |
meter => "%{environment}.%{host}.%{category_metric}.%{target_metric}" | |
} | |
} | |
output { | |
graphite { | |
tags => "metrics" | |
host =>'graphiteui01' | |
include_metrics => ["\S*"] | |
fields_are_metrics => true | |
} | |
#On the logs, the latency is defined with a numeric value at the end of each line. | |
#Output to send the latency metrics. | |
graphite { | |
tags => 'graphite_metrics' | |
host => 'graphiteui01' | |
metrics => ["%{environment}.%{host}.%{category_metric}.%{target_metric}.latency","%{value_metric}"] | |
} | |
} | |
=========================================================================================================================== | |
input { | |
file { | |
'path' => ['/tmp/application'] | |
'tags' => ['metrics','application'] | |
'type' => 'application' | |
} | |
} | |
filter { | |
grok { | |
'match' => ['message', '%{TIME} %{DATE} INFO PerfLogger %{JAVACLASS:metrics},%{DATA:category},%{NUMBER:jdbc}'] | |
} | |
metrics { | |
type => "application" | |
meter => "jdbc.%{jdbc}" | |
add_tag => "metrics" | |
} | |
} | |
output { | |
stdout { | |
# only emit events with the 'metric' tag | |
tags => "metrics" | |
message => "Value: %{jdbc} Counter: %{jdbc.count} Rate 1 Min% {jdbc.rate_1m}" | |
} | |
} | |
#Value: %{jdbc} Counter: %{jdbc.count} Rate 1 Min% {jdbc.rate_1m | |
#I'm getting:
Value: 100 Counter: %{jdbc.count} Rate 1 Min% {jdbc.rate_1m} | |
Value: 200 Counter: %{jdbc.count} Rate 1 Min% {jdbc.rate_1m} | |
Value: 300 Counter: %{jdbc.count} Rate 1 Min% {jdbc.rate_1m} | |
#I'm expecting:
Value: 100 Counter: 1 Rate 1 Min% 100 | |
Value: 200 Counter: 1 Rate 1 Min% 150 | |
Value: 300 Counter: 1 Rate 1 Min% 200 | |
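A likely explanation (an assumption, not stated in the original): the metrics filter does not add counter fields to the log events themselves. It emits separate events on its flush interval, and only those flushed events carry the meter fields (here named after "jdbc.%{jdbc}"). The original events reach the stdout output only because the file input already tags them with "metrics", so sprintf references like %{jdbc.count} on them are never replaced.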
================================================================================================================= | |
#Input | |
12:23:05 03/13/2014 INFO PerfLogger select DISPLAY_OFFERING_ON_WEB_IND from HWAYS_MASTER .offering_master where offering_master_id = ? ,QUERY,4 | |
#Grok patterns | |
%{TIME} %{DATE} INFO PerfLogger %{DATA:data},QUERY,%{NUMBER:query_ms} | |
It works fine on http://grokdebug.herokuapp.com/.
However, the Logstash server fails when it tries to parse it.
filter { | |
#11:33:44 03/13/2014 INFO PerfLogger ImageAssetJdbcDao.get,JDBC,10 | |
grok { | |
'pattern' => '%{TIME} %{DATE} INFO PerfLogger %{DATA:data},JDBC,%{NUMBER:jdbc}' | |
'type' => 'wbc' | |
} | |
#12:11:00 03/13/2014 INFO PerfLogger UNKNOWN,QUERY,308 | |
grok { | |
'pattern' => '%{TIME} %{DATE} INFO PerfLogger %{DATA:data},QUERY,%{NUMBER:query_ms}' | |
'type' => 'wbc' | |
} | |
} | |
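A possible consolidation of the two blocks above into one grok filter using the array form of match (a sketch, assuming Logstash 1.2 or later):
filter {
  grok {
    # Try the JDBC form first, then the QUERY form
    match => [ "message", "%{TIME} %{DATE} INFO PerfLogger %{DATA:data},JDBC,%{NUMBER:jdbc}",
               "message", "%{TIME} %{DATE} INFO PerfLogger %{DATA:data},QUERY,%{NUMBER:query_ms}" ]
  }
}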
============================================================================================================================= | |
input { | |
stdin { type => "file" } | |
#redis { | |
# data_type => 'list' | |
# host => 'localhost' | |
# key => 'logstash:beaver' | |
# type => 'redis-input-beaver' | |
#} | |
} | |
filter { | |
grok { | |
tags => [ "syslog" ] | |
pattern => [ "%{SYSLOGBASE}" ] | |
add_tag => "%{program}" | |
} | |
grok { | |
tags => [ "postfix/smtpd" ] | |
pattern => [ "%{POSTFIXSMTPDCONNECTS}", | |
"%{POSTFIXSMTPDACTIONS}", | |
"%{POSTFIXSMTPDTIMEOUTS}", | |
"%{POSTFIXSMTPDLOGIN}", | |
"." ] | |
named_captures_only => true | |
} | |
grok { | |
tags => [ "postfix/smtp" ] | |
pattern => [ "%{POSTFIXSMTPRELAY}", | |
"%{POSTFIXSMTPCONNECT}", | |
"%{POSTFIXSMTP5XX}", | |
"%{POSTFIXSMTPREFUSAL}", | |
"%{POSTFIXSMTPLOSTCONNECTION}", | |
"%{POSTFIXSMTPTIMEOUT}", | |
"." ] | |
named_captures_only => true | |
} | |
grok { | |
tags => [ "postfix/bounce" ] | |
pattern => "%{POSTFIXBOUNCE}" | |
named_captures_only => true | |
} | |
grok { | |
tags => [ "postfix/qmgr" ] | |
pattern => "%{POSTFIXQMGR}" | |
named_captures_only => true | |
} | |
grok { | |
tags => [ "postfix/anvil" ] | |
pattern => "%{POSTFIXANVIL}" | |
named_captures_only => true | |
} | |
grok { | |
tags => [ "postfix/cleanup" ] | |
pattern => "%{POSTFIXCLEANUP}" | |
named_captures_only => true | |
} | |
} | |
output { | |
stdout { debug => true } | |
} | |
============================================================================================================================== | |
input { | |
file { | |
path => [ "/var/log/messages", "/var/log/kern.log" ] | |
type => "linux-syslog" | |
} | |
file { | |
path => "/var/log/apache2/access.log" | |
type => "apache-access" | |
} | |
file { | |
path => "/var/log/apache2/error.log" | |
type => "apache-error" | |
} | |
} | |
output { | |
amqp { | |
host => "myamqpserver" | |
exchange_type => "fanout" | |
name => "rawlogs" | |
} | |
amqp { | |
host => "127.0.0.1" | |
exchange_type => "topic" | |
name => "logsniff" | |
} | |
stdout { } | |
} | |
# Filters are applied in the order they appear.
filter { | |
multiline { | |
type => "supervisorlogs" | |
pattern => "^\s" | |
what => previous | |
} | |
multiline { | |
type => "testing" | |
pattern => "^\s" | |
what => previous | |
} | |
grok { | |
type => "linux-syslog" | |
pattern => ["%{SYSLOG_SUDO}", "%{SYSLOG_KERNEL}", "%{SYSLOGLINE}" ] | |
} | |
grok { | |
type => "nagios" | |
pattern => "%{NAGIOSLOGLINE}" | |
} | |
#date { | |
#" testing" => fizzle | |
#} | |
} | |
============================================================================================================ | |
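# Custom patterns used by the filters below (referenced via patterns_dir => "/etc/logstash/patterns"); save these in a file in that directory: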
POSTFIXHOST (%{HOST:remotehostname}\[%{IP:remoteip}\]) | |
POSTFIXQID (?<postfix_queue_id>[0-9A-F]{11}|NOQUEUE):
RSYSLOGRELPMESSAGE (%{SYSLOG5424PRI}%{TIMESTAMP_ISO8601:syslog5424_ts} %{SYSLOGHOST:loghost} %{SYSLOGPROG}: %{GREEDYDATA:message}) | |
input { | |
relp { | |
port => 5142 | |
type => 'rsyslogrelp' | |
debug => true | |
} | |
} | |
filter { | |
if [type] == "rsyslogrelp" { | |
syslog_pri { } | |
grok { | |
patterns_dir => "/etc/logstash/patterns" | |
match => [ 'message', '%{RSYSLOGRELPMESSAGE}' ] | |
overwrite => [ "message" ] | |
} | |
} | |
# silence spammy snmpd | |
if [program] == 'snmpd' and [message] =~ '^error on subcontainer' { | |
drop {} | |
} | |
# fix non fqdns | |
if [loghost] == "nue1d0" or [loghost] == "nue1" { | |
mutate { replace => [ "loghost", "%{loghost}.datavibe.net" ] } | |
} | |
if [loghost] == "eeqj" { | |
mutate { replace => [ "loghost", "%{loghost}.com" ] } | |
} | |
if [loghost] == "admin" { | |
mutate { replace => [ "loghost", "%{loghost}.eeqj.de" ] } | |
} | |
if [program] == 'sshd' { | |
grok { | |
patterns_dir => "/etc/logstash/patterns" | |
match => [ 'message', 'from %{IP:remoteip} port' ] | |
tag_on_failure => [] | |
} | |
} | |
if [program] =~ 'dovecot' { | |
grok { | |
patterns_dir => "/etc/logstash/patterns" | |
match => [ 'message', 'rip=%{IP:remoteip}' ] | |
tag_on_failure => [] | |
} | |
} | |
if [program] =~ '^postfix\/' { | |
grok { | |
patterns_dir => "/etc/logstash/patterns" | |
match => [ 'message', '%{POSTFIXQID} %{GREEDYDATA:message}' ] | |
overwrite => [ "message" ] | |
tag_on_failure => [] | |
} | |
} | |
if [program] == 'postfix/smtpd' { | |
grok { | |
patterns_dir => "/etc/logstash/patterns" | |
match => ['message', '(?<postfix_event>connect|disconnect) from %{POSTFIXHOST}' ] | |
add_field => [ 'event', 'postfix_smtpd_%{postfix_event}' ] | |
remove_field => 'postfix_event' | |
tag_on_failure => [] | |
} | |
grok { | |
patterns_dir => "/etc/logstash/patterns" | |
match => ['message', '^reject: %{WORD} from %{POSTFIXHOST}:' ] | |
tag_on_failure => [] | |
} | |
} | |
if [remoteip] != '' { | |
geoip { | |
source => 'remoteip' | |
} | |
} | |
} | |
output { | |
stdout { | |
codec => rubydebug | |
} | |
file { | |
codec => json | |
flush_interval => 0 | |
path => "/opt/logstash/logarchive/%{+YYYYMM}/%{+YYYYMMdd}.%{loghost}.json" | |
} | |
} | |
============================================================================================================ | |
input { | |
file { | |
type => "example" | |
path => ["/srv/infinidb.export/db-2013-08-23-17:32:06.log"] | |
start_position => "beginning" | |
} | |
} | |
filter { | |
if [type] == "example" { | |
grok { | |
match => [ "message", "(?:%{INFINIDB_EXPORT})" ] | |
patterns_dir => ["/srv/logstash/grok"] | |
} | |
date { | |
match => [ "timestamp", | |
"MMM d HH:mm:ss", # syslog 'day' value can be space-leading | |
"MMM dd HH:mm:ss", | |
"ISO8601" # Some syslogs use ISO8601 time format | |
] | |
} | |
} | |
} | |
output { | |
stdout { codec => rubydebug } | |
} | |
========================================================================================================================= | |
input { | |
file { | |
type => "linux-syslog" | |
# Wildcards work, here :) | |
path => [ "/var/log/remote/remotesys.log", "/var/log/messages" ] | |
sincedb_path => [ "/tmp/logstash" ] | |
} | |
} | |
filter { | |
grok { | |
type => "linux-syslog" | |
pattern => [ "%{SYSLOGBASE}" ] | |
} | |
grep { | |
type => "linux-syslog" | |
match => [ "@message", "apache-access:" ] | |
add_tag => "apache-access" | |
drop => false | |
} | |
grok { | |
type => "linux-syslog" | |
tags => ["apache-access"] | |
pattern => [ "%{COMBINEDAPACHELOG}" ] | |
} | |
} | |
output { | |
elasticsearch { | |
embedded => true | |
} | |
} | |
================================================================================================================================= | |
input { | |
file { | |
type => "nginx_access" | |
path => ["/var/log/nginx/**"] | |
exclude => ["*.gz", "error.*"] | |
discover_interval => 10 | |
} | |
} | |
filter { | |
if [type] == "nginx_access" { | |
grok { | |
match => { | |
"message" => "%{COMBINEDAPACHELOG}" | |
} | |
} | |
date { | |
# Try to pull the timestamp from the 'timestamp' field (parsed above with | |
# grok). The apache time format looks like: "18/Aug/2011:05:44:34 -0700" | |
match => { "timestamp" => "dd/MMM/yyyy:HH:mm:ss Z" } | |
} | |
} | |
} | |
output { | |
stdout { | |
debug => true | |
} | |
redis { | |
host => "YOUR_LOGSTASH_HOST" | |
data_type => "list" | |
key => "logstash" | |
} | |
} | |
================================================================================================================================== | |
# ======================================================================================================================== | |
/var/log/syslog: | |
Sep 27 22:42:19 aragorn dbus[884]: [system] Activating service name='org.kubuntu.qaptworker' (using servicehelper) | |
Sep 27 22:42:19 aragorn dbus[884]: [system] Successfully activated service 'org.kubuntu.qaptworker' | |
Sep 27 22:42:19 aragorn dbus[884]: [system] Activating service name='org.debian.AptXapianIndex' (using servicehelper) | |
Sep 27 22:42:19 aragorn dbus[884]: [system] Successfully activated service 'org.debian.AptXapianIndex' | |
# ======================================================================================================================== | |
logstash.conf | |
# ===== # | |
# INPUT # | |
# ===== # | |
input { | |
file { | |
type => "linux-syslog" | |
path => ["/var/log/syslog","/var/log/auth.log","/var/log/kern.log"] | |
debug => true | |
} | |
} | |
# ====== # | |
# FILTER # | |
# ====== # | |
filter { | |
grok { | |
type => "linux-syslog" | |
pattern => "%{SYSLOGLINE}" | |
} | |
date { | |
type => "linux-syslog" | |
timestamp => ["MMM dd HH:mm:ss","MMM d HH:mm:ss"] | |
} | |
# noop { | |
# type => "linux-syslog" | |
# add_tag => ["{'mongoDate': {'$date': '%{@timestamp}'}"] | |
# } | |
} | |
# ====== # | |
# OUTPUT # | |
# ====== # | |
output { | |
mongodb { | |
type => "linux-syslog" | |
host => "127.0.0.1" | |
port => "27017" | |
database => "logs" | |
collection => "syslogs" | |
} | |
} | |
# ======================================================================================================================== | |
logstash.sh | |
#!/bin/sh | |
# kill -2 pid to stop logstash | |
java -jar logstash-1.1.1-monolithic.jar agent -v -f logstash.conf & | |
# ======================================================================================================================== | |
Error example | |
Failed parsing date from field {"field":"timestamp","value":"Sep 27 22:42:19","exception":"java.lang.IllegalArgumentException: Invalid format: \"Sep 27 22:42:19\"","backtrace":["org/joda/time/format/DateTimeFormatter.java:866:in `parseDateTime'","file:/home/olivier/application/logstash/logstash-1.1.1-monolithic.jar!/logstash/filters/date.rb:101:in `register'","org/jruby/RubyProc.java:258:in `call'","file:/home/olivier/application/logstash/logstash-1.1.1-monolithic.jar!/logstash/filters/date.rb:149:in `filter'","org/jruby/RubyArray.java:1615:in `each'","file:/home/olivier/application/logstash/logstash-1.1.1-monolithic.jar!/logstash/filters/date.rb:143:in `filter'","org/jruby/RubyArray.java:1615:in `each'","file:/home/olivier/application/logstash/logstash-1.1.1-monolithic.jar!/logstash/filters/date.rb:136:in `filter'","org/jruby/RubyHash.java:1186:in `each'","file:/home/olivier/application/logstash/logstash-1.1.1-monolithic.jar!/logstash/filters/date.rb:128:in `filter'","file:/home/olivier/application/logstash/logstash-1.1.1-monolithic.jar!/logstash/filters/base.rb:88:in `execute'","file:/home/olivier/application/logstash/logstash-1.1.1-monolithic.jar!/logstash/filterworker.rb:58:in `filter'","org/jruby/RubyArray.java:1615:in `each'","file:/home/olivier/application/logstash/logstash-1.1.1-monolithic.jar!/logstash/filterworker.rb:48:in `filter'","org/jruby/RubyArray.java:1615:in `each'","file:/home/olivier/application/logstash/logstash-1.1.1-monolithic.jar!/logstash/filterworker.rb:47:in `filter'","file:/home/olivier/application/logstash/logstash-1.1.1-monolithic.jar!/logstash/filterworker.rb:32:in `run'","file:/home/olivier/application/logstash/logstash-1.1.1-monolithic.jar!/logstash/agent.rb:708:in `run_filter'","file:/home/olivier/application/logstash/logstash-1.1.1-monolithic.jar!/logstash/agent.rb:435:in `run_with_config'"],"level":"warn"} | |
====================================================================================================================================== | |
input { | |
file { | |
path => "/var/log/messages" | |
type => "syslog" | |
} | |
file { | |
path => "/var/log/php_errors.log" | |
type => "phperror" | |
} | |
file { | |
path => "/var/log/httpd/access_log" | |
type => "apache_access" | |
} | |
file { | |
path => "/var/log/httpd/error_log" | |
type => "apache_error" | |
} | |
} | |
filter { | |
if [type] == "apache_access" { | |
grok { | |
match => ["message", "%{COMBINEDAPACHELOG}"] | |
} | |
} else if [type] == "syslog" {
grok { | |
match => ["message", "%{SYSLOGBASE2} %{GREEDYDATA:message}" ] | |
overwrite => ["message"] | |
} | |
} else if [type] == "phperror" { | |
grok { | |
patterns_dir => "./p" | |
match => ["message", "%{PHP_LOG}"] | |
overwrite => ["message"] | |
} | |
} | |
} | |
output { | |
#stdout { codec => rubydebug } | |
elasticsearch { embedded => true } | |
if [type] == "apache_access" { | |
statsd { | |
host => "localhost" | |
port => 8125 | |
namespace => "logstash" | |
increment => "apache.httpcode.%{response}" | |
} | |
} else if [type] == "phperror" { | |
statsd { | |
host => "localhost" | |
port => 8125 | |
namespace => "logstash" | |
increment => "phperror.level.%{level}" | |
} | |
} | |
} | |
======================================================================================================================== | |
input { | |
file { | |
path => "/Users/nopik/Warsztat/logstash/redis.log" | |
type => "r" | |
start_position=>"beginning" | |
} | |
} | |
filter { | |
grok { | |
type => "r" | |
pattern => [ "\[%{NUMBER:pid}\] %{MONTHDAY:md} %{MONTH:m} %{TIME:t} %{DATA:level} %{GREEDYDATA:redis_message}" ] | |
} | |
} | |
output { | |
elasticsearch { | |
embedded => true | |
} | |
} | |
Some data: | |
[54778] 22 Sep 19:49:06 * Server started, Redis version 2.4.9 | |
[54778] 22 Sep 19:49:06 * The server is now ready to accept connections on port 6379 | |
[54778] 22 Sep 19:49:06 - 0 clients connected (0 slaves), 922336 bytes in use | |
========================================================================================================================= | |
input { | |
# unix { | |
# path => "/var/run/openshift/lockstash.sock" | |
# codec => "json" | |
# type => "metric" | |
# force_unlink => true | |
# } | |
file { | |
path => ["/var/lib/openshift/**/log/*", "/var/lib/openshift/**/logs/*"] | |
stat_interval => 1 | |
type => "gear_log" | |
} | |
} | |
filter { | |
if [type] == "gear_log" { | |
grok { | |
match => { | |
"path" => "/var/lib/openshift/(?<gear_uuid>[^/]+)/" | |
} | |
} | |
grok { | |
patterns_dir => "logstash.d/patterns" | |
match => { | |
"message" => "%{NODEJS_TIMESTAMP:timestamp}: %{GREEDYDATA:message}" | |
} | |
overwrite => ["message", "timestamp"] | |
} | |
date { | |
match => [ "timestamp", "EEE MMM dd yyyy HH:mm:ss zzzZZ (zzz)" ] | |
remove_field => ["timestamp"] | |
} | |
} | |
} | |
output { | |
if [type] == "gear_log" { | |
file { | |
path => "/var/log/openshift/logstash/%{gear_uuid}/gear_logs" | |
} | |
} else if [type] == "metric" { | |
file { | |
path => "/var/log/openshift/logstash/%{gear_uuid}/metrics" | |
} | |
} | |
} | |
=========================================================================================================== | |
# | |
# | |
input { | |
file { | |
type => "apache" | |
path => ["/var/log/httpd/logstash_access_log"] | |
} | |
file { | |
type => "tomcat" | |
path => [ "/var/log/tomcat6/catalina.out" ] | |
codec => multiline { | |
pattern => "(^\d+\serror)|(^.+Exception: .+)|(^\s+at .+)|(^\s+... \d+ more)|(^\s*Caused by:.+)" | |
what => "previous" | |
} | |
} | |
file { | |
type => "postfix" | |
path => ["/var/log/mail.*"] | |
} | |
file { | |
type => "alpha" | |
path => ["/opt/alpha/logs/alpha.log"] | |
tags => [ "alpha", "finance" ] | |
} | |
} | |
filter { | |
if [type] == "postfix" { | |
grok { | |
patterns_dir => ["/etc/logstash/patterns"] | |
match => [ "message", "%{POSTFIX}" ] | |
add_tag => [ "postfix", "grokked", "%{component}" ] | |
} | |
if "qmgr" in [tags] { | |
grok { | |
patterns_dir => ["/etc/logstash/patterns"] | |
match => [ "message", "%{POSTFIXQMGR}" ] | |
} | |
} | |
date { | |
match => [ "timestamp", "MMM dd HH:mm:ss" ] | |
add_tag => [ "dated" ] | |
} | |
} | |
if [type] == "tomcat" and [message] !~ /(.+)/ { | |
drop { } | |
} | |
if [type] == "alpha" { | |
grok { | |
match => [ "message", "(?<timestamp>[\d]+)\+(?<tz>[\w]{3})\s(?<msg>.*)\s%{UNIXPATH:file}\#%{POSINT:line}\s%{GREEDYDATA:id}\s%{WORD:appname}\/(?<appver>[\d.\d.\d\w]+)\/(?<apprelease>[\w\s]+)" ] | |
add_tag => [ "grokked" ] | |
} | |
date { | |
match => [ "timestamp", "UNIX" ] | |
timezone => tz | |
add_tag => [ "dated" ] | |
} | |
if [apprelease] == "QA Release" { | |
mutate { | |
add_field => [ "environment", "qa" ] | |
} | |
} else {
mutate { | |
add_field => [ "environment", "production" ] | |
} | |
} | |
mutate { | |
convert => [ "line", "integer" ] | |
} | |
} | |
} | |
output { | |
stdout { } | |
redis { host => "10.0.0.1" data_type => "list" key => "logstash" } | |
} | |
=============================================================================================================================== | |
input { | |
tcp{ | |
port => 2003 | |
type => "graphite" | |
mode => "server" | |
} | |
} | |
filter { | |
grok { | |
pattern => "%{DATA:name} %{NUMBER:value:float} %{POSINT:ts}" | |
singles => true | |
type => "graphite" | |
} | |
date { | |
type => "graphite" | |
match => ["ts", UNIX] | |
} | |
mutate { | |
type => "graphite" | |
remove => ts | |
gsub =>[ | |
"name", ".value", "" | |
,"name", "counter-", "" | |
,"name", "gauge-", "" | |
] | |
} | |
} | |
output { | |
# stdout { debug => true debug_format => "ruby" type => "graphite"} | |
rabbitmq { | |
host => "rmq-10002-prod-nydc1.nydc1.outbrain.com" | |
vhost => "logstash" | |
user => "logstash" | |
password => "logstash" | |
exchange => "logstash.graphite.out" | |
exchange_type => "fanout" | |
durable => false # If rabbitmq restarts, the exchange disappears. | |
persistent => false # Messages are not persisted to disk | |
type => "graphite" | |
} | |
} | |
=========================================================================================================== | |
input { | |
udp { | |
port => 514 | |
type => "syslog" | |
} | |
} | |
filter { | |
grok { | |
add_tag => ['dns'] | |
pattern => ["%{GREEDYDATA:throw_data}%{SYSLOGTIMESTAMP:throw_syslogtimestamp} %{IP:router_ip}\/%{IP:throw_ip} %{INT:throw_int}: %{SYSLOGTIMESTAMP:syslogtimestamp} %{DATA:timezone}: %{DATA:logtype}: %{DATA:logline1} %{IP:client_ip}%{DATA:logline2} '%{DATA:address}'"] | |
type => "syslog" | |
} | |
date { | |
type => 'syslog' | |
match => [ 'syslogtimestamp', 'MMM d HH:mm:ss.SSS', 'MMM dd HH:mm:ss.SSS'] | |
} | |
} | |
output { | |
elasticsearch { | |
cluster => "corp" | |
index => "logstash-%{+YYYY.MM}" | |
node_name => "logstash.example.com" | |
} | |
stdout { } | |
} | |
======================================================================================================================================= | |
input { | |
tcp { | |
port => 5514 | |
type => "linux-syslog" | |
} | |
udp { | |
port => 5514 | |
type => "linux-syslog" | |
} | |
} | |
filter { | |
#first try to filter stuff by IP's | |
if [host_ip] =~ /^10.99/ { | |
mutate { | |
replace => [ "type", "ucs-syslog"] | |
} | |
} #end if | |
} #end filter | |
filter { | |
if [type] == "linux-syslog" { | |
grok { | |
#linux-syslog pattern | |
patterns_dir => [ "/etc/logstash/indexing-service/config/OCT_logstash_patterns" ] | |
match => [ "message", "%{LINUXSYSLOG}" ] | |
#pattern => [ "<%{POSINT:syslog_pri}>%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" ] | |
#run-parts pattern | |
pattern => [ "%{SYSLOG5424PRI:syslog_pri}%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{PROG:syslog_program}\(%{URIPATH:syslog_uri}\)\[%{POSINT:syslog_pid}%{GREEDYDATA:syslog_message}" ] | |
add_field => [ "received_at", "%{@timestamp}" ] | |
add_field => [ "received_from", "%{@source_host}" ] | |
} | |
date { | |
match => [ "syslog_timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss" ] | |
} | |
grok { | |
match => [ "host", "%{IP:host_ip}" ] | |
} | |
##not the right way to do this - but we'll figure out a better way later | |
grok { | |
match => [ "host", "%{IP:host_fqdn}" ] | |
} | |
dns { | |
reverse => [ "host_fqdn", "field_with_address" ] | |
action => "replace" | |
add_tag => "dns-ed" | |
} | |
mutate { | |
exclude_tags => "_grokparsefailure" | |
#replace => [ "@source_host", "%{syslog_hostname}" ] | |
replace => [ "host", "%{host_fqdn}" ] | |
replace => [ "@message", "%{syslog_message}" ] | |
replace => [ "received_from", "%{host_fqdn}" ] | |
} | |
mutate { | |
remove => [ "syslog_hostname", "syslog_message", "host_fqdn" ] | |
} | |
} #end if | |
} | |
output { | |
elasticsearch { | |
cluster => "logstash" | |
embedded => false | |
} | |
#stdout {} | |
# file { | |
# path => '/tmp/logstash.txt' | |
# } | |
} | |
#============================ | |
# patterns file | |
#============================ | |
LINUXSYSLOG <%{POSINT:syslog_pri}>%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message} | |
RUNPARTS %{SYSLOG5424PRI:syslog_pri}%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{PROG:syslog_program}\(%{URIPATH:syslog_uri}\)\[%{POSINT:syslog_pid}%{GREEDYDATA:syslog_message} | |
#========================== | |
# logstash log output | |
#========================== | |
{:timestamp=>"2013-09-13T11:18:20.259000-0600", :message=>"Using milestone 2 input plugin 'tcp'. This plugin should be stable, but if you see strange behavior, please let us know! For more information on plugin milestones, see http://logstash.net/docs/1.2.1/plugin-milestones", :level=>:warn} | |
{:timestamp=>"2013-09-13T11:18:20.457000-0600", :message=>"Using milestone 2 input plugin 'udp'. This plugin should be stable, but if you see strange behavior, please let us know! For more information on plugin milestones, see http://logstash.net/docs/1.2.1/plugin-milestones", :level=>:warn} | |
{:timestamp=>"2013-09-13T11:18:20.502000-0600", :message=>"You are using a deprecated config setting \"pattern\" set in grok. Deprecated settings will continue to work, but are scheduled for removal from logstash in the future. If you have any questions about this, please visit the #logstash channel on freenode irc.", :name=>"pattern", :plugin=><LogStash::Filters::Grok --->, :level=>:warn} | |
{:timestamp=>"2013-09-13T11:18:20.518000-0600", :message=>"+---------------------------------------------------------+\n| An unexpected error occurred. This is probably a bug. |\n| You can find help with this problem in a few places: |\n| |\n| * chat: #logstash IRC channel on freenode irc. |\n| IRC via the web: http://goo.gl/TI4Ro |\n| * email: [email protected] |\n| * bug system: https://logstash.jira.com/ |\n| |\n+---------------------------------------------------------+\nThe error reported is: \n can't convert Array into String"} | |
#========================= | |
# sample input line | |
#========================= | |
<77>Sep 13 11:01:01 kibana-1 run-parts(/etc/cron.hourly)[1864 finished 0anacron | |
============================================================================================================================= | |
input { | |
unix { | |
path => "/var/run/openshift/lockstash.sock" | |
codec => "json" | |
type => "metric" | |
} | |
file { | |
path => ["/var/lib/openshift/**/log/*", "/var/lib/openshift/**/logs/*"] | |
stat_interval => 5 | |
type => "gear_log" | |
} | |
} | |
filter { | |
if [type] == "gear_log" { | |
grok { | |
match => [ "path", "/var/lib/openshift/(?<gear_uuid>[^/]+)/" ] | |
} | |
} | |
} | |
output { | |
if [type] == "gear_log" { | |
file { | |
path => "/var/log/openshift/logstash/gear_log" | |
} | |
} else if [type] == "metric" { | |
file { | |
path => "/var/log/openshift/logstash/${gear_uuid}/metric" | |
} | |
} | |
} | |
=================================================================================================== | |
input { | |
amqp { | |
# ship logs to the 'rawlogs' fanout queue. | |
type => "all" | |
host => "test-rabbitmq-01" | |
exchange => "rawlogs" | |
name => "rawlogs_consumer" | |
user => "guest" | |
password => "guest" | |
} | |
} | |
filter { | |
grok { | |
type => "syslog" # for logs of type "syslog" | |
pattern => "%{SYSLOGLINE}" | |
# You can specify multiple 'pattern' lines | |
} | |
grok { | |
type => "java" # for logs of type 'java' | |
pattern => "%{JAVASTACKTRACEPART}" | |
} | |
} | |
output { | |
stdout { } | |
# If your elasticsearch server is discoverable with multicast, use this: | |
#elasticsearch { } | |
gelf { | |
facility => "logstash-gelf" | |
host => '127.0.0.1' | |
} | |
# If you can't discover using multicast, set the address explicitly | |
#elasticsearch { | |
gelf { | |
facility => "logstash-gelf" | |
host => '127.0.0.1' | |
} | |
# If you can't discover using multicast, set the address explicitly | |
#elasticsearch { | |
# host => "myelasticsearchserver" | |
#} | |
} | |
=============================================================================================== | |
input { | |
stdin { | |
type => "example" | |
} | |
tcp { | |
type => "tcp" | |
tags => "tcp" | |
port => "443" | |
} | |
file { | |
type => "syslog" | |
# Wildcards work, here :) | |
path => [ "/var/log/*.log", "/var/log/messages", "/var/log/syslog" ] | |
} | |
} | |
filter { | |
if [type] == "tcp" { | |
grok { | |
match => [ "message", '%{IP:clientip} - - \[%{MONTHDAY:monthday}/%{MONTH:month}/%{YEAR:year}:%{TIME:time}\] \"%{URIPROTO:protocol}%{GREEDYDATA:less}%{URIPARAM:params} HTTP %{NUMBER:httpversion}" %{NUMBER:response} %{NUMBER:bytes} %{QS:referrer}%{GREEDYDATA:more}' ] | |
} | |
#geoip {source => "message" } | |
} | |
if [type] == "example" { | |
grok { | |
match => [ "message", "::%{WORD:word}"] | |
match => [ "message", ":::%{WORD:word}"] | |
} | |
} | |
geoip {source => "clientip" } | |
} | |
output { | |
stdout { codec => rubydebug } | |
statsd { | |
host => "192.168.1.149" | |
type => "tcp" | |
port => 8125 | |
namespace => "logstash" | |
debug => false | |
increment => "{clientip}" | |
increment => "{response}" | |
#count => "%{bytes}" | |
} | |
redis { host => "127.0.0.1" data_type => "list" key => "logstash" } | |
} | |
=============================================================================================================================== | |
filter { | |
if [logsource] =~ /^foofe/ or [logsource] =~ /^foomw/ { | |
grok { | |
match => [ "message", "(?:%{SYSLOGTIMESTAMP}|%{TIMESTAMP_ISO8601}) (?:%{SYSLOGFACILITY} )?%{SYSLOGHOST} %{PROG}(?:\[%{POSINT}\])?: %{WORD:logcategory} - %{GREEDYDATA:message}" ] | |
overwrite => [ "message" ] | |
} | |
} | |
} | |
output { | |
if [logsource] =~ /^foofe/ or [logsource] =~ /^foomw/ { | |
elasticsearch_http { | |
host => "es.server.tld" | |
index => "foo-%{+YYYY.MM.dd}" | |
} | |
} else if [program] == "puppet-master" { | |
elasticsearch_http { | |
host => "es.server.tld" | |
index => "puppetmaster-%{+YYYY.MM.dd}" | |
} | |
} else { | |
elasticsearch_http { | |
host => "es.server.tld" | |
index => "logstash-%{+YYYY.MM.dd}" | |
} | |
} | |
} | |
===================================================================================================================================== | |
Grok Filter: | |
filter { | |
if [type] == "syslog" { | |
grok { | |
match => { "message" => "<%{POSINT:syslog_pri}>%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" } | |
add_field => [ "received_at", "%{@timestamp}" ] | |
add_field => [ "received_from", "%{host}" ] | |
} | |
syslog_pri { } | |
date { | |
match => [ "syslog_timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss" ] | |
} | |
if !("_grokparsefailure" in [tags]) { | |
mutate { | |
replace => [ "@source_host", "%{syslog_hostname}" ] | |
replace => [ "@message", "%{syslog_message}" ] | |
} | |
} | |
mutate { | |
remove_field => [ "syslog_hostname", "syslog_message", "syslog_timestamp" ] | |
}
}
}
-------------------------------------------------------------------------------------------- | |
{ | |
"message" => "Dec 30 09:57:01 2013 stream.maverik.com authmgr[3465]: <522026> <INFO> <stream.maverik.com 10.9.9.3> MAC=64:a3:cb:5d:c7:f7 IP=0.0.0.0 User miss: ingress=0x1001c, VLAN=1109 flags=0x48", | |
"@timestamp" => "2013-12-30T16:57:01.881Z", | |
"@version" => "1", | |
"type" => "syslog", | |
"host" => "rsyslog1.maverik.com", | |
"path" => "/var/log/hosts/10.9.9.3/syslog.log", | |
"tags" => [ | |
[0] "_grokparsefailure" | |
], | |
=================================================================================================================================== | |
## Primary Logstash Forwarder | |
input | |
{ | |
tcp { type => "windows_security" port => "50003" codec => plain { charset => "US-ASCII" } } | |
tcp { type => "windows_application" port => "50004" codec => plain { charset => "US-ASCII" } } | |
tcp { type => "windows_system" port => "50005" codec => plain { charset => "US-ASCII" } } | |
} | |
filter | |
{ | |
grok { match => [ "host", "%{IPORHOST:ipaddr}(:%{NUMBER})?" ] } | |
mutate { replace => [ "fqdn", "%{ipaddr}" ] } | |
dns { reverse => [ "fqdn", "fqdn" ] action => "replace" } | |
} | |
output | |
{ | |
#stdout { debug => true } | |
#stdout { codec => rubydebug } | |
redis { host => "10.5.0.204" data_type => "list" key => "logstash" } | |
file { path => [ "/var/log/hosts/%{fqdn}/%{type}" ] message_format => "%{message}" } | |
} | |
## This is a sample configuration file. See the nxlog reference manual about the | |
## configuration options. It should be installed locally and is also available | |
## online at http://nxlog.org/nxlog-docs/en/nxlog-reference-manual.html | |
## Please set the ROOT to the folder your nxlog was installed into, | |
## otherwise it will not start. | |
#define ROOT C:\Program Files\nxlog | |
define ROOT C:\Program Files (x86)\nxlog | |
Moduledir %ROOT%\modules | |
CacheDir %ROOT%\data | |
Pidfile %ROOT%\data\nxlog.pid | |
SpoolDir %ROOT%\data | |
LogFile %ROOT%\data\nxlog.log | |
# For windows 2003 and earlier use the following: | |
# Module im_mseventlog | |
<Input in_security_eventlog> | |
Module im_msvistalog | |
Query <QueryList>\ | |
<Query Id="0">\ | |
<Select Path="Security">*</Select>\ | |
</Query>\ | |
</QueryList> | |
</Input> | |
<Input in_application_eventlog> | |
Module im_msvistalog | |
Query <QueryList>\ | |
<Query Id="0">\ | |
<Select Path="Application">*</Select>\ | |
</Query>\ | |
</QueryList> | |
</Input> | |
<Input in_system_eventlog> | |
Module im_msvistalog | |
Query <QueryList>\ | |
<Query Id="0">\ | |
<Select Path="System">*</Select>\ | |
</Query>\ | |
</QueryList> | |
</Input> | |
<Route 1> | |
Path in_security_eventlog => out_security_eventlog | |
Path in_application_eventlog => out_application_eventlog | |
Path in_system_eventlog => out_system_eventlog | |
</Route> | |
<Output out_security_eventlog> | |
Module om_tcp | |
Host 10.5.0.206 | |
Port 50003 | |
</Output> | |
<Output out_application_eventlog> | |
Module om_tcp | |
Host 10.5.0.206 | |
Port 50004 | |
</Output>
<Output out_system_eventlog> | |
Module om_tcp | |
Host 10.5.0.206 | |
Port 50005 | |
</Output> | |
============================================================================================================== | |
Grok, Patterns, Regexes and Testing:
Since Logstash relies so heavily on regular expressions, it comes with many predefined shortcuts for typical patterns that can be used instead of writing your own regex from scratch. These shortcuts, or "grok patterns" as they are called, are designed to match text that you would typically find in log messages, from something as simple as "WORD"s and "USERNAME"s to more complicated patterns such as "PATH"s and "URI"s. You can use these shortcuts to build your own patterns for use in grok or multiline directives. You can reference these patterns via the file ./logstash/patterns/grok-patterns. | |
Grok is used to assign data to fields using these patterns and regular expressions. This is done by declaring a pre-configured pattern and appending the name of the desired field after a colon (:). An example of this would use the pattern below, which assigns the shortcut WORD to a regex that matches consecutive word characters between word boundaries.
---- | |
WORD \b\w+\b | |
---- | |
This shortcut can then be used to capture data and assign it to the field action. | |
---- | |
%{WORD:action} | |
---- | |
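For example, a minimal grok filter (a sketch, not taken from any of the configs above) that applies this shortcut to the first word of the message field might look like:
----
filter {
  grok {
    match => { "message" => "^%{WORD:action}" }
  }
}
----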
You can also assign fields inside of a grok pattern, as shown below; this creates a "meta" pattern HTTPLOG that captures and assigns various fields within a typical HTTP-related log message.
---- | |
HTTPLOG %{IPORHOST:http_clientip} (?:%{USER:http_ident}) (?:%{USER:http_auth}) \[%{HTTPTIMESTAMP:http_timestamp} %{INT}\] "(?:(?:%{WORD:http_action}) (?:%{URIPATHPARAM:http_request})(?: HTTP/%{NUMBER:http_version})?|-)" (?:%{NUMBER:http_response}) (?:%{NUMBER:http_bytes}|-)(?:$|\s(?:%{QS:http_referrer}) %{QS:http_agent}) | |
---- | |
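Assuming the HTTPLOG line above is saved into a file inside a custom patterns directory (the path below is only illustrative), it can then be referenced from a grok filter via patterns_dir:
----
filter {
  grok {
    patterns_dir => ["/etc/logstash/patterns"]
    match => { "message" => "%{HTTPLOG}" }
  }
}
----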
Testing: | |
When crafting your own patterns and regular expressions for use in Logstash, it is helpful to be able to test them before adding them to the production config. You can test your grok patterns either in straight Ruby or via the website http://grokdebug.herokuapp.com/ to ensure that they parse and capture data correctly. In order to test locally via Ruby, you'll need a current version of Ruby installed as well as the "jls-grok" gem.
First, you'll need to launch the interactive Ruby console via irb and load the grok gem:
---- | |
# irb | |
1.9.2-p320 :001 > require 'rubygems' | |
=> false | |
1.9.2-p320 :002 > require 'grok-pure' | |
=> true | |
1.9.2-p320 :003 > grok = Grok.new | |
=> #<Grok:0x000000029526b0 @patterns={}, @logger=#<Cabin::Channel:0x00000002952408 @subscribers={21658540=>#<Cabin::Outputs::StdlibLogger:0x0000000294f758 @logger=#<Logger:0x000000029520c0 @progname=nil, @level=0, @default_formatter=#<Logger::Formatter:0x0000000294ff00 @datetime_format=nil>, @formatter=nil, @logdev=#<Logger::LogDevice:0x0000000294fd48 @shift_size=nil, @shift_age=nil, @filename=nil, @dev=#<IO:<STDOUT>>, @mutex=#<Logger::LogDevice::LogDeviceMutex:0x0000000294fca8 @mon_owner=nil, @mon_count=0, @mon_mutex=#<Mutex:0x0000000294fa50>>>>>}, @data={}, @level=:warn, @metrics=#<Cabin::Metrics:0x00000002952368 @metrics_lock=#<Mutex:0x00000002952340>, @metrics={}, @channel=#<Cabin::Channel:0x00000002952408 ...>>, @subscriber_lock=#<Mutex:0x000000029521b0>>> | |
---- | |
Next, you'll need to load any existing patterns that you wish to use from a file; in this case we'll use the stock patterns:
---- | |
1.9.2-p320 :004 > grok.add_patterns_from_file("/opt/logstash/patterns/grok-patterns") | |
=> nil | |
---- | |
You'll need to define the sample log message you'd like to test your patterns against, then pick and compile an existing pattern from the file loaded previously:
---- | |
1.9.2-p320 :005 > text = "2012-11-26 17:33:16,057 INFO [com.fuzebox.tst.cache.manager.MasterAirlineCacheManager] (WorkManager(2)-10076) Getting value from cache for key=US with licenseeCode=0003" | |
=> "2012-11-26 17:33:16,057 INFO [com.fuzebox.tst.cache.manager.MasterAirlineCacheManager] (WorkManager(2)-10076) Getting value from cache for key=US with licenseeCode=0003" | |
1.9.2-p320 :006 > pattern = '%{TIMESTAMP_ISO8601}' | |
=> "%{TIMESTAMP_ISO8601}" | |
1.9.2-p320 :007 > grok.compile(pattern) | |
=> nil | |
---- | |
Now you can test your pattern against the sample text. This will return any matches and their assignments found by the pattern. You'll notice that even though our pattern is a single value, we get multiple matches back. This is because the pattern "TIMESTAMP_ISO8601" is composed of several other patterns strung together.
---- | |
1.9.2-p320 :008 > grok.match(text).captures() | |
=> {"TIMESTAMP_ISO8601"=>["2012-11-26 17:33:16,057"], "YEAR"=>["2012"], "MONTHNUM"=>["11"], "MONTHDAY"=>["26"], "HOUR"=>["17", nil], "MINUTE"=>["33", nil], "SECOND"=>["16,057"], "ISO8601_TIMEZONE"=>[nil]} | |
---- | |
If your pattern fails to match the text, you will end up with an error like this, because grok.match returns false rather than a match object when nothing matches. Modify your pattern or ensure you are matching against the correct text and try again.
---- | |
1.9.2-p320 :010 > grok.match(text).captures() | |
NoMethodError: undefined method `captures' for false:FalseClass | |
from (irb):10 | |
from /usr/local/rvm/rubies/ruby-1.9.2-p320/bin/irb:16:in `<main>' | |
---- | |
================================================================================================================================ | |
input { | |
lumberjack { | |
port => 5100 | |
ssl_certificate => "/etc/ssl/logstash.pub" | |
ssl_key => "/etc/ssl/logstash.key" | |
type => "lumberjack" | |
} | |
syslog { | |
type => "syslog" | |
port => 5500 | |
} | |
} | |
filter { | |
# Extract type from lumberjack input | |
mutate { | |
type => "lumberjack" | |
replace => [ "@type", "%{type}" ] | |
} | |
# Process syslog | |
mutate { | |
type => "syslog" | |
replace => [ "@source_host", "%{logsource}" ] | |
replace => [ "@message", "%{message}" ] | |
remove => [ "message" ] | |
} | |
metrics { | |
type => "syslog" | |
add_tag => [ "metric", "syslog" ] | |
ignore_older_than => 60 | |
meter => ["%{@source_host}/%{@type}_%{severity_label}"] | |
} | |
# Process metabus | |
multiline { | |
type => "metabus" | |
pattern => "%{METABUS_DATE}" | |
negate => true | |
what => "previous" | |
patterns_dir => "/etc/logstash/grok/" | |
} | |
grok { | |
type => "metabus" | |
pattern => [ "%{METABUS_DATE:date} *%{LOGLEVEL:level} +%{METABUS_CLASS:class} - %{METABUS_DATA:message}"] | |
patterns_dir => "/etc/logstash/grok/" | |
singles => true | |
} | |
date { | |
type => "metabus" | |
date => "dd-MM HH:mm:ss.SSS" | |
} | |
mutate { | |
type => "metabus" | |
convert => [ "fieldname", "integer" ] | |
replace => [ "@message", "%{message}" ] | |
replace => [ "@source_host", "%{@source_host}" ] | |
remove => [ "message", "date" ] | |
} | |
metrics { | |
type => "metabus" | |
add_tag => [ "metric", "metabus" ] | |
ignore_older_than => 60 | |
meter => ["%{@source_host}/%{@type}_log_hits", "%{@source_host}/%{@type}_%{level}"] | |
} | |
# Process metacommerce | |
multiline { | |
type => "metacommerce" | |
pattern => "%{METABUS_DATE}" | |
negate => true | |
what => "previous" | |
patterns_dir => "/etc/logstash/grok/" | |
} | |
grok { | |
type => "metacommerce" | |
pattern => [ "%{METABUS_DATE:date} *%{LOGLEVEL:level} +%{METABUS_CLASS:class} - %{METABUS_DATA:message}"] | |
patterns_dir => "/etc/logstash/grok/" | |
singles => true | |
} | |
date { | |
type => "metacommerce" | |
date => "dd-MM HH:mm:ss.SSS" | |
} | |
mutate { | |
type => "metacommerce" | |
convert => [ "fieldname", "integer" ] | |
replace => [ "@message", "%{message}" ] | |
replace => [ "@source_host", "%{@source_host}" ] | |
remove => [ "message", "date" ] | |
} | |
metrics { | |
type => "metacommerce" | |
add_tag => [ "metric", "metacommerce" ] | |
ignore_older_than => 60 | |
meter => ["%{@source_host}/%{@type}_log_hits", "%{@source_host}/%{@type}_%{level}"] | |
} | |
} | |
output { | |
redis { | |
exclude_tags => [ "metric" ] | |
host => "logstash.dev" | |
data_type => "list" | |
key => "logstash" | |
} | |
# Syslog metrics | |
ganglia { | |
tags => [ "metric", "syslog" ] | |
group => "Syslog" | |
metrics => [ | |
"syslog_notice", "syslog_Notice\.count", | |
"syslog_warning", "syslog_Warning\.count", | |
"syslog_error", "syslog_Error\.count", | |
"syslog_critical", "syslog_Critical\.count", | |
"syslog_alert", "syslog_Alert\.count", | |
"syslog_emergency","syslog_Emergency\.count" ] | |
metric_type => "uint32" | |
slope => "positive" | |
lifetime => "0" | |
spoof_back => true | |
} | |
# Metabus metrics | |
ganglia { | |
tags => [ "metric", "metabus" ] | |
group => "MetaBus" | |
metrics => [ | |
"metabus_log_hits", "metabus_log_hits\.rate_1m", | |
"metabus_trace", "metabus_TRACE\.rate_1m", | |
"metabus_error", "metabus_ERROR\.rate_1m", | |
"metabus_warn", "metabus_WARN\.rate_1m", | |
"metabus_info", "metabus_INFO\.rate_1m", | |
"metabus_debug", "metabus_DEBUG\.rate_1m" ] | |
metric_type => "uint32" | |
slope => "both" | |
lifetime => "0" | |
spoof_back => true | |
} | |
# Metacommerce metrics | |
ganglia { | |
tags => [ "metric", "metacommerce" ] | |
group => "MetaCommerce" | |
metrics => [ | |
"metacommerce_log_hits", "metacommerce_log_hits\.rate_1m", | |
"metacommerce_trace", "metacommerce_TRACE\.rate_1m", | |
"metacommerce_error", "metacommerce_ERROR\.rate_1m", | |
"metacommerce_warn", "metacommerce_WARN\.rate_1m", | |
"metacommerce_info", "metacommerce_INFO\.rate_1m", | |
"metacommerce_debug", "metacommerce_DEBUG\.rate_1m" ] | |
metric_type => "uint32" | |
slope => "both" | |
lifetime => "0" | |
spoof_back => true | |
} | |
} | |
=========================================================================================================================== | |
input | |
{ | |
# We're primarily fed by Log4NET over UDP. | |
udp | |
{ | |
port => 44444 | |
type => "log4netudp" | |
format => "plain" | |
} | |
} | |
filter | |
{ | |
## | |
## Inline filtering of data coming in over UDP, our primary source. | |
## | |
# Filter out anything that the bot sends. | |
grep | |
{ | |
match => ["@message", ".*Sent:.*"] | |
type => "log4netudp" | |
negate => "true" | |
} | |
# Remove PING statements | |
grep | |
{ | |
match => ["@message", ".*TRACE SpikeLite.Irc.* - Received: PING :.*"] | |
type => "log4netudp" | |
negate => "true" | |
} | |
# Parse out the header part of the message. This is the common stuff we don't really care about. | |
grok | |
{ | |
type => "log4netudp" | |
pattern => ["@message", "%{DATESTAMP:eventtime} %{LOGLEVEL:level} %{NOTSPACE:class} - Received: :(?<nickname>[^!]+)!%{NOTSPACE:hostmask} %{GREEDYDATA:unparsed_payload}"] | |
} | |
# Swap the event time to what was in our logs. This must be done after the grok. | |
date | |
{ | |
type => "log4netudp" | |
match => ["eventtime", "yyyy-MM-dd HH:mm:ss,SSS"] | |
} | |
# Strip off the parts of the message we no longer need. | |
mutate | |
{ | |
exclude_tags => ["_grokparsefailure"] | |
type => "log4netudp" | |
replace => [ "@message", "%{unparsed_payload}"] | |
} | |
# Grok QUIT messages. | |
grok | |
{ | |
type => "log4netudp" | |
pattern => ["@message","QUIT :%{GREEDYDATA:payload}"] | |
add_field => ["action", "QUIT", | |
"target", "N/A", | |
"parse_rule", "quit"] | |
} | |
# Grok NICK changes | |
grok | |
{ | |
tags => ["_grokparsefailure"] | |
type => "log4netudp" | |
pattern => ["@message", "NICK :%{NOTSPACE:payload}"] | |
add_field => ["action", "NICK", | |
"target", "N/A", | |
"parse_rule", "nick"] | |
remove_tag => ["_grokparsefailure"] | |
} | |
# Grok MODE changes | |
grok | |
{ | |
tags => ["_grokparsefailure"] | |
type => "log4netudp" | |
pattern => ["@message", "MODE %{NOTSPACE:target} %{GREEDYDATA:payload}"] | |
add_field => ["action", "MODE", | |
"parse_rule", "mode"] | |
remove_tag => ["_grokparsefailure"] | |
} | |
# Grok KICK messages | |
grok | |
{ | |
tags => ["_grokparsefailure"] | |
type => "log4netudp" | |
pattern => ["@message", "KICK (?<target>#+\S+) %{NOTSPACE:action_target} :%{GREEDYDATA:payload}"] | |
add_field => ["action", "KICK", | |
"parse_rule", "kick"] | |
remove_tag => ["_grokparsefailure"] | |
} | |
# Grok things with a : delimiter before free text (like PRIVMSG, NOTICE) | |
grok | |
{ | |
tags => ["_grokparsefailure"] | |
type => "log4netudp" | |
pattern => ["@message", "%{NOTSPACE:action} (?<target>#+\S+) :%{GREEDYDATA:payload}"] | |
add_field => ["parse_rule", "privmsg"] | |
remove_tag => ["_grokparsefailure"] | |
} | |
# Grok things like JOIN. | |
grok | |
{ | |
tags => ["_grokparsefailure"] | |
type => "log4netudp" | |
pattern => ["@message", "%{NOTSPACE:action} (?<target>#+\S+)"] | |
add_field => ["payload", "", | |
"parse_rule", "join"] | |
remove_tag => ["_grokparsefailure"] | |
} | |
# Filter out pm/notice sent straight to the bot. Apparently multiple matches are an AND so we have separate blocks. | |
grep | |
{ | |
match => ["@message", "^PRIVMSG .*"] | |
type => "log4netudp" | |
negate => "true" | |
tags => ["_grokparsefailure"] | |
} | |
grep | |
{ | |
match => ["@message", ".* TRACE SpikeLite.Irc.* - .* sent a NOTICE: .*"] | |
type => "log4netudp" | |
negate => "true" | |
tags => ["_grokparsefailure"] | |
} | |
grep | |
{ | |
match => ["@message", "^NOTICE .*"] | |
type => "log4netudp" | |
negate => "true" | |
tags => ["_grokparsefailure"] | |
} | |
# Strip off unused fields, reformat our message. People can create their own columnar view, this is just the default message. | |
mutate | |
{ | |
type => "log4netudp" | |
remove => ["level", "class", "eventtime", "unparsed_payload"] | |
replace => ["@message", "<%{nickname}> %{action} -> %{target}: %{payload}"] | |
} | |
} | |
output | |
{ | |
elasticsearch | |
{ | |
embedded => true | |
index => "logstash-%{+YYYY.MM}" | |
} | |
} | |
==================================================================================================================================== | |
input { | |
file { | |
type => "cslogs" | |
path => "/etc/logstash/logs/log1.log" | |
start_position => "beginning" | |
# sincedb_path => "/home/ubuntu/.sincedb" | |
} | |
} | |
filter { | |
if [type] == "cslogs" { | |
grok { | |
match => [ "message", ' %{DATE:date} - %{TIME:time}: "%{DATA:nick}<%{IN T:pid}><%{DATA:vid}><%{WORD:team}>" (%{WORD:action} "%{WORD:dnick}<%{INT:did}><% {DATA:vid2}><%{WORD:dteam}>" with "%{WORD:wpused}") ' ] | |
} | |
} | |
} | |
output { | |
stdout { } | |
elasticsearch { embedded => true cluster => "counter"} | |
} | |
root@ubuntu-VirtualBox:/etc/logstash# cat stdin.conf | |
input { | |
file { | |
type => "cslogs" | |
path => "/etc/logstash/logs/log1.log" | |
start_position => "beginning" | |
# sincedb_path => "/home/ubuntu/.sincedb" | |
} | |
} | |
filter { | |
if [type] == "cslogs" { | |
grok { | |
match => [ "message", ' %{DATE:date} - %{TIME:time}: "%{DATA:nick}<%{INT:pid}><%{DATA:vid}><%{WORD:team}>" (%{WORD:action} "%{WORD:dnick}<%{INT:did}><%{DATA:vid2}><%{WORD:dteam}>" with "%{WORD:wpused}") ' ] | |
} | |
} | |
} | |
output { | |
stdout { } | |
elasticsearch { embedded => true }
}
============================================================================================================================== | |
############## | |
logstash.conf: | |
############## | |
input { | |
tcp { | |
port => 5544 | |
type => syslog | |
} | |
udp { | |
port => 5544 | |
type => syslog | |
} | |
} | |
filter { | |
grok { | |
type => "syslog" | |
pattern => [ "<%{NONNEGINT:syslog_pri}>%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_app} %{GREEDYDATA:syslog_message}" ] | |
add_field => [ "received_at", "%{@timestamp}" ] | |
add_field => [ "received_from", "%{@source_host}" ] | |
} | |
syslog_pri { | |
type => "syslog" | |
} | |
date { | |
type => "syslog" | |
match => [ "syslog_timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss" ] | |
} | |
mutate { | |
type => "syslog" | |
exclude_tags => "_grokparsefailure" | |
replace => [ "@source_host", "%{syslog_hostname}" ] | |
replace => [ "@message", "%{syslog_message}" ] | |
} | |
mutate { | |
type => "syslog" | |
remove => [ "syslog_hostname", "syslog_message", "syslog_timestamp" ] | |
} | |
grok { | |
type => "syslog" | |
pattern => [ "(^.+Exception: .+)|(^\t+at .+)|(^\t+... \d+ more)|(^\t*Caused by:.+)" ] | |
drop_if_match => true | |
} | |
} | |
output { | |
elasticsearch { embedded => true } | |
} | |
============================================================================================================================ | |
# Output line: | |
2013-11-14 22:07:08.266 UTC [6687] - [postgres@[local] pgbench] SELECT - ERROR: column "sync_statex" does not exist at character 20 | |
# Logstash grok matcher... | |
# In config file: | |
%{DATESTAMP:timestamp} UTC \\[%{POSINT:pid}\\] - \\[%{WORD:user}@\\[local\\] %{WORD:db}\\] %{WORD:command} - %{WORD:level}:%{SPACE}%{GREEDYDATA:message} | |
# Translated for grokdebug.herokuapp.com: | |
%{DATESTAMP:timestamp} UTC \[%{POSINT:pid}\] - \[%{WORD:user}@\[local\] %{WORD:db}\] %{WORD:command} - %{WORD:level}:%{SPACE}%{GREEDYDATA:message} | |
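# A minimal sketch (not part of the original snippet) showing how the config-file form above
# could sit inside a filter block; the grok options are standard, the field names come from the pattern itself:
filter {
  grok {
    match => [ "message", "%{DATESTAMP:timestamp} UTC \\[%{POSINT:pid}\\] - \\[%{WORD:user}@\\[local\\] %{WORD:db}\\] %{WORD:command} - %{WORD:level}:%{SPACE}%{GREEDYDATA:message}" ]
  }
}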
======================================================================================================================================= | |
input { | |
file { | |
type => "iis" | |
path => "C:/inetpub/logs/LogFiles/W3SVC/*.log" | |
} | |
} | |
filter { | |
#ignore log comments | |
if [message] =~ "^#" { | |
drop {} | |
} | |
grok { | |
match => ["message", "%{TIMESTAMP_ISO8601:log_timestamp} %{WORD:iisSite} %{IPORHOST:site} %{WORD:method} %{URIPATH:page} %{NOTSPACE:querystring} %{NUMBER:port} %{NOTSPACE:username} %{IPORHOST:clienthost} %{NOTSPACE:useragent} %{NOTSPACE:referer} %{NUMBER:response} %{NUMBER:subresponse} %{NUMBER:scstatus} %{NUMBER:bytes:int} %{NUMBER:timetaken:int}"] | |
} | |
#Set the Event Timestamp from the log
date { | |
match => [ "log_timestamp", "YYYY-MM-dd HH:mm:ss" ] | |
timezone => "Etc/UCT" | |
} | |
ruby{ code => "event['kilobytes'] = event['bytes'] / 1024.0" } | |
#https://logstash.jira.com/browse/LOGSTASH-1354 | |
#geoip{ | |
# source => "clienthost" | |
# add_tag => [ "geoip" ] | |
#} | |
useragent { | |
source=> "useragent" | |
prefix=> "browser" | |
} | |
mutate { | |
remove_field => [ "log_timestamp"] | |
} | |
} | |
output { | |
elasticsearch { | |
host => "127.0.0.1" | |
} | |
} | |
============================================================================================================================== | |
# | |
# Logstash conf! | |
# | |
input { | |
tcp { | |
host => "0.0.0.0" | |
port => 514 | |
type => syslog | |
} | |
udp { | |
host => "0.0.0.0" | |
port => 514 | |
type => syslog | |
} | |
} | |
filter { | |
grok { | |
match => { "message" => "(?:<%{INT:priority}>)?%{SYSLOGBASE2} (?:\s?%{LOGLEVEL:log_level} )?(?:\s?%{WORD:log_format}: )?%{GREEDYDATA:syslog_message}" } | |
} | |
syslog_pri { } | |
if ("_grokparsefailure" in [tags]) { | |
mutate { | |
replace => ["@message", "TOPARSE: %{message}"] | |
} | |
} else if [log_format] == "json" { | |
mutate { | |
gsub => ["syslog_message", "@timestamp", "syslog_timestamp"] | |
} | |
json { | |
source => "syslog_message" | |
} | |
mutate { | |
replace => ["@message", "%{message}"] | |
} | |
} else { | |
mutate { | |
replace => [ "@message", "%{syslog_message}" ] | |
} | |
} | |
mutate { | |
remove_field => [ | |
"syslog_hostname", "syslog_message", "syslog_timestamp", | |
"syslog_severity_code", "syslog_facility_code", | |
"message" #facility_label", "severity_label" | |
] | |
} | |
} | |
output { | |
stdout { | |
debug => true | |
} | |
elasticsearch { | |
# bind_host => "foo" | |
host => "localhost" | |
embedded => false | |
} | |
} | |
====================================================================================================== | |
I have a little bash script that checks whether the MySQL slave is running; logger then writes the output to /var/log/messages:
----- | |
----- | |
#!/bin/bash | |
user="" | |
pass="" | |
function SlaveCheck() { | |
mysql -u $user -p$pass -e 'SHOW SLAVE STATUS \G;' | grep Running | awk '{print $1,$2}' | logger -t "Check if mysql_slave has failed" | |
} | |
SlaveCheck | |
----- | |
----- | |
Here is the Logstash grok filter that parses those syslog messages:
filter { | |
if [type] == "syslog" { | |
grok { | |
match => [ "message", "%{SYSLOGTIMESTAMP:timestamp} %{SYSLOGHOST:host} %{GREEDYDATA:mysql}" ] | |
} | |
} | |
} | |
and this is the Logstash output configuration that sends an alert if the MySQL slave fails:
----- | |
----- | |
output { | |
if [type] == "syslog" and ([mysql] =~ "Slave_IO_Running: No" or [mysql] =~ "Slave_SQL_Running: No") {
email { | |
htmlbody => " | |
<html> | |
<body>
<h4>Notification</h4>
<p> MySQL slave error detected: %{mysql} </p>
<p> Log excerpt: %{timestamp} %{mysql} </p>
</body>
</html> | |
" | |
from => "redacted" | |
subject => "Report errore Mysql Slave" | |
to => "{{ monitor }}" | |
} | |
} | |
} | |
======================================================================================= | |
input { | |
file { | |
path => "/var/log/jenkins/*" | |
type => "jenkins-server" | |
start_position => "beginning" | |
} | |
} | |
# The first filter munges the logs into discrete events. | |
filter { | |
if [type] == "jenkins-server" { | |
# set all messages from the jenkins log as type 'jenkins' and add the @message field. | |
mutate { | |
add_field => ["@message_type", "jenkins"] | |
add_field => ["@message", "%{message}"] | |
} | |
# Any line that does not begin with a date is deemed to be a continuation of the previous | |
# line. This has the effect of delimiting the entire log file on timestamps. | |
multiline { | |
pattern => "^%{MONTH} %{MONTHDAY}, %{YEAR} %{TIME} (AM|PM)" | |
negate => true | |
what => "previous" | |
} | |
} | |
# ...other event types get processed here... | |
} | |
# now that we have possibly-multiline events, we can clean them up. | |
# We do this in a new filter so that we only process complete events instead of individual lines. | |
filter { | |
# munge the possibly-multiline messages into a single string | |
mutate { | |
join => ["@message", "\n"] | |
} | |
# split @message into __date and __msg, and overwrite the @timestamp value. | |
grok { | |
match => [ "@message", "^(?<__date>%{MONTH} %{MONTHDAY}, %{YEAR} %{TIME} (AM|PM)) (?<__msg>.+)" ] | |
} | |
date { | |
match => [ "__date", | |
"MMM dd, YYYY HH:mm:ss a" | |
] | |
timezone => "America/Los_Angeles" | |
} | |
# ...now some patterns to categorize specific event types... | |
# parse build completion messages, adding the jenkins_* fields and the 'build' tag | |
grok { | |
match => [ "@message", "(?<jenkins_job>\S+) #(?<jenkins_build_number>\d+) (?<__msg>.+): (?<jenkins_build_status>\w+)" ] | |
tag_on_failure => [] | |
overwrite => true | |
add_tag => ['build'] | |
} | |
# tag messages that come from the perforce SCM plugin (and associated classes) | |
grok { | |
match => [ "@message", "\.perforce\."] | |
tag_on_failure => [] | |
add_tag => ['p4-plugin'] | |
} | |
# ...other grok patterns go here... | |
# if we have extracted a short message string, replace @message with it now | |
if [__msg] { | |
mutate { | |
replace => ["@message","%{__msg}"] | |
} | |
} | |
# convert @message back into an array of lines | |
mutate { | |
split => ["@message", "\n"] | |
} | |
} | |
# Lastly, clean-up temporary fields and unwanted tags. | |
filter { | |
mutate { | |
remove_field => [ | |
"message", | |
"__msg", | |
"__date" | |
] | |
remove_tag => [ | |
"multiline", | |
"_grokparsefailure" | |
] | |
} | |
} | |
# send it on to the logstash consumer via redis. | |
output { | |
# debugging
#stdout { debug => true } | |
# use redis as a message bus, in case the logstash consumer falls over.
redis { | |
host => 'localhost' | |
data_type => 'list' | |
key => 'logstash:redis' | |
} | |
} | |
======================================================================================================================== | |
input { | |
stdin { type => "stdin-type"} | |
# syslog { | |
# type => syslog | |
# port => 5544 | |
# } | |
tcp { | |
port => 10514 | |
type => "syslog" | |
tags => ["production"] | |
} | |
file { | |
type => "linux-syslog" | |
path => [ "/var/log/messages", "/var/log/syslog" ] | |
} | |
} | |
filter { | |
grep { | |
type => "syslog" | |
match => [ "%{@syslog_program}", "varnishncsa.log" ] | |
# match => [ "@message", "GET" ] | |
# match => [ "@syslog_program", "varnishncsa.log" ] | |
add_tag => "apache-access-grepped" | |
drop => false | |
} | |
grok { | |
type => "syslog" | |
pattern => [ "<%{POSINT:syslog_pri}>%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{PROG:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" ] | |
add_field => [ "received_at", "%{@timestamp}" ] | |
add_field => [ "received_from", "%{@source_host}" ] | |
} | |
grok { | |
type => "apache-combined" | |
pattern => "%{COMBINEDAPACHELOG}" | |
} | |
grok { | |
type => "syslog" | |
tags => ["apache-access-grepped"] | |
pattern => [ "%{COMBINEDAPACHELOG}" ] | |
add_tag => "apache-access-grokked" | |
} | |
syslog_pri { | |
type => "syslog" | |
} | |
date { | |
type => "syslog" | |
syslog_timestamp => [ "MMM d HH:mm:ss", "MMM dd HH:mm:ss" ] | |
# syslog_timestamp => [ "MMM dd HH:mm:ss" ] | |
} | |
mutate { | |
type => "syslog" | |
exclude_tags => "_grokparsefailure" | |
replace => [ "@source_host", "%{syslog_hostname}" ] | |
replace => [ "@message", "%{syslog_message}" ] | |
} | |
mutate { | |
type => "syslog" | |
remove => [ "syslog_hostname", "syslog_message", "syslog_timestamp" ] | |
} | |
} | |
output { | |
stdout { debug => true debug_format => "json"} | |
elasticsearch { | |
embedded => false | |
cluster => "logstash" | |
host => "localhost" | |
} | |
} | |
===================================================================================================== | |
input { | |
file { | |
path => "/var/log/app/app_servicediscovery.log" | |
type => "servicediscovery" | |
} | |
} | |
filter { | |
multiline { | |
pattern => "^\s" | |
what => "previous" | |
} | |
multiline { | |
pattern => "^---" | |
what => "previous" | |
} | |
multiline { | |
pattern => "^Traceback" | |
what => "previous" | |
} | |
multiline { | |
pattern => "^\S*?(Error|Exception):\s" | |
what => "previous" | |
} | |
if [type] == "servicediscovery" { | |
grok { | |
match => { "message" => "%{DATESTAMP:timestamp}:%{DATA:python_module}:%{LOGLEVEL:Level}:%{GREEDYDATA:message}" } | |
add_field => [ "hostname", "{{ grains['host'] }}" ] | |
add_field => [ "process", "service_discovery_daemon"] | |
} | |
} | |
} | |
=================================================================================================================== | |
input { | |
file { | |
path => "/var/log/fail2ban.log" | |
type => "fail2banlog" | |
} | |
} | |
filter { | |
if [type] == "fail2banlog" { | |
grok { | |
match => { "message" => "%{DATESTAMP:timestamp} %{DATA:source}: %{LOGLEVEL:Level} %{GREEDYDATA:message}" } | |
add_field => [ "hostname", "{{ salt['grains.get']('host') }}" ] | |
} | |
} | |
} | |
=================================================================================================================== | |
output { | |
redis { host => "rabbitmq1" data_type => "list" key => "logstash" } | |
} | |
===================================================================================================================== | |
input { | |
tcp { | |
format => "plain" | |
mode => server | |
port => 9999 | |
type => "syslog" | |
} | |
} | |
filter { | |
if [type] == "syslog" { | |
grok { | |
match => { "message" => "<%{POSINT:syslog_pri}>%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" } | |
add_field => [ "received_at", "%{@timestamp}" ] | |
add_field => [ "received_from", "%{host}" ] | |
} | |
syslog_pri { } | |
date { | |
match => [ "syslog_timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss", "ISO8601" ] | |
} | |
if !("_grokparsefailure" in [tags]) { | |
mutate { | |
replace => [ "@source_host", "%{syslog_hostname}" ] | |
replace => [ "@message", "%{syslog_message}" ] | |
} | |
} | |
mutate { | |
remove_field => [ "syslog_hostname", "syslog_message", "syslog_timestamp" ] | |
} | |
} | |
} | |
=============================================================================================================================== | |
# This is the logstash server index configuration. | |
# This file will be put in the same folder as the logstash.jar file, in
# /etc/logstash/.
# This takes information straight from redis and loads it into elasticsearch. | |
input { | |
redis { | |
host => "rabbitmq1" | |
threads => 4 | |
# these settings should match the output of the agent | |
data_type => "list" | |
key => "logstash" | |
# We use json_event here since the sender is a logstash agent | |
format => "json_event" | |
codec => oldlogstashjson { | |
} | |
} | |
} | |
output { | |
elasticsearch_http { | |
host => "elasticsearch1" | |
} | |
} | |
================================================================================================================================ | |
input { | |
zeromq { | |
type => 'zmq' | |
topology => 'pushpull' | |
address => 'tcp://*:5556' | |
mode => 'server' | |
} | |
} | |
output { | |
elasticsearch { | |
cluster => "ELA003" | |
index => "ossec-logstash-%{+YYYY.MM.dd}" | |
tags => ["ossec"] | |
} | |
stdout { | |
debug => true | |
debug_format => "json" | |
tags => ["ossec"] | |
} | |
} | |
filter {
grep {
tags => ["ossec"] | |
add_tag => ["ossecproc"] | |
} | |
grok { | |
tags => ["ossecproc"] | |
match => [ "@message", "%{GREEDYDATA:FIRST}\s%{GREEDYDATA:ID}\:\s%{GREEDYDATA:TYPE}\,\s%{YEAR:YEAR}\s%{MONTH:MONTH}\s%{MONTHDAY:MONTHDAY}\s%{TIME:TIME}\s\(ssh_integrity_check_linux\)\s%{GREEDYDATA:SERVER}\-\>syscheck\sRule\:\s%{INT:RULE}\s\(level\s%{INT:LEVEL}\)\s\-\>\s\'%{DATA:DESCRIPTION}\'\s%{GREEDYDATA:ORIGMESSAGE}" ] | |
remove_tag => ["ossecproc"] | |
add_tag => ["ossec-integrity","ossec-match"] | |
} | |
grok { | |
tags => ["ossecproc"] | |
match => [ "@message", "%{GREEDYDATA:FIRST}\s%{GREEDYDATA:ID}\:\s%{GREEDYDATA:TYPE}\,\s%{YEAR:YEAR}\s%{MONTH:MONTH}\s%{MONTHDAY:MONTHDAY}\s%{TIME:TIME}\s\(%{WORD:SERVER}\)\s%{IP:IP}\-\>%{GREEDYDATA:LOG}\sRule\:\s%{INT:RULE}\s\(level\s%{INT:LEVEL}\)\s\-\>\s\'%{GREEDYDATA:DESCRIPTION}\'\s%{GREEDYDATA:ORIGMESSAGE}" ] | |
remove_tag => ["ossecproc"] | |
add_tag => ["ossec-alert","ossec-match"] | |
} | |
grok { | |
tags => ["ossecproc"] | |
match => [ "@message", "%{GREEDYDATA:FIRST}\s%{GREEDYDATA:ID}\:\s-\s%{DATA:TYPE}\s%{YEAR:YEAR}\s%{MONTH:MONTH}\s%{MONTHDAY:MONTHDAY}\s%{TIME:TIME}\s\(%{DATA:SERVER}\)\s%{IP:IP}\-\>%{DATA:LOG}\sRule\:\s%{INT:RULE}\s\(level\s%{INT:LEVEL}\)\s\-\>\s\'%{GREEDYDATA:DESCRIPTION}\'\s%{GREEDYDATA:ORIGMESSAGE}" ] | |
remove_tag => ["ossecproc"] | |
add_tag => ["ossec-syslog","ossec-match"] | |
} | |
grok { | |
tags => ["ossecproc"] | |
match => [ "@message", "%{GREEDYDATA:FIRST}\s%{GREEDYDATA:ID}\:\s%{GREEDYDATA:TYPE}\,\s%{YEAR:YEAR}\s%{MONTH:MONTH}\s%{MONTHDAY:MONTHDAY}\s%{TIME:TIME}\s%{GREEDYDATA:SERVER}\-\>syscheck\sRule\:\s%{INT:RULE}\s\(level\s%{INT:LEVEL}\)\s\-\>\s\'%{DATA:DESCRIPTION}\'\s%{GREEDYDATA:ORIGMESSAGE}" ] | |
remove_tag => ["ossecproc"] | |
add_tag => ["ossec-syslog","ossec-match"] | |
}
}
=========================================================================================================================================== | |
timestamp: | |
2013-08-07T19:18:07.115Z | |
event: | |
<133>Aug 7 19:18:06 server03 app-log at org.apache.camel.util.AsyncProcessorHelper.process(AsyncProcessorHelper.java:73) [camel-core-2.9.1.jar:2.9.1] | |
############## | |
logstash.conf: | |
############## | |
input { | |
tcp { | |
port => 5544 | |
type => syslog | |
} | |
udp { | |
port => 5544 | |
type => syslog | |
} | |
} | |
filter { | |
grok { | |
type => "syslog" | |
pattern => [ "<%{NONNEGINT:syslog_pri}>%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_app} %{GREEDYDATA:syslog_message}" ] | |
add_field => [ "received_at", "%{@timestamp}" ] | |
add_field => [ "received_from", "%{@source_host}" ] | |
} | |
syslog_pri { | |
type => "syslog" | |
} | |
date { | |
type => "syslog" | |
match => [ "syslog_timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss" ] | |
} | |
mutate { | |
type => "syslog" | |
exclude_tags => "_grokparsefailure" | |
replace => [ "@source_host", "%{syslog_hostname}" ] | |
replace => [ "@message", "%{syslog_message}" ] | |
} | |
mutate { | |
type => "syslog" | |
remove => [ "syslog_hostname", "syslog_message", "syslog_timestamp" ] | |
} | |
grok { | |
type => "syslog_message" | |
pattern => [ "(^.+Exception: .+)|(^\s+at .+)|(^\s+... \d+ more)|(^\s*Caused by:.+)" ] | |
drop_if_match => true | |
} | |
} | |
output { | |
elasticsearch { embedded => true } | |
} | |
OR | |
############## | |
logstash.conf: | |
############## | |
input { | |
tcp { | |
port => 5544 | |
type => syslog | |
} | |
udp { | |
port => 5544 | |
type => syslog | |
} | |
} | |
filter { | |
grok { | |
type => "syslog" | |
pattern => [ "<%{NONNEGINT:syslog_pri}>%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_app} %{GREEDYDATA:syslog_message}" ] | |
add_field => [ "received_at", "%{@timestamp}" ] | |
add_field => [ "received_from", "%{@source_host}" ] | |
} | |
grok { | |
type => "syslog" | |
match => [ "syslog_message", "(^.+Exception: .+)|(^\s+at .+)|(^\s+... \d+ more)|(^\s*Caused by:.+)" ] | |
drop_if_match => true | |
} | |
syslog_pri { | |
type => "syslog" | |
} | |
date { | |
type => "syslog" | |
match => [ "syslog_timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss" ] | |
} | |
mutate { | |
type => "syslog" | |
exclude_tags => "_grokparsefailure" | |
replace => [ "@source_host", "%{syslog_hostname}" ] | |
replace => [ "@message", "%{syslog_message}" ] | |
} | |
mutate { | |
type => "syslog" | |
remove => [ "syslog_hostname", "syslog_message", "syslog_timestamp" ] | |
} | |
} | |
output { | |
elasticsearch { embedded => true } | |
} | |
======================================================================================================================= | |
input { | |
stdin { | |
type => "syslog" | |
} | |
#tcp { | |
# port => 1514 | |
# type => syslog | |
#} | |
#udp { | |
# port => 1514 | |
# type => syslog | |
#} | |
} | |
filter { | |
if [type] == "syslog" { | |
grok { | |
overwrite => "message" | |
match => { | |
"message" => "^(?:<%{POSINT:syslog_pri}>)?%{SYSLOGTIMESTAMP:timestamp} %{IPORHOST:syslog_host} (?:%{PROG:program}(?:\[%{POSINT:pid}\])?: )?%{GREEDYDATA:message}" | |
} | |
} | |
syslog_pri {} | |
} | |
if [program] == "apache" { | |
grok { | |
patterns_dir => [ "/tmp/foo" ] | |
match => { | |
"message" => "^%{IPORHOSTORDASH:vhost} %{IPORHOSTORDASH:client_ip} %{USER:ident} %{USER:auth} \[%{HTTPDATE:accept_date}\] \"(?:%{WORD:http_verb} %{NOTSPACE:http_request}(?: HTTP/%{NUMBER:http_version})?|%{DATA:rawrequest})\" %{NUMBER:http_status_code} (?:%{NUMBER:bytes_read}|-) %{QS:referrer} %{QS:useragent}" | |
} | |
} | |
mutate { | |
replace => [ "type", "apache" ] | |
convert => [ "http_status_code", "integer" ] | |
convert => [ "bytes_read", "integer" ] | |
uppercase => [ "http_verb" ] | |
} | |
} else if [program] == "apache-error" { | |
mutate { | |
replace => [ "type", "apache-error" ] | |
} | |
grok { | |
patterns_dir => [ "/tmp/foo" ] | |
match => { | |
"message" => [ | |
"^\[%{APACHEERRORDATE:accept_date}\] \[error\] \[client %{IP:client_ip}\]" | |
] | |
} | |
} | |
if !("_grokparsefailure" in [tags]) { | |
grok { | |
break_on_match => "false" | |
tag_on_failure => "false" | |
match => { | |
message => [ | |
"in %{PATH:absolute_path} on line %{POSINT:line_number}", | |
"exist: %{PATH:absolute_path}" | |
] | |
} | |
} | |
if [absolute_path] { | |
grok { | |
patterns_dir => [ "/tmp/foo" ] | |
tag_on_failure => "false" | |
match => { | |
absolute_path => [ | |
"^/mnt/%{PATHCOMPONENT:cluster}/%{PATHCOMPONENT:store}/%{PATHCOMPONENT:domain}/%{PATHCOMPONENT:instance}", | |
"^/var/www/%{PATHCOMPONENT:cluster}/%{PATHCOMPONENT:domain}/%{PATHCOMPONENT:instance}" | |
] | |
} | |
} | |
if [domain] and [instance] { | |
mutate { | |
add_field => [ "vhost", "%{instance}.%{domain}" ] | |
remove_field => [ "instance", "cluster", "store" ] | |
} | |
} | |
} | |
} | |
} else if [program] == "haproxy" { | |
grok { | |
match => { | |
"message" => "^%{IP:client_ip}:%{INT:client_port} \[%{HAPROXYDATE:accept_date}\] %{NOTSPACE:frontend_name} %{NOTSPACE:backend_name}/%{NOTSPACE:server_name} %{INT:time_request}/%{INT:time_queue}/%{INT:time_backend_connect}/%{INT:time_backend_response}/%{NOTSPACE:time_duration} %{INT:http_status_code} %{NOTSPACE:bytes_read} %{DATA:captured_request_cookie} %{DATA:captured_response_cookie} %{NOTSPACE:termination_state} %{INT:actconn}/%{INT:feconn}/%{INT:beconn}/%{INT:srvconn}/%{NOTSPACE:retries} %{INT:srv_queue}/%{INT:backend_queue} (\{%{HAPROXYCAPTUREDREQUESTHEADERS}\})?( )?(\{%{HAPROXYCAPTUREDRESPONSEHEADERS}\})?( )?\"(<BADREQ>|(%{WORD:http_verb} (%{URIPROTO:http_proto}://)?(?:%{USER:http_user}(?::[^@]*)?@)?(?:%{URIHOST:http_host})?(?:%{URIPATHPARAM:http_request})?( HTTP/%{NUMBER:http_version})?))?\"" | |
} | |
} | |
mutate { | |
replace => [ "type", "haproxy" ] | |
convert => [ "time_request", "integer" ] | |
convert => [ "time_queue", "integer" ] | |
convert => [ "time_backend_connect", "integer" ] | |
convert => [ "time_backend_response", "integer" ] | |
convert => [ "time_duration", "integer" ] | |
convert => [ "http_status_code", "integer" ] | |
convert => [ "bytes_read", "integer" ] | |
uppercase => [ "http_verb" ] | |
} | |
} | |
} | |
output { | |
# Print each event to stdout. | |
stdout { | |
# Enabling 'rubydebug' codec on the stdout output will make logstash | |
# pretty-print the entire event as something similar to a JSON representation. | |
codec => rubydebug | |
} | |
file { | |
path => "/var/log/aggregate/%{type}.%{+yyyy-MM-dd}" | |
} | |
#gelf { | |
# #host => "ops-log1-dnslb.sjc.sugarcrm.pvt" | |
# host => "ops-log1a.sjc.sugarcrm.pvt" | |
# port => "12201" | |
#} | |
#elasticsearch { | |
# # Setting 'embedded' will run a real elasticsearch server inside logstash. | |
# # This option below saves you from having to run a separate process just | |
# # for ElasticSearch, so you can get started quicker! | |
# embedded => true | |
#} | |
} | |
==================================================================================================================== | |
THREAD_NAME [ \t\w.-]+ | |
PRIORITY DEBUG|ERROR|FATAL|INFO|WARN | |
CATEGORY [ \t\w.:;-]+ | |
BLAMMO_TIME %{TIMESTAMP_ISO8601:timestamp} +%{INT:uptime} | |
BLAMMO_NAMESPACE \[%{THREAD_NAME:thread}\] \(%{PRIORITY:priority}\) {%{CATEGORY:category}} | |
BLAMMO_MESSAGE %{GREEDYDATA:msg} | |
BLAMMO_RELAY %{BLAMMO_TIME} %{BLAMMO_NAMESPACE} %{BLAMMO_MESSAGE} | |
input { | |
lumberjack { | |
port => 5043 | |
ssl_certificate => "/etc/logstash-forwarder/logstash-forwarder.crt" | |
ssl_key => "/etc/logstash-forwarder/logstash-forwarder.key" | |
} | |
} | |
filter { | |
if [type] == "blammo_relay" { | |
multiline { | |
pattern => "(^\d+\serror)|(^.+Exception: .+)|(^\s+at .+)|(^\ns+... \d+ more)|(^\s*Caused by:.+)" | |
what => "previous" | |
add_tag => [ "multiline" ] | |
} | |
if "multiline" not in [tags] { | |
grok { | |
patterns_dir => [ "/etc/logstash/patterns" ] | |
match => [ "message", "%{BLAMMO_RELAY}" ] | |
} | |
} | |
date { | |
match => [ "timestamp", "YYYY-MM-dd HH:mm:ss,SSS", "ISO8601" ] | |
remove_field => [ "timestamp" ] | |
} | |
} | |
} | |
output { | |
stdout { codec => rubydebug } | |
elasticsearch { host => localhost } | |
} | |
{ | |
"message" => [ | |
[ 0] "2014-04-02 15:40:45,904 +1452 [main] (ERROR) {ServerContainer} Unable to start JMX server connector", | |
[ 1] "java.io.IOException: Cannot bind to URL [rmi://localhost:1099/jmxrmi9999]: javax.naming.ServiceUnavailableException [Root exception is java.rmi.ConnectException: Connection refused to host: localhost; nested exception is: ", | |
[ 2] "\tjava.net.ConnectException: Connection refused]", | |
[ 3] "\tat javax.management.remote.rmi.RMIConnectorServer.newIOException(RMIConnectorServer.java:826)", | |
[ 4] "\tat javax.management.remote.rmi.RMIConnectorServer.start(RMIConnectorServer.java:431)", | |
[ 5] "\tat com.linkedin.databus2.core.container.netty.ServerContainer.doStart(ServerContainer.java:443)", | |
[ 6] "\tat com.linkedin.databus.container.netty.HttpRelay.doStart(HttpRelay.java:275)", | |
[ 7] "\tat com.linkedin.databus2.relay.DatabusRelayMain.doStart(DatabusRelayMain.java:303)", | |
[ 8] "\tat com.linkedin.databus2.core.container.netty.ServerContainer.start(ServerContainer.java:369)", | |
[ 9] "\tat com.linkedin.databus2.core.container.netty.ServerContainer.startAndBlock(ServerContainer.java:360)", | |
[10] "\tat com.etsy.blammo.relay.Relay.main(Relay.java:36)", | |
[11] "Caused by: javax.naming.ServiceUnavailableException [Root exception is java.rmi.ConnectException: Connection refused to host: localhost; nested exception is: ", | |
[12] "\tat com.sun.jndi.rmi.registry.RegistryContext.bind(RegistryContext.java:143)", | |
[13] "\tat com.sun.jndi.toolkit.url.GenericURLContext.bind(GenericURLContext.java:226)", | |
[14] "\tat javax.naming.InitialContext.bind(InitialContext.java:419)", | |
[15] "\tat javax.management.remote.rmi.RMIConnectorServer.bind(RMIConnectorServer.java:643)", | |
[16] "\tat javax.management.remote.rmi.RMIConnectorServer.start(RMIConnectorServer.java:426)" | |
], | |
"@version" => "1", | |
"@timestamp" => [ | |
[ 0] "2014-04-02T19:40:45.904Z", | |
[ 1] "2014-04-02T19:40:57.324Z", | |
[ 2] "2014-04-02T19:40:57.330Z", | |
[ 3] "2014-04-02T19:40:57.335Z", | |
[ 4] "2014-04-02T19:40:57.341Z", | |
[ 5] "2014-04-02T19:40:57.346Z", | |
[ 6] "2014-04-02T19:40:57.351Z", | |
[ 7] "2014-04-02T19:40:57.356Z", | |
[ 8] "2014-04-02T19:40:57.361Z", | |
[ 9] "2014-04-02T19:40:57.366Z", | |
[10] "2014-04-02T19:40:57.372Z", | |
[11] "2014-04-02T19:40:57.379Z", | |
[12] "2014-04-02T19:40:57.385Z", | |
[13] "2014-04-02T19:40:57.390Z", | |
[14] "2014-04-02T19:40:57.396Z", | |
[15] "2014-04-02T19:40:57.402Z", | |
[16] "2014-04-02T19:40:57.407Z", | |
[17] "2014-04-02T19:40:57.413Z" | |
], | |
"file" => "/var/log/blammo/relay.log", | |
"host" => "ETSYNY-L645.local", | |
"offset" => [ | |
[ 0] "61328346", | |
[ 1] "61328446", | |
[ 2] "61328669", | |
[ 3] "61328717", | |
[ 4] "61328812", | |
[ 5] "61328898", | |
[ 6] "61328995", | |
[ 7] "61329074", | |
[ 8] "61329158", | |
[ 9] "61329253", | |
[10] "61329356", | |
[11] "61329408", | |
[12] "61329567", | |
[13] "61329615", | |
[14] "61329692", | |
[15] "61329772", | |
[16] "61329834", | |
[17] "61329919" | |
], | |
"type" => "blammo_relay", | |
"uptime" => "+1452", | |
"thread" => "main", | |
"priority" => "ERROR", | |
"category" => "ServerContainer", | |
"msg" => "Unable to start JMX server connector", | |
"tags" => [ | |
[0] "_grokparsefailure", | |
[1] "multiline" | |
] | |
} | |
NoMethodError: undefined method `tv_sec' for #<Array:0x5a44b545> | |
sprintf at /usr/local/logstash/lib/logstash/event.rb:223 | |
gsub at org/jruby/RubyString.java:3041 | |
sprintf at /usr/local/logstash/lib/logstash/event.rb:209 | |
receive at /usr/local/logstash/lib/logstash/outputs/elasticsearch.rb:324 | |
handle at /usr/local/logstash/lib/logstash/outputs/base.rb:86 | |
initialize at (eval):76 | |
call at org/jruby/RubyProc.java:271 | |
output at /usr/local/logstash/lib/logstash/pipeline.rb:266 | |
outputworker at /usr/local/logstash/lib/logstash/pipeline.rb:225 | |
start_outputs at /usr/local/logstash/lib/logstash/pipeline.rb:152 | |
============================================================================================================= | |
[root@app-ccand-001 httpd]# cat /var/opt/logstash/logstash.conf | |
input { | |
tcp { | |
type => "klog" | |
port => '1514' | |
format => 'plain' | |
tags => [ "TFILE" ] | |
} | |
file { | |
type => "klog" | |
path => "/var/log/httpd/klog*.log" | |
} | |
} | |
filter { | |
grok { | |
type => "klog" | |
match => [ "@message", "%{WORD:level}%{SPACE}%{DATESTAMP:klogdate} - %{NUMBER:playerid} : %{BASE16NUM:udid} - %{PROG:module} - %{GREEDYDATA:body}" ] | |
add_tag => [ "KLOG" ] | |
} | |
grok { | |
type => "klog" | |
match => [ "@message", "%{WORD:level}%{SPACE}%{DATESTAMP:klogdate} - %{WORD:playerid} - %{PROG:module} - %{GREEDYDATA:body}" ] | |
add_tag => [ "KLOGERR" ] | |
} | |
#2013-01-16 23:59:59 | |
date { | |
type => "klog" | |
klogdate => "yyyy-MM-dd HH:mm:ss" | |
} | |
mutate { | |
type => "klog" | |
remove => [ "klogdate" ] | |
} | |
# grep { | |
# type => "klog" | |
# tags => ["_grokparsefailure"] | |
# negate => true | |
# } | |
multiline { | |
type => "klog" | |
pattern => "^\s+" | |
what => "previous" | |
add_tag => [ "MULTI" ] | |
} | |
} | |
output { | |
redis { | |
host => "log-006.int.funzio.com" | |
data_type => "list" | |
key => "logstash" | |
} | |
} | |
----------------------------- | |
{"@source":"file://app-ccand-001.int.funzio.com/var/log/httpd/klog-2013-01-18.log","@tags":["KLOG","_grokparsefailure","MULTI"],"@fields":{"level":["DEBUG"],"playerid":["6186290288752810682"],"udid":["89f802139c174522128dcf20b4825e61"],"module":["battle_list_helper.php"],"body":["SHARD NAME: crimecity_battlelist_ios_5"],"logname":["klog-2013-01-18.log"]},"@timestamp":"2013-01-18T16:19:00.000Z","@source_host":"app-ccand-001.int.funzio.com","@source_path":"/var/log/httpd/klog-2013-01-18.log","@message":"DEBUG 2013-01-18 08:19:00 - 6186290288752810682 : 89f802139c174522128dcf20b4825e61 - battle_list_helper.php - SHARD NAME: crimecity_battlelist_ios_5","@type":"klog"} | |
----------------------------- | |
[root@app-ccand-001 httpd]# grep -C1 'DEBUG 2013-01-18 08:19:00 - 6186290288752810682 : 89f802139c174522128dcf20b4825e61 - battle_list_helper.php - SHARD' klog-2013-01-18.log | |
DEBUG 2013-01-18 08:19:00 - 314974125652558492 : bbaf71c9e65f40c76de71681a926e888 - buildings.php - Building.rob() called vs 312535295904511436 auto_loot: | |
DEBUG 2013-01-18 08:19:00 - 6186290288752810682 : 89f802139c174522128dcf20b4825e61 - battle_list_helper.php - SHARD NAME: crimecity_battlelist_ios_5 | |
DEBUG 2013-01-18 08:19:00 - 6186290288752810682 : 89f802139c174522128dcf20b4825e61 - battle_list_helper.php - getting random cache key:ct-v3-battleList:crimecity_battlelist_ios_5:2:268 | |
----------------------------- | |
============================================================================================================================== | |
input { | |
stdin { type => "mail"} | |
} | |
filter { | |
mutate { | |
remove_field => [ "path", "host" ] | |
} | |
if [type] == "mail" { | |
grok { | |
match => [ "message", "%{SYSLOGBASE} (?<msg>.*)" ] | |
patterns_dir => [ "/etc/logstash/patterns.test" ] | |
add_tag => [ "grokked" ] | |
break_on_match => true | |
} | |
} | |
if [program] == "test" { | |
grok { | |
patterns_dir => [ "/etc/logstash/patterns.test" ] | |
match => [ "msg", "%{TEST}" ] | |
add_tag => [ "test_grokked" ] | |
break_on_match => true | |
# keep_empty_captures => true | |
# remove_field => [ "msg" ] | |
overwrite => [ "size" ] | |
} | |
} | |
if [program] == "exim" { | |
grok { | |
patterns_dir => [ "/etc/logstash/patterns.test" ] | |
match => [ "msg", "%{EXIM}" ] | |
add_tag => [ "exim_grokked" ] | |
break_on_match => true | |
# keep_empty_captures => true | |
# remove_field => [ "msg" ] | |
overwrite => [ "size" ] | |
} | |
} | |
date { | |
match => [ "timestamp", "MMM dd HH:mm:ss" ] | |
add_tag => [ "dated" ] | |
} | |
} | |
output { | |
stdout { | |
debug => true | |
} | |
} | |
TEST (%{TEST2}|%{TEST1}) | |
TEST1 R=(?<router>\S+) T=(?<transport>\S+) S=(?<size>\d+) | |
TEST2 cid=(?<cid>\S+)\s+drcpt=(?<drcpt>\d+)\s+size=(?<size>\d+)\sthing=(?<thing>\S+) | |
================================================================================================================= | |
$ cat base64_decode.conf | |
input { | |
stdin { } | |
} | |
filter { | |
grok { | |
match => ["message", "%{WORD:prefix} %{WORD:b64} %{WORD:suffix}"] | |
} | |
ruby { | |
init => "require 'base64'" | |
code => "event['b64_decoded'] = Base64.decode64(event['b64']) if event.include?('b64')" | |
} | |
} | |
output { | |
stdout { | |
codec => rubydebug | |
} | |
} | |
# messages: | |
# p bWlkZGxl s | |
# p middle s | |
# p s | |
$ echo "p bWlkZGxl s\np middle s\np s" | ./bin/logstash agent -f base64_decode.conf | |
[deprecated] I18n.enforce_available_locales will default to true in the future. If you really want to skip validation of your locale you can set I18n.enforce_available_locales = false to avoid this message. | |
Using milestone 1 filter plugin 'ruby'. This plugin should work, but would benefit from use by folks like you. Please let us know if you find bugs or have suggestions on how to improve this plugin. For more information on plugin milestones, see http://logstash.net/docs/1.2.3.dev/plugin-milestones {:level=>:warn} | |
{ | |
"message" => "p bWlkZGxl s", | |
"@timestamp" => "2013-12-07T04:09:16.662Z", | |
"@version" => "1", | |
"host" => "t61", | |
"prefix" => "p", | |
"b64" => "bWlkZGxl", | |
"suffix" => "s", | |
"b64_decoded" => "middle" | |
} | |
{ | |
"message" => "p middle s", | |
"@timestamp" => "2013-12-07T04:09:16.663Z", | |
"@version" => "1", | |
"host" => "t61", | |
"prefix" => "p", | |
"b64" => "middle", | |
"suffix" => "s", | |
"b64_decoded" => "\x9A']" | |
} | |
{ | |
"message" => "p s", | |
"@timestamp" => "2013-12-07T04:09:16.663Z", | |
"@version" => "1", | |
"host" => "t61", | |
"tags" => [ | |
[0] "_grokparsefailure" | |
] | |
} | |
============================================================================================================================ | |
# WARNING! This file is maintained by Puppet, do not modify directly! | |
input { | |
file { | |
type => "syslog" | |
path => ["/var/log/secure", "/var/log/messages"] | |
tags => ["syslog"] | |
} | |
pipe { | |
type => "edgecast-logs" | |
command => "while [ 1 ] ; do for i in $( find /home/edgecast/logs/*.log.gz ); do if ! /usr/sbin/lsof ${i} ; then zcat ${i}; mv ${i} ${i}.scanned; fi ; done ; done" | |
tags => ["cdn"] | |
} | |
} | |
filter { | |
grok { | |
type => "edgecast-logs" | |
pattern => "(?<timestamp>%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{TIME}) %{INT:timetaken} %{IPORHOST:clientip} %{INT:filesize} %{IPORHOST:sourceip} %{INT:sourceport} %{NOTSPACE:response} %{INT:bytes} %{WORD:verb} %{NOTSPACE:request} - %{INT:duration} %{INT:rsbytes} \"(?:%{NOTSPACE:referrer}|-)\" %{QUOTEDSTRING:agent} %{INT:customerid}" | |
} | |
} | |
output { | |
stdout { | |
debug => true | |
debug_format => "json" | |
} | |
redis { | |
# Hardcoded due to DNS misdirection | |
host => "1.2.3.4" | |
data_type => "list" | |
key => "logstash" | |
} | |
} | |
========================================================================================================================= | |
input { | |
tcp { | |
port => 5000 | |
type => "syslog" | |
host => "127.0.0.1" | |
} | |
udp { | |
port => 5000 | |
type => "syslog" | |
host => "127.0.0.1" | |
} | |
udp { | |
port => 5001 | |
host => "127.0.0.1" | |
type => "apache" | |
buffer_size => 8192 | |
format => "json_event" | |
} | |
udp { | |
port => 5002 | |
host => "127.0.0.1" | |
type => "apache-error" | |
tags => [ "Apache", "Error", "_SERVER_NAME_" ] | |
format => "plain" | |
} | |
udp { | |
port => 5003 | |
host => "127.0.0.1" | |
type => "apache-error" | |
tags => [ "Apache", "Error", "_SERVER_NAME_._TLD_" ] | |
format => "plain" | |
} | |
udp { | |
port => 5004 | |
host => "127.0.0.1" | |
type => "apache-error" | |
tags => [ "Apache", "Error", "logging._TLD_" ] | |
format => "plain" | |
} | |
udp { | |
port => 5010 | |
host => "127.0.0.1" | |
type => "apache-error" | |
tags => [ "Apache", "Error", "ajenti._SERVER_NAME_._TLD_" ] | |
format => "plain" | |
} | |
udp { | |
port => 5011 | |
host => "127.0.0.1" | |
type => "apache-error" | |
tags => [ "Apache", "Error", "graphite._SERVER_NAME_._TLD_" ] | |
format => "plain" | |
} | |
udp { | |
port => 5012 | |
host => "127.0.0.1" | |
type => "apache-error" | |
tags => [ "Apache", "Error", "graylog._SERVER_NAME_._TLD_" ] | |
format => "plain" | |
} | |
udp { | |
port => 5013 | |
host => "127.0.0.1" | |
type => "apache-error" | |
tags => [ "Apache", "Error", "logstash._SERVER_NAME_._TLD_" ] | |
format => "plain" | |
} | |
udp { | |
port => 5014 | |
host => "127.0.0.1" | |
type => "apache-error" | |
tags => [ "Apache", "Error", "rabbit._SERVER_NAME_._TLD_" ] | |
format => "plain" | |
} | |
udp { | |
port => 5015 | |
host => "127.0.0.1" | |
type => "apache-error" | |
tags => [ "Apache", "Error", "sensu._SERVER_NAME_._TLD_" ] | |
format => "plain" | |
} | |
udp { | |
port => 5016 | |
host => "127.0.0.1" | |
type => "apache-error" | |
tags => [ "Apache", "Error", "supervisor._SERVER_NAME_._TLD_" ] | |
format => "plain" | |
} | |
} | |
filter { | |
# SYSLOG PROCESSING | |
grok { | |
type => "syslog" | |
pattern => [ "<%{POSINT:syslog_pri}>%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" ] | |
add_field => [ "received_at", "%{@timestamp}" ] | |
add_field => [ "received_from", "%{@source_host}" ] | |
} | |
syslog_pri { | |
type => "syslog" | |
} | |
date { | |
type => "syslog" | |
match => [ "syslog_timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss" ] | |
} | |
mutate { | |
type => "syslog" | |
exclude_tags => "_grokparsefailure" | |
replace => [ "@source_host", "%{syslog_hostname}" ] | |
replace => [ "@message", "%{syslog_message}" ] | |
} | |
mutate { | |
type => "syslog" | |
remove => [ "syslog_hostname", "syslog_message", "syslog_timestamp" ] | |
} | |
# APACHE ACCESS LOG PROCESSING | |
date { | |
type => "apache" | |
timestamp => "ISO8601" | |
} | |
mutate { | |
type => "apache" | |
remove => [ "timestamp" ] | |
} | |
# APACHE ERROR LOG PROCESSING | |
grok { | |
type => "apache-error" | |
pattern => [ "%{GENERICAPACHEERROR}" ] | |
patterns_dir => "/usr/local/logstash/conf/grok_patterns" | |
} | |
date { | |
type => "apache-error" | |
timestamp => "EEE MMM dd HH:mm:ss yyyy" | |
} | |
mutate { | |
type => "apache-error" | |
replace => [ "severity", "%{apacheseverity}" ] | |
remove => [ "apacheseverity", "timestamp" ] | |
} | |
mutate { | |
type => "apache-error" | |
tags => [ "Apache", "Error", "ajenti._SERVER_NAME_._TLD_" ] | |
replace => [ "@source_host", "ajenti._SERVER_NAME_._TLD_" ] | |
} | |
mutate { | |
type => "apache-error" | |
tags => [ "Apache", "Error", "graphite._SERVER_NAME_._TLD_" ] | |
replace => [ "@source_host", "graphite._SERVER_NAME_._TLD_" ] | |
} | |
mutate { | |
type => "apache-error" | |
tags => [ "Apache", "Error", "graylog._SERVER_NAME_._TLD_" ] | |
replace => [ "@source_host", "graylog._SERVER_NAME_._TLD_" ] | |
} | |
mutate { | |
type => "apache-error" | |
tags => [ "Apache", "Error", "logstash._SERVER_NAME_._TLD_" ] | |
replace => [ "@source_host", "logstash._SERVER_NAME_._TLD_" ] | |
} | |
mutate { | |
type => "apache-error" | |
tags => [ "Apache", "Error", "rabbit._SERVER_NAME_._TLD_" ] | |
replace => [ "@source_host", "rabbit._SERVER_NAME_._TLD_" ] | |
} | |
mutate { | |
type => "apache-error" | |
tags => [ "Apache", "Error", "sensu._SERVER_NAME_._TLD_" ] | |
replace => [ "@source_host", "sensu._SERVER_NAME_._TLD_" ] | |
} | |
mutate { | |
type => "apache-error" | |
tags => [ "Apache", "Error", "supervisor._SERVER_NAME_._TLD_" ] | |
replace => [ "@source_host", "supervisor._SERVER_NAME_._TLD_" ] | |
} | |
mutate { | |
type => "apache-error" | |
tags => [ "Apache", "Error", "logging._TLD_" ] | |
replace => [ "@source_host", "logging._TLD_" ] | |
} | |
mutate { | |
type => "apache-error" | |
tags => [ "Apache", "Error", "_SERVER_NAME_._TLD_" ] | |
replace => [ "@source_host", "_SERVER_NAME_._TLD_" ] | |
} | |
mutate { | |
type => "apache-error" | |
tags => [ "Apache", "Error", "_SERVER_NAME_" ] | |
replace => [ "@source_host", "_SERVER_NAME_" ] | |
} | |
} | |
output { | |
redis { | |
host => "###REDIS_IP###" | |
data_type => "list" | |
key => "logstash" | |
} | |
} | |
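The %{GENERICAPACHEERROR} pattern above is loaded from /usr/local/logstash/conf/grok_patterns and is not included in this gist. A plausible definition (an assumption, not the author's actual pattern file) that captures the fields the later filters rely on (a timestamp matching "EEE MMM dd HH:mm:ss yyyy" and the apacheseverity field used by the mutate replace) would be:
GENERICAPACHEERROR \[(?<timestamp>%{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{YEAR})\] \[%{WORD:apacheseverity}\] (?:\[client %{IPORHOST:clientip}\] )?%{GREEDYDATA:message_remainder}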
================================================================================================================== | |
/etc/logstash/logstash.conf : | |
# We handle the syslog part of the Cisco PIX/ASA messages | |
grok { | |
tags => "cisco-fw" | |
patterns_dir => "/etc/logstash/patterns" | |
pattern => "^<%{POSINT:syslog_pri}>(?:(%{TIMESTAMP_ISO8601:timestamp8601} |%{CISCOTIMESTAMP:timestamp} ))?%{SYSLOGHOST:logsource}?[ :]+%{GREEDYDATA:syslog_message}" | |
} | |
syslog_pri { | |
tags => "cisco-fw" | |
} | |
mutate { | |
tags => "cisco-fw" | |
exclude_tags => "_grokparsefailure" | |
replace => [ "@source_host", "%{logsource}" ] | |
replace => [ "@message", "%{syslog_message}" ] | |
} | |
# for optional fields (device name in message, Cisco syslog tag) | |
grok { | |
tags => "cisco-fw" | |
patterns_dir => "/etc/logstash/patterns" | |
pattern => "(?:%{SYSLOGHOST:device} )?(?:: )?%%{CISCOFWTAG:ciscotag}:%{GREEDYDATA}" | |
} | |
# we extract fields | |
grok { | |
tags => "cisco-fw" | |
break_on_match => false | |
patterns_dir => "/etc/logstash/patterns" | |
pattern => [ | |
"%{CISCOFW1}", | |
"%{CISCOFW2}", | |
"%{CISCOFW3}", | |
"%{CISCOFW4}", | |
"%{CISCOFW4b}", | |
"%{CISCOFW5}", | |
"%{CISCOFW6a}", | |
"%{CISCOFW6b}", | |
"%{CISCOFW7}", | |
"%{CISCOFW8}", | |
"%{CISCOFW9}", | |
"%{CISCOFW10}", | |
"%{CISCOFW11}", | |
"%{CISCOFW12}", | |
"%{CISCOFW13}", | |
"%{CISCOFW14}", | |
"%{CISCOFW15}", | |
"%{CISCOFW16}", | |
"%{CISCOFW17}", | |
"%{CISCOFW18}" | |
] | |
} | |
date { | |
tags => "cisco-fw" | |
timestamp8601 => "ISO8601" | |
timestamp => [ | |
"MMM dd HH:mm:ss.SSS", | |
"MMM d HH:mm:ss.SSS", | |
"MMM dd HH:mm:ss", | |
"MMM d HH:mm:ss", | |
"MMM dd yyyy HH:mm:ss.SSS", | |
"MMM d yyyy HH:mm:ss.SSS", | |
"MMM dd yyyy HH:mm:ss", | |
"MMM d yyyy HH:mm:ss" | |
] | |
innertimestamp => [ | |
"MMM dd HH:mm:ss.SSS", | |
"MMM d HH:mm:ss.SSS", | |
"MMM dd HH:mm:ss", | |
"MMM d HH:mm:ss", | |
"MMM dd yyyy HH:mm:ss.SSS", | |
"MMM d yyyy HH:mm:ss.SSS", | |
"MMM dd yyyy HH:mm:ss", | |
"MMM d yyyy HH:mm:ss", | |
"yyyy-MM-dd HH:mm:ss.SSS", | |
"yyyy-MM-dd HH:mm:ss" | |
] | |
locale => "en-US" | |
} | |
/etc/logstash/patterns/cisco-firewalls : | |
# ASA-1-106100 | |
CISCOFW1 access-list %{DATA:policy_id} %{WORD:action} %{WORD:protocol} %{DATA}/%{IP:src_ip}\(%{DATA:src_port}\) -> %{DATA}/%{IP:dst_ip}\(%{DATA:dst_port}\) | |
# ASA-3-710003 | |
CISCOFW2 %{WORD:action} %{WORD:protocol} type=%{INT}, code=%{INT} from %{IP:src_ip} on interface | |
# ASA-3-710003 | |
CISCOFW3 %{WORD:protocol} access %{WORD:action} by ACL from %{IP:src_ip}/%{DATA:src_port} to %{DATA}:%{IP:dst_ip}/%{DATA:dst_port} | |
# ASA-4-106023 | |
CISCOFW4 %{WORD:action} %{WORD:protocol} src %{DATA}:%{IP:src_ip}/%{DATA:src_port} dst %{DATA}:%{IP:dst_ip}/%{DATA:dst_port} by access-group %{DATA:policy_id} | |
CISCOFW4b %{WORD:action} %{WORD:protocol} src %{DATA}:%{IP:src_ip} dst %{DATA}:%{IP:dst_ip} \(type %{INT}, code %{INT}\) by access-group %{DATA:policy_id} | |
# ASA-6-106015 | |
CISCOFW5 Deny %{WORD:protocol} \(%{GREEDYDATA:action}\) from %{IP:src_ip}/%{DATA:src_port} to %{IP:dst_ip}/%{DATA:dst_port} flags | |
# ASA-6-302013 | |
CISCOFW6a %{WORD:action} inbound %{WORD:protocol} connection %{INT} for %{DATA}:%{IP:src_ip}/%{DATA:src_port} \(%{IP:src_xlated_ip}/%{DATA:src_xlated_port}\) to %{DATA}:%{IP:dst_ip}/%{DATA:dst_port} \(%{IP:dst_xlated_ip}/%{DATA:dst_xlated_port}\) | |
CISCOFW6b %{WORD:action} outbound %{WORD:protocol} connection %{INT} for %{DATA}:%{IP:dst_ip}/%{DATA:dst_port} \(%{IP:dst_xlated_ip}/%{DATA:dst_xlated_port}\) to %{DATA}:%{IP:src_ip}/%{DATA:src_port} \(%{IP:src_xlated_ip}/%{DATA:src_xlated_port}\) | |
# ASA-7-710002 | ASA-7-710005 | |
CISCOFW7 %{WORD:protocol} (?:request|access) %{WORD:action} from %{IP:src_ip}/%{DATA:src_port} to %{DATA}:%{IP:dst_ip}/%{WORD:service} | |
# ASA-6-302020 | |
CISCOFW8 %{WORD:action} (?:inbound|outbound) %{WORD:protocol} connection for faddr %{IP:dst_ip}/%{INT} gaddr %{IP:src_xlated_ip}/%{INT} laddr %{IP:src_ip} | |
# ASA-1-106021 | |
CISCOFW9 %{WORD:action} %{WORD:protocol} reverse path check from %{IP:src_ip} to %{IP:dst_ip} on interface | |
# ASA-2-106006-7 | |
CISCOFW10 %{WORD:action} inbound %{WORD:protocol} from %{IP:src_ip}/%{DATA:src_port} to %{IP:dst_ip}/%{DATA:dst_port} (?:on interface|due to) | |
# ASA-4-313004 | |
CISCOFW11 %{WORD:action} %{WORD:protocol} type=%{INT}, from (?:laddr )?%{IP:src_ip} on interface %{DATA} to %{IP:dst_ip} | |
# ASA-2-106001 | |
CISCOFW12 (?:Inbound|Outbound) %{WORD:protocol} connection %{WORD:action} from %{IP:src_ip}/%{DATA:src_port} to %{IP:dst_ip}/%{DATA:dst_port} flags | |
# ASA-3-106014 | |
CISCOFW13 %{WORD:action} (?:inbound|outbound) %{WORD:protocol} src %{DATA}:%{IP:src_ip} dst %{DATA}:%{IP:dst_ip} | |
# ASA-4-419001 | |
CISCOFW14 %{WORD:action} %{WORD:protocol} packet from %{DATA}:%{IP:src_ip}(?:/%{DATA:src_port})? to %{DATA}:%{IP:dst_ip}(?:/%{DATA:dst_port})? | |
# ASA-4-313005 | |
CISCOFW15 %ASA-4-313005: %{DATA:action} for %{WORD:protocol} error message: %{WORD} src %{DATA}:%{IP:src_ip} dst %{DATA}:%{IP:dst_ip} (?:\(type %{INT}, code %{INT}\)) | |
# PIX-3-710003 | |
CISCOFW16 %{WORD:protocol} access %{WORD:action} by ACL from %{IP:src_ip}/%{DATA:src_port} to %{DATA}:%{IP:dst_ip}/%{WORD:service} | |
# ASA-4-500004 | |
CISCOFW17 %{WORD:action} transport field for protocol=%{WORD:protocol}, from %{IP:src_ip}/%{DATA:src_port} to %{IP:dst_ip}/%{DATA:dst_port} | |
# ASA-6-305011 # dynamic NAT creation | |
#CISCOFW00 %{WORD:action} dynamic %{WORD:protocol} translation from %{DATA}:%{IP:src_ip}/%{DATA:src_port} to %{DATA}:%{IP:src_xlated_ip}/%{DATA:src_xlated_port} | |
# ASA-5-305013 | |
CISCOFW18 Connection for %{WORD:protocol} src %{DATA}:%{IP:src_ip} dst %{DATA}:%{IP:dst_ip} (?:\(type %{INT}, code %{INT}\) )?%{WORD:action} due to | |
/etc/logstash/patterns/cisco-std : | |
CISCOTIMESTAMP %{MONTH} +%{MONTHDAY}(?: %{YEAR})? %{TIME} | |
CISCOTAG [A-Z0-9]+-%{INT}-(?:[A-Z0-9_]+) | |
CISCOFWTAG (?:ASA|PIX|FWSM)-%{INT}-(?:[A-Z0-9_]+) | |
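The filters in logstash.conf above act only on events tagged "cisco-fw"; how that tag is applied is not shown in this gist. A minimal input that would produce such events (a sketch; the port is an assumption, and binding to a port below 1024 requires root) is:
input {
  udp {
    port => 514
    type => "cisco-fw"
    tags => [ "cisco-fw" ]
  }
}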
============================================================================================================================= | |
Dec 12 21:06:10 core smbd: [2012/12/12 21:06:03, 0] smbd/server.c:1051(main) | |
Dec 12 21:06:10 core smbd: smbd version 3.6.3 started. | |
Dec 12 21:06:10 core smbd: Copyright Andrew Tridgell and the Samba Team 1992-2011 | |
Dec 12 21:06:10 core smbd: [2012/12/12 21:06:03.734670, 2, pid=6956, effective(0, 0), real(0, 0)] param/loadparm.c:4985(max_open_files) | |
Dec 12 21:06:10 core smbd: rlimit_max: increasing rlimit_max (1024) to minimum Windows limit (16384) | |
grep { | |
# Match all syslog messages from smbd and nmbd | |
type => "syslog" | |
match => [ "syslog_program", "^[sn]mbd$" ] | |
drop => false | |
add_tag => [ "samba" ] | |
} | |
grep { | |
# Remove the samba tag from events originating on "nas" | |
# (not actually Samba, but the Solaris kernel CIFS server, which uses a different log format) | |
type => "syslog" | |
tags => [ "samba" ] | |
match => [ "@source_host", "^nas$" ] | |
drop => false | |
remove_tag => [ "samba" ] | |
} | |
multiline { | |
# Join samba multiline events | |
type => "syslog" | |
tags => [ "samba" ] | |
pattern => "^\s" | |
what => "previous" | |
} | |
grok { | |
# convert samba multiline logs into something more useful | |
type => "syslog" | |
tags => [ "samba" ] | |
pattern => "^\[%{DATESTAMP:samba_timestamp},%{SPACE}%{NUMBER:samba_severity_code}(?:,%{SPACE}pid=%{NUMBER:syslog_pid},%{SPACE}effective\(%{NUMBER:euid},%{SPACE}%{NUMBER:egid}\),%{SPACE}real\(%{NUMBER:uid},%{SPACE}%{NUMBER:gid}\))?\]%{SPACE}%{DATA:samba_class}\n%{GREEDYDATA:samba_message}" | |
add_tag => [ "grokked" ] | |
add_field => [ "samba_timestamp_tz", "%{samba_timestamp} CET" ] | |
add_field => [ "logstash_timestamp", "%{@timestamp}" ] | |
} | |
# date { | |
# # Set real timestamp from samba timestamp | |
# type => "syslog" | |
# tags => [ "samba", "multiline", "grokked" ] | |
# samba_timestamp_tz => [ "yyyy/MM/dd HH:mm:ss.S z", "yyyy/MM/dd HH:mm:ss z" ] | |
# } | |
mutate { | |
# Simplify samba multiline events | |
type => "syslog" | |
tags => [ "samba", "multiline", "grokked" ] | |
# Trim initial whitespace in actual message | |
gsub => [ "samba_message", "^\s*", "" ] | |
# replace => [ "@message", "%{samba_message}" ] | |
# remove => [ "samba_timestamp", "samba_timestamp_tz", "samba_message" ] | |
} | |
============================================================================================================ | |