# Logstash configuration gist by @zsprackett, created January 17, 2014.
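# Usage sketch (file name is hypothetical; the exact invocation depends on the
# Logstash version in use): run something like
#   bin/logstash -f logstash-aggregate.conf
# With the stdin input enabled below, raw syslog lines pasted into the terminal
# are parsed and each resulting event is pretty-printed by the stdout output.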
input {
  stdin {
    type => "syslog"
  }
  #tcp {
  #  port => 1514
  #  type => "syslog"
  #}
  #udp {
  #  port => 1514
  #  type => "syslog"
  #}
}
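# Illustrative input (hostnames and addresses are made up) of the kind of
# syslog-wrapped Apache access line the filters below are written for. The
# syslog grok strips the "<pri>timestamp host program:" prefix, and the apache
# branch then parses the remaining vhost-prefixed combined log format:
#   <134>Jan 17 03:41:00 web1 apache: www.example.com 203.0.113.5 - - [17/Jan/2014:03:41:00 -0800] "GET /index.php HTTP/1.1" 200 1234 "-" "Mozilla/5.0"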
filter {
  if [type] == "syslog" {
    grok {
      overwrite => [ "message" ]
      match => {
        "message" => "^(?:<%{POSINT:syslog_pri}>)?%{SYSLOGTIMESTAMP:timestamp} %{IPORHOST:syslog_host} (?:%{PROG:program}(?:\[%{POSINT:pid}\])?: )?%{GREEDYDATA:message}"
      }
    }
    syslog_pri {}
  }
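  # The groks below load custom patterns from /tmp/foo, which is not included
  # in this gist. A minimal sketch of what that patterns file would need to
  # define (the names come from the patterns used here; the definitions are
  # assumptions):
  #   IPORHOSTORDASH (?:%{IPORHOST}|-)
  #   APACHEERRORDATE %{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{YEAR}
  #   PATHCOMPONENT [^/]+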
if [program] == "apache" {
grok {
patterns_dir => [ "/tmp/foo" ]
match => {
"message" => "^%{IPORHOSTORDASH:vhost} %{IPORHOSTORDASH:client_ip} %{USER:ident} %{USER:auth} \[%{HTTPDATE:accept_date}\] \"(?:%{WORD:http_verb} %{NOTSPACE:http_request}(?: HTTP/%{NUMBER:http_version})?|%{DATA:rawrequest})\" %{NUMBER:http_status_code} (?:%{NUMBER:bytes_read}|-) %{QS:referrer} %{QS:useragent}"
}
}
mutate {
replace => [ "type", "apache" ]
convert => [ "http_status_code", "integer" ]
convert => [ "bytes_read", "integer" ]
uppercase => [ "http_verb" ]
}
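  # Illustrative apache-error message (after the syslog grok; the path and
  # address are made up) of the shape the next branch is written for: the
  # error-date/client grok matches the prefix, the "in ... on line ..." grok
  # extracts absolute_path and line_number, and the path grok derives
  # domain/instance to build the vhost field:
  #   [Fri Jan 17 03:41:00 2014] [error] [client 203.0.113.5] PHP Fatal error: Call to undefined function foo() in /var/www/cluster1/example.com/www/index.php on line 42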
} else if [program] == "apache-error" {
mutate {
replace => [ "type", "apache-error" ]
}
grok {
patterns_dir => [ "/tmp/foo" ]
match => {
"message" => [
"^\[%{APACHEERRORDATE:accept_date}\] \[error\] \[client %{IP:client_ip}\]"
]
}
}
if !("_grokparsefailure" in [tags]) {
grok {
break_on_match => "false"
tag_on_failure => "false"
match => {
message => [
"in %{PATH:absolute_path} on line %{POSINT:line_number}",
"exist: %{PATH:absolute_path}"
]
}
}
if [absolute_path] {
grok {
patterns_dir => [ "/tmp/foo" ]
tag_on_failure => "false"
match => {
absolute_path => [
"^/mnt/%{PATHCOMPONENT:cluster}/%{PATHCOMPONENT:store}/%{PATHCOMPONENT:domain}/%{PATHCOMPONENT:instance}",
"^/var/www/%{PATHCOMPONENT:cluster}/%{PATHCOMPONENT:domain}/%{PATHCOMPONENT:instance}"
]
}
}
if [domain] and [instance] {
mutate {
add_field => [ "vhost", "%{instance}.%{domain}" ]
remove_field => [ "instance", "cluster", "store" ]
}
}
}
}
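  # Illustrative haproxy httplog message (after the syslog grok; names and
  # address are made up) of the shape the next branch is written for:
  #   203.0.113.5:51234 [17/Jan/2014:03:41:00.123] http-in backend1/web1 10/0/30/69/109 200 2750 - - ---- 1/1/1/1/0 0/0 "GET /index.html HTTP/1.1"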
} else if [program] == "haproxy" {
grok {
match => {
"message" => "^%{IP:client_ip}:%{INT:client_port} \[%{HAPROXYDATE:accept_date}\] %{NOTSPACE:frontend_name} %{NOTSPACE:backend_name}/%{NOTSPACE:server_name} %{INT:time_request}/%{INT:time_queue}/%{INT:time_backend_connect}/%{INT:time_backend_response}/%{NOTSPACE:time_duration} %{INT:http_status_code} %{NOTSPACE:bytes_read} %{DATA:captured_request_cookie} %{DATA:captured_response_cookie} %{NOTSPACE:termination_state} %{INT:actconn}/%{INT:feconn}/%{INT:beconn}/%{INT:srvconn}/%{NOTSPACE:retries} %{INT:srv_queue}/%{INT:backend_queue} (\{%{HAPROXYCAPTUREDREQUESTHEADERS}\})?( )?(\{%{HAPROXYCAPTUREDRESPONSEHEADERS}\})?( )?\"(<BADREQ>|(%{WORD:http_verb} (%{URIPROTO:http_proto}://)?(?:%{USER:http_user}(?::[^@]*)?@)?(?:%{URIHOST:http_host})?(?:%{URIPATHPARAM:http_request})?( HTTP/%{NUMBER:http_version})?))?\""
}
}
mutate {
replace => [ "type", "haproxy" ]
convert => [ "time_request", "integer" ]
convert => [ "time_queue", "integer" ]
convert => [ "time_backend_connect", "integer" ]
convert => [ "time_backend_response", "integer" ]
convert => [ "time_duration", "integer" ]
convert => [ "http_status_code", "integer" ]
convert => [ "bytes_read", "integer" ]
uppercase => [ "http_verb" ]
}
}
}
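# With the file output below, each event is appended to a per-type daily file;
# for example, an apache event processed on January 17, 2014 would land in
# /var/log/aggregate/apache.2014-01-17 (example path, derived from the sprintf
# format in the 'path' option).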
output {
  # Print each event to stdout.
  stdout {
    # Enabling the 'rubydebug' codec on the stdout output makes Logstash
    # pretty-print the entire event as something similar to a JSON representation.
    codec => rubydebug
  }
  file {
    path => "/var/log/aggregate/%{type}.%{+yyyy-MM-dd}"
  }
  #gelf {
  #  #host => "ops-log1-dnslb.sjc.sugarcrm.pvt"
  #  host => "ops-log1a.sjc.sugarcrm.pvt"
  #  port => "12201"
  #}
  #elasticsearch {
  #  # Setting 'embedded' will run a real Elasticsearch server inside Logstash.
  #  # This option saves you from having to run a separate process just
  #  # for Elasticsearch, so you can get started quicker!
  #  embedded => true
  #}
}