Last active
February 20, 2017 12:54
-
-
Save untergeek/f8046268a20bde875deb to your computer and use it in GitHub Desktop.
Logstash Debugging #1
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
input {
  # File input left commented out for debugging; uncomment to tail the
  # project logs from disk instead of reading events from stdin.
  # file {
  #   path => ["C:/logs/Proj/*/*.log"]
  #   start_position => beginning
  # }
  stdin { }
}
filter {
  grok {
    # Five candidate layouts for the log line; grok tries them in order and
    # stops at the first match (break_on_match defaults to true).
    match => { "message" => ["%{TIMESTAMP_ISO8601:Logdate} \[%{WORD:LogLevel}\] \[%{NUMBER:ThreadId}\] ?%{GREEDYDATA:data}", "%{TIMESTAMP_ISO8601:Logdate} \[%{WORD:LogLevel}\] \[%{WORD:Type}\] ?%{GREEDYDATA:data}", "%{TIMESTAMP_ISO8601:Logdate} \[%{WORD:LogLevel}%{SPACE}\] \[%{NUMBER:ThreadId}\] ?%{GREEDYDATA:data}", "%{TIMESTAMP_ISO8601:Logdate} \[%{NUMBER:ThreadId}\] %{WORD:LogLevel} ?%{GREEDYDATA:data}", "%{TIMESTAMP_ISO8601:Logdate} %{WORD:LogLevel} ?%{GREEDYDATA:data}" ] }
    #match => { "path" => "%{GREEDYDATA}/%{GREEDYDATA:Component}/%{GREEDYDATA:}_%{GREEDYDATA:ProcessId}.log" }
    #break_on_match => false
  }
  # Parse the grok-extracted Logdate (e.g. "2014-10-13 16:42:46,946") and
  # use it as the event's @timestamp.
  date {
    match => [ "Logdate", "YYYY-MM-dd HH:mm:ss,SSS" ]
  }
}
output {
  # Example elasticsearch output; note the host value must be a quoted
  # string if this block is uncommented.
  # elasticsearch {
  #   protocol => "http"
  #   host => "localhost"
  # }
  stdout { codec => rubydebug }
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
$ bin/logstash -f h.conf --verbose | |
Grok patterns path {:patterns_dir=>["/Users/buh/logstash-1.4.2/patterns/*"], :level=>:info} | |
Grok loading patterns from file {:path=>"/Users/buh/logstash-1.4.2/patterns/firewalls", :level=>:info} | |
Grok loading patterns from file {:path=>"/Users/buh/logstash-1.4.2/patterns/grok-patterns", :level=>:info} | |
Grok loading patterns from file {:path=>"/Users/buh/logstash-1.4.2/patterns/haproxy", :level=>:info} | |
Grok loading patterns from file {:path=>"/Users/buh/logstash-1.4.2/patterns/java", :level=>:info} | |
Grok loading patterns from file {:path=>"/Users/buh/logstash-1.4.2/patterns/junos", :level=>:info} | |
Grok loading patterns from file {:path=>"/Users/buh/logstash-1.4.2/patterns/linux-syslog", :level=>:info} | |
Grok loading patterns from file {:path=>"/Users/buh/logstash-1.4.2/patterns/mcollective", :level=>:info} | |
Grok loading patterns from file {:path=>"/Users/buh/logstash-1.4.2/patterns/mcollective-patterns", :level=>:info} | |
Grok loading patterns from file {:path=>"/Users/buh/logstash-1.4.2/patterns/mongodb", :level=>:info} | |
Grok loading patterns from file {:path=>"/Users/buh/logstash-1.4.2/patterns/nagios", :level=>:info} | |
Grok loading patterns from file {:path=>"/Users/buh/logstash-1.4.2/patterns/postgresql", :level=>:info} | |
Grok loading patterns from file {:path=>"/Users/buh/logstash-1.4.2/patterns/redis", :level=>:info} | |
Grok loading patterns from file {:path=>"/Users/buh/logstash-1.4.2/patterns/ruby", :level=>:info} | |
Match data {:match=>{"message"=>["%{TIMESTAMP_ISO8601:Logdate} \\[%{WORD:LogLevel}\\] \\[%{NUMBER:ThreadId}\\] ?%{GREEDYDATA:data}", "%{TIMESTAMP_ISO8601:Logdate} \\[%{WORD:LogLevel}\\] \\[%{WORD:Type}\\] ?%{GREEDYDATA:data}", "%{TIMESTAMP_ISO8601:Logdate} \\[%{WORD:LogLevel}%{SPACE}\\] \\[%{NUMBER:ThreadId}\\] ?%{GREEDYDATA:data}", "%{TIMESTAMP_ISO8601:Logdate} \\[%{NUMBER:ThreadId}\\] %{WORD:LogLevel} ?%{GREEDYDATA:data}", "%{TIMESTAMP_ISO8601:Logdate} %{WORD:LogLevel} ?%{GREEDYDATA:data}"]}, :level=>:info} | |
Grok compile {:field=>"message", :patterns=>["%{TIMESTAMP_ISO8601:Logdate} \\[%{WORD:LogLevel}\\] \\[%{NUMBER:ThreadId}\\] ?%{GREEDYDATA:data}", "%{TIMESTAMP_ISO8601:Logdate} \\[%{WORD:LogLevel}\\] \\[%{WORD:Type}\\] ?%{GREEDYDATA:data}", "%{TIMESTAMP_ISO8601:Logdate} \\[%{WORD:LogLevel}%{SPACE}\\] \\[%{NUMBER:ThreadId}\\] ?%{GREEDYDATA:data}", "%{TIMESTAMP_ISO8601:Logdate} \\[%{NUMBER:ThreadId}\\] %{WORD:LogLevel} ?%{GREEDYDATA:data}", "%{TIMESTAMP_ISO8601:Logdate} %{WORD:LogLevel} ?%{GREEDYDATA:data}"], :level=>:info} | |
Pipeline started {:level=>:info} | |
2014-10-13 16:42:46,946 [1] DEBUG SolaceManager - Creating Solace session: Host=abc, VPN=xyz, User=indiana, ConnectRetries=-1, SendBlocking=True | |
{ | |
"message" => "2014-10-13 16:42:46,946 [1] DEBUG SolaceManager - Creating Solace session: Host=abc, VPN=xyz, User=indiana, ConnectRetries=-1, SendBlocking=True", | |
"@version" => "1", | |
"@timestamp" => "2014-10-13T22:42:46.946Z", | |
"host" => "Aironaut.local", | |
"Logdate" => "2014-10-13 16:42:46,946", | |
"ThreadId" => "1", | |
"LogLevel" => "DEBUG", | |
"data" => "SolaceManager - Creating Solace session: Host=abc, VPN=xyz, User=indiana, ConnectRetries=-1, SendBlocking=True" | |
} |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
I took the liberty of correcting and testing this with `stdin` and `stdout`. I simply pasted your example in at the command-line and show the `rubydebug` output.

Notes:

1. You had your `date` filter before a date had been extracted by `grok`. I moved the `date` filter down below your `grok` filter and changed the `match` field to use `Logdate`, which you specified should be the field name in your `grok` statement.

2. `break_on_match => false` tells grok to keep parsing the message with the other grok rules, even if a match is found. This worked for me immediately after I commented this line out, allowing Logstash to use the default behavior (`break_on_match => true`), which will stop parsing after the first matching pattern is found. This is likely to be the reason you found multiple fields in your output.

3. The `elasticsearch_http` output is deprecated. The `elasticsearch` output plugin now has http output built-in. You only need to specify `protocol => "http"` for this to work. I updated this for you.

4. Your `date` match statement was set to recognize a 3-character month (`MMM`), so I removed the superfluous `M`. As you can see in the output, `@timestamp` matches the `Logdate` field.

5. I'm not sure what you intended with the commented-out `match` statement on `path` in your grok rule. What are you trying to accomplish here?