filter {
  if "modsecurity" in [tags] {

    ###########################
    ##### start of filter #####
    ###########################

    ##### section split #####
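    # A serial ModSecurity audit log entry is made up of lettered parts (A, B, C, ... Z),
    # each opened by a boundary marker of the form "--a1b2c3d4-A--" (illustrative value).
    # The ruby block below splits the raw message on those markers and stores each part
    # in its own field: rawSectionA, rawSectionB, rawSectionC, and so on.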
    ruby {
      code => "
        if !event.get('message').nil?
          modSecSectionData = event.get('message').split(/(?:--[a-fA-F0-9]{8}-([A-Z])--)/)
          modSecSectionData.shift
          for i in 0..((modSecSectionData.length-1)/2)
            sectionName = 'rawSection'.concat(modSecSectionData.shift)
            sectionData = modSecSectionData.shift
            sectionName = sectionName.strip
            if !sectionData.nil?
              sectionData = sectionData.strip
            end
            event.set(sectionName, sectionData)
          end
        end
      "
    }

    ##### section a parse #####
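    # Section A is the audit log header: timestamp, transaction unique id, source IP/port,
    # destination IP/port. An illustrative (not real) example line:
    #   [09/Dec/2019:04:02:33 +0700] XevExampleUniqueId 192.0.2.10 54321 192.0.2.20 443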
    grok {
      match => {
        "rawSectionA" => "\[(?<modsec_timestamp>%{MONTHDAY}/%{MONTH}/%{YEAR}:%{TIME} [-\+]{1,2}%{INT})\] %{DATA:uniqueId} %{IP:sourceIp} %{INT:sourcePort} %{IP:destIp} %{INT:destPort}"
      }
    }

    ##### section b parse #####
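    # Section B holds the request line followed by the request headers, e.g. (illustrative):
    #   GET /index.php?id=1 HTTP/1.1
    #   Host: www.example.com
    #   User-Agent: curl/7.58.0
    # The first grok pattern captures method, URI and protocol plus the raw header block;
    # the second is a catch-all fallback for malformed request lines.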
    if [rawSectionB] =~ /.+/ {
      grok {
        match => {
          "rawSectionB" => [ "(?m)^%{DATA:httpMethod}\s(?<requestedUri>\S+)\s(?<incomingProtocol>[^\n]+)(?:\n(?<raw_requestHeaders>.+)?)?$",
                             "(?<httpMethod>^(.*)$)" ]
        }
      }
    }
    if [raw_requestHeaders] =~ /.+/ {
      kv {
        source => "raw_requestHeaders"
        field_split => "\n"
        value_split => ":"
        target => "requestHeaders"
      }
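      # The kv filter above turns each "Name: value" header line into an entry of the
      # [requestHeaders] map, e.g. (illustrative) "Host: www.example.com" becomes
      # [requestHeaders][Host] => " www.example.com". Values keep the space after the
      # colon, which is why they are stripped in the ruby block below.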
      # trim leading/trailing whitespace hack @see https://logstash.jira.com/browse/LOGSTASH-1369
      ruby {
        code => "
          requestHeaders = event.get('requestHeaders').to_hash
          requestHeaders.each { |k, v|
            if !v.nil? and v.is_a? String
              requestHeaders[k] = v.strip
            end
          }
          event.set('requestHeaders', requestHeaders)
        "
      }
    }
    if [raw_requestHeaders] =~ /Cookie/ and [raw_requestHeaders] =~ /myCookie=.+\b/ {
      grok {
        match => {
          "raw_requestHeaders" => "(?<myCookie>myCookie[^; \s]+)"
        }
      }
    }
    if [raw_requestHeaders] =~ /X-Forwarded-For:/ {
      grok {
        match => {
          "raw_requestHeaders" => "X-Forwarded-For: %{IPORHOST:XForwardedFor}"
        }
      }
    }
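    # Split the Cookie header (if any) into individual cookies, e.g. (illustrative)
    # "Cookie: myCookie=abc123; lang=en" becomes [requestCookies][myCookie] => "abc123"
    # and [requestCookies][lang] => "en".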
    if [requestHeaders][Cookie] =~ /.+/ {
      kv {
        source => "[requestHeaders][Cookie]"
        field_split => "; "
        value_split => "="
        target => "requestCookies"
      }
    }

    ##### section c parse #####
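    # Section C carries the request body (for example urlencoded POST data) when the
    # transaction had one; it is copied verbatim into the requestBody field.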
    if [rawSectionC] =~ /.+/ {
      mutate {
        add_field => { "requestBody" => "%{rawSectionC}" }
      }
    }

    ##### section f parse #####
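    # Section F is the response status line followed by the response headers, e.g. (illustrative):
    #   HTTP/1.1 403 Forbidden
    #   Content-Length: 209
    #   Content-Type: text/html; charset=iso-8859-1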
    if [rawSectionF] =~ /.+/ {
      grok {
        match => {
          "rawSectionF" => "(?m)^(?<serverProtocol>.+?)\s(?<responseStatus>[^\n]+)(\n(?<raw_responseHeaders>.+)?)?$"
        }
      }
    }
    if [rawSectionF] =~ /(.+?)\s(.+)\n{1}/ {
      grok {
        match => {
          "rawSectionF" => ".+?\n(?m)(?<raw_responseHeaders>.+)"
        }
      }
    }
    if [raw_responseHeaders] =~ /.+/ {
      kv {
        source => "raw_responseHeaders"
        field_split => "\n"
        value_split => ":"
        target => "responseHeaders"
      }
      # trim leading/trailing whitespace hack @see https://logstash.jira.com/browse/LOGSTASH-1369
      ruby {
        code => "
          responseHeaders = event.get('responseHeaders').to_hash
          responseHeaders.each { |k, v|
            if !v.nil? and v.is_a? String
              responseHeaders[k] = v.strip
            end
          }
          event.set('responseHeaders', responseHeaders)
        "
      }
    }
    ##### section h parse #####
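    # Section H is the audit log trailer. It contains one "Message:" line per rule hit plus
    # bookkeeping lines such as Apache-Handler, Stopwatch and Producer. An illustrative
    # Message line:
    #   Message: Warning. Pattern match "..." at ARGS:id. [file ".../REQUEST-942-APPLICATION-ATTACK-SQLI.conf"] [line "45"] [id "942100"] [msg "SQL Injection Attack Detected"] [severity "CRITICAL"] [tag "attack-sqli"]
    # The ruby block below turns every Message line into a hash of its bracketed fields and
    # collects them in auditLogTrailerMessages.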
    if [rawSectionH] =~ /.+/ {
      # build the auditlog trailer messages
      ruby {
        code => "
          def extractVal(pattern, fromString, storeResultIn, underKeyName, multiValues=false)
            if multiValues
              result = fromString.scan(pattern)
              if !result.empty?
                storeResultIn[underKeyName] = result.flatten
              end
            else
              result = pattern.match(fromString)
              if !result.nil?
                storeResultIn[underKeyName] = result[1]
              end
            end
          end
          auditLogTrailerMessages = Array.new()
          trailer_array = event.get('rawSectionH').split(/\n/)
          trailer_array.each do |entry|
            if entry.match(/^Message: /)
              msg = Hash.new()
              extractVal(/Message: (.+)\s($|(\s*\[file))/, entry, msg, 'info')
              extractVal(/\[file \".+\/(.*?).conf\"\]/, entry, msg, 'file')
              extractVal(/\[line \"(.*?)\"\]/, entry, msg, 'line')
              extractVal(/\[id \"(.*?)\"\]/, entry, msg, 'id')
              extractVal(/\[msg \"(.*?)\"\]/, entry, msg, 'msg')
              extractVal(/\[severity \"(.*?)\"\]/, entry, msg, 'severity')
              extractVal(/\[data \"(.*?)\"\]/, entry, msg, 'data')
              extractVal(/\[tag \"(.*?)\"\]/, entry, msg, 'tag')
              auditLogTrailerMessages.push(msg)
            end
          end
          event.set('auditLogTrailerMessages', auditLogTrailerMessages)
        "
      }
    }
    if [rawSectionH] =~ /.+/ {
      # For ModSecurity 2.9.1+ compatibility
      mutate {
        # This works for most things, but if the error contains strings such as
        # REQUEST_HEADERS:Referer it can be problematic:
        # gsub => [ "rawSectionH", 'ModSecurity:', 'ModSecurity' ]
        # Alternately, just drop "Apache-Error:" lines from rawSectionH.
        gsub => [ "rawSectionH", '^Apache-Error:.*', '' ]
      }
      kv {
        source => "rawSectionH"
        field_split => "\n"
        value_split => ":"
        target => "auditLogTrailer"
      }
      # trim leading/trailing whitespace hack @see https://logstash.jira.com/browse/LOGSTASH-1369
      ruby {
        code => "
          auditLogTrailer = event.get('auditLogTrailer').to_hash
          auditLogTrailerMessages = event.get('auditLogTrailerMessages')
          auditLogTrailer.each { |k, v|
            if !v.nil? and v.is_a? String
              auditLogTrailer[k] = v.strip
            end
          }
          auditLogTrailer.delete('Message')
          auditLogTrailer['messages'] = auditLogTrailerMessages
          event.set('auditLogTrailer', auditLogTrailer)
        "
      }
      mutate {
        remove_field => ['auditLogTrailerMessages']
      }
    }
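    # The Stopwatch line in section H begins with the transaction start time in microseconds
    # since the epoch, e.g. (illustrative) "Stopwatch: 1575864120012345 1250 (- - -)".
    # It is parsed below and converted to seconds to (re)set the event @timestamp.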
    grok {
      match => {
        "rawSectionH" => "Stopwatch: %{WORD:event_date_microseconds}"
      }
    }
    mutate {
      convert => [ "event_date_microseconds", "float" ]
    }
    # micro -> milli
    ruby {
      code => "
        event_date_milliseconds = (event.get('event_date_microseconds') / 1000.0)
        event.set('event_date_milliseconds', event_date_milliseconds)
      "
    }
    # milli -> seconds
    ruby {
      code => "
        event_date_seconds = (event.get('event_date_milliseconds') / 1000.0)
        event.set('event_date_seconds', event_date_seconds)
      "
    }
    # NOTE: this forces the event's @timestamp to equal the Stopwatch value
    date {
      match => [ "event_date_seconds", "UNIX" ]
      timezone => "GMT"
    }
    # keep a second copy of the timestamp as an ISO 8601 string
    ruby {
      code => "
        event.set('event_timestamp', (Time.at(event.get('event_date_seconds')).gmtime).iso8601(3))
      "
    }
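    # The next three blocks walk [auditLogTrailer][messages] and build de-duplicated lists:
    # modsecSeverities (rule severities), modsecAttacktype (rule msg strings) and
    # modsecAttackdata (the free-text part of each Message line).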
    if [rawSectionH] =~ /.+/ {
      # extract distinct severities from the messages built in the rawSectionH ruby block above
      ruby {
        code => "
          modsecSeverities = Set.new
          trailerMsgs = event.get('[auditLogTrailer][messages]')
          trailerMsgs.each {|m|
            if m.key?('severity')
              modsecSeverities.add(m['severity'])
            end
          }
          event.set('modsecSeverities', modsecSeverities.to_a)
        "
      }
    }
    if [rawSectionH] =~ /.+/ {
      # extract distinct attack types (rule msg) from the messages built above
      ruby {
        code => "
          modsecAttacktype = Set.new
          trailerMsgs = event.get('[auditLogTrailer][messages]')
          trailerMsgs.each {|m|
            if m.key?('msg')
              modsecAttacktype.add(m['msg'])
            end
          }
          event.set('modsecAttacktype', modsecAttacktype.to_a)
        "
      }
    }
    if [rawSectionH] =~ /.+/ {
      # extract distinct attack data (message info) from the messages built above
      ruby {
        code => "
          modsecAttackdata = Set.new
          trailerMsgs = event.get('[auditLogTrailer][messages]')
          trailerMsgs.each {|m|
            if m.key?('info')
              modsecAttackdata.add(m['info'])
            end
          }
          event.set('modsecAttackdata', modsecAttackdata.to_a)
        "
      }
    }
    if [rawSectionH] =~ /.+/ {
      # map the CRS rule file that fired to a human-readable attack type
      ruby {
        code => "
          type = { 'REQUEST-901-INITIALIZATION' => 'Initialization',
                   'REQUEST-903.9001-DRUPAL-EXCLUSION-RULES' => '',
                   'REQUEST-903.9002-WORDPRESS-EXCLUSION-RULES' => '',
                   'REQUEST-903.9003-NEXTCLOUD-EXCLUSION-RULES' => '',
                   'REQUEST-903.9004-DOKUWIKI-EXCLUSION-RULES' => '',
                   'REQUEST-903.9005-CPANEL-EXCLUSION-RULES' => '',
                   'REQUEST-903.9006-XENFORO-EXCLUSION-RULES' => '',
                   'REQUEST-905-COMMON-EXCEPTIONS' => 'Common exceptions',
                   'REQUEST-910-IP-REPUTATION' => 'Bad IP reputation',
                   'REQUEST-911-METHOD-ENFORCEMENT' => '',
                   'REQUEST-912-DOS-PROTECTION' => 'DoS attack',
                   'REQUEST-913-SCANNER-DETECTION' => 'Scanner detection',
                   'REQUEST-920-PROTOCOL-ENFORCEMENT' => 'Prevent exploitation',
                   'REQUEST-921-PROTOCOL-ATTACK' => 'HTTP protocol attack',
                   'REQUEST-930-APPLICATION-ATTACK-LFI' => 'Local File Inclusion attack',
                   'REQUEST-931-APPLICATION-ATTACK-RFI' => 'Remote File Inclusion attack',
                   'REQUEST-932-APPLICATION-ATTACK-RCE' => 'Remote Code Execution attack',
                   'REQUEST-933-APPLICATION-ATTACK-PHP' => 'PHP application attack',
                   'REQUEST-934-APPLICATION-ATTACK-NODEJS' => 'NodeJS application attack',
                   'REQUEST-941-APPLICATION-ATTACK-XSS' => 'XSS attack',
                   'REQUEST-942-APPLICATION-ATTACK-SQLI' => 'SQL injection attack',
                   'REQUEST-943-APPLICATION-ATTACK-SESSION-FIXATION' => 'Session fixation attack',
                   'REQUEST-944-APPLICATION-ATTACK-JAVA' => 'Java application attack',
                   'REQUEST-949-BLOCKING-EVALUATION' => 'Blocking evaluation',
                   'RESPONSE-950-DATA-LEAKAGES' => 'Prevent data leakages',
                   'RESPONSE-951-DATA-LEAKAGES-SQL' => 'Prevent SQL data leakages',
                   'RESPONSE-952-DATA-LEAKAGES-JAVA' => 'Prevent Java data leakages',
                   'RESPONSE-953-DATA-LEAKAGES-PHP' => 'Prevent PHP data leakages',
                   'RESPONSE-954-DATA-LEAKAGES-IIS' => 'Prevent IIS data leakages',
                   'RESPONSE-959-BLOCKING-EVALUATION' => 'Anomalous traffic blocked',
                   'RESPONSE-980-CORRELATION' => 'Correlation' }
          attackType = Set.new
          trailerMsgs = event.get('[auditLogTrailer][messages]')
          trailerMsgs.each {|m|
            if type.has_key?(m['file'])
              attackType.add(type[m['file']])
            end
          }
          event.set('attackType', attackType.to_a)
        "
      }
    }

    ##### section k parse #####
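    # Section K lists every rule that matched the transaction, one SecRule per line, e.g.
    # (illustrative): SecRule ARGS "@detectSQLi" "phase:2,id:942100,block,..."
    # The mutate/ruby combination below turns it into a matchedRules array and collects the
    # numeric rule ids into secRuleIds.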
    if [rawSectionK] =~ /.+/ {
      # hack.. @see https://logstash.jira.com/browse/LOGSTASH-1331
      mutate {
        gsub => [ "rawSectionK", "\n", "~" ]
        gsub => [ "rawSectionK", "(~+)", "~" ]
        split => [ "rawSectionK" , "~" ]
      }
      mutate {
        rename => [ "rawSectionK", "matchedRules" ]
      }
      ruby {
        code => "
          secRuleIds = Array.new()
          matchedRules_array = event.get('matchedRules')
          matchedRules_array.each do |entry|
            if entry.match(/^SecRule /) and entry.match(/,id:/)
              secRuleIds.push(/,id:(?<ruleId>\d+)/.match(entry)[:ruleId])
            end
          end
          event.set('secRuleIds', secRuleIds)
        "
      }
    }
    ##### Filter Cleanup #####
    mutate {
      remove_field => [ "message", "raw_responseHeaders", "raw_requestHeaders", "rawSectionZ" ]
    }

    ##### GeoIP #####
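    # Enrich the client address with GeoIP data; the geoip filter adds a [geoip] object
    # (country, city, coordinates, ...) derived from sourceIp.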
    geoip {
      source => "sourceIp"
    }

    #########################
    ##### end of filter #####
    #########################
  }
}