Fluent Bit config for logs with large payloads. Records larger than roughly 255 KB are retagged and written with their full payload to a file under /testing, while the copy sent to Cloud Logging has the oversized payload replaced by a short notice.
apiVersion: v1
kind: ConfigMap
metadata:
  name: fluent-bit-config
  namespace: logging
  labels:
    k8s-app: fluent-bit
data:
  # Configuration files: server, input, filters and output
  # ======================================================
  fluent-bit.conf: |
    [SERVICE]
        Flush         1
        Log_Level     info
        Daemon        off
        Parsers_File  parsers.conf
        HTTP_Server   On
        HTTP_Listen   0.0.0.0
        HTTP_Port     2020

    @INCLUDE input-kubernetes.conf
    @INCLUDE filter-kubernetes.conf
    @INCLUDE output-stackdriver.conf

  input-kubernetes.conf: |
    [INPUT]
        Name              tail
        Path              /var/log/containers/*.log
        Tag               kube.*
        Parser            docker
        DB                /var/log/flb_kube.db
        Mem_Buf_Limit     20MB
        Skip_Long_Lines   On
        Refresh_Interval  10
        # Control the maximum log line length that can be buffered
        Buffer_Chunk_Size 256k
        Buffer_Max_Size   10240k
        # Use Docker mode to handle multiline messages emitted by Docker
        Docker_Mode       On

  get-size.lua: |
    -- Internal helper: flatten a record table into a string so that the
    -- approximate size of the payload can be measured.
    local table_to_string_internal = function(t)
      local s = ""
      if t ~= nil then
        for key, val in pairs(t) do
          s = s .. string.format("\"%s\": \"%s\"", key, val)
        end
      end
      return s
    end

    -- Filter callback: store the approximate payload size in 'log_size' and
    -- mark records larger than 255000 bytes with mytag=biglog.
    function cb_print(tag, timestamp, record)
      local s = table_to_string_internal(record)
      local log_size = string.len(s)
      record["log_size"] = log_size
      if log_size > 255000 then
        record["mytag"] = "biglog"
      end
      -- Return code 1 tells Fluent Bit the record was modified.
      return 1, timestamp, record
    end

  drop-payload.lua: |
    -- Filter callback: for records marked as too large, replace the payload
    -- with a short notice pointing at the file output.
    function cb_drop(tag, timestamp, record)
      if record["mytag"] ~= nil then
        record["log"] = "Log payload is too large. Send it to /testing/biglog." .. tag
      end
      return 1, timestamp, record
    end

  filter-kubernetes.conf: |
    [FILTER]
        Name                kubernetes
        Match               kube.*
        Kube_URL            https://kubernetes.default.svc:443
        Kube_CA_File        /var/run/secrets/kubernetes.io/serviceaccount/ca.crt
        Kube_Token_File     /var/run/secrets/kubernetes.io/serviceaccount/token
        Kube_Tag_Prefix     kube.var.log.containers.
        # Try to merge the log messages
        Merge_Log           On
        Merge_Log_Key       log_processed
        K8S-Logging.Parser  On
        K8S-Logging.Exclude Off
    [FILTER]
        # Lua script that computes the size of each log record and, for large
        # records, adds the marker field 'mytag' with the value 'biglog'
        Name    lua
        Match   kube.*
        script  get-size.lua
        call    cb_print
    [FILTER]
        # Rewrite the log tag when the value of 'mytag' in the record is 'biglog';
        # the trailing 'true' keeps the original record as well
        Name          rewrite_tag
        Match         kube.*
        Rule          $mytag ^(biglog)$ biglog.$TAG true
        Emitter_Name  re_emitted
    [FILTER]
        # Replace the large payload in the original record so it can still be
        # sent to Cloud Logging
        Name    lua
        Match   kube.*
        script  drop-payload.lua
        call    cb_drop

  output-stackdriver.conf: |
    [OUTPUT]
        # If the log tag is 'biglog', write the full record to /testing,
        # which could be a mounted file system
        Name   file
        Path   /testing
        Match  biglog.*
    [OUTPUT]
        # Send the log records that still carry 'kube.*' tags to Cloud Logging
        Name   stackdriver
        Match  kube.*

  parsers.conf: |
    [PARSER]
        Name        apache
        Format      regex
        Regex       ^(?<host>[^ ]*) [^ ]* (?<user>[^ ]*) \[(?<time>[^\]]*)\] "(?<method>\S+)(?: +(?<path>[^\"]*?)(?: +\S*)?)?" (?<code>[^ ]*) (?<size>[^ ]*)(?: "(?<referer>[^\"]*)" "(?<agent>[^\"]*)")?$
        Time_Key    time
        Time_Format %d/%b/%Y:%H:%M:%S %z
    [PARSER]
        Name        apache2
        Format      regex
        Regex       ^(?<host>[^ ]*) [^ ]* (?<user>[^ ]*) \[(?<time>[^\]]*)\] "(?<method>\S+)(?: +(?<path>[^ ]*) +\S*)?" (?<code>[^ ]*) (?<size>[^ ]*)(?: "(?<referer>[^\"]*)" "(?<agent>[^\"]*)")?$
        Time_Key    time
        Time_Format %d/%b/%Y:%H:%M:%S %z
    [PARSER]
        Name        apache_error
        Format      regex
        Regex       ^\[[^ ]* (?<time>[^\]]*)\] \[(?<level>[^\]]*)\](?: \[pid (?<pid>[^\]]*)\])?( \[client (?<client>[^\]]*)\])? (?<message>.*)$
    [PARSER]
        Name        nginx
        Format      regex
        Regex       ^(?<remote>[^ ]*) (?<host>[^ ]*) (?<user>[^ ]*) \[(?<time>[^\]]*)\] "(?<method>\S+)(?: +(?<path>[^\"]*?)(?: +\S*)?)?" (?<code>[^ ]*) (?<size>[^ ]*)(?: "(?<referer>[^\"]*)" "(?<agent>[^\"]*)")?$
        Time_Key    time
        Time_Format %d/%b/%Y:%H:%M:%S %z
    [PARSER]
        Name        json
        Format      json
        Time_Key    time
        Time_Format %d/%b/%Y:%H:%M:%S %z
    [PARSER]
        Name        docker
        Format      json
        Time_Key    time
        Time_Format %Y-%m-%dT%H:%M:%S.%L
        Time_Keep   On
    [PARSER]
        Name        syslog
        Format      regex
        Regex       ^\<(?<pri>[0-9]+)\>(?<time>[^ ]* {1,2}[^ ]* [^ ]*) (?<host>[^ ]*) (?<ident>[a-zA-Z0-9_\/\.\-]*)(?:\[(?<pid>[0-9]+)\])?(?:[^\:]*\:)? *(?<message>.*)$
        Time_Key    time
        Time_Format %b %d %H:%M:%S
    [PARSER]
        Name        containerd_with_glog
        Format      regex
        Regex       ^(?<time>.+) (?<stream>stdout|stderr) [^ ]* ((?<severity>\w)\d{4} [^\s]*\s+\d+\s+(?<source_file>[^ \]]+)\:(?<source_line>\d+)\]\s)?(?<message>.*)$
        Time_Key    time
        Time_Format %Y-%m-%dT%H:%M:%S.%L%z
    [PARSER]
        Name        glog
        Format      regex
        Regex       ^(?<severity>\w)(?<time>\d{4} [^\s]*)\s+(?<pid>\d+)\s+(?<source_file>[^ \]]+)\:(?<source_line>\d+)\]\s(?<message>.*)$
        Time_Key    time
        Time_Format %m%d %H:%M:%S.%L
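
A quick way to sanity-check the two Lua callbacks is to run them with a standalone Lua interpreter before deploying the ConfigMap. The sketch below is not part of the ConfigMap; it assumes get-size.lua and drop-payload.lua have been copied out of the data section into the working directory, and the record contents, tag, and file name test-callbacks.lua are made up for illustration.

    -- test-callbacks.lua: exercise the filter callbacks locally (assumed
    -- file names match the ConfigMap keys; record and tag are made up).
    dofile("get-size.lua")
    dofile("drop-payload.lua")

    -- Fake record whose 'log' field is well above the 255000-byte threshold.
    local record = { log = string.rep("x", 300000), stream = "stdout" }
    local tag = "kube.var.log.containers.example.log"
    local ts = os.time()

    local code, new_ts, rec = cb_print(tag, ts, record)
    print("log_size:", rec["log_size"], "mytag:", rec["mytag"])  -- expect mytag = "biglog"

    code, new_ts, rec = cb_drop(tag, new_ts, rec)
    print("log after cb_drop:", rec["log"])  -- payload replaced by the short notice

Running it with the lua CLI should report a log_size above 255000, mytag set to biglog, and a log field replaced by the notice text, mirroring what the filter chain does before a large record reaches the stackdriver output.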