Skip to content

Instantly share code, notes, and snippets.

@amitkumarj441
Created August 25, 2017 13:38
Gist: https://gist.github.com/amitkumarj441/b7b3f5c846c7a8dab0be501406ec25fe
Fluentd Pod log
[root@viaq ~]# oc logs -f logging-fluentd-lqf3n
+ fluentdargs=-vv
+ echo '>>>>>> ENVIRONMENT VARS <<<<<'
+ env
+ sort
>>>>>> ENVIRONMENT VARS <<<<<
DATA_VERSION=1.6.0
ES_CA=/etc/fluent/keys/ca
ES_CLIENT_CERT=/etc/fluent/keys/cert
ES_CLIENT_KEY=/etc/fluent/keys/key
ES_COPY=false
ES_COPY_CA=
ES_COPY_CLIENT_CERT=
ES_COPY_CLIENT_KEY=
ES_COPY_HOST=
ES_COPY_PASSWORD=
ES_COPY_PORT=
ES_COPY_SCHEME=https
ES_COPY_USERNAME=
ES_HOST=logging-es
ES_PORT=9200
FLUENTD_VERSION=0.12.39
GEM_HOME=/opt/app-root/src
HOME=/opt/app-root/src
HOSTNAME=logging-fluentd-lqf3n
JOURNAL_READ_FROM_HEAD=
JOURNAL_SOURCE=
K8S_HOST_URL=https://kubernetes.default.svc.cluster.local
KUBERNETES_PORT=tcp://172.30.0.1:443
KUBERNETES_PORT_443_TCP=tcp://172.30.0.1:443
KUBERNETES_PORT_443_TCP_ADDR=172.30.0.1
KUBERNETES_PORT_443_TCP_PORT=443
KUBERNETES_PORT_443_TCP_PROTO=tcp
KUBERNETES_PORT_53_TCP=tcp://172.30.0.1:53
KUBERNETES_PORT_53_TCP_ADDR=172.30.0.1
KUBERNETES_PORT_53_TCP_PORT=53
KUBERNETES_PORT_53_TCP_PROTO=tcp
KUBERNETES_PORT_53_UDP=udp://172.30.0.1:53
KUBERNETES_PORT_53_UDP_ADDR=172.30.0.1
KUBERNETES_PORT_53_UDP_PORT=53
KUBERNETES_PORT_53_UDP_PROTO=udp
KUBERNETES_SERVICE_HOST=172.30.0.1
KUBERNETES_SERVICE_PORT=443
KUBERNETES_SERVICE_PORT_DNS=53
KUBERNETES_SERVICE_PORT_DNS_TCP=53
KUBERNETES_SERVICE_PORT_HTTPS=443
LOGGING_ES_CLUSTER_PORT=tcp://172.30.63.148:9300
LOGGING_ES_CLUSTER_PORT_9300_TCP=tcp://172.30.63.148:9300
LOGGING_ES_CLUSTER_PORT_9300_TCP_ADDR=172.30.63.148
LOGGING_ES_CLUSTER_PORT_9300_TCP_PORT=9300
LOGGING_ES_CLUSTER_PORT_9300_TCP_PROTO=tcp
LOGGING_ES_CLUSTER_SERVICE_HOST=172.30.63.148
LOGGING_ES_CLUSTER_SERVICE_PORT=9300
LOGGING_ES_PORT=tcp://172.30.39.221:9200
LOGGING_ES_PORT_9200_TCP=tcp://172.30.39.221:9200
LOGGING_ES_PORT_9200_TCP_ADDR=172.30.39.221
LOGGING_ES_PORT_9200_TCP_PORT=9200
LOGGING_ES_PORT_9200_TCP_PROTO=tcp
LOGGING_ES_SERVICE_HOST=172.30.39.221
LOGGING_ES_SERVICE_PORT=9200
LOGGING_KIBANA_PORT=tcp://172.30.204.171:443
LOGGING_KIBANA_PORT_443_TCP=tcp://172.30.204.171:443
LOGGING_KIBANA_PORT_443_TCP_ADDR=172.30.204.171
LOGGING_KIBANA_PORT_443_TCP_PORT=443
LOGGING_KIBANA_PORT_443_TCP_PROTO=tcp
LOGGING_KIBANA_SERVICE_HOST=172.30.204.171
LOGGING_KIBANA_SERVICE_PORT=443
LOGGING_MUX_PORT=tcp://172.30.212.89:24284
LOGGING_MUX_PORT_23456_TCP=tcp://172.30.212.89:23456
LOGGING_MUX_PORT_23456_TCP_ADDR=172.30.212.89
LOGGING_MUX_PORT_23456_TCP_PORT=23456
LOGGING_MUX_PORT_23456_TCP_PROTO=tcp
LOGGING_MUX_PORT_24284_TCP=tcp://172.30.212.89:24284
LOGGING_MUX_PORT_24284_TCP_ADDR=172.30.212.89
LOGGING_MUX_PORT_24284_TCP_PORT=24284
LOGGING_MUX_PORT_24284_TCP_PROTO=tcp
LOGGING_MUX_SERVICE_HOST=172.30.212.89
LOGGING_MUX_SERVICE_PORT=24284
LOGGING_MUX_SERVICE_PORT_MUX_FORWARD=24284
LOGGING_MUX_SERVICE_PORT_TCP_JSON=23456
OPS_CA=/etc/fluent/keys/ca
OPS_CLIENT_CERT=/etc/fluent/keys/cert
OPS_CLIENT_KEY=/etc/fluent/keys/key
OPS_COPY_CA=
OPS_COPY_CLIENT_CERT=
OPS_COPY_CLIENT_KEY=
OPS_COPY_HOST=
OPS_COPY_PASSWORD=
OPS_COPY_PORT=
OPS_COPY_SCHEME=https
OPS_COPY_USERNAME=
OPS_HOST=logging-es
OPS_PORT=9200
PATH=/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
PWD=/opt/app-root/src
RUBY_VERSION=2.0
SHLVL=1
USE_JOURNAL=
VERBOSE=true
_=/usr/bin/env
+ echo '>>>>>>>>>>>>><<<<<<<<<<<<<<<<'
+ '[' -z '' -o '' = false ']'
+ '[' -z '' -o '' = true ']'
+ '[' -z '' ']'
+ '[' -d /var/log/journal ']'
+ export JOURNAL_SOURCE=/run/log/journal
+ JOURNAL_SOURCE=/run/log/journal
+ '[' -z '' ']'
+ docker_uses_journal
+ grep -q '^[^#].*"log-driver":' /etc/docker/daemon.json
>>>>>>>>>>>>><<<<<<<<<<<<<<<<
+ grep -q '^OPTIONS='\''[^'\'']*--log-driver=journald' /etc/sysconfig/docker
+ return 0
+ export USE_JOURNAL=true
+ USE_JOURNAL=true
++ /usr/sbin/ip -4 addr show dev eth0
++ grep inet
++ sed -e 's/[ \t]*inet \([0-9.]*\).*/\1/'
+ IPADDR4=10.128.0.131
++ grep inet6
++ /usr/sbin/ip -6 addr show dev eth0
++ sed 's/[ \t]*inet6 \([a-f0-9:]*\).*/\1/'
+ IPADDR6=fe80::24c0:d1ff:fe4c:13ea
+ export IPADDR4 IPADDR6
+ BUFFER_SIZE_LIMIT=16777216
+ CFG_DIR=/etc/fluent/configs.d
+ '[' '' = true ']'
+ ruby generate_throttle_configs.rb
+ rm -f '/etc/fluent/configs.d/openshift/*mux*.conf'
+ '[' true = true ']'
+ export BUFFER_QUEUE_FULL_ACTION=block
+ BUFFER_QUEUE_FULL_ACTION=block
+ K8S_FILTER_REMOVE_KEYS=log,stream,MESSAGE,_SOURCE_REALTIME_TIMESTAMP,__REALTIME_TIMESTAMP,CONTAINER_ID,CONTAINER_ID_FULL,CONTAINER_NAME,PRIORITY,_BOOT_ID,_CAP_EFFECTIVE,_CMDLINE,_COMM,_EXE,_GID,_HOSTNAME,_MACHINE_ID,_PID,_SELINUX_CONTEXT,_SYSTEMD_CGROUP,_SYSTEMD_SLICE,_SYSTEMD_UNIT,_TRANSPORT,_UID,_AUDIT_LOGINUID,_AUDIT_SESSION,_SYSTEMD_OWNER_UID,_SYSTEMD_SESSION,_SYSTEMD_USER_UNIT,CODE_FILE,CODE_FUNCTION,CODE_LINE,ERRNO,MESSAGE_ID,RESULT,UNIT,_KERNEL_DEVICE,_KERNEL_SUBSYSTEM,_UDEV_SYSNAME,_UDEV_DEVNODE,_UDEV_DEVLINK,SYSLOG_FACILITY,SYSLOG_IDENTIFIER,SYSLOG_PID
+ '[' -n '' ']'
+ rm -f /etc/fluent/configs.d/openshift/filter-pre-mux-client.conf /etc/fluent/configs.d/openshift/output-pre-mux-client.conf
+ export K8S_FILTER_REMOVE_KEYS
+ '[' -n '' ']'
+ NUM_OUTPUTS=2
+ '[' false = true ']'
+ FILE_BUFFER_PATH=/var/lib/fluentd
+ mkdir -p /var/lib/fluentd
++ df -B1 /var/lib/fluentd
++ grep -v Filesystem
++ awk '{print $2}'
+ DF_LIMIT=10725883904
+ DF_LIMIT=10725883904
++ expr 10725883904 / 4
+ DF_LIMIT=2681470976
+ '[' 2681470976 -eq 0 ']'
++ echo 2Gi
++ sed -e 's/[Kk]/*1024/g;s/[Mm]/*1024*1024/g;s/[Gg]/*1024*1024*1024/g;s/i//g'
++ bc
+ TOTAL_LIMIT=2147483648
+ '[' 2147483648 -le 0 ']'
++ expr 2147483648 '*' 2
+ TOTAL_LIMIT=4294967296
+ '[' 2681470976 -lt 4294967296 ']'
+ echo 'WARNING: Available disk space (2681470976 bytes) is less than the user specified file buffer limit ( times 2).'
+ TOTAL_LIMIT=2681470976
WARNING: Available disk space (2681470976 bytes) is less than the user specified file buffer limit ( times 2).
++ echo 16777216
++ sed -e 's/[Kk]/*1024/g;s/[Mm]/*1024*1024/g;s/[Gg]/*1024*1024*1024/g;s/i//g'
++ bc
+ BUFFER_SIZE_LIMIT=16777216
+ BUFFER_SIZE_LIMIT=16777216
++ expr 2681470976 / 2
+ TOTAL_BUFFER_SIZE_LIMIT=1340735488
+ '[' -z 1340735488 -o 1340735488 -eq 0 ']'
++ expr 1340735488 / 16777216
+ BUFFER_QUEUE_LIMIT=79
+ '[' -z 79 -o 79 -eq 0 ']'
+ export BUFFER_QUEUE_LIMIT BUFFER_SIZE_LIMIT
+ OPS_COPY_HOST=
+ OPS_COPY_PORT=
+ OPS_COPY_SCHEME=https
+ OPS_COPY_CLIENT_CERT=
+ OPS_COPY_CLIENT_KEY=
+ OPS_COPY_CA=
+ OPS_COPY_USERNAME=
+ OPS_COPY_PASSWORD=
+ export OPS_COPY_HOST OPS_COPY_PORT OPS_COPY_SCHEME OPS_COPY_CLIENT_CERT OPS_COPY_CLIENT_KEY OPS_COPY_CA OPS_COPY_USERNAME OPS_COPY_PASSWORD
+ '[' false = true -a -z '' ']'
+ '[' false = true ']'
+ echo
+ echo
+ '[' '' = true ']'
+ rm -f /etc/fluent/configs.d/openshift/input-pre-monitor.conf
+ '[' '' = true ']'
+ rm -f /etc/fluent/configs.d/openshift/input-pre-debug.conf
+ '[' '' = true ']'
+ echo 'umounts of dead containers will fail. Ignoring...'
+ umount /var/lib/docker/containers/027944197e07023ae69b1c90c252f722c5ca33577b2beb61077757aac065851c/shm /var/lib/docker/containers/120a08168a428e09dc5c304dcd90a63412ca34babdb0555ddf85df0fc0bdab72/shm /var/lib/docker/containers/12aeedcce96f166cd2c35a571b960945d9bcb24ca7cb92775c90a52d27011612/shm /var/lib/docker/containers/16de2d48bcd97f513bb97e662ae95f1d8ce0f8dff2a90933a53ac6c9fe874804/shm /var/lib/docker/containers/211ad6f2f241f3719770f98745564576c3e7056bfdd2254e6a7a12cb88a79d0d/shm /var/lib/docker/containers/24874b22855a6c379a89e242c02023049a67ca48ff4ea3204ce34fbfb9e5f923/shm /var/lib/docker/containers/4742b211afb843601f5eed649d299518404fb2e4ff3dee72c79d11dd6e798e20/shm /var/lib/docker/containers/5635b4f8f062547cbe3e3bee71aef370fd70dcbce8066914d85b65e1dd56467a/shm /var/lib/docker/containers/587038da3f0f9f284995f09026f73c39ba726eabf13e046db05dd5f9cde71b51/shm /var/lib/docker/containers/64769f2d7c97413a62398304ad2fd978057d8322b01854172a0662c987af4652/shm /var/lib/docker/containers/6d716c2cfe290e302109d5313224ba01dc65023d9699052aadda511ada40db2b/shm /var/lib/docker/containers/73d9d5a2274c2b6d272109fac66b2f0328738ff3ddd3bd5f849819de0dd3c45b/shm /var/lib/docker/containers/824bed761d957873644dcba87e0ddb026f1b30f16ce698500123d22235208e66/shm /var/lib/docker/containers/85cf7a18881ab6237510155b66d0bc553a47c85fe9dc613a075309c13fc6eb64/shm /var/lib/docker/containers/9a57779a8ed35b4716969d345696d75f13f0b8d6836f3b5840731da7d4e49f3c/shm /var/lib/docker/containers/a895a22b9a83f030a9a7589ff887481780908c640276806bc878b0392ff09e1d/shm /var/lib/docker/containers/b186a651311c1f2eaadea66b70b64e2b88e7c5a8eca42054fc3c02d28049ff26/shm /var/lib/docker/containers/b33676fdbb79016bd88de63859c03c74539637b86cc93747936ff7b4f7fe667d/shm /var/lib/docker/containers/b44f343f8223bd483a529ec8e2bf931b0d2853958ebe013922829224fc706d07/shm /var/lib/docker/containers/b95ed7083ba746090254ad8393f582321297a6c494af82608727007059475c61/shm 
/var/lib/docker/containers/c9509a13daa3ee76715ef01b490c6178c931428f53320e65c3b4a6df82f24707/shm /var/lib/docker/containers/d5f021fe3163c4524fbcf8651c16c25cc70b387b00cfac36aea91a01d8c57477/shm /var/lib/docker/containers/d6ac6bad69916860c1602521a3d1c16c06a3873acfaaa623f2153034a940923a/shm /var/lib/docker/containers/ef1904492c4700203740951b95148c54076ec32aa197132f87e52d0acc84910b/shm /var/lib/docker/containers/f8ff35b6a4ae024a9593d3f6c4389f315d302a3540e41f0cc7287d5af5d88e34/shm /var/lib/docker/containers/fb058c205baed27a55b55774d5e3c63094d07494fb1ad04a049cf7e7ba939b67/shm
umounts of dead containers will fail. Ignoring...
umount: /var/lib/docker/containers/120a08168a428e09dc5c304dcd90a63412ca34babdb0555ddf85df0fc0bdab72/shm: not mounted
umount: /var/lib/docker/containers/12aeedcce96f166cd2c35a571b960945d9bcb24ca7cb92775c90a52d27011612/shm: not mounted
umount: /var/lib/docker/containers/16de2d48bcd97f513bb97e662ae95f1d8ce0f8dff2a90933a53ac6c9fe874804/shm: not mounted
umount: /var/lib/docker/containers/24874b22855a6c379a89e242c02023049a67ca48ff4ea3204ce34fbfb9e5f923/shm: not mounted
umount: /var/lib/docker/containers/5635b4f8f062547cbe3e3bee71aef370fd70dcbce8066914d85b65e1dd56467a/shm: not mounted
umount: /var/lib/docker/containers/6d716c2cfe290e302109d5313224ba01dc65023d9699052aadda511ada40db2b/shm: not mounted
umount: /var/lib/docker/containers/824bed761d957873644dcba87e0ddb026f1b30f16ce698500123d22235208e66/shm: not mounted
umount: /var/lib/docker/containers/85cf7a18881ab6237510155b66d0bc553a47c85fe9dc613a075309c13fc6eb64/shm: not mounted
umount: /var/lib/docker/containers/9a57779a8ed35b4716969d345696d75f13f0b8d6836f3b5840731da7d4e49f3c/shm: not mounted
umount: /var/lib/docker/containers/b186a651311c1f2eaadea66b70b64e2b88e7c5a8eca42054fc3c02d28049ff26/shm: not mounted
umount: /var/lib/docker/containers/b33676fdbb79016bd88de63859c03c74539637b86cc93747936ff7b4f7fe667d/shm: not mounted
umount: /var/lib/docker/containers/b44f343f8223bd483a529ec8e2bf931b0d2853958ebe013922829224fc706d07/shm: not mounted
umount: /var/lib/docker/containers/b95ed7083ba746090254ad8393f582321297a6c494af82608727007059475c61/shm: not mounted
umount: /var/lib/docker/containers/d5f021fe3163c4524fbcf8651c16c25cc70b387b00cfac36aea91a01d8c57477/shm: not mounted
umount: /var/lib/docker/containers/d6ac6bad69916860c1602521a3d1c16c06a3873acfaaa623f2153034a940923a/shm: not mounted
umount: /var/lib/docker/containers/ef1904492c4700203740951b95148c54076ec32aa197132f87e52d0acc84910b/shm: not mounted
umount: /var/lib/docker/containers/f8ff35b6a4ae024a9593d3f6c4389f315d302a3540e41f0cc7287d5af5d88e34/shm: not mounted
umount: /var/lib/docker/containers/fb058c205baed27a55b55774d5e3c63094d07494fb1ad04a049cf7e7ba939b67/shm: not mounted
+ :
+ [[ -n '' ]]
+ exec fluentd -vv
2017-08-25 13:36:45 +0000 [info]: fluent/supervisor.rb:471:read_config: reading config file path="/etc/fluent/fluent.conf"
2017-08-25 13:36:50 +0000 [info]: fluent/supervisor.rb:337:supervise: starting fluentd-0.12.39
2017-08-25 13:36:50 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered buffer plugin 'file'
2017-08-25 13:36:50 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered buffer plugin 'memory'
2017-08-25 13:36:51 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered filter plugin 'grep'
2017-08-25 13:36:51 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered filter plugin 'parser'
2017-08-25 13:36:51 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered filter plugin 'record_transformer'
2017-08-25 13:36:51 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered filter plugin 'stdout'
2017-08-25 13:36:51 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered input plugin 'debug_agent'
2017-08-25 13:36:51 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered input plugin 'dummy'
2017-08-25 13:36:51 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered input plugin 'exec'
2017-08-25 13:36:51 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered input plugin 'forward'
2017-08-25 13:36:51 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered input plugin 'gc_stat'
2017-08-25 13:36:51 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered input plugin 'http'
2017-08-25 13:36:51 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered input plugin 'monitor_agent'
2017-08-25 13:36:51 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered input plugin 'object_space'
2017-08-25 13:36:52 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered input plugin 'status'
2017-08-25 13:36:52 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered input plugin 'unix'
2017-08-25 13:36:52 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered input plugin 'syslog'
2017-08-25 13:36:53 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered input plugin 'tail'
2017-08-25 13:36:53 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered input plugin 'tcp'
2017-08-25 13:36:53 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered input plugin 'udp'
2017-08-25 13:36:53 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered output plugin 'copy'
2017-08-25 13:36:53 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered output plugin 'exec'
2017-08-25 13:36:53 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered output plugin 'exec_filter'
2017-08-25 13:36:53 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered output plugin 'file'
2017-08-25 13:36:53 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered output plugin 'forward'
2017-08-25 13:36:53 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered output plugin 'null'
2017-08-25 13:36:53 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered output plugin 'relabel'
2017-08-25 13:36:53 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered output plugin 'roundrobin'
2017-08-25 13:36:53 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered output plugin 'stdout'
2017-08-25 13:36:53 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered output plugin 'tcp'
2017-08-25 13:36:53 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered output plugin 'unix'
2017-08-25 13:36:53 +0000 [info]: fluent/engine.rb:126:block in configure: gem 'fluent-plugin-elasticsearch' version '1.9.5'
2017-08-25 13:36:53 +0000 [info]: fluent/engine.rb:126:block in configure: gem 'fluent-plugin-kubernetes_metadata_filter' version '0.27.0'
2017-08-25 13:36:53 +0000 [info]: fluent/engine.rb:126:block in configure: gem 'fluent-plugin-rewrite-tag-filter' version '1.5.6'
2017-08-25 13:36:53 +0000 [info]: fluent/engine.rb:126:block in configure: gem 'fluent-plugin-secure-forward' version '0.4.5'
2017-08-25 13:36:53 +0000 [info]: fluent/engine.rb:126:block in configure: gem 'fluent-plugin-systemd' version '0.0.8'
2017-08-25 13:36:53 +0000 [info]: fluent/engine.rb:126:block in configure: gem 'fluent-plugin-viaq_data_model' version '0.0.5'
2017-08-25 13:36:53 +0000 [info]: fluent/engine.rb:126:block in configure: gem 'fluentd' version '0.12.39'
2017-08-25 13:36:53 +0000 [info]: fluent/agent.rb:141:add_filter: adding filter in @INGRESS pattern="journal" type="grep"
2017-08-25 13:36:53 +0000 [warn]: config/section.rb:124:block in generate: 'exclude1' parameter is deprecated: Use <exclude> section
2017-08-25 13:36:53 +0000 [info]: fluent/agent.rb:129:add_match: adding match in @INGRESS pattern="journal" type="rewrite_tag_filter"
2017-08-25 13:36:54 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered output plugin 'rewrite_tag_filter'
2017-08-25 13:36:54 +0000 [info]: plugin/out_rewrite_tag_filter.rb:55:block in configure: adding rewrite_tag_filter rule: rewriterule1 ["CONTAINER_NAME", /^k8s_kibana\./, "", "kubernetes.journal.container.kibana"]
2017-08-25 13:36:54 +0000 [info]: plugin/out_rewrite_tag_filter.rb:55:block in configure: adding rewrite_tag_filter rule: rewriterule2 ["CONTAINER_NAME", /^k8s_[^\.]+\.[^_]+_[^_]+_default_/, "", "kubernetes.journal.container._default_"]
2017-08-25 13:36:54 +0000 [info]: plugin/out_rewrite_tag_filter.rb:55:block in configure: adding rewrite_tag_filter rule: rewriterule3 ["CONTAINER_NAME", /^k8s_[^\.]+\.[^_]+_[^_]+_openshift-infra_/, "", "kubernetes.journal.container._openshift-infra_"]
2017-08-25 13:36:54 +0000 [info]: plugin/out_rewrite_tag_filter.rb:55:block in configure: adding rewrite_tag_filter rule: rewriterule4 ["CONTAINER_NAME", /^k8s_[^\.]+\.[^_]+_[^_]+_openshift_/, "", "kubernetes.journal.container._openshift_"]
2017-08-25 13:36:54 +0000 [info]: plugin/out_rewrite_tag_filter.rb:55:block in configure: adding rewrite_tag_filter rule: rewriterule5 ["CONTAINER_NAME", /^k8s_.*fluentd/, "", "kubernetes.journal.container.fluentd"]
2017-08-25 13:36:54 +0000 [info]: plugin/out_rewrite_tag_filter.rb:55:block in configure: adding rewrite_tag_filter rule: rewriterule6 ["CONTAINER_NAME", /^k8s_/, "", "kubernetes.journal.container"]
2017-08-25 13:36:54 +0000 [info]: plugin/out_rewrite_tag_filter.rb:55:block in configure: adding rewrite_tag_filter rule: rewriterule7 ["CONTAINER_NAME", /_openshift-infra_/, "", "journal.container._openshift-infra_"]
2017-08-25 13:36:54 +0000 [info]: plugin/out_rewrite_tag_filter.rb:55:block in configure: adding rewrite_tag_filter rule: rewriterule8 ["CONTAINER_NAME", /_openshift_/, "", "journal.container._openshift_"]
2017-08-25 13:36:54 +0000 [info]: plugin/out_rewrite_tag_filter.rb:55:block in configure: adding rewrite_tag_filter rule: rewriterule9 ["_TRANSPORT", /.+/, "", "journal.system"]
2017-08-25 13:36:54 +0000 [info]: fluent/agent.rb:141:add_filter: adding filter in @INGRESS pattern="kubernetes.**" type="kubernetes_metadata"
2017-08-25 13:36:54 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered filter plugin 'kubernetes_metadata'
2017-08-25 13:37:06 +0000 [info]: fluent/agent.rb:141:add_filter: adding filter in @INGRESS pattern="**kibana**" type="record_transformer"
2017-08-25 13:37:06 +0000 [info]: fluent/agent.rb:141:add_filter: adding filter in @INGRESS pattern="kubernetes.journal.container**" type="record_transformer"
2017-08-25 13:37:06 +0000 [info]: fluent/agent.rb:141:add_filter: adding filter in @INGRESS pattern="kubernetes.var.log.containers**" type="record_transformer"
2017-08-25 13:37:06 +0000 [info]: fluent/agent.rb:129:add_match: adding match in @INGRESS pattern="systemd.origin-master" type="rewrite_tag_filter"
2017-08-25 13:37:06 +0000 [info]: plugin/out_rewrite_tag_filter.rb:55:block in configure: adding rewrite_tag_filter rule: rewriterule1 ["_SYSTEMD_UNIT", /^origin-master.service/, "", "journal.system.kubernetes.master"]
2017-08-25 13:37:06 +0000 [info]: plugin/out_rewrite_tag_filter.rb:55:block in configure: adding rewrite_tag_filter rule: rewriterule2 ["_SYSTEMD_UNIT", /^origin-node.service/, "", "journal.system.kubernetes.node"]
2017-08-25 13:37:06 +0000 [info]: fluent/agent.rb:141:add_filter: adding filter in @INGRESS pattern="journal.system.kubernetes.master" type="record_transformer"
2017-08-25 13:37:06 +0000 [info]: fluent/agent.rb:141:add_filter: adding filter in @INGRESS pattern="system.var.log**" type="record_transformer"
2017-08-25 13:37:06 +0000 [info]: fluent/agent.rb:141:add_filter: adding filter in @INGRESS pattern="journal.system**" type="record_transformer"
2017-08-25 13:37:06 +0000 [info]: fluent/agent.rb:141:add_filter: adding filter in @INGRESS pattern="**" type="viaq_data_model"
2017-08-25 13:37:07 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered filter plugin 'viaq_data_model'
2017-08-25 13:37:07 +0000 [info]: fluent/agent.rb:129:add_match: adding match in @INGRESS pattern="journal.system** system.var.log** **_default_** **_openshift_** **_openshift-infra_** mux.ops" type="copy"
2017-08-25 13:37:07 +0000 [debug]: plugin/out_copy.rb:44:block in configure: adding store type="elasticsearch_dynamic"
2017-08-25 13:37:09 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered output plugin 'elasticsearch'
2017-08-25 13:37:09 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered output plugin 'elasticsearch_dynamic'
2017-08-25 13:37:10 +0000 [warn]: fluent/buffer.rb:164:configure: 'block' action stops input process until the buffer full is resolved. Check your pipeline this action is fit or not
2017-08-25 13:37:10 +0000 [info]: fluent/agent.rb:129:add_match: adding match in @INGRESS pattern="**" type="copy"
2017-08-25 13:37:10 +0000 [debug]: plugin/out_copy.rb:44:block in configure: adding store type="elasticsearch_dynamic"
2017-08-25 13:37:10 +0000 [warn]: fluent/buffer.rb:164:configure: 'block' action stops input process until the buffer full is resolved. Check your pipeline this action is fit or not
2017-08-25 13:37:10 +0000 [info]: fluent/root_agent.rb:152:add_source: adding source type="systemd"
2017-08-25 13:37:10 +0000 [trace]: fluent/plugin.rb:122:register_impl: registered input plugin 'systemd'
2017-08-25 13:37:10 +0000 [info]: fluent/engine.rb:133:configure: using configuration file: <ROOT>
<source>
@type systemd
@label @INGRESS
path /run/log/journal
pos_file /var/log/journal.pos
tag journal
</source>
<label @INGRESS>
<filter journal>
@type grep
exclude1 PRIORITY ^7$
</filter>
<match journal>
@type rewrite_tag_filter
rewriterule1 CONTAINER_NAME ^k8s_kibana\. kubernetes.journal.container.kibana
rewriterule2 CONTAINER_NAME ^k8s_[^\.]+\.[^_]+_[^_]+_default_ kubernetes.journal.container._default_
rewriterule3 CONTAINER_NAME ^k8s_[^\.]+\.[^_]+_[^_]+_openshift-infra_ kubernetes.journal.container._openshift-infra_
rewriterule4 CONTAINER_NAME ^k8s_[^\.]+\.[^_]+_[^_]+_openshift_ kubernetes.journal.container._openshift_
rewriterule5 CONTAINER_NAME ^k8s_.*fluentd kubernetes.journal.container.fluentd
rewriterule6 CONTAINER_NAME ^k8s_ kubernetes.journal.container
rewriterule7 CONTAINER_NAME _openshift-infra_ journal.container._openshift-infra_
rewriterule8 CONTAINER_NAME _openshift_ journal.container._openshift_
rewriterule9 _TRANSPORT .+ journal.system
</match>
<filter kubernetes.**>
type kubernetes_metadata
kubernetes_url https://kubernetes.default.svc.cluster.local
bearer_token_file /var/run/secrets/kubernetes.io/serviceaccount/token
ca_file /var/run/secrets/kubernetes.io/serviceaccount/ca.crt
include_namespace_id true
use_journal true
container_name_to_kubernetes_regexp ^(?<name_prefix>[^_]+)_(?<container_name>[^\._]+)(\.(?<container_hash>[^_]+))?_(?<pod_name>[^_]+)_(?<namespace>[^_]+)_[^_]+_[^_]+$
</filter>
<filter **kibana**>
@type record_transformer
enable_ruby
remove_keys req,res,msg,name,level,v,pid,err
<record>
log ${(err rescue nil) || (msg rescue nil) || record['MESSAGE'] || log}
</record>
</filter>
<filter kubernetes.journal.container**>
@type record_transformer
enable_ruby
remove_keys log,stream,MESSAGE,_SOURCE_REALTIME_TIMESTAMP,__REALTIME_TIMESTAMP,CONTAINER_ID,CONTAINER_ID_FULL,CONTAINER_NAME,PRIORITY,_BOOT_ID,_CAP_EFFECTIVE,_CMDLINE,_COMM,_EXE,_GID,_HOSTNAME,_MACHINE_ID,_PID,_SELINUX_CONTEXT,_SYSTEMD_CGROUP,_SYSTEMD_SLICE,_SYSTEMD_UNIT,_TRANSPORT,_UID,_AUDIT_LOGINUID,_AUDIT_SESSION,_SYSTEMD_OWNER_UID,_SYSTEMD_SESSION,_SYSTEMD_USER_UNIT,CODE_FILE,CODE_FUNCTION,CODE_LINE,ERRNO,MESSAGE_ID,RESULT,UNIT,_KERNEL_DEVICE,_KERNEL_SUBSYSTEM,_UDEV_SYSNAME,_UDEV_DEVNODE,_UDEV_DEVLINK,SYSLOG_FACILITY,SYSLOG_IDENTIFIER,SYSLOG_PID
<record>
hostname ${(record['kubernetes']['host'] rescue nil) || File.open('/etc/docker-hostname') { |f| f.readline }.rstrip}
message ${record['message'] || record['MESSAGE'] || log}
level ${["emerg", "alert", "crit", "err", "warning", "notice", "info", "debug", "trace", "unknown"][begin; ('%d' % record['PRIORITY'] || 9).to_i; rescue; 9; end]}
time ${Time.at((record["_SOURCE_REALTIME_TIMESTAMP"] || record["__REALTIME_TIMESTAMP"]).to_f / 1000000.0).utc.to_datetime.rfc3339(6)}
pipeline_metadata {"collector":{"ipaddr4":"${ENV['IPADDR4']}","ipaddr6":"${ENV['IPADDR6']}","inputname":"fluent-plugin-systemd","name":"fluentd openshift","received_at":"${Time.at((record['_SOURCE_REALTIME_TIMESTAMP'] || record['__REALTIME_TIMESTAMP']).to_f / 1000000.0).utc.to_datetime.rfc3339(6)}","version":"${ENV['FLUENTD_VERSION'] + ' ' + ENV['DATA_VERSION']}"}}
systemd {"t":{"MACHINE_ID":"${record['_MACHINE_ID']}","AUDIT_LOGINUID":"${record['_AUDIT_LOGINUID']}","AUDIT_SESSION":"${record['_AUDIT_SESSION']}","BOOT_ID":"${record['_BOOT_ID']}","CAP_EFFECTIVE":"${record['_CAP_EFFECTIVE']}","CMDLINE":"${record['_CMDLINE']}","COMM":"${record['_COMM']}","EXE":"${record['_EXE']}","GID":"${record['_GID']}","HOSTNAME":"${record['_HOSTNAME']}","PID":"${record['_PID']}","SELINUX_CONTEXT":"${record['_SELINUX_CONTEXT']}","SOURCE_REALTIME_TIMESTAMP":"${record['_SOURCE_REALTIME_TIMESTAMP']}","SYSTEMD_CGROUP":"${record['_SYSTEMD_CGROUP']}","SYSTEMD_OWNER_UID":"${record['_SYSTEMD_OWNER_UID']}","SYSTEMD_SESSION":"${record['_SYSTEMD_SESSION']}","SYSTEMD_SLICE":"${record['_SYSTEMD_SLICE']}","SYSTEMD_UNIT":"${record['_SYSTEMD_UNIT']}","SYSTEMD_USER_UNIT":"${record['_SYSTEMD_USER_UNIT']}","TRANSPORT":"${record['_TRANSPORT']}","UID":"${record['_UID']}"},"u":{"CODE_FILE":"${record['CODE_FILE']}","CODE_FUNCTION":"${record['CODE_FUNCTION']}","CODE_LINE":"${record['CODE_LINE']}","ERRNO":"${record['ERRNO']}","MESSAGE_ID":"${record['MESSAGE_ID']}","RESULT":"${record['RESULT']}","UNIT":"${record['UNIT']}","SYSLOG_FACILITY":"${record['SYSLOG_FACILITY']}","SYSLOG_IDENTIFIER":"${record['SYSLOG_IDENTIFIER']}","SYSLOG_PID":"${record['SYSLOG_PID']}"},"k":{"KERNEL_DEVICE":"${record['_KERNEL_DEVICE']}","KERNEL_SUBSYSTEM":"${record['_KERNEL_SUBSYSTEM']}","UDEV_SYSNAME":"${record['_UDEV_SYSNAME']}","UDEV_DEVNODE":"${record['_UDEV_DEVNODE']}","UDEV_DEVLINK":"${record['_UDEV_DEVLINK']}"}}
</record>
</filter>
<filter kubernetes.var.log.containers**>
@type record_transformer
enable_ruby
remove_keys log,stream
<record>
hostname ${(record['kubernetes']['host'] rescue nil) || File.open('/etc/docker-hostname') { |f| f.readline }.rstrip}
message ${(message rescue nil) || log}
pipeline_metadata {"collector":{"ipaddr4":"${ENV['IPADDR4']}","ipaddr6":"${ENV['IPADDR6']}","inputname":"fluent-plugin-in_tail","name":"fluentd openshift","received_at":"${record['time'].to_s}","version":"${ENV['FLUENTD_VERSION'] + ' ' + ENV['DATA_VERSION']}"}}
level ${record['stream'] == 'stdout' ? 'info' : 'err'}
time ${time.utc.to_datetime.rfc3339(6)}
</record>
</filter>
<match systemd.origin-master>
@type rewrite_tag_filter
rewriterule1 _SYSTEMD_UNIT ^origin-master.service journal.system.kubernetes.master
rewriterule2 _SYSTEMD_UNIT ^origin-node.service journal.system.kubernetes.node
</match>
<filter journal.system.kubernetes.master>
@type record_transformer
enable_ruby
<record>
k8s_api ${record['MESSAGE'].match(/GET \/api/)[0]}
</record>
</filter>
<filter system.var.log**>
@type record_transformer
enable_ruby
remove_keys host,pid,ident
<record>
systemd {"t":{"PID":"${record['pid']}"},"u":{"SYSLOG_IDENTIFIER":"${record['ident']}"}}
hostname ${host.eql?('localhost') ? (begin; File.open('/etc/docker-hostname') { |f| f.readline }.rstrip; rescue; host; end) : host}
time ${ (Time.at(time) > Time.now) ? (Time.new((time.year - 1), time.month, time.day, time.hour, time.min, time.sec, time.utc_offset).utc.to_datetime.rfc3339(6)) : (time.utc.to_datetime.rfc3339(6)) }
pipeline_metadata {"collector":{"ipaddr4":"${ENV['IPADDR4']}","ipaddr6":"${ENV['IPADDR6']}","inputname":"fluent-plugin-in_tail","name":"fluentd openshift","received_at":"${(Time.at(time) > Time.now) ? (Time.new((time.year - 1), time.month, time.day, time.hour, time.min, time.sec, time.utc_offset).utc.to_datetime.rfc3339(6)) : (time.utc.to_datetime.rfc3339(6))}","version":"${ENV['FLUENTD_VERSION'] + ' ' + ENV['DATA_VERSION']}"}}
</record>
</filter>
<filter journal.system**>
@type record_transformer
enable_ruby
remove_keys log,stream,MESSAGE,_SOURCE_REALTIME_TIMESTAMP,__REALTIME_TIMESTAMP,CONTAINER_ID,CONTAINER_ID_FULL,CONTAINER_NAME,PRIORITY,_BOOT_ID,_CAP_EFFECTIVE,_CMDLINE,_COMM,_EXE,_GID,_HOSTNAME,_MACHINE_ID,_PID,_SELINUX_CONTEXT,_SYSTEMD_CGROUP,_SYSTEMD_SLICE,_SYSTEMD_UNIT,_TRANSPORT,_UID,_AUDIT_LOGINUID,_AUDIT_SESSION,_SYSTEMD_OWNER_UID,_SYSTEMD_SESSION,_SYSTEMD_USER_UNIT,CODE_FILE,CODE_FUNCTION,CODE_LINE,ERRNO,MESSAGE_ID,RESULT,UNIT,_KERNEL_DEVICE,_KERNEL_SUBSYSTEM,_UDEV_SYSNAME,_UDEV_DEVNODE,_UDEV_DEVLINK,SYSLOG_FACILITY,SYSLOG_IDENTIFIER,SYSLOG_PID
<record>
systemd {"t":{"MACHINE_ID":"${record['_MACHINE_ID']}","AUDIT_LOGINUID":"${record['_AUDIT_LOGINUID']}","AUDIT_SESSION":"${record['_AUDIT_SESSION']}","BOOT_ID":"${record['_BOOT_ID']}","CAP_EFFECTIVE":"${record['_CAP_EFFECTIVE']}","CMDLINE":"${record['_CMDLINE']}","COMM":"${record['_COMM']}","EXE":"${record['_EXE']}","GID":"${record['_GID']}","HOSTNAME":"${record['_HOSTNAME']}","PID":"${record['_PID']}","SELINUX_CONTEXT":"${record['_SELINUX_CONTEXT']}","SOURCE_REALTIME_TIMESTAMP":"${record['_SOURCE_REALTIME_TIMESTAMP']}","SYSTEMD_CGROUP":"${record['_SYSTEMD_CGROUP']}","SYSTEMD_OWNER_UID":"${record['_SYSTEMD_OWNER_UID']}","SYSTEMD_SESSION":"${record['_SYSTEMD_SESSION']}","SYSTEMD_SLICE":"${record['_SYSTEMD_SLICE']}","SYSTEMD_UNIT":"${record['_SYSTEMD_UNIT']}","SYSTEMD_USER_UNIT":"${record['_SYSTEMD_USER_UNIT']}","TRANSPORT":"${record['_TRANSPORT']}","UID":"${record['_UID']}"},"u":{"CODE_FILE":"${record['CODE_FILE']}","CODE_FUNCTION":"${record['CODE_FUNCTION']}","CODE_LINE":"${record['CODE_LINE']}","ERRNO":"${record['ERRNO']}","MESSAGE_ID":"${record['MESSAGE_ID']}","RESULT":"${record['RESULT']}","UNIT":"${record['UNIT']}","SYSLOG_FACILITY":"${record['SYSLOG_FACILITY']}","SYSLOG_IDENTIFIER":"${record['SYSLOG_IDENTIFIER']}","SYSLOG_PID":"${record['SYSLOG_PID']}"},"k":{"KERNEL_DEVICE":"${record['_KERNEL_DEVICE']}","KERNEL_SUBSYSTEM":"${record['_KERNEL_SUBSYSTEM']}","UDEV_SYSNAME":"${record['_UDEV_SYSNAME']}","UDEV_DEVNODE":"${record['_UDEV_DEVNODE']}","UDEV_DEVLINK":"${record['_UDEV_DEVLINK']}"}}
hostname ${_HOSTNAME.eql?('localhost') ? (begin; File.open('/etc/docker-hostname') { |f| f.readline }.rstrip; rescue; _HOSTNAME; end) : _HOSTNAME}
message ${record["MESSAGE"]}
pipeline_metadata {"collector":{"ipaddr4":"${ENV['IPADDR4']}","ipaddr6":"${ENV['IPADDR6']}","inputname":"fluent-plugin-systemd","name":"fluentd openshift","received_at":"${(record['_SOURCE_REALTIME_TIMESTAMP'] || record['__REALTIME_TIMESTAMP']) ? Time.at((record['_SOURCE_REALTIME_TIMESTAMP'] || record['__REALTIME_TIMESTAMP']).to_f / 1000000.0).utc.to_datetime.rfc3339(6) : time.utc.to_datetime.rfc3339(6)}","version":"${ENV['FLUENTD_VERSION'] + ' ' + ENV['DATA_VERSION']}"}}
time ${(record["_SOURCE_REALTIME_TIMESTAMP"] || record["__REALTIME_TIMESTAMP"]) ? Time.at((record["_SOURCE_REALTIME_TIMESTAMP"] || record["__REALTIME_TIMESTAMP"]).to_f / 1000000.0).utc.to_datetime.rfc3339(6) : time.utc.to_datetime.rfc3339(6)}
level ${["emerg", "alert", "crit", "err", "warning", "notice", "info", "debug", "trace", "unknown"][begin; ('%d' % record['PRIORITY'] || 9).to_i; rescue; 9; end]}
</record>
</filter>
<filter **>
@type viaq_data_model
default_keep_fields CEE,docker,file,geoip,hostname,kubernetes,level,message,offset,pid,pipeline_metadata,rsyslog,service,systemd,tags,time,ovirt,collectd,tlog,aushape,namespace_name,namespace_uuid
extra_keep_fields
keep_empty_fields message
use_undefined false
undefined_name undefined
rename_time true
rename_time_if_missing false
src_time_name time
dest_time_name @timestamp
</filter>
<match journal.system** system.var.log** **_default_** **_openshift_** **_openshift-infra_** mux.ops>
@type copy
<store>
@type elasticsearch_dynamic
host logging-es
port 9200
scheme https
index_name .operations.${begin record['@timestamp'].nil? ? Time.at(time).getutc.strftime(@logstash_dateformat) : Time.parse(record['@timestamp']).getutc.strftime(@logstash_dateformat) rescue $log.error("record is missing time and @timestamp - record " + record.to_s) end}
user fluentd
password xxxxxx
client_key /etc/fluent/keys/key
client_cert /etc/fluent/keys/cert
ca_file /etc/fluent/keys/ca
type_name com.redhat.viaq.common
reload_connections false
reload_on_failure false
flush_interval 5s
max_retry_wait 300
disable_retry_limit true
buffer_type file
buffer_path /var/lib/fluentd/buffer-output-es-ops-config
buffer_queue_limit 79
buffer_chunk_limit 16777216
buffer_queue_full_action block
</store>
</match>
<match **>
@type copy
<store>
@type elasticsearch_dynamic
host logging-es
port 9200
scheme https
index_name project.${begin record['kubernetes']['namespace_name'] rescue $log.error("record is missing kubernetes.namespace_name - record " + record.to_s) end}.${begin record['kubernetes']['namespace_id'] rescue $log.error("record is missing kubernetes.namespace_id - record " + record.to_s) end}.${begin Time.parse(record['@timestamp']).getutc.strftime(@logstash_dateformat) rescue $log.error("record is missing @timestamp - record " + record.to_s) end}
user fluentd
password xxxxxx
client_key /etc/fluent/keys/key
client_cert /etc/fluent/keys/cert
ca_file /etc/fluent/keys/ca
type_name com.redhat.viaq.common
reload_connections false
reload_on_failure false
flush_interval 5s
max_retry_wait 300
disable_retry_limit true
buffer_type file
buffer_path /var/lib/fluentd/buffer-output-es-config
buffer_queue_limit 79
buffer_chunk_limit 16777216
buffer_queue_full_action block
</store>
</match>
</label>
</ROOT>
2017-08-25 13:37:19 +0000 [info]: plugin/out_elasticsearch_dynamic.rb:64:client: Connection opened to Elasticsearch cluster => {:host=>"logging-es", :port=>9200, :scheme=>"https", :user=>"fluentd", :password=>"obfuscated"}
2017-08-25 13:37:40 +0000 [info]: plugin/out_elasticsearch_dynamic.rb:64:client: Connection opened to Elasticsearch cluster => {:host=>"logging-es", :port=>9200, :scheme=>"https", :user=>"fluentd", :password=>"obfuscated"}
(end of captured log)