# use this for backwards compatibility
# fullnameOverride: ""

# Version for ElasticSearch and Kibana have to match so we define it at top-level
version: 7.8.1

elastic_password: ""  # super_secret_elastic_password

es:
  nodeSets: []
  #- count: 2
  #  storage:
  #    size: 16Gi
  #    class: local-sc-xfs
  #  zone: us-west-2a

  s3Snapshot:
    enabled: false
    iamrole: ""  # INSERT_CLOUDFORMATION_OUTPUT_ElasticSearchSnapshots

  prometheus: false

kibana:
  count: 1
  #servicename: kibana.example.com
  istio:
    enabled: false
    gateway: "istio-system/ingressgateway"
    url: ""  # kibana.example.com

fluentd:
  enabled: false
  #image:
  #  repository: quay.io/fluentd_elasticsearch/fluentd
  #  tag: v2.9.0
  istio:
    enabled: false

  # broken as of 2.5.1 ;-(
  # useStatefulSet: true
  replicaCount: 2

  plugins:
    enabled: false
    pluginsList:
    #- fluent-plugin-detect-exceptions
    #- fluent-plugin-s3
    #- fluent-plugin-grok-parser

  #persistence:
  #  enabled: true
  #  storageClass: "ebs-sc-gp2-xfs"
  #  accessMode: ReadWriteOnce
  #  size: 4Gi

  service:
    ports:
      - name: tcp-forward
        protocol: TCP
        containerPort: 24224
      - name: http-fluentd
        protocol: TCP
        containerPort: 9880

  metrics:
    enabled: false
    serviceMonitor:
      enabled: true
      additionalLabels:
        release: metrics
      namespace: monitoring

  output:
    host: logging-es-http

  shared_key: "cloudbender"

  env:
    OUTPUT_USER: elastic
    OUTPUT_SSL_VERIFY: "false"

  extraEnvVars:
    - name: OUTPUT_PASSWORD
      valueFrom:
        secretKeyRef:
          name: logging-es-elastic-user
          key: elastic
    - name: FLUENTD_SHARED_KEY
      valueFrom:
        secretKeyRef:
          name: logging-fluentd-secret
          key: shared_key

  extraVolumes:
    - name: fluentd-certs
      secret:
        secretName: fluentd-certificate

  extraVolumeMounts:
    - name: fluentd-certs
      mountPath: /mnt/fluentd-certs
      readOnly: true

  configMaps:
    # NOTE(review): the fluentd directive tags (<source>, <transport>, <security>,
    # <match>, <buffer>) were stripped by a broken extraction; restored here per the
    # standard fluentd in_forward / out_elasticsearch config layout — verify against
    # the deployed ConfigMap.
    forward-input.conf: |
      <source>
        @type forward
        port 24224
        bind 0.0.0.0
        skip_invalid_event true
        <transport tls>
          cert_path /mnt/fluentd-certs/tls.crt
          private_key_path /mnt/fluentd-certs/tls.key
        </transport>
        <security>
          self_hostname "#{ENV['HOSTNAME']}"
          shared_key "#{ENV['FLUENTD_SHARED_KEY']}"
        </security>
      </source>
    output.conf: |
      <match **>
        @id elasticsearch
        @type elasticsearch
        @log_level info
        include_tag_key true
        id_key id
        remove_keys id

        # KubeZero pipeline incl. GeoIP etc.
        pipeline fluentd

        host "#{ENV['OUTPUT_HOST']}"
        port "#{ENV['OUTPUT_PORT']}"
        scheme "#{ENV['OUTPUT_SCHEME']}"
        ssl_version "#{ENV['OUTPUT_SSL_VERSION']}"
        ssl_verify "#{ENV['OUTPUT_SSL_VERIFY']}"
        user "#{ENV['OUTPUT_USER']}"
        password "#{ENV['OUTPUT_PASSWORD']}"

        logstash_format true
        reload_connections false
        reconnect_on_error true
        reload_on_failure true
        request_timeout 15s

        <buffer>
          @type file
          path /var/log/fluentd-buffers/kubernetes.system.buffer
          flush_mode interval
          flush_thread_count 2
          flush_interval 5s
          flush_at_shutdown true
          retry_type exponential_backoff
          retry_timeout 60m
          retry_max_interval 30
          chunk_limit_size "#{ENV['OUTPUT_BUFFER_CHUNK_LIMIT']}"
          queue_limit_length "#{ENV['OUTPUT_BUFFER_QUEUE_LIMIT']}"
          overflow_action drop_oldest_chunk
        </buffer>
      </match>
#    filter.conf: |
#      <filter system.auth>
#        @type parser
#        key_name message
#        reserve_data true
#        reserve_time true
#        <parse>
#          @type grok
#          # SSH
#          <grok>
#            pattern %{DATA:system.auth.ssh.event} %{DATA:system.auth.ssh.method} for (invalid user )?%{DATA:system.auth.user} from %{IPORHOST:system.auth.ip} port %{NUMBER:system.auth.port} ssh2(: %{GREEDYDATA:system.auth.ssh.signature})?
#          </grok>
#          <grok>
#            pattern %{DATA:system.auth.ssh.event} user %{DATA:system.auth.user} from %{IPORHOST:system.auth.ip}
#          </grok>
#          # sudo
#          <grok>
#            pattern \s*%{DATA:system.auth.user} :( %{DATA:system.auth.sudo.error} ;)? TTY=%{DATA:system.auth.sudo.tty} ; PWD=%{DATA:system.auth.sudo.pwd} ; USER=%{DATA:system.auth.sudo.user} ; COMMAND=%{GREEDYDATA:system.auth.sudo.command}
#          </grok>
#          # Users
#          <grok>
#            pattern new group: name=%{DATA:system.auth.groupadd.name}, GID=%{NUMBER:system.auth.groupadd.gid}
#          </grok>
#          <grok>
#            pattern new user: name=%{DATA:system.auth.useradd.name}, UID=%{NUMBER:system.auth.useradd.uid}, GID=%{NUMBER:system.auth.useradd.gid}, home=%{DATA:system.auth.useradd.home}, shell=%{DATA:system.auth.useradd.shell}$
#          </grok>
#          <grok>
#            pattern %{GREEDYDATA:message}
#          </grok>
#        </parse>
#      </filter>

fluent-bit:
  enabled: true

  test:
    enabled: false

  config:
    outputs: |
      [OUTPUT]
          Match *
          Name forward
          Host fluentd
          Port 24224
          tls on
          tls.verify off
          Shared_Key cloudbender

    inputs: |
      [INPUT]
          Name tail
          Path /var/log/containers/*.log
          Parser cri
          Tag kube.*
          Mem_Buf_Limit 5MB
          Skip_Long_Lines On
          Refresh_Interval 10
          DB /var/log/flb_kube.db
          DB.Sync Normal

    filters: |
      [FILTER]
          Name kubernetes
          Match kube.*
          Merge_Log On
          Keep_Log Off
          K8S-Logging.Parser On
          K8S-Logging.Exclude On

      [FILTER]
          Name lua
          Match kube.*
          script /fluent-bit/etc/functions.lua
          call dedot

    customParsers: |
      [PARSER]
          # http://rubular.com/r/tjUt3Awgg4
          # NOTE(review): regex was truncated at the stripped <time> group; restored
          # from the canonical fluent-bit CRI parser — confirm against the chart.
          Name cri
          Format regex
          Regex ^(?<time>[^ ]+) (?<stream>stdout|stderr) (?<logtag>[^ ]*) (?<message>.*)$
          Time_Key time
          Time_Format %Y-%m-%dT%H:%M:%S.%L%z