Building an ELK Stack with Docker to Collect and Display Tomcat Logs

Architecture

Front-end display -> Search/index <- Log extraction and filtering -> Log buffer <- Log collection
Kibana            -> Elasticsearch <- Logstash                    -> Redis      <- Filebeat

Log file names and sample content:

/iba/ibaboss/java/bossmobile-tomcat-8.0.26/logs/catalina.out
# Excerpt:
22-Jun-2018 17:45:22.397 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Server version:        Apache Tomcat/8.0.26
22-Jun-2018 17:45:22.399 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Server built:          Aug 18 2015 11:38:37 UTC
22-Jun-2018 17:45:22.399 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Server number:         8.0.26.0

/iba/ibaboss/java/bossmobile-tomcat-8.0.26/logs/ibalife.log
# Excerpt:
[ERROR] [2018-06-30 17:41:56][com.iba.boss.pubsub.listener.core.ListenerTemplate]ErpCustomerRegEventListener onListen Done
[ERROR] [2018-06-30 17:41:56][com.iba.boss.pubsub.listener.user.BmcLevelDescEventListener]bmcLevelDescEventListener -> Waiting for set levelDesc
[ERROR] [2018-06-30 17:41:56][com.iba.boss.pubsub.listener.core.ListenerTemplate]BmcLevelDescEventListener onListen Done

Install Docker

For more information, refer to:
https://www.linuxidc.com/Linux/2019-01/156519.htm
https://www.linuxidc.com/Linux/2019-08/160028.htm

Install docker-compose

For more information, refer to https://www.linuxidc.com/Linux/2019-08/160026.htm

Install Redis (via Docker here)

docker pull redis 

mkdir /home/ibaboss/compose/config -p 
cd  /home/ibaboss/compose/config

# Redis configuration; the password is ibalife
vi redis.conf 

#daemonize yes
pidfile /data/redis.pid
port 6379
tcp-backlog 30000
timeout 0
tcp-keepalive 10
loglevel notice
logfile /data/redis.log
databases 16
save 900 1
save 300 10
save 60 10000
stop-writes-on-bgsave-error yes
rdbcompression yes
rdbchecksum yes
dbfilename dump.rdb
dir /data
slave-serve-stale-data yes
slave-read-only yes
repl-diskless-sync no
repl-diskless-sync-delay 5
repl-disable-tcp-nodelay no
slave-priority 100
requirepass ibalife
maxclients 30000
appendonly no
appendfilename "appendonly.aof"
appendfsync everysec
no-appendfsync-on-rewrite no
auto-aof-rewrite-percentage 100
auto-aof-rewrite-min-size 64mb
aof-load-truncated yes
lua-time-limit 5000
slowlog-log-slower-than 10000
slowlog-max-len 128
latency-monitor-threshold 0
notify-keyspace-events KEA
hash-max-ziplist-entries 512
hash-max-ziplist-value 64
list-max-ziplist-entries 512
list-max-ziplist-value 64
set-max-intset-entries 1000
zset-max-ziplist-entries 128
zset-max-ziplist-value 64
hll-sparse-max-bytes 3000
activerehashing yes 
client-output-buffer-limit normal 0 0 0
client-output-buffer-limit slave 256mb 64mb 60
client-output-buffer-limit pubsub 32mb 8mb 60
hz 10
aof-rewrite-incremental-fsync yes

# Write the docker-compose file for Redis
cd /home/ibaboss/compose

vi docker-compose-redis.yml
version: '3'
services:
  elk_redis:
    image: redis:latest
    container_name: elk_redis
    ports:
      - "192.168.0.223:6379:6379"   # for better security, expose Redis on the internal network only
    volumes:
      - ./config/redis.conf:/usr/local/etc/redis/redis.conf
    networks:
      - logs_elk                    # use the logs_elk network
    entrypoint:
      - redis-server
      - /usr/local/etc/redis/redis.conf

networks:
  logs_elk:
    external:                       # the network is created externally
      name: logs_elk

# Create the dedicated network for ELK
docker network create --attachable logs_elk

# Start Redis
docker-compose -f docker-compose-redis.yml up -d

# Check the container status
docker ps -a

# View the startup log, using the CONTAINER ID obtained in the previous step
docker logs -f 4841efd2e1ef
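
# Optional sanity check that Redis is reachable and the password works
# (this assumes redis-cli is installed on the host, which the steps above do not cover):
redis-cli -h 192.168.0.223 -p 6379 -a ibalife ping   # expect: PONG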

Install Filebeat

mkdir /home/tools -p

cd /home/tools

# Upload the installation package to /home/tools
tar zxvf filebeat-6.3.0-linux-x86_64.tar.gz -C /usr/local
cd /usr/local
ln -s /usr/local/filebeat-6.3.0-linux-x86_64 /usr/local/filebeat

Configure Filebeat

cd /usr/local/filebeat

cat filebeat4bossmobile.yml
filebeat.inputs:
- type: log
  enabled: true
  paths:
    - /iba/ibaboss/java/bossmobile-tomcat-8.0.26/logs/catalina.out
  multiline.pattern: '^[[:word:]]|^java'  # regex that matches the first line of an event
  multiline.negate: true   # lines that do NOT match the pattern are merged into the previous event rather than starting a new one; see Filebeat's multiline documentation for the rules
  multiline.match: after   # where merged lines are attached: before or after the matching line
  fields:                  # add a custom field to the collected events; the value bossmobile_catalina distinguishes the two log types
    service: bossmobile_catalina

- type: log
  enabled: true
  paths:
    - /iba/ibaboss/java/bossmobile-tomcat-8.0.26/logs/ibalife.*
  multiline.pattern: '^\['
  multiline.negate: true
  multiline.match: after
  fields:                  # add a custom field to the collected events; the value bossmobile_ibalife distinguishes the two log types
    service: bossmobile_ibalife

output.redis:
  hosts: ["192.168.0.223"]  # the address of the Redis host
  password: "ibalife"
  key: "bossmobile"         # events are pushed into the Redis key bossmobile
  db: 0
  timeout: 5
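
With multiline.negate: true and multiline.match: after, any line that does not match the pattern is appended to the nearest preceding line that does. A hypothetical stack trace in catalina.out, for example:

22-Jun-2018 17:50:01.123 SEVERE [main] org.example.Demo Request failed
	at org.example.Demo.run(Demo.java:42)
	at org.example.Demo.main(Demo.java:10)

reaches Redis as a single event, because the indented frames do not match '^[[:word:]]|^java'.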

Start Filebeat

# Create the folder for Filebeat's own logs
mkdir /iba/ibaboss/filebeat_logs

nohup ./filebeat -c filebeat4bossmobile.yml -e > /iba/ibaboss/filebeat_logs/filebeat4bossmobile.log 2>&1 &

# To re-read the logs from the beginning, stop Filebeat, delete the registry, then restart it
ps -ef | grep filebeat

kill -9 PID

rm /usr/local/filebeat/data/registry
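
# To verify that Filebeat is shipping events, watch the length of the Redis list while
# Tomcat writes logs (again assuming redis-cli is available; the count falls back toward
# zero once Logstash starts consuming):
redis-cli -h 192.168.0.223 -a ibalife llen bossmobile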

Install and configure ELK

cd /home/ibaboss/compose

cat docker-compose-elk.yml 
version: '3'
services:
  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch:6.2.4
    container_name: logs_elasticsearch           # name the container
    restart: always
    environment:
      - discovery.type=single-node
      - cluster.name=docker-cluster
      - network.host=0.0.0.0
      - discovery.zen.minimum_master_nodes=1
      - ES_JAVA_OPTS=-Xms512m -Xmx512m
    volumes:
      - /iba/ibaboss/elk-data:/usr/share/elasticsearch/data   # the official image keeps its data under /usr/share/elasticsearch/data
    networks:
      logs_elk:     # use the logs_elk network
        aliases:
          - elasticsearch # alias for the container; other containers on the logs_elk network can reach it via this alias

  kibana:
    image: docker.elastic.co/kibana/kibana:6.2.4
    container_name: logs_kibana
    ports:
      - "5601:5601"
    restart: always
    networks:
      logs_elk:
        aliases:
          - kibana
    environment:
      - ELASTICSEARCH_URL=http://elasticsearch:9200
      - SERVER_NAME=kibana
    depends_on:
      - elasticsearch

  logstash:
    image: docker.elastic.co/logstash/logstash:6.2.4
    container_name: logs_logstash
    restart: always
    environment:
      - LS_JAVA_OPTS=-Xms256m -Xmx256m
    volumes:
      - ./config/logstash.conf:/etc/logstash.conf
    networks:
      logs_elk:
        aliases:
          - logstash
    depends_on:
      - elasticsearch
    entrypoint:
      - logstash
      - -f
      - /etc/logstash.conf

networks:
  logs_elk:
    external:
      name: logs_elk
cd /home/ibaboss/compose/config

cat logstash.conf

input {
        redis {
                port => "6379"
                host => "elk_redis"       # elk_redis is the Redis host on the logs_elk network
                data_type => "list"
                key => "bossmobile"       # read data from the bossmobile key in Redis
                password => "ibalife"
        }

}

filter {
    mutate {    # remove fields we do not need
        remove_field => ["_id","@version","_index","_score","_type","beat.hostname","beat.name","beat.version","fields.service","input.type","offset","prospector.type","source"]
    }

  if [fields][service] == "bossmobile_catalina" {
    grok {    # capture the timestamp in the message field into a custom field customer_time
        match => [ "message", "(?<customer_time>%{MONTHDAY}-%{MONTH}-%{YEAR} %{HOUR}:%{MINUTE}:%{SECOND})" ]
    }
  }

  if [fields][service] == "bossmobile_ibalife" {
    grok {
        match => [ "message", "(?<customer_time>%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:%{MINUTE}:%{SECOND})" ]
    }
  }

    date {
        # parse customer_time from a string into a date; e.g. 22-Jun-2018 17:45:22.397 matches dd-MMM-yyyy HH:mm:ss.SSS
        match => [ "customer_time", "dd-MMM-yyyy HH:mm:ss.SSS", "yyyy-MM-dd HH:mm:ss" ]
        locale => "en"
        target => [ "@timestamp" ]        # replace the @timestamp field, which Kibana uses for sorting
        timezone => "Asia/Shanghai"
    }

}

output {    # create a separate Elasticsearch index for each service field value coming out of Redis
  if [fields][service] == "bossmobile_catalina" {
        elasticsearch {
                hosts => [ "elasticsearch:9200" ]
                index => "bossmobile_catalina-%{+YYYY.MM.dd}"
        }
  }

  if [fields][service] == "bossmobile_ibalife" {
        elasticsearch {
                hosts => [ "elasticsearch:9200" ]
                index => "bossmobile_ibalife-%{+YYYY.MM.dd}"
        }
  }

}
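
# The pipeline syntax can be validated before bringing the stack up, using the same
# Logstash image; a sketch, with the paths used above:
docker run --rm -v /home/ibaboss/compose/config/logstash.conf:/etc/logstash.conf \
  docker.elastic.co/logstash/logstash:6.2.4 \
  logstash -f /etc/logstash.conf --config.test_and_exit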

# Start the containers
mkdir /iba/ibaboss/elk-data -p
cd /home/ibaboss/compose
docker-compose -f docker-compose-elk.yml  up -d

docker ps -a
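
# If logs_elasticsearch keeps restarting, a common cause with the official image is a low
# mmap limit on the Docker host; Elasticsearch documents 262144 as the minimum:
sysctl -w vm.max_map_count=262144
docker logs -f logs_elasticsearch   # the node should eventually log "started"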

Visit port 5601 on the IP where Kibana runs and create the Index Patterns bossmobile_catalina-* and bossmobile_ibalife-*.
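
Elasticsearch itself is not published on the host, so its indices can be listed from inside the logs_elk network; a sketch using a throwaway curl container (curlimages/curl is just one option, any image with curl works):

docker run --rm --network logs_elk curlimages/curl -s 'http://elasticsearch:9200/_cat/indices?v'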


Overwriting a field with overwrite

The grok overwrite parameter can be used to overwrite information in the log event:

grok {
        match => { "message" => "\[%{WORD}\] \[%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:%{MINUTE}:%{SECOND}\]%{GREEDYDATA:message}" }
        overwrite => [ "message" ]
    }

This strips the leading [ERROR] [2019-06-18 10:58:56] prefix from the message field.
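
For example, applied to one of the ibalife.log lines shown earlier, the GREEDYDATA capture replaces message:

# before
[ERROR] [2018-06-30 17:41:56][com.iba.boss.pubsub.listener.core.ListenerTemplate]BmcLevelDescEventListener onListen Done
# message after the overwrite
[com.iba.boss.pubsub.listener.core.ListenerTemplate]BmcLevelDescEventListener onListen Done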
