Prerequisites:
- A Docker Swarm environment
- Elasticsearch, Logstash, and Kibana are already deployed
- All that is needed is to ship the logs to Elasticsearch via Filebeat and display them in Kibana
Collecting Java logs
The application must write its logs to a dedicated output file at startup, and the directory holding that file is mounted to the host. Without this approach, Filebeat has no permission to access the default container output directory, /data/docker_dir/containers/.
Host directories (in a multi-node cluster, create them on every node; see the sketch after this list):
/data/docker_filebeat/
- conf: holds the configuration files
- logs: holds the Java container logs
- registry: mode 777; Filebeat creates its files in this directory
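A minimal sketch of creating these directories; run it on every Swarm node. The layout follows the list above, and only the registry directory needs the relaxed permissions:

# mkdir -p /data/docker_filebeat/{conf,logs,registry}
# chmod 777 /data/docker_filebeat/registry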
Example
Dockerfile of the Java container
The last instruction redirects the log output to /root/logs/saas-admin.log
FROM docker.wecode123.com:30080/network/centos:7.5.1804
COPY saas-admin.jar saas-admin.jar
COPY argvtest.py /home/work/workspace/online_codeus/docs/argvtest.py
COPY format_code.py /home/work/workspace/online_codeus/docs/format_code.py
COPY .terminalfx /root/.terminalfx
COPY magic_box.py /usr/local/python3/lib/python3.6/magic_box.py
COPY wecode.py /usr/local/python3/lib/python3.6/wecode.py
COPY codeus.py /usr/local/python3/lib/python3.6/codeus.py
ADD pinpoint.tar.gz /
ENV LANG=en_US.UTF-8
ENV JAVA_HOME=/usr/local/jdk
ENV PATH=$JAVA_HOME/bin:$PATH
# ENV variables are visible to the container automatically; the shell-style "export" keyword is not needed in a Dockerfile
ENV PYTHON_HOME=/usr/local/python3
ENV CLASSPATH=.
# M2_HOME is not defined in this Dockerfile; set it here if Maven is actually required
ENV PATH=$JAVA_HOME/bin:$M2_HOME/bin:$PYTHON_HOME/bin:$PATH:$HOME/.local/bin:$HOME/bin
ENV PTY_LIB_FOLDER=/root/.terminalfx/libpty
RUN /bin/cp /usr/share/zoneinfo/Asia/Shanghai /etc/localtime && echo 'Asia/Shanghai' >/etc/timezone
#CMD exec java -jar cloudcompiler-1.0-SNAPSHOT.jar --spring.profiles.active=$CONFIG_FILE
CMD exec java -jar saas-admin.jar --server.port=20240 --spring.profiles.active=qc >> /root/logs/saas-admin.log
The key part is the log redirection on the last line; everything else can be adapted to your own environment.
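For reference, a minimal build-and-push sketch. The tag docker.wecode123.com:30080/saas/saas-admin:latest is only an assumption that reuses the registry from the FROM line; substitute whatever tag you deploy as docker_image in the stack file below:

# docker build -t docker.wecode123.com:30080/saas/saas-admin:latest .
# docker push docker.wecode123.com:30080/saas/saas-admin:latest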
The service is started as a stack.
The volumes entry mounts the log directory to the host; with two nodes, the mount directory has to exist on both.
version: '3'
services:
  saas-admin-demo:
    image: docker_image
    ports:
      - 30330:20240
    networks:
      - saas-demo
    volumes:
      - /data/docker_filebeat/logs/:/root/logs/
    deploy:
      mode: replicated
      replicas: 2
      restart_policy:
        condition: on-failure
        delay: 5s
        max_attempts: 3
      update_config:
        parallelism: 1
        delay: 10s
networks:
  saas-demo:
    driver: overlay
After the stack starts, check whether logs appear in the host directory; once they do, continue with the steps below.
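A minimal deploy-and-verify sketch, assuming the compose definition above is saved as stack-app.yml and the stack is named saas-demo:

# docker stack deploy -c stack-app.yml saas-demo
# ls -l /data/docker_filebeat/logs/
# tail -f /data/docker_filebeat/logs/saas-admin.log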
Deploying Filebeat
Configuration file
# cat /data/docker_filebeat/conf/filebeat.yml
The output points straight at Logstash.
filebeat.inputs:
- type: log
  enabled: true
  paths:
    - /var/log/docker/*.log
  # NOTE: Filebeat 7.x does not set a root-level "type" field by default. If you want the
  # "if [type] == ..." conditionals in the Logstash pipeline below to match, add a custom field, e.g.:
  # fields:
  #   type: codeus-app01-accesslog
  # fields_under_root: true
output.logstash.hosts: ["172.17.202.147:5044"]
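Before starting Filebeat it can help to confirm that the Logstash beats port is reachable from each node; this assumes nc (netcat) is available on the hosts:

# nc -zv 172.17.202.147 5044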
Logstash pipeline file
# cat app.conf
input {
  beats {
    port => "5044"
  }
}
filter {
  mutate {
    rename => { "[host][name]" => "host" }
  }
  # NOTE: the multiline filter plugin is not bundled with recent Logstash releases and must be
  # installed separately; multiline handling is more commonly done on the Filebeat side.
  multiline {
    pattern => "^202.*-.*-.*"
    negate => true
    what => "previous"
  }
}
filter {
  mutate {
    rename => { "[host][name]" => "host" }
  }
  # NOTE: JAVA_DATE, JAVA_TIME, etc. are custom grok patterns; they need to be defined in a
  # patterns file referenced via patterns_dir, otherwise grok will fail to load.
  # The [type] field must be set on the Filebeat side; see the comment in filebeat.yml above.
  if [type] == "codeus-app02-accesslog" {
    grok {
      match => ["message","%{JAVA_DATE:date} %{JAVA_TIME:time} %{JAVA_PORT:port} %{JAVA_LOGLEVEL:loglevel} %{JAVA_LOGTYPE:logtype} %{JAVA_NUMBER:number} %{JAVA_NULL:null} %{JAVA_DATE_TIME:date_time} %{JAVA_USERID:userID} %{JAVA_USERIP:userIP} %{JAVA_BREXPO:brexpo} %{JAVA_CONNECT:connect}"]
    }
  }
}
filter {
  mutate {
    rename => { "[host][name]" => "host" }
  }
  if [type] == "codeus-app02-website-accesslog" {
    grok {
      match => ["message","%{JAVA_DATE:date} %{JAVA_TIME:time} %{JAVA_PORT:port} %{JAVA_LOGLEVEL:loglevel} %{JAVA_LOGTYPE:logtype} %{JAVA_NUMBER:number} %{JAVA_NULL:null} %{JAVA_DATE_TIME:date_time} %{JAVA_USERID:userID} %{JAVA_USERIP:userIP} %{JAVA_BREXPO:brexpo} %{JAVA_CONNECT:connect}"]
    }
  }
}
filter {
  mutate {
    rename => { "[host][name]" => "host" }
  }
  if [type] == "codeus-app01-accesslog" {
    grok {
      match => ["message","%{JAVA_DATE:date} %{JAVA_TIME:time} %{JAVA_PORT:port} %{JAVA_LOGLEVEL:loglevel} %{JAVA_LOGTYPE:logtype} %{JAVA_NUMBER:number} %{JAVA_NULL:null} %{JAVA_DATE_TIME:date_time} %{JAVA_USERID:userID} %{JAVA_USERIP:userIP} %{JAVA_BREXPO:brexpo} %{JAVA_CONNECT:connect}"]
    }
  }
}
filter {
  mutate {
    rename => { "[host][name]" => "host" }
  }
  if [type] == "codeus-app01-website-accesslog" {
    grok {
      match => ["message","%{JAVA_DATE:date} %{JAVA_TIME:time} %{JAVA_PORT:port} %{JAVA_LOGLEVEL:loglevel} %{JAVA_LOGTYPE:logtype} %{JAVA_NUMBER:number} %{JAVA_NULL:null} %{JAVA_DATE_TIME:date_time} %{JAVA_USERID:userID} %{JAVA_USERIP:userIP} %{JAVA_BREXPO:brexpo} %{JAVA_CONNECT:connect}"]
    }
  }
}
output {
  # stdout { codec => rubydebug }
  elasticsearch {
    hosts => "127.0.0.1"
    index => "logstash-%{+YYYY.MM.dd}"
  }
}
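The pipeline syntax can be checked before (re)starting Logstash; the binary and config paths below are assumptions for a typical package-based install, adjust them to your environment:

# /usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/app.conf --config.test_and_exit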
Start it through a stack as well; the definition below can be copied straight into the web UI.
# cat stack-filebeat.yml
version: '3'
services:
  docker-filebeat:
    image: docker.elastic.co/beats/filebeat:7.2.0
    volumes:
      - /data/docker_filebeat/logs/:/var/log/docker:ro
      - /data/docker_filebeat/conf/filebeat.yml:/usr/share/filebeat/filebeat.yml
      - /data/docker_filebeat/registry/:/usr/share/filebeat/data/registry/
    deploy:
      # mode: global would schedule exactly one Filebeat task per node, the usual pattern for log shippers
      mode: replicated
      replicas: 2
      restart_policy:
        condition: on-failure
        delay: 5s
        max_attempts: 3
Check the Filebeat container logs; output showing a successful connection to Logstash means everything is working:
# docker logs -f filebeat-container-name
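Filebeat also has built-in self-tests that can be run inside the container to verify the configuration and the Logstash connection (the container name is a placeholder):

# docker exec -it filebeat-container-name filebeat test config
# docker exec -it filebeat-container-name filebeat test output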
Viewing in Kibana
Add the index pattern under the management settings.
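To confirm that the logstash-* index actually exists before creating the index pattern, Elasticsearch's _cat API can be queried; the address 127.0.0.1:9200 mirrors the output section above and may differ in your environment:

# curl -s 'http://127.0.0.1:9200/_cat/indices?v' | grep logstash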
To tell which application a given log line came from, you can refine the Filebeat configuration further, or simply use the log.file.path field already present in the indexed documents; in this setup, for example, the saas-admin logs would show log.file.path as /var/log/docker/saas-admin.log.
