ELK日志服务收集SpringBoot日志案例
第一步:准备docker-compose文件
- 首先准备 docker-compose.yaml 文件
version: "3"
services:
  elasticsearch:
    image: elasticsearch:7.17.2
    container_name: elasticsearch-server
    networks:
      - host                                 # user-defined bridge network named "host" (NOT host networking)
    environment:
      - "cluster.name=elasticsearch"         # cluster name
      - "discovery.type=single-node"         # single-node bootstrap
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"     # JVM heap size
    volumes:
      - /vault/app/elk/elasticsearch/plugins:/usr/share/elasticsearch/plugins  # plugin mount
      - /vault/app/elk/elasticsearch/data:/usr/share/elasticsearch/data        # data mount
    ports:
      - "9200:9200"                          # quoted: unquoted host:container pairs can hit YAML's sexagesimal trap
  kibana:
    image: kibana:7.17.2
    container_name: kibana-server
    networks:
      - host
    links:
      - elasticsearch:es                     # elasticsearch reachable under the alias "es"
    depends_on:
      - elasticsearch                        # start kibana after elasticsearch
    environment:
      # FIX: the official kibana image reads settings from env vars in UPPER_SNAKE form;
      # the original "elasticsearch.hosts=..." is ignored as an environment variable.
      - "ELASTICSEARCH_HOSTS=http://es:9200"
    ports:
      - "5601:5601"
  logstash:
    image: logstash:7.17.2
    container_name: logstash-server
    networks:
      - host
    volumes:
      - /vault/app/elk/logstash/pipeline/logstash.conf:/usr/share/logstash/pipeline/logstash.conf  # pipeline config mount
    depends_on:
      - elasticsearch                        # start logstash after elasticsearch (original comment wrongly said "kibana")
    links:
      - elasticsearch:es                     # elasticsearch reachable under the alias "es"
    ports:
      - "4560:4560"
networks:
  host:
第二步:准备目录
- 首先创建与 docker-compose.yaml 中挂载路径一致的目录(上面的 compose 文件挂载在 /vault/app/elk 下,目录必须一致,否则容器启动后挂载为空目录)
mkdir -p /vault/app/elk/elasticsearch/data
mkdir -p /vault/app/elk/elasticsearch/plugins
mkdir -p /vault/app/elk/logstash/pipeline
- 在 /vault/app/elk/logstash/pipeline 目录下创建logstash.conf文件
# Input: accept JSON-lines-framed events over TCP on port 4560
# (fed by logstash-logback-encoder's LogstashTcpSocketAppender).
input {
tcp {
mode => "server"
host => "0.0.0.0"
port => 4560
codec => json_lines
}
}
# Output configuration
output {
# Echo every event to the container log for debugging.
stdout {
codec => rubydebug
}
# Ship events to elasticsearch; "es" is the compose-level alias for the ES container.
# NOTE(review): user/password only take effect if X-Pack security is enabled on ES — confirm.
elasticsearch {
hosts => "es:9200"
user => "elastic"
password => "bmallelasticsearch"
# Index name per service and day, e.g. logstash-myapp-2024.08.07;
# spring.application.name is injected by the logback customFields config.
index => "logstash-%{[spring.application.name]}-%{+YYYY.MM.dd}"
}
}
第三步:执行安装脚本
- 执行安装命令
docker-compose up -d
- 拉取缓慢,或者遇到下面异常
第四步:docker pull 延迟处理
- 解决方案
- 修改配置文件:
vim /etc/docker/daemon.json
- 配置以下内容
{
"builder": {
"gc": {
"defaultKeepStorage": "20GB",
"enabled": true
}
},
"experimental": true,
"features": {
"buildkit": true
},
"insecure-registries": [
"172.24.86.231"
],
"registry-mirrors": [
"https://dockerproxy.com",
"https://mirror.baidubce.com",
"https://ccr.ccs.tencentyun.com",
"https://docker.m.daocloud.io",
"https://docker.nju.edu.cn",
"https://docker.mirrors.ustc.edu.cn"
],
"log-driver":"json-file",
"log-opts": {
"max-size":"500m",
"max-file":"3"
}
}
- 输入 :wq 保存并退出
- docker重启
sudo systemctl daemon-reload
sudo systemctl restart docker
- 查看
docker info
- 滑到最下面,确认 Registry Mirrors 配置已生效
第五步:重新安装
- 重新执行安装命令
docker-compose up -d
- 安装启动成功
第六步:防火墙
- 开放端口
firewall-cmd --zone=public --add-port=5601/tcp --permanent
firewall-cmd --zone=public --add-port=9200/tcp --permanent
firewall-cmd --zone=public --add-port=4560/tcp --permanent
- 重启防火墙
firewall-cmd --reload
第七步:验证ES是否安装成功
- 浏览器访问 IP:9200
第八步:验证Kibana是否安装成功
- 浏览器访问 IP:5601
首次进来发现什么都没有
第九步:Logstash收集SpringBoot 项目日志案例
-
创建项目测试,首先创建一个SpringBoot项目
-
maven 的配置
<!-- Web MVC + embedded Tomcat -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<!-- @Slf4j and other boilerplate generation -->
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
</dependency>
<!-- Log shipping: provides LogstashTcpSocketAppender / LogstashEncoder
     used in logback-spring.xml -->
<dependency>
<groupId>net.logstash.logback</groupId>
<artifactId>logstash-logback-encoder</artifactId>
<version>7.2</version>
</dependency>
<!-- AOP support -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-aop</artifactId>
</dependency>
- 创建一个接口
@Slf4j
@RestController
public class EndPointController {

    /**
     * Test endpoint for the ELK pipeline: emits a random number (0-9) of
     * INFO log triples plus one ERROR log, then reports how many rounds ran.
     *
     * @return confirmation message including the round count
     * @throws NoSuchAlgorithmException if no strong SecureRandom algorithm is available
     */
    @GetMapping("/endpoint")
    public String endpoint() throws NoSuchAlgorithmException {
        final int rounds = SecureRandom.getInstanceStrong().nextInt(10);
        int round = 0;
        while (round < rounds) {
            round++;
            log.info("The {} Business is start now !", round);
            log.info("The {} Business is doing now !", round);
            log.info("The {} Business is end now !", round);
        }
        log.error("我是错误error日志哦!!!!");
        return "Times log Success" + rounds;
    }
}
- 启动
@SpringBootApplication
public class SysLogApplication {
// Standard Spring Boot entry point: bootstraps the application context and embedded server.
public static void main(String[] args) {
SpringApplication.run(SysLogApplication.class, args);
}
}
- 日志配置
logback-spring.xml
<?xml version="1.0" encoding="utf-8"?>
<configuration>
    <!-- Console appender for local debugging -->
    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
        <layout class="ch.qos.logback.classic.PatternLayout" >
            <!-- FIX: original pattern ended with "%highlight(%msg) %m%n" — %m is an alias
                 of %msg, so every message was printed twice; the duplicate %m was removed. -->
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %highlight(%-5level) %cyan(%logger{50}.%M.%L) - %highlight(%msg) %n</pattern>
        </layout>
    </appender>
    <!-- Index prefix for elasticsearch.
         logstash.conf builds the index as logstash-%{[spring.application.name]}-%{+YYYY.MM.dd},
         so the final index is e.g. logstash-bmall11111-sys-log-2024.08.07
         (bmall11111 = this custom prefix, sys-log = service name). -->
    <property name="indexPrefix" value="bmall11111"/>
    <!-- Service name resolved from the Spring environment -->
    <springProperty scope="context" name="appName" source="spring.application.name"/>
    <appender name="logstash" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
        <!-- Reachable logstash TCP input (see logstash.conf, port 4560) -->
        <destination>139.196.86.102:4560</destination>
        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
            <customFields>{"spring.application.name":"${indexPrefix}-${appName}"}</customFields>
        </encoder>
    </appender>
    <root level="INFO">
        <appender-ref ref="STDOUT"/>
        <appender-ref ref="logstash"/>
    </root>
</configuration>
- 打开高级设置
- 索引管理,查看索引
- 创建Index patterns
9. 筛选
第十步:许可证安装【7.x以上版本可跳过】
- 注册elasticsearch账号,注册地址 https://register.elastic.co/
- 邮件下载许可证
附:logback-spring.xml获取application.yml配置
1. application.yml
# Embedded Tomcat tuning
server:
port: 8080
tomcat:
uri-encoding: UTF-8
accept-count: 1000
threads:
max: 800
min-spare: 100
max-http-form-post-size: 8MB
spring:
application:
name: demo-server
mvc:
pathMatch:
matching-strategy: ant_path_matcher
# Profile switching: @activatedProperties@ is replaced by Maven resource
# filtering from the <activatedProperties> property in the active profile.
profiles:
active: @activatedProperties@
# API documentation (knife4j / swagger)
knife4j:
enable: true
# Set true to hide docs in production
production: false
2. application-dev.yml
spring:
datasource:
driver-class-name: com.mysql.cj.jdbc.Driver
url: jdbc:mysql://127.0.0.1:3306/demo_db?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&useSSL=true&serverTimezone=GMT%2B8
username: root
password: xxxx
hikari:
connection-test-query: SELECT 1
minimum-idle: 5
maximum-pool-size: 20
pool-name: bili-pool
# Custom properties for logback-spring.xml (read via <springProperty>) — dev environment
logstash:
path: /tmp/logs/demo-logs
host: 127.0.0.1
port: 4560
3. application-prod.yml
spring:
datasource:
driver-class-name: com.mysql.cj.jdbc.Driver
url: jdbc:mysql://127.0.0.1:3306/demo_db?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&useSSL=true&serverTimezone=GMT%2B8
username: root
password: xxxx
hikari:
connection-test-query: SELECT 1
minimum-idle: 5
maximum-pool-size: 20
pool-name: bili-pool
# Custom properties for logback-spring.xml (read via <springProperty>) — prod environment
logstash:
path: /opt/app/demo-logs/logs
host: 127.90.10.123
port: 4560
4. logback-spring.xml
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
    <!-- Custom properties read from the Spring Environment (application-{profile}.yml) -->
    <springProperty scope="context" name="host" source="logstash.host"/>
    <springProperty scope="context" name="port" source="logstash.port"/>
    <springProperty scope="context" name="path" source="logstash.path"/>
    <springProperty scope="context" name="server_name" source="spring.application.name"/>
    <!-- Log file directory -->
    <property name="log.path" value="${path}"/>
    <!-- File log pattern -->
    <property name="log.pattern" value="%d{HH:mm:ss.SSS} [%thread] %-5level %logger{20} - [%method,%line] - %msg%n" />
    <!-- Console appender.
         FIX: the original declared BOTH an <encoder> referencing the never-defined
         property console.log.pattern (which renders literally as *_IS_UNDEFINED)
         AND a <layout>; only the intended colored pattern is kept, as the encoder. -->
    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %highlight(%-5level) %cyan(%logger{50}.%M.%L) - %highlight(%msg) %n</pattern>
            <charset>utf-8</charset>
        </encoder>
    </appender>
    <!-- INFO log file, rolled daily -->
    <appender name="file_info" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>${log.path}/${server_name}-info.log</file>
        <!-- Rolling policy: new file per day -->
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <!-- Rolled file name pattern -->
            <fileNamePattern>${log.path}/${server_name}-info.%d{yyyy-MM-dd}.log</fileNamePattern>
            <!-- Keep 60 days of history -->
            <maxHistory>60</maxHistory>
        </rollingPolicy>
        <encoder>
            <pattern>${log.pattern}</pattern>
        </encoder>
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <!-- Accept exactly INFO, reject everything else -->
            <level>INFO</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
    </appender>
    <!-- ERROR log file, rolled daily -->
    <appender name="file_error" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>${log.path}/${server_name}-error.log</file>
        <!-- Rolling policy: new file per day -->
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <!-- Rolled file name pattern -->
            <fileNamePattern>${log.path}/${server_name}-error.%d{yyyy-MM-dd}.log</fileNamePattern>
            <!-- Keep 60 days of history -->
            <maxHistory>60</maxHistory>
        </rollingPolicy>
        <encoder>
            <pattern>${log.pattern}</pattern>
        </encoder>
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <!-- Accept exactly ERROR, reject everything else -->
            <level>ERROR</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
    </appender>
    <!-- Ship logs to logstash over TCP.
         FIX: the original defined a second springProperty "appName" duplicating
         server_name (same source spring.application.name); server_name is reused. -->
    <appender name="logstash" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
        <destination>${host}:${port}</destination>
        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
            <customFields>{"spring.application.name":"${server_name}"}</customFields>
        </encoder>
    </appender>
    <!-- FIX: the original declared two <root> elements; logback has a single root
         logger, so the appender-refs are merged into one declaration. -->
    <root level="INFO">
        <appender-ref ref="file_info" />
        <appender-ref ref="file_error" />
        <appender-ref ref="STDOUT"/>
        <appender-ref ref="logstash"/>
    </root>
</configuration>
5. maven
<!-- Profile switching: sets the activatedProperties value substituted into
     application.yml (spring.profiles.active: @activatedProperties@) -->
<profiles>
<profile>
<id>dev</id>
<properties>
<activatedProperties>dev</activatedProperties>
</properties>
<!-- dev is the default profile when none is selected with -P -->
<activation>
<activeByDefault>true</activeByDefault>
</activation>
</profile>
<profile>
<id>prod</id>
<properties>
<activatedProperties>prod</activatedProperties>
</properties>
</profile>
</profiles>