1. REST API Logging
- Logging dictionary definition (a sample Redis entry is shown after the aspect code below)
- RestLogAspect.java
package cn.test.manage.config.aspect;
import com.alibaba.fastjson2.JSONArray;
import com.alibaba.fastjson2.JSONObject;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Component;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import java.text.DecimalFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Map;
/**
 * @description REST API access logging
 */
@Aspect
@Component
public class RestLogAspect {

    private static final Log logger = LogFactory.getLog(RestLogAspect.class);

    @Resource
    private RedisTemplate redisTemplate;

    /** System name, taken from the application display name. */
    @Value("${server.servlet.application-display-name}")
    private String system;

    /** Matches every public method of the REST controllers. */
    @Pointcut("execution(public * cn.test.manage.web.rest.*.*.*(..))")
    private void pointcut() {
    }
    @Around(value = "pointcut()")
    public Object log(ProceedingJoinPoint joinPoint) throws Throwable {
        Date date = new Date();
        Object result = joinPoint.proceed();
        try {
            // SimpleDateFormat/DecimalFormat are not thread-safe, so create them per invocation
            SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'+08:00'");
            DecimalFormat df = new DecimalFormat("#.##");
            // By convention the first argument is the request payload and the second is the HttpServletRequest
            HttpServletRequest request = (HttpServletRequest) joinPoint.getArgs()[1];
            // Look up the dictionary entry that marks this endpoint for logging
            Object dict = redisTemplate.opsForHash().get("DICT_", system + "_logs" + "." + request.getRequestURI());
            if (dict == null || StringUtils.isEmpty(dict.toString())) {
                return result;
            }
            JSONObject json = new JSONObject();
            json.put("logtype", "customize"); // fixed marker used by the log collector
            json.put("client", request.getRemoteAddr()); // client IP
            json.put("host", request.getServerName()); // server host (as requested by the client)
            json.put("port", request.getServerPort()); // server port
            json.put("starttime", sdf.format(date)); // time the call started
            json.put("url", request.getRequestURI()); // request URI
            json.put("system", system); // system name
            setArgInfo(json, joinPoint.getArgs()[0], result, dict.toString());
            // Resolve the token from the request parameter or the Authorization header
            String token = request.getParameter("token");
            if (StringUtils.isEmpty(token) || "null".equals(token)) {
                token = request.getHeader("Authorization");
                if (token != null && token.startsWith("Bearer ")) {
                    token = token.substring(7);
                }
            }
            json.put("token", token);
            String accountId = (String) redisTemplate.opsForValue().get("token-" + token);
            if (accountId != null) {
                Map<String, String> map = (Map<String, String>) redisTemplate.opsForValue().get(accountId);
                if (map != null) {
                    json.put("accountid", map.get("accountId")); // account ID
                    json.put("accountname", map.get("accountName")); // account name
                    json.put("username", map.get("name")); // user name
                }
            }
            // Execution time of the controller code; slightly shorter than the client-side response time
            json.put("resptime", df.format((double) (new Date().getTime() - date.getTime()) / 1000));
            logger.info("\n" + JSONObject.toJSONString(json));
        } catch (Exception e) {
            logger.error(e);
        }
        return result;
    }
    /**
     * Fills in the request/response details according to the dictionary entry.
     */
    private void setArgInfo(JSONObject json, Object request, Object response, String argstr) {
        String requeststr = JSONObject.toJSONString(request);
        String responsestr = JSONObject.toJSONString(response);
        JSONObject requestObject = JSONObject.parseObject(requeststr);
        JSONObject responseObject = JSONObject.parseObject(responsestr);
        String[] args = argstr.split(",");
        json.put("apidesc", args[0]); // endpoint description
        json.put("apitype", args[1]); // endpoint type
        if (responseObject.getString("operateSuccess") == null || "false".equals(responseObject.getString("operateSuccess"))) {
            json.put("level", "ERROR"); // log level
        } else {
            json.put("level", "INFO");
        }
        json.put("msg", responseObject.getString("operateSuccess") + " " + responseObject.getString("msg"));
        if ("true".equals(args[2])) {
            json.put("request", requeststr); // full request body
        }
        if ("true".equals(args[3])) {
            json.put("response", responsestr); // full response body
        }
        json.put("req", getInfo(json, requestObject, args[4])); // request summary
        json.put("resp", getInfo(json, responseObject, args[5])); // response summary
    }
    /**
     * Extracts a summary of the parameters; for collections only the first element is used.
     */
    private String getInfo(JSONObject json, JSONObject object, String argstr) {
        String str = "";
        for (String obj : argstr.split("/")) {
            Object jsonObject = object;
            String fields = obj.split(":")[0];
            String desc = obj.split(":")[1];
            for (String field : fields.split("\\.")) {
                if (jsonObject instanceof JSONObject) {
                    jsonObject = ((JSONObject) jsonObject).get(field);
                } else if (jsonObject instanceof JSONArray) {
                    while (jsonObject instanceof JSONArray) {
                        jsonObject = ((JSONArray) jsonObject).get(0);
                    }
                    jsonObject = ((JSONObject) jsonObject).get(field);
                } else {
                    logger.info(jsonObject);
                    break;
                }
            }
            str += desc + ": " + jsonObject + ", ";
        }
        str = str.endsWith(", ") ? str.substring(0, str.length() - 2) : str;
        return str;
    }
}
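The aspect only logs endpoints that have an entry in the DICT_ Redis hash, and it assumes every controller method takes the request payload as its first argument and the HttpServletRequest as its second. Below is a minimal sketch of both pieces, assuming the application display name is manage and a hypothetical /rest/user/login endpoint (all names below are illustrative).

# Dictionary entry format: description,type,logFullRequest,logFullResponse,requestSummary,responseSummary
# Summary specs are "/"-separated "field.path:label" pairs; nested objects use ".", and for arrays only the first element is read
HSET DICT_ "manage_logs./rest/user/login" "User login,login,true,true,account:Account,data.userId:User ID"

And a controller method shape that matches the pointcut and the argument convention:

package cn.test.manage.web.rest.user;

import javax.servlet.http.HttpServletRequest;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;

@RestController
public class LoginResource {

    // First argument: request payload; second argument: HttpServletRequest (read by the aspect)
    @PostMapping("/rest/user/login")
    public Object login(@RequestBody Object loginForm, HttpServletRequest request) {
        // ... authenticate and return a result object exposing operateSuccess / msg ...
        return null;
    }
}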
2. Nginx Logs
- Log configuration in nginx.conf
# Logging configuration
open_log_file_cache max=1000 inactive=20s valid=1m min_uses=3;
# One JSON object per request: client address, time, protocol, status, sizes, response time, user agent
log_format main escape=json                # escape special characters so each line stays valid JSON
    '{"logtype":"customize",'              # fixed marker used by the log collector
    '"starttime":"$time_iso8601",'         # time the log line was written
    '"url":"$uri",'                        # requested URI
    '"protocol":"$server_protocol",'       # request protocol
    '"upgrade":"$http_upgrade",'           # WebSocket upgrade header, if any
    '"status":"$status",'                  # response status
    '"host": "$http_host",'                # server address (as requested by the client)
    '"client": "$remote_addr",'            # client address
    '"reqsize": $request_length,'          # request size (bytes)
    '"respsize": $bytes_sent,'             # response size (bytes)
    '"resptime": $request_time,'           # response time (s)
    '"connnum": $connection_requests,'     # number of requests served over the current connection
    '"agent": "$http_user_agent"}';        # client user agent
access_log /var/log/nginx/access.log main buffer=32k flush=5s;
error_log /var/log/nginx/error.log warn;
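For reference, one access-log line produced by this format looks roughly like the following (values are illustrative); this is the JSON that Filebeat's decode_json_fields processor parses in the next section:

{"logtype":"customize","starttime":"2024-05-20T10:15:30+08:00","url":"/rest/user/login","protocol":"HTTP/1.1","upgrade":"","status":"200","host": "192.168.1.10","client": "192.168.1.55","reqsize": 512,"respsize": 1024,"resptime": 0.012,"connnum": 1,"agent": "Mozilla/5.0"}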
3. Collecting Logs
- filebeat.yml
filebeat.inputs:
  - type: filestream
    paths:
      - /home/nginx/logs/access.log
    tags: ["nginx-access"]
    processors:
      - decode_json_fields:
          fields: ["message"]
          target: "nginx"
          max_depth: 1
  - type: filestream
    paths:
      - /home/nginx/logs/error.log
    tags: ["nginx-error"]
  - type: filestream
    paths:
      - /home/docker/logs/*
    tags: ["crontab-log"]
  - type: filestream
    paths:
      - /home/logs/test/all.log
    tags: ["test"]
    processors:
      - decode_json_fields:
          fields: ["message"]
          target: "test"
          max_depth: 1

output.elasticsearch:
  hosts: ["192.168.1.12:9200"]
  preset: balanced
  protocol: "http"
  username: "elastic"
  password: "123456"
  indices:
    - index: "filebeat-6.13-nginx-%{+yyyy.MM}"
      when.contains: {tags: nginx, nginx.logtype: customize}
    - index: "filebeat-6.13-ser-%{+yyyy.MM}"
      when.contains: {tags: test, test.logtype: customize}
    - index: "filebeat-6.13-%{+yyyy.MM}"

setup.template.settings:
  index.number_of_shards: 1
  index.codec: best_compression

processors:
  - drop_fields:
      fields: ["log", "host", "input", "agent", "ecs"]
- Restart the filebeat service
sudo systemctl restart filebeat
sudo systemctl status filebeat
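Once Filebeat is running, a quick way to confirm that the monthly indices are being created is to query the Elasticsearch node configured above, using the same credentials as in filebeat.yml:

curl -u elastic:123456 "http://192.168.1.12:9200/_cat/indices/filebeat-6.13-*?v"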
4. Viewing Nginx Logs in Nightingale (夜莺)
- Log Analysis > Index Patterns > Create index pattern
- Log Analysis > Index Patterns > Edit field aliases
- Log Analysis > Instant Query
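The menu labels above are translated from the Chinese UI and may differ slightly between Nightingale versions. Assuming the index naming from filebeat.yml and that the query box accepts Elasticsearch query-string (Lucene) syntax, the values for the Nginx logs would look like this (the URL is illustrative):

Index pattern:  filebeat-6.13-nginx-*
Example filter: nginx.status:500 AND nginx.url:"/rest/user/login"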
5. Viewing REST API Logs in Nightingale
- Log Analysis > Index Patterns > Create index pattern
- Log Analysis > Index Patterns > Edit field aliases
- Log Analysis > Instant Query
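Likewise for the REST interface logs, filtering on the fields written by RestLogAspect (decoded under the test.* prefix; the URL is illustrative):

Index pattern:  filebeat-6.13-ser-*
Example filter: test.level:ERROR AND test.url:"/rest/user/login"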