npx skills add https://github.com/claude-dev-suite/claude-dev-suite --skill java-logging
┌─────────────────────────────────────────────────────────────┐
│ 应用程序代码 │
│ (仅使用 SLF4J API) │
└─────────────────────────────────────────────────────────────┘
│
▼
┌─────────────────────────────────────────────────────────────┐
│ SLF4J 门面层 │
│ (抽象层) │
└─────────────────────────────────────────────────────────────┘
│
┌───────────────┼───────────────┐
▼ ▼ ▼
┌──────────┐ ┌──────────┐ ┌──────────────┐
│ Logback │ │ Log4j2 │ │ java.util │
│ │ │ │ │ .logging │
└──────────┘ └──────────┘ └──────────────┘
规则:始终针对 SLF4J API 进行编码。实现是运行时依赖项。
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
// Example service demonstrating core SLF4J usage: one static logger per class,
// parameterized log messages, and exception logging with the throwable passed
// as the final argument so the stack trace is captured.
public class UserService {
// SLF4J convention: a single static final logger named after the owning class.
private static final Logger log = LoggerFactory.getLogger(UserService.class);
// Looks up a user by id. Logs DEBUG on entry, INFO on success, and ERROR
// (with full stack trace) on failure before rethrowing to the caller.
public User findUser(Long id) {
log.debug("正在查找用户,ID: {}", id);
try {
User user = repository.findById(id);
// NOTE(review): this logs the raw email address (PII), which conflicts with
// the sensitive-data masking guidance later in this document — confirm intent.
log.info("找到用户: {}", user.getEmail());
return user;
} catch (Exception e) {
// The exception as the last argument (after the placeholders) makes SLF4J
// print the stack trace; the {} is still filled by `id`.
log.error("查找用户失败,ID: {}", id, e);
throw e;
}
}
}
┌─────────────────────────────────────────────────────────────┐
│ Application Code │
│ (uses SLF4J API only) │
└─────────────────────────────────────────────────────────────┘
│
▼
┌─────────────────────────────────────────────────────────────┐
│ SLF4J Facade │
│ (abstraction layer) │
└─────────────────────────────────────────────────────────────┘
│
┌───────────────┼───────────────┐
▼ ▼ ▼
┌──────────┐ ┌──────────┐ ┌──────────────┐
│ Logback │ │ Log4j2 │ │ java.util │
│ │ │ │ │ .logging │
└──────────┘ └──────────┘ └──────────────┘
Rule: Always code to the SLF4J API. The implementation is a runtime dependency.
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
// Example service demonstrating core SLF4J usage: one static logger per class,
// parameterized log messages, and exception logging with the throwable passed
// as the final argument so the stack trace is captured.
public class UserService {
// SLF4J convention: a single static final logger named after the owning class.
private static final Logger log = LoggerFactory.getLogger(UserService.class);
// Looks up a user by id. Logs DEBUG on entry, INFO on success, and ERROR
// (with full stack trace) on failure before rethrowing to the caller.
public User findUser(Long id) {
log.debug("Finding user with id: {}", id);
try {
User user = repository.findById(id);
// NOTE(review): this logs the raw email address (PII), which conflicts with
// the sensitive-data masking guidance later in this document — confirm intent.
log.info("User found: {}", user.getEmail());
return user;
} catch (Exception e) {
// The exception as the last argument (after the placeholders) makes SLF4J
// print the stack trace; the {} is still filled by `id`.
log.error("Failed to find user with id: {}", id, e);
throw e;
}
}
}
广告位招租
在这里展示您的产品或服务
触达数万 AI 开发者,精准高效
import lombok.extern.slf4j.Slf4j;
// Lombok's @Slf4j generates the `private static final Logger log` field at
// compile time, removing the LoggerFactory.getLogger(...) boilerplate.
@Slf4j
public class OrderService {
public void processOrder(Order order) {
log.info("正在处理订单: {}", order.getId());
}
}
| 级别 | 用途 | 示例 |
|---|---|---|
TRACE | 非常详细的调试信息 | 循环迭代、变量值 |
DEBUG | 调试信息 | 方法进入/退出、查询参数 |
INFO | 业务事件 | 用户登录、订单创建 |
WARN | 潜在问题 | 使用了已弃用的 API、重试尝试 |
ERROR | 需要关注的错误 | 捕获异常、操作失败 |
log.trace("进入循环迭代 {}", i);
log.debug("查询参数: userId={}, status={}", userId, status);
log.info("订单 {} 创建成功", orderId);
log.warn("支付重试尝试 {} / {}", attempt, maxRetries);
log.error("处理订单支付失败 {}", orderId, exception);
// 良好 - 使用参数化日志记录(高效)
log.debug("正在处理用户 {},角色 {}", userId, role);
// 不佳 - 字符串拼接(总是会被求值)
log.debug("正在处理用户 " + userId + ",角色 " + role);
// 对于开销大的操作,使用 isEnabled 检查
if (log.isDebugEnabled()) {
log.debug("复杂数据: {}", computeExpensiveDebugInfo());
}
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
<!-- 包含 Spring Boot 默认配置 -->
<include resource="org/springframework/boot/logging/logback/defaults.xml"/>
<!-- 属性 -->
<property name="LOG_PATH" value="${LOG_PATH:-logs}"/>
<property name="LOG_FILE" value="${LOG_FILE:-application}"/>
<!-- 控制台输出器 -->
<appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} %highlight(%-5level) [%thread] %cyan(%logger{36}) - %msg%n</pattern>
</encoder>
</appender>
<!-- 滚动文件输出器 -->
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_PATH}/${LOG_FILE}.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${LOG_PATH}/${LOG_FILE}.%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
<maxFileSize>100MB</maxFileSize>
<maxHistory>30</maxHistory>
<totalSizeCap>3GB</totalSizeCap>
</rollingPolicy>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level [%thread] %logger{36} - %msg%n</pattern>
<charset>UTF-8</charset>
</encoder>
</appender>
<!-- 生产环境 JSON 输出器 -->
<appender name="JSON" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_PATH}/${LOG_FILE}-json.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${LOG_PATH}/${LOG_FILE}-json.%d{yyyy-MM-dd}.log.gz</fileNamePattern>
<maxHistory>7</maxHistory>
</rollingPolicy>
<encoder class="net.logstash.logback.encoder.LogstashEncoder">
<includeMdcKeyName>traceId</includeMdcKeyName>
<includeMdcKeyName>userId</includeMdcKeyName>
</encoder>
</appender>
<!-- 性能优化异步输出器 -->
<appender name="ASYNC_FILE" class="ch.qos.logback.classic.AsyncAppender">
<queueSize>512</queueSize>
<discardingThreshold>0</discardingThreshold>
<appender-ref ref="FILE"/>
</appender>
<!-- 日志记录器配置 -->
<logger name="com.yourcompany" level="DEBUG"/>
<logger name="org.springframework" level="INFO"/>
<logger name="org.hibernate.SQL" level="DEBUG"/>
<logger name="org.hibernate.type.descriptor.sql" level="TRACE"/>
<!-- 根日志记录器 -->
<root level="INFO">
<appender-ref ref="CONSOLE"/>
<appender-ref ref="ASYNC_FILE"/>
</root>
<!-- 环境特定配置 -->
<springProfile name="prod">
<root level="INFO">
<appender-ref ref="JSON"/>
</root>
</springProfile>
</configuration>
logging:
level:
root: INFO
com.yourcompany: DEBUG
org.springframework.web: INFO
org.hibernate.SQL: DEBUG
pattern:
console: "%d{HH:mm:ss.SSS} %highlight(%-5level) [%thread] %cyan(%logger{36}) - %msg%n"
file: "%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level [%thread] %logger{36} - %msg%n"
file:
name: logs/application.log
logback:
rollingpolicy:
max-file-size: 100MB
max-history: 30
<?xml version="1.0" encoding="UTF-8"?>
<Configuration status="WARN">
<Properties>
<Property name="LOG_PATH">logs</Property>
<Property name="LOG_PATTERN">%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level [%thread] %logger{36} - %msg%n</Property>
</Properties>
<Appenders>
<Console name="Console" target="SYSTEM_OUT">
<PatternLayout pattern="${LOG_PATTERN}"/>
</Console>
<RollingFile name="File" fileName="${LOG_PATH}/app.log"
filePattern="${LOG_PATH}/app-%d{yyyy-MM-dd}-%i.log.gz">
<PatternLayout pattern="${LOG_PATTERN}"/>
<Policies>
<SizeBasedTriggeringPolicy size="100MB"/>
<TimeBasedTriggeringPolicy/>
</Policies>
<DefaultRolloverStrategy max="30"/>
</RollingFile>
<!-- 高性能异步输出器 -->
<Async name="AsyncFile">
<AppenderRef ref="File"/>
</Async>
</Appenders>
<Loggers>
<Logger name="com.yourcompany" level="debug"/>
<Root level="info">
<AppenderRef ref="Console"/>
<AppenderRef ref="AsyncFile"/>
</Root>
</Loggers>
</Configuration>
<!-- 用于 Logback -->
<dependency>
<groupId>net.logstash.logback</groupId>
<artifactId>logstash-logback-encoder</artifactId>
<version>7.4</version>
</dependency>
import org.slf4j.MDC;
@Component
public class RequestLoggingFilter implements Filter {
    // Populates the MDC with per-request correlation data so every log line
    // emitted while handling this request carries traceId/userId/requestPath
    // (rendered via %X{key} in the log pattern, or included by LogstashEncoder).
    @Override
    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
            throws IOException, ServletException {
        // FIX: FilterChain.doFilter declares IOException and ServletException;
        // without the throws clause this example does not compile.
        try {
            MDC.put("traceId", generateTraceId());
            MDC.put("userId", getCurrentUserId());
            MDC.put("requestPath", ((HttpServletRequest) request).getRequestURI());
            chain.doFilter(request, response);
        } finally {
            // Always clear the MDC: servlet containers reuse worker threads, and
            // stale values would otherwise leak into unrelated requests.
            MDC.clear();
        }
    }
}
import static net.logstash.logback.argument.StructuredArguments.*;
log.info("订单已处理",
kv("orderId", order.getId()),
kv("customerId", order.getCustomerId()),
kv("amount", order.getTotal()),
kv("currency", "USD"));
输出:
{
"@timestamp": "2025-01-15T10:30:00.000Z",
"level": "INFO",
"logger": "com.example.OrderService",
"message": "Order processed",
"orderId": "ORD-12345",
"customerId": "CUST-789",
"amount": 99.99,
"currency": "USD",
"traceId": "abc123",
"userId": "user456"
}
// 不佳 - 记录敏感数据
log.info("用户登录: email={}, password={}", email, password);
// 良好 - 屏蔽敏感数据
log.info("用户登录: email={}", maskEmail(email));
// 辅助方法
// Masks an email address for safe logging: keeps the first two characters of
// the local part plus the domain, e.g. "john@example.com" -> "jo***@example.com".
// Returns "***" when the address is null, has no '@', or the local part is too
// short to mask meaningfully.
private String maskEmail(String email) {
    // FIX: guard against null — the original threw NullPointerException,
    // which is the wrong failure mode for a log-sanitizing helper.
    int atIndex = (email == null) ? -1 : email.indexOf('@');
    if (atIndex > 2) {
        return email.substring(0, 2) + "***" + email.substring(atIndex);
    }
    return "***";
}
slf4j 技能处理 API 使用模式
logback 技能处理 XML 配置
| 反模式 | 为何不好 | 解决方案 |
|---|---|---|
| 使用 System.out.println | 无法控制、无法持久化、无法过滤 | 使用 SLF4J 日志记录器 |
| 在日志中使用字符串拼接 | 总是会被求值,性能影响 | 使用参数化日志记录:log.info("用户 {}", id) |
| 在生产环境中不使用异步输出器 | 阻塞应用程序线程 | 使用 AsyncAppender 包装 |
| 在多线程应用程序中记录日志不使用 MDC | 丢失请求上下文 | 使用 MDC 存储关联 ID |
| 在生产环境中使用 DEBUG 级别 | 性能影响、磁盘使用 | 在生产环境中使用 INFO 或 WARN 级别 |
| 不屏蔽敏感数据 | 违反安全/合规要求 | 在记录日志前过滤密码、令牌、个人身份信息 |
| 问题 | 原因 | 解决方案 |
|---|---|---|
| NoClassDefFoundError: StaticLoggerBinder | 缺少 SLF4J 实现 | 添加 Logback 或 Log4j2 依赖项 |
| 多重绑定警告 | 类路径上有多个实现 | 只保留一个:Logback 或 Log4j2 |
| 日志未出现 | 错误的日志级别或缺少配置 | 检查 logback.xml 和日志级别 |
| 性能下降 | 同步输出器 | 使用 AsyncAppender 包装器 |
| 日志未轮转 | 缺少滚动策略 | 配置 RollingFileAppender |
| MDC 值未显示 | 模式缺少 %X{key} | 在日志模式中添加 MDC 占位符 |
每周安装数
1
代码仓库
首次出现
3 天前
安全审计
安装于
amp1
cline1
openclaw1
opencode1
cursor1
kimi-cli1
import lombok.extern.slf4j.Slf4j;
// Lombok's @Slf4j generates the `private static final Logger log` field at
// compile time, removing the LoggerFactory.getLogger(...) boilerplate.
@Slf4j
public class OrderService {
public void processOrder(Order order) {
log.info("Processing order: {}", order.getId());
}
}
| Level | Purpose | Example |
|---|---|---|
TRACE | Very detailed debugging | Loop iterations, variable values |
DEBUG | Debugging information | Method entry/exit, query params |
INFO | Business events | User login, order placed |
WARN | Potential issues | Deprecated API used, retry attempt |
ERROR | Errors requiring attention | Exception caught, operation failed |
log.trace("Entering loop iteration {}", i);
log.debug("Query parameters: userId={}, status={}", userId, status);
log.info("Order {} placed successfully", orderId);
log.warn("Payment retry attempt {} of {}", attempt, maxRetries);
log.error("Failed to process payment for order {}", orderId, exception);
// GOOD - uses parameterized logging (efficient)
log.debug("Processing user {} with role {}", userId, role);
// BAD - string concatenation (always evaluated)
log.debug("Processing user " + userId + " with role " + role);
// For expensive operations, use isEnabled check
if (log.isDebugEnabled()) {
log.debug("Complex data: {}", computeExpensiveDebugInfo());
}
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
<!-- Include Spring Boot defaults -->
<include resource="org/springframework/boot/logging/logback/defaults.xml"/>
<!-- Properties -->
<property name="LOG_PATH" value="${LOG_PATH:-logs}"/>
<property name="LOG_FILE" value="${LOG_FILE:-application}"/>
<!-- Console Appender -->
<appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} %highlight(%-5level) [%thread] %cyan(%logger{36}) - %msg%n</pattern>
</encoder>
</appender>
<!-- Rolling File Appender -->
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_PATH}/${LOG_FILE}.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${LOG_PATH}/${LOG_FILE}.%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
<maxFileSize>100MB</maxFileSize>
<maxHistory>30</maxHistory>
<totalSizeCap>3GB</totalSizeCap>
</rollingPolicy>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level [%thread] %logger{36} - %msg%n</pattern>
<charset>UTF-8</charset>
</encoder>
</appender>
<!-- JSON Appender for Production -->
<appender name="JSON" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_PATH}/${LOG_FILE}-json.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${LOG_PATH}/${LOG_FILE}-json.%d{yyyy-MM-dd}.log.gz</fileNamePattern>
<maxHistory>7</maxHistory>
</rollingPolicy>
<encoder class="net.logstash.logback.encoder.LogstashEncoder">
<includeMdcKeyName>traceId</includeMdcKeyName>
<includeMdcKeyName>userId</includeMdcKeyName>
</encoder>
</appender>
<!-- Async Appender for Performance -->
<appender name="ASYNC_FILE" class="ch.qos.logback.classic.AsyncAppender">
<queueSize>512</queueSize>
<discardingThreshold>0</discardingThreshold>
<appender-ref ref="FILE"/>
</appender>
<!-- Logger Configuration -->
<logger name="com.yourcompany" level="DEBUG"/>
<logger name="org.springframework" level="INFO"/>
<logger name="org.hibernate.SQL" level="DEBUG"/>
<logger name="org.hibernate.type.descriptor.sql" level="TRACE"/>
<!-- Root Logger -->
<root level="INFO">
<appender-ref ref="CONSOLE"/>
<appender-ref ref="ASYNC_FILE"/>
</root>
<!-- Profile-specific configuration -->
<springProfile name="prod">
<root level="INFO">
<appender-ref ref="JSON"/>
</root>
</springProfile>
</configuration>
logging:
level:
root: INFO
com.yourcompany: DEBUG
org.springframework.web: INFO
org.hibernate.SQL: DEBUG
pattern:
console: "%d{HH:mm:ss.SSS} %highlight(%-5level) [%thread] %cyan(%logger{36}) - %msg%n"
file: "%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level [%thread] %logger{36} - %msg%n"
file:
name: logs/application.log
logback:
rollingpolicy:
max-file-size: 100MB
max-history: 30
<?xml version="1.0" encoding="UTF-8"?>
<Configuration status="WARN">
<Properties>
<Property name="LOG_PATH">logs</Property>
<Property name="LOG_PATTERN">%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level [%thread] %logger{36} - %msg%n</Property>
</Properties>
<Appenders>
<Console name="Console" target="SYSTEM_OUT">
<PatternLayout pattern="${LOG_PATTERN}"/>
</Console>
<RollingFile name="File" fileName="${LOG_PATH}/app.log"
filePattern="${LOG_PATH}/app-%d{yyyy-MM-dd}-%i.log.gz">
<PatternLayout pattern="${LOG_PATTERN}"/>
<Policies>
<SizeBasedTriggeringPolicy size="100MB"/>
<TimeBasedTriggeringPolicy/>
</Policies>
<DefaultRolloverStrategy max="30"/>
</RollingFile>
<!-- Async for high performance -->
<Async name="AsyncFile">
<AppenderRef ref="File"/>
</Async>
</Appenders>
<Loggers>
<Logger name="com.yourcompany" level="debug"/>
<Root level="info">
<AppenderRef ref="Console"/>
<AppenderRef ref="AsyncFile"/>
</Root>
</Loggers>
</Configuration>
<!-- For Logback -->
<dependency>
<groupId>net.logstash.logback</groupId>
<artifactId>logstash-logback-encoder</artifactId>
<version>7.4</version>
</dependency>
import org.slf4j.MDC;
@Component
public class RequestLoggingFilter implements Filter {
    // Populates the MDC with per-request correlation data so every log line
    // emitted while handling this request carries traceId/userId/requestPath
    // (rendered via %X{key} in the log pattern, or included by LogstashEncoder).
    @Override
    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
            throws IOException, ServletException {
        // FIX: FilterChain.doFilter declares IOException and ServletException;
        // without the throws clause this example does not compile.
        try {
            MDC.put("traceId", generateTraceId());
            MDC.put("userId", getCurrentUserId());
            MDC.put("requestPath", ((HttpServletRequest) request).getRequestURI());
            chain.doFilter(request, response);
        } finally {
            // Always clear the MDC: servlet containers reuse worker threads, and
            // stale values would otherwise leak into unrelated requests.
            MDC.clear();
        }
    }
}
import static net.logstash.logback.argument.StructuredArguments.*;
log.info("Order processed",
kv("orderId", order.getId()),
kv("customerId", order.getCustomerId()),
kv("amount", order.getTotal()),
kv("currency", "USD"));
Output:
{
"@timestamp": "2025-01-15T10:30:00.000Z",
"level": "INFO",
"logger": "com.example.OrderService",
"message": "Order processed",
"orderId": "ORD-12345",
"customerId": "CUST-789",
"amount": 99.99,
"currency": "USD",
"traceId": "abc123",
"userId": "user456"
}
// BAD - logs sensitive data
log.info("User login: email={}, password={}", email, password);
// GOOD - mask sensitive data
log.info("User login: email={}", maskEmail(email));
// Helper
// Masks an email address for safe logging: keeps the first two characters of
// the local part plus the domain, e.g. "john@example.com" -> "jo***@example.com".
// Returns "***" when the address is null, has no '@', or the local part is too
// short to mask meaningfully.
private String maskEmail(String email) {
    // FIX: guard against null — the original threw NullPointerException,
    // which is the wrong failure mode for a log-sanitizing helper.
    int atIndex = (email == null) ? -1 : email.indexOf('@');
    if (atIndex > 2) {
        return email.substring(0, 2) + "***" + email.substring(atIndex);
    }
    return "***";
}
slf4j skill for API usage patterns
logback skill for XML config
| Anti-Pattern | Why It's Bad | Solution |
|---|---|---|
| Using System.out.println | No control, no persistence, no filtering | Use SLF4J logger |
| String concatenation in logs | Always evaluated, performance hit | Use parameterized logging: log.info("User {}", id) |
| Not using async appenders in production | Blocks application threads | Wrap with AsyncAppender |
| Logging without MDC in multi-threaded apps | Loses request context | Use MDC for correlation IDs |
| DEBUG level in production | Performance impact, disk usage | Use INFO or WARN in production |
| Not masking sensitive data | Security/compliance violation | Filter passwords, tokens, PII before logging |
| Issue | Cause | Solution |
|---|---|---|
| NoClassDefFoundError: StaticLoggerBinder | Missing SLF4J implementation | Add Logback or Log4j2 dependency |
| Multiple bindings warning | Multiple implementations on classpath | Keep only one: Logback OR Log4j2 |
| Logs not appearing | Wrong log level or missing config | Check logback.xml and log levels |
| Performance degradation | Synchronous appenders | Use AsyncAppender wrapper |
| Logs not rotating | Missing rolling policy | Configure RollingFileAppender |
| MDC values not showing | Pattern missing %X{key} | Add MDC placeholders to log pattern |
Weekly Installs
1
Repository
First Seen
3 days ago
Security Audits
Installed on
amp1
cline1
openclaw1
opencode1
cursor1
kimi-cli1
React 组合模式指南:Vercel 组件架构最佳实践,提升代码可维护性
109,600 周安装
Docnify自动化:通过Rube MCP和Composio工具包实现文档操作自动化
1 周安装
Docmosis自动化集成指南:通过Rube MCP与Composio实现文档生成自动化
1 周安装
Dictionary API自动化教程:通过Rube MCP和Composio实现词典API操作自动化
1 周安装
detrack-automation:自动化追踪技能,集成Claude AI提升开发效率
1 周安装
Demio自动化工具包:通过Rube MCP和Composio实现Demio操作自动化
1 周安装
Deel自动化工具:通过Rube MCP与Composio实现HR与薪资操作自动化
1 周安装