这篇文章重点在于:
在 Spring Boot 项目中重写 JdbcTemplate,以支持多数据源,并使用 Druid 输出所有执行的 SQL 日志。
首先先定义yml配置文件
可参见文章:
https://www.hellojava.com/article/41398
然后配置 Druid 数据源并重写 JdbcTemplate:
import java.util.ArrayList; import java.util.List; import javax.sql.DataSource; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Primary; import org.springframework.stereotype.Component; import com.alibaba.druid.filter.Filter; import com.alibaba.druid.filter.logging.Slf4jLogFilter; import com.alibaba.druid.filter.stat.StatFilter; import com.alibaba.druid.pool.DruidDataSource; import com.alibaba.druid.spring.boot.autoconfigure.DruidDataSourceBuilder; import com.gb.soa.omp.ccommon.util.MyJdbcTemplate; @Component @Configuration public class DruidDataSourceConfiguration { @Primary @Bean(name="tenantmemDataSource") @ConfigurationProperties("spring.datasource.druid.paydurid") public DataSource tenantmemDataSource(){ DruidDataSource dataSource=DruidDataSourceBuilder.create().build(); dataSource.setProxyFilters(getDuridFilters()); return dataSource; } @Bean public StatFilter statFilter(){ StatFilter statFilter = new StatFilter(); statFilter.setSlowSqlMillis(400); statFilter.setLogSlowSql(true); statFilter.setMergeSql(true); return statFilter; } @Bean public Slf4jLogFilter slf4jLogFilter(){ Slf4jLogFilter slFilter = new Slf4jLogFilter(); slFilter.setDataSourceLogEnabled(true); slFilter.setConnectionLogEnabled(true); slFilter.setStatementLogEnabled(true); slFilter.setResultSetLogEnabled(false); slFilter.setStatementExecutableSqlLogEnable(true); return slFilter; } private List<Filter> getDuridFilters(){ List<Filter> filters=new ArrayList<>(2); filters.add(statFilter()); filters.add(slf4jLogFilter()); return filters; } @Bean(name="jdbcTemplate") public MyJdbcTemplate jdbcTemplate() { MyJdbcTemplate jdbcTemplate = new MyJdbcTemplate(); jdbcTemplate.setDataSource(tenantmemDataSource()); return jdbcTemplate; } }
然后在配置文件目录下新增文件 logback.xml:
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
    <!-- Console output with timestamp, thread, level, logger and line number. -->
    <appender name="console" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} %line - %msg%n</pattern>
        </encoder>
    </appender>

    <!-- Daily rolling file appender; %i is the size-based index within a day. -->
    <appender name="rollingFile" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>/var/log/ompc/gbpay.log</file>
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <fileNamePattern>/var/log/ompc/gbpay.%d{yyyy-MM-dd}_%i.log</fileNamePattern>
            <!-- Keep rolled log files for at most 10 days. -->
            <MaxHistory>10</MaxHistory>
            <!-- Size cap per file: once the current file reaches 1024 MB it
                 rolls to the next %i index. (The original comment claimed
                 "5MB or 1KB", which contradicted the configured value.) -->
            <TimeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                <MaxFileSize>1024MB</MaxFileSize>
            </TimeBasedFileNamingAndTriggeringPolicy>
        </rollingPolicy>
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} %line - %msg%n</pattern>
        </encoder>
    </appender>

    <!-- Project default level. -->
    <root level="WARN">
        <appender-ref ref="console" />
        <appender-ref ref="rollingFile" />
    </root>

    <logger name="com.gb.pay" level="INFO" />

    <!-- Druid SQL loggers fed by Slf4jLogFilter. additivity="false" prevents
         duplicate output through the root logger. At "debug" every executed
         statement is logged — lower these levels in production if needed. -->
    <logger name="druid.sql" level="debug" additivity="false">
        <appender-ref ref="console"/>
        <appender-ref ref="rollingFile"/>
    </logger>
    <logger name="druid.sql.Statement" level="debug" additivity="false">
        <appender-ref ref="console"/>
        <appender-ref ref="rollingFile"/>
    </logger>
    <logger name="druid.sql.Connection" level="INFO" additivity="false">
        <appender-ref ref="console"/>
        <appender-ref ref="rollingFile"/>
    </logger>
</configuration>
注意最后三个 druid.sql 相关的 logger:它们决定 SQL 日志的详细程度(debug 级别会输出每条执行的 SQL),请根据开发/生产环境自行调整日志级别,以免日志量过大。
最后在 dao里使用
@Resource(name = "jdbcTemplate") public MyJdbcTemplate jdbcTemplate;
来取代原生的 JdbcTemplate。
运行起来的效果就是:日志中会输出每条执行的 SQL 语句(含已替换参数的可执行 SQL)以及慢 SQL 告警。