Almost every project needs to access a database, so the database URL, username, and password end up in the configuration file. Sooner or later the team lead points out that the password must not sit there in plain text, even in a project config file, so encrypting the password in the configuration is well worth doing.
Basic configuration
There is not much to say about the basic setup; it is a YAML configuration and the comments below explain each entry.

PS: the encryption here uses Druid's built-in asymmetric (RSA) scheme, so the database password in the config is already encrypted, and the publicKey entry below is the public key used to decrypt it (a quick offline check is sketched right after the configuration).

Official parameter documentation, in case you need other options: official configuration properties
<https://github.com/alibaba/druid/wiki/DruidDataSource%E9%85%8D%E7%BD%AE%E5%B1%9E%E6%80%A7%E5%88%97%E8%A1%A8>
```yaml
spring:
  datasource:
    url: xxxxxx            # JDBC url
    username: xxxxxx       # username
    password: xxxxxx       # password encrypted with the private key
    publicKey: xxxxxx      # public key, used to decrypt the password
    # common Druid connection pool settings
    type: com.alibaba.druid.pool.DruidDataSource
    druid:
      # supplementary pool settings, applied to the data source above
      # pool sizing: initial, minimum idle, maximum active
      initial-size: 5
      min-idle: 5
      max-active: 20
      # maximum time to wait for a connection
      max-wait: 60000
      # interval between checks for idle connections that should be closed, in milliseconds
      time-between-eviction-runs-millis: 60000
      # minimum time a connection must stay in the pool before it can be evicted, in milliseconds
      min-evictable-idle-time-millis: 300000
      # SQL used to validate connections
      validation-query: select count(1) from sys.objects Where type='U' And type_desc='USER_TABLE'
      test-while-idle: true
      test-on-borrow: false
      test-on-return: false
      # enable PSCache and set its size per connection
      pool-prepared-statements: true
      max-pool-prepared-statement-per-connection-size: 20
      # monitoring filters; without 'stat' the monitor page cannot collect SQL statistics.
      # 'wall' is the SQL firewall; if it is enabled, AND conditions inside <if> blocks
      # may be treated as injection and blocked
      filters: stat # wall
      use-global-data-source-stat: true
      # connectProperties enables mergeSql and slow-SQL logging
      connect-properties: druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000
```
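Before wiring anything into Spring, it can save time to confirm that the encrypted password and the publicKey actually match. Below is a minimal standalone sketch; the two string values are placeholders for the ones you generate with ConfigTools (see the main method in the configuration class at the end of this post), and it uses the same ConfigTools.decrypt call the data source configuration relies on at startup:

```java
import com.alibaba.druid.filter.config.ConfigTools;

public class DecryptCheck {
    public static void main(String[] args) throws Exception {
        // placeholders: paste in your own publicKey and the private-key-encrypted password
        String publicKey = "xxxxxx";
        String encryptedPassword = "xxxxxx";
        // prints the plaintext password if the pair is consistent
        System.out.println("plaintext: " + ConfigTools.decrypt(publicKey, encryptedPassword));
    }
}
```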
The Druid configuration class
This configuration class is where the password gets decrypted before the database connection is built.

First add the dependency below; it is what allows values from the configuration file to be injected into the object's fields (the Druid and Lombok dependencies mentioned after the snippet are also required).
```xml
<!-- configuration file processor -->
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-configuration-processor</artifactId>
</dependency>
```
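The class below also assumes that Druid itself (for DruidDataSource and ConfigTools) and Lombok are on the classpath. If your project does not already pull them in, something along these lines should work; the version is a placeholder, and with spring-boot-starter-parent the Lombok version is usually managed for you:

```xml
<!-- Druid connection pool: provides DruidDataSource and ConfigTools -->
<dependency>
    <groupId>com.alibaba</groupId>
    <artifactId>druid</artifactId>
    <version>x.x.x</version> <!-- use a current release -->
</dependency>
<!-- Lombok, for the @Data annotation -->
<dependency>
    <groupId>org.projectlombok</groupId>
    <artifactId>lombok</artifactId>
    <scope>provided</scope>
</dependency>
```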
DruidDatsSourceConfig.java:

Because the property paths differ from the field names, each value is mapped explicitly with @Value.

If startup fails, check the configuration file carefully for missing or empty values: every parameter that is injected must exist in the configuration, otherwise you get a NullPointerException. So only inject the parameters you actually configure, adjust the @Value fields to match, and drop the ones you do not need.

PS: the @Data annotation comes from Lombok, so the getters and setters do not have to be written by hand.
```java
import com.alibaba.druid.filter.config.ConfigTools;
import com.alibaba.druid.pool.DruidDataSource;
import lombok.Data;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;

import javax.sql.DataSource;
import java.sql.SQLException;
import java.util.Properties;

/**
 * Druid data source configuration.
 *
 * @author: author
 * @create: 2019-07-01 11:10
 **/
@Configuration
@ConfigurationProperties(prefix = "spring.datasource")
@Data
public class DruidDatsSourceConfig {

    private Logger logger = LoggerFactory.getLogger(DruidDatsSourceConfig.class);

    @Value("${spring.datasource.url}")
    private String url;
    @Value("${spring.datasource.username}")
    private String username;
    @Value("${spring.datasource.password}")
    private String password;
    @Value("${spring.datasource.type}")
    private String type;
    @Value("${spring.datasource.publicKey}")
    private String publicKey;
    @Value("${spring.datasource.druid.initial-size}")
    private Integer initialSize;
    @Value("${spring.datasource.druid.min-idle}")
    private Integer minIdle;
    @Value("${spring.datasource.druid.max-active}")
    private Integer maxActive;
    @Value("${spring.datasource.druid.max-wait}")
    private Integer maxWait;
    @Value("${spring.datasource.druid.time-between-eviction-runs-millis}")
    private Integer timeBetweenEvictionRunsMillis;
    @Value("${spring.datasource.druid.min-evictable-idle-time-millis}")
    private Integer minEvictableIdleTimeMillis;
    @Value("${spring.datasource.druid.validation-query}")
    private String validationQuery;
    @Value("${spring.datasource.druid.test-while-idle}")
    private Boolean testWhileIdle;
    @Value("${spring.datasource.druid.test-on-borrow}")
    private Boolean testOnBorrow;
    @Value("${spring.datasource.druid.test-on-return}")
    private Boolean testOnReturn;
    @Value("${spring.datasource.druid.pool-prepared-statements}")
    private Boolean poolPreparedStatements;
    @Value("${spring.datasource.druid.max-pool-prepared-statement-per-connection-size}")
    private Integer maxPoolPreparedStatementPerConnectionSize;
    @Value("${spring.datasource.druid.filters}")
    private String filters;
    @Value("${spring.datasource.druid.use-global-data-source-stat}")
    private Boolean useGlobalDataSourceStat;
    @Value("${spring.datasource.druid.connect-properties}")
    private Properties connectProperties;

    /**
     * Build the data source from the injected parameters.
     *
     * @return the configured DruidDataSource
     * @throws Exception if the password cannot be decrypted
     */
    @Bean
    @Primary
    public DataSource druidDataSource() throws Exception {
        DruidDataSource datasource = new DruidDataSource();
        datasource.setUrl(url);
        datasource.setUsername(username);
        // decrypt the password with the public key, then set it on the data source
        datasource.setPassword(ConfigTools.decrypt(publicKey, password));
        // debug output of the decrypted password; remove this line outside of local testing
        logger.info("password: " + ConfigTools.decrypt(publicKey, password));
        datasource.setInitialSize(initialSize);
        datasource.setMinIdle(minIdle);
        datasource.setMaxActive(maxActive);
        datasource.setMaxWait(maxWait);
        datasource.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis);
        datasource.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
        datasource.setValidationQuery(validationQuery);
        datasource.setTestWhileIdle(testWhileIdle);
        datasource.setTestOnBorrow(testOnBorrow);
        datasource.setTestOnReturn(testOnReturn);
        datasource.setPoolPreparedStatements(poolPreparedStatements);
        datasource.setMaxPoolPreparedStatementPerConnectionSize(maxPoolPreparedStatementPerConnectionSize);
        datasource.setUseGlobalDataSourceStat(useGlobalDataSourceStat);
        datasource.setConnectProperties(connectProperties);
        try {
            datasource.setFilters(filters);
        } catch (SQLException e) {
            logger.error("========druid configuration initialization filter========", e);
        }
        return datasource;
    }

    /**
     * Generate the key pair and the encrypted password.
     *
     * @param args unused
     * @throws Exception on encryption failure
     */
    public static void main(String[] args) throws Exception {
        String password = "xxxx";
        // generate a 512-bit key pair: arr[0] is the private key, arr[1] the public key
        String[] arr = ConfigTools.genKeyPair(512);
        System.out.println("password:" + password);
        System.out.println("privateKey:" + arr[0]);
        System.out.println("publicKey:" + arr[1]);
        // encrypt the plaintext password with the private key
        System.out.println("password:" + ConfigTools.encrypt(arr[0], password));
    }
}
```