Building Dynamic Multiple Data Sources with Spring Boot
There are several common ways to configure multiple data sources: a fixed set of data sources, selecting the data source with an annotation, or selecting it from a parameter passed at runtime. Personally I find the third approach, choosing the data source by parameter, the most flexible; its mechanism is essentially the same as the annotation-based one.
In this article I use the second approach: the data source is selected with an annotation.
Background for using multiple data sources
Multiple data sources are typically used for read/write splitting, i.e. a single service needs to work with more than one database.
Core code
The core code consists of the following classes:
- TargetDatasource---annotation that specifies the data source
- DynamicDataSourceAspect---aspect that switches the data source
- DatasourceConfig---configuration class that loads the data source settings from the yml file
- DruidDatasourceConfig---data source configuration that registers the relevant beans in the container
- DynamicDataSource---the dynamic (routing) data source itself
- DynamicDataSourceContextHolder---helper class for switching the data source
TargetDatasource
The annotation used to specify the data source; the name must be one of the names defined in the configuration.
/**
 * Marks a method (or class) with the name of the data source it should use.
 *
 * @author leo
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.METHOD, ElementType.TYPE})
@Documented
public @interface TargetDatasource {

    String name();
}
DynamicDataSourceAspect
Switches dynamically to the data source named in the annotation.
/**
 * Dynamic data source aspect.
 * Order is -1 so that it runs before other advice (e.g. transaction advice).
 *
 * @author leo
 */
@Aspect
@Order(-1)
@Component
public class DynamicDataSourceAspect {

    private static final Logger logger = LogManager.getLogger(DynamicDataSourceAspect.class);

    /**
     * Switches to the data source named in the annotation before the method runs.
     *
     * @param point            the join point
     * @param targetDatasource the annotation carrying the data source name
     */
    @Before("@annotation(targetDatasource)")
    public void changeDataSource(JoinPoint point, TargetDatasource targetDatasource) {
        String datasourceId = targetDatasource.name();
        if (!DynamicDataSourceContextHolder.existDataSource(datasourceId)) {
            logger.error("No data source found ...【" + datasourceId + "】");
            return;
        }
        DynamicDataSourceContextHolder.setDataSourceType(datasourceId);
    }

    /**
     * Clears the data source binding after the advised method has finished.
     *
     * @param point            the join point
     * @param targetDatasource the annotation carrying the data source name
     */
    @After("@annotation(targetDatasource)")
    public void destroyDataSource(JoinPoint point, TargetDatasource targetDatasource) {
        DynamicDataSourceContextHolder.clearDataSourceType();
    }
}
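Note that the pointcut above only matches annotations placed on methods, even though @TargetDatasource also declares ElementType.TYPE. If class-level switching is wanted, an additional advice could bind the class-level annotation via @within. The following is a sketch of my own and not part of the original code:

    /**
     * Sketch (not in the original code): honor a class-level @TargetDatasource.
     * If both a class-level and a method-level annotation are present, the relative
     * order of this advice and changeDataSource() is not guaranteed, so the
     * class-level value may overwrite the method-level one.
     */
    @Before("@within(targetDatasource)")
    public void changeDataSourceForClass(JoinPoint point, TargetDatasource targetDatasource) {
        if (DynamicDataSourceContextHolder.existDataSource(targetDatasource.name())) {
            DynamicDataSourceContextHolder.setDataSourceType(targetDatasource.name());
        }
    }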
DatasourceConfig
Reads the multi-data-source settings from the yml file. With the prefix spring.datasource, the custom map is bound to everything under spring.datasource.custom (the comma-separated list of data source names plus one nested map of properties per data source), and the druid map is bound to the shared pool settings under spring.datasource.druid; the full yml is shown at the end of the article.
/**
 * Reads the multi-data-source configuration from the configuration file.
 *
 * @author leo.z.l
 * @create 2019-09-09 14:35
 */
@Component
@ConfigurationProperties("spring.datasource")
public class DatasourceConfig {

    private Map<String, Object> custom;

    private Map<String, Object> druid;

    public Map<String, Object> getCustom() {
        return custom;
    }

    public void setCustom(Map<String, Object> custom) {
        this.custom = custom;
    }

    public Map<String, Object> getDruid() {
        return druid;
    }

    public void setDruid(Map<String, Object> druid) {
        this.druid = druid;
    }
}
DruidDatasourceConfig
The core class: it registers the relevant beans in the container and can reuse Spring Boot's default spring.datasource configuration keys.
/**
 * Data source configuration: the core class.
 *
 * @author leo.z.l
 * @create 2019-09-07 16:27
 */
@Configuration
@EnableTransactionManagement
public class DruidDatasourceConfig {

    private static final String SPLIT_DATASOURCE_NAMES = ",";

    @Autowired
    private DatasourceConfig datasourceConfig;

    private LinkedHashMap<Object, Object> targetDatasource = new LinkedHashMap<>();

    private List<String> customDataSourceNames = new ArrayList<>();

    private Logger logger = LogManager.getLogger(DruidDatasourceConfig.class);

    private ConversionService conversionService = new DefaultConversionService();

    /**
     * Druid monitoring servlet (the built-in stat view page under /druid/*).
     */
    @Bean
    public ServletRegistrationBean druidServlet() {
        ServletRegistrationBean reg = new ServletRegistrationBean();
        reg.setServlet(new StatViewServlet());
        reg.addUrlMappings("/druid/*");
        reg.addInitParameter("loginUsername", "root");
        reg.addInitParameter("loginPassword", "root");
        return reg;
    }

    /**
     * Druid web stat filter.
     */
    @Bean
    public FilterRegistrationBean filterRegistrationBean() {
        FilterRegistrationBean filterRegistrationBean = new FilterRegistrationBean();
        filterRegistrationBean.setFilter(new WebStatFilter());
        filterRegistrationBean.addUrlPatterns("/*");
        filterRegistrationBean.addInitParameter("exclusions", "*.js,*.gif,*.jpg,*.png,*.css,*.ico,/druid/*");
        filterRegistrationBean.addInitParameter("profileEnable", "true");
        filterRegistrationBean.addInitParameter("principalCookieName", "USER_COOKIE");
        filterRegistrationBean.addInitParameter("principalSessionName", "USER_SESSION");
        return filterRegistrationBean;
    }

    /**
     * The routing data source; the first configured data source becomes the default.
     */
    @Bean(name = "dataSource")
    @Primary
    public AbstractRoutingDataSource dataSource() {
        DynamicDataSource dynamicDataSource = new DynamicDataSource();
        initCustomDataSources();
        dynamicDataSource.setDefaultTargetDataSource(targetDatasource.get(customDataSourceNames.get(0)));
        dynamicDataSource.setTargetDataSources(targetDatasource);
        dynamicDataSource.afterPropertiesSet();
        return dynamicDataSource;
    }

    /**
     * Initializes the configured data sources.
     */
    private void initCustomDataSources() {
        // Read the data source names and the shared Druid settings
        Map<String, Object> druidConfig = datasourceConfig.getDruid();
        Map<String, Object> customDatasourceConfig = datasourceConfig.getCustom();
        String dataSourceNames = (String) customDatasourceConfig.get("name");
        if (StringUtils.isEmpty(dataSourceNames)) {
            logger.error("The multiple data source list is empty.");
        } else {
            for (String dataSourceName : dataSourceNames.split(SPLIT_DATASOURCE_NAMES)) {
                try {
                    // Start from the shared Druid settings, then overlay the per-data-source values
                    MutablePropertyValues dataSourcePropertyValue = new MutablePropertyValues(druidConfig);
                    Map<String, Object> customerDatasourceConfig = (Map<String, Object>) customDatasourceConfig.get(dataSourceName);
                    customerDatasourceConfig.forEach((key, value) -> dataSourcePropertyValue.addPropertyValue(key, value));
                    DataSource ds = initDruidDatasource(dataSourcePropertyValue, dataSourceName);
                    if (ds != null) {
                        logger.info("Data source initialization 【" + dataSourceName + "】 successfully ...");
                    }
                } catch (Exception e) {
                    logger.error("Data source initialization【" + dataSourceName + "】 failed ...", e);
                }
            }
        }
    }

    private DataSource initDruidDatasource(MutablePropertyValues dataSourcePropertyValue, String dataSourceName) {
        DruidDataSource ds = new DruidDataSource();
        // Check the configured pool type
        Object typeValue = dataSourcePropertyValue.get("type");
        String type = typeValue == null ? null : typeValue.toString();
        if (StringUtils.isEmpty(type)) {
            logger.info("The data source type is empty!");
            return null;
        }
        if (!StringUtils.equals(type, DruidDataSource.class.getTypeName())) {
            logger.info("The data source type does not match DruidDataSource!");
            return null;
        }
        // Bind the merged properties onto the DruidDataSource using relaxed binding
        RelaxedDataBinder dataBinder = new RelaxedDataBinder(ds);
        dataBinder.setConversionService(conversionService);
        dataBinder.setIgnoreInvalidFields(false);
        dataBinder.setIgnoreNestedProperties(false);
        dataBinder.setIgnoreUnknownFields(true);
        dataBinder.bind(dataSourcePropertyValue);
        ds.setName(dataSourceName);
        // Druid filters
        try {
            ds.setFilters((String) dataSourcePropertyValue.get("filters"));
        } catch (SQLException e) {
            logger.error("druid configuration initialization filter", e);
        }
        customDataSourceNames.add(dataSourceName);
        DynamicDataSourceContextHolder.datasourceId.add(dataSourceName);
        targetDatasource.put(dataSourceName, ds);
        return ds;
    }
}
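@EnableTransactionManagement is declared, but the listing does not show a transaction manager bean. With a routing data source the usual arrangement is to bind a DataSourceTransactionManager to the routing dataSource bean: the transaction then opens its connection against whichever target was selected beforehand, which is why the aspect runs with @Order(-1). The bean method below is a minimal sketch of my own, not code from the original article:

    @Bean
    public PlatformTransactionManager transactionManager(@Qualifier("dataSource") DataSource dataSource) {
        // The data source key must already be set when the transaction's connection
        // is obtained, hence the aspect's @Order(-1).
        return new DataSourceTransactionManager(dataSource);
    }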
DynamicDataSource
Determines which data source to use: AbstractRoutingDataSource resolves the returned key against the target map and falls back to the default data source when the key is null.
/**
 * @author leo
 */
public class DynamicDataSource extends AbstractRoutingDataSource {

    /**
     * Returns the data source key bound to the current thread (or null for the default).
     */
    @Override
    protected Object determineCurrentLookupKey() {
        return DynamicDataSourceContextHolder.getDataSourceType();
    }
}
DynamicDataSourceContextHolder
Utility methods for managing the data source bound to the current thread.
/**
 * @author leo
 */
public class DynamicDataSourceContextHolder {

    /**
     * A ThreadLocal keeps an independent copy of the current data source key for each
     * thread, so one thread can switch data sources without affecting the others.
     */
    private static final ThreadLocal<String> CONTEXT_HOLDER = new ThreadLocal<String>();

    /**
     * All registered data source ids, used to check whether a data source exists.
     */
    public static List<String> datasourceId = new ArrayList<String>();

    /**
     * Binds the given data source key to the current thread.
     *
     * @param dataSourceType the data source id
     */
    public static void setDataSourceType(String dataSourceType) {
        CONTEXT_HOLDER.set(dataSourceType);
    }

    /**
     * Returns the data source key bound to the current thread.
     */
    public static String getDataSourceType() {
        return CONTEXT_HOLDER.get();
    }

    /**
     * Clears the data source key bound to the current thread.
     */
    public static void clearDataSourceType() {
        CONTEXT_HOLDER.remove();
    }

    /**
     * Checks whether the given data source id has been registered.
     *
     * @param dataSourceType the data source id
     */
    public static boolean existDataSource(String dataSourceType) {
        return datasourceId.contains(dataSourceType);
    }
}
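A single ThreadLocal<String> has one limitation: if a method bound to one data source calls another bean's method bound to a different one, the inner @After advice clears the key instead of restoring the outer choice. When nested switching is needed, a per-thread stack is a common alternative. The following is a rough sketch of my own (it needs java.util.ArrayDeque and java.util.Deque) and is not part of the original project:

    // Sketch: a stack-based holder that restores the previous data source on exit.
    private static final ThreadLocal<Deque<String>> CONTEXT_STACK =
            ThreadLocal.withInitial(ArrayDeque::new);

    public static void pushDataSourceType(String dataSourceType) {
        CONTEXT_STACK.get().push(dataSourceType);
    }

    public static String currentDataSourceType() {
        return CONTEXT_STACK.get().peek();
    }

    public static void popDataSourceType() {
        Deque<String> stack = CONTEXT_STACK.get();
        stack.pop();
        if (stack.isEmpty()) {
            CONTEXT_STACK.remove();   // avoid leaking the ThreadLocal on pooled threads
        }
    }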
Usage
Simply annotate the corresponding method:
@Override
@TargetDatasource(name = "db1")
public TradeOrder setPaymentWay(TradeOrder tradeOrder, Consumer<TradeOrder> consumer) {
    consumer.accept(tradeOrder);
    return tradeOrder;
}
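Because the switch happens in a Spring AOP aspect, the annotation only takes effect when the method is called through the Spring proxy; a self-invocation inside the same bean bypasses the aspect and keeps whatever data source is currently bound. A hypothetical read/write split could look like the following (OrderService, OrderMapper and the method bodies are illustrative and not part of the original project):

@Service
public class OrderService {

    @Autowired
    private OrderMapper orderMapper;   // hypothetical mapper / DAO

    /** Writes go to the primary data source. */
    @TargetDatasource(name = "db1")
    public void createOrder(TradeOrder order) {
        orderMapper.insert(order);
    }

    /** Reads go to the second data source. */
    @TargetDatasource(name = "db2")
    public TradeOrder findOrder(Long orderId) {
        return orderMapper.selectById(orderId);
    }
}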
yml configuration
# begin
spring:
  datasource:
    custom:
      name: db1,db2
      db1:
        name: db1
        type: com.alibaba.druid.pool.DruidDataSource
        driver-class-name: com.mysql.jdbc.Driver
        url: jdbc:mysql://ipAddress:3306/dev_tdod
        username: root
        password: 123456
      db2:
        name: db2
        type: com.alibaba.druid.pool.DruidDataSource
        driver-class-name: com.mysql.jdbc.Driver
        url: jdbc:mysql://ipAddress:3306/dev_tdod
        username: root
        password: 123456
    druid:
      initial-size: 5
      min-idle: 5
      async-init: true
      async-close-connection-enable: true
      max-active: 20
      max-wait: 60000
      time-between-eviction-runs-millis: 60000
      min-evictable-idle-time-millis: 30000
      validation-query: SELECT 1 FROM DUAL
      test-while-idle: true
      test-on-borrow: false
      test-on-return: false
      pool-prepared-statements: true
      max-pool-prepared-statement-per-connection-size: 20
      filters: stat,slf4j
# end