Spring Boot + ShardingSphere + Druid: dynamic data source switching and database/table sharding
Druid
Built by the Alibaba Cloud DataWorks team (https://help.aliyun.com/document_detail/137663.html), Druid is a database connection pool designed with monitoring in mind.
Project repository: https://gitcode.com/gh_mirrors/druid/druid
Add the dependency
<dependency>
    <groupId>org.apache.shardingsphere</groupId>
    <artifactId>sharding-jdbc-spring-boot-starter</artifactId>
    <version>4.0.0-RC1</version>
</dependency>
YAML configuration file
spring:
  datasource:
    type: com.alibaba.druid.pool.DruidDataSource
    druid:
      first: # data source 1
        driverClassName: com.mysql.jdbc.Driver
        url: ******
        username: ******
        password: ******
      second: # data source 2
        driverClassName: oracle.jdbc.OracleDriver
        url: ******
        username: ******
        password: ******
      third: # data source 3
        driverClassName: oracle.jdbc.OracleDriver
        url: ******
        username: ******
        password: ******
      fourth: # data source 4
        driverClassName: oracle.jdbc.OracleDriver
        url: ******
        username: ******
        password: ******
      fifth: # data source 5
        driverClassName: com.mysql.jdbc.Driver
        url: ******
        username: ******
        password: ******
Create a configuration class that binds the data source properties
import com.alibaba.druid.pool.DruidDataSource;
import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;

/**
 * @author wuzhenyong
 * ClassName: DataSourceProperties.java
 * date: 2022-06-21 16:12
 * Description:
 */
@Data
@Configuration
@ConfigurationProperties(prefix = "spring.datasource.druid")
public class DataSourceProperties {
    private DruidDataSource first;
    private DruidDataSource second;
    private DruidDataSource third;
    private DruidDataSource fourth;
    private DruidDataSource fifth;
}
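Spring Boot's relaxed binding fills each nested block under spring.datasource.druid into its own DruidDataSource instance, so getFirst(), getSecond(), and so on return ready-to-use pools. Below is a minimal sketch for verifying that binding at startup; the checker class is illustrative and not part of the original setup.

import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;

// Hypothetical startup check (not from the original post): prints the JDBC URL
// bound to each Druid pool so a misconfigured key or prefix shows up immediately.
@Component
public class DataSourceBindingChecker implements CommandLineRunner {

    private final DataSourceProperties properties;

    public DataSourceBindingChecker(DataSourceProperties properties) {
        this.properties = properties;
    }

    @Override
    public void run(String... args) {
        System.out.println("first  -> " + properties.getFirst().getUrl());
        System.out.println("second -> " + properties.getSecond().getUrl());
        System.out.println("third  -> " + properties.getThird().getUrl());
    }
}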
Data source name constants
public interface DataSourceNames {
    String FIRST = "first";
    String SECOND = "second";
    String THIRD = "third";
    String FOURTH = "fourth";
    String FIFTH = "fifth";
}
Sharding data source configuration and the dynamic data source
package com.cnpc.datasources.sharding;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

import javax.sql.DataSource;

import com.cnpc.datasources.DataSourceNames;
import com.cnpc.datasources.DataSourceProperties;
import com.cnpc.datasources.DynamicDataSource;
import com.google.common.collect.Lists;
import lombok.SneakyThrows;
import org.apache.shardingsphere.api.config.sharding.KeyGeneratorConfiguration;
import org.apache.shardingsphere.api.config.sharding.ShardingRuleConfiguration;
import org.apache.shardingsphere.api.config.sharding.TableRuleConfiguration;
import org.apache.shardingsphere.api.config.sharding.strategy.InlineShardingStrategyConfiguration;
import org.apache.shardingsphere.api.config.sharding.strategy.ShardingStrategyConfiguration;
import org.apache.shardingsphere.shardingjdbc.api.ShardingDataSourceFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.core.annotation.Order;
import org.springframework.core.env.Environment;

/**
 * Sharding data source
 *
 * @author zhaolei
 * @date 2020-12-03
 */
@Configuration
@Order(3)
public class ShardingDataSourceConfig {

    String dbNames = DataSourceNames.FIRST;

    @Autowired
    private DataSourceProperties properties;

    @Autowired
    private Environment env;

    public static Map<String, DataSource> dataSourceMap = new HashMap<>();

    @Bean
    @Primary
    public DynamicDataSource dataSource() {
        System.out.println("Initializing multiple data sources...");
        Map<Object, Object> targetDataSources = new HashMap<>(16);
        // The "first" entry is the sharding data source; it also serves as the default target.
        DataSource shardingDataSource = buildDataSource();
        targetDataSources.put(DataSourceNames.FIRST, shardingDataSource);
        targetDataSources.put(DataSourceNames.SECOND, properties.getSecond());
        targetDataSources.put(DataSourceNames.THIRD, properties.getThird());
        targetDataSources.put(DataSourceNames.FOURTH, properties.getFourth());
        targetDataSources.put(DataSourceNames.FIFTH, properties.getFifth());
        return new DynamicDataSource(shardingDataSource, targetDataSources);
    }

    @SneakyThrows
    private DataSource buildDataSource() {
        dataSourceMap.put(DataSourceNames.FIRST, properties.getFirst());
        String[] split = dbNames.split(",");

        // Sharding rules: how data is split across databases and tables
        ShardingRuleConfiguration conf = new ShardingRuleConfiguration();

        // Table rule for t_wx_push_info: actual data nodes t_wx_push_info1..t_wx_push_info8
        TableRuleConfiguration tableRule = new TableRuleConfiguration("t_wx_push_info", split[0] + ".t_wx_push_info$->{1..8}");
        // Key generation strategy
        KeyGeneratorConfiguration keyGen = new KeyGeneratorConfiguration("PUSHINFO", "id");
        tableRule.setKeyGeneratorConfig(keyGen);
        // Table sharding strategy: route by id
        ShardingStrategyConfiguration tableShardingStrategyConfig = new InlineShardingStrategyConfiguration("id", "t_wx_push_info$->{id % 8 + 1}");
        tableRule.setTableShardingStrategyConfig(tableShardingStrategyConfig);

        // Table rule for t_wx_push_info_details
        TableRuleConfiguration table2Rule = new TableRuleConfiguration("t_wx_push_info_details", split[0] + ".t_wx_push_info_details$->{1..8}");
        // Key generation strategy
        KeyGeneratorConfiguration key2Gen = new KeyGeneratorConfiguration("PUSHINFODETAIL", "id");
        table2Rule.setKeyGeneratorConfig(key2Gen);
        // Table sharding strategy: route by info_type
        ShardingStrategyConfiguration tableSharding2StrategyConfig = new InlineShardingStrategyConfiguration("info_type", "t_wx_push_info_details$->{info_type % 8 + 1}");
        table2Rule.setTableShardingStrategyConfig(tableSharding2StrategyConfig);

        conf.setTableRuleConfigs(Lists.newArrayList(tableRule, table2Rule));

        Properties props = new Properties();
        props.put("sql.show", true);
        return ShardingDataSourceFactory.createDataSource(dataSourceMap, conf, props);
    }
}
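For clarity, the inline expression t_wx_push_info$->{id % 8 + 1} resolves the sharding column value to one of the eight physical tables t_wx_push_info1 .. t_wx_push_info8. The standalone sketch below only illustrates that arithmetic; ShardingSphere evaluates the Groovy inline expression itself at routing time.

// Illustration of the routing arithmetic behind "t_wx_push_info$->{id % 8 + 1}".
public class InlineShardingDemo {

    static String route(long id) {
        long suffix = id % 8 + 1;          // same arithmetic as the inline expression
        return "t_wx_push_info" + suffix;  // physical table name
    }

    public static void main(String[] args) {
        System.out.println(route(10L));    // 10 % 8 + 1 = 3 -> t_wx_push_info3
        System.out.println(route(16L));    // 16 % 8 + 1 = 1 -> t_wx_push_info1
    }
}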
DynamicDataSource: the dynamic routing data source
import java.util.Map;

import javax.sql.DataSource;

import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;

public class DynamicDataSource extends AbstractRoutingDataSource {

    private static final ThreadLocal<String> CONTEXT_HOLDER = new ThreadLocal<>();

    public DynamicDataSource(DataSource defaultTargetDataSource, Map<Object, Object> targetDataSources) {
        super.setDefaultTargetDataSource(defaultTargetDataSource);
        super.setTargetDataSources(targetDataSources);
        super.afterPropertiesSet();
    }

    @Override
    protected Object determineCurrentLookupKey() {
        return getDataSource();
    }

    public static void setDataSource(String dataSource) {
        CONTEXT_HOLDER.set(dataSource);
    }

    public static String getDataSource() {
        return CONTEXT_HOLDER.get();
    }

    public static void clearDataSource() {
        CONTEXT_HOLDER.remove();
    }
}
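Besides the annotation-driven aspect shown next, these static helpers also allow switching data sources programmatically. A minimal sketch follows (the DAO call is a placeholder); the important detail is clearing the ThreadLocal in finally so the routing key cannot leak to the next request served by the same thread.

// Illustrative programmatic switch to the "second" Oracle source.
public void writeToSecond() {
    DynamicDataSource.setDataSource(DataSourceNames.SECOND);
    try {
        // JDBC / MyBatis calls here run against the "second" data source
        // secondDao.insert(record);  // placeholder
    } finally {
        DynamicDataSource.clearDataSource();
    }
}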
The multi-data-source annotation
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @interface DataSource {
    String name() default "";
}
The multi-data-source aspect
import java.lang.reflect.Method;

import com.cnpc.datasources.DataSourceNames;
import com.cnpc.datasources.DynamicDataSource;
import com.cnpc.datasources.annotation.DataSource;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.aspectj.lang.reflect.MethodSignature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.Ordered;
import org.springframework.stereotype.Component;

/**
 * Multi-data-source aspect.
 *
 * @author YangMQ
 */
@Aspect
@Component
public class DataSourceAspect implements Ordered {

    protected Logger logger = LoggerFactory.getLogger(getClass());

    @Pointcut("@annotation(com.cnpc.datasources.annotation.DataSource)")
    public void dataSourcePointCut() {
    }

    @Around("dataSourcePointCut()")
    public Object around(ProceedingJoinPoint point) throws Throwable {
        MethodSignature signature = (MethodSignature) point.getSignature();
        Method method = signature.getMethod();
        DataSource ds = method.getAnnotation(DataSource.class);
        if (ds == null) {
            DynamicDataSource.setDataSource(DataSourceNames.FIRST);
            logger.debug("set datasource is " + DataSourceNames.FIRST);
        } else {
            DynamicDataSource.setDataSource(ds.name());
            logger.debug("set datasource is " + ds.name());
        }
        try {
            return point.proceed();
        } finally {
            DynamicDataSource.clearDataSource();
            logger.debug("clean datasource");
        }
    }

    @Override
    public int getOrder() {
        return 1;
    }
}
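With the aspect registered, a service method only needs the annotation to be routed to another data source. A usage sketch follows; the class, method, and mapper names are illustrative. Note that methods without the annotation are not matched by the pointcut at all, so they simply fall back to the DynamicDataSource default target, i.e. the sharding data source built from "first".

import java.util.Collections;
import java.util.List;
import java.util.Map;

import org.springframework.stereotype.Service;

// Illustrative service (names are placeholders, not from the original post).
@Service
public class PushInfoService {

    // Intercepted by DataSourceAspect and routed to the "second" Oracle source.
    @DataSource(name = DataSourceNames.SECOND)
    public List<Map<String, Object>> queryFromSecond() {
        // return secondMapper.selectAll();  // placeholder
        return Collections.emptyList();
    }

    // Not annotated: the pointcut does not match, so the default target
    // (the sharding data source) handles this call, and writes to
    // t_wx_push_info are routed to the physical tables by id.
    public void saveWxPushInfo(long id, String content) {
        // wxPushInfoMapper.insert(id, content);  // placeholder
    }
}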
With all of the above in place, dynamic data source switching and database/table sharding are ready to use.