数据库

-- Disable FK checks so tables can be dropped/recreated in any order.
SET FOREIGN_KEY_CHECKS=0;

-- ----------------------------
-- Table structure for test_record
-- ----------------------------
-- Logical table for ShardingSphere monthly sharding; physical tables are named
-- test_record_yyyyMM (see DruidConfig). `time` is the sharding column, which is
-- why it participates in the unique key.
-- NOTE(review): no PRIMARY KEY is declared — uniqueness relies only on uni_1;
-- confirm this is intentional for the sharded physical tables.
-- NOTE(review): CHARSET=utf8 (3-byte) cannot store all of Unicode; consider
-- utf8mb4 if emoji/supplementary characters may appear in `str`.
DROP TABLE IF EXISTS `test_record`;
CREATE TABLE `test_record` (
  `id` int(11) NOT NULL,
  `str` varchar(255) DEFAULT NULL,
  `time` datetime DEFAULT NULL,
  UNIQUE KEY `uni_1` (`id`,`time`) USING BTREE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for test_log
-- ----------------------------
-- (A duplicated `SET FOREIGN_KEY_CHECKS=0;` that followed the test_record DDL
-- was removed: foreign key checks are already disabled once at the top of the
-- script, and the second table had no matching statement — it was accidental.)
-- `wait_time` plays the same role as test_record.`time`: part of the unique key.
DROP TABLE IF EXISTS `test_log`;
CREATE TABLE `test_log` (
  `id` int(11) NOT NULL,
  `name` varchar(255) DEFAULT NULL,
  `wait_time` datetime DEFAULT NULL,
  UNIQUE KEY `uni_1` (`id`,`wait_time`) USING BTREE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

pom依赖

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
	<modelVersion>4.0.0</modelVersion>
	<parent>
		<groupId>org.springframework.boot</groupId>
		<artifactId>spring-boot-starter-parent</artifactId>
		<version>2.7.0</version>
		<relativePath/> <!-- lookup parent from repository -->
	</parent>
	<groupId>com.demo</groupId>
	<artifactId>test</artifactId>
	<version>0.0.1-SNAPSHOT</version>
	<name>test</name>
	<description>Demo project for Spring Boot</description>
	<properties>
		<java.version>1.8</java.version>
	</properties>
	<dependencies>

		<!-- Spring Boot core starter -->
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter</artifactId>
		</dependency>

		<!-- AOP starter: required by DataSourceAspect (@Aspect/@Around) -->
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-aop</artifactId>
		</dependency>

		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-web</artifactId>
		</dependency>

		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-test</artifactId>
			<scope>test</scope>
		</dependency>

		<!-- Alibaba Druid connection pool (spring-boot starter) -->
		<dependency>
			<groupId>com.alibaba</groupId>
			<artifactId>druid-spring-boot-starter</artifactId>
			<version>1.1.14</version>
		</dependency>

		<dependency>
			<groupId>mysql</groupId>
			<artifactId>mysql-connector-java</artifactId>
			<scope>runtime</scope>
		</dependency>

		<!-- NOTE(review): 1.3.2 predates Spring Boot 2.7; verify compatibility
		     (mybatis-spring-boot-starter 2.x targets Boot 2.x) -->
		<dependency>
			<groupId>org.mybatis.spring.boot</groupId>
			<artifactId>mybatis-spring-boot-starter</artifactId>
			<version>1.3.2</version>
		</dependency>

		<dependency>
			<groupId>org.projectlombok</groupId>
			<artifactId>lombok</artifactId>
			<optional>true</optional>
		</dependency>

		<!-- https://mvnrepository.com/artifact/org.apache.commons/commons-dbcp2 -->
		<!-- NOTE(review): dbcp2 coexists with Druid here; confirm it is actually
		     needed (e.g. required transitively by ShardingSphere) -->
		<dependency>
			<groupId>org.apache.commons</groupId>
			<artifactId>commons-dbcp2</artifactId>
			<version>2.1.1</version>
		</dependency>

		<!-- ShardingSphere JDBC: used programmatically in DruidConfig (the
		     spring-boot-starter auto-configuration is not relied upon) -->
		<dependency>
			<groupId>org.apache.shardingsphere</groupId>
			<artifactId>shardingsphere-jdbc-core-spring-boot-starter</artifactId>
			<version>5.1.1</version>
		</dependency>

	</dependencies>

	<build>
		<plugins>
			<!-- NOTE(review): since Lombok is optional/compile-only, consider
			     excluding it from the repackaged jar via <excludes> -->
			<plugin>
				<groupId>org.springframework.boot</groupId>
				<artifactId>spring-boot-maven-plugin</artifactId>
			</plugin>
		</plugins>
	</build>

</project>

yaml文件

spring:
  datasource:
    type: com.alibaba.druid.pool.DruidDataSource
    driverClassName: com.mysql.cj.jdbc.Driver
    druid:
      # Master (primary) data source — always enabled, routing default
      master:
        url: jdbc:mysql://localhost:3306/xyz?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&useSSL=true&serverTimezone=GMT%2B8
        username: xxx
        password: xxx
      # Slave (read replica) data source
      slave:
        # switch for the slave data source; disabled by default
        enabled: false
        url: jdbc:mysql://localhost:3306/uvw?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&useSSL=true&serverTimezone=GMT%2B8
        username: xxx
        password: xxx
      # Historical-data source (wrapped by ShardingSphere in DruidConfig)
      history:
        # switch for the history data source; enabled here
        enabled: true
        url: jdbc:mysql://localhost:3306/uvw?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&useSSL=true&serverTimezone=GMT%2B8
        username: xxx
        password: xxx
      # Initial number of pooled connections
      initialSize: 5
      # Minimum number of idle connections kept in the pool
      minIdle: 10
      # Maximum number of active connections
      maxActive: 20
      # Max time (ms) to wait when acquiring a connection
      maxWait: 60000
      # Interval (ms) between eviction runs that close idle connections
      timeBetweenEvictionRunsMillis: 60000
      # Minimum time (ms) a connection may sit idle before eviction
      minEvictableIdleTimeMillis: 300000
      # Maximum time (ms) a connection may sit idle before forced eviction
      maxEvictableIdleTimeMillis: 900000
      # Query used to validate connections (see testWhileIdle/testOnBorrow below)
      validationQuery: SELECT 1 FROM DUAL
      testWhileIdle: true
      testOnBorrow: false
      testOnReturn: false
      webStatFilter:
        enabled: true
      statViewServlet:
        enabled: true
        # IP whitelist; leaving it empty allows all clients
        allow:
        url-pattern: /druid/*
        # console credentials; empty means no login required
        login-username:
        login-password:
      filter:
        stat:
          enabled: true
          # slow-SQL logging: record statements slower than slow-sql-millis
          log-slow-sql: true
          slow-sql-millis: 1000
          merge-sql: true
        wall:
          config:
            multi-statement-allow: true

动态数据源

/**
 * Enumeration of the available data sources; the enum name is used as the
 * routing key registered in {@code DynamicDataSource}.
 */
public enum DataSourceType
{
    /**
     * Master (primary) database.
     */
    MASTER,

    /**
     * Slave (read replica) database.
     */
    SLAVE,

    /**
     * Historical-data database (wrapped by ShardingSphere in DruidConfig).
     */
    HISTORY
}
import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;

import javax.sql.DataSource;
import java.util.Map;

/**
 * Routing data source: every connection request is delegated to one of the
 * registered target data sources, selected by the key that the current thread
 * stored in {@code DynamicDataSourceContextHolder}.
 */
public class DynamicDataSource extends AbstractRoutingDataSource
{
    public DynamicDataSource(DataSource defaultTargetDataSource, Map<Object, Object> targetDataSources)
    {
        // Register the candidate targets and the fallback, then resolve them
        // eagerly so the router is usable as soon as construction finishes.
        super.setTargetDataSources(targetDataSources);
        super.setDefaultTargetDataSource(defaultTargetDataSource);
        super.afterPropertiesSet();
    }

    @Override
    protected Object determineCurrentLookupKey()
    {
        // The routing key bound to this thread; a null key makes the parent
        // class fall back to the default target data source.
        return DynamicDataSourceContextHolder.getDataSourceType();
    }
}
import com.alibaba.druid.pool.DruidDataSource;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Configuration;

/**
 * Druid connection-pool configuration properties.
 *
 * Values are injected from the flat {@code spring.datasource.druid.*} keys in
 * application.yml and applied to each pool via {@link #dataSource(DruidDataSource)},
 * so master/slave/history all share the same pool tuning.
 */
@Configuration
public class DruidProperties
{
    // initial number of connections created when the pool starts
    @Value("${spring.datasource.druid.initialSize}")
    private int initialSize;

    // minimum number of idle connections kept in the pool
    @Value("${spring.datasource.druid.minIdle}")
    private int minIdle;

    // maximum number of active connections
    @Value("${spring.datasource.druid.maxActive}")
    private int maxActive;

    // max time (ms) to wait when acquiring a connection
    @Value("${spring.datasource.druid.maxWait}")
    private int maxWait;

    // interval (ms) between eviction runs that close idle connections
    @Value("${spring.datasource.druid.timeBetweenEvictionRunsMillis}")
    private int timeBetweenEvictionRunsMillis;

    // minimum time (ms) a connection may sit idle before eviction
    @Value("${spring.datasource.druid.minEvictableIdleTimeMillis}")
    private int minEvictableIdleTimeMillis;

    // maximum time (ms) a connection may sit idle before forced eviction
    @Value("${spring.datasource.druid.maxEvictableIdleTimeMillis}")
    private int maxEvictableIdleTimeMillis;

    // SQL used to validate connections; required for the test* flags below
    @Value("${spring.datasource.druid.validationQuery}")
    private String validationQuery;

    @Value("${spring.datasource.druid.testWhileIdle}")
    private boolean testWhileIdle;

    @Value("${spring.datasource.druid.testOnBorrow}")
    private boolean testOnBorrow;

    @Value("${spring.datasource.druid.testOnReturn}")
    private boolean testOnReturn;

    /**
     * Apply the shared pool settings to the given data source and return it.
     *
     * @param datasource the freshly built Druid pool to configure
     * @return the same instance, configured
     */
    public DruidDataSource dataSource(DruidDataSource datasource)
    {
        /** initial / minimum / maximum pool size */
        datasource.setInitialSize(initialSize);
        datasource.setMaxActive(maxActive);
        datasource.setMinIdle(minIdle);

        /** max time to wait when acquiring a connection */
        datasource.setMaxWait(maxWait);

        /** interval (ms) between runs that detect and close idle connections */
        datasource.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis);

        /** minimum / maximum idle lifetime (ms) of a pooled connection */
        datasource.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
        datasource.setMaxEvictableIdleTimeMillis(maxEvictableIdleTimeMillis);

        /**
         * Validation SQL (commonly SELECT 'x'). If validationQuery is null,
         * testOnBorrow, testOnReturn and testWhileIdle all have no effect.
         */
        datasource.setValidationQuery(validationQuery);
        /** Recommended true: cheap — validates only when idle time exceeds timeBetweenEvictionRunsMillis. */
        datasource.setTestWhileIdle(testWhileIdle);
        /** Validate on borrow; correct but costs a round-trip per checkout. */
        datasource.setTestOnBorrow(testOnBorrow);
        /** Validate on return; also costs a round-trip per check-in. */
        datasource.setTestOnReturn(testOnReturn);
        return datasource;
    }
}

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Per-thread holder of the data-source routing key consumed by
 * {@code DynamicDataSource#determineCurrentLookupKey()}.
 */
public class DynamicDataSourceContextHolder
{
    public static final Logger log = LoggerFactory.getLogger(DynamicDataSourceContextHolder.class);

    /**
     * Each thread owns an independent copy of the routing key, so switching
     * the data source on one thread never leaks into another thread.
     */
    private static final ThreadLocal<String> ROUTING_KEY = new ThreadLocal<>();

    /**
     * Bind the routing key for the current thread.
     */
    public static void setDataSourceType(String dsType)
    {
        log.info("切换到{}数据源", dsType);
        ROUTING_KEY.set(dsType);
    }

    /**
     * Read the routing key bound to the current thread (may be null).
     */
    public static String getDataSourceType()
    {
        return ROUTING_KEY.get();
    }

    /**
     * Unbind the routing key so lookups fall back to the default data source.
     */
    public static void clearDataSourceType()
    {
        ROUTING_KEY.remove();
    }
}
import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.spring.boot.autoconfigure.DruidDataSourceBuilder;
import com.alibaba.druid.spring.boot.autoconfigure.properties.DruidStatProperties;
import com.alibaba.druid.util.Utils;
import org.apache.shardingsphere.driver.api.ShardingSphereDataSourceFactory;
import org.apache.shardingsphere.infra.config.algorithm.ShardingSphereAlgorithmConfiguration;
import org.apache.shardingsphere.sharding.api.config.ShardingRuleConfiguration;
import org.apache.shardingsphere.sharding.api.config.rule.ShardingTableRuleConfiguration;
import org.apache.shardingsphere.sharding.api.config.strategy.sharding.ComplexShardingStrategyConfiguration;
import org.apache.shardingsphere.sharding.api.config.strategy.sharding.StandardShardingStrategyConfiguration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.web.servlet.FilterRegistrationBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;

import javax.servlet.*;
import javax.sql.DataSource;
import java.io.IOException;
import java.sql.SQLException;
import java.util.*;

/**
 * Druid multi data-source configuration.
 *
 * Builds up to three pools (master / slave / history). The history pool is
 * wrapped in a ShardingSphere data source that routes the logical table
 * {@code test_record} to monthly physical tables, and all pools are combined
 * into the primary {@link DynamicDataSource} routed per thread by
 * {@code DynamicDataSourceContextHolder}.
 */
@Configuration
public class DruidConfig {

    private static final Logger logger = LoggerFactory.getLogger(DruidConfig.class);

    // Connection settings of the history source, read directly because the
    // ShardingSphere wrapper below is assembled by hand, not bound by Spring.
    @Value("${spring.datasource.druid.history.url}")
    private String url;
    @Value("${spring.datasource.druid.history.username}")
    private String username;
    @Value("${spring.datasource.druid.history.password}")
    private String password;
    @Value("${spring.datasource.driverClassName}")
    private String driverClassName;

    /** Logical table routed by ShardingSphere to test_record_yyyyMM tables. */
    private static final String LOGIC_TABLE = "test_record";

    /** Master pool; always created and used as the routing default. */
    @Bean
    @ConfigurationProperties("spring.datasource.druid.master")
    public DataSource masterDataSource(DruidProperties druidProperties) {
        DruidDataSource dataSource = DruidDataSourceBuilder.create().build();
        return druidProperties.dataSource(dataSource);
    }

    /** Slave pool; only created when spring.datasource.druid.slave.enabled=true. */
    @Bean
    @ConfigurationProperties("spring.datasource.druid.slave")
    @ConditionalOnProperty(prefix = "spring.datasource.druid.slave", name = "enabled", havingValue = "true")
    public DataSource slaveDataSource(DruidProperties druidProperties) {
        DruidDataSource dataSource = DruidDataSourceBuilder.create().build();
        return druidProperties.dataSource(dataSource);
    }

    /**
     * History pool wrapped in a ShardingSphere data source.
     *
     * The logical table {@code test_record} is split by month into
     * test_record_202201 .. test_record_202212, sharded on the {@code time}
     * column with the algorithm registered under
     * {@code ShardingConstants.SHARDING_ALGORITHM_KEY} (loaded via SPI by its
     * type name {@code month_standard}).
     */
    @Bean
    @ConfigurationProperties("spring.datasource.druid.history")
    @ConditionalOnProperty(prefix = "spring.datasource.druid.history", name = "enabled", havingValue = "true")
    public DataSource historyDataSource(DruidProperties druidProperties) throws SQLException {

        DruidDataSource dataSource = DruidDataSourceBuilder.create().build();
        dataSource.setUrl(url);
        dataSource.setUsername(username);
        dataSource.setPassword(password);
        dataSource.setDriverClassName(driverClassName);
        Map<String, DataSource> dataSourceMap = new LinkedHashMap<>();
        dataSourceMap.put("ds", dataSource);

        logger.info("开始构建表:[{}]分片算法", LOGIC_TABLE);
        // Table rule: actual data nodes for the 12 monthly tables of 2022.
        ShardingTableRuleConfiguration tableRuleConfig = new ShardingTableRuleConfiguration(LOGIC_TABLE,
                "ds.test_record_20220${1..9},ds.test_record_20221${0..2}");
        // Standard sharding strategy on the `time` column. A complex strategy
        // over (time, id) is the alternative:
        //   new ComplexShardingStrategyConfiguration("time,id", ShardingConstants.SHARDING_ALGORITHM_KEY)
        // paired with ShardingConstants.MONTH_COMPLEX below.
        String shardingColumn = "time";
        tableRuleConfig.setTableShardingStrategy(new StandardShardingStrategyConfiguration(shardingColumn, ShardingConstants.SHARDING_ALGORITHM_KEY));

        // Sharding rule holding the single table rule above.
        ShardingRuleConfiguration shardingRuleConfig = new ShardingRuleConfiguration();
        shardingRuleConfig.getTables().add(tableRuleConfig);

        // Register the month-based standard algorithm by its SPI type name.
        shardingRuleConfig.getShardingAlgorithms().put(ShardingConstants.SHARDING_ALGORITHM_KEY,
                new ShardingSphereAlgorithmConfiguration(ShardingConstants.MONTH_STANDARD, new Properties()));

        Properties properties = new Properties();
        // Print the rewritten SQL for debugging.
        properties.setProperty("sql-show", "true");
        return ShardingSphereDataSourceFactory.createDataSource(dataSourceMap, Collections.singleton(shardingRuleConfig), properties);
    }

    /** Primary routing data source; master is the default target. */
    @Bean(name = "dynamicDataSource")
    @Primary
    public DynamicDataSource dataSource(DataSource masterDataSource) {
        Map<Object, Object> targetDataSources = new HashMap<>();
        targetDataSources.put(DataSourceType.MASTER.name(), masterDataSource);
        // SLAVE can be registered the same way once its pool is enabled:
        // setDataSource(targetDataSources, DataSourceType.SLAVE.name(), "slaveDataSource");
        setDataSource(targetDataSources, DataSourceType.HISTORY.name(), "historyDataSource");
        return new DynamicDataSource(masterDataSource, targetDataSources);
    }

    /**
     * Register an optional data source bean into the routing map.
     *
     * @param targetDataSources candidate data-source map
     * @param sourceName        routing key (DataSourceType name)
     * @param beanName          Spring bean name to look up
     */
    public void setDataSource(Map<Object, Object> targetDataSources, String sourceName, String beanName) {
        try {
            DataSource dataSource = SpringUtils.getBean(beanName);
            targetDataSources.put(sourceName, dataSource);
        } catch (Exception e) {
            // Best effort: the bean legitimately does not exist when that source
            // is disabled — but log it so real misconfiguration stays visible
            // instead of being silently swallowed.
            logger.warn("data source bean [{}] unavailable, [{}] not registered: {}", beanName, sourceName, e.getMessage());
        }
    }

    /**
     * Strip the advertisement from the bottom of the Druid monitoring page.
     */
    @SuppressWarnings({"rawtypes", "unchecked"})
    @Bean
    @ConditionalOnProperty(name = "spring.datasource.druid.statViewServlet.enabled", havingValue = "true")
    public FilterRegistrationBean removeDruidFilterRegistrationBean(DruidStatProperties properties) {
        // Read the stat-view servlet settings to find where common.js is served.
        DruidStatProperties.StatViewServlet config = properties.getStatViewServlet();
        String pattern = config.getUrlPattern() != null ? config.getUrlPattern() : "/druid/*";
        String commonJsPattern = pattern.replaceAll("\\*", "js/common.js");
        final String filePath = "support/http/resources/js/common.js";
        // Filter that rewrites the served common.js with the banner removed.
        Filter filter = new Filter() {
            @Override
            public void init(javax.servlet.FilterConfig filterConfig) throws ServletException {
            }

            @Override
            public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
                    throws IOException, ServletException {
                chain.doFilter(request, response);
                // Reset the body buffer (headers are preserved).
                response.resetBuffer();
                // Load the original common.js from the Druid jar.
                String text = Utils.readFromResource(filePath);
                // Strip the banner/footer advertisement fragments.
                text = text.replaceAll("<a.*?banner\"></a><br/>", "");
                text = text.replaceAll("powered.*?shrek.wang</a>", "");
                response.getWriter().write(text);
            }

            @Override
            public void destroy() {
            }
        };
        FilterRegistrationBean registrationBean = new FilterRegistrationBean();
        registrationBean.setFilter(filter);
        registrationBean.addUrlPatterns(commonJsPattern);
        return registrationBean;
    }
}

import java.lang.annotation.*;

/**
 * Annotation for switching between the configured data sources.
 *
 * Priority: method level first, then class level — a method-level annotation
 * overrides the one on the class; otherwise the class-level value applies.
 */
@Target({ ElementType.METHOD, ElementType.TYPE })
@Retention(RetentionPolicy.RUNTIME)
@Documented
@Inherited
public @interface DataSource
{
    /**
     * The data source to route to (defaults to the master database).
     */
    public DataSourceType value() default DataSourceType.MASTER;
}
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.aspectj.lang.reflect.MethodSignature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;

import java.util.Objects;

/**
 * Aspect that binds the routing key for {@code DynamicDataSource} around any
 * method (or method of a class) annotated with {@code @DataSource}.
 */
@Aspect
@Order(1)
@Component
public class DataSourceAspect {
    protected Logger logger = LoggerFactory.getLogger(getClass());

    /** Matches @DataSource on the method or on the declaring class. */
    @Pointcut("@annotation(com.demo.test.framework.util.DataSource)"
            + "|| @within(com.demo.test.framework.util.DataSource)")
    public void dsPointCut() {

    }

    @Around("dsPointCut()")
    public Object around(ProceedingJoinPoint point) throws Throwable {
        DataSource dataSource = getDataSource(point);

        // Consistent with getDataSource(): use Objects.nonNull for null checks.
        if (Objects.nonNull(dataSource)) {
            DynamicDataSourceContextHolder.setDataSourceType(dataSource.value().name());
        }

        try {
            return point.proceed();
        } finally {
            // Always clear the key after the advised method finishes.
            // NOTE(review): this clears unconditionally, so after a nested
            // @DataSource call the outer method falls back to the default
            // source rather than its own annotation — confirm this is intended.
            DynamicDataSourceContextHolder.clearDataSourceType();
        }
    }

    /**
     * Resolve the effective @DataSource annotation: method-level wins over
     * the class-level annotation.
     */
    public DataSource getDataSource(ProceedingJoinPoint point) {
        MethodSignature signature = (MethodSignature) point.getSignature();
        DataSource dataSource = AnnotationUtils.findAnnotation(signature.getMethod(), DataSource.class);
        if (Objects.nonNull(dataSource)) {
            return dataSource;
        }

        return AnnotationUtils.findAnnotation(signature.getDeclaringType(), DataSource.class);
    }
}

Shardingsphere配置

/**
 * Sharding-related constants.
 *
 * The *type* strings must match what each algorithm's getType() returns,
 * since ShardingSphere resolves algorithms through SPI by type name.
 */
public class ShardingConstants {

    private ShardingConstants() {
    }

    /**
     * Type name of the standard (single-column) month sharding algorithm.
     */
    public static final String MONTH_STANDARD = "month_standard";

    /**
     * Type name of the complex (multi-column) month sharding algorithm.
     */
    public static final String MONTH_COMPLEX = "month_complex";

    /**
     * Key under which the table sharding algorithm is registered in the rule.
     */
    public static final String SHARDING_ALGORITHM_KEY  = "tableShardingAlgorithm";
}
import org.apache.shardingsphere.sharding.api.sharding.complex.ComplexKeysShardingAlgorithm;
import org.apache.shardingsphere.sharding.api.sharding.complex.ComplexKeysShardingValue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Collection;
import java.util.Properties;

/**
 * Complex (multi-column) sharding algorithm skeleton.
 *
 * Intended pair of the commented-out ComplexShardingStrategyConfiguration in
 * DruidConfig (sharding columns "time,id"); the routing itself is left to be
 * implemented.
 */
public class MonthComplexShardingAlgorithm implements ComplexKeysShardingAlgorithm {

    private static final Logger logger = LoggerFactory.getLogger(MonthComplexShardingAlgorithm.class);
    private Properties props = new Properties();

    @Override
    public Collection<String> doSharding(Collection collection, ComplexKeysShardingValue complexKeysShardingValue) {
        // Use the class logger instead of System.out so output respects the
        // application's logging configuration.
        logger.debug("available targets: {}, sharding value: {}", collection, complexKeysShardingValue);
        /**
         * TODO: implement the actual routing. Returning null means no physical
         * table is matched — do not use this algorithm until implemented.
         */
        return null;
    }

    @Override
    public void init() {
        logger.info("初始化复杂分片算法");
    }

    @Override
    public String getType() {
        // Must match ShardingConstants.MONTH_COMPLEX for SPI resolution.
        return ShardingConstants.MONTH_COMPLEX;
    }

    @Override
    public Properties getProps() {
        return props;
    }

    @Override
    public void setProps(Properties props) {
        this.props = props;
    }
}

import com.google.common.collect.Range;
import org.apache.shardingsphere.sharding.api.sharding.standard.PreciseShardingValue;
import org.apache.shardingsphere.sharding.api.sharding.standard.RangeShardingValue;
import org.apache.shardingsphere.sharding.api.sharding.standard.StandardShardingAlgorithm;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Collection;
import java.util.Date;
import java.util.Properties;
import java.util.Set;

/**
 * Month-based standard sharding algorithm for a {@link Date} sharding column.
 *
 * Routes a value to the physical table {@code <logicTable>_yyyyMM}.
 *
 * @author airtrioa
 * @date 2021/4/30
 */
public class MonthStandardShardingAlgorithm implements StandardShardingAlgorithm<Date> {
    private static final Logger logger = LoggerFactory.getLogger(MonthStandardShardingAlgorithm.class);
    private Properties props = new Properties();


    /**
     * Precise routing (= / IN): format the value's month and pick the matching
     * physical table.
     *
     * @throws IllegalArgumentException if the derived table is not among the
     *                                  configured data nodes
     */
    @Override
    public String doSharding(Collection<String> collection, PreciseShardingValue<Date> preciseShardingValue) {
        // Derive the yyyyMM suffix from the sharding value.
        Date value = preciseShardingValue.getValue();
        String s = DateUtil.getFormat(value, "yyyyMM");
        String name = preciseShardingValue.getLogicTableName() + "_" + s;
        if (collection.contains(name)) {
            return name;
        }
        // Include the computed table name so a misconfigured data-node range
        // is immediately diagnosable from the exception message.
        throw new IllegalArgumentException("未找到匹配的数据表:" + name);
    }

    /**
     * Range routing (BETWEEN / < / >): collect every monthly table touched by
     * the range.
     *
     * NOTE(review): lowerEndpoint()/upperEndpoint() throw if the range is
     * unbounded (e.g. `time > ?` alone) — confirm all queries use bounded
     * ranges such as BETWEEN.
     */
    @Override
    public Collection<String> doSharding(Collection<String> collection, RangeShardingValue<Date> rangeShardingValue) {
        Range<Date> valueRange = rangeShardingValue.getValueRange();
        Date lowerEndpoint = valueRange.lowerEndpoint();
        Date upperEndpoint = valueRange.upperEndpoint();

        Set<String> tables = MonthPreciseShardingUtil.rangeShardingTables(collection, lowerEndpoint, upperEndpoint);
        logger.info("定位到分片表:{}", tables);
        return tables;
    }

    @Override
    public void init() {
        logger.info("初始化标准分片算法");
    }

    @Override
    public String getType() {
        // Must match ShardingConstants.MONTH_STANDARD for SPI resolution.
        return ShardingConstants.MONTH_STANDARD;
    }

    @Override
    public Properties getProps() {
        return props;
    }

    @Override
    public void setProps(Properties props) {
        this.props = props;
    }
}


spi导入分片算法

  • 路径src/main/resources/META-INF/services
在该目录下创建一个以 SPI 接口全限定名命名的文件(ShardingSphere 5.x 的分片算法接口为
org.apache.shardingsphere.sharding.spi.ShardingAlgorithm),文件内容是各实现类的全限定名,每行一个:
(包路径).MonthStandardShardingAlgorithm
(包路径).MonthComplexShardingAlgorithm
例:文件内容为 com.test.sharding.MonthStandardShardingAlgorithm

工具类

import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.Date;
import java.util.Random;

/**
 * Date conversion and formatting helpers.
 */
public class DateUtil {

    /** Characters emitted by {@link #getRandomString(int)}: a-z, A-Z, 0-9. */
    private static final String ALPHANUMERIC =
            "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";

    /** Shared RNG; avoids creating (and re-seeding) a new Random on every call. */
    private static final Random RANDOM = new Random();

    private DateUtil() {
    }

    /**
     * Convert a LocalDateTime to a legacy Date using the system default zone.
     */
    public static Date toDate(LocalDateTime localDateTime) {
        // Interpret the local date-time in the JVM's default time zone.
        ZoneId zoneId = ZoneId.systemDefault();
        ZonedDateTime zonedDateTime = localDateTime.atZone(zoneId);
        return Date.from(zonedDateTime.toInstant());
    }

    /**
     * Convert a legacy Date to a LocalDateTime in the system default zone.
     */
    public static LocalDateTime toLocalDateTime(Date date) {
        return Instant.ofEpochMilli(date.getTime()).atZone(ZoneId.systemDefault()).toLocalDateTime();
    }

    /**
     * Format a Date with the given SimpleDateFormat pattern (e.g. "yyyyMM").
     * A new formatter is created per call because SimpleDateFormat is not
     * thread-safe.
     */
    public static String getFormat(Date date, String pattern) {
        SimpleDateFormat sdf = new SimpleDateFormat(pattern);
        return sdf.format(date);
    }

    /**
     * Produce a random alphanumeric string of the requested length.
     *
     * @param length number of characters to generate (0 yields "")
     */
    public static String getRandomString(int length) {
        // StringBuilder (unsynchronized) instead of StringBuffer; index bound
        // derived from the charset length instead of the magic constant 62.
        StringBuilder sb = new StringBuilder(length);
        for (int i = 0; i < length; i++) {
            sb.append(ALPHANUMERIC.charAt(RANDOM.nextInt(ALPHANUMERIC.length())));
        }
        return sb.toString();
    }
}

import org.springframework.aop.framework.AopContext;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.NoSuchBeanDefinitionException;
import org.springframework.beans.factory.config.BeanFactoryPostProcessor;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.stereotype.Component;

/**
 * Spring helper for fetching beans from code that is not Spring-managed.
 *
 * Implemented as a BeanFactoryPostProcessor so the bean factory is captured
 * early during context startup.
 */
@Component
public final class SpringUtils implements BeanFactoryPostProcessor
{
    /** The Spring application context's bean factory, captured at startup. */
    private static ConfigurableListableBeanFactory beanFactory;

    @Override
    public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException
    {
        SpringUtils.beanFactory = beanFactory;
    }

    /**
     * Fetch a bean by name.
     *
     * @param name bean name
     * @return the bean instance registered under the given name
     * @throws BeansException if the bean cannot be obtained
     */
    @SuppressWarnings("unchecked")
    public static <T> T getBean(String name) throws BeansException
    {
        return (T) beanFactory.getBean(name);
    }

    /**
     * Fetch a bean by type.
     *
     * @param clz required bean type
     * @return the unique bean of the given type
     * @throws BeansException if the bean cannot be obtained
     */
    public static <T> T getBean(Class<T> clz) throws BeansException
    {
        // getBean(Class) is already generically typed; the previous (T) cast
        // (and intermediate variable) were redundant.
        return beanFactory.getBean(clz);
    }

    /**
     * Whether the bean factory contains a definition for the given name.
     *
     * @param name bean name
     * @return true if a matching bean definition exists
     */
    public static boolean containsBean(String name)
    {
        return beanFactory.containsBean(name);
    }

    /**
     * Whether the named bean is a singleton (vs prototype).
     *
     * @param name bean name
     * @throws NoSuchBeanDefinitionException if no bean with that name exists
     */
    public static boolean isSingleton(String name) throws NoSuchBeanDefinitionException
    {
        return beanFactory.isSingleton(name);
    }

    /**
     * @param name bean name
     * @return the type of the registered bean
     * @throws NoSuchBeanDefinitionException if no bean with that name exists
     */
    public static Class<?> getType(String name) throws NoSuchBeanDefinitionException
    {
        return beanFactory.getType(name);
    }

    /**
     * Aliases of the given bean name, if any are defined.
     *
     * @param name bean name
     * @throws NoSuchBeanDefinitionException if no bean with that name exists
     */
    public static String[] getAliases(String name) throws NoSuchBeanDefinitionException
    {
        return beanFactory.getAliases(name);
    }

    /**
     * Current AOP proxy of the invoking object.
     *
     * NOTE(review): requires exposeProxy=true on the AOP configuration;
     * AopContext.currentProxy() throws otherwise.
     *
     * @param invoker the target object (used only for type inference)
     */
    @SuppressWarnings("unchecked")
    public static <T> T getAopProxy(T invoker)
    {
        return (T) AopContext.currentProxy();
    }
}

import java.text.SimpleDateFormat;
import java.time.Duration;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.util.*;

/**
 * Helpers for resolving which monthly shard tables a time range touches.
 */
public class MonthPreciseShardingUtil {

    /** Physical-table prefix; suffix is yyyyMM. */
    private static final String TEST_RECORD = "test_record_";


    private static final Integer ONE = 1;
    private static final Integer TWELVE = 12;
    private static final Integer THREE_HUNDRED_SIXTY_FIVE = 365;

    private MonthPreciseShardingUtil() {}

    /**
     * Filter the configured table names down to the months covered by the
     * inclusive range [lowerEndpoint, upperEndpoint].
     *
     * Fixes two defects of the previous implementation:
     * 1) the cursor kept the lower endpoint's time-of-day, so the upper
     *    endpoint's month was skipped whenever the upper bound fell earlier
     *    in its month than the lower bound's time (and equal endpoints
     *    returned nothing) — the cursor is now normalized to the first
     *    instant of the lower month and iterated while not after the upper
     *    endpoint;
     * 2) the shared static SimpleDateFormat was not thread-safe for use from
     *    concurrently invoked sharding algorithms — a local instance is used.
     *
     * @param originalTables configured physical table names
     * @param lowerEndpoint  inclusive lower bound
     * @param upperEndpoint  inclusive upper bound
     * @return matching table names
     */
    public static Set<String> rangeShardingTables(Collection<String> originalTables, Date lowerEndpoint, Date upperEndpoint) {
        Set<String> tables = new HashSet<>();
        // SimpleDateFormat is not thread-safe; create one per invocation.
        SimpleDateFormat sdf = new SimpleDateFormat("yyyyMM");
        Calendar max = Calendar.getInstance();
        max.setTime(upperEndpoint);
        // Normalize the cursor to the first instant of the lower month so the
        // month containing the upper endpoint is never skipped.
        Calendar curr = Calendar.getInstance();
        curr.setTime(lowerEndpoint);
        curr.set(Calendar.DAY_OF_MONTH, 1);
        curr.set(Calendar.HOUR_OF_DAY, 0);
        curr.set(Calendar.MINUTE, 0);
        curr.set(Calendar.SECOND, 0);
        curr.set(Calendar.MILLISECOND, 0);
        while (!curr.after(max)) {
            String name = TEST_RECORD + sdf.format(curr.getTime());
            if (originalTables.contains(name)) {
                tables.add(name);
            }
            curr.add(Calendar.MONTH, 1);
        }

        return tables;
    }

    /**
     * Month numbers (1-12) covered by the range.
     *
     * @param lowerEndpoint inclusive lower bound
     * @param upperEndpoint inclusive upper bound
     * @return ordered set of month numbers
     */
    public static Set<Integer> getSuffixListForRange(LocalDateTime lowerEndpoint, LocalDateTime upperEndpoint) {
        Set<Integer> sets = new TreeSet<>();
        if (Duration.between(lowerEndpoint, upperEndpoint).toDays() > THREE_HUNDRED_SIXTY_FIVE) {
            // Span exceeds one year: every month is covered.
            for (int i = ONE; i <= TWELVE; i++) {
                sets.add(i);
            }
            return sets;
        }
        int lowerEndpointMonth = lowerEndpoint.getMonth().getValue();
        int upperEndpointMonth = upperEndpoint.getMonth().getValue();

        if (lowerEndpointMonth <= upperEndpointMonth) {
            for (int i = lowerEndpointMonth; i <= upperEndpointMonth; i++) {
                sets.add(i);
            }
        } else {
            // Range wraps a year boundary: Jan..upper plus lower..Dec.
            for (int i = ONE; i <= upperEndpointMonth; i++) {
                sets.add(i);
            }
            for (int i = lowerEndpointMonth; i <= TWELVE; i++) {
                sets.add(i);
            }
        }
        return sets;
    }

    /**
     * Month numbers (1-12) covered by the date range (whole days).
     *
     * @param lowerEndpoint inclusive lower bound
     * @param upperEndpoint inclusive upper bound
     * @return ordered set of month numbers
     */
    public static Set<Integer> getSuffixListForRange(LocalDate lowerEndpoint, LocalDate upperEndpoint) {
        return getSuffixListForRange(LocalDateTime.of(lowerEndpoint, LocalTime.MIN), LocalDateTime.of(upperEndpoint, LocalTime.MAX));
    }
}

Mapper

import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;

import java.util.Date;
import java.util.List;

/**
 * Mapper for the sharded logical table {@code test_record}.
 *
 * Class-level @DataSource routes every method through the HISTORY
 * (ShardingSphere-wrapped) data source via DataSourceAspect.
 * NOTE(review): confirm the aspect actually intercepts MyBatis mapper
 * proxies in this setup — @Inherited does not apply to interfaces.
 */
@Mapper
@DataSource(value = DataSourceType.HISTORY)
public interface TestRecordMapper {


    // NOTE(review): BETWEEN is inclusive on both ends; rows exactly at
    // endTime are included — confirm callers expect that.
    @Select("SELECT id,str,time FROM test_record WHERE time BETWEEN #{startTime} AND #{endTime}")
    List<TestRecord> findList(@Param("startTime") Date startTime, @Param("endTime") Date endTime);

    @Select("<script> " +
            "SELECT id,str,time FROM test_record " +
            "WHERE time BETWEEN #{startTime} AND #{endTime} " +
            "and id in <foreach collection=\"ids\" item=\"id\" open=\"(\" separator=\",\" close=\")\">" +
            "#{id}" +
            "</foreach>" +
            "</script>")
    List<TestRecord> findListByIds(@Param("ids") Long[] ids,@Param("startTime") Date startTime, @Param("endTime") Date endTime);

    @Select("SELECT id,str,`time` FROM test_record WHERE `time` = #{time}")
    TestRecord find(@Param("time") Date start);

    // fixed: MySQL's upsert helper is the VALUES(col) function — `value(str)`
    // is not a valid function and fails at execution time
    @Insert("insert into test_record(id,str,`time`)values(#{id},#{str},#{time}) on duplicate key update str = values(str)")
    int add(TestRecord testRecord);

    // Batch upsert; same VALUES(col) fix as add()
    @Insert({"<script> insert into test_record(id,str,`time`) " +
            "values " +
            "<foreach collection=\"list\" item=\"testRecord\" index=\"index\"  separator=\",\"> "+
            "(#{testRecord.id},#{testRecord.str}, #{testRecord.time})  "+
            "</foreach> on duplicate key update str = values(str)" +
            "</script>"})
    int addList(@Param("list") List<TestRecord> testRecordList);
}


import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;

import java.util.Date;
import java.util.List;

// @Mapper is imported above but was missing on this interface, unlike the
// sibling TestRecordMapper; without it (or a @MapperScan covering this
// package) MyBatis never registers the mapper bean.
@Mapper
public interface TestLogMapper {
    /**
     * Range query; the shard router uses [start, end] to limit the scan to
     * the relevant shard tables (BETWEEN is inclusive on both ends, which is
     * intentional here).
     *
     * @param start inclusive lower bound
     * @param end   inclusive upper bound
     * @return matching logs, possibly empty
     */
    @Select("SELECT id,name,wait_time AS waitTime FROM test_log WHERE wait_time BETWEEN #{start} AND #{end}")
    List<TestLog> findList(@Param("start") Date start, @Param("end") Date end);

    /**
     * Point lookup by exact wait_time.
     *
     * @param waitTime the exact {@code wait_time} value to match
     * @return the matching log, or {@code null} if none
     */
    @Select("SELECT id,name,wait_time AS waitTime FROM test_log WHERE wait_time = #{waitTime}")
    TestLog find(@Param("waitTime") Date waitTime);

    /**
     * Upsert a single log row.
     *
     * <p>Note: MySQL's ON DUPLICATE KEY UPDATE helper function is
     * {@code VALUES(col)} — the original {@code value(name)} only parses on
     * MariaDB 10.3.3+ and fails on MySQL with "FUNCTION value does not exist".
     *
     * @return affected-row count (1 for insert, 2 for update per MySQL rules)
     */
    @Insert("insert into test_log(id,name,wait_time)values(#{id},#{name},#{waitTime}) on duplicate key update name = VALUES(name)")
    int add(TestLog testLog);

    /**
     * Batch upsert.
     *
     * @param testLogList logs to insert/update; must be non-empty
     * @return total affected-row count
     */
    @Insert({"<script> insert into test_log(id,name,wait_time) " +
            "values " +
            "<foreach collection=\"list\" item=\"testLog\" index=\"index\"  separator=\",\"> " +
            "(#{testLog.id},#{testLog.name}, #{testLog.waitTime})  " +
            "</foreach> on duplicate key update name = VALUES(name)" +
            "</script>"})
    int addList(@Param("list") List<TestLog> testLogList);
}

实体类

import lombok.Data;
import lombok.ToString;

import java.util.Date;

@Data
@ToString
public class TestLog {

    private Long id;

    private String name;

    private Date waitTime;
}

import lombok.Data;
import lombok.ToString;

import java.util.Date;

@Data
@ToString
public class TestRecord {

    private Long id;

    private String str;

    private Date time;

}

测试类


import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.SneakyThrows;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Async;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Random;


/**
 * Manual/scheduled smoke tests for the sharded mappers. The commented-out
 * {@code @Scheduled} lines are deliberate toggles: uncomment one to drive
 * that scenario on a cron.
 */
@Component
@EnableScheduling
@EnableAsync
public class TestJob{
    private static final Logger LOGGER = LoggerFactory.getLogger(TestJob.class);

    // DateTimeFormatter is immutable and thread-safe, so one shared instance
    // replaces the per-call ofPattern(...) the methods used to rebuild.
    private static final DateTimeFormatter FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    @Autowired
    TestLogMapper testLogMapper;
    @Autowired
    TestRecordMapper testRecordMapper;



    /** Range query over test_log; the router should touch only the 2022-03..2022-05 shards. */
    @SneakyThrows
//    @Scheduled(cron = "*/5 * * * * ?")
    @Async
    public void test1() {
        List<TestLog> list = testLogMapper.findList(
                DateUtil.toDate(LocalDateTime.parse("2022-03-01 12:12:12", FORMATTER)),
                DateUtil.toDate(LocalDateTime.parse("2022-05-31 12:12:12", FORMATTER)));
        LOGGER.info("查询结果  条数:[{}],数据:[{}]", list.size(), list);
    }

    /** Range query over test_record, same window as {@link #test1()}. */
    @SneakyThrows
//    @Scheduled(cron = "*/10 * * * * ?")
    @Async
    public void test2() {
        List<TestRecord> list = testRecordMapper.findList(
                DateUtil.toDate(LocalDateTime.parse("2022-03-01 12:12:12", FORMATTER)),
                DateUtil.toDate(LocalDateTime.parse("2022-05-31 12:12:12", FORMATTER)));
        LOGGER.info("查询结果  条数:[{}],数据:[{}]", list.size(), list);
    }

    /** Range query restricted to ids 1 and 2 (exercises the foreach IN clause). */
    @SneakyThrows
    @Scheduled(cron = "*/10 * * * * ?")
    @Async
    public void test5() {
        List<TestRecord> list = testRecordMapper.findListByIds(
                new Long[]{1L, 2L},
                DateUtil.toDate(LocalDateTime.parse("2022-03-01 12:12:12", FORMATTER)),
                DateUtil.toDate(LocalDateTime.parse("2022-05-31 12:12:12", FORMATTER)));
        LOGGER.info("查询结果  条数:[{}],数据:[{}]", list.size(), list);
    }


    /** Single-row upsert into test_record at a fixed epoch instant. */
    @SneakyThrows
//    @Scheduled(cron = "*/3 * * * * ?")
    @Async
    public void test3() {
        TestRecord testRecord = new TestRecord();
        testRecord.setId(1L);
        testRecord.setStr(DateUtil.getRandomString(6));
        // Uppercase L suffix: lowercase 'l' reads as the digit 1.
        testRecord.setTime(new Date(1652155200000L));
        int add = testRecordMapper.add(testRecord);
        LOGGER.info("插入记录  条数:[{}],数据:[{}]", add, testRecord);
    }

    /** Batch upsert of five rows (ids 1..5) sharing one fixed timestamp. */
    @SneakyThrows
//    @Scheduled(cron = "*/3 * * * * ?")
    @Async
    public void test4() {
        List<TestRecord> recordList = new ArrayList<>();
        for (long i = 1; i < 6; i++) {
            TestRecord testRecord = new TestRecord();
            testRecord.setId(i);
            testRecord.setStr(DateUtil.getRandomString(6));
            testRecord.setTime(new Date(1652863620000L));
            recordList.add(testRecord);
        }
        int add = testRecordMapper.addList(recordList);
        LOGGER.info("插入记录  条数:[{}],数据:[{}]", add, recordList);
    }

}


最后附上源码下载
链接:https://pan.baidu.com/s/1Tr2ZRZDLtoj7roYWHemoJw
提取码:28cw

GitHub 加速计划 / druid / druid
6
3
下载
阿里云计算平台DataWorks(https://help.aliyun.com/document_detail/137663.html) 团队出品,为监控而生的数据库连接池
最近提交(Master分支:17 天前 )
a71fa6ad - 3 天前
0b375e47 - 10 天前
Logo

旨在为数千万中国开发者提供一个无缝且高效的云端环境,以支持学习、使用和贡献开源项目。

更多推荐