Spring Boot + ShardingSphere + Druid: dynamic data source switching and database/table sharding

Overview: dynamic data source switching and database/table sharding with Spring Boot, ShardingSphere, and Druid.

Add the Maven dependency

<dependency>
    <groupId>org.apache.shardingsphere</groupId>
    <artifactId>sharding-jdbc-spring-boot-starter</artifactId>
    <version>4.0.0-RC1</version>
</dependency>

YAML configuration

spring:
  datasource:
    type: com.alibaba.druid.pool.DruidDataSource
    druid:
      first:  # data source 1
        driverClassName: com.mysql.jdbc.Driver
        url: ******
        username: ******
        password: ******
      second:  # data source 2
        driverClassName: oracle.jdbc.OracleDriver
        url: ******
        username: ******
        password: ******
      third:  # data source 3
        driverClassName: oracle.jdbc.OracleDriver
        url: ******
        username: ******
        password: ******
      fourth:  # data source 4
        driverClassName: oracle.jdbc.OracleDriver
        url: ******
        username: ******
        password: ******
      fifth:  # data source 5
        driverClassName: com.mysql.jdbc.Driver
        url: ******
        username: ******
        password: ******

Create a properties class that reads the data source configuration

import com.alibaba.druid.pool.DruidDataSource;
import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;
/**
 * @author wuzhenyong
 * ClassName:DataSourceProperties.java
 * date:2022-06-21 16:12
 * Description:
 */
@Data
@Configuration
@ConfigurationProperties(prefix = "spring.datasource.druid")
public class DataSourceProperties {
    private DruidDataSource first;
    private DruidDataSource second;
    private DruidDataSource third;
    private DruidDataSource fourth;
    private DruidDataSource fifth;
}
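
A quick way to confirm that the YAML keys under spring.datasource.druid bind as expected is to inject this properties bean and read one of the pools at startup. The runner below is only an illustrative sketch (the class name is made up and not part of the original project):

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;

@Component
public class DataSourceBindingCheck implements CommandLineRunner {

    @Autowired
    private DataSourceProperties properties;

    @Override
    public void run(String... args) {
        // Prints the JDBC URL bound from spring.datasource.druid.first.url
        System.out.println(properties.getFirst().getUrl());
    }
}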

Data source name constants

public interface DataSourceNames {
    String FIRST = "first";
    String SECOND = "second";
    String THIRD = "third";
    String FOURTH = "fourth";
    String FIFTH = "fifth";
}

Sharding data source configuration and the dynamic data source

package com.cnpc.datasources.sharding;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

import javax.sql.DataSource;

import com.cnpc.datasources.DataSourceNames;
import com.cnpc.datasources.DataSourceProperties;
import com.cnpc.datasources.DynamicDataSource;
import com.google.common.collect.Lists;
import lombok.SneakyThrows;
import org.apache.shardingsphere.api.config.sharding.KeyGeneratorConfiguration;
import org.apache.shardingsphere.api.config.sharding.ShardingRuleConfiguration;
import org.apache.shardingsphere.api.config.sharding.TableRuleConfiguration;
import org.apache.shardingsphere.api.config.sharding.strategy.InlineShardingStrategyConfiguration;
import org.apache.shardingsphere.api.config.sharding.strategy.ShardingStrategyConfiguration;
import org.apache.shardingsphere.shardingjdbc.api.ShardingDataSourceFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.core.annotation.Order;
import org.springframework.core.env.Environment;
/**
 * Sharding data source configuration
 *
 * @author zhaolei
 * @date 2020-12-03
 *
 */
@Configuration
@Order(3)
public class ShardingDataSourceConfig {
    String dbNames = DataSourceNames.FIRST;

    @Autowired
    private DataSourceProperties properties;

    @Autowired
    private Environment env;

    public static Map<String, DataSource> dataSourceMap = new HashMap<>();

    @Bean
    @Primary
    public DynamicDataSource dataSource() {
        System.out.println("Initializing multiple data sources......");
        // Build the sharding data source once; it serves both as the "first"
        // entry and as the default target when no lookup key is set.
        DataSource shardingDataSource = buildDataSource();
        Map<Object, Object> targetDataSources = new HashMap<>(16);
        targetDataSources.put(DataSourceNames.FIRST, shardingDataSource);
        targetDataSources.put(DataSourceNames.SECOND, properties.getSecond());
        targetDataSources.put(DataSourceNames.THIRD, properties.getThird());
        targetDataSources.put(DataSourceNames.FOURTH, properties.getFourth());
        targetDataSources.put(DataSourceNames.FIFTH, properties.getFifth());
        return new DynamicDataSource(shardingDataSource, targetDataSources);
    }
    @SneakyThrows
    private DataSource buildDataSource() {
        dataSourceMap.put(DataSourceNames.FIRST, properties.getFirst());
        // initDataSourceMap();
        String[] split = dbNames.split(",");
        // Sharding rule configuration: defines how data is split across tables
        ShardingRuleConfiguration conf = new ShardingRuleConfiguration();
        // Table rule: actual data nodes are t_wx_push_info1 .. t_wx_push_info8 in the "first" data source
        TableRuleConfiguration tableRule = new TableRuleConfiguration("t_wx_push_info", split[0] + ".t_wx_push_info$->{1..8}");
        // Key generation rule
        KeyGeneratorConfiguration keyGen = new KeyGeneratorConfiguration("PUSHINFO", "id");
        tableRule.setKeyGeneratorConfig(keyGen);
        // Table sharding strategy: route by id
        ShardingStrategyConfiguration tableShardingStrategyConfig = new InlineShardingStrategyConfiguration("id", "t_wx_push_info$->{id % 8 + 1}");
        tableRule.setTableShardingStrategyConfig(tableShardingStrategyConfig);
        // Table rule for the details table
        TableRuleConfiguration table2Rule = new TableRuleConfiguration("t_wx_push_info_details", split[0] + ".t_wx_push_info_details$->{1..8}");
        // Key generation rule
        KeyGeneratorConfiguration key2Gen = new KeyGeneratorConfiguration("PUSHINFODETAIL", "id");
        table2Rule.setKeyGeneratorConfig(key2Gen);
        // Table sharding strategy: route by info_type
        ShardingStrategyConfiguration tableSharding2StrategyConfig = new InlineShardingStrategyConfiguration("info_type", "t_wx_push_info_details$->{info_type % 8 + 1}");
        table2Rule.setTableShardingStrategyConfig(tableSharding2StrategyConfig);
        conf.setTableRuleConfigs(Lists.newArrayList(tableRule, table2Rule));
        Properties props = new Properties();
        props.put("sql.show", true);
        return ShardingDataSourceFactory.createDataSource(dataSourceMap, conf, props);
    }
}
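
The inline expressions above expand to eight physical tables per logical table: first.t_wx_push_info1 through first.t_wx_push_info8, and likewise for t_wx_push_info_details. A row is routed by the sharding-column arithmetic in the expression; a small worked example of that rule (for illustration only):

// For "t_wx_push_info$->{id % 8 + 1}", a row whose id is 10 is routed to
// t_wx_push_info3, because 10 % 8 + 1 = 3.
long id = 10L;
String targetTable = "t_wx_push_info" + (id % 8 + 1); // -> "t_wx_push_info3"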

DynamicDataSource: the dynamic routing data source

import java.util.Map;

import javax.sql.DataSource;

import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;

public class DynamicDataSource extends AbstractRoutingDataSource {
    private static final ThreadLocal<String> CONTEXT_HOLDER = new ThreadLocal<>();
    public DynamicDataSource(DataSource defaultTargetDataSource, Map<Object, Object> targetDataSources) {
        super.setDefaultTargetDataSource(defaultTargetDataSource);
        super.setTargetDataSources(targetDataSources);
        super.afterPropertiesSet();
    }
    @Override
    protected Object determineCurrentLookupKey() {
        return getDataSource();
    }
    public static void setDataSource(String dataSource) {
        CONTEXT_HOLDER.set(dataSource);
    }
    public static String getDataSource() {
        return CONTEXT_HOLDER.get();
    }
    public static void clearDataSource() {
        CONTEXT_HOLDER.remove();
    }
}
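
For completeness, the data source can also be switched by hand, for example in code that cannot carry an annotation. A minimal sketch using only the methods shown above; the annotation and aspect in the next sections normally do this for you:

// Set the lookup key, do the work, and always clear it afterwards so the
// ThreadLocal value does not leak into the next request handled by this thread.
DynamicDataSource.setDataSource(DataSourceNames.SECOND);
try {
    // ... run queries against the "second" data source ...
} finally {
    DynamicDataSource.clearDataSource();
}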

Multi-data-source annotation

import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @interface DataSource {
    String name() default "";
}

Multi-data-source aspect

import com.cnpc.datasources.DataSourceNames;
import com.cnpc.datasources.DynamicDataSource;
import com.cnpc.datasources.annotation.DataSource;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.aspectj.lang.reflect.MethodSignature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.Ordered;
import org.springframework.stereotype.Component;
import java.lang.reflect.Method;
/**
 * Aspect that switches the active data source for methods annotated with @DataSource
 * @author YangMQ
 */
@Aspect
@Component
public class DataSourceAspect implements Ordered {
    protected Logger logger = LoggerFactory.getLogger(getClass());
    @Pointcut("@annotation(com.cnpc.datasources.annotation.DataSource)")
    public void dataSourcePointCut() {
    }
    @Around("dataSourcePointCut()")
    public Object around(ProceedingJoinPoint point) throws Throwable {
        MethodSignature signature = (MethodSignature) point.getSignature();
        Method method = signature.getMethod();
        DataSource ds = method.getAnnotation(DataSource.class);
        if (ds == null) {
            DynamicDataSource.setDataSource(DataSourceNames.FIRST);
            logger.debug("set data source to {}", DataSourceNames.FIRST);
        } else {
            DynamicDataSource.setDataSource(ds.name());
            logger.debug("set data source to {}", ds.name());
        }
        try {
            return point.proceed();
        } finally {
            DynamicDataSource.clearDataSource();
            logger.debug("clean datasource");
        }
    }
    @Override
    public int getOrder() {
        return 1;
    }
}
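
Putting it together: annotating a service method routes that call to the named data source, while un-annotated methods set no lookup key, so DynamicDataSource falls back to its default target, the sharding data source. The service below is illustrative only and not from the original project:

import org.springframework.stereotype.Service;

@Service
public class PushInfoService {

    // Routed to the Oracle "second" data source by the aspect.
    @DataSource(name = DataSourceNames.SECOND)
    public void syncFromSecond() {
        // ... call a mapper/DAO here ...
    }

    // No @DataSource annotation: no lookup key is set, so queries go to the
    // default target, i.e. the sharded MySQL data source built above.
    public void writePushInfo() {
        // ... inserts into t_wx_push_info are spread across the eight physical tables ...
    }
}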

With everything above in place, you can now switch data sources dynamically and use database/table sharding.



