Multi-data-source management and database read/write splitting

Summary: managing multiple data sources and splitting database reads and writes.

Option 1: primary/secondary data source switching based on AbstractRoutingDataSource
package com.manli.api.base.aop;
import com.manli.api.base.datasource.DynmicDataSourceContextHolder;
import com.manli.api.util.LogAopUtil;
import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.annotation.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.EnableAspectJAutoProxy;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;
/**
 * Service-layer aspect: selects the data source according to the target class and method name.
 *
 * @author Shensg
 * May 31, 2018
 */
@Aspect
@Order(-1)
@Component
@EnableAspectJAutoProxy(proxyTargetClass = true)
public class DataSourceAspect {

private Logger log = LoggerFactory.getLogger(DataSourceAspect.class);   
   
ThreadLocal<Long> startTime = new ThreadLocal<>();   

 
@Pointcut("execution(* com.manli.api.service..*Service.*(..))")
public void webLog(){}   

@Before("webLog()")   
public void doBefore(JoinPoint joinPoint) throws Throwable {   
    startTime.set(System.currentTimeMillis());   
    String method = joinPoint.getSignature().getDeclaringTypeName() + "." + joinPoint.getSignature().getName();
    Object[] args = joinPoint.getArgs();
    String classType = joinPoint.getTarget().getClass().getName();
    Class<?> clazz = Class.forName(classType);
    String clazzName = clazz.getName();
    String methodName = joinPoint.getSignature().getName(); // simple method name
     
     
    /*log.info("DataSourceAspect.doBefore - service aspect");
    log.info("=============== request ===============");
    log.info("Join point: method " + method + " in class "
            + joinPoint.getTarget().getClass().getName());
    // get parameter names and values
    StringBuffer sb = LogAopUtil.getNameAndArgs(this.getClass(), clazzName, methodName, args);
    log.info("Method parameter names and values: " + sb);
    log.info("=============== request ===============");*/
    DynmicDataSourceContextHolder.clear();
    if (joinPoint.getTarget().getClass().getName().contains("Primary")) {
        // Use the simple method name here: the fully qualified "method" string above
        // starts with the package name, so it could never start with these prefixes.
        if (methodName.startsWith("add")
                || methodName.startsWith("create")
                || methodName.startsWith("save")
                || methodName.startsWith("edit")
                || methodName.startsWith("update")
                || methodName.startsWith("delete")
                || methodName.startsWith("remove")
                || methodName.startsWith("grant")
                || methodName.startsWith("get")
                ) {
            log.info("Switching to: primaryDataSource");
            DynmicDataSourceContextHolder.setDataSourceKey("primaryDataSource");
            return;
        } else {
            // Both branches currently route to the primary data source; the prefix list
            // marks write-style methods in case the else branch is ever pointed at a replica.
            log.info("Switching to: primaryDataSource");
            DynmicDataSourceContextHolder.setDataSourceKey("primaryDataSource");
            return;
        }
    }
    if (joinPoint.getTarget().getClass().getName().contains("Second")) {
        log.info("Switching to: secondDataSource");
        DynmicDataSourceContextHolder.setDataSourceKey("secondDataSource");
        return;
    }
    log.warn("No routing rule matched; switching to the default data source: primaryDataSource");
    DynmicDataSourceContextHolder.setDataSourceKey("primaryDataSource");
}

@AfterReturning(returning = "ret", pointcut = "webLog()")   
public void doAfterReturning(Object ret) throws Throwable {   
    // after the request is processed, the return value is available here
    /*log.info("=============== response ===============");
    log.info("DataSourceAspect.doAfterReturning - service aspect");
    log.info("RESPONSE=" + ret);
    log.info("SPEND TIME=" + (System.currentTimeMillis() - startTime.get()));
    log.info("=============== response ===============");*/
    DynmicDataSourceContextHolder.clear();
}   
// invoked after an exception is thrown
@AfterThrowing(throwing = "ex", pointcut = "webLog()")
public void afterThrowing(Exception ex) throws Throwable {
    DynmicDataSourceContextHolder.setDataSourceKey("primaryDataSource");
    log.error("DataSourceAspect.afterThrowing - service aspect");
    log.error("Data access failed, switching to: primaryDataSource. " + ex.getMessage());
}

}
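
The aspect routes by a naming convention on the target service class rather than by an annotation: a class whose name contains "Primary" always hits primaryDataSource, one whose name contains "Second" hits secondDataSource, and everything else falls back to the default. The @Order(-1) makes the aspect run ahead of other advice (such as transaction advice), so the key is already set when a connection is obtained. A minimal sketch of two hypothetical services that would opt into this convention (the class and method names are made up, not from the original project):

// --- PrimaryUserService.java (hypothetical) ---
package com.manli.api.service;

import org.springframework.stereotype.Service;

@Service
public class PrimaryUserService {
    public void saveUser(String name) {
        // class name contains "Primary" -> runs on primaryDataSource
    }
}

// --- SecondReportService.java (hypothetical) ---
package com.manli.api.service;

import org.springframework.stereotype.Service;

@Service
public class SecondReportService {
    public int countReports() {
        // class name contains "Second" -> runs on secondDataSource
        return 0;
    }
}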

package com.manli.api.base.datasource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;
public class DynmicDataSource extends AbstractRoutingDataSource {

private Logger log = LoggerFactory.getLogger(DynmicDataSource.class);  
/**
 * Returns the key used to look up the current target in targetDataSources.
 */
@Override   
protected Object determineCurrentLookupKey() {   
    return DynmicDataSourceContextHolder.getDataSourceKey();   
}   

}

package com.manli.api.base.datasource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.Assert;
// Holds the current data source key per thread (set / get / clear)
public class DynmicDataSourceContextHolder {

private static Logger log = LoggerFactory.getLogger(DynmicDataSourceContextHolder.class);  
public static final ThreadLocal<String> contextHolder = new ThreadLocal<String>();   
 
public static String getDataSourceKey() {   
    return contextHolder.get();   
}   

public static void setDataSourceKey(String dataSourcekey) {   
    Assert.notNull(dataSourcekey, "dataSource cannot be null");
    contextHolder.set(dataSourcekey);   
}   
 
public static void clear() {   
    contextHolder.remove();   
}

}
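
Besides being driven by the aspect, the holder can also be used to switch the data source manually, as in the sketch below; ManualSwitchExample and ReportDao are hypothetical and only illustrate the pattern. Clearing the key in a finally block matters because it lives in a ThreadLocal and would otherwise leak into the next request handled by the same pooled thread.

package com.manli.api.service;

import com.manli.api.base.datasource.DynmicDataSourceContextHolder;

import java.util.List;

// Hypothetical helper showing a manual switch; ReportDao is a stand-in DAO.
public class ManualSwitchExample {

    private final ReportDao reportDao;

    public ManualSwitchExample(ReportDao reportDao) {
        this.reportDao = reportDao;
    }

    public List<String> listFromSecond() {
        DynmicDataSourceContextHolder.setDataSourceKey("secondDataSource");
        try {
            // this query is routed to secondDataSource
            return reportDao.listReportNames();
        } finally {
            // always clear: otherwise the ThreadLocal keeps the key for the next
            // task that reuses this pooled thread
            DynmicDataSourceContextHolder.clear();
        }
    }

    // minimal stand-in so the sketch compiles on its own
    public interface ReportDao {
        List<String> listReportNames();
    }
}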

package com.manli.api.base.datasource;
import com.alibaba.druid.filter.Filter;
import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.support.spring.stat.DruidStatInterceptor;
import com.alibaba.druid.wall.WallConfig;
import com.alibaba.druid.wall.WallFilter;
import org.springframework.aop.Advisor;
import org.springframework.aop.support.DefaultPointcutAdvisor;
import org.springframework.aop.support.JdkRegexpMethodPointcut;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import javax.sql.DataSource;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
// Defines the two physical data sources and registers them with the dynamic routing data source
@Configuration
@EnableConfigurationProperties(DruidSettings.class)
public class DruidDataSourceConfig {

@Autowired
private DruidSettings druidSettings;
@Bean(name="primaryDataSource",initMethod = "init", destroyMethod = "close")
@Primary
// With @Primary, when several beans of the same type exist, Spring injects this one by default
public DataSource primaryDataSource() throws Exception{
    DruidDataSource dataSource = new DruidDataSource();
    dataSource.setDriverClassName(druidSettings.getDriverClassName());
    dataSource.setUrl(druidSettings.getUrl());
    dataSource.setUsername(druidSettings.getUsername());
    dataSource.setPassword(druidSettings.getPassword());
    dataSource.setInitialSize(druidSettings.getInitialSize());
    dataSource.setMinIdle(druidSettings.getMinIdle());
    dataSource.setMaxActive(druidSettings.getMaxActive());
    dataSource.setMaxWait(druidSettings.getMaxWait());
    dataSource.setTimeBetweenEvictionRunsMillis(druidSettings.getTimeBetweenEvictionRunsMillis());
    dataSource.setMinEvictableIdleTimeMillis(druidSettings.getMinEvictableIdleTimeMillis());
    String validationQuery = druidSettings.getValidationQuery();
    if (validationQuery != null && !"".equals(validationQuery)) {
        dataSource.setValidationQuery(validationQuery);
    }
    dataSource.setTestWhileIdle(druidSettings.isTestWhileIdle());
    dataSource.setTestOnBorrow(druidSettings.isTestOnBorrow());
    dataSource.setTestOnReturn(druidSettings.isTestOnReturn());
    if(druidSettings.isPoolPreparedStatements()){
        dataSource.setMaxPoolPreparedStatementPerConnectionSize(druidSettings.getMaxPoolPreparedStatementPerConnectionSize());
    }
    dataSource.setFilters(druidSettings.getFilters()); // essential: without this, Druid's SQL monitoring does not take effect
    String connectionPropertiesStr = druidSettings.getConnectionProperties();
    if(connectionPropertiesStr != null && !"".equals(connectionPropertiesStr)){
        Properties connectProperties = new Properties();
        String[] propertiesList = connectionPropertiesStr.split(";");
        for(String propertiesTmp:propertiesList){
            String[] obj = propertiesTmp.split("=");
            String key = obj[0];
            String value = obj[1];
            connectProperties.put(key,value);
        }
        dataSource.setConnectProperties(connectProperties);
    }
    dataSource.setUseGlobalDataSourceStat(druidSettings.isUseGlobalDataSourceStat());
    /**
     * add ProxyFilters
     */
    List<Filter> filters = new ArrayList<>();
    filters.add(wallFilter());
    filters.add(new MyDruidFilter());
    dataSource.setProxyFilters(filters);
    return dataSource;
}
 
@Bean(name="secondDataSource",initMethod = "init", destroyMethod = "close")
public DataSource secondDataSource() throws Exception{
    DruidDataSource dataSource = new DruidDataSource();
    dataSource.setDriverClassName(druidSettings.getDriverClassNameTwo());
    dataSource.setUrl(druidSettings.getUrlTwo());
    dataSource.setUsername(druidSettings.getUsernameTwo());
    dataSource.setPassword(druidSettings.getPasswordTwo());
    dataSource.setInitialSize(druidSettings.getInitialSize());
    dataSource.setMinIdle(druidSettings.getMinIdle());
    dataSource.setMaxActive(druidSettings.getMaxActive());
    dataSource.setMaxWait(druidSettings.getMaxWait());
    dataSource.setTimeBetweenEvictionRunsMillis(druidSettings.getTimeBetweenEvictionRunsMillis());
    dataSource.setMinEvictableIdleTimeMillis(druidSettings.getMinEvictableIdleTimeMillis());
    String validationQuery = druidSettings.getValidationQuery();
    if (validationQuery != null && !"".equals(validationQuery)) {
        dataSource.setValidationQuery(validationQuery);
    }
    dataSource.setTestWhileIdle(druidSettings.isTestWhileIdle());
    dataSource.setTestOnBorrow(druidSettings.isTestOnBorrow());
    dataSource.setTestOnReturn(druidSettings.isTestOnReturn());
    if(druidSettings.isPoolPreparedStatements()){
        dataSource.setMaxPoolPreparedStatementPerConnectionSize(druidSettings.getMaxPoolPreparedStatementPerConnectionSize());
    }
    dataSource.setFilters(druidSettings.getFilters()); // essential: without this, Druid's SQL monitoring does not take effect
    String connectionPropertiesStr = druidSettings.getConnectionProperties();
    if(connectionPropertiesStr != null && !"".equals(connectionPropertiesStr)){
        Properties connectProperties = new Properties();
        String[] propertiesList = connectionPropertiesStr.split(";");
        for(String propertiesTmp:propertiesList){
            String[] obj = propertiesTmp.split("=");
            String key = obj[0];
            String value = obj[1];
            connectProperties.put(key,value);
        }
        dataSource.setConnectProperties(connectProperties);
    }
    dataSource.setUseGlobalDataSourceStat(druidSettings.isUseGlobalDataSourceStat());
    /**
     * add ProxyFilters
     */
    List<Filter> filters = new ArrayList<>();
    filters.add(wallFilter());
    filters.add(new MyDruidFilter());
    dataSource.setProxyFilters(filters);
    return dataSource;
}
 
@Bean(name="dataSource")  
public DynmicDataSource dynmicDataSource() throws Exception {   
    DynmicDataSource dynmicDataSource = new DynmicDataSource();   
     
    Map<Object, Object> targetDataSources = new HashMap<>();   
    targetDataSources.put("primaryDataSource", primaryDataSource());   
    targetDataSources.put("secondDataSource", secondDataSource());   
    dynmicDataSource.setTargetDataSources(targetDataSources);   
    dynmicDataSource.setDefaultTargetDataSource(primaryDataSource());   
     
    return dynmicDataSource;   
}
 
/**
 * Druid Spring method monitoring:
 *  1. define the interceptor
 *  2. define the pointcut
 *  3. define the advisor
 * @return
 */
@Bean
public DruidStatInterceptor druidStatInterceptor(){
    return new DruidStatInterceptor();
}
@Bean
public JdkRegexpMethodPointcut druidStatPointcut(){
    JdkRegexpMethodPointcut druidStatPointcut = new JdkRegexpMethodPointcut();
    String patterns = "com.manli.api.controller.*";
    String patterns2 = "com.manli.api.service.*";
    druidStatPointcut.setPatterns(patterns,patterns2);
    return druidStatPointcut;
}
@Bean
public Advisor druidStatAdvisor() {
    return new DefaultPointcutAdvisor(druidStatPointcut(), druidStatInterceptor());
}
   
@Bean
@ConfigurationProperties("spring.datasource.druid.filter.wall.config")
public WallConfig wallConfig(){
    return new WallConfig();
}
@Bean
public WallFilter wallFilter(){
    WallFilter filter = new WallFilter();
    WallConfig wallConfig = wallConfig();
    filter.setConfig(wallConfig);
    filter.setDbType("mysql");
    return filter;
}

}
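
The configuration above pulls every value from a DruidSettings properties class that the original post does not show. Below is a minimal sketch of what it could look like; the spring.datasource.druid prefix and the field names are assumptions inferred from the getters called above, and only a few representative fields are written out.

package com.manli.api.base.datasource;

import org.springframework.boot.context.properties.ConfigurationProperties;

// Hypothetical sketch of the properties class consumed by DruidDataSourceConfig.
// The prefix and field names are assumptions, not taken from the original project.
@ConfigurationProperties(prefix = "spring.datasource.druid")
public class DruidSettings {

    // primary data source
    private String driverClassName;
    private String url;
    private String username;
    private String password;

    // second data source ("Two" suffix, matching getUrlTwo(), getUsernameTwo(), ...)
    private String driverClassNameTwo;
    private String urlTwo;
    private String usernameTwo;
    private String passwordTwo;

    // shared pool settings; the remaining properties used above (initialSize, minIdle,
    // maxActive, maxWait, validationQuery, filters, connectionProperties, ...) follow
    // the same field + getter/setter pattern
    private int initialSize;
    private int maxActive;

    public String getDriverClassName() { return driverClassName; }
    public void setDriverClassName(String driverClassName) { this.driverClassName = driverClassName; }

    public String getUrl() { return url; }
    public void setUrl(String url) { this.url = url; }

    public String getUrlTwo() { return urlTwo; }
    public void setUrlTwo(String urlTwo) { this.urlTwo = urlTwo; }

    // ... getters and setters for the other fields follow the same pattern
}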

Option 2: integrate MyBatis-Plus with the baomidou dynamic-datasource starter and annotate classes or methods with @DS("dataSourceName")
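
A minimal sketch of option 2, assuming dynamic-datasource-spring-boot-starter is on the classpath and that data sources named "master" and "slave_1" are declared under spring.datasource.dynamic in the application configuration; OrderService and its methods are made up for illustration.

package com.manli.api.service;

import com.baomidou.dynamic.datasource.annotation.DS;
import org.springframework.stereotype.Service;

// Hypothetical service: @DS selects a data source declared under
// spring.datasource.dynamic.datasource.<name> in application.yml.
@Service
@DS("master")                 // class-level default: writes go to the master
public class OrderService {

    public void createOrder(String orderNo) {
        // runs against the "master" data source
    }

    @DS("slave_1")            // a method-level @DS overrides the class-level one
    public int countOrders() {
        // runs against the "slave_1" read replica
        return 0;
    }
}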
