Implementing Multiple Data Sources with Spring Boot 2 + MyBatis

1. Java Classes

DynamicDataSource.java
package com.a.dynamic;

import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;

/**
 * Dynamic data source that routes to the data source bound to the current thread
 *
 * @author zhoukai7
 * @create 7/27/18
 * @since 1.0.0
 */
public class DynamicDataSource extends AbstractRoutingDataSource {

    @Override
    protected Object determineCurrentLookupKey() {
        return DynamicDataSourceContextHolder.getDataSourceType();
    }
}
DynamicDataSourceAspect.java
package com.a.dynamic;

import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.annotation.After;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Before;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;

/**
 * Dynamic data source aspect
 *
 * @author zhoukai7
 * @create 7/27/18
 * @since 1.0.0
 */
@Aspect
@Order(-1) // ensure this aspect runs before @Transactional
@Component
public class DynamicDataSourceAspect {
    private static final Logger logger = LoggerFactory.getLogger(DynamicDataSourceAspect.class);

    @Before("@annotation(ds)")
    public void changeDataSource(JoinPoint point, TargetDataSource ds) throws Throwable {
        String dsId = ds.name();
        if (!DynamicDataSourceContextHolder.containsDataSource(dsId)) {
            logger.error("数据源[{}]不存在,使用默认数据源 > {}", ds.name(), point.getSignature());
        } else {
            logger.debug("Use DataSource : {} > {}", ds.name(), point.getSignature());
            DynamicDataSourceContextHolder.setDataSourceType(ds.name());
        }
    }
    
    @After("@annotation(ds)")
    public void restoreDataSource(JoinPoint point, TargetDataSource ds) {
        logger.debug("Revert DataSource : {} > {}", ds.name(), point.getSignature());
        DynamicDataSourceContextHolder.clearDataSourceType();
    }
}
DynamicDataSourceContextHolder.java
package com.a.dynamic;

import java.util.ArrayList;
import java.util.List;

/**
 * Dynamic data source context holder
 *
 * @author zhoukai7
 * @create 7/27/18
 * @since 1.0.0
 */
public class DynamicDataSourceContextHolder {
    private static final ThreadLocal<String> contextHolder = new ThreadLocal<String>();
    public static List<String> dataSourceIds = new ArrayList<>();
    
    public static void setDataSourceType(String dataSourceType) {
        contextHolder.set(dataSourceType);
    }

    public static String getDataSourceType() {
        return contextHolder.get();
    }

    public static void clearDataSourceType() {
        contextHolder.remove();
    }

    /**
     * Check whether the given data source id has been registered
     *
     * @param dataSourceId
     * @author zhoukai7
     * @create 7/27/18
     */
    public static boolean containsDataSource(String dataSourceId){
        return dataSourceIds.contains(dataSourceId);
    }
}
DynamicDataSourceRegister.java
package com.a.dynamic;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.MutablePropertyValues;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.beans.factory.support.GenericBeanDefinition;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.context.EnvironmentAware;
import org.springframework.context.annotation.ImportBeanDefinitionRegistrar;
import org.springframework.core.env.Environment;
import org.springframework.core.type.AnnotationMetadata;
import javax.sql.DataSource;
import java.util.HashMap;
import java.util.Map;

/**
 * Dynamic data source registrar
 *
 * @author zhoukai7
 * @create 7/27/18
 * @since 1.0.0
 */
public class DynamicDataSourceRegister implements ImportBeanDefinitionRegistrar, EnvironmentAware {
    private static final Logger logger = LoggerFactory.getLogger(DynamicDataSourceRegister.class);    
    // Default data source type (Spring Boot 2.x uses HikariCP by default; configure another pool if desired,
    // e.g. org.apache.tomcat.jdbc.pool.DataSource)
    private static final String DATASOURCE_TYPE_DEFAULT = "com.zaxxer.hikari.HikariDataSource";
    // Default (master) data source
    private DataSource defaultDataSource;
    // User-defined (slave) data sources
    private Map<String, DataSource> slaveDataSources = new HashMap<>();

    @Override
    public void setEnvironment(Environment environment) {
        initDefaultDataSource(environment);
        initSlaveDataSources(environment);
    }

    private void initDefaultDataSource(Environment env) {
        // Read the master data source configuration
        Map<String, Object> dsMap = new HashMap<>();
        dsMap.put("type", env.getProperty("spring.datasource.type"));
        dsMap.put("driver", env.getProperty("spring.datasource.driverClassName"));
        dsMap.put("url", env.getProperty("spring.datasource.url"));
        dsMap.put("username", env.getProperty("spring.datasource.username"));
        dsMap.put("password", env.getProperty("spring.datasource.password"));
        defaultDataSource = buildDataSource(dsMap);
    }

    private void initSlaveDataSources(Environment env) {
        // Read additional (slave) data sources from the configuration
        String dsPrefixs = env.getProperty("slave.datasource.names");
        for (String dsPrefix : dsPrefixs.split(",")) {
            // Each prefix defines one data source
            Map<String, Object> dsMap = new HashMap<>();
            dsMap.put("type", env.getProperty("slave.datasource." + dsPrefix + ".type"));
            dsMap.put("driver", env.getProperty("slave.datasource." + dsPrefix + ".driverClassName"));
            dsMap.put("url", env.getProperty("slave.datasource." + dsPrefix + ".url"));
            dsMap.put("username", env.getProperty("slave.datasource." + dsPrefix + ".username"));
            dsMap.put("password", env.getProperty("slave.datasource." + dsPrefix + ".password"));
            DataSource ds = buildDataSource(dsMap);
            slaveDataSources.put(dsPrefix, ds);
        }
    }

    @Override
    public void registerBeanDefinitions(AnnotationMetadata annotationMetadata, BeanDefinitionRegistry beanDefinitionRegistry) {
        Map<Object, Object> targetDataSources = new HashMap<Object, Object>();
        // Register the default data source
        targetDataSources.put("dataSource", this.defaultDataSource);
        DynamicDataSourceContextHolder.dataSourceIds.add("dataSource");
        // Register the additional (slave) data sources
        targetDataSources.putAll(slaveDataSources);
        for (String key : slaveDataSources.keySet()) {
            DynamicDataSourceContextHolder.dataSourceIds.add(key);
        }

        // Create the DynamicDataSource bean definition
        GenericBeanDefinition beanDefinition = new GenericBeanDefinition();
        beanDefinition.setBeanClass(DynamicDataSource.class);
        beanDefinition.setSynthetic(true);
        MutablePropertyValues mpv = beanDefinition.getPropertyValues();
        mpv.addPropertyValue("defaultTargetDataSource", defaultDataSource);
        mpv.addPropertyValue("targetDataSources", targetDataSources);
        // Register it with the BeanDefinitionRegistry
        beanDefinitionRegistry.registerBeanDefinition("dataSource", beanDefinition);
        logger.info("Dynamic DataSource Registry");
    }
    
    @SuppressWarnings("unchecked")
    public DataSource buildDataSource(Map<String, Object> dataSourceMap) {
        try {
            Object type = dataSourceMap.get("type");
            if (type == null) {
                type = DATASOURCE_TYPE_DEFAULT; // fall back to the default DataSource type
            }
            Class<? extends DataSource> dataSourceType;
            dataSourceType = (Class<? extends DataSource>) Class.forName((String) type);
            String driverClassName = dataSourceMap.get("driver").toString();
            String url = dataSourceMap.get("url").toString();
            String username = dataSourceMap.get("username").toString();
            String password = dataSourceMap.get("password").toString();
            // Build the DataSource from the supplied settings
            DataSourceBuilder factory = DataSourceBuilder.create().driverClassName(driverClassName).url(url)
                    .username(username).password(password).type(dataSourceType);
            return factory.build();
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        }
        return null;
    }
}
TargetDataSource.java
package com.a.dynamic;

import java.lang.annotation.*;

/**
 * Annotation that selects the target data source for a method or class
 *
 * @author zhoukai7
 * @create 7/27/18
 * @since 1.0.0
 */
@Target({ ElementType.METHOD, ElementType.TYPE })
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @interface TargetDataSource {
    String name();
}
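
Note: because DynamicDataSourceRegister is an ImportBeanDefinitionRegistrar, it only takes effect when imported on a configuration class. Below is a minimal sketch of a startup class; the DemoApplication name and the com.a package are illustrative, and excluding DataSourceAutoConfiguration is an assumption made so that Spring Boot does not build its own single data source alongside the dynamic "dataSource" bean registered above.

package com.a;

import com.a.dynamic.DynamicDataSourceRegister;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
import org.springframework.context.annotation.Import;

// Hypothetical startup class (name and package are illustrative).
// @Import activates DynamicDataSourceRegister so registerBeanDefinitions() runs;
// excluding DataSourceAutoConfiguration keeps Spring Boot from creating a second,
// competing DataSource next to the dynamic "dataSource" bean.
@SpringBootApplication(exclude = DataSourceAutoConfiguration.class)
@Import(DynamicDataSourceRegister.class)
public class DemoApplication {

    public static void main(String[] args) {
        SpringApplication.run(DemoApplication.class, args);
    }
}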

2. YAML Configuration

application.yml
# Tomcat
server:
    tomcat:
        uri-encoding: UTF-8
        max-threads: 1000
        min-spare-threads: 30
    port: 8012
    servlet:
        session:
            timeout: 1800
        context-path: /
        
# spring
spring:
    # environment: dev|test|pro
    profiles:
        active: dev        
#    datasource:
#        type: com.alibaba.druid.pool.DruidDataSource
#        driverClassName: com.mysql.jdbc.Driver
    # Jackson date formatting
    jackson:
        time-zone: GMT+8
        date-format: yyyy-MM-dd HH:mm:ss
    # File upload
    servlet:
        multipart:
            enabled: true
            max-file-size: 100MB
            max-request-size: 100MB
    # Static resource locations
    resources:
        static-locations: classpath:/static/,classpath:/views/
    # Spring devtools
    devtools:
        restart:
            enabled: true
    thymeleaf:
        cache: false

# MyBatis configuration
#mybatis:
#    mapperLocations: classpath*:mapper/**/*.xml
#    configLocation: classpath:mybatis.xml

# MyBatis-Plus
mybatis-plus:
  mapper-locations: classpath*:/mapper/**/*.xml
  # Entity scan; separate multiple packages with commas or semicolons
  typeAliasesPackage: com.asiainfo.**.entity
  global-config:
    # Primary key type: 0 = database auto-increment, 1 = user input, 2 = globally unique numeric ID, 3 = globally unique UUID
    id-type: 0
    # Field strategy: 0 = no check, 1 = non-NULL check, 2 = non-empty check
    field-strategy: 2
    # Convert between camelCase and underscores
    db-column-underline: true
    # Hot-reload mappers (handy for debugging)
    refresh-mapper: true
    # Uppercase/underscore conversion for database columns
    #capital-mode: true
    # Key generator implementation
    #key-generator: com.baomidou.springboot.xxx
    # Logical delete configuration
    logic-delete-value: 0
    logic-not-delete-value: 1
    # Custom meta-object (field fill) handler
    #meta-object-handler: com.baomidou.springboot.xxx
    # Custom SQL injector
    #sql-injector: com.baomidou.springboot.xxx
  configuration:
    map-underscore-to-camel-case: true
    cache-enabled: false
application-dev.yml
spring:
    datasource:
        type: com.alibaba.druid.pool.DruidDataSource
        driverClassName: com.mysql.jdbc.Driver
        # 添加&useSSL=true后会报错
        url: jdbc:mysql://localhost:3309/aas?allowMultiQueries=true&useUnicode=true&characterEncoding=UTF-8
        username: root
        password: root
        #连接池的配置信息
        initialSize: 10
        minIdle: 10
        maxActive: 100
        # 配置获取连接等待超时的时间
        maxWait: 60000
        # 配置间隔多久才进行一次检测,检测需要关闭的空闲连接,单位是毫秒
        timeBetweenEvictionRunsMillis: 60000
        # 配置一个连接在池中最小生存的时间,单位是毫秒
        minEvictableIdleTimeMillis: 300000
        validationQuery: SELECT 1 FROM DUAL
        testWhileIdle: true
        testOnBorrow: false
        testOnReturn: false
        # 打开PSCache,并且指定每个连接上PSCache的大小
        poolPreparedStatements: true
        maxPoolPreparedStatementPerConnectionSize: 20       
         # 配置监控统计拦截的filters,去掉后监控界面sql无法统计,'wall'用于防火墙
        filters: stat,wall,slf4j
        # 通过connectProperties属性来打开mergeSql功能;慢SQL记录
        connectionProperties: druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000

slave:
    datasource:
        names: ds1,ds2
        ds1:
            type: com.alibaba.druid.pool.DruidDataSource
            driverClassName: com.mysql.jdbc.Driver
            # 添加&useSSL=true后会报错
            url: jdbc:mysql://localhost:3309/bbs?allowMultiQueries=true&useUnicode=true&characterEncoding=UTF-8&autoReconnect=true&failOverReadOnly=false&maxReconnects=10
            username: root
            password: root
            #连接池的配置信息
            initialSize: 10
            minIdle: 10
            maxActive: 100
            # 配置获取连接等待超时的时间
            maxWait: 60000
            # 配置间隔多久才进行一次检测,检测需要关闭的空闲连接,单位是毫秒
            timeBetweenEvictionRunsMillis: 60000
            # 配置一个连接在池中最小生存的时间,单位是毫秒
            minEvictableIdleTimeMillis: 300000
            validationQuery: SELECT 1 FROM DUAL
            testWhileIdle: true
            testOnBorrow: false
            testOnReturn: false
            # 打开PSCache,并且指定每个连接上PSCache的大小
            poolPreparedStatements: true
            maxPoolPreparedStatementPerConnectionSize: 20
            # 配置监控统计拦截的filters,去掉后监控界面sql无法统计,'wall'用于防火墙
            filters: stat,wall,slf4j
            # 通过connectProperties属性来打开mergeSql功能;慢SQL记录
            connectionProperties: druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000
        ds2:
            type: com.alibaba.druid.pool.DruidDataSource
            driverClassName: com.mysql.jdbc.Driver
            # 添加&useSSL=true后会报错
            url: jdbc:mysql://localhost:3309/dds?allowMultiQueries=true&useUnicode=true&characterEncoding=UTF-8&autoReconnect=true&failOverReadOnly=false&maxReconnects=10
            username: root
            password: root
            #连接池的配置信息
            initialSize: 10
            minIdle: 10
            maxActive: 100
            # 配置获取连接等待超时的时间
            maxWait: 60000
            # 配置间隔多久才进行一次检测,检测需要关闭的空闲连接,单位是毫秒
            timeBetweenEvictionRunsMillis: 60000
            # 配置一个连接在池中最小生存的时间,单位是毫秒
            minEvictableIdleTimeMillis: 300000
            validationQuery: SELECT 1 FROM DUAL
            testWhileIdle: true
            testOnBorrow: false
            testOnReturn: false
            # 打开PSCache,并且指定每个连接上PSCache的大小
            poolPreparedStatements: true
            maxPoolPreparedStatementPerConnectionSize: 20
            # 配置监控统计拦截的filters,去掉后监控界面sql无法统计,'wall'用于防火墙
            filters: stat,wall,slf4j
            # 通过connectProperties属性来打开mergeSql功能;慢SQL记录
            connectionProperties: druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000
        # 合并多个DruidDataSource的监控数据
        #useGlobalDataSourceStat: true

#    redis:
#        host: 140.143.23.15
#        port: 6379
#        # Redis password (empty by default)
#        password: redis@root&hp
#        # Redis database index (0 by default)
#        database: 1
#        # Connection timeout (ms); a unit suffix is required in 2.0
#        timeout: 10000
#        jedis:
#            pool:
#               # Maximum number of idle connections in the pool
#               max-idle: 50
#               # Minimum number of idle connections in the pool
#               min-idle: 8
#               # Maximum number of connections in the pool (negative means no limit)
#               max-active: 1024
#               # Maximum blocking wait time for a connection (negative means no limit)
#               max-wait: -1
upload:
    image:
       url: /data/image/release

3. Usage

   @TargetDataSource(name="ds1")
	@Override
	public List<Demo> queryList(){
		Map<String,Object> map = new HashMap<>();
		return demoDao.queryList(map);
	}

    @TargetDataSource(name="ds2")
    @Override
    public List<Demo> selectList(){
        Map<String,Object> map = new HashMap<>();
        return demoDao.queryList(map);
    }

Notes:
@TargetDataSource(name="ds2") can only be placed on an implementing class, not on an interface; the recommended place is the impl class in the service layer.
If your project uses mapper interfaces such as

@Mapper
public interface DemoDao extends BaseMapper<Demo> {
}

then @TargetDataSource cannot be used on them; to switch data sources in the persistence layer you need an implementing class. Using the annotation in the service layer is strongly recommended.
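
For reference, a minimal sketch of a service implementation using the annotation is shown below. DemoService, DemoServiceImpl, Demo and the DemoDao.queryList method are illustrative names taken from or implied by the snippets above, not a definitive implementation.

package com.a.service.impl;

import com.a.dynamic.TargetDataSource;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Hypothetical service implementation. @TargetDataSource sits on the impl methods,
// so DynamicDataSourceAspect switches to the named slave data source before the call
// and clears the thread-local binding afterwards. DemoService, Demo and DemoDao are
// illustrative types that are not defined in this article.
@Service
public class DemoServiceImpl implements DemoService {

    @Autowired
    private DemoDao demoDao;

    // Runs against the slave data source "ds1"
    @TargetDataSource(name = "ds1")
    @Override
    public List<Demo> queryList() {
        Map<String, Object> map = new HashMap<>();
        return demoDao.queryList(map);
    }

    // No annotation: runs against the default (master) data source
    @Override
    public List<Demo> defaultList() {
        return demoDao.queryList(new HashMap<>());
    }
}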


Source: https://yq.aliyun.com/articles/620075
