SpringBoot 多数据源配置:在对比了多种实现方式之后,下面这种方式最为实用、好维护。
SpringBoot 多数据源配置
文件结构:
DateSourceMultiple
DataSourceType.java 注解动态切换数据库
DruidConfig.java 加载数据源 *.properties 之所以不用yml是因为读取不便,配置不简便
DynamicDataSource.java 加载数据源
DynamicDataSourceAspect.java AOP切面方法进行动态数据源切换
DynamicDataSourceContextHolder.java 设置数据源,加载入缓存
SessionFactoryConfig.java sessionFactory设置
DataSourceType.java (注解动态切换数据库)
import java.lang.annotation.*;
/**
 * Marks a method (or type) whose database operations should run against a
 * specific data source. Consumed by DynamicDataSourceAspect, which sets the
 * routing key in DynamicDataSourceContextHolder before the method executes
 * and clears it afterwards.
 * @author cdw
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.METHOD, ElementType.TYPE})
@Documented
public @interface DataSourceType {
String type() default DruidConfig.FIRST_ORACLE; // logical data source name to route to (e.g. first_oracle, second_oracle, third_mysql); defaults to the first Oracle source
}
DruidConfig.java (加载数据源)
import com.alibaba.druid.filter.Filter;
import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.support.http.StatViewServlet;
import com.alibaba.druid.support.http.WebStatFilter;
import com.alibaba.druid.util.StringUtils;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.springframework.beans.MutablePropertyValues;
import org.springframework.boot.bind.RelaxedDataBinder;
import org.springframework.boot.bind.RelaxedPropertyResolver;
import org.springframework.boot.web.servlet.FilterRegistrationBean;
import org.springframework.boot.web.servlet.RegistrationBean;
import org.springframework.boot.web.servlet.ServletRegistrationBean;
import org.springframework.context.EnvironmentAware;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.core.convert.ConversionService;
import org.springframework.core.convert.support.DefaultConversionService;
import org.springframework.core.env.Environment;
import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import javax.sql.DataSource;
import java.util.*;
/**
 * Multi data-source configuration loaded from *.properties.
 *
 * Reads every data source declared in "spring.custom.datasource.name",
 * builds a Druid pool for each (shared defaults from
 * "spring.datasource.druid.*" overridden by per-source keys), registers them
 * in a routing {@link DynamicDataSource}, and optionally exposes Druid's
 * stat servlet and web-stat filter.
 * @author cdw
 */
@Configuration
@EnableTransactionManagement
public class DruidConfig implements EnvironmentAware {

    // Logical data source names, used with @DataSourceType to switch sources.
    public static final String FIRST_ORACLE = "first_oracle";
    public static final String SECOND_ORACLE = "second_oracle";
    public static final String THIRD_MYSQL = "third_mysql";
    // NOTE(review): the lowercase 'f' typo in this constant's name is kept
    // deliberately — it is public API and renaming would break callers.
    public static final String DATA_SOURCE_PREfIX_CUSTOM = "spring.custom.datasource.";
    public static final String DATA_SOURCE_CUSTOM_NAME = "name";
    public static final String SEP = ",";
    public static final String DRUID_SOURCE_PREFIX = "spring.datasource.druid.";
    public static final String ENABLED_ATTRIBUTE_NAME = "enabled";

    private final List<String> customDataSourceNames = new ArrayList<String>();
    private final Logger logger = LogManager.getLogger(DruidConfig.class);
    private final ConversionService conversionService = new DefaultConversionService();
    private Environment environment;

    /**
     * @param environment the environment to read data source properties from
     */
    @Override
    public void setEnvironment(Environment environment) {
        this.environment = environment;
    }

    /**
     * Primary routing data source. The first source listed in
     * "spring.custom.datasource.name" becomes the default target.
     *
     * @return the routing data source wrapping all configured pools
     * @throws IllegalStateException if no data source could be initialized
     *         (clearer than the IndexOutOfBoundsException the previous
     *         version threw from customDataSourceNames.get(0))
     */
    @Bean(name = "dataSource")
    @Primary
    public AbstractRoutingDataSource dataSource() {
        DynamicDataSource dynamicDataSource = new DynamicDataSource();
        LinkedHashMap<Object, Object> targetDatasources = new LinkedHashMap<Object, Object>();
        initCustomDataSources(targetDatasources);
        if (customDataSourceNames.isEmpty()) {
            throw new IllegalStateException(
                    "No data source was initialized; check the '"
                            + DATA_SOURCE_PREfIX_CUSTOM + DATA_SOURCE_CUSTOM_NAME + "' property.");
        }
        dynamicDataSource.setDefaultTargetDataSource(targetDatasources.get(customDataSourceNames.get(0)));
        dynamicDataSource.setTargetDataSources(targetDatasources);
        dynamicDataSource.afterPropertiesSet();
        return dynamicDataSource;
    }

    /**
     * Builds every declared data source and puts it into the target map,
     * registering its name for @DataSourceType validation.
     *
     * @param targetDataResources receives name -> DataSource entries
     */
    private void initCustomDataSources(LinkedHashMap<Object, Object> targetDataResources) {
        RelaxedPropertyResolver property =
                new RelaxedPropertyResolver(environment, DruidConfig.DATA_SOURCE_PREfIX_CUSTOM);
        String dataSourceNames = property.getProperty(DruidConfig.DATA_SOURCE_CUSTOM_NAME);
        if (StringUtils.isEmpty(dataSourceNames)) {
            logger.error("The multiple data source list are empty.");
            return;
        }
        // Shared Druid defaults, re-keyed with the full property prefix.
        RelaxedPropertyResolver springDataSourceProperty =
                new RelaxedPropertyResolver(environment, "spring.datasource.");
        Map<String, Object> druidPropertiesMaps = springDataSourceProperty.getSubProperties("druid.");
        Map<String, Object> druidValuesMaps = new HashMap<String, Object>();
        for (String key : druidPropertiesMaps.keySet()) {
            druidValuesMaps.put(DruidConfig.DRUID_SOURCE_PREFIX + key, druidPropertiesMaps.get(key));
        }
        for (String dataSourceName : dataSourceNames.split(DruidConfig.SEP)) {
            try {
                // Fresh copy of the shared defaults per data source. The previous
                // version reused one MutablePropertyValues across iterations, so
                // source-specific values (url, username, ...) of an earlier entry
                // leaked into the next one whenever it did not override them.
                MutablePropertyValues dataSourcePropertyValue = new MutablePropertyValues(druidValuesMaps);
                Map<String, Object> dsMaps = property.getSubProperties(dataSourceName + ".");
                for (String dsKey : dsMaps.keySet()) {
                    if ("type".equals(dsKey)) {
                        dataSourcePropertyValue.addPropertyValue("spring.datasource.type", dsMaps.get(dsKey));
                    } else {
                        dataSourcePropertyValue.addPropertyValue(DruidConfig.DRUID_SOURCE_PREFIX + dsKey, dsMaps.get(dsKey));
                    }
                }
                DataSource ds = dataSourcebuild(dataSourcePropertyValue);
                if (null != ds) {
                    if (ds instanceof DruidDataSource) {
                        DruidDataSource druidDataSource = (DruidDataSource) ds;
                        druidDataSource.setName(dataSourceName);
                        initDruidFilters(druidDataSource);
                    }
                    customDataSourceNames.add(dataSourceName);
                    DynamicDataSourceContextHolder.datasourceId.add(dataSourceName);
                    targetDataResources.put(dataSourceName, ds);
                    // Only report success when a pool was actually built; the
                    // previous version logged success even when ds was null.
                    logger.info("Data source initialization 【" + dataSourceName + "】 successfully ...");
                }
            } catch (Exception e) {
                logger.error("Data source initialization【" + dataSourceName + "】 failed ...", e);
            }
        }
    }

    /**
     * @param dataSourcePropertyValue 数据源创建所需参数 (properties used to build the pool)
     * @return the created DataSource, or null when the properties are empty,
     *         declare no "spring.datasource.type", or the type is not Druid
     * @Title: DataSourcebuild
     * @Description: 创建数据源 (creates a data source)
     */
    public DataSource dataSourcebuild(MutablePropertyValues dataSourcePropertyValue) {
        if (dataSourcePropertyValue.isEmpty()) {
            return null;
        }
        // Null-safe lookup: the previous version called toString() directly and
        // threw a NullPointerException when "type" was missing for a source.
        Object typeValue = dataSourcePropertyValue.get("spring.datasource.type");
        String type = (typeValue == null) ? null : typeValue.toString();
        DataSource ds = null;
        if (!StringUtils.isEmpty(type) && StringUtils.equals(type, DruidDataSource.class.getTypeName())) {
            ds = new DruidDataSource();
            RelaxedDataBinder dataBinder = new RelaxedDataBinder(ds, DruidConfig.DRUID_SOURCE_PREFIX);
            dataBinder.setConversionService(conversionService);
            dataBinder.setIgnoreInvalidFields(false);
            dataBinder.setIgnoreNestedProperties(false);
            dataBinder.setIgnoreUnknownFields(true);
            dataBinder.bind(dataSourcePropertyValue);
        }
        return ds;
    }

    /**
     * Registers Druid's StatViewServlet when
     * "spring.datasource.druid.stat-view-servlet.enabled=true".
     *
     * @return the servlet registration, or null when disabled
     */
    @Bean
    public ServletRegistrationBean statViewServlet() {
        RelaxedPropertyResolver property =
                new RelaxedPropertyResolver(environment, "spring.datasource.druid.");
        Map<String, Object> druidPropertiesMaps = property.getSubProperties("stat-view-servlet.");
        boolean statViewServletEnabled = false;
        String statViewServletEnabledKey = DruidConfig.ENABLED_ATTRIBUTE_NAME;
        ServletRegistrationBean registrationBean = null;
        if (druidPropertiesMaps.containsKey(statViewServletEnabledKey)) {
            String statViewServletEnabledValue =
                    druidPropertiesMaps.get(statViewServletEnabledKey).toString();
            statViewServletEnabled = Boolean.parseBoolean(statViewServletEnabledValue);
        }
        if (statViewServletEnabled) {
            registrationBean = new ServletRegistrationBean();
            StatViewServlet statViewServlet = new StatViewServlet();
            registrationBean.setServlet(statViewServlet);
            String urlPatternKey = "url-pattern";
            String allowKey = "allow";
            String denyKey = "deny";
            String usernameKey = "login-username";
            String secretKey = "login-password";
            String resetEnableKey = "reset-enable";
            if (druidPropertiesMaps.containsKey(urlPatternKey)) {
                String urlPatternValue =
                        druidPropertiesMaps.get(urlPatternKey).toString();
                registrationBean.addUrlMappings(urlPatternValue);
            } else {
                registrationBean.addUrlMappings("/druid/*"); // Druid console default mapping
            }
            addBeanParameter(druidPropertiesMaps, registrationBean, "allow", allowKey);
            addBeanParameter(druidPropertiesMaps, registrationBean, "deny", denyKey);
            addBeanParameter(druidPropertiesMaps, registrationBean, "loginUsername", usernameKey);
            addBeanParameter(druidPropertiesMaps, registrationBean, "loginPassword", secretKey);
            addBeanParameter(druidPropertiesMaps, registrationBean, "resetEnable", resetEnableKey);
        }
        return registrationBean;
    }

    /**
     * Registers Druid's WebStatFilter when
     * "spring.datasource.druid.web-stat-filter.enabled=true".
     *
     * @return the filter registration, or null when disabled
     */
    @Bean
    public FilterRegistrationBean filterRegistrationBean() {
        RelaxedPropertyResolver property =
                new RelaxedPropertyResolver(environment, "spring.datasource.druid.");
        Map<String, Object> druidPropertiesMaps = property.getSubProperties("web-stat-filter.");
        boolean webStatFilterEnabled = false;
        String webStatFilterEnabledKey = DruidConfig.ENABLED_ATTRIBUTE_NAME;
        FilterRegistrationBean registrationBean = null;
        if (druidPropertiesMaps.containsKey(webStatFilterEnabledKey)) {
            String webStatFilterEnabledValue =
                    druidPropertiesMaps.get(webStatFilterEnabledKey).toString();
            webStatFilterEnabled = Boolean.parseBoolean(webStatFilterEnabledValue);
        }
        if (webStatFilterEnabled) {
            registrationBean = new FilterRegistrationBean();
            WebStatFilter filter = new WebStatFilter();
            registrationBean.setFilter(filter);
            String urlPatternKey = "url-pattern";
            String exclusionsKey = "exclusions";
            String sessionStatEnabledKey = "session-stat-enable";
            String profileEnabledKey = "profile-enable";
            String principalCookieNameKey = "principal-cookie-name";
            String principalSessionNameKey = "principal-session-name";
            String sessionStateMaxCountKey = "session-stat-max-count";
            if (druidPropertiesMaps.containsKey(urlPatternKey)) {
                String urlPatternValue =
                        druidPropertiesMaps.get(urlPatternKey).toString();
                registrationBean.addUrlPatterns(urlPatternValue);
            } else {
                registrationBean.addUrlPatterns("/*");
            }
            if (druidPropertiesMaps.containsKey(exclusionsKey)) {
                String exclusionsValue =
                        druidPropertiesMaps.get(exclusionsKey).toString();
                registrationBean.addInitParameter("exclusions", exclusionsValue);
            } else {
                // Static resources and the Druid console itself are not counted.
                registrationBean.addInitParameter("exclusions", "*.js,*.gif,*.jpg,*.png,*.css,*.ico,/druid/*");
            }
            addBeanParameter(druidPropertiesMaps, registrationBean, "sessionStatEnable", sessionStatEnabledKey);
            addBeanParameter(druidPropertiesMaps, registrationBean, "profileEnable", profileEnabledKey);
            addBeanParameter(druidPropertiesMaps, registrationBean, "principalCookieName", principalCookieNameKey);
            addBeanParameter(druidPropertiesMaps, registrationBean, "sessionStatMaxCount", sessionStateMaxCountKey);
            addBeanParameter(druidPropertiesMaps, registrationBean, "principalSessionName", principalSessionNameKey);
        }
        return registrationBean;
    }

    /**
     * Copies one optional property from the Druid property map into the
     * registration's init parameters, if present.
     *
     * @param druidPropertyMap  source properties
     * @param registrationBean  servlet/filter registration to configure
     * @param paramName         init-parameter name expected by Druid
     * @param propertyKey       relaxed property key to look up
     */
    private void addBeanParameter(Map<String, Object> druidPropertyMap, RegistrationBean registrationBean, String paramName, String propertyKey) {
        if (druidPropertyMap.containsKey(propertyKey)) {
            String propertyValue =
                    druidPropertyMap.get(propertyKey).toString();
            registrationBean.addInitParameter(paramName, propertyValue);
        }
    }

    /**
     * Binds "spring.datasource.druid.filter.&lt;name&gt;.*" properties onto the
     * proxy filters of the given Druid pool.
     *
     * NOTE(review): this assumes getProxyFilters() returns filters in the same
     * order as the names in "spring.datasource.druid.filters" — confirm if
     * that property is ever reordered.
     */
    private void initDruidFilters(DruidDataSource druidDataSource) {
        String filterNames = environment.getProperty("spring.datasource.druid.filters");
        if (StringUtils.isEmpty(filterNames)) {
            // Nothing to configure; the previous version threw a
            // NullPointerException here when the property was absent.
            return;
        }
        List<Filter> filters = druidDataSource.getProxyFilters();
        RelaxedPropertyResolver filterProperty =
                new RelaxedPropertyResolver(environment, "spring.datasource.druid.filter.");
        String[] filterNameArray = filterNames.split("\\,");
        // Bounded by both arrays so a name without a matching proxy filter
        // cannot cause an IndexOutOfBoundsException.
        for (int i = 0; i < filterNameArray.length && i < filters.size(); i++) {
            Map<String, Object> filterValueMap = filterProperty.getSubProperties(filterNameArray[i] + ".");
            Object enabledValue = filterValueMap.get(DruidConfig.ENABLED_ATTRIBUTE_NAME);
            // Null-safe: a filter with no "enabled" key is simply skipped.
            if (enabledValue != null && Boolean.parseBoolean(enabledValue.toString())) {
                MutablePropertyValues propertyValues = new MutablePropertyValues(filterValueMap);
                RelaxedDataBinder dataBinder = new RelaxedDataBinder(filters.get(i));
                dataBinder.bind(propertyValues);
            }
        }
    }
}
DynamicDataSource.java (加载数据源)
import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;
/**
 * Routing data source: delegates every connection request to the target pool
 * whose key is currently bound to the calling thread.
 * @author cdw
 */
public class DynamicDataSource extends AbstractRoutingDataSource {

    /**
     * Resolves the lookup key from the thread-bound context; a {@code null}
     * key makes the router fall back to the default target data source.
     */
    @Override
    protected Object determineCurrentLookupKey() {
        String boundKey = DynamicDataSourceContextHolder.getDateSoureType();
        return boundKey;
    }
}
DynamicDataSourceAspect.java (AOP切面方法进行动态数据源切换)
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.annotation.After;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Before;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;
/**
 * Aspect that switches the routing data source around any method annotated
 * with {@link DataSourceType}: binds the requested source before the call
 * and clears the binding after it.
 * @author cdw
 */
@Aspect
@Order(-1)
@Component
public class DynamicDataSourceAspect {

    private static final Logger logger = LogManager.getLogger(DynamicDataSourceAspect.class);

    /**
     * Binds the data source named by the annotation to the current thread
     * before the annotated method runs. An unknown name is logged and
     * ignored, leaving the previous/default source in effect.
     *
     * @param point the intercepted join point
     * @param dsa   the annotation carrying the target data source name
     */
    @Before("@annotation(dsa)")
    public void changeDataSource(JoinPoint point, DataSourceType dsa) {
        String requestedType = dsa.type();
        if (DynamicDataSourceContextHolder.existDateSoure(requestedType)) {
            logger.info("Set data source found ...【" + requestedType + "】");
            DynamicDataSourceContextHolder.setDateSoureType(requestedType);
        } else {
            logger.error("No data source found ...【" + requestedType + "】");
        }
    }

    /**
     * @param point the intercepted join point
     * @param dsa   the annotation on the completed method
     * @Title: destroyDataSource
     * @Description: clears the thread-bound data source key once the
     *               annotated method has finished, so later work on this
     *               thread falls back to the default source
     */
    @After("@annotation(dsa)")
    public void destroyDataSource(JoinPoint point, DataSourceType dsa) {
        DynamicDataSourceContextHolder.clearDateSoureType();
    }
}
DynamicDataSourceContextHolder.java (设置数据源,加载入缓存)
import java.util.ArrayList;
import java.util.List;
/**
 * Holds the per-thread data source routing key plus the list of data source
 * ids that were successfully registered at startup.
 * @author cdw
 */
public class DynamicDataSourceContextHolder {

    /**
     * Thread-local routing key: every thread owns an independent copy, so
     * switching the data source on one request thread never affects another.
     */
    private static final ThreadLocal<String> CONTEXT_HOLDER = new ThreadLocal<String>();

    /** All registered data source ids; used to validate switch requests. */
    public static List<String> datasourceId = new ArrayList<String>();

    /**
     * @Title: setDateSoureType
     * @Description: binds the given data source key to the current thread
     * @param dateSoureType the data source id to activate
     * @return void
     * @throws
     */
    public static void setDateSoureType(String dateSoureType) {
        CONTEXT_HOLDER.set(dateSoureType);
    }

    /**
     * @Title: getDateSoureType
     * @Description: returns the key bound to the current thread, or null
     * @return String
     * @throws
     */
    public static String getDateSoureType() {
        return CONTEXT_HOLDER.get();
    }

    /**
     * @Title: clearDateSoureType
     * @Description: removes the current thread's binding (back to default)
     * @return void
     * @throws
     */
    public static void clearDateSoureType() {
        CONTEXT_HOLDER.remove();
    }

    /**
     * @Title: existDateSoure
     * @Description: reports whether the given id was registered at startup
     * @param dateSoureType the data source id to check
     * @return boolean
     * @throws
     */
    public static boolean existDateSoure(String dateSoureType) {
        return datasourceId.contains(dateSoureType);
    }
}
SessionFactoryConfig.java (sessionFactory设置)
import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.SqlSessionTemplate;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.core.io.support.ResourcePatternResolver;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import javax.sql.DataSource;
import java.io.IOException;
/**
 * MyBatis session-factory configuration: builds the SqlSessionFactoryBean on
 * top of the routing DataSource from DruidConfig and wires the transaction
 * manager and SqlSessionTemplate.
 * @author cdw
 */
@Configuration
@EnableTransactionManagement
@MapperScan("com.test.springcloud.dto.*.mapper")
public class SessionFactoryConfig {

    private static final Logger logger = LogManager.getLogger(SessionFactoryConfig.class);

    /** The primary (routing) DataSource bean defined in DruidConfig. */
    @Autowired
    private DataSource dataSource;

    /** Package pattern whose entity classes receive MyBatis type aliases. */
    private String typeAliasPackage = "com.test.springcloud.dto.*.entity";

    /**
     * Creates the SqlSessionFactoryBean: sets the routing data source, the
     * mapper XML locations and the type-alias package.
     *
     * @return the configured factory bean
     * @throws IllegalStateException if the mapper XML resources cannot be
     *         read. The previous version swallowed the IOException and
     *         returned a null bean, which only surfaced later as an obscure
     *         context failure; failing fast with the cause is clearer.
     */
    @Bean(name = "sqlSessionFactory")
    public SqlSessionFactoryBean createSqlSessionFactoryBean() {
        logger.info("createSqlSessionFactoryBean method");
        try {
            ResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
            SqlSessionFactoryBean sqlSessionFactoryBean = new SqlSessionFactoryBean();
            sqlSessionFactoryBean.setDataSource(dataSource);
            sqlSessionFactoryBean.setMapperLocations(resolver.getResources("classpath:com/test/springcloud/dto/*/maps/*Mapper.xml"));
            sqlSessionFactoryBean.setTypeAliasesPackage(typeAliasPackage);
            return sqlSessionFactoryBean;
        } catch (IOException ex) {
            // Log with the full exception (the old ExceptionUtils.getMessage
            // call dropped the stack trace) and rethrow with the cause.
            logger.error("Error happens when getting config files.", ex);
            throw new IllegalStateException("Unable to load MyBatis mapper resources", ex);
        }
    }

    /** SqlSessionTemplate bound to the factory above. */
    @Bean
    public SqlSessionTemplate sqlSessionTemplate(SqlSessionFactory sqlSessionFactory) {
        return new SqlSessionTemplate(sqlSessionFactory);
    }

    /** Transaction manager over the routing DataSource. */
    @Bean
    public PlatformTransactionManager annotationDrivenTransactionManager() {
        return new DataSourceTransactionManager(dataSource);
    }
}
application.properties (数据源配置文件)
server.port=8888
spring.application.name=ServiceName
#
spring.freemarker.allow-request-override=false
spring.freemarker.cache=true
spring.freemarker.check-template-location=true
spring.freemarker.charset=utf-8
spring.freemarker.expose-request-attributes=false
spring.freemarker.expose-session-attributes=false
spring.freemarker.expose-spring-macro-helpers=false
spring.freemarker.suffix=.ftl
spring.freemarker.content-type=application/xml
spring.zipkin.base-url=http://127.0.0.1:7801
#base-url: http://127.0.0.1:7801
druid.IP=127.0.0.1
eureka.client.serviceUrl.defaultZone=http://eureka:[email protected]:8761/eureka
eureka.client.serviceUrl.register-with-eureka=false
# NOTE: the prefix "eureka.client.instance.*" is not a valid Spring Cloud property prefix
# (those keys were silently ignored); the correct prefix is "eureka.instance.*".
eureka.instance.instance-id=dept-1001.com
eureka.instance.prefer-ip-address=true
eureka.instance.lease-renewal-interval-in-seconds=2
eureka.instance.lease-expiration-duration-in-seconds=5
eureka.instance.prefer-ip-address=true
eureka.instance.instance-id=${spring.cloud.client.ipAddress}:${server.port}
management.security.enabled=false
#所有数据源名称
spring.custom.datasource.name=first_oracle,second_oracle,third_mysql
#第一个数据源(默认)
spring.custom.datasource.first_oracle.name=first_oracle
spring.custom.datasource.first_oracle.type=com.alibaba.druid.pool.DruidDataSource
spring.custom.datasource.first_oracle.driver-class-name=oracle.jdbc.driver.OracleDriver
spring.custom.datasource.first_oracle.url=jdbc:oracle:thin:@127.0.0.1:1521/oracle1
spring.custom.datasource.first_oracle.username=test1
spring.custom.datasource.first_oracle.password=test1
#第二个数据源
spring.custom.datasource.second_oracle.name=second_oracle
spring.custom.datasource.second_oracle.type=com.alibaba.druid.pool.DruidDataSource
spring.custom.datasource.second_oracle.driver-class-name=oracle.jdbc.driver.OracleDriver
spring.custom.datasource.second_oracle.url=jdbc:oracle:thin:@127.0.0.1:1521/oracle2
spring.custom.datasource.second_oracle.username=test2
spring.custom.datasource.second_oracle.password=test2
#第三个数据源
spring.custom.datasource.third_mysql.name=third_mysql
spring.custom.datasource.third_mysql.type=com.alibaba.druid.pool.DruidDataSource
spring.custom.datasource.third_mysql.driver-class-name=com.mysql.jdbc.Driver
spring.custom.datasource.third_mysql.url=jdbc:mysql://127.0.0.1:3306/mysql1?characterEncoding=utf8&autoReconnect=true&useSSL=false&useAffectedRows=true
spring.custom.datasource.third_mysql.username=test3
spring.custom.datasource.third_mysql.password=test3
#数据源公共配置
spring.datasource.druid.initial-size=5
spring.datasource.druid.min-idle=5
spring.datasource.druid.async-init=true
spring.datasource.druid.async-close-connection-enable=true
spring.datasource.druid.max-active=30
spring.datasource.druid.max-wait=8000
spring.datasource.druid.time-between-eviction-runs-millis=60000
spring.datasource.druid.min-evictable-idle-time-millis=30000
spring.datasource.druid.validation-query=SELECT 1 FROM DUAL
spring.datasource.druid.test-while-idle=true
spring.datasource.druid.test-on-borrow=true
spring.datasource.druid.test-on-return=true
spring.datasource.druid.pool-prepared-statements=true
spring.datasource.druid.max-pool-prepared-statement-per-connection-size=100
spring.datasource.druid.filters=stat,wall
spring.datasource.druid.connectionProperties=druid.stat.mergeSql=true;druid.stat.slowSqlMillis=8000
#
spring.datasource.druid.removeAbandoned=true
spring.datasource.druid.removeAbandonedTimeout=300
#
spring.datasource.druid.web-stat-filter.enabled=true
spring.datasource.druid.web-stat-filter.url-pattern=/*
spring.datasource.druid.web-stat-filter.exclusions=*.js,*.gif,*.jpg,*.png,*.css,*.ico,/druid/*
spring.datasource.druid.web-stat-filter.session-stat-enable=true
spring.datasource.druid.web-stat-filter.profile-enable=true
#
spring.datasource.druid.stat-view-servlet.enabled=true
spring.datasource.druid.stat-view-servlet.url-pattern=/druid/*
spring.datasource.druid.stat-view-servlet.login-username=admin
spring.datasource.druid.stat-view-servlet.login-password=admin
spring.datasource.druid.stat-view-servlet.reset-enable=false
spring.datasource.druid.stat-view-servlet.allow=127.0.0.1
#
spring.datasource.druid.filter.wall.enabled=true
spring.datasource.druid.filter.wall.db-type=oracle
spring.datasource.druid.filter.wall.config.alter-table-allow=false
spring.datasource.druid.filter.wall.config.truncate-allow=false
spring.datasource.druid.filter.wall.config.drop-table-allow=false
spring.datasource.druid.filter.wall.config.none-base-statement-allow=false
spring.datasource.druid.filter.wall.config.update-where-none-check=true
spring.datasource.druid.filter.wall.config.select-into-outfile-allow=false
spring.datasource.druid.filter.wall.config.metadata-allow=true
spring.datasource.druid.filter.wall.log-violation=true
spring.datasource.druid.filter.wall.throw-exception=true
#
spring.datasource.druid.filter.stat.log-slow-sql=true
spring.datasource.druid.filter.stat.slow-sql-millis=1000
spring.datasource.druid.filter.stat.merge-sql=true
spring.datasource.druid.filter.stat.db-type=oracle
spring.datasource.druid.filter.stat.enabled=true
引用方法:
@Override
@DataSourceType(type = DruidConfig.FIRST_ORACLE)
public JSONObject getOracleView(String userName) {
// 具体实现逻辑
}
@Override
@DataSourceType(type = DruidConfig.SECOND_ORACLE)
public JSONObject getOracleView(String userName) {
// 具体实现逻辑
}
@Override
@DataSourceType(type = DruidConfig.THIRD_MYSQL)
public JSONObject getMySQLView(String userName) {
// 具体实现逻辑
}
注意:实现方法必须是serviceImpl类重写的方法!!!
以上就是完整的SpringBoot 多数据源配置,具体的数据连接和配置信息根据个人需要配置,为了简便,没有注释,请谅解!
转载请注明出处!