关于事务与动态数据源的处理,请注意以下简介。
package com.zwl.dao.aop; import com.zwl.dao.config.DatabaseContextHolder; import com.zwl.dao.config.DatabaseType; import com.zwl.dao.config.DynamicDataSource; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; import org.aspectj.lang.JoinPoint; import org.aspectj.lang.annotation.Aspect; import org.aspectj.lang.annotation.Before; import org.aspectj.lang.annotation.Pointcut; import org.springframework.context.annotation.EnableAspectJAutoProxy; import org.springframework.stereotype.Component; import java.util.List; /** * ${动态处理数据源,根据命名区分} * author 二师兄超级帅 * create 2018-06-29 13:43 **/ @Aspect @Component @EnableAspectJAutoProxy(proxyTargetClass = true) @Slf4j public class DataSourceAspect { @Pointcut("execution(* com.zwl.dao.mapper.*.*(..))")//切点 public void aspect() { } @Before("aspect()") public void before(JoinPoint point) { //在指定切点的方法之前执行 String className = point.getTarget().getClass().getName(); String method = point.getSignature().getName(); String args = StringUtils.join(point.getArgs(), ","); log.info("className:{}, method:{}, args:{} ", className, method, args); try { for (DatabaseType type : DatabaseType.values()) { List<String> values = DynamicDataSource.METHOD_TYPE_MAP.get(type); for (String key : values) { if (method.startsWith(key)) { log.info(">>{} 方法使用的数据源为:{}<<", method, key); DatabaseContextHolder.setDatabaseType(type); DatabaseType types = DatabaseContextHolder.getDatabaseType(); log.info(">>{}方法使用的数据源为:{}<<", method, types); } } } } catch (Exception e) { log.error(e.getMessage(), e); } } }
package com.zwl.dao.config; /** * ${列出数据源类型} * author 二师兄超级帅 * create 2018-06-29 13:34 **/ public enum DatabaseType { master("write"), slave("read"); DatabaseType(String name) { this.name = name; } private String name; public String getName() { return name; } public void setName(String name) { this.name = name; } @Override public String toString() { return "DatabaseType{" + "name='" + name + '\'' + '}'; } }
package com.zwl.dao.config; import com.alibaba.druid.pool.DruidDataSource; import lombok.extern.slf4j.Slf4j; import org.apache.ibatis.session.SqlSessionFactory; import org.mybatis.spring.SqlSessionFactoryBean; import org.mybatis.spring.annotation.MapperScan; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.boot.jdbc.DataSourceBuilder; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Primary; import org.springframework.core.env.Environment; import org.springframework.core.io.support.PathMatchingResourcePatternResolver; import org.springframework.jdbc.datasource.DataSourceTransactionManager; import org.springframework.transaction.annotation.EnableTransactionManagement; import javax.sql.DataSource; import java.sql.SQLException; import java.util.HashMap; import java.util.Map; /** * ${DESCRIPTION} * author 二师兄超级帅 * create 2018-06-29 13:35 **/ @Configuration @MapperScan("com.zwl.dao.mapper") @EnableTransactionManagement @Slf4j public class DataSourceConfig { @Autowired private Environment env; // (1) @Autowired private DataSourceProperties properties; // (2) @Value("${spring.datasource.druid.filters}") // (3) private String filters; @Value("${spring.datasource.druid.initial-size}") private Integer initialSize; @Value("${spring.datasource.druid.min-idle}") private Integer minIdle; @Value("${spring.datasource.druid.max-active}") private Integer maxActive; @Value("${spring.datasource.druid.max-wait}") private Integer maxWait; @Value("${spring.datasource.druid.time-between-eviction-runs-millis}") private Long timeBetweenEvictionRunsMillis; 
@Value("${spring.datasource.druid.min-evictable-idle-time-millis}") private Long minEvictableIdleTimeMillis; @Value("${spring.datasource.druid.validation-query}") private String validationQuery; @Value("${spring.datasource.druid.test-while-idle}") private Boolean testWhileIdle; @Value("${spring.datasource.druid.test-on-borrow}") private boolean testOnBorrow; @Value("${spring.datasource.druid.test-on-return}") private boolean testOnReturn; @Value("${spring.datasource.druid.pool-prepared-statements}") private boolean poolPreparedStatements; @Value("${spring.datasource.druid.max-pool-prepared-statement-per-connection-size}") private Integer maxPoolPreparedStatementPerConnectionSize; @Value("${mapper.config-location}") private String mapperLocationPattern; /** * 通过Spring JDBC 快速创建 DataSource * * @return */ @Bean(name = "masterDataSource") @Qualifier("masterDataSource") @ConfigurationProperties(prefix = "spring.datasource.master") // (4) public DataSource masterDataSource() { return DataSourceBuilder.create().build(); } /** * 手动创建DruidDataSource,通过DataSourceProperties 读取配置 * * @return * @throws SQLException */ @Bean(name = "slaveDataSource") @Qualifier("slaveDataSource") @ConfigurationProperties(prefix = "spring.datasource.slave") public DataSource slaveDataSource() throws SQLException { DruidDataSource dataSource = new DruidDataSource(); dataSource.setFilters(filters); dataSource.setUrl(properties.getUrl()); dataSource.setDriverClassName(properties.getDriverClassName()); dataSource.setUsername(properties.getUsername()); dataSource.setPassword(properties.getPassword()); dataSource.setInitialSize(initialSize); dataSource.setMinIdle(minIdle); dataSource.setMaxActive(maxActive); dataSource.setMaxWait(maxWait); dataSource.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis); dataSource.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis); dataSource.setValidationQuery(validationQuery); dataSource.setTestWhileIdle(testWhileIdle); 
dataSource.setTestOnBorrow(testOnBorrow); dataSource.setTestOnReturn(testOnReturn); dataSource.setPoolPreparedStatements(poolPreparedStatements); dataSource.setMaxPoolPreparedStatementPerConnectionSize(maxPoolPreparedStatementPerConnectionSize); return dataSource; } /** * 构造多数据源连接池 * Master 数据源连接池采用 HikariDataSource * Slave 数据源连接池采用 DruidDataSource * * @param master * @param slave * @return */ @Bean @Primary public DynamicDataSource dataSource(@Qualifier("masterDataSource") DataSource master, @Qualifier("slaveDataSource") DataSource slave) { Map<Object, Object> targetDataSources = new HashMap<>(); targetDataSources.put(DatabaseType.master, master); targetDataSources.put(DatabaseType.slave, slave); DynamicDataSource dataSource = new DynamicDataSource(); dataSource.setTargetDataSources(targetDataSources);// 该方法是AbstractRoutingDataSource的方法 dataSource.setDefaultTargetDataSource(slave);// 默认的datasource设置为myTestDbDataSource String read = env.getProperty("spring.datasource.read"); dataSource.setMethodType(DatabaseType.slave, read); String write = env.getProperty("spring.datasource.write"); dataSource.setMethodType(DatabaseType.master, write); return dataSource; } // @Bean("sqlSessionFactory") // public SqlSessionFactory sqlSessionFactory(@Qualifier("masterDataSource") DataSource myTestDbDataSource, // @Qualifier("slaveDataSource") DataSource myTestDb2DataSource) throws Exception { // SqlSessionFactoryBean fb = new SqlSessionFactoryBean(); // fb.setDataSource(this.dataSource(myTestDbDataSource, myTestDb2DataSource)); //// fb.setTypeAliasesPackage(env.getProperty("mybatis.type-aliases-package")); // fb.setMapperLocations(new PathMatchingResourcePatternResolver().getResources(env.getProperty("mybatis.mapper-locations"))); // return fb.getObject(); // } @Bean(name = "sqlSessionFactory") public SqlSessionFactory sqlSessionFactory(@Qualifier("masterDataSource") DataSource myTestDbDataSource, @Qualifier("slaveDataSource") DataSource myTestDb2DataSource) throws Exception { 
SqlSessionFactoryBean sqlSessionFactoryBean = new SqlSessionFactoryBean(); sqlSessionFactoryBean.setDataSource(this.dataSource(myTestDbDataSource, myTestDb2DataSource)); PathMatchingResourcePatternResolver resolver = new PathMatchingResourcePatternResolver(); sqlSessionFactoryBean.setMapperLocations(resolver.getResources(mapperLocationPattern)); return sqlSessionFactoryBean.getObject(); } @Bean public DataSourceTransactionManager transactionManager(DynamicDataSource dataSource) throws Exception { return new DataSourceTransactionManager(dataSource); }
这里我遇到的一个坑是 Spring Boot 的 YML 配置文件:启动项目时总是报错:Could not
resolve placeholder 'spring.datasource.druid.filters' in value "${spring.datasource.druid.filters}"。
第一:Spring Boot 配置文件的格式有严格要求:@Value 注解引用的每个属性都必须在配置文件中存在,且 YML 的缩进层级必须正确,否则占位符无法解析。