Spring read-write separation

This article configures read-write separation in Spring. Prepare an account on the master database with read and write permissions, and an account on the slave database with read-only permissions.

Properties configuration file

datasource.url=jdbc:mysql://*******rds.aliyuncs.com:3306/test?allowMultiQueries=true
datasource.username=test
datasource.password=test
datasource.driver=com.mysql.jdbc.Driver
datasource.initialSize=10
datasource.minIdle=2
datasource.maxActive=100
datasource.maxWait=6000
datasource.timeBetweenEvictionRunsMillis=6000
datasource.minEvictableIdleTimeMillis=300000
datasource.validationQuery=SELECT 'x'
datasource.testWhileIdle=true
datasource.testOnBorrow=false
datasource.testOnReturn=false
datasource.poolPreparedStatements=false
datasource.maxPoolPreparedStatementPerConnectionSize=20
datasource.filters=stat


slave.datasource.url=jdbc:mysql://******/test?allowMultiQueries=true
slave.datasource.username=slavetest
slave.datasource.password=test
slave.datasource.driver=com.mysql.jdbc.Driver
slave.datasource.initialSize=10
slave.datasource.minIdle=2
slave.datasource.maxActive=100
slave.datasource.maxWait=6000
slave.datasource.timeBetweenEvictionRunsMillis=6000
slave.datasource.minEvictableIdleTimeMillis=300000
slave.datasource.validationQuery=SELECT 'x'
slave.datasource.testWhileIdle=true
slave.datasource.testOnBorrow=false
slave.datasource.testOnReturn=false
slave.datasource.poolPreparedStatements=false
slave.datasource.maxPoolPreparedStatementPerConnectionSize=20
slave.datasource.filters=stat
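
For reference, the same pool settings can also be applied programmatically. The following is a minimal sketch, not part of the original article, assuming the keys above are stored in a file named datasource.properties on the classpath; only the master pool is shown, and the setters used are the standard DruidDataSource API.

import java.io.InputStream;
import java.util.Properties;

import com.alibaba.druid.pool.DruidDataSource;

// Sketch only: builds the master pool in Java from the keys listed above.
public class MasterPoolSketch {

    public static DruidDataSource buildMaster() throws Exception {
        Properties p = new Properties();
        try (InputStream in = MasterPoolSketch.class.getClassLoader()
                .getResourceAsStream("datasource.properties")) { // assumed file name
            p.load(in);
        }
        DruidDataSource ds = new DruidDataSource();
        ds.setDriverClassName(p.getProperty("datasource.driver").trim());
        ds.setUrl(p.getProperty("datasource.url").trim());
        ds.setUsername(p.getProperty("datasource.username").trim());
        ds.setPassword(p.getProperty("datasource.password").trim());
        ds.setInitialSize(Integer.parseInt(p.getProperty("datasource.initialSize").trim()));
        ds.setMinIdle(Integer.parseInt(p.getProperty("datasource.minIdle").trim()));
        ds.setMaxActive(Integer.parseInt(p.getProperty("datasource.maxActive").trim()));
        ds.setMaxWait(Long.parseLong(p.getProperty("datasource.maxWait").trim()));
        ds.setValidationQuery(p.getProperty("datasource.validationQuery").trim());
        ds.setTestWhileIdle(Boolean.parseBoolean(p.getProperty("datasource.testWhileIdle").trim()));
        ds.setFilters(p.getProperty("datasource.filters").trim()); // "stat" enables Druid's statistics filter
        return ds;
    }
}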


XML data source configuration

<!-- Create the master jdbc data source -->
<bean id="masterDataSource" class="com.alibaba.druid.pool.DruidDataSource"
      init-method="init" destroy-method="close" primary="true">
    <property name="driverClassName" value="${datasource.driver}"></property>
    <property name="url" value="${datasource.url}"></property>
    <property name="username" value="${datasource.username}"></property>
    <property name="password" value="${datasource.password}"></property>
    <!-- Number of physical connections established at initialization. Initialization happens when the init method is called explicitly, or on the first getConnection -->
    <property name="initialSize" value="${datasource.initialSize}"></property>
    <!-- Minimum number of idle connections in the pool -->
    <property name="minIdle" value="${datasource.minIdle}"></property>
    <!-- Maximum number of connections in the pool -->
    <property name="maxActive" value="${datasource.maxActive}"></property>
    <!-- Interval, in milliseconds, between checks for idle connections that need to be closed -->
    <property name="timeBetweenEvictionRunsMillis" value="${datasource.timeBetweenEvictionRunsMillis}"></property>
    <!-- Minimum time, in milliseconds, a connection must stay in the pool before it can be evicted -->
    <property name="minEvictableIdleTimeMillis" value="${datasource.minEvictableIdleTimeMillis}"></property>
    <!-- SQL used to verify that a connection is valid; differs per database -->
    <property name="validationQuery" value="${datasource.validationQuery}"></property>
    <!-- Recommended to set to true: it does not hurt performance and keeps connections safe. When a connection is requested and its idle time exceeds timeBetweenEvictionRunsMillis, the validationQuery is run to check that it is still valid -->
    <property name="testWhileIdle" value="${datasource.testWhileIdle}"></property>
    <!-- Run the validationQuery every time a connection is requested; this reduces performance -->
    <property name="testOnBorrow" value="${datasource.testOnBorrow}"></property>
    <!-- Run the validationQuery every time a connection is returned; this reduces performance -->
    <property name="testOnReturn" value="${datasource.testOnReturn}"></property>
    <!-- Enable PSCache and set its size per connection -->
    <property name="poolPreparedStatements" value="${datasource.poolPreparedStatements}"></property>
    <property name="maxPoolPreparedStatementPerConnectionSize"
              value="${datasource.maxPoolPreparedStatementPerConnectionSize}"></property>
    <property name="filters" value="${datasource.filters}"></property>
</bean>


<!-- Create the slave jdbc data source -->
<bean id="slaveDataSource" class="com.alibaba.druid.pool.DruidDataSource"
      init-method="init" destroy-method="close">
    <property name="driverClassName" value="${slave.datasource.driver}"></property>
    <property name="url" value="${slave.datasource.url}"></property>
    <property name="username" value="${slave.datasource.username}"></property>
    <property name="password" value="${slave.datasource.password}"></property>
    <!-- Number of physical connections established at initialization. Initialization happens when the init method is called explicitly, or on the first getConnection -->
    <property name="initialSize" value="${slave.datasource.initialSize}"></property>
    <!-- Minimum number of idle connections in the pool -->
    <property name="minIdle" value="${slave.datasource.minIdle}"></property>
    <!-- Maximum number of connections in the pool -->
    <property name="maxActive" value="${slave.datasource.maxActive}"></property>
    <!-- Interval, in milliseconds, between checks for idle connections that need to be closed -->
    <property name="timeBetweenEvictionRunsMillis" value="${slave.datasource.timeBetweenEvictionRunsMillis}"></property>
    <!-- Minimum time, in milliseconds, a connection must stay in the pool before it can be evicted -->
    <property name="minEvictableIdleTimeMillis" value="${slave.datasource.minEvictableIdleTimeMillis}"></property>
    <!-- SQL used to verify that a connection is valid; differs per database -->
    <property name="validationQuery" value="${slave.datasource.validationQuery}"></property>
    <!-- Recommended to set to true: it does not hurt performance and keeps connections safe. When a connection is requested and its idle time exceeds timeBetweenEvictionRunsMillis, the validationQuery is run to check that it is still valid -->
    <property name="testWhileIdle" value="${slave.datasource.testWhileIdle}"></property>
    <!-- Run the validationQuery every time a connection is requested; this reduces performance -->
    <property name="testOnBorrow" value="${slave.datasource.testOnBorrow}"></property>
    <!-- Run the validationQuery every time a connection is returned; this reduces performance -->
    <property name="testOnReturn" value="${slave.datasource.testOnReturn}"></property>
    <!-- Enable PSCache and set its size per connection -->
    <property name="poolPreparedStatements" value="${slave.datasource.poolPreparedStatements}"></property>
    <property name="maxPoolPreparedStatementPerConnectionSize"
              value="${slave.datasource.maxPoolPreparedStatementPerConnectionSize}"></property>
    <property name="filters" value="${slave.datasource.filters}"></property>
</bean>
<!-- Dynamic data source: which pool is used is decided by the @DataSource annotation on the service method -->
<bean id="dataSource" class="org.carddoc.common.dataSource.DynamicDataSource">
    <property name="targetDataSources">
        <map key-type="java.lang.String">
            <!-- read: slave -->
            <entry key="slave" value-ref="slaveDataSource"/>
            <!-- write: master -->
            <entry key="master" value-ref="masterDataSource"/>
        </map>
    </property>
    <property name="defaultTargetDataSource" ref="masterDataSource"/>
</bean>


<!-- Scan all mapper files -->
<bean id="sqlSessionFactory" class="org.mybatis.spring.SqlSessionFactoryBean">
<property name="dataSource" ref="dataSource" />
<property name="configLocation" value="classpath:mapper_config.xml"></property>
<!-- <property name="mapperLocations" value="classpath:mapper/**.xml" /> -->
<property name="mapperLocations">
<array>
<value>classpath:mapper/**.xml</value>
<value>classpath:commapper/**.xml</value>
</array>
</property>
</bean>


<bean class="org.mybatis.spring.mapper.MapperScannerConfigurer">
<property name="basePackage" value="**.dao,org.carddoc.common.dao" />
<property name="sqlSessionFactoryBeanName" value="sqlSessionFactory" />
</bean>
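
For context, MapperScannerConfigurer registers every interface under the basePackage patterns as a MyBatis mapper bean, bound to the XML statements scanned above. A hypothetical example follows; package, class, and method names are illustrative, not from the original project.

package org.carddoc.api.user.dao; // hypothetical package, matched by the "**.dao" pattern above

import java.util.Map;

import org.apache.ibatis.annotations.Param;

// Hypothetical mapper interface: MapperScannerConfigurer turns it into a bean whose
// methods are bound to statements with matching ids in one of the mapper/**.xml files.
public interface UserDao {

    Map<String, Object> selectById(@Param("id") Long id);

    int updateName(@Param("id") Long id, @Param("name") String name);
}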
<!-- Transaction management for the dataSource routing data source -->
<bean id="dsTransactionManager"
      class="org.springframework.jdbc.datasource.DataSourceTransactionManager">
    <property name="dataSource" ref="dataSource"></property>
</bean>

<!-- Annotation-driven transaction management for the MyBatis service layer -->
<tx:annotation-driven transaction-manager="dsTransactionManager"
                      proxy-target-class="true"></tx:annotation-driven>




<!-- Resolve the @DataSource annotation on business-layer methods and bind the chosen data source to the current thread via HandleDataSource -->
<bean id="dataSourceAspect" class="org.carddoc.common.dataSource.DataSourceAspect" />
<aop:config proxy-target-class="true">
    <aop:aspect id="dataSourceAspect" ref="dataSourceAspect" order="2">
        <aop:pointcut id="tx" expression="execution(* org.carddoc.api.*.service.imp.*.*(..))"/>
        <aop:before pointcut-ref="tx" method="before" />
    </aop:aspect>
</aop:config>
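
If annotation-style AspectJ is preferred over the <aop:config> block, roughly the same aspect could be written as below. This is a sketch, not the original code: it assumes <aop:aspectj-autoproxy proxy-target-class="true"/> is enabled, that the class is registered as a bean, and that it sits in the same package as the DataSource annotation and HandleDataSource helper shown later in this article.

import java.lang.reflect.Method;

import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Before;
import org.aspectj.lang.reflect.MethodSignature;
import org.springframework.core.annotation.Order;

// Annotation-style variant of DataSourceAspect (sketch only).
@Aspect
@Order(2)
public class AnnotationDataSourceAspect {

    // Same pointcut as the XML configuration: every method of the service implementation classes.
    @Before("execution(* org.carddoc.api.*.service.imp.*.*(..))")
    public void before(JoinPoint point) {
        Method m = ((MethodSignature) point.getSignature()).getMethod();
        if (m != null && m.isAnnotationPresent(DataSource.class)) {
            HandleDataSource.putDataSource(m.getAnnotation(DataSource.class).value());
        }
    }
}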




***********************************************************

Java aspect implementation

DataSource annotation declaration

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;


/**
 * RetentionPolicy.RUNTIME:
 * the compiler records the annotation in the class file and the VM retains it at runtime,
 * so it can be read reflectively by the aspect.
 */
@Retention(RetentionPolicy.RUNTIME)  
@Target(ElementType.METHOD) 
public @interface DataSource {
String value();

}
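
With the annotation, the aspect, and the routing data source in place, a service implementation method only has to declare which pool it needs. The following usage sketch is illustrative (package, class, and method names are made up; the import for the DataSource annotation assumes it lives alongside DataSourceAspect):

package org.carddoc.api.user.service.imp; // hypothetical package, matched by the aspect's pointcut

import java.util.Map;

import org.carddoc.common.dataSource.DataSource; // assumed location of the annotation
import org.springframework.transaction.annotation.Transactional;

// Hypothetical service implementation showing how @DataSource is used.
public class UserServiceImp {

    @DataSource("slave")   // read-only query, routed to slaveDataSource
    public Map<String, Object> getUser(Long id) {
        // ... call a mapper that runs a SELECT
        return null;
    }

    @Transactional
    @DataSource("master")  // write, routed to masterDataSource
    public void updateUserName(Long id, String name) {
        // ... call a mapper that runs an UPDATE
    }
}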




DataSourceAspect aspect implementation



import java.lang.reflect.Method;


import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.reflect.MethodSignature;


public class DataSourceAspect {


/**
     * Before the DAO layer obtains a connection from the data source, bind the
     * data source chosen by @DataSource to the current thread in this aspect.
     */
    public void before(JoinPoint point)
    {
         MethodSignature methodSignature = (MethodSignature) point.getSignature();
         
         try
         {
             Method m = methodSignature.getMethod();
             if (m != null && m.isAnnotationPresent(DataSource.class))
             {
                 DataSource data = m.getAnnotation(DataSource.class);
                 System.out.println("Selected data source: " + data.value());
                 HandleDataSource.putDataSource(data.value()); // bind the data source key to the current thread
             }
             }


         } catch (Exception e)
         {
             e.printStackTrace();
         }
      
    }

}
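
Note that the aspect only ever sets the key and never clears it, so a pooled worker thread keeps whatever value it was last given. If that is a concern, an after-advice can reset the binding. The method below is a sketch of what could be added to DataSourceAspect; neither it nor the extra <aop:after> line is part of the original configuration.

    // Possible addition to DataSourceAspect: clear the binding once the service method has
    // finished, so the next task on the same thread falls back to defaultTargetDataSource.
    // Wire it up with: <aop:after pointcut-ref="tx" method="after" />
    public void after(JoinPoint point)
    {
        HandleDataSource.holder.remove(); // holder is the public ThreadLocal declared in HandleDataSource
    }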


DynamicDataSource: returns the data source key for the current thread

import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;


public class DynamicDataSource extends AbstractRoutingDataSource {


  /**
     * Returns the lookup key for the data source. The key is the one the data source
     * is bound to in Map<String, DataSource> resolvedDataSources, and it is used by
     * determineTargetDataSource when resolving the target data source.
     */
    @Override
    protected Object determineCurrentLookupKey()
    {
        return HandleDataSource.getDataSource();
    }

}
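
AbstractRoutingDataSource resolves the returned key against the targetDataSources map configured earlier; when the key is null (no @DataSource annotation was matched), determineTargetDataSource falls back to defaultTargetDataSource, i.e. the master. The following small illustration is not from the original article and assumes routingDataSource is the "dataSource" bean defined in the XML configuration:

import java.sql.Connection;

// Illustration only: how the key bound by the aspect drives the routing.
public class RoutingIllustration {

    public static void demo(javax.sql.DataSource routingDataSource) throws Exception {
        HandleDataSource.putDataSource("slave");      // what @DataSource("slave") plus the aspect do
        try (Connection read = routingDataSource.getConnection()) {
            // resolved from targetDataSources["slave"] -> slaveDataSource
        }

        HandleDataSource.holder.remove();             // no key bound on this thread
        try (Connection write = routingDataSource.getConnection()) {
            // key is null -> defaultTargetDataSource -> masterDataSource
        }
    }
}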


HandleDataSource binds the data source of the current thread



public class HandleDataSource {


public static final ThreadLocal<String> holder = new ThreadLocal<String>();


    /**
     * Bind the data source key to the current thread
     * 
     * @param datasource the data source key ("master" or "slave")
     */
    public static void putDataSource(String datasource)
    {
        holder.set(datasource);
    }


    /**
     * Get the data source key of the current thread
     * 
     * @return the bound key, or null if none has been set
     */
    public static String getDataSource()
    {
        return holder.get();
    }
}
