Elastic-Job is a distributed job-scheduling framework open-sourced by Dangdang (dangdang.com).
1. Install ZooKeeper, which can be downloaded from the official website http://zookeeper.apache.org/
2. Add dependencies
<properties>
    <java.version>1.8</java.version>
    <elastic-job.version>2.1.5</elastic-job.version>
    <curator.version>2.10.0</curator.version>
</properties>

<dependency>
    <artifactId>elastic-job-common-core</artifactId>
    <groupId>com.dangdang</groupId>
    <version>${elastic-job.version}</version>
</dependency>
<dependency>
    <artifactId>elastic-job-lite-core</artifactId>
    <groupId>com.dangdang</groupId>
    <version>${elastic-job.version}</version>
</dependency>
<dependency>
    <artifactId>elastic-job-lite-spring</artifactId>
    <groupId>com.dangdang</groupId>
    <version>${elastic-job.version}</version>
</dependency>
<dependency>
    <groupId>org.apache.curator</groupId>
    <artifactId>curator-test</artifactId>
    <version>${curator.version}</version>
</dependency>
3. Configure the zookeeper registry
import com.dangdang.ddframe.job.reg.zookeeper.ZookeeperConfiguration;
import com.dangdang.ddframe.job.reg.zookeeper.ZookeeperRegistryCenter;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnExpression;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

/**
 * Declares the ZooKeeper registry center that Elastic-Job uses for coordination.
 * The whole configuration is skipped when {@code regCenter.serverList} is empty.
 *
 * @author Administrator
 * @date September 28, 2017
 */
@Configuration
@ConditionalOnExpression("'${regCenter.serverList}'.length() > 0")
public class RegistryCenterConfig {

    /**
     * Builds the registry center bean. Spring calls {@code init()} after
     * construction, which opens the ZooKeeper connection.
     *
     * @param serverList comma-separated ZooKeeper {@code host:port} list
     * @param namespace  root namespace under which all job nodes are stored
     * @return the initialized-on-startup registry center
     */
    @Bean(initMethod = "init")
    public ZookeeperRegistryCenter regCenter(
            @Value("${regCenter.serverList}") final String serverList,
            @Value("${regCenter.namespace}") final String namespace) {
        final ZookeeperConfiguration zkConfiguration =
                new ZookeeperConfiguration(serverList, namespace);
        return new ZookeeperRegistryCenter(zkConfiguration);
    }
}
4. Persistence configuration (store job execution events in a database)
import javax.annotation.Resource;
import javax.sql.DataSource;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import com.dangdang.ddframe.job.event.JobEventConfiguration;
import com.dangdang.ddframe.job.event.rdb.JobEventRdbConfiguration;

/**
 * Wires the Elastic-Job event-trace configuration so that job execution
 * events are persisted through the application's {@link DataSource}.
 *
 * @author Administrator
 * @date September 28, 2017
 */
@Configuration
public class JobEventConfig {

    /** Data source the RDB event listener writes job-event records through. */
    @Resource
    private DataSource dataSource;

    /**
     * @return an RDB-backed event configuration; Elastic-Job creates its event
     *         tables automatically on first use
     */
    @Bean
    public JobEventConfiguration jobEventConfiguration() {
        final JobEventConfiguration rdbEventConfiguration =
                new JobEventRdbConfiguration(dataSource);
        return rdbEventConfiguration;
    }
}
5. Configure application.properties: set the ZooKeeper connection, the Elastic-Job cron expression, and the total shard count, and define shardingItemParameters. That property maps each numeric shard item to a parameter, turning shard numbers into more readable business codes.
regCenter.serverList=127.0.0.1:2181
regCenter.namespace=elastic-job
simpleJob.cron=0/5 * * * * ?
simpleJob.shardingTotalCount=3
simpleJob.shardingItemParameters=0=A,1=B,2=C
6. Task configuration class
import javax.annotation.Resource; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import com.chaos.webapi.job.ProfitLossOrderJob; import com.chaos.webapi.job.SpringSimpleJob; import com.dangdang.ddframe.job.api.simple.SimpleJob; import com.dangdang.ddframe.job.config.JobCoreConfiguration; import com.dangdang.ddframe.job.config.simple.SimpleJobConfiguration; import com.dangdang.ddframe.job.event.JobEventConfiguration; import com.dangdang.ddframe.job.lite.api.JobScheduler; import com.dangdang.ddframe.job.lite.config.LiteJobConfiguration; import com.dangdang.ddframe.job.lite.spring.api.SpringJobScheduler; import com.dangdang.ddframe.job.reg.zookeeper.ZookeeperRegistryCenter; /** * @author Administrator *@date September 28, 2017 */ @Configuration public class SimpleJobConfig { @Resource private ZookeeperRegistryCenter regCenter; @Resource private JobEventConfiguration jobEventConfiguration; @Bean public SimpleJob simpleJob() { return new SpringSimpleJob(); } @Bean public ProfitLossOrderJob pofitLossOrderJob() { return new ProfitLossOrderJob(); } @Bean(initMethod = "init") public JobScheduler simpleJobScheduler(final SimpleJob simpleJob, @Value("${simpleJob.cron}") final String cron, @Value("${simpleJob.shardingTotalCount}") final int shardingTotalCount, @Value("${simpleJob.shardingItemParameters}") final String shardingItemParameters) { return new SpringJobScheduler(simpleJob, regCenter, getLiteJobConfiguration(simpleJob.getClass(), cron, shardingTotalCount, shardingItemParameters), jobEventConfiguration); } private LiteJobConfiguration getLiteJobConfiguration(final Class<? 
extends SimpleJob> jobClass, final String cron, final int shardingTotalCount, final String shardingItemParameters) { return LiteJobConfiguration.newBuilder(new SimpleJobConfiguration(JobCoreConfiguration.newBuilder( jobClass.getName(), cron, shardingTotalCount).shardingItemParameters(shardingItemParameters).build(), jobClass.getCanonicalName())).overwrite(true).build(); } }
7. Task execution class
import com.dangdang.ddframe.job.api.ShardingContext;
import com.dangdang.ddframe.job.api.simple.SimpleJob;

/**
 * Demo job: each invocation prints the shard item it was assigned together
 * with the business parameter mapped to that shard.
 *
 * @author Administrator
 * @date September 28, 2017
 */
public class SpringSimpleJob implements SimpleJob {

    @Override
    public void execute(final ShardingContext shardingContext) {
        final int item = shardingContext.getShardingItem();
        final String parameter = shardingContext.getShardingParameter();
        // Runtime output kept byte-identical to the original tutorial snippet.
        System.out.println("当前分片:" + item + "---" + parameter);
    }
}
8. Task execution results
The above merely executes a simple print statement. In a real application, you can combine this with database and table sharding, mapping each shard item to its own slice of the business data and logic.