Spring Batch 2: Setting Up the Environment and a Simple Job
2017-02-14
代码行间的无聊生活
This part introduces how to use Spring Batch.
By default Spring Batch keeps its job metadata in memory (HSQLDB), but for this product we need to monitor job status and failures, so the metadata is persisted to a database (MySQL) instead.
That means the metadata tables have to be created and the JobRepository has to be configured to use MySQL. The table-creation scripts ship inside the spring-batch-core jar, under org.springframework.batch.core.
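The MySQL version of that script is schema-mysql.sql. You can run it against the database by hand, or apply it once at startup. The snippet below is only a sketch (the class name and the assumption that your MySQL dataSource bean is at hand are mine), using Spring's ResourceDatabasePopulator:
import javax.sql.DataSource;

import org.springframework.core.io.ClassPathResource;
import org.springframework.jdbc.datasource.init.DatabasePopulatorUtils;
import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator;

// Sketch only: apply the Spring Batch MySQL schema script once, at startup,
// against the (assumed) MySQL DataSource that the JobRepository will use.
public class BatchSchemaInitializer {

    public static void initialize(DataSource dataSource) {
        ResourceDatabasePopulator populator = new ResourceDatabasePopulator();
        // the script ships inside spring-batch-core.jar
        populator.addScript(new ClassPathResource("org/springframework/batch/core/schema-mysql.sql"));
        // run only against an empty schema; the script creates the BATCH_ tables
        DatabasePopulatorUtils.execute(populator, dataSource);
    }
}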
一、Setting up the environment
1. Pull in the jars with Maven. You can work out by trial which modules you actually need; the ones below are what this article uses.
<dependency>
    <groupId>org.springframework.batch</groupId>
    <artifactId>spring-batch-core</artifactId>
    <version>${spring.batch.version}</version>
</dependency>
<dependency>
    <groupId>org.springframework.batch</groupId>
    <artifactId>spring-batch-admin-manager</artifactId>
    <version>1.3.1.RELEASE</version>
</dependency>
<dependency>
    <groupId>org.springframework.batch</groupId>
    <artifactId>spring-batch-infrastructure</artifactId>
    <version>${spring.batch.version}</version>
</dependency>
<dependency>
    <groupId>org.springframework.batch</groupId>
    <artifactId>spring-batch-integration</artifactId>
    <version>${spring.batch.version}</version>
</dependency>
2. Configure the XML. (If you integrate with Spring Boot, just follow the official documentation instead.)
a. Configure the jobRepository
<batch:job-repository id="jobRepository" transaction-manager="main_txManager"
isolation-level-for-create="REPEATABLE_READ" table-prefix="BATCH_" max-varchar-length="1000" />
b. Configure the jobLauncher
<bean id="jobLauncher"
class="org.springframework.batch.core.launch.support.SimpleJobLauncher">
<property name="jobRepository" ref="jobRepository"/>
</bean>
c. Configure the job-parameter builder bean jobParameterBulider (used to pass parameters into a job)
<bean id="jobParameterBulider" class="org.springframework.batch.core.JobParametersBuilder" />
d. Configure a task executor so the job can run asynchronously on multiple threads
<bean id="taskExecutor" class="org.springframework.core.task.SimpleAsyncTaskExecutor" />
二、Writing a simple job
I like to write the configuration first and the business logic afterwards. The example is a pick-up reminder job; a lot has been stripped out of it, so bear with the gaps.
<!-- start: morning pick-up reminder JOB -->
<batch:job id="pickUpJob" restartable="true">
    <!-- master step, 10 threads (grid-size) -->
    <batch:step id="pickUpmasterStep" >
        <!-- partitioner="pickUpPartitioner" prepares the parameters passed in when the job was launched -->
        <batch:partition step="pickUpSlave" partitioner="pickUpPartitioner" >
            <!-- grid-size="10": run the partitioned step on 10 threads -->
            <batch:handler grid-size="10" task-executor="taskExecutor" />
        </batch:partition>
    </batch:step>
</batch:job>
<!-- The master step delegates to a nested slave step; I prefer writing it this way, but the content below could also be configured directly inside the master step -->
<batch:step id="pickUpSlave" >
    <batch:tasklet transaction-manager="main_txManager" >
        <batch:chunk reader="pickUpReader" writer="pickUpWriter"
                     processor="pickUpProcessor" skip-limit="20" commit-interval="100" >
            <!-- exceptions that may be skipped -->
            <batch:skippable-exception-classes>
                <batch:include class="java.lang.Exception"/>
            </batch:skippable-exception-classes>
            <!-- multiple writers (streams) -->
            <!--<batch:streams>
                <batch:stream ref="pickUpWriter" />
                <batch:stream ref="productItemWriter2"/>
            </batch:streams>-->
        </batch:chunk>
    </batch:tasklet>
    <!-- listener: used here mainly to load fixed reference data before the step runs -->
    <batch:listeners>
        <batch:listener ref="pickUpListener" before-step-method="beforeStep" />
    </batch:listeners>
</batch:step>
<bean id="pickUpListener" class="com.cwenao.cc.scheduler.batch.listener.PickUpListener" />
<!-- Reader: this article uses MyBatis, though JdbcTemplate would work just as well. MyBatis is not strongly recommended here, -->
<!-- because there is a big pitfall: when MyBatis runs in batch mode its executor type is BATCH, so reusing an earlier query with a plain (non-batch) session inside the same transaction throws an exception. Working around that properly means extending MyBatis yourself, which is not covered here; research it if you are interested. -->
<bean id="pickUpReader" class="org.mybatis.spring.batch.MyBatisPagingItemReader" scope="step" >
    <property name="sqlSessionFactory" ref="sqlSessionFactory" />
    <!-- the fully-qualified id of the MyBatis query to run -->
    <property name="queryId" value="com.cwenao.cc.basic.dao.OrderDao.selectForBatchNotPick" />
    <!-- page size for the paging reader -->
    <property name="pageSize" value="100"/>
    <!-- query parameters -->
    <property name="parameterValues">
        <map>
            <!-- the parameter values come from the partitioner via the step execution context -->
            <entry key="sTime" value="#{stepExecutionContext[sTime]}" />
            <entry key="eTime" value="#{stepExecutionContext[eTime]}" />
        </map>
    </property>
</bean>
<!-- Writer: persist the results -->
<bean id="pickUpWriter" class="org.mybatis.spring.batch.MyBatisBatchItemWriter" scope="step">
    <property name="sqlSessionFactory" ref="sqlSessionFactory"/>
    <property name="statementId" value="com.cwenao.cc.basic.dao.NoticeInfoDao.insertSelective"/>
</bean>
<!-- end: morning pick-up reminder JOB -->
That is all of the configuration. Next come the classes that need to be implemented, one by one:
Launching the job
public class BatchRedExpireOpenIdJob {

    @Autowired
    private JobLauncher jobLauncher;

    @Autowired
    private Job redExpireOpenIdJob;

    @Autowired
    JobParametersBuilder jobParameterBulider;

    public boolean doExecuteTask(String jobId) {
        // fall back to manual bean lookups in case autowiring did not happen
        if (null == jobParameterBulider) {
            jobParameterBulider = SpringUtil.getBean("jobParameterBulider", JobParametersBuilder.class);
        }
        if (null == redExpireOpenIdJob) {
            redExpireOpenIdJob = SpringUtil.getBean("redExpireOpenIdJob", Job.class);
        }
        if (null == jobLauncher) {
            jobLauncher = SpringUtil.getBean("jobLauncher", JobLauncher.class);
        }
        // pass parameters: the jobId serves as the unique identifier for this launch
        jobParameterBulider.addDate("date", new Date());
        jobParameterBulider.addString("jobId", jobId);
        try {
            // launch the job; the returned JobExecution can be inspected for status if needed
            JobExecution execution = jobLauncher.run(redExpireOpenIdJob, jobParameterBulider.toJobParameters());
        } catch (JobExecutionAlreadyRunningException e) {
            e.printStackTrace();
        } catch (JobRestartException e) {
            e.printStackTrace();
        } catch (JobInstanceAlreadyCompleteException e) {
            e.printStackTrace();
        } catch (JobParametersInvalidException e) {
            e.printStackTrace();
        }
        return true;
    }
}
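How doExecuteTask gets triggered is up to you; the article does not show it. The snippet below is purely hypothetical wiring: a Spring @Scheduled method that fires the launcher every morning with a jobId that must match a row in the task-scheduler table.
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

// Hypothetical trigger (not part of the original article). Requires @EnableScheduling
// or <task:annotation-driven/> in the configuration, and assumes the launcher class
// above is registered as a Spring bean so it can be autowired here.
@Component
public class PickUpJobTrigger {

    @Autowired
    private BatchRedExpireOpenIdJob batchJob;

    // 08:00 every day; "pickUpJob" is an assumed jobId for illustration
    @Scheduled(cron = "0 0 8 * * ?")
    public void trigger() {
        batchJob.doExecuteTask("pickUpJob");
    }
}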
The pickUpPartitioner class
@Scope("step")
@Component("pickUpPartitioner")
public class pickUpPartitioner implements Partitioner {
//参数来至于job启动的时候
@Value("#{jobParameters['jobId']}")
private String jobId;
@Resource
TaskSchedulerDao taskSchedulerDao;
@Override
public Map<String, ExecutionContext> partition(int gridSize) {
Map<String, ExecutionContext> result = new HashMap<String, ExecutionContext>();
//昨天的订单日期
String preOrderTime= "";
//前天的订单日期
TaskScheduler taskScheduler = taskSchedulerDao.selectByPrimaryKey(jobId);
int days=0;
int hours=0;
if(null !=taskScheduler)
{
String hoursArry[] = taskScheduler.getTaskName().split("#");
if(null != hoursArry && hoursArry.length>1)
{
hours = Integer.parseInt(hoursArry[0]);
}
}
if(hours != 0)
{
days = hours/24;
}
String pre2OrderTime="";
if(days==0){
preOrderTime= DateUtil.dateToStr(DateUtil.getPreDay(), "yyyy-MM-dd");
}
else
preOrderTime= DateUtil.dateToStr(DateUtil.getPreDay(-days), "yyyy-MM-dd");
String sTime = preOrderTime+" 00:00:00";
String eTime = preOrderTime+" 23:59:59";
ExecutionContext value = new ExecutionContext();
value.putString("sTime", sTime);
value.putString("eTime", eTime);
result.put("partition", value);
return result;
}
public String getJobId() {
return jobId;
}
public void setJobId(String jobId) {
this.jobId = jobId;
}
}
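Note that partition() above returns a single map entry, so even with grid-size="10" only one worker step execution is created; the grid size is merely a hint passed into partition(). If the ten threads are meant to do real work, the map needs one ExecutionContext per partition. The sketch below illustrates the idea by slicing the target day into gridSize time windows (the slicing logic and hard-coded date are mine, for illustration only):
import java.util.HashMap;
import java.util.Map;

import org.springframework.batch.core.partition.support.Partitioner;
import org.springframework.batch.item.ExecutionContext;

// Illustrative only: produce one ExecutionContext per partition so the task
// executor can actually run gridSize worker steps in parallel.
public class SlicedPartitioner implements Partitioner {

    @Override
    public Map<String, ExecutionContext> partition(int gridSize) {
        Map<String, ExecutionContext> result = new HashMap<String, ExecutionContext>();
        String day = "2017-02-13";                        // hard-coded for brevity
        int minutesPerSlice = (24 * 60) / gridSize;       // e.g. 144 minutes when gridSize is 10
        for (int i = 0; i < gridSize; i++) {
            int startMinute = i * minutesPerSlice;
            int endMinute = (i + 1) * minutesPerSlice - 1;
            ExecutionContext context = new ExecutionContext();
            context.putString("sTime", String.format("%s %02d:%02d:00", day, startMinute / 60, startMinute % 60));
            context.putString("eTime", String.format("%s %02d:%02d:59", day, endMinute / 60, endMinute % 60));
            result.put("partition" + i, context);         // each key becomes one worker step execution
        }
        return result;
    }
}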
The read and write SQL is not shown here; it is ordinary SQL behind the MyBatis statements referenced above.
The processing step: the pickUpProcessor class
@Scope("step")
@Component("pickUpProcessor")
public class PickUpProcessor implements ItemProcessor<OrderVo,NoticeInfo> {
private static Map<String,PickPoint> pickPointMap=new HashMap<>();
//提货通知
//获取参数jobId
@Value("#{jobParameters['jobId']}")
private String jobId;
/**
* 处理过程,返回类型与传入参数和ItemProcessor<OrderVo,NoticeInfo>相同
*/
@Override
public NoticeInfo process( OrderVo order) throws Exception {
if(null == order){
return null;
}
Integer amOrPm = DateUtil.getAmPm();
NoticeInfo noticeInfo = new NoticeInfo();
noticeInfo.setId(UUIDUtil.generateUUID());
noticeInfo.setStatus(APIConstant.status_enable);
return noticeInfo;
}
//要获取JobID必须有get方法
public String getJobId() {
return jobId;
}
public void setJobId(String jobId) {
this.jobId = jobId;
}
}
The listener is just an ordinary class; what it does is entirely business-specific. A minimal skeleton is sketched below.
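Because the XML wires it up with before-step-method="beforeStep", a plain POJO with a beforeStep method is enough; no Spring Batch interface needs to be implemented. A hypothetical skeleton (the reference-data loading is assumed, not taken from the article):
import org.springframework.batch.core.StepExecution;

// Hypothetical skeleton of the pickUpListener POJO referenced in the XML above.
// before-step-method="beforeStep" binds this method; the body is assumed.
public class PickUpListener {

    public void beforeStep(StepExecution stepExecution) {
        // e.g. load fixed reference data (pick-up points, message templates, ...) once,
        // before the first chunk of the step runs
    }
}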
With that, a simple job runs end to end.
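Since the job metadata now lives in MySQL (the reason given at the top of this article), you can verify a run straight from the BATCH_ tables. A small sketch, assuming a JdbcTemplate built on the same data source:
import java.util.List;
import java.util.Map;

import org.springframework.jdbc.core.JdbcTemplate;

// Sketch only: inspect recent runs in the Spring Batch metadata tables
// (prefix BATCH_, as configured on the job-repository above).
public class JobExecutionCheck {

    public static void printRecentExecutions(JdbcTemplate jdbcTemplate) {
        List<Map<String, Object>> rows = jdbcTemplate.queryForList(
                "SELECT JOB_EXECUTION_ID, STATUS, EXIT_CODE, START_TIME, END_TIME "
                + "FROM BATCH_JOB_EXECUTION ORDER BY JOB_EXECUTION_ID DESC LIMIT 10");
        for (Map<String, Object> row : rows) {
            System.out.println(row);
        }
    }
}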
Appendix
Spring Batch in Action was my main reference; read the English original, as the translated editions are not great.
If you have questions, reach me via my WeChat public account (K171). If you found this helpful, please star the project on GitHub.