1.场景1
- 页面点击详情时,需要把十几个指标数据显示到页面上,且每次点击都要显示最新的结果。最初采用顺序执行,等全部执行完再统一返回前端,数据响应比较慢;因此改用线程池并发提交任务
- 提交任务有两种方式:
execute:适用于不需要关注返回值的应用场景,只需要把线程丢到线程池中去执行就可以了
submit:适用于需要关注返回值的场景,可通过返回的 Future 获取任务执行结果
自定义一个线程池:public class MyThreadPoolExecutor { private static final int CORE_SIZE = 22; private static final int MAX_SIZE = 25; private static final long KEEP_ALIVE_TIME = 60; private static final int QUEUE_SIZE = 1000; private static ThreadPoolExecutor threadPool = new ThreadPoolExecutor(CORE_SIZE, MAX_SIZE, KEEP_ALIVE_TIME, TimeUnit.SECONDS, new ArrayBlockingQueue<Runnable>(QUEUE_SIZE), new ThreadPoolExecutor.AbortPolicy()); public static ThreadPoolExecutor getThreadPool() { return threadPool; } }
初始化任务,并且调用
List<Callable< Map<String, Object>>> tasks = new ArrayList<Callable< Map<String, Object>>>(22);
//获取本期预售客座率
tasks.add(new Callable<Map<String, Object>>() {
@Override
public Map<String, Object> call() throws Exception {
Map<String, Object> currentPresaleRate = zzTeamHandleDao.getCurrentPresaleRate(add_on_seg, add_on_date, add_on_flt_no, importtime);
return currentPresaleRate;
}
});
//获得历史同期预售客座率
tasks.add(new Callable<Map<String, Object>>() {
@Override
public Map<String, Object> call() throws Exception {
Map<String, Object> historicalPresaleRate = zzTeamHandleDao.getHistoricalPresaleRate(add_on_seg, add_on_date, add_on_flt_no, importtime);
return historicalPresaleRate;
}
});
try {
List<Future<Map<String, Object>>> futureList = MyThreadPoolExecutor.getThreadPool().invokeAll(tasks, 120000, TimeUnit.MILLISECONDS);
map.putAll(mapId);
for (Future<Map<String, Object>> future : futureList) {
if(null != future){
if(future.isCancelled()) {
//处理
LOG.error("等待时间超时,取消的任务:" + future.isCancelled());
}
Map<String, Object> resultMap= future.get();
if(null != resultMap){
map.putAll(resultMap);
}
}
}
} catch (InterruptedException e) {
e.printStackTrace();
} catch (ExecutionException e) {
e.printStackTrace();
}
2.场景2
最初我们把基础数据存进数据库,大概有80万到150万条,分别存放在三个表中;后面需要把每天的基础数据取出来进行计算。目前的计算方法是先按航班日期把数据进行分组,代码如下:
//按照航班日期进行分组,每5个日期为一组
private static List<List<FdlbfgDataConfig>> getGroup(String date){
List<FdlbfgDataConfig> allDataList = new FDLDetailsDao().queryFDLDataConfig(date);
List<List<FdlbfgDataConfig>> list = new ArrayList<List<FdlbfgDataConfig>>();
int length = allDataList.size();
if(length<5)
{
list.add(allDataList);
return list;
}
int j = 0;
for (int i = 0; i <= length; i++)
{
if(i!=0 && (i%5 == 0 || i == length))
{
// if(i == length)
list.add(allDataList.subList(j, i));
// else
// list.add(allDataList.subList(j, i-1));
j = i;
}
}
return list;
}
采用固定线程数,初始化线程池:
public void start(){
new SynCacheUtil(date).refreshCache(date); //加载缓存
Map<String, Map<String, PriceDistVo>> loadMap = new FDLReckonDao().load(null, null, date.replaceAll("-", "/"));
List<List<FdlbfgDataConfig>> groupList = getGroup(date);
ExecutorService fixedThreadPool = Executors.newFixedThreadPool(groupList.size());
int k = 0;
for (List<FdlbfgDataConfig> list:groupList)
{
ConfigData2FDLDetailsThread thread = new ConfigData2FDLDetailsThread(list,date,loadMap);
thread.setName("ConfigDataThread"+k);
log.info("ConfigDataThread线程执行:"+k);
k++;
fixedThreadPool.submit(thread);
}
fixedThreadPool.shutdown();
log.info("等待子线程结束");
while (true) {
if (fixedThreadPool.isTerminated()) {
log.info("所有的子线程都结束了!");
break;
}
try {
Thread.sleep(500L);
} catch (InterruptedException e) {
log.error("", e);
}
}
//将-1天的数据移动到FDL_Details_F1表中
log.info("开始将-1天的数据移动到FDL_Details_F1表中");
try
{
new FDLDetailsDao().moveFDLDetailF1(date);
}
catch(Exception e)
{
log.error("将-1天的数据移动到FDL_Details_F1表中失败,原因:"+e.getMessage());
e.printStackTrace();
}
log.info("将-1天的数据移动到FDL_Details_F1表中完成");
//清除缓存
SynCacheUtil.clearCache();
loadMap.clear();
}
线程任务类的实现:对分组数据进行计算并批量入库
public class ConfigData2FDLDetailsThread extends Thread {
private static final Logger log = LoggerFactory
.getLogger(ConfigData2FDLDetailsThread.class);
private List<FdlbfgDataConfig> bfgDataConfigList;
private String date;
private Map<String, Map<String, PriceDistVo>> loadMap;
public ConfigData2FDLDetailsThread(List<FdlbfgDataConfig> bfgData,String date,Map<String, Map<String, PriceDistVo>> loadMap) {
this.bfgDataConfigList = bfgData;
this.date = date;
this.loadMap = loadMap;
}
@Override
public void run() {
try {
if(null != bfgDataConfigList){
for (FdlbfgDataConfig data : bfgDataConfigList) {
log.info("开始查询:" + data.getOdat() + " 的FDL_BFG数据");
List<FdlBfg> bfgIdList = new FDLDetailsDao().queryBfgIdByData(data, date);
if (Util.isNull(bfgIdList)){
continue;
}
groupData(bfgIdList,loadMap);
log.info("该组bfg数据计算完毕");
}
}
} catch (RuntimeException e) {
log.error(Thread.currentThread().getName() + "线程执行异常",e);
}finally{
ConfigDataTask.overThread();
log.info("当前线程执行结束");
}
}
private void groupData(List<FdlBfg> bfgList,Map<String, Map<String, PriceDistVo>> loadMap) {
// 对数据进行分组 40条为一组
List<List<FdlBfg>> list = new ArrayList<List<FdlBfg>>();
int length = Util.isNull(bfgList) ? 0 : bfgList.size();
log.info("bfg数据条数为:" + length);
if (length < 100) {
list.add(bfgList);
} else {
int j = 0;
for (int i = 0; i <= length; i++) {
if (i != 0 && (i % 100 == 0 || i == length)) {
list.add(bfgList.subList(j, i));
j = i;
}
}
}
//
for (List<FdlBfg> gList : list) {
// log.info("查询本组bsgList数据开始==");
// map<bfgid,list<fdlBsg>>
Map<Long, List<FdlBsg>> bsgMap = DbUtil.queryBsgByBfgList(gList);
//log.info("查询本组bsgMap数据结束==");
Map<Long, List<FdlBlg>> blgMap = DbUtil.queryBlgByBfgList(gList);
List<FDLDetail> dataList = new ArrayList<FDLDetail>();
for (FdlBfg bfg : gList) {
try {
List<FdlBsg> bsgList = bsgMap.get(bfg.getId());
List<FdlBlg> blgList = blgMap.get(bfg.getId());
FDLSynDetailsUtil.trafficAnalysis(bfg, bsgList,
blgList, loadMap, dataList,date);
} catch (Exception e) {
log.error("计算该列BFG数据出错,BFG_ID:" + bfg.getId() + "Exception:",e);
continue;
}
}
try {
new FDLDetailsDao().saveBatchForSql(dataList);
log.info("同步数据数量为:"+dataList.size()+"条存入库.");
dataList.clear();
} catch (Exception e) {
log.error("数据同步保存FDL_SEG_DATA数据异常,失败条件:" + dataList.size(),e);
continue;
}
}
}
}