Multi-threaded chunked upload client for large files


Two ways of splitting the file into chunks are shown here:

1. Override the write method of a custom ContentBody so that each chunk is cut out of the file while it is being uploaded (first listing below).

2. Split the file into chunk files manually and upload the resulting chunk files (second listing below).

In both cases the file is read through a RandomAccessFile:

RandomAccessFile raf = new RandomAccessFile(targetFile, "r");

The seek() method of the raf object skips a given number of bytes before reading, which is how the large file is divided into chunks.
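As a minimal sketch of that idea (not part of the two listings below; the ChunkReader class and copyChunk method are hypothetical names used only for illustration), reading a single chunk with seek() looks roughly like this:

import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.io.RandomAccessFile;

// Illustration only: copies chunk number chunkIndex (1-based) of targetFile to out,
// assuming every chunk except the last has the fixed size chunkSize.
public final class ChunkReader {
    public static void copyChunk(File targetFile, long chunkSize, int chunkIndex,
                                 OutputStream out) throws IOException {
        RandomAccessFile raf = new RandomAccessFile(targetFile, "r");
        try {
            raf.seek(chunkSize * (chunkIndex - 1L));// skip the bytes of all preceding chunks
            long remaining = Math.min(chunkSize, raf.length() - raf.getFilePointer());
            byte[] buffer = new byte[8192];
            int n;
            while (remaining > 0
                    && (n = raf.read(buffer, 0, (int) Math.min(buffer.length, remaining))) != -1) {
                out.write(buffer, 0, n);// forward only the bytes actually read
                remaining -= n;
            }
        } finally {
            raf.close();
        }
    }
}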
 
 

package com.wondersgroup.wbgl.web.Test2;

import com.wondersgroup.core.exceptions.ExcelException;
import com.wondersgroup.core.util.DateUtil;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.IOUtils;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.mime.HttpMultipartMode;
import org.apache.http.entity.mime.MultipartEntityBuilder;
import org.apache.http.entity.mime.content.AbstractContentBody;
import org.apache.http.entity.mime.content.ContentBody;
import org.apache.http.entity.mime.content.StringBody;
import org.apache.http.impl.client.HttpClientBuilder;


import java.io.*;
import java.util.*;
import java.util.concurrent.*;


public class Test {
    public static class BlockStreamBody extends AbstractContentBody {
        public static long CLOUD_API_LOGON_SIZE = 10 * 1024 * 1024;// fixed chunk size: 10 MB

        // Two fields reported to the MultipartEntity
        private long blockSize = 0;// size of the block uploaded by this request
        private String fileName = null;// name of the file being uploaded
        // Three fields used by writeTo
        private int blockNumber = 0, blockIndex = 0;// blockNumber: total number of blocks; blockIndex: index of the current block
        private File targetFile = null;// file to upload

        private BlockStreamBody(String mimeType) {
            super(mimeType);
        }

        /**
         * Constructor for the custom ContentBody
         *
         * @param blockNumber total number of blocks
         * @param blockIndex index of the current block
         * @param targetFile file to upload
         */
        public BlockStreamBody(int blockNumber, int blockIndex, File targetFile) {
            this("application/octet-stream");
            this.blockNumber = blockNumber;
            this.blockIndex = blockIndex;
            this.targetFile = targetFile;
            this.fileName = targetFile.getName();
            // blockSize: every block except the last has the fixed size
            if (blockIndex < blockNumber) {
                this.blockSize = CLOUD_API_LOGON_SIZE;
            } else {// the last block gets whatever remains
                this.blockSize = targetFile.length() - CLOUD_API_LOGON_SIZE * (blockNumber - 1);
            }
        }

        @Override
        public void writeTo(OutputStream out) throws IOException {
            RandomAccessFile raf = new RandomAccessFile(targetFile, "r");// reads the source file
            byte b[] = new byte[1024];// transfer buffer
            try {
                if (blockIndex == 1) {// first block
                    int n = 0;
                    long readLength = 0;// number of bytes read so far
                    while (readLength <= blockSize - 1024) {// the bulk of the block is read here
                        n = raf.read(b, 0, 1024);
                        readLength += n;
                        out.write(b, 0, n);
                    }
                    if (readLength < blockSize) {// remaining tail of less than 1024 bytes
                        n = raf.read(b, 0, (int) (blockSize - readLength));
                        out.write(b, 0, n);
                    }
                } else if (blockIndex < blockNumber) {// neither the first nor the last block
                    raf.seek(CLOUD_API_LOGON_SIZE * (blockIndex - 1));// skip the (blockIndex - 1) preceding blocks
                    int n = 0;
                    long readLength = 0;// number of bytes read so far
                    while (readLength <= blockSize - 1024) {// the bulk of the block is read here
                        n = raf.read(b, 0, 1024);
                        readLength += n;
                        out.write(b, 0, n);
                    }
                    if (readLength < blockSize) {// remaining tail of less than 1024 bytes
                        n = raf.read(b, 0, (int) (blockSize - readLength));
                        out.write(b, 0, n);
                    }
                } else {// last block
                    raf.seek(CLOUD_API_LOGON_SIZE * (blockIndex - 1));// skip the (blockIndex - 1) preceding blocks
                    int n = 0;
                    while ((n = raf.read(b, 0, 1024)) != -1) {
                        out.write(b, 0, n);
                    }
                }
            } finally {
                raf.close();// always release the file handle; out belongs to the entity and is not closed here
            }
        }

        @Override
        public String getCharset() {
            return null;
        }

        @Override
        public String getTransferEncoding() {
            return "binary";
        }

        @Override
        public String getFilename() {
            return fileName;
        }

        @Override
        public long getContentLength() {
            return blockSize;
        }
    }
    private static BlockingQueue<Runnable> blockingQueue = new ArrayBlockingQueue<Runnable>(20);

    private static ExecutorService executorService = new ThreadPoolExecutor(5, 5, 0L, TimeUnit.MILLISECONDS, blockingQueue, new RejectedExecutionHandler() {
        @Override
        public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) {
            while (!blockingQueue.offer(r)) {
            }
        }
    });

    public static long CLOUD_API_LOGON_SIZE = 10 * 1024 * 1024;
    public static int timeOut = 60 * 60 * 1000;

    public static final String checkURL = "http://localhost:8080/claim/private/checked/checkChunk.do";

    public static final String mergeURL = "";

    public static final String uploadURL = "";


    public static void main(String[] args) throws Exception {
        String filePath = "E:\\workspace\\wbglProject\\dispatcher\\target\\dispatcher.war";
        System.out.println("开始上传"+ DateUtil.dateToString3(new Date()));
        uploadToDrive(filePath);
        System.out.println("上传结束"+ DateUtil.dateToString3(new Date()));
    }

    /**
     * Prepares the upload: splits the file into chunks and wraps the request parameters
     *
     * @param filePath path of the file to upload
     * @throws Exception
     */
    public static void uploadToDrive(String filePath)throws Exception {
        File targetFile = new File(filePath);
        HashMap<String, Object> params = new HashMap<String, Object>();
        params.put("fileName", targetFile.getName());
        params.put("fileLength", targetFile.length());
        FileInputStream md5Stream = new FileInputStream(targetFile);
        String md5 = DigestUtils.md5Hex(md5Stream);// MD5 of the whole file, used by the server to locate the chunks
        md5Stream.close();
        params.put("md5", md5);
        long someExtra = 0;
        long targetFileSize = targetFile.length();
        int mBlockNumber = 0;
        if (targetFileSize < CLOUD_API_LOGON_SIZE) {
            mBlockNumber = 1;
            someExtra = targetFileSize;
        } else {
            mBlockNumber = (int) (targetFileSize / CLOUD_API_LOGON_SIZE);
            someExtra = targetFileSize % CLOUD_API_LOGON_SIZE;
            if (someExtra > 0) {
                mBlockNumber++;
            }
        }
        params.put("chunkNum", Integer.toString(mBlockNumber));
        List<Callable<String>> callableList = new ArrayList<Callable<String>>();
        // 定义BufferedReader输入流来读取URL的响应,设置编码方式
        for (int i = 1; i <= mBlockNumber; i++) {
            int chunkIndex = i;
            params.put("chunkIndex", i);
            long chunkSize = CLOUD_API_LOGON_SIZE;
            params.put("chunkSize", CLOUD_API_LOGON_SIZE);
            if (i == mBlockNumber) {
                chunkSize = someExtra == 0 ? CLOUD_API_LOGON_SIZE : someExtra;
                params.put("chunkSize", (someExtra == 0 ? CLOUD_API_LOGON_SIZE : someExtra) + "");
            }
            //
            UploadThread thread = new UploadThread(chunkIndex , chunkSize , targetFile , params,makeHeads());
            callableList.add(thread);
        }
        List<Future<String>> futures = executorService.invokeAll(callableList);
        restPost(mergeURL,params,makeHeads(),targetFile);
    }


    /**
     * Sends a multipart POST request
     *
     * @param url request URL
     * @param params request parameters
     * @param heads request headers
     * @param targetFile file to upload (null when only form fields are sent)
     * @return String response body
     */
    public static String restPost(String url, Map<String, Object> params, Map<String, String> heads, File targetFile) {
        HttpClient httpClient = null;
        HttpPost httpPost = new HttpPost(url);
        String content = "";
        try {
            MultipartEntityBuilder mpEntity = MultipartEntityBuilder.create();
            for (String head : heads.keySet()) {
                httpPost.setHeader(head, heads.get(head));
            }
            if (null != params && params.size() > 0) {
                for (String param : params.keySet()) {
                    mpEntity.addPart(param, new StringBody(params.get(param).toString(), ContentType.MULTIPART_FORM_DATA));
                }
            }
            mpEntity.setMode(HttpMultipartMode.BROWSER_COMPATIBLE);
            if (targetFile != null && targetFile.exists()) {
                ContentBody contentBody = new BlockStreamBody(Integer.parseInt(params.get("chunkNum").toString()), Integer.parseInt(params.get("chunkIndex").toString()), targetFile);
                mpEntity.addPart("file", contentBody);
            }
            httpPost.setEntity(mpEntity.build());
            httpClient = HttpClientBuilder.create().build();
            RequestConfig.Builder build = RequestConfig.custom();
            build.setConnectTimeout(timeOut);
            build.setSocketTimeout(timeOut);
            httpPost.setConfig(build.build());// apply the timeouts to this request
            HttpResponse execute = httpClient.execute(httpPost);
            content = IOUtils.toString(execute.getEntity().getContent(), "utf-8");
        } catch (Exception e) {
            e.printStackTrace();
            throw new ExcelException(e.getMessage());
        }
        System.out.println("=============response==================\n" + content);
        System.out.println("=============end==================\n");
        return content.trim();
    }

    public static Map<String, String> makeHeads(){
        Map<String, String> heads = new HashMap<String, String>();
        heads.put("serviceNo","YB");
        heads.put("serviceKey","6a69b7af4eba48859d7e71ed652e958e");
        heads.put("fileType","4");
        return heads;
    }


    public static class UploadThread implements Callable<String>{
        private final int chunkIndex;

        private final long chunkSize;

        private final File targetFile;

        private Map<String , String> heads = new HashMap<String, String>();

        private Map<String , Object> params = new HashMap<String,Object>();

        public UploadThread(int chunkIndex, long chunkSize, File targetFile,Map<String,Object> params,Map<String , String> heads) {
            this.chunkIndex = chunkIndex;
            this.chunkSize = chunkSize;
            this.targetFile = targetFile;
            this.params.putAll(params);
            this.heads.putAll(heads);
        }

        @Override
        public String call() throws Exception {
            // ask the server whether this chunk has already been uploaded
            String s = restPost(checkURL, params, heads, null);
            Map<String, Object> retmap = (Map) com.alibaba.fastjson.JSON.parse(s);
            Map body = (Map) retmap.get("body");
            Object isExist = body.get("isExist");
            if (isExist.toString().equals("false")) {// not on the server yet, upload this chunk
                restPost(uploadURL, params, heads, targetFile);
            }
            return "";
        }
    }

}

2. Manual file splitting and upload

package com.wondersgroup.wbgl.web.Test;

import com.wondersgroup.core.util.LogUtil;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.http.Consts;
import org.apache.http.HttpEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.mime.MultipartEntityBuilder;
import org.apache.http.entity.mime.content.FileBody;
import org.apache.http.entity.mime.content.StringBody;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
import org.codehaus.jackson.map.ObjectMapper;
import org.slf4j.Logger;

import java.io.*;
import java.nio.charset.Charset;
import java.util.*;
import java.util.concurrent.*;

public class TestChunkUpload {

    private static final Logger logger = LogUtil.getLogger(TestChunkUpload.class);

    public static final String check_url = "http://localhost:8080/wbgl/service/claim/private/checked/checkChunk.do";

    public static final String upload_url = "http://localhost:8080/wbgl/service/claim/private/checked/upload.do";

    public static final String merge_url = "http://localhost:8080/wbgl/service/claim/private/checked/merge.do";

/*    private static String check_url = "http://182.150.61.17:31001/wbglWeb/service/claim/private/checked/checkChunk.do";
    private static String upload_url = "http://182.150.61.17:31001/wbglWeb/service/claim/private/checked/upload.do";
    private static String merge_url = "http://182.150.61.17:31001/wbglWeb/service/claim/private/checked/merge.do";*/
    private static String serviceNo = "PK";
    private static String serviceKey = "b42bc810c84d45a08251a3ceeb84a7fe";
    private static String tempPath = "E:\\files";

    private ExecutorService executorService = Executors.newFixedThreadPool(5);


    public static void main(String[] args) {
        /* Verify splitting
        File file = new File("/Users/liqingdong/Temp/plsqldev1106x64.exe");
        Map<String, Object> chunkInfo = new TestChunkUpload().getChunkInfo(file, 7);
        System.out.println(chunkInfo.toString());
        */

        /* Verify merging
        try {
            File merge = new TestChunkUpload().merge("/Users/liqingdong/Temp/d9867da93be8fa1d64abb344adb30586");
            FileInputStream fis = new FileInputStream(merge);
            String md5Hex = DigestUtils.md5Hex(fis);
            fis.close();
            System.out.println(md5Hex);
        } catch (IOException e) {
            e.printStackTrace();
        }
        */

        File file = new File("D:\\BaiduNetdiskDownload\\12345.mp4");
        TestChunkUpload testChunkUpload = new TestChunkUpload();
        try {
            testChunkUpload.uploadByThread("4", file);
        } catch (InterruptedException e) {
            logger.error(e.getMessage());
        }


    }

    /**
     * Uploads a file (single-threaded)
     *
     * @param fileType type of the uploaded file
     * @param file     file to upload
     * @return
     */
    @SuppressWarnings("unchecked")
    public boolean upload(String fileType, File file) {
        logger.info("文件:{} 开始上传...", file.getName());
        long start = System.currentTimeMillis();
        Map<String, Object> chunkInfo = getChunkInfo(file, 10);
        List<Map<String, Object>> chunkList = (List<Map<String, Object>>) chunkInfo.get("chunkList");
        int successCount = 0;
        for (Map<String, Object> chunkFileInfo : chunkList) {
            boolean exist = checkChunk(fileType, chunkInfo.get("md5").toString(), (Integer) chunkFileInfo.get("chunkIndex"), (Integer) chunkInfo.get("chunkNum"), (Long) chunkFileInfo.get("chunkSize"));
            if (exist) continue;
            boolean success = doUpload(fileType, (File) chunkFileInfo.get("chunkFile"), file.getName(), (Long) chunkInfo.get("fileLength"), chunkInfo.get("md5").toString(), (Integer) chunkFileInfo.get("chunkIndex"), (Integer) chunkInfo.get("chunkNum"), (Long) chunkFileInfo.get("chunkSize"));
            if (success) successCount++;
        }
        boolean result = successCount == Integer.valueOf(chunkInfo.get("chunkNum").toString()) && merge(fileType, file.getName(), (Long) chunkInfo.get("fileLength"), chunkInfo.get("md5").toString(), (Integer) chunkInfo.get("chunkNum"));

        logger.info("文件上传结束。本次上传总耗时:" + (System.currentTimeMillis() - start) / 1000 + "秒");

        // 删除本地临时分块文件
        File tempDir = new File(tempPath + File.separator + chunkInfo.get("md5").toString());
        try {
            FileUtils.deleteDirectory(tempDir);
        } catch (IOException e) {
            logger.error("删除临时文件失败,{}", e.getMessage());
        }
        return result;
    }

    /**
     * Multi-threaded file upload
     *
     * @param fileType type of the uploaded file
     * @param file     file to upload
     * @return
     */
    @SuppressWarnings("unchecked")
    public boolean uploadByThread(String fileType, File file) throws InterruptedException {
        logger.info("文件:{} 开始上传...", file.getName());
        long start = System.currentTimeMillis();
        Map<String, Object> chunkInfo = getChunkInfo(file, 10);

        List<Map<String, Object>> chunkList = (List<Map<String, Object>>) chunkInfo.get("chunkList");
        List<Callable<Boolean>> tasks = new ArrayList<Callable<Boolean>>();

        for (Map<String, Object> chunkFileInfo : chunkList) {
            Callable<Boolean> task = task(chunkInfo, chunkFileInfo, fileType, file);
            tasks.add(task);
        }
        // check that every chunk task succeeded before asking the server to merge
        boolean allChunksUploaded = true;
        for (Future<Boolean> future : executorService.invokeAll(tasks)) {
            try {
                allChunksUploaded &= future.get();
            } catch (ExecutionException e) {
                logger.error(e.getMessage());
                allChunksUploaded = false;
            }
        }

        boolean result = allChunksUploaded && merge(fileType, file.getName(), (Long) chunkInfo.get("fileLength"), chunkInfo.get("md5").toString(), (Integer) chunkInfo.get("chunkNum"));

        logger.info("文件上传结束。本次上传总耗时:" + (System.currentTimeMillis() - start) / 1000 + "秒");
        return result;
    }

    private Callable<Boolean> task(final Map<String, Object> chunkInfo, final Map<String, Object> chunkFileInfo, final String fileType, final File file) {
        return new Callable<Boolean>() {
            @Override
            public Boolean call() throws Exception {
                boolean result = false;
                System.out.println(Thread.currentThread().getName() + "开始执行...");
                boolean exist = checkChunk(fileType, chunkInfo.get("md5").toString(), (Integer) chunkFileInfo.get("chunkIndex"), (Integer) chunkInfo.get("chunkNum"), (Long) chunkFileInfo.get("chunkSize"));
                if (!exist) {
                    result = doUpload(fileType, (File) chunkFileInfo.get("chunkFile"), file.getName(), (Long) chunkInfo.get("fileLength"), chunkInfo.get("md5").toString(), (Integer) chunkFileInfo.get("chunkIndex"), (Integer) chunkInfo.get("chunkNum"), (Long) chunkFileInfo.get("chunkSize"));
                }

                System.out.println(Thread.currentThread().getName() + "执行结束.");
                return exist || result;
            }
        };
    }

    /**
     * Splits the file into chunk files on disk and collects the chunk information
     *
     * @param file        the file to split
     * @param defaultSize size of a single chunk (in MB)
     * @return
     */
    private Map<String, Object> getChunkInfo(File file, int defaultSize) {
        Map<String, Object> result = new HashMap<String, Object>();
        FileInputStream fis = null;
        try {
            fis = new FileInputStream(file);
            String md5 = DigestUtils.md5Hex(fis);
            result.put("md5", md5);

            long fileLength = file.length();
            result.put("fileLength", fileLength);

            int defaultByteSize = defaultSize * 1024 * 1024;
            int chunkNum = (int) Math.ceil(fileLength / (double) defaultByteSize);
            result.put("chunkNum", chunkNum);

            File chunkFile;
            FileOutputStream fos;
            File parentDirectory = new File(tempPath + File.separator + md5);
            // delete any existing directory for this MD5 and recreate it
            if (parentDirectory.exists()) FileUtils.deleteDirectory(parentDirectory);
            parentDirectory.mkdirs();

            HashMap<String, Object> chunkFileInfo;// information about a single chunk
            List<Map<String, Object>> chunkFiles = new ArrayList<Map<String, Object>>();// information about every chunk
            byte[] bytes = new byte[defaultByteSize];
            RandomAccessFile raf = new RandomAccessFile(file, "r");
            for (int chunkIndex = 1; chunkIndex <= chunkNum; chunkIndex++) {
                chunkFile = new File(parentDirectory, chunkIndex + ".temp");
                chunkFile.createNewFile();
                fos = new FileOutputStream(chunkFile);

                long offset = (long) (chunkIndex - 1) * defaultByteSize;// read offset within the source file
                if (chunkIndex == chunkNum) {// the last chunk only gets the remaining bytes
                    bytes = new byte[(int) (fileLength - offset)];
                }

                raf.seek(offset);
                raf.readFully(bytes);// fill the whole buffer so the chunk file is complete
                fos.write(bytes);
                fos.flush();
                fos.close();
                chunkFileInfo = new HashMap<String, Object>();
                chunkFileInfo.put("chunkFile", chunkFile);
                chunkFileInfo.put("chunkIndex", chunkIndex);
                chunkFileInfo.put("chunkSize", chunkFile.length());
                chunkFiles.add(chunkFileInfo);
            }
            raf.close();

            result.put("chunkList", chunkFiles);
        } catch (IOException e) {
            logger.error(e.getMessage());
        } finally {
            try {
                if (fis != null) fis.close();
            } catch (IOException e) {
                logger.error(e.getMessage());
            }
        }

        return result;
    }

    /**
     * Checks whether a chunk already exists on the server
     *
     * @param fileType
     * @param md5
     * @param chunkIndex
     * @param chunkNum
     * @param chunkSize
     * @return
     */
    @SuppressWarnings("unchecked")
    private boolean checkChunk(String fileType, String md5, int chunkIndex, int chunkNum, long chunkSize) {
        CloseableHttpClient httpClient = null;
        CloseableHttpResponse response = null;
        try {
            httpClient = HttpClients.createDefault();

            HttpPost httpPost = new HttpPost(check_url);

            httpPost.addHeader("serviceNo", serviceNo);
            httpPost.addHeader("serviceKey", serviceKey);
            httpPost.addHeader("fileType", fileType);

            HttpEntity reqEntity = MultipartEntityBuilder.create()
                    .addPart("md5", new StringBody(md5, ContentType.create("text/plain", Consts.UTF_8)))
                    .addPart("chunkIndex", new StringBody(String.valueOf(chunkIndex), ContentType.create("text/plain", Consts.UTF_8)))
                    .addPart("chunkNum", new StringBody(String.valueOf(chunkNum), ContentType.create("text/plain", Consts.UTF_8)))
                    .addPart("chunkSize", new StringBody(String.valueOf(chunkSize), ContentType.create("text/plain", Consts.UTF_8)))
                    .build();

            httpPost.setEntity(reqEntity);

            // send the request and obtain the response
            response = httpClient.execute(httpPost);

            // read the response entity
            HttpEntity resEntity = response.getEntity();
            if (resEntity != null) {
                String result = EntityUtils.toString(resEntity, Charset.forName("UTF-8"));
                EntityUtils.consume(resEntity);// release the entity

                logger.info("Chunk [" + chunkIndex + "] check response: " + result);

                Map<String, Object> map = new ObjectMapper().readValue(result, HashMap.class);
                if (Boolean.valueOf(map.get("success").toString())) {
                    Map<String, Object> body = (Map<String, Object>) map.get("body");
                    return Boolean.valueOf(body.get("isExist").toString());
                }
            }
        } catch (Exception e) {
            logger.error(e.getMessage());
        } finally {
            try {
                if (response != null) response.close();
                if (httpClient != null) httpClient.close();
            } catch (IOException e) {
                logger.error(e.getMessage());
            }
        }
        return false;
    }

    /**
     * Uploads a single chunk
     *
     * @param fileType   file type
     * @param file       current chunk file
     * @param fileName   name of the original file
     * @param fileLength length of the original file
     * @param md5        MD5 of the whole file
     * @param chunkIndex index of the current chunk
     * @param chunkNum   total number of chunks
     * @param chunkSize  size of the current chunk
     */
    @SuppressWarnings("unchecked")
    private boolean doUpload(String fileType, File file, String fileName, long fileLength, String md5, int chunkIndex, int chunkNum, long chunkSize) {
        CloseableHttpClient httpClient = null;
        CloseableHttpResponse response = null;
        try {
            httpClient = HttpClients.createDefault();

            HttpPost httpPost = new HttpPost(upload_url);

            httpPost.addHeader("serviceNo", serviceNo);
            httpPost.addHeader("serviceKey", serviceKey);
            httpPost.addHeader("fileType", fileType);

            HttpEntity reqEntity = MultipartEntityBuilder.create()
                    .addPart("file", new FileBody(file))
                    .addPart("md5", new StringBody(md5, ContentType.create("text/plain", Consts.UTF_8)))
                    .addPart("fileName", new StringBody(fileName, ContentType.create("text/plain", Consts.UTF_8)))
                    .addPart("chunkIndex", new StringBody(String.valueOf(chunkIndex), ContentType.create("text/plain", Consts.UTF_8)))
                    .addPart("chunkNum", new StringBody(String.valueOf(chunkNum), ContentType.create("text/plain", Consts.UTF_8)))
                    .addPart("chunkSize", new StringBody(String.valueOf(chunkSize), ContentType.create("text/plain", Consts.UTF_8)))
                    .addPart("fileLength", new StringBody(String.valueOf(fileLength), ContentType.create("text/plain", Consts.UTF_8)))
                    .build();

            httpPost.setEntity(reqEntity);

            // send the request and obtain the response
            response = httpClient.execute(httpPost);

            // read the response entity
            HttpEntity resEntity = response.getEntity();
            if (resEntity != null) {
                String result = EntityUtils.toString(resEntity, Charset.forName("UTF-8"));
                EntityUtils.consume(resEntity);

                logger.info("Chunk [" + chunkIndex + "] upload response: " + result);
                Map<String, Object> map = new ObjectMapper().readValue(result, Map.class);
                return Boolean.valueOf(map.get("success").toString());
            }
        } catch (Exception e) {
            logger.error(e.getMessage());
        } finally {
            try {
                if (response != null) response.close();
                if (httpClient != null) httpClient.close();
            } catch (IOException e) {
                logger.error(e.getMessage());
            }
        }
        return false;
    }

    /**
     * Merges chunk files locally (for testing)
     *
     * @param path directory containing the chunk files
     * @return
     * @throws IOException
     */
    public File merge(String path) throws IOException {
        File file = new File(path);
        File[] files = file.listFiles();
        TreeSet<File> treeSet = new TreeSet<File>(new Comparator<File>() {
            @Override
            public int compare(File o1, File o2) {
                return o1.getName().substring(0, o1.getName().indexOf(".")).compareTo(o2.getName().substring(0, o2.getName().indexOf(".")));
            }
        });

        treeSet.addAll(Arrays.asList(files));
        File merge = new File(file.getParentFile(), "new.exe");
        if (merge.exists()) FileUtils.deleteQuietly(merge);
        merge.createNewFile();
        FileOutputStream fos = new FileOutputStream(merge);
        FileInputStream fis;
        for (File f : treeSet) {
            fis = new FileInputStream(f);
            IOUtils.copy(fis, fos);
            fis.close();
        }
        fos.flush();
        fos.close();

        return merge;
    }

    /**
     * Asks the server to merge the uploaded chunks
     *
     * @param fileType
     * @param fileName
     * @param fileLength
     * @param md5
     * @param chunkNum
     * @return
     */
    @SuppressWarnings("unchecked")
    private boolean merge(String fileType, String fileName, long fileLength, String md5, int chunkNum) {
        CloseableHttpClient httpClient = null;
        CloseableHttpResponse response = null;
        try {
            httpClient = HttpClients.createDefault();

            HttpPost httpPost = new HttpPost(merge_url);

            httpPost.addHeader("serviceNo", serviceNo);
            httpPost.addHeader("serviceKey", serviceKey);
            httpPost.addHeader("fileType", fileType);

            HttpEntity reqEntity = MultipartEntityBuilder.create()
                    .addPart("md5", new StringBody(md5, ContentType.create("text/plain", Consts.UTF_8)))
                    .addPart("fileName", new StringBody(fileName, ContentType.create("text/plain", Consts.UTF_8)))
                    .addPart("chunkNum", new StringBody(String.valueOf(chunkNum), ContentType.create("text/plain", Consts.UTF_8)))
                    .addPart("fileLength", new StringBody(String.valueOf(fileLength), ContentType.create("text/plain", Consts.UTF_8)))
                    .build();

            httpPost.setEntity(reqEntity);

            // send the request and obtain the response
            response = httpClient.execute(httpPost);

            // read the response entity
            HttpEntity resEntity = response.getEntity();
            if (resEntity != null) {
                String result = EntityUtils.toString(resEntity, Charset.forName("UTF-8"));
                EntityUtils.consume(resEntity);

                logger.info("File {} merge response: " + result, fileName);
                Map<String, Object> map = new ObjectMapper().readValue(result, Map.class);
                return Boolean.valueOf(map.get("success").toString());
            }
        } catch (Exception e) {
            logger.error(e.getMessage());
        } finally {
            try {
                if (response != null) response.close();
                if (httpClient != null) httpClient.close();
            } catch (IOException e) {
                logger.error(e.getMessage());
            }
        }
        return false;
    }
}

Reposted from blog.csdn.net/m0_37899388/article/details/78928481