Copying Spring resources to a system directory, with file compression and decompression handling

File compression and decompression toolkit

package org.zpli.utils;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipOutputStream;

/**
 * created at 2023/3/3 16:33
 *
 * @author somnuszpli
 */
public class ZipUtils {

    private static final int BUFFER_SIZE = 16 * 1024; // 16 KB buffer

    /**
     * Default maximum total size: 100 MB
     */
    public static final Limit LIMIT_DEFAULT = maxSize(100L * 1024L * 1024L);

    /**
     * If an entry's size is not reported while reading the archive, assume it is at most 10 MB
     */
    public static long MAX_SIZE_ENTRY_WHEN_SIZE_UNKNOWN = 10L * 1024L * 1024L;

    public static void unzip(Path zipFile, Path destDir, Limit limit) throws IOException {
        unzip(Files.newInputStream(zipFile), destDir, limit);
    }

    public static void unzip(InputStream inputStream, Path destDir, Limit limit) throws IOException {
        if (limit == null) {
            limit = LIMIT_DEFAULT;
        }

        // add the mandatory security check, and run it first, so nothing has taken effect before it
        limit = securityFile().and(limit);

        if (!(inputStream instanceof BufferedInputStream)) {
            inputStream = new BufferedInputStream(inputStream);
        }

        try (ZipInputStream in = new ZipInputStream(inputStream, StandardCharsets.UTF_8)) {
            ZipEntry entry = in.getNextEntry();
            while (entry != null) {
                if (!limit.test(entry)) {
                    entry = in.getNextEntry();
                    continue;
                }

                if (entry.isDirectory()) {
                    Path directory = destDir.resolve(entry.getName());
                    Files.createDirectories(directory);
                } else {
                    Path f = destDir.resolve(entry.getName());
                    if (!limit.readEntry(entry, f, in)) {
                        // create parent directories
                        Files.createDirectories(f.getParent());

                        Files.copy(in, f, StandardCopyOption.REPLACE_EXISTING);
                    }
                }
                entry = in.getNextEntry();
            }
        }
    }

    public static void zipFile(Path sourceFile, Path destFile) throws IOException {
        try (ZipOutputStream out = new ZipOutputStream(
                new BufferedOutputStream(Files.newOutputStream(destFile), BUFFER_SIZE),
                StandardCharsets.UTF_8)) {
            out.putNextEntry(new ZipEntry(sourceFile.getFileName().toString()));

            Files.copy(sourceFile, out);
        }
    }

    public static void zipDirectory(Path sourceDirectory, Path destFile) throws IOException {
        try (ZipOutputStream out = new ZipOutputStream(
                new BufferedOutputStream(Files.newOutputStream(destFile), BUFFER_SIZE), StandardCharsets.UTF_8)) {
            doZip(out, sourceDirectory, "", true);
        }
    }

    private static void doZip(ZipOutputStream out, Path f, String base, boolean root) throws IOException {
        if (Files.isDirectory(f)) {
            try (DirectoryStream<Path> dir = Files.newDirectoryStream(f)) {
                if (root) {
                    for (Path p : dir) {
                        doZip(out, p, p.getFileName().toString(), false);
                    }
                } else {
                    // entry for the directory itself
                    out.putNextEntry(new ZipEntry(base + "/"));

                    for (Path p : dir) {
                        doZip(out, p, base + "/" + p.getFileName().toString(), false);
                    }
                }
            }
        } else {
            out.putNextEntry(new ZipEntry(base));

            Files.copy(f, out);
        }
    }

    interface Limit {

        default boolean readEntry(ZipEntry entry, Path dest, ZipInputStream in) throws IOException {
            return false;
        }

        boolean test(ZipEntry t);

        default Limit and(Limit other) {
            Limit self = this;
            return new Limit() {
                @Override
                public boolean test(ZipEntry entry) {
                    return self.test(entry) && other.test(entry);
                }

                @Override
                public boolean readEntry(ZipEntry entry, Path dest, ZipInputStream in) throws IOException {
                    return self.readEntry(entry, dest, in) || other.readEntry(entry, dest, in);
                }
            };
        }
    }

    public static Limit maxSize(long size) {
        return new Limit() {
            long v = 0;

            @Override
            public boolean readEntry(ZipEntry entry, Path dest, ZipInputStream in) throws IOException {
                long s = entry.getSize();
                if (s == -1) {
                    // create parent directories
                    Files.createDirectories(dest.getParent());

                    int readLen = 0;
                    byte[] bytes = new byte[BUFFER_SIZE];
                    int i;
                    try (OutputStream out = Files.newOutputStream(dest)) {
                        while ((i = in.read(bytes)) != -1) {
                            readLen += i;

                            if (readLen > MAX_SIZE_ENTRY_WHEN_SIZE_UNKNOWN) {
                                throw new IllegalArgumentException(
                                        "Entry '" + entry.getName() + "' has an unreported size and exceeds "
                                                + MAX_SIZE_ENTRY_WHEN_SIZE_UNKNOWN);
                            }

                            out.write(bytes, 0, i);
                        }
                    }

                    v += readLen;
                    if (v > size) {
                        throw new IllegalArgumentException("Archive size exceeds " + size);
                    }

                    return true;
                }

                return false;
            }

            @Override
            public boolean test(ZipEntry e) {
                if (!e.isDirectory()) {
                    long s = e.getSize();
                    // if the size is unknown, defer the check to readEntry
                    if (s == -1) {
                        return true;
                    }
                    v += s;
                    if (v > size) {
                        throw new IllegalArgumentException("Archive size exceeds " + size);
                    }
                }

                return true;
            }
        };
    }

    private static Limit securityFile() {
        return e -> {
            String name = e.getName();
            if (name.contains("../")) {
                throw new IllegalArgumentException("Entry '" + name + "' is an illegal file");
            }

            return true;
        };
    }

    public static Limit excludeDirectory() {
        return e -> {
            if (e.isDirectory()) {
                return false;
            }

            return Paths.get(e.getName()).getNameCount() <= 1;
        };
    }

    public static Limit maxDirectoryDepth(int depth) {
        return e -> {
            if (e.isDirectory()) {
                // "a/b/" has a name count of 2, i.e. two directory levels,
                // so the check uses '>': exceeding depth means the directory is too deep
                String name = e.getName();
                int directoryCount = Paths.get(name).getNameCount();

                if (directoryCount > depth) {
                    throw new IllegalArgumentException(
                            "Directory '" + name + "' exceeds the maximum depth of " + depth);
                }
            }

            return true;
        };
    }

    public static Limit maxFile(int count) {
        return new Limit() {
            long v = 0;

            @Override
            public boolean test(ZipEntry e) {
                if (!e.isDirectory()) {
                    v++;
                    if (v > count) {
                        throw new IllegalArgumentException("Archive contains more than " + count + " files");
                    }
                }

                return true;
            }
        };
    }

    public static Limit maxDirectory(int count) {
        return new Limit() {
            long v = 0;

            @Override
            public boolean test(ZipEntry e) {
                if (e.isDirectory()) {
                    v++;
                    if (v > count) {
                        throw new IllegalArgumentException("Archive contains more than " + count + " directories");
                    }
                }

                return true;
            }
        };
    }
}
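
A quick usage sketch helps show how the pieces fit together. The snippet below is not from the original post: the paths are made up, and the class sits in the org.zpli.utils package only because the Limit interface is package-private.

package org.zpli.utils;

import java.nio.file.Path;
import java.nio.file.Paths;

public class ZipUtilsExample {

    public static void main(String[] args) throws Exception {
        Path sourceDir = Paths.get("/tmp/pages-template");   // hypothetical source directory
        Path archive = Paths.get("/tmp/pages-template.zip"); // hypothetical zip target
        Path target = Paths.get("/tmp/unpacked");            // hypothetical extraction directory

        // compress the whole directory into a zip file
        ZipUtils.zipDirectory(sourceDir, archive);

        // compose several limits: total size, file count and directory depth
        ZipUtils.Limit limit = ZipUtils.maxSize(10L * 1024 * 1024)
                .and(ZipUtils.maxFile(200))
                .and(ZipUtils.maxDirectoryDepth(5));

        ZipUtils.unzip(archive, target, limit);
    }
}

Because unzip() always chains securityFile() in front of whatever limit is passed (including the 100 MB default used when the limit is null), entries containing "../" are rejected even if the caller supplies no limit at all.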

Resource pack

(screenshot of the pages-template resource directory omitted)
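
Although the screenshot is not reproduced here, the layout of the resource pack can be inferred from the provider and consumer code: a top-level page-url.json plus one sub-directory per page type, each holding page.ftl, page-config.json and page-layout.html. The directory names below are hypothetical examples:

src/main/resources/pages-template/
├── page-url.json
├── order-detail/
│   ├── page.ftl
│   ├── page-config.json
│   └── page-layout.html
└── product-list/
    └── ...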

Provider

package org.zpli.service;

import com.qq.qidian.frm.event.producer.service.EventPublishService;
import com.qq.qidian.frm.event.producer.service.SystemUserRequestContextInitHelper;
import com.qq.qidian.frm.util.ZipUtils;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.Base64;
import java.util.HashSet;
import java.util.Set;
import lombok.extern.slf4j.Slf4j;
import lombok.val;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.context.event.ApplicationReadyEvent;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationListener;
import org.springframework.core.io.Resource;
import org.springframework.stereotype.Component;
import org.springframework.util.FileSystemUtils;

/**
 * created at 2023/3/2 14:48
 *
 * @author somnuszpli
 */
@Component
@Slf4j
public class ResourceConfigUpload implements ApplicationListener<ApplicationReadyEvent> {

    private static final String DATA_SOURCE_TOPIC = "upload.pageUrlType";

    private static final String PAGES_TEMPLATE_PATH = "pages-template";

    private static final String PAGES_TEMPLATE_RESOURCE = "classpath:pages-template";

    private static final String DEST_ZIP = "tempZip";
    private static final String DOT_ZIP = ".zip";

    @Autowired
    private ApplicationContext applicationContext;

    @Autowired
    private EventPublishService eventPublishService;

    @Autowired
    private SystemUserRequestContextInitHelper systemUserRequestContextInitHelper;

    @Override
    public void onApplicationEvent(ApplicationReadyEvent applicationReadyEvent) {
        try {
            systemUserRequestContextInitHelper.initApplicationContext();
            uploadConfigZipFile();
        } catch (Exception e) {
            log.info("handle zip file error: {}", e.getMessage(), e);
        } finally {
            systemUserRequestContextInitHelper.cleanApplicationContext();
        }
    }

    /**
     * Upload the packaged configuration data
     */
    private void uploadConfigZipFile() throws IOException {
        log.info("start uploadConfigZipFile...");
        try {
            // create a temporary directory
            Path dirPath = Files.createTempDirectory(PAGES_TEMPLATE_PATH);
            // copy the classpath:pages-template resources into the temporary directory
            copy(PAGES_TEMPLATE_RESOURCE, dirPath);
            // create a temporary zip file
            Path tempZipFile = Files.createTempFile(DEST_ZIP, DOT_ZIP);
            // compress the temporary directory into the zip file
            ZipUtils.zipDirectory(dirPath, tempZipFile);

            // open a stream on the zip file and read it into a byte array
            byte[] bytes;
            try (InputStream inputStream = Files.newInputStream(tempZipFile)) {
                bytes = convertInputStreamToBytes(inputStream);
            }
            // encode the bytes as a Base64 string; the consumer turns it back into bytes
            // with byte[] bytes = Base64.getDecoder().decode(data);
            String data = Base64.getEncoder().encodeToString(bytes);
            log.info("published pages template data size: {}", data.length());
            // recursively delete the temporary files
            FileSystemUtils.deleteRecursively(dirPath);
            FileSystemUtils.deleteRecursively(tempZipFile);

            // publish the Kafka message
            eventPublishService.createDoBoMessage(DATA_SOURCE_TOPIC, data);
        } catch (Exception e) {
            log.info("handle config zip error: {}", e.getMessage(), e);
        }
        log.info("uploadConfigZipFile success!");
    }

    private void copy(String classpath, Path destDirectory) throws IOException {
        val values = applicationContext.getResources(classpath);
        Set<String> pathSet = new HashSet<>();
        for (val value : values) {
            pathSet.add(value.getURL().getPath());
        }

        String path = classpath + "/**";
        val rs = applicationContext.getResources(path);

        for (val resource : rs) {
            val rPath = resource.getURL().getPath();

            B:
            for (String parentPath : pathSet) {
                if (rPath.startsWith(parentPath)) {
                    copy(parentPath.length() + 1, resource, destDirectory);
                    break B;
                }
            }
        }
    }

    private void copy(int prefixLenPlus1, Resource resource, Path destDir)
            throws IOException {
        if (!resource.isReadable()) {
            return;
        }

        String relativePath = resource.getURL().getPath().substring(prefixLenPlus1);
        val copyPath = destDir.resolve(relativePath);
        Files.createDirectories(copyPath.getParent());

        Files.copy(resource.getInputStream(), copyPath, StandardCopyOption.REPLACE_EXISTING);
    }

    private byte[] convertInputStreamToBytes(InputStream inputStream) throws IOException {
        ByteArrayOutputStream swapStream = new ByteArrayOutputStream();
        // fixed-size buffer for each read; sizing it with available() can yield a
        // zero-length buffer for some stream types and silently read nothing
        byte[] buff = new byte[4096];
        int rc;
        while ((rc = inputStream.read(buff)) != -1) {
            swapStream.write(buff, 0, rc);
        }
        return swapStream.toByteArray();
    }
}
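
The provider ships the whole zip archive as a single Base64 string inside one Kafka record, so the payload has to stay within the broker's maximum message size (roughly 1 MB by default unless it has been raised). The contract between provider and consumer is just an encode/decode round trip. The standalone sketch below is not from the original post: the class name is made up, the Kafka transport and Spring wiring are omitted, and ZipUtils is imported under the org.zpli.utils package declared earlier (the provider and consumer classes import it as com.qq.qidian.frm.util.ZipUtils, presumably its location in the real project).

package org.zpli.service;

import java.io.ByteArrayInputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Base64;
import org.zpli.utils.ZipUtils;

public class PagesTemplateRoundTrip {

    public static void main(String[] args) throws Exception {
        // provider side: temporary directory -> zip file -> Base64 string (the Kafka message body)
        Path dir = Files.createTempDirectory("pages-template");
        // put at least one file in the directory so the archive is not empty
        Files.writeString(dir.resolve("page-url.json"), "[]");
        Path zip = Files.createTempFile("tempZip", ".zip");
        ZipUtils.zipDirectory(dir, zip);
        String data = Base64.getEncoder().encodeToString(Files.readAllBytes(zip));

        // consumer side: Base64 string -> byte array -> unzip with the default 100 MB limit
        byte[] bytes = Base64.getDecoder().decode(data);
        Path dest = Files.createTempDirectory("pages-template-dest");
        ZipUtils.unzip(new ByteArrayInputStream(bytes), dest, null);
    }
}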

Consumer

package org.zpli.service;

import com.fasterxml.jackson.core.type.TypeReference;
import com.google.common.collect.Maps;
import com.qq.qidian.cms.plugin.saas.constant.CmsCacheConstants;
import com.qq.qidian.cms.plugin.saas.filter.PageUrlData;
import com.qq.qidian.cms.plugin.saas.filter.PageUrlType;
import com.qq.qidian.cms.plugin.saas.filter.PageUrlTypeRepository;
import com.qq.qidian.frm.event.consumer.kafka.KafkaConsumerConfigurations;
import com.qq.qidian.frm.event.consumer.kafka.KafkaMessageReceiver;
import com.qq.qidian.frm.event.consumer.model.EventSubscribeRecord;
import com.qq.qidian.frm.module.cache.change.util.EntityChangeEventUtils;
import com.qq.qidian.frm.util.JsonUtils;
import com.qq.qidian.frm.util.TenantActionUtils;
import com.qq.qidian.frm.util.ZipUtils;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Base64;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import javax.persistence.EntityManager;
import lombok.extern.slf4j.Slf4j;
import lombok.val;
import org.apache.commons.collections.CollectionUtils;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.FileSystemUtils;

/**
 * created at 2022/7/18 3:01 PM
 *
 * @author somnuszpli
 */
@Component
@Slf4j
@ConditionalOnProperty(value = "kafkaEnabled", havingValue = "true")
public class PageUrlTypeListener extends KafkaMessageReceiver {

    private static final String UPLOAD = "upload";

    private static final String PAGE_URL_JSON = "page-url.json";
    private static final String PAGE_FTL = "page.ftl";
    private static final String PAGE_CONFIG_JSON = "page-config.json";
    private static final String PAGE_LAYOUT_HTML = "page-layout.html";

    @Value("${kylin.systemConfigTenantId:-1}")
    private Long systemConfigTenantId;

    @Autowired
    private PageUrlTypeRepository pageUrlTypeRepository;

    @Autowired
    private EntityManager entityManager;

    @KafkaListener(topics = "do.pageUrlType",
            id = "cms-reminder-pageUrlType-handler",
            groupId = "#{kafkaConsumerConfigurations.getGroupId()}",
            containerFactory = KafkaConsumerConfigurations.RETRABLE_MULTI_THREAD_LISTENER)
    public void receive(ConsumerRecord<String, String> message) {
        processMessage(message);
    }

    @Override
    public List<String> getProcessedActions() {
        return Arrays.asList(UPLOAD);
    }

    @Override
    public void processBusinessLogic(EventSubscribeRecord record) {
        String data = record.getEventBody();
        log.info("PageUrlTypeUploadHandler PageUrlType processBusinessLogic received data is => {}", data);
        try {
            // decode the Base64 string back into a byte array
            byte[] dataBytes = Base64.getDecoder().decode(data);
            // wrap the byte array in an input stream
            ByteArrayInputStream inputStream = new ByteArrayInputStream(dataBytes);
            // create a temporary directory
            Path pagesTemplateDir = Files.createTempDirectory("pages-template");
            // unzip the archive into it
            ZipUtils.unzip(inputStream, pagesTemplateDir, null);
            // resolve the path of page-url.json
            Path pageUrlPath = pagesTemplateDir.resolve(PAGE_URL_JSON);
            // if page-url.json does not exist, return without processing
            if (!Files.exists(pageUrlPath)) {
                log.info("page-url.json is empty, skip logic");
                return;
            }
            // read the file into a byte array
            byte[] pageUrlBytes = Files.readAllBytes(pageUrlPath);
            List<PageUrlType> pageUrlTypes = JsonUtils.getObjectMapper().readValue(pageUrlBytes, new TypeReference<>() {
            });
            // if the list parsed from page-url.json is empty, return without processing
            if (CollectionUtils.isEmpty(pageUrlTypes)) {
                log.info("pageUrlTypes is empty, skip logic");
                return;
            }
            // list every path (files and directories) directly under the unpacked template directory
            Map<String, PageUrlData> pageUrlDataMap;
            try (DirectoryStream<Path> dirPaths = Files.newDirectoryStream(pagesTemplateDir)) {
                pageUrlDataMap = resolve(dirPaths);
            }
            for (PageUrlType x : pageUrlTypes) {
                log.info("PageUrlType Name: {}", x.getName());
                x.setPageData(pageUrlDataMap.get(x.getName()));
            }
            // recursively delete the temporary directory
            FileSystemUtils.deleteRecursively(pagesTemplateDir);
            handleBusinessLogic(pageUrlTypes);
        } catch (Exception e) {
            log.error("PageUrlTypeUploadHandler Failed to parse PageUrlType: ", e);
        }
    }

    public Map<String, PageUrlData> resolve(DirectoryStream<Path> dirPaths) throws IOException {
        Map<String, PageUrlData> map = Maps.newHashMap();
        for (Path path : dirPaths) {
            PageUrlData value = new PageUrlData();
            fillData(value, path);
            map.put(value.getName(), value);
        }
        return map;
    }

    private void fillData(PageUrlData value, Path path) throws IOException {
        if (!Files.isDirectory(path)) {
            return;
        }
        String fileNameUpperCase = path.getFileName().toString().toUpperCase().replaceAll("-", "_");
        value.setName(fileNameUpperCase);

        var snapshotTemplateDataPath = path.resolve(PAGE_FTL);
        if (Files.exists(snapshotTemplateDataPath)) {
            String snapshotTemplateData = Files.readString(snapshotTemplateDataPath, StandardCharsets.UTF_8);
            value.setSnapshotTemplateData(snapshotTemplateData);
        }

        var configPath = path.resolve(PAGE_CONFIG_JSON);
        if (Files.exists(configPath)) {
            String configData = Files.readString(configPath, StandardCharsets.UTF_8);
            value.setPageConfigData(configData);
        }

        var layoutPath = path.resolve(PAGE_LAYOUT_HTML);
        if (Files.exists(layoutPath)) {
            String layoutData = Files.readString(layoutPath, StandardCharsets.UTF_8);
            value.setPageLayoutData(layoutData);
        }
        log.info("PageUrlData: {}, {}, {}", value.getName(), value.getPageLayoutData(), value.getPageConfigData());
    }
}
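
One detail worth calling out: fillData() derives each lookup key from the sub-directory name by upper-casing it and replacing '-' with '_' (for example order-detail becomes ORDER_DETAIL), and processBusinessLogic() looks the PageUrlData up by PageUrlType.getName(). The name values in page-url.json therefore have to match that transformed form. A hypothetical page-url.json is shown below; only the name property is inferred from the getters and setters used here, and the real schema may carry additional fields:

[
  { "name": "ORDER_DETAIL" },
  { "name": "PRODUCT_LIST" }
]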

Origin blog.csdn.net/ToBeMaybe_/article/details/129383644