dataWarehouseOss项目总结(二)_读取日志信息写入kafka

Controller

package com.dataWarehouseOss.controller;

import com.dataWarehouseOss.service.ReadLogService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;

import java.io.IOException;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;

/**
 * @author :LiuShihao
 * @date :Created in 2020/9/27 1:38 下午
 * @desc :
 */
@Slf4j
@RestController
public class ReadLogController {

    @Autowired
    ReadLogService logService;

    /**
     * Accepts an uploaded log file (multipart form field {@code "file"}) and
     * delegates to the service layer, which replays its contents into Kafka.
     *
     * @param file the uploaded log file
     * @return a fixed success message (Chinese: "backfill succeeded")
     * @throws IOException if the uploaded file cannot be stored or read
     */
    @PostMapping("/readlog")
    public String readLog(@RequestParam("file") MultipartFile file) throws IOException {
        // The logging backend already prefixes each entry with a timestamp,
        // so formatting LocalDateTime.now() into the message by hand was
        // redundant; use the plain parameterless message instead.
        log.info("-------------执行补录日志数据-------------");
        logService.readLog(file);
        return "补录数据成功";
    }
}

ServiceImpl

package com.dataWarehouseOss.service.Impl;

import com.dataWarehouseOss.service.ReadLogService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;

import java.io.*;
import java.nio.charset.StandardCharsets;

/**
 * @author :LiuShihao
 * @date :Created in 2020/9/27 1:40 下午
 * @desc :
 */
@Slf4j
@Service
public class ReadLogServerImpl implements ReadLogService {

    /** Kafka topic the extracted log payloads are published to. */
    private static final String TOPIC = "hw_data";

    @Autowired
    KafkaTemplate<String, String> kafkaTemplate;

    /**
     * Reads an uploaded log file line by line, extracts the JSON payload
     * (everything from the first '{' onward) and publishes each payload to
     * the Kafka topic {@value #TOPIC}.
     *
     * <p>The upload is first spooled to a temporary file (so it can be read
     * from disk), which is deleted when processing finishes.
     *
     * @param multipartFile the uploaded log file
     * @throws IOException if the upload cannot be stored or read
     */
    @Override
    public void readLog(MultipartFile multipartFile) throws IOException {
        // NOTE(review): getOriginalFilename() may be null/have no dot for a
        // malformed upload; naming scheme kept as-is — confirm with callers.
        String[] split = multipartFile.getOriginalFilename().split("\\.");
        File tempFile = File.createTempFile(split[0] + split[1], ".log");
        try {
            multipartFile.transferTo(tempFile);
            // try-with-resources guarantees the reader (and the underlying
            // stream) is closed even when reading or sending fails — the
            // original leaked both on any exception. UTF-8 is pinned instead
            // of the platform default charset.
            try (BufferedReader br = new BufferedReader(new InputStreamReader(
                    new FileInputStream(tempFile), StandardCharsets.UTF_8))) {
                String line;
                while ((line = br.readLine()) != null) {
                    int braceIdx = line.indexOf('{');
                    if (braceIdx < 0) {
                        // No JSON payload on this line; the original threw
                        // StringIndexOutOfBoundsException here.
                        continue;
                    }
                    String payload = line.substring(braceIdx);
                    // Use the injected SLF4J logger (parameterized) rather
                    // than System.out.println.
                    log.info("发送日志数据到kafka:{}", payload);
                    kafkaTemplate.send(TOPIC, payload);
                }
            }
            log.info("结束--------------------------------------");
        } finally {
            // Delete eagerly; deleteOnExit() only cleans up at JVM shutdown,
            // which accumulates temp files in a long-running service.
            if (!tempFile.delete()) {
                tempFile.deleteOnExit();
            }
        }
    }
}

总结

通过 Postman 以 POST 方法调用 http://localhost:8081/readlog,以表单形式提交文件到后台,
然后创建一个临时文件 tempFile。

		String[] split = multipartFile.getOriginalFilename().split("\\.");

        File tempFile = File.createTempFile(split[0] + split[1], ".log");

将MultipartFile转换成File文件

 multipartFile.transferTo(tempFile);

再获取 BufferedReader

		FileInputStream fis = new FileInputStream(tempFile);

        BufferedReader br = new BufferedReader(new InputStreamReader(fis));

通过br.readLine()来逐行读取。截取字符串,发送kafka。

猜你喜欢

转载自blog.csdn.net/DreamsArchitects/article/details/108842706
今日推荐