DataWarehouseOss project summary (2): reading log information and writing it to Kafka

Article Directory

Controller

package com.dataWarehouseOss.controller;

import com.dataWarehouseOss.service.ReadLogService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;

import java.io.IOException;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;

/**
 * @author :LiuShihao
 * @date :Created in 2020/9/27 1:38 下午
 * @desc : Accepts an uploaded log file and delegates to {@link ReadLogService},
 *         which replays each log line into Kafka.
 */
@Slf4j
@RestController
public class ReadLogController {

    @Autowired
    private ReadLogService logService;

    /**
     * Backfills (补录) log data from the uploaded file into Kafka.
     *
     * @param file the log file submitted as multipart form field "file"
     * @return a success message ("补录数据成功" — backfill succeeded)
     * @throws IOException if the uploaded file cannot be read
     */
    @PostMapping("/readlog")
    public String readLog(@RequestParam("file") MultipartFile file) throws IOException {
        // The logging framework already timestamps every entry; the original
        // code redundantly formatted LocalDateTime.now() into the message.
        log.info("-------------执行补录日志数据-------------");
        logService.readLog(file);
        return "补录数据成功";
    }
}

ServiceImpl

package com.dataWarehouseOss.service.Impl;

import com.dataWarehouseOss.service.ReadLogService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;

import java.io.*;
import java.nio.charset.StandardCharsets;

/**
 * @author :LiuShihao
 * @date :Created in 2020/9/27 1:40 下午
 * @desc : Reads an uploaded log file line by line, extracts the JSON payload
 *         (the substring starting at the first '{') from each line, and
 *         publishes it to the "hw_data" Kafka topic.
 */
@Slf4j
@Service
public class ReadLogServerImpl implements ReadLogService {

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    /**
     * Copies the upload to a temp file, then streams it line by line to Kafka.
     *
     * @param multipartFile the uploaded log file
     * @throws IOException if the upload cannot be copied or read
     */
    @Override
    public void readLog(MultipartFile multipartFile) throws IOException {
        // Derive a temp-file prefix from the original name; guard against a
        // null filename and names without a '.' (the original code assumed
        // split[1] always existed and would have thrown otherwise).
        String originalName = multipartFile.getOriginalFilename();
        String prefix = originalName == null ? "upload" : String.join("", originalName.split("\\."));
        if (prefix.length() < 3) {
            // File.createTempFile requires a prefix of at least 3 characters.
            prefix = prefix + "log";
        }
        File tempFile = File.createTempFile(prefix, ".log");
        try {
            // Spring copies the uploaded content into the temp file.
            multipartFile.transferTo(tempFile);

            // try-with-resources closes the reader (and the underlying stream)
            // even on exception; the original leaked both on any failure and
            // closed the FileInputStream before the BufferedReader.
            // UTF-8 is explicit so behavior doesn't depend on platform charset.
            try (BufferedReader br = new BufferedReader(
                    new InputStreamReader(new FileInputStream(tempFile), StandardCharsets.UTF_8))) {
                String line;
                while ((line = br.readLine()) != null) {
                    int jsonStart = line.indexOf('{');
                    if (jsonStart < 0) {
                        // Original code called substring(-1) here and threw
                        // StringIndexOutOfBoundsException on brace-less lines.
                        log.warn("跳过不含JSON的日志行: {}", line);
                        continue;
                    }
                    String payload = line.substring(jsonStart);
                    log.info("发送日志数据到kafka:{}", payload);
                    kafkaTemplate.send("hw_data", payload);
                }
            }
            log.info("结束--------------------------------------");
        } finally {
            // Delete immediately; deleteOnExit() only as a fallback, since it
            // accumulates entries (and temp files) for the whole JVM lifetime.
            if (!tempFile.delete()) {
                tempFile.deleteOnExit();
            }
        }
    }
}

Summary

Use Postman to call http://localhost:8081/readlog with the POST method, submitting the file to the backend as form data; the service then
creates a temporary file, tempFile.

		String[] split = multipartFile.getOriginalFilename().split("\\.");

        File tempFile = File.createTempFile(split[0] + split[1], ".log");

Convert the MultipartFile into a File:

 multipartFile.transferTo(tempFile);

Then obtain a BufferedReader:

		FileInputStream fis = new FileInputStream(tempFile);

        BufferedReader br = new BufferedReader(new InputStreamReader(fis));

Use br.readLine() to read the file line by line; each line's JSON substring is extracted and sent to Kafka.

Guess you like

Origin blog.csdn.net/DreamsArchitects/article/details/108842706