Uploading logs to HDFS, backing them up, and deleting them periodically

1. Add Maven dependencies

	<dependencies>
		<dependency>
			<groupId>junit</groupId>
			<artifactId>junit</artifactId>
			<version>3.8.1</version>
			<scope>test</scope>
		</dependency>
		<dependency>
			<groupId>org.apache.hadoop</groupId>
			<artifactId>hadoop-client</artifactId>
			<version>2.8.3</version>
		</dependency>
		<dependency>
			<groupId>log4j</groupId>
			<artifactId>log4j</artifactId>
			<version>1.2.17</version>
		</dependency>

	</dependencies>
	<build>
		<plugins>
			<plugin>
				<groupId>org.apache.maven.plugins</groupId>
				<artifactId>maven-compiler-plugin</artifactId>
				<configuration>
					<source>1.8</source>
					<target>1.8</target>
				</configuration>
			</plugin>
		</plugins>
	</build>
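
The Task class in step 4 calls FileUtils from Apache Commons IO, which is not declared above; hadoop-client pulls commons-io in transitively, so the POM still builds, but declaring it explicitly makes the dependency visible. A possible addition (the version is an assumption, not from the original post):

		<dependency>
			<groupId>commons-io</groupId>
			<artifactId>commons-io</artifactId>
			<!-- assumed version; match whatever hadoop-client brings in -->
			<version>2.4</version>
		</dependency>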

2. Generate log files with a while loop

Logger logger = LogManager.getLogger(CreateLog.class);
		while (true) {
			logger.info("this is an info message");
			Thread.sleep(2); // the enclosing method must declare or handle InterruptedException
		}
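
For reference, a minimal runnable wrapper around this loop might look like the sketch below (the class name CreateLog comes from the getLogger call above; the main method and the message text are assumptions):

import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;

public class CreateLog {
	public static void main(String[] args) throws InterruptedException {
		// log4j reads log4j.properties from the classpath (see the rolling-file settings further below)
		Logger logger = LogManager.getLogger(CreateLog.class);
		while (true) {
			logger.info("this is an info message"); // arbitrary message, logged every 2 ms
			Thread.sleep(2);
		}
	}
}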

3. Use a Timer to build a scheduled upload-and-backup task

// Task must extend TimerTask; the first run happens after 30 ms, then it repeats every 2 seconds
new Timer().schedule(new Task(), 30, 2000);
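
A minimal sketch of the class that starts this schedule could look like the following (the class name UploadMain is an assumption; Timer's worker thread is non-daemon, so the JVM keeps running after main returns):

import java.util.Timer;

public class UploadMain {
	public static void main(String[] args) {
		// first run after 30 ms, then every 2 seconds
		new Timer().schedule(new Task(), 30, 2000);
	}
}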

4. Write the task class

import java.io.File;
import java.io.FilenameFilter;
import java.net.InetAddress;
import java.net.URI;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimerTask;

import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class Task extends TimerTask {
	@Override
	public void run() {
		try {
			// the timestamp is appended to file names to avoid collisions and is also used to build one directory per date
			SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd-HH");
			String date = format.format(new Date());
			File logdir = new File("d:/testlog");
			File[] listFiles = logdir.listFiles(new FilenameFilter() {
				// the FilenameFilter decides which files listFiles() returns
				@Override
				public boolean accept(File dir, String name) {
					// matches test.log.1, test.log.2, etc.; the appender rolls a new file once the log reaches 1 MB,
					// so the active test.log itself is never moved
					return name.startsWith("test.log.");
				}
			});
			for (File file : listFiles) {
				// move each rolled file into a local staging directory
				// (FileUtils is from Apache Commons IO; unlike File.renameTo, it can create the target directory)
				FileUtils.moveFileToDirectory(file, new File("d:/waitUpLoad"), true);
			}
			// connect to the HDFS cluster as user root
			FileSystem fs = FileSystem.get(new URI("hdfs://jiqun01:9000"), new Configuration(), "root");
			// substring(0, 10) keeps the yyyy-MM-dd part, so logs are grouped by day
			Path path = new Path("/log/" + date.substring(0, 10));
			boolean exists = fs.exists(path);
			if(!exists) {
				fs.mkdirs(path);
			}
			// record the local hostname so we know which server each uploaded file came from
			String hostName = InetAddress.getLocalHost().getHostName();
			File fileWaitUpload = new File("d:/waitUpLoad");
			File[] list = fileWaitUpload.listFiles();
			for (File f : list) {
				// upload to /log/<date>; the target name is <hostname>_<filename>_<millis> to keep names unique
				fs.copyFromLocalFile(new Path(f.getPath()), new Path(path, hostName + "_" + f.getName() + "_" + System.currentTimeMillis()));
				// then move the file into the dated local backup directory
				FileUtils.moveFileToDirectory(f, new File("d:/backDir/" + date.substring(0, 10)), true);
			}
			fs.close();
		} catch (Exception e) {
			e.printStackTrace();
		}
	}
}
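
To spot-check what actually landed on HDFS, a small sketch along these lines can be used (the URI and user come from the task above; the class name and the date argument are assumptions made for this example):

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ListUploadedLogs {
	public static void main(String[] args) throws Exception {
		// pass the day to inspect, e.g. 2018-08-21, as the first argument
		String date = args[0];
		FileSystem fs = FileSystem.get(new URI("hdfs://jiqun01:9000"), new Configuration(), "root");
		for (FileStatus status : fs.listStatus(new Path("/log/" + date))) {
			System.out.println(status.getPath() + "\t" + status.getLen() + " bytes");
		}
		fs.close();
	}
}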

5. Build a task that deletes backup logs older than one day

import java.io.File;
import java.util.Timer;
import java.util.TimerTask;

public class DeleteBackLog {
	public static void main(String[] args) {
		Timer timer = new Timer();
		// the cleanup job is an anonymous TimerTask
		timer.schedule(new TimerTask() {
			@Override
			public void run() {
				File file = new File("d:/backDir");
				File[] listFiles = file.listFiles();
				for (File file2 : listFiles) {
					if (file2.isDirectory()) {
						File[] listFiles2 = file2.listFiles();
						for (File file3 : listFiles2) {
							// the last-modified time is treated as the creation time here
							long lastModified = file3.lastModified();
							long currentTimeMillis = System.currentTimeMillis();
							// age in whole days: 1 day = 86,400,000 ms
							long days = (currentTimeMillis - lastModified) / 86400000;
							// delete backups older than one day
							if (days >= 1) {
								boolean deleted = file3.delete();
								System.out.println(file3.getName() + " age: " + days + " day(s), " + (deleted ? "deleted" : "not deleted"));
							}
						}
					} else {
						// files directly under the backup root: for the demo run the threshold
						// is one minute instead of one day (see the screenshots below)
						long lastModified = file2.lastModified();
						long currentTimeMillis = System.currentTimeMillis();
						long minutes = (currentTimeMillis - lastModified) / 60000;
						if (minutes >= 1) {
							boolean deleted = file2.delete();
							System.out.println(file2.getName() + " age: " + minutes + " minute(s), " + (deleted ? "deleted" : "not deleted"));
						}
					}
				}
			}
		}, 0, 3600000); // run immediately, then repeat once per hour (3,600,000 ms)

	}
}
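
As a purely stylistic alternative to the manual millisecond arithmetic above (not what the original code uses), java.util.concurrent.TimeUnit makes the age check a little easier to read:

import java.io.File;
import java.util.concurrent.TimeUnit;

public class FileAge {
	// true if the file was last modified at least one day ago
	static boolean olderThanOneDay(File f) {
		long ageMillis = System.currentTimeMillis() - f.lastModified();
		return TimeUnit.MILLISECONDS.toDays(ageMillis) >= 1;
	}
}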

6. Additional notes

The hosts file; the Hadoop cluster here is built on virtual machines:

#127.0.0.1 localhost
192.168.59.138 jiqun01
192.168.59.139 jiqun02
192.168.59.140 jiqun03

The log4j configuration file:

log4j.appender.logRollingFile.Append = TRUE 
log4j.appender.logRollingFile.File = d:/testlog/test.log
log4j.appender.logRollingFile.MaxFileSize = 1MB 
log4j.appender.logRollingFile.MaxBackupIndex = 50 
log4j.appender.logRollingFile.Encoding = UTF-8 
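
The snippet above covers only the rolling behaviour; for log4j to use it, the file also needs the appender class, a layout, and a root logger entry. A minimal completion might look like this (the INFO level and the conversion pattern are assumptions, not from the original post):

# assumed: send everything at INFO and above to the rolling file appender
log4j.rootLogger = INFO, logRollingFile
log4j.appender.logRollingFile = org.apache.log4j.RollingFileAppender
log4j.appender.logRollingFile.layout = org.apache.log4j.PatternLayout
log4j.appender.logRollingFile.layout.ConversionPattern = %d{yyyy-MM-dd HH:mm:ss} %p %c - %m%n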

7. Screenshots of the run

Starting Hadoop

The backed-up files

Viewing the uploaded logs in HDFS

Periodic deletion (for this demo the threshold was shortened to one minute)

Reposted from blog.csdn.net/qq_39184715/article/details/81900926