Hadoop Case Study: Collecting Data to HDFS on a Schedule with a Shell Script

1. Create the base directory: mkdir /export/data/logs

2. Create the upload script: vi uploadHDFS.sh

#!/bin/bash

# Set up the environment
export JAVA_HOME=/export/servers/jdk
export JRE_HOME=${JAVA_HOME}/jre
export CLASSPATH=.:${JAVA_HOME}/lib:${JRE_HOME}/lib
export PATH=${JAVA_HOME}/bin:$PATH

export HADOOP_HOME=/export/servers/hadoop
export PATH=${HADOOP_HOME}/bin:${HADOOP_HOME}/sbin:$PATH

# Directory the log files are written to
log_src_dir=/export/data/logs/log/

# Staging directory for files waiting to be uploaded
log_toupload_dir=/export/data/logs/toupload/

# Yesterday's date, used to build the HDFS path
date1=$(date -d last-day +%Y_%m_%d)

# Root HDFS path the log files are uploaded to
hdfs_root_dir=/data/clickLog/$date1/

# Print environment info
echo "envs: hadoop_home: $HADOOP_HOME"

# Scan the log directory and decide which files need uploading
echo "log_src_dir: $log_src_dir"

ls "$log_src_dir" | while read fileName
do
    # Only rotated logs (access.log.1, access.log.2, ...) are collected;
    # access.log itself is still being written to, so it is skipped
    if [[ "$fileName" == access.log.* ]]; then
        date=$(date +%Y_%m_%d_%H_%M_%S)
        # Move the file into the staging directory, renaming it with a timestamp
        echo "moving $log_src_dir$fileName to ${log_toupload_dir}xxxxx_click_log_${fileName}$date"
        mv "$log_src_dir$fileName" "${log_toupload_dir}xxxxx_click_log_${fileName}$date"
        # Append the staged file's path to a willDoing list file
        echo "${log_toupload_dir}xxxxx_click_log_${fileName}$date" >> "${log_toupload_dir}willDoing.$date"
    fi
done

# Find the willDoing list files that are neither in progress nor finished
ls "$log_toupload_dir" | grep will | grep -v "_COPY_" | grep -v "_DONE_" | while read line
do
    echo "toupload is in file: $line"
    # Rename the list file with a _COPY_ suffix to mark it as in progress
    mv "$log_toupload_dir$line" "$log_toupload_dir${line}_COPY_"
    # Read the _COPY_ list file; each entry is the path of one staged file.
    # The inner variable is named path (not line) so it does not shadow
    # the outer loop variable.
    cat "$log_toupload_dir${line}_COPY_" | while read path
    do
        echo "puting ... $path to hdfs path ..... $hdfs_root_dir"
        hadoop fs -mkdir -p "$hdfs_root_dir"
        hadoop fs -put "$path" "$hdfs_root_dir"
    done
    # Rename the list file with a _DONE_ suffix to mark it as finished
    mv "$log_toupload_dir${line}_COPY_" "$log_toupload_dir${line}_DONE_"
done
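
A note on the design: a willDoing list file passes through three names — willDoing.<date> when created, willDoing.<date>_COPY_ while its files are being uploaded, and willDoing.<date>_DONE_ once they are all in HDFS — so an overlapping run skips batches another run has already claimed. After a run you can confirm the upload directly in HDFS; the path follows from hdfs_root_dir in the script:

hadoop fs -ls /data/clickLog/$(date -d last-day +%Y_%m_%d)/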

3. Create the source log directory; the script also moves files into the staging directory, so create that as well: mkdir -p /export/data/logs/log/ /export/data/logs/toupload/

4. Create a few sample log files (vi opens each file in turn; write a few lines and save each with :wq): vi access.log access.log.1 access.log.2
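
If you would rather not create the test files interactively, a minimal sketch that writes one line of sample content into each file under the script's log_src_dir (the content itself is an assumption; anything will do):

for f in access.log access.log.1 access.log.2; do
    # hypothetical sample content, just so the files are non-empty
    echo "sample click record $(date +%s)" > /export/data/logs/log/$f
done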

5. Run the script once by hand to verify it works: sh uploadHDFS.sh
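
To actually collect on a schedule, as the title promises, one approach is a cron entry added via crontab -e. This is a sketch: the once-per-minute interval and the script's absolute path are assumptions, so adjust both to your setup:

# run the collector every minute (interval and path are assumptions)
*/1 * * * * /bin/bash /export/data/logs/uploadHDFS.sh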

Reposted from www.cnblogs.com/-StarrySky-/p/11908397.html