【HDFS】hadoop fs Command Reference

Copyright notice: this is an original article by the author, released under the CC 4.0 BY-SA license; please include the original source link and this notice when reposting.
Original link: https://blog.csdn.net/anzhenxi3529/article/details/102596769

The main hadoop fs commands are as follows:


Command                     Description
hadoop fs -mkdir            Create an HDFS directory
hadoop fs -ls               List an HDFS directory
hadoop fs -copyFromLocal    Copy a local file to HDFS
hadoop fs -put              Copy a local file to HDFS
hadoop fs -copyToLocal      Copy an HDFS file to the local filesystem
hadoop fs -get              Copy an HDFS file to the local filesystem
hadoop fs -cp               Copy files within HDFS
hadoop fs -rm               Delete HDFS files
hadoop fs -cat              Print the contents of an HDFS file
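
Note that -put and -copyFromLocal are near-equivalent, as are -get and -copyToLocal; the *Local variants simply require that one side of the copy be the local filesystem. A minimal sketch of both pairs (the /tmp paths are hypothetical):

hadoop fs -put /tmp/data.txt /user/hduser             # upload, same effect as -copyFromLocal
hadoop fs -get /user/hduser/data.txt /tmp/copy.txt    # download, same effect as -copyToLocal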


1.【Create an HDFS directory】

[root@hadoop001 sbin]# hadoop fs -mkdir /user
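
A path without a leading / is resolved against the current user's HDFS home directory, /user/<username>. A small sketch (assuming you run as root and /user/root already exists):

hadoop fs -mkdir input    # creates /user/root/input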

2.【View the HDFS directories created earlier】

[root@hadoop001 hadoop]# hadoop fs -ls /
Found 1 items
drwxr-xr-x   - root supergroup          0 2019-10-16 22:18 /user
[root@hadoop001 hadoop]# hadoop fs -ls /user
Found 2 items
drwxr-xr-x   - root supergroup          0 2019-10-16 22:18 /user/hduser
drwxr-xr-x   - root supergroup          0 2019-10-16 22:07 /user/local
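
-ls also accepts a fully qualified URI, which is useful when working against a specific NameNode. A sketch, assuming fs.defaultFS points at hadoop001:9000 (adjust the host and port to your cluster):

hadoop fs -ls hdfs://hadoop001:9000/user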

3.【List directories recursively】

[root@hadoop001 hadoop]# hadoop fs -ls -R /
drwxr-xr-x   - root supergroup          0 2019-10-16 22:18 /user
drwxr-xr-x   - root supergroup          0 2019-10-16 22:18 /user/hduser
drwxr-xr-x   - root supergroup          0 2019-10-16 22:07 /user/local
drwxr-xr-x   - root supergroup          0 2019-10-16 22:07 /user/local/hive
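
Recursing from / can produce a lot of output on a busy cluster; pointing -ls -R at a subtree keeps it manageable:

hadoop fs -ls -R /user    # recurse only under /user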

4.【Create multi-level directories in one step】

[root@hadoop001 hadoop]# hadoop fs -mkdir -p /file1/file2/file3
[root@hadoop001 hadoop]# hadoop fs -ls -R /
drwxr-xr-x   - root supergroup          0 2019-10-16 23:00 /file1
drwxr-xr-x   - root supergroup          0 2019-10-16 23:00 /file1/file2
drwxr-xr-x   - root supergroup          0 2019-10-16 23:00 /file1/file2/file3
drwxr-xr-x   - root supergroup          0 2019-10-16 22:18 /user
drwxr-xr-x   - root supergroup          0 2019-10-16 22:18 /user/hduser
drwxr-xr-x   - root supergroup          0 2019-10-16 22:07 /user/local
drwxr-xr-x   - root supergroup          0 2019-10-16 22:07 /user/local/hive
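
Without -p, -mkdir refuses to create a directory whose parent does not exist, which is why the -p flag is needed above. A sketch with a hypothetical path:

hadoop fs -mkdir /a/b/c       # fails if /a or /a/b does not exist
hadoop fs -mkdir -p /a/b/c    # creates all missing parent directories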

5.【Copy a local file to HDFS】

[root@hadoop001 hadoop-2.5.2]# hadoop fs -copyFromLocal /usr/local/README.txt /user/hduser
[root@hadoop001 hadoop-2.5.2]# ll
total 60
drwxr-xr-x 2 10021 10021  4096 Nov 15  2014 bin
drwxr-xr-x 3 10021 10021  4096 Nov 15  2014 etc
drwxr-xr-x 2 10021 10021  4096 Nov 15  2014 include
drwxr-xr-x 3 10021 10021  4096 Oct 16 22:29 lib
drwxr-xr-x 2 10021 10021  4096 Nov 15  2014 libexec
-rw-r--r-- 1 10021 10021 15458 Nov 15  2014 LICENSE.txt
drwxr-xr-x 2 root  root   4096 Oct 13 06:17 logs
-rw-r--r-- 1 10021 10021   101 Nov 15  2014 NOTICE.txt
-rw-r--r-- 1 10021 10021  1366 Nov 15  2014 README.txt
drwxr-xr-x 2 10021 10021  4096 Nov 15  2014 sbin
drwxr-xr-x 4 10021 10021  4096 Nov 15  2014 share
drwxr-xr-x 3 root  root   4096 Oct 12 23:05 tmp
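-put uploads the same way, and newer Hadoop releases also accept -f on -put and -copyFromLocal to overwrite an existing destination (whether your release supports -f is an assumption worth checking). An equivalent sketch of the copy above:

hadoop fs -put /usr/local/README.txt /user/hduser       # same effect as -copyFromLocal
hadoop fs -put -f /usr/local/README.txt /user/hduser    # overwrite an existing file (newer releases)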

6.【Check that the local file README.txt was copied to HDFS】

[root@hadoop001 hadoop-2.5.2]# hadoop fs -cat /user/hduser/README.txt
echo 'nihao';
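
To round out the table above, downloading, copying within HDFS, and deleting follow the same pattern. A sketch with hypothetical destination paths:

hadoop fs -get /user/hduser/README.txt /tmp/README.txt    # download (same as -copyToLocal)
hadoop fs -cp /user/hduser/README.txt /file1              # copy within HDFS
hadoop fs -rm /file1/README.txt                           # delete a file
hadoop fs -rm -r /file1                                   # delete a directory tree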
