Environment setup
First, create a Maven project in IDEA (this assumes Maven has already been installed, its environment variables configured, and that you know the basic usage). Then import the corresponding dependencies into the project's pom.xml:
<!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-common -->
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <version>2.6.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-hdfs -->
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-hdfs</artifactId>
    <version>2.6.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-core -->
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-core</artifactId>
    <version>1.2.0</version>
</dependency>
Note that the hadoop-common and hadoop-hdfs versions need to match your own Hadoop version; mine here is 2.6.0. (hadoop-core is a legacy artifact from the Hadoop 1.x line, which is why its version number does not follow the 2.6.0 scheme.) The test methods below also use JUnit's @Test annotation, so a matching JUnit dependency needs to be in the pom as well.
After the dependencies are imported, IDEA starts downloading them automatically. This can take quite a long time, so wait patiently for the download to finish; if the entries are shown in red, the download has not completed.
Create the HdfsClient class
Note that when importing packages you need to pick the classes from org.apache.hadoop.
Create a directory
//Create a directory
@Test
public void testMkdirs() throws URISyntaxException, IOException, InterruptedException {
    //1. Create the configuration
    Configuration conf = new Configuration();
    //2. Get the file system
    FileSystem fs = FileSystem.get(new URI("hdfs://192.168.83.102:9000"), conf, "root");
    //3. Call the API
    fs.mkdirs(new Path("/hdfs/shell"));
    //4. Close the resource
    fs.close();
}
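To confirm the directory was actually created, FileSystem.exists() can be used. A minimal sketch under the same connection settings as above (the method name testMkdirsCheck is just for illustration):
//Check that the directory exists (sketch, same address and user as above)
@Test
public void testMkdirsCheck() throws URISyntaxException, IOException, InterruptedException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(new URI("hdfs://192.168.83.102:9000"), conf, "root");
    //exists() returns true if the path is present on HDFS
    System.out.println(fs.exists(new Path("/hdfs/shell")));
    fs.close();
}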
Upload a file
//Upload a file
@Test
public void testCopyFromLocal() throws URISyntaxException, IOException, InterruptedException {
    //1. Create the configuration
    Configuration conf = new Configuration();
    //2. Get the file system
    FileSystem fs = FileSystem.get(new URI("hdfs://192.168.83.102:9000"), conf, "root");
    //3. Call the API
    fs.copyFromLocalFile(new Path("H:\\test\\4.txt"), new Path("/hdfs/shell"));//local file path + HDFS path
    //4. Close the resource
    fs.close();
}
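copyFromLocalFile also has an overload that controls whether the local source is deleted and whether an existing target is overwritten. A minimal sketch assuming the same paths as above (the method name testCopyFromLocalOverwrite is just for illustration):
//Upload with explicit overwrite behaviour (sketch)
@Test
public void testCopyFromLocalOverwrite() throws URISyntaxException, IOException, InterruptedException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(new URI("hdfs://192.168.83.102:9000"), conf, "root");
    //arguments: delSrc = false (keep the local file), overwrite = true, local source, HDFS destination
    fs.copyFromLocalFile(false, true, new Path("H:\\test\\4.txt"), new Path("/hdfs/shell"));
    fs.close();
}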
Download a file
//Download a file
@Test
public void testCopyToLocal() throws URISyntaxException, IOException, InterruptedException {
    //1. Create the configuration
    Configuration conf = new Configuration();
    //2. Get the file system
    FileSystem fs = FileSystem.get(new URI("hdfs://192.168.83.102:9000"), conf, "root");
    //3. Call the API
    fs.copyToLocalFile(new Path("/hdfs/shell/4.txt"), new Path("H:\\test"));//HDFS path + local directory path
    //4. Close the resource
    fs.close();
}
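When downloading on Windows, a .crc checksum file may appear next to the downloaded file; copyToLocalFile has an overload that writes through the raw local file system and skips the checksum file. A minimal sketch under the same assumptions (the method name testCopyToLocalRaw is just for illustration):
//Download via the raw local file system, so no .crc checksum file is written (sketch)
@Test
public void testCopyToLocalRaw() throws URISyntaxException, IOException, InterruptedException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(new URI("hdfs://192.168.83.102:9000"), conf, "root");
    //arguments: delSrc = false, source on HDFS, local target directory, useRawLocalFileSystem = true
    fs.copyToLocalFile(false, new Path("/hdfs/shell/4.txt"), new Path("H:\\test"), true);
    fs.close();
}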
Delete a directory
//Delete a directory
@Test
public void testDelete() throws URISyntaxException, IOException, InterruptedException {
    //1. Create the configuration
    Configuration conf = new Configuration();
    //2. Get the file system
    FileSystem fs = FileSystem.get(new URI("hdfs://192.168.83.102:9000"), conf, "root");
    //3. Call the API
    //the path is deleted recursively when the FileSystem is closed
    fs.deleteOnExit(new Path("/hdfs"));
    //4. Close the resource
    fs.close();
}
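deleteOnExit only registers the path; the actual deletion happens when the FileSystem is closed. To delete immediately, FileSystem.delete can be called directly, with the second argument set to true for a recursive delete. A minimal sketch (the method name testDeleteNow is just for illustration):
//Delete immediately instead of on close (sketch)
@Test
public void testDeleteNow() throws URISyntaxException, IOException, InterruptedException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(new URI("hdfs://192.168.83.102:9000"), conf, "root");
    //delete(path, recursive): true is required to remove a non-empty directory
    boolean deleted = fs.delete(new Path("/hdfs"), true);
    System.out.println(deleted);
    fs.close();
}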
Complete code
package testmaven.kgc;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.jupiter.api.Test;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
/**
 * @Author lichangxin
 * @date 2020-09-04
 * @Des
 */
public class HdfsClient {
    //Create a directory
    @Test
    public void testMkdirs() throws URISyntaxException, IOException, InterruptedException {
        //1. Create the configuration
        Configuration conf = new Configuration();
        //2. Get the file system
        FileSystem fs = FileSystem.get(new URI("hdfs://192.168.83.102:9000"), conf, "root");
        //3. Call the API
        fs.mkdirs(new Path("/hdfs/shell"));
        //4. Close the resource
        fs.close();
    }
    //Upload a file
    @Test
    public void testCopyFromLocal() throws URISyntaxException, IOException, InterruptedException {
        //1. Create the configuration
        Configuration conf = new Configuration();
        //2. Get the file system
        FileSystem fs = FileSystem.get(new URI("hdfs://192.168.83.102:9000"), conf, "root");
        //3. Call the API
        fs.copyFromLocalFile(new Path("H:\\test\\4.txt"), new Path("/hdfs/shell"));//local file path + HDFS path
        //4. Close the resource
        fs.close();
    }
    //Download a file
    @Test
    public void testCopyToLocal() throws URISyntaxException, IOException, InterruptedException {
        //1. Create the configuration
        Configuration conf = new Configuration();
        //2. Get the file system
        FileSystem fs = FileSystem.get(new URI("hdfs://192.168.83.102:9000"), conf, "root");
        //3. Call the API
        fs.copyToLocalFile(new Path("/hdfs/shell/4.txt"), new Path("H:\\test"));//HDFS path + local directory path
        //4. Close the resource
        fs.close();
    }
    //Delete a directory
    @Test
    public void testDelete() throws URISyntaxException, IOException, InterruptedException {
        //1. Create the configuration
        Configuration conf = new Configuration();
        //2. Get the file system
        FileSystem fs = FileSystem.get(new URI("hdfs://192.168.83.102:9000"), conf, "root");
        //3. Call the API
        //the path is deleted recursively when the FileSystem is closed
        fs.deleteOnExit(new Path("/hdfs"));
        //4. Close the resource
        fs.close();
    }
}