Preparación del entorno
Primero, después de instalar Maven y configurar sus variables de entorno, cree un proyecto Maven con IntelliJ IDEA.
A continuación, importe las dependencias correspondientes en el pom.xml:
<!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-common -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>2.6.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-hdfs -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<version>2.6.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-core -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-core</artifactId>
<version>1.2.0</version>
</dependency>
Cabe señalar que la versión de las dependencias debe corresponder a su propia versión de Hadoop; la versión usada en este artículo es la 2.6.0. (Nota: el artefacto hadoop-core pertenece a la rama heredada 1.x y no sigue la numeración 2.x; verifique si realmente lo necesita antes de incluirlo junto a hadoop-common/hadoop-hdfs 2.6.0.)
Después de importar, IDEA comenzará a descargar las dependencias automáticamente. La descarga puede tardar bastante; espere pacientemente a que se complete. Si las dependencias aparecen marcadas en rojo, significa que la descarga aún no ha terminado.
Crear clase HdfsClient
Tenga en cuenta que debe seleccionar el paquete en org.apache.hadoop al importar el paquete
Crear una carpeta
/**
 * Creates the directory /hdfs/shell on the remote HDFS cluster.
 *
 * @throws URISyntaxException   if the NameNode URI is malformed
 * @throws IOException          on any HDFS communication failure
 * @throws InterruptedException if the connecting thread is interrupted
 */
@Test
public void testMkdirs() throws URISyntaxException, IOException, InterruptedException {
    // 1. Build the client configuration
    Configuration conf = new Configuration();
    // 2. Obtain the file system as user "root"; try-with-resources closes it
    //    even when the API call below throws (the original leaked the handle)
    try (FileSystem fs = FileSystem.get(new URI("hdfs://192.168.83.102:9000"), conf, "root")) {
        // 3. Call the API
        fs.mkdirs(new Path("/hdfs/shell"));
    }
}
subir archivos
/**
 * Uploads the local file H:\test\4.txt into the HDFS directory /hdfs/shell.
 *
 * @throws URISyntaxException   if the NameNode URI is malformed
 * @throws IOException          on any HDFS communication failure
 * @throws InterruptedException if the connecting thread is interrupted
 */
@Test
public void testCopyFromLocal() throws URISyntaxException, IOException, InterruptedException {
    // 1. Build the client configuration
    Configuration conf = new Configuration();
    // 2. Obtain the file system as user "root"; try-with-resources closes it
    //    even when the API call below throws (the original leaked the handle)
    try (FileSystem fs = FileSystem.get(new URI("hdfs://192.168.83.102:9000"), conf, "root")) {
        // 3. Call the API: local source path + remote destination path
        fs.copyFromLocalFile(new Path("H:\\test\\4.txt"), new Path("/hdfs/shell"));
    }
}
descargar archivo
/**
 * Downloads /hdfs/shell/4.txt from HDFS into the local folder H:\test.
 *
 * @throws URISyntaxException   if the NameNode URI is malformed
 * @throws IOException          on any HDFS communication failure
 * @throws InterruptedException if the connecting thread is interrupted
 */
@Test
public void testCopyToLocal() throws URISyntaxException, IOException, InterruptedException {
    // 1. Build the client configuration
    Configuration conf = new Configuration();
    // 2. Obtain the file system as user "root"; try-with-resources closes it
    //    even when the API call below throws (the original leaked the handle)
    try (FileSystem fs = FileSystem.get(new URI("hdfs://192.168.83.102:9000"), conf, "root")) {
        // 3. Call the API: remote source path + local destination folder
        fs.copyToLocalFile(new Path("/hdfs/shell/4.txt"), new Path("H:\\test"));
    }
}
Eliminar carpeta
/**
 * Recursively deletes the /hdfs directory tree on HDFS.
 *
 * <p>Uses {@code fs.delete(path, true)} for an immediate recursive delete.
 * The original {@code deleteOnExit} only marks the path for deletion when the
 * FileSystem is closed and silently ignores failures.
 *
 * @throws URISyntaxException   if the NameNode URI is malformed
 * @throws IOException          on any HDFS communication failure
 * @throws InterruptedException if the connecting thread is interrupted
 */
@Test
public void testDelete() throws URISyntaxException, IOException, InterruptedException {
    // 1. Build the client configuration
    Configuration conf = new Configuration();
    // 2. Obtain the file system as user "root"; try-with-resources closes it
    //    even when the API call below throws (the original leaked the handle)
    try (FileSystem fs = FileSystem.get(new URI("hdfs://192.168.83.102:9000"), conf, "root")) {
        // 3. Call the API: second argument true = recursive delete
        fs.delete(new Path("/hdfs"), true);
    }
}
Código completo
package testmaven.kgc;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.jupiter.api.Test;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
/**
 * HDFS client examples: create a directory, upload a file, download a file and
 * delete a directory tree through the Hadoop {@link FileSystem} API.
 *
 * <p>Each test opens its own FileSystem handle in a try-with-resources block so
 * the connection is always released, even when an API call throws (the original
 * code called {@code fs.close()} unconditionally after the call and leaked the
 * handle on failure).
 *
 * @Author lichangxin
 * @date 2020-09-04
 * @Des HDFS Java API walkthrough
 */
public class HdfsClient {
    /** NameNode URI of the target cluster — adjust to your environment. */
    private static final String HDFS_URI = "hdfs://192.168.83.102:9000";
    /** HDFS user the operations are performed as. */
    private static final String HDFS_USER = "root";

    /**
     * Opens a FileSystem handle against the cluster with a fresh configuration.
     * The caller is responsible for closing it (use try-with-resources).
     */
    private FileSystem openFileSystem() throws URISyntaxException, IOException, InterruptedException {
        return FileSystem.get(new URI(HDFS_URI), new Configuration(), HDFS_USER);
    }

    /** Creates the directory /hdfs/shell on HDFS. */
    @Test
    public void testMkdirs() throws URISyntaxException, IOException, InterruptedException {
        try (FileSystem fs = openFileSystem()) {
            fs.mkdirs(new Path("/hdfs/shell"));
        }
    }

    /** Uploads the local file H:\test\4.txt into /hdfs/shell (local source + remote destination). */
    @Test
    public void testCopyFromLocal() throws URISyntaxException, IOException, InterruptedException {
        try (FileSystem fs = openFileSystem()) {
            fs.copyFromLocalFile(new Path("H:\\test\\4.txt"), new Path("/hdfs/shell"));
        }
    }

    /** Downloads /hdfs/shell/4.txt into the local folder H:\test (remote source + local destination). */
    @Test
    public void testCopyToLocal() throws URISyntaxException, IOException, InterruptedException {
        try (FileSystem fs = openFileSystem()) {
            fs.copyToLocalFile(new Path("/hdfs/shell/4.txt"), new Path("H:\\test"));
        }
    }

    /**
     * Recursively deletes the /hdfs directory tree.
     * Uses {@code delete(path, true)} for an immediate recursive delete; the
     * original {@code deleteOnExit} only deletes when the FileSystem is closed
     * and silently ignores failures.
     */
    @Test
    public void testDelete() throws URISyntaxException, IOException, InterruptedException {
        try (FileSystem fs = openFileSystem()) {
            fs.delete(new Path("/hdfs"), true);
        }
    }
}