// Not much to say — the code is loaded directly. Note that core-site.xml/hdfs-site.xml must be added to the classpath.
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;

import org.junit.Test;
public class hdfs_jdbc { private String user = “hive”; //store keytab path private String keytab = “D:\t…”; //hdfs path private String dir = “hdfs://…”;
/**
 * Logs in via Kerberos and lists the entries under {@code dir} on HDFS.
 * <p>
 * Despite the name, the folder-creation call ({@code fs.mkdirs(path1)}) is
 * left commented out exactly as in the original demo — the method currently
 * only lists files.
 *
 * @throws IOException if the Kerberos login or the HDFS access fails
 */
@Test
public void createfolderOnHDFS() throws IOException {
    // Configuration auto-loads fs.defaultFS from core-site.xml on the classpath.
    Configuration conf = new Configuration();
    // Tell the JVM where the Kerberos realm configuration lives.
    System.setProperty("java.security.krb5.conf", "D:\\t....");
    // Acquire a Kerberos ticket from the keytab.
    UserGroupInformation.loginUserFromKeytab(user, keytab);
    // try-with-resources: the original never closed the FileSystem (leak).
    try (FileSystem fs = FileSystem.get(conf)) {
        // Folder that would be created (placeholder path).
        Path path1 = new Path("hdfs://......");
        // fs.mkdirs(path1);
        // listStatus returns one FileStatus per entry under the given path.
        FileStatus[] files = fs.listStatus(new Path(dir));
        for (FileStatus file1 : files) {
            System.out.println(file1.toString());
        }
    }
}
/**
 * Creates a file on HDFS and writes a (currently empty) string into it.
 *
 * @throws IOException if the Kerberos login or the HDFS write fails
 */
@Test
public void createFileOnHDFS() throws IOException {
    // The original declared local `user`/`dir` variables that shadowed the
    // fields with identical/unused values; removed.
    Configuration conf = new Configuration();
    System.setProperty("java.security.krb5.conf", "D:\\t....");
    UserGroupInformation.loginUserFromKeytab(user, keytab);
    // try-with-resources closes bout -> fsout -> fs in the correct order;
    // the original closed fsout again after bout, which is redundant, and
    // leaked everything if write() threw.
    try (FileSystem fs = FileSystem.get(conf);
         FSDataOutputStream fsout = fs.create(new Path("hdfs://......txt"));
         BufferedOutputStream bout = new BufferedOutputStream(fsout)) {
        String content = "";
        // Explicit charset instead of the platform default.
        byte[] bytes = content.getBytes(StandardCharsets.UTF_8);
        bout.write(bytes, 0, bytes.length);
    }
    System.out.println("文件创建完毕!");
}
/**
* 在本地拷贝到HDFS
* @throws IOException */
@Test
public void localToHDFS() throws IOException{
Configuration conf = new Configuration();
System.setProperty("java.security.krb5.conf","D:\\t....");
UserGroupInformation.loginUserFromKeytab(user, keytab);
FileSystem fs = FileSystem.get(conf);
fs.copyFromLocalFile(new Path("D:\\.....txt"),new Path( "/../....";);
System.out.println("文件拷贝完毕!");
}
/**
 * Renames a file on HDFS and reports success or failure on stdout.
 *
 * @throws IOException if the Kerberos login fails
 */
@Test
public void renameOnHDFS() throws IOException {
    Configuration conf = new Configuration();
    System.setProperty("java.security.krb5.conf", "D:\\t....");
    UserGroupInformation.loginUserFromKeytab(user, keytab);
    boolean isSuccess;
    // try-with-resources: the original leaked the FileSystem.
    try (FileSystem fs = FileSystem.get(conf)) {
        Path oldFileName = new Path("/../....txt");
        Path newFileName = new Path("/../....txt");
        try {
            isSuccess = fs.rename(oldFileName, newFileName);
        } catch (IOException e) {
            // Best-effort rename: keep reporting failure instead of
            // propagating, but surface the cause instead of swallowing it.
            e.printStackTrace();
            isSuccess = false;
        }
    }
    System.out.println(isSuccess ? "重命名成功!" : "重命名失败!");
}
/**
* 删文件
* @throws IOException */
@Test
public void removeOnHDFS() throws IOException{
Configuration conf = new Configuration();
System.setProperty("java.security.krb5.conf",D:\\t....");
UserGroupInformation.loginUserFromKeytab(user, keytab);
FileSystem fs = FileSystem.get(conf);
fs.delete(new Path("/../....txt"), true);
}
/**
* DNFS下载到本地
* @throws IOException */
@Test
public void downloadFromDNFS() throws IOException{
Configuration conf = new Configuration();
System.setProperty("java.security.krb5.conf",D:\\t....");
UserGroupInformation.loginUserFromKeytab(user, keytab);
FileSystem fs = FileSystem.get(conf);
Path path1 = new Path("hdfs://......txt");//建文件
FSDataInputStream hdfsInStream = fs.open(path1);
OutputStream out = new FileOutputStream("D:\\.....txt");
byte[] ioBuffer = new byte[1024];
int readLen = hdfsInStream.read(ioBuffer);
while(-1 != readLen){
out.write(ioBuffer, 0, readLen);
readLen = hdfsInStream.read(ioBuffer);
}
conf.setBoolean("fs.hdfs.impl.disable.cache", true);//出现Filesystem closed时加上
out.close();
hdfsInStream.close();
fs.close();
}
/**
 * Reads a file from HDFS and copies its raw bytes to stdout.
 * <p>
 * Fixes from the original: the streams leaked if a read threw, and
 * {@code System.out} was never flushed after raw {@code write()} calls, so
 * output could be lost.
 *
 * @throws IOException if the Kerberos login fails (HDFS I/O errors are
 *     caught and printed, matching the original behavior)
 */
@Test
public void readFromDNFS() throws IOException {
    System.setProperty("java.security.krb5.conf", "D:\\......");
    UserGroupInformation.loginUserFromKeytab(user, keytab);
    try (FileSystem fs = FileSystem.get(new Configuration());
         FSDataInputStream hdfsInStream = fs.open(new Path("hdfs://.......txt"))) {
        byte[] ioBuffer = new byte[1024];
        int readLen;
        while ((readLen = hdfsInStream.read(ioBuffer)) != -1) {
            // Raw bytes, not println: preserves the file content exactly.
            System.out.write(ioBuffer, 0, readLen);
        }
        // PrintStream.write(byte[],int,int) does not auto-flush.
        System.out.flush();
    } catch (IOException e) {
        e.printStackTrace();
    }
}