1、创建HDFS配置信息静态代码块
// Shared Hadoop configuration used by every HDFS helper in this class.
static Configuration conf = new Configuration();
// Adjust the values below to match your own installation.
static {
// Address of the HDFS NameNode.
String hdfsUri = "hdfs://localhost:9000";
conf.set("fs.defaultFS", hdfsUri);
// HA / nameservice setup, kept here as a reference for clustered installs:
// conf.set("dfs.nameservices", "nameservice1");
// conf.set("dfs.ha.namenodes.nameservice1", "nn1,nn2");
// conf.set("dfs.namenode.rpc-address.nameservice1.nn1", "xxx:8020");
// conf.set("dfs.namenode.rpc-address.nameservice1.nn2", "xxx:8020");
// conf.set("dfs.client.failover.proxy.provider.nameservice1"
// ,"org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
// Alternatively, load site files from the classpath:
// conf.addResource("classpath:/hadoop/core-site.xml");
// conf.addResource("classpath:/hadoop/hdfs-site.xml");
// conf.addResource("classpath:/hadoop/mapred-site.xml");
}
2、创建新文件
/**
 * Creates a new file on HDFS and writes the given bytes as its content.
 *
 * @param dst      target HDFS path of the file to create
 * @param contents bytes to write into the new file
 * @return a map with key {@code "savePath"} holding the path portion of the target URI
 * @throws IOException if the filesystem cannot be reached or the write fails
 */
public static Map<String,Object> createFile(String dst , byte[] contents) throws IOException{
Path dstPath = new Path(dst);
String path = dstPath.toUri().getPath();
// try-with-resources closes the stream and the FileSystem even when the
// write throws; the original leaked both on any exception.
try (FileSystem fs = FileSystem.get(conf);
     FSDataOutputStream outputStream = fs.create(dstPath)) {
outputStream.write(contents);
}
Map<String,Object> map= Maps.newHashMap();
map.put("savePath",path);
System.out.println("文件创建成功!");
return map;
}
3、上传本地文件至HDFS
/**
 * Uploads a local file to HDFS, creating the target directory first if it
 * does not exist, then lists the contents of the target directory.
 *
 * @param src local file path to upload
 * @param dst HDFS target directory path
 * @throws IOException if the filesystem cannot be reached or the copy fails
 */
public static void uploadFile(String src,String dst) throws IOException{
Path srcPath = new Path(src);
Path dstPath = new Path(dst);
// try-with-resources closes the FileSystem even if the copy throws;
// the original leaked it on any exception.
try (FileSystem fs = FileSystem.get(conf)) {
if(!fs.exists(dstPath)){
fs.mkdirs(dstPath);
}
// First boolean parameter of copyFromLocalFile (delSrc) defaults to false,
// so the local source file is kept.
fs.copyFromLocalFile(srcPath, dstPath);
// "fs.default.name" is deprecated; read the same key the static block sets.
System.out.println("Upload to "+conf.get("fs.defaultFS"));
System.out.println("------------list files------------"+"\n");
for (FileStatus file : fs.listStatus(dstPath)) {
System.out.println(file.getPath());
}
}
}
4、文件重命名
/**
 * Renames (moves) a file or directory on HDFS.
 *
 * @param oldName existing HDFS path
 * @param newName new HDFS path
 * @throws IOException if the filesystem cannot be reached
 */
public static void rename(String oldName,String newName) throws IOException{
// try-with-resources closes the FileSystem even if rename throws;
// the original leaked it on any exception.
try (FileSystem fs = FileSystem.get(conf)) {
Path oldPath = new Path(oldName);
Path newPath = new Path(newName);
// rename returns false (rather than throwing) when the source is missing
// or the destination already exists.
boolean isok = fs.rename(oldPath, newPath);
if(isok){
System.out.println("rename ok!");
}else{
System.out.println("rename failure");
}
}
}
5、删除文件
/**
 * Deletes a file or directory (recursively) from HDFS.
 *
 * <p>Errors are reported to stderr instead of being thrown, preserving the
 * original no-throws signature.
 *
 * @param dst HDFS path of the file or directory to delete
 */
public static void delete(String dst){
try (FileSystem fs = FileSystem.get(conf)) {
Path path = new Path(dst);
// fs.delete(path, true) removes the path immediately; the original used
// deleteOnExit, which only deletes when the FileSystem is closed.
boolean isok = fs.delete(path, true);
if(isok){
System.out.println("delete ok!");
}else{
System.out.println("delete failure");
}
}catch (Exception e){
// Report the failure instead of silently swallowing it as before.
System.err.println("delete failed for " + dst + ": " + e);
}
}
6、创建目录
/**
 * Creates a directory (including any missing parents) on HDFS.
 *
 * @param path HDFS path of the directory to create
 * @throws IOException if the filesystem cannot be reached
 */
public static void mkdir(String path) throws IOException{
// try-with-resources closes the FileSystem even if mkdirs throws;
// the original leaked it on any exception.
try (FileSystem fs = FileSystem.get(conf)) {
Path srcPath = new Path(path);
boolean isok = fs.mkdirs(srcPath);
if(isok){
System.out.println("create " + path + " dir ok!");
}else{
System.out.println("create " + path + " dir failure");
}
}
}
7、读取文件内容
/**
 * Reads a file from HDFS and copies its content to standard output.
 *
 * @param dst HDFS path of the file to read
 * @throws IOException if the filesystem cannot be reached or the read fails
 */
public static void readFile(String dst) throws IOException{
Path srcPath = new Path(dst);
// try-with-resources closes both the stream and the FileSystem; the
// original closed only the stream and leaked the FileSystem.
try (FileSystem fs = FileSystem.get(conf);
     InputStream in = fs.open(srcPath)) {
// Copy to stdout; the final 'false' tells copyBytes not to close the
// streams itself (try-with-resources handles that).
IOUtils.copyBytes(in, System.out, 4096, false);
}
}
8、遍历指定目录下的所有文件
/**
 * Lists every entry directly under the given HDFS directory, printing
 * name, size, and full path for each.
 *
 * <p>Errors are reported to stderr instead of being thrown, preserving the
 * original no-throws signature.
 *
 * @param direPath HDFS directory path to list
 */
public static void getDirectoryFromHdfs(String direPath){
try (FileSystem fs = FileSystem.get(URI.create(direPath), conf)) {
FileStatus[] filelist = fs.listStatus(new Path(direPath));
// Print the header once; the original printed it once per entry.
System.out.println("_________" + direPath + "目录下所有文件______________");
for (FileStatus fileStatus : filelist) {
System.out.println("Name:"+fileStatus.getPath().getName());
System.out.println("Size:"+fileStatus.getLen());
System.out.println("Path:"+fileStatus.getPath());
}
} catch (Exception e){
// Report the failure instead of silently swallowing it as before.
System.err.println("listing failed for " + direPath + ": " + e);
}
}
9、下载文件至本地
/**
 * Downloads a file from HDFS to the local filesystem.
 *
 * <p>NOTE(review): the parameter names are reversed relative to convention —
 * {@code dst} is the HDFS source path and {@code src} is the local target
 * path. Kept as-is for caller compatibility.
 *
 * @param dst HDFS path of the file to download
 * @param src local path to write the downloaded file to
 * @throws IOException if the filesystem cannot be reached or the copy fails
 */
public static void downloadFromHdfs(String dst,String src) throws IOException{
Path path=new Path(dst);
System.out.println("*************path:"+path);
// try-with-resources closes the streams and the FileSystem; the original
// never closed the FileSystem and leaked both streams if open failed.
try (FileSystem fs = FileSystem.get(conf);
     InputStream in = fs.open(path);
     OutputStream out = new FileOutputStream(src)) {
// 'false': try-with-resources owns closing, not copyBytes.
IOUtils.copyBytes(in, out, 4096, false);
}
}
测试:main方法调用
// Demo driver exercising each HDFS helper in sequence. Paths are hard-coded
// for the author's environment — adjust before running. Later steps depend on
// earlier ones (e.g. upload before read), so the order matters.
public static void main(String[] args) throws IOException {
// Uncomment on machines where HADOOP_HOME is not set in the environment.
// System.setProperty("hadoop.home.dir", "/Library/hadoop-2.8.2");
String today = new SimpleDateFormat("yyyy-MM-dd").format(new Date());
String localFilePath = "/Users/lihaoshan/Documents/hdfsDoc/me2.jpeg";
// Date-partitioned target dir: /hdfsFile/yyyy-MM/yyyy-MM-dd/
String hdfsFilePath = "/hdfsFile/"+ today.substring(0,7) +"/"+today+"/" ;
// 1. List every file under the given directory
getDirectoryFromHdfs("/hdfsDoc");
// 2. Create a new directory
mkdir(hdfsFilePath);
// 3. Upload a local file
uploadFile(localFilePath, hdfsFilePath);
// 4. List the files under the upload target
getDirectoryFromHdfs(hdfsFilePath);
// 5. Read a file's content
readFile("/hdfsFile/2018-03/2018-03-01/六量统计表.xlsx");
// 6. Rename a file
rename("/user/rec/maimaimai/2016-11/2016-11-09/quan-2016-11-09", "/user/rec/maimaimai/2016-11/2016-11-09/quan-2016-11-08.txt");
getDirectoryFromHdfs("/user/rec/maimaimai/2016-11/2016-11-09");
// 7. Create a file and write content into it
byte[] contents = "hello world 世界你好\n".getBytes();
createFile("/HelloWord/test.txt",contents);
// Read the file's content back
readFile("/user/rec/maimaimai/2018-03/2018-03-05/test.txt");
// 8. Delete a file
delete("/hdfsFile/2018-03/2018-03-01/六量统计表.xlsx"); // path is resolved against the default filesystem
// 9. Delete a directory
delete("/HelloWord");
// 10. Download a file to the local filesystem
downloadFromHdfs("/doc/1/20180314/9118bce0-7da4-4058-846a-a3c6d785f62a.xlsx","/Users/lihaoshan/Desktop/9118bce0-7da4-4058-846a-a3c6d785f62a.xlsx");
}