Establish a connection and retrieve file information from Hadoop (HDFS)
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
/**
 * Recursively lists every file under the HDFS root directory ("/"),
 * printing each file's full path, its name, and its number of blocks.
 *
 * <p>Connects to the NameNode at {@code hdfs://虚拟机IP地址:9000} as user
 * "hadoop". The host portion is a placeholder ("VM IP address") — replace
 * it with the actual NameNode host before running.
 */
public class ListFile {
    public static void main(String[] args) throws Exception {
        // FileSystem is Closeable: try-with-resources guarantees the
        // connection is released even if listing throws mid-iteration.
        try (FileSystem file = FileSystem.get(
                new URI("hdfs://虚拟机IP地址:9000"), new Configuration(), "hadoop")) {
            // Second argument `true` requests recursive listing.
            RemoteIterator<LocatedFileStatus> iterator = file.listFiles(new Path("/"), true);
            while (iterator.hasNext()) {
                LocatedFileStatus fileStatus = iterator.next();
                Path path = fileStatus.getPath();
                System.out.println(path.toString() + " " + path.getName());
                // Block locations tell us how many HDFS blocks the file occupies.
                BlockLocation[] locations = fileStatus.getBlockLocations();
                System.out.println("block" + locations.length);
            }
        }
    }
}
Write a utility class for connecting to Hadoop (HDFS)
package com.clouddisk.cloud.util;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
/**
 * Utility class for opening and closing a connection to HDFS.
 *
 * <p>State is held in static fields, so all instances share one cached
 * {@link FileSystem} handle. Call {@link #getConf()} to connect, then
 * {@link #release()} when finished.
 *
 * <p>NOTE(review): not thread-safe — concurrent getConf()/release() calls
 * race on the shared static fields; confirm single-threaded use by callers.
 */
public class HDFSUtils {
    // Shared Hadoop configuration; created fresh on every getConf() call.
    private static Configuration conf;
    // Cached FileSystem handle; null until getConf() succeeds and after release().
    private static FileSystem fs;
    // NameNode RPC address used for every connection.
    private static String address = "hdfs://192.168.43.60:9000";

    /**
     * Connects to HDFS at {@link #address} as user "hadoop" and returns the
     * {@link FileSystem} handle (also cached for {@link #release()}).
     *
     * <p>NOTE(review): the name is misleading — it returns a FileSystem, not a
     * Configuration — but is kept unchanged for existing callers.
     *
     * @return the connected FileSystem
     * @throws URISyntaxException   if {@link #address} is malformed
     * @throws IOException          if the connection fails
     * @throws InterruptedException if the connecting thread is interrupted
     */
    public FileSystem getConf() throws URISyntaxException, IOException, InterruptedException {
        conf = new Configuration();
        fs = FileSystem.get(new URI(address), conf, "hadoop");
        return fs;
    }

    /**
     * Closes the cached FileSystem if one is open. Safe to call before
     * {@link #getConf()} or more than once (previously threw
     * NullPointerException when fs was still null).
     *
     * @throws IOException if closing the connection fails
     */
    public void release() throws IOException {
        if (fs != null) {
            fs.close();
            fs = null; // drop the stale handle so it cannot be reused after close
        }
    }

    /**
     * Returns the configured HDFS NameNode address.
     *
     * @return the address string, e.g. {@code hdfs://192.168.43.60:9000}
     */
    public String getAddress() {
        return address;
    }
}