The HDFS Java Client

API_01

//Get a configuration object (loads core-site.xml/hdfs-site.xml from the classpath)
Configuration conf = new Configuration();
//Print every entry in the configuration
Iterator<Entry<String, String>> it = conf.iterator();
while(it.hasNext()) {
    System.out.println(it.next());
}
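
Individual properties can also be set in code, and such programmatic settings take precedence over the XML files (unless a property is marked final there). A minimal sketch, reusing the values assumed in this post:

//Override configuration programmatically
conf.set("fs.defaultFS", "hdfs://192.168.248.143:9000"); //assumed NameNode address
conf.set("dfs.replication", "2"); //hypothetical replication factor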
//Get a FileSystem client instance, connecting as user "hadoop"
FileSystem fs = FileSystem.get(URI.create("hdfs://192.168.248.143:9000"),conf,"hadoop");
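
This three-argument overload throws InterruptedException in addition to IOException. If fs.defaultFS is already configured, a simpler overload connects as the current OS user; a sketch (the variable name fs2 is hypothetical):

//Uses fs.defaultFS from the configuration and the current OS user
FileSystem fs2 = FileSystem.get(conf);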

//Create a directory; returns true on success
boolean created = fs.mkdirs(new Path("/testMkdir"));

//Delete the directory; the second argument enables recursive deletion
boolean deleted = fs.delete(new Path("/testMkdir"),true);

//Upload: copy a local file to HDFS
fs.copyFromLocalFile(new Path("d:/aa.txt"),new Path("/access.log.copy"));
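
The mirror operation is copyToLocalFile; a sketch of the matching download (the local destination path is an assumption):

//Download: copy an HDFS file back to the local filesystem
fs.copyToLocalFile(new Path("/access.log.copy"),new Path("d:/aa.copy.txt"));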

//Recursively list all files (not directories) under the given path
RemoteIterator<LocatedFileStatus> iterator = fs.listFiles(new Path("/"), true);

while (iterator.hasNext()){
    LocatedFileStatus status = iterator.next();
    //System.out.println(status.toString());
    BlockLocation[] locations = status.getBlockLocations();
    for (BlockLocation location : locations) {
        System.out.println("Block length: "+location.getLength());
        //getHosts() returns the DataNode hostnames holding this block
        System.out.println("Block hosts: "+ Arrays.asList(location.getHosts()));
    }
}
//List the status of everything directly under the given path (listStatus is not recursive)
FileStatus[] fileStatuses = fs.listStatus(new Path("/"));
for (FileStatus status : fileStatuses) {
    System.out.println(status.getPath()+":"+ status.isDirectory());
}
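
The API_01 snippets above are written as one continuous method body. A minimal runnable wrapper under the same assumptions (the class name ClientTest01 is hypothetical):

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ClientTest01 {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        //Assumed NameNode address and user, as in the snippets above
        FileSystem fs = FileSystem.get(URI.create("hdfs://192.168.248.143:9000"), conf, "hadoop");
        try {
            System.out.println("mkdirs: " + fs.mkdirs(new Path("/testMkdir")));
        } finally {
            fs.close(); //release the client and its underlying connections
        }
    }
}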

API_02


import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;

/**
 * @author snow
 */
public class ClientTest02 {

    public static void main(String[] args) {
        //Invoke one of the demo methods below here, e.g. testDownloadByStream();
    }

    /**
     * Read a file starting from a given byte offset
     * @throws IOException
     * @throws InterruptedException
     */
    public static void testRead() throws IOException, InterruptedException {
        Configuration conf = new Configuration();

        FileSystem fs = FileSystem.get(URI.create("hdfs://192.168.248.143:9000"), conf, "hadoop");


        FSDataInputStream in = fs.open(new Path("/test02"));

        //Skip the first 12 bytes; the copy below starts from that offset
        in.seek(12);
        FileOutputStream os = new FileOutputStream("E:\\day07\\aa.txt");
        IOUtils.copy(in,os);
    }

    /**
     * Download a file via streams
     * @throws IOException
     * @throws InterruptedException
     */
    public static void testDownloadByStream() throws IOException, InterruptedException {
        Configuration conf = new Configuration();

        FileSystem fs = FileSystem.get(URI.create("hdfs://192.168.248.143:9000"), conf, "hadoop");


        FSDataInputStream inputStream = fs.open(new Path("/test02"));

        FileOutputStream os = new FileOutputStream("E:\\day07\\aa.txt");

        IOUtils.copy(inputStream,os);
    }


    /**
     * Upload a file via streams
     * @throws IOException
     * @throws InterruptedException
     */
    public static void testUploadByStream() throws IOException, InterruptedException {
        Configuration conf = new Configuration();

        FileSystem fs = FileSystem.get(URI.create("hdfs://192.168.248.143:9000"), conf, "hadoop");

        FSDataOutputStream outputStream = fs.create(new Path("/test02"));


        FileInputStream inputStream = new FileInputStream("d:/aa.txt");


        IOUtils.copy(inputStream,outputStream);
    }
}
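
FileSystem, FSDataInputStream, and FileOutputStream are all Closeable, but none of the methods above ever close them. A sketch of the download rewritten with try-with-resources (the method name is hypothetical; address, user, and paths are the same assumptions as above):

    public static void testDownloadClosingStreams() throws IOException, InterruptedException {
        Configuration conf = new Configuration();
        //try-with-resources closes the streams and the client even if the copy fails
        try (FileSystem fs = FileSystem.get(URI.create("hdfs://192.168.248.143:9000"), conf, "hadoop");
             FSDataInputStream in = fs.open(new Path("/test02"));
             FileOutputStream os = new FileOutputStream("E:\\day07\\aa.txt")) {
            IOUtils.copy(in, os);
        }
    }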

Reposted from blog.csdn.net/guo20082200/article/details/82290055