Implementing an HDFS file operation utility class in Java

1. Create a static block with the HDFS configuration

    static Configuration conf = new Configuration();
    //Adjust the configuration values below to match your own installation
    static {
        //HDFS communication address
        String hdfsUrl = "hdfs://localhost:9000";
        conf.set("fs.defaultFS", hdfsUrl);
        //For an HA cluster, configure the nameservice instead, e.g.:
//        conf.set("dfs.nameservices", "nameservice1");
//        conf.set("dfs.ha.namenodes.nameservice1", "nn1,nn2");
//        conf.set("dfs.namenode.rpc-address.nameservice1.nn1", "xxx:8020");
//        conf.set("dfs.namenode.rpc-address.nameservice1.nn2", "xxx:8020");
//        conf.set("dfs.client.failover.proxy.provider.nameservice1"
//                ,"org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");

        //Or load the site configuration files from the classpath:
//        conf.addResource("classpath:/hadoop/core-site.xml");
//        conf.addResource("classpath:/hadoop/hdfs-site.xml");
//        conf.addResource("classpath:/hadoop/mapred-site.xml");
    }
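
If the client process runs under a different OS user than the intended HDFS user, a FileSystem handle can also be requested for an explicit user. This helper is a minimal sketch of ours, not part of the original class, and assumes the conf initialized above:

    //Sketch: obtain a FileSystem handle as an explicit user; the helper name
    //is illustrative. Note that this overload also throws InterruptedException.
    static FileSystem getFileSystemAs(String user) throws IOException, InterruptedException {
        return FileSystem.get(URI.create(conf.get("fs.defaultFS")), conf, user);
    }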

2. Create a new file

/**
 * Create a new file and write the given bytes to it
 * @author LiHaoShan
 * @param dst HDFS path
 * @param contents file content
 * */
public static Map<String,Object> createFile(String dst, byte[] contents) throws IOException{
    FileSystem fs = FileSystem.get(conf);
    //Target path
    Path dstPath = new Path(dst);
    String path = dstPath.toUri().getPath();
    //Open an output stream and write the content; try-with-resources closes it
    try (FSDataOutputStream outputStream = fs.create(dstPath)) {
        outputStream.write(contents);
    }
    Map<String,Object> map = new HashMap<>();
    map.put("savePath", path);
    fs.close();
    System.out.println("The file was created successfully!");
    return map;
}
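
Note that fs.create overwrites an existing file by default. If you instead need to add to an existing file, Hadoop 2.x also supports appends; the variant below is a sketch of ours, not part of the original class:

/**
 * Append bytes to an existing file (illustrative sketch; assumes appends
 * are enabled on the cluster, which is the default in Hadoop 2.x)
 */
public static void appendToFile(String dst, byte[] contents) throws IOException {
    FileSystem fs = FileSystem.get(conf);
    try (FSDataOutputStream out = fs.append(new Path(dst))) {
        out.write(contents);
    }
    fs.close();
}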

3. Upload local files to HDFS

/**
 * Upload a local file to HDFS
 * @author LiHaoShan
 * @param src local file path
 * @param dst HDFS destination path
 */
public static void uploadFile(String src,String dst) throws IOException{
    FileSystem fs = FileSystem.get(conf);
    //Local file path to upload
    Path srcPath = new Path(src);
    //HDFS target path
    Path dstPath = new Path(dst);
    if(!fs.exists(dstPath)){
        fs.mkdirs(dstPath);
    }
    //Copy the local file to HDFS; an overload takes a delSrc flag that,
    //when true, deletes the local source after the copy (default false)
    fs.copyFromLocalFile(srcPath, dstPath);

    //Print the file paths under the target directory
    System.out.println("Upload to "+conf.get("fs.defaultFS"));
    System.out.println("------------list files------------"+"\n");
    FileStatus[] fileStatus = fs.listStatus(dstPath);
    for (FileStatus file : fileStatus){
        System.out.println(file.getPath());
    }
    fs.close();
}
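
For reference, copyFromLocalFile has a fuller overload that can both delete the local source and control overwriting. A hedged sketch (the method name below is ours):

/**
 * Upload variant (sketch): delSrc=true deletes the local source after the
 * copy, overwrite=true replaces an existing target file
 */
public static void uploadFileAndDeleteSource(String src, String dst) throws IOException {
    FileSystem fs = FileSystem.get(conf);
    fs.copyFromLocalFile(true, true, new Path(src), new Path(dst));
    fs.close();
}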

4. File renaming

/**
 * file renaming
 * @author LiHaoShan
 * @param oldName old file name
 * @param newName new file name
 */
public static void rename(String oldName,String newName) throws IOException{
    FileSystem fs = FileSystem.get(conf);
    Path oldPath = new Path(oldName);
    Path newPath = new Path(newName);
    boolean isok = fs.rename(oldPath, newPath);
    if (isok) {
        System.out.println("rename ok!");
    }else{
        System.out.println("rename failure");
    }
    fs.close();
}

5. Delete files

/**
 * Delete a file or directory
 * @author LiHaoShan
 * @param dst HDFS file path
 * */
public static void delete(String dst){
    try{
        FileSystem fs = FileSystem.get(conf);
        Path path = new Path(dst);
        //delete(path, true) removes directories recursively; deleteOnExit
        //would only mark the path for deletion when the FileSystem is
        //closed, not delete it immediately
        boolean isok = fs.delete(path, true);
        if (isok) {
            System.out.println("delete ok!");
        }else{
            System.out.println("delete failure");
        }
        fs.close();
    }catch (IOException e){
        e.printStackTrace();
    }
}

6. Create a directory

/**
 * Create a directory
 * @author LiHaoShan
 * @param path HDFS path
 * */
public static void mkdir(String path) throws IOException{
    FileSystem fs = FileSystem.get(conf);
    Path srcPath = new Path(path);
    boolean isok = fs.mkdirs(srcPath);
    if (isok) {
        System.out.println("create " + path + " dir ok!");
    }else{
        System.out.println("create " + path + " dir failure");
    }
    fs.close();
}

7. Read the file content

/**
 * Read the content of the file
 * @author LiHaoShan
 * @param dst HDFS file path
 * */
public static void readFile(String dst) throws IOException{
    FileSystem fs = FileSystem.get(conf);
    Path srcPath = new Path(dst);
    InputStream in = null;
    try {
        in = fs.open(srcPath);
        //copy to the standard output stream
        IOUtils.copyBytes(in, System.out, 4096, false);
    } finally {
        IOUtils.closeStream(in);
    }
}
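
When the content is needed in memory rather than on stdout, the same open/copy pattern can target a ByteArrayOutputStream instead. A small sketch of ours, suitable only for files that fit in memory:

/**
 * Read a (small) HDFS file into a String (illustrative sketch)
 */
public static String readFileToString(String dst) throws IOException {
    FileSystem fs = FileSystem.get(conf);
    try (InputStream in = fs.open(new Path(dst));
         ByteArrayOutputStream bos = new ByteArrayOutputStream()) {
        IOUtils.copyBytes(in, bos, 4096, false);
        return bos.toString("UTF-8");
    }
}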

8. Traverse all files in the specified directory

/**
 * List all files in the specified directory (direPath)
 * @author LiHaoShan
 * @param direPath HDFS directory path
 */
public static void getDirectoryFromHdfs(String direPath){
    try {
        FileSystem fs = FileSystem.get(URI.create(direPath), conf);
        FileStatus[] filelist = fs.listStatus(new Path(direPath));
        //Print the header once, before the loop
        System.out.println("_________All files in directory " + direPath + "_________");
        for (FileStatus fileStatus : filelist) {
            System.out.println("Name: " + fileStatus.getPath().getName());
            System.out.println("Size: " + fileStatus.getLen());
            System.out.println("Path: " + fileStatus.getPath());
        }
        fs.close();
    } catch (IOException e){
        e.printStackTrace();
    }
}
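
Note that listStatus does not descend into subdirectories. For a recursive walk, FileSystem.listFiles(path, true) iterates over every file below the path; the helper below is a sketch of ours:

/**
 * Recursively list all files below a directory (illustrative sketch)
 */
public static void listFilesRecursively(String direPath) throws IOException {
    FileSystem fs = FileSystem.get(URI.create(direPath), conf);
    RemoteIterator<LocatedFileStatus> it = fs.listFiles(new Path(direPath), true);
    while (it.hasNext()) {
        LocatedFileStatus status = it.next();
        System.out.println(status.getPath() + " (" + status.getLen() + " bytes)");
    }
    fs.close();
}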

9. Download a file to the local filesystem

/**
 * Download a file to the local filesystem
 * @author LiHaoShan
 * @param dst HDFS path of the file to download
 * @param src local destination path
 * */
public static void downloadFromHdfs(String dst,String src) throws IOException{
    FileSystem fs = FileSystem.get(conf);
    Path path = new Path(dst);
    System.out.println("*************path:" + path);
    InputStream in = fs.open(path);
    OutputStream out = new FileOutputStream(src);
    //the final 'true' closes both streams when the copy finishes
    IOUtils.copyBytes(in, out, 4096, true);
}
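
Alternatively, the FileSystem API can perform the copy in a single call via copyToLocalFile, avoiding manual stream handling. A sketch of ours:

/**
 * Download variant using copyToLocalFile (illustrative sketch)
 */
public static void downloadWithCopyToLocal(String hdfsPath, String localPath) throws IOException {
    FileSystem fs = FileSystem.get(conf);
    fs.copyToLocalFile(new Path(hdfsPath), new Path(localPath));
    fs.close();
}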

Test: main method call

public static void main(String[] args) throws IOException {
//        System.setProperty("hadoop.home.dir", "/Library/hadoop-2.8.2");
    String today = new SimpleDateFormat("yyyy-MM-dd").format(new Date());
    String localFilePath = "/Users/lihaoshan/Documents/hdfsDoc/me2.jpeg";
    String hdfsFilePath = "/hdfsFile/" + today.substring(0, 7) + "/" + today + "/";

    //1. List all files in the specified directory
    getDirectoryFromHdfs("/hdfsDoc");

    //2. Create a new directory
    mkdir(hdfsFilePath);

    //3. Upload a file
    uploadFile(localFilePath, hdfsFilePath);

    //4. List the files under the new path
    getDirectoryFromHdfs(hdfsFilePath);

    //5. Read a file
    readFile("/hdfsFile/2018-03/2018-03-01/Six quantity statistics table.xlsx");

    //6. Rename a file
    rename("/user/rec/maimaimai/2016-11/2016-11-09/quan-2016-11-09", "/user/rec/maimaimai/2016-11/2016-11-09/quan-2016-11-08.txt");
    getDirectoryFromHdfs("/user/rec/maimaimai/2016-11/2016-11-09");

    //7. Create a file, write content to it, then read it back
    byte[] contents = "hello world hello world\n".getBytes();
    createFile("/HelloWord/test.txt", contents);
    readFile("/HelloWord/test.txt");

    //8. Delete a file
    delete("/hdfsFile/2018-03/2018-03-01/Six quantity statistics table.xlsx");

    //9. Delete a directory
    delete("/HelloWord");

    //10. Download a file to the local filesystem
    downloadFromHdfs("/doc/1/20180314/9118bce0-7da4-4058-846a-a3c6d785f62a.xlsx","/Users/lihaoshan/Desktop/9118bce0-7da4-4058-846a-a3c6d785f62a.xlsx");
}
