Java: upload a file to HDFS

package com.hqgf.testhdfs;

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class Hdfs {
    static Configuration conf = new Configuration();

    static {
        // HDFS HA client configuration: the logical nameservice "cluster" is backed by two NameNodes.
        conf.set("fs.defaultFS", "hdfs://cluster");
        conf.set("dfs.nameservices", "cluster");
        conf.set("dfs.ha.namenodes.cluster", "nn1,nn2");
        conf.set("dfs.namenode.rpc-address.cluster.nn1", "xxx.xx.x.xxx:8020");
        conf.set("dfs.namenode.rpc-address.cluster.nn2", "xxx.xx.x.xxx:8020");
        // Proxy provider that lets the client fail over between the active and standby NameNode.
        conf.set("dfs.client.failover.proxy.provider.cluster",
                "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
    }
    
    public static void getDirectoryFromHdfs(String direPath) {
        try {
            FileSystem fs = FileSystem.get(URI.create(direPath), conf);
            FileStatus[] fileList = fs.listStatus(new Path(direPath));
            System.out.println("_________ All files under " + direPath + " ______________");
            for (FileStatus fileStatus : fileList) {
                System.out.println("Name: " + fileStatus.getPath().getName());
                System.out.println("Size: " + fileStatus.getLen());
                System.out.println("Path: " + fileStatus.getPath());
            }
            fs.close();
        } catch (Exception e) {
            // Do not swallow errors silently; at least report them.
            e.printStackTrace();
        }
    }

    public static void uploadFile(String src, String dst) throws IOException {

        FileSystem fs = FileSystem.get(conf);
        Path srcPath = new Path(src); // local file to upload
        Path dstPath = new Path(dst); // target path on HDFS
        // The first argument tells copyFromLocalFile whether to delete the local source
        // after copying: true deletes it, false (the default) keeps it.
        fs.copyFromLocalFile(false, srcPath, dstPath);

        // print the files now under the target path
        System.out.println("Upload to " + conf.get("fs.defaultFS"));
        System.out.println("------------list files------------" + "\n");
        FileStatus[] fileStatus = fs.listStatus(dstPath);
        for (FileStatus file : fileStatus) {
            System.out.println(file.getPath());
        }
        fs.close();
    }
    
    public static void main(String[] args) throws IOException {

//        String localFilePath = "D:\\Project\\eclipse\\workspace\\DataCenterChanger\\test\\20180108.txt";
//        String hdfsFilePath = "/tmp/";
//        System.out.println(localFilePath);
//        System.out.println(hdfsFilePath);
//        uploadFile(localFilePath,hdfsFilePath);
        getDirectoryFromHdfs("/tmp/20180108.txt");

    }
}
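
For comparison, the same upload can also be done through the HDFS stream API instead of copyFromLocalFile: open the local file, create the target file with FileSystem.create, and copy the bytes with Hadoop's IOUtils. The sketch below is a minimal variant meant to sit in the same Hdfs class (it reuses the static conf above); the method name uploadFileViaStream and the 4 KB buffer size are illustrative choices, not part of the original code.

// Additional imports needed: java.io.BufferedInputStream, java.io.FileInputStream,
// java.io.InputStream, org.apache.hadoop.fs.FSDataOutputStream, org.apache.hadoop.io.IOUtils
public static void uploadFileViaStream(String src, String dst) throws IOException {
    FileSystem fs = FileSystem.get(conf);
    try (InputStream in = new BufferedInputStream(new FileInputStream(src));
         FSDataOutputStream out = fs.create(new Path(dst))) {
        // Copy in 4 KB chunks; 'false' leaves stream closing to try-with-resources.
        IOUtils.copyBytes(in, out, 4096, false);
    }
    fs.close();
}

copyFromLocalFile is the simpler choice when the source is a plain file on local disk; the stream variant is mainly useful when the data comes from somewhere else, such as another stream or generated content.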
