// Uploads a local file to HDFS.

import java.io.IOException;
 
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class UploadFile {
     public static void main(String[] args) throws IOException {
         Configuration conf = new Configuration();
         //conf.addResource(new Path("conf/hadoop-default.xml"));
         //conf.addResource(new Path("conf/hadoop-site.xml"));
         //通过conf来指定要操作的HDFS
         FileSystem hdfs = FileSystem.get(conf);
         //要上传的源文件所在路径
         Path src = new Path( "D:\\v.txt" );
         //hadoop文件系统的跟目录
         Path dst = new Path( "/" );
         //将源文件copy到hadoop文件系统
         hdfs.copyFromLocalFile(src, dst);
         System.out.println( "上传到" +conf.get( "fs.default.name" ));
         FileStatus files[] = hdfs.listStatus(dst);
         for ( int i= 0 ;i<files.length;i++)
         {
             System.out.println(files[i].getPath());
         }
     }
}

// Adapted from blog.csdn.net/qq_33238935/article/details/80854929