Operating the Hadoop HDFS file system from Java

1. Add the Maven dependencies (pom.xml)

  <properties>
    ......
    <hadoop.version>3.1.2</hadoop.version>
  </properties>

  <dependencies>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <version>${hadoop.version}</version>
    </dependency>

    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs</artifactId>
      <version>${hadoop.version}</version>
    </dependency>

    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-client</artifactId>
      <version>${hadoop.version}</version>
    </dependency>

    ......

  </dependencies>
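
Note that the fs.defaultFS address used in the code below has to match the NameNode address configured in the cluster's core-site.xml. For reference, a minimal sketch of that file (hdfs://localhost:9527 is this article's example value, not a Hadoop default):

  <configuration>
    <property>
      <name>fs.defaultFS</name>
      <value>hdfs://localhost:9527</value>
    </property>
  </configuration>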

 

 

2. Usage

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;

import java.io.*;
import java.net.URI;
import java.util.Arrays;
import java.util.Iterator;
import java.util.Map;

public class TestHadoop {

    private static Configuration conf;
    private static FileSystem fs;

    /**
     * Initialization
     * @throws Exception
     */
    public static void init() throws Exception {
        conf = new Configuration();
        // must match the NameNode address configured in core-site.xml
        conf.set("fs.defaultFS", "hdfs://localhost:9527");
        // get a FileSystem handle for HDFS as the given user (because of
        // permission problems on Windows, replace "zwj" with an account that
        // has rights on the cluster)
        fs = FileSystem.get(new URI("hdfs://localhost:9527"), conf, "zwj");
    }

    /**
     * Upload a file
     * @throws Exception
     */
    public static void upload() throws Exception {
        // passing true means overwrite if the file already exists
        FSDataOutputStream fout = fs.create(new Path("/mydir/001.jpg"), true);
        InputStream in = new FileInputStream("E:/tmp/qrcode/123.jpg");

        // copy the stream and close both ends when finished
        IOUtils.copyBytes(in, fout, 1024, true);
    }

    /**
     * Read from a specified position
     * @throws Exception
     */
    public static void random() throws Exception {
        FSDataInputStream fin = fs.open(new Path("/mydir/001.jpg"));
        // seek to the position to start reading from (0 = the beginning)
        fin.seek(0);

        OutputStream out = new FileOutputStream("E:/tmp/qrcode/111.jpg");

        IOUtils.copyBytes(fin, out, 1024, true);
    }

    /**
     * Print all entries in the Hadoop configuration
     * @throws Exception
     */
    public static void conf()throws Exception{
        Iterator<Map.Entry<String, String>> iterator = conf.iterator();
        while(iterator.hasNext()){
            Map.Entry<String, String> entry = iterator.next();
            System.out.println(entry);
        }
    }

    /**
     * Create directories
     * @throws Exception
     */
    public static void mkdir()throws Exception{
        boolean mkdirs = fs.mkdirs(new Path("/mydir/dir1/dir2"));
        if (mkdirs) {
            System.out.println("Directory created successfully");
        }
        fs.close();
    }

    /**
     * Delete files and directories
     * @throws Exception
     */
    public static void delete() throws Exception {
        // passing true deletes recursively
        boolean delete = fs.delete(new Path("/mydir"), true);
        if (delete) {
            System.out.println("Deleted successfully");
        }
        fs.close();
    }

    /**
     * List all files recursively
     * @throws Exception
     */
    public static void listFile() throws Exception {
        RemoteIterator<LocatedFileStatus> listFiles = fs.listFiles(new Path("/"), true);
        while (listFiles.hasNext()) {
            LocatedFileStatus lfs = listFiles.next();
            System.out.println("block size: " + lfs.getBlockSize());
            System.out.println("owner: " + lfs.getOwner());
            System.out.println("size: " + lfs.getLen());
            System.out.println("file name: " + lfs.getPath().getName());
            System.out.println("is directory: " + lfs.isDirectory());
            System.out.println("is file: " + lfs.isFile());
            System.out.println();
            BlockLocation[] blockLocations = lfs.getBlockLocations();
            for (BlockLocation blockLocation : blockLocations) {
                System.out.println("block offset: " + blockLocation.getOffset());
                System.out.println("block length: " + blockLocation.getLength());
                System.out.println("block names: " + Arrays.toString(blockLocation.getNames()));
                System.out.println("block hosts: " + Arrays.toString(blockLocation.getHosts()));
            }
            System.out.println("--------------------------");
        }
    }

    /**
     * List the files under a specified directory
     * @throws Exception
     */
    public static void listFile2()throws Exception{
        FileStatus[] listStatus = fs.listStatus(new Path("/"));
        for (FileStatus fileStatus : listStatus) {
            System.out.println("Block Size:" + fileStatus.getBlockSize ()); 
            System.out.println ( "belonging group:" + fileStatus.getOwner ()); 
            System.out.println ( "Size:" + fileStatus.getLen ()); 
            System.out.println ( "file name:" + fileStatus.getPath () getName ().); 
            System.out.println ( "whether the directory:" + fileStatus.isDirectory ()); 
            System.out.println ( "Are file: "+ fileStatus.isFile ()); 
        } 
    } 


    public static void main(String[] args) {
        try {
            init();
//            upload();
//            random();
//            conf();
//            mkdir();
//            delete();
            listFile();
//            listFile2();
        } catch (Exception e){
            e.printStackTrace();
        }
    }

}
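
The same FileSystem handle can also copy files out of HDFS. A minimal download sketch that would sit alongside the other methods (not part of the original class; the paths are example values):

    /**
     * Download a file from HDFS to the local disk
     * @throws Exception
     */
    public static void download() throws Exception {
        // four-argument overload: don't delete the source, and write through the
        // raw local file system so no .crc checksum file is created locally
        fs.copyToLocalFile(false, new Path("/mydir/001.jpg"),
                new Path("E:/tmp/qrcode/download.jpg"), true);
    }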

 
