// Hadoop programming practice — HDFS: delete a file if it exists.

package test.hdfs;

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/ * 
 * 
 * @authr: Kouch 
 *   
 * function: delete hdfs become realized if the file exists; determining whether a file exists with the general use; 
 *   
 * Implementation: call hadoopAPI; 
 * 
 * / 
public  class DeletedFile { 

    public  static  void main (String [] args) throws IOException, a ClassNotFoundException, InterruptedException {
         // configure class 
        the configuration = the conf new new the configuration (); 

        // analog path;
        Url = String " HDFS: // localhost: 9000 / the User / Kouch / OUT6 " ; 
        
        // file system object 
        the FileSystem FS = the FileSystem. GET (URI.create (url), conf); 
        
        // Note: The configuration file is afraid segment more efficient use of the presence; 
        IF (IsExsit.isExsit (FS, URL)) {
             // System.out.println ( "delete Kaishi"); 
            IF (fs.delete ( new new the Path (URL), to true )) { // to true : All files folder; false: If this file does not exist other files deleted 
                . System OUT .println ( " delete " + url); 
            } 
            // System.out.println ( "jieshu delete");
        } The else { 
            . The System OUT .println (URL + " does not exist " ); 
        } 
        
        
    } 
}

 



// Origin: www.cnblogs.com/floakss/p/11455803.html