Big Data Basics 4: The Java API for HDFS (Part 1)

The Java API for HDFS:

  Requirements:

    Create a Maven project and import the jar packages Hadoop needs via pom.xml (see the dependency sketch after this list)

    Write a test class that connects to Hadoop

    Download a.txt from HDFS to the root directory of the D: drive

    Upload pom.xml to the HDFS root directory, solving the upload permission problem

    Write a second way of connecting to Hadoop

    Write code that creates the HDFS directory /a/b

    Write code that deletes the HDFS directory /a/b
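
Below is a minimal sketch of the pom.xml dependency section for the first task. The hadoop-client version (2.7.3 here) is an assumption and should be changed to match your cluster:

<!-- pom.xml dependency sketch; 2.7.3 is an assumed version, use your cluster's -->
<dependencies>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>2.7.3</version>
    </dependency>
    <dependency>
        <groupId>junit</groupId>
        <artifactId>junit</artifactId>
        <version>4.12</version>
        <scope>test</scope>
    </dependency>
</dependencies>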

 

The code is as follows:

 

package com.etc;

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.Before;
import org.junit.Test;

public class HDFSAPI {
    
    
private FileSystem fileSystem = null;
    
    
    //********************************Initialize the connection to HDFS before each test
    @Before
    public void init() throws Exception{
        //To establish a connection with HDFS, you need to know the Namenode address.
        Configuration conf = new Configuration();
        
        
        //First way: set the NameNode address on the Configuration and the user via a system property
        //System.setProperty("HADOOP_USER_NAME", "root");    //solves the upload permission problem
        //conf.set("fs.defaultFS", "hdfs://192.168.43.73:9000");
        //fileSystem = FileSystem.get(conf);
        //Second way (shorthand): pass the NameNode URI and the user name directly
        fileSystem = FileSystem.get(new URI("hdfs://192.168.43.73:9000"), conf, "root");
    }
    
    //********************Upload file from local to server
    @Test
    public void testUpload() throws Exception{
        //The connection was established in init(); open a local file as the input stream
        InputStream in = new FileInputStream("D:/b.txt");
        
        //Open an output stream using the fileSystem of hdfs
        FSDataOutputStream out = fileSystem.create(new Path("/b.txt"));
        //Copy from the local input stream to the HDFS output stream; the final 'true' closes both streams
        IOUtils.copyBytes(in, out, 1024, true);
        //Close the FileSystem connection
        fileSystem.close();
    }
    //******************** Delete a folder
    @Test
    public void testDel() throws Exception{
        //Delete /a; the second argument enables recursive deletion
        boolean flag = fileSystem.delete(new Path("/a"), true);
        System.out.println(flag);
        fileSystem.close();
    }
    
    //********************Create folder
    @Test
    public void testMkdir() throws IOException{
        fileSystem.mkdirs(new Path("/a/b"));
        //Close the fileSystem connection
        fileSystem.close();
    }
    
    
    
    //************************************Download files from server to local
    public static void main(String[] args) throws Exception {
        //To establish a connection with HDFS, you need to know the NameNode address
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://192.168.43.73:9000");
        FileSystem fileSystem = FileSystem.get(conf);
        //Open an input stream on the HDFS file
        InputStream in = fileSystem.open(new Path("/a.txt"));

        //Open a local output stream
        OutputStream out = new FileOutputStream("D:/123.txt");

        //Copy from the input stream to the output stream; the final 'true' closes both streams
        IOUtils.copyBytes(in, out, 1024, true);

        //Close the FileSystem connection (the streams were already closed by copyBytes)
        fileSystem.close();
    }

}
HDFSAPI.java
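
For the "second way of connecting" task, the Configuration-based approach appears only as comments inside init(). Here is a minimal standalone sketch of it, assuming the same NameNode address; the class name HDFSConnectDemo is hypothetical, and HADOOP_USER_NAME is the system property used to address the upload permission problem:

package com.etc;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

//Sketch of the Configuration-based connection (the commented-out variant in init())
public class HDFSConnectDemo {
    public static void main(String[] args) throws Exception {
        //Act as the "root" user to avoid permission errors on writes
        System.setProperty("HADOOP_USER_NAME", "root");
        Configuration conf = new Configuration();
        //NameNode address taken from the article; adjust for your cluster
        conf.set("fs.defaultFS", "hdfs://192.168.43.73:9000");
        FileSystem fileSystem = FileSystem.get(conf);
        //Simple connectivity check: does the root directory exist?
        System.out.println(fileSystem.exists(new Path("/")));
        fileSystem.close();
    }
}

Note that FileSystem.get(new URI(...), conf, "root"), as used in init(), combines the address, configuration, and user into a single call.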

 


  
