1. New Project
2. Package guide
Decompress hadoop-2.7.3.tar.gz
Add the jars under:
E:\tools\big data\big data to enhance data\01-software data\06-Hadoop\installer\Java1.8 environment compiled\hadoop-2.7.3\hadoop-2.7.3\share\hadoop\common
Add the jars under:
E:\tools\big data\big data to enhance data\01-software data\06-Hadoop\installer\Java1.8 environment compiled\hadoop-2.7.3\hadoop-2.7.3\share\hadoop\common\lib
Add the jars under:
E:\tools\big data\big data to enhance data\01-software data\06-Hadoop\installer\Java1.8 environment compiled\hadoop-2.7.3\hadoop-2.7.3\share\hadoop\hdfs
JUnit unit testing
/ **
* unit test
* 1. modifier must be public
* not have parameters 2.
* 3. not have a return value
*
* Run: Select the name of the method to run
* RUN AS-JUnit Test
* /
@Test
void M1 public () {
System.out.println ( "Hello");
}
package com.zy.hdfs; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.util.Iterator; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.LocatedFileStatus; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.RemoteIterator; import org.junit.Before; import org.junit.Test; public class test { public static void main (String [] args) throws Exception { // upload // 1 Examples of Configuration the Configuration Configuration = new new the Configuration (); // ------------------- // set properties configuration.set ( "dfs.replication", "2"); // 2 copies configuration.set ( "dfs.blocksize", "80m"); // in accordance 80m segmentation // - -------------------- // 2. obtain the file system operation client instance the fileSystem FileSystem.get FS = ( new new the URI ( "HDFS: //192.168.64.111 : 9000 / "), configuration, " root "); // 3. Perform the upload operation (src local resources, dst upload destination) fs.copyFromLocalFile ( new new Path ( "C: \\ the Users Desktop \\ \\ \\ wc.txt Administrator"), new new Path ( " / ")); // twice as upload will be covered // Close the link fs.Close (); } @Test public void downloads () throws Exception { // download the Configuration Configuration = new new the Configuration (); the FileSystem FS = FileSystem.get ( new new the URI ( "HDFS: //192.168.64.111: 9000"), Configuration, "the root" ); fs.copyToLocalFile(false, new Path("/jdk-8u141-linux-x64.tar.gz"), new Path("E://jdk-8u141-linux-x64.tar.gz"), true); fs.close(); } //---------------- FileSystem fs=null; @Before//在每次运行junit时先执行 public void before() throws Exception{ System.out.println("11111"); Configuration configuration = new Configuration(); fs = FileSystem.get(newThe URI ( "HDFS: //192.168.64.111: 9000 /"), Configuration, "the root" ); } // --------------- @Test public void Delete () throws Exception { // delete 
fs.delete ( new new Path ( "/ elastic expression .doc"), true ); // to true to delete the folder and the contents inside, false can only delete empty folders fs.close (); } @Test public void mkdir () throws Exception { // create a directory fs.mkdirs ( new new the Path ( "/ A / B / C" )); fs.Close (); } @Test public void list() throws Exception{ FileStatus[] listStatus = fs.listStatus(new Path("/"));//不会递归进去 for (FileStatus fileStatus : listStatus) { if(fileStatus.isFile()){ System.out.println("文件名:"+fileStatus.getPath().toString()); }else{ System.out.println("目录:"+fileStatus.getPath().toString()); } } fs.close(); } @Test public voidList2 () throws Exception { // recursive traversal of all files RemoteIterator <LocatedFileStatus> = fs.listFiles the listFiles ( new new the Path ( "/"), to true ); // true, the recurse the while (listFiles.hasNext ()) { Next LocatedFileStatus = listFiles.next (); System.out.println (next.toString ()); System.out.println ( "file block size" + next.getBlockSize ()); } } }