Hadoop Read/Write Operations

Create a new project:

Import libs: the JAR libraries from the extracted Hadoop 2.7.2 distribution:

https://download.csdn.net/download/ssllkkyyaa/10758406

File API tests:

package com.example.demo;

import java.io.ByteArrayOutputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.junit.Test;

import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;

public class MyTest {
	@Test
	public void readFile() throws Exception{
		// register the URL stream handler factory so that java.net.URL understands the hdfs:// scheme
		// (this may only be called once per JVM)
		URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());

		URL url = new URL("hdfs://192.168.77.200:8020/user/centos/hadoop/index.html");
		URLConnection conn = url.openConnection();
		InputStream is = conn.getInputStream();
		// available() is sufficient for this small demo file; for large files read in a loop instead
		byte[] buf = new byte[is.available()];
		is.read(buf);
		is.close();
		String str = new String(buf);
		System.out.println(str);
	}
	/**
	 * Read a file via the Hadoop FileSystem API
	 */
	@Test
	public void readFileByAPI() throws Exception{
		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", "hdfs://192.168.77.200:8020/");
		FileSystem fs = FileSystem.get(conf) ;
		Path p = new Path("/user/centos/hadoop/index.html");
		FSDataInputStream fis = fs.open(p);
		byte[] buf = new byte[1024];
		int len = -1 ;

		ByteArrayOutputStream baos = new ByteArrayOutputStream();
		while((len = fis.read(buf)) != -1){
			baos.write(buf, 0, len);
		}
		fis.close();
		baos.close();
		System.out.println(new String(baos.toByteArray()));
	}
	/**
	 * Read a file via the Hadoop FileSystem API, copying with IOUtils
	 */
	@Test
	public void readFileByAPI2() throws Exception{
		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", "hdfs://192.168.77.200:8020/");
		FileSystem fs = FileSystem.get(conf) ;
		ByteArrayOutputStream baos = new ByteArrayOutputStream();
		Path p = new Path("/user/centos/hadoop/index.html");
		FSDataInputStream fis = fs.open(p);
		// the trailing 'true' closes both streams once the copy finishes
		IOUtils.copyBytes(fis, baos, 1024, true);
		System.out.println(new String(baos.toByteArray()));
	}

	/**
	 * mkdir: create a directory. Mind the permissions of the HDFS user you run as
	 * (see the note after this class).
	 */
	@Test
	public void mkdir() throws Exception{
		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", "hdfs://192.168.77.200:8020/");
		FileSystem fs = FileSystem.get(conf) ;
		fs.mkdirs(new Path("/user/centos/myhadoop"));
	}
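	/**
	 * listStatus: a minimal illustrative sketch to verify the directory created above
	 * by listing the children of /user/centos (FileStatus comes from org.apache.hadoop.fs.*,
	 * already imported above).
	 */
	@Test
	public void listFiles() throws Exception{
		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", "hdfs://192.168.77.200:8020/");
		FileSystem fs = FileSystem.get(conf);
		for (FileStatus st : fs.listStatus(new Path("/user/centos"))) {
			System.out.println(st.getPath() + (st.isDirectory() ? "  [dir]" : "  [file]"));
		}
	}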
	/**
	 * putFile: write a file
	 */
	@Test
	public void putFile() throws Exception{
		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", "hdfs://192.168.77.200:8020/");
		FileSystem fs = FileSystem.get(conf) ;
		FSDataOutputStream out = fs.create(new Path("/user/centos/myhadoop/a.txt"));
		out.write("helloworld".getBytes());
		out.close();
	}
	/**
	 * removeFile: delete a file or directory recursively
	 */
	@Test
	public void removeFile() throws Exception{
		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", "hdfs://192.168.77.200:8020/");
		FileSystem fs = FileSystem.get(conf) ;
		Path p = new Path("/user/centos/myhadoop");
		fs.delete(p, true);
	}
}
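
If mkdir or putFile fails with an AccessControlException, the client is identifying itself to HDFS as your local OS user, which usually does not own /user/centos. A minimal workaround sketch that can be dropped into the test class above (it assumes the cluster uses simple authentication and that "centos", taken from the paths above, is the owning user):

	@Test
	public void mkdirAsCentos() throws Exception{
		// must run before the first FileSystem.get() in this JVM, because the login user is cached
		System.setProperty("HADOOP_USER_NAME", "centos");
		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", "hdfs://192.168.77.200:8020/");
		FileSystem fs = FileSystem.get(conf);
		fs.mkdirs(new Path("/user/centos/myhadoop"));
	}

An equivalent option is FileSystem.get(new URI("hdfs://192.168.77.200:8020/"), conf, "centos"), which obtains the FileSystem for an explicit user.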

Download a file:

Either change the hostname in the download URL to the DataNode's IP address, or map the hostname in your local hosts file:

http://192.168.77.202:50075/webhdfs/v1/user/centos/myhadoop/a.txt?op=OPEN&namenoderpcaddress=s200:8020&offset=0
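
The same file can also be pulled down with the FileSystem API instead of WebHDFS. A minimal sketch that fits the test class above (the local destination "a.txt" is just illustrative, and java.io.FileOutputStream has to be imported):

	@Test
	public void getFile() throws Exception{
		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", "hdfs://192.168.77.200:8020/");
		FileSystem fs = FileSystem.get(conf);
		FSDataInputStream fis = fs.open(new Path("/user/centos/myhadoop/a.txt"));
		java.io.FileOutputStream fos = new java.io.FileOutputStream("a.txt");
		// the trailing 'true' closes both streams once the copy finishes
		IOUtils.copyBytes(fis, fos, 1024, true);
	}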

Reposted from blog.csdn.net/ssllkkyyaa/article/details/83622340