Copyright notice: these are my personal views; criticism and corrections are welcome. https://blog.csdn.net/waneyongfu/article/details/78594360
1. Building the Maven project. The Hadoop version used is 2.9.0 and the JDK version is 1.8.0_131.
<!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-hdfs -->
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-hdfs</artifactId>
    <version>2.9.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-common -->
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <version>2.9.0</version>
    <exclusions>
        <exclusion>
            <groupId>jdk.tools</groupId>
            <artifactId>jdk.tools</artifactId>
        </exclusion>
    </exclusions>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-client -->
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-client</artifactId>
    <version>2.9.0</version>
</dependency>
<!-- hadoop-common references jdk.tools, which is not in Maven Central;
     supply it from the local JDK's tools.jar instead -->
<dependency>
    <groupId>jdk.tools</groupId>
    <artifactId>jdk.tools</artifactId>
    <version>1.8.0_131</version>
    <scope>system</scope>
    <systemPath>${JAVA_HOME}/lib/tools.jar</systemPath>
</dependency>
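With these dependencies in place, a quick sanity check (my own addition, not part of the original post; the class name HadoopVersionCheck is just an example) is to print the Hadoop version that Maven pulled in; it should report 2.9.0:

import org.apache.hadoop.util.VersionInfo;

public class HadoopVersionCheck {
    public static void main(String[] args) {
        // prints the version of the hadoop-common jar found on the classpath
        System.out.println("Hadoop version: " + VersionInfo.getVersion());
    }
}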
2. Simple operations on HDFS
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

// the HDFS file system handle
FileSystem fileSystem;

// initialize the file system
public void init() throws Exception {
    // the hdfs:// scheme in the URI determines which FileSystem implementation is used
    URI uri = new URI("hdfs://dtbigdata:9000");
    Configuration conf = new Configuration();
    fileSystem = FileSystem.get(uri, conf);
}

// list a directory
public void Catalog() throws Exception {
    Path path = new Path("/user");
    FileStatus fileStatus = fileSystem.getFileStatus(path);
    System.out.println("*************************************");
    System.out.println("Directory: " + fileStatus.getPath());
    System.out.println("Entries under this directory:");
    for (FileStatus fs : fileSystem.listStatus(path)) {
        System.out.println(fs.getPath());
    }
}

// print the contents of a file
public void look() throws Exception {
    Path path = new Path("/user/root/b.txt");
    FSDataInputStream fsDataInputStream = fileSystem.open(path);
    System.out.println("*************************************");
    System.out.println("File contents:");
    int c;
    while ((c = fsDataInputStream.read()) != -1) {
        System.out.print((char) c);
    }
    fsDataInputStream.close();
}

// download a file from HDFS to the local file system
public void download() throws Exception {
    // local destination path
    String file = "D://hadoop.txt";
    InputStream in = fileSystem.open(new Path("/user/root/b.txt"));
    OutputStream out = new FileOutputStream(file);
    // copy the stream; the final argument closes both streams when done
    IOUtils.copyBytes(in, out, 4096, true);
    System.out.println("*************************************");
    System.out.println("Download finished: " + file);
}

// upload a local file to HDFS
public void upload() throws Exception {
    Path srcPath = new Path("F:/b.txt");
    Path dstPath = new Path("/user/root");
    fileSystem.copyFromLocalFile(false, srcPath, dstPath);
    // close the FileSystem handle; only do this after the last operation
    fileSystem.close();
    System.out.println("*************************************");
    System.out.println("Upload succeeded!");
}

// delete a file (the second argument enables recursive deletion for directories)
public void delete() throws Exception {
    Path path = new Path("/user/root/a.txt");
    fileSystem.delete(path, true);
    System.out.println("*************************************");
    System.out.println("Delete succeeded!");
}
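For reference, here is one way the snippets above could be driven end to end. This driver is my own sketch; the class name HdfsDemo is hypothetical and the original post does not show a main method. It assumes the field and methods above are pasted into the class:

public class HdfsDemo {

    // ... the fileSystem field and the methods shown above go here ...

    public static void main(String[] args) throws Exception {
        HdfsDemo demo = new HdfsDemo();
        demo.init();      // connect to hdfs://dtbigdata:9000
        demo.Catalog();   // list /user
        demo.look();      // print /user/root/b.txt
        demo.download();  // copy it to D://hadoop.txt
        demo.delete();    // remove /user/root/a.txt
        demo.upload();    // upload F:/b.txt last, since upload() closes the FileSystem
    }
}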
3. Running the code
3.1 You may get the error: Permission denied: user=Administrator
This happens because the code is run on Windows, where the default user is Administrator, and that user has no permission on HDFS.
3.2 The quickest fix
-DHADOOP_USER_NAME=root
Add it as a VM argument in your IDE's run configuration (it is a JVM system property).
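An alternative that avoids the VM argument entirely (a minimal sketch of my own, using the same URI as above): FileSystem.get has an overload that takes the remote user name, so init() could create the handle like this:

URI uri = new URI("hdfs://dtbigdata:9000");
Configuration conf = new Configuration();
// operations through this handle are performed as the HDFS user "root"
FileSystem fileSystem = FileSystem.get(uri, conf, "root");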