1 HttpFS
// Read a file from HttpFS via the WebHDFS REST API (op=OPEN) and print it line by line.
// NOTE(review): MyAuthenticator is defined elsewhere — presumably it supplies the
// HTTP credentials HttpFS expects; confirm against its implementation.
Authenticator.setDefault(new MyAuthenticator());
URL url = new URL("http://dev.test.com:14000/webhdfs/v1/tmp/test?op=OPEN");
// try-with-resources closes the connection's stream even if a read fails
// (the original never closed it); an explicit charset avoids depending on
// the platform default encoding when decoding the response body.
try (BufferedReader reader = new BufferedReader(
        new InputStreamReader(url.openConnection().getInputStream(), StandardCharsets.UTF_8))) {
    String str;
    while ((str = reader.readLine()) != null) {
        System.out.println(str);
    }
}
2 Hadoop Java API
/**
 * Copies an HDFS file to the current local working directory, using the
 * final path component as the local file name.
 *
 * @param file absolute HDFS path of the file to download
 * @throws IOException if opening, reading, or writing the file fails
 */
public void readFile(String file) throws IOException {
    Configuration conf = new Configuration();
    FileSystem fileSystem = FileSystem.get(conf);
    Path path = new Path(file);
    if (!fileSystem.exists(path)) {
        System.out.println("File " + file + " does not exist");
        return;
    }
    // Local file name = everything after the last '/'.
    String filename = file.substring(file.lastIndexOf('/') + 1);
    // try-with-resources closes both streams even when a read/write throws
    // (the original leaked them on any IOException mid-copy).
    try (FSDataInputStream in = fileSystem.open(path);
         OutputStream out = new BufferedOutputStream(new FileOutputStream(filename))) {
        byte[] buffer = new byte[1024];
        int numBytes;
        while ((numBytes = in.read(buffer)) > 0) {
            out.write(buffer, 0, numBytes);
        }
    }
    // NOTE(review): FileSystem.get typically returns a cached, shared instance;
    // closing it here invalidates it for other callers in the same JVM. Kept to
    // preserve the original behavior — consider FileSystem.newInstance instead.
    fileSystem.close();
}
3 FileOutputFormat
// NOTE(review): FileOutputFormat.setOutputPath sets the job-wide output directory
// and belongs in job setup, before submission. Calling it per key (as the
// "file" + key argument suggests) will not produce one output file per key —
// presumably MultipleOutputs is what was intended here; TODO confirm intent.
FileOutputFormat.setOutputPath(job, new Path("file" + key));
// Emit the (key, result) pair through the task context as usual.
context.write(key, result);
参考来源:
Hadoop官方教程