The resulting code is as follows:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import java.io.*;

public class HDFSApi {
 
    /**
     * Check whether a path exists on HDFS.
     */
    public static boolean test(Configuration conf, String path) throws IOException {
        FileSystem fs = FileSystem.get(conf);
        return fs.exists(new Path(path));
    }

    /**
     * Copy a local file to the given HDFS path.
     * If the destination already exists, it is overwritten.
     */
    public static void copyFromLocalFile(Configuration conf, String localFilePath, String remoteFilePath) throws IOException {
        FileSystem fs = FileSystem.get(conf);
        Path localPath = new Path(localFilePath);
        Path remotePath = new Path(remoteFilePath);
        /* The first boolean argument of fs.copyFromLocalFile controls whether the source
           file is deleted; the second controls whether an existing destination is overwritten */
        fs.copyFromLocalFile(false, true, localPath, remotePath);
        fs.close();
    }

    /**
     * Append the contents of a local file to the end of an HDFS file.
     */
    public static void appendToFile(Configuration conf, String localFilePath, String remoteFilePath) throws IOException {
        FileSystem fs = FileSystem.get(conf);
        Path remotePath = new Path(remoteFilePath);
        /* Input stream for the local file */
        FileInputStream in = new FileInputStream(localFilePath);
        /* Output stream whose writes are appended to the end of the HDFS file */
        FSDataOutputStream out = fs.append(remotePath);
        /* Copy the data in 1 KB chunks */
        byte[] data = new byte[1024];
        int read = -1;
        while ((read = in.read(data)) > 0) {
            out.write(data, 0, read);
        }
        out.close();
        in.close();
        fs.close();
    }
}
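For reference, below is a minimal driver sketch showing how these helpers might be called together. It is not part of the original answer: the class name HDFSApiUsageSketch, the fs.defaultFS address hdfs://localhost:9000, and the example file paths are assumptions for illustration. Note that on older Hadoop releases, appending also requires the HDFS append feature (dfs.support.append) to be enabled.

import org.apache.hadoop.conf.Configuration;
import java.io.IOException;

public class HDFSApiUsageSketch {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        /* Assumed NameNode address; adjust to the actual cluster */
        conf.set("fs.defaultFS", "hdfs://localhost:9000");
        String localFilePath = "/home/hadoop/text.txt";   // hypothetical local file
        String remoteFilePath = "/user/hadoop/text.txt";  // hypothetical HDFS path
        try {
            if (!HDFSApi.test(conf, remoteFilePath)) {
                /* Remote file does not exist yet: upload the local file */
                HDFSApi.copyFromLocalFile(conf, localFilePath, remoteFilePath);
            } else {
                /* Remote file already exists: append the local content to it */
                HDFSApi.appendToFile(conf, localFilePath, remoteFilePath);
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}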


Reposted from www.cnblogs.com/wxd136/p/9762516.html