// 将一台服务器上的文件放在另一台服务器的hdfs上
// (Upload a file from one server onto another server's HDFS.)



import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.springframework.stereotype.Service;

import java.io.*;
import java.net.HttpURLConnection;
import java.net.URI;
import java.net.URL;

/**
 * Created by panghu on 2017/9/13.
 */
@Service
public class FileManageService {

    /**
     * Downloads a file from an HTTP server and writes it to HDFS on another server.
     *
     * @param local  HTTP URL of the source file to download
     * @param target HDFS URI of the destination file (e.g. {@code hdfs://host:port/path});
     *               used both to locate the FileSystem and as the output path
     * @throws IOException if the HTTP download or the HDFS write fails
     */
    public static void fileUp(String local, String target) throws IOException {
        URL url = new URL(local);
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        // Connect timeout of 3 seconds. NOTE: in the original source this call had been
        // accidentally swallowed into a comment, so no timeout was ever applied.
        conn.setConnectTimeout(3 * 1000);
        // Spoof a browser User-Agent so anti-scraping servers do not answer with 403.
        conn.setRequestProperty("User-Agent", "Mozilla/4.0 (compatible; MSIE 5.0; Windows NT; DigExt)");
        System.out.println(target);
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(target), conf);
        try (InputStream in = conn.getInputStream();
             OutputStream out = fs.create(new Path(target))) {
            // closeStream=false: try-with-resources already guarantees both streams close,
            // even if the copy throws (the original leaked them on a create() failure).
            IOUtils.copyBytes(in, out, 4096, false);
        } finally {
            fs.close();
        }
        System.out.println("上传完成。。。。。。。");
    }

    /**
     * Uploads a local file to HDFS.
     *
     * @param uri    HDFS URI used to obtain the {@link FileSystem} instance
     * @param local  path of the local source file
     * @param remote HDFS destination path
     * @return the constant status string {@code "111"} (kept for caller compatibility)
     * @throws IOException if the FileSystem cannot be obtained or the copy fails
     */
    public static String copyFile(String uri, String local, String remote) throws IOException {
        Configuration conf = new Configuration();
        // try-with-resources: the original skipped fs.close() when the copy threw.
        try (FileSystem fs = FileSystem.get(URI.create(uri), conf)) {
            fs.copyFromLocalFile(new Path(local), new Path(remote));
            System.out.println("copy from: " + local + " to " + remote);
        }
        return "111";
    }

    /** Smoke-test entry point; prints a marker value only. */
    public static void main(String[] args) {
        System.out.println(1234);
    }
}

// 转载自 (reposted from): blog.csdn.net/starry_xiao/article/details/77982405