// Scala HDFS operations


package com.soul.bigdata.scala.day04

import java.net.URI

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}

object HDFSApp {

  /** Demonstrates basic HDFS filesystem operations (mkdir, file create/write,
    * rename, delete) against a remote cluster at hdfs://hadoop000:8020.
    * Only the directory-delete example is active; the other operations are
    * kept below as commented-out samples.
    */
  def main(args: Array[String]): Unit = {

    val configuration = new Configuration
    // Connect to the HDFS NameNode. FileSystem instances hold network
    // resources and must be closed when finished.
    val fs = FileSystem.get(new URI("hdfs://hadoop000:8020"), configuration)

    try {
      // Create a directory
      //    val flag = fs.mkdirs(new Path("/f12"))
      //    println(flag)

      // Create file info.txt (parent dirs auto-created) and write data to it
      //    val out = fs.create(new Path("/f12/info.txt"))
      //    out.write("HDFS写数据".getBytes())
      //    out.flush()
      //    out.close()

      // Rename a file
      //    val src = new Path("/f12/info.txt")
      //    val dst = new Path("/f12/info_1.txt")
      //    val flag = fs.rename(src, dst)
      //    println(flag)

      // Delete a single file (the 1-arg delete is deprecated; pass the
      // recursive flag explicitly)
      //    val path = new Path("/f12/info_1.txt")
      //    if (fs.exists(path)) {
      //      val flag = fs.delete(path, false)
      //      println(flag)
      //    }

      // Delete the directory. Note: deleteOnExit only REGISTERS the path;
      // the actual deletion happens when the FileSystem is closed, so the
      // fs.close() in the finally block below is required for it to run.
      val path = new Path("/f12/")

      val flag = fs.deleteOnExit(path)
      println(flag)
    } finally {
      // Fix: the original never closed the FileSystem, leaking the
      // connection and leaving the deleteOnExit registration unhonored
      // until JVM shutdown.
      fs.close()
    }
  }
}

// Adapted from: https://blog.csdn.net/weixin_33912453/article/details/90857764