idea保存结果至HDFS报错:Permission denied

  • 解决方法
    修改hdfs-site.xml文件
 <property>
    <name>dfs.permissions</name>
    <value>false</value>
</property>
  • 拷贝至所有节点,然后重启hadoop
package com.ws.spark

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object RangeTest {

  /**
   * Word count over a whitespace-separated text file on HDFS.
   *
   * Usage: RangeTest [inputPath] [outputPath]
   * Both arguments are optional; when omitted, the original hard-coded HDFS
   * paths are used, so existing invocations keep working unchanged.
   */
  def main(args: Array[String]): Unit = {
    val inputPath  = if (args.length > 0) args(0) else "hdfs://192.168.0.21:9000/test"
    val outputPath = if (args.length > 1) args(1) else "hdfs://192.168.0.21:9000/result"

    val sparkConf: SparkConf = new SparkConf().setAppName("RangeTest").setMaster("local")
    // FIX: a bare "dfs.permissions" key in SparkConf is silently ignored —
    // Spark only copies properties into the Hadoop Configuration when they
    // carry the "spark.hadoop." prefix. NOTE(review): dfs.permissions is
    // ultimately enforced server-side by the NameNode, so the hdfs-site.xml
    // change described above may still be required on the cluster.
    sparkConf.set("spark.hadoop.dfs.permissions", "false")

    val sparkContext: SparkContext = new SparkContext(sparkConf)
    try {
      val sourceRdd: RDD[String] = sparkContext.textFile(inputPath)

      // Classic word count: split each line on single spaces, pair every
      // token with 1, then sum the counts per token.
      val result: RDD[(String, Int)] =
        sourceRdd.flatMap(_.split(" ")).map((_, 1)).reduceByKey(_ + _)

      result.saveAsTextFile(outputPath)
    } finally {
      // Release the SparkContext even if the job throws.
      sparkContext.stop()
    }
  }

}

参考来自

猜你喜欢

转载自blog.csdn.net/bb23417274/article/details/87934187