Exception in thread "main" java.lang.NoSuchMethodError: scala.Predef$.$scope()Lscala/xml/TopScope$;
at org.apache.spark.ui.jobs.AllJobsPage.<init>(AllJobsPage.scala:39)
at org.apache.spark.ui.jobs.JobsTab.<init>(JobsTab.scala:38)
at org.apache.spark.ui.SparkUI.initialize(SparkUI.scala:67)
at org.apache.spark.ui.SparkUI.<init>(SparkUI.scala:84)
at org.apache.spark.ui.SparkUI$.create(SparkUI.scala:221)
at org.apache.spark.ui.SparkUI$.createLiveUI(SparkUI.scala:163)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:452)
at Spark_day01.SparkWC$.main(SparkWC.scala:18)
at Spark_day01.SparkWC.main(SparkWC.scala)
Solution:
The root cause is a Scala binary-version mismatch in the spark-core dependency: the _2.10 suffix means spark-core was compiled against Scala 2.10, whose Predef still provides $scope for scala.xml, while the project itself runs on the Scala 2.11 library, where that method no longer exists. Change the original pom.xml dependency:
<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-core_2.10</artifactId>
    <version>2.1.0</version>
</dependency>
to:
<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-core_2.11</artifactId>
    <version>2.1.1</version>
</dependency>
Problem solved.
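As a general safeguard against this class of mismatch, the Scala binary version can be factored out into Maven properties so that scala-library and every _2.xx-suffixed artifact always agree. This is a minimal sketch, not from the original post; the property names are arbitrary, and 2.11.8 is chosen because Spark 2.1.1 is itself built against it:

<properties>
    <scala.version>2.11.8</scala.version>
    <scala.binary.version>2.11</scala.binary.version>
</properties>

<dependencies>
    <dependency>
        <groupId>org.scala-lang</groupId>
        <artifactId>scala-library</artifactId>
        <version>${scala.version}</version>
    </dependency>
    <dependency>
        <groupId>org.apache.spark</groupId>
        <!-- the suffix now always matches the scala-library version above -->
        <artifactId>spark-core_${scala.binary.version}</artifactId>
        <version>2.1.1</version>
    </dependency>
</dependencies>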
Full source code for reference:
package Spark_day01

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD

object SparkWC {
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf()
      // Master to run against:
      //   local    - simulate the cluster with one thread (default)
      //   local[3] - simulate the cluster with 3 local threads
      //   local[*] - simulate the cluster with all idle threads
      .setMaster("local")
      .setAppName("spark-word-count")
    // Create a SparkContext
    val sc: SparkContext = new SparkContext(conf)
    // Read the input file; each line becomes one String element of the RDD
    val lines: RDD[String] = sc.textFile(args(0))
    // Split each line into words and flatten
    val words: RDD[String] = lines.flatMap(_.split(" "))
    // word => (word, 1)
    val tuple: RDD[(String, Int)] = words.map((_, 1))
    // Reduce the values of identical keys
    val reduced: RDD[(String, Int)] = tuple.reduceByKey(_ + _)
    // Sort the result by count, descending
    val sorted: RDD[(String, Int)] = reduced.sortBy(_._2, false)
    // Save the result
    sorted.saveAsTextFile(args(1))
    sc.stop()
  }
}
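For a quick local sanity check without writing output directories, the save step can be swapped for collecting the (small) result to the driver and printing it. This variant is my own sketch, not part of the original post; the input path data/words.txt is a hypothetical placeholder:

package Spark_day01

import org.apache.spark.{SparkConf, SparkContext}

object SparkWCConsole {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setMaster("local[*]")                 // use all idle cores locally
      .setAppName("spark-word-count-console")
    val sc = new SparkContext(conf)
    sc.textFile("data/words.txt")            // hypothetical input path
      .flatMap(_.split(" "))
      .map((_, 1))
      .reduceByKey(_ + _)
      .sortBy(_._2, ascending = false)
      .collect()                             // only safe for small test data
      .foreach(println)
    sc.stop()
  }
}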