package com.ws.demo
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
/**
 * Custom sort demo #3.
 *
 * Parses "name age score" lines into typed triples and sorts them with
 * `sortBy`, using [[Human]] as the sort key so the implicit
 * `Ordering[Human]` imported from [[SortRules]] decides the order
 * (score descending, then age ascending).
 */
object CustomSort3 {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("CustomSort3").setMaster("local[4]")
    val sc = new SparkContext(conf)

    val rawLines = Array("ws 18 150", "tt 19 145", "nn 20 130", "bb 16 120", "cb 19 150")
    val linesRdd: RDD[String] = sc.parallelize(rawLines)

    // Split each "name age score" line into a (name, age, score) triple.
    val records: RDD[(String, Int, Int)] = linesRdd.map { line =>
      val Array(name, age, score) = line.split(" ")
      (name, age.toInt, score.toInt)
    }

    import SortRules.SortOrdering
    // Human(age, score) is only the sort key; the imported implicit Ordering drives the order.
    val sorted: RDD[(String, Int, Int)] = records.sortBy(rec => Human(rec._2, rec._3))
    println(sorted.collect().toBuffer)

    sc.stop()
  }
}
/**
 * Sort key for the custom-sort demo.
 *
 * @param age   person's age (ascending tiebreaker)
 * @param score person's score (primary key, descending)
 */
final case class Human(age: Int, score: Int)

// NOTE(review): the original paste had a second `package com.ws.demo` clause here
// (these were two separate files); a file may declare its package only once, so it
// was dropped to keep this file compilable as a unit.

/** Holds the implicit Ordering used by `sortBy` in CustomSort3. */
object SortRules {
  implicit object SortOrdering extends Ordering[Human] {
    /** Higher score first; on equal scores, lower age first. */
    override def compare(x: Human, y: Human): Int = {
      // Integer.compare avoids the Int-overflow bug of subtraction-based
      // comparison (e.g. y.score - x.score wraps for extreme values).
      val byScore = Integer.compare(y.score, x.score)
      if (byScore != 0) byScore else Integer.compare(x.age, y.age)
    }
  }
}
Output:
ArrayBuffer((ws,18,150), (cb,19,150), (tt,19,145), (nn,20,130), (bb,16,120))