spark 单词计数

1、要进行单词计数的文件内容如下

English
Spark
I love Spark very much !
Spark VS Hadoop

2、单词计数spark scala代码

package demo

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Counts occurrences of each whitespace-separated word in a text file.
 *
 * Usage: demo.WordCount &lt;input&gt;
 * Expects exactly one argument: the input path readable by Spark
 * (local path or HDFS URI). Exits with status 1 on wrong arity.
 */
object WordCount {

  def main(args: Array[String]): Unit = {
    if (args.length != 1) {
      println("Usage:demo.WordCount <input>")
      System.exit(1)
    }
    val input = args(0)
    val conf = new SparkConf().setAppName("SparkWordCount")
    val sc = new SparkContext(conf)
    // Split lines on single spaces, pair each word with 1, sum per word,
    // then pull the (word, count) pairs to the driver and print them.
    sc.textFile(input)
      .flatMap(_.split(" "))
      .map(word => (word, 1))
      .reduceByKey(_ + _)
      .collect()
      .foreach(println)
    sc.stop()
  }
}

3、包含 "a" 的单词才进行计数

package demo

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Counts occurrences of words that contain the letter "a".
 *
 * Usage: demo.WordCount &lt;input&gt;
 * Same as the plain word count, but words not containing "a" are
 * filtered out on the cluster before counting.
 */
object WordCount {

  def main(args: Array[String]): Unit = {
    if (args.length != 1) {
      println("Usage:demo.WordCount <input>")
      System.exit(1)
    }
    val input = args(0)
    val conf = new SparkConf().setAppName("SparkWordCount")
    val sc = new SparkContext(conf)
    // Keep only words containing "a" before counting; the filter runs
    // distributed, so ineligible words never reach the shuffle.
    sc.textFile(input)
      .flatMap(_.split(" "))
      .filter(_.contains("a"))
      .map(word => (word, 1))
      .reduceByKey(_ + _)
      .collect()
      .foreach(println)
    sc.stop()
  }
}

4、单词出现次数大于2（即至少出现3次）的才输出

package demo

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Counts words and prints only those appearing more than 2 times
 * (i.e. at least 3 occurrences).
 *
 * Usage: demo.WordCount &lt;input&gt;
 */
object WordCount {

  def main(args: Array[String]): Unit = {
    if (args.length != 1) {
      println("Usage:demo.WordCount <input>")
      System.exit(1)
    }
    val input = args(0)
    val conf = new SparkConf().setAppName("SparkWordCount")
    val sc = new SparkContext(conf)
    // Filter by count on the cluster BEFORE collect(), so only the
    // qualifying pairs are shipped to the driver. The original filtered
    // on the driver after collecting everything; the printed output is
    // identical but this avoids materializing the full result set locally.
    sc.textFile(input)
      .flatMap(_.split(" "))
      .map(word => (word, 1))
      .reduceByKey(_ + _)
      .filter { case (_, count) => count > 2 }
      .collect()
      .foreach(println)
    sc.stop()
  }
}

猜你喜欢

转载自blog.csdn.net/abc_321a/article/details/53412419