spark 更改日志输出级别

package com.ideal.test

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.log4j.{Level, Logger}
object WC {
  // Raise the root log4j level so Spark's INFO/WARN console chatter is suppressed.
  // This runs during object initialization, i.e. before main() creates the SparkContext.
  Logger.getRootLogger.setLevel(Level.ERROR)
  //Logger.getLogger("org").setLevel(Level.ERROR)

  /** Classic word count.
    *
    * @param args args(0) = input text path, args(1) = output directory path
    */
  def main(args: Array[String]): Unit = {
    // Fail fast with a usage message instead of an opaque ArrayIndexOutOfBoundsException.
    require(args.length >= 2, "usage: WC <input-path> <output-path>")

    val conf = new SparkConf()
      .setAppName("wc")
    val sc = new SparkContext(conf)
    try {
      val counts = sc.textFile(args(0))
        .flatMap(_.split("\\s+"))   // split on any run of whitespace
        .map(word => (word, 1))
        .reduceByKey(_ + _)
      counts.saveAsTextFile(args(1))
    } finally {
      // Always release the SparkContext, even if the job throws.
      sc.stop()
    }
  }
}

在代码中调用 Logger.getRootLogger.setLevel(Level.ERROR) 即可设置日志输出级别，无需修改 log4j 配置文件。

猜你喜欢

转载自www.cnblogs.com/jason-dong/p/9450172.html