Spark parameter settings

    // Set up the SparkConf parameters
    import org.apache.spark.SparkConf

    val sparkConf = new SparkConf() //.setAppName("DirectKafka").set("spark.task.maxFailures", "1")
    // RPC and network timeouts, in seconds: 1200 s = 20 minutes
    sparkConf.set("spark.rpc.askTimeout", "1200")
    sparkConf.set("spark.network.timeout", "1200")
    sparkConf.set("spark.core.connection.ack.wait.timeout", "1200")
    sparkConf.set("spark.core.connection.auth.wait.timeout", "1200")
    // Tolerate more executor failures before YARN fails the application
    sparkConf.set("spark.yarn.max.executor.failures", "300")
    sparkConf.set("spark.akka.timeout", "1200")
    sparkConf.set("spark.rpc.lookupTimeout", "1200")

    //confs.set("spark.driver.memory", "5g")
    // Re-run slow tasks speculatively and consolidate shuffle output files
    sparkConf.set("spark.speculation", "true")
    sparkConf.set("spark.shuffle.consolidateFiles", "true")

    // Compress serialized RDD partitions; use the full legacy storage fraction for caching
    sparkConf.set("spark.rdd.compress", "true")
    sparkConf.set("spark.storage.memoryFraction", "1")
    // Note: this overrides the 1200 set above; the last value set wins
    sparkConf.set("spark.core.connection.ack.wait.timeout", "6000")
    sparkConf.set("spark.akka.frameSize", "50")

    // Hadoop/YARN client timeouts, in milliseconds (these usually need the
    // "spark.hadoop." prefix to be copied into the Hadoop Configuration)
    sparkConf.set("dfs.client.slow.io.warning.threshold.ms", "100000")
    sparkConf.set("yarn.resourcemanager.connect.max-wait.ms", Integer.MAX_VALUE.toString())
    sparkConf.set("mapred.task.timeout", "1800000")
    sparkConf.set("dfs.socket.timeout", "6000000")
