# encoding:utf-8
"""Helper script for debugging spark-submit parameters.

Builds the spark-submit argument list for the RBK job and runs it,
propagating spark-submit's exit status to the caller.
"""
__author__ = 'jwang'

import subprocess
import sys

SPARK_SUBMIT = '/usr/local/datacenter/spark/bin/spark-submit'


def build_cmd():
    """Return the spark-submit command as an argv list.

    Kept pure (no side effects) so the assembled command can be
    inspected or unit-tested without launching Spark.
    """
    cmd = [
        SPARK_SUBMIT,
        '--class', 'com.xx.xx.RBK',
        '--master', 'yarn',
        '--name', 'XXRBK',
        '--executor-memory', '3g',
        '--driver-memory', '3g',
        '--executor-cores', '16',
        'xxxx.jar',
    ]
    # Application arguments for the jar. The original script wrapped these
    # in literal double quotes ('"xx"') so they survived os.system's shell;
    # with an argv list there is no shell, so the values are passed as-is
    # and reach the JVM with identical content.
    cmd.extend(['xx', 'rule'])
    return cmd


def main():
    cmd = build_cmd()
    print('start to execute ::: ' + ' '.join(cmd))
    # subprocess.call with a list (shell=False) avoids shell-quoting
    # pitfalls and injection risk that os.system(' '.join(...)) carried.
    ret = subprocess.call(cmd)
    # Propagate spark-submit's exit code. The original os._exit(0) always
    # reported success, masking job-submission failures.
    sys.exit(ret)


if __name__ == '__main__':
    main()
A small Python script for conveniently debugging spark-submit parameters.
Reproduced from: aijerry.iteye.com/blog/2263783