Run jar

rz
rm -rf incoHive.jar
>>spark-submit --queue=mr --class com.inco.hive.CNlawTOxml.Lge_SparkRead --master yarn --num-executors 10 --executor-memory 10g --total-executor-cores 100 --jars /opt/exlib/source-1.0.jar /home/liyingying/incoHive.jar

>>nohup spark-submit --queue=mr --class com.inco.hive.CNlawTOxml.Lge_SparkRead --master yarn --num-executors 10 --executor-memory 10g --total-executor-cores 100 --jars /opt/exlib/source-1.0.jar /home/liyingying/incoHive.jar &

--2.11
>>nohup spark-submit --queue=mr --class com.inco.hive.CNlawTOxml.Lge_ReadNew --master yarn --num-executors 10 --executor-memory 10g --total-executor-cores 100 --jars /opt/exlib/source-1.0.jar /home/liyingying/incoHive.jar dim.pn_an_lawmid 中文XML生成 &

//top
>>
nohup spark-submit --queue=mr --class com.inco.hive.CNlawTOxml.Lge_ReadNew --master yarn --num-executors 20 --conf spark.driver.maxResultSize=1024m --conf spark.yarn.executor.memoryOverhead=10g --executor-memory 10g --driver-memory 15g --jars /opt/exlib/source-1.0.jar /home/liyingying/incoHive.jar dim.pn_an_lawflag_id30_45 中文XML生成 &


nohup spark-submit --queue=mr --class com.inco.hive.CNlawTOxml.Lge_oritype --master yarn --num-executors 10 --conf spark.driver.maxResultSize=1024m --conf spark.yarn.executor.memoryOverhead=10g --executor-memory 10g --driver-memory 15g --jars /opt/exlib/source-1.0.jar /home/liyingying/jarr/incoHive.jar dim.pn_an_law 中文XML生成 /home/liyingying/xml/Lawxml0906 &
--pn_an_lawmid

nohup spark-submit --queue=mr --class com.inco.hive.abxml.MySpark_Read --master yarn --num-executors 10 --executor-memory 10g --total-executor-cores 100 --jars /opt/exlib/source-1.0.jar /home/liyingying/incoHive.jar ods.abxml_lyytest &

// empty
echo ""> nohup.out
// execute
>> nohup
>> tail -1000f nohup.out

// change into a directory
>> cd /home/liyingying/xml/test
// delete the files in a directory (note: path must have no spaces — a broken
// "rm -rf / home ..." would target the root directory)
>> rm -rf /home/liyingying/xml/test0903/*

view background processes: ps -ef | grep Lge
kill a process by PID: kill -9 43884

Guess you like

Origin www.cnblogs.com/simly/p/11527238.html