scrapy 顺序执行多个爬虫 —— 三种写法对比（os.system 逐个阻塞执行；cmdline.execute 只会执行第一个）

# -*- coding:utf-8 -*-

import os
import subprocess
import sys
import time

from scrapy import cmdline
from scrapy.cmdline import execute

# Approach 1: shell out to `scrapy crawl` once per spider.  Each call blocks
# until that crawl process exits, so ALL spiders run, strictly one after
# another.  subprocess.run with an argument list avoids spawning a shell
# (safer and more robust than os.system with a command string).
subprocess.run(["scrapy", "crawl", "ccdi"])
subprocess.run(["scrapy", "crawl", "ccxi"])
#-----------------------------------------------------

# Approach 2: call cmdline.execute() twice.  Only the FIRST spider runs --
# execute() hands control over to Scrapy and never returns to this script,
# so the second call below is dead code.
cmdline.execute(["scrapy", "crawl", "ccdi"])
cmdline.execute(["scrapy", "crawl", "ccxi"])
#-----------------------------------------------------

# Approach 3: same pitfall as approach 2 -- only the first execute() call
# ever runs; the sleep and the second crawl are never reached.
project_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(project_dir)
execute(["scrapy", "crawl", "shanghaione"])
time.sleep(30)

sys.path.append(project_dir)
execute(["scrapy", "crawl", "shanghaitwo"])

猜你喜欢

转载自blog.csdn.net/xc_zhou/article/details/80871382