Advanced Python Programming and Async IO Concurrency -- 13_9 asyncio High-Concurrency Crawler

# asyncio crawler: crawl, deduplicate URLs, and insert results into the database
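#
# Overall flow: the start page (a ranking page on www.xbiquge.la) is fetched first
# and every absolute link on it is pushed onto a waiting list. A consumer coroutine
# pops URLs off that list and schedules article pages for parsing and DB insertion,
# and other pages for further link extraction; a set of seen URLs provides
# deduplication and a semaphore caps the number of concurrent HTTP requests.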

import asyncio
import re

import aiohttp
import aiomysql
from pyquery import PyQuery


start_url = 'http://www.xbiquge.la/paihangbang/'
waiting_urls = []   # URLs discovered but not yet processed
seen_urls = set()   # URLs already fetched, used for deduplication
stopping = False    # set to True to stop the consumer loop

sem = asyncio.Semaphore(3)  # cap concurrent HTTP requests at 3


async def fetch(url, session):
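    # Fetch one page while holding the semaphore; return the HTML text on a
    # 200/201 response, or None if the request fails or returns another status.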
    async with sem:
        try:
            async with session.get(url) as resp:
                print('url status: {}'.format(resp.status))
                if resp.status in [200, 201]:
                    data = await resp.text()
                    return data
        except Exception as e:
            print(e)


def extract_urls(html):
    # Collect every absolute link on the page that has not been seen yet.
    if not html:
        return
    pq = PyQuery(html)
    for link in pq.items('a'):
        url = link.attr('href')
        if url and url.startswith('http') and url not in seen_urls:
            waiting_urls.append(url)


async def init_urls(url, session):
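    # Crawl a non-article page: fetch it, mark it as seen, and queue the links it contains.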
    html = await fetch(url, session)
    seen_urls.add(url)
    extract_urls(html)


async def article_handle(url, session, pool):
    # Fetch the article page, parse out its title, and insert it into the database
    html = await fetch(url, session)
    seen_urls.add(url)
    if not html:
        return
    extract_urls(html)
    pq = PyQuery(html)
    title = pq("title").text()
    # title = title + '\n'
    async with pool.acquire() as conn:
        async with conn.cursor() as cur:
            # Parameterized query so quotes in the title cannot break the SQL
            insert_sql = "INSERT INTO article_test VALUES(%s)"
            print(insert_sql, title)
            await cur.execute(insert_sql, (title,))
    # Alternative: write the title to a file instead of MySQL
    # with open('aiohttp_spider.txt', mode='a', encoding='utf-8') as file_object:
    #     file_object.write(title)
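
# Note: the original post never shows the schema of article_test. The INSERT above
# assumes a single text column, e.g. (this DDL is an assumption, not from the post):
#   CREATE TABLE article_test (title VARCHAR(255)) DEFAULT CHARSET=utf8;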


async def consumer(pool, session):
    # Pull URLs off the waiting list and dispatch them: article pages go to
    # article_handle, everything else goes through init_urls for more links.
    while not stopping:
        if len(waiting_urls) == 0:
            await asyncio.sleep(0.5)
            continue

        url = waiting_urls.pop()
        print('start get url: {}'.format(url))
        if re.match(r'http://www.xbiquge.la/\d+/\d+/', url):
            if url not in seen_urls:
                asyncio.ensure_future(article_handle(url, session, pool))
        else:
            if url not in seen_urls:
                asyncio.ensure_future(init_urls(url, session))


async def main(loop):
    # Wait for the MySQL connection pool to be established
    pool = await aiomysql.create_pool(host='127.0.0.1', port=3306,
                                      user='root', password='123456',
                                      db='aiomysql_test', loop=loop,
                                      charset='utf8', autocommit=True)

    session = aiohttp.ClientSession()  # never closed explicitly; the event loop runs forever
    html = await fetch(start_url, session)
    seen_urls.add(start_url)
    extract_urls(html)

    asyncio.ensure_future(consumer(pool, session))


if __name__ == '__main__':
    loop = asyncio.get_event_loop()
    asyncio.ensure_future(main(loop))
    loop.run_forever()
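
To run this script you need MySQL reachable at 127.0.0.1:3306 with the aiomysql_test database and the article_test table created beforehand, plus the aiohttp, aiomysql and pyquery packages installed. Also note that the stopping flag is never actually set to True anywhere, so the crawler keeps running until it is interrupted manually.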