Avoiding duplicate data when inserting into MongoDB (using update with upsert in a crawler)

MongoDB deduplication:

import pymongo

client = pymongo.MongoClient()
collection = client.t.test

# collection.insert_one({'title': 'python', 'name': 'deng', 'age': 23})

data = {'title': 'go', 'name': 'wang', 'age': 45, 'url': 1}
# upsert=True: insert when no document matches the filter, otherwise update in place
collection.update_one({'url': 1}, {'$set': data}, upsert=True)

# In the example above, if a document with that url already exists, no new document is inserted; any fields in data whose stored values differ are updated via $set. (The legacy collection.update(filter, update, True) form passed upsert positionally; it was removed in PyMongo 4, and update_one(..., upsert=True) is the current equivalent.)
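To see why this avoids duplicates, here is a minimal sketch (my own addition, assuming a local mongod and a throwaway t.test collection): running the same upsert twice still leaves exactly one document.

import pymongo

client = pymongo.MongoClient()
collection = client.t.test
collection.delete_many({})  # start the demo from an empty collection

doc = {'title': 'go', 'name': 'wang', 'age': 45, 'url': 1}
for _ in range(2):
    # the second iteration matches the filter, so it updates instead of inserting
    collection.update_one({'url': doc['url']}, {'$set': doc}, upsert=True)

print(collection.count_documents({'url': 1}))  # prints 1, not 2
client.close()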

Crawler example:

collection.update_one({'url': data['url'], 'cover_url': data['cover_url']}, {'$set': data}, upsert=True)

The full script:

# coding=utf8
"""
author:dengjiyun
"""
import pymongo
import requests

client = pymongo.MongoClient()
collection = client.dou.douban

url = 'https://movie.douban.com/j/chart/top_list'

params={
    'type':'11',
    'interval_id':'100:90',
    'action':'',
    'start':'60',
    'limit':'20'
}
headers={
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.157 Safari/537.36'
}

res = requests.get(url, params=params, headers=headers).json()  # the endpoint returns a JSON array of movie dicts

for item in res:
    data = {}
    # print(item['cover_url'])
    data['vote_count'] = item['vote_count']      # number of ratings
    data['score'] = item['score']                # score
    data['title'] = item['title']                # movie title
    data['url'] = item['url']                    # detail-page url
    data['cover_url'] = item['cover_url']        # cover image
    data['rank'] = item['rank']                  # rank
    data['id'] = item['id']                      # movie id
    data['release_date'] = item['release_date']  # release date

    print(item)
    # no duplicate inserts: upsert keyed on (url, cover_url)
    collection.update_one({'url': data['url'], 'cover_url': data['cover_url']}, {'$set': data}, upsert=True)
client.close()
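One caveat worth noting (my own addition, not from the original post): upserts alone can still race if two crawler processes run this loop concurrently. A unique compound index on the same key pair makes MongoDB itself reject duplicates; a minimal sketch against the same dou.douban collection:

import pymongo

client = pymongo.MongoClient()
collection = client.dou.douban
# unique compound index: a second document with the same (url, cover_url) pair is rejected
collection.create_index(
    [('url', pymongo.ASCENDING), ('cover_url', pymongo.ASCENDING)],
    unique=True,
)
client.close()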


Reposted from www.cnblogs.com/knighterrant/p/10920308.html