Keys used in this example:
1. article_time — publish timestamps of articles, zset structure
2. article_score — scores of articles, zset structure
   (an article's score = publish time + number of votes × 432)
3. voted_article_id — set of users who voted for an article; the poster is counted as the first voter by default, set structure
4. article_article_id — per-article description (title, link, poster, time, votes), hash structure
5. group_groupname — set of articles belonging to group "groupname", set structure
6. score_groupname — score collection of the articles in group "groupname", zset structure
# python3
# -*- coding: utf-8 -*-
"""Redis article-voting example ("Redis in Action", ch. 1 style).

Uses the legacy redis-py ``Redis`` class API (``zadd(name, member, score)``,
keyword-style ``zadd``) — see the PS below about Redis vs StrictRedis.
"""
import redis
import time

ONE_WEEK_IN_SECONDS = 7 * 86400
# If an article gets 200 up-votes within a day it is considered interesting:
# each vote is worth 86400 / 200 seconds of score.
VOTE_SCORE = 86400 / 200
ARTICLES_PER_PAGE = 25


def seed_data(conn):
    """Populate sample data.

    Fix: the original parameter was named ``Redis`` while the body called
    ``redis.zadd(...)`` — i.e. the *module*, not the connection; and the third
    article description was written to 'article_100635' twice instead of
    'article_100716'.
    """
    # article_time records each article's publish timestamp.
    conn.zadd('article_time',
              article_100408=1496762197,
              article_100635=1496769721,
              article_100716=1496760089)
    # article_score records each article's score.
    conn.zadd('article_score',
              article_100408=1496766517,
              article_100635=1496770153,
              article_100716=1496765705)
    # voted_<article_id> records the users who up-voted that article.
    conn.sadd('voted_100408', 'user_234487', 'user_253378', 'user_364680',
              'user_132097', 'user_350917')
    # One description hash per article.
    conn.hmset('article_100408', {'title': 'Kunlun',
                                  'link': 'www.kunlun.com',
                                  'poster': 'user_234487',
                                  'time': 1441728000,
                                  'votes': 523})
    conn.hmset('article_100635', {'title': 'zhuxian',
                                  'link': 'www.zhuxian.com',
                                  'poster': 'user_234488',
                                  'time': 1081440000,
                                  'votes': 677})
    conn.hmset('article_100716', {'title': 'soushenji',
                                  'link': 'www.soushenji.com',
                                  'poster': 'user_234489',
                                  'time': 1187280000,
                                  'votes': 421})
    # Running total of posted articles (used to mint new article ids).
    conn.set('article_index', 200000)


# Backward-compatible alias for the original (garbled) name.
redis_init = seed_data


def article_vote(conn, user, article):
    """Register *user*'s up-vote on *article* ('article_<id>').

    Voting closes one week after publication; each user may vote only once
    (enforced by the voted_<id> set).
    """
    cutoff = time.time() - ONE_WEEK_IN_SECONDS
    if conn.zscore('article_time', article) < cutoff:
        return  # too old — voting has closed
    article_id = article.partition('_')[-1]
    # sadd returns 1 only for a first-time vote by this user.
    if conn.sadd('voted_' + article_id, user):
        conn.zincrby('article_score', article, VOTE_SCORE)
        conn.hincrby(article, 'votes', 1)


def post_article(conn, user, title, link):
    """Publish a new article and return its numeric id (as a string)."""
    article_id = str(conn.incr('article_index'))

    # The poster counts as the article's first voter by default.
    voted = 'voted_' + article_id
    conn.sadd(voted, user)
    # Voting only lasts a week, so let the voters set expire with it.
    conn.expire(voted, ONE_WEEK_IN_SECONDS)

    now = time.time()
    article = 'article_' + article_id
    conn.hmset(article, {
        'title': title,
        'link': link,
        'poster': user,
        'time': now,
        'votes': 1,
    })
    conn.zadd('article_score', article, now + VOTE_SCORE)
    conn.zadd('article_time', article, now)
    return article_id


def get_articles(conn, page, order='article_score'):
    """Return one page of article hashes, highest score (or newest) first.

    Pass order='article_time' for the most recently posted articles.
    """
    start = (page - 1) * ARTICLES_PER_PAGE
    end = start + ARTICLES_PER_PAGE - 1
    ids = conn.zrevrange(order, start, end)
    articles = []
    for article in ids:
        # Fix: the original dropped the conn.hgetall(article) call.
        article_data = conn.hgetall(article)
        article_data['id'] = article
        articles.append(article_data)
    return articles


def add_remove_groups(conn, article_id, to_add=None, to_remove=None):
    """Add the article to groups in *to_add*, remove it from *to_remove*.

    Defaults changed from mutable [] to None (same behavior, safer idiom).
    """
    to_add = to_add or []
    to_remove = to_remove or []
    article = 'article_' + article_id
    for group in to_add:
        conn.sadd('group_' + group, article)
    for group in to_remove:
        conn.srem('group_' + group, article)


def get_group_articles(conn, group, page, order='article_score'):
    """Return one page of articles from *group*, using a cached score zset."""
    key = order + group
    if not conn.exists(key):
        # Intersect the group's member set with the global score zset;
        # 'max' keeps each article's score.
        conn.zinterstore(key, ['group_' + group, order], aggregate='max')
        # Cache the intersection for 60 s.
        conn.expire(key, 60)
    return get_articles(conn, page, key)


if __name__ == '__main__':
    # Guarded so importing this module no longer opens a connection.
    r = redis.Redis(host='redis_serverip', port=6379,
                    password='redis_passwd', db=0)
    # seed_data(r)
    # article_vote(r, 'use_115423', 'article_100408')
    #
    # new_article_id = post_article(r, 'user_5424', 'yingxiongzhi', 'www.yingxiongzhi.com')
    # print('new_article_id:', new_article_id)
    #
    # add_remove_groups(r, 'article_100408')
    #
    # get_group_articles(r, 'programming', 1)
PS:
The redis-py module provides two classes, Redis and StrictRedis, whose APIs differ slightly (notably the argument order of commands like zadd/zincrby). This article uses the Redis class.
Down-vote ("against") implementation:
def article_against(conn, user, article): cutoff = time.time() - ONE_WEEK_IN_SECONDS if conn.zscore('article_time', article) < cutoff: return article_id = article.partition('_')[-1] if conn.sadd('against_' + article_id, user): conn.incrby('article_score', article, -VOTE_SCORE) conn.hincrby(article, 'votes', -1)
Removing the race condition from the vote (batch the two writes in a pipeline):
def article_against(conn, user, article): cutoff = time.time() - ONE_WEEK_IN_SECONDS if conn.zscore('article_time', article) < cutoff: return article_id = article.partition('_')[-1] if conn.sadd('against_' + article_id, user): pipeline = conn.pipeline() conn.incrby('article_score', article, -VOTE_SCORE) conn.hincrby(article, 'votes', -1) pipeline.execute()
References:
"Redis in Action"
Reproduced from: https://www.cnblogs.com/gattaca/p/6958789.html