Merging SDIFFSTORE and ZINTERSTORE into a single ZDIFFSTORE, available as of Redis 6.2.0. Requires a new Docker image.

Samuel Clay 2022-07-18 22:34:07 -04:00
parent 381953ee78
commit 9e21d03255
4 changed files with 26 additions and 21 deletions


@@ -173,10 +173,10 @@ class UserSubscription(models.Model):
             max_score = current_time
             if read_filter == 'unread':
                 min_score = read_dates[feed_id]
-                if needs_unread_recalc[feed_id]:
-                    pipeline.sdiffstore(unread_stories_key, stories_key, read_stories_key)
-                    # pipeline.expire(unread_stories_key, unread_cutoff_diff.days*24*60*60)
-                    pipeline.expire(unread_stories_key, 1*60*60)  # 1 hour
+                # if needs_unread_recalc[feed_id]:
+                #     pipeline.sdiffstore(unread_stories_key, stories_key, read_stories_key)
+                #     # pipeline.expire(unread_stories_key, unread_cutoff_diff.days*24*60*60)
+                #     pipeline.expire(unread_stories_key, 1*60*60)  # 1 hour
             else:
                 min_score = 0
@@ -189,7 +189,7 @@ class UserSubscription(models.Model):
             ranked_stories_key = unread_ranked_stories_key
             if read_filter == 'unread':
                 if needs_unread_recalc[feed_id]:
-                    pipeline.zinterstore(unread_ranked_stories_key, [sorted_stories_key, unread_stories_key], aggregate="MAX")
+                    pipeline.zdiffstore(unread_ranked_stories_key, [sorted_stories_key, read_stories_key])
                     # pipeline.expire(unread_ranked_stories_key, unread_cutoff_diff.days*24*60*60)
                     pipeline.expire(unread_ranked_stories_key, 1*60*60)  # 1 hour
             if order == 'oldest':
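
The old path needed two commands because read stories live in a plain set: SDIFFSTORE built an unread member set, then ZINTERSTORE (with aggregate="MAX", so the sorted set's date scores win over the plain set's implicit score of 1) re-attached the scores. ZDIFFSTORE, new in Redis 6.2, does both at once: it keeps scores from the first key and drops any member present in the later keys. A minimal sketch with hypothetical keys, not the production key names:

    import redis

    r = redis.Redis()  # assumes a local Redis >= 6.2 and redis-py >= 4

    r.sadd("F:1", "a", "b", "c")              # story hashes (plain set)
    r.zadd("zF:1", {"a": 1, "b": 2, "c": 3})  # same hashes scored by date
    r.sadd("RS:1", "b")                       # read story hashes (plain set)

    # Old two-step: unread members first, then re-attach scores.
    r.sdiffstore("U:1", "F:1", "RS:1")
    r.zinterstore("zU:1", ["zF:1", "U:1"], aggregate="MAX")

    # New one-step: scores come from zF:1; members of RS:1 drop out.
    r.zdiffstore("zU:1", ["zF:1", "RS:1"])
    print(r.zrange("zU:1", 0, -1, withscores=True))  # [(b'a', 1.0), (b'c', 3.0)]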
@@ -234,7 +234,21 @@ class UserSubscription(models.Model):
             story_hashes.extend(hashes)
         if store_stories_key:
-            r.zunionstore(store_stories_key, unread_ranked_stories_keys, aggregate="MAX")
+            chunk_count = 0
+            chunk_size = 1000
+            if len(unread_ranked_stories_keys) < chunk_size:
+                r.zunionstore(store_stories_key, unread_ranked_stories_keys)
+            else:
+                pipeline = r.pipeline()
+                for unread_ranked_stories_keys_group in chunks(unread_ranked_stories_keys, chunk_size):
+                    pipeline.zunionstore(f"{store_stories_key}-chunk{chunk_count}", unread_ranked_stories_keys_group, aggregate="MAX")
+                    chunk_count += 1
+                pipeline.execute()
+                r.zunionstore(store_stories_key, [f"{store_stories_key}-chunk{i}" for i in range(chunk_count)], aggregate="MAX")
+                pipeline = r.pipeline()
+                for i in range(chunk_count):
+                    pipeline.delete(f"{store_stories_key}-chunk{i}")
+                pipeline.execute()
 
         if not store_stories_key:
             return story_hashes
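
The ZUNIONSTORE fan-in is now bounded: with fewer than 1,000 source keys a single ZUNIONSTORE runs much as before; otherwise the keys are unioned into temporary "-chunk{n}" keys in one pipeline, the chunk keys are unioned into the final destination, and a second pipeline deletes the temporaries. That keeps any one ZUNIONSTORE's argument list at a manageable size for users subscribed to thousands of feeds. The chunks() helper the diff calls isn't shown; a plausible sketch (the real helper lives elsewhere in NewsBlur's utils, so treat this as an assumption):

    def chunks(items, chunk_size):
        # Yield successive chunk_size-sized slices of items.
        for i in range(0, len(items), chunk_size):
            yield items[i:i + chunk_size]

    list(chunks(["k1", "k2", "k3", "k4", "k5"], 2))
    # -> [['k1', 'k2'], ['k3', 'k4'], ['k5']]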


@@ -28,7 +28,7 @@ django-paypal==1.1.2
 django-qurl==0.1.1
 django-pipeline>=2,<3
 django-prometheus>=2,<3
-django-redis-cache==3.0.0
+django-redis==5.2.0
 django-redis-sessions==0.6.1
 django-ses==1.0.3
 django-timezone-field==4.1.1
@@ -101,7 +101,7 @@ PyYAML==5.3.1
 pyzmq==22.0.3
 raven==6.10.0
 # readability-lxml==0.8.1.1  # Was vendorized due to noscript # Vendorized again due to 0.8.1.1 not out yet
-redis==3.5.3
+redis>=4,<5
 requests==2.25.0
 requests-oauthlib==1.3.0
 scipy==1.5.4
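
These pins track the code change: redis-py only grew zdiffstore() in its 4.0 release, hence the move from redis==3.5.3 to redis>=4,<5, and django-redis-cache is replaced by django-redis, which supports redis-py 4. A quick sanity check (ours, not part of the commit):

    import redis

    # zdiffstore() is absent from redis-py 3.5.3, so guard on the major version.
    assert int(redis.__version__.split(".")[0]) >= 4
    assert hasattr(redis.Redis, "zdiffstore")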


@@ -58,12 +58,8 @@ PRO_MINUTES_BETWEEN_FETCHES = 15
 CACHES = {
     'default': {
-        'BACKEND': 'redis_cache.RedisCache',
-        'LOCATION': 'db_redis:6579',
-        'OPTIONS': {
-            'DB': 6,
-            'PARSER_CLASS': 'redis.connection.HiredisParser'
-        },
+        'BACKEND': 'django_redis.cache.RedisCache',
+        'LOCATION': 'redis://db_redis:6579/6',
     },
 }


@@ -786,13 +786,8 @@ SESSION_REDIS = {
 CACHES = {
     'default': {
-        'BACKEND': 'redis_cache.RedisCache',
-        'LOCATION': '%s:%s' % (REDIS_USER['host'], REDIS_PORT),
-        'OPTIONS': {
-            'DB': 6,
-            'PARSER_CLASS': 'redis.connection.HiredisParser',
-            'SERIALIZER_CLASS': 'redis_cache.serializers.PickleSerializer'
-        },
+        'BACKEND': 'django_redis.cache.RedisCache',
+        'LOCATION': 'redis://%s:%s/6' % (REDIS_USER['host'], REDIS_PORT),
     },
 }
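
In both settings files the old redis_cache backend's separate host, port, and DB option collapse into django-redis's single redis:// LOCATION URL (the /6 path selects DB 6). The dropped OPTIONS aren't lost: if the hiredis parser or pickle serializer are still wanted, django-redis accepts them as OPTIONS, and pickle is already its default serializer. A hedged sketch of that fuller form, not what the commit ships:

    CACHES = {
        'default': {
            'BACKEND': 'django_redis.cache.RedisCache',
            'LOCATION': 'redis://%s:%s/6' % (REDIS_USER['host'], REDIS_PORT),
            'OPTIONS': {
                'PARSER_CLASS': 'redis.connection.HiredisParser',
                'SERIALIZER': 'django_redis.serializers.pickle.PickleSerializer',
            },
        },
    }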