Counting read stories on a per-feed basis. This will allow efficient usage checks down the line.
This commit is contained in:
parent 2cef63d90f
commit d8abe375a2

3 changed files with 28 additions and 19 deletions
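
The counters this commit introduces live in Redis under weekly per-feed keys of the form fR:<feed_id>:<YYYY-week>, so a later usage check only has to read back one key per feed. A minimal sketch of such a check, assuming a plain local Redis client in place of NewsBlur's connection pools (feed_read_count is a hypothetical helper, not part of this commit):

import datetime

import redis

def feed_read_count(feed_id, r=None):
    # Hypothetical helper: reads back the weekly counter that
    # aggregate_mark_read() increments in the diff below.
    r = r or redis.Redis(host='localhost', port=6379, db=1)
    week_of_year = datetime.datetime.now().strftime('%Y-%U')
    feed_read_key = "fR:%s:%s" % (feed_id, week_of_year)
    return int(r.get(feed_read_key) or 0)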
apps/reader/models.py

@@ -646,6 +646,7 @@ class UserSubscription(models.Model):
             logging.user(request, "~FYRead %s stories in feed: %s" % (len(story_hashes), self.feed))
         else:
             logging.user(request, "~FYRead story in feed: %s" % (self.feed))
+            RUserStory.aggregate_mark_read(self.feed_id)
 
         for story_hash in set(story_hashes):
             RUserStory.mark_read(self.user_id, self.feed_id, story_hash, aggregated=aggregated)
@@ -1002,10 +1003,15 @@ class RUserStory:
         if not isinstance(story_hashes, list):
             story_hashes = [story_hashes]
 
+        single_story = len(story_hashes) == 1
+
         for story_hash in story_hashes:
             feed_id, _ = MStory.split_story_hash(story_hash)
             feed_ids.add(feed_id)
 
+            if single_story:
+                cls.aggregate_mark_read(feed_id)
+
             # Find other social feeds with this story to update their counts
             friend_key = "F:%s:F" % (user_id)
             share_key = "S:%s" % (story_hash)
@@ -1038,6 +1044,19 @@
             cls.mark_unread(user_id, feed_id, story_hash, social_user_ids=friends_with_shares, r=r)
 
         return feed_id, list(friend_ids)
 
+    @classmethod
+    def aggregate_mark_read(cls, feed_id):
+        if not feed_id:
+            logging.debug(" ***> ~BR~FWNo feed_id on aggregate mark read. Ignoring.")
+            return
+
+        r = redis.Redis(connection_pool=settings.REDIS_FEED_READ_POOL)
+        week_of_year = datetime.datetime.now().strftime('%Y-%U')
+        feed_read_key = "fR:%s:%s" % (feed_id, week_of_year)
+
+        r.incr(feed_read_key)
+        r.expire(feed_read_key, 2*settings.DAYS_OF_STORY_HASHES*24*60*60)
+
     @classmethod
     def mark_read(cls, user_id, story_feed_id, story_hash, social_user_ids=None,
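
aggregate_mark_read both increments the weekly bucket and resets its TTL on every single-story read, so buckets for active feeds stay alive while stale weeks age out of Redis on their own. A small sketch of that behavior against a local Redis, with the key layout mirroring the diff (feed 42, week 37 of 2013, and DAYS_OF_STORY_HASHES = 14 are illustrative assumptions):

import redis

r = redis.Redis(host='localhost', port=6379, db=1)  # stand-in for REDIS_FEED_READ_POOL

feed_read_key = "fR:42:2013-37"  # hypothetical feed 42, week 37 of 2013
for _ in range(3):
    r.incr(feed_read_key)
    r.expire(feed_read_key, 2 * 14 * 24 * 60 * 60)  # assuming DAYS_OF_STORY_HASHES = 14

print(r.get(feed_read_key))  # three single-story reads counted this week
print(r.ttl(feed_read_key))  # TTL was reset by the most recent read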
apps/social/models.py

@@ -1135,13 +1135,17 @@ class MSocialSubscription(mongo.Document):
             logging.user(request, "~FYRead %s stories in social subscription: %s" % (len(story_hashes), sub_username))
         else:
             logging.user(request, "~FYRead story in social subscription: %s" % (sub_username))
 
 
         for story_hash in set(story_hashes):
             if feed_id is not None:
                 story_hash = MStory.ensure_story_hash(story_hash, story_feed_id=feed_id)
+            if feed_id is None:
+                feed_id, _ = MStory.split_story_hash(story_hash)
 
+            if len(story_hashes) == 1:
+                RUserStory.aggregate_mark_read(feed_id)
 
             # Find other social feeds with this story to update their counts
             friend_key = "F:%s:F" % (self.user_id)
             share_key = "S:%s" % (story_hash)
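
The fallback to MStory.split_story_hash works because NewsBlur story hashes embed the feed id. A hedged sketch of that format, with the split re-implemented as a standalone function purely for illustration:

def split_story_hash(story_hash):
    # Stand-in for MStory.split_story_hash: story hashes look like
    # "<feed_id>:<story guid hash>", e.g. "42:a1b2c3".
    feed_id, guid_hash = story_hash.split(':')
    return int(feed_id), guid_hash

feed_id, _ = split_story_hash("42:a1b2c3")  # feed_id == 42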
settings.py
@@ -34,10 +34,6 @@ import django.http
 import re
 from mongoengine import connect
 from boto.s3.connection import S3Connection
-try:
-    from raven.contrib.celery import register_signal, register_logger_signal
-except ImportError, e:
-    print " ---> Raven not installed, can't log errors to Sentry."
 from utils import jammit
 
 # ===================
@@ -587,17 +583,6 @@ if not DEVELOPMENT:
 
     )
     RAVEN_CLIENT = raven.Client(SENTRY_DSN)
-
-    # register a custom filter to filter out duplicate logs
-    register_logger_signal(RAVEN_CLIENT)
-
-    # hook into the Celery error handler
-    register_signal(RAVEN_CLIENT)
-
-    # The register_logger_signal function can also take an optional argument
-    # `loglevel` which is the level used for the handler created.
-    # Defaults to `logging.ERROR`
-    register_logger_signal(RAVEN_CLIENT, loglevel=logging.ERROR)
     RAVEN_CONFIG = {
         'dsn': SENTRY_DSN,
         # If you are using git, you can also automatically configure the
@@ -692,12 +677,13 @@ REDIS_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379,
 REDIS_ANALYTICS_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=2)
 REDIS_STATISTICS_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=3)
 REDIS_FEED_UPDATE_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=4)
-# REDIS_STORY_HASH_POOL2 = redis.ConnectionPool(host=REDIS['host'], port=6379, db=8)
+# REDIS_STORY_HASH_POOL2 = redis.ConnectionPool(host=REDIS['host'], port=6379, db=8) # Only used when changing DAYS_OF_UNREAD
 REDIS_STORY_HASH_TEMP_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=10)
 # REDIS_CACHE_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=6) # Duped in CACHES
-REDIS_SESSION_POOL = redis.ConnectionPool(host=SESSION_REDIS_HOST, port=6379, db=5)
 REDIS_STORY_HASH_POOL = redis.ConnectionPool(host=REDIS_STORY['host'], port=6379, db=1)
+REDIS_FEED_READ_POOL = redis.ConnectionPool(host=SESSION_REDIS_HOST, port=6379, db=1)
+REDIS_FEED_SUB_POOL = redis.ConnectionPool(host=SESSION_REDIS_HOST, port=6379, db=2)
+REDIS_SESSION_POOL = redis.ConnectionPool(host=SESSION_REDIS_HOST, port=6379, db=5)
 REDIS_PUBSUB_POOL = redis.ConnectionPool(host=REDIS_PUBSUB['host'], port=6379, db=0)
 
 # ==========
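
Connection pools like the new REDIS_FEED_READ_POOL are built once at settings import time, and each redis.Redis(connection_pool=...) call only checks a connection out of the shared pool, which is why aggregate_mark_read can cheaply construct a client per call. A minimal sketch of that pattern with a stand-in pool (localhost and db=1 mirror the new setting line; the real code reads settings.REDIS_FEED_READ_POOL):

import redis

# Stand-in for settings.REDIS_FEED_READ_POOL; created once at import time.
REDIS_FEED_READ_POOL = redis.ConnectionPool(host='localhost', port=6379, db=1)

def bump_counter(key):
    # A per-call client is cheap: it borrows a connection from the shared pool.
    r = redis.Redis(connection_pool=REDIS_FEED_READ_POOL)
    r.incr(key)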
|
Loading…
Add table
Reference in a new issue