This won't fix it, but in the event of missing GUIDs on a feed fetch, check for stories within the mark-read period.

This commit is contained in:
Samuel Clay 2017-04-30 22:32:46 -07:00
parent c9326a6f02
commit b753cf6819
3 changed files with 27 additions and 10 deletions

View file

@@ -417,6 +417,7 @@ class UserSubscription(models.Model):
@classmethod
def feeds_with_updated_counts(cls, user, feed_ids=None, check_fetch_status=False, force=False):
feeds = {}
silent = not getattr(settings, "TEST_DEBUG", False)
# Get subscriptions for user
user_subs = cls.objects.select_related('feed').filter(user=user, active=True)
@@ -430,7 +431,7 @@ class UserSubscription(models.Model):
sub.needs_unread_recalc or
sub.unread_count_updated < user.profile.unread_cutoff or
sub.oldest_unread_story_date < user.profile.unread_cutoff):
sub = sub.calculate_feed_scores(silent=True, force=force)
sub = sub.calculate_feed_scores(silent=silent, force=force)
if not sub: continue # TODO: Figure out the correct sub and give it a new feed_id
feed_id = sub.feed_id
@@ -679,10 +680,11 @@ class UserSubscription(models.Model):
if len(story_hashes) > 1:
logging.user(request, "~FYRead %s stories in feed: %s" % (len(story_hashes), self.feed))
else:
logging.user(request, "~FYRead story in feed: %s" % (self.feed))
logging.user(request, "~FYRead story (%s) in feed: %s" % (story_hashes, self.feed))
RUserStory.aggregate_mark_read(self.feed_id)
for story_hash in set(story_hashes):
for story_hash in set(story_hashes):
# logging.user(request, "~FYRead story: %s" % (story_hash))
RUserStory.mark_read(self.user_id, self.feed_id, story_hash, aggregated=aggregated)
r.publish(self.user.username, 'story:read:%s' % story_hash)
@@ -812,6 +814,7 @@ class UserSubscription(models.Model):
else:
feed_scores['neutral'] += 1
else:
print " ---> Cutoff date: %s" % date_delta
unread_story_hashes = self.story_hashes(user_id=self.user_id, feed_ids=[self.feed_id],
usersubs=[self],
read_filter='unread', group_by_feed=False,
@@ -1113,7 +1116,7 @@ class RUserStory:
# r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
story_hash = MStory.ensure_story_hash(story_hash, story_feed_id=story_feed_id)
if not story_hash: return
def redis_commands(key):

View file

@@ -1,3 +1,4 @@
import redis
from utils import json_functions as json
from django.test.client import Client
from django.test import TestCase
@@ -14,6 +15,9 @@ class FeedTest(TestCase):
def setUp(self):
disconnect()
settings.MONGODB = connect('test_newsblur')
settings.REDIS_STORY_HASH_POOL = redis.ConnectionPool(host=settings.REDIS_STORY['host'], port=6379, db=10)
settings.REDIS_FEED_READ_POOL = redis.ConnectionPool(host=settings.SESSION_REDIS_HOST, port=6379, db=10)
self.client = Client()
def tearDown(self):
@@ -166,8 +170,12 @@ class FeedTest(TestCase):
self.assertEquals(content['feeds'][str(feed['feed_id'])]['nt'], 9)
def test_load_feeds__google(self):
# Freezegun the date to 2017-04-30
r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
r.delete('RS:1:766')
self.client.login(username='conesus', password='test')
old_story_guid = "http://www.blog.google:443/topics/inside-google/google-earths-incredible-3d-imagery-explained/"
old_story_guid = "blog.google:443/topics/inside-google/google-earths-incredible-3d-imagery-explained/"
management.call_command('loaddata', 'google1.json', verbosity=1)
print Feed.objects.all()
@@ -181,15 +189,16 @@ class FeedTest(TestCase):
stories = MStory.objects(story_feed_id=feed.pk)
self.assertEquals(stories.count(), 20)
response = self.client.get(reverse('load-feeds'))
response = self.client.get(reverse('load-feeds')+"?update_counts=true")
content = json.decode(response.content)
self.assertEquals(content['feeds']['766']['nt'], 7)
self.assertEquals(content['feeds']['766']['nt'], 10)
self.client.post(reverse('mark-story-as-read'), {'story_id': old_story_guid, 'feed_id': 766})
old_story = MStory.objects.get(story_feed_id=feed.pk, story_guid__contains=old_story_guid)
self.client.post(reverse('mark-story-hashes-as-read'), {'story_hash': old_story.story_hash})
response = self.client.get(reverse('refresh-feeds'))
content = json.decode(response.content)
self.assertEquals(content['feeds']['766']['nt'], 6)
self.assertEquals(content['feeds']['766']['nt'], 9)
management.call_command('loaddata', 'google2.json', verbosity=1)
management.call_command('refresh_feed', force=False, feed=766, single_threaded=True, daemonize=False)
@@ -208,7 +217,7 @@ class FeedTest(TestCase):
response = self.client.get(reverse('refresh-feeds'))
content = json.decode(response.content)
self.assertEquals(content['feeds']['766']['nt'], 6)
self.assertEquals(content['feeds']['766']['nt'], 9)
def test_load_feeds__brokelyn__invalid_xml(self):
self.client.login(username='conesus', password='test')

View file

@@ -688,6 +688,11 @@ class ProcessFeed:
# story_date__gte=start_date,
# story_feed_id=self.feed.pk
))
if len(existing_stories) == 0:
existing_stories = dict((s.story_hash, s) for s in MStory.objects(
story_date__gte=start_date,
story_feed_id=self.feed.pk
))
ret_values = self.feed.add_update_stories(stories, existing_stories,
verbose=self.options['verbose'],