Perhaps this is the way to find the missing story hashes. Shouldn't cause an issue, but logic for dates may be backwards.

This commit is contained in:
Samuel Clay 2017-05-01 09:27:31 -07:00
parent ffeeb170e0
commit c9326a6f02
3 changed files with 23 additions and 4 deletions

View file

@@ -149,6 +149,15 @@ class Feed(models.Model):
return datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD_FREE)
@property
def story_hashes_in_unread_cutoff(self):
    """Return the story hashes for this feed that fall inside the unread cutoff.

    Reads the feed's ``zF:<pk>`` Redis sorted set (presumably scored by story
    timestamp — consistent with the timestamp bounds used below) and returns
    all members whose score lies between "now plus one day" and the feed's
    unread cutoff.
    """
    r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
    # One day of slack so stories dated slightly in the future still match.
    current_time = int(time.time() + 60*60*24)
    unread_cutoff = self.unread_cutoff.strftime('%s')
    # BUG FIX: ZREVRANGE takes rank *indexes*; these arguments are epoch
    # timestamps, which must be compared against member *scores*. Use
    # ZREVRANGEBYSCORE (max first, min second for the reversed variant).
    story_hashes = r.zrevrangebyscore('zF:%s' % self.pk, current_time,
                                      unread_cutoff)
    return story_hashes
@classmethod
def generate_hash_address_and_link(cls, feed_address, feed_link):
if not feed_address: feed_address = ""

View file

@@ -169,8 +169,8 @@ class FeedTest(TestCase):
self.client.login(username='conesus', password='test')
old_story_guid = "http://www.blog.google:443/topics/inside-google/google-earths-incredible-3d-imagery-explained/"
management.call_command('loaddata', 'google1.json', verbosity=0)
management.call_command('loaddata', 'google1.json', verbosity=1)
print Feed.objects.all()
feed = Feed.objects.get(pk=766)
print " Testing test_load_feeds__google: %s" % feed
stories = MStory.objects(story_feed_id=feed.pk)
@@ -191,7 +191,7 @@ class FeedTest(TestCase):
content = json.decode(response.content)
self.assertEquals(content['feeds']['766']['nt'], 6)
management.call_command('loaddata', 'google2.json', verbosity=0)
management.call_command('loaddata', 'google2.json', verbosity=1)
management.call_command('refresh_feed', force=False, feed=766, single_threaded=True, daemonize=False)
stories = MStory.objects(story_feed_id=feed.pk)

View file

@@ -672,7 +672,17 @@ class ProcessFeed:
story['story_hash'] = MStory.feed_guid_hash_unsaved(self.feed.pk, story.get('guid'))
stories.append(story)
story_hashes.append(story.get('story_hash'))
original_story_hash_count = len(story_hashes)
story_hashes_in_unread_cutoff = self.feed.story_hashes_in_unread_cutoff
story_hashes.extend(story_hashes_in_unread_cutoff)
story_hashes = list(set(story_hashes))
if self.options['verbose'] or settings.DEBUG:
logging.debug(u' ---> [%-30s] ~FBFound ~SB%s~SN guids, adding ~SB%s~SN guids from db' % (
self.feed.log_title[:30],
original_story_hash_count, len(story_hashes)-original_story_hash_count))
existing_stories = dict((s.story_hash, s) for s in MStory.objects(
story_hash__in=story_hashes,
# story_date__gte=start_date,