Finally have a test case for the Google Blog duping.

Samuel Clay 2017-04-30 18:47:10 -07:00
parent ef9312d02f
commit ffeeb170e0
9 changed files with 6364 additions and 30 deletions

apps/rss_feeds/fixtures/google1.json

@@ -0,0 +1,24 @@
+[
+    {
+        "pk": 766,
+        "model": "rss_feeds.feed",
+        "fields": {
+            "feed_address": "%(NEWSBLUR_DIR)s/apps/rss_feeds/fixtures/google1.xml",
+            "days_to_trim": 90,
+            "feed_link": "%(NEWSBLUR_DIR)s/apps/rss_feeds/fixtures/google.html",
+            "feed_link_locked": true,
+            "fetched_once": true,
+            "num_subscribers": 1,
+            "active_subscribers": 1,
+            "creation": "2009-01-12",
+            "feed_title": "The Official Google Blog",
+            "last_update": "2009-07-06 22:30:03",
+            "next_scheduled_update": "2009-07-06 22:30:03",
+            "last_story_date": "2009-07-06 22:30:03",
+            "min_to_decay": 1,
+            "etag": "",
+            "last_modified": "2009-07-06 22:30:03",
+            "active": 1
+        }
+    }
+]
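The fixture can't hard-code where the checkout lives, so feed_address carries a literal %(NEWSBLUR_DIR)s token; the models.py change later in this commit expands it with a plain string replace under TEST_DEBUG. A minimal sketch of that expansion (the directory value is a made-up stand-in for settings.NEWSBLUR_DIR):

```python
# Sketch of the placeholder expansion Feed.update() performs below;
# NEWSBLUR_DIR here is an example value, not the real setting.
NEWSBLUR_DIR = "/srv/newsblur"

feed_address = "%(NEWSBLUR_DIR)s/apps/rss_feeds/fixtures/google1.xml"
feed_address = feed_address.replace("%(NEWSBLUR_DIR)s", NEWSBLUR_DIR)
print(feed_address)  # /srv/newsblur/apps/rss_feeds/fixtures/google1.xml
```

Note that the token looks like a %-format key but is expanded with str.replace(), not % interpolation, so it sits inert in the fixture until the test path swaps it out.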

apps/rss_feeds/fixtures/google1.xml

File diff suppressed because it is too large

apps/rss_feeds/fixtures/google2.json

@@ -0,0 +1,24 @@
+[
+    {
+        "pk": 766,
+        "model": "rss_feeds.feed",
+        "fields": {
+            "feed_address": "%(NEWSBLUR_DIR)s/apps/rss_feeds/fixtures/google2.xml",
+            "days_to_trim": 90,
+            "feed_link": "%(NEWSBLUR_DIR)s/apps/rss_feeds/fixtures/google.html",
+            "feed_link_locked": true,
+            "fetched_once": true,
+            "num_subscribers": 1,
+            "active_subscribers": 1,
+            "creation": "2009-01-12",
+            "feed_title": "The Official Google Blog",
+            "last_update": "2009-07-06 22:30:03",
+            "next_scheduled_update": "2009-07-06 22:30:03",
+            "last_story_date": "2009-07-06 22:30:03",
+            "min_to_decay": 1,
+            "etag": "",
+            "last_modified": "2009-07-06 22:30:03",
+            "active": 1
+        }
+    }
+]
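As far as the fields shown here go, google2.json differs from google1.json only in feed_address (google2.xml instead of google1.xml). It reuses pk 766, so loading it re-points the existing test feed at a second snapshot of the same blog. A quick check, run from the checkout root, with paths inferred from the fixture bodies above:

```python
# Compare the two fixtures field by field; only feed_address is
# expected to differ.
import json

with open("apps/rss_feeds/fixtures/google1.json") as f1, \
     open("apps/rss_feeds/fixtures/google2.json") as f2:
    one, two = json.load(f1)[0]["fields"], json.load(f2)[0]["fields"]

print({k: (one[k], two[k]) for k in one if one[k] != two[k]})
# {'feed_address': ('...google1.xml', '...google2.xml')}
```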

apps/rss_feeds/fixtures/google2.xml

File diff suppressed because it is too large

apps/reader/fixtures/subscriptions.json

@@ -1,23 +1,9 @@
 [
-    {
-        "pk": 1,
-        "model": "reader.usersubscription",
-        "fields": {
-            "feed": 1,
-            "unread_count_updated": "2009-08-01 00:23:42",
-            "mark_read_date": "2009-07-28 23:17:27",
-            "unread_count_neutral": 0,
-            "unread_count_positive": 0,
-            "unread_count_negative": 0,
-            "user": 1,
-            "last_read_date": "2009-07-28 23:17:27"
-        }
-    },
     {
         "pk": 1,
         "model": "reader.usersubscriptionfolders",
         "fields": {
-            "folders": "[{\"Tech\": [4, 5]}, 1, 2, 3, 6]",
+            "folders": "[{\"Tech\": [4, 5]}, 2, 3, 6, 766]",
             "user": 1
         }
     },
@@ -108,6 +94,21 @@
             "last_read_date": "2009-07-28 23:17:27"
         }
     },
+    {
+        "pk": 8,
+        "model": "reader.usersubscription",
+        "fields": {
+            "feed": 766,
+            "active": true,
+            "unread_count_updated": "2009-08-01 00:23:42",
+            "mark_read_date": "2009-07-01 22:30:03",
+            "unread_count_neutral": 0,
+            "unread_count_positive": 0,
+            "unread_count_negative": 0,
+            "user": 1,
+            "last_read_date": "2009-07-28 23:17:27"
+        }
+    },
     {
         "pk": 2,
@@ -401,6 +402,38 @@
             "has_feed_exception": false
         }
     },
+    {
+        "pk": 766,
+        "model": "rss_feeds.feed",
+        "fields": {
+            "premium_subscribers": -1,
+            "creation": "2011-08-27",
+            "exception_code": 0,
+            "last_load_time": 0,
+            "active_subscribers": 1,
+            "feed_address": "%(NEWSBLUR_DIR)s/apps/rss_feeds/fixtures/google1.xml",
+            "feed_link": "%(NEWSBLUR_DIR)s/apps/rss_feeds/fixtures/google.html",
+            "hash_address_and_link": "766",
+            "feed_link_locked": true,
+            "last_update": "2011-08-27 02:45:21",
+            "etag": null,
+            "average_stories_per_month": 0,
+            "feed_title": "Google Blog",
+            "last_modified": null,
+            "next_scheduled_update": "2011-08-28 14:33:50",
+            "favicon_color": null,
+            "stories_last_month": 0,
+            "active": true,
+            "favicon_not_found": false,
+            "has_page_exception": false,
+            "fetched_once": false,
+            "days_to_trim": 90,
+            "num_subscribers": 1,
+            "last_story_date": "2011-08-28 00:03:50",
+            "min_to_decay": 720,
+            "has_feed_exception": false
+        }
+    },
     {
         "pk": 56,

apps/rss_feeds/models.py

@@ -1073,15 +1073,6 @@ class Feed(models.Model):
         from utils import feed_fetcher
         r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
         original_feed_id = int(self.pk)
-        if getattr(settings, 'TEST_DEBUG', False):
-            original_feed_address = self.feed_address
-            original_feed_link = self.feed_link
-            self.feed_address = self.feed_address.replace("%(NEWSBLUR_DIR)s", settings.NEWSBLUR_DIR)
-            if self.feed_link:
-                self.feed_link = self.feed_link.replace("%(NEWSBLUR_DIR)s", settings.NEWSBLUR_DIR)
-            if self.feed_address != original_feed_address or self.feed_link != original_feed_link:
-                self.save(update_fields=['feed_address', 'feed_link'])
         options = {
             'verbose': kwargs.get('verbose'),
@@ -1099,6 +1090,19 @@ class Feed(models.Model):
             'feed_xml': kwargs.get('feed_xml'),
             'requesting_user_id': kwargs.get('requesting_user_id', None)
         }
+        if getattr(settings, 'TEST_DEBUG', False):
+            print " ---> Testing feed fetch: %s" % self.log_title
+            options['force'] = False
+            options['force_fp'] = True
+            original_feed_address = self.feed_address
+            original_feed_link = self.feed_link
+            self.feed_address = self.feed_address.replace("%(NEWSBLUR_DIR)s", settings.NEWSBLUR_DIR)
+            if self.feed_link:
+                self.feed_link = self.feed_link.replace("%(NEWSBLUR_DIR)s", settings.NEWSBLUR_DIR)
+            if self.feed_address != original_feed_address or self.feed_link != original_feed_link:
+                self.save(update_fields=['feed_address', 'feed_link'])
         if self.is_newsletter:
             feed = self.update_newsletter_icon()
         else:
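Moving the TEST_DEBUG block below the options dict lets the test path pin force=False and force_fp=True (presumably forcing the feedparser pass even when the feed looks unchanged) in the same place as the path substitution. The substitution works at all because feedparser accepts a local file path as readily as a URL; a minimal sketch, with a hypothetical path:

```python
# feedparser parses a local file the same way it parses a URL, which
# is what lets the rewritten feed_address point at a fixture on disk.
import feedparser

parsed = feedparser.parse("/srv/newsblur/apps/rss_feeds/fixtures/google1.xml")  # example path
print(parsed.feed.get("title"), len(parsed.entries))
```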

apps/rss_feeds/tests.py

@@ -165,6 +165,51 @@ class FeedTest(TestCase):
         content = json.decode(response.content)
         self.assertEquals(content['feeds'][str(feed['feed_id'])]['nt'], 9)
+
+    def test_load_feeds__google(self):
+        self.client.login(username='conesus', password='test')
+        old_story_guid = "http://www.blog.google:443/topics/inside-google/google-earths-incredible-3d-imagery-explained/"
+
+        management.call_command('loaddata', 'google1.json', verbosity=0)
+        feed = Feed.objects.get(pk=766)
+        print " Testing test_load_feeds__google: %s" % feed
+        stories = MStory.objects(story_feed_id=feed.pk)
+        self.assertEquals(stories.count(), 0)
+
+        management.call_command('refresh_feed', force=False, feed=766, single_threaded=True, daemonize=False)
+
+        stories = MStory.objects(story_feed_id=feed.pk)
+        self.assertEquals(stories.count(), 20)
+
+        response = self.client.get(reverse('load-feeds'))
+        content = json.decode(response.content)
+        self.assertEquals(content['feeds']['766']['nt'], 7)
+
+        self.client.post(reverse('mark-story-as-read'), {'story_id': old_story_guid, 'feed_id': 766})
+
+        response = self.client.get(reverse('refresh-feeds'))
+        content = json.decode(response.content)
+        self.assertEquals(content['feeds']['766']['nt'], 6)
+
+        management.call_command('loaddata', 'google2.json', verbosity=0)
+        management.call_command('refresh_feed', force=False, feed=766, single_threaded=True, daemonize=False)
+
+        stories = MStory.objects(story_feed_id=feed.pk)
+        self.assertEquals(stories.count(), 20)
+
+        url = reverse('load-single-feed', kwargs=dict(feed_id=766))
+        response = self.client.get(url)
+        # pprint([c['story_title'] for c in json.decode(response.content)])
+        feed = json.decode(response.content)
+
+        # Test: 1 changed char in title
+        self.assertEquals(len(feed['stories']), 6)
+
+        response = self.client.get(reverse('refresh-feeds'))
+        content = json.decode(response.content)
+        self.assertEquals(content['feeds']['766']['nt'], 6)
+
     def test_load_feeds__brokelyn__invalid_xml(self):
         self.client.login(username='conesus', password='test')
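The test drives the full duping scenario: 20 stories fetched from google1.xml, one marked read, then a re-fetch from google2.xml, asserting the story count stays at 20 and the unread count stays at 6 rather than treating the retitled stories as new. The inline comment hints that google2.xml changes a single character in a title; NewsBlur's actual matching lives in the feed models, but the idea that such a story should still count as the same one can be illustrated with difflib (the titles here are hypothetical, loosely based on the story slug above):

```python
# Illustration only, not NewsBlur's dedupe code: a one-character title
# edit still yields a near-perfect similarity ratio.
from difflib import SequenceMatcher

old_title = "Google Earth's incredible 3D imagery, explained"
new_title = "Google Earth's incredible 3D imagery, explained!"  # 1 char added

print(round(SequenceMatcher(None, old_title, new_title).ratio(), 3))  # ~0.99
```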

utils/feed_fetcher.py

@@ -732,8 +732,9 @@ class ProcessFeed:
         self.feed.update_all_statistics(has_new_stories=bool(ret_values['new']), force=self.options['force'])
         fetch_date = datetime.datetime.now()
         if ret_values['new']:
-            self.feed.trim_feed()
-            self.feed.expire_redis()
+            if not getattr(settings, 'TEST_DEBUG', False):
+                self.feed.trim_feed()
+                self.feed.expire_redis()
             if MStatistics.get('raw_feed', None) == self.feed.pk:
                 self.feed.save_raw_feed(self.raw_feed, fetch_date)
         self.feed.save_feed_history(200, "OK", date=fetch_date)
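Skipping trim_feed() and expire_redis() under TEST_DEBUG presumably keeps the fixture's stories and their cached unread state intact between the test's two fetches; trimming a feed of 2017-era stories mid-test would invalidate the counts the assertions above depend on. The guard pattern in isolation (the surrounding fetcher logic is elided; feed stands in for an rss_feeds.Feed instance with the methods named in the diff):

```python
# Settings-flag guard around destructive post-fetch maintenance.
from django.conf import settings

def finish_fetch(feed, has_new_stories):
    if has_new_stories and not getattr(settings, 'TEST_DEBUG', False):
        feed.trim_feed()     # prune old stories (destructive; skip in tests)
        feed.expire_redis()  # let cached story/unread keys expire
```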

utils/testrunner.py

@@ -1,5 +1,4 @@
 from django.test.simple import DjangoTestSuiteRunner
 from django.test import TransactionTestCase
 from mongoengine import connect
-
@@ -11,13 +10,13 @@ class TestRunner(DjangoTestSuiteRunner):
         return super(TestRunner, self).setup_databases(**kwargs)

-    def teardown_databases(self, db_name, **kwargs):
+    def teardown_databases(self, old_config, **kwargs):
         import pymongo
         conn = pymongo.MongoClient()
         db_name = 'newsblur_test'
         conn.drop_database(db_name)
-        print 'Dropping test-database: ' + db_name
-        return super(TestRunner, self).teardown_databases(db_name, **kwargs)
+        print ('Dropping test-database: %s' % db_name)
+        return super(TestRunner, self).teardown_databases(old_config, **kwargs)

 # class TestCase(TransactionTestCase):
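The signature fix matters because Django passes teardown_databases whatever setup_databases returned, as old_config. The old override shadowed its db_name parameter with the literal 'newsblur_test' and handed that string up as old_config, which the parent runner expects to be its own bookkeeping structure. A modern sketch of the same override against DiscoverRunner (DjangoTestSuiteRunner was removed in Django 1.8; the class name here is hypothetical):

```python
# Same override on Django's current runner API; old_config is whatever
# setup_databases returned and must be forwarded untouched.
from django.test.runner import DiscoverRunner


class MongoTestRunner(DiscoverRunner):  # hypothetical name
    def teardown_databases(self, old_config, **kwargs):
        import pymongo

        db_name = 'newsblur_test'
        pymongo.MongoClient().drop_database(db_name)
        print('Dropping test-database: %s' % db_name)
        return super(MongoTestRunner, self).teardown_databases(old_config, **kwargs)
```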