Better error handling around broken feed pages.

This commit is contained in:
Samuel Clay 2011-09-04 14:47:47 -07:00
parent acfd9e7614
commit d2bcce4b6f
4 changed files with 12 additions and 3 deletions

View file

@@ -261,7 +261,7 @@ class GoogleReaderImporter(Importer):
try:
stories = json.decode(stories_str)
except:
logging.user(self.user, "~BB~FW~SBGoogle Reader starred stories: ~BT~FWNo stories" % (len(stories['items'])))
logging.user(self.user, "~BB~FW~SBGoogle Reader starred stories: ~BT~FWNo stories")
stories = None
if stories:
logging.user(self.user, "~BB~FW~SBGoogle Reader starred stories: ~BT~FW%s stories" % (len(stories['items'])))

View file

@@ -245,6 +245,7 @@ def refresh_feeds(request):
feed_ids = request.REQUEST.getlist('feed_id')
feeds = {}
user_subs = UserSubscription.objects.select_related('feed').filter(user=user, active=True)
feed_ids = [f for f in feed_ids if f and not f.startswith('river')]
if feed_ids:
user_subs = user_subs.filter(feed__in=feed_ids)
UNREAD_CUTOFF = datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD)

View file

@@ -13,6 +13,13 @@ HEADERS = {
'Connection': 'close',
}
BROKEN_PAGES = [
'tag:',
'info:',
'uuid:',
'[]',
]
class PageImporter(object):
def __init__(self, url, feed):
@@ -31,7 +38,7 @@ class PageImporter(object):
response = urllib2.urlopen(request)
time.sleep(0.01) # Grrr, GIL.
data = response.read()
elif any(self.url.startswith(s) for s in ['tag:', 'info:']):
elif any(self.url.startswith(s) for s in BROKEN_PAGES):
self.save_no_page()
return
else:

3
fabfile.py vendored
View file

@@ -110,7 +110,8 @@ def staging_full():
@roles('task')
def celery():
run('git pull')
with cd(env.NEWSBLUR_PATH):
run('git pull')
celery_stop()
celery_start()