Fixing a few bugs introduced with feed_id conversions.

Samuel Clay 2012-01-27 15:29:14 -08:00
parent ba48bfb6f5
commit 53efae20ff
4 changed files with 62 additions and 9 deletions


@@ -191,7 +191,7 @@ class UserSubscription(models.Model):
                 original_m.story_date = story.story_date
                 try:
                     original_m.save()
                 except OperationError, e:
-                    logging.user(request, "~BRCan't even save: %s" % (original_m.story_id))
+                    logging.user(request, "~BR~SKCan't even save: %s" % (original_m.story_id))
                     pass
         return data
@@ -383,6 +383,55 @@ class MUserStory(mongo.Document):
         'allow_inheritance': False,
     }
 
+    @classmethod
+    def mark_story_ids_as_read(cls, story_ids, user, request=None):
+        data = dict(code=0, payload=story_ids)
+        if not request:
+            request = user
+        if not self.needs_unread_recalc:
+            self.needs_unread_recalc = True
+            self.save()
+        if len(story_ids) > 1:
+            logging.user(request, "~FYRead %s stories in feed: %s" % (len(story_ids), self.feed))
+        else:
+            logging.user(request, "~FYRead story in feed: %s" % (self.feed))
+        for story_id in set(story_ids):
+            try:
+                story = MStory.objects.get(story_feed_id=self.feed_id, story_guid=story_id)
+            except MStory.DoesNotExist:
+                # Story has been deleted, probably by feed_fetcher.
+                continue
+            except MStory.MultipleObjectsReturned:
+                continue
+            now = datetime.datetime.utcnow()
+            date = now if now > story.story_date else story.story_date # For handling future stories
+            m = MUserStory(story=story, user_id=self.user_id,
+                           feed_id=self.feed_id, read_date=date,
+                           story_id=story_id, story_date=story.story_date)
+            try:
+                m.save()
+            except OperationError, e:
+                original_m = MUserStory.objects.get(story=story, user_id=self.user_id, feed_id=self.feed_id)
+                logging.user(request, "~BRMarked story as read error: %s" % (e))
+                logging.user(request, "~BRMarked story as read: %s" % (story_id))
+                logging.user(request, "~BROrigin story as read: %s" % (m.story.story_guid))
+                logging.user(request, "~BRMarked story id: %s" % (original_m.story_id))
+                logging.user(request, "~BROrigin story guid: %s" % (original_m.story.story_guid))
+                logging.user(request, "~BRRead now date: %s, original read: %s, story_date: %s." % (m.read_date, original_m.read_date, story.story_date))
+                original_m.story_id = story_id
+                original_m.read_date = date
+                original_m.story_date = story.story_date
+                try:
+                    original_m.save()
+                except OperationError, e:
+                    logging.user(request, "~BR~SKCan't even save: %s" % (original_m.story_id))
+                    pass
+        return data
+
     @classmethod
     def delete_old_stories(cls, feed_id):
         UNREAD_CUTOFF = datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD)
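
The new MUserStory.mark_story_ids_as_read mirrors the UserSubscription method above, and as displayed it still reads self.needs_unread_recalc, self.feed_id and self.user_id inside a @classmethod, where no instance is bound. The heart of the hunk is the save-then-update-on-duplicate pattern: try to insert the read record, and when the duplicate key raises an OperationError, fetch the existing record and refresh it instead. Below is a standalone sketch of that pattern with a plain dict standing in for the MongoDB collection; every name in it (store, insert_read_story, mark_read, DuplicateKeyError) is illustrative rather than anything this commit defines, and user_id/feed_id are passed explicitly since a classmethod has no self to read them from.

import datetime

class DuplicateKeyError(Exception):
    pass

store = {}  # (user_id, feed_id, story_guid) -> read_date

def insert_read_story(user_id, feed_id, story_guid, read_date):
    # Stands in for MUserStory(...).save() hitting a unique index.
    key = (user_id, feed_id, story_guid)
    if key in store:
        raise DuplicateKeyError(key)
    store[key] = read_date

def mark_read(user_id, feed_id, story_guid, read_date):
    try:
        insert_read_story(user_id, feed_id, story_guid, read_date)
    except DuplicateKeyError:
        # Same shape as the except OperationError branch above: keep a single
        # record per (user, feed, story) and refresh its read_date instead of failing.
        store[(user_id, feed_id, story_guid)] = read_date

mark_read(1, 42, "guid:1", datetime.datetime(2012, 1, 27))
mark_read(1, 42, "guid:1", datetime.datetime(2012, 1, 28))  # duplicate -> updated in place
assert store[(1, 42, "guid:1")] == datetime.datetime(2012, 1, 28)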


@@ -704,21 +704,25 @@ def mark_all_as_read(request):
 def mark_story_as_read(request):
     story_ids = request.REQUEST.getlist('story_id')
     feed_id = int(get_argument_or_404(request, 'feed_id'))
 
+    usersub = None
     try:
         usersub = UserSubscription.objects.select_related('feed').get(user=request.user, feed=feed_id)
-    except (UserSubscription.DoesNotExist, Feed.DoesNotExist):
+    except (Feed.DoesNotExist):
         duplicate_feed = DuplicateFeed.objects.filter(duplicate_feed_id=feed_id)
         if duplicate_feed:
             feed_id = duplicate_feed[0].feed_id
             try:
                 usersub = UserSubscription.objects.get(user=request.user,
                                                        feed=duplicate_feed[0].feed)
-            except (UserSubscription.DoesNotExist, Feed.DoesNotExist):
+            except (Feed.DoesNotExist):
                 return dict(code=-1)
         else:
             return dict(code=-1)
-    data = usersub.mark_story_ids_as_read(story_ids, request=request)
+    except UserSubscription.DoesNotExist:
+        MUserStory.mark_story_ids_as_read(story_ids, user=request.user, request=request)
+    else:
+        data = usersub.mark_story_ids_as_read(story_ids, request=request)
 
     return data
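
The reshuffled view leans on Python's try/except/else: the else clause runs only when the lookup in the try block raised nothing, so a missing UserSubscription now falls through to the new MUserStory classmethod instead of blowing up on usersub. A stripped-down sketch of that control flow follows; every name in it is a stand-in rather than NewsBlur code, and, unlike the hunk as displayed, the sketch assigns the fallback branch's result to data so the final return always has a value.

class UserSubscriptionDoesNotExist(Exception):
    pass

def lookup_usersub(user_id, feed_id, subs):
    # Stand-in for UserSubscription.objects.get(...), raising when no row exists.
    try:
        return subs[(user_id, feed_id)]
    except KeyError:
        raise UserSubscriptionDoesNotExist()

def mark_story_as_read(user_id, feed_id, story_ids, subs):
    try:
        usersub = lookup_usersub(user_id, feed_id, subs)
    except UserSubscriptionDoesNotExist:
        data = dict(code=0, payload=story_ids, path="MUserStory")        # fallback path
    else:
        data = dict(code=0, payload=story_ids, path="usersub", sub=usersub)  # normal path
    return data

assert mark_story_as_read(1, 42, ["guid:1"], subs={})["path"] == "MUserStory"
assert mark_story_as_read(1, 42, ["guid:1"], subs={(1, 42): "sub"})["path"] == "usersub"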


@@ -21,7 +21,7 @@ class IconImporter(object):
     def __init__(self, feed, force=False):
         self.feed = feed
         self.force = force
-        self.feed_icon, _ = MFeedIcon.objects.get_or_create(feed_id=self.feed_id)
+        self.feed_icon, _ = MFeedIcon.objects.get_or_create(feed_id=self.feed.pk)
 
     def save(self):
         if not self.force and self.feed.favicon_not_found:
@@ -137,7 +137,7 @@ class IconImporter(object):
     def fetch_image_from_page_data(self):
         image = None
         image_file = None
-        content = MFeedPage.get_data(feed_id=self.feed_id)
+        content = MFeedPage.get_data(feed_id=self.feed.pk)
         url = self._url_from_html(content)
         if url:
             image, image_file = self.get_image_from_url(url)


@@ -137,6 +137,6 @@ class PageImporter(object):
     def save_page(self, html):
         if html and len(html) > 100:
-            feed_page, _ = MFeedPage.objects.get_or_create(feed_id=self.feed_id)
+            feed_page, _ = MFeedPage.objects.get_or_create(feed_id=self.feed.pk)
             feed_page.page_data = html
             feed_page.save()
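
The icon and page importer fixes are the same change in two places: both helpers keep the whole Django Feed object on self.feed, so the key for the Mongo documents has to be read off that object as self.feed.pk; self.feed_id, presumably left behind by an earlier feed_id conversion pass, is not an attribute these classes ever define. A minimal sketch of the distinction, with stand-in class names rather than NewsBlur code:

class Feed(object):
    # Stand-in for the Django model; pk plays the role of Feed.pk.
    def __init__(self, pk):
        self.pk = pk

class PageImporter(object):
    def __init__(self, feed):
        # The importer stores the Feed object itself; it gets no feed_id
        # attribute for free (that shortcut only exists on Django model
        # instances that declare a `feed` ForeignKey).
        self.feed = feed

    def mongo_key(self):
        return self.feed.pk  # the integer key comes off the Feed object

assert PageImporter(Feed(42)).mongo_key() == 42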