From f6ff242457b518a2e78c7ba950f246f8d3a9290c Mon Sep 17 00:00:00 2001 From: Samuel Clay Date: Tue, 20 Jul 2010 20:23:49 -0400 Subject: [PATCH 01/15] Keeping track of feed opens. This will go into statistics later. --- apps/analyzer/views.py | 4 +- apps/reader/migrations/0003_feed_opens.py | 146 ++++++++++++++++++++++ apps/reader/models.py | 28 ++++- apps/reader/views.py | 6 +- apps/rss_feeds/models.py | 3 +- 5 files changed, 181 insertions(+), 6 deletions(-) create mode 100644 apps/reader/migrations/0003_feed_opens.py diff --git a/apps/analyzer/views.py b/apps/analyzer/views.py index 1465dfa71..789452bbc 100644 --- a/apps/analyzer/views.py +++ b/apps/analyzer/views.py @@ -56,7 +56,9 @@ def save_classifier(request): classifier_dict.update({content_type: post_content}) classifier, created = ClassifierCls.objects.get_or_create(**classifier_dict) - if classifier.score != score: + if score == 0: + classifier.delete() + elif classifier.score != score: if score == 0: if ((classifier.score == 1 and opinion.startswith('remove_like')) or (classifier.score == -1 and opinion.startswith('remove_dislike'))): diff --git a/apps/reader/migrations/0003_feed_opens.py b/apps/reader/migrations/0003_feed_opens.py new file mode 100644 index 000000000..d21d4d4d0 --- /dev/null +++ b/apps/reader/migrations/0003_feed_opens.py @@ -0,0 +1,146 @@ +# encoding: utf-8 +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + +class Migration(SchemaMigration): + + def forwards(self, orm): + + # Adding field 'UserSubscription.feed_opens' + db.add_column('reader_usersubscription', 'feed_opens', self.gf('django.db.models.fields.IntegerField')(default=0), keep_default=False) + + + def backwards(self, orm): + + # Deleting field 'UserSubscription.feed_opens' + db.delete_column('reader_usersubscription', 'feed_opens') + + + models = { + 'auth.group': { + 'Meta': {'object_name': 'Group'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), + 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) + }, + 'auth.permission': { + 'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, + 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) + }, + 'auth.user': { + 'Meta': {'object_name': 'User'}, + 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), + 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}), + 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), + 'is_superuser': ('django.db.models.fields.BooleanField', [], 
{'default': 'False', 'blank': 'True'}), + 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), + 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) + }, + 'contenttypes.contenttype': { + 'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, + 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) + }, + 'reader.feature': { + 'Meta': {'object_name': 'Feature'}, + 'date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'description': ('django.db.models.fields.TextField', [], {'default': "''"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) + }, + 'reader.userstory': { + 'Meta': {'unique_together': "(('user', 'feed', 'story'),)", 'object_name': 'UserStory'}, + 'feed': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['rss_feeds.Feed']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'opinion': ('django.db.models.fields.IntegerField', [], {'default': '0'}), + 'read_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), + 'story': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['rss_feeds.Story']"}), + 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) + }, + 'reader.usersubscription': { + 'Meta': {'unique_together': "(('user', 'feed'),)", 'object_name': 'UserSubscription'}, + 'feed': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['rss_feeds.Feed']"}), + 'feed_opens': ('django.db.models.fields.IntegerField', [], {'default': '0'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'last_read_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2010, 7, 6, 20, 17, 40, 108259)'}), + 'mark_read_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2010, 7, 6, 20, 17, 40, 108313)'}), + 'needs_unread_recalc': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), + 'unread_count_negative': ('django.db.models.fields.IntegerField', [], {'default': '0'}), + 'unread_count_neutral': ('django.db.models.fields.IntegerField', [], {'default': '0'}), + 'unread_count_positive': ('django.db.models.fields.IntegerField', [], {'default': '0'}), + 'unread_count_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2000, 1, 1, 0, 0)'}), + 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) + }, + 'reader.usersubscriptionfolders': { + 'Meta': {'object_name': 'UserSubscriptionFolders'}, + 'folders': ('django.db.models.fields.TextField', [], {'default': "'[]'"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'user': ('django.db.models.fields.related.ForeignKey', 
[], {'to': "orm['auth.User']"}) + }, + 'rss_feeds.feed': { + 'Meta': {'object_name': 'Feed', 'db_table': "'feeds'"}, + 'active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}), + 'creation': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}), + 'days_to_trim': ('django.db.models.fields.IntegerField', [], {'default': '90'}), + 'etag': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}), + 'feed_address': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '255'}), + 'feed_link': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200', 'null': 'True', 'blank': 'True'}), + 'feed_tagline': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'null': 'True', 'blank': 'True'}), + 'feed_title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'last_load_time': ('django.db.models.fields.IntegerField', [], {'default': '0'}), + 'last_modified': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), + 'last_update': ('django.db.models.fields.DateTimeField', [], {'default': '0', 'auto_now': 'True', 'blank': 'True'}), + 'min_to_decay': ('django.db.models.fields.IntegerField', [], {'default': '15'}), + 'next_scheduled_update': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'num_subscribers': ('django.db.models.fields.IntegerField', [], {'default': '0'}), + 'popular_authors': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'null': 'True', 'blank': 'True'}), + 'popular_tags': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}), + 'stories_per_month': ('django.db.models.fields.IntegerField', [], {'default': '0'}) + }, + 'rss_feeds.story': { + 'Meta': {'unique_together': "(('story_feed', 'story_guid_hash'),)", 'object_name': 'Story', 'db_table': "'stories'"}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'story_author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['rss_feeds.StoryAuthor']"}), + 'story_author_name': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}), + 'story_content': ('utils.compressed_textfield.StoryField', [], {'null': 'True', 'blank': 'True'}), + 'story_content_type': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'story_date': ('django.db.models.fields.DateTimeField', [], {}), + 'story_feed': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stories'", 'to': "orm['rss_feeds.Feed']"}), + 'story_guid': ('django.db.models.fields.CharField', [], {'max_length': '1000'}), + 'story_guid_hash': ('django.db.models.fields.CharField', [], {'max_length': '40'}), + 'story_original_content': ('utils.compressed_textfield.StoryField', [], {'null': 'True', 'blank': 'True'}), + 'story_past_trim_date': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), + 'story_permalink': ('django.db.models.fields.CharField', [], {'max_length': '1000'}), + 'story_tags': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'null': 'True', 'blank': 'True'}), + 'story_title': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'tags': 
('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['rss_feeds.Tag']", 'symmetrical': 'False'}) + }, + 'rss_feeds.storyauthor': { + 'Meta': {'object_name': 'StoryAuthor'}, + 'author_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'feed': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['rss_feeds.Feed']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) + }, + 'rss_feeds.tag': { + 'Meta': {'object_name': 'Tag'}, + 'feed': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['rss_feeds.Feed']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + } + } + + complete_apps = ['reader'] diff --git a/apps/reader/models.py b/apps/reader/models.py index 2b6da4e59..8144ea7f8 100644 --- a/apps/reader/models.py +++ b/apps/reader/models.py @@ -5,11 +5,18 @@ from django.core.cache import cache from apps.rss_feeds.models import Feed, Story from apps.analyzer.models import ClassifierFeed, ClassifierAuthor, ClassifierTag, ClassifierTitle from apps.analyzer.models import apply_classifier_titles, apply_classifier_feeds, apply_classifier_authors, apply_classifier_tags -# from utils.compressed_textfield import StoryField +from utils.compressed_textfield import StoryField DAYS_OF_UNREAD = 14 class UserSubscription(models.Model): + """ + A feed which a user has subscrubed to. Carries all of the cached information + about the subscription, including unread counts of the three primary scores. + + Also has a dirty flag (needs_unread_recalc) which means that the unread counts + are not accurate and need to be calculated with `self.calculate_feed_scores()`. + """ user = models.ForeignKey(User) feed = models.ForeignKey(Feed) last_read_date = models.DateTimeField(default=datetime.datetime.now() @@ -21,6 +28,7 @@ class UserSubscription(models.Model): unread_count_negative = models.IntegerField(default=0) unread_count_updated = models.DateTimeField(default=datetime.datetime(2000,1,1)) needs_unread_recalc = models.BooleanField(default=False) + feed_opens = models.IntegerField(default=0) def __unicode__(self): return '[' + self.feed.feed_title + '] ' @@ -119,6 +127,10 @@ class UserSubscription(models.Model): class UserStory(models.Model): + """ + Stories read by the user. These are deleted as the mark_read_date for the + UserSubscription passes the UserStory date. + """ user = models.ForeignKey(User) feed = models.ForeignKey(Feed) story = models.ForeignKey(Story) @@ -135,6 +147,11 @@ class UserStory(models.Model): unique_together = ("user", "feed", "story") class UserSubscriptionFolders(models.Model): + """ + A JSON list of folders and feeds for while a user has subscribed. The list + is a recursive descent of feeds and folders in folders. Used to layout + the feeds and folders in the Reader's feed navigation pane. + """ user = models.ForeignKey(User) folders = models.TextField(default="[]") @@ -144,8 +161,12 @@ class UserSubscriptionFolders(models.Model): class Meta: verbose_name_plural = "folders" verbose_name = "folder" - + + class Feature(models.Model): + """ + Simple blog-like feature board shown to all users on the home page. 
+ """ description = models.TextField(default="") date = models.DateTimeField(default=datetime.datetime.now) @@ -153,4 +174,5 @@ class Feature(models.Model): return "[%s] %s" % (self.date, self.description[:50]) class Meta: - ordering = ["-date"] \ No newline at end of file + ordering = ["-date"] + \ No newline at end of file diff --git a/apps/reader/views.py b/apps/reader/views.py index 3cf2142bb..9746c3359 100644 --- a/apps/reader/views.py +++ b/apps/reader/views.py @@ -21,7 +21,7 @@ try: except: pass from utils import json, feedfinder -from utils.user_functions import get_user, invalidate_template_cache +from utils.user_functions import get_user SINGLE_DAY = 60*60*24 @@ -241,6 +241,9 @@ def load_single_feed(request): classifiers = get_classifiers_for_user(user, feed_id, classifier_feeds, classifier_authors, classifier_titles, classifier_tags) + usersub.feed_opens += 1 + usersub.save() + context = dict(stories=stories, feed_tags=feed_tags, feed_authors=feed_authors, classifiers=classifiers) data = json.encode(context) return HttpResponse(data, mimetype='application/json') @@ -510,6 +513,7 @@ def save_feed_order(request): if folders: # Test that folders can be JSON decoded folders_list = json.decode(folders) + assert folders_list is not None user_sub_folders = UserSubscriptionFolders.objects.get(user=request.user) user_sub_folders.folders = folders user_sub_folders.save() diff --git a/apps/rss_feeds/models.py b/apps/rss_feeds/models.py index b773632fa..b85a09f9c 100644 --- a/apps/rss_feeds/models.py +++ b/apps/rss_feeds/models.py @@ -484,7 +484,7 @@ class Story(models.Model): story_guid = models.CharField(max_length=1000) story_guid_hash = models.CharField(max_length=40) story_past_trim_date = models.BooleanField(default=False) - story_tags = models.CharField(max_length=2000) + story_tags = models.CharField(max_length=2000, null=True, blank=True) tags = models.ManyToManyField('Tag') def __unicode__(self): @@ -495,6 +495,7 @@ class Story(models.Model): verbose_name = "story" db_table="stories" ordering=["-story_date"] + unique_together = (("story_feed", "story_guid_hash"),) def save(self, *args, **kwargs): if not self.story_guid_hash and self.story_guid: From 06fdef9e5e14045dcb741b372ab8d33ae15d43bb Mon Sep 17 00:00:00 2001 From: Samuel Clay Date: Tue, 20 Jul 2010 22:57:18 -0400 Subject: [PATCH 02/15] Deduping feeds. Recursively switches all users and read stories and subscriptions to use the original, non-duplicate feed. 
--- .../management/commands/merge_feeds.py | 96 +++++++++++++++++-- apps/rss_feeds/models.py | 2 +- 2 files changed, 89 insertions(+), 9 deletions(-) diff --git a/apps/rss_feeds/management/commands/merge_feeds.py b/apps/rss_feeds/management/commands/merge_feeds.py index 8b690e43c..c343b7f98 100644 --- a/apps/rss_feeds/management/commands/merge_feeds.py +++ b/apps/rss_feeds/management/commands/merge_feeds.py @@ -1,6 +1,10 @@ from django.core.management.base import BaseCommand -from apps.rss_feeds.models import Feed +from apps.rss_feeds.models import Feed, Story +from apps.reader.models import UserSubscription, UserStory, UserSubscriptionFolders from optparse import make_option +from django.db import connection +from django.db.utils import IntegrityError +from utils import json class Command(BaseCommand): option_list = BaseCommand.option_list + ( @@ -9,12 +13,88 @@ class Command(BaseCommand): ) def handle(self, *args, **options): - feeds = Feed.objects.all() + cursor = connection.cursor() + cursor.execute("""SELECT DISTINCT f.id AS original_id, f2.id AS duplicate_id, + f.feed_address AS original_feed_address, + f2.feed_address AS duplicate_feed_address + # f.feed_title AS original_feed_title, + # f2.feed_title AS duplicate_feed_title, + # f.feed_link AS original_feed_link, + # f2.feed_link AS duplicate_feed_link, + # f2.feed_tagline AS original_feed_tagline, + # f.feed_tagline AS duplicate_feed_tagline + FROM stories s1 + INNER JOIN stories s2 ON s1.story_guid_hash = s2.story_guid_hash + INNER JOIN feeds f ON f.id = s1.story_feed_id + INNER JOIN feeds f2 ON f2.id = s2.story_feed_id + WHERE s1.story_feed_id != s2.story_feed_id + AND f2.id > f.id + AND f.feed_tagline = f2.feed_tagline + AND f.feed_link = f2.feed_link + AND f.feed_title = f2.feed_title;""") + + feed_fields = ('original_id', 'duplicate_id', 'original_feed_address', 'duplicate_feed_address') + for feeds_values in cursor.fetchall(): + feeds = dict(zip(feed_fields, feeds_values)) + original_feed = Feed.objects.get(pk=feeds['original_id']) + duplicate_feed = Feed.objects.get(pk=feeds['duplicate_id']) - feeds_count = feeds.count() - - for i in xrange(0, feeds_count, 100): - feeds = Feed.objects.all()[i:i+100] - for feed in feeds.iterator(): - pass + print " ---> Feed: [%s - %s] %s - %s" % (feeds['original_id'], feeds['duplicate_id'], + original_feed, original_feed.feed_link) + print " --> %s" % feeds['original_feed_address'] + print " --> %s" % feeds['duplicate_feed_address'] + + user_subs = UserSubscription.objects.filter(feed=duplicate_feed) + for user_sub in user_subs: + # Rewrite feed in subscription folders + user_sub_folders = UserSubscriptionFolders.objects.get(user=user_sub.user) + folders = json.decode(user_sub_folders.folders) + folders = self.rewrite_folders(folders, original_feed, duplicate_feed) + user_sub_folders.folders = json.encode(folders) + # user_sub_folders.save() + + # Switch to original feed for the user subscription + print " ===> %s " % user_sub.user + user_sub.feed = original_feed + try: + # user_sub.save() + pass + except IntegrityError: + print " !!!!> %s already subscribed" % user_sub.user + # user_sub.delete() + + # Switch read stories + user_stories = UserStory.objects.filter(feed=duplicate_feed) + print " ---> %s read stories" % user_stories.count() + for user_story in user_stories: + user_story.feed = original_feed + duplicate_story = user_story.story + original_story = Story.objects.filter(story_guid_hash=duplicate_story.story_guid_hash, + story_feed=original_feed) + if original_story: + 
user_story.story = original_story[0] + else: + print " ***> Can't find original story: %s" % duplicate_story + # user_story.save() + + duplicate_stories = Story.objects.filter(story_feed=duplicate_feed) + print " ---> Deleting %s stories" % duplicate_stories.count() + # duplicate_stories.delete() + # duplicate_feed.delete() + + def rewrite_folders(self, folders, original_feed, duplicate_feed): + new_folders = [] + for k, folder in enumerate(folders): + if isinstance(folder, int): + if folder == duplicate_feed.pk: + print " ===> Rewrote %s'th item: %s" % (k+1, folders) + new_folders.append(original_feed.pk) + else: + new_folders.append(folder) + elif isinstance(folder, dict): + for f_k, f_v in folder.items(): + new_folders.append({f_k: self.rewrite_folders(f_v, original_feed, duplicate_feed)}) + + return new_folders + \ No newline at end of file diff --git a/apps/rss_feeds/models.py b/apps/rss_feeds/models.py index 653a55c6a..f33c79173 100644 --- a/apps/rss_feeds/models.py +++ b/apps/rss_feeds/models.py @@ -43,7 +43,7 @@ class Feed(models.Model): return self.feed_title def save(self, *args, **kwargs): - if len(self.feed_tagline) > 1024: + if self.feed_tagline and len(self.feed_tagline) > 1024: self.feed_tagline = self.feed_tagline[:1024] super(Feed, self).save(*args, **kwargs) From e7b7036cd28a6b0b9e7cd2f3612a40411b7e09bb Mon Sep 17 00:00:00 2001 From: Samuel Clay Date: Tue, 20 Jul 2010 23:18:38 -0400 Subject: [PATCH 03/15] Deduping all analyzer models for multiple feeds. --- .../management/commands/merge_feeds.py | 34 ++++++++++++++++--- 1 file changed, 30 insertions(+), 4 deletions(-) diff --git a/apps/rss_feeds/management/commands/merge_feeds.py b/apps/rss_feeds/management/commands/merge_feeds.py index c343b7f98..92e82bd36 100644 --- a/apps/rss_feeds/management/commands/merge_feeds.py +++ b/apps/rss_feeds/management/commands/merge_feeds.py @@ -1,6 +1,8 @@ from django.core.management.base import BaseCommand -from apps.rss_feeds.models import Feed, Story +from apps.rss_feeds.models import Feed, Story, Tag, StoryAuthor from apps.reader.models import UserSubscription, UserStory, UserSubscriptionFolders +from apps.analyzer.models import FeatureCategory, Category, ClassifierTitle +from apps.analyzer.models import ClassifierAuthor, ClassifierFeed, ClassifierTag from optparse import make_option from django.db import connection from django.db.utils import IntegrityError @@ -77,9 +79,33 @@ class Command(BaseCommand): print " ***> Can't find original story: %s" % duplicate_story # user_story.save() - duplicate_stories = Story.objects.filter(story_feed=duplicate_feed) - print " ---> Deleting %s stories" % duplicate_stories.count() - # duplicate_stories.delete() + def delete_story_feed(model, feed_field='feed'): + duplicate_stories = model.objects.filter(**{feed_field: duplicate_feed}) + if duplicate_stories.count(): + print " ---> Deleting %s %s" % (duplicate_stories.count(), model) + # duplicate_stories.delete() + def switch_feed(model): + duplicates = model.objects.filter(feed=duplicate_feed) + if duplicates.count(): + print " ---> Switching %s %s" % (duplicates.count(), model) + for duplicate in duplicates: + duplicate.feed = original_feed + try: + # duplicate.save() + pass + except IntegrityError: + print " !!!!> %s already exists" % duplicate + # duplicates.delete() + delete_story_feed(Story, 'story_feed') + delete_story_feed(Tag) + delete_story_feed(StoryAuthor) + switch_feed(FeatureCategory) + switch_feed(Category) + switch_feed(ClassifierTitle) + switch_feed(ClassifierAuthor) + 
switch_feed(ClassifierFeed) + switch_feed(ClassifierTag) + # duplicate_authors.delete() # duplicate_feed.delete() def rewrite_folders(self, folders, original_feed, duplicate_feed): From da0060fb439b4ca644ea5f792413d1fe9a0f4e14 Mon Sep 17 00:00:00 2001 From: Samuel Clay Date: Tue, 20 Jul 2010 23:21:10 -0400 Subject: [PATCH 04/15] Commenting out sql comments. This should have been handled. --- apps/rss_feeds/management/commands/merge_feeds.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/apps/rss_feeds/management/commands/merge_feeds.py b/apps/rss_feeds/management/commands/merge_feeds.py index 92e82bd36..7e78c0297 100644 --- a/apps/rss_feeds/management/commands/merge_feeds.py +++ b/apps/rss_feeds/management/commands/merge_feeds.py @@ -19,12 +19,14 @@ class Command(BaseCommand): cursor.execute("""SELECT DISTINCT f.id AS original_id, f2.id AS duplicate_id, f.feed_address AS original_feed_address, f2.feed_address AS duplicate_feed_address + """ # f.feed_title AS original_feed_title, # f2.feed_title AS duplicate_feed_title, # f.feed_link AS original_feed_link, # f2.feed_link AS duplicate_feed_link, # f2.feed_tagline AS original_feed_tagline, # f.feed_tagline AS duplicate_feed_tagline + """ FROM stories s1 INNER JOIN stories s2 ON s1.story_guid_hash = s2.story_guid_hash INNER JOIN feeds f ON f.id = s1.story_feed_id From dd71b6ab20352944715cf727caa3fe78cbdaf9ab Mon Sep 17 00:00:00 2001 From: Samuel Clay Date: Tue, 20 Jul 2010 23:30:31 -0400 Subject: [PATCH 05/15] Uncommented destructive methods/. Actually deleting duped feeds this time. --- .../management/commands/merge_feeds.py | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/apps/rss_feeds/management/commands/merge_feeds.py b/apps/rss_feeds/management/commands/merge_feeds.py index 7e78c0297..5abfca4ce 100644 --- a/apps/rss_feeds/management/commands/merge_feeds.py +++ b/apps/rss_feeds/management/commands/merge_feeds.py @@ -55,17 +55,17 @@ class Command(BaseCommand): folders = json.decode(user_sub_folders.folders) folders = self.rewrite_folders(folders, original_feed, duplicate_feed) user_sub_folders.folders = json.encode(folders) - # user_sub_folders.save() + user_sub_folders.save() # Switch to original feed for the user subscription print " ===> %s " % user_sub.user user_sub.feed = original_feed try: - # user_sub.save() + user_sub.save() pass except IntegrityError: print " !!!!> %s already subscribed" % user_sub.user - # user_sub.delete() + user_sub.delete() # Switch read stories user_stories = UserStory.objects.filter(feed=duplicate_feed) @@ -79,13 +79,13 @@ class Command(BaseCommand): user_story.story = original_story[0] else: print " ***> Can't find original story: %s" % duplicate_story - # user_story.save() + user_story.save() def delete_story_feed(model, feed_field='feed'): duplicate_stories = model.objects.filter(**{feed_field: duplicate_feed}) if duplicate_stories.count(): print " ---> Deleting %s %s" % (duplicate_stories.count(), model) - # duplicate_stories.delete() + duplicate_stories.delete() def switch_feed(model): duplicates = model.objects.filter(feed=duplicate_feed) if duplicates.count(): @@ -93,11 +93,11 @@ class Command(BaseCommand): for duplicate in duplicates: duplicate.feed = original_feed try: - # duplicate.save() + duplicate.save() pass except IntegrityError: print " !!!!> %s already exists" % duplicate - # duplicates.delete() + duplicates.delete() delete_story_feed(Story, 'story_feed') delete_story_feed(Tag) delete_story_feed(StoryAuthor) @@ -107,8 +107,8 @@ class 
Command(BaseCommand): switch_feed(ClassifierAuthor) switch_feed(ClassifierFeed) switch_feed(ClassifierTag) - # duplicate_authors.delete() - # duplicate_feed.delete() + + duplicate_feed.delete() def rewrite_folders(self, folders, original_feed, duplicate_feed): new_folders = [] @@ -116,7 +116,7 @@ class Command(BaseCommand): for k, folder in enumerate(folders): if isinstance(folder, int): if folder == duplicate_feed.pk: - print " ===> Rewrote %s'th item: %s" % (k+1, folders) + # print " ===> Rewrote %s'th item: %s" % (k+1, folders) new_folders.append(original_feed.pk) else: new_folders.append(folder) From 95ba5fcbd87738575085bdfad02f13dd8f172275 Mon Sep 17 00:00:00 2001 From: Samuel Clay Date: Tue, 20 Jul 2010 23:40:09 -0400 Subject: [PATCH 06/15] Pass on deleted feeds. Everything will get deleted eventually. The SQL that gets this ID is a bit aggressive. --- apps/rss_feeds/management/commands/merge_feeds.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/apps/rss_feeds/management/commands/merge_feeds.py b/apps/rss_feeds/management/commands/merge_feeds.py index 5abfca4ce..2f65a851e 100644 --- a/apps/rss_feeds/management/commands/merge_feeds.py +++ b/apps/rss_feeds/management/commands/merge_feeds.py @@ -35,13 +35,18 @@ class Command(BaseCommand): AND f2.id > f.id AND f.feed_tagline = f2.feed_tagline AND f.feed_link = f2.feed_link - AND f.feed_title = f2.feed_title;""") + AND f.feed_title = f2.feed_title + ORDER BY original_id ASC;""") feed_fields = ('original_id', 'duplicate_id', 'original_feed_address', 'duplicate_feed_address') for feeds_values in cursor.fetchall(): feeds = dict(zip(feed_fields, feeds_values)) - original_feed = Feed.objects.get(pk=feeds['original_id']) - duplicate_feed = Feed.objects.get(pk=feeds['duplicate_id']) + try: + original_feed = Feed.objects.get(pk=feeds['original_id']) + duplicate_feed = Feed.objects.get(pk=feeds['duplicate_id']) + except Feed.DoesNotExist: + print " ***> Already deleted feed: %s" % feeds['duplicate_id'] + continue print " ---> Feed: [%s - %s] %s - %s" % (feeds['original_id'], feeds['duplicate_id'], original_feed, original_feed.feed_link) From 7b655f4cc403cc863f36c3a64996a8edddfcd695 Mon Sep 17 00:00:00 2001 From: Samuel Clay Date: Tue, 20 Jul 2010 23:46:29 -0400 Subject: [PATCH 07/15] User stories might already exist on original feed if switching from duplicate feed. --- apps/rss_feeds/management/commands/merge_feeds.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/apps/rss_feeds/management/commands/merge_feeds.py b/apps/rss_feeds/management/commands/merge_feeds.py index 2f65a851e..b1a1277ff 100644 --- a/apps/rss_feeds/management/commands/merge_feeds.py +++ b/apps/rss_feeds/management/commands/merge_feeds.py @@ -84,7 +84,10 @@ class Command(BaseCommand): user_story.story = original_story[0] else: print " ***> Can't find original story: %s" % duplicate_story - user_story.save() + try: + user_story.save() + except IntegrityError: + print " ***> Story already saved: %s" % user_story def delete_story_feed(model, feed_field='feed'): duplicate_stories = model.objects.filter(**{feed_field: duplicate_feed}) From ebdc80f5268fad827dffd0514b0c42a78b1d5e7e Mon Sep 17 00:00:00 2001 From: Samuel Clay Date: Tue, 20 Jul 2010 23:59:56 -0400 Subject: [PATCH 08/15] Login as. Needed to test other accounts. 
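The only non-obvious line is setting user.backend: django.contrib.auth.login() expects a user object that came back from authenticate(), which stamps the backend path onto it. Impersonation skips authenticate(), so the backend has to be filled in by hand. The core of the pattern as a sketch (the helper name is illustrative):

    from django.conf import settings
    from django.contrib.auth import login

    def impersonate(request, user):
        # login() reads user.backend, which authenticate() would normally set;
        # when skipping authenticate(), assign a configured backend manually.
        user.backend = settings.AUTHENTICATION_BACKENDS[0]
        login(request, user)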
--- apps/reader/urls.py | 1 + apps/reader/views.py | 16 +++++++++++++++- settings.py | 1 + 3 files changed, 17 insertions(+), 1 deletion(-) diff --git a/apps/reader/urls.py b/apps/reader/urls.py index 78f8f91dd..780e20127 100644 --- a/apps/reader/urls.py +++ b/apps/reader/urls.py @@ -3,6 +3,7 @@ from apps.reader import views urlpatterns = patterns('', url(r'^$', views.index), + url(r'^login_as', views.login_as, name='login_as'), url(r'^logout', views.logout, name='logout'), url(r'^login', views.login, name='login'), url(r'^signup', views.signup, name='signup'), diff --git a/apps/reader/views.py b/apps/reader/views.py index 9746c3359..81710f14c 100644 --- a/apps/reader/views.py +++ b/apps/reader/views.py @@ -9,7 +9,9 @@ from django.views.decorators.cache import never_cache from django.db.models import Q from django.core.urlresolvers import reverse from django.contrib.auth import login as login_user +from django.contrib.auth.models import User from django.http import HttpResponse, HttpResponseRedirect, HttpResponseForbidden +from django.conf import settings from apps.analyzer.models import ClassifierFeed, ClassifierAuthor, ClassifierTag, ClassifierTitle from apps.analyzer.models import apply_classifier_titles, apply_classifier_feeds, apply_classifier_authors, apply_classifier_tags from apps.analyzer.models import get_classifiers_for_user @@ -518,4 +520,16 @@ def save_feed_order(request): user_sub_folders.folders = folders user_sub_folders.save() - return {} \ No newline at end of file + return {} + +@login_required +def login_as(request): + if not request.user.is_staff: + assert False + return HttpResponseForbidden() + username = request.GET['user'] + user = get_object_or_404(User, username=username) + user.backend = settings.AUTHENTICATION_BACKENDS[0] + login_user(request, user) + return HttpResponseRedirect(reverse('index')) + \ No newline at end of file diff --git a/settings.py b/settings.py index 7b9e9cdad..306173269 100644 --- a/settings.py +++ b/settings.py @@ -35,6 +35,7 @@ LANGUAGE_CODE = 'en-us' SITE_ID = 1 USE_I18N = False LOGIN_REDIRECT_URL = '/' +LOGIN_URL = '/reader/login' # URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a # trailing slash. # Examples: "http://foo.com/media/", "/media/". From 7afdd3344b6793e20c69a36662b2c901b3becee9 Mon Sep 17 00:00:00 2001 From: Samuel Clay Date: Wed, 21 Jul 2010 00:47:55 -0400 Subject: [PATCH 09/15] Neutral slider has been broken for a week due to JavaScript thinking 0 was false. 
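The root cause is the classic falsy-zero trap: unread_view || this.model.preference('unread_view') treats the neutral score 0 as "no value" and silently falls back to the stored preference, so the fix switches to an explicit typeof check. The same trap expressed in Python, purely for illustration (names and defaults are made up):

    def get_unread_view_name(unread_view=None, stored_preference=1):
        # Buggy version: unread_view = unread_view or stored_preference
        # "or" discards a legitimate 0 (neutral) and falls back to the stored value.
        if unread_view is None:
            unread_view = stored_preference
        if unread_view > 0:
            return 'positive'
        elif unread_view == 0:
            return 'neutral'
        return 'negative'

    print(get_unread_view_name(0))  # 'neutral'; the "or" version would report 'positive'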
--- media/js/newsblur/reader.js | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/media/js/newsblur/reader.js b/media/js/newsblur/reader.js index 70fee2b62..716b6803f 100644 --- a/media/js/newsblur/reader.js +++ b/media/js/newsblur/reader.js @@ -1965,7 +1965,10 @@ self.switch_feed_view_unread_view(ui.value); }, stop: function(e, ui) { - self.model.preference('unread_view', ui.value); + self.switch_feed_view_unread_view(ui.value); + if (self.model.preference('unread_view') != ui.value) { + self.model.preference('unread_view', ui.value); + } self.flags['feed_view_positions_calculated'] = false; self.show_correct_story_titles_in_unread_view({'animate': true, 'follow': true}); } @@ -1976,11 +1979,7 @@ var $feed_list = this.$s.$feed_list; var unread_view_name = this.get_unread_view_name(unread_view); var $next_story_button = $('.task_story_next_unread'); - - if (this.model.preference('unread_view') != unread_view) { - this.model.preference('unread_view', unread_view); - } - + $feed_list.removeClass('unread_view_positive') .removeClass('unread_view_neutral') .removeClass('unread_view_negative') @@ -1993,7 +1992,9 @@ }, get_unread_view_name: function(unread_view) { - unread_view = unread_view || this.model.preference('unread_view'); + if (typeof unread_view == 'undefined') { + unread_view = this.model.preference('unread_view'); + } return (unread_view > 0 ? 'positive' From 5a219006067dd3af6eafda01ad0c3afee75a0abf Mon Sep 17 00:00:00 2001 From: Samuel Clay Date: Wed, 21 Jul 2010 00:47:33 -0400 Subject: [PATCH 10/15] Re-enabling Story count now that we're on PostgreSQL. --- gunicorn.py | 1 + utils/munin/newsblur_stories.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/gunicorn.py b/gunicorn.py index 050c71c0c..858e8beba 100644 --- a/gunicorn.py +++ b/gunicorn.py @@ -8,3 +8,4 @@ def numCPUs(): bind = "127.0.0.1:8000" pidfile = "/tmp/gunicorn_newsblur.pid" workers = numCPUs() # * 2 + 1 +workers = 1 diff --git a/utils/munin/newsblur_stories.py b/utils/munin/newsblur_stories.py index a08bbd5a1..fb1f36dd2 100755 --- a/utils/munin/newsblur_stories.py +++ b/utils/munin/newsblur_stories.py @@ -15,7 +15,7 @@ graph_config = { } metrics = { - # 'stories': Story.objects.count(), + 'stories': Story.objects.count(), 'tags': Tag.objects.count(), 'authors': StoryAuthor.objects.count(), 'read_stories': UserStory.objects.count(), From 41482ec7ff18c24aa2fd28dd98221aa04f098240 Mon Sep 17 00:00:00 2001 From: Samuel Clay Date: Wed, 21 Jul 2010 11:04:21 -0400 Subject: [PATCH 11/15] Better logging for marking as read. --- apps/reader/views.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/apps/reader/views.py b/apps/reader/views.py index 81710f14c..2e467edff 100644 --- a/apps/reader/views.py +++ b/apps/reader/views.py @@ -277,6 +277,7 @@ def mark_all_as_read(request): sub.mark_read_date = read_date sub.save() + print " ---> Marking all as read [%s]: %s days" % (request.user, days,) data = json.encode(dict(code=code)) return HttpResponse(data) @@ -319,6 +320,7 @@ def mark_feed_as_read(request): data = json.encode(dict(code=code)) + print " ---> Marking feed as read [%s]: %s" % (request.user, feed,) # UserStory.objects.filter(user=request.user, feed=feed_id).delete() return HttpResponse(data) From 2f86bbbb24481e69a21f97dfd34c9ba3808b8934 Mon Sep 17 00:00:00 2001 From: Samuel Clay Date: Wed, 21 Jul 2010 11:38:33 -0400 Subject: [PATCH 12/15] Fixing bad page URLs to use the feed fetcher to find the correct address. 
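The heavy lifting moves into a new helper, fetch_address_from_page() in utils/feed_functions.py (full version further down in this patch): given a URL that turns out to be an HTML page, feedfinder digs the real feed address out of the markup, and the helper then either repoints the existing feed or finds/creates a Feed for that address. Condensed:

    from utils import feedfinder
    from apps.rss_feeds.models import Feed

    def fetch_address_from_page(url, existing_feed=None):
        # Let feedfinder locate the actual feed address behind a page URL.
        feed_address = feedfinder.feed(url)
        if not feed_address:
            return None
        if existing_feed:
            # Repoint a feed whose stored address was really an HTML page.
            existing_feed.feed_address = feed_address
            existing_feed.save()
            return existing_feed
        try:
            return Feed.objects.get(feed_address=feed_address)
        except Feed.DoesNotExist:
            feed = Feed(feed_address=feed_address)
            feed.save()
            feed.update()
            return feed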
--- apps/feed_import/models.py | 3 +++ apps/reader/views.py | 22 ++++++++-------------- apps/rss_feeds/importer.py | 7 +++++++ utils/feed_fetcher.py | 15 ++++++++++++++- utils/feed_functions.py | 21 ++++++++++++++++++++- 5 files changed, 52 insertions(+), 16 deletions(-) diff --git a/apps/feed_import/models.py b/apps/feed_import/models.py index 404aed6da..2798e07ac 100644 --- a/apps/feed_import/models.py +++ b/apps/feed_import/models.py @@ -104,6 +104,9 @@ class GoogleReaderImporter(Importer): category = item.xpath('./list[@name="categories"]/object/string[@name="label"]') and \ item.xpath('./list[@name="categories"]/object/string[@name="label"]')[0].text + if not feed_address: + feed_address = feed_link + feed_data = dict(feed_address=feed_address, feed_link=feed_link, feed_title=feed_title) feed_db, _ = Feed.objects.get_or_create(feed_address=feed_address, defaults=dict(**feed_data)) us, _ = UserSubscription.objects.get_or_create( diff --git a/apps/reader/views.py b/apps/reader/views.py index 2e467edff..80af4c54f 100644 --- a/apps/reader/views.py +++ b/apps/reader/views.py @@ -22,8 +22,9 @@ try: from apps.rss_feeds.models import Feed, Story, FeedPage except: pass -from utils import json, feedfinder +from utils import json from utils.user_functions import get_user +from utils.feed_functions import fetch_address_from_page SINGLE_DAY = 60*60*24 @@ -378,19 +379,12 @@ def add_url(request): if feed: feed = feed[0] else: - feed_finder_url = feedfinder.feed(url) - if feed_finder_url: - try: - feed = Feed.objects.get(feed_address=feed_finder_url) - except Feed.DoesNotExist: - try: - feed = Feed(feed_address=feed_finder_url) - feed.save() - feed.update() - except: - code = -2 - message = "This feed has been added, but something went wrong"\ - " when downloading it. Maybe the server's busy." + try: + feed = fetch_address_from_page(url) + except: + code = -2 + message = "This feed has been added, but something went wrong"\ + " when downloading it. Maybe the server's busy." if not feed: code = -1 diff --git a/apps/rss_feeds/importer.py b/apps/rss_feeds/importer.py index 7744a6477..d9af2ebce 100644 --- a/apps/rss_feeds/importer.py +++ b/apps/rss_feeds/importer.py @@ -5,6 +5,7 @@ import re import urlparse import multiprocessing import traceback +import feedparser from apps.rss_feeds.models import FeedPage class PageImporter(object): @@ -24,6 +25,12 @@ class PageImporter(object): data = response.read() html = self.rewrite_page(data) self.save_page(html) + except ValueError, e: + print " ---> ValueError on url: %s" % e + self.feed.save_page_history(401, "Bad URL", e) + fp = feedparser.parse(self.feed.feed_address) + self.feed.feed_link = fp.feed.get('link', "") + self.feed.save() except urllib2.HTTPError, e: print "HTTP Error: %s" % e self.feed.save_page_history(e.code, e.msg, e.fp.read()) diff --git a/utils/feed_fetcher.py b/utils/feed_fetcher.py index 4bb6f85aa..79573a545 100644 --- a/utils/feed_fetcher.py +++ b/utils/feed_fetcher.py @@ -10,6 +10,7 @@ from apps.rss_feeds.importer import PageImporter from utils import feedparser from django.db.models import Q from utils.story_functions import pre_process_story +from utils.feed_functions import fetch_address_from_page import sys import time import logging @@ -17,6 +18,7 @@ import datetime import traceback import multiprocessing import urllib2 +import xml.sax # Refresh feed code adapted from Feedjack. 
# http://feedjack.googlecode.com @@ -76,7 +78,7 @@ class FetchFeed: agent=USER_AGENT, etag=self.feed.etag, modified=modified) - + # feed_xml, _ = FeedXML.objects.get_or_create(feed=self.feed) # feed_xml.rss_xml = self.fpf # feed_xml.save() @@ -102,6 +104,17 @@ class ProcessFeed: logging.debug(u'[%d] Processing %s' % (self.feed.id, self.feed.feed_title)) + + if self.fpf.bozo and isinstance(self.fpf.bozo_exception, feedparser.NonXMLContentType): + print " ---> Non-xml feed: %s. Fetching page." % self.feed + feed = fetch_address_from_page(self.feed.feed_address, self.feed) + if feed: + self.feed.last_modified = None + self.feed.etag = None + self.feed.save() + elif self.fpf.bozo and isinstance(self.fpf.bozo_exception, xml.sax._exceptions.SAXException): + feed = fetch_address_from_page(self.feed.feed_link, self.feed) + if hasattr(self.fpf, 'status'): if self.options['verbose']: logging.debug(u'[%d] HTTP status %d: %s' % (self.feed.id, diff --git a/utils/feed_functions.py b/utils/feed_functions.py index 61cc87ace..815362f46 100644 --- a/utils/feed_functions.py +++ b/utils/feed_functions.py @@ -1,6 +1,7 @@ import datetime import time import sys +from utils import feedfinder def encode(tstr): """ Encodes a unicode string in utf-8 @@ -48,4 +49,22 @@ def levenshtein_distance(first, second): if first[i-1] != second[j-1]: substitution += 1 distance_matrix[i][j] = min(insertion, deletion, substitution) - return distance_matrix[first_length-1][second_length-1] \ No newline at end of file + return distance_matrix[first_length-1][second_length-1] + + +def fetch_address_from_page(url, existing_feed=None): + from apps.rss_feeds.models import Feed + feed_finder_url = feedfinder.feed(url) + if feed_finder_url: + if existing_feed: + existing_feed.feed_address = feed_finder_url + existing_feed.save() + feed = existing_feed + else: + try: + feed = Feed.objects.get(feed_address=feed_finder_url) + except Feed.DoesNotExist: + feed = Feed(feed_address=feed_finder_url) + feed.save() + feed.update() + return feed \ No newline at end of file From 8b25b985b63dbc6c40ecc27a98db54f1abfdc50a Mon Sep 17 00:00:00 2001 From: Samuel Clay Date: Wed, 21 Jul 2010 12:49:33 -0400 Subject: [PATCH 13/15] Adding error instrumentation to keep track of rolling errors on feed and page fetches. 
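Munin calls the plugin twice: once with a config argument to fetch the graph definition, then with no arguments to read current values as field.value lines; the MuninGraph base class is assumed to handle that handshake. The metrics themselves are a rolling 24-hour window over the fetch-history tables, with 304 Not Modified counted as a successful poll rather than an error:

    import datetime
    from apps.rss_feeds.models import FeedFetchHistory

    last_day = datetime.datetime.now() - datetime.timedelta(days=1)
    ok_codes = (200, 304)  # 304 means the feed simply had nothing new

    recent = FeedFetchHistory.objects.filter(fetch_date__gte=last_day)
    feed_errors = recent.exclude(status_code__in=ok_codes).count()
    feed_success = recent.filter(status_code__in=ok_codes).count()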
--- utils/munin/newsblur_errors.py | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 utils/munin/newsblur_errors.py diff --git a/utils/munin/newsblur_errors.py b/utils/munin/newsblur_errors.py new file mode 100644 index 000000000..ed8e85e87 --- /dev/null +++ b/utils/munin/newsblur_errors.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python + +from utils.munin.base import MuninGraph +from apps.rss_feeds.models import FeedFetchHistory, PageFetchHistory +import datetime + + +graph_config = { + 'graph_category' : 'NewsBlur', + 'graph_title' : 'NewsBlur Users', + 'graph_vlabel' : 'users', + 'all.label': 'all', +} + +last_day = datetime.datetime.now() - datetime.timedelta(days=1) + +metrics = { + 'feed_errors': FeedFetchHistory.objects.filter(fetch_date__gte=last_day).exclude(status_code__in=[200, 304]).count(), + 'feed_success': FeedFetchHistory.objects.filter(fetch_date__gte=last_day).filter(status_code__in=[200, 304]).count(), + 'page_errors': PageFetchHistory.objects.filter(fetch_date__gte=last_day).exclude(status_code__in=[200, 304]).count(), + 'page_success': PageFetchHistory.objects.filter(fetch_date__gte=last_day).filter(status_code__in=[200, 304]).count(), +} + +if __name__ == '__main__': + MuninGraph(graph_config, metrics).run() From dfc04de45ab08d98311b6713f9bb09881d194121 Mon Sep 17 00:00:00 2001 From: Samuel Clay Date: Wed, 21 Jul 2010 12:58:16 -0400 Subject: [PATCH 14/15] +x on munin errors. --- utils/munin/newsblur_errors.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 utils/munin/newsblur_errors.py diff --git a/utils/munin/newsblur_errors.py b/utils/munin/newsblur_errors.py old mode 100644 new mode 100755 From 34c6cce547635444040552276e0c27096c526cb8 Mon Sep 17 00:00:00 2001 From: Samuel Clay Date: Wed, 21 Jul 2010 13:02:43 -0400 Subject: [PATCH 15/15] Labels for instrumentation on feed and page fetching errors. --- utils/munin/newsblur_errors.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/utils/munin/newsblur_errors.py b/utils/munin/newsblur_errors.py index ed8e85e87..2887a7b68 100644 --- a/utils/munin/newsblur_errors.py +++ b/utils/munin/newsblur_errors.py @@ -7,9 +7,12 @@ import datetime graph_config = { 'graph_category' : 'NewsBlur', - 'graph_title' : 'NewsBlur Users', - 'graph_vlabel' : 'users', - 'all.label': 'all', + 'graph_title' : 'NewsBlur Errors', + 'graph_vlabel' : 'errors', + 'feed_errors.label': 'Feed Errors', + 'feed_success.label': 'Feed Success', + 'page_errors.label': 'Page Errors', + 'page_success.label': 'Page Success', } last_day = datetime.datetime.now() - datetime.timedelta(days=1)