From 2b9ad97b51461de1e905e5f3d7a56103d1763654 Mon Sep 17 00:00:00 2001
From: Samuel Clay
Date: Thu, 26 Jan 2012 09:32:24 -0800
Subject: [PATCH] Switching a ton of feed and user fetches to simple ID matches. ".user.pk -> .user_id"

---
 apps/analyzer/views.py                    |  2 +-
 apps/api/views.py                         |  2 +-
 apps/feed_import/models.py                |  4 +-
 apps/profile/models.py                    |  2 +-
 apps/profile/views.py                     |  2 +-
 apps/reader/models.py                     | 38 +++++++++----------
 apps/reader/views.py                      | 32 ++++++++--------
 .../templatetags/recommendations_tags.py  |  2 +-
 apps/recommendations/views.py             |  2 +-
 apps/rss_feeds/icon_importer.py           |  4 +-
 apps/rss_feeds/models.py                  |  2 +-
 apps/rss_feeds/page_importer.py           |  2 +-
 apps/rss_feeds/views.py                   |  8 ++--
 apps/social/views.py                      | 36 +++++++++---------
 .../render_recommended_feed.xhtml         |  2 +-
 utils/feed_fetcher.py                     |  2 +-
 16 files changed, 72 insertions(+), 70 deletions(-)

diff --git a/apps/analyzer/views.py b/apps/analyzer/views.py
index cecb3bdea..7c68cc0fb 100644
--- a/apps/analyzer/views.py
+++ b/apps/analyzer/views.py
@@ -50,7 +50,7 @@ def save_classifier(request):
     for post_content in post_contents:
         if not post_content: continue
         classifier_dict = {
-            'user_id': request.user.pk,
+            'user_id': request.user_id,
             'feed_id': feed_id,
             'defaults': {
                 'score': score
diff --git a/apps/api/views.py b/apps/api/views.py
index aa016f9ba..8c644d614 100644
--- a/apps/api/views.py
+++ b/apps/api/views.py
@@ -125,5 +125,5 @@ def add_site(request, token):
     return HttpResponse(callback + '(' + json.encode({
         'code': code,
         'message': message,
-        'usersub': us and us.feed.pk,
+        'usersub': us and us.feed_id,
     }) + ')', mimetype='text/plain')
\ No newline at end of file
diff --git a/apps/feed_import/models.py b/apps/feed_import/models.py
index addc21153..f1c505572 100644
--- a/apps/feed_import/models.py
+++ b/apps/feed_import/models.py
@@ -81,7 +81,7 @@ class OPMLExporter:

     def fetch_feeds(self):
         subs = UserSubscription.objects.filter(user=self.user)
-        self.feeds = dict((sub.feed.pk, sub.canonical()) for sub in subs)
+        self.feeds = dict((sub.feed_id, sub.canonical()) for sub in subs)


 class Importer:
@@ -280,7 +280,7 @@ class GoogleReaderImporter(Importer):
         original_feed = Feed.get_feed_from_url(story['origin']['htmlUrl'], create=False, fetch=False)
         content = story.get('content') or story.get('summary')
         story_db = {
-            "user_id": self.user.pk,
+            "user_id": self.user_id,
             "starred_date": datetime.datetime.fromtimestamp(story['updated']),
             "story_date": datetime.datetime.fromtimestamp(story['published']),
             "story_title": story.get('title'),
diff --git a/apps/profile/models.py b/apps/profile/models.py
index 79b7fb84b..5f0610b3a 100644
--- a/apps/profile/models.py
+++ b/apps/profile/models.py
@@ -104,7 +104,7 @@ NewsBlur""" % {'user': self.user.username, 'feeds': subs.count()}
                 sub.feed.save()

         if stale_feeds:
-            stale_feeds = list(set([f.feed.pk for f in stale_feeds]))
+            stale_feeds = list(set([f.feed_id for f in stale_feeds]))
             self.queue_new_feeds(new_feeds=stale_feeds)

     def send_new_user_email(self):
diff --git a/apps/profile/views.py b/apps/profile/views.py
index 4928cbbc4..42a4b0f56 100644
--- a/apps/profile/views.py
+++ b/apps/profile/views.py
@@ -181,7 +181,7 @@ def profile_is_premium(request):
     if retries >= 30:
         subject = "Premium activation failed: %s (%s/%s)" % (request.user, activated_subs, total_subs)
-        message = """User: %s (%s) -- Email: %s""" % (request.user.username, request.user.pk, request.user.email)
+        message = """User: %s (%s) -- Email: %s""" % (request.user.username, request.user_id, request.user.email)
         mail_admins(subject,
                     message, fail_silently=True)
         code = -1
         request.user.profile.is_premium = True
diff --git a/apps/reader/models.py b/apps/reader/models.py
index a89a0206d..db912c3ab 100644
--- a/apps/reader/models.py
+++ b/apps/reader/models.py
@@ -69,7 +69,7 @@ class UserSubscription(models.Model):
         try:
             super(UserSubscription, self).save(*args, **kwargs)
         except IntegrityError:
-            duplicate_feeds = DuplicateFeed.objects.filter(duplicate_feed_id=self.feed.pk)
+            duplicate_feeds = DuplicateFeed.objects.filter(duplicate_feed_id=self.feed_id)
             for duplicate_feed in duplicate_feeds:
                 already_subscribed = UserSubscription.objects.filter(user=self.user, feed=duplicate_feed.feed)
                 if not already_subscribed:
@@ -130,8 +130,8 @@ class UserSubscription(models.Model):
         now = datetime.datetime.utcnow()

         # Use the latest story to get last read time.
-        if MStory.objects(story_feed_id=self.feed.pk).first():
-            latest_story_date = MStory.objects(story_feed_id=self.feed.pk).order_by('-story_date').only('story_date')[0]['story_date']\
+        if MStory.objects(story_feed_id=self.feed_id).first():
+            latest_story_date = MStory.objects(story_feed_id=self.feed_id).order_by('-story_date').only('story_date')[0]['story_date']\
                                 + datetime.timedelta(seconds=1)
         else:
             latest_story_date = now
@@ -144,7 +144,7 @@ class UserSubscription(models.Model):
         self.unread_count_updated = now
         self.oldest_unread_story_date = now
         self.needs_unread_recalc = False
-        MUserStory.delete_marked_as_read_stories(self.user.pk, self.feed.pk)
+        MUserStory.delete_marked_as_read_stories(self.user_id, self.feed_id)

         self.save()

@@ -165,7 +165,7 @@ class UserSubscription(models.Model):

         for story_id in set(story_ids):
             try:
-                story = MStory.objects.get(story_feed_id=self.feed.pk, story_guid=story_id)
+                story = MStory.objects.get(story_feed_id=self.feed_id, story_guid=story_id)
             except MStory.DoesNotExist:
                 # Story has been deleted, probably by feed_fetcher.
                 continue
@@ -173,13 +173,13 @@ class UserSubscription(models.Model):
                 continue
             now = datetime.datetime.utcnow()
             date = now if now > story.story_date else story.story_date # For handling future stories
-            m = MUserStory(story=story, user_id=self.user.pk,
-                           feed_id=self.feed.pk, read_date=date,
+            m = MUserStory(story=story, user_id=self.user_id,
+                           feed_id=self.feed_id, read_date=date,
                            story_id=story_id, story_date=story.story_date)
             try:
                 m.save()
             except OperationError, e:
-                original_m = MUserStory.objects.get(story=story, user_id=self.user.pk, feed_id=self.feed.pk)
+                original_m = MUserStory.objects.get(story=story, user_id=self.user_id, feed_id=self.feed_id)
                 logging.user(request, "~BRMarked story as read error: %s" % (e))
                 logging.user(request, "~BRMarked story as read: %s" % (story_id))
                 logging.user(request, "~BROrigin story as read: %s" % (m.story.story_guid))
@@ -222,15 +222,15 @@ class UserSubscription(models.Model):
         else:
             self.mark_read_date = date_delta

-        read_stories = MUserStory.objects(user_id=self.user.pk,
-                                          feed_id=self.feed.pk,
+        read_stories = MUserStory.objects(user_id=self.user_id,
+                                          feed_id=self.feed_id,
                                           read_date__gte=self.mark_read_date)
         # if not silent:
         #     logging.info(' ---> [%s] Read stories: %s' % (self.user, datetime.datetime.now() - now))
         read_stories_ids = []
         for us in read_stories:
             read_stories_ids.append(us.story_id)
-        stories_db = stories_db or MStory.objects(story_feed_id=self.feed.pk,
+        stories_db = stories_db or MStory.objects(story_feed_id=self.feed_id,
                                                   story_date__gte=date_delta)
         # if not silent:
         #     logging.info(' ---> [%s] MStory: %s' % (self.user, datetime.datetime.now() - now))
@@ -243,14 +243,14 @@ class UserSubscription(models.Model):
                 unread_stories_db.append(story)
                 if story.story_date < oldest_unread_story_date:
                     oldest_unread_story_date = story.story_date
-        stories = Feed.format_stories(unread_stories_db, self.feed.pk)
+        stories = Feed.format_stories(unread_stories_db, self.feed_id)
         # if not silent:
         #     logging.info(' ---> [%s] Format stories: %s' % (self.user, datetime.datetime.now() - now))

-        classifier_feeds = list(MClassifierFeed.objects(user_id=self.user.pk, feed_id=self.feed.pk))
-        classifier_authors = list(MClassifierAuthor.objects(user_id=self.user.pk, feed_id=self.feed.pk))
-        classifier_titles = list(MClassifierTitle.objects(user_id=self.user.pk, feed_id=self.feed.pk))
-        classifier_tags = list(MClassifierTag.objects(user_id=self.user.pk, feed_id=self.feed.pk))
+        classifier_feeds = list(MClassifierFeed.objects(user_id=self.user_id, feed_id=self.feed_id))
+        classifier_authors = list(MClassifierAuthor.objects(user_id=self.user_id, feed_id=self.feed_id))
+        classifier_titles = list(MClassifierTitle.objects(user_id=self.user_id, feed_id=self.feed_id))
+        classifier_tags = list(MClassifierTag.objects(user_id=self.user_id, feed_id=self.feed_id))
         # if not silent:
         #     logging.info(' ---> [%s] Classifiers: %s (%s)' % (self.user, datetime.datetime.now() - now, classifier_feeds.count() + classifier_authors.count() + classifier_tags.count() + classifier_titles.count()))

@@ -326,7 +326,7 @@ class UserSubscription(models.Model):
             return

         # Switch read stories
-        user_stories = MUserStory.objects(user_id=self.user.pk, feed_id=old_feed.pk)
+        user_stories = MUserStory.objects(user_id=self.user_id, feed_id=old_feed.pk)
         logging.info(" ---> %s read stories" % user_stories.count())
         for user_story in user_stories:
             user_story.feed_id = new_feed.pk
@@ -347,7 +347,7 @@ class UserSubscription(models.Model):
             user_story.delete()

         def switch_feed_for_classifier(model):
-            duplicates = model.objects(feed_id=old_feed.pk, user_id=self.user.pk)
+            duplicates = model.objects(feed_id=old_feed.pk, user_id=self.user_id)
             if duplicates.count():
                 logging.info(" ---> Switching %s %s" % (duplicates.count(), model))
             for duplicate in duplicates:
@@ -464,7 +464,7 @@ class UserSubscriptionFolders(models.Model):
                 return
         if user_sub:
             user_sub.delete()
-            MUserStory.objects(user_id=self.user.pk, feed_id=feed_id).delete()
+            MUserStory.objects(user_id=self.user_id, feed_id=feed_id).delete()

     def delete_folder(self, folder_to_delete, in_folder, feed_ids_in_folder, commit_delete=True):
         def _find_folder_in_folders(old_folders, folder_name, feeds_to_delete, deleted_folder=None):
diff --git a/apps/reader/views.py b/apps/reader/views.py
index 11ea81bdc..dcc3d2007 100644
--- a/apps/reader/views.py
+++ b/apps/reader/views.py
@@ -191,7 +191,7 @@ def load_feeds(request):
     user_subs = UserSubscription.objects.select_related('feed').filter(user=user)

     for sub in user_subs:
-        pk = sub.feed.pk
+        pk = sub.feed_id
         if update_counts:
             sub.calculate_feed_scores(silent=True)
         feeds[pk] = sub.canonical(include_favicon=include_favicons)
@@ -261,7 +261,7 @@ def load_feeds_flat(request):
     for sub in user_subs:
         if sub.needs_unread_recalc:
             sub.calculate_feed_scores(silent=True)
-        feeds[sub.feed.pk] = sub.canonical(include_favicon=include_favicons)
+        feeds[sub.feed_id] = sub.canonical(include_favicon=include_favicons)

     folders = json.decode(folders.folders)
     flat_folders = {}
@@ -306,7 +306,7 @@ def refresh_feeds(request):
         feed_icons = dict([(i.feed_id, i) for i in MFeedIcon.objects(feed_id__in=favicons_fetching)])

     for i, sub in enumerate(user_subs):
-        pk = sub.feed.pk
+        pk = sub.feed_id
         if (sub.needs_unread_recalc or
             sub.unread_count_updated < UNREAD_CUTOFF or
             sub.oldest_unread_story_date < UNREAD_CUTOFF):
@@ -325,21 +325,21 @@ def refresh_feeds(request):
             if request.REQUEST.get('check_fetch_status', False):
                 feeds[pk]['not_yet_fetched'] = not sub.feed.fetched_once
-            if sub.feed.pk in favicons_fetching and sub.feed.pk in feed_icons:
-                feeds[pk]['favicon'] = feed_icons[sub.feed.pk].data
-                feeds[pk]['favicon_color'] = feed_icons[sub.feed.pk].color
+            if sub.feed_id in favicons_fetching and sub.feed_id in feed_icons:
+                feeds[pk]['favicon'] = feed_icons[sub.feed_id].data
+                feeds[pk]['favicon_color'] = feed_icons[sub.feed_id].color
                 feeds[pk]['favicon_fetching'] = sub.feed.favicon_fetching

     user_subs = UserSubscription.objects.select_related('feed').filter(user=user, active=True)
     if favicons_fetching:
-        sub_feed_ids = [s.feed.pk for s in user_subs]
+        sub_feed_ids = [s.feed_id for s in user_subs]
         moved_feed_ids = [f for f in favicons_fetching if f not in sub_feed_ids]
         for moved_feed_id in moved_feed_ids:
             duplicate_feeds = DuplicateFeed.objects.filter(duplicate_feed_id=moved_feed_id)
-            if duplicate_feeds and duplicate_feeds[0].feed.pk in feeds:
-                feeds[moved_feed_id] = feeds[duplicate_feeds[0].feed.pk]
-                feeds[moved_feed_id]['dupe_feed_id'] = duplicate_feeds[0].feed.pk
+            if duplicate_feeds and duplicate_feeds[0].feed_id in feeds:
+                feeds[moved_feed_id] = feeds[duplicate_feeds[0].feed_id]
+                feeds[moved_feed_id]['dupe_feed_id'] = duplicate_feeds[0].feed_id

     if settings.DEBUG or request.REQUEST.get('check_fetch_status'):
         diff = datetime.datetime.utcnow()-start
@@ -771,7 +771,7 @@ def mark_story_as_unread(request):
         # these would be ignored.
         data = usersub.mark_story_ids_as_read(newer_stories, request=request)

-    m = MUserStory.objects(story_id=story_id, user_id=request.user.pk, feed_id=feed_id)
+    m = MUserStory.objects(story_id=story_id, user_id=request.user_id, feed_id=feed_id)
     m.delete()

     return data
@@ -995,8 +995,8 @@ def feeds_trainer(request):
     for us in usersubs:
         if (not us.is_trained and us.feed.stories_last_month > 0) or feed_id:
             classifier = dict()
-            classifier['classifiers'] = get_classifiers_for_user(user, us.feed.pk)
-            classifier['feed_id'] = us.feed.pk
+            classifier['classifiers'] = get_classifiers_for_user(user, us.feed_id)
+            classifier['feed_id'] = us.feed_id
             classifier['stories_last_month'] = us.feed.stories_last_month
             classifier['num_subscribers'] = us.feed.num_subscribers
             classifier['feed_tags'] = json.decode(us.feed.data.popular_tags) if us.feed.data.popular_tags else []
@@ -1016,7 +1016,7 @@ def save_feed_chooser(request):

     for sub in usersubs:
         try:
-            if sub.feed.pk in approved_feeds:
+            if sub.feed_id in approved_feeds:
                 activated += 1
                 if not sub.active:
                     sub.active = True
@@ -1094,7 +1094,7 @@ def mark_story_as_starred(request):
     story_db = dict([(k, v) for k, v in story[0]._data.items()
                      if k is not None and v is not None])
     now = datetime.datetime.now()
-    story_values = dict(user_id=request.user.pk, starred_date=now, **story_db)
+    story_values = dict(user_id=request.user_id, starred_date=now, **story_db)
     starred_story, created = MStarredStory.objects.get_or_create(
         story_guid=story_values.pop('story_guid'),
         user_id=story_values.pop('user_id'),
@@ -1114,7 +1114,7 @@ def mark_story_as_unstarred(request):
     code = 1
     story_id = request.POST['story_id']

-    starred_story = MStarredStory.objects(user_id=request.user.pk, story_guid=story_id)
+    starred_story = MStarredStory.objects(user_id=request.user_id, story_guid=story_id)
     if starred_story:
         logging.user(request, "~FCUnstarring: ~SB%s" % (starred_story[0].story_title[:50]))
         starred_story.delete()
diff --git a/apps/recommendations/templatetags/recommendations_tags.py b/apps/recommendations/templatetags/recommendations_tags.py
index 6e62aa9d0..159a5a9fa 100644
--- a/apps/recommendations/templatetags/recommendations_tags.py
+++ b/apps/recommendations/templatetags/recommendations_tags.py
@@ -15,7 +15,7 @@ def render_recommended_feed(context, recommended_feeds, unmoderated=False):
     if context['user'].is_authenticated():
         usersub = UserSubscription.objects.filter(user=user, feed=recommended_feeds[0].feed)
     recommended_feed = recommended_feeds and recommended_feeds[0]
-    feed_icon = MFeedIcon.objects(feed_id=recommended_feed.feed.pk)
+    feed_icon = MFeedIcon.objects(feed_id=recommended_feed.feed_id)

     if recommended_feed:
         return {
diff --git a/apps/recommendations/views.py b/apps/recommendations/views.py
index 6846330ba..f1a695ad5 100644
--- a/apps/recommendations/views.py
+++ b/apps/recommendations/views.py
@@ -29,7 +29,7 @@ def load_recommended_feed(request):
         logging.user(request, "~FBBrowse recommended feed: ~SBPage #%s" % (page+1))

     recommended_feed = recommended_feeds and recommended_feeds[0]
-    feed_icon = MFeedIcon.objects(feed_id=recommended_feed.feed.pk)
+    feed_icon = MFeedIcon.objects(feed_id=recommended_feed.feed_id)

     if recommended_feed:
         return render_to_response('recommendations/render_recommended_feed.xhtml', {
diff --git a/apps/rss_feeds/icon_importer.py b/apps/rss_feeds/icon_importer.py
index 3f15ed5ea..a251ccf5d 100644
--- a/apps/rss_feeds/icon_importer.py
+++ b/apps/rss_feeds/icon_importer.py
@@ -21,7 +21,7 @@ class IconImporter(object):
     def __init__(self, feed, force=False):
         self.feed = feed
         self.force = force
-        self.feed_icon, _ = MFeedIcon.objects.get_or_create(feed_id=self.feed.pk)
+        self.feed_icon, _ = MFeedIcon.objects.get_or_create(feed_id=self.feed_id)

     def save(self):
         if not self.force and self.feed.favicon_not_found:
@@ -137,7 +137,7 @@ class IconImporter(object):
     def fetch_image_from_page_data(self):
         image = None
         image_file = None
-        content = MFeedPage.get_data(feed_id=self.feed.pk)
+        content = MFeedPage.get_data(feed_id=self.feed_id)
         url = self._url_from_html(content)
         if url:
             image, image_file = self.get_image_from_url(url)
diff --git a/apps/rss_feeds/models.py b/apps/rss_feeds/models.py
index 04c8840b7..9e9da5db0 100644
--- a/apps/rss_feeds/models.py
+++ b/apps/rss_feeds/models.py
@@ -1309,7 +1309,7 @@ class DuplicateFeed(models.Model):
         return {
             'duplicate_address': self.duplicate_address,
             'duplicate_feed_id': self.duplicate_feed_id,
-            'feed_id': self.feed.pk
+            'feed_id': self.feed_id
         }

 def merge_feeds(original_feed_id, duplicate_feed_id, force=False):
diff --git a/apps/rss_feeds/page_importer.py b/apps/rss_feeds/page_importer.py
index 9d12d3a4a..217b60d5e 100644
--- a/apps/rss_feeds/page_importer.py
+++ b/apps/rss_feeds/page_importer.py
@@ -137,6 +137,6 @@ class PageImporter(object):

     def save_page(self, html):
         if html and len(html) > 100:
-            feed_page, _ = MFeedPage.objects.get_or_create(feed_id=self.feed.pk)
+            feed_page, _ = MFeedPage.objects.get_or_create(feed_id=self.feed_id)
             feed_page.page_data = html
             feed_page.save()
diff --git a/apps/rss_feeds/views.py b/apps/rss_feeds/views.py
index 0e29a488e..e546f4dcf 100644
--- a/apps/rss_feeds/views.py
+++ b/apps/rss_feeds/views.py
@@ -207,7 +207,7 @@ def exception_change_feed_address(request):
     usersub.calculate_feed_scores(silent=False)
     feed.update_all_statistics()
-    classifiers = get_classifiers_for_user(usersub.user, usersub.feed.pk)
+    classifiers = get_classifiers_for_user(usersub.user, usersub.feed_id)

     feeds = {
         original_feed.pk: usersub.canonical(full=True, classifiers=classifiers),
@@ -215,7 +215,7 @@ def exception_change_feed_address(request):
     return {
         'code': code,
         'feeds': feeds,
-        'new_feed_id': usersub.feed.pk,
+        'new_feed_id': usersub.feed_id,
     }

 @ajax_login_required
@@ -271,7 +271,7 @@ def exception_change_feed_link(request):
     usersub.calculate_feed_scores(silent=False)
     feed.update_all_statistics()
-    classifiers = get_classifiers_for_user(usersub.user, usersub.feed.pk)
+    classifiers = get_classifiers_for_user(usersub.user, usersub.feed_id)

     feeds = {
         original_feed.pk: usersub.canonical(full=True, classifiers=classifiers),
@@ -279,7 +279,7 @@ def exception_change_feed_link(request):
     return {
         'code': code,
         'feeds': feeds,
-        'new_feed_id': usersub.feed.pk,
+        'new_feed_id': usersub.feed_id,
     }

 @login_required
diff --git a/apps/social/views.py b/apps/social/views.py
index 0fd682f17..35215ebc6 100644
--- a/apps/social/views.py
+++ b/apps/social/views.py
@@ -13,7 +13,7 @@ from apps.rss_feeds.models import MStory, Feed, MStarredStory
 from apps.social.models import MSharedStory, MSocialServices, MSocialProfile, MSocialSubscription
 from apps.analyzer.models import MClassifierTitle, MClassifierAuthor, MClassifierFeed, MClassifierTag
 from apps.analyzer.models import apply_classifier_titles, apply_classifier_feeds, apply_classifier_authors, apply_classifier_tags
-from apps.reader.models import MUserStory
+from apps.reader.models import MUserStory, UserSubscription
 from utils import json_functions as json
 from utils import log as logging
 from utils import PyRSS2Gen as RSS
@@ -49,11 +49,11 @@ def mark_story_as_shared(request):
     if not story:
         return {'code': -1, 'message': 'Story not found.'}

-    shared_story = MSharedStory.objects.filter(user_id=request.user.pk, story_feed_id=feed_id, story_guid=story_id)
+    shared_story = MSharedStory.objects.filter(user_id=request.user_id, story_feed_id=feed_id, story_guid=story_id)
     if not shared_story:
         story_db = dict([(k, v) for k, v in story._data.items()
                          if k is not None and v is not None])
-        story_values = dict(user_id=request.user.pk, comments=comments,
+        story_values = dict(user_id=request.user_id, comments=comments,
                             has_comments=bool(comments), **story_db)
         MSharedStory.objects.create(**story_values)
         logging.user(request, "~FCSharing: ~SB~FM%s (~FB%s~FM)" % (story.story_title[:50], comments[:100]))
@@ -121,7 +121,7 @@ def load_social_stories(request, social_user_id, social_username=None):
     social_user_id = int(social_user_id)
     social_user = get_object_or_404(User, pk=social_user_id)
     offset = int(request.REQUEST.get('offset', 0))
-    limit = int(request.REQUEST.get('limit', 10))
+    limit = int(request.REQUEST.get('limit', 6))
     page = request.REQUEST.get('page')
     if page: offset = limit * (int(page) - 1)
     now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone)
@@ -133,10 +133,12 @@ def load_social_stories(request, social_user_id, social_username=None):
         return dict(stories=[])

     stories = MSharedStory.stories_with_comments(stories, user, check_all=True)
-    story_feed_ids = [s['story_feed_id'] for s in stories]
+    story_feed_ids = list(set(s['story_feed_id'] for s in stories))
     socialsub = MSocialSubscription.objects.get(user_id=user.pk, subscription_user_id=social_user_id)
-
+    usersubs = UserSubscription.objects.filter(user__pk=user.pk, feed__pk__in=story_feed_ids)
+    usersubs_map = dict((sub.feed_id, sub.mark_read_date) for sub in usersubs)
+
     # Get intelligence classifier for user
     # XXX TODO: Change all analyzers to use social feed ids instead of overlapping with feed ids. Ugh.
     classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, feed_id=social_user_id))
@@ -216,19 +218,19 @@ def profile(request):
     if request.method == 'POST':
         return save_profile(request)

-    profile = MSocialProfile.objects.get(user_id=request.user.pk)
+    profile = MSocialProfile.objects.get(user_id=request.user_id)
     return dict(code=1, user_profile=profile.to_json(full=True))

 def save_profile(request):
     data = request.POST

-    profile = MSocialProfile.objects.get(user_id=request.user.pk)
+    profile = MSocialProfile.objects.get(user_id=request.user_id)
     profile.location = data['location']
     profile.bio = data['bio']
     profile.website = data['website']
     profile.save()

-    social_services = MSocialServices.objects.get(user_id=request.user.pk)
+    social_services = MSocialServices.objects.get(user_id=request.user_id)
     social_services.set_photo(data['photo_service'])

     return dict(code=1, user_profile=profile.to_json(full=True))
@@ -237,7 +239,7 @@ def save_profile(request):
 @json.json_view
 def follow(request):
     follow_user_id = int(request.POST['user_id'])
-    profile = MSocialProfile.objects.get(user_id=request.user.pk)
+    profile = MSocialProfile.objects.get(user_id=request.user_id)
     profile.follow_user(follow_user_id)

     follow_profile = MSocialProfile.objects.get(user_id=follow_user_id)
@@ -248,7 +250,7 @@ def follow(request):
 @json.json_view
 def unfollow(request):
     unfollow_user_id = int(request.POST['user_id'])
-    profile = MSocialProfile.objects.get(user_id=request.user.pk)
+    profile = MSocialProfile.objects.get(user_id=request.user_id)
     profile.unfollow_user(unfollow_user_id)

     unfollow_profile = MSocialProfile.objects.get(user_id=unfollow_user_id)
@@ -278,13 +280,13 @@ def twitter_connect(request):

         # Be sure that two people aren't using the same Twitter account.
         existing_user = MSocialServices.objects.filter(twitter_uid=unicode(twitter_user.id))
-        if existing_user and existing_user[0].user_id != request.user.pk:
+        if existing_user and existing_user[0].user_id != request.user_id:
             user = User.objects.get(pk=existing_user[0].user_id)
             return dict(error=("Another user (%s, %s) has "
                                "already connected with those Twitter credentials."
                                % (user.username, user.email_address)))

-        social_services, _ = MSocialServices.objects.get_or_create(user_id=request.user.pk)
+        social_services, _ = MSocialServices.objects.get_or_create(user_id=request.user_id)
         social_services.twitter_uid = unicode(twitter_user.id)
         social_services.twitter_access_key = access_token.key
         social_services.twitter_access_secret = access_token.secret
@@ -332,13 +334,13 @@ def facebook_connect(request):

         # Be sure that two people aren't using the same Facebook account.
         existing_user = MSocialServices.objects.filter(facebook_uid=uid)
-        if existing_user and existing_user[0].user_id != request.user.pk:
+        if existing_user and existing_user[0].user_id != request.user_id:
             user = User.objects.get(pk=existing_user[0].user_id)
             return dict(error=("Another user (%s, %s) has "
                                "already connected with those Facebook credentials."
                                % (user.username, user.email_address)))

-        social_services, _ = MSocialServices.objects.get_or_create(user_id=request.user.pk)
+        social_services, _ = MSocialServices.objects.get_or_create(user_id=request.user_id)
         social_services.facebook_uid = uid
         social_services.facebook_access_token = access_token
         social_services.save()
@@ -353,12 +355,12 @@ def facebook_connect(request):

 @ajax_login_required
 def twitter_disconnect(request):
-    social_services = MSocialServices.objects.get(user_id=request.user.pk)
+    social_services = MSocialServices.objects.get(user_id=request.user_id)
     social_services.disconnect_twitter()
     return friends(request)

 @ajax_login_required
 def facebook_disconnect(request):
-    social_services = MSocialServices.objects.get(user_id=request.user.pk)
+    social_services = MSocialServices.objects.get(user_id=request.user_id)
     social_services.disconnect_facebook()
     return friends(request)
\ No newline at end of file
diff --git a/templates/recommendations/render_recommended_feed.xhtml b/templates/recommendations/render_recommended_feed.xhtml
index 19fcfd38e..01b40a9b9 100644
--- a/templates/recommendations/render_recommended_feed.xhtml
+++ b/templates/recommendations/render_recommended_feed.xhtml
@@ -26,7 +26,7 @@
-
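
The substitution leans on a Django ORM convention: a ForeignKey declared as `feed` also exposes the underlying database column as `feed_id`, so the id can be read without loading the related row. A minimal Python sketch of the difference (illustrative only, not code from this commit; the concrete queryset and pk value are hypothetical):

    # Following the relation loads the related Feed object -- an extra query
    # per subscription unless the queryset used select_related('feed').
    # Reading the *_id attribute returns the foreign-key column that is
    # already stored on the subscription row, with no extra query.
    sub = UserSubscription.objects.get(pk=1)        # hypothetical subscription row
    feed_via_relation = sub.feed.pk                 # may trigger a second query
    feed_via_column = sub.feed_id                   # same value, read locally
    assert feed_via_relation == feed_via_column

For hot paths like load_feeds and refresh_feeds, which loop over every subscription, reading the local `_id` column avoids one potential query per iteration.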