Switching a ton of feed and user fetches to simple ID matches (".user.pk" -> ".user_id").

This commit is contained in:
Samuel Clay 2012-01-26 09:32:24 -08:00
parent fb659b3d6f
commit 2b9ad97b51
16 changed files with 72 additions and 70 deletions

View file

@ -50,7 +50,7 @@ def save_classifier(request):
for post_content in post_contents: for post_content in post_contents:
if not post_content: continue if not post_content: continue
classifier_dict = { classifier_dict = {
'user_id': request.user.pk, 'user_id': request.user_id,
'feed_id': feed_id, 'feed_id': feed_id,
'defaults': { 'defaults': {
'score': score 'score': score

View file

@ -125,5 +125,5 @@ def add_site(request, token):
return HttpResponse(callback + '(' + json.encode({ return HttpResponse(callback + '(' + json.encode({
'code': code, 'code': code,
'message': message, 'message': message,
'usersub': us and us.feed.pk, 'usersub': us and us.feed_id,
}) + ')', mimetype='text/plain') }) + ')', mimetype='text/plain')

View file

@ -81,7 +81,7 @@ class OPMLExporter:
def fetch_feeds(self): def fetch_feeds(self):
subs = UserSubscription.objects.filter(user=self.user) subs = UserSubscription.objects.filter(user=self.user)
self.feeds = dict((sub.feed.pk, sub.canonical()) for sub in subs) self.feeds = dict((sub.feed_id, sub.canonical()) for sub in subs)
class Importer: class Importer:
@ -280,7 +280,7 @@ class GoogleReaderImporter(Importer):
original_feed = Feed.get_feed_from_url(story['origin']['htmlUrl'], create=False, fetch=False) original_feed = Feed.get_feed_from_url(story['origin']['htmlUrl'], create=False, fetch=False)
content = story.get('content') or story.get('summary') content = story.get('content') or story.get('summary')
story_db = { story_db = {
"user_id": self.user.pk, "user_id": self.user_id,
"starred_date": datetime.datetime.fromtimestamp(story['updated']), "starred_date": datetime.datetime.fromtimestamp(story['updated']),
"story_date": datetime.datetime.fromtimestamp(story['published']), "story_date": datetime.datetime.fromtimestamp(story['published']),
"story_title": story.get('title'), "story_title": story.get('title'),

View file

@ -104,7 +104,7 @@ NewsBlur""" % {'user': self.user.username, 'feeds': subs.count()}
sub.feed.save() sub.feed.save()
if stale_feeds: if stale_feeds:
stale_feeds = list(set([f.feed.pk for f in stale_feeds])) stale_feeds = list(set([f.feed_id for f in stale_feeds]))
self.queue_new_feeds(new_feeds=stale_feeds) self.queue_new_feeds(new_feeds=stale_feeds)
def send_new_user_email(self): def send_new_user_email(self):

View file

@ -181,7 +181,7 @@ def profile_is_premium(request):
if retries >= 30: if retries >= 30:
subject = "Premium activation failed: %s (%s/%s)" % (request.user, activated_subs, total_subs) subject = "Premium activation failed: %s (%s/%s)" % (request.user, activated_subs, total_subs)
message = """User: %s (%s) -- Email: %s""" % (request.user.username, request.user.pk, request.user.email) message = """User: %s (%s) -- Email: %s""" % (request.user.username, request.user_id, request.user.email)
mail_admins(subject, message, fail_silently=True) mail_admins(subject, message, fail_silently=True)
code = -1 code = -1
request.user.profile.is_premium = True request.user.profile.is_premium = True

View file

@ -69,7 +69,7 @@ class UserSubscription(models.Model):
try: try:
super(UserSubscription, self).save(*args, **kwargs) super(UserSubscription, self).save(*args, **kwargs)
except IntegrityError: except IntegrityError:
duplicate_feeds = DuplicateFeed.objects.filter(duplicate_feed_id=self.feed.pk) duplicate_feeds = DuplicateFeed.objects.filter(duplicate_feed_id=self.feed_id)
for duplicate_feed in duplicate_feeds: for duplicate_feed in duplicate_feeds:
already_subscribed = UserSubscription.objects.filter(user=self.user, feed=duplicate_feed.feed) already_subscribed = UserSubscription.objects.filter(user=self.user, feed=duplicate_feed.feed)
if not already_subscribed: if not already_subscribed:
@ -130,8 +130,8 @@ class UserSubscription(models.Model):
now = datetime.datetime.utcnow() now = datetime.datetime.utcnow()
# Use the latest story to get last read time. # Use the latest story to get last read time.
if MStory.objects(story_feed_id=self.feed.pk).first(): if MStory.objects(story_feed_id=self.feed_id).first():
latest_story_date = MStory.objects(story_feed_id=self.feed.pk).order_by('-story_date').only('story_date')[0]['story_date']\ latest_story_date = MStory.objects(story_feed_id=self.feed_id).order_by('-story_date').only('story_date')[0]['story_date']\
+ datetime.timedelta(seconds=1) + datetime.timedelta(seconds=1)
else: else:
latest_story_date = now latest_story_date = now
@ -144,7 +144,7 @@ class UserSubscription(models.Model):
self.unread_count_updated = now self.unread_count_updated = now
self.oldest_unread_story_date = now self.oldest_unread_story_date = now
self.needs_unread_recalc = False self.needs_unread_recalc = False
MUserStory.delete_marked_as_read_stories(self.user.pk, self.feed.pk) MUserStory.delete_marked_as_read_stories(self.user_id, self.feed_id)
self.save() self.save()
@ -165,7 +165,7 @@ class UserSubscription(models.Model):
for story_id in set(story_ids): for story_id in set(story_ids):
try: try:
story = MStory.objects.get(story_feed_id=self.feed.pk, story_guid=story_id) story = MStory.objects.get(story_feed_id=self.feed_id, story_guid=story_id)
except MStory.DoesNotExist: except MStory.DoesNotExist:
# Story has been deleted, probably by feed_fetcher. # Story has been deleted, probably by feed_fetcher.
continue continue
@ -173,13 +173,13 @@ class UserSubscription(models.Model):
continue continue
now = datetime.datetime.utcnow() now = datetime.datetime.utcnow()
date = now if now > story.story_date else story.story_date # For handling future stories date = now if now > story.story_date else story.story_date # For handling future stories
m = MUserStory(story=story, user_id=self.user.pk, m = MUserStory(story=story, user_id=self.user_id,
feed_id=self.feed.pk, read_date=date, feed_id=self.feed_id, read_date=date,
story_id=story_id, story_date=story.story_date) story_id=story_id, story_date=story.story_date)
try: try:
m.save() m.save()
except OperationError, e: except OperationError, e:
original_m = MUserStory.objects.get(story=story, user_id=self.user.pk, feed_id=self.feed.pk) original_m = MUserStory.objects.get(story=story, user_id=self.user_id, feed_id=self.feed_id)
logging.user(request, "~BRMarked story as read error: %s" % (e)) logging.user(request, "~BRMarked story as read error: %s" % (e))
logging.user(request, "~BRMarked story as read: %s" % (story_id)) logging.user(request, "~BRMarked story as read: %s" % (story_id))
logging.user(request, "~BROrigin story as read: %s" % (m.story.story_guid)) logging.user(request, "~BROrigin story as read: %s" % (m.story.story_guid))
@ -222,15 +222,15 @@ class UserSubscription(models.Model):
else: else:
self.mark_read_date = date_delta self.mark_read_date = date_delta
read_stories = MUserStory.objects(user_id=self.user.pk, read_stories = MUserStory.objects(user_id=self.user_id,
feed_id=self.feed.pk, feed_id=self.feed_id,
read_date__gte=self.mark_read_date) read_date__gte=self.mark_read_date)
# if not silent: # if not silent:
# logging.info(' ---> [%s] Read stories: %s' % (self.user, datetime.datetime.now() - now)) # logging.info(' ---> [%s] Read stories: %s' % (self.user, datetime.datetime.now() - now))
read_stories_ids = [] read_stories_ids = []
for us in read_stories: for us in read_stories:
read_stories_ids.append(us.story_id) read_stories_ids.append(us.story_id)
stories_db = stories_db or MStory.objects(story_feed_id=self.feed.pk, stories_db = stories_db or MStory.objects(story_feed_id=self.feed_id,
story_date__gte=date_delta) story_date__gte=date_delta)
# if not silent: # if not silent:
# logging.info(' ---> [%s] MStory: %s' % (self.user, datetime.datetime.now() - now)) # logging.info(' ---> [%s] MStory: %s' % (self.user, datetime.datetime.now() - now))
@ -243,14 +243,14 @@ class UserSubscription(models.Model):
unread_stories_db.append(story) unread_stories_db.append(story)
if story.story_date < oldest_unread_story_date: if story.story_date < oldest_unread_story_date:
oldest_unread_story_date = story.story_date oldest_unread_story_date = story.story_date
stories = Feed.format_stories(unread_stories_db, self.feed.pk) stories = Feed.format_stories(unread_stories_db, self.feed_id)
# if not silent: # if not silent:
# logging.info(' ---> [%s] Format stories: %s' % (self.user, datetime.datetime.now() - now)) # logging.info(' ---> [%s] Format stories: %s' % (self.user, datetime.datetime.now() - now))
classifier_feeds = list(MClassifierFeed.objects(user_id=self.user.pk, feed_id=self.feed.pk)) classifier_feeds = list(MClassifierFeed.objects(user_id=self.user_id, feed_id=self.feed_id))
classifier_authors = list(MClassifierAuthor.objects(user_id=self.user.pk, feed_id=self.feed.pk)) classifier_authors = list(MClassifierAuthor.objects(user_id=self.user_id, feed_id=self.feed_id))
classifier_titles = list(MClassifierTitle.objects(user_id=self.user.pk, feed_id=self.feed.pk)) classifier_titles = list(MClassifierTitle.objects(user_id=self.user_id, feed_id=self.feed_id))
classifier_tags = list(MClassifierTag.objects(user_id=self.user.pk, feed_id=self.feed.pk)) classifier_tags = list(MClassifierTag.objects(user_id=self.user_id, feed_id=self.feed_id))
# if not silent: # if not silent:
# logging.info(' ---> [%s] Classifiers: %s (%s)' % (self.user, datetime.datetime.now() - now, classifier_feeds.count() + classifier_authors.count() + classifier_tags.count() + classifier_titles.count())) # logging.info(' ---> [%s] Classifiers: %s (%s)' % (self.user, datetime.datetime.now() - now, classifier_feeds.count() + classifier_authors.count() + classifier_tags.count() + classifier_titles.count()))
@ -326,7 +326,7 @@ class UserSubscription(models.Model):
return return
# Switch read stories # Switch read stories
user_stories = MUserStory.objects(user_id=self.user.pk, feed_id=old_feed.pk) user_stories = MUserStory.objects(user_id=self.user_id, feed_id=old_feed.pk)
logging.info(" ---> %s read stories" % user_stories.count()) logging.info(" ---> %s read stories" % user_stories.count())
for user_story in user_stories: for user_story in user_stories:
user_story.feed_id = new_feed.pk user_story.feed_id = new_feed.pk
@ -347,7 +347,7 @@ class UserSubscription(models.Model):
user_story.delete() user_story.delete()
def switch_feed_for_classifier(model): def switch_feed_for_classifier(model):
duplicates = model.objects(feed_id=old_feed.pk, user_id=self.user.pk) duplicates = model.objects(feed_id=old_feed.pk, user_id=self.user_id)
if duplicates.count(): if duplicates.count():
logging.info(" ---> Switching %s %s" % (duplicates.count(), model)) logging.info(" ---> Switching %s %s" % (duplicates.count(), model))
for duplicate in duplicates: for duplicate in duplicates:
@ -464,7 +464,7 @@ class UserSubscriptionFolders(models.Model):
return return
if user_sub: if user_sub:
user_sub.delete() user_sub.delete()
MUserStory.objects(user_id=self.user.pk, feed_id=feed_id).delete() MUserStory.objects(user_id=self.user_id, feed_id=feed_id).delete()
def delete_folder(self, folder_to_delete, in_folder, feed_ids_in_folder, commit_delete=True): def delete_folder(self, folder_to_delete, in_folder, feed_ids_in_folder, commit_delete=True):
def _find_folder_in_folders(old_folders, folder_name, feeds_to_delete, deleted_folder=None): def _find_folder_in_folders(old_folders, folder_name, feeds_to_delete, deleted_folder=None):

View file

@ -191,7 +191,7 @@ def load_feeds(request):
user_subs = UserSubscription.objects.select_related('feed').filter(user=user) user_subs = UserSubscription.objects.select_related('feed').filter(user=user)
for sub in user_subs: for sub in user_subs:
pk = sub.feed.pk pk = sub.feed_id
if update_counts: if update_counts:
sub.calculate_feed_scores(silent=True) sub.calculate_feed_scores(silent=True)
feeds[pk] = sub.canonical(include_favicon=include_favicons) feeds[pk] = sub.canonical(include_favicon=include_favicons)
@ -261,7 +261,7 @@ def load_feeds_flat(request):
for sub in user_subs: for sub in user_subs:
if sub.needs_unread_recalc: if sub.needs_unread_recalc:
sub.calculate_feed_scores(silent=True) sub.calculate_feed_scores(silent=True)
feeds[sub.feed.pk] = sub.canonical(include_favicon=include_favicons) feeds[sub.feed_id] = sub.canonical(include_favicon=include_favicons)
folders = json.decode(folders.folders) folders = json.decode(folders.folders)
flat_folders = {} flat_folders = {}
@ -306,7 +306,7 @@ def refresh_feeds(request):
feed_icons = dict([(i.feed_id, i) for i in MFeedIcon.objects(feed_id__in=favicons_fetching)]) feed_icons = dict([(i.feed_id, i) for i in MFeedIcon.objects(feed_id__in=favicons_fetching)])
for i, sub in enumerate(user_subs): for i, sub in enumerate(user_subs):
pk = sub.feed.pk pk = sub.feed_id
if (sub.needs_unread_recalc or if (sub.needs_unread_recalc or
sub.unread_count_updated < UNREAD_CUTOFF or sub.unread_count_updated < UNREAD_CUTOFF or
sub.oldest_unread_story_date < UNREAD_CUTOFF): sub.oldest_unread_story_date < UNREAD_CUTOFF):
@ -325,21 +325,21 @@ def refresh_feeds(request):
if request.REQUEST.get('check_fetch_status', False): if request.REQUEST.get('check_fetch_status', False):
feeds[pk]['not_yet_fetched'] = not sub.feed.fetched_once feeds[pk]['not_yet_fetched'] = not sub.feed.fetched_once
if sub.feed.pk in favicons_fetching and sub.feed.pk in feed_icons: if sub.feed_id in favicons_fetching and sub.feed_id in feed_icons:
feeds[pk]['favicon'] = feed_icons[sub.feed.pk].data feeds[pk]['favicon'] = feed_icons[sub.feed_id].data
feeds[pk]['favicon_color'] = feed_icons[sub.feed.pk].color feeds[pk]['favicon_color'] = feed_icons[sub.feed_id].color
feeds[pk]['favicon_fetching'] = sub.feed.favicon_fetching feeds[pk]['favicon_fetching'] = sub.feed.favicon_fetching
user_subs = UserSubscription.objects.select_related('feed').filter(user=user, active=True) user_subs = UserSubscription.objects.select_related('feed').filter(user=user, active=True)
if favicons_fetching: if favicons_fetching:
sub_feed_ids = [s.feed.pk for s in user_subs] sub_feed_ids = [s.feed_id for s in user_subs]
moved_feed_ids = [f for f in favicons_fetching if f not in sub_feed_ids] moved_feed_ids = [f for f in favicons_fetching if f not in sub_feed_ids]
for moved_feed_id in moved_feed_ids: for moved_feed_id in moved_feed_ids:
duplicate_feeds = DuplicateFeed.objects.filter(duplicate_feed_id=moved_feed_id) duplicate_feeds = DuplicateFeed.objects.filter(duplicate_feed_id=moved_feed_id)
if duplicate_feeds and duplicate_feeds[0].feed.pk in feeds: if duplicate_feeds and duplicate_feeds[0].feed_id in feeds:
feeds[moved_feed_id] = feeds[duplicate_feeds[0].feed.pk] feeds[moved_feed_id] = feeds[duplicate_feeds[0].feed_id]
feeds[moved_feed_id]['dupe_feed_id'] = duplicate_feeds[0].feed.pk feeds[moved_feed_id]['dupe_feed_id'] = duplicate_feeds[0].feed_id
if settings.DEBUG or request.REQUEST.get('check_fetch_status'): if settings.DEBUG or request.REQUEST.get('check_fetch_status'):
diff = datetime.datetime.utcnow()-start diff = datetime.datetime.utcnow()-start
@ -771,7 +771,7 @@ def mark_story_as_unread(request):
# these would be ignored. # these would be ignored.
data = usersub.mark_story_ids_as_read(newer_stories, request=request) data = usersub.mark_story_ids_as_read(newer_stories, request=request)
m = MUserStory.objects(story_id=story_id, user_id=request.user.pk, feed_id=feed_id) m = MUserStory.objects(story_id=story_id, user_id=request.user_id, feed_id=feed_id)
m.delete() m.delete()
return data return data
@ -995,8 +995,8 @@ def feeds_trainer(request):
for us in usersubs: for us in usersubs:
if (not us.is_trained and us.feed.stories_last_month > 0) or feed_id: if (not us.is_trained and us.feed.stories_last_month > 0) or feed_id:
classifier = dict() classifier = dict()
classifier['classifiers'] = get_classifiers_for_user(user, us.feed.pk) classifier['classifiers'] = get_classifiers_for_user(user, us.feed_id)
classifier['feed_id'] = us.feed.pk classifier['feed_id'] = us.feed_id
classifier['stories_last_month'] = us.feed.stories_last_month classifier['stories_last_month'] = us.feed.stories_last_month
classifier['num_subscribers'] = us.feed.num_subscribers classifier['num_subscribers'] = us.feed.num_subscribers
classifier['feed_tags'] = json.decode(us.feed.data.popular_tags) if us.feed.data.popular_tags else [] classifier['feed_tags'] = json.decode(us.feed.data.popular_tags) if us.feed.data.popular_tags else []
@ -1016,7 +1016,7 @@ def save_feed_chooser(request):
for sub in usersubs: for sub in usersubs:
try: try:
if sub.feed.pk in approved_feeds: if sub.feed_id in approved_feeds:
activated += 1 activated += 1
if not sub.active: if not sub.active:
sub.active = True sub.active = True
@ -1094,7 +1094,7 @@ def mark_story_as_starred(request):
story_db = dict([(k, v) for k, v in story[0]._data.items() story_db = dict([(k, v) for k, v in story[0]._data.items()
if k is not None and v is not None]) if k is not None and v is not None])
now = datetime.datetime.now() now = datetime.datetime.now()
story_values = dict(user_id=request.user.pk, starred_date=now, **story_db) story_values = dict(user_id=request.user_id, starred_date=now, **story_db)
starred_story, created = MStarredStory.objects.get_or_create( starred_story, created = MStarredStory.objects.get_or_create(
story_guid=story_values.pop('story_guid'), story_guid=story_values.pop('story_guid'),
user_id=story_values.pop('user_id'), user_id=story_values.pop('user_id'),
@ -1114,7 +1114,7 @@ def mark_story_as_unstarred(request):
code = 1 code = 1
story_id = request.POST['story_id'] story_id = request.POST['story_id']
starred_story = MStarredStory.objects(user_id=request.user.pk, story_guid=story_id) starred_story = MStarredStory.objects(user_id=request.user_id, story_guid=story_id)
if starred_story: if starred_story:
logging.user(request, "~FCUnstarring: ~SB%s" % (starred_story[0].story_title[:50])) logging.user(request, "~FCUnstarring: ~SB%s" % (starred_story[0].story_title[:50]))
starred_story.delete() starred_story.delete()

View file

@ -15,7 +15,7 @@ def render_recommended_feed(context, recommended_feeds, unmoderated=False):
if context['user'].is_authenticated(): if context['user'].is_authenticated():
usersub = UserSubscription.objects.filter(user=user, feed=recommended_feeds[0].feed) usersub = UserSubscription.objects.filter(user=user, feed=recommended_feeds[0].feed)
recommended_feed = recommended_feeds and recommended_feeds[0] recommended_feed = recommended_feeds and recommended_feeds[0]
feed_icon = MFeedIcon.objects(feed_id=recommended_feed.feed.pk) feed_icon = MFeedIcon.objects(feed_id=recommended_feed.feed_id)
if recommended_feed: if recommended_feed:
return { return {

View file

@ -29,7 +29,7 @@ def load_recommended_feed(request):
logging.user(request, "~FBBrowse recommended feed: ~SBPage #%s" % (page+1)) logging.user(request, "~FBBrowse recommended feed: ~SBPage #%s" % (page+1))
recommended_feed = recommended_feeds and recommended_feeds[0] recommended_feed = recommended_feeds and recommended_feeds[0]
feed_icon = MFeedIcon.objects(feed_id=recommended_feed.feed.pk) feed_icon = MFeedIcon.objects(feed_id=recommended_feed.feed_id)
if recommended_feed: if recommended_feed:
return render_to_response('recommendations/render_recommended_feed.xhtml', { return render_to_response('recommendations/render_recommended_feed.xhtml', {

View file

@ -21,7 +21,7 @@ class IconImporter(object):
def __init__(self, feed, force=False): def __init__(self, feed, force=False):
self.feed = feed self.feed = feed
self.force = force self.force = force
self.feed_icon, _ = MFeedIcon.objects.get_or_create(feed_id=self.feed.pk) self.feed_icon, _ = MFeedIcon.objects.get_or_create(feed_id=self.feed_id)
def save(self): def save(self):
if not self.force and self.feed.favicon_not_found: if not self.force and self.feed.favicon_not_found:
@ -137,7 +137,7 @@ class IconImporter(object):
def fetch_image_from_page_data(self): def fetch_image_from_page_data(self):
image = None image = None
image_file = None image_file = None
content = MFeedPage.get_data(feed_id=self.feed.pk) content = MFeedPage.get_data(feed_id=self.feed_id)
url = self._url_from_html(content) url = self._url_from_html(content)
if url: if url:
image, image_file = self.get_image_from_url(url) image, image_file = self.get_image_from_url(url)

View file

@ -1309,7 +1309,7 @@ class DuplicateFeed(models.Model):
return { return {
'duplicate_address': self.duplicate_address, 'duplicate_address': self.duplicate_address,
'duplicate_feed_id': self.duplicate_feed_id, 'duplicate_feed_id': self.duplicate_feed_id,
'feed_id': self.feed.pk 'feed_id': self.feed_id
} }
def merge_feeds(original_feed_id, duplicate_feed_id, force=False): def merge_feeds(original_feed_id, duplicate_feed_id, force=False):

View file

@ -137,6 +137,6 @@ class PageImporter(object):
def save_page(self, html): def save_page(self, html):
if html and len(html) > 100: if html and len(html) > 100:
feed_page, _ = MFeedPage.objects.get_or_create(feed_id=self.feed.pk) feed_page, _ = MFeedPage.objects.get_or_create(feed_id=self.feed_id)
feed_page.page_data = html feed_page.page_data = html
feed_page.save() feed_page.save()

View file

@ -207,7 +207,7 @@ def exception_change_feed_address(request):
usersub.calculate_feed_scores(silent=False) usersub.calculate_feed_scores(silent=False)
feed.update_all_statistics() feed.update_all_statistics()
classifiers = get_classifiers_for_user(usersub.user, usersub.feed.pk) classifiers = get_classifiers_for_user(usersub.user, usersub.feed_id)
feeds = { feeds = {
original_feed.pk: usersub.canonical(full=True, classifiers=classifiers), original_feed.pk: usersub.canonical(full=True, classifiers=classifiers),
@ -215,7 +215,7 @@ def exception_change_feed_address(request):
return { return {
'code': code, 'code': code,
'feeds': feeds, 'feeds': feeds,
'new_feed_id': usersub.feed.pk, 'new_feed_id': usersub.feed_id,
} }
@ajax_login_required @ajax_login_required
@ -271,7 +271,7 @@ def exception_change_feed_link(request):
usersub.calculate_feed_scores(silent=False) usersub.calculate_feed_scores(silent=False)
feed.update_all_statistics() feed.update_all_statistics()
classifiers = get_classifiers_for_user(usersub.user, usersub.feed.pk) classifiers = get_classifiers_for_user(usersub.user, usersub.feed_id)
feeds = { feeds = {
original_feed.pk: usersub.canonical(full=True, classifiers=classifiers), original_feed.pk: usersub.canonical(full=True, classifiers=classifiers),
@ -279,7 +279,7 @@ def exception_change_feed_link(request):
return { return {
'code': code, 'code': code,
'feeds': feeds, 'feeds': feeds,
'new_feed_id': usersub.feed.pk, 'new_feed_id': usersub.feed_id,
} }
@login_required @login_required

View file

@ -13,7 +13,7 @@ from apps.rss_feeds.models import MStory, Feed, MStarredStory
from apps.social.models import MSharedStory, MSocialServices, MSocialProfile, MSocialSubscription from apps.social.models import MSharedStory, MSocialServices, MSocialProfile, MSocialSubscription
from apps.analyzer.models import MClassifierTitle, MClassifierAuthor, MClassifierFeed, MClassifierTag from apps.analyzer.models import MClassifierTitle, MClassifierAuthor, MClassifierFeed, MClassifierTag
from apps.analyzer.models import apply_classifier_titles, apply_classifier_feeds, apply_classifier_authors, apply_classifier_tags from apps.analyzer.models import apply_classifier_titles, apply_classifier_feeds, apply_classifier_authors, apply_classifier_tags
from apps.reader.models import MUserStory from apps.reader.models import MUserStory, UserSubscription
from utils import json_functions as json from utils import json_functions as json
from utils import log as logging from utils import log as logging
from utils import PyRSS2Gen as RSS from utils import PyRSS2Gen as RSS
@ -49,11 +49,11 @@ def mark_story_as_shared(request):
if not story: if not story:
return {'code': -1, 'message': 'Story not found.'} return {'code': -1, 'message': 'Story not found.'}
shared_story = MSharedStory.objects.filter(user_id=request.user.pk, story_feed_id=feed_id, story_guid=story_id) shared_story = MSharedStory.objects.filter(user_id=request.user_id, story_feed_id=feed_id, story_guid=story_id)
if not shared_story: if not shared_story:
story_db = dict([(k, v) for k, v in story._data.items() story_db = dict([(k, v) for k, v in story._data.items()
if k is not None and v is not None]) if k is not None and v is not None])
story_values = dict(user_id=request.user.pk, comments=comments, story_values = dict(user_id=request.user_id, comments=comments,
has_comments=bool(comments), **story_db) has_comments=bool(comments), **story_db)
MSharedStory.objects.create(**story_values) MSharedStory.objects.create(**story_values)
logging.user(request, "~FCSharing: ~SB~FM%s (~FB%s~FM)" % (story.story_title[:50], comments[:100])) logging.user(request, "~FCSharing: ~SB~FM%s (~FB%s~FM)" % (story.story_title[:50], comments[:100]))
@ -121,7 +121,7 @@ def load_social_stories(request, social_user_id, social_username=None):
social_user_id = int(social_user_id) social_user_id = int(social_user_id)
social_user = get_object_or_404(User, pk=social_user_id) social_user = get_object_or_404(User, pk=social_user_id)
offset = int(request.REQUEST.get('offset', 0)) offset = int(request.REQUEST.get('offset', 0))
limit = int(request.REQUEST.get('limit', 10)) limit = int(request.REQUEST.get('limit', 6))
page = request.REQUEST.get('page') page = request.REQUEST.get('page')
if page: offset = limit * (int(page) - 1) if page: offset = limit * (int(page) - 1)
now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone) now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone)
@ -133,10 +133,12 @@ def load_social_stories(request, social_user_id, social_username=None):
return dict(stories=[]) return dict(stories=[])
stories = MSharedStory.stories_with_comments(stories, user, check_all=True) stories = MSharedStory.stories_with_comments(stories, user, check_all=True)
story_feed_ids = [s['story_feed_id'] for s in stories] story_feed_ids = list(set(s['story_feed_id'] for s in stories))
socialsub = MSocialSubscription.objects.get(user_id=user.pk, subscription_user_id=social_user_id) socialsub = MSocialSubscription.objects.get(user_id=user.pk, subscription_user_id=social_user_id)
usersubs = UserSubscription.objects.filter(user__pk=user.pk, feed__pk__in=story_feed_ids)
usersubs_map = dict((sub.feed_id, sub.mark_read_date) for sub in usersubs)
# Get intelligence classifier for user # Get intelligence classifier for user
# XXX TODO: Change all analyzers to use social feed ids instead of overlapping with feed ids. Ugh. # XXX TODO: Change all analyzers to use social feed ids instead of overlapping with feed ids. Ugh.
classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, feed_id=social_user_id)) classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, feed_id=social_user_id))
@ -216,19 +218,19 @@ def profile(request):
if request.method == 'POST': if request.method == 'POST':
return save_profile(request) return save_profile(request)
profile = MSocialProfile.objects.get(user_id=request.user.pk) profile = MSocialProfile.objects.get(user_id=request.user_id)
return dict(code=1, user_profile=profile.to_json(full=True)) return dict(code=1, user_profile=profile.to_json(full=True))
def save_profile(request): def save_profile(request):
data = request.POST data = request.POST
profile = MSocialProfile.objects.get(user_id=request.user.pk) profile = MSocialProfile.objects.get(user_id=request.user_id)
profile.location = data['location'] profile.location = data['location']
profile.bio = data['bio'] profile.bio = data['bio']
profile.website = data['website'] profile.website = data['website']
profile.save() profile.save()
social_services = MSocialServices.objects.get(user_id=request.user.pk) social_services = MSocialServices.objects.get(user_id=request.user_id)
social_services.set_photo(data['photo_service']) social_services.set_photo(data['photo_service'])
return dict(code=1, user_profile=profile.to_json(full=True)) return dict(code=1, user_profile=profile.to_json(full=True))
@ -237,7 +239,7 @@ def save_profile(request):
@json.json_view @json.json_view
def follow(request): def follow(request):
follow_user_id = int(request.POST['user_id']) follow_user_id = int(request.POST['user_id'])
profile = MSocialProfile.objects.get(user_id=request.user.pk) profile = MSocialProfile.objects.get(user_id=request.user_id)
profile.follow_user(follow_user_id) profile.follow_user(follow_user_id)
follow_profile = MSocialProfile.objects.get(user_id=follow_user_id) follow_profile = MSocialProfile.objects.get(user_id=follow_user_id)
@ -248,7 +250,7 @@ def follow(request):
@json.json_view @json.json_view
def unfollow(request): def unfollow(request):
unfollow_user_id = int(request.POST['user_id']) unfollow_user_id = int(request.POST['user_id'])
profile = MSocialProfile.objects.get(user_id=request.user.pk) profile = MSocialProfile.objects.get(user_id=request.user_id)
profile.unfollow_user(unfollow_user_id) profile.unfollow_user(unfollow_user_id)
unfollow_profile = MSocialProfile.objects.get(user_id=unfollow_user_id) unfollow_profile = MSocialProfile.objects.get(user_id=unfollow_user_id)
@ -278,13 +280,13 @@ def twitter_connect(request):
# Be sure that two people aren't using the same Twitter account. # Be sure that two people aren't using the same Twitter account.
existing_user = MSocialServices.objects.filter(twitter_uid=unicode(twitter_user.id)) existing_user = MSocialServices.objects.filter(twitter_uid=unicode(twitter_user.id))
if existing_user and existing_user[0].user_id != request.user.pk: if existing_user and existing_user[0].user_id != request.user_id:
user = User.objects.get(pk=existing_user[0].user_id) user = User.objects.get(pk=existing_user[0].user_id)
return dict(error=("Another user (%s, %s) has " return dict(error=("Another user (%s, %s) has "
"already connected with those Twitter credentials." "already connected with those Twitter credentials."
% (user.username, user.email_address))) % (user.username, user.email_address)))
social_services, _ = MSocialServices.objects.get_or_create(user_id=request.user.pk) social_services, _ = MSocialServices.objects.get_or_create(user_id=request.user_id)
social_services.twitter_uid = unicode(twitter_user.id) social_services.twitter_uid = unicode(twitter_user.id)
social_services.twitter_access_key = access_token.key social_services.twitter_access_key = access_token.key
social_services.twitter_access_secret = access_token.secret social_services.twitter_access_secret = access_token.secret
@ -332,13 +334,13 @@ def facebook_connect(request):
# Be sure that two people aren't using the same Facebook account. # Be sure that two people aren't using the same Facebook account.
existing_user = MSocialServices.objects.filter(facebook_uid=uid) existing_user = MSocialServices.objects.filter(facebook_uid=uid)
if existing_user and existing_user[0].user_id != request.user.pk: if existing_user and existing_user[0].user_id != request.user_id:
user = User.objects.get(pk=existing_user[0].user_id) user = User.objects.get(pk=existing_user[0].user_id)
return dict(error=("Another user (%s, %s) has " return dict(error=("Another user (%s, %s) has "
"already connected with those Facebook credentials." "already connected with those Facebook credentials."
% (user.username, user.email_address))) % (user.username, user.email_address)))
social_services, _ = MSocialServices.objects.get_or_create(user_id=request.user.pk) social_services, _ = MSocialServices.objects.get_or_create(user_id=request.user_id)
social_services.facebook_uid = uid social_services.facebook_uid = uid
social_services.facebook_access_token = access_token social_services.facebook_access_token = access_token
social_services.save() social_services.save()
@ -353,12 +355,12 @@ def facebook_connect(request):
@ajax_login_required @ajax_login_required
def twitter_disconnect(request): def twitter_disconnect(request):
social_services = MSocialServices.objects.get(user_id=request.user.pk) social_services = MSocialServices.objects.get(user_id=request.user_id)
social_services.disconnect_twitter() social_services.disconnect_twitter()
return friends(request) return friends(request)
@ajax_login_required @ajax_login_required
def facebook_disconnect(request): def facebook_disconnect(request):
social_services = MSocialServices.objects.get(user_id=request.user.pk) social_services = MSocialServices.objects.get(user_id=request.user_id)
social_services.disconnect_facebook() social_services.disconnect_facebook()
return friends(request) return friends(request)

View file

@ -26,7 +26,7 @@
</div> </div>
</h5> </h5>
<div class="NB-recommended" data-feed-id="{{ recommended_feed.feed.pk }}"> <div class="NB-recommended" data-feed-id="{{ recommended_feed.feed_id }}">
<div class="NB-recommended-statistics NB-javascript"></div> <div class="NB-recommended-statistics NB-javascript"></div>
<div class="NB-recommended-intelligence NB-javascript"></div> <div class="NB-recommended-intelligence NB-javascript"></div>
<img class="NB-recommended-favicon" src="data:image/png;base64,{{ feed_icon.data }}" /> <img class="NB-recommended-favicon" src="data:image/png;base64,{{ feed_icon.data }}" />

View file

@ -229,7 +229,7 @@ class ProcessFeed:
existing_stories = list(MStory.objects( existing_stories = list(MStory.objects(
# story_guid__in=story_guids, # story_guid__in=story_guids,
story_date__gte=start_date, story_date__gte=start_date,
story_feed_id=self.feed.pk story_feed_id=self.feed_id
).limit(len(story_guids))) ).limit(len(story_guids)))
# MStory.objects( # MStory.objects(