import datetime
import time
from django.shortcuts import render_to_response, get_object_or_404
from django.contrib.auth.decorators import login_required
from django.template import RequestContext
from django.template.loader import render_to_string
from django.db import IntegrityError
from django.views.decorators.cache import never_cache
from django.core.urlresolvers import reverse
from django.contrib.auth import login as login_user
from django.contrib.auth import logout as logout_user
from django.contrib.auth.models import User
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseForbidden, Http404
from django.conf import settings
from django.core.mail import mail_admins
from django.core.validators import email_re
from django.core.mail import EmailMultiAlternatives
from collections import defaultdict
from operator import itemgetter
from mongoengine.queryset import OperationError
from apps.recommendations.models import RecommendedFeed
from apps.analyzer.models import MClassifierTitle, MClassifierAuthor, MClassifierFeed, MClassifierTag
from apps.analyzer.models import apply_classifier_titles, apply_classifier_feeds, apply_classifier_authors, apply_classifier_tags
from apps.analyzer.models import get_classifiers_for_user
from apps.reader.models import UserSubscription, UserSubscriptionFolders, MUserStory, Feature
from apps.reader.forms import SignupForm, LoginForm, FeatureForm
from apps.rss_feeds.models import MFeedIcon
from apps.statistics.models import MStatistics, MFeedback
try:
    from apps.rss_feeds.models import Feed, MFeedPage, DuplicateFeed, MStory, MStarredStory, FeedLoadtime
except:
    pass
from utils import json_functions as json
from utils.user_functions import get_user, ajax_login_required
from utils.feed_functions import relative_timesince
from utils.story_functions import format_story_link_date__short
from utils.story_functions import format_story_link_date__long
from utils.story_functions import bunch
from utils.story_functions import story_score
from utils import log as logging
from vendor.timezones.utilities import localtime_for_timezone

SINGLE_DAY = 60*60*24

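# Front page / main reader view. Rebinds whichever form was submitted (login or
# signup) so validation errors survive the round-trip, and collects subscription
# counts, recommendations, statistics, and feedback for the template.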
@never_cache
def index(request):
    if request.method == "POST":
        if request.POST['submit'] == 'login':
            login_form = LoginForm(request.POST, prefix='login')
            signup_form = SignupForm(prefix='signup')
        else:
            login_form = LoginForm(prefix='login')
            signup_form = SignupForm(request.POST, prefix='signup')
    else:
        login_form = LoginForm(prefix='login')
        signup_form = SignupForm(prefix='signup')

    user = get_user(request)
    features = Feature.objects.all()[:3]
    feature_form = None
    if request.user.is_staff:
        feature_form = FeatureForm()

    feed_count = 0
    active_count = 0
    train_count = 0
    if request.user.is_authenticated():
        feed_count = UserSubscription.objects.filter(user=request.user).count()
        active_count = UserSubscription.objects.filter(user=request.user, active=True).count()
        train_count = UserSubscription.objects.filter(user=request.user, active=True, is_trained=False, feed__stories_last_month__gte=1).count()

    now = datetime.datetime.now()
    recommended_feeds = RecommendedFeed.objects.filter(is_public=True, approved_date__lte=now).select_related('feed')[:2]
    # recommended_feed_feedback = RecommendedFeedUserFeedback.objects.filter(recommendation=recommended_feed)

    statistics = MStatistics.all()
    feedbacks = MFeedback.all()
    howitworks_page = 0 # random.randint(0, 5)
    return render_to_response('reader/feeds.xhtml', {
        'user_profile': user.profile,
        'login_form': login_form,
        'signup_form': signup_form,
        'feature_form': feature_form,
        'features': features,
        'start_import_from_google_reader': request.session.get('import_from_google_reader', False),
        'howitworks_page': howitworks_page,
        'feed_count': feed_count,
        'active_count': active_count,
        'train_count': active_count - train_count,
        'account_images': range(1, 4),
        'recommended_feeds': recommended_feeds,
        'statistics': statistics,
        'feedbacks': feedbacks,
        # 'recommended_feed_feedback': recommended_feed_feedback,
    }, context_instance=RequestContext(request))

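# Logs a user in. API clients (the iPhone app, per the log message) get a JSON
# status code back; web clients are redirected to the index.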
@never_cache
def login(request):
    code = -1
    if request.method == "POST":
        form = LoginForm(request.POST, prefix='login')
        if form.is_valid():
            login_user(request, form.get_user())
            if request.POST.get('api'):
                logging.user(form.get_user(), "~FG~BB~SKiPhone Login~FW")
                code = 1
            else:
                logging.user(form.get_user(), "~FG~BBLogin~FW")
                return HttpResponseRedirect(reverse('index'))

    if request.POST.get('api'):
        return HttpResponse(json.encode(dict(code=code)), mimetype='application/json')
    else:
        return index(request)

@never_cache
def signup(request):
    if request.method == "POST":
        form = SignupForm(prefix='signup', data=request.POST)
        if form.is_valid():
            new_user = form.save()
            login_user(request, new_user)
            logging.user(new_user, "~FG~SB~BBNEW SIGNUP~FW")
            return HttpResponseRedirect(reverse('index'))

    return index(request)

@never_cache
def logout(request):
    logging.user(request.user, "~FG~BBLogout~FW")
    logout_user(request)

    if request.GET.get('api'):
        return HttpResponse(json.encode(dict(code=1)), mimetype='application/json')
    else:
        return HttpResponseRedirect(reverse('index'))

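# Returns the user's subscriptions as a dict keyed by feed id, plus the folder
# structure and starred-story count. As a side effect, feeds that look inactive
# or unfetched get their subscriber counts refreshed and a fetch scheduled.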
@json.json_view
def load_feeds(request):
    user = get_user(request)
    feeds = {}
    not_yet_fetched = False
    include_favicons = request.REQUEST.get('include_favicons', False)
    flat = request.REQUEST.get('flat', False)

    if flat: return load_feeds_flat(request)

    try:
        folders = UserSubscriptionFolders.objects.get(user=user)
    except UserSubscriptionFolders.DoesNotExist:
        data = dict(feeds=[], folders=[])
        return data
    except UserSubscriptionFolders.MultipleObjectsReturned:
        # Keep the first folder set and delete the duplicates. A sliced
        # queryset can't be .delete()'d directly, so delete them one by one.
        for duplicate_folders in UserSubscriptionFolders.objects.filter(user=user)[1:]:
            duplicate_folders.delete()
        folders = UserSubscriptionFolders.objects.get(user=user)

    user_subs = UserSubscription.objects.select_related('feed').filter(user=user)

    for sub in user_subs:
        feeds[sub.feed.pk] = sub.canonical(include_favicon=include_favicons)
        if feeds[sub.feed.pk].get('not_yet_fetched'):
            not_yet_fetched = True
        if not sub.feed.active and not sub.feed.has_feed_exception and not sub.feed.has_page_exception:
            sub.feed.count_subscribers()
            sub.feed.schedule_feed_fetch_immediately()
        if sub.active and sub.feed.active_subscribers <= 0:
            sub.feed.count_subscribers()
            sub.feed.schedule_feed_fetch_immediately()

    if not_yet_fetched:
        for f in feeds:
            if 'not_yet_fetched' not in feeds[f]:
                feeds[f]['not_yet_fetched'] = False

    starred_count = MStarredStory.objects(user_id=user.pk).count()

    data = {
        'feeds': feeds,
        'folders': json.decode(folders.folders),
        'starred_count': starred_count,
    }
    return data

@json.json_view
def load_feed_favicons(request):
    user = get_user(request)
    feed_ids = request.REQUEST.getlist('feed_ids')
    user_subs = UserSubscription.objects.select_related('feed').filter(user=user, active=True)
    if feed_ids:
        user_subs = user_subs.filter(feed__in=feed_ids)

    feed_ids = [sub['feed__pk'] for sub in user_subs.values('feed__pk')]
    feed_icons = dict([(i.feed_id, i.data) for i in MFeedIcon.objects(feed_id__in=feed_ids)])

    return feed_icons

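# Flat variant of load_feeds (reached via its `flat` parameter): folder nesting
# is collapsed into "Parent - Child" titles for clients that want a flat list.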
def load_feeds_flat(request):
    user = get_user(request)
    feeds = {}

    try:
        folders = UserSubscriptionFolders.objects.get(user=user)
    except UserSubscriptionFolders.DoesNotExist:
        data = dict(folders=[])
        return data

    user_subs = UserSubscription.objects.select_related('feed').filter(user=user)

    for sub in user_subs:
        if sub.needs_unread_recalc:
            sub.calculate_feed_scores(silent=True)
        feeds[sub.feed.pk] = {
            'id': sub.feed.pk,
            'feed_title': sub.user_title or sub.feed.feed_title,
            'feed_link': sub.feed.feed_link,
            'ps': sub.unread_count_positive,
            'nt': sub.unread_count_neutral,
            'ng': sub.unread_count_negative,
        }

    folders = json.decode(folders.folders)
    flat_folders = {}

    def make_feeds_folder(items, parent_folder="", depth=0):
        for item in items:
            if isinstance(item, int) and item in feeds:
                feed = feeds[item]
                if not parent_folder:
                    parent_folder = ' '
                if parent_folder in flat_folders:
                    flat_folders[parent_folder].append(feed)
                else:
                    flat_folders[parent_folder] = [feed]
            elif isinstance(item, dict):
                for folder_name in item:
                    folder = item[folder_name]
                    flat_folder_name = "%s%s%s" % (
                        parent_folder,
                        " - " if parent_folder else "",
                        folder_name
                    )
                    make_feeds_folder(folder, flat_folder_name, depth+1)

    make_feeds_folder(folders)
    data = dict(flat_folders=flat_folders, user=user.username)
    return data

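# Recalculates and returns unread counts (ps/nt/ng) for the user's active
# subscriptions, plus exception state and any favicons that finished fetching.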
@json.json_view
def refresh_feeds(request):
    start = datetime.datetime.utcnow()
    user = get_user(request)
    feed_ids = request.REQUEST.getlist('feed_id')
    feeds = {}
    user_subs = UserSubscription.objects.select_related('feed').filter(user=user, active=True)
    if feed_ids:
        user_subs = user_subs.filter(feed__in=feed_ids)
    UNREAD_CUTOFF = datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD)
    favicons_fetching = [int(f) for f in request.REQUEST.getlist('favicons_fetching') if f]
    feed_icons = dict([(i.feed_id, i) for i in MFeedIcon.objects(feed_id__in=favicons_fetching)])

    for sub in user_subs:
        if (sub.needs_unread_recalc or
            sub.unread_count_updated < UNREAD_CUTOFF or
            sub.oldest_unread_story_date < UNREAD_CUTOFF):
            sub.calculate_feed_scores(silent=True)
        feeds[sub.feed.pk] = {
            'ps': sub.unread_count_positive,
            'nt': sub.unread_count_neutral,
            'ng': sub.unread_count_negative,
        }
        if sub.feed.has_feed_exception or sub.feed.has_page_exception:
            feeds[sub.feed.pk]['has_exception'] = True
            feeds[sub.feed.pk]['exception_type'] = 'feed' if sub.feed.has_feed_exception else 'page'
            feeds[sub.feed.pk]['feed_address'] = sub.feed.feed_address
            feeds[sub.feed.pk]['exception_code'] = sub.feed.exception_code
        if request.REQUEST.get('check_fetch_status', False):
            feeds[sub.feed.pk]['not_yet_fetched'] = not sub.feed.fetched_once
        if sub.feed.pk in favicons_fetching and sub.feed.pk in feed_icons:
            feeds[sub.feed.pk]['favicon'] = feed_icons[sub.feed.pk].data
            feeds[sub.feed.pk]['favicon_color'] = feed_icons[sub.feed.pk].color
            feeds[sub.feed.pk]['favicon_fetching'] = bool(not (feed_icons[sub.feed.pk].not_found or
                                                               feed_icons[sub.feed.pk].data))

    if settings.DEBUG:
        diff = datetime.datetime.utcnow()-start
        timediff = float("%s.%.2s" % (diff.seconds, (diff.microseconds / 1000)))
        logging.user(request.user, "~FBRefreshing %s feeds (%s seconds)" % (user_subs.count(), timediff))

    return {'feeds': feeds}

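# Loads one page of stories for a single feed, annotating each story with read
# status, starred state, and intelligence scores from the user's classifiers.
# Falls back to the canonical feed when the requested id is a known duplicate.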
@json.json_view
def load_single_feed(request, feed_id):
    start = datetime.datetime.utcnow()
    user = get_user(request)
    offset = int(request.REQUEST.get('offset', 0))
    limit = int(request.REQUEST.get('limit', 12))
    page = int(request.REQUEST.get('page', 1))
    if page:
        offset = limit * (page-1)
    dupe_feed_id = None
    if not feed_id:
        raise Http404

    try:
        feed = Feed.objects.get(id=feed_id)
    except Feed.DoesNotExist:
        feed_address = request.REQUEST.get('feed_address')
        dupe_feed = DuplicateFeed.objects.filter(duplicate_address=feed_address)
        if dupe_feed:
            feed = dupe_feed[0].feed
            dupe_feed_id = feed_id
        else:
            raise Http404

    stories = feed.get_stories(offset, limit)

    # Get intelligence classifiers for user
    classifier_feeds = MClassifierFeed.objects(user_id=user.pk, feed_id=feed_id)
    classifier_authors = MClassifierAuthor.objects(user_id=user.pk, feed_id=feed_id)
    classifier_titles = MClassifierTitle.objects(user_id=user.pk, feed_id=feed_id)
    classifier_tags = MClassifierTag.objects(user_id=user.pk, feed_id=feed_id)

    # The rest of this view treats a missing subscription as usersub=None,
    # so don't let the lookup raise.
    try:
        usersub = UserSubscription.objects.get(user=user, feed=feed)
    except UserSubscription.DoesNotExist:
        usersub = None
    userstories = []
    if usersub:
        userstories_db = MUserStory.objects(user_id=user.pk,
                                            feed_id=feed.pk,
                                            read_date__gte=usersub.mark_read_date)
        starred_stories = MStarredStory.objects(user_id=user.pk, story_feed_id=feed_id).only('story_guid', 'starred_date')
        starred_stories = dict([(story.story_guid, story.starred_date) for story in starred_stories])

        for us in userstories_db:
            if hasattr(us.story, 'story_guid') and isinstance(us.story.story_guid, unicode):
                userstories.append(us.story.story_guid)
            elif hasattr(us.story, 'id') and isinstance(us.story.id, unicode):
                userstories.append(us.story.id) # TODO: Remove me after migration from story.id->guid

    for story in stories:
        [x.rewind() for x in [classifier_feeds, classifier_authors, classifier_tags, classifier_titles]]
        story_date = localtime_for_timezone(story['story_date'], user.profile.timezone)
        now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone)
        story['short_parsed_date'] = format_story_link_date__short(story_date, now)
        story['long_parsed_date'] = format_story_link_date__long(story_date, now)
        if usersub:
            if story['id'] in userstories:
                story['read_status'] = 1
            elif not story.get('read_status') and story['story_date'] < usersub.mark_read_date:
                story['read_status'] = 1
            elif not story.get('read_status') and story['story_date'] > usersub.last_read_date:
                story['read_status'] = 0
            if story['id'] in starred_stories:
                story['starred'] = True
                starred_date = localtime_for_timezone(starred_stories[story['id']], user.profile.timezone)
                story['starred_date'] = format_story_link_date__long(starred_date, now)
        else:
            story['read_status'] = 1
        story['intelligence'] = {
            'feed': apply_classifier_feeds(classifier_feeds, feed),
            'author': apply_classifier_authors(classifier_authors, story),
            'tags': apply_classifier_tags(classifier_tags, story),
            'title': apply_classifier_titles(classifier_titles, story),
        }

    # Intelligence
    feed_tags = json.decode(feed.data.popular_tags) if feed.data.popular_tags else []
    feed_authors = json.decode(feed.data.popular_authors) if feed.data.popular_authors else []
    classifiers = get_classifiers_for_user(user, feed_id, classifier_feeds,
                                           classifier_authors, classifier_titles, classifier_tags)

    if usersub:
        usersub.feed_opens += 1
        usersub.save()

    diff = datetime.datetime.utcnow()-start
    timediff = float("%s.%.2s" % (diff.seconds, (diff.microseconds / 1000)))
    last_update = relative_timesince(feed.last_update)
    logging.user(request.user, "~FYLoading feed: ~SB%s%s ~SN(%s seconds)" % (
        feed, ('~SN/p%s' % page) if page > 1 else '', timediff))
    FeedLoadtime.objects.create(feed=feed, loadtime=timediff)

    data = dict(stories=stories,
                feed_tags=feed_tags,
                feed_authors=feed_authors,
                classifiers=classifiers,
                last_update=last_update,
                feed_id=feed.pk)
    if dupe_feed_id: data['dupe_feed_id'] = dupe_feed_id
    if not usersub:
        data.update(feed.canonical())

    return data

def load_feed_page(request, feed_id):
    if not feed_id:
        raise Http404

    data = MFeedPage.get_data(feed_id=feed_id)

    if not data:
        data = "Fetching feed..."

    return HttpResponse(data, mimetype='text/html')

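# Returns a page of the user's starred stories with localized dates. All
# intelligence scores are zeroed; classifiers are not applied in this view.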
@json.json_view
def load_starred_stories(request):
    user = get_user(request)
    offset = int(request.REQUEST.get('offset', 0))
    limit = int(request.REQUEST.get('limit', 10))
    page = int(request.REQUEST.get('page', 0))
    if page: offset = limit * page

    mstories = MStarredStory.objects(user_id=user.pk).order_by('-starred_date')[offset:offset+limit]
    stories = Feed.format_stories(mstories)

    for story in stories:
        story_date = localtime_for_timezone(story['story_date'], user.profile.timezone)
        now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone)
        story['short_parsed_date'] = format_story_link_date__short(story_date, now)
        story['long_parsed_date'] = format_story_link_date__long(story_date, now)
        starred_date = localtime_for_timezone(story['starred_date'], user.profile.timezone)
        story['starred_date'] = format_story_link_date__long(starred_date, now)
        story['read_status'] = 1
        story['starred'] = True
        story['intelligence'] = {
            'feed': 0,
            'author': 0,
            'tags': 0,
            'title': 0,
        }

    logging.user(request.user, "~FCLoading starred stories: ~SB%s stories" % (len(stories)))

    return dict(stories=stories)

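# "River of news": merges unread stories across many feeds at once. Read
# stories are excluded in the query, each feed's mark_read_date cutoff is
# enforced inside a Mongo map/reduce, and survivors are ranked by story_score.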
@json.json_view
def load_river_stories(request):
    limit = 18
    offset = 0
    start = datetime.datetime.utcnow()
    user = get_user(request)
    feed_ids = [int(feed_id) for feed_id in request.REQUEST.getlist('feeds') if feed_id]
    original_feed_ids = list(feed_ids)
    page = int(request.REQUEST.get('page', 0))+1
    read_stories_count = int(request.REQUEST.get('read_stories_count', 0))
    bottom_delta = datetime.timedelta(days=settings.DAYS_OF_UNREAD)

    if not feed_ids:
        logging.user(request.user, "~FCLoading empty river stories: page %s" % (page))
        return dict(stories=[])

    # Fetch all stories at and before the page number.
    # Not a single page, because reading stories can move them up in the unread order.
    # `read_stories_count` is an optimization, works best when all 25 stories before have been read.
    limit = limit * page - read_stories_count

    # Read stories to exclude
    read_stories = MUserStory.objects(user_id=user.pk, feed_id__in=feed_ids).only('story')
    read_stories = [rs.story.id for rs in read_stories]

    # Determine mark_as_read dates for all feeds to ignore all stories before this date.
    # max_feed_count = 0
    feed_counts = {}
    feed_last_reads = {}
    for feed_id in feed_ids:
        try:
            usersub = UserSubscription.objects.get(feed__pk=feed_id, user=user)
        except UserSubscription.DoesNotExist:
            continue
        if not usersub: continue
        feed_counts[feed_id] = (usersub.unread_count_negative * 1 +
                                usersub.unread_count_neutral * 10 +
                                usersub.unread_count_positive * 20)
        # if feed_counts[feed_id] > max_feed_count:
        #     max_feed_count = feed_counts[feed_id]
        feed_last_reads[feed_id] = int(time.mktime(usersub.mark_read_date.timetuple()))
    feed_counts = sorted(feed_counts.items(), key=itemgetter(1))[:50]
    feed_ids = [f[0] for f in feed_counts]
    feed_last_reads = dict([(str(feed_id), feed_last_reads[feed_id]) for feed_id in feed_ids])
    feed_counts = dict(feed_counts)

    # After excluding read stories, all that's left are stories
    # past the mark_read_date. Everything returned is guaranteed to be unread.
    mstories = MStory.objects(
        id__nin=read_stories,
        story_feed_id__in=feed_ids,
        story_date__gte=start - bottom_delta
    ).map_reduce("""function() {
            var d = feed_last_reads[this[~story_feed_id]];
            if (this[~story_date].getTime()/1000 > d) {
                emit(this[~id], this);
            }
        }""",
        """function(key, values) {
            return values[0];
        }""",
        scope={
            'feed_last_reads': feed_last_reads
        }
    )
    mstories = [story.value for story in mstories]

    mstories = sorted(mstories, cmp=lambda x, y: cmp(story_score(y, bottom_delta), story_score(x, bottom_delta)))

    # story_feed_counts = defaultdict(int)
    # mstories_pruned = []
    # for story in mstories:
    #     print story['story_title'], story_feed_counts[story['story_feed_id']]
    #     if story_feed_counts[story['story_feed_id']] >= 3: continue
    #     mstories_pruned.append(story)
    #     story_feed_counts[story['story_feed_id']] += 1
    stories = []
    for i, story in enumerate(mstories):
        if i < offset: continue
        if i >= offset + limit: break
        stories.append(bunch(story))
    stories = Feed.format_stories(stories)
    found_feed_ids = list(set([story['story_feed_id'] for story in stories]))

    # Find starred stories
    starred_stories = MStarredStory.objects(
        user_id=user.pk,
        story_feed_id__in=found_feed_ids
    ).only('story_guid', 'starred_date')
    starred_stories = dict([(story.story_guid, story.starred_date)
                            for story in starred_stories])

    # Intelligence classifiers for all feeds involved
    def sort_by_feed(classifiers):
        feed_classifiers = defaultdict(list)
        for classifier in classifiers:
            feed_classifiers[classifier.feed_id].append(classifier)
        return feed_classifiers
    classifier_feeds = sort_by_feed(MClassifierFeed.objects(user_id=user.pk, feed_id__in=found_feed_ids))
    classifier_authors = sort_by_feed(MClassifierAuthor.objects(user_id=user.pk, feed_id__in=found_feed_ids))
    classifier_titles = sort_by_feed(MClassifierTitle.objects(user_id=user.pk, feed_id__in=found_feed_ids))
    classifier_tags = sort_by_feed(MClassifierTag.objects(user_id=user.pk, feed_id__in=found_feed_ids))

    # Just need to format stories
    for story in stories:
        story_date = localtime_for_timezone(story['story_date'], user.profile.timezone)
        now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone)
        story['short_parsed_date'] = format_story_link_date__short(story_date, now)
        story['long_parsed_date'] = format_story_link_date__long(story_date, now)
        story['read_status'] = 0
        if story['id'] in starred_stories:
            story['starred'] = True
            starred_date = localtime_for_timezone(starred_stories[story['id']], user.profile.timezone)
            story['starred_date'] = format_story_link_date__long(starred_date, now)
        story['intelligence'] = {
            'feed': apply_classifier_feeds(classifier_feeds[story['story_feed_id']], story['story_feed_id']),
            'author': apply_classifier_authors(classifier_authors[story['story_feed_id']], story),
            'tags': apply_classifier_tags(classifier_tags[story['story_feed_id']], story),
            'title': apply_classifier_titles(classifier_titles[story['story_feed_id']], story),
        }

    diff = datetime.datetime.utcnow() - start
    timediff = float("%s.%.2s" % (diff.seconds, (diff.microseconds / 1000)))
    logging.user(request.user, "~FCLoading river stories: page %s - ~SB%s/%s "
                 "stories ~SN(%s/%s/%s feeds) ~FB(%s seconds)" %
                 (page, len(stories), len(mstories), len(found_feed_ids),
                  len(feed_ids), len(original_feed_ids), timediff))

    return dict(stories=stories)

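# Marks every subscription as read, either entirely (days=0) or only up to a
# cutoff date `days` ago.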
@ajax_login_required
@json.json_view
def mark_all_as_read(request):
    code = 1
    days = int(request.POST.get('days', 0))

    feeds = UserSubscription.objects.filter(user=request.user)
    for sub in feeds:
        if days == 0:
            sub.mark_feed_read()
        else:
            read_date = datetime.datetime.utcnow() - datetime.timedelta(days=days)
            if sub.mark_read_date < read_date:
                sub.needs_unread_recalc = True
                sub.mark_read_date = read_date
                sub.save()

    logging.user(request.user, "~FMMarking all as read: ~SB%s days" % (days,))
    return dict(code=code)

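# Marks one or more stories in a feed as read by creating an MUserStory per
# story, resolving duplicate feed ids before giving up.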
@ajax_login_required
@json.json_view
def mark_story_as_read(request):
    story_ids = request.REQUEST.getlist('story_id')
    feed_id = int(request.REQUEST['feed_id'])

    try:
        usersub = UserSubscription.objects.select_related('feed').get(user=request.user, feed=feed_id)
    except (UserSubscription.DoesNotExist, Feed.DoesNotExist):
        duplicate_feed = DuplicateFeed.objects.filter(duplicate_feed_id=feed_id)
        if duplicate_feed:
            try:
                usersub = UserSubscription.objects.get(user=request.user,
                                                       feed=duplicate_feed[0].feed)
            except (UserSubscription.DoesNotExist, Feed.DoesNotExist):
                return dict(code=-1)
        else:
            return dict(code=-1)

    if not usersub.needs_unread_recalc:
        usersub.needs_unread_recalc = True
        usersub.save()

    data = dict(code=0, payload=story_ids)

    if len(story_ids) > 1:
        logging.user(request.user, "~FYRead %s stories in feed: %s" % (len(story_ids), usersub.feed))
    else:
        logging.user(request.user, "~FYRead story in feed: %s" % (usersub.feed))

    for story_id in story_ids:
        try:
            story = MStory.objects(story_feed_id=feed_id, story_guid=story_id)[0]
        except IndexError:
            # Story has been deleted, probably by feed_fetcher.
            continue
        now = datetime.datetime.utcnow()
        date = now if now > story.story_date else story.story_date # For handling future stories
        m = MUserStory(story=story, user_id=request.user.pk, feed_id=feed_id, read_date=date)
        try:
            m.save()
        except OperationError:
            logging.user(request.user, "~BRMarked story as read: Duplicate Story -> %s" % (story_id))
            m = MUserStory.objects.get(story=story, user_id=request.user.pk, feed_id=feed_id)
            m.read_date = date
            m.save()

    return data

@ajax_login_required
@json.json_view
def mark_story_as_unread(request):
    story_id = request.POST['story_id']
    feed_id = int(request.POST['feed_id'])

    # Mirror mark_story_as_read: a missing subscription raises
    # UserSubscription.DoesNotExist, and an unresolved duplicate would
    # otherwise leave `usersub` undefined.
    try:
        usersub = UserSubscription.objects.select_related('feed').get(user=request.user, feed=feed_id)
    except (UserSubscription.DoesNotExist, Feed.DoesNotExist):
        duplicate_feed = DuplicateFeed.objects.filter(duplicate_feed_id=feed_id)
        if duplicate_feed:
            try:
                usersub = UserSubscription.objects.get(user=request.user,
                                                       feed=duplicate_feed[0].feed)
            except (UserSubscription.DoesNotExist, Feed.DoesNotExist):
                return dict(code=-1)
        else:
            return dict(code=-1)

    if not usersub.needs_unread_recalc:
        usersub.needs_unread_recalc = True
        usersub.save()

    data = dict(code=0, payload=dict(story_id=story_id))
    logging.user(request.user, "~FY~SBUnread~SN story in feed: %s" % (usersub.feed))

    story = MStory.objects(story_feed_id=feed_id, story_guid=story_id)[0]
    m = MUserStory.objects(story=story, user_id=request.user.pk, feed_id=feed_id)
    m.delete()

    return data

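# Marks entire feeds as read and clears their per-user read-story records.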
@ajax_login_required
@json.json_view
def mark_feed_as_read(request):
    feed_ids = [int(f) for f in request.REQUEST.getlist('feed_id') if f]
    code = 0
    for feed_id in feed_ids:
        try:
            feed = Feed.objects.get(id=feed_id)
        except Feed.DoesNotExist:
            continue

        us = UserSubscription.objects.get(feed=feed, user=request.user)
        try:
            us.mark_feed_read()
        except IntegrityError:
            code = -1
        else:
            code = 1

        logging.user(request.user, "~FMMarking feed as read: ~SB%s" % (feed,))
        MUserStory.objects(user_id=request.user.pk, feed_id=feed_id).delete()
    return dict(code=code)

def _parse_user_info(user):
    return {
        'user_info': {
            'is_anonymous': json.encode(user.is_anonymous()),
            'is_authenticated': json.encode(user.is_authenticated()),
            'username': json.encode(user.username if user.is_authenticated() else 'Anonymous')
        }
    }

@ajax_login_required
@json.json_view
def add_url(request):
    code = 0
    url = request.POST['url']
    if not url:
        code = -1
        message = 'Enter in the website address or the feed URL.'
    else:
        folder = request.POST.get('folder', '')
        code, message, _ = UserSubscription.add_subscription(user=request.user, feed_address=url, folder=folder)

    return dict(code=code, message=message)

@ajax_login_required
@json.json_view
def add_folder(request):
    folder = request.POST['folder']
    parent_folder = request.POST.get('parent_folder', '')

    logging.user(request.user, "~FRAdding Folder: ~SB%s (in %s)" % (folder, parent_folder))

    if folder:
        code = 1
        message = ""
        user_sub_folders_object, _ = UserSubscriptionFolders.objects.get_or_create(user=request.user)
        user_sub_folders_object.add_folder(parent_folder, folder)
    else:
        code = -1
        message = "Gotta write in a folder name."

    return dict(code=code, message=message)

@ajax_login_required
@json.json_view
def delete_feed(request):
    feed_id = int(request.POST['feed_id'])
    in_folder = request.POST.get('in_folder', '')

    user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
    user_sub_folders.delete_feed(feed_id, in_folder)

    feed = Feed.objects.filter(pk=feed_id)
    if feed:
        feed[0].count_subscribers()

    return dict(code=1)

@ajax_login_required
@json.json_view
def delete_folder(request):
    folder_to_delete = request.POST['folder_name']
    in_folder = request.POST.get('in_folder', '')
    feed_ids_in_folder = [int(f) for f in request.REQUEST.getlist('feed_id') if f]

    # Works piss poor with duplicate folder titles, if they are both in the same folder.
    # Deletes all, but only in the same folder parent. But nobody should be doing that, right?
    user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
    user_sub_folders.delete_folder(folder_to_delete, in_folder, feed_ids_in_folder)

    return dict(code=1)

@ajax_login_required
@json.json_view
def rename_feed(request):
    feed = get_object_or_404(Feed, pk=int(request.POST['feed_id']))
    user_sub = UserSubscription.objects.get(user=request.user, feed=feed)
    feed_title = request.POST['feed_title']

    logging.user(request.user, "~FRRenaming feed '~SB%s~SN' to: ~SB%s" % (
        feed.feed_title, feed_title))

    user_sub.user_title = feed_title
    user_sub.save()

    return dict(code=1)

@ajax_login_required
@json.json_view
def rename_folder(request):
    folder_to_rename = request.POST['folder_name']
    new_folder_name = request.POST['new_folder_name']
    in_folder = request.POST.get('in_folder', '')

    # Works piss poor with duplicate folder titles, if they are both in the same folder.
    # Renames all, but only in the same folder parent. But nobody should be doing that, right?
    if new_folder_name:
        user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
        user_sub_folders.rename_folder(folder_to_rename, new_folder_name, in_folder)

    return dict(code=1)

@login_required
def add_feature(request):
    if not request.user.is_staff:
        return HttpResponseForbidden()

    code = -1
    form = FeatureForm(request.POST)

    if form.is_valid():
        form.save()
        code = 1
        return HttpResponseRedirect(reverse('index'))

    return dict(code=code)

@json.json_view
def load_features(request):
    page = int(request.REQUEST.get('page', 0))
    logging.user(request.user, "~FBBrowse features: ~SBPage #%s" % (page+1))
    features = Feature.objects.all()[page*3:(page+1)*3+1].values()
    features = [{
        'description': f['description'],
        'date': f['date'].strftime("%b %d, %Y")
    } for f in features]
    return features

@ajax_login_required
@json.json_view
def save_feed_order(request):
    folders = request.POST.get('folders')
    if folders:
        # Test that folders can be JSON decoded
        folders_list = json.decode(folders)
        assert folders_list is not None
        logging.user(request.user, "~FBFeed re-ordering: ~SB%s folders/feeds" % (len(folders_list)))
        user_sub_folders = UserSubscriptionFolders.objects.get(user=request.user)
        user_sub_folders.folders = folders
        user_sub_folders.save()

    return {}

@json.json_view
def feeds_trainer(request):
    classifiers = []
    feed_id = request.REQUEST.get('feed_id')
    user = get_user(request)
    usersubs = UserSubscription.objects.filter(user=user, active=True)
    if feed_id:
        feed = get_object_or_404(Feed, pk=feed_id)
        usersubs = usersubs.filter(feed=feed)
    usersubs = usersubs.select_related('feed').order_by('-feed__stories_last_month')

    for us in usersubs:
        if (not us.is_trained and us.feed.stories_last_month > 0) or feed_id:
            classifier = dict()
            classifier['classifiers'] = get_classifiers_for_user(user, us.feed.pk)
            classifier['feed_id'] = us.feed.pk
            classifier['stories_last_month'] = us.feed.stories_last_month
            classifier['feed_tags'] = json.decode(us.feed.data.popular_tags) if us.feed.data.popular_tags else []
            classifier['feed_authors'] = json.decode(us.feed.data.popular_authors) if us.feed.data.popular_authors else []
            classifiers.append(classifier)

    logging.user(user, "~FGLoading Trainer: ~SB%s feeds" % (len(classifiers)))

    return classifiers

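# Standard accounts are capped at 64 active feeds: activate the approved ones,
# deactivate the rest, and queue fetches for anything newly activated.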
@ajax_login_required
@json.json_view
def save_feed_chooser(request):
    approved_feeds = [int(feed_id) for feed_id in request.POST.getlist('approved_feeds') if feed_id][:64]
    activated = 0
    usersubs = UserSubscription.objects.filter(user=request.user)

    for sub in usersubs:
        try:
            if sub.feed.pk in approved_feeds:
                sub.active = True
                activated += 1
                sub.save()
                sub.feed.count_subscribers()
            elif sub.active:
                sub.active = False
                sub.save()
        except Feed.DoesNotExist:
            pass

    logging.user(request.user, "~BB~FW~SBActivated standard account: ~FC%s~SN/~SB%s" % (
        activated,
        usersubs.count()
    ))
    request.user.profile.queue_new_feeds()
    request.user.profile.refresh_stale_feeds(exclude_new=True)

    return {'activated': activated}

@ajax_login_required
def retrain_all_sites(request):
    for sub in UserSubscription.objects.filter(user=request.user):
        sub.is_trained = False
        sub.save()

    return feeds_trainer(request)

@login_required
def activate_premium_account(request):
    try:
        usersubs = UserSubscription.objects.select_related('feed').filter(user=request.user)
        for sub in usersubs:
            sub.active = True
            sub.save()
            if sub.feed.premium_subscribers <= 0:
                sub.feed.count_subscribers()
                sub.feed.schedule_feed_fetch_immediately()
    except Exception, e:
        subject = "Premium activation failed"
        message = "%s -- %s\n\n%s" % (request.user, usersubs, e)
        mail_admins(subject, message, fail_silently=True)

    request.user.profile.is_premium = True
    request.user.profile.save()

    return HttpResponseRedirect(reverse('index'))

@login_required
def login_as(request):
    if not request.user.is_staff:
        logging.user(request.user, "~SKNON-STAFF LOGGING IN AS ANOTHER USER!")
        assert False
        return HttpResponseForbidden()
    username = request.GET['user']
    user = get_object_or_404(User, username__iexact=username)
    user.backend = settings.AUTHENTICATION_BACKENDS[0]
    login_user(request, user)
    return HttpResponseRedirect(reverse('index'))

def iframe_buster(request):
    logging.user(request.user, "~FB~SBiFrame bust!")
    return HttpResponse(status=204)

@ajax_login_required
@json.json_view
def mark_story_as_starred(request):
    code = 1
    feed_id = int(request.POST['feed_id'])
    story_id = request.POST['story_id']

    story = MStory.objects(story_feed_id=feed_id, story_guid=story_id).limit(1)
    if story:
        story_db = dict([(k, v) for k, v in story[0]._data.items()
                         if k is not None and v is not None])
        now = datetime.datetime.now()
        story_values = dict(user_id=request.user.pk, starred_date=now, **story_db)
        MStarredStory.objects.create(**story_values)
        logging.user(request.user, "~FCStarring: ~SB%s" % (story[0].story_title[:50]))
    else:
        code = -1

    return {'code': code}

@ajax_login_required
@json.json_view
def mark_story_as_unstarred(request):
    code = 1
    story_id = request.POST['story_id']

    starred_story = MStarredStory.objects(user_id=request.user.pk, story_guid=story_id)
    if starred_story:
        logging.user(request.user, "~FCUnstarring: ~SB%s" % (starred_story[0].story_title[:50]))
        starred_story.delete()
    else:
        code = -1

    return {'code': code}

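# Emails a story to a recipient on the user's behalf. The mail is sent from a
# NewsBlur address with the sender in Cc and Reply-To so replies go to them.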
@ajax_login_required
@json.json_view
def send_story_email(request):
    code = 1
    message = 'OK'
    story_id = request.POST['story_id']
    feed_id = request.POST['feed_id']
    to_address = request.POST['to']
    from_name = request.POST['from_name']
    from_email = request.POST['from_email']
    comments = request.POST['comments']
    from_address = 'share@newsblur.com'

    if not email_re.match(to_address):
        code = -1
        message = 'You need to send the email to a valid email address.'
    else:
        story = MStory.objects(story_feed_id=feed_id, story_guid=story_id)[0]
        story = Feed.format_story(story, feed_id, text=True)
        feed = Feed.objects.get(pk=story['story_feed_id'])
        text = render_to_string('mail/email_story_text.xhtml', locals())
        html = render_to_string('mail/email_story_html.xhtml', locals())
        subject = "%s is sharing a story with you: \"%s\"" % (from_name, story['story_title'])
        # from_email must be a single address string, not a list.
        msg = EmailMultiAlternatives(subject, text,
                                     from_email='NewsBlur <%s>' % from_address,
                                     to=[to_address],
                                     cc=['%s <%s>' % (from_name, from_email)],
                                     headers={'Reply-To': '%s <%s>' % (from_name, from_email)})
        msg.attach_alternative(html, "text/html")
        msg.send()

    return {'code': code, 'message': message}