import datetime
import time
import boto
import redis
import requests
import random
import zlib
import re
from django.shortcuts import get_object_or_404
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.template.loader import render_to_string
from django.db import IntegrityError
from django.db.models import Q
from django.views.decorators.cache import never_cache
from django.urls import reverse
from django.contrib.auth import login as login_user
from django.contrib.auth import logout as logout_user
from django.contrib.auth.models import User
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseForbidden, Http404, UnreadablePostError
from django.conf import settings
from django.core.mail import mail_admins
from django.core.mail import EmailMultiAlternatives
from django.core.validators import validate_email
from django.contrib.sites.models import Site
from django.utils import feedgenerator
from django.utils.encoding import smart_str
from mongoengine.queryset import OperationError
from mongoengine.queryset import NotUniqueError
from apps.recommendations.models import RecommendedFeed
from apps.analyzer.models import MClassifierTitle, MClassifierAuthor, MClassifierFeed, MClassifierTag
from apps.analyzer.models import apply_classifier_titles, apply_classifier_feeds
from apps.analyzer.models import apply_classifier_authors, apply_classifier_tags
from apps.analyzer.models import get_classifiers_for_user, sort_classifiers_by_feed
from apps.profile.models import Profile, MCustomStyling, MDashboardRiver
from apps.reader.models import UserSubscription, UserSubscriptionFolders, RUserStory, Feature
from apps.reader.forms import SignupForm, LoginForm, FeatureForm
from apps.rss_feeds.models import MFeedIcon, MStarredStoryCounts, MSavedSearch
from apps.notifications.models import MUserFeedNotification
from apps.search.models import MUserSearch
from apps.statistics.models import MStatistics, MAnalyticsLoader
# from apps.search.models import SearchStarredStory
try:
    from apps.rss_feeds.models import Feed, MFeedPage, DuplicateFeed, MStory, MStarredStory
except ImportError:
    pass
from apps.social.models import MSharedStory, MSocialProfile, MSocialServices
from apps.social.models import MSocialSubscription, MActivity, MInteraction
from apps.categories.models import MCategory
from apps.social.views import load_social_page
from apps.rss_feeds.tasks import ScheduleImmediateFetches
from utils import json_functions as json
from utils.user_functions import get_user, ajax_login_required
from utils.user_functions import extract_user_agent
from utils.feed_functions import relative_timesince
from utils.story_functions import format_story_link_date__short
from utils.story_functions import format_story_link_date__long
from utils.story_functions import strip_tags
from utils import log as logging
from utils.view_functions import get_argument_or_404, render_to, is_true
from utils.view_functions import required_params
from utils.ratelimit import ratelimit
from vendor.timezones.utilities import localtime_for_timezone
import tweepy

BANNED_URLS = [
    "brentozar.com",
]

ALLOWED_SUBDOMAINS = [
    'dev',
    'www',
    'beta',
    'staging',
    'discovery',
    'debug',
    'debug3',
    'nb',
]

def get_subdomain(request):
    host = request.META.get('HTTP_HOST')
    if host and host.count(".") == 2:
        return host.split(".")[0]
    else:
        return None
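
# Illustrative note: get_subdomain() only recognizes hosts with exactly two
# dots, so "samuel.newsblur.com" yields "samuel" while "newsblur.com" and
# "localhost" yield None. A minimal sketch, assuming Django's RequestFactory:
#
#   from django.test import RequestFactory
#   request = RequestFactory().get('/', HTTP_HOST='samuel.newsblur.com')
#   assert get_subdomain(request) == 'samuel'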

@never_cache
@render_to('reader/dashboard.xhtml')
def index(request, **kwargs):
    subdomain = get_subdomain(request)
    if request.method == "GET" and subdomain and subdomain not in ALLOWED_SUBDOMAINS:
        username = request.subdomain or subdomain
        if '.' in username:
            username = username.split('.')[0]
        user = User.objects.filter(username=username)
        if not user:
            user = User.objects.filter(username__iexact=username)
        if user:
            user = user[0]
        if not user:
            return HttpResponseRedirect('http://%s%s' % (
                Site.objects.get_current().domain,
                reverse('index')))
        return load_social_page(request, user_id=user.pk, username=request.subdomain, **kwargs)

    if request.user.is_anonymous:
        return welcome(request, **kwargs)
    else:
        return dashboard(request, **kwargs)

def dashboard(request, **kwargs):
    user = request.user
    feed_count = UserSubscription.objects.filter(user=request.user).count()
    recommended_feeds = RecommendedFeed.objects.filter(is_public=True,
                                                       approved_date__lte=datetime.datetime.now()
                                                       ).select_related('feed')[:2]
    unmoderated_feeds = []
    if user.is_staff:
        unmoderated_feeds = RecommendedFeed.objects.filter(is_public=False,
                                                           declined_date__isnull=True
                                                           ).select_related('feed')[:2]
    statistics = MStatistics.all()
    social_profile = MSocialProfile.get_user(user.pk)
    custom_styling = MCustomStyling.get_user(user.pk)
    dashboard_rivers = MDashboardRiver.get_user_rivers(user.pk)
    preferences = json.decode(user.profile.preferences)

    if not user.is_active:
        url = "https://%s%s" % (Site.objects.get_current().domain,
                                reverse('stripe-form'))
        return HttpResponseRedirect(url)

    logging.user(request, "~FBLoading dashboard")

    return {
        'user_profile': user.profile,
        'preferences': preferences,
        'feed_count': feed_count,
        'custom_styling': custom_styling,
        'dashboard_rivers': dashboard_rivers,
        'account_images': list(range(1, 4)),
        'recommended_feeds': recommended_feeds,
        'unmoderated_feeds': unmoderated_feeds,
        'statistics': statistics,
        'social_profile': social_profile,
        'debug': settings.DEBUG,
    }, "reader/dashboard.xhtml"

def welcome(request, **kwargs):
    user = get_user(request)
    statistics = MStatistics.all()
    social_profile = MSocialProfile.get_user(user.pk)

    if request.method == "POST":
        if request.POST.get('submit', '').startswith('log'):
            login_form = LoginForm(request.POST, prefix='login')
            signup_form = SignupForm(prefix='signup')
        else:
            signup_form = SignupForm(request.POST, prefix='signup')
            return {
                "form": signup_form
            }, "accounts/signup.html"
    else:
        login_form = LoginForm(prefix='login')
        signup_form = SignupForm(prefix='signup')

    logging.user(request, "~FBLoading welcome")

    return {
        'user_profile': hasattr(user, 'profile') and user.profile,
        'login_form': login_form,
        'signup_form': signup_form,
        'statistics': statistics,
        'social_profile': social_profile,
        'post_request': request.method == 'POST',
    }, "reader/welcome.xhtml"

@never_cache
def login(request):
    code = -1
    message = ""
    if request.method == "POST":
        form = LoginForm(request.POST, prefix='login')
        if form.is_valid():
            login_user(request, form.get_user(), backend='django.contrib.auth.backends.ModelBackend')
            if request.POST.get('api'):
                logging.user(form.get_user(), "~FG~BB~SKiPhone Login~FW")
                code = 1
            else:
                logging.user(form.get_user(), "~FG~BBLogin~FW")
                next_url = request.POST.get('next', '')
                if next_url:
                    return HttpResponseRedirect(next_url)
                return HttpResponseRedirect(reverse('index'))
        else:
            message = list(form.errors.items())[0][1][0]

    if request.POST.get('api'):
        return HttpResponse(json.encode(dict(code=code, message=message)), content_type='application/json')
    else:
        return index(request)
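
# Illustrative sketch: login doubles as a JSON API when the client posts
# api=1, returning code=1 on success and code=-1 plus the first form error as
# `message` on failure. The URL and field names below are assumptions (the
# form uses the 'login' prefix, so its fields are prefixed accordingly):
#
#   from django.test import Client
#   response = Client().post('/login', {
#       'login-username': 'samuel',
#       'login-password': 'secret',
#       'api': 1,
#   })
#   # response.json() -> {'code': 1, 'message': ''} on success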

@never_cache
@render_to('accounts/signup.html')
def signup(request):
    if request.method == "POST":
        if settings.ENFORCE_SIGNUP_CAPTCHA:
            signup_form = SignupForm(request.POST, prefix='signup')
            return {
                "form": signup_form
            }
        form = SignupForm(prefix='signup', data=request.POST)
        if form.is_valid():
            new_user = form.save()
            login_user(request, new_user, backend='django.contrib.auth.backends.ModelBackend')
            logging.user(new_user, "~FG~SB~BBNEW SIGNUP: ~FW%s" % new_user.email)
            if not new_user.is_active:
                url = "https://%s%s" % (Site.objects.get_current().domain,
                                        reverse('stripe-form'))
                return HttpResponseRedirect(url)
            else:
                return HttpResponseRedirect(reverse('index'))

    return index(request)

@never_cache
def logout(request):
    logging.user(request, "~FG~BBLogout~FW")
    logout_user(request)

    if request.GET.get('api'):
        return HttpResponse(json.encode(dict(code=1)), content_type='application/json')
    else:
        return HttpResponseRedirect(reverse('index'))

def autologin(request, username, secret):
    next = request.GET.get('next', '')

    if not username or not secret:
        return HttpResponseForbidden()

    profile = Profile.objects.filter(user__username=username, secret_token=secret)
    if not profile:
        return HttpResponseForbidden()

    user = profile[0].user
    user.backend = settings.AUTHENTICATION_BACKENDS[0]
    login_user(request, user, backend='django.contrib.auth.backends.ModelBackend')
    logging.user(user, "~FG~BB~SKAuto-Login. Next stop: %s~FW" % (next if next else 'Homepage',))

    if next and not next.startswith('/'):
        next = '?next=' + next
        return HttpResponseRedirect(reverse('index') + next)
    elif next:
        return HttpResponseRedirect(next)
    else:
        return HttpResponseRedirect(reverse('index'))
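
# Illustrative note: autologin treats the per-user secret_token as the
# credential, so the URL itself grants access. For redirects, a `next` value
# that does not start with "/" is appended to the index URL as a ?next= query
# parameter, while a rooted path is redirected to directly.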

@ratelimit(minutes=1, requests=60)
@never_cache
@json.json_view
def load_feeds(request):
    user = get_user(request)
    feeds = {}
    include_favicons = is_true(request.GET.get('include_favicons', False))
    flat = is_true(request.GET.get('flat', False))
    update_counts = is_true(request.GET.get('update_counts', True))
    version = int(request.GET.get('v', 1))

    if include_favicons == 'false': include_favicons = False
    if update_counts == 'false': update_counts = False
    if flat == 'false': flat = False

    if flat: return load_feeds_flat(request)

    platform = extract_user_agent(request)
    if platform in ['iPhone', 'iPad', 'Android']:
        # Remove this check once the iOS and Android updates go out which have update_counts=False
        # and then guarantee a refresh_feeds call
        update_counts = False

    try:
        folders = UserSubscriptionFolders.objects.get(user=user)
    except UserSubscriptionFolders.DoesNotExist:
        data = dict(feeds=[], folders=[])
        return data
    except UserSubscriptionFolders.MultipleObjectsReturned:
        UserSubscriptionFolders.objects.filter(user=user)[1:].delete()
        folders = UserSubscriptionFolders.objects.get(user=user)

    user_subs = UserSubscription.objects.select_related('feed').filter(user=user)
    notifications = MUserFeedNotification.feeds_for_user(user.pk)

    day_ago = datetime.datetime.now() - datetime.timedelta(days=1)
    scheduled_feeds = []
    for sub in user_subs:
        pk = sub.feed_id
        if update_counts and sub.needs_unread_recalc:
            sub.calculate_feed_scores(silent=True)
        feeds[pk] = sub.canonical(include_favicon=include_favicons)
        if not sub.active: continue
        if pk in notifications:
            feeds[pk].update(notifications[pk])
        if not sub.feed.active and not sub.feed.has_feed_exception:
            scheduled_feeds.append(sub.feed.pk)
        elif sub.feed.active_subscribers <= 0:
            scheduled_feeds.append(sub.feed.pk)
        elif sub.feed.next_scheduled_update < day_ago:
            scheduled_feeds.append(sub.feed.pk)

    if len(scheduled_feeds) > 0 and request.user.is_authenticated:
        logging.user(request, "~SN~FMTasking the scheduling immediate fetch of ~SB%s~SN feeds..." %
                     len(scheduled_feeds))
        ScheduleImmediateFetches.apply_async(kwargs=dict(feed_ids=scheduled_feeds, user_id=user.pk))

    starred_counts, starred_count = MStarredStoryCounts.user_counts(user.pk, include_total=True)
    if not starred_count and len(starred_counts):
        starred_count = MStarredStory.objects(user_id=user.pk).count()

    saved_searches = MSavedSearch.user_searches(user.pk)

    social_params = {
        'user_id': user.pk,
        'include_favicon': include_favicons,
        'update_counts': update_counts,
    }
    social_feeds = MSocialSubscription.feeds(**social_params)
    social_profile = MSocialProfile.profile(user.pk)
    social_services = MSocialServices.profile(user.pk)
    dashboard_rivers = MDashboardRiver.get_user_rivers(user.pk)

    categories = None
    if not user_subs:
        categories = MCategory.serialize()

    logging.user(request, "~FB~SBLoading ~FY%s~FB/~FM%s~FB feeds/socials%s" % (
        len(list(feeds.keys())), len(social_feeds), '. ~FCUpdating counts.' if update_counts else ''))

    data = {
        'feeds': list(feeds.values()) if version == 2 else feeds,
        'social_feeds': social_feeds,
        'social_profile': social_profile,
        'social_services': social_services,
        'user_profile': user.profile,
        "is_staff": user.is_staff,
        'user_id': user.pk,
        'folders': json.decode(folders.folders),
        'starred_count': starred_count,
        'starred_counts': starred_counts,
        'saved_searches': saved_searches,
        'dashboard_rivers': dashboard_rivers,
        'categories': categories
    }
    return data
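
# Illustrative sketch of the JSON contract (parameter names come from the view
# above; the route path is an assumption):
#
#   GET /reader/feeds?include_favicons=true&update_counts=true&v=2
#
# With v=2 the `feeds` key is a list of canonical feed dicts; with v=1 it is a
# dict keyed by feed id. `folders` holds the decoded folder nesting, and
# `categories` is only populated for users with no subscriptions yet.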

@json.json_view
def load_feed_favicons(request):
    user = get_user(request)
    feed_ids = request.GET.getlist('feed_ids') or request.GET.getlist('feed_ids[]')

    if not feed_ids:
        user_subs = UserSubscription.objects.select_related('feed').filter(user=user, active=True)
        feed_ids = [sub['feed__pk'] for sub in user_subs.values('feed__pk')]

    feed_icons = dict([(i.feed_id, i.data) for i in MFeedIcon.objects(feed_id__in=feed_ids)])

    return feed_icons
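
# Illustrative note: the response maps feed_id -> icon data (assuming
# MFeedIcon.data holds a base64-encoded image string); omitting feed_ids
# returns icons for every active subscription of the requesting user.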

def load_feeds_flat(request):
    user = request.user
    include_favicons = is_true(request.GET.get('include_favicons', False))
    update_counts = is_true(request.GET.get('update_counts', True))
    include_inactive = is_true(request.GET.get('include_inactive', False))
    background_ios = is_true(request.GET.get('background_ios', False))

    feeds = {}
    inactive_feeds = {}
    day_ago = datetime.datetime.now() - datetime.timedelta(days=1)
    scheduled_feeds = []
    iphone_version = "2.1" # Preserved forever. Don't change.
    latest_ios_build = "52"
    latest_ios_version = "5.0.0b2"

    if include_favicons == 'false': include_favicons = False
    if update_counts == 'false': update_counts = False

    if not user.is_authenticated:
        return HttpResponseForbidden()

    try:
        folders = UserSubscriptionFolders.objects.get(user=user)
    except UserSubscriptionFolders.DoesNotExist:
        folders = []

    user_subs = UserSubscription.objects.select_related('feed').filter(user=user, active=True)
    notifications = MUserFeedNotification.feeds_for_user(user.pk)
    if not user_subs and folders:
        folders.auto_activate()
        user_subs = UserSubscription.objects.select_related('feed').filter(user=user, active=True)
    if include_inactive:
        inactive_subs = UserSubscription.objects.select_related('feed').filter(user=user, active=False)

    for sub in user_subs:
        pk = sub.feed_id
        if update_counts and sub.needs_unread_recalc:
            sub.calculate_feed_scores(silent=True)
        feeds[pk] = sub.canonical(include_favicon=include_favicons)
        if not sub.feed.active and not sub.feed.has_feed_exception:
            scheduled_feeds.append(sub.feed.pk)
        elif sub.feed.active_subscribers <= 0:
            scheduled_feeds.append(sub.feed.pk)
        elif sub.feed.next_scheduled_update < day_ago:
            scheduled_feeds.append(sub.feed.pk)
        if pk in notifications:
            feeds[pk].update(notifications[pk])

    if include_inactive:
        for sub in inactive_subs:
            inactive_feeds[sub.feed_id] = sub.canonical(include_favicon=include_favicons)

    if len(scheduled_feeds) > 0 and request.user.is_authenticated:
        logging.user(request, "~SN~FMTasking the scheduling immediate fetch of ~SB%s~SN feeds..." %
                     len(scheduled_feeds))
        ScheduleImmediateFetches.apply_async(kwargs=dict(feed_ids=scheduled_feeds, user_id=user.pk))

    flat_folders = []
    flat_folders_with_inactive = []
    if folders:
        flat_folders = folders.flatten_folders(feeds=feeds)
        flat_folders_with_inactive = folders.flatten_folders(feeds=feeds,
                                                             inactive_feeds=inactive_feeds)

    social_params = {
        'user_id': user.pk,
        'include_favicon': include_favicons,
        'update_counts': update_counts,
    }
    social_feeds = MSocialSubscription.feeds(**social_params)
    social_profile = MSocialProfile.profile(user.pk)
    social_services = MSocialServices.profile(user.pk)
    starred_counts, starred_count = MStarredStoryCounts.user_counts(user.pk, include_total=True)
    if not starred_count and len(starred_counts):
        starred_count = MStarredStory.objects(user_id=user.pk).count()

    categories = None
    if not user_subs:
        categories = MCategory.serialize()

    saved_searches = MSavedSearch.user_searches(user.pk)

    logging.user(request, "~FB~SBLoading ~FY%s~FB/~FM%s~FB/~FR%s~FB feeds/socials/inactive ~FMflat~FB%s%s" % (
        len(list(feeds.keys())), len(social_feeds), len(inactive_feeds), '. ~FCUpdating counts.' if update_counts else '',
        ' ~BB(background fetch)' if background_ios else ''))

    data = {
        "flat_folders": flat_folders,
        "flat_folders_with_inactive": flat_folders_with_inactive,
        "feeds": feeds,
        "inactive_feeds": inactive_feeds if include_inactive else {"0": "Include `include_inactive=true`"},
        "social_feeds": social_feeds,
        "social_profile": social_profile,
        "social_services": social_services,
        "user": user.username,
        "user_id": user.pk,
        "is_staff": user.is_staff,
        "user_profile": user.profile,
        "iphone_version": iphone_version,
        "latest_ios_build": latest_ios_build,
        "latest_ios_version": latest_ios_version,
        "categories": categories,
        'starred_count': starred_count,
        'starred_counts': starred_counts,
        'saved_searches': saved_searches,
        'share_ext_token': user.profile.secret_token,
    }
    return data
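
# Illustrative note: flatten_folders() collapses the nested folder tree into a
# single mapping of folder name -> feed ids, which is what the mobile clients
# consume; the exact key format is up to UserSubscriptionFolders, so treat any
# specific shape as an assumption rather than a guarantee.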

class ratelimit_refresh_feeds(ratelimit):
    def should_ratelimit(self, request):
        feed_ids = request.POST.getlist('feed_id') or request.POST.getlist('feed_id[]')
        if len(feed_ids) == 1:
            return False
        return True

@ratelimit_refresh_feeds(minutes=1, requests=30)
@never_cache
@json.json_view
def refresh_feeds(request):
    start = datetime.datetime.now()
    start_time = time.time()
    user = get_user(request)
    feed_ids = request.GET.getlist('feed_id') or request.GET.getlist('feed_id[]')
    check_fetch_status = request.GET.get('check_fetch_status')
    favicons_fetching = request.GET.getlist('favicons_fetching') or request.GET.getlist('favicons_fetching[]')
    social_feed_ids = [feed_id for feed_id in feed_ids if 'social:' in feed_id]
    feed_ids = list(set(feed_ids) - set(social_feed_ids))

    feeds = {}
    if feed_ids or (not social_feed_ids and not feed_ids):
        feeds = UserSubscription.feeds_with_updated_counts(user, feed_ids=feed_ids,
                                                           check_fetch_status=check_fetch_status)
    checkpoint1 = datetime.datetime.now()
    social_feeds = {}
    if social_feed_ids or (not social_feed_ids and not feed_ids):
        social_feeds = MSocialSubscription.feeds_with_updated_counts(user, social_feed_ids=social_feed_ids)
    checkpoint2 = datetime.datetime.now()

    favicons_fetching = [int(f) for f in favicons_fetching if f]
    feed_icons = {}
    if favicons_fetching:
        feed_icons = dict([(i.feed_id, i) for i in MFeedIcon.objects(feed_id__in=favicons_fetching)])

    for feed_id, feed in list(feeds.items()):
        if feed_id in favicons_fetching and feed_id in feed_icons:
            feeds[feed_id]['favicon'] = feed_icons[feed_id].data
            feeds[feed_id]['favicon_color'] = feed_icons[feed_id].color
            feeds[feed_id]['favicon_fetching'] = feed.get('favicon_fetching')

    user_subs = UserSubscription.objects.filter(user=user, active=True).only('feed')
    sub_feed_ids = [s.feed_id for s in user_subs]

    if favicons_fetching:
        moved_feed_ids = [f for f in favicons_fetching if f not in sub_feed_ids]
        for moved_feed_id in moved_feed_ids:
            duplicate_feeds = DuplicateFeed.objects.filter(duplicate_feed_id=moved_feed_id)

            if duplicate_feeds and duplicate_feeds[0].feed.pk in feeds:
                feeds[moved_feed_id] = feeds[duplicate_feeds[0].feed_id]
                feeds[moved_feed_id]['dupe_feed_id'] = duplicate_feeds[0].feed_id

    if check_fetch_status:
        missing_feed_ids = list(set(feed_ids) - set(sub_feed_ids))
        if missing_feed_ids:
            duplicate_feeds = DuplicateFeed.objects.filter(duplicate_feed_id__in=missing_feed_ids)
            for duplicate_feed in duplicate_feeds:
                feeds[duplicate_feed.duplicate_feed_id] = {'id': duplicate_feed.feed_id}

    interactions_count = MInteraction.user_unread_count(user.pk)

    if True or settings.DEBUG or check_fetch_status:
        end = datetime.datetime.now()
        extra_fetch = ""
        if check_fetch_status or favicons_fetching:
            extra_fetch = "(%s/%s)" % (check_fetch_status, len(favicons_fetching))
        logging.user(request, "~FBRefreshing %s+%s feeds %s (%.4s/%.4s/%.4s)" % (
            len(list(feeds.keys())), len(list(social_feeds.keys())), extra_fetch,
            (checkpoint1 - start).total_seconds(),
            (checkpoint2 - start).total_seconds(),
            (end - start).total_seconds(),
        ))

    MAnalyticsLoader.add(page_load=time.time() - start_time)

    return {
        'feeds': feeds,
        'social_feeds': social_feeds,
        'interactions_count': interactions_count,
    }
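
# Illustrative note: clients poll refresh_feeds with repeated feed_id
# parameters (?feed_id=1&feed_id=2&favicons_fetching=3) to pull fresh unread
# counts. A request posting a single feed_id is exempted from the limiter (see
# ratelimit_refresh_feeds.should_ratelimit above); bulk refreshes are capped
# at 30 per minute.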

@json.json_view
def interactions_count(request):
    user = get_user(request)
    interactions_count = MInteraction.user_unread_count(user.pk)

    return {
        'interactions_count': interactions_count,
    }

@never_cache
@ajax_login_required
@json.json_view
def feed_unread_count(request):
    get_post = getattr(request, request.method)
    start = time.time()
    user = request.user
    feed_ids = get_post.getlist('feed_id') or get_post.getlist('feed_id[]')
    force = request.GET.get('force', False)
    social_feed_ids = [feed_id for feed_id in feed_ids if 'social:' in feed_id]
    feed_ids = list(set(feed_ids) - set(social_feed_ids))

    feeds = {}
    if feed_ids:
        feeds = UserSubscription.feeds_with_updated_counts(user, feed_ids=feed_ids, force=force)

    social_feeds = {}
    if social_feed_ids:
        social_feeds = MSocialSubscription.feeds_with_updated_counts(user, social_feed_ids=social_feed_ids)

    if len(feed_ids) == 1:
        if settings.DEBUG:
            feed_title = Feed.get_by_id(feed_ids[0]).feed_title
        else:
            feed_title = feed_ids[0]
    elif len(social_feed_ids) == 1:
        feed_title = MSocialProfile.objects.get(user_id=social_feed_ids[0].replace('social:', '')).username
    else:
        feed_title = "%s feeds" % (len(feeds) + len(social_feeds))
    logging.user(request, "~FBUpdating unread count on: %s" % feed_title)
    MAnalyticsLoader.add(page_load=time.time() - start)

    return {'feeds': feeds, 'social_feeds': social_feeds}
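
# Illustrative note: getattr(request, request.method) above resolves to
# request.GET or request.POST, letting the same view read feed ids from either
# the query string or the form body without duplicating the parsing code.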

def refresh_feed(request, feed_id):
    start = time.time()
    user = get_user(request)
    feed = get_object_or_404(Feed, pk=feed_id)

    feed = feed.update(force=True, compute_scores=False)
    usersub = UserSubscription.objects.get(user=user, feed=feed)
    usersub.calculate_feed_scores(silent=False)

    logging.user(request, "~FBRefreshing feed: %s" % feed)
    MAnalyticsLoader.add(page_load=time.time() - start)

    return load_single_feed(request, feed_id)

@never_cache
@json.json_view
def load_single_feed(request, feed_id):
    start = time.time()
    user = get_user(request)
    # offset = int(request.GET.get('offset', 0))
    # limit = int(request.GET.get('limit', 6))
    limit = 6
    page = int(request.GET.get('page', 1))
    delay = int(request.GET.get('delay', 0))
    offset = limit * (page - 1)
    order = request.GET.get('order', 'newest')
    read_filter = request.GET.get('read_filter', 'all')
    query = request.GET.get('query', '').strip()
    include_story_content = is_true(request.GET.get('include_story_content', True))
    include_hidden = is_true(request.GET.get('include_hidden', False))
    include_feeds = is_true(request.GET.get('include_feeds', False))
    message = None
    user_search = None

    dupe_feed_id = None
    user_profiles = []
    now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone)
    if not feed_id: raise Http404

    feed_address = request.GET.get('feed_address')
    feed = Feed.get_by_id(feed_id, feed_address=feed_address)
    if not feed:
        raise Http404

    try:
        usersub = UserSubscription.objects.get(user=user, feed=feed)
    except UserSubscription.DoesNotExist:
        usersub = None

    if feed.is_newsletter and not usersub:
        # User must be subscribed to a newsletter in order to read it
        raise Http404

    if page > 200:
        logging.user(request, "~BR~FK~SBOver page 200 on single feed: %s" % page)
        raise Http404

    if query:
        if user.profile.is_premium:
            user_search = MUserSearch.get_user(user.pk)
            user_search.touch_search_date()
            stories = feed.find_stories(query, order=order, offset=offset, limit=limit)
        else:
            stories = []
            message = "You must be a premium subscriber to search."
    elif read_filter == 'starred':
        mstories = MStarredStory.objects(
            user_id=user.pk,
            story_feed_id=feed_id
        ).order_by('%sstarred_date' % ('-' if order == 'newest' else ''))[offset:offset+limit]
        stories = Feed.format_stories(mstories)
    elif usersub and (read_filter == 'unread' or order == 'oldest'):
        stories = usersub.get_stories(order=order, read_filter=read_filter, offset=offset, limit=limit,
                                      default_cutoff_date=user.profile.unread_cutoff)
    else:
        stories = feed.get_stories(offset, limit)

    checkpoint1 = time.time()

    try:
        stories, user_profiles = MSharedStory.stories_with_comments_and_profiles(stories, user.pk)
    except redis.ConnectionError:
        logging.user(request, "~BR~FK~SBRedis is unavailable for shared stories.")

    checkpoint2 = time.time()

    # Get intelligence classifier for user
    if usersub and usersub.is_trained:
        classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, feed_id=feed_id, social_user_id=0))
        classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk, feed_id=feed_id))
        classifier_titles = list(MClassifierTitle.objects(user_id=user.pk, feed_id=feed_id))
        classifier_tags = list(MClassifierTag.objects(user_id=user.pk, feed_id=feed_id))
    else:
        classifier_feeds = []
        classifier_authors = []
        classifier_titles = []
        classifier_tags = []
    classifiers = get_classifiers_for_user(user, feed_id=feed_id,
                                           classifier_feeds=classifier_feeds,
                                           classifier_authors=classifier_authors,
                                           classifier_titles=classifier_titles,
                                           classifier_tags=classifier_tags)
    checkpoint3 = time.time()

    unread_story_hashes = []
    if stories:
        if (read_filter == 'all' or query) and usersub:
            unread_story_hashes = UserSubscription.story_hashes(user.pk, read_filter='unread',
                                                                feed_ids=[usersub.feed_id],
                                                                usersubs=[usersub],
                                                                group_by_feed=False,
                                                                cutoff_date=user.profile.unread_cutoff)
        story_hashes = [story['story_hash'] for story in stories if story['story_hash']]

        starred_stories = MStarredStory.objects(user_id=user.pk,
                                                story_feed_id=feed.pk,
                                                story_hash__in=story_hashes)\
                                       .hint([('user_id', 1), ('story_hash', 1)])
        shared_story_hashes = MSharedStory.check_shared_story_hashes(user.pk, story_hashes)
        shared_stories = []
        if shared_story_hashes:
            shared_stories = MSharedStory.objects(user_id=user.pk,
                                                  story_hash__in=shared_story_hashes)\
                                         .hint([('story_hash', 1)])\
                                         .only('story_hash', 'shared_date', 'comments')
        starred_stories = dict([(story.story_hash, story)
                                for story in starred_stories])
        shared_stories = dict([(story.story_hash, dict(shared_date=story.shared_date,
                                                       comments=story.comments))
                               for story in shared_stories])

    checkpoint4 = time.time()

    for story in stories:
        if not include_story_content:
            del story['story_content']
        story_date = localtime_for_timezone(story['story_date'], user.profile.timezone)
        nowtz = localtime_for_timezone(now, user.profile.timezone)
        story['short_parsed_date'] = format_story_link_date__short(story_date, nowtz)
        story['long_parsed_date'] = format_story_link_date__long(story_date, nowtz)
        if usersub:
            story['read_status'] = 1
            if story['story_date'] < user.profile.unread_cutoff:
                story['read_status'] = 1
            elif (read_filter == 'all' or query) and usersub:
                story['read_status'] = 1 if story['story_hash'] not in unread_story_hashes else 0
            elif read_filter == 'unread' and usersub:
                story['read_status'] = 0
            if story['story_hash'] in starred_stories:
                story['starred'] = True
                starred_story = Feed.format_story(starred_stories[story['story_hash']])
                starred_date = localtime_for_timezone(starred_story['starred_date'],
                                                      user.profile.timezone)
                story['starred_date'] = format_story_link_date__long(starred_date, now)
                story['starred_timestamp'] = starred_date.strftime('%s')
                story['user_tags'] = starred_story['user_tags']
                story['user_notes'] = starred_story['user_notes']
                story['highlights'] = starred_story['highlights']
            if story['story_hash'] in shared_stories:
                story['shared'] = True
                shared_date = localtime_for_timezone(shared_stories[story['story_hash']]['shared_date'],
                                                     user.profile.timezone)
                story['shared_date'] = format_story_link_date__long(shared_date, now)
                story['shared_comments'] = strip_tags(shared_stories[story['story_hash']]['comments'])
        else:
            story['read_status'] = 1
        story['intelligence'] = {
            'feed': apply_classifier_feeds(classifier_feeds, feed),
            'author': apply_classifier_authors(classifier_authors, story),
            'tags': apply_classifier_tags(classifier_tags, story),
            'title': apply_classifier_titles(classifier_titles, story),
        }
        story['score'] = UserSubscription.score_story(story['intelligence'])

    # Intelligence
    feed_tags = json.decode(feed.data.popular_tags) if feed.data.popular_tags else []
    feed_authors = json.decode(feed.data.popular_authors) if feed.data.popular_authors else []

    if include_feeds:
        feeds = Feed.objects.filter(pk__in=set([story['story_feed_id'] for story in stories]))
        feeds = [f.canonical(include_favicon=False) for f in feeds]

    if usersub:
        usersub.feed_opens += 1
        usersub.needs_unread_recalc = True
        usersub.save(update_fields=['feed_opens', 'needs_unread_recalc'])

    diff1 = checkpoint1 - start
    diff2 = checkpoint2 - start
    diff3 = checkpoint3 - start
    diff4 = checkpoint4 - start
    timediff = time.time() - start
    last_update = relative_timesince(feed.last_update)
    time_breakdown = ""
    if timediff > 1 or settings.DEBUG:
        time_breakdown = "~SN~FR(~SB%.4s/%.4s/%.4s/%.4s~SN)" % (
            diff1, diff2, diff3, diff4)

    search_log = "~SN~FG(~SB%s~SN)" % query if query else ""
    logging.user(request, "~FYLoading feed: ~SB%s%s (%s/%s) %s%s" % (
        feed.feed_title[:22], ('~SN/p%s' % page) if page > 1 else '', order, read_filter, search_log, time_breakdown))
    MAnalyticsLoader.add(page_load=timediff)

    if not include_hidden:
        hidden_stories_removed = 0
        new_stories = []
        for story in stories:
            if story['score'] >= 0:
                new_stories.append(story)
            else:
                hidden_stories_removed += 1
        stories = new_stories

    data = dict(stories=stories,
                user_profiles=user_profiles,
                feed_tags=feed_tags,
                feed_authors=feed_authors,
                classifiers=classifiers,
                updated=last_update,
                user_search=user_search,
                feed_id=feed.pk,
                elapsed_time=round(float(timediff), 2),
                message=message)

    if include_feeds: data['feeds'] = feeds
    if not include_hidden: data['hidden_stories_removed'] = hidden_stories_removed
    if dupe_feed_id: data['dupe_feed_id'] = dupe_feed_id
    if not usersub:
        data.update(feed.canonical())
    # if not usersub and feed.num_subscribers <= 1:
    #     data = dict(code=-1, message="You must be subscribed to this feed.")

    if delay and user.is_staff:
        # import random
        # time.sleep(random.randint(2, 7) / 10.0)
        # time.sleep(random.randint(1, 10))
        time.sleep(delay)
    # if page == 1:
    #     time.sleep(1)
    # else:
    #     time.sleep(20)
    # if page == 2:
    #     assert False

    return data
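
# Illustrative request shape (the route path is an assumption):
#
#   GET /reader/feed/42?page=2&order=oldest&read_filter=unread
#
# Pagination is fixed at 6 stories per page and capped at page 200; each story
# in the response carries read_status, per-classifier intelligence scores, and
# starred/shared metadata when the user has saved or shared it.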

def load_feed_page(request, feed_id):
    if not feed_id:
        raise Http404

    feed = Feed.get_by_id(feed_id)

    if feed and feed.has_page and not feed.has_page_exception:
        if settings.BACKED_BY_AWS.get('pages_on_node'):
            domain = Site.objects.get_current().domain
            url = "https://%s/original_page/%s" % (
                domain,
                feed.pk,
            )
            try:
                page_response = requests.get(url)
            except requests.ConnectionError:
                page_response = None
            if page_response and page_response.status_code == 200:
                response = HttpResponse(page_response.content, content_type="text/html; charset=utf-8")
                response['Content-Encoding'] = 'gzip'
                response['Last-Modified'] = page_response.headers.get('Last-modified')
                response['Etag'] = page_response.headers.get('Etag')
                response['Content-Length'] = str(len(page_response.content))
                logging.user(request, "~FYLoading original page (%s), proxied from node: ~SB%s bytes" %
                             (feed_id, len(page_response.content)))
                return response

        if settings.BACKED_BY_AWS['pages_on_s3'] and feed.s3_page:
            if settings.PROXY_S3_PAGES:
                key = settings.S3_CONN.get_bucket(settings.S3_PAGES_BUCKET_NAME).get_key(feed.s3_pages_key)
                if key:
                    compressed_data = key.get_contents_as_string()
                    response = HttpResponse(compressed_data, content_type="text/html; charset=utf-8")
                    response['Content-Encoding'] = 'gzip'

                    logging.user(request, "~FYLoading original page, proxied: ~SB%s bytes" %
                                 (len(compressed_data)))
                    return response
            else:
                logging.user(request, "~FYLoading original page, non-proxied")
                return HttpResponseRedirect('//%s/%s' % (settings.S3_PAGES_BUCKET_NAME,
                                                         feed.s3_pages_key))

    data = MFeedPage.get_data(feed_id=feed_id)

    if not data or not feed or not feed.has_page or feed.has_page_exception:
        logging.user(request, "~FYLoading original page, ~FRmissing")
        return render(request, 'static/404_original_page.xhtml', {},
                      content_type='text/html',
                      status=404)

    logging.user(request, "~FYLoading original page, from the db")
    return HttpResponse(data, content_type="text/html; charset=utf-8")
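
# Illustrative note: both proxy branches above return the page bytes as stored
# and set Content-Encoding: gzip by hand, relying on the upstream store keeping
# the page gzip-compressed; nothing is re-encoded on the way through.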
2014-01-15 17:12:24 -08:00
2010-12-02 11:09:09 -05:00
@json.json_view
def load_starred_stories ( request ) :
2014-02-18 12:39:57 -08:00
user = get_user ( request )
2020-06-07 08:04:23 -04:00
offset = int ( request . GET . get ( ' offset ' , 0 ) )
limit = int ( request . GET . get ( ' limit ' , 10 ) )
page = int ( request . GET . get ( ' page ' , 0 ) )
query = request . GET . get ( ' query ' , ' ' ) . strip ( )
order = request . GET . get ( ' order ' , ' newest ' )
tag = request . GET . get ( ' tag ' )
2020-08-10 17:49:20 -04:00
highlights = is_true ( request . GET . get ( ' highlights ' , False ) )
2020-06-07 08:04:23 -04:00
story_hashes = request . GET . getlist ( ' h ' ) or request . GET . getlist ( ' h[] ' )
2017-04-04 17:13:43 -07:00
story_hashes = story_hashes [ : 100 ]
2020-06-07 08:04:23 -04:00
version = int ( request . GET . get ( ' v ' , 1 ) )
2014-02-18 12:39:57 -08:00
now = localtime_for_timezone ( datetime . datetime . now ( ) , user . profile . timezone )
message = None
2014-06-11 15:20:59 -07:00
order_by = ' - ' if order == " newest " else " "
2011-08-29 22:25:25 -07:00
if page : offset = limit * ( page - 1 )
2012-12-19 14:21:46 -08:00
if query :
2013-07-24 17:21:26 -07:00
# results = SearchStarredStory.query(user.pk, query)
# story_ids = [result.db_id for result in results]
2013-07-25 17:05:32 -07:00
if user . profile . is_premium :
2014-06-11 15:20:59 -07:00
stories = MStarredStory . find_stories ( query , user . pk , tag = tag , offset = offset , limit = limit ,
order = order )
2013-07-25 17:05:32 -07:00
else :
2013-07-30 12:01:45 -07:00
stories = [ ]
2013-07-25 17:05:32 -07:00
message = " You must be a premium subscriber to search. "
2020-07-10 17:59:11 -04:00
elif highlights :
if user . profile . is_premium :
mstories = MStarredStory . objects (
user_id = user . pk ,
2020-07-10 18:58:01 -04:00
highlights__exists = True ,
__raw__ = { " $where " : " this.highlights.length > 0 " }
2020-07-10 17:59:11 -04:00
) . order_by ( ' %s starred_date ' % order_by ) [ offset : offset + limit ]
stories = Feed . format_stories ( mstories )
else :
stories = [ ]
message = " You must be a premium subscriber to read through saved story highlights. "
2013-08-23 18:07:13 -07:00
elif tag :
2013-09-09 18:18:13 -07:00
if user . profile . is_premium :
mstories = MStarredStory . objects (
user_id = user . pk ,
user_tags__contains = tag
2014-06-11 15:20:59 -07:00
) . order_by ( ' %s starred_date ' % order_by ) [ offset : offset + limit ]
2013-09-09 18:18:13 -07:00
stories = Feed . format_stories ( mstories )
else :
stories = [ ]
message = " You must be a premium subscriber to read saved stories by tag. "
2013-08-22 21:01:38 -07:00
elif story_hashes :
2019-09-24 10:12:35 -04:00
limit = 100
2013-08-22 21:01:38 -07:00
mstories = MStarredStory . objects (
user_id = user . pk ,
story_hash__in = story_hashes
2014-06-11 15:20:59 -07:00
        ).order_by('%sstarred_date' % order_by)[offset:offset + limit]
        stories = Feed.format_stories(mstories)
    else:
        mstories = MStarredStory.objects(
            user_id=user.pk
        ).order_by('%sstarred_date' % order_by)[offset:offset + limit]
        stories = Feed.format_stories(mstories)

    stories, user_profiles = MSharedStory.stories_with_comments_and_profiles(stories, user.pk, check_all=True)

    story_hashes = [story['story_hash'] for story in stories]
    story_feed_ids = list(set(s['story_feed_id'] for s in stories))
    usersub_ids = UserSubscription.objects.filter(user__pk=user.pk, feed__pk__in=story_feed_ids).values('feed__pk')
    usersub_ids = [us['feed__pk'] for us in usersub_ids]
    unsub_feed_ids = list(set(story_feed_ids).difference(set(usersub_ids)))
    unsub_feeds = Feed.objects.filter(pk__in=unsub_feed_ids)
    unsub_feeds = dict((feed.pk, feed.canonical(include_favicon=False)) for feed in unsub_feeds)

    for story in stories:
        if story['story_feed_id'] in unsub_feeds: continue
        duplicate_feed = DuplicateFeed.objects.filter(duplicate_feed_id=story['story_feed_id'])
        if not duplicate_feed: continue
        feed_id = duplicate_feed[0].feed_id
        try:
            saved_story = MStarredStory.objects.get(user_id=user.pk, story_hash=story['story_hash'])
            saved_story.feed_id = feed_id
            _, story_hash = MStory.split_story_hash(story['story_hash'])
            saved_story.story_hash = "%s:%s" % (feed_id, story_hash)
            saved_story.story_feed_id = feed_id
            story['story_hash'] = saved_story.story_hash
            story['story_feed_id'] = saved_story.story_feed_id
            saved_story.save()
            logging.user(request, "~FCSaving new feed for starred story: ~SB%s -> %s" % (story['story_hash'], feed_id))
        except (MStarredStory.DoesNotExist, MStarredStory.MultipleObjectsReturned):
            logging.user(request, "~FCCan't find feed for starred story: ~SB%s" % (story['story_hash']))
            continue

    shared_story_hashes = MSharedStory.check_shared_story_hashes(user.pk, story_hashes)
    shared_stories = []
    if shared_story_hashes:
        shared_stories = MSharedStory.objects(user_id=user.pk,
                                              story_hash__in=shared_story_hashes)\
                                     .hint([('story_hash', 1)])\
                                     .only('story_hash', 'shared_date', 'comments')
    shared_stories = dict([(story.story_hash, dict(shared_date=story.shared_date,
                                                   comments=story.comments))
                           for story in shared_stories])

    nowtz = localtime_for_timezone(now, user.profile.timezone)
    for story in stories:
        story_date = localtime_for_timezone(story['story_date'], user.profile.timezone)
        story['short_parsed_date'] = format_story_link_date__short(story_date, nowtz)
        story['long_parsed_date'] = format_story_link_date__long(story_date, nowtz)
        starred_date = localtime_for_timezone(story['starred_date'], user.profile.timezone)
        story['starred_date'] = format_story_link_date__long(starred_date, nowtz)
        story['starred_timestamp'] = starred_date.strftime('%s')
        story['read_status'] = 1
        story['starred'] = True
        story['intelligence'] = {
            'feed': 1,
            'author': 0,
            'tags': 0,
            'title': 0,
        }
        if story['story_hash'] in shared_stories:
            story['shared'] = True
            story['shared_comments'] = strip_tags(shared_stories[story['story_hash']]['comments'])

    search_log = "~SN~FG(~SB%s~SN)" % query if query else ""
    logging.user(request, "~FCLoading starred stories: ~SB%s stories%s" % (len(stories), search_log))

    return {
        "stories": stories,
        "user_profiles": user_profiles,
        'feeds': list(unsub_feeds.values()) if version == 2 else unsub_feeds,
        "message": message,
    }

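
# Returns just the hashes of every saved (starred) story for the logged-in
# user, newest first, optionally paired with a save-date timestamp. A rough
# sketch of the shape a client might see (the endpoint path and values here
# are illustrative, not taken verbatim from this codebase):
#
#   GET /reader/starred_story_hashes?include_timestamps=true
#   => {"starred_story_hashes": [["123:abc123", "1597024137"], ...]}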
@json.json_view
def starred_story_hashes(request):
    user = get_user(request)
    include_timestamps = is_true(request.GET.get('include_timestamps', False))

    mstories = MStarredStory.objects(
        user_id=user.pk
    ).only('story_hash', 'starred_date', 'starred_updated').order_by('-starred_date')

    if include_timestamps:
        story_hashes = []
        for s in mstories:
            date = s.starred_date
            if s.starred_updated:
                date = s.starred_updated
            story_hashes.append((s.story_hash, date.strftime("%s")))
    else:
        story_hashes = [s.story_hash for s in mstories]

    logging.user(request, "~FYLoading ~FCstarred story hashes~FY: %s story hashes" %
                 (len(story_hashes)))
    return dict(starred_story_hashes=story_hashes)

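
# Public RSS/Atom feed of a user's saved stories, scoped to a single tag
# slug. The secret_token in the URL acts as the credential, so the feed can
# be consumed by external readers without a login session.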
def starred_stories_rss_feed(request, user_id, secret_token, tag_slug):
    try:
        user = User.objects.get(pk=user_id)
    except User.DoesNotExist:
        raise Http404

    try:
        tag_counts = MStarredStoryCounts.objects.get(user_id=user_id, slug=tag_slug)
    except MStarredStoryCounts.MultipleObjectsReturned:
        tag_counts = MStarredStoryCounts.objects(user_id=user_id, slug=tag_slug).first()
    except MStarredStoryCounts.DoesNotExist:
        raise Http404

    data = {}
    data['title'] = "Saved Stories - %s" % tag_counts.tag
    data['link'] = "%s%s" % (
        settings.NEWSBLUR_URL,
        reverse('saved-stories-tag', kwargs=dict(tag_name=tag_slug)))
    data['description'] = "Stories saved by %s on NewsBlur with the tag \"%s\"." % (user.username,
                                                                                    tag_counts.tag)
    data['lastBuildDate'] = datetime.datetime.utcnow()
    data['generator'] = 'NewsBlur - %s' % settings.NEWSBLUR_URL
    data['docs'] = None
    data['author_name'] = user.username
    data['feed_url'] = "%s%s" % (
        settings.NEWSBLUR_URL,
        reverse('starred-stories-rss-feed',
                kwargs=dict(user_id=user_id, secret_token=secret_token, tag_slug=tag_slug)),
    )
    rss = feedgenerator.Atom1Feed(**data)

    if not tag_counts.tag:
        starred_stories = MStarredStory.objects(
            user_id=user.pk
        ).order_by('-starred_date').limit(25)
    elif tag_counts.is_highlights:
        starred_stories = MStarredStory.objects(
            user_id=user.pk,
            highlights__exists=True,
            __raw__={"$where": "this.highlights.length > 0"}
        ).order_by('-starred_date').limit(25)
    else:
        starred_stories = MStarredStory.objects(
            user_id=user.pk,
            user_tags__contains=tag_counts.tag
        ).order_by('-starred_date').limit(25)

    for starred_story in starred_stories:
        story_data = {
            'title': starred_story.story_title,
            'link': starred_story.story_permalink,
            'description': (starred_story.story_content_z and
                            zlib.decompress(starred_story.story_content_z)),
            'author_name': starred_story.story_author_name,
            'categories': starred_story.story_tags,
            'unique_id': starred_story.story_guid,
            'pubdate': starred_story.starred_date,
        }
        rss.add_item(**story_data)

    logging.user(request, "~FBGenerating ~SB%s~SN's saved story RSS feed (%s, %s stories): ~FM%s" % (
        user.username,
        tag_counts.tag,
        tag_counts.count,
        request.META.get('HTTP_USER_AGENT', "")[:24]
    ))
    return HttpResponse(rss.writeString('utf-8'), content_type='application/rss+xml')

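
# Public RSS/Atom feed of the unread (or focus) stories in one of a user's
# folders. Stories are scored with the user's intelligence classifiers and
# filtered by `unread_filter` before being rendered. Folder RSS is a premium
# feature: free accounts get a single placeholder item instead of stories.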
def folder_rss_feed(request, user_id, secret_token, unread_filter, folder_slug):
    domain = Site.objects.get_current().domain
    try:
        user = User.objects.get(pk=user_id)
    except User.DoesNotExist:
        raise Http404

    user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=user)
    feed_ids, folder_title = user_sub_folders.feed_ids_under_folder_slug(folder_slug)
    usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids)
    if feed_ids and user.profile.is_premium:
        params = {
            "user_id": user.pk,
            "feed_ids": feed_ids,
            "offset": 0,
            "limit": 20,
            "order": 'newest',
            "read_filter": 'all',
            "cache_prefix": "RSS:"
        }
        story_hashes, unread_feed_story_hashes = UserSubscription.feed_stories(**params)
    else:
        story_hashes = []

    mstories = MStory.objects(story_hash__in=story_hashes).order_by('-story_date')
    stories = Feed.format_stories(mstories)
    filtered_stories = []
    found_feed_ids = list(set([story['story_feed_id'] for story in stories]))
    trained_feed_ids = [sub.feed_id for sub in usersubs if sub.is_trained]
    found_trained_feed_ids = list(set(trained_feed_ids) & set(found_feed_ids))
    if found_trained_feed_ids:
        classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk,
                                                        feed_id__in=found_trained_feed_ids,
                                                        social_user_id=0))
        classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk,
                                                            feed_id__in=found_trained_feed_ids))
        classifier_titles = list(MClassifierTitle.objects(user_id=user.pk,
                                                          feed_id__in=found_trained_feed_ids))
        classifier_tags = list(MClassifierTag.objects(user_id=user.pk,
                                                      feed_id__in=found_trained_feed_ids))
    else:
        classifier_feeds = []
        classifier_authors = []
        classifier_titles = []
        classifier_tags = []
    sort_classifiers_by_feed(user=user, feed_ids=found_feed_ids,
                             classifier_feeds=classifier_feeds,
                             classifier_authors=classifier_authors,
                             classifier_titles=classifier_titles,
                             classifier_tags=classifier_tags)

    for story in stories:
        story['intelligence'] = {
            'feed': apply_classifier_feeds(classifier_feeds, story['story_feed_id']),
            'author': apply_classifier_authors(classifier_authors, story),
            'tags': apply_classifier_tags(classifier_tags, story),
            'title': apply_classifier_titles(classifier_titles, story),
        }
        story['score'] = UserSubscription.score_story(story['intelligence'])
        if unread_filter == 'focus' and story['score'] >= 1:
            filtered_stories.append(story)
        elif unread_filter == 'unread' and story['score'] >= 0:
            filtered_stories.append(story)
    stories = filtered_stories

    data = {}
    data['title'] = "%s from %s (%s sites)" % (folder_title, user.username, len(feed_ids))
    data['link'] = "https://%s%s" % (
        domain,
        reverse('folder', kwargs=dict(folder_name=folder_title)))
    data['description'] = "Unread stories in %s on NewsBlur. From %s's account and contains %s sites." % (
        folder_title,
        user.username,
        len(feed_ids))
    data['lastBuildDate'] = datetime.datetime.utcnow()
    data['generator'] = 'NewsBlur - %s' % settings.NEWSBLUR_URL
    data['docs'] = None
    data['author_name'] = user.username
    data['feed_url'] = "https://%s%s" % (
        domain,
        reverse('folder-rss-feed',
                kwargs=dict(user_id=user_id, secret_token=secret_token, unread_filter=unread_filter, folder_slug=folder_slug)),
    )
    rss = feedgenerator.Atom1Feed(**data)

    for story in stories:
        feed = Feed.get_by_id(story['story_feed_id'])
        story_content = """%s<br><br><img src="//%s/rss_feeds/icon/%s" width="16" height="16"> %s""" % (
            smart_str(story['story_content']),
            Site.objects.get_current().domain,
            story['story_feed_id'],
            feed.feed_title if feed else ""
        )
        story_data = {
            'title': "%s%s" % (("%s: " % feed.feed_title) if feed else "", story['story_title']),
            'link': story['story_permalink'],
            'description': story_content,
            'categories': story['story_tags'],
            'unique_id': 'https://%s/site/%s/%s/' % (domain, story['story_feed_id'], story['guid_hash']),
            'pubdate': localtime_for_timezone(story['story_date'], user.profile.timezone),
        }
        if story['story_authors']:
            story_data['author_name'] = story['story_authors']
        rss.add_item(**story_data)

    if not user.profile.is_premium:
        story_data = {
            'title': "You must have a premium account on NewsBlur to have RSS feeds for folders.",
            'link': "https://%s" % domain,
            'description': "You must have a premium account on NewsBlur to have RSS feeds for folders.",
            'unique_id': "https://%s/premium_only" % domain,
            'pubdate': localtime_for_timezone(datetime.datetime.now(), user.profile.timezone),
        }
        rss.add_item(**story_data)

    logging.user(request, "~FBGenerating ~SB%s~SN's folder RSS feed (%s, %s stories): ~FM%s" % (
        user.username,
        folder_title,
        len(stories),
        request.META.get('HTTP_USER_AGENT', "")[:24]
    ))
    return HttpResponse(rss.writeString('utf-8'), content_type='application/rss+xml')

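
# Returns the user's read-stories river: the most recently read stories in
# page order, annotated with starred/shared state. Search over read stories
# is stubbed out below and returns a "Not implemented yet" message.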
@json.json_view
def load_read_stories(request):
    user = get_user(request)
    offset = int(request.GET.get('offset', 0))
    limit = int(request.GET.get('limit', 10))
    page = int(request.GET.get('page', 0))
    order = request.GET.get('order', 'newest')
    query = request.GET.get('query', '').strip()
    now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone)
    message = None
    if page: offset = limit * (page - 1)

    if query:
        stories = []
        message = "Not implemented yet."
        # if user.profile.is_premium:
        #     stories = MStarredStory.find_stories(query, user.pk, offset=offset, limit=limit)
        # else:
        #     stories = []
        #     message = "You must be a premium subscriber to search."
    else:
        story_hashes = RUserStory.get_read_stories(user.pk, offset=offset, limit=limit, order=order)
        mstories = MStory.objects(story_hash__in=story_hashes)
        stories = Feed.format_stories(mstories)
        stories = sorted(stories, key=lambda story: story_hashes.index(story['story_hash']),
                         reverse=bool(order == "oldest"))

    stories, user_profiles = MSharedStory.stories_with_comments_and_profiles(stories, user.pk, check_all=True)

    story_hashes = [story['story_hash'] for story in stories]
    story_feed_ids = list(set(s['story_feed_id'] for s in stories))
    usersub_ids = UserSubscription.objects.filter(user__pk=user.pk, feed__pk__in=story_feed_ids).values('feed__pk')
    usersub_ids = [us['feed__pk'] for us in usersub_ids]
    unsub_feed_ids = list(set(story_feed_ids).difference(set(usersub_ids)))
    unsub_feeds = Feed.objects.filter(pk__in=unsub_feed_ids)
    unsub_feeds = [feed.canonical(include_favicon=False) for feed in unsub_feeds]

    shared_stories = MSharedStory.objects(user_id=user.pk,
                                          story_hash__in=story_hashes)\
                                 .hint([('story_hash', 1)])\
                                 .only('story_hash', 'shared_date', 'comments')
    shared_stories = dict([(story.story_hash, dict(shared_date=story.shared_date,
                                                   comments=story.comments))
                           for story in shared_stories])
    starred_stories = MStarredStory.objects(user_id=user.pk,
                                            story_hash__in=story_hashes)\
                                   .hint([('user_id', 1), ('story_hash', 1)])
    starred_stories = dict([(story.story_hash, story)
                            for story in starred_stories])

    nowtz = localtime_for_timezone(now, user.profile.timezone)
    for story in stories:
        story_date = localtime_for_timezone(story['story_date'], user.profile.timezone)
        story['short_parsed_date'] = format_story_link_date__short(story_date, nowtz)
        story['long_parsed_date'] = format_story_link_date__long(story_date, nowtz)
        story['read_status'] = 1
        story['intelligence'] = {
            'feed': 1,
            'author': 0,
            'tags': 0,
            'title': 0,
        }
        if story['story_hash'] in starred_stories:
            story['starred'] = True
            starred_story = Feed.format_story(starred_stories[story['story_hash']])
            starred_date = localtime_for_timezone(starred_story['starred_date'],
                                                  user.profile.timezone)
            story['starred_date'] = format_story_link_date__long(starred_date, now)
            story['starred_timestamp'] = starred_date.strftime('%s')
        if story['story_hash'] in shared_stories:
            story['shared'] = True
            story['shared_comments'] = strip_tags(shared_stories[story['story_hash']]['comments'])

    search_log = "~SN~FG(~SB%s~SN)" % query if query else ""
    logging.user(request, "~FCLoading read stories: ~SB%s stories%s" % (len(stories), search_log))

    return {
        "stories": stories,
        "user_profiles": user_profiles,
        "feeds": unsub_feeds,
        "message": message,
    }

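
# The main river endpoint: assembles stories across many feeds at once from
# Redis-backed story hashes. Four modes, chosen from the request parameters:
# explicit story hashes (`h`), a search query, read_filter == 'starred', or
# the default unread/all river built by UserSubscription.feed_stories.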
@json.json_view
def load_river_stories__redis(request):
    # get_post is either request.GET or request.POST, depending on the verb:
    # this endpoint accepts both, since its parameter list can grow long
    # enough to exceed the maximum size of a GET URL.
    get_post = getattr(request, request.method)
    limit = int(get_post.get('limit', 12))
    start = time.time()
    user = get_user(request)
    message = None
    feed_ids = get_post.getlist('feeds') or get_post.getlist('feeds[]')
    feed_ids = [int(feed_id) for feed_id in feed_ids if feed_id]
    if not feed_ids:
        feed_ids = get_post.getlist('f') or get_post.getlist('f[]')
        feed_ids = [int(feed_id) for feed_id in feed_ids if feed_id]
    story_hashes = get_post.getlist('h') or get_post.getlist('h[]')
    story_hashes = story_hashes[:100]
    original_feed_ids = list(feed_ids)
    page = int(get_post.get('page', 1))
    order = get_post.get('order', 'newest')
    read_filter = get_post.get('read_filter', 'unread')
    query = get_post.get('query', '').strip()
    include_hidden = is_true(get_post.get('include_hidden', False))
    include_feeds = is_true(get_post.get('include_feeds', False))
    initial_dashboard = is_true(get_post.get('initial_dashboard', False))
    infrequent = is_true(get_post.get('infrequent', False))
    if infrequent:
        infrequent = get_post.get('infrequent')
    now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone)
    usersubs = []
    code = 1
    user_search = None
    offset = (page - 1) * limit
    story_date_order = "%sstory_date" % ('' if order == 'oldest' else '-')

    if infrequent:
        feed_ids = Feed.low_volume_feeds(feed_ids, stories_per_month=infrequent)

    if story_hashes:
        unread_feed_story_hashes = None
        read_filter = 'all'
        mstories = MStory.objects(story_hash__in=story_hashes).order_by(story_date_order)
        stories = Feed.format_stories(mstories)
    elif query:
        if user.profile.is_premium:
            user_search = MUserSearch.get_user(user.pk)
            user_search.touch_search_date()
            usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids,
                                                       read_filter='all')
            feed_ids = [sub.feed_id for sub in usersubs]
            if infrequent:
                feed_ids = Feed.low_volume_feeds(feed_ids, stories_per_month=infrequent)
            stories = Feed.find_feed_stories(feed_ids, query, order=order, offset=offset, limit=limit)
            mstories = stories
            unread_feed_story_hashes = UserSubscription.story_hashes(user.pk, feed_ids=feed_ids,
                                                                     read_filter="unread", order=order,
                                                                     group_by_feed=False,
                                                                     cutoff_date=user.profile.unread_cutoff)
        else:
            stories = []
            mstories = []
            message = "You must be a premium subscriber to search."
    elif read_filter == 'starred':
        mstories = MStarredStory.objects(
            user_id=user.pk,
            story_feed_id__in=feed_ids
        ).order_by('%sstarred_date' % ('-' if order == 'newest' else ''))[offset:offset + limit]
        stories = Feed.format_stories(mstories)
    else:
        usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids,
                                                   read_filter=read_filter)
        all_feed_ids = [f for f in feed_ids]
        feed_ids = [sub.feed_id for sub in usersubs]
        if infrequent:
            feed_ids = Feed.low_volume_feeds(feed_ids, stories_per_month=infrequent)
        if feed_ids:
            params = {
                "user_id": user.pk,
                "feed_ids": feed_ids,
                "all_feed_ids": all_feed_ids,
                "offset": offset,
                "limit": limit,
                "order": order,
                "read_filter": read_filter,
                "usersubs": usersubs,
                "cutoff_date": user.profile.unread_cutoff,
                "cache_prefix": "dashboard:" if initial_dashboard else "",
            }
            story_hashes, unread_feed_story_hashes = UserSubscription.feed_stories(**params)
        else:
            story_hashes = []
            unread_feed_story_hashes = []
        mstories = MStory.objects(story_hash__in=story_hashes).order_by(story_date_order)
        stories = Feed.format_stories(mstories)
    found_feed_ids = list(set([story['story_feed_id'] for story in stories]))
    stories, user_profiles = MSharedStory.stories_with_comments_and_profiles(stories, user.pk)

    if not usersubs:
        usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=found_feed_ids,
                                                   read_filter=read_filter)

    trained_feed_ids = [sub.feed_id for sub in usersubs if sub.is_trained]
    found_trained_feed_ids = list(set(trained_feed_ids) & set(found_feed_ids))

    # Find starred stories
    if found_feed_ids:
        if read_filter == 'starred':
            starred_stories = mstories
        else:
            story_hashes = [s['story_hash'] for s in stories]
            starred_stories = MStarredStory.objects(
                user_id=user.pk,
                story_hash__in=story_hashes)
        starred_stories = dict([(story.story_hash, dict(starred_date=story.starred_date,
                                                        user_tags=story.user_tags,
                                                        highlights=story.highlights,
                                                        user_notes=story.user_notes))
                                for story in starred_stories])
    else:
        starred_stories = {}

    # Intelligence classifiers for all feeds involved
    if found_trained_feed_ids:
        classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk,
                                                        feed_id__in=found_trained_feed_ids,
                                                        social_user_id=0))
        classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk,
                                                            feed_id__in=found_trained_feed_ids))
        classifier_titles = list(MClassifierTitle.objects(user_id=user.pk,
                                                          feed_id__in=found_trained_feed_ids))
        classifier_tags = list(MClassifierTag.objects(user_id=user.pk,
                                                      feed_id__in=found_trained_feed_ids))
    else:
        classifier_feeds = []
        classifier_authors = []
        classifier_titles = []
        classifier_tags = []
    classifiers = sort_classifiers_by_feed(user=user, feed_ids=found_feed_ids,
                                           classifier_feeds=classifier_feeds,
                                           classifier_authors=classifier_authors,
                                           classifier_titles=classifier_titles,
                                           classifier_tags=classifier_tags)
    # Just need to format stories
    nowtz = localtime_for_timezone(now, user.profile.timezone)
    for story in stories:
        if read_filter == 'starred':
            story['read_status'] = 1
        else:
            story['read_status'] = 0
        if read_filter == 'all' or query:
            if (unread_feed_story_hashes is not None and
                story['story_hash'] not in unread_feed_story_hashes):
                story['read_status'] = 1
        story_date = localtime_for_timezone(story['story_date'], user.profile.timezone)
        story['short_parsed_date'] = format_story_link_date__short(story_date, nowtz)
        story['long_parsed_date'] = format_story_link_date__long(story_date, nowtz)
        if story['story_hash'] in starred_stories:
            story['starred'] = True
            starred_date = localtime_for_timezone(starred_stories[story['story_hash']]['starred_date'],
                                                  user.profile.timezone)
            story['starred_date'] = format_story_link_date__long(starred_date, now)
            story['starred_timestamp'] = starred_date.strftime('%s')
            story['user_tags'] = starred_stories[story['story_hash']]['user_tags']
            story['user_notes'] = starred_stories[story['story_hash']]['user_notes']
            story['highlights'] = starred_stories[story['story_hash']]['highlights']
        story['intelligence'] = {
            'feed': apply_classifier_feeds(classifier_feeds, story['story_feed_id']),
            'author': apply_classifier_authors(classifier_authors, story),
            'tags': apply_classifier_tags(classifier_tags, story),
            'title': apply_classifier_titles(classifier_titles, story),
        }
        story['score'] = UserSubscription.score_story(story['intelligence'])
    if include_feeds:
        feeds = Feed.objects.filter(pk__in=set([story['story_feed_id'] for story in stories]))
        feeds = [feed.canonical(include_favicon=False) for feed in feeds]

    if not user.profile.is_premium and not include_feeds:
        message = "The full River of News is a premium feature."
        code = 0
        # if page > 1:
        #     stories = []
        # else:
        #     stories = stories[:5]

    if not include_hidden:
        hidden_stories_removed = 0
        new_stories = []
        for story in stories:
            if story['score'] >= 0:
                new_stories.append(story)
            else:
                hidden_stories_removed += 1
        stories = new_stories

    # Clean stories to remove potentially old stories on dashboard
    if initial_dashboard:
        new_stories = []
        now = datetime.datetime.utcnow()
        hour = now + datetime.timedelta(hours=1)
        month_ago = now - datetime.timedelta(days=settings.DAYS_OF_UNREAD)
        for story in stories:
            if story['story_date'] >= month_ago and story['story_date'] < hour:
                new_stories.append(story)
        stories = new_stories

    # if page >= 1:
    #     import random
    #     time.sleep(random.randint(3, 6))

    diff = time.time() - start
    timediff = round(float(diff), 2)
    logging.user(request, "~FYLoading ~FC%sriver stories~FY: ~SBp%s~SN (%s/%s "
                 "stories, ~SN%s/%s/%s feeds, %s/%s)" %
                 ("~FB~SBinfrequent~SN~FC " if infrequent else "",
                  page, len(stories), len(mstories), len(found_feed_ids),
                  len(feed_ids), len(original_feed_ids), order, read_filter))
    MAnalyticsLoader.add(page_load=diff)

    data = dict(code=code,
                message=message,
                stories=stories,
                classifiers=classifiers,
                elapsed_time=timediff,
                user_search=user_search,
                user_profiles=user_profiles)
    if include_feeds: data['feeds'] = feeds
    if not include_hidden: data['hidden_stories_removed'] = hidden_stories_removed

    return data

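
# Companion to the dashboard river above: truncates the cached river for
# these feeds (under the "dashboard:" cache prefix) once the client has
# finished paging, and reports how many stories were dropped.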
@json.json_view
def complete_river(request):
    user = get_user(request)
    feed_ids = request.POST.getlist('feeds') or request.POST.getlist('feeds[]')
    feed_ids = [int(feed_id) for feed_id in feed_ids if feed_id]
    page = int(request.POST.get('page', 1))
    read_filter = request.POST.get('read_filter', 'unread')
    stories_truncated = 0

    usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids,
                                               read_filter=read_filter)
    feed_ids = [sub.feed_id for sub in usersubs]
    if feed_ids:
        stories_truncated = UserSubscription.truncate_river(user.pk, feed_ids, read_filter, cache_prefix="dashboard:")

    logging.user(request, "~FC~BBRiver complete on page ~SB%s~SN, truncating ~SB%s~SN stories from ~SB%s~SN feeds" % (page, stories_truncated, len(feed_ids)))

    return dict(code=1, message="Truncated %s stories from %s" % (stories_truncated, len(feed_ids)))

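
# Legacy per-feed version of unread_story_hashes, kept under the __old
# suffix: it walks each subscription and pulls up to 500 unread hashes per
# feed. The current implementation below defers to
# UserSubscription.story_hashes in a single call.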
@json.json_view
def unread_story_hashes__old(request):
    user = get_user(request)
    feed_ids = request.GET.getlist('feed_id') or request.GET.getlist('feed_id[]')
    feed_ids = [int(feed_id) for feed_id in feed_ids if feed_id]
    include_timestamps = is_true(request.GET.get('include_timestamps', False))

    usersubs = {}
    if not feed_ids:
        usersubs = UserSubscription.objects.filter(Q(unread_count_neutral__gt=0) |
                                                   Q(unread_count_positive__gt=0),
                                                   user=user, active=True)
        feed_ids = [sub.feed_id for sub in usersubs]
    else:
        usersubs = UserSubscription.objects.filter(Q(unread_count_neutral__gt=0) |
                                                   Q(unread_count_positive__gt=0),
                                                   user=user, active=True, feed__in=feed_ids)

    unread_feed_story_hashes = {}
    story_hash_count = 0

    usersubs = dict((sub.feed_id, sub) for sub in usersubs)
    for feed_id in feed_ids:
        if feed_id in usersubs:
            us = usersubs[feed_id]
        else:
            continue
        if not us.unread_count_neutral and not us.unread_count_positive:
            continue
        unread_feed_story_hashes[feed_id] = us.get_stories(read_filter='unread', limit=500,
                                                           withscores=include_timestamps,
                                                           hashes_only=True,
                                                           default_cutoff_date=user.profile.unread_cutoff)
        story_hash_count += len(unread_feed_story_hashes[feed_id])

    logging.user(request, "~FYLoading ~FCunread story hashes~FY: ~SB%s feeds~SN (%s story hashes)" %
                 (len(feed_ids), story_hash_count))
    return dict(unread_feed_story_hashes=unread_feed_story_hashes)

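
# Current implementation: one call into UserSubscription.story_hashes for
# all requested feeds, honoring order, read_filter, and the user's unread
# cutoff date.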
@json.json_view
def unread_story_hashes(request):
    user = get_user(request)
    feed_ids = request.GET.getlist('feed_id') or request.GET.getlist('feed_id[]')
    feed_ids = [int(feed_id) for feed_id in feed_ids if feed_id]
    include_timestamps = is_true(request.GET.get('include_timestamps', False))
    order = request.GET.get('order', 'newest')
    read_filter = request.GET.get('read_filter', 'unread')

    story_hashes = UserSubscription.story_hashes(user.pk, feed_ids=feed_ids,
                                                 order=order, read_filter=read_filter,
                                                 include_timestamps=include_timestamps,
                                                 cutoff_date=user.profile.unread_cutoff)

    logging.user(request, "~FYLoading ~FCunread story hashes~FY: ~SB%s feeds~SN (%s story hashes)" %
                 (len(feed_ids), len(story_hashes)))
    return dict(unread_feed_story_hashes=story_hashes)

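
# Marks everything as read, optionally only stories older than `days` and
# optionally only infrequently-updating feeds. days == 0 means a full
# mark-read; otherwise subscriptions are flagged for an unread recount.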
@ajax_login_required
@json.json_view
def mark_all_as_read(request):
    code = 1
    try:
        days = int(request.POST.get('days', 0))
    except ValueError:
        return dict(code=-1, message="Days parameter must be an integer, not: %s" %
                    request.POST.get('days'))
    read_date = datetime.datetime.utcnow() - datetime.timedelta(days=days)
    feeds = UserSubscription.objects.filter(user=request.user)

    infrequent = is_true(request.POST.get('infrequent', False))
    if infrequent:
        infrequent = request.POST.get('infrequent')
        feed_ids = Feed.low_volume_feeds([usersub.feed.pk for usersub in feeds], stories_per_month=infrequent)
        feeds = UserSubscription.objects.filter(user=request.user, feed_id__in=feed_ids)

    socialsubs = MSocialSubscription.objects.filter(user_id=request.user.pk)
    for subtype in [feeds, socialsubs]:
        for sub in subtype:
            if days == 0:
                sub.mark_feed_read()
            else:
                if sub.mark_read_date < read_date:
                    sub.needs_unread_recalc = True
                    sub.mark_read_date = read_date
                    sub.save()

    r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
    r.publish(request.user.username, 'reload:feeds')

    logging.user(request, "~FMMarking %s as read: ~SB%s days" % (("all" if not infrequent else "infrequent stories"), days,))
    return dict(code=code)

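
# Marks individual story ids as read within a single feed, chasing
# DuplicateFeed redirects when the client still holds an old feed_id.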
@ajax_login_required
@json.json_view
def mark_story_as_read(request):
    story_ids = request.POST.getlist('story_id') or request.POST.getlist('story_id[]')
    try:
        feed_id = int(get_argument_or_404(request, 'feed_id'))
    except ValueError:
        return dict(code=-1, errors=["You must pass a valid feed_id: %s" %
                                     request.POST.get('feed_id')])

    try:
        usersub = UserSubscription.objects.select_related('feed').get(user=request.user, feed=feed_id)
    except Feed.DoesNotExist:
        duplicate_feed = DuplicateFeed.objects.filter(duplicate_feed_id=feed_id)
        if duplicate_feed:
            feed_id = duplicate_feed[0].feed_id
            try:
                usersub = UserSubscription.objects.get(user=request.user,
                                                       feed=duplicate_feed[0].feed)
            except (Feed.DoesNotExist):
                return dict(code=-1, errors=["No feed exists for feed_id %d." % feed_id])
        else:
            return dict(code=-1, errors=["No feed exists for feed_id %d." % feed_id])
    except UserSubscription.DoesNotExist:
        usersub = None

    if usersub:
        data = usersub.mark_story_ids_as_read(story_ids, request=request)
    else:
        data = dict(code=-1, errors=["User is not subscribed to this feed."])

    return data

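
# Bulk mark-read by story hash (the "feed_id:guid_hash" form), the path the
# clients use most. Redis does the actual bookkeeping; the Django models are
# only touched to flag subscriptions for an unread recount, and pubsub
# notifies other open sessions per feed and per social subscription.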
@ajax_login_required
@json.json_view
def mark_story_hashes_as_read(request):
    retrying_failed = is_true(request.POST.get('retrying_failed', False))
    r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
    try:
        story_hashes = request.POST.getlist('story_hash') or request.POST.getlist('story_hash[]')
    except UnreadablePostError:
        return dict(code=-1, message="Missing `story_hash` list parameter.")

    feed_ids, friend_ids = RUserStory.mark_story_hashes_read(request.user.pk, story_hashes, username=request.user.username)

    if friend_ids:
        socialsubs = MSocialSubscription.objects.filter(
            user_id=request.user.pk,
            subscription_user_id__in=friend_ids)
        for socialsub in socialsubs:
            if not socialsub.needs_unread_recalc:
                socialsub.needs_unread_recalc = True
                socialsub.save()
            r.publish(request.user.username, 'social:%s' % socialsub.subscription_user_id)

    # Also count on original subscription
    for feed_id in feed_ids:
        usersubs = UserSubscription.objects.filter(user=request.user.pk, feed=feed_id)
        if usersubs:
            usersub = usersubs[0]
            usersub.last_read_date = datetime.datetime.now()
            if not usersub.needs_unread_recalc:
                usersub.needs_unread_recalc = True
                usersub.save(update_fields=['needs_unread_recalc', 'last_read_date'])
            else:
                usersub.save(update_fields=['last_read_date'])
            r.publish(request.user.username, 'feed:%s' % feed_id)

    hash_count = len(story_hashes)
    logging.user(request, "~FYRead %s %s in feed/socialsubs: %s/%s: %s %s" % (
        hash_count, 'story' if hash_count == 1 else 'stories', feed_ids, friend_ids,
        story_hashes,
        '(retrying failed)' if retrying_failed else ''))

    return dict(code=1, story_hashes=story_hashes,
                feed_ids=feed_ids, friend_user_ids=friend_ids)

@ajax_login_required
@json.json_view
def mark_feed_stories_as_read ( request ) :
2013-06-18 12:21:27 -07:00
r = redis . Redis ( connection_pool = settings . REDIS_PUBSUB_POOL )
2020-06-07 08:04:23 -04:00
feeds_stories = request . POST . get ( ' feeds_stories ' , " {} " )
2011-11-10 18:29:41 -08:00
feeds_stories = json . decode ( feeds_stories )
2013-04-02 09:36:43 -07:00
data = {
' code ' : - 1 ,
' message ' : ' Nothing was marked as read '
}
2020-06-15 05:15:36 -04:00
for feed_id , story_ids in list ( feeds_stories . items ( ) ) :
2013-09-09 13:46:14 -07:00
try :
feed_id = int ( feed_id )
except ValueError :
continue
2011-11-05 16:25:04 -07:00
try :
usersub = UserSubscription . objects . select_related ( ' feed ' ) . get ( user = request . user , feed = feed_id )
2013-06-16 14:09:28 -07:00
data = usersub . mark_story_ids_as_read ( story_ids , request = request )
2012-02-03 11:41:01 -08:00
except UserSubscription . DoesNotExist :
return dict ( code = - 1 , error = " You are not subscribed to this feed_id: %d " % feed_id )
except Feed . DoesNotExist :
2011-11-05 16:25:04 -07:00
duplicate_feed = DuplicateFeed . objects . filter ( duplicate_feed_id = feed_id )
2012-02-03 11:41:01 -08:00
try :
if not duplicate_feed : raise Feed . DoesNotExist
usersub = UserSubscription . objects . get ( user = request . user ,
feed = duplicate_feed [ 0 ] . feed )
2013-06-16 14:09:28 -07:00
data = usersub . mark_story_ids_as_read ( story_ids , request = request )
2012-02-03 11:41:01 -08:00
except ( UserSubscription . DoesNotExist , Feed . DoesNotExist ) :
return dict ( code = - 1 , error = " No feed exists for feed_id: %d " % feed_id )
2012-10-22 16:25:36 -07:00
r . publish ( request . user . username , ' feed: %s ' % feed_id )
2012-02-03 11:41:01 -08:00
return data

@ajax_login_required
@json.json_view
def mark_social_stories_as_read(request):
    code = 1
    errors = []
    data = {}
    r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
    users_feeds_stories = request.POST.get('users_feeds_stories', "{}")
    users_feeds_stories = json.decode(users_feeds_stories)

    for social_user_id, feeds in list(users_feeds_stories.items()):
        for feed_id, story_ids in list(feeds.items()):
            feed_id = int(feed_id)
            try:
                socialsub = MSocialSubscription.objects.get(user_id=request.user.pk,
                                                            subscription_user_id=social_user_id)
                data = socialsub.mark_story_ids_as_read(story_ids, feed_id, request=request)
            except OperationError as e:
                code = -1
                errors.append("Already read story: %s" % e)
            except MSocialSubscription.DoesNotExist:
                MSocialSubscription.mark_unsub_story_ids_as_read(request.user.pk, social_user_id,
                                                                 story_ids, feed_id,
                                                                 request=request)
            except Feed.DoesNotExist:
                duplicate_feed = DuplicateFeed.objects.filter(duplicate_feed_id=feed_id)
                if duplicate_feed:
                    try:
                        socialsub = MSocialSubscription.objects.get(user_id=request.user.pk,
                                                                    subscription_user_id=social_user_id)
                        data = socialsub.mark_story_ids_as_read(story_ids, duplicate_feed[0].feed.pk, request=request)
                    except (UserSubscription.DoesNotExist, Feed.DoesNotExist):
                        code = -1
                        errors.append("No feed exists for feed_id %d." % feed_id)
                else:
                    continue
            r.publish(request.user.username, 'feed:%s' % feed_id)
        r.publish(request.user.username, 'social:%s' % social_user_id)

    data.update(code=code, errors=errors)
    return data

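
# Inverse of mark_story_as_read for a single story id. Unreading a story
# also inverts the read state of stories read after it, marks any shares of
# the story dirty, and may be refused when
# story_can_be_marked_read_by_user returns a message explaining why.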
@required_params('story_id', feed_id=int)
@ajax_login_required
@json.json_view
def mark_story_as_unread(request):
    story_id = request.POST.get('story_id', None)
    feed_id = int(request.POST.get('feed_id', 0))

    try:
        usersub = UserSubscription.objects.select_related('feed').get(user=request.user, feed=feed_id)
        feed = usersub.feed
    except UserSubscription.DoesNotExist:
        usersub = None
        feed = Feed.get_by_id(feed_id)

    if usersub and not usersub.needs_unread_recalc:
        usersub.needs_unread_recalc = True
        usersub.save(update_fields=['needs_unread_recalc'])

    data = dict(code=0, payload=dict(story_id=story_id))

    story, found_original = MStory.find_story(feed_id, story_id)

    if not story:
        logging.user(request, "~FY~SBUnread~SN story in feed: %s (NOT FOUND)" % (feed))
        return dict(code=-1, message="Story not found.")

    if usersub:
        data = usersub.invert_read_stories_after_unread_story(story, request)

    message = RUserStory.story_can_be_marked_read_by_user(story, request.user)
    if message:
        data['code'] = -1
        data['message'] = message
        return data

    social_subs = MSocialSubscription.mark_dirty_sharing_story(user_id=request.user.pk,
                                                               story_feed_id=feed_id,
                                                               story_guid_hash=story.guid_hash)
    dirty_count = social_subs and social_subs.count()
    dirty_count = ("(%s social_subs)" % dirty_count) if dirty_count else ""

    RUserStory.mark_story_hash_unread(request.user, story_hash=story.story_hash)

    r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
    r.publish(request.user.username, 'feed:%s' % feed_id)

    logging.user(request, "~FY~SBUnread~SN story in feed: %s %s" % (feed, dirty_count))

    return data

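
# Hash-based variant of mark_story_as_unread; works without an explicit
# feed_id since the feed is recoverable from the story hash itself.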
@ajax_login_required
@json.json_view
@required_params('story_hash')
def mark_story_hash_as_unread(request):
    r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
    story_hash = request.POST.get('story_hash')
    feed_id, _ = MStory.split_story_hash(story_hash)
    story, _ = MStory.find_story(feed_id, story_hash)
    if not story:
        data = dict(code=-1, message="That story has been removed from the feed, no need to mark it unread.")
        return data
    message = RUserStory.story_can_be_marked_read_by_user(story, request.user)
    if message:
        data = dict(code=-1, message=message)
        return data

    # Also count on original subscription
    usersubs = UserSubscription.objects.filter(user=request.user.pk, feed=feed_id)
    if usersubs:
        usersub = usersubs[0]
        if not usersub.needs_unread_recalc:
            usersub.needs_unread_recalc = True
            usersub.save(update_fields=['needs_unread_recalc'])
        data = usersub.invert_read_stories_after_unread_story(story, request)
        r.publish(request.user.username, 'feed:%s' % feed_id)

    feed_id, friend_ids = RUserStory.mark_story_hash_unread(request.user, story_hash)

    if friend_ids:
        socialsubs = MSocialSubscription.objects.filter(
            user_id=request.user.pk,
            subscription_user_id__in=friend_ids)
        for socialsub in socialsubs:
            if not socialsub.needs_unread_recalc:
                socialsub.needs_unread_recalc = True
                socialsub.save()
            r.publish(request.user.username, 'social:%s' % socialsub.subscription_user_id)

    logging.user(request, "~FYUnread story in feed/socialsubs: %s/%s" % (feed_id, friend_ids))

    return dict(code=1, story_hash=story_hash, feed_id=feed_id, friend_user_ids=friend_ids)

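
# Marks whole feeds (or 'social:<user_id>' subscriptions) as read, with an
# optional cutoff timestamp and a direction: "older" marks everything up to
# the cutoff as read, anything else marks the newer stories instead.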
@ajax_login_required
@json.json_view
def mark_feed_as_read(request):
    r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
    feed_ids = request.POST.getlist('feed_id') or request.POST.getlist('feed_id[]')
    cutoff_timestamp = int(request.POST.get('cutoff_timestamp', 0))
    direction = request.POST.get('direction', 'older')
    infrequent = is_true(request.POST.get('infrequent', False))
    if infrequent:
        infrequent = request.POST.get('infrequent')
    multiple = len(feed_ids) > 1
    code = 1
    errors = []
    cutoff_date = datetime.datetime.fromtimestamp(cutoff_timestamp) if cutoff_timestamp else None

    if infrequent:
        feed_ids = Feed.low_volume_feeds(feed_ids, stories_per_month=infrequent)
        feed_ids = [str(f) for f in feed_ids]  # This method expects strings

    if cutoff_date:
        logging.user(request, "~FMMark %s feeds read, %s - cutoff: %s/%s" %
                     (len(feed_ids), direction, cutoff_timestamp, cutoff_date))

    for feed_id in feed_ids:
        if 'social:' in feed_id:
            user_id = int(feed_id.replace('social:', ''))
            try:
                sub = MSocialSubscription.objects.get(user_id=request.user.pk,
                                                      subscription_user_id=user_id)
            except MSocialSubscription.DoesNotExist:
                logging.user(request, "~FRCouldn't find socialsub: %s" % user_id)
                continue
            if not multiple:
                sub_user = User.objects.get(pk=sub.subscription_user_id)
                logging.user(request, "~FMMarking social feed as read: ~SB%s" % (sub_user.username,))
        else:
            try:
                feed = Feed.objects.get(id=feed_id)
                sub = UserSubscription.objects.get(feed=feed, user=request.user)
                if not multiple:
                    logging.user(request, "~FMMarking feed as read: ~SB%s" % (feed,))
            except (Feed.DoesNotExist, UserSubscription.DoesNotExist) as e:
                errors.append("User not subscribed: %s" % e)
                continue
            except (ValueError) as e:
                errors.append("Invalid feed_id: %s" % e)
                continue

        if not sub:
            errors.append("User not subscribed: %s" % feed_id)
            continue

        try:
            if direction == "older":
                marked_read = sub.mark_feed_read(cutoff_date=cutoff_date)
            else:
                marked_read = sub.mark_newer_stories_read(cutoff_date=cutoff_date)
            if marked_read and not multiple:
                r.publish(request.user.username, 'feed:%s' % feed_id)
        except IntegrityError as e:
            errors.append("Could not mark feed as read: %s" % e)
            code = -1

    if multiple:
        logging.user(request, "~FMMarking ~SB%s~SN feeds as read" % len(feed_ids))
        r.publish(request.user.username, 'refresh:%s' % ','.join(feed_ids))

    if errors:
        logging.user(request, "~FMMarking read had errors: ~FR%s" % errors)

    return dict(code=code, errors=errors, cutoff_date=cutoff_date, direction=direction)


def _parse_user_info(user):
    return {
        'user_info': {
            'is_anonymous': json.encode(user.is_anonymous),
            'is_authenticated': json.encode(user.is_authenticated),
            'username': json.encode(user.username if user.is_authenticated else 'Anonymous')
        }
    }

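
# Subscribes the user to a new site by URL. Twitter feeds are gated on a
# premium account and a working Twitter API connection; BANNED_URLS is a
# publisher opt-out list, presumably defined earlier in this module.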
@ajax_login_required
@json.json_view
def add_url(request):
    code = 0
    url = request.POST['url']
    folder = request.POST.get('folder', '').replace('river:', '')
    new_folder = request.POST.get('new_folder', '').replace('river:', '')
    auto_active = is_true(request.POST.get('auto_active', 1))
    skip_fetch = is_true(request.POST.get('skip_fetch', False))
    feed = None

    if not url:
        code = -1
        message = 'Enter in the website address or the feed URL.'
    elif any([(banned_url in url) for banned_url in BANNED_URLS]):
        code = -1
        message = "The publisher of this website has banned NewsBlur."
    elif re.match('(https?://)?twitter.com/\w+/?$', url):
        if not request.user.profile.is_premium:
            message = "You must be a premium subscriber to add Twitter feeds."
            code = -1
        else:
            # Check if Twitter API is active for user
            ss = MSocialServices.get_user(request.user.pk)
            try:
                if not ss.twitter_uid:
                    raise tweepy.TweepError("No API token")
                ss.twitter_api().me()
            except tweepy.TweepError:
                code = -1
                message = "Your Twitter connection isn't setup. Go to Manage - Friends/Followers and reconnect Twitter."

    if code == -1:
        return dict(code=code, message=message)

    if new_folder:
        usf, _ = UserSubscriptionFolders.objects.get_or_create(user=request.user)
        usf.add_folder(folder, new_folder)
        folder = new_folder

    code, message, us = UserSubscription.add_subscription(user=request.user, feed_address=url,
                                                          folder=folder, auto_active=auto_active,
                                                          skip_fetch=skip_fetch)
    feed = us and us.feed
    if feed:
        r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
        r.publish(request.user.username, 'reload:%s' % feed.pk)
        MUserSearch.schedule_index_feeds_for_search(feed.pk, request.user.pk)

    return dict(code=code, message=message, feed=feed)

@ajax_login_required
2010-07-25 23:13:27 -04:00
@json.json_view
2010-04-06 20:41:00 -04:00
def add_folder ( request ) :
2021-01-13 19:32:36 -05:00
folder = request . POST [ ' folder ' ] . replace ( ' river: ' , ' ' )
parent_folder = request . POST . get ( ' parent_folder ' , ' ' ) . replace ( ' river: ' , ' ' )
2014-06-28 08:22:32 -07:00
folders = None
2011-09-16 09:26:22 -07:00
logging . user ( request , " ~FRAdding Folder: ~SB %s (in %s ) " % ( folder , parent_folder ) )
2010-08-17 17:45:51 -04:00
2010-04-06 20:41:00 -04:00
if folder :
code = 1
message = " "
user_sub_folders_object , _ = UserSubscriptionFolders . objects . get_or_create ( user = request . user )
2011-03-21 10:15:18 -04:00
user_sub_folders_object . add_folder ( parent_folder , folder )
2014-06-28 08:22:32 -07:00
folders = json . decode ( user_sub_folders_object . folders )
2013-09-13 15:54:52 -07:00
r = redis . Redis ( connection_pool = settings . REDIS_PUBSUB_POOL )
r . publish ( request . user . username , ' reload:feeds ' )
2010-04-06 20:41:00 -04:00
else :
code = - 1
message = " Gotta write in a folder name. "
2014-06-28 08:22:32 -07:00
return dict ( code = code , message = message , folders = folders )

@ajax_login_required
@json.json_view
def delete_feed(request):
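    """Remove a feed from one folder of the current user's subscriptions,
    then recount the feed's subscribers and tell open clients to reload."""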
    feed_id = int(request.POST['feed_id'])
    in_folder = request.POST.get('in_folder', '').replace('river:', '')
    if not in_folder or in_folder == ' ':
        in_folder = ""
    
    user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
    user_sub_folders.delete_feed(feed_id, in_folder)
    
    feed = Feed.objects.filter(pk=feed_id)
    if feed:
        feed[0].count_subscribers()
    
    r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
    r.publish(request.user.username, 'reload:feeds')
    
    return dict(code=1, message="Removed %s from '%s'." % (feed, in_folder))

@ajax_login_required
@json.json_view
def delete_feed_by_url(request):
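    """Same as delete_feed, but the feed is looked up by URL instead of id."""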
    message = ""
    code = 0
    url = request.POST['url']
    in_folder = request.POST.get('in_folder', '').replace('river:', '')
    if in_folder == ' ':
        in_folder = ""
    
    logging.user(request.user, "~FBFinding feed (delete_feed_by_url): %s" % url)
    feed = Feed.get_feed_from_url(url, create=False)
    if feed:
        user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
        user_sub_folders.delete_feed(feed.pk, in_folder)
        code = 1
        feed = Feed.objects.filter(pk=feed.pk)
        if feed:
            feed[0].count_subscribers()
    else:
        code = -1
        message = "URL not found."
        
    return dict(code=code, message=message)

@ajax_login_required
@json.json_view
def delete_folder(request):
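    """Delete a folder and the feeds inside it. An OPML backup is emailed
    to the user first, since this is destructive."""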
    folder_to_delete = request.POST.get('folder_name') or request.POST.get('folder_to_delete')
    in_folder = request.POST.get('in_folder', None)
    feed_ids_in_folder = request.POST.getlist('feed_id') or request.POST.getlist('feed_id[]')
    feed_ids_in_folder = [int(f) for f in feed_ids_in_folder if f]
    
    request.user.profile.send_opml_export_email(reason="You have deleted an entire folder of feeds, so here's a backup of all of your subscriptions just in case.")
    
    # Works piss poor with duplicate folder titles, if they are both in the same folder.
    # Deletes all, but only in the same folder parent. But nobody should be doing that, right?
    user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
    user_sub_folders.delete_folder(folder_to_delete, in_folder, feed_ids_in_folder)
    folders = json.decode(user_sub_folders.folders)
    
    r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
    r.publish(request.user.username, 'reload:feeds')
    
    return dict(code=1, folders=folders)

@required_params('feeds_by_folder')
@ajax_login_required
@json.json_view
def delete_feeds_by_folder(request):
    feeds_by_folder = json.decode(request.POST['feeds_by_folder'])
    
    request.user.profile.send_opml_export_email(reason="You have deleted a number of feeds at once, so here's a backup of all of your subscriptions just in case.")
    
    # Works piss poor with duplicate folder titles, if they are both in the same folder.
    # Deletes all, but only in the same folder parent. But nobody should be doing that, right?
    user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
    user_sub_folders.delete_feeds_by_folder(feeds_by_folder)
    folders = json.decode(user_sub_folders.folders)
    r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
    r.publish(request.user.username, 'reload:feeds')
    
    return dict(code=1, folders=folders)

@ajax_login_required
@json.json_view
def rename_feed(request):
    feed = get_object_or_404(Feed, pk=int(request.POST['feed_id']))
    user_sub = UserSubscription.objects.get(user=request.user, feed=feed)
    feed_title = request.POST['feed_title']
    
    logging.user(request, "~FRRenaming feed '~SB%s~SN' to: ~SB%s" % (
                 feed.feed_title, feed_title))
    
    user_sub.user_title = feed_title
    user_sub.save()
    
    return dict(code=1)

@ajax_login_required
@json.json_view
def rename_folder(request):
    folder_to_rename = request.POST.get('folder_name') or request.POST.get('folder_to_rename')
    new_folder_name = request.POST['new_folder_name']
    in_folder = request.POST.get('in_folder', '').replace('river:', '')
    if 'Top Level' in in_folder:
        in_folder = ''
    code = 0
    
    # Works piss poor with duplicate folder titles, if they are both in the same folder.
    # Renames all, but only in the same folder parent. But nobody should be doing that, right?
    if folder_to_rename and new_folder_name:
        user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
        user_sub_folders.rename_folder(folder_to_rename, new_folder_name, in_folder)
        code = 1
    else:
        code = -1
        
    return dict(code=code)

@ajax_login_required
@json.json_view
def move_feed_to_folders(request):
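    """Move a feed out of every folder in ``in_folders`` and into every
    folder in ``to_folders``."""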
    feed_id = int(request.POST['feed_id'])
    in_folders = request.POST.getlist('in_folders', '') or request.POST.getlist('in_folders[]', '')
    to_folders = request.POST.getlist('to_folders', '') or request.POST.getlist('to_folders[]', '')
    
    user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
    user_sub_folders = user_sub_folders.move_feed_to_folders(feed_id, in_folders=in_folders,
                                                             to_folders=to_folders)
    
    r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
    r.publish(request.user.username, 'reload:feeds')
    
    return dict(code=1, folders=json.decode(user_sub_folders.folders))

@ajax_login_required
@json.json_view
def move_feed_to_folder(request):
    feed_id = int(request.POST['feed_id'])
    in_folder = request.POST.get('in_folder', '')
    to_folder = request.POST.get('to_folder', '')
    
    user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
    user_sub_folders = user_sub_folders.move_feed_to_folder(feed_id, in_folder=in_folder,
                                                            to_folder=to_folder)
    
    r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
    r.publish(request.user.username, 'reload:feeds')
    
    return dict(code=1, folders=json.decode(user_sub_folders.folders))

@ajax_login_required
@json.json_view
def move_folder_to_folder(request):
    folder_name = request.POST['folder_name']
    in_folder = request.POST.get('in_folder', '')
    to_folder = request.POST.get('to_folder', '')
    user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
    user_sub_folders = user_sub_folders.move_folder_to_folder(folder_name, in_folder=in_folder, to_folder=to_folder)
    
    r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
    r.publish(request.user.username, 'reload:feeds')
    
    return dict(code=1, folders=json.decode(user_sub_folders.folders))

@required_params('feeds_by_folder', 'to_folder')
@ajax_login_required
@json.json_view
def move_feeds_by_folder_to_folder(request):
    feeds_by_folder = json.decode(request.POST['feeds_by_folder'])
    to_folder = request.POST['to_folder']
    new_folder = request.POST.get('new_folder', None)
    
    request.user.profile.send_opml_export_email(reason="You have moved a number of feeds at once, so here's a backup of all of your subscriptions just in case.")
    
    user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user)
    if new_folder:
        user_sub_folders.add_folder(to_folder, new_folder)
        to_folder = new_folder
    
    user_sub_folders = user_sub_folders.move_feeds_by_folder_to_folder(feeds_by_folder, to_folder)
    
    r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
    r.publish(request.user.username, 'reload:feeds')
    
    return dict(code=1, folders=json.decode(user_sub_folders.folders))

@login_required
def add_feature(request):
    if not request.user.is_staff:
        return HttpResponseForbidden()
        
    code = -1
    form = FeatureForm(request.POST)
    if form.is_valid():
        form.save()
        code = 1
        return HttpResponseRedirect(reverse('index'))
        
    return dict(code=code)
2010-06-30 12:17:22 -04:00
2010-07-25 23:13:27 -04:00
@json.json_view
2010-06-30 12:17:22 -04:00
def load_features ( request ) :
    user = get_user(request)
    page = max(int(request.GET.get('page', 0)), 0)
    if page > 1:
        logging.user(request, "~FBBrowse features: ~SBPage #%s" % (page+1))
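    # Three features per page, with one extra row fetched, presumably so the
    # client can tell whether another page exists.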
    features = list(Feature.objects.all()[page*3:(page+1)*3+1].values())
    features = [{
        'description': f['description'],
        'date': localtime_for_timezone(f['date'], user.profile.timezone).strftime("%b %d, %Y")
    } for f in features]
    
    return features

@ajax_login_required
@json.json_view
def save_feed_order(request):
    folders = request.POST.get('folders')
    if folders:
        # Test that folders can be JSON decoded
        folders_list = json.decode(folders)
        assert folders_list is not None
        logging.user(request, "~FBFeed re-ordering: ~SB%s folders/feeds" % (len(folders_list)))
        
        user_sub_folders = UserSubscriptionFolders.objects.get(user=request.user)
        user_sub_folders.folders = folders
        user_sub_folders.save()
        
    return {}

@json.json_view
def feeds_trainer(request):
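    """Return trainer payloads (saved classifiers, popular tags and authors,
    story and subscriber counts) for one feed, or for every active
    subscription that hasn't been trained yet."""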
    classifiers = []
    feed_id = request.GET.get('feed_id')
    user = get_user(request)
    usersubs = UserSubscription.objects.filter(user=user, active=True)
    
    if feed_id:
        feed = get_object_or_404(Feed, pk=feed_id)
        usersubs = usersubs.filter(feed=feed)
    usersubs = usersubs.select_related('feed').order_by('-feed__stories_last_month')
    
    for us in usersubs:
        if (not us.is_trained and us.feed.stories_last_month > 0) or feed_id:
            classifier = dict()
            classifier['classifiers'] = get_classifiers_for_user(user, feed_id=us.feed.pk)
            classifier['feed_id'] = us.feed_id
            classifier['stories_last_month'] = us.feed.stories_last_month
            classifier['num_subscribers'] = us.feed.num_subscribers
            classifier['feed_tags'] = json.decode(us.feed.data.popular_tags) if us.feed.data.popular_tags else []
            classifier['feed_authors'] = json.decode(us.feed.data.popular_authors) if us.feed.data.popular_authors else []
            classifiers.append(classifier)
    
    user.profile.has_trained_intelligence = True
    user.profile.save()
    
    logging.user(user, "~FGLoading Trainer: ~SB%s feeds" % (len(classifiers)))
    
    return classifiers

@ajax_login_required
@json.json_view
def save_feed_chooser(request):
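    """Activate the approved subset of the user's subscriptions.

    Free accounts are capped at 64 active feeds; a premium user who posts
    no approved_feeds gets everything activated.
    """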
    is_premium = request.user.profile.is_premium
    approved_feeds = request.POST.getlist('approved_feeds') or request.POST.getlist('approved_feeds[]')
    approved_feeds = [int(feed_id) for feed_id in approved_feeds if feed_id]
    approve_all = False
    if not is_premium:
        approved_feeds = approved_feeds[:64]
    elif is_premium and not approved_feeds:
        approve_all = True
    activated = 0
    usersubs = UserSubscription.objects.filter(user=request.user)
    
    for sub in usersubs:
        try:
            if sub.feed_id in approved_feeds or approve_all:
                activated += 1
                if not sub.active:
                    sub.active = True
                    sub.save()
                if sub.feed.active_subscribers <= 0:
                    sub.feed.count_subscribers()
            elif sub.active:
                sub.active = False
                sub.save()
        except Feed.DoesNotExist:
            pass
    
    UserSubscription.queue_new_feeds(request.user)
    UserSubscription.refresh_stale_feeds(request.user, exclude_new=True)
    
    r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
    r.publish(request.user.username, 'reload:feeds')
    
    logging.user(request, "~BB~FW~SBFeed chooser: ~FC%s~SN/~SB%s" % (
        activated,
        usersubs.count()
    ))
    
    return {'activated': activated}

@ajax_login_required
def retrain_all_sites(request):
    for sub in UserSubscription.objects.filter(user=request.user):
        sub.is_trained = False
        sub.save()
        
    return feeds_trainer(request)
2010-10-05 19:05:01 -04:00
@login_required
def activate_premium_account ( request ) :
    try:
        usersubs = UserSubscription.objects.select_related('feed').filter(user=request.user)
        for sub in usersubs:
            sub.active = True
            sub.save()
            if sub.feed.premium_subscribers <= 0:
                sub.feed.count_subscribers()
                sub.feed.schedule_feed_fetch_immediately()
    except Exception as e:
        subject = "Premium activation failed"
        message = "%s -- %s\n\n%s" % (request.user, usersubs, e)
        mail_admins(subject, message, fail_silently=True)
    
    request.user.profile.is_premium = True
    request.user.profile.save()
    
    return HttpResponseRedirect(reverse('index'))

@login_required
def login_as(request):
    if not request.user.is_staff:
        logging.user(request, "~SKNON-STAFF LOGGING IN AS ANOTHER USER!")
        assert False
        return HttpResponseForbidden()
    username = request.GET['user']
    user = get_object_or_404(User, username__iexact=username)
    user.backend = settings.AUTHENTICATION_BACKENDS[0]
    login_user(request, user, backend='django.contrib.auth.backends.ModelBackend')
    return HttpResponseRedirect(reverse('index'))

def iframe_buster(request):
    logging.user(request, "~FB~SBiFrame bust!")
    return HttpResponse(status=204)

@required_params('story_id', feed_id=int)
@ajax_login_required
@json.json_view
def mark_story_as_starred(request):
    return _mark_story_as_starred(request)

@required_params('story_hash')
@ajax_login_required
@json.json_view
def mark_story_hash_as_starred(request):
    return _mark_story_as_starred(request)

def _mark_story_as_starred(request):
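    """Star (save) a story for the current user, found either by story_hash
    or by feed_id + story_id, recording user tags, highlights, and notes,
    and keeping the per-feed and per-tag starred counts in sync."""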
    code = 1
    feed_id = int(request.POST.get('feed_id', 0))
    story_id = request.POST.get('story_id', None)
    story_hash = request.POST.get('story_hash', None)
    user_tags = request.POST.getlist('user_tags') or request.POST.getlist('user_tags[]')
    user_notes = request.POST.get('user_notes', None)
    highlights = request.POST.getlist('highlights') or request.POST.getlist('highlights[]') or []
    message = ""
    if story_hash:
        story, _ = MStory.find_story(story_hash=story_hash)
        feed_id = story and story.story_feed_id
    else:
        story, _ = MStory.find_story(story_feed_id=feed_id, story_id=story_id)
    
    if not story:
        return {'code': -1, 'message': "Could not find story to save."}
    
    story_db = dict([(k, v) for k, v in list(story._data.items())
                     if k is not None and v is not None])
    # Pop all existing user-specific fields because we don't want to reuse them from the found story
    # in case MStory.find_story uses somebody else's saved/shared story (because the original is deleted)
    story_db.pop('user_id', None)
    story_db.pop('starred_date', None)
    story_db.pop('id', None)
    story_db.pop('user_tags', None)
    story_db.pop('highlights', None)
    story_db.pop('user_notes', None)
    
    now = datetime.datetime.now()
    story_values = dict(starred_date=now, user_tags=user_tags, highlights=highlights, user_notes=user_notes, **story_db)
    params = dict(story_guid=story.story_guid, user_id=request.user.pk)
    starred_story = MStarredStory.objects(**params).limit(1)
    created = False
    changed_user_notes = False
    removed_user_tags = []
    removed_highlights = []
    if not starred_story:
        params.update(story_values)
        if 'story_latest_content_z' in params:
            params.pop('story_latest_content_z')
        try:
            starred_story = MStarredStory.objects.create(**params)
        except OperationError as e:
            logging.user(request, "~FCStarring ~FRfailed~FC: ~SB%s (~FM~SB%s~FC~SN)" % (story.story_title[:32], e))
            return {'code': -1, 'message': "Could not save story due to: %s" % e}
        created = True
        MActivity.new_starred_story(user_id=request.user.pk,
                                    story_title=story.story_title,
                                    story_feed_id=feed_id,
                                    story_id=starred_story.story_guid)
        new_user_tags = user_tags
        new_highlights = highlights
        changed_user_notes = bool(user_notes)
        MStarredStoryCounts.adjust_count(request.user.pk, feed_id=feed_id, amount=1)
    else:
        starred_story = starred_story[0]
        new_user_tags = list(set(user_tags) - set(starred_story.user_tags or []))
        removed_user_tags = list(set(starred_story.user_tags or []) - set(user_tags))
        new_highlights = list(set(highlights) - set(starred_story.highlights or []))
        removed_highlights = list(set(starred_story.highlights or []) - set(highlights))
        changed_user_notes = bool(user_notes != starred_story.user_notes)
        starred_story.user_tags = user_tags
        starred_story.highlights = highlights
        starred_story.user_notes = user_notes
        starred_story.save()
    
    if len(highlights) == 1 and len(new_highlights) == 1:
        MStarredStoryCounts.adjust_count(request.user.pk, highlights=True, amount=1)
    elif len(highlights) == 0 and len(removed_highlights):
        MStarredStoryCounts.adjust_count(request.user.pk, highlights=True, amount=-1)
    
    for tag in new_user_tags:
        MStarredStoryCounts.adjust_count(request.user.pk, tag=tag, amount=1)
    for tag in removed_user_tags:
        MStarredStoryCounts.adjust_count(request.user.pk, tag=tag, amount=-1)
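    # On roughly 1% of saves, schedule a full background recount of this user's
    # tag counts, presumably to correct drift in the incremental adjustments above.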
    if random.random() < 0.01:
        MStarredStoryCounts.schedule_count_tags_for_user(request.user.pk)
    MStarredStoryCounts.count_for_user(request.user.pk, total_only=True)
    starred_counts, starred_count = MStarredStoryCounts.user_counts(request.user.pk, include_total=True)
    if not starred_count and len(starred_counts):
        starred_count = MStarredStory.objects(user_id=request.user.pk).count()
    
    if not changed_user_notes:
        r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
        r.publish(request.user.username, 'story:starred:%s' % story.story_hash)
    
    if created:
        logging.user(request, "~FCStarring: ~SB%s (~FM~SB%s~FC~SN)" % (story.story_title[:32], starred_story.user_tags))
    else:
        logging.user(request, "~FCUpdating starred:~SN~FC ~SB%s~SN (~FM~SB%s~FC~SN/~FM%s~FC)" % (story.story_title[:32], starred_story.user_tags, starred_story.user_notes))
    
    return {'code': code, 'message': message, 'starred_count': starred_count, 'starred_counts': starred_counts}

@required_params('story_id')
@ajax_login_required
@json.json_view
def mark_story_as_unstarred(request):
    return _mark_story_as_unstarred(request)

@required_params('story_hash')
@ajax_login_required
@json.json_view
def mark_story_hash_as_unstarred(request):
    return _mark_story_as_unstarred(request)

def _mark_story_as_unstarred(request):
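    """Unstar a saved story, found by story_id or story_hash, and update
    the user's starred counts."""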
    code = 1
    story_id = request.POST.get('story_id', None)
    story_hash = request.POST.get('story_hash', None)
    starred_counts = None
    starred_story = None
    
    if story_id:
        starred_story = MStarredStory.objects(user_id=request.user.pk, story_guid=story_id)
    if not story_id or not starred_story:
        starred_story = MStarredStory.objects(user_id=request.user.pk, story_hash=story_hash or story_id)
    if starred_story:
        starred_story = starred_story[0]
        logging.user(request, "~FCUnstarring: ~SB%s" % (starred_story.story_title[:50]))
        user_tags = starred_story.user_tags
        feed_id = starred_story.story_feed_id
        MActivity.remove_starred_story(user_id=request.user.pk,
                                       story_feed_id=starred_story.story_feed_id,
                                       story_id=starred_story.story_guid)
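        # Detach the saved story by handing it to user 0 rather than deleting it
        # outright; if an identical orphaned copy already exists, the unique index
        # raises NotUniqueError and the duplicate is deleted instead.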
        starred_story.user_id = 0
        try:
            starred_story.save()
        except NotUniqueError:
            starred_story.delete()
        
        MStarredStoryCounts.adjust_count(request.user.pk, feed_id=feed_id, amount=-1)
        
        for tag in user_tags:
            try:
                MStarredStoryCounts.adjust_count(request.user.pk, tag=tag, amount=-1)
            except MStarredStoryCounts.DoesNotExist:
                pass
        MStarredStoryCounts.schedule_count_tags_for_user(request.user.pk)
        MStarredStoryCounts.count_for_user(request.user.pk, total_only=True)
        starred_counts = MStarredStoryCounts.user_counts(request.user.pk)
        
        r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
        r.publish(request.user.username, 'story:unstarred:%s' % starred_story.story_hash)
    else:
        code = -1
    
    return {'code': code, 'starred_counts': starred_counts}

@ajax_login_required
@json.json_view
def starred_counts(request):
    starred_counts, starred_count = MStarredStoryCounts.user_counts(request.user.pk, include_total=True)
    logging.user(request, "~FCRequesting starred counts: ~SB%s stories (%s tags)" % (starred_count, len([s for s in starred_counts if s['tag']])))
    
    return {'starred_count': starred_count, 'starred_counts': starred_counts}

@ajax_login_required
@json.json_view
def send_story_email(request):
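    """Email a story to one or more recipients on behalf of the current user.

    Premium accounts can send up to 32 story emails per day; free accounts
    only one.
    """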
    def validate_email_as_bool(email):
        try:
            validate_email(email)
            return True
        except:
            return False
    
    code = 1
    message = 'OK'
    user = get_user(request)
    story_id = request.POST['story_id']
    feed_id = request.POST['feed_id']
    to_addresses = request.POST.get('to', '').replace(',', ' ').replace('  ', ' ').strip().split(' ')
    from_name = request.POST['from_name']
    from_email = request.POST['from_email']
    email_cc = is_true(request.POST.get('email_cc', 'true'))
    comments = request.POST['comments']
    comments = comments[:2048] # Separated due to PyLint
    from_address = 'share@newsblur.com'
    share_user_profile = MSocialProfile.get_user(request.user.pk)
    
    quota = 32 if user.profile.is_premium else 1
    if share_user_profile.over_story_email_quota(quota=quota):
        code = -1
        if user.profile.is_premium:
            message = 'You can only send %s stories per day by email.' % quota
        else:
            message = 'Upgrade to a premium subscription to send more than one story per day by email.'
        logging.user(request, '~BRNOT ~BMSharing story by email to %s recipient, over quota: %s/%s' %
                              (len(to_addresses), story_id, feed_id))
    elif not to_addresses:
        code = -1
        message = 'Please provide at least one email address.'
    elif not all(validate_email_as_bool(to_address) for to_address in to_addresses if to_addresses):
        code = -1
        message = 'You need to send the email to a valid email address.'
    elif not validate_email_as_bool(from_email):
        code = -1
        message = 'You need to provide your email address.'
    elif not from_name:
        code = -1
        message = 'You need to provide your name.'
    else:
        story, _ = MStory.find_story(feed_id, story_id)
        story = Feed.format_story(story, feed_id, text=True)
        feed = Feed.get_by_id(story['story_feed_id'])
        params = {
            "to_addresses": to_addresses,
            "from_name": from_name,
            "from_email": from_email,
            "email_cc": email_cc,
            "comments": comments,
            "from_address": from_address,
            "story": story,
            "feed": feed,
            "share_user_profile": share_user_profile,
        }
        text = render_to_string('mail/email_story.txt', params)
        html = render_to_string('mail/email_story.xhtml', params)
        subject = '%s' % (story['story_title'])
        cc = None
        if email_cc:
            cc = ['%s <%s>' % (from_name, from_email)]
        subject = subject.replace('\n', ' ')
        msg = EmailMultiAlternatives(subject, text,
                                     from_email='NewsBlur <%s>' % from_address,
                                     to=to_addresses,
                                     cc=cc,
                                     headers={'Reply-To': "%s <%s>" % (from_name, from_email)})
        msg.attach_alternative(html, "text/html")
        try:
            msg.send()
        except boto.ses.connection.BotoServerError as e:
            code = -1
            message = "Email error: %s" % str(e)
        
        share_user_profile.save_sent_email()
        
        logging.user(request, '~BMSharing story by email to %s recipient%s (%s): ~FY~SB%s~SN~BM~FY/~SB%s' %
                              (len(to_addresses), '' if len(to_addresses) == 1 else 's', to_addresses,
                               story['story_title'][:50], feed and feed.feed_title[:50]))
    
    return {'code': code, 'message': message}

@json.json_view
def load_tutorial(request):
    if request.GET.get('finished'):
        logging.user(request, '~BY~FW~SBFinishing Tutorial')
        return {}
    else:
        newsblur_feed = Feed.objects.filter(feed_address__icontains='blog.newsblur.com').order_by('-pk')[0]
        logging.user(request, '~BY~FW~SBLoading Tutorial')
        return {
            'newsblur_feed': newsblur_feed.canonical()
        }

@required_params('query', 'feed_id')
@json.json_view
def save_search(request):
    feed_id = request.POST['feed_id']
    query = request.POST['query']
    MSavedSearch.save_search(user_id=request.user.pk, feed_id=feed_id, query=query)
    
    saved_searches = MSavedSearch.user_searches(request.user.pk)
    
    return {
        'saved_searches': saved_searches,
    }

@required_params('query', 'feed_id')
@json.json_view
def delete_search(request):
    feed_id = request.POST['feed_id']
    query = request.POST['query']
    MSavedSearch.delete_search(user_id=request.user.pk, feed_id=feed_id, query=query)
    
    saved_searches = MSavedSearch.user_searches(request.user.pk)
    
    return {
        'saved_searches': saved_searches,
    }

@required_params('river_id', 'river_side', 'river_order')
@json.json_view
def save_dashboard_river(request):
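    """Save the placement (side and order) of a river on the user's dashboard
    and return the updated list of dashboard rivers."""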
    river_id = request.POST['river_id']
    river_side = request.POST['river_side']
    river_order = request.POST['river_order']
    logging.user(request, "~FCSaving dashboard river: ~SB%s~SN (%s %s)" % (river_id, river_side, river_order))
    
    MDashboardRiver.save_user(request.user.pk, river_id, river_side, river_order)
    dashboard_rivers = MDashboardRiver.get_user_rivers(request.user.pk)
    
    return {
        'dashboard_rivers': dashboard_rivers,
    }