2010-08-30 18:41:53 -04:00
|
|
|
import datetime
|
2020-07-01 16:59:21 -04:00
|
|
|
import base64
|
2020-06-15 02:54:37 -04:00
|
|
|
from urllib.parse import urlparse
|
2010-08-16 12:55:45 -04:00
|
|
|
from utils import log as logging
|
2020-06-12 01:27:07 -04:00
|
|
|
from django.shortcuts import get_object_or_404, render
|
2012-03-29 16:03:06 -07:00
|
|
|
from django.views.decorators.http import condition
|
2012-07-17 14:18:26 -07:00
|
|
|
from django.http import HttpResponseForbidden, HttpResponseRedirect, HttpResponse, Http404
|
2012-01-26 18:59:40 -08:00
|
|
|
from django.conf import settings
|
2011-03-01 09:59:06 -05:00
|
|
|
from django.contrib.auth.decorators import login_required
|
2010-12-23 16:02:17 -05:00
|
|
|
# from django.db import IntegrityError
|
2010-08-25 21:02:21 -04:00
|
|
|
from apps.rss_feeds.models import Feed, merge_feeds
|
2013-04-15 14:30:31 -07:00
|
|
|
from apps.rss_feeds.models import MFetchHistory
|
2012-04-09 15:19:25 -07:00
|
|
|
from apps.rss_feeds.models import MFeedIcon
|
2013-06-29 11:29:23 -07:00
|
|
|
from apps.push.models import PushSubscription
|
2011-03-04 12:27:31 -05:00
|
|
|
from apps.analyzer.models import get_classifiers_for_user
|
2010-11-10 18:04:17 -05:00
|
|
|
from apps.reader.models import UserSubscription
|
2013-01-08 18:33:30 -08:00
|
|
|
from apps.rss_feeds.models import MStory
|
2010-08-25 20:43:35 -04:00
|
|
|
from utils.user_functions import ajax_login_required
|
2020-06-15 12:30:30 -04:00
|
|
|
from utils import json_functions as json, feedfinder_forman as feedfinder
|
2010-07-28 18:18:01 -04:00
|
|
|
from utils.feed_functions import relative_timeuntil, relative_timesince
|
2011-03-11 20:05:41 -05:00
|
|
|
from utils.user_functions import get_user
|
2011-10-28 10:29:11 -07:00
|
|
|
from utils.view_functions import get_argument_or_404
|
2013-01-08 18:33:30 -08:00
|
|
|
from utils.view_functions import required_params
|
2016-02-26 20:39:18 -08:00
|
|
|
from utils.view_functions import is_true
|
2013-06-29 11:37:21 -07:00
|
|
|
from vendor.timezones.utilities import localtime_for_timezone
|
2015-07-21 10:20:06 -07:00
|
|
|
from utils.ratelimit import ratelimit
|
2011-04-25 20:53:29 -04:00
|
|
|
|
2015-01-20 10:15:13 -08:00
|
|
|
# Feed-address fragments that must never surface in autocomplete results,
# either because they are personal/sensitive (inbox, secret, password) or
# known-noisy. Matched as plain substrings against Feed.feed_address in
# feed_autocomplete() below.
IGNORE_AUTOCOMPLETE = [
    "facebook.com/feeds/notifications.php",
    "inbox",
    "secret",
    "password",
    "latitude",
]
|
|
|
|
|
2016-02-24 12:31:45 -08:00
|
|
|
@ajax_login_required
@json.json_view
def search_feed(request):
    """Look up an existing feed matching a URL/address without creating one.

    GET params: `address` (required), `offset` (pagination through multiple
    matches). Returns the feed's canonical payload, or a code -1 dict when
    the address is missing or nothing matches.
    """
    address = request.GET.get('address')
    offset = int(request.GET.get('offset', 0))

    if not address:
        return dict(code=-1, message="Please provide a URL/address.")

    logging.user(request.user, "~FBFinding feed (search_feed): %s" % address)
    ip = request.META.get('HTTP_X_FORWARDED_FOR', None) or request.META['REMOTE_ADDR']
    logging.user(request.user, "~FBIP: %s" % ip)

    # Logged-in users get the more aggressive (expensive) feed finder.
    aggressive = request.user.is_authenticated
    feed = Feed.get_feed_from_url(address, create=False, aggressive=aggressive, offset=offset)

    if not feed:
        return dict(code=-1, message="No feed found matching that XML or website address.")
    return feed.canonical()
|
|
|
|
|
2011-03-04 12:27:31 -05:00
|
|
|
@json.json_view
def load_single_feed(request, feed_id):
    """Return one feed's full canonical payload, annotated with the
    requesting user's trained classifiers for that feed."""
    user = get_user(request)
    feed = get_object_or_404(Feed, pk=feed_id)

    payload = feed.canonical(full=True)
    payload['classifiers'] = get_classifiers_for_user(user, feed_id=feed.pk)

    return payload
|
2012-01-26 18:59:40 -08:00
|
|
|
|
2012-03-29 16:03:06 -07:00
|
|
|
def feed_favicon_etag(request, feed_id):
    """ETag callback for @condition on load_feed_favicon: the icon's
    dominant color string, or None when no icon record exists (which
    forces a full, uncached response)."""
    try:
        return MFeedIcon.objects.get(feed_id=feed_id).color
    except MFeedIcon.DoesNotExist:
        return None
|
|
|
|
|
|
|
|
@condition(etag_func=feed_favicon_etag)
def load_feed_favicon(request, feed_id):
    """Serve a feed's favicon as a PNG, falling back to a generic globe
    icon when the feed has no stored icon data."""
    feed_icon = None
    try:
        feed_icon = MFeedIcon.objects.get(feed_id=feed_id)
    except MFeedIcon.DoesNotExist:
        logging.user(request, "~FBNo feed icon found: %s" % feed_id)

    if feed_icon is None or not feed_icon.data:
        # No icon (or empty data): redirect to the stock world icon.
        return HttpResponseRedirect(settings.MEDIA_URL + 'img/icons/circular/world.png')

    # Icon data is stored base64-encoded; decode before serving.
    return HttpResponse(base64.b64decode(feed_icon.data), content_type='image/png')
|
2012-01-26 18:59:40 -08:00
|
|
|
|
2010-12-23 16:02:17 -05:00
|
|
|
@json.json_view
def feed_autocomplete(request):
    """Search feeds for the add-site autocomplete widget.

    GET params:
        term/query: search text (may be a URL; URLs are decomposed below)
        v: response version; v>1 wraps results in {'feeds', 'term'}
        format: 'autocomplete' for the jQuery-UI item shape, anything
                else for full canonical feed payloads
    """
    query = request.GET.get('term') or request.GET.get('query')
    version = int(request.GET.get('v', 1))
    format = request.GET.get('format', 'autocomplete')

    # user = get_user(request)
    # if True or not user.profile.is_premium:
    #     return dict(code=-1, message="Overloaded, no autocomplete results.", feeds=[], term=query)

    if not query:
        return dict(code=-1, message="Specify a search 'term'.", feeds=[], term=query)

    # If the query looks like a URL, expand it to "hostname path segments"
    # so partial addresses still match feed addresses.
    if '.' in query:
        try:
            parts = urlparse(query)
            if not parts.hostname and not query.startswith('http'):
                # Bare domains parse with no hostname; retry with a scheme.
                parts = urlparse('http://%s' % query)
            if parts.hostname:
                query = [parts.hostname]
                query.extend([p for p in parts.path.split('/') if p])
                query = ' '.join(query)
        except:
            logging.user(request, "~FGAdd search, could not parse url in ~FR%s" % query)

    # Progressively drop trailing terms (at most 5 attempts) until the
    # autocomplete index returns something.
    query_params = query.split(' ')
    tries_left = 5
    while len(query_params) and tries_left:
        tries_left -= 1
        feed_ids = Feed.autocomplete(' '.join(query_params))
        if feed_ids:
            break
        else:
            query_params = query_params[:-1]

    # Dedupe, drop branched feeds, and filter out sensitive/noisy addresses.
    feeds = list(set([Feed.get_by_id(feed_id) for feed_id in feed_ids]))
    feeds = [feed for feed in feeds if feed and not feed.branch_from_feed]
    feeds = [feed for feed in feeds if all([x not in feed.feed_address for x in IGNORE_AUTOCOMPLETE])]

    if format == 'autocomplete':
        feeds = [{
            'id': feed.pk,
            'value': feed.feed_address,
            'label': feed.feed_title,
            'tagline': feed.data and feed.data.feed_tagline,
            'num_subscribers': feed.num_subscribers,
        } for feed in feeds]
    else:
        feeds = [feed.canonical(full=True) for feed in feeds]
    # Most-subscribed feeds first.
    feeds = sorted(feeds, key=lambda f: -1 * f['num_subscribers'])

    # Decorate results with favicon color/data where an icon exists.
    feed_ids = [f['id'] for f in feeds]
    feed_icons = dict((icon.feed_id, icon) for icon in MFeedIcon.objects.filter(feed_id__in=feed_ids))

    for feed in feeds:
        if feed['id'] in feed_icons:
            feed_icon = feed_icons[feed['id']]
            if feed_icon.data:
                feed['favicon_color'] = feed_icon.color
                feed['favicon'] = feed_icon.data

    logging.user(request, "~FGAdd Search: ~SB%s ~SN(%s matches)" % (query, len(feeds),))

    if version > 1:
        return {
            'feeds': feeds,
            'term': query,
        }
    else:
        return feeds
|
2010-12-23 16:02:17 -05:00
|
|
|
|
2016-01-05 11:32:36 -08:00
|
|
|
@ratelimit(minutes=1, requests=30)
@json.json_view
def load_feed_statistics(request, feed_id):
    """JSON endpoint: fetch/subscriber/story statistics for one feed."""
    user = get_user(request)
    feed = get_object_or_404(Feed, pk=feed_id)
    stats = assemble_statistics(user, feed_id)

    logging.user(request, "~FBStatistics: ~SB%s" % (feed))

    return stats
|
|
|
|
|
|
|
|
def load_feed_statistics_embedded(request, feed_id):
    """Render the same statistics as load_feed_statistics, but as an
    embeddable XHTML page instead of JSON."""
    user = get_user(request)
    feed = get_object_or_404(Feed, pk=feed_id)
    stats = assemble_statistics(user, feed_id)

    logging.user(request, "~FBStatistics (~FCembedded~FB): ~SB%s" % (feed))

    context = {
        'stats': json.json_encode(stats),
        'feed_js': json.json_encode(feed.canonical()),
        'feed': feed,
    }
    return render(request, 'rss_feeds/statistics.xhtml', context)
|
2020-06-28 14:00:09 -04:00
|
|
|
|
2020-01-08 10:28:28 -08:00
|
|
|
def assemble_statistics(user, feed_id):
    """Collect a feed's statistics dict: update cadence, story history,
    subscriber/classifier counts, and fetch history, localized to the
    user's timezone.

    Side effects: recomputes and saves the feed's statistics and, when a
    push subscription is missing, clears and saves `feed.is_push`.
    Raises Http404 when the feed does not exist.
    """
    user_timezone = user.profile.timezone
    stats = dict()
    feed = get_object_or_404(Feed, pk=feed_id)
    feed.update_all_statistics()
    feed.set_next_scheduled_update(verbose=True, skip_scheduling=True)
    feed.save_feed_story_history_statistics()
    feed.save_classifier_counts()

    # Dates of last and next update
    stats['active'] = feed.active
    stats['last_update'] = relative_timesince(feed.last_update)
    stats['next_update'] = relative_timeuntil(feed.next_scheduled_update)
    stats['push'] = feed.is_push
    if feed.is_push:
        try:
            stats['push_expires'] = localtime_for_timezone(feed.push.lease_expires,
                                                           user_timezone).strftime("%Y-%m-%d %H:%M:%S")
        except PushSubscription.DoesNotExist:
            # Feed claims push but has no subscription record; self-heal.
            stats['push_expires'] = 'Missing push'
            feed.is_push = False
            feed.save()

    # Minutes between updates
    update_interval_minutes = feed.get_next_scheduled_update(force=True, verbose=False)
    stats['update_interval_minutes'] = update_interval_minutes
    # Temporarily bump premium counts to compute what the interval would be
    # at premium speed, then restore the real values (never persisted).
    original_active_premium_subscribers = feed.active_premium_subscribers
    original_premium_subscribers = feed.premium_subscribers
    feed.active_premium_subscribers = max(feed.active_premium_subscribers+1, 1)
    feed.premium_subscribers += 1
    premium_update_interval_minutes = feed.get_next_scheduled_update(force=True, verbose=False,
                                                                     premium_speed=True)
    feed.active_premium_subscribers = original_active_premium_subscribers
    feed.premium_subscribers = original_premium_subscribers
    stats['premium_update_interval_minutes'] = premium_update_interval_minutes
    stats['errors_since_good'] = feed.errors_since_good

    # Stories per month - average and month-by-month breakout
    average_stories_per_month, story_count_history = feed.average_stories_per_month, feed.data.story_count_history
    stats['average_stories_per_month'] = average_stories_per_month
    story_count_history = story_count_history and json.decode(story_count_history)
    if story_count_history and isinstance(story_count_history, dict):
        stats['story_count_history'] = story_count_history['months']
        stats['story_days_history'] = story_count_history['days']
        stats['story_hours_history'] = story_count_history['hours']
    else:
        # Legacy payloads are a bare list (or empty); no day/hour breakdown.
        stats['story_count_history'] = story_count_history

    # Rotate hours to match user's timezone offset.
    # Guard: 'story_hours_history' only exists for dict-shaped history;
    # previously this read stats['story_hours_history'] unconditionally and
    # raised KeyError on legacy/empty history.
    if 'story_hours_history' in stats:
        localoffset = user_timezone.utcoffset(datetime.datetime.utcnow())
        hours_offset = int(localoffset.total_seconds() / 3600)
        rotated_hours = {}
        # NOTE(review): keys are not wrapped mod 24 (may go negative or
        # past 23) — presumably normalized client-side; confirm before
        # changing.
        for hour, value in list(stats['story_hours_history'].items()):
            rotated_hours[str(int(hour)+hours_offset)] = value
        stats['story_hours_history'] = rotated_hours

    # Subscribers
    stats['subscriber_count'] = feed.num_subscribers
    stats['num_subscribers'] = feed.num_subscribers
    stats['stories_last_month'] = feed.stories_last_month
    stats['last_load_time'] = feed.last_load_time
    stats['premium_subscribers'] = feed.premium_subscribers
    stats['active_subscribers'] = feed.active_subscribers
    stats['active_premium_subscribers'] = feed.active_premium_subscribers

    # Classifier counts
    stats['classifier_counts'] = json.decode(feed.data.feed_classifier_counts)

    # Fetch histories
    fetch_history = MFetchHistory.feed(feed_id, timezone=user_timezone)
    stats['feed_fetch_history'] = fetch_history['feed_fetch_history']
    stats['page_fetch_history'] = fetch_history['page_fetch_history']
    stats['feed_push_history'] = fetch_history['push_history']

    return stats
|
2011-04-07 10:30:05 -04:00
|
|
|
|
2011-11-12 18:19:57 -08:00
|
|
|
@json.json_view
def load_feed_settings(request, feed_id):
    """Return fetch/push history and duplicate addresses for a feed's
    settings dialog, localized to the requesting user's timezone."""
    feed = get_object_or_404(Feed, pk=feed_id)
    user = get_user(request)
    timezone = user.profile.timezone

    fetch_history = MFetchHistory.feed(feed_id, timezone=timezone)
    stats = {
        'feed_fetch_history': fetch_history['feed_fetch_history'],
        'page_fetch_history': fetch_history['page_fetch_history'],
        'feed_push_history': fetch_history['push_history'],
        'duplicate_addresses': feed.duplicate_addresses.all(),
    }

    return stats
|
2016-02-25 14:15:05 -08:00
|
|
|
|
2020-03-17 12:20:59 -04:00
|
|
|
@ratelimit(minutes=1, requests=30)
@json.json_view
def exception_retry(request):
    """Retry a feed that is in an exception state (or force a refetch).

    POST params: `feed_id` (required), `reset_fetch` (JSON bool) — when
    true the feed is marked never-fetched so the next fetch starts clean.
    Returns {'code': 1, 'feeds': {...}} on success, {'code': -1} when the
    user subscribes to neither the feed nor its original duplicate.
    """
    user = get_user(request)
    feed_id = get_argument_or_404(request, 'feed_id')
    reset_fetch = json.decode(request.POST['reset_fetch'])
    feed = Feed.get_by_id(feed_id)
    # Keep the originally-requested feed: update() below may return a
    # merged duplicate, and the user's subscription may still point here.
    original_feed = feed

    if not feed:
        raise Http404

    feed.schedule_feed_fetch_immediately()
    # Clear exception/inactive flags, saving only if anything changed.
    changed = False
    if feed.has_page_exception:
        changed = True
        feed.has_page_exception = False
    if feed.has_feed_exception:
        changed = True
        feed.has_feed_exception = False
    if not feed.active:
        changed = True
        feed.active = True
    if changed:
        feed.save(update_fields=['has_page_exception', 'has_feed_exception', 'active'])

    original_fetched_once = feed.fetched_once
    if reset_fetch:
        logging.user(request, "~FRRefreshing exception feed: ~SB%s" % (feed))
        feed.fetched_once = False
    else:
        logging.user(request, "~FRForcing refreshing feed: ~SB%s" % (feed))
        feed.fetched_once = True
    # Only touch the database if the flag actually flipped.
    if feed.fetched_once != original_fetched_once:
        feed.save(update_fields=['fetched_once'])

    # Synchronous fetch; update() can hand back a different (merged) feed,
    # so re-resolve by pk afterwards.
    feed = feed.update(force=True, compute_scores=False, verbose=True)
    feed = Feed.get_by_id(feed.pk)
    try:
        usersub = UserSubscription.objects.get(user=user, feed=feed)
    except UserSubscription.DoesNotExist:
        # Subscription may still reference the pre-merge feed; switch it.
        usersubs = UserSubscription.objects.filter(user=user, feed=original_feed)
        if usersubs:
            usersub = usersubs[0]
            usersub.switch_feed(feed, original_feed)
        else:
            return {'code': -1}
    usersub.calculate_feed_scores(silent=False)

    # Keyed by both the (possibly new) feed pk and the requested feed_id
    # so the client can find the payload either way.
    feeds = {feed.pk: usersub and usersub.canonical(full=True), feed_id: usersub.canonical(full=True)}
    return {'code': 1, 'feeds': feeds}
|
2010-08-25 20:43:35 -04:00
|
|
|
|
|
|
|
|
|
|
|
@ajax_login_required
@json.json_view
def exception_change_feed_address(request):
    """Change a feed's address by branching the feed for this user.

    POST params: `feed_id`, `feed_address`. The in-place exception-fixing
    path is currently disabled (`if False ...`), so every request takes
    the branch path: find or create a feed with the new address and switch
    the user's subscription onto it. Returns the updated feed payload and
    fetch history; code -1 when the user has no subscription or the new
    feed still has a feed exception.
    """
    feed_id = request.POST['feed_id']
    feed = get_object_or_404(Feed, pk=feed_id)
    original_feed = feed
    feed_address = request.POST['feed_address']
    timezone = request.user.profile.timezone
    code = -1

    # NOTE: deliberately disabled — the fix-in-place branch is kept for
    # reference but is never taken.
    if False and (feed.has_page_exception or feed.has_feed_exception):
        # Fix broken feed
        logging.user(request, "~FRFixing feed exception by address: %s - ~SB%s~SN to ~SB%s" % (feed, feed.feed_address, feed_address))
        feed.has_feed_exception = False
        feed.active = True
        feed.fetched_once = False
        feed.feed_address = feed_address
        duplicate_feed = feed.schedule_feed_fetch_immediately()
        code = 1
        if duplicate_feed:
            new_feed = Feed.objects.get(pk=duplicate_feed.pk)
            feed = new_feed
            new_feed.schedule_feed_fetch_immediately()
            new_feed.has_feed_exception = False
            new_feed.active = True
            new_feed = new_feed.save()
            if new_feed.pk != feed.pk:
                merge_feeds(new_feed.pk, feed.pk)
    else:
        # Branch good feed
        logging.user(request, "~FRBranching feed by address: ~SB%s~SN to ~SB%s" % (feed.feed_address, feed_address))
        try:
            # Reuse an existing feed with this address+link if one exists.
            feed = Feed.objects.get(hash_address_and_link=Feed.generate_hash_address_and_link(feed_address, feed.feed_link))
        except Feed.DoesNotExist:
            feed = Feed.objects.create(feed_address=feed_address, feed_link=feed.feed_link)
        code = 1
        if feed.pk != original_feed.pk:
            try:
                # Chain branches back to the root feed, not to each other.
                feed.branch_from_feed = original_feed.branch_from_feed or original_feed
            except Feed.DoesNotExist:
                feed.branch_from_feed = original_feed
            # Pin the address so background fetches don't revert it.
            feed.feed_address_locked = True
            feed = feed.save()

    # Fetch the new feed and re-resolve by pk (update() may merge feeds).
    feed = feed.update()
    feed = Feed.get_by_id(feed.pk)
    try:
        usersub = UserSubscription.objects.get(user=request.user, feed=feed)
    except UserSubscription.DoesNotExist:
        # The subscription may still point at the original feed; move it.
        usersubs = UserSubscription.objects.filter(user=request.user, feed=original_feed)
        if usersubs:
            usersub = usersubs[0]
            usersub.switch_feed(feed, original_feed)
        else:
            # No subscription to either feed: report failure with history.
            fetch_history = MFetchHistory.feed(feed_id, timezone=timezone)
            return {
                'code': -1,
                'feed_fetch_history': fetch_history['feed_fetch_history'],
                'page_fetch_history': fetch_history['page_fetch_history'],
                'push_history': fetch_history['push_history'],
            }

    usersub.calculate_feed_scores(silent=False)

    feed.update_all_statistics()
    classifiers = get_classifiers_for_user(usersub.user, feed_id=usersub.feed_id)

    # Keyed by the original pk so the client can replace its old entry.
    feeds = {
        original_feed.pk: usersub and usersub.canonical(full=True, classifiers=classifiers),
    }

    if feed and feed.has_feed_exception:
        code = -1

    fetch_history = MFetchHistory.feed(feed_id, timezone=timezone)
    return {
        'code': code,
        'feeds': feeds,
        'new_feed_id': usersub.feed_id,
        'feed_fetch_history': fetch_history['feed_fetch_history'],
        'page_fetch_history': fetch_history['page_fetch_history'],
        'push_history': fetch_history['push_history'],
    }
|
2010-08-25 20:43:35 -04:00
|
|
|
|
|
|
|
@ajax_login_required
@json.json_view
def exception_change_feed_link(request):
    """Change a feed's site link by branching the feed for this user.

    POST params: `feed_id`, `feed_link`. As in
    exception_change_feed_address, the in-place fix path is disabled
    (`if False ...`); the feed is branched to one matching the existing
    address plus the new link, and the user's subscription is switched.
    """
    feed_id = request.POST['feed_id']
    feed = get_object_or_404(Feed, pk=feed_id)
    original_feed = feed
    feed_link = request.POST['feed_link']
    timezone = request.user.profile.timezone
    code = -1

    # NOTE: deliberately disabled — kept for reference, never taken.
    if False and (feed.has_page_exception or feed.has_feed_exception):
        # Fix broken feed
        logging.user(request, "~FRFixing feed exception by link: ~SB%s~SN to ~SB%s" % (feed.feed_link, feed_link))
        found_feed_urls = feedfinder.find_feeds(feed_link)
        if len(found_feed_urls):
            code = 1
            feed.has_page_exception = False
            feed.active = True
            feed.fetched_once = False
            feed.feed_link = feed_link
            feed.feed_address = found_feed_urls[0]
            duplicate_feed = feed.schedule_feed_fetch_immediately()
            if duplicate_feed:
                new_feed = Feed.objects.get(pk=duplicate_feed.pk)
                feed = new_feed
                new_feed.schedule_feed_fetch_immediately()
                new_feed.has_page_exception = False
                new_feed.active = True
                new_feed.save()
    else:
        # Branch good feed
        logging.user(request, "~FRBranching feed by link: ~SB%s~SN to ~SB%s" % (feed.feed_link, feed_link))
        try:
            # Reuse an existing feed with this address+link if one exists.
            feed = Feed.objects.get(hash_address_and_link=Feed.generate_hash_address_and_link(feed.feed_address, feed_link))
        except Feed.DoesNotExist:
            feed = Feed.objects.create(feed_address=feed.feed_address, feed_link=feed_link)
        code = 1
        if feed.pk != original_feed.pk:
            try:
                # Chain branches back to the root feed, not to each other.
                feed.branch_from_feed = original_feed.branch_from_feed or original_feed
            except Feed.DoesNotExist:
                feed.branch_from_feed = original_feed
            # Pin the link so background fetches don't revert it.
            feed.feed_link_locked = True
            feed.save()

    # Fetch the new feed and re-resolve by pk (update() may merge feeds).
    feed = feed.update()
    feed = Feed.get_by_id(feed.pk)

    try:
        usersub = UserSubscription.objects.get(user=request.user, feed=feed)
    except UserSubscription.DoesNotExist:
        # The subscription may still point at the original feed; move it.
        usersubs = UserSubscription.objects.filter(user=request.user, feed=original_feed)
        if usersubs:
            usersub = usersubs[0]
            usersub.switch_feed(feed, original_feed)
        else:
            # No subscription to either feed: report failure with history.
            fetch_history = MFetchHistory.feed(feed_id, timezone=timezone)
            return {
                'code': -1,
                'feed_fetch_history': fetch_history['feed_fetch_history'],
                'page_fetch_history': fetch_history['page_fetch_history'],
                'push_history': fetch_history['push_history'],
            }

    usersub.calculate_feed_scores(silent=False)

    feed.update_all_statistics()
    classifiers = get_classifiers_for_user(usersub.user, feed_id=usersub.feed_id)

    if feed and feed.has_feed_exception:
        code = -1

    # Keyed by the original pk so the client can replace its old entry.
    feeds = {
        original_feed.pk: usersub.canonical(full=True, classifiers=classifiers),
    }
    fetch_history = MFetchHistory.feed(feed_id, timezone=timezone)
    return {
        'code': code,
        'feeds': feeds,
        'new_feed_id': usersub.feed_id,
        'feed_fetch_history': fetch_history['feed_fetch_history'],
        'page_fetch_history': fetch_history['page_fetch_history'],
        'push_history': fetch_history['push_history'],
    }
|
2011-03-01 09:59:06 -05:00
|
|
|
|
|
|
|
@login_required
def status(request):
    """Staff-only dashboard listing feeds updated within the last
    `minutes` minutes (GET param, default 1)."""
    if not request.user.is_staff:
        logging.user(request, "~SKNON-STAFF VIEWING RSS FEEDS STATUS!")
        # Previously `assert False` preceded this return, making it
        # unreachable (and asserts vanish under `python -O`). Respond
        # with an explicit 403 instead.
        return HttpResponseForbidden()

    minutes = int(request.GET.get('minutes', 1))
    now = datetime.datetime.now()
    hour_ago = now - datetime.timedelta(minutes=minutes)
    feeds = Feed.objects.filter(last_update__gte=hour_ago).order_by('-last_update')
    return render(request, 'rss_feeds/status.xhtml', {
        'feeds': feeds
    })
|
2013-01-08 18:33:30 -08:00
|
|
|
|
|
|
|
@json.json_view
def original_text(request):
    """Fetch and return the extracted full text of a story, plus its
    image URLs and a `failed` flag for short/empty extractions."""
    # iOS sends a POST, web sends a GET
    params = getattr(request, request.method)
    story_id = params.get('story_id')
    feed_id = params.get('feed_id')
    story_hash = params.get('story_hash', None)
    force = params.get('force', False)
    debug = params.get('debug', False)

    if story_hash:
        story, _ = MStory.find_story(story_hash=story_hash)
    else:
        story, _ = MStory.find_story(story_id=story_id, story_feed_id=feed_id)

    if not story:
        logging.user(request, "~FYFetching ~FGoriginal~FY story text: ~FRstory not found")
        return {'code': -1, 'message': 'Story not found.', 'original_text': None, 'failed': True}

    original_text = story.fetch_original_text(force=force, request=request, debug=debug)

    return {
        'feed_id': story.story_feed_id,
        'story_hash': story.story_hash,
        'story_id': story.story_guid,
        'image_urls': story.image_urls,
        'secure_image_urls': Feed.secure_image_urls(story.image_urls),
        'original_text': original_text,
        # Extractions under 100 chars are treated as failures.
        'failed': not original_text or len(original_text) < 100,
    }
|
2014-10-29 16:16:50 -07:00
|
|
|
|
2020-12-06 10:26:09 -05:00
|
|
|
@required_params('story_hash', method="GET")
def original_story(request):
    """Return the cached original page HTML for a story; 404 when the
    story is unknown."""
    story_hash = request.GET.get('story_hash')
    force = request.GET.get('force', False)
    debug = request.GET.get('debug', False)

    story, _ = MStory.find_story(story_hash=story_hash)

    if not story:
        logging.user(request, "~FYFetching ~FGoriginal~FY story page: ~FRstory not found")
        # return {'code': -1, 'message': 'Story not found.', 'original_page': None, 'failed': True}
        raise Http404

    original_page = story.fetch_original_page(force=force, request=request, debug=debug)
    return HttpResponse(original_page or "")
|
2016-02-26 20:01:41 -08:00
|
|
|
|
2020-12-06 10:26:09 -05:00
|
|
|
@required_params('story_hash', method="GET")
@json.json_view
def story_changes(request):
    """Return a story formatted with (or without) its tracked content
    diffs, controlled by the `show_changes` GET param."""
    story_hash = request.GET.get('story_hash', None)
    show_changes = is_true(request.GET.get('show_changes', True))

    story, _ = MStory.find_story(story_hash=story_hash)
    if not story:
        logging.user(request, "~FYFetching ~FGoriginal~FY story page: ~FRstory not found")
        return {'code': -1, 'message': 'Story not found.', 'original_page': None, 'failed': True}

    return {'story': Feed.format_story(story, show_changes=show_changes)}
|
|
|
|
|