2010-08-30 18:41:53 -04:00
|
|
|
import datetime
|
2010-08-16 12:55:45 -04:00
|
|
|
from utils import log as logging
|
2011-03-01 09:59:06 -05:00
|
|
|
from django.shortcuts import get_object_or_404, render_to_response
|
2010-08-25 20:43:35 -04:00
|
|
|
from django.http import HttpResponseForbidden
|
2010-12-25 23:04:43 -05:00
|
|
|
from django.db.models import Q
|
2011-03-01 09:59:06 -05:00
|
|
|
from django.contrib.auth.decorators import login_required
|
|
|
|
from django.template import RequestContext
|
2010-12-23 16:02:17 -05:00
|
|
|
# from django.db import IntegrityError
|
2010-08-25 21:02:21 -04:00
|
|
|
from apps.rss_feeds.models import Feed, merge_feeds
|
2011-02-13 14:47:58 -05:00
|
|
|
from apps.rss_feeds.models import MFeedFetchHistory, MPageFetchHistory
|
2011-03-04 12:27:31 -05:00
|
|
|
from apps.analyzer.models import get_classifiers_for_user
|
2010-11-10 18:04:17 -05:00
|
|
|
from apps.reader.models import UserSubscription
|
2010-08-25 20:43:35 -04:00
|
|
|
from utils.user_functions import ajax_login_required
|
2010-10-23 13:06:28 -04:00
|
|
|
from utils import json_functions as json, feedfinder
|
2010-07-28 18:18:01 -04:00
|
|
|
from utils.feed_functions import relative_timeuntil, relative_timesince
|
2011-03-11 20:05:41 -05:00
|
|
|
from utils.user_functions import get_user
|
2010-07-25 23:13:27 -04:00
|
|
|
|
2011-04-25 20:53:29 -04:00
|
|
|
|
|
|
|
@json.json_view
def search_feed(request):
    """Look up an existing feed matching a URL, without creating one.

    Reads ``address`` (feed or site URL) and an optional integer
    ``offset`` into the candidate matches. Returns the feed's canonical
    dict on success, or an error payload when nothing matches.
    """
    url = request.REQUEST['address']
    skip = int(request.REQUEST.get('offset', 0))
    match = Feed.get_feed_from_url(url, create=False, aggressive=True, offset=skip)

    if not match:
        return dict(code=-1, message="No feed found matching that XML or website address.")
    return match.canonical()
2011-03-04 12:27:31 -05:00
|
|
|
@json.json_view
def load_single_feed(request, feed_id):
    """Return one feed's full canonical payload plus the requesting
    user's trained classifiers for that feed."""
    requesting_user = get_user(request)
    feed = get_object_or_404(Feed, pk=feed_id)

    payload = feed.canonical(full=True)
    payload['classifiers'] = get_classifiers_for_user(requesting_user, feed.pk)
    return payload
2010-12-23 16:02:17 -05:00
|
|
|
@json.json_view
def feed_autocomplete(request):
    """Autocomplete search for feeds.

    Tries the search term against feed_address, then feed_link, then
    feed_title, stopping at the first field that yields matches.
    Only feeds with more than one subscriber are considered, and feeds
    whose matched field looks private ('token'/'private') are excluded.
    Returns up to five results shaped for a jQuery-UI autocomplete.
    """
    term = request.GET['term']

    matches = []
    for lookup_field in ('feed_address', 'feed_link', 'feed_title'):
        if matches:
            break  # an earlier field already produced results
        contains = '%s__icontains' % lookup_field
        matches = Feed.objects.filter(**{
            contains: term,
            'num_subscribers__gt': 1,
        }).exclude(
            Q(**{contains: 'token'}) | Q(**{contains: 'private'})
        ).only(
            'feed_title', 'feed_address', 'num_subscribers'
        ).order_by('-num_subscribers')[:5]

    logging.user(request, "~FRAdd Search: ~SB%s ~FG(%s matches)" % (term, len(matches),))

    return [{
        'value': match.feed_address,
        'label': match.feed_title,
        'num_subscribers': match.num_subscribers,
    } for match in matches]
|
2010-07-25 23:13:27 -04:00
|
|
|
@json.json_view
def load_feed_statistics(request, feed_id):
    """Assemble a feed's statistics payload: update timing, story
    volume, subscriber counts, classifier counts, and fetch history."""
    feed = get_object_or_404(Feed, pk=feed_id)

    # Refresh the cached aggregates before reporting them.
    feed.save_feed_story_history_statistics()
    feed.save_classifier_counts()

    stats = {}

    # Dates of last and next update
    stats['last_update'] = relative_timesince(feed.last_update)
    stats['next_update'] = relative_timeuntil(feed.next_scheduled_update)

    # Minutes between updates
    update_interval_minutes, _random_factor = feed.get_next_scheduled_update()
    stats['update_interval_minutes'] = update_interval_minutes

    # Stories per month - average and month-by-month breakout
    story_count_history = feed.data.story_count_history
    stats['average_stories_per_month'] = feed.average_stories_per_month
    # story_count_history may be empty/None, in which case it is passed through as-is.
    stats['story_count_history'] = story_count_history and json.decode(story_count_history)

    # Subscribers
    stats['subscriber_count'] = feed.num_subscribers
    stats['stories_last_month'] = feed.stories_last_month
    stats['last_load_time'] = feed.last_load_time
    stats['premium_subscribers'] = feed.premium_subscribers
    stats['active_subscribers'] = feed.active_subscribers

    # Classifier counts
    stats['classifier_counts'] = json.decode(feed.data.feed_classifier_counts)

    # Fetch histories
    stats['feed_fetch_history'] = MFeedFetchHistory.feed_history(feed_id)
    stats['page_fetch_history'] = MPageFetchHistory.feed_history(feed_id)

    logging.user(request, "~FBStatistics: ~SB%s ~FG(%s/%s/%s subs)" % (feed, feed.num_subscribers, feed.active_subscribers, feed.premium_subscribers,))

    return stats
2011-04-07 10:30:05 -04:00
|
|
|
|
2010-08-25 10:18:08 -04:00
|
|
|
@json.json_view
def exception_retry(request):
    """Clear a feed's exception state and force an immediate re-fetch,
    then recompute the requesting user's unread scores for it.

    POST params: ``feed_id``; ``reset_fetch`` (JSON bool) — when true,
    the feed is treated as never fetched so the fetcher starts fresh.
    """
    requesting_user = get_user(request)
    feed_id = request.POST['feed_id']
    reset_fetch = json.decode(request.POST['reset_fetch'])
    feed = get_object_or_404(Feed, pk=feed_id)

    # Wipe exception flags and schedule the feed to fetch right away.
    feed.next_scheduled_update = datetime.datetime.utcnow()
    feed.has_page_exception = False
    feed.has_feed_exception = False
    feed.active = True
    if reset_fetch:
        logging.user(request, "~FRRefreshing exception feed: ~SB%s" % (feed))
        feed.fetched_once = False
    else:
        logging.user(request, "~FRForcing refreshing feed: ~SB%s" % (feed))
        feed.fetched_once = True
    feed.save()

    # update() may return a different Feed (e.g. after deduplication).
    feed = feed.update(force=True, compute_scores=False)
    subscription = UserSubscription.objects.get(user=requesting_user, feed=feed)
    subscription.calculate_feed_scores(silent=False)

    return {'code': 1, 'feeds': {feed.pk: subscription.canonical(full=True)}}
2010-08-25 20:43:35 -04:00
|
|
|
|
|
|
|
|
|
|
|
@ajax_login_required
@json.json_view
def exception_change_feed_address(request):
    """Repair a feed stuck in an exception state by giving it a new
    feed address, merging into an existing duplicate feed if the new
    address collides with one.

    POST params: ``feed_id``, ``feed_address``. Returns 403 when the
    feed is not actually in an exception state.
    """
    feed_id = request.POST['feed_id']
    broken_feed = get_object_or_404(Feed, pk=feed_id)
    new_address = request.POST['feed_address']

    # Only feeds in an exception state may have their address rewritten.
    if not broken_feed.has_feed_exception and not broken_feed.has_page_exception:
        logging.info(" ***> [%s] ~BRIncorrect feed address change: ~SB%s" % (request.user, broken_feed))
        return HttpResponseForbidden()

    broken_feed.has_feed_exception = False
    broken_feed.active = True
    broken_feed.fetched_once = False
    broken_feed.feed_address = new_address
    broken_feed.next_scheduled_update = datetime.datetime.utcnow()
    retry_feed = broken_feed
    # save() reports the pk of a pre-existing feed with the same address, if any.
    duplicate_feed_id = broken_feed.save()
    if duplicate_feed_id:
        # Revive the original duplicate and fold this feed into it.
        original_feed = Feed.objects.get(pk=duplicate_feed_id)
        retry_feed = original_feed
        original_feed.next_scheduled_update = datetime.datetime.utcnow()
        original_feed.has_feed_exception = False
        original_feed.active = True
        original_feed.save()
        merge_feeds(original_feed.pk, broken_feed.pk)

    logging.user(request, "~FRFixing feed exception by address: ~SB%s" % (retry_feed.feed_address))
    retry_feed.update()

    subscription = UserSubscription.objects.get(user=request.user, feed=retry_feed)
    subscription.calculate_feed_scores(silent=False)

    # NOTE: keyed by the originally-posted feed's pk, matching the client's request.
    return {'code': 1, 'feeds': {broken_feed.pk: subscription.canonical(full=True)}}
2010-08-25 20:43:35 -04:00
|
|
|
|
|
|
|
@ajax_login_required
@json.json_view
def exception_change_feed_link(request):
    """Repair a feed stuck in an exception state by giving it a new
    site link, auto-discovering the feed address from that page.

    POST params: ``feed_id``, ``feed_link``. Returns ``code`` 1 when a
    feed address was discovered at the new link, -1 otherwise; 403 when
    the feed is not in an exception state.
    """
    feed_id = request.POST['feed_id']
    broken_feed = get_object_or_404(Feed, pk=feed_id)
    new_link = request.POST['feed_link']
    code = -1

    if not broken_feed.has_page_exception and not broken_feed.has_feed_exception:
        logging.info(" ***> [%s] ~BRIncorrect feed link change: ~SB%s" % (request.user, broken_feed))
        # This Forbidden-403 throws an error, which sounds pretty good to me right now
        return HttpResponseForbidden()

    retry_feed = broken_feed
    # Auto-discover the feed URL from the new site link.
    discovered_address = feedfinder.feed(new_link)
    if discovered_address:
        code = 1
        broken_feed.has_page_exception = False
        broken_feed.active = True
        broken_feed.fetched_once = False
        broken_feed.feed_link = new_link
        broken_feed.feed_address = discovered_address
        broken_feed.next_scheduled_update = datetime.datetime.utcnow()
        # save() reports the pk of a pre-existing feed with the same address, if any.
        duplicate_feed_id = broken_feed.save()
        if duplicate_feed_id:
            original_feed = Feed.objects.get(pk=duplicate_feed_id)
            retry_feed = original_feed
            original_feed.next_scheduled_update = datetime.datetime.utcnow()
            original_feed.has_page_exception = False
            original_feed.active = True
            original_feed.save()

    logging.user(request, "~FRFixing feed exception by link: ~SB%s" % (retry_feed.feed_link))
    retry_feed.update()

    subscription = UserSubscription.objects.get(user=request.user, feed=retry_feed)
    subscription.calculate_feed_scores(silent=False)

    # NOTE: keyed by the originally-posted feed's pk, matching the client's request.
    return {'code': code, 'feeds': {broken_feed.pk: subscription.canonical(full=True)}}
2011-03-01 09:59:06 -05:00
|
|
|
|
|
|
|
@login_required
def status(request):
    """Staff-only dashboard of feeds updated within the last ``minutes``
    minutes (GET param, default 10).

    Non-staff users are logged and refused with a 403.
    """
    if not request.user.is_staff:
        logging.user(request, "~SKNON-STAFF VIEWING RSS FEEDS STATUS!")
        # Was `assert False` followed by an unreachable return: that 500s in
        # normal runs and is stripped entirely under `python -O`. Return an
        # explicit 403 instead of using assert for access control.
        return HttpResponseForbidden()
    minutes = int(request.GET.get('minutes', 10))
    now = datetime.datetime.now()
    # Despite the name, this is `minutes` minutes ago, not a fixed hour.
    hour_ago = now - datetime.timedelta(minutes=minutes)
    feeds = Feed.objects.filter(last_update__gte=hour_ago).order_by('-last_update')
    return render_to_response('rss_feeds/status.xhtml', {
        'feeds': feeds
    }, context_instance=RequestContext(request))