# apps/reader/views.py
from django.shortcuts import render_to_response, get_list_or_404, get_object_or_404
from django.contrib.auth.decorators import login_required
from django.template import RequestContext
from apps.rss_feeds.models import Feed, Story
from apps.reader.models import UserSubscription, ReadStories, UserSubscriptionFolders, StoryOpinions
# 2009-06-16 03:08:55 +00:00
from utils.json import json_encode
from utils.story_functions import format_story_link_date__short, format_story_link_date__long
from utils.user_functions import get_user
from django.contrib.auth.models import User
from django.http import HttpResponse, HttpRequest
from django.core import serializers
from django.utils.safestring import mark_safe
from utils.feedcache.threading_model import fetch_feeds
import logging
# 2009-06-16 03:08:55 +00:00
import datetime
import threading
def index(request):
    """Render the main reader page with the current user's info in context."""
    context = {}
    user_info = _parse_user_info(request.user)
    context.update(user_info)
    return render_to_response('reader/feeds.xhtml', context,
                              context_instance=RequestContext(request))
def refresh_all_feeds(request):
    """Kick off a background refresh of every feed, then render the reader page.

    Passing ?force=1 forces a re-fetch even for recently fetched feeds.
    """
    force_update = request.GET.get('force', False)
    feeds = Feed.objects.all()

    # Run the (slow) refresh in a daemon thread so this request returns
    # immediately instead of blocking on every feed fetch.
    t = threading.Thread(target=refresh_feeds, args=[feeds, force_update])
    t.setDaemon(True)
    t.start()

    context = {}
    user_info = _parse_user_info(request.user)
    context.update(user_info)
    return render_to_response('reader/feeds.xhtml', context,
                              context_instance=RequestContext(request))
def refresh_feed(request):
    """Synchronously refresh a single feed, then render the reader page."""
    feed_id = request.REQUEST['feed_id']
    force_update = request.GET.get('force', False)
    feeds = Feed.objects.filter(id=feed_id)
    # Bug fix: refresh_feeds() updates in place and returns None, so don't
    # rebind `feeds` to its return value.
    refresh_feeds(feeds, force_update)

    context = {}
    user_info = _parse_user_info(request.user)
    context.update(user_info)
    return render_to_response('reader/feeds.xhtml', context,
                              context_instance=RequestContext(request))
def refresh_feeds(feeds, force=False):
    """Update every feed in `feeds`, optionally forcing a re-fetch.

    Returns None; feeds are mutated in place via their update() method.
    """
    for feed in feeds:
        feed.update(force)
def load_feeds(request):
    """Return the user's subscribed feeds grouped by folder, as JSON.

    Each entry is {'folder': name, 'feeds': [Feed, ...]}; folders and the
    feeds inside them are alphabetized case-insensitively.
    """
    user = get_user(request)
    us = UserSubscriptionFolders.objects.select_related('feed', 'user_sub').filter(
        user=user
    )

    feeds = []
    folders = []
    for sub in us:
        try:
            sub.feed.unread_count = sub.user_sub.unread_count
        except Exception:
            # Folder row points at a feed with no live subscription behind it;
            # log and drop the orphaned folder entry. (Was a bare except:.)
            logging.warn("Subscription %s does not exist outside of Folder." % (sub.feed))
            sub.delete()
        else:
            if sub.folder not in folders:
                folders.append(sub.folder)
                feeds.append({'folder': sub.folder, 'feeds': []})
            for folder in feeds:
                if folder['folder'] == sub.folder:
                    folder['feeds'].append(sub.feed)

    # Alphabetize folders, then feeds inside folders. key= gives the same
    # ordering as the old cmp()-based sorts and also works on Python 3.
    feeds.sort(key=lambda f: f['folder'].lower())
    for folder in feeds:
        folder['feeds'].sort(key=lambda f: f.feed_title.lower())
        for f in folder['feeds']:
            f.feed_address = mark_safe(f.feed_address)

    data = json_encode(feeds)
    return HttpResponse(data, mimetype='text/html')
def load_single_feed(request):
    """Return one feed's stories as JSON, annotated with read state and opinions.

    Supports offset/limit/page pagination parameters and ?force=1 to re-fetch
    the feed before loading.
    """
    user = get_user(request)
    offset = int(request.REQUEST.get('offset', 0))
    limit = int(request.REQUEST.get('limit', 25))
    page = int(request.REQUEST.get('page', 0))
    if page:
        offset = limit * page

    # Bug fix: coerce to int -- request values are strings, so the
    # `sub.feed_id == feed_id` comparison below never matched before.
    feed_id = int(request.REQUEST['feed_id'])
    # .values() yields plain dicts, not Story instances, so every per-story
    # access below must use subscript syntax (the old attribute access was
    # an AttributeError). list() pins down one evaluation of the queryset.
    stories = list(Story.objects.filter(story_feed=feed_id).values(
        'story_feed', 'story_date', 'story_permalink', 'story_title',
        'story_content', 'story_author', 'id')[offset:offset + limit])

    feed = Feed.objects.get(id=feed_id)
    force_update = request.GET.get('force', False)
    if force_update:
        fetch_feeds(force_update, [feed])

    us = UserSubscription.objects.filter(user=user)
    for sub in us:
        if sub.feed_id == feed_id:
            logging.debug("Feed: " + feed.feed_title)
            user_readstories = ReadStories.objects.filter(
                user=user,
                feed=feed_id
            )
            story_opinions = StoryOpinions.objects.filter(
                user=user,
                feed=feed_id
            )
            # Build lookups once instead of scanning per story. Bug fix:
            # opinions were matched with `o.story == story`, comparing a
            # model instance to a values() dict (never equal); match on id.
            read_story_ids = set(u_rs.story_id for u_rs in user_readstories)
            opinion_by_story_id = dict((o.story_id, o.opinion) for o in story_opinions)

            for story in stories:
                story['short_parsed_date'] = format_story_link_date__short(story['story_date'])
                story['long_parsed_date'] = format_story_link_date__long(story['story_date'])
                story['story_feed_title'] = feed.feed_title
                story['story_feed_link'] = mark_safe(feed.feed_link)
                story['story_permalink'] = mark_safe(story['story_permalink'])
                if story['id'] in opinion_by_story_id:
                    story['opinion'] = opinion_by_story_id[story['id']]

                if story['story_date'] < sub.mark_read_date:
                    story['read_status'] = 1
                elif story['story_date'] > sub.last_read_date:
                    story['read_status'] = 0
                else:
                    if story['id'] in read_story_ids:
                        logging.debug("READ: ")
                        story['read_status'] = 1
                    else:
                        story['read_status'] = 0
                logging.debug("Story: %s" % story)

    data = json_encode(stories)
    return HttpResponse(data, mimetype='text/html')
@login_required
def mark_story_as_read(request):
    """Mark one story as read for the logged-in user.

    JSON codes: 0 = newly marked read, 1 = was already read, 2 = save failed.
    """
    story_id = request.REQUEST['story_id']
    story = Story.objects.select_related("story_feed").get(id=story_id)
    already_read = ReadStories.objects.filter(
        story=story_id, user=request.user, feed=story.story_feed).count()
    logging.debug('Marking as read: %s' % already_read)

    if already_read:
        data = json_encode(dict(code=1))
    else:
        us = UserSubscription.objects.get(
            feed=story.story_feed,
            user=request.user
        )
        us.mark_read()
        logging.debug("Marked Read: " + str(story_id) + ' ' + str(story.id))

        m = ReadStories(story=story, user=request.user, feed=story.story_feed)
        data = json_encode(dict(code=0))
        try:
            m.save()
        except Exception:
            # Was a bare except:; narrowed so Ctrl-C etc. aren't swallowed.
            data = json_encode(dict(code=2))
    return HttpResponse(data)
@login_required
def mark_feed_as_read(request):
    """Mark every story in a feed as read for the logged-in user.

    JSON codes: 0 = success, 1 = marking failed.
    """
    feed_id = int(request.REQUEST['feed_id'])
    feed = Feed.objects.get(id=feed_id)
    us = UserSubscription.objects.get(feed=feed, user=request.user)
    code = 0
    try:
        # Bug fix: the old try block called m.save() on an undefined `m`
        # (copy-paste from mark_story_as_read), so it always reported
        # failure. Guard the actual mark-read work instead.
        us.mark_feed_read()
        ReadStories.objects.filter(user=request.user, feed=feed_id).delete()
    except Exception:
        code = 1
    data = json_encode(dict(code=code))
    return HttpResponse(data)
@login_required
def mark_story_as_like(request):
    """Record a thumbs-up (+1) opinion on the requested story."""
    return mark_story_with_opinion(request, 1)
@login_required
def mark_story_as_dislike(request):
    """Record a thumbs-down (-1) opinion on the requested story."""
    return mark_story_with_opinion(request, -1)
@login_required
def mark_story_with_opinion(request, opinion):
    """Save a like (+1) / dislike (-1) opinion on a story for this user.

    Updates an existing opinion in place; creates one if none exists.
    JSON codes: 0 = saved/updated, 2 = save failed.
    """
    story_id = request.REQUEST['story_id']
    story = Story.objects.select_related("story_feed").get(id=story_id)
    # Bug fix: .get() raises DoesNotExist on a first-time rating; use
    # filter() so "no previous opinion" is a normal case, not a crash.
    previous_opinions = StoryOpinions.objects.filter(
        story=story, user=request.user, feed=story.story_feed)
    previous_opinion = previous_opinions[0] if previous_opinions else None

    if previous_opinion and previous_opinion.opinion != opinion:
        previous_opinion.opinion = opinion
        data = json_encode(dict(code=0))
        previous_opinion.save()
        logging.debug("Changed Opinion: " + str(previous_opinion.opinion) + ' ' + str(opinion))
    elif previous_opinion:
        # Same opinion re-submitted; bug fix: don't insert a duplicate row.
        data = json_encode(dict(code=0))
    else:
        logging.debug("Marked Opinion: " + str(story_id) + ' ' + str(opinion))
        m = StoryOpinions(story=story, user=request.user,
                          feed=story.story_feed, opinion=opinion)
        data = json_encode(dict(code=0))
        try:
            m.save()
        except Exception:
            data = json_encode(dict(code=2))
    return HttpResponse(data)
@login_required
def get_read_feed_items(request, username):
    # NOTE(review): appears to be an unfinished stub -- it fetches all feeds
    # (404 if none exist) but never returns an HttpResponse, which is not a
    # valid Django view. Confirm intent before wiring this into urls.py.
    feeds = get_list_or_404(Feed)
def _parse_user_info(user):
    """Build the JSON-encoded user-info dict consumed by the reader templates.

    Each field is individually json_encode()d so the template can embed the
    values directly into inline JavaScript.
    """
    return {
        'user_info': {
            'is_anonymous': json_encode(user.is_anonymous()),
            'is_authenticated': json_encode(user.is_authenticated()),
            'username': json_encode(user.username if user.is_authenticated() else 'Anonymous'),
        }
    }