Merge branch 'master' into thumbnails
* master: (112 commits)
  Feedfinder user agent is presenting problems. Changing to 'NewsBlur Feed Finder'
  Scrubbing newsletters.
  Ignoring more dirs in textmate.
  Originals
  Handling missing OAuth token for twitter.
  Fixing PuSH issue with urls for hubs that look like: '//alert-hub.appspot.com'
  Handling mis-shapen icons.
  Handling IncompleteRead from requests during icon fetch.
  Handling missing rss URLs when checking site from bookmarklet.
  Better handling of PuSH errors.
  Fixing add url when searching for rss feeds.
  app image gets pull+pip.
  Upgrade HTTP libs.
  Remove debug status widget for overlapping story syncs.
  Using latest app image.
  Make reading session inits synchro with story cursor fetches.
  Re-enable stateful tracking of feed story progress between feed fetches.
  Cleanup some timegate code for easier debug.
  Can't use 'sender' as it's gets mucked up with forwarding rules. Switching to reconstructing email from 'from' field.
  Better formatting of plaintext newsletters.
  Handling plain text email newsletters.
  ...
.gitignore (3 changes, vendored)

@@ -35,6 +35,7 @@ config/secrets
templates/maintenance_on.html
vendor/mms-agent/settings.py
apps/social/spam.py
venv

# ----------------------
# Android

@@ -58,7 +59,7 @@ clients/android/NewsBlurTest/gen/
# Local configuration file (sdk path, etc)
clients/android/NewsBlur/local.properties
clients/android/NewsBlurTest/local.properties
originals
/originals
media/safari/NewsBlur.safariextz

# IDE files
@@ -1,2 +1,2 @@
exclude = '{$exclude,*.tgz,*.gz,static/*.js,static/*.css}'
excludeDirectories = "{$excludeDirectories,logs,data,clients/android,media/fonts,node_modules}"
excludeDirectories = "{$excludeDirectories,logs,data,clients/android,media/fonts,node_modules,venv,vendor,fonts,clients,migrations,fixtures}"
@@ -183,6 +183,17 @@ class API:
data.append( ("feeds", feed) )
return data

@request('reader/mark_story_hashes_as_read')
def mark_story_hashes_as_read(self, story_hashes):
'''
Mark stories as read using their unique story_hash.
'''

data = []
for hash in story_hashes:
data.append( ("story_hash", hash) )
return data

@request('reader/mark_story_as_read')
def mark_story_as_read(self, feed_id, story_ids):
'''
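A minimal sketch of exercising the reader/mark_story_hashes_as_read endpoint named by the decorator above, using the requests library directly; the base URL, the /api/login call, the credentials, and the story hashes are illustrative assumptions, not part of this change:

    import requests

    session = requests.Session()
    # Assumed login step; adjust to however the client authenticates.
    session.post("https://www.newsblur.com/api/login",
                 data={"username": "example_user", "password": "example_pass"})

    # The method above builds repeated ("story_hash", hash) pairs, so the raw
    # request is a POST with one story_hash parameter per story.
    response = session.post(
        "https://www.newsblur.com/reader/mark_story_hashes_as_read",
        data=[("story_hash", "42:deadbeef"), ("story_hash", "42:cafef00d")],
    )
    print(response.status_code)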
apps/newsletters/__init__.py (new empty file)
apps/newsletters/models.py (new file, 127 lines)

@@ -0,0 +1,127 @@
import datetime
import re
import redis
from cgi import escape
from django.db import models
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.conf import settings
from django.utils.html import linebreaks
from apps.rss_feeds.models import Feed, MStory, MFetchHistory
from apps.reader.models import UserSubscription, UserSubscriptionFolders
from apps.profile.models import Profile
from utils import log as logging
from utils.story_functions import linkify
from utils.scrubber import Scrubber

class EmailNewsletter:

    def receive_newsletter(self, params):
        user = self.user_from_email(params['recipient'])
        if not user:
            return

        sender_name, sender_username, sender_domain = self.split_sender(params['from'])
        feed_address = self.feed_address(user, "%s@%s" % (sender_username, sender_domain))

        usf = UserSubscriptionFolders.objects.get(user=user)
        usf.add_folder('', 'Newsletters')

        try:
            feed = Feed.objects.get(feed_address=feed_address)
        except Feed.DoesNotExist:
            feed = Feed.objects.create(feed_address=feed_address,
                                       feed_link='http://' + sender_domain,
                                       feed_title=sender_name,
                                       fetched_once=True,
                                       known_good=True)
            feed.update()
            logging.user(user, "~FCCreating newsletter feed: ~SB%s" % (feed))
            r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
            r.publish(user.username, 'reload:%s' % feed.pk)
        try:
            usersub = UserSubscription.objects.get(user=user, feed=feed)
        except UserSubscription.DoesNotExist:
            _, _, usersub = UserSubscription.add_subscription(
                user=user,
                feed_address=feed_address,
                folder='Newsletters'
            )

        story_hash = MStory.ensure_story_hash(params['signature'], feed.pk)
        story_content = self.get_content(params)
        story_content = self.clean_content(story_content)
        story_params = {
            "story_feed_id": feed.pk,
            "story_date": datetime.datetime.fromtimestamp(int(params['timestamp'])),
            "story_title": params['subject'],
            "story_content": story_content,
            "story_author_name": escape(params['from']),
            "story_permalink": reverse('newsletter-story',
                                       kwargs={'story_hash': story_hash}),
            "story_guid": params['signature'],
        }
        try:
            story = MStory.objects.get(story_hash=story_hash)
        except MStory.DoesNotExist:
            story = MStory(**story_params)
            story.save()

        usersub.needs_unread_recalc = True
        usersub.save()

        self.publish_to_subscribers(feed)

        MFetchHistory.add(feed_id=feed.pk, fetch_type='push')
        logging.user(user, "~FCNewsletter feed story: ~SB%s~SN / ~SB%s" % (story.story_title, feed))

        return story

    def user_from_email(self, email):
        tokens = re.search('(\w+)\+(\w+)@newsletters.newsblur.com', email)
        if not tokens:
            return

        username, secret_token = tokens.groups()
        try:
            profiles = Profile.objects.filter(secret_token=secret_token)
            if not profiles:
                return
            profile = profiles[0]
        except Profile.DoesNotExist:
            return

        return profile.user

    def feed_address(self, user, sender):
        return 'newsletter:%s:%s' % (user.pk, sender)

    def split_sender(self, sender):
        tokens = re.search('(.*?) <(.*?)@(.*?)>', sender)
        # if not tokens:
        #     name, domain = params['sender'].split('@')
        #     return name, sender, domain
        return tokens.group(1), tokens.group(2), tokens.group(3)

    def get_content(self, params):
        if 'body-html' in params:
            return params['body-html']
        if 'stripped-html' in params:
            return linkify(linebreaks(params['stripped-html']))
        if 'body-plain' in params:
            return linkify(linebreaks(params['body-plain']))

    def clean_content(self, content):
        scrubber = Scrubber()
        return scrubber.scrub(content)

    def publish_to_subscribers(self, feed):
        try:
            r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
            listeners_count = r.publish(str(feed.pk), 'story:new')
            if listeners_count:
                logging.debug(" ---> [%-30s] ~FMPublished to %s subscribers" % (feed.title[:30], listeners_count))
        except redis.ConnectionError:
            logging.debug(" ***> [%-30s] ~BMRedis is unavailable for real-time." % (feed.title[:30],))
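For reference, a standalone sketch of the two address-parsing helpers defined above; the example sender and recipient strings are made up, and the secret-token lookup against Profile is omitted:

    import re

    def split_sender(sender):
        # Same pattern as EmailNewsletter.split_sender above.
        tokens = re.search(r'(.*?) <(.*?)@(.*?)>', sender)
        return tokens.group(1), tokens.group(2), tokens.group(3)

    print(split_sender("Example Digest <digest@example.com>"))
    # ('Example Digest', 'digest', 'example.com')

    # Same pattern as EmailNewsletter.user_from_email above: username + secret token.
    recipient = "alice+0123456789ab@newsletters.newsblur.com"
    print(re.search(r'(\w+)\+(\w+)@newsletters.newsblur.com', recipient).groups())
    # ('alice', '0123456789ab')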
apps/newsletters/tests.py (new file, 16 lines)

@@ -0,0 +1,16 @@
"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".

Replace this with more appropriate tests for your application.
"""

from django.test import TestCase


class SimpleTest(TestCase):
    def test_basic_addition(self):
        """
        Tests that 1 + 1 always equals 2.
        """
        self.assertEqual(1 + 1, 2)
apps/newsletters/urls.py (new file, 7 lines)

@@ -0,0 +1,7 @@
from django.conf.urls import *
from apps.newsletters import views

urlpatterns = patterns('',
    url(r'^receive/?$', views.newsletter_receive, name='newsletter-receive'),
    url(r'^story/(?P<story_hash>[\w:]+)/?$', views.newsletter_story, name='newsletter-story'),
)
apps/newsletters/views.py (new file, 55 lines)

@@ -0,0 +1,55 @@
from pprint import pprint
from django.http import HttpResponse, Http404
from django.conf import settings
from utils import log as logging
from apps.newsletters.models import EmailNewsletter
from apps.rss_feeds.models import Feed, MStory

def newsletter_receive(request):
    # params = {
    # 'stripped-signature':'Thanks,\nBob',
    # 'From':'Test mailer <samuel@ofbrooklyn.com>',
    # 'attachment-count':'2',
    # 'To':'Alice <alice@newsletters.newsblur.com>',
    # 'subject':'Test Newsletter #2',
    # 'from':'Test mailer <samuel@ofbrooklyn.com>',
    # 'User-Agent':'Mozilla/5.0 (X11; Linux x86_64; rv:17.0) Gecko/20130308 Thunderbird/17.0.4',
# 'stripped-html':'<html><head><meta content="text/html; charset=ISO-8859-1" http-equiv="Content-Type"></head><body text="#000000" bgcolor="#FFFFFF">\n <div class="moz-cite-prefix">\n <div style="color: rgb(34, 34, 34); font-family: arial,\n sans-serif; font-size: 12.666666984558105px; font-style: normal;\n font-variant: normal; font-weight: normal; letter-spacing:\n normal; line-height: normal; orphans: auto; text-align: start;\n text-indent: 0px; text-transform: none; white-space: normal;\n widows: auto; word-spacing: 0px; -webkit-text-size-adjust: auto;\n -webkit-text-stroke-width: 0px; background-color: rgb(255, 255,\n 255);">Hi Alice,</div>\n <div style="color: rgb(34, 34, 34); font-family: arial,\n sans-serif; font-size: 12.666666984558105px; font-style: normal;\n font-variant: normal; font-weight: normal; letter-spacing:\n normal; line-height: normal; orphans: auto; text-align: start;\n text-indent: 0px; text-transform: none; white-space: normal;\n widows: auto; word-spacing: 0px; -webkit-text-size-adjust: auto;\n -webkit-text-stroke-width: 0px; background-color: rgb(255, 255,\n 255);"><br></div>\n <div style="color: rgb(34, 34, 34); font-family: arial,\n sans-serif; font-size: 12.666666984558105px; font-style: normal;\n font-variant: normal; font-weight: normal; letter-spacing:\n normal; line-height: normal; orphans: auto; text-align: start;\n text-indent: 0px; text-transform: none; white-space: normal;\n widows: auto; word-spacing: 0px; -webkit-text-size-adjust: auto;\n -webkit-text-stroke-width: 0px; background-color: rgb(255, 255,\n 255);">This is Bob.<span class="Apple-converted-space"> <img alt="" src="cid:part1.04060802.06030207@newsletters.newsblur.com" height="15" width="33"></span></div>\n <div style="color: rgb(34, 34, 34); font-family: arial,\n sans-serif; font-size: 12.666666984558105px; font-style: normal;\n font-variant: normal; font-weight: normal; letter-spacing:\n normal; line-height: normal; orphans: auto; text-align: start;\n text-indent: 0px; text-transform: none; white-space: normal;\n widows: auto; word-spacing: 0px; -webkit-text-size-adjust: auto;\n -webkit-text-stroke-width: 0px; background-color: rgb(255, 255,\n 255);"><br>\n I also attached a file.<br><br></div>\n <div style="color: rgb(34, 34, 34); font-family: arial,\n sans-serif; font-size: 12.666666984558105px; font-style: normal;\n font-variant: normal; font-weight: normal; letter-spacing:\n normal; line-height: normal; orphans: auto; text-align: start;\n text-indent: 0px; text-transform: none; white-space: normal;\n widows: auto; word-spacing: 0px; -webkit-text-size-adjust: auto;\n -webkit-text-stroke-width: 0px; background-color: rgb(255, 255,\n 255);">Thanks,</div>\n <div style="color: rgb(34, 34, 34); font-family: arial,\n sans-serif; font-size: 12.666666984558105px; font-style: normal;\n font-variant: normal; font-weight: normal; letter-spacing:\n normal; line-height: normal; orphans: auto; text-align: start;\n text-indent: 0px; text-transform: none; white-space: normal;\n widows: auto; word-spacing: 0px; -webkit-text-size-adjust: auto;\n -webkit-text-stroke-width: 0px; background-color: rgb(255, 255,\n 255);">Bob</div>\n <br><br></div>\n <br></body></html>',
    # 'In-Reply-To':'<517AC78B.5060404@newsletters.newsblur.com>',
    # 'Date':'Fri, 26 Apr 2013 11:50:29 -0700',
    # 'Message-Id':'<517ACC75.5010709@newsletters.newsblur.com>',
    # 'body-plain':'Hi Alice,\n\nThis is Bob.\n\nI also attached a file.\n\nThanks,\nBob\n\nOn 04/26/2013 11:29 AM, Alice wrote:\n> Hi Bob,\n>\n> This is Alice. How are you doing?\n>\n> Thanks,\n> Alice\n\n',
    # 'Mime-Version':'1.0',
    # 'Received':'from [10.20.76.69] (Unknown [50.56.129.169]) by mxa.mailgun.org with ESMTP id 517acc75.4b341f0-worker2; Fri, 26 Apr 2013 18:50:29 -0000 (UTC)',
    # 'content-id-map':'{"<part1.04060802.06030207@newsletters.newsblur.com>": "attachment-1"}',
    # 'Sender':'bob@newsletters.newsblur.com',
    # 'timestamp':'1455054990',
# 'message-headers':'[["Received", "by luna.mailgun.net with SMTP mgrt 8788212249833; Fri, 26 Apr 2013 18:50:30 +0000"], ["Received", "from [10.20.76.69] (Unknown [50.56.129.169]) by mxa.mailgun.org with ESMTP id 517acc75.4b341f0-worker2; Fri, 26 Apr 2013 18:50:29 -0000 (UTC)"], ["Message-Id", "<517ACC75.5010709@newsletters.newsblur.com>"], ["Date", "Fri, 26 Apr 2013 11:50:29 -0700"], ["From", "Test mailer <samuel@ofbrooklyn.com>"], ["User-Agent", "Mozilla/5.0 (X11; Linux x86_64; rv:17.0) Gecko/20130308 Thunderbird/17.0.4"], ["Mime-Version", "1.0"], ["To", "Alice <alice@newsletters.newsblur.com>"], ["Subject", "Re: Sample POST request"], ["References", "<517AC78B.5060404@newsletters.newsblur.com>"], ["In-Reply-To", "<517AC78B.5060404@newsletters.newsblur.com>"], ["X-Mailgun-Variables", "{\\"my_var_1\\": \\"Mailgun Variable #1\\", \\"my-var-2\\": \\"awesome\\"}"], ["Content-Type", "multipart/mixed; boundary=\\"------------020601070403020003080006\\""], ["Sender", "bob@newsletters.newsblur.com"]]',
    # 'stripped-text':'Hi Alice,\n\nThis is Bob.\n\nI also attached a file.',
    # 'recipient':'alice+555551235342@newsletters.newsblur.com',
    # 'sender':'samuel@ofbrooklyn.com',
    # 'X-Mailgun-Variables':'{"my_var_1": "Mailgun Variable #1", "my-var-2": "awesome"}',
    # 'token':'cb2ef40ca2fee03a099f7da78ca07384228f00f023026c77a4',
# 'body-html':'<html>\n <head>\n <meta content="text/html; charset=ISO-8859-1"\n http-equiv="Content-Type">\n </head>\n <body text="#000000" bgcolor="#FFFFFF">\n <div class="moz-cite-prefix">\n <div style="color: rgb(34, 34, 34); font-family: arial,\n sans-serif; font-size: 12.666666984558105px; font-style: normal;\n font-variant: normal; font-weight: normal; letter-spacing:\n normal; line-height: normal; orphans: auto; text-align: start;\n text-indent: 0px; text-transform: none; white-space: normal;\n widows: auto; word-spacing: 0px; -webkit-text-size-adjust: auto;\n -webkit-text-stroke-width: 0px; background-color: rgb(255, 255,\n 255);">Hi Alice,</div>\n <div style="color: rgb(34, 34, 34); font-family: arial,\n sans-serif; font-size: 12.666666984558105px; font-style: normal;\n font-variant: normal; font-weight: normal; letter-spacing:\n normal; line-height: normal; orphans: auto; text-align: start;\n text-indent: 0px; text-transform: none; white-space: normal;\n widows: auto; word-spacing: 0px; -webkit-text-size-adjust: auto;\n -webkit-text-stroke-width: 0px; background-color: rgb(255, 255,\n 255);"><br>\n </div>\n <div style="color: rgb(34, 34, 34); font-family: arial,\n sans-serif; font-size: 12.666666984558105px; font-style: normal;\n font-variant: normal; font-weight: normal; letter-spacing:\n normal; line-height: normal; orphans: auto; text-align: start;\n text-indent: 0px; text-transform: none; white-space: normal;\n widows: auto; word-spacing: 0px; -webkit-text-size-adjust: auto;\n -webkit-text-stroke-width: 0px; background-color: rgb(255, 255,\n 255);">This is Bob.<span class="Apple-converted-space"> <img\n alt="" src="cid:part1.04060802.06030207@newsletters.newsblur.com"\n height="15" width="33"></span></div>\n <div style="color: rgb(34, 34, 34); font-family: arial,\n sans-serif; font-size: 12.666666984558105px; font-style: normal;\n font-variant: normal; font-weight: normal; letter-spacing:\n normal; line-height: normal; orphans: auto; text-align: start;\n text-indent: 0px; text-transform: none; white-space: normal;\n widows: auto; word-spacing: 0px; -webkit-text-size-adjust: auto;\n -webkit-text-stroke-width: 0px; background-color: rgb(255, 255,\n 255);"><br>\n I also attached a file.<br>\n <br>\n </div>\n <div style="color: rgb(34, 34, 34); font-family: arial,\n sans-serif; font-size: 12.666666984558105px; font-style: normal;\n font-variant: normal; font-weight: normal; letter-spacing:\n normal; line-height: normal; orphans: auto; text-align: start;\n text-indent: 0px; text-transform: none; white-space: normal;\n widows: auto; word-spacing: 0px; -webkit-text-size-adjust: auto;\n -webkit-text-stroke-width: 0px; background-color: rgb(255, 255,\n 255);">Thanks,</div>\n <div style="color: rgb(34, 34, 34); font-family: arial,\n sans-serif; font-size: 12.666666984558105px; font-style: normal;\n font-variant: normal; font-weight: normal; letter-spacing:\n normal; line-height: normal; orphans: auto; text-align: start;\n text-indent: 0px; text-transform: none; white-space: normal;\n widows: auto; word-spacing: 0px; -webkit-text-size-adjust: auto;\n -webkit-text-stroke-width: 0px; background-color: rgb(255, 255,\n 255);">Bob</div>\n <br>\n On 04/26/2013 11:29 AM, Alice wrote:<br>\n </div>\n <blockquote cite="mid:517AC78B.5060404@newsletters.newsblur.com" type="cite">Hi\n Bob,\n <br>\n <br>\n This is Alice. How are you doing?\n <br>\n <br>\n Thanks,\n <br>\n Alice\n <br>\n </blockquote>\n <br>\n </body>\n</html>\n',
    # 'References':'<517AC78B.5060404@newsletters.newsblur.com>',
    # 'signature':'1369fa4dcc7de7fac51f5bb408bd5c9daa8730e80d394e8a128658d74e669049',
    # 'Content-Type':'multipart/mixed; boundary="------------020601070403020003080006"',
    # 'Subject':'Test Newsletter #1'
    # }
    response = HttpResponse('OK')

    if settings.DEBUG:
        logging.debug(" ---> Email newsletter: %s" % pprint(request.REQUEST))

    email_newsletter = EmailNewsletter()
    story = email_newsletter.receive_newsletter(request.REQUEST)

    if not story:
        raise Http404

    return response

def newsletter_story(request, story_hash):
    story = MStory.objects.get(story_hash=story_hash)
    story = Feed.format_story(story)
    return HttpResponse(story['story_content'])
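A hypothetical sketch of posting a Mailgun-style payload to the receive view above; the /newsletters/ URL prefix is an assumption about where apps/newsletters/urls.py is mounted, and the field values are placeholders modeled on the commented-out sample:

    import requests

    payload = {
        "recipient": "alice+0123456789ab@newsletters.newsblur.com",   # placeholder token
        "from": "Test mailer <samuel@ofbrooklyn.com>",
        "subject": "Test Newsletter #1",
        "timestamp": "1455054990",
        "signature": "1369fa4dcc7de7fac51f5bb408bd5c9daa8730e80d394e8a128658d74e669049",
        "body-plain": "Hi Alice,\n\nThis is Bob.\n\nThanks,\nBob\n",
    }

    # newsletter_receive() returns "OK" when a story is created and raises Http404 otherwise.
    response = requests.post("https://www.newsblur.com/newsletters/receive/", data=payload)
    print(response.status_code, response.text)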
@@ -68,7 +68,7 @@ class PushSubscriptionManager(models.Manager):
elif response and response.status_code == 202: # async verification
subscription.verified = False
else:
error = response and response.content or ""
error = response and response.text or ""
if not force_retry and 'You may only subscribe to' in error:
extracted_topic = re.search("You may only subscribe to (.*?) ", error)
if extracted_topic:

@@ -76,7 +76,7 @@ class PushSubscriptionManager(models.Manager):
feed=feed, hub=hub, force_retry=True)
else:
logging.debug(u' ---> [%-30s] ~FR~BKFeed failed to subscribe to push: %s (code: %s)' % (
unicode(subscription.feed)[:30], error, response and response.status_code))
unicode(subscription.feed)[:30], error[:100], response and response.status_code))

subscription.save()
feed.setup_push()

@@ -141,7 +141,10 @@ class PushSubscription(models.Model):
hub_url = link['href']
elif link['rel'] == 'self':
self_url = link['href']

if hub_url and hub_url.startswith('//'):
hub_url = "http:%s" % hub_url

needs_update = False
if hub_url and self.hub != hub_url:
# hub URL has changed; let's update our subscription
@@ -21,6 +21,14 @@
"user": 1
}
},
{
"pk": 2,
"model": "reader.usersubscriptionfolders",
"fields": {
"folders": "[5299728, 644144, 1187026, {\"Brainiacs & Opinion\": [569, 38, 3581, 183139, 1186180, 15]}, {\"Science & Technology\": [731503, 140145, 1272495, 76, 161, 39, {\"Hacker\": [5985150, 3323431]}]}, {\"Humor\": [212379, 3530, 5994357]}, {\"Videos\": [3240, 5168]}]",
"user": 2
}
},

{
"pk": 2,

@@ -161,6 +169,24 @@
"email": "samuel@newsblur.com",
"date_joined": "2009-01-04 17:32:58"
}
},
{
"pk": 2,
"model": "auth.user",
"fields": {
"username": "Dejal",
"first_name": "",
"last_name": "",
"is_active": 1,
"is_superuser": 1,
"is_staff": 1,
"last_login": "2009-04-07 19:22:24",
"groups": [],
"user_permissions": [],
"password": "sha1$7b94b$ac9e6cf08d0fa16a67e56e319c0935aeb26db2a2",
"email": "dejal@newsblur.com",
"date_joined": "2009-01-04 17:32:58"
}
},
{
"pk": 1206,
@@ -1288,13 +1288,17 @@ class UserSubscriptionFolders(models.Model):

return _arrange_folder(user_sub_folders)

def flatten_folders(self, feeds=None):
def flatten_folders(self, feeds=None, inactive_feeds=None):
folders = json.decode(self.folders)
flat_folders = {" ": []}
if feeds and not inactive_feeds:
inactive_feeds = []

def _flatten_folders(items, parent_folder="", depth=0):
for item in items:
if isinstance(item, int) and ((not feeds) or (feeds and item in feeds)):
if (isinstance(item, int) and
(not feeds or
(item in feeds or item in inactive_feeds))):
if not parent_folder:
parent_folder = ' '
if parent_folder in flat_folders:

@@ -1317,6 +1321,7 @@ class UserSubscriptionFolders(models.Model):
return flat_folders

def delete_feed(self, feed_id, in_folder, commit_delete=True):
feed_id = int(feed_id)
def _find_feed_in_folders(old_folders, folder_name='', multiples_found=False, deleted=False):
new_folders = []
for k, folder in enumerate(old_folders):

@@ -1462,6 +1467,7 @@ class UserSubscriptionFolders(models.Model):
logging.user(self.user, "~FBMoving ~SB%s~SN feeds to folder: ~SB%s" % (
len(feeds_by_folder), to_folder))
for feed_id, in_folder in feeds_by_folder:
feed_id = int(feed_id)
self.move_feed_to_folder(feed_id, in_folder, to_folder)

return self
@@ -96,6 +96,22 @@ class ReaderTest(TestCase):
response = self.client.get(reverse('load-feeds'))
feeds = json.decode(response.content)
self.assertEquals(feeds['folders'], [2, 3, 8, 9, {'Tech': [1, 4, 5, {'Deep Tech': [6, 7]}]}, {'Blogs': [8, 9]}])

def test_move_feeds_by_folder(self):
self.client.login(username='Dejal', password='test')

response = self.client.get(reverse('load-feeds'))
feeds = json.decode(response.content)
self.assertEquals(feeds['folders'], [5299728, 644144, 1187026, {"Brainiacs & Opinion": [569, 38, 3581, 183139, 1186180, 15]}, {"Science & Technology": [731503, 140145, 1272495, 76, 161, 39, {"Hacker": [5985150, 3323431]}]}, {"Humor": [212379, 3530, 5994357]}, {"Videos": [3240, 5168]}])

# Move feeds by folder
response = self.client.post(reverse('move-feeds-by-folder-to-folder'), {u'feeds_by_folder': u'[\n [\n "5994357",\n "Humor"\n ],\n [\n "3530",\n "Humor"\n ]\n]', u'to_folder': u'Brainiacs & Opinion'})
response = json.decode(response.content)
self.assertEquals(response['code'], 1)

response = self.client.get(reverse('load-feeds'))
feeds = json.decode(response.content)
self.assertEquals(feeds['folders'], [5299728, 644144, 1187026, {"Brainiacs & Opinion": [569, 38, 3581, 183139, 1186180, 15, 5994357, 3530]}, {"Science & Technology": [731503, 140145, 1272495, 76, 161, 39, {"Hacker": [5985150, 3323431]}]}, {"Humor": [212379]}, {"Videos": [3240, 5168]}])

def test_load_single_feed(self):
# from django.conf import settings
@@ -322,8 +322,10 @@ def load_feeds_flat(request):
user = request.user
include_favicons = is_true(request.REQUEST.get('include_favicons', False))
update_counts = is_true(request.REQUEST.get('update_counts', True))
include_inactive = is_true(request.REQUEST.get('include_inactive', False))

feeds = {}
inactive_feeds = {}
day_ago = datetime.datetime.now() - datetime.timedelta(days=1)
scheduled_feeds = []
iphone_version = "2.1" # Preserved forever. Don't change.

@@ -345,7 +347,9 @@ def load_feeds_flat(request):
if not user_subs and folders:
folders.auto_activate()
user_subs = UserSubscription.objects.select_related('feed').filter(user=user, active=True)

if include_inactive:
inactive_subs = UserSubscription.objects.select_related('feed').filter(user=user, active=False)

for sub in user_subs:
if update_counts and sub.needs_unread_recalc:
sub.calculate_feed_scores(silent=True)

@@ -357,14 +361,21 @@ def load_feeds_flat(request):
elif sub.feed.next_scheduled_update < day_ago:
scheduled_feeds.append(sub.feed.pk)

if include_inactive:
for sub in inactive_subs:
inactive_feeds[sub.feed_id] = sub.canonical(include_favicon=include_favicons)

if len(scheduled_feeds) > 0 and request.user.is_authenticated():
logging.user(request, "~SN~FMTasking the scheduling immediate fetch of ~SB%s~SN feeds..." %
len(scheduled_feeds))
ScheduleImmediateFetches.apply_async(kwargs=dict(feed_ids=scheduled_feeds, user_id=user.pk))

flat_folders = []
flat_folders_with_inactive = []
if folders:
flat_folders = folders.flatten_folders(feeds=feeds)
flat_folders_with_inactive = folders.flatten_folders(feeds=feeds,
inactive_feeds=inactive_feeds)

social_params = {
'user_id': user.pk,

@@ -382,12 +393,14 @@ def load_feeds_flat(request):
if not user_subs:
categories = MCategory.serialize()

logging.user(request, "~FB~SBLoading ~FY%s~FB/~FM%s~FB feeds/socials ~FMflat~FB%s" % (
len(feeds.keys()), len(social_feeds), '. ~FCUpdating counts.' if update_counts else ''))
logging.user(request, "~FB~SBLoading ~FY%s~FB/~FM%s~FB/~FR%s~FB feeds/socials/inactive ~FMflat~FB%s" % (
len(feeds.keys()), len(social_feeds), len(inactive_feeds), '. ~FCUpdating counts.' if update_counts else ''))

data = {
"flat_folders": flat_folders,
"feeds": feeds,
"flat_folders_with_inactive": flat_folders_with_inactive,
"feeds": feeds if not include_inactive else {"0": "Don't include `include_inactive=true` if you want active feeds."},
"inactive_feeds": inactive_feeds if include_inactive else {"0": "Include `include_inactive=true`"},
"social_feeds": social_feeds,
"social_profile": social_profile,
"social_services": social_services,

@@ -541,7 +554,7 @@ def load_single_feed(request, feed_id):
offset = limit * (page-1)
order = request.REQUEST.get('order', 'newest')
read_filter = request.REQUEST.get('read_filter', 'all')
query = request.REQUEST.get('query')
query = request.REQUEST.get('query', '').strip()
include_story_content = is_true(request.REQUEST.get('include_story_content', True))
include_hidden = is_true(request.REQUEST.get('include_hidden', False))
message = None

@@ -793,7 +806,7 @@ def load_starred_stories(request):
offset = int(request.REQUEST.get('offset', 0))
limit = int(request.REQUEST.get('limit', 10))
page = int(request.REQUEST.get('page', 0))
query = request.REQUEST.get('query')
query = request.REQUEST.get('query', '').strip()
order = request.REQUEST.get('order', 'newest')
tag = request.REQUEST.get('tag')
story_hashes = request.REQUEST.getlist('h')[:100]

@@ -1097,7 +1110,7 @@ def load_read_stories(request):
limit = int(request.REQUEST.get('limit', 10))
page = int(request.REQUEST.get('page', 0))
order = request.REQUEST.get('order', 'newest')
query = request.REQUEST.get('query')
query = request.REQUEST.get('query', '').strip()
now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone)
message = None
if page: offset = limit * (page - 1)

@@ -1185,7 +1198,7 @@ def load_river_stories__redis(request):
page = int(request.REQUEST.get('page', 1))
order = request.REQUEST.get('order', 'newest')
read_filter = request.REQUEST.get('read_filter', 'unread')
query = request.REQUEST.get('query')
query = request.REQUEST.get('query', '').strip()
include_hidden = is_true(request.REQUEST.get('include_hidden', False))
now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone)
usersubs = []
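A brief sketch of the new include_inactive flag on load_feeds_flat from the hunks above; the endpoint path and the pre-authenticated session are assumptions:

    import requests

    session = requests.Session()  # assumed to be already logged in
    resp = session.get("https://www.newsblur.com/reader/feeds_flat",
                       params={"include_inactive": "true"}).json()

    # With include_inactive=true, the payload carries the inactive subscriptions and a
    # parallel folder layout that includes them; "feeds" is replaced by a hint string.
    print(resp.get("inactive_feeds"))
    print(resp.get("flat_folders_with_inactive"))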
@@ -10,6 +10,7 @@ import operator
import gzip
import datetime
import requests
import httplib
from PIL import BmpImagePlugin, PngImagePlugin, Image
from socket import error as SocketError
from boto.s3.key import Key

@@ -61,11 +62,18 @@ class IconImporter(object):
if len(image_str) > 500000:
image = None
if (image and
(self.feed_icon.color != color or
(self.force or
self.feed_icon.data != image_str or
self.feed_icon.icon_url != icon_url or
self.feed_icon.not_found or
(settings.BACKED_BY_AWS.get('icons_on_s3') and not self.feed.s3_icon))):
logging.debug(" ---> [%-30s] ~SN~FBIcon difference:~FY color:%s (%s/%s) data:%s url:%s notfound:%s no-s3:%s" % (
self.feed,
self.feed_icon.color != color, self.feed_icon.color, color,
self.feed_icon.data != image_str,
self.feed_icon.icon_url != icon_url,
self.feed_icon.not_found,
settings.BACKED_BY_AWS.get('icons_on_s3') and not self.feed.s3_icon))
self.feed_icon.data = image_str
self.feed_icon.icon_url = icon_url
self.feed_icon.color = color

@@ -197,7 +205,7 @@ class IconImporter(object):
url = self._url_from_html(content)
if not url:
try:
content = requests.get(self.feed.feed_link).content
content = requests.get(self.cleaned_feed_link).content
url = self._url_from_html(content)
except (AttributeError, SocketError, requests.ConnectionError,
requests.models.MissingSchema, requests.sessions.InvalidSchema,

@@ -205,12 +213,19 @@ class IconImporter(object):
requests.models.InvalidURL,
requests.models.ChunkedEncodingError,
requests.models.ContentDecodingError,
httplib.IncompleteRead,
LocationParseError, OpenSSLError, PyAsn1Error), e:
logging.debug(" ---> ~SN~FRFailed~FY to fetch ~FGfeed icon~FY: %s" % e)
if url:
image, image_file = self.get_image_from_url(url)
return image, image_file, url

@property
def cleaned_feed_link(self):
if self.feed.feed_link.startswith('http'):
return self.feed.feed_link
return 'http://' + self.feed.feed_link

def fetch_image_from_path(self, path='favicon.ico', force=False):
image = None
url = None

@@ -311,8 +326,9 @@ class IconImporter(object):
# Reshape array of values to merge color bands. [[R], [G], [B], [A]] => [R, G, B, A]
if len(shape) > 2:
ar = ar.reshape(scipy.product(shape[:2]), shape[2])

# Get NUM_CLUSTERS worth of centroids.
ar = ar.astype(numpy.float)
codes, _ = scipy.cluster.vq.kmeans(ar, NUM_CLUSTERS)

# Pare centroids, removing blacks and whites and shades of really dark and really light.

@@ -340,7 +356,7 @@ class IconImporter(object):

# Find the most frequent color, based on the counts.
index_max = scipy.argmax(counts)
peak = codes[index_max]
peak = codes.astype(int)[index_max]
color = ''.join(chr(c) for c in peak).encode('hex')

return color[:6]
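For context, a small sketch of the dominant-color step touched above (the peak centroid is now cast to int before being rendered as hex); the synthetic pixel array and the vq/histogram steps are assumptions filling in code that the hunk does not show:

    import numpy as np
    import scipy.cluster.vq

    NUM_CLUSTERS = 5
    ar = np.random.randint(0, 255, size=(1000, 3)).astype(float)   # stand-in for icon pixels

    codes, _ = scipy.cluster.vq.kmeans(ar, NUM_CLUSTERS)     # centroids
    vecs, _ = scipy.cluster.vq.vq(ar, codes)                 # assign each pixel to a centroid
    counts, _ = np.histogram(vecs, len(codes))               # frequency of each centroid
    peak = codes.astype(int)[np.argmax(counts)]              # most frequent color, cast to int
    color = ''.join('%02x' % c for c in peak)[:6]
    print(color)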
@@ -20,6 +20,7 @@ from django.db import models
from django.db import IntegrityError
from django.conf import settings
from django.db.models.query import QuerySet
from django.db.utils import DatabaseError
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from django.contrib.sites.models import Site

@@ -33,7 +34,7 @@ from apps.rss_feeds.text_importer import TextImporter
from apps.search.models import SearchStory, SearchFeed
from apps.statistics.rstats import RStats
from utils import json_functions as json
from utils import feedfinder, feedparser
from utils import feedfinder2 as feedfinder, feedparser
from utils import urlnorm
from utils import log as logging
from utils.fields import AutoOneToOneField

@@ -148,6 +149,10 @@ class Feed(models.Model):
if not feed_address: feed_address = ""
if not feed_link: feed_link = ""
return hashlib.sha1(feed_address+feed_link).hexdigest()

@property
def is_newsletter(self):
return self.feed_address.startswith('newsletter:')

def canonical(self, full=False, include_favicon=True):
feed = {

@@ -165,6 +170,7 @@ class Feed(models.Model):
'min_to_decay': self.min_to_decay,
'subs': self.num_subscribers,
'is_push': self.is_push,
'is_newsletter': self.is_newsletter,
'fetched_once': self.fetched_once,
'search_indexed': self.search_indexed,
'not_yet_fetched': not self.fetched_once, # Legacy. Doh.

@@ -226,6 +232,9 @@ class Feed(models.Model):

try:
super(Feed, self).save(*args, **kwargs)
except DatabaseError, e:
logging.debug(" ---> ~FBFeed update failed, no change: %s / %s..." % (kwargs.get('update_fields', None), e))
pass
except IntegrityError, e:
logging.debug(" ---> ~FRFeed save collision (%s), checking dupe..." % e)
duplicate_feeds = Feed.objects.filter(feed_address=self.feed_address,

@@ -264,7 +273,7 @@ class Feed(models.Model):
Feed.objects.get(pk=feed_id).index_feed_for_search()

def index_feed_for_search(self):
if self.num_subscribers > 1 and not self.branch_from_feed:
if self.num_subscribers > 1 and not self.branch_from_feed and not self.is_newsletter:
SearchFeed.index(feed_id=self.pk,
title=self.feed_title,
address=self.feed_address,

@@ -365,6 +374,8 @@ class Feed(models.Model):
def get_feed_from_url(cls, url, create=True, aggressive=False, fetch=True, offset=0):
feed = None

if url and url.startswith('newsletter:'):
return cls.objects.get(feed_address=url)
if url and 'youtube.com/user/' in url:
username = re.search('youtube.com/user/(\w+)', url).group(1)
url = "http://gdata.youtube.com/feeds/base/users/%s/uploads" % username

@@ -395,34 +406,39 @@ class Feed(models.Model):

# Normalize and check for feed_address, dupes, and feed_link
url = urlnorm.normalize(url)
if not url:
return

feed = by_url(url)
found_feed_urls = []

# Create if it looks good
if feed and len(feed) > offset:
feed = feed[offset]
elif create:
create_okay = False
if feedfinder.isFeed(url):
create_okay = True
elif fetch:
# Could still be a feed. Just check if there are entries
fp = feedparser.parse(url)
if len(fp.entries):
create_okay = True
if create_okay:
feed = cls.objects.create(feed_address=url)
feed = feed.update()

# Still nothing? Maybe the URL has some clues.
if not feed and fetch:
feed_finder_url = feedfinder.feed(url)
if feed_finder_url and 'comments' not in feed_finder_url:
else:
found_feed_urls = feedfinder.find_feeds(url)
if len(found_feed_urls):
feed_finder_url = found_feed_urls[0]
logging.debug(" ---> Found feed URLs for %s: %s" % (url, found_feed_urls))
feed = by_url(feed_finder_url)
if not feed and create:
if feed and len(feed) > offset:
feed = feed[offset]
logging.debug(" ---> Feed exists (%s), updating..." % (feed))
feed = feed.update()
elif create:
logging.debug(" ---> Feed doesn't exist, creating: %s" % (feed_finder_url))
feed = cls.objects.create(feed_address=feed_finder_url)
feed = feed.update()
elif feed and len(feed) > offset:
feed = feed[offset]

# Still nothing? Maybe the URL has some clues.
if not feed and fetch and len(found_feed_urls):
feed_finder_url = found_feed_urls[0]
feed = by_url(feed_finder_url)
if not feed and create:
feed = cls.objects.create(feed_address=feed_finder_url)
feed = feed.update()
elif feed and len(feed) > offset:
feed = feed[offset]

# Not created and not within bounds, so toss results.
if isinstance(feed, QuerySet):

@@ -524,18 +540,18 @@ class Feed(models.Model):
def _1():
feed_address = None
feed = self
found_feed_urls = []
try:
is_feed = feedfinder.isFeed(self.feed_address)
logging.debug(" ---> Checking: %s" % self.feed_address)
found_feed_urls = feedfinder.find_feeds(self.feed_address)
if found_feed_urls:
feed_address = found_feed_urls[0]
except KeyError:
is_feed = False
if not is_feed:
feed_address = feedfinder.feed(self.feed_address)
if not feed_address and self.feed_link:
feed_address = feedfinder.feed(self.feed_link)
else:
feed_address_from_link = feedfinder.feed(self.feed_link)
if feed_address_from_link != self.feed_address:
feed_address = feed_address_from_link
if not len(found_feed_urls) and self.feed_link:
found_feed_urls = feedfinder.find_feeds(self.feed_link)
if len(found_feed_urls) and found_feed_urls[0] != self.feed_address:
feed_address = found_feed_urls[0]

if feed_address:
if (feed_address.endswith('feedburner.com/atom.xml') or

@@ -545,6 +561,7 @@ class Feed(models.Model):
try:
self.feed_address = feed_address
feed = self.save()
feed.count_subscribers()
feed.schedule_feed_fetch_immediately()
feed.has_feed_exception = False
feed.active = True

@@ -603,7 +620,6 @@ class Feed(models.Model):
self.save()

def count_errors_in_history(self, exception_type='feed', status_code=None, fetch_history=None):
logging.debug(' ---> [%-30s] Counting errors in history...' % (unicode(self)[:30]))
if not fetch_history:
fetch_history = MFetchHistory.feed(self.pk)
fh = fetch_history[exception_type + '_fetch_history']

@@ -628,6 +644,9 @@ class Feed(models.Model):
self.has_page_exception = False
self.save()

logging.debug(' ---> [%-30s] ~FBCounting any errors in history: %s (%s non errors)' %
(unicode(self)[:30], len(errors), len(non_errors)))

return errors, non_errors

def count_redirects_in_history(self, fetch_type='feed', fetch_history=None):

@@ -987,7 +1006,7 @@ class Feed(models.Model):
from utils import feed_fetcher
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
original_feed_id = int(self.pk)

if getattr(settings, 'TEST_DEBUG', False):
original_feed_address = self.feed_address
original_feed_link = self.feed_link

@@ -1011,9 +1030,12 @@ class Feed(models.Model):
'fpf': kwargs.get('fpf'),
'feed_xml': kwargs.get('feed_xml'),
}
disp = feed_fetcher.Dispatcher(options, 1)
disp.add_jobs([[self.pk]])
feed = disp.run_jobs()
if self.is_newsletter:
feed = self.update_newsletter_icon()
else:
disp = feed_fetcher.Dispatcher(options, 1)
disp.add_jobs([[self.pk]])
feed = disp.run_jobs()

if feed:
feed = Feed.get_by_id(feed.pk)

@@ -1031,7 +1053,14 @@ class Feed(models.Model):
r.zrem('error_feeds', feed.pk)

return feed

def update_newsletter_icon(self):
from apps.rss_feeds.icon_importer import IconImporter
icon_importer = IconImporter(self)
icon_importer.save()

return self

@classmethod
def get_by_id(cls, feed_id, feed_address=None):
try:

@@ -1730,6 +1759,10 @@ class Feed(models.Model):

def schedule_feed_fetch_immediately(self, verbose=True):
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
if not self.num_subscribers:
logging.debug(' ---> [%-30s] Not scheduling feed fetch immediately, no subs.' % (unicode(self)[:30]))
return

if verbose:
logging.debug(' ---> [%-30s] Scheduling feed fetch immediately...' % (unicode(self)[:30]))

@@ -1864,9 +1897,12 @@ class MFeedPage(mongo.Document):

def save(self, *args, **kwargs):
if self.page_data:
self.page_data = zlib.compress(self.page_data)
self.page_data = zlib.compress(self.page_data).decode('utf-8')
return super(MFeedPage, self).save(*args, **kwargs)

def page(self):
return zlib.decompress(self.page_data)

@classmethod
def get_data(cls, feed_id):
data = None

@@ -2484,8 +2520,11 @@ class MStarredStoryCounts(mongo.Document):

if not total_only:
cls.objects(user_id=user_id).delete()
user_tags = cls.count_tags_for_user(user_id)
user_feeds = cls.count_feeds_for_user(user_id)
try:
user_tags = cls.count_tags_for_user(user_id)
user_feeds = cls.count_feeds_for_user(user_id)
except pymongo.errors.OperationFailure, e:
logging.debug(" ---> ~FBOperationError on mongo: ~SB%s" % e)

total_stories_count = MStarredStory.objects(user_id=user_id).count()
cls.objects(user_id=user_id, tag=None, feed_id=None).update_one(set__count=total_stories_count,

@@ -2600,7 +2639,7 @@ class MFetchHistory(mongo.Document):
history = fetch_history.push_history or []

history = [[date, code, message]] + history
any_exceptions = any([c for d, c, m in history if c >= 400])
any_exceptions = any([c for d, c, m in history if c not in [200, 304]])
if any_exceptions:
history = history[:25]
else:
@@ -98,16 +98,16 @@ class PageImporter(object):
logging.debug(' ***> [%-30s] Page fetch failed using requests: %s' % (self.feed, e))
self.save_no_page()
return
try:
data = response.text
except (LookupError, TypeError):
data = response.content
# try:
data = response.content
# except (LookupError, TypeError):
#     data = response.content

if response.encoding and response.encoding != 'utf-8':
try:
data = data.encode(response.encoding)
except LookupError:
pass
# if response.encoding and response.encoding != 'utf-8':
#     try:
#         data = data.encode(response.encoding)
#     except LookupError:
#         pass
else:
try:
data = open(feed_link, 'r').read()

@@ -270,8 +270,12 @@ class PageImporter(object):
if not saved:
try:
feed_page = MFeedPage.objects.get(feed_id=self.feed.pk)
feed_page.page_data = html
feed_page.save()
# feed_page.page_data = html.encode('utf-8')
if feed_page.page() == html:
logging.debug(' ---> [%-30s] ~FYNo change in page data: %s' % (self.feed.title[:30], self.feed.feed_link))
else:
feed_page.page_data = html
feed_page.save()
except MFeedPage.DoesNotExist:
feed_page = MFeedPage.objects.create(feed_id=self.feed.pk, page_data=html)
return feed_page
@@ -4,6 +4,7 @@ import shutil
import time
import redis
from celery.task import Task
from celery.exceptions import SoftTimeLimitExceeded
from utils import log as logging
from utils import s3_utils as s3
from django.conf import settings

@@ -29,6 +30,10 @@ class TaskFeeds(Task):
now_timestamp = int(now.strftime("%s"))
queued_feeds = r.zrangebyscore('scheduled_updates', 0, now_timestamp)
r.zremrangebyscore('scheduled_updates', 0, now_timestamp)
if not queued_feeds:
logging.debug(" ---> ~SN~FB~BMNo feeds to queue! Exiting...")
return

r.sadd('queued_feeds', *queued_feeds)
logging.debug(" ---> ~SN~FBQueuing ~SB%s~SN stale feeds (~SB%s~SN/~FG%s~FB~SN/%s tasked/queued/scheduled)" % (
len(queued_feeds),

@@ -124,6 +129,8 @@ class UpdateFeeds(Task):
name = 'update-feeds'
max_retries = 0
ignore_result = True
time_limit = 10*60
soft_time_limit = 9*60

def run(self, feed_pks, **kwargs):
from apps.rss_feeds.models import Feed

@@ -156,14 +163,21 @@ class UpdateFeeds(Task):
if not feed or feed.pk != int(feed_pk):
logging.info(" ---> ~FRRemoving feed_id %s from tasked_feeds queue, points to %s..." % (feed_pk, feed and feed.pk))
r.zrem('tasked_feeds', feed_pk)
if feed:
if not feed:
continue
try:
feed.update(**options)
if profiler_activated: profiler.process_celery_finished()
except SoftTimeLimitExceeded, e:
feed.save_feed_history(505, 'Timeout', e)
logging.info(" ---> [%-30s] ~BR~FWTime limit hit!~SB~FR Moving on to next feed..." % feed)
if profiler_activated: profiler.process_celery_finished()

class NewFeeds(Task):
name = 'new-feeds'
max_retries = 0
ignore_result = True
time_limit = 10*60
soft_time_limit = 9*60

def run(self, feed_pks, **kwargs):
from apps.rss_feeds.models import Feed
@@ -396,14 +396,14 @@ def exception_change_feed_link(request):
if not feed.known_good and (feed.has_page_exception or feed.has_feed_exception):
# Fix broken feed
logging.user(request, "~FRFixing feed exception by link: ~SB%s~SN to ~SB%s" % (feed.feed_link, feed_link))
feed_address = feedfinder.feed(feed_link)
if feed_address:
found_feed_urls = feedfinder.find_feeds(feed_link)
if len(found_feed_urls):
code = 1
feed.has_page_exception = False
feed.active = True
feed.fetched_once = False
feed.feed_link = feed_link
feed.feed_address = feed_address
feed.feed_address = found_feed_urls[0]
duplicate_feed = feed.schedule_feed_fetch_immediately()
if duplicate_feed:
new_feed = Feed.objects.get(pk=duplicate_feed.pk)
@@ -2452,13 +2452,17 @@ class MSocialServices(mongo.Document):
logging.user(user, "~BG~FMTwitter import starting...")

api = self.twitter_api()
try:
twitter_user = api.me()
except tweepy.TweepError, e:
api = None

if not api:
logging.user(user, "~BG~FMTwitter import ~SBfailed~SN: no api access.")
self.syncing_twitter = False
self.save()
return

twitter_user = api.me()

self.twitter_picture_url = twitter_user.profile_image_url_https
self.twitter_username = twitter_user.screen_name
self.twitter_refreshed_date = datetime.datetime.utcnow()
@@ -47,7 +47,7 @@ def load_social_stories(request, user_id, username=None):
page = request.REQUEST.get('page')
order = request.REQUEST.get('order', 'newest')
read_filter = request.REQUEST.get('read_filter', 'all')
query = request.REQUEST.get('query')
query = request.REQUEST.get('query', '').strip()
stories = []
message = None
@@ -1,8 +1,8 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.newsblur"
android:versionCode="116"
android:versionName="4.7.0" >
android:versionCode="119"
android:versionName="4.8.0b2" >

<uses-sdk
android:minSdkVersion="14"
clients/android/NewsBlur/libs/okhttp-3.1.2.jar (new binary file)
clients/android/NewsBlur/libs/okio-1.6.0.jar (new binary file)
@@ -44,6 +44,13 @@
android:entries="@array/default_read_filter_entries"
android:entryValues="@array/default_read_filter_values"
android:defaultValue="@string/default_read_filter_value" />
<ListPreference
android:key="pref_confirm_mark_all_read"
android:title="@string/settings_confirm_mark_all_read"
android:dialogTitle="@string/settings_confirm_mark_all_read"
android:entries="@array/confirm_mark_all_read_entries"
android:entryValues="@array/confirm_mark_all_read_values"
android:defaultValue="@string/confirm_mark_all_read_value" />
<CheckBoxPreference
android:defaultValue="false"
android:key="pref_auto_open_first_unread"
@@ -8,5 +8,5 @@
android:showAsAction="never" />
<item android:id="@+id/menu_search_stories"
android:title="@string/menu_search_stories"
android:showAsAction="never" />
android:showAsAction="ifRoom" android:icon="@drawable/search" />
</menu>
@@ -224,7 +224,7 @@
<string name="default_read_filter_value">ALL</string>

<string-array name="mark_all_read_options">
<item>Mark entire folder read</item>
<item>Mark all read</item>
<item>Cancel</item>
</string-array>

@@ -279,4 +279,20 @@
<item>DOWN_NEXT</item>
</string-array>
<string name="default_volume_key_navigation_value">OFF</string>

<string name="settings_confirm_mark_all_read">Confirm Mark All Read</string>
<string name="none">None</string>
<string name="feed_and_folder">Feeds and Folders</string>
<string name="folder_only">Folders Only</string>
<string-array name="confirm_mark_all_read_entries">
<item>@string/feed_and_folder</item>
<item>@string/folder_only</item>
<item>@string/none</item>
</string-array>
<string-array name="confirm_mark_all_read_values">
<item>FEED_AND_FOLDER</item>
<item>FOLDER_ONLY</item>
<item>NONE</item>
</string-array>
<string name="confirm_mark_all_read_value">FOLDER_ONLY</string>
</resources>
@@ -2,23 +2,19 @@ package com.newsblur.activity;

import android.os.Bundle;
import android.app.FragmentTransaction;
import android.util.Log;
import android.view.Menu;
import android.view.MenuInflater;

import com.newsblur.R;
import com.newsblur.fragment.AllStoriesItemListFragment;
import com.newsblur.fragment.MarkAllReadDialogFragment;
import com.newsblur.fragment.MarkAllReadDialogFragment.MarkAllReadDialogListener;
import com.newsblur.util.DefaultFeedView;
import com.newsblur.util.FeedSet;
import com.newsblur.util.PrefConstants;
import com.newsblur.util.PrefsUtils;
import com.newsblur.util.ReadFilter;
import com.newsblur.util.StoryOrder;
import com.newsblur.util.UIUtils;

public class AllStoriesItemsList extends ItemsList implements MarkAllReadDialogListener {
public class AllStoriesItemsList extends ItemsList {

@Override
protected void onCreate(Bundle bundle) {

@@ -40,17 +36,6 @@ public class AllStoriesItemsList extends ItemsList implements MarkAllReadDialogL
protected FeedSet createFeedSet() {
return FeedSet.allFeeds();
}

@Override
public void markItemListAsRead() {
MarkAllReadDialogFragment dialog = MarkAllReadDialogFragment.newInstance(getResources().getString(R.string.all_stories));
dialog.show(fragmentManager, "dialog");
}

@Override
public void onMarkAllRead() {
super.markItemListAsRead();
}

@Override
public boolean onCreateOptionsMenu(Menu menu) {

@@ -77,10 +62,4 @@ public class AllStoriesItemsList extends ItemsList implements MarkAllReadDialogL
itemListFragment.setDefaultFeedView(value);
}
}

@Override
public void onCancel() {
// do nothing
}

}
@@ -15,7 +15,6 @@ import com.newsblur.util.DefaultFeedView;
import com.newsblur.util.FeedSet;
import com.newsblur.util.PrefsUtils;
import com.newsblur.util.ReadFilter;
import com.newsblur.util.StoryOrder;
import com.newsblur.util.UIUtils;

public class FeedItemsList extends ItemsList {
@@ -4,21 +4,17 @@ import android.os.Bundle;
import android.app.FragmentTransaction;
import android.view.Menu;
import android.view.MenuInflater;
import android.util.Log;

import com.newsblur.R;
import com.newsblur.fragment.FolderItemListFragment;
import com.newsblur.fragment.MarkAllReadDialogFragment;
import com.newsblur.fragment.MarkAllReadDialogFragment.MarkAllReadDialogListener;
import com.newsblur.util.DefaultFeedView;
import com.newsblur.util.FeedSet;
import com.newsblur.util.FeedUtils;
import com.newsblur.util.PrefsUtils;
import com.newsblur.util.ReadFilter;
import com.newsblur.util.StoryOrder;
import com.newsblur.util.UIUtils;

public class FolderItemsList extends ItemsList implements MarkAllReadDialogListener {
public class FolderItemsList extends ItemsList {

public static final String EXTRA_FOLDER_NAME = "folderName";
private String folderName;

@@ -54,17 +50,6 @@ public class FolderItemsList extends ItemsList implements MarkAllReadDialogListe
return true;
}

@Override
public void markItemListAsRead() {
MarkAllReadDialogFragment dialog = MarkAllReadDialogFragment.newInstance(folderName);
dialog.show(fragmentManager, "dialog");
}

@Override
public void onMarkAllRead() {
super.markItemListAsRead();
}

@Override
protected void updateReadFilterPreference(ReadFilter newValue) {
PrefsUtils.setReadFilterForFolder(this, folderName, newValue);

@@ -82,10 +67,4 @@ public class FolderItemsList extends ItemsList implements MarkAllReadDialogListe
itemListFragment.setDefaultFeedView(value);
}
}

@Override
public void onCancel() {
// do nothing
}

}
@@ -17,13 +17,17 @@ import butterknife.FindView;
import com.newsblur.R;
import com.newsblur.fragment.DefaultFeedViewDialogFragment;
import com.newsblur.fragment.ItemListFragment;
import com.newsblur.fragment.MarkAllReadDialogFragment;
import com.newsblur.fragment.MarkAllReadDialogFragment.MarkAllReadDialogListener;
import com.newsblur.fragment.ReadFilterDialogFragment;
import com.newsblur.fragment.StoryOrderDialogFragment;
import com.newsblur.service.NBSyncService;
import com.newsblur.util.AppConstants;
import com.newsblur.util.DefaultFeedView;
import com.newsblur.util.DefaultFeedViewChangedListener;
import com.newsblur.util.FeedSet;
import com.newsblur.util.FeedUtils;
import com.newsblur.util.MarkAllReadConfirmation;
import com.newsblur.util.PrefsUtils;
import com.newsblur.util.ReadFilter;
import com.newsblur.util.ReadFilterChangedListener;

@@ -32,7 +36,7 @@ import com.newsblur.util.StoryOrder;
import com.newsblur.util.StoryOrderChangedListener;
import com.newsblur.util.UIUtils;

public abstract class ItemsList extends NbActivity implements StoryOrderChangedListener, ReadFilterChangedListener, DefaultFeedViewChangedListener {
public abstract class ItemsList extends NbActivity implements StoryOrderChangedListener, ReadFilterChangedListener, DefaultFeedViewChangedListener, MarkAllReadDialogListener {

private static final String STORY_ORDER = "storyOrder";
private static final String READ_FILTER = "readFilter";

@@ -64,7 +68,7 @@ public abstract class ItemsList extends NbActivity implements StoryOrderChangedL

if (PrefsUtils.isAutoOpenFirstUnread(this)) {
if (FeedUtils.dbHelper.getUnreadCount(fs, intelState) > 0) {
UIUtils.startReadingActivity(fs, Reading.FIND_FIRST_UNREAD, this, false);
UIUtils.startReadingActivity(fs, Reading.FIND_FIRST_UNREAD, this);
}
}

@@ -95,6 +99,7 @@ public abstract class ItemsList extends NbActivity implements StoryOrderChangedL

@Override
protected void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
if (searchQueryInput != null) {
String q = searchQueryInput.getText().toString().trim();
if (q.length() > 0) {

@@ -126,6 +131,17 @@ public abstract class ItemsList extends NbActivity implements StoryOrderChangedL
}

public void markItemListAsRead() {
MarkAllReadConfirmation confirmation = PrefsUtils.getMarkAllReadConfirmation(this);
if (confirmation.feedSetRequiresConfirmation(fs)) {
MarkAllReadDialogFragment dialog = MarkAllReadDialogFragment.newInstance(fs);
dialog.show(fragmentManager, "dialog");
} else {
onMarkAllRead(fs);
}
}

@Override
public void onMarkAllRead(FeedSet feedSet) {
if (itemListFragment != null) {
// since v6.0 of Android, the ListView in the fragment likes to crash if the underlying
// dataset changes rapidly as happens when marking-all-read and when the fragment is

@@ -205,6 +221,9 @@ public abstract class ItemsList extends NbActivity implements StoryOrderChangedL
if (overlayStatusText != null) {
String syncStatus = NBSyncService.getSyncStatusMessage(this, true);
if (syncStatus != null) {
if (AppConstants.VERBOSE_LOG) {
syncStatus = syncStatus + UIUtils.getMemoryUsageDebug(this);
}
overlayStatusText.setText(syncStatus);
overlayStatusText.setVisibility(View.VISIBLE);
} else {

@@ -231,19 +250,21 @@ public abstract class ItemsList extends NbActivity implements StoryOrderChangedL
@Override
public void storyOrderChanged(StoryOrder newValue) {
updateStoryOrderPreference(newValue);
FeedUtils.clearReadingSession();
itemListFragment.resetEmptyState();
itemListFragment.hasUpdated();
itemListFragment.scrollToTop();
NBSyncService.resetFetchState(fs);
triggerSync();
}

@Override
public void readFilterChanged(ReadFilter newValue) {
updateReadFilterPreference(newValue);
FeedUtils.clearReadingSession();
itemListFragment.resetEmptyState();
itemListFragment.hasUpdated();
itemListFragment.scrollToTop();
NBSyncService.resetFetchState(fs);
triggerSync();
}

protected abstract void updateReadFilterPreference(ReadFilter newValue);
@ -28,16 +28,18 @@ import com.newsblur.fragment.FeedIntelligenceSelectorFragment;
|
|||
import com.newsblur.fragment.FolderListFragment;
|
||||
import com.newsblur.fragment.LoginAsDialogFragment;
|
||||
import com.newsblur.fragment.LogoutDialogFragment;
|
||||
import com.newsblur.fragment.MarkAllReadDialogFragment.MarkAllReadDialogListener;
|
||||
import com.newsblur.service.BootReceiver;
|
||||
import com.newsblur.service.NBSyncService;
|
||||
import com.newsblur.util.AppConstants;
|
||||
import com.newsblur.util.FeedSet;
|
||||
import com.newsblur.util.FeedUtils;
|
||||
import com.newsblur.util.PrefsUtils;
|
||||
import com.newsblur.util.StateFilter;
|
||||
import com.newsblur.util.UIUtils;
|
||||
import com.newsblur.view.StateToggleButton.StateChangedListener;
|
||||
|
||||
public class Main extends NbActivity implements StateChangedListener, SwipeRefreshLayout.OnRefreshListener, AbsListView.OnScrollListener, PopupMenu.OnMenuItemClickListener {
|
||||
public class Main extends NbActivity implements StateChangedListener, SwipeRefreshLayout.OnRefreshListener, AbsListView.OnScrollListener, PopupMenu.OnMenuItemClickListener, MarkAllReadDialogListener {
|
||||
|
||||
private FolderListFragment folderFeedList;
|
||||
private FragmentManager fragmentManager;
|
||||
|
@ -100,8 +102,6 @@ public class Main extends NbActivity implements StateChangedListener, SwipeRefre
|
|||
|
||||
NBSyncService.clearPendingStoryRequest();
|
||||
NBSyncService.flushRecounts();
|
||||
NBSyncService.setActivationMode(NBSyncService.ActivationMode.ALL);
|
||||
FeedUtils.activateAllStories();
|
||||
FeedUtils.clearReadingSession();
|
||||
|
||||
updateStatusIndicators();
|
||||
|
@ -172,6 +172,9 @@ public class Main extends NbActivity implements StateChangedListener, SwipeRefre
|
|||
if (overlayStatusText != null) {
|
||||
String syncStatus = NBSyncService.getSyncStatusMessage(this, false);
|
||||
if (syncStatus != null) {
|
||||
if (AppConstants.VERBOSE_LOG) {
|
||||
syncStatus = syncStatus + UIUtils.getMemoryUsageDebug(this);
|
||||
}
|
||||
overlayStatusText.setText(syncStatus);
|
||||
overlayStatusText.setVisibility(View.VISIBLE);
|
||||
} else {
|
||||
|
@ -278,4 +281,8 @@ public class Main extends NbActivity implements StateChangedListener, SwipeRefre
|
|||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onMarkAllRead(FeedSet feedSet) {
|
||||
FeedUtils.markFeedsRead(feedSet, null, null, this);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -220,7 +220,7 @@ public abstract class Reading extends NbActivity implements OnPageChangeListener
|
|||
finish();
|
||||
return null;
|
||||
}
|
||||
return FeedUtils.dbHelper.getStoriesLoader(fs, intelState);
|
||||
return FeedUtils.dbHelper.getActiveStoriesLoader(fs);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -281,7 +281,6 @@ public abstract class Reading extends NbActivity implements OnPageChangeListener
|
|||
}
|
||||
}
|
||||
// if the story wasn't found, try to get more stories into the cursor
|
||||
FeedUtils.activateAllStories();
|
||||
this.checkStoryCount(readingAdapter.getCount()+1);
|
||||
}
|
||||
|
||||
|
@ -396,6 +395,9 @@ public abstract class Reading extends NbActivity implements OnPageChangeListener
|
|||
if (overlayStatusText != null) {
|
||||
String syncStatus = NBSyncService.getSyncStatusMessage(this, true);
|
||||
if (syncStatus != null) {
|
||||
if (AppConstants.VERBOSE_LOG) {
|
||||
syncStatus = syncStatus + UIUtils.getMemoryUsageDebug(this);
|
||||
}
|
||||
overlayStatusText.setText(syncStatus);
|
||||
overlayStatusText.setVisibility(View.VISIBLE);
|
||||
} else {
|
||||
|
@ -677,13 +679,7 @@ public abstract class Reading extends NbActivity implements OnPageChangeListener
|
|||
*/
|
||||
private void nextUnread() {
|
||||
unreadSearchActive = true;
|
||||
|
||||
// the first time an unread search is triggered, also trigger an activation of unreads, so
|
||||
// we don't search for a story that doesn't exist in the cursor
|
||||
if (!unreadSearchStarted) {
|
||||
FeedUtils.activateAllStories();
|
||||
unreadSearchStarted = true;
|
||||
}
|
||||
unreadSearchStarted = true;
|
||||
|
||||
// if we somehow got tapped before construction or are running during destruction, stop and
|
||||
// let either finish. search will happen when the cursor is pushed.
|
||||
|
|
|
@ -21,10 +21,4 @@ public class SavedStoriesReading extends Reading {
|
|||
getLoaderManager().initLoader(0, null, this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onLoadFinished(Loader<Cursor> loader, Cursor cursor) {
|
||||
// every time we see a set of saved stories, tag them so they don't disappear during this reading session
|
||||
FeedUtils.dbHelper.markSavedReadingSession();
|
||||
super.onLoadFinished(loader, cursor);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -11,13 +11,9 @@ import com.newsblur.util.UIUtils;
|
|||
|
||||
public class SocialFeedReading extends Reading {
|
||||
|
||||
public static final String EXTRA_IGNORE_FILTERS = "ignore_filters";
|
||||
private boolean ignoreFilters;
|
||||
|
||||
@Override
|
||||
protected void onCreate(Bundle savedInstanceBundle) {
|
||||
super.onCreate(savedInstanceBundle);
|
||||
ignoreFilters = getIntent().hasExtra(EXTRA_IGNORE_FILTERS);
|
||||
SocialFeed socialFeed = FeedUtils.dbHelper.getSocialFeed(fs.getSingleSocialFeed().getKey());
|
||||
if (socialFeed == null) finish(); // don't open fatally stale intents
|
||||
UIUtils.setCustomActionBar(this, socialFeed.photoUrl, socialFeed.feedTitle);
|
||||
|
@ -25,14 +21,4 @@ public class SocialFeedReading extends Reading {
|
|||
getLoaderManager().initLoader(0, null, this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Loader<Cursor> onCreateLoader(int loaderId, Bundle bundle) {
|
||||
// If we have navigated from the profile we want to ignore the StateFilter and ReadFilter settings
|
||||
// for the feed to ensure we can find the story.
|
||||
if (ignoreFilters) {
|
||||
return FeedUtils.dbHelper.getStoriesLoaderIgnoreFilters(fs);
|
||||
} else {
|
||||
return super.onCreateLoader(loaderId, bundle);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -20,6 +20,7 @@ public class BlurDatabase extends SQLiteOpenHelper {
|
|||
db.execSQL(DatabaseConstants.FOLDER_SQL);
|
||||
db.execSQL(DatabaseConstants.USER_SQL);
|
||||
db.execSQL(DatabaseConstants.STORY_SQL);
|
||||
db.execSQL(DatabaseConstants.READING_SESSION_SQL);
|
||||
db.execSQL(DatabaseConstants.STORY_TEXT_SQL);
|
||||
db.execSQL(DatabaseConstants.COMMENT_SQL);
|
||||
db.execSQL(DatabaseConstants.REPLY_SQL);
|
||||
|
@ -36,6 +37,7 @@ public class BlurDatabase extends SQLiteOpenHelper {
|
|||
db.execSQL(drop + DatabaseConstants.SOCIALFEED_TABLE);
|
||||
db.execSQL(drop + DatabaseConstants.FOLDER_TABLE);
|
||||
db.execSQL(drop + DatabaseConstants.STORY_TABLE);
|
||||
db.execSQL(drop + DatabaseConstants.READING_SESSION_TABLE);
|
||||
db.execSQL(drop + DatabaseConstants.STORY_TEXT_TABLE);
|
||||
db.execSQL(drop + DatabaseConstants.USER_TABLE);
|
||||
db.execSQL(drop + DatabaseConstants.COMMENT_TABLE);
|
||||
|
|
|
@ -136,10 +136,6 @@ public class BlurDatabaseHelper {
|
|||
}
|
||||
}
|
||||
|
||||
public void cleanupAllStories() {
|
||||
synchronized (RW_MUTEX) {dbRW.delete(DatabaseConstants.STORY_TABLE, null, null);}
|
||||
}
|
||||
|
||||
public void cleanupStoryText() {
|
||||
String q = "DELETE FROM " + DatabaseConstants.STORY_TEXT_TABLE +
|
||||
" WHERE " + DatabaseConstants.STORY_TEXT_STORY_HASH + " NOT IN " +
|
||||
|
@ -266,149 +262,136 @@ public class BlurDatabaseHelper {
|
|||
return urls;
|
||||
}
|
||||
|
||||
public void insertStories(StoriesResponse apiResponse, NBSyncService.ActivationMode actMode, long modeCutoff) {
|
||||
// to insert classifiers, we need to determine the feed ID of the stories in this
|
||||
// response, so sniff one out.
|
||||
String impliedFeedId = null;
|
||||
public void insertStories(StoriesResponse apiResponse, boolean forImmediateReading) {
|
||||
StateFilter intelState = PrefsUtils.getStateFilter(context);
|
||||
synchronized (RW_MUTEX) {
|
||||
// do not attempt to use beginTransactionNonExclusive() to reduce lock time for this very heavy set
|
||||
// of calls. most versions of Android incorrectly implement the underlying SQLite calls and will
|
||||
// result in crashes that poison the DB beyond repair
|
||||
dbRW.beginTransaction();
|
||||
try {
|
||||
|
||||
// to insert classifiers, we need to determine the feed ID of the stories in this
|
||||
// response, so sniff one out.
|
||||
String impliedFeedId = null;
|
||||
|
||||
// handle users
|
||||
if (apiResponse.users != null) {
|
||||
List<ContentValues> userValues = new ArrayList<ContentValues>(apiResponse.users.length);
|
||||
for (UserProfile user : apiResponse.users) {
|
||||
userValues.add(user.getValues());
|
||||
}
|
||||
bulkInsertValues(DatabaseConstants.USER_TABLE, userValues);
|
||||
}
|
||||
|
||||
// handle supplemental feed data that may have been included (usually in social requests)
|
||||
if (apiResponse.feeds != null) {
|
||||
List<ContentValues> feedValues = new ArrayList<ContentValues>(apiResponse.feeds.size());
|
||||
for (Feed feed : apiResponse.feeds) {
|
||||
feedValues.add(feed.getValues());
|
||||
}
|
||||
bulkInsertValues(DatabaseConstants.FEED_TABLE, feedValues);
|
||||
}
|
||||
|
||||
// handle story content
|
||||
List<ContentValues> storyValues = new ArrayList<ContentValues>(apiResponse.stories.length);
|
||||
List<ContentValues> socialStoryValues = new ArrayList<ContentValues>();
|
||||
for (Story story : apiResponse.stories) {
|
||||
ContentValues values = story.getValues();
|
||||
// the basic columns are fine for the stories table
|
||||
storyValues.add(values);
|
||||
// if a story was shared by a user, also insert it into the social table under their userid, too
|
||||
for (String sharedUserId : story.sharedUserIds) {
|
||||
ContentValues socialValues = new ContentValues();
|
||||
socialValues.put(DatabaseConstants.SOCIALFEED_STORY_USER_ID, sharedUserId);
|
||||
socialValues.put(DatabaseConstants.SOCIALFEED_STORY_STORYID, values.getAsString(DatabaseConstants.STORY_ID));
|
||||
socialStoryValues.add(socialValues);
|
||||
}
|
||||
impliedFeedId = story.feedId;
|
||||
}
|
||||
if (storyValues.size() > 0) {
|
||||
synchronized (RW_MUTEX) {
|
||||
dbRW.beginTransaction();
|
||||
try {
|
||||
bulkInsertValuesExtSync(DatabaseConstants.STORY_TABLE, storyValues);
|
||||
markStoriesActive(actMode, modeCutoff);
|
||||
dbRW.setTransactionSuccessful();
|
||||
} finally {
|
||||
dbRW.endTransaction();
|
||||
// handle users
|
||||
if (apiResponse.users != null) {
|
||||
List<ContentValues> userValues = new ArrayList<ContentValues>(apiResponse.users.length);
|
||||
for (UserProfile user : apiResponse.users) {
|
||||
userValues.add(user.getValues());
|
||||
}
|
||||
bulkInsertValuesExtSync(DatabaseConstants.USER_TABLE, userValues);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (socialStoryValues.size() > 0) {
|
||||
synchronized (RW_MUTEX) {
|
||||
dbRW.beginTransaction();
|
||||
try {
|
||||
|
||||
// handle supplemental feed data that may have been included (usually in social requests)
|
||||
if (apiResponse.feeds != null) {
|
||||
List<ContentValues> feedValues = new ArrayList<ContentValues>(apiResponse.feeds.size());
|
||||
for (Feed feed : apiResponse.feeds) {
|
||||
feedValues.add(feed.getValues());
|
||||
}
|
||||
bulkInsertValuesExtSync(DatabaseConstants.FEED_TABLE, feedValues);
|
||||
}
|
||||
|
||||
// handle story content
|
||||
List<ContentValues> socialStoryValues = new ArrayList<ContentValues>();
|
||||
for (Story story : apiResponse.stories) {
|
||||
ContentValues values = story.getValues();
|
||||
// immediate insert the story data
|
||||
dbRW.insertWithOnConflict(DatabaseConstants.STORY_TABLE, null, values, SQLiteDatabase.CONFLICT_REPLACE);
|
||||
// if a story was shared by a user, also insert it into the social table under their userid, too
|
||||
for (String sharedUserId : story.sharedUserIds) {
|
||||
ContentValues socialValues = new ContentValues();
|
||||
socialValues.put(DatabaseConstants.SOCIALFEED_STORY_USER_ID, sharedUserId);
|
||||
socialValues.put(DatabaseConstants.SOCIALFEED_STORY_STORYID, values.getAsString(DatabaseConstants.STORY_ID));
|
||||
socialStoryValues.add(socialValues);
|
||||
}
|
||||
// if the story is being fetched for the immediate session, also add the hash to the session table
|
||||
if (forImmediateReading && story.isStoryVisibileInState(intelState)) {
|
||||
ContentValues sessionHashValues = new ContentValues();
|
||||
sessionHashValues.put(DatabaseConstants.READING_SESSION_STORY_HASH, story.storyHash);
|
||||
dbRW.insert(DatabaseConstants.READING_SESSION_TABLE, null, sessionHashValues);
|
||||
}
|
||||
impliedFeedId = story.feedId;
|
||||
}
|
||||
if (socialStoryValues.size() > 0) {
|
||||
for(ContentValues values: socialStoryValues) {
|
||||
dbRW.insertWithOnConflict(DatabaseConstants.SOCIALFEED_STORY_MAP_TABLE, null, values, SQLiteDatabase.CONFLICT_REPLACE);
|
||||
}
|
||||
markStoriesActive(actMode, modeCutoff);
|
||||
dbRW.setTransactionSuccessful();
|
||||
} finally {
|
||||
dbRW.endTransaction();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// handle classifiers
|
||||
if (apiResponse.classifiers != null) {
|
||||
for (Map.Entry<String,Classifier> entry : apiResponse.classifiers.entrySet()) {
|
||||
// the API might not have included a feed ID, in which case it deserialized as -1 and must be implied
|
||||
String classifierFeedId = entry.getKey();
|
||||
if (classifierFeedId.equals("-1")) {
|
||||
classifierFeedId = impliedFeedId;
|
||||
// handle classifiers
|
||||
if (apiResponse.classifiers != null) {
|
||||
for (Map.Entry<String,Classifier> entry : apiResponse.classifiers.entrySet()) {
|
||||
// the API might not have included a feed ID, in which case it deserialized as -1 and must be implied
|
||||
String classifierFeedId = entry.getKey();
|
||||
if (classifierFeedId.equals("-1")) {
|
||||
classifierFeedId = impliedFeedId;
|
||||
}
|
||||
List<ContentValues> classifierValues = entry.getValue().getContentValues();
|
||||
for (ContentValues values : classifierValues) {
|
||||
values.put(DatabaseConstants.CLASSIFIER_ID, classifierFeedId);
|
||||
}
|
||||
dbRW.delete(DatabaseConstants.CLASSIFIER_TABLE, DatabaseConstants.CLASSIFIER_ID + " = ?", new String[] { classifierFeedId });
|
||||
bulkInsertValuesExtSync(DatabaseConstants.CLASSIFIER_TABLE, classifierValues);
|
||||
}
|
||||
}
|
||||
List<ContentValues> classifierValues = entry.getValue().getContentValues();
|
||||
for (ContentValues values : classifierValues) {
|
||||
values.put(DatabaseConstants.CLASSIFIER_ID, classifierFeedId);
|
||||
}
|
||||
synchronized (RW_MUTEX) {dbRW.delete(DatabaseConstants.CLASSIFIER_TABLE, DatabaseConstants.CLASSIFIER_ID + " = ?", new String[] { classifierFeedId });}
|
||||
bulkInsertValues(DatabaseConstants.CLASSIFIER_TABLE, classifierValues);
|
||||
}
|
||||
}
|
||||
|
||||
// handle comments
|
||||
List<ContentValues> commentValues = new ArrayList<ContentValues>();
|
||||
List<ContentValues> replyValues = new ArrayList<ContentValues>();
|
||||
// track which comments were seen, so replies can be cleared before re-insertion. there isn't
|
||||
// enough data to de-dupe them for an insert/update operation
|
||||
List<String> freshCommentIds = new ArrayList<String>();
|
||||
for (Story story : apiResponse.stories) {
|
||||
for (Comment comment : story.publicComments) {
|
||||
comment.storyId = story.id;
|
||||
// we need a primary key for comments, so construct one
|
||||
comment.id = Comment.constructId(story.id, story.feedId, comment.userId);
|
||||
commentValues.add(comment.getValues());
|
||||
for (Reply reply : comment.replies) {
|
||||
reply.commentId = comment.id;
|
||||
reply.id = reply.constructId();
|
||||
replyValues.add(reply.getValues());
|
||||
// handle comments
|
||||
List<ContentValues> commentValues = new ArrayList<ContentValues>();
|
||||
List<ContentValues> replyValues = new ArrayList<ContentValues>();
|
||||
// track which comments were seen, so replies can be cleared before re-insertion. there isn't
|
||||
// enough data to de-dupe them for an insert/update operation
|
||||
List<String> freshCommentIds = new ArrayList<String>();
|
||||
for (Story story : apiResponse.stories) {
|
||||
for (Comment comment : story.publicComments) {
|
||||
comment.storyId = story.id;
|
||||
// we need a primary key for comments, so construct one
|
||||
comment.id = Comment.constructId(story.id, story.feedId, comment.userId);
|
||||
commentValues.add(comment.getValues());
|
||||
for (Reply reply : comment.replies) {
|
||||
reply.commentId = comment.id;
|
||||
reply.id = reply.constructId();
|
||||
replyValues.add(reply.getValues());
|
||||
}
|
||||
freshCommentIds.add(comment.id);
|
||||
}
|
||||
for (Comment comment : story.friendsComments) {
|
||||
comment.storyId = story.id;
|
||||
// we need a primary key for comments, so construct one
|
||||
comment.id = Comment.constructId(story.id, story.feedId, comment.userId);
|
||||
comment.byFriend = true;
|
||||
commentValues.add(comment.getValues());
|
||||
for (Reply reply : comment.replies) {
|
||||
reply.commentId = comment.id;
|
||||
reply.id = reply.constructId();
|
||||
replyValues.add(reply.getValues());
|
||||
}
|
||||
freshCommentIds.add(comment.id);
|
||||
}
|
||||
for (Comment comment : story.friendsShares) {
|
||||
comment.isPseudo = true;
|
||||
comment.storyId = story.id;
|
||||
// we need a primary key for comments, so construct one
|
||||
comment.id = Comment.constructId(story.id, story.feedId, comment.userId);
|
||||
comment.byFriend = true;
|
||||
commentValues.add(comment.getValues());
|
||||
for (Reply reply : comment.replies) {
|
||||
reply.commentId = comment.id;
|
||||
reply.id = reply.constructId();
|
||||
replyValues.add(reply.getValues());
|
||||
}
|
||||
freshCommentIds.add(comment.id);
|
||||
}
|
||||
}
|
||||
freshCommentIds.add(comment.id);
|
||||
}
|
||||
for (Comment comment : story.friendsComments) {
|
||||
comment.storyId = story.id;
|
||||
// we need a primary key for comments, so construct one
|
||||
comment.id = Comment.constructId(story.id, story.feedId, comment.userId);
|
||||
comment.byFriend = true;
|
||||
commentValues.add(comment.getValues());
|
||||
for (Reply reply : comment.replies) {
|
||||
reply.commentId = comment.id;
|
||||
reply.id = reply.constructId();
|
||||
replyValues.add(reply.getValues());
|
||||
}
|
||||
freshCommentIds.add(comment.id);
|
||||
}
|
||||
for (Comment comment : story.friendsShares) {
|
||||
comment.isPseudo = true;
|
||||
comment.storyId = story.id;
|
||||
// we need a primary key for comments, so construct one
|
||||
comment.id = Comment.constructId(story.id, story.feedId, comment.userId);
|
||||
comment.byFriend = true;
|
||||
commentValues.add(comment.getValues());
|
||||
for (Reply reply : comment.replies) {
|
||||
reply.commentId = comment.id;
|
||||
reply.id = reply.constructId();
|
||||
replyValues.add(reply.getValues());
|
||||
}
|
||||
freshCommentIds.add(comment.id);
|
||||
}
|
||||
}
|
||||
deleteRepliesForComments(freshCommentIds);
|
||||
bulkInsertValues(DatabaseConstants.COMMENT_TABLE, commentValues);
|
||||
bulkInsertValues(DatabaseConstants.REPLY_TABLE, replyValues);
|
||||
}
|
||||
|
||||
private void deleteRepliesForComments(Collection<String> commentIds) {
|
||||
// NB: attempting to do this with a "WHERE col IN (vector)" for speed can cause errors on some versions of sqlite
|
||||
synchronized (RW_MUTEX) {
|
||||
dbRW.beginTransaction();
|
||||
try {
|
||||
for (String commentId : commentIds) {
|
||||
// before inserting new replies, remove existing ones for the fetched comments
|
||||
// NB: attempting to do this with a "WHERE col IN (vector)" for speed can cause errors on some versions of sqlite
|
||||
for (String commentId : freshCommentIds) {
|
||||
dbRW.delete(DatabaseConstants.REPLY_TABLE, DatabaseConstants.REPLY_COMMENTID + " = ?", new String[]{commentId});
|
||||
}
|
||||
bulkInsertValuesExtSync(DatabaseConstants.COMMENT_TABLE, commentValues);
|
||||
bulkInsertValuesExtSync(DatabaseConstants.REPLY_TABLE, replyValues);
|
||||
|
||||
dbRW.setTransactionSuccessful();
|
||||
} finally {
|
||||
dbRW.endTransaction();
|
||||
|
@ -498,16 +481,15 @@ public class BlurDatabaseHelper {
|
|||
// update the story's read state
|
||||
ContentValues values = new ContentValues();
|
||||
values.put(DatabaseConstants.STORY_READ, read);
|
||||
values.put(DatabaseConstants.STORY_READ_THIS_SESSION, read);
|
||||
dbRW.update(DatabaseConstants.STORY_TABLE, values, DatabaseConstants.STORY_HASH + " = ?", new String[]{story.storyHash});
|
||||
// which column to inc/dec depends on story intel
|
||||
String impactedCol;
|
||||
String impactedSocialCol;
|
||||
if (story.intelTotal < 0) {
|
||||
if (story.intelligence.calcTotalIntel() < 0) {
|
||||
// negative stories don't affect counts
|
||||
dbRW.setTransactionSuccessful();
|
||||
return impactedFeeds;
|
||||
} else if (story.intelTotal == 0 ) {
|
||||
} else if (story.intelligence.calcTotalIntel() == 0 ) {
|
||||
impactedCol = DatabaseConstants.FEED_NEUTRAL_COUNT;
|
||||
impactedSocialCol = DatabaseConstants.SOCIAL_FEED_NEUTRAL_COUNT;
|
||||
} else {
|
||||
|
@ -672,7 +654,11 @@ public class BlurDatabaseHelper {
|
|||
* Get the unread count for the given feedset based on local story state.
|
||||
*/
|
||||
public int getLocalUnreadCount(FeedSet fs, StateFilter stateFilter) {
|
||||
Cursor c = getStoriesCursor(fs, stateFilter, ReadFilter.PURE_UNREAD, null, null);
|
||||
StringBuilder sel = new StringBuilder();
|
||||
ArrayList<String> selArgs = new ArrayList<String>();
|
||||
getLocalStorySelectionAndArgs(sel, selArgs, fs, stateFilter, ReadFilter.UNREAD);
|
||||
|
||||
Cursor c = dbRO.rawQuery(sel.toString(), selArgs.toArray(new String[selArgs.size()]));
|
||||
int count = c.getCount();
|
||||
c.close();
|
||||
return count;
|
||||
|
@ -796,41 +782,6 @@ public class BlurDatabaseHelper {
|
|||
synchronized (RW_MUTEX) {dbRW.insertOrThrow(DatabaseConstants.STORY_TEXT_TABLE, null, values);}
|
||||
}
|
||||
|
||||
/**
|
||||
* Tags all saved stories with the reading session flag so they don't disappear if unsaved.
|
||||
*/
|
||||
public void markSavedReadingSession() {
|
||||
ContentValues values = new ContentValues();
|
||||
values.put(DatabaseConstants.STORY_READ_THIS_SESSION, true);
|
||||
synchronized (RW_MUTEX) {dbRW.update(DatabaseConstants.STORY_TABLE, values, DatabaseConstants.STORY_STARRED + " = 1", null);}
|
||||
}
|
||||
|
||||
/**
|
||||
* Clears the read_this_session and search_hit flags for all stories.
|
||||
*/
|
||||
public void clearReadingSession() {
|
||||
ContentValues values = new ContentValues();
|
||||
values.put(DatabaseConstants.STORY_READ_THIS_SESSION, false);
|
||||
values.put(DatabaseConstants.STORY_SEARCHIT, false);
|
||||
synchronized (RW_MUTEX) {dbRW.update(DatabaseConstants.STORY_TABLE, values, null, null);}
|
||||
}
|
||||
|
||||
public void markStoriesActive(NBSyncService.ActivationMode actMode, long modeCutoff) {
|
||||
ContentValues values = new ContentValues();
|
||||
values.put(DatabaseConstants.STORY_ACTIVE, true);
|
||||
|
||||
String selection = null;
|
||||
if (actMode == NBSyncService.ActivationMode.ALL) {
|
||||
// leave the selection null to mark all
|
||||
} else if (actMode == NBSyncService.ActivationMode.OLDER) {
|
||||
selection = DatabaseConstants.STORY_TIMESTAMP + " <= " + Long.toString(modeCutoff);
|
||||
} else if (actMode == NBSyncService.ActivationMode.NEWER) {
|
||||
selection = DatabaseConstants.STORY_TIMESTAMP + " >= " + Long.toString(modeCutoff);
|
||||
}
|
||||
|
||||
synchronized (RW_MUTEX) {dbRW.update(DatabaseConstants.STORY_TABLE, values, selection, null);}
|
||||
}
|
||||
|
||||
public Loader<Cursor> getSocialFeedsLoader(final StateFilter stateFilter) {
|
||||
return new QueryCursorLoader(context) {
|
||||
protected Cursor createCursor() {return getSocialFeedsCursor(stateFilter, cancellationSignal);}
|
||||
|
@ -891,112 +842,132 @@ public class BlurDatabaseHelper {
|
|||
return dbRO.query(DatabaseConstants.STARRED_STORY_COUNT_TABLE, null, null, null, null, null, null);
|
||||
}
|
||||
|
||||
public Loader<Cursor> getStoriesLoader(final FeedSet fs, final StateFilter stateFilter) {
|
||||
public Loader<Cursor> getActiveStoriesLoader(final FeedSet fs) {
|
||||
final StoryOrder order = PrefsUtils.getStoryOrder(context, fs);
|
||||
return new QueryCursorLoader(context) {
|
||||
protected Cursor createCursor() {
|
||||
ReadFilter readFilter = PrefsUtils.getReadFilter(context, fs);
|
||||
return getStoriesCursor(fs, stateFilter, readFilter, cancellationSignal);
|
||||
return getActiveStoriesCursor(fs, order, cancellationSignal);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private Cursor getActiveStoriesCursor(FeedSet fs, StoryOrder order, CancellationSignal cancellationSignal) {
|
||||
// get the stories for this FS
|
||||
Cursor result = getActiveStoriesCursorNoPrep(fs, order, cancellationSignal);
|
||||
// if the result is blank, try to prime the session table with existing stories, in case we
|
||||
// are offline, but if a session is started, just use what was there so offsets don't change.
|
||||
if (result.getCount() < 1) {
|
||||
if (AppConstants.VERBOSE_LOG) Log.d(this.getClass().getName(), "priming reading session");
|
||||
prepareReadingSession(fs);
|
||||
result = getActiveStoriesCursorNoPrep(fs, order, cancellationSignal);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
private Cursor getActiveStoriesCursorNoPrep(FeedSet fs, StoryOrder order, CancellationSignal cancellationSignal) {
|
||||
// stories aren't actually queried directly via the FeedSet and filters set in the UI. rather,
|
||||
// those filters are use to push live or cached story hashes into the reading session table, and
|
||||
// those hashes are used to pull story data from the story table
|
||||
StringBuilder q = new StringBuilder(DatabaseConstants.STORY_QUERY_BASE);
|
||||
|
||||
if (fs.isAllRead()) {
|
||||
q.append(" ORDER BY " + DatabaseConstants.READ_STORY_ORDER);
|
||||
} else if (fs.isAllSaved()) {
|
||||
q.append(" ORDER BY " + DatabaseConstants.getSavedStoriesSortOrder(order));
|
||||
} else {
|
||||
q.append(" ORDER BY ").append(DatabaseConstants.getStorySortOrder(order));
|
||||
}
|
||||
return rawQuery(q.toString(), null, cancellationSignal);
|
||||
}
|
||||
|
||||
public void clearStorySession() {
|
||||
synchronized (RW_MUTEX) {dbRW.delete(DatabaseConstants.READING_SESSION_TABLE, null, null);}
|
||||
}
|
||||
|
||||
private void prepareReadingSession(FeedSet fs) {
|
||||
ReadFilter readFilter = PrefsUtils.getReadFilter(context, fs);
|
||||
StateFilter stateFilter = PrefsUtils.getStateFilter(context);
|
||||
prepareReadingSession(fs, stateFilter, readFilter);
|
||||
}
|
||||
|
||||
/**
|
||||
* When navigating to a social story from an interaction/activity we want to ignore
|
||||
* the any state so we can be sure we find the selected story.
|
||||
* Populates the reading session table with hashes of already-fetched stories that meet the
|
||||
* criteria for the given FeedSet and filters; these hashes will be supplemented by hashes
|
||||
* fetched via the API and used to actually select story data when rendering story lists.
|
||||
*/
|
||||
public Loader<Cursor> getStoriesLoaderIgnoreFilters(final FeedSet fs) {
|
||||
return new QueryCursorLoader(context) {
|
||||
protected Cursor createCursor() {return getStoriesCursor(fs, StateFilter.ALL, ReadFilter.ALL, cancellationSignal);}
|
||||
};
|
||||
private void prepareReadingSession(FeedSet fs, StateFilter stateFilter, ReadFilter readFilter) {
|
||||
// a selection filter that will be used to pull active story hashes from the stories table into the reading session table
|
||||
StringBuilder sel = new StringBuilder();
|
||||
// any selection args that need to be used within the inner select statement
|
||||
ArrayList<String> selArgs = new ArrayList<String>();
|
||||
|
||||
getLocalStorySelectionAndArgs(sel, selArgs, fs, stateFilter, readFilter);
|
||||
|
||||
// use the inner select statement to push the active hashes into the session table
|
||||
StringBuilder q = new StringBuilder("INSERT INTO " + DatabaseConstants.READING_SESSION_TABLE);
|
||||
q.append(" (" + DatabaseConstants.READING_SESSION_STORY_HASH + ") ");
|
||||
q.append(sel);
|
||||
|
||||
synchronized (RW_MUTEX) {dbRW.execSQL(q.toString(), selArgs.toArray(new String[selArgs.size()]));}
|
||||
}
|
||||
|
||||
private Cursor getStoriesCursor(FeedSet fs, StateFilter stateFilter, ReadFilter readFilter, CancellationSignal cancellationSignal) {
|
||||
if (fs == null) return null;
|
||||
StoryOrder order = PrefsUtils.getStoryOrder(context, fs);
|
||||
return getStoriesCursor(fs, stateFilter, readFilter, order, cancellationSignal);
|
||||
}
|
||||
|
||||
private Cursor getStoriesCursor(FeedSet fs, StateFilter stateFilter, ReadFilter readFilter, StoryOrder order, CancellationSignal cancellationSignal) {
|
||||
if (fs == null) return null;
|
||||
|
||||
/**
|
||||
* Gets hashes of already-fetched stories that satisfy the given FeedSet and filters. Can be used
|
||||
* both to populate a reading session or to count local unreads.
|
||||
*/
|
||||
private void getLocalStorySelectionAndArgs(StringBuilder sel, List<String> selArgs, FeedSet fs, StateFilter stateFilter, ReadFilter readFilter) {
|
||||
sel.append("SELECT " + DatabaseConstants.STORY_HASH);
|
||||
if (fs.getSingleFeed() != null) {
|
||||
|
||||
StringBuilder q = new StringBuilder("SELECT ");
|
||||
q.append(TextUtils.join(",", DatabaseConstants.STORY_COLUMNS));
|
||||
q.append(" FROM " + DatabaseConstants.STORY_TABLE);
|
||||
q.append(" WHERE " + DatabaseConstants.STORY_FEED_ID + " = ?");
|
||||
DatabaseConstants.appendStorySelectionGroupOrder(q, readFilter, order, stateFilter, null, (fs.getSearchQuery() != null));
|
||||
return rawQuery(q.toString(), new String[]{fs.getSingleFeed()}, cancellationSignal);
|
||||
sel.append(" FROM " + DatabaseConstants.STORY_TABLE);
|
||||
sel.append(" WHERE " + DatabaseConstants.STORY_FEED_ID + " = ?");
|
||||
selArgs.add(fs.getSingleFeed());
|
||||
DatabaseConstants.appendStorySelection(sel, selArgs, readFilter, stateFilter, fs.getSearchQuery());
|
||||
|
||||
} else if (fs.getMultipleFeeds() != null) {
|
||||
|
||||
StringBuilder q = new StringBuilder(DatabaseConstants.MULTIFEED_STORIES_QUERY_BASE);
|
||||
q.append(" FROM " + DatabaseConstants.STORY_TABLE);
|
||||
q.append(DatabaseConstants.JOIN_FEEDS_ON_STORIES);
|
||||
q.append(" WHERE " + DatabaseConstants.STORY_TABLE + "." + DatabaseConstants.STORY_FEED_ID + " IN ( ");
|
||||
q.append(TextUtils.join(",", fs.getMultipleFeeds()) + ")");
|
||||
DatabaseConstants.appendStorySelectionGroupOrder(q, readFilter, order, stateFilter, null, (fs.getSearchQuery() != null));
|
||||
return rawQuery(q.toString(), null, cancellationSignal);
|
||||
sel.append(" FROM " + DatabaseConstants.STORY_TABLE);
|
||||
sel.append(" WHERE " + DatabaseConstants.STORY_TABLE + "." + DatabaseConstants.STORY_FEED_ID + " IN ( ");
|
||||
sel.append(TextUtils.join(",", fs.getMultipleFeeds()) + ")");
|
||||
DatabaseConstants.appendStorySelection(sel, selArgs, readFilter, stateFilter, fs.getSearchQuery());
|
||||
|
||||
} else if (fs.getSingleSocialFeed() != null) {
|
||||
|
||||
StringBuilder q = new StringBuilder(DatabaseConstants.MULTIFEED_STORIES_QUERY_BASE);
|
||||
q.append(" FROM " + DatabaseConstants.SOCIALFEED_STORY_MAP_TABLE);
|
||||
q.append(DatabaseConstants.JOIN_STORIES_ON_SOCIALFEED_MAP);
|
||||
q.append(DatabaseConstants.JOIN_FEEDS_ON_STORIES);
|
||||
q.append(" WHERE " + DatabaseConstants.SOCIALFEED_STORY_MAP_TABLE + "." + DatabaseConstants.SOCIALFEED_STORY_USER_ID + " = ? ");
|
||||
DatabaseConstants.appendStorySelectionGroupOrder(q, readFilter, order, stateFilter, null, (fs.getSearchQuery() != null));
|
||||
return rawQuery(q.toString(), new String[]{fs.getSingleSocialFeed().getKey()}, cancellationSignal);
|
||||
sel.append(" FROM " + DatabaseConstants.SOCIALFEED_STORY_MAP_TABLE);
|
||||
sel.append(DatabaseConstants.JOIN_STORIES_ON_SOCIALFEED_MAP);
|
||||
sel.append(" WHERE " + DatabaseConstants.SOCIALFEED_STORY_MAP_TABLE + "." + DatabaseConstants.SOCIALFEED_STORY_USER_ID + " = ? ");
|
||||
selArgs.add(fs.getSingleSocialFeed().getKey());
|
||||
DatabaseConstants.appendStorySelection(sel, selArgs, readFilter, stateFilter, fs.getSearchQuery());
|
||||
|
||||
} else if (fs.isAllNormal()) {
|
||||
|
||||
StringBuilder q = new StringBuilder(DatabaseConstants.MULTIFEED_STORIES_QUERY_BASE);
|
||||
q.append(" FROM " + DatabaseConstants.STORY_TABLE);
|
||||
q.append(DatabaseConstants.JOIN_FEEDS_ON_STORIES);
|
||||
q.append(" WHERE 1");
|
||||
DatabaseConstants.appendStorySelectionGroupOrder(q, readFilter, order, stateFilter, null, (fs.getSearchQuery() != null));
|
||||
return rawQuery(q.toString(), null, cancellationSignal);
|
||||
sel.append(" FROM " + DatabaseConstants.STORY_TABLE);
|
||||
sel.append(" WHERE 1");
|
||||
DatabaseConstants.appendStorySelection(sel, selArgs, readFilter, stateFilter, fs.getSearchQuery());
|
||||
|
||||
} else if (fs.isAllSocial()) {
|
||||
|
||||
StringBuilder q = new StringBuilder(DatabaseConstants.MULTIFEED_STORIES_QUERY_BASE);
|
||||
q.append(" FROM " + DatabaseConstants.SOCIALFEED_STORY_MAP_TABLE);
|
||||
q.append(DatabaseConstants.JOIN_STORIES_ON_SOCIALFEED_MAP);
|
||||
q.append(DatabaseConstants.JOIN_FEEDS_ON_STORIES);
|
||||
q.append(DatabaseConstants.JOIN_SOCIAL_FEEDS_ON_SOCIALFEED_MAP);
|
||||
DatabaseConstants.appendStorySelectionGroupOrder(q, readFilter, order, stateFilter, DatabaseConstants.STORY_TABLE + "." + DatabaseConstants.STORY_ID, false);
|
||||
return rawQuery(q.toString(), null, cancellationSignal);
|
||||
sel.append(" FROM " + DatabaseConstants.SOCIALFEED_STORY_MAP_TABLE);
|
||||
sel.append(DatabaseConstants.JOIN_STORIES_ON_SOCIALFEED_MAP);
|
||||
DatabaseConstants.appendStorySelection(sel, selArgs, readFilter, stateFilter, fs.getSearchQuery());
|
||||
|
||||
} else if (fs.isAllRead()) {
|
||||
|
||||
StringBuilder q = new StringBuilder(DatabaseConstants.MULTIFEED_STORIES_QUERY_BASE);
|
||||
q.append(" FROM " + DatabaseConstants.STORY_TABLE);
|
||||
q.append(DatabaseConstants.JOIN_FEEDS_ON_STORIES);
|
||||
q.append(" WHERE (" + DatabaseConstants.STORY_LAST_READ_DATE + " > 0)");
|
||||
q.append(" ORDER BY " + DatabaseConstants.READ_STORY_ORDER);
|
||||
return rawQuery(q.toString(), null, cancellationSignal);
|
||||
sel.append(" FROM " + DatabaseConstants.STORY_TABLE);
|
||||
sel.append(" WHERE (" + DatabaseConstants.STORY_LAST_READ_DATE + " > 0)");
|
||||
|
||||
} else if (fs.isAllSaved()) {
|
||||
|
||||
StringBuilder q = new StringBuilder(DatabaseConstants.MULTIFEED_STORIES_QUERY_BASE);
|
||||
q.append(" FROM " + DatabaseConstants.STORY_TABLE);
|
||||
q.append(DatabaseConstants.JOIN_FEEDS_ON_STORIES);
|
||||
q.append(" WHERE ((" + DatabaseConstants.STORY_STARRED + " = 1)");
|
||||
q.append(" OR (" + DatabaseConstants.STORY_READ_THIS_SESSION + " = 1))");
|
||||
if (fs.getSearchQuery() != null) {
|
||||
q.append(" AND (" + DatabaseConstants.STORY_TABLE + "." + DatabaseConstants.STORY_SEARCHIT + " = 1)");
|
||||
}
|
||||
q.append(" ORDER BY " + DatabaseConstants.getSavedStoriesSortOrder(order));
|
||||
return rawQuery(q.toString(), null, cancellationSignal);
|
||||
sel.append(" FROM " + DatabaseConstants.STORY_TABLE);
|
||||
sel.append(" WHERE (" + DatabaseConstants.STORY_STARRED + " = 1)");
|
||||
DatabaseConstants.appendStorySelection(sel, selArgs, ReadFilter.ALL, StateFilter.ALL, fs.getSearchQuery());
|
||||
|
||||
} else if (fs.isGlobalShared()) {
|
||||
|
||||
StringBuilder q = new StringBuilder(DatabaseConstants.MULTIFEED_STORIES_QUERY_BASE);
|
||||
q.append(" FROM " + DatabaseConstants.SOCIALFEED_STORY_MAP_TABLE);
|
||||
q.append(DatabaseConstants.JOIN_STORIES_ON_SOCIALFEED_MAP);
|
||||
q.append(DatabaseConstants.JOIN_FEEDS_ON_STORIES);
|
||||
DatabaseConstants.appendStorySelectionGroupOrder(q, readFilter, order, stateFilter, DatabaseConstants.STORY_TABLE + "." + DatabaseConstants.STORY_ID, false);
|
||||
return rawQuery(q.toString(), null, cancellationSignal);
|
||||
sel.append(" FROM " + DatabaseConstants.SOCIALFEED_STORY_MAP_TABLE);
|
||||
sel.append(DatabaseConstants.JOIN_STORIES_ON_SOCIALFEED_MAP);
|
||||
DatabaseConstants.appendStorySelection(sel, selArgs, readFilter, stateFilter, fs.getSearchQuery());
|
||||
|
||||
} else {
|
||||
throw new IllegalStateException("Asked to get stories for FeedSet of unknown type.");
|
||||
}
|
||||
|
|
|
@ -77,31 +77,29 @@ public class DatabaseConstants {
|
|||
public static final String STORY_SHARED_DATE = "sharedDate";
|
||||
public static final String STORY_CONTENT = "content";
|
||||
public static final String STORY_SHORT_CONTENT = "short_content";
|
||||
public static final String STORY_COMMENT_COUNT = "comment_count";
|
||||
public static final String STORY_FEED_ID = "feed_id";
|
||||
public static final String STORY_INTELLIGENCE_AUTHORS = "intelligence_authors";
|
||||
public static final String STORY_INTELLIGENCE_TAGS = "intelligence_tags";
|
||||
public static final String STORY_INTELLIGENCE_FEED = "intelligence_feed";
|
||||
public static final String STORY_INTELLIGENCE_TITLE = "intelligence_title";
|
||||
public static final String STORY_INTELLIGENCE_TOTAL = "intelligence_total";
|
||||
public static final String STORY_PERMALINK = "permalink";
|
||||
public static final String STORY_READ = "read";
|
||||
public static final String STORY_READ_THIS_SESSION = "read_this_session";
|
||||
public static final String STORY_STARRED = "starred";
|
||||
public static final String STORY_STARRED_DATE = "starred_date";
|
||||
public static final String STORY_SHARE_COUNT = "share_count";
|
||||
public static final String STORY_SHARED_USER_IDS = "shared_user_ids";
|
||||
public static final String STORY_FRIEND_USER_IDS = "comment_user_ids";
|
||||
public static final String STORY_PUBLIC_USER_IDS = "public_user_ids";
|
||||
public static final String STORY_SHORTDATE = "shortDate";
|
||||
public static final String STORY_LONGDATE = "longDate";
|
||||
public static final String STORY_SOCIAL_USER_ID = "socialUserId";
|
||||
public static final String STORY_SOURCE_USER_ID = "sourceUserId";
|
||||
public static final String STORY_TAGS = "tags";
|
||||
public static final String STORY_HASH = "story_hash";
|
||||
public static final String STORY_ACTIVE = "active";
|
||||
public static final String STORY_IMAGE_URLS = "image_urls";
|
||||
public static final String STORY_LAST_READ_DATE = "last_read_date";
|
||||
public static final String STORY_SEARCHIT = "search_hit";
|
||||
public static final String STORY_SEARCH_HIT = "search_hit";
|
||||
|
||||
public static final String READING_SESSION_TABLE = "reading_session";
|
||||
public static final String READING_SESSION_STORY_HASH = "session_story_hash";
|
||||
|
||||
public static final String STORY_TEXT_TABLE = "storytext";
|
||||
public static final String STORY_TEXT_STORY_HASH = "story_hash";
|
||||
|
@ -213,38 +211,37 @@ public class DatabaseConstants {
|
|||
")";
|
||||
|
||||
static final String STORY_SQL = "CREATE TABLE " + STORY_TABLE + " (" +
|
||||
STORY_HASH + TEXT + ", " +
|
||||
STORY_HASH + TEXT + " PRIMARY KEY, " +
|
||||
STORY_AUTHORS + TEXT + ", " +
|
||||
STORY_CONTENT + TEXT + ", " +
|
||||
STORY_SHORT_CONTENT + TEXT + ", " +
|
||||
STORY_TIMESTAMP + INTEGER + ", " +
|
||||
STORY_SHARED_DATE + INTEGER + ", " +
|
||||
STORY_SHORTDATE + TEXT + ", " +
|
||||
STORY_LONGDATE + TEXT + ", " +
|
||||
STORY_FEED_ID + INTEGER + ", " +
|
||||
STORY_ID + TEXT + " PRIMARY KEY, " +
|
||||
STORY_ID + TEXT + ", " +
|
||||
STORY_INTELLIGENCE_AUTHORS + INTEGER + ", " +
|
||||
STORY_INTELLIGENCE_FEED + INTEGER + ", " +
|
||||
STORY_INTELLIGENCE_TAGS + INTEGER + ", " +
|
||||
STORY_INTELLIGENCE_TITLE + INTEGER + ", " +
|
||||
STORY_COMMENT_COUNT + INTEGER + ", " +
|
||||
STORY_SHARE_COUNT + INTEGER + ", " +
|
||||
STORY_INTELLIGENCE_TOTAL + INTEGER + ", " +
|
||||
STORY_SOCIAL_USER_ID + TEXT + ", " +
|
||||
STORY_SOURCE_USER_ID + TEXT + ", " +
|
||||
STORY_SHARED_USER_IDS + TEXT + ", " +
|
||||
STORY_PUBLIC_USER_IDS + TEXT + ", " +
|
||||
STORY_FRIEND_USER_IDS + TEXT + ", " +
|
||||
STORY_TAGS + TEXT + ", " +
|
||||
STORY_PERMALINK + TEXT + ", " +
|
||||
STORY_READ + INTEGER + ", " +
|
||||
STORY_READ_THIS_SESSION + INTEGER + ", " +
|
||||
STORY_STARRED + INTEGER + ", " +
|
||||
STORY_STARRED_DATE + INTEGER + ", " +
|
||||
STORY_TITLE + TEXT + ", " +
|
||||
STORY_ACTIVE + INTEGER + " DEFAULT 0, " +
|
||||
STORY_IMAGE_URLS + TEXT + ", " +
|
||||
STORY_LAST_READ_DATE + INTEGER + ", " +
|
||||
STORY_SEARCHIT + INTEGER + " DEFAULT 0" +
|
||||
STORY_SEARCH_HIT + TEXT +
|
||||
")";
|
||||
|
||||
static final String READING_SESSION_SQL = "CREATE TABLE " + READING_SESSION_TABLE + " (" +
|
||||
READING_SESSION_STORY_HASH + TEXT +
|
||||
")";
|
||||
|
||||
static final String STORY_TEXT_SQL = "CREATE TABLE " + STORY_TEXT_TABLE + " (" +
|
||||
|
@ -300,53 +297,41 @@ public class DatabaseConstants {
|
|||
SOCIAL_FEED_ID, SOCIAL_FEED_USERNAME, SOCIAL_FEED_TITLE, SOCIAL_FEED_ICON, SOCIAL_FEED_POSITIVE_COUNT, SOCIAL_FEED_NEUTRAL_COUNT, SOCIAL_FEED_NEGATIVE_COUNT,
|
||||
};
|
||||
|
||||
public static final String SUM_STORY_TOTAL = "storyTotal";
|
||||
private static String STORY_SUM_TOTAL = " CASE " +
|
||||
"WHEN MAX(" + STORY_INTELLIGENCE_AUTHORS + "," + STORY_INTELLIGENCE_TAGS + "," + STORY_INTELLIGENCE_TITLE + ") > 0 " +
|
||||
"THEN MAX(" + STORY_INTELLIGENCE_AUTHORS + "," + STORY_INTELLIGENCE_TAGS + "," + STORY_INTELLIGENCE_TITLE + ") " +
|
||||
"WHEN MIN(" + STORY_INTELLIGENCE_AUTHORS + "," + STORY_INTELLIGENCE_TAGS + "," + STORY_INTELLIGENCE_TITLE + ") < 0 " +
|
||||
"THEN MIN(" + STORY_INTELLIGENCE_AUTHORS + "," + STORY_INTELLIGENCE_TAGS + "," + STORY_INTELLIGENCE_TITLE + ") " +
|
||||
"ELSE " + STORY_INTELLIGENCE_FEED + " " +
|
||||
"END AS " + SUM_STORY_TOTAL;
|
||||
private static final String STORY_INTELLIGENCE_BEST = SUM_STORY_TOTAL + " > 0 ";
|
||||
private static final String STORY_INTELLIGENCE_SOME = SUM_STORY_TOTAL + " >= 0 ";
|
||||
private static final String STORY_INTELLIGENCE_NEUT = SUM_STORY_TOTAL + " = 0 ";
|
||||
private static final String STORY_INTELLIGENCE_NEG = SUM_STORY_TOTAL + " < 0 ";
|
||||
|
||||
public static final String[] STORY_COLUMNS = {
|
||||
STORY_AUTHORS, STORY_COMMENT_COUNT, STORY_SHORT_CONTENT, STORY_TIMESTAMP, STORY_SHARED_DATE, STORY_SHORTDATE, STORY_LONGDATE,
|
||||
STORY_TABLE + "." + STORY_FEED_ID, STORY_TABLE + "." + STORY_ID, STORY_INTELLIGENCE_AUTHORS, STORY_INTELLIGENCE_FEED, STORY_INTELLIGENCE_TAGS,
|
||||
STORY_INTELLIGENCE_TITLE, STORY_PERMALINK, STORY_READ, STORY_STARRED, STORY_STARRED_DATE, STORY_SHARE_COUNT, STORY_TAGS, STORY_TITLE,
|
||||
STORY_SOCIAL_USER_ID, STORY_SOURCE_USER_ID, STORY_SHARED_USER_IDS, STORY_FRIEND_USER_IDS, STORY_PUBLIC_USER_IDS, STORY_SUM_TOTAL, STORY_HASH,
|
||||
STORY_LAST_READ_DATE, STORY_SEARCHIT,
|
||||
private static final String[] BASE_STORY_COLUMNS = {
|
||||
STORY_AUTHORS, STORY_SHORT_CONTENT, STORY_TIMESTAMP, STORY_SHARED_DATE, STORY_LONGDATE,
|
||||
STORY_TABLE + "." + STORY_FEED_ID, STORY_TABLE + "." + STORY_ID,
|
||||
STORY_INTELLIGENCE_AUTHORS, STORY_INTELLIGENCE_FEED, STORY_INTELLIGENCE_TAGS, STORY_INTELLIGENCE_TOTAL,
|
||||
STORY_INTELLIGENCE_TITLE, STORY_PERMALINK, STORY_READ, STORY_STARRED, STORY_STARRED_DATE, STORY_TAGS, STORY_TITLE,
|
||||
STORY_SOCIAL_USER_ID, STORY_SOURCE_USER_ID, STORY_SHARED_USER_IDS, STORY_FRIEND_USER_IDS, STORY_HASH,
|
||||
STORY_LAST_READ_DATE,
|
||||
};
|
||||
|
||||
public static final String MULTIFEED_STORIES_QUERY_BASE =
|
||||
"SELECT " + TextUtils.join(",", STORY_COLUMNS) + ", " +
|
||||
private static final String STORY_COLUMNS =
|
||||
TextUtils.join(",", BASE_STORY_COLUMNS) + ", " +
|
||||
FEED_TITLE + ", " + FEED_FAVICON_URL + ", " + FEED_FAVICON_COLOR + ", " + FEED_FAVICON_BORDER + ", " + FEED_FAVICON_FADE + ", " + FEED_FAVICON_TEXT;
|
||||
|
||||
public static final String JOIN_FEEDS_ON_STORIES =
|
||||
" INNER JOIN " + FEED_TABLE + " ON " + STORY_TABLE + "." + STORY_FEED_ID + " = " + FEED_TABLE + "." + FEED_ID;
|
||||
public static final String STORY_QUERY_BASE =
|
||||
"SELECT " +
|
||||
STORY_COLUMNS +
|
||||
" FROM " + STORY_TABLE +
|
||||
" INNER JOIN " + FEED_TABLE +
|
||||
" ON " + STORY_TABLE + "." + STORY_FEED_ID + " = " + FEED_TABLE + "." + FEED_ID +
|
||||
" WHERE " + STORY_HASH + " IN (" +
|
||||
" SELECT DISTINCT " + READING_SESSION_STORY_HASH +
|
||||
" FROM " + READING_SESSION_TABLE + ")" +
|
||||
" GROUP BY " + STORY_HASH;
|
||||
|
||||
public static final String JOIN_STORIES_ON_SOCIALFEED_MAP =
|
||||
" INNER JOIN " + STORY_TABLE + " ON " + STORY_TABLE + "." + STORY_ID + " = " + SOCIALFEED_STORY_MAP_TABLE + "." + SOCIALFEED_STORY_STORYID;
|
||||
|
||||
public static final String JOIN_SOCIAL_FEEDS_ON_SOCIALFEED_MAP =
|
||||
" INNER JOIN " + SOCIALFEED_TABLE + " ON " + SOCIALFEED_TABLE + "." + SOCIAL_FEED_ID + " = " + SOCIALFEED_STORY_MAP_TABLE + "." + SOCIALFEED_STORY_USER_ID;
|
||||
|
||||
public static final String READ_STORY_ORDER = STORY_LAST_READ_DATE + " DESC";
|
||||
|
||||
/**
|
||||
* Appends to the given story query any and all selection statements that are required to satisfy the specified
|
||||
* filtration parameters, dedup column, and ordering requirements.
|
||||
* filtration parameters.
|
||||
*/
|
||||
public static void appendStorySelectionGroupOrder(StringBuilder q, ReadFilter readFilter, StoryOrder order, StateFilter stateFilter, String dedupCol, boolean requireQueryHit) {
|
||||
public static void appendStorySelection(StringBuilder q, List<String> selArgs, ReadFilter readFilter, StateFilter stateFilter, String requireQueryHit) {
|
||||
if (readFilter == ReadFilter.UNREAD) {
|
||||
// When a user is viewing "unread only" stories, what they really want are stories that were unread when they started reading,
|
||||
// or else the selection set will constantly change as they see things!
|
||||
q.append(" AND ((" + STORY_READ + " = 0) OR (" + STORY_READ_THIS_SESSION + " = 1))");
|
||||
} else if (readFilter == ReadFilter.PURE_UNREAD) {
|
||||
// This means really just unreads, useful for getting counts
|
||||
q.append(" AND (" + STORY_READ + " = 0)");
|
||||
}
|
||||
|
||||
|
@ -355,18 +340,9 @@ public class DatabaseConstants {
|
|||
q.append(" AND " + stateSelection);
|
||||
}
|
||||
|
||||
if (requireQueryHit) {
|
||||
q.append(" AND (" + STORY_TABLE + "." + STORY_SEARCHIT + " = 1)");
|
||||
}
|
||||
|
||||
q.append(" AND (" + STORY_TABLE + "." + STORY_ACTIVE + " = 1)");
|
||||
|
||||
if (dedupCol != null) {
|
||||
q.append( " GROUP BY " + dedupCol);
|
||||
}
|
||||
|
||||
if (order != null) {
|
||||
q.append(" ORDER BY " + getStorySortOrder(order));
|
||||
if (requireQueryHit != null) {
|
||||
q.append(" AND (" + STORY_TABLE + "." + STORY_SEARCH_HIT + " = ?)");
|
||||
selArgs.add(requireQueryHit);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -378,13 +354,13 @@ public class DatabaseConstants {
|
|||
case ALL:
|
||||
return null;
|
||||
case SOME:
|
||||
return STORY_INTELLIGENCE_SOME;
|
||||
return STORY_INTELLIGENCE_TOTAL + " >= 0 ";
|
||||
case NEUT:
|
||||
return STORY_INTELLIGENCE_NEUT;
|
||||
return STORY_INTELLIGENCE_TOTAL + " = 0 ";
|
||||
case BEST:
|
||||
return STORY_INTELLIGENCE_BEST;
|
||||
return STORY_INTELLIGENCE_TOTAL + " > 0 ";
|
||||
case NEG:
|
||||
return STORY_INTELLIGENCE_NEG;
|
||||
return STORY_INTELLIGENCE_TOTAL + " < 0 ";
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
|
|
|
@ -7,7 +7,9 @@ import android.database.Cursor;
|
|||
import android.text.TextUtils;
|
||||
|
||||
import com.google.gson.annotations.SerializedName;
|
||||
|
||||
import com.newsblur.database.DatabaseConstants;
|
||||
import com.newsblur.util.StateFilter;
|
||||
|
||||
public class Story implements Serializable {
|
||||
|
||||
|
@ -18,21 +20,12 @@ public class Story implements Serializable {
|
|||
@SerializedName("story_permalink")
|
||||
public String permalink;
|
||||
|
||||
@SerializedName("share_count")
|
||||
public String shareCount;
|
||||
|
||||
@SerializedName("share_user_ids")
|
||||
public String[] sharedUserIds;
|
||||
|
||||
@SerializedName("shared_by_friends")
|
||||
public String[] friendUserIds = new String[]{};
|
||||
|
||||
@SerializedName("shared_by_public")
|
||||
public String[] publicUserIds = new String[]{};
|
||||
|
||||
@SerializedName("comment_count")
|
||||
public int commentCount;
|
||||
|
||||
@SerializedName("read_status")
|
||||
public boolean read;
|
||||
|
||||
|
@ -83,11 +76,6 @@ public class Story implements Serializable {
|
|||
@SerializedName("intelligence")
|
||||
public Intelligence intelligence = new Intelligence();
|
||||
|
||||
public int intelTotal;
|
||||
|
||||
@SerializedName("short_parsed_date")
|
||||
public String shortDate;
|
||||
|
||||
@SerializedName("long_parsed_date")
|
||||
public String longDate;
|
||||
|
||||
|
@ -101,30 +89,28 @@ public class Story implements Serializable {
|
|||
// not yet vended by the API, but tracked locally and fudged (see SyncService) for remote stories
|
||||
public long lastReadTimestamp = 0L;
|
||||
|
||||
public boolean isSearchHit = false;
|
||||
// non-API and only set once when story is pushed to DB so it can be selected upon
|
||||
public String searchHit = "";
|
||||
|
||||
public ContentValues getValues() {
|
||||
final ContentValues values = new ContentValues();
|
||||
values.put(DatabaseConstants.STORY_ID, id);
|
||||
values.put(DatabaseConstants.STORY_TITLE, title.replace("\n", " ").replace("\r", " "));
|
||||
values.put(DatabaseConstants.STORY_TIMESTAMP, timestamp);
|
||||
values.put(DatabaseConstants.STORY_SHORTDATE, shortDate);
|
||||
values.put(DatabaseConstants.STORY_LONGDATE, longDate);
|
||||
values.put(DatabaseConstants.STORY_CONTENT, content);
|
||||
values.put(DatabaseConstants.STORY_SHORT_CONTENT, shortContent);
|
||||
values.put(DatabaseConstants.STORY_PERMALINK, permalink);
|
||||
values.put(DatabaseConstants.STORY_COMMENT_COUNT, commentCount);
|
||||
values.put(DatabaseConstants.STORY_SHARE_COUNT, shareCount);
|
||||
values.put(DatabaseConstants.STORY_AUTHORS, authors);
|
||||
values.put(DatabaseConstants.STORY_SOCIAL_USER_ID, socialUserId);
|
||||
values.put(DatabaseConstants.STORY_SOURCE_USER_ID, sourceUserId);
|
||||
values.put(DatabaseConstants.STORY_SHARED_USER_IDS, TextUtils.join(",", sharedUserIds));
values.put(DatabaseConstants.STORY_FRIEND_USER_IDS, TextUtils.join(",", friendUserIds));
values.put(DatabaseConstants.STORY_PUBLIC_USER_IDS, TextUtils.join(",", publicUserIds));
values.put(DatabaseConstants.STORY_INTELLIGENCE_AUTHORS, intelligence.intelligenceAuthors);
values.put(DatabaseConstants.STORY_INTELLIGENCE_FEED, intelligence.intelligenceFeed);
values.put(DatabaseConstants.STORY_INTELLIGENCE_TAGS, intelligence.intelligenceTags);
values.put(DatabaseConstants.STORY_INTELLIGENCE_TITLE, intelligence.intelligenceTitle);
values.put(DatabaseConstants.STORY_INTELLIGENCE_TOTAL, intelligence.calcTotalIntel());
values.put(DatabaseConstants.STORY_TAGS, TextUtils.join(",", tags));
values.put(DatabaseConstants.STORY_READ, read);
values.put(DatabaseConstants.STORY_STARRED, starred);

@@ -133,7 +119,7 @@ public class Story implements Serializable {
values.put(DatabaseConstants.STORY_HASH, storyHash);
values.put(DatabaseConstants.STORY_IMAGE_URLS, TextUtils.join(",", imageUrls));
values.put(DatabaseConstants.STORY_LAST_READ_DATE, lastReadTimestamp);
values.put(DatabaseConstants.STORY_SEARCHIT, isSearchHit);
values.put(DatabaseConstants.STORY_SEARCH_HIT, searchHit);
return values;
}

@@ -146,21 +132,16 @@ public class Story implements Serializable {
story.shortContent = cursor.getString(cursor.getColumnIndex(DatabaseConstants.STORY_SHORT_CONTENT));
story.title = cursor.getString(cursor.getColumnIndex(DatabaseConstants.STORY_TITLE));
story.timestamp = cursor.getLong(cursor.getColumnIndex(DatabaseConstants.STORY_TIMESTAMP));
story.shortDate = cursor.getString(cursor.getColumnIndex(DatabaseConstants.STORY_SHORTDATE));
story.longDate = cursor.getString(cursor.getColumnIndex(DatabaseConstants.STORY_LONGDATE));
story.shareCount = cursor.getString(cursor.getColumnIndex(DatabaseConstants.STORY_SHARE_COUNT));
story.commentCount = cursor.getInt(cursor.getColumnIndex(DatabaseConstants.STORY_COMMENT_COUNT));
story.socialUserId = cursor.getString(cursor.getColumnIndex(DatabaseConstants.STORY_SOCIAL_USER_ID));
story.sourceUserId = cursor.getString(cursor.getColumnIndex(DatabaseConstants.STORY_SOURCE_USER_ID));
story.permalink = cursor.getString(cursor.getColumnIndex(DatabaseConstants.STORY_PERMALINK));
story.sharedUserIds = TextUtils.split(cursor.getString(cursor.getColumnIndex(DatabaseConstants.STORY_SHARED_USER_IDS)), ",");
story.friendUserIds = TextUtils.split(cursor.getString(cursor.getColumnIndex(DatabaseConstants.STORY_FRIEND_USER_IDS)), ",");
story.publicUserIds = TextUtils.split(cursor.getString(cursor.getColumnIndex(DatabaseConstants.STORY_PUBLIC_USER_IDS)), ",");
story.intelligence.intelligenceAuthors = cursor.getInt(cursor.getColumnIndex(DatabaseConstants.STORY_INTELLIGENCE_AUTHORS));
story.intelligence.intelligenceFeed = cursor.getInt(cursor.getColumnIndex(DatabaseConstants.STORY_INTELLIGENCE_FEED));
story.intelligence.intelligenceTags = cursor.getInt(cursor.getColumnIndex(DatabaseConstants.STORY_INTELLIGENCE_TAGS));
story.intelligence.intelligenceTitle = cursor.getInt(cursor.getColumnIndex(DatabaseConstants.STORY_INTELLIGENCE_TITLE));
story.intelTotal = cursor.getInt(cursor.getColumnIndex(DatabaseConstants.SUM_STORY_TOTAL));
story.read = cursor.getInt(cursor.getColumnIndex(DatabaseConstants.STORY_READ)) > 0;
story.starred = cursor.getInt(cursor.getColumnIndex(DatabaseConstants.STORY_STARRED)) > 0;
story.starredTimestamp = cursor.getLong(cursor.getColumnIndex(DatabaseConstants.STORY_STARRED_DATE));

@@ -169,7 +150,6 @@ public class Story implements Serializable {
story.id = cursor.getString(cursor.getColumnIndex(DatabaseConstants.STORY_ID));
story.storyHash = cursor.getString(cursor.getColumnIndex(DatabaseConstants.STORY_HASH));
story.lastReadTimestamp = cursor.getLong(cursor.getColumnIndex(DatabaseConstants.STORY_LAST_READ_DATE));
story.isSearchHit = cursor.getInt(cursor.getColumnIndex(DatabaseConstants.STORY_SEARCHIT)) > 0;
return story;
}

@@ -187,8 +167,42 @@ public class Story implements Serializable {
@SerializedName("title")
public int intelligenceTitle = 0;

public int calcTotalIntel() {
int max = 0;
max = Math.max(max, intelligenceAuthors);
max = Math.max(max, intelligenceTags);
max = Math.max(max, intelligenceTitle);
if (max > 0) return max;

int min = 0;
min = Math.min(min, intelligenceAuthors);
min = Math.min(min, intelligenceTags);
min = Math.min(min, intelligenceTitle);
if (min < 0) return min;

return intelligenceFeed;
}
}

public boolean isStoryVisibileInState(StateFilter state) {
int score = intelligence.calcTotalIntel();
switch (state) {
case ALL:
return true;
case SOME:
return (score >= 0);
case NEUT:
return (score == 0);
case BEST:
return (score > 0);
case NEG:
return (score < 0);
default:
return false;
}
}

/**
 * Custom equality based on storyID/feedID equality so that a Set can de-duplicate story objects.
 */
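In plain terms, calcTotalIntel() prefers the strongest positive per-facet score (author, tag, title), falls back to the strongest negative one, and otherwise returns the feed-level score; isStoryVisibileInState() then compares that combined score against the selected intelligence filter. A minimal, self-contained sketch of that logic (StateFilter is re-declared here purely for illustration; in the app it lives in com.newsblur.util):

// Illustrative sketch only; mirrors the combination and filtering logic shown in the diff above.
enum StateFilter { ALL, SOME, NEUT, BEST, NEG }

class IntelligenceExample {
    static int combine(int authors, int tags, int title, int feed) {
        int max = Math.max(Math.max(authors, tags), Math.max(title, 0));
        if (max > 0) return max;                 // any liked facet wins
        int min = Math.min(Math.min(authors, tags), Math.min(title, 0));
        if (min < 0) return min;                 // otherwise any disliked facet wins
        return feed;                             // otherwise fall back to the feed-level score
    }

    static boolean isVisible(int score, StateFilter state) {
        switch (state) {
            case ALL:  return true;
            case SOME: return score >= 0;
            case NEUT: return score == 0;
            case BEST: return score > 0;
            case NEG:  return score < 0;
            default:   return false;
        }
    }

    public static void main(String[] args) {
        int score = combine(1, 0, -1, 0);                       // a liked author outweighs a disliked title
        System.out.println(isVisible(score, StateFilter.BEST)); // true
        System.out.println(isVisible(score, StateFilter.NEG));  // false
    }
}
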
@@ -1,6 +1,7 @@
package com.newsblur.domain;

import java.io.Serializable;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

@@ -8,8 +9,8 @@ import java.util.Map;
import android.text.TextUtils;

import com.squareup.okhttp.FormEncodingBuilder;
import com.squareup.okhttp.RequestBody;
import okhttp3.FormBody;
import okhttp3.RequestBody;

/**
* A String-to-String multimap that serializes to JSON or HTTP request params.

@@ -18,7 +19,6 @@ import com.squareup.okhttp.RequestBody;
public class ValueMultimap implements Serializable {

private Map<String, List<String>> multimap;
private String TAG = "ValueMultimap";

public ValueMultimap() {
multimap = new HashMap<String, List<String>>();

@@ -40,7 +40,7 @@ public class ValueMultimap implements Serializable {
final StringBuilder builder = new StringBuilder();
builder.append(key);
builder.append("=");
builder.append(value);
builder.append(URLEncoder.encode(value));
parameters.add(builder.toString());
}
}

@@ -48,7 +48,7 @@ public class ValueMultimap implements Serializable {
}

public RequestBody asFormEncodedRequestBody() {
FormEncodingBuilder formEncodingBuilder = new FormEncodingBuilder();
FormBody.Builder formEncodingBuilder = new FormBody.Builder();
for (String key : multimap.keySet()) {
for (String value : multimap.get(key)) {
formEncodingBuilder.add(key, value);
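For context, a minimal sketch of the okhttp3 pattern the new asFormEncodedRequestBody() relies on: FormBody.Builder replaces okhttp2's FormEncodingBuilder, and build() yields a RequestBody. The class and method names outside of okhttp3 itself are illustrative, not taken from this diff:

import java.util.List;
import java.util.Map;

import okhttp3.FormBody;
import okhttp3.RequestBody;

// Illustrative sketch of the okhttp2 -> okhttp3 form-encoding change; not the app's actual class.
class FormBodySketch {
    // Repeated keys are allowed, which is what a String-to-String multimap needs.
    static RequestBody toFormBody(Map<String, List<String>> multimap) {
        FormBody.Builder builder = new FormBody.Builder();
        for (Map.Entry<String, List<String>> entry : multimap.entrySet()) {
            for (String value : entry.getValue()) {
                builder.add(entry.getKey(), value);
            }
        }
        return builder.build(); // FormBody is a RequestBody; pass it to Request.Builder#post
    }
}
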
@@ -14,7 +14,7 @@ public class AllSharedStoriesItemListFragment extends ItemListFragment {
@Override
public void onLoadFinished(Loader<Cursor> loader, Cursor cursor) {
if ((adapter == null) && (cursor != null)) {
String[] groupFrom = new String[] { DatabaseConstants.STORY_TITLE, DatabaseConstants.STORY_SHORT_CONTENT, DatabaseConstants.STORY_AUTHORS, DatabaseConstants.STORY_TIMESTAMP, DatabaseConstants.SUM_STORY_TOTAL, DatabaseConstants.FEED_TITLE };
String[] groupFrom = new String[] { DatabaseConstants.STORY_TITLE, DatabaseConstants.STORY_SHORT_CONTENT, DatabaseConstants.STORY_AUTHORS, DatabaseConstants.STORY_TIMESTAMP, DatabaseConstants.STORY_INTELLIGENCE_TOTAL, DatabaseConstants.FEED_TITLE };
int[] groupTo = new int[] { R.id.row_item_title, R.id.row_item_content, R.id.row_item_author, R.id.row_item_date, R.id.row_item_sidebar, R.id.row_item_feedtitle };
adapter = new MultipleFeedItemsAdapter(getActivity(), R.layout.row_folderitem, cursor, groupFrom, groupTo);
adapter.setViewBinder(new SocialItemViewBinder(getActivity()));
@@ -14,7 +14,7 @@ public class AllStoriesItemListFragment extends ItemListFragment {
@Override
public void onLoadFinished(Loader<Cursor> loader, Cursor cursor) {
if ((adapter == null) && (cursor != null)) {
String[] groupFrom = new String[] { DatabaseConstants.STORY_TITLE, DatabaseConstants.STORY_SHORT_CONTENT, DatabaseConstants.STORY_AUTHORS, DatabaseConstants.STORY_TIMESTAMP, DatabaseConstants.SUM_STORY_TOTAL, DatabaseConstants.FEED_TITLE };
String[] groupFrom = new String[] { DatabaseConstants.STORY_TITLE, DatabaseConstants.STORY_SHORT_CONTENT, DatabaseConstants.STORY_AUTHORS, DatabaseConstants.STORY_TIMESTAMP, DatabaseConstants.STORY_INTELLIGENCE_TOTAL, DatabaseConstants.FEED_TITLE };
int[] groupTo = new int[] { R.id.row_item_title, R.id.row_item_content, R.id.row_item_author, R.id.row_item_date, R.id.row_item_sidebar, R.id.row_item_feedtitle };
adapter = new MultipleFeedItemsAdapter(getActivity(), R.layout.row_folderitem, cursor, groupFrom, groupTo);
adapter.setViewBinder(new SocialItemViewBinder(getActivity()));
@@ -31,7 +31,7 @@ public class FeedItemListFragment extends ItemListFragment {
@Override
public void onLoadFinished(Loader<Cursor> loader, Cursor cursor) {
if ((adapter == null) && (cursor != null)) {
String[] groupFrom = new String[] { DatabaseConstants.STORY_TITLE, DatabaseConstants.STORY_SHORT_CONTENT, DatabaseConstants.STORY_AUTHORS, DatabaseConstants.STORY_TIMESTAMP, DatabaseConstants.SUM_STORY_TOTAL };
String[] groupFrom = new String[] { DatabaseConstants.STORY_TITLE, DatabaseConstants.STORY_SHORT_CONTENT, DatabaseConstants.STORY_AUTHORS, DatabaseConstants.STORY_TIMESTAMP, DatabaseConstants.STORY_INTELLIGENCE_TOTAL };
int[] groupTo = new int[] { R.id.row_item_title, R.id.row_item_content, R.id.row_item_author, R.id.row_item_date, R.id.row_item_sidebar };
adapter = new FeedItemsAdapter(getActivity(), feed, R.layout.row_item, cursor, groupFrom, groupTo);
adapter.setViewBinder(new FeedItemViewBinder(getActivity()));
@@ -21,7 +21,7 @@ public class FolderItemListFragment extends ItemListFragment {
@Override
public void onLoadFinished(Loader<Cursor> loader, Cursor cursor) {
if ((adapter == null) && (cursor != null)) {
String[] groupFrom = new String[] { DatabaseConstants.STORY_TITLE, DatabaseConstants.STORY_SHORT_CONTENT, DatabaseConstants.FEED_TITLE, DatabaseConstants.STORY_TIMESTAMP, DatabaseConstants.SUM_STORY_TOTAL, DatabaseConstants.STORY_AUTHORS };
String[] groupFrom = new String[] { DatabaseConstants.STORY_TITLE, DatabaseConstants.STORY_SHORT_CONTENT, DatabaseConstants.FEED_TITLE, DatabaseConstants.STORY_TIMESTAMP, DatabaseConstants.STORY_INTELLIGENCE_TOTAL, DatabaseConstants.STORY_AUTHORS };
int[] groupTo = new int[] { R.id.row_item_title, R.id.row_item_content, R.id.row_item_feedtitle, R.id.row_item_date, R.id.row_item_sidebar, R.id.row_item_author };
adapter = new MultipleFeedItemsAdapter(getActivity(), R.layout.row_folderitem, cursor, groupFrom, groupTo);
adapter.setViewBinder(new FeedItemViewBinder(getActivity()));
@ -39,6 +39,7 @@ import com.newsblur.domain.SocialFeed;
|
|||
import com.newsblur.util.AppConstants;
|
||||
import com.newsblur.util.FeedSet;
|
||||
import com.newsblur.util.FeedUtils;
|
||||
import com.newsblur.util.MarkAllReadConfirmation;
|
||||
import com.newsblur.util.PrefConstants;
|
||||
import com.newsblur.util.PrefsUtils;
|
||||
import com.newsblur.util.StateFilter;
|
||||
|
@ -211,6 +212,8 @@ public class FolderListFragment extends NbFragment implements OnCreateContextMen
|
|||
case ExpandableListView.PACKED_POSITION_TYPE_GROUP:
|
||||
if (adapter.isRowSavedStories(groupPosition)) break;
|
||||
if (adapter.isRowReadStories(groupPosition)) break;
|
||||
if (groupPosition == FolderListAdapter.GLOBAL_SHARED_STORIES_GROUP_POSITION) break;
|
||||
if (groupPosition == FolderListAdapter.ALL_SHARED_STORIES_GROUP_POSITION) break;
|
||||
inflater.inflate(R.menu.context_folder, menu);
|
||||
break;
|
||||
|
||||
|
@ -244,20 +247,25 @@ public class FolderListFragment extends NbFragment implements OnCreateContextMen
|
|||
return true;
|
||||
} else if (item.getItemId() == R.id.menu_mark_feed_as_read) {
|
||||
String feedId = adapter.getChild(groupPosition, childPosition);
|
||||
FeedSet fs = null;
|
||||
if (groupPosition == FolderListAdapter.ALL_SHARED_STORIES_GROUP_POSITION) {
|
||||
SocialFeed socialFeed = adapter.getSocialFeed(feedId);
|
||||
FeedUtils.markFeedsRead(FeedSet.singleSocialFeed(socialFeed.userId, socialFeed.username), null, null, getActivity());
|
||||
fs = FeedSet.singleSocialFeed(socialFeed.userId, socialFeed.username);
|
||||
} else {
|
||||
FeedUtils.markFeedsRead(FeedSet.singleFeed(feedId), null, null, getActivity());
|
||||
fs = FeedSet.singleFeed(feedId);
|
||||
}
|
||||
|
||||
markFeedsAsRead(fs);
|
||||
return true;
|
||||
} else if (item.getItemId() == R.id.menu_mark_folder_as_read) {
|
||||
if (!adapter.isFolderRoot(groupPosition)) {
|
||||
FeedSet fs = null;
|
||||
if (!adapter.isFolderRoot(groupPosition)) {
|
||||
String folderName = adapter.getGroup(groupPosition);
|
||||
FeedUtils.markFeedsRead(FeedUtils.feedSetFromFolderName(folderName), null, null, getActivity());
|
||||
fs = FeedUtils.feedSetFromFolderName(folderName);
|
||||
} else {
|
||||
FeedUtils.markFeedsRead(FeedSet.allFeeds(), null, null, getActivity());
|
||||
fs = FeedSet.allFeeds();
|
||||
}
|
||||
markFeedsAsRead(fs);
|
||||
return true;
|
||||
} else if (item.getItemId() == R.id.menu_choose_folders) {
|
||||
DialogFragment chooseFoldersFragment = ChooseFoldersFragment.newInstance(adapter.getFeed(adapter.getChild(groupPosition, childPosition)));
|
||||
|
@ -267,6 +275,16 @@ public class FolderListFragment extends NbFragment implements OnCreateContextMen
|
|||
return super.onContextItemSelected(item);
|
||||
}
|
||||
|
||||
private void markFeedsAsRead(FeedSet fs) {
|
||||
MarkAllReadConfirmation confirmation = PrefsUtils.getMarkAllReadConfirmation(getActivity());
|
||||
if (confirmation.feedSetRequiresConfirmation(fs)) {
|
||||
MarkAllReadDialogFragment dialog = MarkAllReadDialogFragment.newInstance(fs);
|
||||
dialog.show(getFragmentManager(), "dialog");
|
||||
} else {
|
||||
FeedUtils.markFeedsRead(fs, null, null, getActivity());
|
||||
}
|
||||
}
|
||||
|
||||
public void changeState(StateFilter state) {
|
||||
currentState = state;
|
||||
PrefsUtils.setStateFilter(getActivity(), state);
|
||||
|
|
|
@@ -24,7 +24,7 @@ public class GlobalSharedStoriesItemListFragment extends ItemListFragment {
@Override
public void onLoadFinished(Loader<Cursor> loader, Cursor cursor) {
if ((adapter == null) && (cursor != null)) {
String[] groupFrom = new String[] { DatabaseConstants.STORY_TITLE, DatabaseConstants.STORY_SHORT_CONTENT, DatabaseConstants.STORY_AUTHORS, DatabaseConstants.STORY_TIMESTAMP, DatabaseConstants.SUM_STORY_TOTAL, DatabaseConstants.FEED_TITLE };
String[] groupFrom = new String[] { DatabaseConstants.STORY_TITLE, DatabaseConstants.STORY_SHORT_CONTENT, DatabaseConstants.STORY_AUTHORS, DatabaseConstants.STORY_TIMESTAMP, DatabaseConstants.STORY_INTELLIGENCE_TOTAL, DatabaseConstants.FEED_TITLE };
int[] groupTo = new int[] { R.id.row_item_title, R.id.row_item_content, R.id.row_item_author, R.id.row_item_date, R.id.row_item_sidebar, R.id.row_item_feedtitle };
adapter = new MultipleFeedItemsAdapter(getActivity(), R.layout.row_folderitem, cursor, groupFrom, groupTo, true);
adapter.setViewBinder(new SocialItemViewBinder(getActivity(), true));
@ -55,7 +55,6 @@ public abstract class ItemListFragment extends NbFragment implements OnScrollLis
|
|||
protected DefaultFeedView defaultFeedView;
|
||||
protected StateFilter intelState;
|
||||
private boolean cursorSeenYet = false;
|
||||
private boolean firstStorySeenYet = false;
|
||||
private boolean stopLoading = false;
|
||||
|
||||
// loading indicator for when stories are present but stale (at top of list)
|
||||
|
@ -145,7 +144,7 @@ public abstract class ItemListFragment extends NbFragment implements OnScrollLis
|
|||
*/
|
||||
public void resetEmptyState() {
|
||||
cursorSeenYet = false;
|
||||
firstStorySeenYet = false;
|
||||
FeedUtils.dbHelper.clearStorySession();
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -203,6 +202,9 @@ public abstract class ItemListFragment extends NbFragment implements OnScrollLis
|
|||
|
||||
@Override
|
||||
public synchronized void onScroll(AbsListView view, int firstVisible, int visibleCount, int totalCount) {
|
||||
// the framework likes to trigger this on init before we even known counts, so disregard those
|
||||
if (!cursorSeenYet) return;
|
||||
|
||||
// load an extra page or two worth of stories past the viewport
|
||||
int desiredStoryCount = firstVisible + (visibleCount*2) + 1;
|
||||
triggerRefresh(desiredStoryCount, totalCount);
|
||||
|
@ -230,7 +232,7 @@ public abstract class ItemListFragment extends NbFragment implements OnScrollLis
|
|||
try { getActivity().finish(); } catch (Exception e) {;}
|
||||
return null;
|
||||
}
|
||||
return FeedUtils.dbHelper.getStoriesLoader(getFeedSet(), intelState);
|
||||
return FeedUtils.dbHelper.getActiveStoriesLoader(getFeedSet());
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -240,20 +242,6 @@ public abstract class ItemListFragment extends NbFragment implements OnScrollLis
|
|||
cursorSeenYet = true;
|
||||
if (cursor.getCount() < 1) {
|
||||
triggerRefresh(1, 0);
|
||||
} else {
|
||||
if (!firstStorySeenYet) {
|
||||
// once we have at least a single story, we can instruct the sync service as to how to safely
|
||||
// activate new stories we recieve
|
||||
firstStorySeenYet = true;
|
||||
cursor.moveToFirst();
|
||||
long cutoff = cursor.getLong(cursor.getColumnIndex(DatabaseConstants.STORY_TIMESTAMP));
|
||||
cursor.moveToPosition(-1);
|
||||
if (activity.getStoryOrder() == StoryOrder.NEWEST) {
|
||||
NBSyncService.setActivationMode(NBSyncService.ActivationMode.OLDER, cutoff);
|
||||
} else {
|
||||
NBSyncService.setActivationMode(NBSyncService.ActivationMode.NEWER, cutoff);
|
||||
}
|
||||
}
|
||||
}
|
||||
adapter.swapCursor(cursor);
|
||||
}
|
||||
|
@ -344,7 +332,7 @@ public abstract class ItemListFragment extends NbFragment implements OnScrollLis
|
|||
int truePosition = position - 1;
|
||||
Story story = adapter.getStory(truePosition);
|
||||
if (getActivity().isFinishing()) return;
|
||||
UIUtils.startReadingActivity(getFeedSet(), story.storyHash, getActivity(), false);
|
||||
UIUtils.startReadingActivity(getFeedSet(), story.storyHash, getActivity());
|
||||
}
|
||||
|
||||
protected void setupBezelSwipeDetector(View v) {
|
||||
|
|
|
@ -1,6 +1,8 @@
|
|||
package com.newsblur.fragment;
|
||||
|
||||
import com.newsblur.R;
|
||||
import com.newsblur.util.FeedSet;
|
||||
import com.newsblur.util.FeedUtils;
|
||||
|
||||
import android.app.Activity;
|
||||
import android.app.AlertDialog;
|
||||
|
@ -10,23 +12,22 @@ import android.os.Bundle;
|
|||
import android.app.DialogFragment;
|
||||
|
||||
public class MarkAllReadDialogFragment extends DialogFragment {
|
||||
private static final String FOLDER_NAME = "folder_name";
|
||||
private static final String FEED_SET = "feed_set";
|
||||
|
||||
public interface MarkAllReadDialogListener {
|
||||
public void onMarkAllRead();
|
||||
public void onCancel();
|
||||
void onMarkAllRead(FeedSet feedSet);
|
||||
}
|
||||
|
||||
private MarkAllReadDialogListener listener;
|
||||
|
||||
public static MarkAllReadDialogFragment newInstance(String folderName) {
|
||||
public static MarkAllReadDialogFragment newInstance(FeedSet feedSet) {
|
||||
MarkAllReadDialogFragment fragment = new MarkAllReadDialogFragment();
|
||||
Bundle args = new Bundle();
|
||||
args.putString(FOLDER_NAME, folderName);
|
||||
args.putSerializable(FEED_SET, feedSet);
|
||||
fragment.setArguments(args);
|
||||
return fragment;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void onAttach(Activity activity) {
|
||||
super.onAttach(activity);
|
||||
|
@ -36,15 +37,25 @@ public class MarkAllReadDialogFragment extends DialogFragment {
|
|||
@Override
|
||||
public Dialog onCreateDialog(Bundle savedInstanceState) {
|
||||
AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
|
||||
builder.setTitle(getArguments().getString(FOLDER_NAME))
|
||||
|
||||
final FeedSet feedSet = (FeedSet)getArguments().getSerializable(FEED_SET);
|
||||
String title = null;
|
||||
if (feedSet.isAllNormal()) {
|
||||
title = getResources().getString(R.string.all_stories);
|
||||
} else if (feedSet.isFolder()) {
|
||||
title = feedSet.getFolderName();
|
||||
} else if (feedSet.isSingleSocial()) {
|
||||
title = FeedUtils.getSocialFeed(feedSet.getSingleSocialFeed().getKey()).feedTitle;
|
||||
} else {
|
||||
title = FeedUtils.getFeed(feedSet.getSingleFeed()).title;
|
||||
}
|
||||
|
||||
builder.setTitle(title)
|
||||
.setItems(R.array.mark_all_read_options, new DialogInterface.OnClickListener() {
|
||||
public void onClick(DialogInterface dialog, int which) {
|
||||
if (which == 0) {
|
||||
listener.onMarkAllRead();
|
||||
} else {
|
||||
listener.onCancel();
|
||||
listener.onMarkAllRead(feedSet);
|
||||
}
|
||||
|
||||
}
|
||||
});
|
||||
return builder.create();
|
||||
|
|
|
@ -144,7 +144,7 @@ public abstract class ProfileActivityDetailsFragment extends Fragment implements
|
|||
context.startActivity(intent);
|
||||
}
|
||||
} else if (activity.category == Category.STAR) {
|
||||
UIUtils.startReadingActivity(FeedSet.allSaved(), activity.storyHash, context, false);
|
||||
UIUtils.startReadingActivity(FeedSet.allSaved(), activity.storyHash, context);
|
||||
} else if (isSocialFeedCategory(activity)) {
|
||||
// Strip the social: prefix from feedId
|
||||
String socialFeedId = activity.feedId.substring(7);
|
||||
|
@ -152,7 +152,7 @@ public abstract class ProfileActivityDetailsFragment extends Fragment implements
|
|||
if (feed == null) {
|
||||
Toast.makeText(context, R.string.profile_do_not_follow, Toast.LENGTH_SHORT).show();
|
||||
} else {
|
||||
UIUtils.startReadingActivity(FeedSet.singleSocialFeed(feed.userId, feed.username), activity.storyHash, context, true);
|
||||
UIUtils.startReadingActivity(FeedSet.singleSocialFeed(feed.userId, feed.username), activity.storyHash, context);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -29,7 +29,7 @@ public class ReadStoriesItemListFragment extends ItemListFragment {
@Override
public void onLoadFinished(Loader<Cursor> loader, Cursor cursor) {
if ((adapter == null) && (cursor != null)) {
String[] groupFrom = new String[] { DatabaseConstants.STORY_TITLE, DatabaseConstants.STORY_SHORT_CONTENT, DatabaseConstants.STORY_AUTHORS, DatabaseConstants.STORY_TIMESTAMP, DatabaseConstants.SUM_STORY_TOTAL, DatabaseConstants.FEED_TITLE };
String[] groupFrom = new String[] { DatabaseConstants.STORY_TITLE, DatabaseConstants.STORY_SHORT_CONTENT, DatabaseConstants.STORY_AUTHORS, DatabaseConstants.STORY_TIMESTAMP, DatabaseConstants.STORY_INTELLIGENCE_TOTAL, DatabaseConstants.FEED_TITLE };
int[] groupTo = new int[] { R.id.row_item_title, R.id.row_item_content, R.id.row_item_author, R.id.row_item_date, R.id.row_item_sidebar, R.id.row_item_feedtitle };
adapter = new MultipleFeedItemsAdapter(getActivity(), R.layout.row_folderitem, cursor, groupFrom, groupTo);
adapter.setViewBinder(new SocialItemViewBinder(getActivity(), true));
@ -30,15 +30,13 @@ public class SavedStoriesItemListFragment extends ItemListFragment {
|
|||
@Override
|
||||
public void onLoadFinished(Loader<Cursor> loader, Cursor cursor) {
|
||||
if ((adapter == null) && (cursor != null)) {
|
||||
String[] groupFrom = new String[] { DatabaseConstants.STORY_TITLE, DatabaseConstants.STORY_SHORT_CONTENT, DatabaseConstants.STORY_AUTHORS, DatabaseConstants.STORY_TIMESTAMP, DatabaseConstants.SUM_STORY_TOTAL, DatabaseConstants.FEED_TITLE };
|
||||
String[] groupFrom = new String[] { DatabaseConstants.STORY_TITLE, DatabaseConstants.STORY_SHORT_CONTENT, DatabaseConstants.STORY_AUTHORS, DatabaseConstants.STORY_TIMESTAMP, DatabaseConstants.STORY_INTELLIGENCE_TOTAL, DatabaseConstants.FEED_TITLE };
|
||||
int[] groupTo = new int[] { R.id.row_item_title, R.id.row_item_content, R.id.row_item_author, R.id.row_item_date, R.id.row_item_sidebar, R.id.row_item_feedtitle };
|
||||
adapter = new MultipleFeedItemsAdapter(getActivity(), R.layout.row_folderitem, cursor, groupFrom, groupTo, true);
|
||||
adapter.setViewBinder(new SocialItemViewBinder(getActivity(), true));
|
||||
itemList.setAdapter(adapter);
|
||||
}
|
||||
super.onLoadFinished(loader, cursor);
|
||||
// every time we see a set of saved stories, tag them so they don't disappear during this reading session
|
||||
FeedUtils.dbHelper.markSavedReadingSession();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@@ -21,7 +21,7 @@ public class SocialFeedItemListFragment extends ItemListFragment {
@Override
public void onLoadFinished(Loader<Cursor> loader, Cursor cursor) {
if ((adapter == null) && (cursor != null)) {
String[] groupFroms = new String[] { DatabaseConstants.STORY_TITLE, DatabaseConstants.FEED_FAVICON_URL, DatabaseConstants.FEED_TITLE, DatabaseConstants.STORY_SHORT_CONTENT, DatabaseConstants.STORY_TIMESTAMP, DatabaseConstants.STORY_AUTHORS, DatabaseConstants.SUM_STORY_TOTAL};
String[] groupFroms = new String[] { DatabaseConstants.STORY_TITLE, DatabaseConstants.FEED_FAVICON_URL, DatabaseConstants.FEED_TITLE, DatabaseConstants.STORY_SHORT_CONTENT, DatabaseConstants.STORY_TIMESTAMP, DatabaseConstants.STORY_AUTHORS, DatabaseConstants.STORY_INTELLIGENCE_TOTAL};
int[] groupTos = new int[] { R.id.row_item_title, R.id.row_item_feedicon, R.id.row_item_feedtitle, R.id.row_item_content, R.id.row_item_date, R.id.row_item_author, R.id.row_item_sidebar};
adapter = new MultipleFeedItemsAdapter(getActivity(), R.layout.row_folderitem, cursor, groupFroms, groupTos);
adapter.setViewBinder(new SocialItemViewBinder(getActivity()));
@@ -9,6 +9,7 @@ import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.TimeUnit;

import android.content.ContentValues;
import android.content.Context;

@@ -52,11 +53,11 @@ import com.newsblur.util.PrefsUtils;
import com.newsblur.util.ReadFilter;
import com.newsblur.util.StoryOrder;

import com.squareup.okhttp.FormEncodingBuilder;
import com.squareup.okhttp.OkHttpClient;
import com.squareup.okhttp.Request;
import com.squareup.okhttp.RequestBody;
import com.squareup.okhttp.Response;
import okhttp3.FormBody;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;

public class APIManager {

@@ -84,7 +85,11 @@ public class APIManager {
Build.VERSION.RELEASE + " " +
appVersion + ")";

this.httpClient = new OkHttpClient();
this.httpClient = new OkHttpClient.Builder()
.connectTimeout(AppConstants.API_CONN_TIMEOUT_SECONDS, TimeUnit.SECONDS)
.readTimeout(AppConstants.API_READ_TIMEOUT_SECONDS, TimeUnit.SECONDS)
.followSslRedirects(true)
.build();
}

public LoginResponse login(final String username, final String password) {

@@ -111,8 +116,9 @@ public class APIManager {
// just get the cookie from the 302 and stop, we directly use a one-off OkHttpClient.
Request.Builder requestBuilder = new Request.Builder().url(urlString);
addCookieHeader(requestBuilder);
OkHttpClient noredirHttpClient = new OkHttpClient();
noredirHttpClient.setFollowRedirects(false);
OkHttpClient noredirHttpClient = new OkHttpClient.Builder()
.followRedirects(false)
.build();
try {
Response response = noredirHttpClient.newCall(requestBuilder.build()).execute();
if (!response.isRedirect()) return false;

@@ -661,7 +667,7 @@ public class APIManager {
}

private APIResponse post(final String urlString, final ContentValues values) {
FormEncodingBuilder formEncodingBuilder = new FormEncodingBuilder();
FormBody.Builder formEncodingBuilder = new FormBody.Builder();
for (Entry<String, Object> entry : values.valueSet()) {
formEncodingBuilder.add(entry.getKey(), (String)entry.getValue());
}
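A minimal sketch of the okhttp3 client construction the hunks above switch to: the mutable setters (setConnectTimeout, setFollowRedirects) are gone, and clients are configured once through OkHttpClient.Builder. The literal timeout values below are illustrative stand-ins for the AppConstants entries:

import java.util.concurrent.TimeUnit;
import okhttp3.OkHttpClient;

// Illustrative sketch; mirrors the Builder-based construction shown in the diff above.
class HttpClientSketch {
    // Long-lived API client with explicit timeouts instead of okhttp's infinite defaults.
    static OkHttpClient apiClient() {
        return new OkHttpClient.Builder()
                .connectTimeout(60, TimeUnit.SECONDS)   // stand-in for AppConstants.API_CONN_TIMEOUT_SECONDS
                .readTimeout(120, TimeUnit.SECONDS)     // stand-in for AppConstants.API_READ_TIMEOUT_SECONDS
                .followSslRedirects(true)
                .build();
    }

    // One-off client that stops at the 302 so the login cookie can be read from the redirect.
    static OkHttpClient noRedirectClient() {
        return new OkHttpClient.Builder()
                .followRedirects(false)
                .build();
    }
}
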
@ -13,9 +13,10 @@ import com.newsblur.network.domain.LoginResponse;
|
|||
import com.newsblur.network.domain.NewsBlurResponse;
|
||||
import com.newsblur.network.domain.RegisterResponse;
|
||||
import com.newsblur.util.AppConstants;
|
||||
import com.squareup.okhttp.OkHttpClient;
|
||||
import com.squareup.okhttp.Request;
|
||||
import com.squareup.okhttp.Response;
|
||||
|
||||
import okhttp3.OkHttpClient;
|
||||
import okhttp3.Request;
|
||||
import okhttp3.Response;
|
||||
|
||||
/**
|
||||
* A JSON-encoded response from the API servers. This class encodes the possible outcomes of
|
||||
|
@ -53,7 +54,7 @@ public class APIResponse {
|
|||
this.responseCode = response.code();
|
||||
|
||||
if (responseCode != expectedReturnCode) {
|
||||
Log.e(this.getClass().getName(), "API returned error code " + response.code() + " calling " + request.urlString() + " - expected " + expectedReturnCode);
|
||||
Log.e(this.getClass().getName(), "API returned error code " + response.code() + " calling " + request.url().toString() + " - expected " + expectedReturnCode);
|
||||
this.isError = true;
|
||||
return;
|
||||
}
|
||||
|
@ -65,7 +66,7 @@ public class APIResponse {
|
|||
this.responseBody = response.body().string();
|
||||
readTime = System.currentTimeMillis() - startTime;
|
||||
} catch (Exception e) {
|
||||
Log.e(this.getClass().getName(), e.getClass().getName() + " (" + e.getMessage() + ") reading " + request.urlString(), e);
|
||||
Log.e(this.getClass().getName(), e.getClass().getName() + " (" + e.getMessage() + ") reading " + request.url().toString(), e);
|
||||
this.isError = true;
|
||||
return;
|
||||
}
|
||||
|
@ -83,11 +84,11 @@ public class APIResponse {
|
|||
}
|
||||
|
||||
if (AppConstants.VERBOSE_LOG_NET) {
|
||||
Log.d(this.getClass().getName(), String.format("called %s in %dms and %dms to read %dB", request.urlString(), connectTime, readTime, responseBody.length()));
|
||||
Log.d(this.getClass().getName(), String.format("called %s in %dms and %dms to read %dB", request.url().toString(), connectTime, readTime, responseBody.length()));
|
||||
}
|
||||
|
||||
} catch (IOException ioe) {
|
||||
Log.e(this.getClass().getName(), "Error (" + ioe.getMessage() + ") calling " + request.urlString(), ioe);
|
||||
Log.e(this.getClass().getName(), "Error (" + ioe.getMessage() + ") calling " + request.url().toString(), ioe);
|
||||
this.isError = true;
|
||||
return;
|
||||
}
|
||||
|
|
|
@ -65,12 +65,6 @@ import java.util.concurrent.TimeUnit;
|
|||
*/
|
||||
public class NBSyncService extends Service {
|
||||
|
||||
/**
|
||||
* Mode switch for which newly received stories are suitable for display so
|
||||
* that they don't disrupt actively visible pager and list offsets.
|
||||
*/
|
||||
public enum ActivationMode { ALL, OLDER, NEWER };
|
||||
|
||||
private static final Object WAKELOCK_MUTEX = new Object();
|
||||
private static final Object PENDING_FEED_MUTEX = new Object();
|
||||
|
||||
|
@ -83,8 +77,6 @@ public class NBSyncService extends Service {
|
|||
private volatile static boolean DoFeedsFolders = false;
|
||||
private volatile static boolean DoUnreads = false;
|
||||
private volatile static boolean HaltNow = false;
|
||||
private volatile static ActivationMode ActMode = ActivationMode.ALL;
|
||||
private volatile static long ModeCutoff = 0L;
|
||||
|
||||
/** Informational flag only, as to whether we were offline last time we cycled. */
|
||||
public volatile static boolean OfflineNow = false;
|
||||
|
@ -113,6 +105,9 @@ public class NBSyncService extends Service {
|
|||
private static Map<FeedSet,Integer> FeedStoriesSeen;
|
||||
static { FeedStoriesSeen = new HashMap<FeedSet,Integer>(); }
|
||||
|
||||
/** Feed to reset to zero-state, so it is fetched fresh, presumably with new filters. */
|
||||
private static FeedSet ResetFeed;
|
||||
|
||||
/** Actions that may need to be double-checked locally due to overlapping API calls. */
|
||||
private static List<ReadingAction> FollowupActions;
|
||||
static { FollowupActions = new ArrayList<ReadingAction>(); }
|
||||
|
@ -398,7 +393,6 @@ public class NBSyncService extends Service {
|
|||
|
||||
if (stopSync()) return;
|
||||
if (backoffBackgroundCalls()) return;
|
||||
if (ActMode != ActivationMode.ALL) return;
|
||||
if (dbHelper.getActions(false).getCount() > 0) return;
|
||||
|
||||
FFSyncRunning = true;
|
||||
|
@ -425,7 +419,6 @@ public class NBSyncService extends Service {
|
|||
}
|
||||
|
||||
if (stopSync()) return;
|
||||
if (ActMode != ActivationMode.ALL) return;
|
||||
if (dbHelper.getActions(false).getCount() > 0) return;
|
||||
|
||||
// a metadata sync invalidates pagination and feed status
|
||||
|
@ -586,6 +579,14 @@ public class NBSyncService extends Service {
|
|||
* Fetch stories needed because the user is actively viewing a feed or folder.
|
||||
*/
|
||||
private void syncPendingFeedStories() {
|
||||
// before anything else, see if we need to quickly reset fetch state for a feed
|
||||
if (ResetFeed != null) {
|
||||
ExhaustedFeeds.remove(ResetFeed);
|
||||
FeedStoriesSeen.remove(ResetFeed);
|
||||
FeedPagesSeen.remove(ResetFeed);
|
||||
ResetFeed = null;
|
||||
}
|
||||
|
||||
FeedSet fs = PendingFeed;
|
||||
boolean finished = false;
|
||||
if (fs == null) {
|
||||
|
@ -615,9 +616,9 @@ public class NBSyncService extends Service {
|
|||
if (FlushRecounts) return;
|
||||
// don't let the page loop block actions
|
||||
if (dbHelper.getActions(false).getCount() > 0) return;
|
||||
|
||||
// bail if the active view has changed
|
||||
if (!fs.equals(PendingFeed)) {
|
||||
// the active view has changed
|
||||
if (fs == null) finished = true;
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -629,27 +630,23 @@ public class NBSyncService extends Service {
|
|||
|
||||
if (! isStoryResponseGood(apiResponse)) return;
|
||||
|
||||
if (!fs.equals(PendingFeed)) {
|
||||
return;
|
||||
}
|
||||
|
||||
insertStories(apiResponse, fs);
|
||||
// re-do any very recent actions that were incorrectly overwritten by this page
|
||||
finishActions();
|
||||
NbActivity.updateAllActivities(NbActivity.UPDATE_STORY);
|
||||
|
||||
FeedPagesSeen.put(fs, pageNumber);
|
||||
totalStoriesSeen += apiResponse.stories.length;
|
||||
FeedStoriesSeen.put(fs, totalStoriesSeen);
|
||||
|
||||
// lock in the activation cutoff based upon the timestamp of the first
|
||||
// story received for a given pagination session. it will be the newest
|
||||
// or oldest story for the feedset, as dictated by order.
|
||||
if ((pageNumber == 1) && (apiResponse.stories.length > 0)) {
|
||||
ModeCutoff = apiResponse.stories[0].timestamp;
|
||||
}
|
||||
insertStories(apiResponse, fs);
|
||||
NbActivity.updateAllActivities(NbActivity.UPDATE_STORY);
|
||||
|
||||
if (apiResponse.stories.length == 0) {
|
||||
ExhaustedFeeds.add(fs);
|
||||
finished = true;
|
||||
return;
|
||||
}
|
||||
|
||||
// re-do any very recent actions that were incorrectly overwritten by this page
|
||||
finishActions();
|
||||
}
|
||||
finished = true;
|
||||
|
||||
|
@ -707,15 +704,15 @@ public class NBSyncService extends Service {
|
|||
// If this set of stories was found in response to the active search query, note
|
||||
// them as such in the DB so the UI can filter for them
|
||||
for (Story story : apiResponse.stories) {
|
||||
story.isSearchHit = true;
|
||||
story.searchHit = fs.getSearchQuery();
|
||||
}
|
||||
}
|
||||
|
||||
dbHelper.insertStories(apiResponse, ActMode, ModeCutoff);
|
||||
dbHelper.insertStories(apiResponse, true);
|
||||
}
|
||||
|
||||
void insertStories(StoriesResponse apiResponse) {
|
||||
dbHelper.insertStories(apiResponse, ActMode, ModeCutoff);
|
||||
dbHelper.insertStories(apiResponse, false);
|
||||
}
|
||||
|
||||
void incrementRunningChild() {
|
||||
|
@ -818,11 +815,12 @@ public class NBSyncService extends Service {
|
|||
if (ActionsRunning) return String.format(context.getResources().getString(R.string.sync_status_actions), lastActionCount);
|
||||
if (RecountsRunning) return context.getResources().getString(R.string.sync_status_recounts);
|
||||
if (FFSyncRunning) return context.getResources().getString(R.string.sync_status_ffsync);
|
||||
if (StorySyncRunning) return context.getResources().getString(R.string.sync_status_stories);
|
||||
if (UnreadsService.running()) return String.format(context.getResources().getString(R.string.sync_status_unreads), UnreadsService.getPendingCount());
|
||||
if (OriginalTextService.running()) return String.format(context.getResources().getString(R.string.sync_status_text), OriginalTextService.getPendingCount());
|
||||
if (ImagePrefetchService.running()) return String.format(context.getResources().getString(R.string.sync_status_images), ImagePrefetchService.getPendingCount());
|
||||
if (CleanupService.running()) return context.getResources().getString(R.string.sync_status_cleanup);
|
||||
if (!AppConstants.VERBOSE_LOG) return null;
|
||||
if (StorySyncRunning) return context.getResources().getString(R.string.sync_status_stories);
|
||||
return null;
|
||||
}
|
||||
|
||||
|
@ -838,18 +836,6 @@ public class NBSyncService extends Service {
|
|||
FlushRecounts = true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Tell the service which stories can be activated if received. See ActivationMode.
|
||||
*/
|
||||
public static void setActivationMode(ActivationMode actMode) {
|
||||
ActMode = actMode;
|
||||
}
|
||||
|
||||
public static void setActivationMode(ActivationMode actMode, long modeCutoff) {
|
||||
ActMode = actMode;
|
||||
ModeCutoff = modeCutoff;
|
||||
}
|
||||
|
||||
/**
|
||||
* Requests that the service fetch additional stories for the specified feed/folder. Returns
|
||||
* true if more will be fetched as a result of this request.
|
||||
|
@ -877,7 +863,7 @@ public class NBSyncService extends Service {
|
|||
alreadyPending = 0;
|
||||
}
|
||||
|
||||
if (AppConstants.VERBOSE_LOG) Log.d(NBSyncService.class.getName(), "have:" + alreadySeen + " want:" + desiredStoryCount + " pending:" + alreadyPending);
|
||||
if (AppConstants.VERBOSE_LOG) Log.d(NBSyncService.class.getName(), "callerhas: " + callerSeen + " have:" + alreadySeen + " want:" + desiredStoryCount + " pending:" + alreadyPending);
|
||||
if (desiredStoryCount <= alreadySeen) {
|
||||
return false;
|
||||
}
|
||||
|
@ -897,10 +883,12 @@ public class NBSyncService extends Service {
|
|||
}
|
||||
}
|
||||
|
||||
public static void resetFeeds() {
|
||||
ExhaustedFeeds.clear();
|
||||
FeedPagesSeen.clear();
|
||||
FeedStoriesSeen.clear();
|
||||
/**
|
||||
* Reset the API pagniation state for the given feedset, presumably because the order or filter changed.
|
||||
*/
|
||||
public static void resetFetchState(FeedSet fs) {
|
||||
Log.d(NBSyncService.class.getName(), "requesting feed fetch state reset");
|
||||
ResetFeed = fs;
|
||||
}
|
||||
|
||||
public static void getOriginalText(String hash) {
|
||||
|
@ -933,7 +921,9 @@ public class NBSyncService extends Service {
|
|||
clearPendingStoryRequest();
|
||||
FollowupActions.clear();
|
||||
RecountCandidates.clear();
|
||||
resetFeeds();
|
||||
ExhaustedFeeds.clear();
|
||||
FeedPagesSeen.clear();
|
||||
FeedStoriesSeen.clear();
|
||||
OriginalTextService.clear();
|
||||
UnreadsService.clear();
|
||||
ImagePrefetchService.clear();
|
||||
|
|
|
@ -52,27 +52,28 @@ public class UnreadsService extends SubService {
|
|||
NavigableMap<String,String> sortingMap = new TreeMap<String,String>();
|
||||
UnreadStoryHashesResponse unreadHashes = parent.apiManager.getUnreadStoryHashes();
|
||||
|
||||
if (parent.stopSync()) return;
|
||||
// note all the stories we thought were unread before. if any fail to appear in
|
||||
// the API request for unreads, we will mark them as read
|
||||
List<String> oldUnreadHashes = parent.dbHelper.getUnreadStoryHashes();
|
||||
|
||||
// process the api response, both bookkeeping no-longer-unread stories and populating
|
||||
// the sortation map we will use to create the fetch list for step two
|
||||
for (Entry<String, List<String[]>> entry : unreadHashes.unreadHashes.entrySet()) {
|
||||
feedloop: for (Entry<String, List<String[]>> entry : unreadHashes.unreadHashes.entrySet()) {
|
||||
String feedId = entry.getKey();
|
||||
// ignore unreads from orphaned feeds
|
||||
if( ! parent.orphanFeedIds.contains(feedId)) {
|
||||
if(parent.orphanFeedIds.contains(feedId)) continue feedloop;
|
||||
for (String[] newHash : entry.getValue()) {
|
||||
// only fetch the reported unreads if we don't already have them
|
||||
List<String> existingHashes = parent.dbHelper.getStoryHashesForFeed(feedId);
|
||||
for (String[] newHash : entry.getValue()) {
|
||||
if (!existingHashes.contains(newHash[0])) {
|
||||
sortingMap.put(newHash[1]+newHash[0], newHash[0]);
|
||||
}
|
||||
if (!oldUnreadHashes.contains(newHash[0])) {
|
||||
sortingMap.put(newHash[1]+newHash[0], newHash[0]);
|
||||
} else {
|
||||
oldUnreadHashes.remove(newHash[0]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (parent.stopSync()) return;
|
||||
// now that we have the sorted set of hashes, turn them into a list over which we
|
||||
// can iterate to fetch them
|
||||
if (PrefsUtils.getDefaultStoryOrder(parent) == StoryOrder.NEWEST) {
|
||||
|
@ -90,6 +91,7 @@ public class UnreadsService extends SubService {
|
|||
}
|
||||
|
||||
private void getNewUnreadStories() {
|
||||
int totalCount = StoryHashQueue.size();
|
||||
unreadsyncloop: while (StoryHashQueue.size() > 0) {
|
||||
if (parent.stopSync()) return;
|
||||
if(!PrefsUtils.isOfflineEnabled(parent)) return;
|
||||
|
|
|
@@ -45,6 +45,14 @@ public class AppConstants {
// for how long to back off from background syncs after a hard API failure
public static final long API_BACKGROUND_BACKOFF_MILLIS = 5L * 60L * 1000L;

// timeouts for API calls, set to something more sane than the default of infinity
public static final long API_CONN_TIMEOUT_SECONDS = 60L;
public static final long API_READ_TIMEOUT_SECONDS = 120L;

// timeouts for image prefetching, which are a bit tighter, since they are only for caching
public static final long IMAGE_PREFETCH_CONN_TIMEOUT_SECONDS = 10L;
public static final long IMAGE_PREFETCH_READ_TIMEOUT_SECONDS = 30L;

// when generating a request for multiple feeds, limit the total number requested to prevent
// unworkably long URLs
public static final int MAX_FEED_LIST_SIZE = 250;

@@ -56,7 +64,7 @@ public class AppConstants {
public static final int MAX_READ_STORIES_STORED = 500;

// how many unread stories to fetch via hash at a time
public static final int UNREAD_FETCH_BATCH_SIZE = 100;
public static final int UNREAD_FETCH_BATCH_SIZE = 50;

// how many images to prefetch before updating the countdown UI
public static final int IMAGE_PREFETCH_BATCH_SIZE = 6;
@@ -172,6 +172,10 @@ public class FeedSet implements Serializable {
return (((savedFeeds != null) && (savedFeeds.size() < 1)) || ((savedTags != null) && (savedTags.size() < 1)));
}

public boolean isSingleSocial() {
return ((socialFeeds != null) && (socialFeeds.size() == 1));
}

public boolean isGlobalShared() {
return this.isGlobalShared;
}
@ -98,29 +98,14 @@ public class FeedUtils {
|
|||
new AsyncTask<Void, Void, Void>() {
|
||||
@Override
|
||||
protected Void doInBackground(Void... arg) {
|
||||
NBSyncService.resetFeeds();
|
||||
try {
|
||||
dbHelper.clearReadingSession();
|
||||
dbHelper.clearStorySession();
|
||||
} catch (Exception e) {
|
||||
; // this one call can evade the on-upgrade DB wipe and throw exceptions
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
|
||||
}
|
||||
|
||||
public static void activateAllStories() {
|
||||
new AsyncTask<Void, Void, Void>() {
|
||||
@Override
|
||||
protected Void doInBackground(Void... arg) {
|
||||
try {
|
||||
dbHelper.markStoriesActive(NBSyncService.ActivationMode.ALL, 0L);
|
||||
} catch (Exception e) {
|
||||
; // this call can evade the on-upgrade DB wipe and throw exceptions
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
|
||||
}.execute();
|
||||
}
|
||||
|
||||
public static void markStoryUnread(final Story story, final Context context) {
|
||||
|
|
|
@@ -0,0 +1,26 @@
package com.newsblur.util;

/**
* Enum to represent mark all read confirmation preference.
* @author mark
*/
public enum MarkAllReadConfirmation {

FEED_AND_FOLDER("feed_and_folder"),
FOLDER_ONLY("folder_only"),
NONE("none");

private String parameterValue;

MarkAllReadConfirmation(String parameterValue) {
this.parameterValue = parameterValue;
}

public boolean feedSetRequiresConfirmation(FeedSet fs) {
if (fs.isFolder() || fs.isAllNormal()) {
return this != NONE;
} else {
return this == FEED_AND_FOLDER;
}
}
}
@@ -4,26 +4,28 @@ import android.content.Context;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;

import com.squareup.okhttp.OkHttpClient;
import com.squareup.okhttp.Request;
import com.squareup.okhttp.Response;

import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.concurrent.TimeUnit;

import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;

import okio.BufferedSink;
import okio.Okio;

public class NetworkUtils {

private static OkHttpClient httpClient = new OkHttpClient();
private static OkHttpClient ImageFetchHttpClient;

static {
// By default OkHttpClient follows redirects (inc HTTP -> HTTPS and HTTPS -> HTTP).
httpClient.setConnectTimeout(10, TimeUnit.SECONDS);
httpClient.setReadTimeout(30, TimeUnit.SECONDS);
ImageFetchHttpClient = new OkHttpClient.Builder()
.connectTimeout(AppConstants.IMAGE_PREFETCH_CONN_TIMEOUT_SECONDS, TimeUnit.SECONDS)
.readTimeout(AppConstants.IMAGE_PREFETCH_READ_TIMEOUT_SECONDS, TimeUnit.SECONDS)
.followSslRedirects(true)
.build();
}

public static boolean isOnline(Context context) {

@@ -36,7 +38,7 @@ public class NetworkUtils {
long bytesRead = 0;
try {
Request.Builder requestBuilder = new Request.Builder().url(url);
Response response = httpClient.newCall(requestBuilder.build()).execute();
Response response = ImageFetchHttpClient.newCall(requestBuilder.build()).execute();
if (response.isSuccessful()) {
BufferedSink sink = Okio.buffer(Okio.sink(file));
bytesRead = sink.writeAll(response.body().source());
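The download path above streams the response body straight to disk with Okio rather than buffering it in memory. A hedged, self-contained sketch of that pattern (the class name and the surrounding file/URL handling are illustrative):

import java.io.File;
import java.io.IOException;

import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;
import okio.BufferedSink;
import okio.Okio;

// Illustrative sketch of streaming an image fetch to a file, as in the hunk above.
class DownloadSketch {
    static long download(OkHttpClient client, String url, File file) throws IOException {
        Request request = new Request.Builder().url(url).build();
        Response response = client.newCall(request).execute();
        if (!response.isSuccessful()) return 0L;
        try (BufferedSink sink = Okio.buffer(Okio.sink(file))) {
            // writeAll() pulls the body source to EOF and reports how many bytes were written.
            return sink.writeAll(response.body().source());
        }
    }
}
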
@@ -67,4 +67,5 @@ public class PrefConstants {
public static final String LAST_CLEANUP_TIME = "last_cleanup_time";

public static final String VOLUME_KEY_NAVIGATION = "volume_key_navigation";
public static final String MARK_ALL_READ_CONFIRMATION = "pref_confirm_mark_all_read";
}
@ -237,10 +237,15 @@ public class PrefsUtils {
|
|||
prefs.edit().putLong(AppConstants.LAST_SYNC_TIME, (new Date()).getTime()).commit();
|
||||
}
|
||||
|
||||
public static boolean isTimeToVacuum(Context context) {
|
||||
private static long getLastVacuumTime(Context context) {
|
||||
SharedPreferences prefs = context.getSharedPreferences(PrefConstants.PREFERENCES, 0);
|
||||
long lastTime = prefs.getLong(PrefConstants.LAST_VACUUM_TIME, 1L);
|
||||
return ( (lastTime + AppConstants.VACUUM_TIME_MILLIS) < (new Date()).getTime() );
|
||||
return prefs.getLong(PrefConstants.LAST_VACUUM_TIME, 1L);
|
||||
}
|
||||
|
||||
public static boolean isTimeToVacuum(Context context) {
|
||||
long lastTime = getLastVacuumTime(context);
|
||||
long now = (new Date()).getTime();
|
||||
return ( (lastTime + AppConstants.VACUUM_TIME_MILLIS) < now );
|
||||
}
|
||||
|
||||
public static void updateLastVacuumTime(Context context) {
|
||||
|
@ -567,4 +572,9 @@ public class PrefsUtils {
|
|||
SharedPreferences prefs = context.getSharedPreferences(PrefConstants.PREFERENCES, 0);
|
||||
return VolumeKeyNavigation.valueOf(prefs.getString(PrefConstants.VOLUME_KEY_NAVIGATION, VolumeKeyNavigation.OFF.toString()));
|
||||
}
|
||||
|
||||
public static MarkAllReadConfirmation getMarkAllReadConfirmation(Context context) {
|
||||
SharedPreferences prefs = context.getSharedPreferences(PrefConstants.PREFERENCES, 0);
|
||||
return MarkAllReadConfirmation.valueOf(prefs.getString(PrefConstants.MARK_ALL_READ_CONFIRMATION, MarkAllReadConfirmation.FOLDER_ONLY.toString()));
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -5,8 +5,7 @@ package com.newsblur.util;
*/
public enum ReadFilter {
ALL("all"),
UNREAD("unread"), // in the app, this often means "unread and read since switching activities"
PURE_UNREAD("unread");
UNREAD("unread");

private String parameterValue;
@ -164,7 +164,7 @@ public class UIUtils {
|
|||
});
|
||||
}
|
||||
|
||||
public static void startReadingActivity(FeedSet fs, String startingHash, Context context, boolean ignoreFilters) {
|
||||
public static void startReadingActivity(FeedSet fs, String startingHash, Context context) {
|
||||
Class activityClass;
|
||||
if (fs.isAllSaved()) {
|
||||
activityClass = SavedStoriesReading.class;
|
||||
|
@ -189,9 +189,15 @@ public class UIUtils {
|
|||
Intent i = new Intent(context, activityClass);
|
||||
i.putExtra(Reading.EXTRA_FEEDSET, fs);
|
||||
i.putExtra(Reading.EXTRA_STORY_HASH, startingHash);
|
||||
if (ignoreFilters) {
|
||||
i.putExtra(SocialFeedReading.EXTRA_IGNORE_FILTERS, true);
|
||||
}
|
||||
context.startActivity(i);
|
||||
}
|
||||
|
||||
public static String getMemoryUsageDebug(Context context) {
|
||||
String memInfo = " (";
|
||||
android.app.ActivityManager activityManager = (android.app.ActivityManager) context.getSystemService(android.app.Activity.ACTIVITY_SERVICE);
|
||||
int[] pids = new int[]{android.os.Process.myPid()};
|
||||
android.os.Debug.MemoryInfo[] mi = activityManager.getProcessMemoryInfo(pids);
|
||||
memInfo = memInfo + (mi[0].getTotalPss() / 1024) + "MB used)";
|
||||
return memInfo;
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -48,7 +48,7 @@ public class FeedItemViewBinder implements ViewBinder {
((TextView) view).setText(cursor.getString(columnIndex).toUpperCase());
}
return true;
} else if (TextUtils.equals(columnName, DatabaseConstants.SUM_STORY_TOTAL)) {
} else if (TextUtils.equals(columnName, DatabaseConstants.STORY_INTELLIGENCE_TOTAL)) {
int score = cursor.getInt(columnIndex);
Drawable icon;
if (score > 0) {
@@ -39,7 +39,7 @@ public class SocialItemViewBinder implements ViewBinder {
String faviconUrl = cursor.getString(columnIndex);
FeedUtils.imageLoader.displayImage(faviconUrl, ((ImageView) view), true);
return true;
} else if (TextUtils.equals(columnName, DatabaseConstants.SUM_STORY_TOTAL)) {
} else if (TextUtils.equals(columnName, DatabaseConstants.STORY_INTELLIGENCE_TOTAL)) {
if (! this.ignoreIntel) {
int score = cursor.getInt(columnIndex);
Drawable icon;
@@ -1,5 +1,5 @@
BeautifulSoup==3.2.1
six==1.4.1
six==1.10.0
boto==2.8.0
celery==3.0.17
chardet==2.1.1

@@ -9,6 +9,7 @@ django-celery==3.0.11
django-compress==1.0.1
django-extensions==1.3.3
django-mailgun==0.2.1
django-qurl==0.1.1
django-redis-sessions==0.3.1
django-redis-cache==0.9.7
django-ses==0.4.1

@@ -16,9 +17,10 @@ django-subdomains==2.0.3
Django>=1.5,<1.6
python-digitalocean==1.6
dnspython==1.12.0
Fabric==1.8.3
Fabric==1.10.2
gunicorn==19.1.1
# psycopg2==2.5.2
psycopg2==2.5.2
mysql-python==1.2.5
httplib2==0.8
iconv==1.0
isodate==0.5.1

@@ -30,6 +32,7 @@ oauth2==1.5.211
psutil==2.1.0
pyes==0.99.5
simplejson==3.4.0
pillow==3.1.0
pyflakes==0.6.1
pymongo==2.6
python-dateutil==2.1

@@ -48,3 +51,5 @@ django-cors-headers==1.0.0
pyOpenSSL==0.14
pyasn1==0.1.7
ndg-httpsclient==0.3.2
numpy==1.10.4
scipy==0.17.0
@@ -1,6 +1,7 @@
[program:celerybeat]
command=/srv/newsblur/manage.py celerybeat --schedule=/srv/newsblur/data/celerybeat-schedule.db --loglevel=INFO
command=/srv/newsblur/manage.py celery beat --schedule=/srv/newsblur/data/celerybeat-schedule.db --loglevel=INFO
directory=/srv/newsblur
environment=PATH="/srv/newsblur/venv/newsblur/bin"
user=sclay
numprocs=1
stdout_logfile=/var/log/celerybeat.log
@@ -1,5 +1,6 @@
[program:celery]
command=/srv/newsblur/manage.py celeryd --loglevel=INFO -Q new_feeds,push_feeds,update_feeds
command=/srv/newsblur/manage.py celery worker --loglevel=INFO -Q new_feeds,push_feeds,update_feeds
environment=PATH="/srv/newsblur/venv/newsblur/bin"
directory=/srv/newsblur
user=sclay
numprocs=1
@ -1,6 +1,7 @@
|
|||
[program:celeryd_beat]
|
||||
command=/srv/newsblur/manage.py celeryd --loglevel=INFO -Q beat_tasks -c 3
|
||||
command=/srv/newsblur/manage.py celery worker --loglevel=INFO -Q beat_tasks -c 3
|
||||
directory=/srv/newsblur
|
||||
environment=PATH="/srv/newsblur/venv/newsblur/bin"
|
||||
user=sclay
|
||||
numprocs=1
|
||||
stdout_logfile=/var/log/celeryd_beat.log
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
[program:celeryd_beat_feeds]
|
||||
command=/srv/newsblur/manage.py celeryd --loglevel=INFO -Q beat_feeds_task -c 1
|
||||
command=/srv/newsblur/manage.py celery worker --loglevel=INFO -Q beat_feeds_task -c 1
|
||||
directory=/srv/newsblur
|
||||
environment=PATH="/srv/newsblur/venv/newsblur/bin"
|
||||
user=sclay
|
||||
numprocs=1
|
||||
stdout_logfile=/var/log/celeryd_beat_feeds.log
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
[program:celery]
|
||||
command=/srv/newsblur/manage.py celeryd --loglevel=INFO -Q new_feeds,push_feeds
|
||||
command=/srv/newsblur/manage.py celery worker --loglevel=INFO -Q new_feeds,push_feeds
|
||||
directory=/srv/newsblur
|
||||
environment=PATH="/srv/newsblur/venv/newsblur/bin"
|
||||
user=sclay
|
||||
numprocs=1
|
||||
stdout_logfile=/var/log/celeryd.log
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
[program:celeryd_search_indexer]
|
||||
command=/srv/newsblur/manage.py celeryd --loglevel=INFO -Q search_indexer -c 16
|
||||
command=/srv/newsblur/manage.py celery worker --loglevel=INFO -Q search_indexer -c 16
|
||||
directory=/srv/newsblur
|
||||
environment=PATH="/srv/newsblur/venv/newsblur/bin"
|
||||
user=sclay
|
||||
numprocs=1
|
||||
stdout_logfile=/var/log/celeryd_searchindexer.log
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
[program:celeryd_search_indexer_tasker]
|
||||
command=/srv/newsblur/manage.py celeryd --loglevel=INFO -Q search_indexer_tasker -c 2
|
||||
command=/srv/newsblur/manage.py celery worker --loglevel=INFO -Q search_indexer_tasker -c 2
|
||||
directory=/srv/newsblur
|
||||
environment=PATH="/srv/newsblur/venv/newsblur/bin"
|
||||
user=sclay
|
||||
numprocs=1
|
||||
stdout_logfile=/var/log/celeryd_searchindexer_tasker.log
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
[program:celeryd_work_queue]
|
||||
command=/srv/newsblur/manage.py celeryd --loglevel=INFO -Q work_queue
|
||||
command=/srv/newsblur/manage.py celery worker --loglevel=INFO -Q work_queue
|
||||
directory=/srv/newsblur
|
||||
environment=PATH="/srv/newsblur/venv/newsblur/bin"
|
||||
user=sclay
|
||||
numprocs=1
|
||||
stdout_logfile=/var/log/celeryd_workqueue.log
|
||||
|
|
|
@@ -1,6 +1,5 @@
[program:gunicorn]
#command=/home/conesus/newsblur/manage.py run_gunicorn -c gunicorn_conf.py
command=gunicorn_django -c config/gunicorn_conf.py
command=/srv/newsblur/venv/newsblur/bin/gunicorn -c config/gunicorn_conf.py wsgi
directory=/srv/newsblur
user=sclay
autostart=true
@ -8,3 +8,4 @@ autorestart=true
|
|||
priority=991
|
||||
stopsignal=HUP
|
||||
stdout_logfile = /srv/newsblur/logs/favicons.log
|
||||
environment = NODE_ENV=production
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
[program:node_original_page]
|
||||
command=node original_page.js
|
||||
directory=/srv/newsblur/node
|
||||
environment=NODE_ENV=production
|
||||
user=sclay
|
||||
autostart=true
|
||||
autorestart=true
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
[program:node_unread]
|
||||
command=node node/unread_counts.js
|
||||
directory=/srv/newsblur
|
||||
environment=NODE_ENV=production
|
||||
user=sclay
|
||||
autostart=true
|
||||
autorestart=true
|
||||
|
@ -8,4 +9,3 @@ autorestart=true
|
|||
priority=991
|
||||
stopsignal=HUP
|
||||
stdout_logfile = /srv/newsblur/logs/unread_counts.log
|
||||
environment = NODE_ENV=production
|
17
config/zshrc

@@ -1,20 +1,15 @@
# Path to your oh-my-zsh configuration.
plugins=(git osx ruby gem github pip rails)
export ZSH=$HOME/.oh-my-zsh

# Set to the name theme to load.
# Look in ~/.oh-my-zsh/themes/
export CLICOLOR='1'
export ZSH_THEME="risto"
export DISABLE_AUTO_UPDATE=true

export DISABLE_AUTO_UPDATE="true"
export WORKON_HOME=/srv/newsblur/venv
export PROJECT_HOME=/srv/newsblur
source /usr/local/bin/virtualenvwrapper.sh

# Set to this to use case-sensitive completion
export CASE_SENSITIVE="true"
export LC_COLLATE='C'
plugins=(git github pip zsh-syntax-highlighting virtualenvwrapper)
source $ZSH/oh-my-zsh.sh

export DISABLE_AUTO_UPDATE="true"
export PYTHONSTARTUP=$HOME/.pystartup
export LSCOLORS='gxgxcxdxBxegedabagacad'
198
fabfile.py
vendored
|
@ -1,15 +1,17 @@
|
|||
from fabric.api import cd, lcd, env, local, parallel, serial
|
||||
from fabric.api import put, run, settings, sudo
|
||||
from fabric.api import put, run, settings, sudo, prefix
|
||||
from fabric.operations import prompt
|
||||
from fabric.contrib import django
|
||||
from fabric.contrib import files
|
||||
from fabric.state import connections
|
||||
# from fabric.colors import red, green, blue, cyan, magenta, white, yellow
|
||||
from boto.s3.connection import S3Connection
|
||||
from boto.s3.key import Key
|
||||
from boto.ec2.connection import EC2Connection
|
||||
from fabric.contrib import django
|
||||
from fabric.state import connections
|
||||
from vendor import yaml
|
||||
from pprint import pprint
|
||||
from collections import defaultdict
|
||||
from contextlib import contextmanager as _contextmanager
|
||||
import os
|
||||
import time
|
||||
import sys
|
||||
|
@ -192,13 +194,15 @@ def setup_common():
|
|||
setup_user()
|
||||
setup_sudoers()
|
||||
setup_ulimit()
|
||||
setup_libxml()
|
||||
setup_psql_client()
|
||||
setup_repo()
|
||||
setup_repo_local_settings()
|
||||
setup_local_files()
|
||||
setup_time_calibration()
|
||||
setup_psql_client()
|
||||
setup_libxml()
|
||||
setup_python()
|
||||
setup_pip()
|
||||
setup_virtualenv()
|
||||
setup_repo_local_settings()
|
||||
pip()
|
||||
setup_supervisor()
|
||||
setup_hosts()
|
||||
config_pgbouncer()
|
||||
|
@@ -235,7 +239,10 @@ def setup_app_image():
copy_app_settings()
setup_hosts()
config_pgbouncer()
pull()
pip()
deploy_web()
done()

def setup_node():
setup_node_app()

@@ -326,7 +333,6 @@ def setup_installs():
'libffi-dev',
'libevent-dev',
'make',
'pgbouncer',
'python-setuptools',
'python-psycopg2',
'libyaml-0-2',

@@ -341,6 +347,12 @@ def setup_installs():
'libfreetype6',
'libfreetype6-dev',
'python-imaging',
'libmysqlclient-dev',
'libblas-dev',
'liblapack-dev',
'libatlas-base-dev',
'gfortran',
'libpq-dev',
]
# sudo("sed -i -e 's/archive.ubuntu.com\|security.ubuntu.com/old-releases.ubuntu.com/g' /etc/apt/sources.list")
put("config/apt_sources.conf", "/etc/apt/sources.list", use_sudo=True)

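The two setup_installs() hunks above drop libevent-dev and python-psycopg2 and add the MySQL, BLAS/LAPACK, gfortran, and libpq headers that the venv's pip builds need. A rough sketch of how such a package list is typically consumed, assuming a single apt-get call in the surrounding function (the actual install line sits outside these hunks):

# Sketch under assumptions: `packages` stands in for the list edited above, and the
# apt-get invocation mirrors the style used elsewhere in this fabfile.
from fabric.api import sudo

def install_packages(packages):
    sudo('apt-get -y --force-yes install %s' % ' '.join(packages))
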
@@ -392,16 +404,16 @@ def setup_repo():
sudo('ln -sfn /srv/newsblur /home/%s/newsblur' % env.user)

def setup_repo_local_settings():
with cd(env.NEWSBLUR_PATH):
with virtualenv():
run('cp local_settings.py.template local_settings.py')
run('mkdir -p logs')
run('touch logs/newsblur.log')

def setup_local_files():
put("config/toprc", "./.toprc")
put("config/zshrc", "./.zshrc")
put('config/gitconfig.txt', './.gitconfig')
put('config/ssh.conf', './.ssh/config')
put("config/toprc", "~/.toprc")
put("config/zshrc", "~/.zshrc")
put('config/gitconfig.txt', '~/.gitconfig')
put('config/ssh.conf', '~/.ssh/config')

def setup_psql_client():
sudo('apt-get -y --force-yes install postgresql-client')

@@ -426,34 +438,63 @@ def setup_libxml_code():
def setup_psycopg():
sudo('easy_install -U psycopg2')

def setup_python():
sudo('easy_install -U pip')
# sudo('easy_install -U $(<%s)' %
# os.path.join(env.NEWSBLUR_PATH, 'config/requirements.txt'))
pip()
put('config/pystartup.py', '.pystartup')
# def setup_python():
# # sudo('easy_install -U $(<%s)' %
# # os.path.join(env.NEWSBLUR_PATH, 'config/requirements.txt'))
# pip()
# put('config/pystartup.py', '.pystartup')
#
# # with cd(os.path.join(env.NEWSBLUR_PATH, 'vendor/cjson')):
# # sudo('python setup.py install')
#
# with settings(warn_only=True):
# sudo('echo "import sys; sys.setdefaultencoding(\'utf-8\')" | sudo tee /usr/lib/python2.7/sitecustomize.py')
# sudo("chmod a+r /usr/local/lib/python2.7/dist-packages/httplib2-0.8-py2.7.egg/EGG-INFO/top_level.txt")
# sudo("chmod a+r /usr/local/lib/python2.7/dist-packages/python_dateutil-2.1-py2.7.egg/EGG-INFO/top_level.txt")
# sudo("chmod a+r /usr/local/lib/python2.7/dist-packages/httplib2-0.8-py2.7.egg/httplib2/cacerts.txt")
#
# if env.user == 'ubuntu':
# with settings(warn_only=True):
# sudo('chown -R ubuntu.ubuntu /home/ubuntu/.python-eggs')

# with cd(os.path.join(env.NEWSBLUR_PATH, 'vendor/cjson')):
# sudo('python setup.py install')

with settings(warn_only=True):
sudo('echo "import sys; sys.setdefaultencoding(\'utf-8\')" | sudo tee /usr/lib/python2.7/sitecustomize.py')
sudo("chmod a+r /usr/local/lib/python2.7/dist-packages/httplib2-0.8-py2.7.egg/EGG-INFO/top_level.txt")
sudo("chmod a+r /usr/local/lib/python2.7/dist-packages/python_dateutil-2.1-py2.7.egg/EGG-INFO/top_level.txt")
sudo("chmod a+r /usr/local/lib/python2.7/dist-packages/httplib2-0.8-py2.7.egg/httplib2/cacerts.txt")
def setup_virtualenv():
sudo('pip install --upgrade virtualenv')
sudo('pip install --upgrade virtualenvwrapper')
setup_local_files()
sudo('rm -fr ~/.cache') # Clean `sudo pip`
with prefix('WORKON_HOME=%s' % os.path.join(env.NEWSBLUR_PATH, 'venv')):
with prefix('source /usr/local/bin/virtualenvwrapper.sh'):
with cd(env.NEWSBLUR_PATH):
# sudo('rmvirtualenv newsblur')
# sudo('rm -fr venv')
with settings(warn_only=True):
run('mkvirtualenv --no-site-packages newsblur')
run('echo "import sys; sys.setdefaultencoding(\'utf-8\')" | sudo tee venv/newsblur/lib/python2.7/sitecustomize.py')

if env.user == 'ubuntu':
with settings(warn_only=True):
sudo('chown -R ubuntu.ubuntu /home/ubuntu/.python-eggs')
@_contextmanager
def virtualenv():
with prefix('WORKON_HOME=%s' % os.path.join(env.NEWSBLUR_PATH, 'venv')):
with prefix('source /usr/local/bin/virtualenvwrapper.sh'):
with cd(env.NEWSBLUR_PATH):
with prefix('workon newsblur'):
yield

def setup_pip():
sudo('easy_install -U pip')

@parallel
def pip():
pull()
with cd(env.NEWSBLUR_PATH):
sudo('easy_install -U pip')
sudo('pip install --upgrade pip')
sudo('pip install --upgrade six') # Stupid cryptography bug requires upgraded six
sudo('pip install -r requirements.txt')
with virtualenv():
with settings(warn_only=True):
sudo('fallocate -l 4G /swapfile')
sudo('chmod 600 /swapfile')
sudo('mkswap /swapfile')
sudo('swapon /swapfile')
run('easy_install -U pip')
run('pip install --upgrade pip')
run('pip install -r requirements.txt')
sudo('swapoff /swapfile')

# PIL - Only if python-imaging didn't install through apt-get, like on Mac OS X.
def setup_imaging():

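The virtualenv() context manager added above is the heart of this change: it stacks WORKON_HOME, virtualenvwrapper, and `workon newsblur` as Fabric prefixes (plus a cd into the repo), so anything run inside the block executes with the newsblur venv active. A minimal usage sketch; the migrate task is illustrative, not part of this diff:

# Illustrative only: every run()/sudo() inside the block runs in the venv,
# exactly like the `with virtualenv(): run('git pull')` calls elsewhere in this diff.
from fabric.api import run

def migrate():
    with virtualenv():
        run('./manage.py migrate')
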
@@ -498,13 +539,14 @@ def config_pgbouncer():
run('sleep 2')
sudo('/etc/init.d/pgbouncer start', pty=False)

def bounce_pgbouncer():
def kill_pgbouncer(bounce=False):
sudo('su postgres -c "/etc/init.d/pgbouncer stop"', pty=False)
run('sleep 2')
with settings(warn_only=True):
sudo('pkill -9 pgbouncer')
run('sleep 2')
run('sudo /etc/init.d/pgbouncer start', pty=False)
if bounce:
run('sudo /etc/init.d/pgbouncer start', pty=False)

def config_monit_task():
put('config/monit_task.conf', '/etc/monit/conf.d/celery.conf', use_sudo=True)

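bounce_pgbouncer() is folded into kill_pgbouncer(bounce=False) above: the stop/pkill/sleep sequence stays the same, and the restart only happens when the flag is passed. Illustrative call sites (the second is how upgrade_to_virtualenv() at the end of this diff uses it):

kill_pgbouncer(bounce=True)   # stop, pkill -9, sleep, then start pgbouncer again
kill_pgbouncer()              # stop and kill only; leave pgbouncer down
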
@@ -669,11 +711,12 @@ def remove_gunicorn():
with cd(env.VENDOR_PATH):
sudo('rm -fr gunicorn')

def setup_gunicorn(supervisor=True):
def setup_gunicorn(supervisor=True, restart=True):
if supervisor:
put('config/supervisor_gunicorn.conf', '/etc/supervisor/conf.d/gunicorn.conf', use_sudo=True)
sudo('supervisorctl reread')
restart_gunicorn()
if restart:
restart_gunicorn()
# with cd(env.VENDOR_PATH):
# sudo('rm -fr gunicorn')
# run('git clone git://github.com/benoitc/gunicorn.git')

@@ -732,12 +775,12 @@ def copy_certificates():
@parallel
def maintenance_on():
put('templates/maintenance_off.html', '%s/templates/maintenance_off.html' % env.NEWSBLUR_PATH)
with cd(env.NEWSBLUR_PATH):
with virtualenv():
run('mv templates/maintenance_off.html templates/maintenance_on.html')

@parallel
def maintenance_off():
with cd(env.NEWSBLUR_PATH):
with virtualenv():
run('mv templates/maintenance_on.html templates/maintenance_off.html')
run('git checkout templates/maintenance_off.html')

@@ -785,7 +828,7 @@ def config_haproxy(debug=False):
print " !!!> Uh-oh, HAProxy config doesn't check out: %s" % haproxy_check.return_code

def upgrade_django():
with cd(env.NEWSBLUR_PATH), settings(warn_only=True):
with virtualenv(), settings(warn_only=True):
sudo('supervisorctl stop gunicorn')
run('./utils/kill_gunicorn.sh')
sudo('easy_install -U django gunicorn')

@@ -793,7 +836,7 @@ def upgrade_django():
sudo('supervisorctl reload')

def upgrade_pil():
with cd(env.NEWSBLUR_PATH):
with virtualenv():
pull()
sudo('pip install --upgrade pillow')
# celery_stop()

@@ -802,7 +845,7 @@ def upgrade_pil():
# kill()

def downgrade_pil():
with cd(env.NEWSBLUR_PATH):
with virtualenv():
sudo('apt-get install -y python-imaging')
sudo('rm -fr /usr/local/lib/python2.7/dist-packages/Pillow*')
pull()

@@ -811,7 +854,7 @@ def downgrade_pil():

def setup_db_monitor():
pull()
with cd(env.NEWSBLUR_PATH):
with virtualenv():
sudo('apt-get install -y python-mysqldb')
sudo('apt-get install -y libpq-dev python-dev')
sudo('pip install -r flask/requirements.txt')

@@ -849,6 +892,7 @@ def setup_db_firewall():
env.roledefs['work'] +
env.roledefs['push'] +
env.roledefs['www'] +
env.roledefs['search'] +
env.roledefs['node']):
sudo('ufw allow proto tcp from %s to any port %s' % (
ip,

@@ -1069,7 +1113,7 @@ def setup_db_munin():


def enable_celerybeat():
with cd(env.NEWSBLUR_PATH):
with virtualenv():
run('mkdir -p data')
put('config/supervisor_celerybeat.conf', '/etc/supervisor/conf.d/celerybeat.conf', use_sudo=True)
put('config/supervisor_celeryd_work_queue.conf', '/etc/supervisor/conf.d/celeryd_work_queue.conf', use_sudo=True)

@@ -1111,7 +1155,8 @@ def setup_elasticsearch():
with cd(os.path.join(env.VENDOR_PATH, 'elasticsearch-%s' % ES_VERSION)):
run('wget http://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-%s.deb' % ES_VERSION)
sudo('dpkg -i elasticsearch-%s.deb' % ES_VERSION)
sudo('/usr/share/elasticsearch/bin/plugin -install mobz/elasticsearch-head')
if not files.exists('/usr/share/elasticsearch/plugins/head'):
sudo('/usr/share/elasticsearch/bin/plugin -install mobz/elasticsearch-head')

def setup_db_search():
put('config/supervisor_celeryd_search_indexer.conf', '/etc/supervisor/conf.d/celeryd_search_indexer.conf', use_sudo=True)

@@ -1145,14 +1190,15 @@ def setup_motd(role='app'):
sudo('chown root.root %s' % motd)
sudo('chmod a+x %s' % motd)

def enable_celery_supervisor(queue=None):
def enable_celery_supervisor(queue=None, update=True):
if not queue:
put('config/supervisor_celeryd.conf', '/etc/supervisor/conf.d/celeryd.conf', use_sudo=True)
else:
put('config/supervisor_celeryd_%s.conf' % queue, '/etc/supervisor/conf.d/celeryd.conf', use_sudo=True)

sudo('supervisorctl reread')
sudo('supervisorctl update')
if update:
sudo('supervisorctl update')

@parallel
def copy_db_settings():

@@ -1198,7 +1244,7 @@ def setup_do(name, size=2, image=None):
if image == "task":
image = images["task_07-2015"]
elif image == "app":
image = images[image]
image = images["app_02-2016"]
else:
images = dict((s.name, s.id) for s in doapi.get_all_images())
print images

@@ -1307,7 +1353,7 @@ def setup_ec2():

@parallel
def pull():
with cd(env.NEWSBLUR_PATH):
with virtualenv():
run('git pull')

def pre_deploy():

@@ -1344,12 +1390,12 @@ def deploy_rebuild(fast=False):

@parallel
def kill_gunicorn():
with cd(env.NEWSBLUR_PATH):
with virtualenv():
sudo('pkill -9 -u %s -f gunicorn_django' % env.user)

@parallel
def deploy_code(copy_assets=False, rebuild=False, fast=False, reload=False):
with cd(env.NEWSBLUR_PATH):
with virtualenv():
run('git pull')
run('mkdir -p static')
if rebuild:

@@ -1357,7 +1403,7 @@ def deploy_code(copy_assets=False, rebuild=False, fast=False, reload=False):
if copy_assets:
transfer_assets()

with cd(env.NEWSBLUR_PATH), settings(warn_only=True):
with virtualenv(), settings(warn_only=True):
if reload:
sudo('supervisorctl reload')
elif fast:

@@ -1375,7 +1421,7 @@ def kill():
run('./utils/kill_gunicorn.sh')

def deploy_node():
with cd(env.NEWSBLUR_PATH):
with virtualenv():
run('sudo supervisorctl restart node_unread')
run('sudo supervisorctl restart node_favicons')

@@ -1383,11 +1429,11 @@ def gunicorn_restart():
restart_gunicorn()

def restart_gunicorn():
with cd(env.NEWSBLUR_PATH), settings(warn_only=True):
with virtualenv(), settings(warn_only=True):
run('sudo supervisorctl restart gunicorn')

def gunicorn_stop():
with cd(env.NEWSBLUR_PATH), settings(warn_only=True):
with virtualenv(), settings(warn_only=True):
run('sudo supervisorctl stop gunicorn')

def staging():

@@ -1410,20 +1456,20 @@ def celery():
celery_slow()

def celery_slow():
with cd(env.NEWSBLUR_PATH):
with virtualenv():
run('git pull')
celery_stop()
celery_start()

@parallel
def celery_fast():
with cd(env.NEWSBLUR_PATH):
with virtualenv():
run('git pull')
celery_reload()

@parallel
def celery_stop():
with cd(env.NEWSBLUR_PATH):
with virtualenv():
sudo('supervisorctl stop celery')
with settings(warn_only=True):
if env.user == 'ubuntu':

@@ -1433,18 +1479,18 @@ def celery_stop():

@parallel
def celery_start():
with cd(env.NEWSBLUR_PATH):
with virtualenv():
run('sudo supervisorctl start celery')
run('tail logs/newsblur.log')

@parallel
def celery_reload():
with cd(env.NEWSBLUR_PATH):
with virtualenv():
run('sudo supervisorctl reload celery')
run('tail logs/newsblur.log')

def kill_celery():
with cd(env.NEWSBLUR_PATH):
with virtualenv():
with settings(warn_only=True):
if env.user == 'ubuntu':
sudo('./utils/kill_celery.sh')

@@ -1607,3 +1653,29 @@ def add_revsys_keys():
put("~/Downloads/revsys-keys.pub", "revsys_keys")
run('cat revsys_keys >> ~/.ssh/authorized_keys')
run('rm revsys_keys')

def upgrade_to_virtualenv(role=None):
if not role:
print " ---> You must specify a role!"
return
setup_virtualenv()
if role == "task" or role == "search":
celery_stop()
elif role == "app":
gunicorn_stop()
elif role == "work":
sudo('/etc/init.d/supervisor stop')
kill_pgbouncer()
setup_installs()
pip()
if role == "task":
enable_celery_supervisor(update=False)
sudo('reboot')
elif role == "app":
setup_gunicorn(supervisor=True, restart=False)
sudo('reboot')
elif role == "search":
setup_db_search()
elif role == "work":
enable_celerybeat()
sudo('reboot')

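upgrade_to_virtualenv() ties the whole diff together: it stops the role's services, rebuilds the venv with setup_virtualenv(), reinstalls apt and pip dependencies, and reboots. A hedged sketch of driving it programmatically with Fabric's execute(); the host name is a placeholder, not taken from this fabfile:

# Sketch: migrating one app server to the virtualenv layout.
from fabric.api import execute

execute(upgrade_to_virtualenv, role='app', hosts=['app01.example.com'])
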
@@ -110,6 +110,15 @@ a img {
display: none;
}

.NB-manage {
position: absolute;
right: 204px;
z-index: 1;
overflow: hidden;
width: 180px;
margin: 0 24px 32px 0;
}

.NB-account {
position: absolute;
right: 0;

@@ -6064,7 +6073,8 @@ form.opml_import_form input {
overflow: hidden;
}

.NB-account h5.NB-module-header {
.NB-account h5.NB-module-header,
.NB-manage h5.NB-module-header {
text-align: center;
}
.NB-module .NB-module-header-left {

@@ -6170,12 +6180,16 @@ form.opml_import_form input {
float: left;
margin: 4px 0 0 0;
}

.NB-module .NB-module-item .NB-menu-manage-logout {
float: right;
margin: 4px 4px 0 0;
}
/* =========================== */
/* = Right Modules - Account = */
/* =========================== */

.NB-account .NB-module {
.NB-account .NB-module,
.NB-manage .NB-module {
margin: 24px 0;
padding: 0;
clear: both;

@@ -6203,24 +6217,6 @@ form.opml_import_form input {
opacity: .2;
}

.NB-account .NB-module.NB-module-account .NB-module-account-stats {
min-height: 0;
}
.NB-account .NB-module.NB-module-account .NB-module-stats-counts {
overflow: hidden;
}
.NB-account .NB-module.NB-module-account .NB-module-stats-count {
margin: 0;
}
.NB-account .NB-module.NB-module-account .NB-module-stats-count-graph {
width: 33%;
margin: 0 0 0 72px;
clear: none;
}
.NB-account .NB-module-account-stats {
margin-top: 0;
overflow: hidden;
}
.NB-module.NB-module-features .NB-module-content-header {
border-bottom: 1px solid #E8EAE4;
padding-bottom: 4px;

@@ -6426,10 +6422,47 @@ form.opml_import_form input {
display: none;
}

/* ========================== */
/* = Premium Upgrade Module = */
/* ========================== */

.NB-account .NB-module-premium {
}

.NB-module-premium .NB-module-item-intro {
border: 1px solid #F7E7BC;
background-color: #FFFCF3;
padding: 12px;
overflow: hidden;
border-radius: 4px;
font-size: 14px;
text-align: center;
}
.NB-module-premium .NB-module-item .NB-modal-submit-button {
float: none;
}
.NB-module-premium .NB-module-premium-price {
font-size: 12px;
text-transform: uppercase;
margin: 12px 0 0;
color: #909090;
}

/* ================= */
/* = Manage Module = */
/* ================= */

.NB-module-manage .NB-menu-manage li.NB-menu-item {
background-color: #F7F8F5;
}

/* ================ */
/* = Stats Module = */
/* ================ */

.NB-splash-module-section {
overflow: hidden;
}
.NB-module-stats-counts {
border-radius: 4px;
background-color: #F7F8F5;

@@ -6445,6 +6478,11 @@ form.opml_import_form input {
overflow: hidden;
padding: 6px 0;
}
.NB-module-account .NB-module-stats-count {
float: left;
width: 33%;
border-bottom: none;
}
.NB-module-stats-count:last-child {
border-bottom: none;
}

@@ -6522,6 +6560,7 @@ form.opml_import_form input {

.NB-modules-center .NB-module {
margin: 0 0 24px;
clear: both;
}

/* ============ */

@@ -8921,7 +8960,7 @@ form.opml_import_form input {
}

.NB-modal-feedchooser .NB-feedchooser-paypal {
min-height: 48px;
min-height: 84px;
width: 50%;
text-align: center;
overflow: hidden;

@@ -9369,12 +9408,13 @@ form.opml_import_form input {
.NB-modal-goodies .NB-goodies-mobile-link {
float: right;
}
.NB-modal-goodies .NB-goodies-iphone {
.NB-modal-goodies .NB-goodies-ios {
float: right;
width: 28px;
height: 28px;
margin: 0 6px 0 0;
background: transparent url('/media/img/mobile/iphone.png') no-repeat 0 0;
background: transparent url('/media/img/mobile/apple.png') no-repeat 0 0;
background-size: 28px 28px;
}
.NB-modal-goodies .NB-goodies-android {
float: right;

BIN  media/img/circular/originals/activity.png  (new file, 41 KiB)
BIN  media/img/circular/originals/circular.png  (new file, 50 KiB)
BIN  media/img/circular/originals/following.png  (new file, 54 KiB)
BIN  media/img/circular/originals/trending.png  (new file, 44 KiB)
BIN  media/img/mobile/apple.png  (new file, 2 KiB)
BIN  media/img/mobile/windows_hypersonic.png  (new file, 1.2 KiB)
BIN  media/img/originals/Chrome Web Store Banner Small.png  (new file, 175 KiB)
BIN  media/img/originals/Mobile Loading.png  (new file, 298 KiB)
BIN  media/img/originals/Mobile Logo.png  (new file, 479 KiB)
BIN  media/img/originals/Mouse Indicator.png  (new file, 49 KiB)
BIN  media/img/originals/archive/Mobile Logo 3.png  (new file, 470 KiB)
BIN  media/img/originals/archive/Mobile Logo Filled.png  (new file, 223 KiB)
BIN  media/img/originals/archive/Mobile Logo.png  (new file, 243 KiB)
BIN  media/img/originals/archive/logo 2-color.png  (new file, 904 KiB)
BIN  media/img/originals/archive/logo.png  (new file, 235 KiB)
BIN  media/img/originals/archive/logo_newsblur.png  (new file, 192 KiB)