Merge remote-tracking branch 'upstream/master' into mark

Mark Anderson 2015-08-03 22:45:32 +01:00
commit 87a88f9d82
79 changed files with 1477 additions and 1164 deletions

View file

@ -71,9 +71,6 @@ def save_classifier(request):
'user_id': request.user.pk,
'feed_id': feed_id or 0,
'social_user_id': social_user_id or 0,
'defaults': {
'score': score
}
}
if content_type in ('author', 'tag', 'title'):
classifier_dict.update({content_type: post_content})
@ -81,7 +78,10 @@ def save_classifier(request):
if not post_content.startswith('social:'):
classifier_dict['feed_id'] = post_content
try:
classifier, created = ClassifierCls.objects.get_or_create(**classifier_dict)
classifier = ClassifierCls.objects.get(**classifier_dict)
except ClassifierCls.DoesNotExist:
classifier_dict.update(dict(score=score))
classifier = ClassifierCls.objects.create(**classifier_dict)
except NotUniqueError:
continue
if score == 0:
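
This hunk replaces get_or_create(defaults={'score': score}) with an explicit get() / DoesNotExist / create(), a pattern repeated across this commit. A minimal standalone sketch of that pattern (an illustrative helper, not part of the NewsBlur code):

# Minimal sketch of the lookup-then-create pattern used throughout this commit
# (illustrative helper; the view inlines this logic instead of calling a function).
def lookup_or_create(ClassifierCls, classifier_dict, score):
    try:
        return ClassifierCls.objects.get(**classifier_dict)
    except ClassifierCls.DoesNotExist:
        return ClassifierCls.objects.create(score=score, **classifier_dict)
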

View file

@ -24,10 +24,10 @@ def login(request):
code = -1
errors = None
user_agent = request.environ.get('HTTP_USER_AGENT', '')
ip = request.META.get('HTTP_X_FORWARDED_FOR', None) or request.META['REMOTE_ADDR']
if not user_agent or user_agent.lower() in ['nativehost']:
errors = dict(user_agent="You must set a user agent to login.")
ip = request.META.get('HTTP_X_REAL_IP', None) or request.META['REMOTE_ADDR']
logging.user(request, "~FG~BB~SK~FRBlocked ~FGAPI Login~SN~FW: %s / %s" % (user_agent, ip))
elif request.method == "POST":
form = LoginForm(data=request.POST)
@ -35,7 +35,7 @@ def login(request):
errors = form.errors
if form.is_valid():
login_user(request, form.get_user())
logging.user(request, "~FG~BB~SKAPI Login~SN~FW: %s" % user_agent)
logging.user(request, "~FG~BB~SKAPI Login~SN~FW: %s / %s" % (user_agent, ip))
code = 1
else:
errors = dict(method="Invalid method. Use POST. You used %s" % request.method)
@ -46,7 +46,8 @@ def login(request):
def signup(request):
code = -1
errors = None
ip = request.META.get('HTTP_X_FORWARDED_FOR', None) or request.META['REMOTE_ADDR']
if request.method == "POST":
form = SignupForm(data=request.POST)
if form.errors:
@ -54,7 +55,7 @@ def signup(request):
if form.is_valid():
new_user = form.save()
login_user(request, new_user)
logging.user(request, "~FG~SB~BBAPI NEW SIGNUP: ~FW%s" % new_user.email)
logging.user(request, "~FG~SB~BBAPI NEW SIGNUP: ~FW%s / %s" % (new_user.email, ip))
code = 1
else:
errors = dict(method="Invalid method. Use POST. You used %s" % request.method)
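
Both login and signup now log the client IP, reading the X-Forwarded-For header set by the proxy and falling back to REMOTE_ADDR. A small illustrative helper for that expression (hypothetical name; the views inline it rather than calling a function):

# Hypothetical helper; the views inline this expression directly.
def request_ip(request):
    # X-Forwarded-For is filled in by the proxy / load balancer; fall back to
    # the socket address when the header is missing.
    return request.META.get('HTTP_X_FORWARDED_FOR', None) or request.META['REMOTE_ADDR']
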

View file

@ -92,7 +92,7 @@ def opml_export(request):
def reader_authorize(request):
ip = request.META.get('HTTP_X_REAL_IP', None) or request.META.get('REMOTE_ADDR', "")
ip = request.META.get('HTTP_X_FORWARDED_FOR', None) or request.META.get('REMOTE_ADDR', "")
reader_importer = GoogleReaderImporter(request.user)
if reader_importer.test():
logging.user(request, "~BB~FW~SBSkipping Google Reader import, already tokened")
@ -114,7 +114,7 @@ def reader_authorize(request):
approval_prompt="force",
)
logging.user(request, "~BB~FW~SBAuthorize Google Reader import - %s" % (
request.META.get('HTTP_X_REAL_IP', None) or request.META.get('REMOTE_ADDR', ""),
request.META.get('HTTP_X_FORWARDED_FOR', None) or request.META.get('REMOTE_ADDR', ""),
))
authorize_url = FLOW.step1_get_authorize_url(redirect_uri=STEP2_URI)
@ -139,7 +139,7 @@ def reader_authorize(request):
return response
def reader_callback(request):
ip = request.META.get('HTTP_X_REAL_IP', None) or request.META.get('REMOTE_ADDR', "")
ip = request.META.get('HTTP_X_FORWARDED_FOR', None) or request.META.get('REMOTE_ADDR', "")
domain = Site.objects.get_current().domain
STEP2_URI = "http://%s%s" % (
(domain + '.com') if not domain.endswith('.com') else domain,
@ -244,7 +244,7 @@ def import_starred_stories_from_google_reader(request):
return dict(code=code, delayed=delayed, feed_count=feed_count, starred_count=starred_count)
def import_signup(request):
ip = request.META.get('HTTP_X_REAL_IP', None) or request.META.get('REMOTE_ADDR', "")
ip = request.META.get('HTTP_X_FORWARDED_FOR', None) or request.META.get('REMOTE_ADDR', "")
if request.method == "POST":
signup_form = SignupForm(prefix='signup', data=request.POST)

View file

@ -61,7 +61,7 @@ def twitter_connect(request):
except User.DoesNotExist:
existing_user.delete()
social_services, _ = MSocialServices.objects.get_or_create(user_id=request.user.pk)
social_services = MSocialServices.get_user(request.user.pk)
social_services.twitter_uid = unicode(twitter_user.id)
social_services.twitter_access_key = access_token.key
social_services.twitter_access_secret = access_token.secret
@ -125,7 +125,7 @@ def facebook_connect(request):
except User.DoesNotExist:
existing_user.delete()
social_services, _ = MSocialServices.objects.get_or_create(user_id=request.user.pk)
social_services = MSocialServices.get_user(request.user.pk)
social_services.facebook_uid = uid
social_services.facebook_access_token = access_token
social_services.syncing_facebook = True
@ -184,7 +184,7 @@ def appdotnet_connect(request):
except User.DoesNotExist:
existing_user.delete()
social_services, _ = MSocialServices.objects.get_or_create(user_id=request.user.pk)
social_services = MSocialServices.get_user(request.user.pk)
social_services.appdotnet_uid = unicode(adn_userid)
social_services.appdotnet_access_token = access_token
social_services.syncing_appdotnet = True
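
Each of these OAuth callbacks swaps MSocialServices.objects.get_or_create(user_id=...) for MSocialServices.get_user(...). A rough sketch of what such a classmethod looks like, mirroring the fetch-then-create pattern used elsewhere in this commit (the real implementation lives in apps/social/models.py and may differ):

import mongoengine as mongo

class MSocialServicesSketch(mongo.Document):
    # Rough stand-in; the real MSocialServices carries many more fields.
    user_id = mongo.IntField(unique=True)

    @classmethod
    def get_user(cls, user_id):
        try:
            return cls.objects.get(user_id=user_id)
        except cls.DoesNotExist:
            return cls.objects.create(user_id=user_id)
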

View file

@ -21,7 +21,7 @@ class LastSeenMiddleware(object):
and hasattr(request, 'user')
and request.user.is_authenticated()):
hour_ago = datetime.datetime.utcnow() - datetime.timedelta(minutes=60)
ip = request.META.get('HTTP_X_REAL_IP', None) or request.META['REMOTE_ADDR']
ip = request.META.get('HTTP_X_FORWARDED_FOR', None) or request.META['REMOTE_ADDR']
# SUBSCRIBER_EXPIRE = datetime.datetime.utcnow() - datetime.timedelta(days=settings.SUBSCRIBER_EXPIRE)
if request.user.profile.last_seen_on < hour_ago:
logging.user(request, "~FG~BBRepeat visitor: ~SB%s (%s)" % (
@ -31,10 +31,8 @@ class LastSeenMiddleware(object):
logging.user(request, "~FG~BBRepeat visitor (ignored): ~SB%s (%s)" % (
request.user.profile.last_seen_on, ip))
# if request.user.profile.last_seen_on < SUBSCRIBER_EXPIRE:
# request.user.profile.refresh_stale_feeds()
request.user.profile.last_seen_on = datetime.datetime.utcnow()
request.user.profile.last_seen_ip = ip
request.user.profile.last_seen_ip = ip[-15:]
request.user.profile.save()
return response
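
last_seen_ip is now truncated to the last 15 characters, presumably so a proxied X-Forwarded-For chain still fits a 15-character column (the length of a dotted-quad IPv4 address). A worked example with made-up addresses:

# Worked example of the truncation (values are illustrative):
ip = "10.0.0.1, 203.0.113.45"   # proxy chain from X-Forwarded-For
print(ip[-15:])                  # "1, 203.0.113.45" -- 15 characters, fits the column
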

View file

@ -6,6 +6,7 @@ import hashlib
import redis
import uuid
import mongoengine as mongo
from pprint import pprint
from django.db import models
from django.db import IntegrityError
from django.db.utils import DatabaseError
@ -18,14 +19,15 @@ from django.core.mail import mail_admins
from django.core.mail import EmailMultiAlternatives
from django.core.urlresolvers import reverse
from django.template.loader import render_to_string
from apps.reader.models import UserSubscription
from apps.rss_feeds.models import Feed, MStory, MStarredStory
from apps.rss_feeds.tasks import NewFeeds
from apps.rss_feeds.tasks import SchedulePremiumSetup
from apps.feed_import.models import GoogleReaderImporter, OPMLExporter
from apps.reader.models import UserSubscription
from apps.reader.models import RUserStory
from utils import log as logging
from utils import json_functions as json
from utils.user_functions import generate_secret_token
from utils.feed_functions import chunks
from vendor.timezones.fields import TimeZoneField
from vendor.paypal.standard.ipn.signals import subscription_signup, payment_was_successful, recurring_payment
from vendor.paypal.standard.ipn.signals import payment_was_flagged
@ -156,16 +158,9 @@ class Profile(models.Model):
logging.user(self.user, "Deleting user: %s" % self.user)
self.user.delete()
def check_if_spammer(self):
feed_opens = UserSubscription.objects.filter(user=self.user)\
.aggregate(sum=Sum('feed_opens'))['sum']
feed_count = UserSubscription.objects.filter(user=self.user).count()
if not feed_opens and not feed_count:
return True
def activate_premium(self, never_expire=False):
from apps.profile.tasks import EmailNewPremium
EmailNewPremium.delay(user_id=self.user.pk)
self.is_premium = True
@ -190,7 +185,7 @@ class Profile(models.Model):
len(scheduled_feeds))
SchedulePremiumSetup.apply_async(kwargs=dict(feed_ids=scheduled_feeds))
self.queue_new_feeds()
UserSubscription.queue_new_feeds(self.user)
self.setup_premium_history()
if never_expire:
@ -400,34 +395,123 @@ class Profile(models.Model):
return True
def queue_new_feeds(self, new_feeds=None):
if not new_feeds:
new_feeds = UserSubscription.objects.filter(user=self.user,
feed__fetched_once=False,
active=True).values('feed_id')
new_feeds = list(set([f['feed_id'] for f in new_feeds]))
logging.user(self.user, "~BB~FW~SBQueueing NewFeeds: ~FC(%s) %s" % (len(new_feeds), new_feeds))
size = 4
for t in (new_feeds[pos:pos + size] for pos in xrange(0, len(new_feeds), size)):
NewFeeds.apply_async(args=(t,), queue="new_feeds")
@classmethod
def clear_dead_spammers(self, days=30, confirm=False):
users = User.objects.filter(date_joined__gte=datetime.datetime.now()-datetime.timedelta(days=days)).order_by('-date_joined')
usernames = set()
def refresh_stale_feeds(self, exclude_new=False):
stale_cutoff = datetime.datetime.now() - datetime.timedelta(days=7)
stale_feeds = UserSubscription.objects.filter(user=self.user, active=True, feed__last_update__lte=stale_cutoff)
if exclude_new:
stale_feeds = stale_feeds.filter(feed__fetched_once=True)
all_feeds = UserSubscription.objects.filter(user=self.user, active=True)
for user in users:
opens = UserSubscription.objects.filter(user=user).aggregate(sum=Sum('feed_opens'))['sum']
reads = RUserStory.read_story_count(user.pk)
if opens is None and not reads:
usernames.add(user.username)
print user.username, user.email, opens, reads
logging.user(self.user, "~FG~BBRefreshing stale feeds: ~SB%s/%s" % (
stale_feeds.count(), all_feeds.count()))
if not confirm: return
for username in usernames:
u = User.objects.get(username=username)
u.profile.delete_user(confirm=True)
for sub in stale_feeds:
sub.feed.fetched_once = False
sub.feed.save()
RNewUserQueue.user_count()
RNewUserQueue.activate_all()
if stale_feeds:
stale_feeds = list(set([f.feed_id for f in stale_feeds]))
self.queue_new_feeds(new_feeds=stale_feeds)
@classmethod
def count_feed_subscribers(self, feed_id=None, user_id=None, verbose=False):
SUBSCRIBER_EXPIRE = datetime.datetime.now() - datetime.timedelta(days=settings.SUBSCRIBER_EXPIRE)
r = redis.Redis(connection_pool=settings.REDIS_FEED_SUB_POOL)
entire_feed_counted = False
if verbose:
logging.debug(" ---> ~SN~FBCounting subscribers for feed:~SB~FM%s~SN~FB user:~SB~FM%s" % (feed_id, user_id))
if feed_id:
feed_ids = [feed_id]
elif user_id:
feed_ids = [us['feed_id'] for us in UserSubscription.objects.filter(user=user_id, active=True).values('feed_id')]
else:
assert False, "feed_id or user_id required"
if feed_id and not user_id:
entire_feed_counted = True
for feed_id in feed_ids:
total = 0
premium = 0
active = 0
active_premium = 0
key = 's:%s' % feed_id
premium_key = 'sp:%s' % feed_id
if user_id:
active = UserSubscription.objects.get(feed_id=feed_id, user_id=user_id).only('active').active
user_ids = dict([(user_id, active)])
else:
user_ids = dict([(us.user_id, us.active)
for us in UserSubscription.objects.filter(feed_id=feed_id).only('user', 'active')])
profiles = Profile.objects.filter(user_id__in=user_ids.keys()).values('user_id', 'last_seen_on', 'is_premium')
feed = Feed.get_by_id(feed_id)
if entire_feed_counted:
r.delete(key)
r.delete(premium_key)
for profiles_group in chunks(profiles, 20):
pipeline = r.pipeline()
for profile in profiles_group:
last_seen_on = int(profile['last_seen_on'].strftime('%s'))
muted_feed = not bool(user_ids[profile['user_id']])
if muted_feed:
last_seen_on = 0
pipeline.zadd(key, profile['user_id'], last_seen_on)
total += 1
if profile['is_premium']:
pipeline.zadd(premium_key, profile['user_id'], last_seen_on)
premium += 1
else:
pipeline.zrem(premium_key, profile['user_id'])
if profile['last_seen_on'] > SUBSCRIBER_EXPIRE and not muted_feed:
active += 1
if profile['is_premium']:
active_premium += 1
pipeline.execute()
if entire_feed_counted:
now = int(datetime.datetime.now().strftime('%s'))
r.zadd(key, -1, now)
r.zadd(premium_key, -1, now)
logging.info(" ---> [%-30s] ~SN~FBCounting subscribers, storing in ~SBredis~SN: ~FMt:~SB~FM%s~SN a:~SB%s~SN p:~SB%s~SN ap:~SB%s" %
(feed.title[:30], total, active, premium, active_premium))
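
The redis layout written above: per-feed sorted sets keyed s:<feed_id> and sp:<feed_id>, whose members are user ids scored by last-seen time, with muted subscriptions scored 0 and a sentinel member -1 scored with the recount time to mark that the feed's counts now live in redis. Illustrative writes (feed id and timestamps are made up; argument order matches the older redis-py this codebase uses):

# Illustrative writes showing the sorted-set layout (feed id 42 is made up):
import redis
from django.conf import settings

r = redis.Redis(connection_pool=settings.REDIS_FEED_SUB_POOL)
r.zadd('s:42', 1001, 1438500000)    # active subscriber: member=user_id, score=last_seen epoch
r.zadd('s:42', 1002, 0)             # muted subscriber: score 0 keeps it outside the active window
r.zadd('sp:42', 1001, 1438500000)   # premium subscribers mirrored in sp:<feed_id>
r.zadd('s:42', -1, 1438600000)      # sentinel: full counts stored in redis at this time
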
@classmethod
def count_all_feed_subscribers_for_user(self, user):
SUBSCRIBER_EXPIRE = datetime.datetime.now() - datetime.timedelta(days=settings.SUBSCRIBER_EXPIRE)
r = redis.Redis(connection_pool=settings.REDIS_FEED_SUB_POOL)
if not isinstance(user, User):
user = User.objects.get(pk=user)
active_feed_ids = [us['feed_id'] for us in UserSubscription.objects.filter(user=user.pk, active=True).values('feed_id')]
muted_feed_ids = [us['feed_id'] for us in UserSubscription.objects.filter(user=user.pk, active=False).values('feed_id')]
logging.user(user, "~SN~FBRefreshing user last_login_on for ~SB%s~SN/~SB%s subscriptions~SN" %
(len(active_feed_ids), len(muted_feed_ids)))
for feed_ids in [active_feed_ids, muted_feed_ids]:
for feeds_group in chunks(feed_ids, 20):
pipeline = r.pipeline()
for feed_id in feeds_group:
key = 's:%s' % feed_id
premium_key = 'sp:%s' % feed_id
last_seen_on = int(user.profile.last_seen_on.strftime('%s'))
if feed_ids is muted_feed_ids:
last_seen_on = 0
pipeline.zadd(key, user.pk, last_seen_on)
if user.profile.is_premium:
pipeline.zadd(premium_key, user.pk, last_seen_on)
else:
pipeline.zrem(premium_key, user.pk)
pipeline.execute()
def import_reader_starred_items(self, count=20):
importer = GoogleReaderImporter(self.user)
@ -489,13 +573,16 @@ class Profile(models.Model):
if not self.user.email:
return
sent_email, created = MSentEmail.objects.get_or_create(receiver_user_id=self.user.pk,
email_type='first_share')
if not created and not force:
return
params = dict(receiver_user_id=self.user.pk, email_type='first_share')
try:
sent_email = MSentEmail.objects.get(**params)
if not force:
# Return if email already sent
return
except MSentEmail.DoesNotExist:
sent_email = MSentEmail.objects.create(**params)
social_profile = MSocialProfile.objects.get(user_id=self.user.pk)
params = {
'shared_stories': MSharedStory.objects.filter(user_id=self.user.pk).count(),
@ -528,12 +615,15 @@ NewsBlur""" % {'user': self.user.username, 'feeds': subs.count()}
if not self.user.email or not self.send_emails:
return
sent_email, created = MSentEmail.objects.get_or_create(receiver_user_id=self.user.pk,
email_type='new_premium')
if not created and not force:
return
params = dict(receiver_user_id=self.user.pk, email_type='new_premium')
try:
sent_email = MSentEmail.objects.get(**params)
if not force:
# Return if email already sent
return
except MSentEmail.DoesNotExist:
sent_email = MSentEmail.objects.create(**params)
user = self.user
text = render_to_string('mail/email_new_premium.txt', locals())
html = render_to_string('mail/email_new_premium.xhtml', locals())
@ -572,11 +662,15 @@ NewsBlur""" % {'user': self.user.username, 'feeds': subs.count()}
print "Please provide an email address."
return
sent_email, created = MSentEmail.objects.get_or_create(receiver_user_id=self.user.pk,
email_type='new_user_queue')
if not created and not force:
return
params = dict(receiver_user_id=self.user.pk, email_type='new_user_queue')
try:
sent_email = MSentEmail.objects.get(**params)
if not force:
# Return if email already sent
return
except MSentEmail.DoesNotExist:
sent_email = MSentEmail.objects.create(**params)
user = self.user
text = render_to_string('mail/email_new_user_queue.txt', locals())
html = render_to_string('mail/email_new_user_queue.xhtml', locals())
@ -645,12 +739,15 @@ NewsBlur""" % {'user': self.user.username, 'feeds': subs.count()}
logging.user(self.user, "~FM~SB~FRNot~FM sending launch social email for user, %s: %s" % (self.user.email and 'opt-out: ' or 'blank', self.user.email))
return
sent_email, created = MSentEmail.objects.get_or_create(receiver_user_id=self.user.pk,
email_type='launch_social')
if not created and not force:
logging.user(self.user, "~FM~SB~FRNot~FM sending launch social email for user, sent already: %s" % self.user.email)
return
params = dict(receiver_user_id=self.user.pk, email_type='launch_social')
try:
sent_email = MSentEmail.objects.get(**params)
if not force:
# Return if email already sent
logging.user(self.user, "~FM~SB~FRNot~FM sending launch social email for user, sent already: %s" % self.user.email)
return
except MSentEmail.DoesNotExist:
sent_email = MSentEmail.objects.create(**params)
delta = datetime.datetime.now() - self.last_seen_on
months_ago = delta.days / 30
@ -1061,28 +1158,28 @@ class RNewUserQueue:
@classmethod
def add_user(cls, user_id):
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
now = time.time()
r.zadd(cls.KEY, user_id, now)
@classmethod
def user_count(cls):
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
count = r.zcard(cls.KEY)
return count
@classmethod
def user_position(cls, user_id):
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
position = r.zrank(cls.KEY, user_id)
if position >= 0:
return position + 1
@classmethod
def pop_user(cls):
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
user = r.zrange(cls.KEY, 0, 0)[0]
r.zrem(cls.KEY, user)

View file

@ -59,6 +59,8 @@ class CleanupUser(Task):
def run(self, user_id):
UserSubscription.trim_user_read_stories(user_id)
UserSubscription.verify_feeds_scheduled(user_id)
Profile.count_all_feed_subscribers_for_user(user_id)
# UserSubscription.refresh_stale_feeds(user_id)
try:
ss = MSocialServices.objects.get(user_id=user_id)

View file

@ -52,7 +52,7 @@ def set_preference(request):
setattr(request.user.profile, preference_name, preference_value)
elif preference_name in SPECIAL_PREFERENCES:
if preference_name == 'autofollow_friends':
social_services, _ = MSocialServices.objects.get_or_create(user_id=request.user.pk)
social_services = MSocialServices.get_user(request.user.pk)
social_services.autofollow = preference_value
social_services.save()
elif preference_name == 'dashboard_date':
@ -205,6 +205,12 @@ def clear_view_setting(request):
if view_setting_type == 'view' and 'v' in view_setting:
del view_setting['v']
removed += 1
if view_setting_type == 'order' and 'o' in view_setting:
del view_setting['o']
removed += 1
if view_setting_type == 'order' and 'r' in view_setting:
del view_setting['r']
removed += 1
new_view_settings[feed_id] = view_setting
request.user.profile.view_settings = json.encode(new_view_settings)
@ -404,6 +410,7 @@ def payment_history(request):
statistics = {
"created_date": user.date_joined,
"last_seen_date": user.profile.last_seen_on,
"last_seen_ip": user.profile.last_seen_ip,
"timezone": unicode(user.profile.timezone),
"stripe_id": user.profile.stripe_id,
"profile": user.profile,

View file

@ -28,7 +28,14 @@ class LoginForm(forms.Form):
username = self.cleaned_data.get('username', '').lower()
password = self.cleaned_data.get('password', '')
user = User.objects.filter(Q(username__iexact=username) | Q(email__iexact=username))
if '@' in username:
user = User.objects.filter(email=username)
if not user:
user = User.objects.filter(email__iexact=username)
else:
user = User.objects.filter(username=username)
if not user:
user = User.objects.filter(username__iexact=username)
if user:
user = user[0]
if username and user:
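
The lookup now tries an exact match before the case-insensitive one, so when two accounts differ only in case the exact spelling wins. A standalone sketch of the same ordering (hypothetical helper, not part of LoginForm):

from django.contrib.auth.models import User

def find_login_user(username):
    # Hypothetical helper mirroring the lookup order in LoginForm.clean():
    # exact match first, case-insensitive fallback; '@' decides email vs username.
    field = 'email' if '@' in username else 'username'
    users = User.objects.filter(**{field: username})
    if not users:
        users = User.objects.filter(**{field + '__iexact': username})
    return users[0] if users else None
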

View file

@ -17,6 +17,7 @@ from mongoengine.queryset import OperationError
from mongoengine.queryset import NotUniqueError
from apps.reader.managers import UserSubscriptionManager
from apps.rss_feeds.models import Feed, MStory, DuplicateFeed
from apps.rss_feeds.tasks import NewFeeds
from apps.analyzer.models import MClassifierFeed, MClassifierAuthor, MClassifierTag, MClassifierTitle
from apps.analyzer.models import apply_classifier_titles, apply_classifier_feeds, apply_classifier_authors, apply_classifier_tags
from apps.analyzer.tfidf import tfidf
@ -421,6 +422,49 @@ class UserSubscription(models.Model):
return feeds
@classmethod
def queue_new_feeds(cls, user, new_feeds=None):
if not isinstance(user, User):
user = User.objects.get(pk=user)
if not new_feeds:
new_feeds = cls.objects.filter(user=user,
feed__fetched_once=False,
active=True).values('feed_id')
new_feeds = list(set([f['feed_id'] for f in new_feeds]))
if not new_feeds:
return
logging.user(user, "~BB~FW~SBQueueing NewFeeds: ~FC(%s) %s" % (len(new_feeds), new_feeds))
size = 4
for t in (new_feeds[pos:pos + size] for pos in xrange(0, len(new_feeds), size)):
NewFeeds.apply_async(args=(t,), queue="new_feeds")
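
The slicing generator above batches the feed ids into groups of `size` before handing each batch to the NewFeeds task. A worked example of the batching:

# Worked example of the batching above:
new_feeds = [11, 12, 13, 14, 15, 16, 17, 18, 19]
size = 4
print([new_feeds[pos:pos + size] for pos in range(0, len(new_feeds), size)])
# -> [[11, 12, 13, 14], [15, 16, 17, 18], [19]]
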
@classmethod
def refresh_stale_feeds(cls, user, exclude_new=False):
if not isinstance(user, User):
user = User.objects.get(pk=user)
stale_cutoff = datetime.datetime.now() - datetime.timedelta(days=settings.SUBSCRIBER_EXPIRE)
# TODO: Refactor below using last_update from REDIS_FEED_UPDATE_POOL
stale_feeds = UserSubscription.objects.filter(user=user, active=True, feed__last_update__lte=stale_cutoff)
if exclude_new:
stale_feeds = stale_feeds.filter(feed__fetched_once=True)
all_feeds = UserSubscription.objects.filter(user=user, active=True)
logging.user(user, "~FG~BBRefreshing stale feeds: ~SB%s/%s" % (
stale_feeds.count(), all_feeds.count()))
for sub in stale_feeds:
sub.feed.fetched_once = False
sub.feed.save()
if stale_feeds:
stale_feeds = list(set([f.feed_id for f in stale_feeds]))
cls.queue_new_feeds(user, new_feeds=stale_feeds)
@classmethod
def identify_deleted_feed_users(cls, old_feed_id):
users = UserSubscriptionFolders.objects.filter(folders__contains=old_feed_id).only('user')
@ -638,6 +682,9 @@ class UserSubscription(models.Model):
ong = self.unread_count_negative
ont = self.unread_count_neutral
ops = self.unread_count_positive
oousd = self.oldest_unread_story_date
onur = self.needs_unread_recalc
oit = self.is_trained
# if not self.feed.fetched_once:
# if not silent:
@ -739,8 +786,16 @@ class UserSubscription(models.Model):
self.oldest_unread_story_date = oldest_unread_story_date
self.needs_unread_recalc = False
self.save()
update_fields = []
if self.unread_count_positive != ops: update_fields.append('unread_count_positive')
if self.unread_count_neutral != ont: update_fields.append('unread_count_neutral')
if self.unread_count_negative != ong: update_fields.append('unread_count_negative')
if self.oldest_unread_story_date != oousd: update_fields.append('oldest_unread_story_date')
if self.needs_unread_recalc != onur: update_fields.append('needs_unread_recalc')
if self.is_trained != oit: update_fields.append('is_trained')
if len(update_fields):
self.save(update_fields=update_fields)
if (self.unread_count_positive == 0 and
self.unread_count_neutral == 0):
self.mark_feed_read()
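
calculate_feed_scores, like many other save sites in this commit, now snapshots the original values and passes only the changed columns to save(update_fields=...), avoiding needless writes. A generic sketch of the idea (the helper name is made up):

def save_changed_fields(obj, fields):
    # Made-up helper illustrating the dirty-field tracking used in this commit:
    # snapshot the fields, return a callback that saves only what changed.
    original = dict((f, getattr(obj, f)) for f in fields)
    def commit():
        changed = [f for f in fields if getattr(obj, f) != original[f]]
        if changed:
            obj.save(update_fields=changed)
    return commit

# Usage sketch:
#   commit = save_changed_fields(sub, ['unread_count_positive', 'unread_count_neutral'])
#   ... recalculate counts on sub ...
#   commit()
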
@ -847,7 +902,7 @@ class UserSubscription(models.Model):
@classmethod
def verify_feeds_scheduled(cls, user_id):
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
user = User.objects.get(pk=user_id)
subs = cls.objects.filter(user=user)
feed_ids = [sub.feed.pk for sub in subs]

View file

@ -3,6 +3,7 @@ from apps.reader import views
urlpatterns = patterns('',
url(r'^$', views.index),
url(r'^buster', views.iframe_buster, name='iframe-buster'),
url(r'^login_as', views.login_as, name='login_as'),
url(r'^logout', views.logout, name='welcome-logout'),
url(r'^login', views.login, name='welcome-login'),
@ -54,5 +55,4 @@ urlpatterns = patterns('',
url(r'^send_story_email', views.send_story_email, name='send-story-email'),
url(r'^retrain_all_sites', views.retrain_all_sites, name='retrain-all-sites'),
url(r'^load_tutorial', views.load_tutorial, name='load-tutorial'),
url(r'^buster', views.iframe_buster, name='iframe-buster'),
)

View file

@ -68,11 +68,14 @@ BANNED_URLS = [
def index(request, **kwargs):
if request.method == "GET" and request.subdomain and request.subdomain not in ['dev', 'www', 'debug']:
username = request.subdomain
try:
if '.' in username:
username = username.split('.')[0]
user = User.objects.get(username__iexact=username)
except User.DoesNotExist:
if '.' in username:
username = username.split('.')[0]
user = User.objects.filter(username=username)
if not user:
user = User.objects.filter(username__iexact=username)
if user:
user = user[0]
if not user:
return HttpResponseRedirect('http://%s%s' % (
Site.objects.get_current().domain,
reverse('index')))
@ -1948,8 +1951,8 @@ def save_feed_chooser(request):
except Feed.DoesNotExist:
pass
request.user.profile.queue_new_feeds()
request.user.profile.refresh_stale_feeds(exclude_new=True)
UserSubscription.queue_new_feeds(request.user)
UserSubscription.refresh_stale_feeds(request.user, exclude_new=True)
r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
r.publish(request.user.username, 'reload:feeds')

View file

@ -72,14 +72,15 @@ class IconImporter(object):
self.feed_icon.save()
if settings.BACKED_BY_AWS.get('icons_on_s3'):
self.save_to_s3(image_str)
self.feed.favicon_color = color
self.feed.favicon_not_found = False
if self.feed.favicon_color != color:
self.feed.favicon_color = color
self.feed.favicon_not_found = False
self.feed.save(update_fields=['favicon_color', 'favicon_not_found'])
if not image:
self.feed_icon.not_found = True
self.feed.favicon_not_found = True
self.feed.save()
return not self.feed.favicon_not_found
def save_to_s3(self, image_str):

View file

@ -50,6 +50,8 @@ class Command(BaseCommand):
feeds = Feed.objects.all()
elif options['username']:
feeds = Feed.objects.filter(subscribers__user=User.objects.get(username=options['username']))
elif options['feed']:
feeds = Feed.objects.filter(pk=options['feed'])
else:
feeds = Feed.objects.filter(next_scheduled_update__lte=now, active=True)

View file

@ -1,7 +1,7 @@
from django.core.management.base import BaseCommand
from django.conf import settings
from optparse import make_option
from apps.rss_feeds.tasks import TaskFeeds
from apps.rss_feeds.tasks import TaskFeeds, TaskBrokenFeeds
import datetime
@ -9,9 +9,13 @@ class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option("-f", "--feed", default=None),
make_option("-a", "--all", default=False, action='store_true'),
make_option("-b", "--broken", help="Task broken feeds that havent been fetched in a day.", default=False, action='store_true'),
make_option('-V', '--verbose', action='store_true',
dest='verbose', default=False, help='Verbose output.'),
)
def handle(self, *args, **options):
TaskFeeds.apply()
if options['broken']:
TaskBrokenFeeds.apply()
else:
TaskFeeds.apply()

View file

@ -3,5 +3,5 @@ import redis
from apps.social.models import *
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
print "Redis: %s" % r

View file

@ -7,7 +7,7 @@ from apps.rss_feeds.models import Feed
class Migration(DataMigration):
def forwards(self, orm):
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
start = 0
for f in xrange(start, Feed.objects.latest('pk').pk, 1000):
print " ---> %s" % f

View file

@ -243,7 +243,7 @@ class Feed(models.Model):
return self
@classmethod
def index_all_for_search(cls, offset=0):
def index_all_for_search(cls, offset=0, subscribers=2):
if not offset:
SearchFeed.create_elasticsearch_mapping(delete=True)
@ -252,7 +252,7 @@ class Feed(models.Model):
print " ---> %s / %s (%.2s%%)" % (f, last_pk, float(f)/last_pk*100)
feeds = Feed.objects.filter(pk__in=range(f, f+1000),
active=True,
active_subscribers__gte=1)\
active_subscribers__gte=subscribers)\
.values_list('pk')
for feed_id, in feeds:
Feed.objects.get(pk=feed_id).index_feed_for_search()
@ -427,14 +427,14 @@ class Feed(models.Model):
@classmethod
def task_feeds(cls, feeds, queue_size=12, verbose=True):
if not feeds: return
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
if isinstance(feeds, Feed):
if verbose:
logging.debug(" ---> ~SN~FBTasking feed: ~SB%s" % feeds)
feeds = [feeds.pk]
elif verbose:
logging.debug(" ---> ~SN~FBTasking ~SB%s~SN feeds..." % len(feeds))
logging.debug(" ---> ~SN~FBTasking ~SB~FC%s~FB~SN feeds..." % len(feeds))
if isinstance(feeds, QuerySet):
feeds = [f.pk for f in feeds]
@ -452,7 +452,7 @@ class Feed(models.Model):
@classmethod
def drain_task_feeds(cls):
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
tasked_feeds = r.zrange('tasked_feeds', 0, -1)
logging.debug(" ---> ~FRDraining %s tasked feeds..." % len(tasked_feeds))
@ -465,11 +465,12 @@ class Feed(models.Model):
r.zremrangebyrank('error_feeds', 0, -1)
def update_all_statistics(self, full=True, force=False):
self.count_subscribers()
recount = not self.counts_converted_to_redis
self.count_subscribers(recount=recount)
self.calculate_last_story_date()
count_extra = False
if random.random() > .99 or not self.data.popular_tags or not self.data.popular_authors:
if random.random() < 0.01 or not self.data.popular_tags or not self.data.popular_authors:
count_extra = True
if force or full:
@ -494,9 +495,10 @@ class Feed(models.Model):
if not last_story_date or seconds_timesince(last_story_date) < 0:
last_story_date = datetime.datetime.now()
self.last_story_date = last_story_date
self.save()
if last_story_date != self.last_story_date:
self.last_story_date = last_story_date
self.save(update_fields=['last_story_date'])
@classmethod
def setup_feeds_for_premium_subscribers(cls, feed_ids):
@ -632,44 +634,115 @@ class Feed(models.Model):
return redirects, non_redirects
def count_subscribers(self, verbose=False):
SUBSCRIBER_EXPIRE = datetime.datetime.now() - datetime.timedelta(days=settings.SUBSCRIBER_EXPIRE)
from apps.reader.models import UserSubscription
@property
def original_feed_id(self):
if self.branch_from_feed:
original_feed_id = self.branch_from_feed.pk
return self.branch_from_feed.pk
else:
original_feed_id = self.pk
feed_ids = [f['id'] for f in Feed.objects.filter(branch_from_feed=original_feed_id).values('id')]
feed_ids.append(original_feed_id)
return self.pk
@property
def counts_converted_to_redis(self):
r = redis.Redis(connection_pool=settings.REDIS_FEED_SUB_POOL)
return r.zscore("s:%s" % self.original_feed_id, -1)
def count_subscribers(self, recount=True, verbose=False):
if recount:
from apps.profile.models import Profile
Profile.count_feed_subscribers(feed_id=self.pk)
SUBSCRIBER_EXPIRE_DATE = datetime.datetime.now() - datetime.timedelta(days=settings.SUBSCRIBER_EXPIRE)
subscriber_expire = int(SUBSCRIBER_EXPIRE_DATE.strftime('%s'))
now = int(datetime.datetime.now().strftime('%s'))
r = redis.Redis(connection_pool=settings.REDIS_FEED_SUB_POOL)
total = 0
active = 0
premium = 0
active_premium = 0
# Include all branched feeds in counts
feed_ids = [f['id'] for f in Feed.objects.filter(branch_from_feed=self.original_feed_id).values('id')]
feed_ids.append(self.original_feed_id)
feed_ids = list(set(feed_ids))
subs = UserSubscription.objects.filter(feed__in=feed_ids)
self.num_subscribers = subs.count()
if self.counts_converted_to_redis:
# Count subscribers separately for each branched feed
for feed_id in feed_ids:
pipeline = r.pipeline()
# now+1 keeps the -1 sentinel inside zcount's range, so the `- 1` correction below stays consistent
total_key = "s:%s" % feed_id
premium_key = "sp:%s" % feed_id
pipeline.zcard(total_key)
pipeline.zcount(total_key, subscriber_expire, now+1)
pipeline.zcard(premium_key)
pipeline.zcount(premium_key, subscriber_expire, now+1)
results = pipeline.execute()
# subtract 1 to drop the -1 sentinel that signals counts_converted_to_redis
total += results[0] - 1
active += results[1] - 1
premium += results[2] - 1
active_premium += results[3] - 1
# Check for expired feeds with no active users who would have triggered a cleanup
last_recount = r.zscore(total_key, -1)
if last_recount and last_recount < subscriber_expire:
logging.info(" ***> ~SN~BW~FBFeed has expired redis subscriber counts (%s < %s), clearing..." % (last_recount, subscriber_expire))
r.delete(total_key, -1)
r.delete(premium_key, -1)
original_num_subscribers = self.num_subscribers
original_active_subs = self.active_subscribers
original_premium_subscribers = self.premium_subscribers
original_active_premium_subscribers = self.active_premium_subscribers
logging.info(" ---> [%-30s] ~SN~FBCounting subscribers from ~FCredis~FB: ~FMt:~SB~FM%s~SN a:~SB%s~SN p:~SB%s~SN ap:~SB%s" %
(self.title[:30], total, active, premium, active_premium))
else:
from apps.reader.models import UserSubscription
subs = UserSubscription.objects.filter(feed__in=feed_ids)
original_num_subscribers = self.num_subscribers
total = subs.count()
active_subs = UserSubscription.objects.filter(
feed__in=feed_ids,
active=True,
user__profile__last_seen_on__gte=SUBSCRIBER_EXPIRE
)
self.active_subscribers = active_subs.count()
active_subs = UserSubscription.objects.filter(
feed__in=feed_ids,
active=True,
user__profile__last_seen_on__gte=SUBSCRIBER_EXPIRE_DATE
)
original_active_subs = self.active_subscribers
active = active_subs.count()
premium_subs = UserSubscription.objects.filter(
feed__in=feed_ids,
active=True,
user__profile__is_premium=True
)
self.premium_subscribers = premium_subs.count()
premium_subs = UserSubscription.objects.filter(
feed__in=feed_ids,
active=True,
user__profile__is_premium=True
)
original_premium_subscribers = self.premium_subscribers
premium = premium_subs.count()
active_premium_subscribers = UserSubscription.objects.filter(
feed__in=feed_ids,
active=True,
user__profile__is_premium=True,
user__profile__last_seen_on__gte=SUBSCRIBER_EXPIRE
)
self.active_premium_subscribers = active_premium_subscribers.count()
self.save()
active_premium_subscribers = UserSubscription.objects.filter(
feed__in=feed_ids,
active=True,
user__profile__is_premium=True,
user__profile__last_seen_on__gte=SUBSCRIBER_EXPIRE_DATE
)
original_active_premium_subscribers = self.active_premium_subscribers
active_premium = active_premium_subscribers.count()
logging.debug(" ---> [%-30s] ~SN~FBCounting subscribers from ~FYpostgres~FB: ~FMt:~SB~FM%s~SN a:~SB%s~SN p:~SB%s~SN ap:~SB%s" %
(self.title[:30], total, active, premium, active_premium))
# If any counts have changed, save them
self.num_subscribers = total
self.active_subscribers = active
self.premium_subscribers = premium
self.active_premium_subscribers = active_premium
if (self.num_subscribers != original_num_subscribers or
self.active_subscribers != original_active_subs or
self.premium_subscribers != original_premium_subscribers or
self.active_premium_subscribers != original_active_premium_subscribers):
self.save(update_fields=['num_subscribers', 'active_subscribers',
'premium_subscribers', 'active_premium_subscribers'])
if verbose:
if self.num_subscribers <= 1:
@ -681,7 +754,7 @@ class Feed(models.Model):
'' if self.num_subscribers == 1 else 's',
self.feed_title,
),
def _split_favicon_color(self):
color = self.favicon_color
if color:
@ -753,9 +826,9 @@ class Feed(models.Model):
month_ago = datetime.datetime.utcnow() - datetime.timedelta(days=30)
stories_last_month = MStory.objects(story_feed_id=self.pk,
story_date__gte=month_ago).count()
self.stories_last_month = stories_last_month
self.save()
if self.stories_last_month != stories_last_month:
self.stories_last_month = stories_last_month
self.save(update_fields=['stories_last_month'])
if verbose:
print " ---> %s [%s]: %s stories last month" % (self.feed_title, self.pk,
@ -828,13 +901,18 @@ class Feed(models.Model):
months.append((key, dates.get(key, 0)))
total += dates.get(key, 0)
month_count += 1
original_story_count_history = self.data.story_count_history
self.data.story_count_history = json.encode(months)
self.data.save()
if self.data.story_count_history != original_story_count_history:
self.data.save(update_fields=['story_count_history'])
original_average_stories_per_month = self.average_stories_per_month
if not total or not month_count:
self.average_stories_per_month = 0
else:
self.average_stories_per_month = int(round(total / float(month_count)))
self.save()
if self.average_stories_per_month != original_average_stories_per_month:
self.save(update_fields=['average_stories_per_month'])
def save_classifier_counts(self):
@ -888,15 +966,18 @@ class Feed(models.Model):
def update(self, **kwargs):
from utils import feed_fetcher
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
original_feed_id = int(self.pk)
if getattr(settings, 'TEST_DEBUG', False):
original_feed_address = self.feed_address
original_feed_link = self.feed_link
self.feed_address = self.feed_address.replace("%(NEWSBLUR_DIR)s", settings.NEWSBLUR_DIR)
if self.feed_link:
self.feed_link = self.feed_link.replace("%(NEWSBLUR_DIR)s", settings.NEWSBLUR_DIR)
self.save()
if self.feed_address != original_feed_address or self.feed_link != original_feed_link:
self.save(update_fields=['feed_address', 'feed_link'])
options = {
'verbose': kwargs.get('verbose'),
'timeout': 10,
@ -1139,8 +1220,9 @@ class Feed(models.Model):
# popular tags the size of a small planet. I'm looking at you
# Tumblr writers.
if len(popular_tags) < 1024:
self.data.popular_tags = popular_tags
self.data.save()
if self.data.popular_tags != popular_tags:
self.data.popular_tags = popular_tags
self.data.save(update_fields=['popular_tags'])
return
tags_list = []
@ -1160,8 +1242,9 @@ class Feed(models.Model):
popular_authors = json.encode(feed_authors)
if len(popular_authors) < 1023:
self.data.popular_authors = popular_authors
self.data.save()
if self.data.popular_authors != popular_authors:
self.data.popular_authors = popular_authors
self.data.save(update_fields=['popular_authors'])
return
if len(feed_authors) > 1:
@ -1468,28 +1551,38 @@ class Feed(models.Model):
if premium_speed:
self.active_premium_subscribers += 1
self.active_subscribers -= 1
upd = self.stories_last_month / 30.0
spd = self.stories_last_month / 30.0
subs = (self.active_premium_subscribers +
((self.active_subscribers - self.active_premium_subscribers) / 10.0))
# UPD = 1 Subs > 1: t = 5 # 11625 * 1440/5 = 3348000
# UPD = 1 Subs = 1: t = 60 # 17231 * 1440/60 = 413544
# UPD < 1 Subs > 1: t = 60 # 37904 * 1440/60 = 909696
# UPD < 1 Subs = 1: t = 60 * 12 # 143012 * 1440/(60*12) = 286024
# UPD = 0 Subs > 1: t = 60 * 3 # 28351 * 1440/(60*3) = 226808
# UPD = 0 Subs = 1: t = 60 * 24 # 807690 * 1440/(60*24) = 807690
if upd >= 1:
if subs > 1:
total = 10
# Calculate sub counts:
# SELECT COUNT(*) FROM feeds WHERE active_premium_subscribers > 10 AND stories_last_month >= 30;
# SELECT COUNT(*) FROM feeds WHERE active_premium_subscribers > 1 AND active_premium_subscribers < 10 AND stories_last_month >= 30;
# SELECT COUNT(*) FROM feeds WHERE active_premium_subscribers = 1 AND stories_last_month >= 30;
# SpD > 1 Subs > 10: t = 6 # 4267 * 1440/6 = 1024080
# SpD > 1 Subs > 1: t = 15 # 18973 * 1440/15 = 1821408
# SpD > 1 Subs = 1: t = 60 # 65503 * 1440/60 = 1572072
# SELECT COUNT(*) FROM feeds WHERE active_premium_subscribers > 1 AND stories_last_month < 30 AND stories_last_month > 0;
# SELECT COUNT(*) FROM feeds WHERE active_premium_subscribers = 1 AND stories_last_month < 30 AND stories_last_month > 0;
# SpD < 1 Subs > 1: t = 60 # 77618 * 1440/60 = 1862832
# SpD < 1 Subs = 1: t = 60 * 12 # 282186 * 1440/(60*12) = 564372
# SELECT COUNT(*) FROM feeds WHERE active_premium_subscribers > 1 AND stories_last_month = 0;
# SELECT COUNT(*) FROM feeds WHERE active_subscribers > 0 AND active_premium_subscribers <= 1 AND stories_last_month = 0;
# SpD = 0 Subs > 1: t = 60 * 3 # 30158 * 1440/(60*3) = 241264
# SpD = 0 Subs = 1: t = 60 * 24 # 514131 * 1440/(60*24) = 514131
if spd >= 1:
if subs > 10:
total = 6
elif subs > 1:
total = 15
else:
total = 60
elif upd > 0:
elif spd > 0:
if subs > 1:
total = 60 - (upd * 60)
total = 60 - (spd * 60)
else:
total = 60*12 - (upd * 60*12)
elif upd == 0:
total = 60*12 - (spd * 60*12)
elif spd == 0:
if subs > 1:
total = 60 * 6
else:
@ -1518,20 +1611,20 @@ class Feed(models.Model):
if len(fetch_history['push_history']):
total = total * 12
# 3 day max
# 2 day max
total = min(total, 60*24*2)
if verbose:
logging.debug(" ---> [%-30s] Fetched every %s min - Subs: %s/%s/%s Stories: %s" % (
logging.debug(" ---> [%-30s] Fetched every %s min - Subs: %s/%s/%s Stories/day: %s" % (
unicode(self)[:30], total,
self.num_subscribers,
self.active_subscribers,
self.active_premium_subscribers,
upd))
spd))
return total
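
With the renamed spd (stories per day) and the new thresholds above, the fetch interval works out, for example, like this:

# Worked example of the new decay heuristic:
#   spd  = stories_last_month / 30.0
#   subs = active_premium + ((active - active_premium) / 10.0)
spd, subs = 0.5, 3            # posts every other day, a few premium subscribers
total = 60 - (spd * 60)       # branch: 0 < spd < 1 and subs > 1
print(total)                  # 30.0 minutes between fetches, before error/push adjustments
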
def set_next_scheduled_update(self, verbose=False, skip_scheduling=False):
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
total = self.get_next_scheduled_update(force=True, verbose=verbose)
error_count = self.error_count
@ -1546,8 +1639,9 @@ class Feed(models.Model):
random_factor = random.randint(0, total) / 4
next_scheduled_update = datetime.datetime.utcnow() + datetime.timedelta(
minutes = total + random_factor)
original_min_to_decay = self.min_to_decay
self.min_to_decay = total
delta = self.next_scheduled_update - datetime.datetime.now()
minutes_to_next_fetch = (delta.seconds + (delta.days * 24 * 3600)) / 60
if minutes_to_next_fetch > self.min_to_decay or not skip_scheduling:
@ -1556,19 +1650,21 @@ class Feed(models.Model):
r.zadd('scheduled_updates', self.pk, self.next_scheduled_update.strftime('%s'))
r.zrem('tasked_feeds', self.pk)
r.srem('queued_feeds', self.pk)
self.save()
updated_fields = ['last_update', 'next_scheduled_update']
if self.min_to_decay != original_min_to_decay:
updated_fields.append('min_to_decay')
self.save(update_fields=updated_fields)
@property
def error_count(self):
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
fetch_errors = int(r.zscore('error_feeds', self.pk) or 0)
return fetch_errors + self.errors_since_good
def schedule_feed_fetch_immediately(self, verbose=True):
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
if verbose:
logging.debug(' ---> [%-30s] Scheduling feed fetch immediately...' % (unicode(self)[:30]))
@ -1588,7 +1684,7 @@ class Feed(models.Model):
self.save()
def queue_pushed_feed_xml(self, xml, latest_push_date_delta=None):
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
queue_size = r.llen("push_feeds")
if latest_push_date_delta:

View file

@ -8,6 +8,8 @@ from utils import log as logging
from utils import s3_utils as s3
from django.conf import settings
FEED_TASKING_MAX = 10000
class TaskFeeds(Task):
name = 'task-feeds'
@ -16,7 +18,7 @@ class TaskFeeds(Task):
settings.LOG_TO_STREAM = True
now = datetime.datetime.utcnow()
start = time.time()
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
tasked_feeds_size = r.zcard('tasked_feeds')
hour_ago = now - datetime.timedelta(hours=1)
@ -33,14 +35,34 @@ class TaskFeeds(Task):
r.zcard('scheduled_updates')))
# Regular feeds
if tasked_feeds_size < 10000:
feeds = r.srandmember('queued_feeds', 10000)
if tasked_feeds_size < FEED_TASKING_MAX:
feeds = r.srandmember('queued_feeds', FEED_TASKING_MAX)
Feed.task_feeds(feeds, verbose=True)
active_count = len(feeds)
else:
logging.debug(" ---> ~SN~FBToo many tasked feeds. ~SB%s~SN tasked." % tasked_feeds_size)
active_count = 0
cp1 = time.time()
logging.debug(" ---> ~SN~FBTasking %s feeds took ~SB%s~SN seconds (~SB%s~SN/~FG%s~FB~SN/%s tasked/queued/scheduled)" % (
active_count,
int((time.time() - start)),
r.zcard('tasked_feeds'),
r.scard('queued_feeds'),
r.zcard('scheduled_updates')))
class TaskBrokenFeeds(Task):
name = 'task-broken-feeds'
max_retries = 0
ignore_result = True
def run(self, **kwargs):
from apps.rss_feeds.models import Feed
settings.LOG_TO_STREAM = True
now = datetime.datetime.utcnow()
start = time.time()
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
logging.debug(" ---> ~SN~FBQueuing broken feeds...")
# Force refresh feeds
refresh_feeds = Feed.objects.filter(
@ -49,8 +71,10 @@ class TaskFeeds(Task):
active_subscribers__gte=1
).order_by('?')[:100]
refresh_count = refresh_feeds.count()
cp2 = time.time()
cp1 = time.time()
logging.debug(" ---> ~SN~FBFound %s active, unfetched broken feeds" % refresh_count)
# Mistakenly inactive feeds
hours_ago = (now - datetime.timedelta(minutes=10)).strftime('%s')
old_tasked_feeds = r.zrangebyscore('tasked_feeds', 0, hours_ago)
@ -62,11 +86,11 @@ class TaskFeeds(Task):
r.zincrby('error_feeds', feed_id, 1)
feed = Feed.get_by_id(feed_id)
feed.set_next_scheduled_update()
logging.debug(" ---> ~SN~FBRe-queuing ~SB%s~SN dropped feeds (~SB%s/%s~SN queued/tasked)" % (
inactive_count,
r.scard('queued_feeds'),
r.zcard('tasked_feeds')))
cp3 = time.time()
logging.debug(" ---> ~SN~FBRe-queuing ~SB%s~SN dropped/broken feeds (~SB%s/%s~SN queued/tasked)" % (
inactive_count,
r.scard('queued_feeds'),
r.zcard('tasked_feeds')))
cp2 = time.time()
old = now - datetime.timedelta(days=1)
old_feeds = Feed.objects.filter(
@ -74,28 +98,25 @@ class TaskFeeds(Task):
active_subscribers__gte=1
).order_by('?')[:500]
old_count = old_feeds.count()
cp4 = time.time()
cp3 = time.time()
logging.debug(" ---> ~FBTasking ~SB~FC%s~SN~FB/~FC%s~FB (~FC%s~FB/~FC%s~SN~FB) feeds... (%.4s/%.4s/%.4s/%.4s)" % (
active_count,
logging.debug(" ---> ~SN~FBTasking ~SBrefresh:~FC%s~FB inactive:~FC%s~FB old:~FC%s~SN~FB broken feeds... (%.4s/%.4s/%.4s)" % (
refresh_count,
inactive_count,
old_count,
cp1 - start,
cp2 - cp1,
cp3 - cp2,
cp4 - cp3
))
Feed.task_feeds(refresh_feeds, verbose=False)
Feed.task_feeds(old_feeds, verbose=False)
logging.debug(" ---> ~SN~FBTasking took ~SB%s~SN seconds (~SB%s~SN/~FG%s~FB~SN/%s tasked/queued/scheduled)" % (
logging.debug(" ---> ~SN~FBTasking broken feeds took ~SB%s~SN seconds (~SB%s~SN/~FG%s~FB~SN/%s tasked/queued/scheduled)" % (
int((time.time() - start)),
r.zcard('tasked_feeds'),
r.scard('queued_feeds'),
r.zcard('scheduled_updates')))
class UpdateFeeds(Task):
name = 'update-feeds'
@ -105,7 +126,7 @@ class UpdateFeeds(Task):
def run(self, feed_pks, **kwargs):
from apps.rss_feeds.models import Feed
from apps.statistics.models import MStatistics
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
mongodb_replication_lag = int(MStatistics.get('mongodb_replication_lag', 0))
compute_scores = bool(mongodb_replication_lag < 10)

View file

@ -22,6 +22,7 @@ from utils.user_functions import get_user
from utils.view_functions import get_argument_or_404
from utils.view_functions import required_params
from vendor.timezones.utilities import localtime_for_timezone
from utils.ratelimit import ratelimit
IGNORE_AUTOCOMPLETE = [
@ -149,6 +150,7 @@ def feed_autocomplete(request):
else:
return feeds
@ratelimit(minutes=1, requests=10)
@json.json_view
def load_feed_statistics(request, feed_id):
user = get_user(request)
@ -242,16 +244,29 @@ def exception_retry(request):
raise Http404
feed.schedule_feed_fetch_immediately()
feed.has_page_exception = False
feed.has_feed_exception = False
feed.active = True
changed = False
if feed.has_page_exception:
changed = True
feed.has_page_exception = False
if feed.has_feed_exception:
changed = True
feed.has_feed_exception = False
if not feed.active:
changed = True
feed.active = True
if changed:
feed.save(update_fields=['has_page_exception', 'has_feed_exception', 'active'])
original_fetched_once = feed.fetched_once
if reset_fetch:
logging.user(request, "~FRRefreshing exception feed: ~SB%s" % (feed))
feed.fetched_once = False
else:
logging.user(request, "~FRForcing refreshing feed: ~SB%s" % (feed))
feed.fetched_once = True
feed.save()
if feed.fetched_once != original_fetched_once:
feed.save(update_fields=['fetched_once'])
feed = feed.update(force=True, compute_scores=False, verbose=True)
feed = Feed.get_by_id(feed.pk)
@ -439,7 +454,7 @@ def status(request):
logging.user(request, "~SKNON-STAFF VIEWING RSS FEEDS STATUS!")
assert False
return HttpResponseForbidden()
minutes = int(request.GET.get('minutes', 10))
minutes = int(request.GET.get('minutes', 1))
now = datetime.datetime.now()
hour_ago = now - datetime.timedelta(minutes=minutes)
feeds = Feed.objects.filter(last_update__gte=hour_ago).order_by('-last_update')
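
load_feed_statistics is now wrapped in @ratelimit(minutes=1, requests=10) from utils.ratelimit; the decorator's implementation is not shown in this diff. A naive cache-counter sketch of a limiter with that shape (names and behavior are assumptions, not the project's code):

from functools import wraps
from django.core.cache import cache
from django.http import HttpResponse

def ratelimit(minutes=1, requests=10):
    # Assumed shape only -- utils.ratelimit in the repository may differ.
    def decorator(view):
        @wraps(view)
        def wrapped(request, *args, **kwargs):
            key = 'ratelimit:%s:%s' % (view.__name__,
                                       request.META.get('REMOTE_ADDR', ''))
            cache.add(key, 0, minutes * 60)       # start the window if absent
            if cache.incr(key) > requests:
                return HttpResponse('Rate limited', status=429)
            return view(request, *args, **kwargs)
        return wrapped
    return decorator
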

View file

@ -5,9 +5,11 @@ from optparse import make_option
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option("-o", "--offset", dest="offset", type="int", default=0, help="Specify offset to start at"),
make_option("-s", "--subscribers", dest="subscribers", type="int", default=2, help="Specify minimum number of subscribers"),
)
def handle(self, *args, **options):
offset = options['offset']
Feed.index_all_for_search(offset=offset)
subscribers = options.get('subscribers', None)
Feed.index_all_for_search(offset=offset, subscribers=subscribers)

View file

@ -1,16 +1,26 @@
import re
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from apps.rss_feeds.models import Feed
from apps.rss_feeds.models import Feed, MStory
from apps.reader.models import UserSubscription
from optparse import make_option
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option("-u", "--user", dest="user", nargs=1, help="Specify user id or username"),
make_option("-R", "--reindex", dest="reindex", action="store_true", help="Drop index and reindex all stories."),
)
def handle(self, *args, **options):
if options['reindex']:
MStory.index_all_for_search()
return
if not options['user']:
print "Missing user. Did you want to reindex everything? Use -R."
return
if re.match(r"([0-9]+)", options['user']):
user = User.objects.get(pk=int(options['user']))
else:

View file

@ -5,7 +5,6 @@ import pyes
import redis
import celery
import mongoengine as mongo
from pyes.query import MatchQuery
from django.conf import settings
from django.contrib.auth.models import User
from apps.search.tasks import IndexSubscriptionsForSearch
@ -141,6 +140,12 @@ class MUserSearch(mongo.Document):
@classmethod
def remove_all(cls, drop_index=False):
# You only need to drop the index if there is data you want to clear.
# A new search server won't need this, as there isn't anything to drop.
if drop_index:
logging.info(" ---> ~FRRemoving stories search index...")
SearchStory.drop()
user_searches = cls.objects.all()
logging.info(" ---> ~SN~FRRemoving ~SB%s~SN user searches..." % user_searches.count())
for user_search in user_searches:
@ -149,19 +154,12 @@ class MUserSearch(mongo.Document):
except Exception, e:
print " ****> Error on search removal: %s" % e
# You only need to drop the index if there is data you want to clear.
# A new search server won't need this, as there isn't anything to drop.
if drop_index:
logging.info(" ---> ~FRRemoving stories search index...")
SearchStory.drop()
def remove(self):
from apps.rss_feeds.models import Feed
from apps.reader.models import UserSubscription
user = User.objects.get(pk=self.user_id)
subscriptions = UserSubscription.objects.filter(user=self.user_id,
feed__search_indexed=True)
subscriptions = UserSubscription.objects.filter(user=self.user_id)
total = subscriptions.count()
removed = 0
@ -170,6 +168,8 @@ class MUserSearch(mongo.Document):
feed = sub.feed
except Feed.DoesNotExist:
continue
if not feed.search_indexed:
continue
feed.search_indexed = False
feed.save()
removed += 1
@ -202,28 +202,28 @@ class SearchStory:
'index': 'analyzed',
'store': 'no',
'type': 'string',
'analyzer': 'snowball',
'analyzer': 'standard',
},
'content': {
'boost': 1.0,
'index': 'analyzed',
'store': 'no',
'type': 'string',
'analyzer': 'snowball',
'analyzer': 'simple',
},
'tags': {
'boost': 2.0,
'index': 'analyzed',
'store': 'no',
'type': 'string',
'analyzer': 'snowball',
'analyzer': 'standard',
},
'author': {
'boost': 1.0,
'index': 'analyzed',
'store': 'no',
'type': 'string',
'analyzer': 'keyword',
'analyzer': 'simple',
},
'feed_id': {
'store': 'no',
@ -272,7 +272,7 @@ class SearchStory:
cls.ES.indices.refresh()
sort = "date:desc" if order == "newest" else "date:asc"
string_q = pyes.query.StringQuery(query, default_operator="AND")
string_q = pyes.query.QueryStringQuery(query, default_operator="AND")
feed_q = pyes.query.TermsQuery('feed_id', feed_ids[:1000])
q = pyes.query.BoolQuery(must=[string_q, feed_q])
try:
@ -290,8 +290,17 @@ class SearchStory:
class SearchFeed:
ES = pyes.ES(settings.ELASTICSEARCH_FEED_HOSTS)
_es_client = None
name = "feeds"
@classmethod
def ES(cls):
if cls._es_client is None:
cls._es_client = pyes.ES(settings.ELASTICSEARCH_FEED_HOSTS)
if not cls._es_client.indices.exists_index(cls.index_name()):
cls.create_elasticsearch_mapping()
return cls._es_client
@classmethod
def index_name(cls):
@ -300,11 +309,12 @@ class SearchFeed:
@classmethod
def type_name(cls):
return "%s-type" % cls.name
@classmethod
def create_elasticsearch_mapping(cls, delete=False):
if delete:
cls.ES.indices.delete_index_if_exists("%s-index" % cls.name)
cls.ES().indices.delete_index_if_exists(cls.index_name())
settings = {
"index" : {
"analysis": {
@ -314,46 +324,23 @@ class SearchFeed:
"tokenizer": "lowercase",
"type": "custom"
},
"ngram_analyzer": {
"filter": ["ngram"],
"tokenizer": "lowercase",
"type": "custom"
}
},
"filter": {
"edgengram": {
"max_gram": "15",
"min_gram": "2",
"min_gram": "1",
"type": "edgeNGram"
},
"ngram": {
"max_gram": "15",
"min_gram": "3",
"type": "nGram"
}
},
"tokenizer": {
"edgengram_tokenizer": {
"max_gram": "15",
"min_gram": "2",
"side": "front",
"type": "edgeNGram"
},
"ngram_tokenizer": {
"max_gram": "15",
"min_gram": "3",
"type": "nGram"
}
}
}
}
}
cls.ES.indices.create_index_if_missing("%s-index" % cls.name, settings)
cls.ES().indices.create_index_if_missing(cls.index_name(), settings)
mapping = {
"address": {
"analyzer": "edgengram_analyzer",
"store": True,
"store": False,
"term_vector": "with_positions_offsets",
"type": "string"
},
@ -368,15 +355,22 @@ class SearchFeed:
},
"title": {
"analyzer": "edgengram_analyzer",
"store": True,
"store": False,
"term_vector": "with_positions_offsets",
"type": "string"
},
"link": {
"analyzer": "edgengram_analyzer",
"store": False,
"term_vector": "with_positions_offsets",
"type": "string"
}
}
cls.ES.indices.put_mapping("%s-type" % cls.name, {
cls.ES().indices.put_mapping(cls.type_name(), {
'properties': mapping,
}, ["%s-index" % cls.name])
}, [cls.index_name()])
cls.ES().indices.flush()
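
The edgengram filter (min_gram 1, max_gram 15) indexes every prefix of each lowercased token, which is what lets feed autocomplete match partial typing. For example:

# Prefixes an edge n-gram filter with min_gram=1, max_gram=15 emits for one token:
token = "newsblur"
print([token[:i] for i in range(1, min(len(token), 15) + 1)])
# -> ['n', 'ne', 'new', 'news', 'newsb', 'newsbl', 'newsblu', 'newsblur']
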
@classmethod
def index(cls, feed_id, title, address, link, num_subscribers):
doc = {
@ -387,37 +381,27 @@ class SearchFeed:
"num_subscribers" : num_subscribers,
}
try:
cls.ES.index(doc, "%s-index" % cls.name, "%s-type" % cls.name, feed_id)
cls.ES().index(doc, cls.index_name(), cls.type_name(), feed_id)
except pyes.exceptions.NoServerAvailable:
logging.debug(" ***> ~FRNo search server available.")
@classmethod
def query(cls, text):
cls.create_elasticsearch_mapping()
try:
cls.ES.default_indices = cls.index_name()
cls.ES.indices.refresh()
cls.ES().default_indices = cls.index_name()
cls.ES().indices.refresh()
except pyes.exceptions.NoServerAvailable:
logging.debug(" ***> ~FRNo search server available.")
return []
logging.info("~FGSearch ~FCfeeds~FG by address: ~SB%s" % text)
q = MatchQuery('address', text, operator="and", type="phrase")
results = cls.ES.search(query=q, sort="num_subscribers:desc", size=5,
doc_types=[cls.type_name()])
if not results.total:
logging.info("~FGSearch ~FCfeeds~FG by title: ~SB%s" % text)
q = MatchQuery('title', text, operator="and")
results = cls.ES.search(query=q, sort="num_subscribers:desc", size=5,
doc_types=[cls.type_name()])
if not results.total:
logging.info("~FGSearch ~FCfeeds~FG by link: ~SB%s" % text)
q = MatchQuery('link', text, operator="and")
results = cls.ES.search(query=q, sort="num_subscribers:desc", size=5,
doc_types=[cls.type_name()])
logging.info("~FGSearch ~FCfeeds~FG: ~SB%s" % text)
q = pyes.query.BoolQuery()
q.add_should(pyes.query.MatchQuery('address', text, analyzer="simple", cutoff_frequency=0.0005, minimum_should_match="75%"))
q.add_should(pyes.query.MatchQuery('link', text, analyzer="simple", cutoff_frequency=0.0005, minimum_should_match="75%"))
q.add_should(pyes.query.MatchQuery('title', text, analyzer="simple", cutoff_frequency=0.0005, minimum_should_match="75%"))
q = pyes.Search(q, min_score=1)
results = cls.ES().search(query=q, size=5, doc_types=[cls.type_name()], sort="num_subscribers:desc")
return results
@classmethod

View file

@ -151,9 +151,12 @@ class MSocialProfile(mongo.Document):
@classmethod
def get_user(cls, user_id):
profile, created = cls.objects.get_or_create(user_id=user_id)
if created:
try:
profile = cls.objects.get(user_id=user_id)
except cls.DoesNotExist:
profile = cls.objects.create(user_id=user_id)
profile.save()
return profile
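The try/get/except DoesNotExist/create sequence above is the pattern this commit applies everywhere mongoengine's non-atomic get_or_create() used to be called. A generic sketch of the pattern, with a hypothetical document class and fields:

    import mongoengine as mongo

    class MExample(mongo.Document):
        # Hypothetical document, used only to illustrate the lookup-then-create pattern.
        user_id = mongo.IntField(unique=True)
        score = mongo.IntField(default=0)

    def get_or_make(user_id, **defaults):
        # Fetch first; only create (applying the extra defaults) when nothing exists yet.
        try:
            return MExample.objects.get(user_id=user_id)
        except MExample.DoesNotExist:
            return MExample.objects.create(user_id=user_id, **defaults)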
def save(self, *args, **kwargs):
@ -482,8 +485,11 @@ class MSocialProfile(mongo.Document):
MInteraction.new_follow(follower_user_id=self.user_id, followee_user_id=user_id)
MActivity.new_follow(follower_user_id=self.user_id, followee_user_id=user_id)
socialsub, _ = MSocialSubscription.objects.get_or_create(user_id=self.user_id,
subscription_user_id=user_id)
params = dict(user_id=self.user_id, subscription_user_id=user_id)
try:
socialsub = MSocialSubscription.objects.get(**params)
except MSocialSubscription.DoesNotExist:
socialsub = MSocialSubscription.objects.create(**params)
socialsub.needs_unread_recalc = True
socialsub.save()
@ -1740,10 +1746,12 @@ class MSharedStory(mongo.Document):
story_values = {
'user_id': popular_profile.user_id,
'story_guid': story_db['story_guid'],
'defaults': story_db,
}
shared_story, created = MSharedStory.objects.get_or_create(**story_values)
if created:
try:
shared_story = MSharedStory.objects.get(**story_values)
except MSharedStory.DoesNotExist:
story_values.update(story_db)
shared_story = MSharedStory.objects.create(**story_values)
shared_story.post_to_service('twitter')
shared += 1
shared_feed_ids.append(story.story_feed_id)
@ -2879,7 +2887,9 @@ class MInteraction(mongo.Document):
'category': 'follow',
}
try:
cls.objects.get_or_create(**params)
cls.objects.get(**params)
except cls.DoesNotExist:
cls.objects.create(**params)
except cls.MultipleObjectsReturned:
dupes = cls.objects.filter(**params).order_by('-date')
logging.debug(" ---> ~FRDeleting dupe follow interactions. %s found." % dupes.count())
@ -2933,16 +2943,17 @@ class MInteraction(mongo.Document):
@classmethod
def new_comment_like(cls, liking_user_id, comment_user_id, story_id, story_feed_id, story_title, comments):
cls.objects.get_or_create(user_id=comment_user_id,
with_user_id=liking_user_id,
category="comment_like",
feed_id="social:%s" % comment_user_id,
story_feed_id=story_feed_id,
content_id=story_id,
defaults={
"title": story_title,
"content": comments,
})
params = dict(user_id=comment_user_id,
with_user_id=liking_user_id,
category="comment_like",
feed_id="social:%s" % comment_user_id,
story_feed_id=story_feed_id,
content_id=story_id)
try:
cls.objects.get(**params)
except cls.DoesNotExist:
params.update(dict(title=story_title, content=comments))
cls.objects.create(**params)
cls.publish_update_to_subscribers(comment_user_id)
@ -3092,11 +3103,15 @@ class MActivity(mongo.Document):
@classmethod
def new_starred_story(cls, user_id, story_title, story_feed_id, story_id):
cls.objects.get_or_create(user_id=user_id,
category='star',
story_feed_id=story_feed_id,
content_id=story_id,
defaults=dict(content=story_title))
params = dict(user_id=user_id,
category='star',
story_feed_id=story_feed_id,
content_id=story_id)
try:
cls.objects.get(**params)
except cls.DoesNotExist:
params.update(dict(content=story_title))
cls.objects.create(**params)
@classmethod
def remove_starred_story(cls, user_id, story_feed_id, story_id):
@ -3117,7 +3132,10 @@ class MActivity(mongo.Document):
"feed_id": feed_id,
}
try:
cls.objects.get_or_create(defaults=dict(content=feed_title), **params)
cls.objects.get(**params)
except cls.DoesNotExist:
params.update(dict(content=feed_title))
cls.objects.create(**params)
except cls.MultipleObjectsReturned:
dupes = cls.objects.filter(**params).order_by('-date')
logging.debug(" ---> ~FRDeleting dupe feed subscription activities. %s found." % dupes.count())
@ -3133,7 +3151,9 @@ class MActivity(mongo.Document):
'category': 'follow',
}
try:
cls.objects.get_or_create(**params)
cls.objects.get(**params)
except cls.DoesNotExist:
cls.objects.create(**params)
except cls.MultipleObjectsReturned:
dupes = cls.objects.filter(**params).order_by('-date')
logging.debug(" ---> ~FRDeleting dupe follow activities. %s found." % dupes.count())
@ -3181,16 +3201,17 @@ class MActivity(mongo.Document):
@classmethod
def new_comment_like(cls, liking_user_id, comment_user_id, story_id, story_feed_id, story_title, comments):
cls.objects.get_or_create(user_id=liking_user_id,
with_user_id=comment_user_id,
category="comment_like",
feed_id="social:%s" % comment_user_id,
story_feed_id=story_feed_id,
content_id=story_id,
defaults={
"title": story_title,
"content": comments,
})
params = dict(user_id=liking_user_id,
with_user_id=comment_user_id,
category="comment_like",
feed_id="social:%s" % comment_user_id,
story_feed_id=story_feed_id,
content_id=story_id)
try:
cls.objects.get(**params)
except cls.DoesNotExist:
params.update(dict(title=story_title, content=comments))
cls.objects.create(**params)
@classmethod
def new_shared_story(cls, user_id, source_user_id, story_title, comments, story_feed_id, story_id, share_date=None):
@ -3201,13 +3222,17 @@ class MActivity(mongo.Document):
"story_feed_id": story_feed_id,
"content_id": story_id,
}
extradata = {'with_user_id': source_user_id,
'title': story_title,
'content': comments}
try:
a, _ = cls.objects.get_or_create(defaults={
'with_user_id': source_user_id,
'title': story_title,
'content': comments,
}, **data)
a = cls.objects.get(**data)
except cls.DoesNotExist:
merged = {}
merged.update(data)
merged.update(extradata)
a = cls.objects.create(**merged)
except cls.MultipleObjectsReturned:
dupes = cls.objects.filter(**data)
logging.debug(" ---> ~FRDeleting dupe shared story activities. %s found." % dupes.count())
@ -3243,9 +3268,12 @@ class MActivity(mongo.Document):
@classmethod
def new_signup(cls, user_id):
cls.objects.get_or_create(user_id=user_id,
with_user_id=user_id,
category="signup")
params = dict(user_id=user_id, with_user_id=user_id, category="signup")
try:
return cls.objects.get(**params)
except cls.DoesNotExist:
return cls.objects.create(**params)
class MFollowRequest(mongo.Document):
follower_user_id = mongo.IntField(unique_with='followee_user_id')
@ -3262,8 +3290,11 @@ class MFollowRequest(mongo.Document):
@classmethod
def add(cls, follower_user_id, followee_user_id):
cls.objects.get_or_create(follower_user_id=follower_user_id,
followee_user_id=followee_user_id)
params = dict(follower_user_id=follower_user_id, followee_user_id=followee_user_id)
try:
cls.objects.get(**params)
except cls.DoesNotExist:
cls.objects.create(**params)
@classmethod
def remove(cls, follower_user_id, followee_user_id):

View file

@ -482,8 +482,10 @@ def load_social_page(request, user_id, username=None, **kwargs):
'social_services': social_services,
}
logging.user(request, "~FYLoading ~FMsocial ~SBpage~SN~FY: ~SB%s%s" % (
social_profile.title[:22], ('~SN/p%s' % page) if page > 1 else ''))
logging.user(request, "~FYLoading ~FMsocial page~FY: ~SB%s%s ~FM%s/%s" % (
social_profile.title[:22], ('~SN/p%s' % page) if page > 1 else '',
request.META.get('HTTP_USER_AGENT', "")[:40],
request.META.get('HTTP_X_FORWARDED_FOR', "")))
if format == 'html':
template = 'social/social_stories.xhtml'
else:
@ -913,7 +915,10 @@ def profile(request):
@json.json_view
def load_user_profile(request):
social_profile = MSocialProfile.get_user(request.user.pk)
social_services, _ = MSocialServices.objects.get_or_create(user_id=request.user.pk)
try:
social_services = MSocialServices.objects.get(user_id=request.user.pk)
except MSocialServices.DoesNotExist:
social_services = MSocialServices.objects.create(user_id=request.user.pk)
logging.user(request, "~BB~FRLoading social profile and blurblog settings")

View file

@ -12,6 +12,7 @@ from utils import db_functions
class MStatistics(mongo.Document):
key = mongo.StringField(unique=True)
value = mongo.DynamicField()
expiration_date = mongo.DateTimeField()
meta = {
'collection': 'statistics',
@ -27,12 +28,20 @@ class MStatistics(mongo.Document):
obj = cls.objects.filter(key=key).first()
if not obj:
return default
if obj.expiration_date and obj.expiration_date < datetime.datetime.now():
obj.delete()
return default
return obj.value
@classmethod
def set(cls, key, value):
obj, _ = cls.objects.get_or_create(key=key)
def set(cls, key, value, expiration_sec=None):
try:
obj = cls.objects.get(key=key)
except cls.DoesNotExist:
obj = cls.objects.create(key=key)
obj.value = value
if expiration_sec:
obj.expiration_date = datetime.datetime.now() + datetime.timedelta(seconds=expiration_sec)
obj.save()
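With the new expiration_date field, a value stored via expiration_sec silently disappears on the first get() after the deadline passes. Rough usage, assuming get(key, default) as defined above (the key name and timeout are illustrative):

    # Cache a counter for one hour.
    MStatistics.set('feeds_fetched', 42, expiration_sec=60 * 60)

    # Within the hour the cached value comes back.
    MStatistics.get('feeds_fetched', 0)   # -> 42

    # After it expires, get() deletes the stale document and returns the default.
    MStatistics.get('feeds_fetched', 0)   # -> 0 once expiration_date has passed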
@classmethod

View file

@ -1,8 +1,8 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.newsblur"
android:versionCode="99"
android:versionName="4.3.1b1" >
android:versionCode="101"
android:versionName="4.3.2" >
<uses-sdk
android:minSdkVersion="11"

Binary file not shown.

Width:  |  Height:  |  Size: 421 B

Binary file not shown.

Width:  |  Height:  |  Size: 311 B

Binary file not shown.

Width:  |  Height:  |  Size: 1.3 KiB

Binary file not shown.

Width:  |  Height:  |  Size: 8.5 KiB

View file

@ -1,50 +1,198 @@
<?xml version="1.0" encoding="utf-8"?>
<android.support.v4.widget.SwipeRefreshLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:id="@+id/swipe_container"
android:layout_width="match_parent"
android:layout_height="match_parent">
<RelativeLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent" >
<RelativeLayout
android:layout_width="fill_parent"
android:layout_height="fill_parent" >
android:id="@+id/main_top_bar"
android:layout_width="match_parent"
android:layout_height="50dp"
android:layout_alignParentTop="true"
style="?actionbarBackground" >
<fragment
android:id="@+id/fragment_feedintelligenceselector"
android:name="com.newsblur.fragment.FeedIntelligenceSelectorFragment"
android:layout_width="fill_parent"
<ImageView
android:id="@+id/main_user_image"
android:src="@drawable/logo"
android:layout_height="41dp"
android:layout_width="41dp"
android:layout_marginTop="4dp"
android:layout_marginBottom="5dp"
android:layout_marginLeft="10dp"
android:layout_marginRight="4dp"
android:layout_alignParentLeft="true"
android:layout_alignParentTop="true" />
<TextView
android:id="@+id/main_user_name"
android:layout_height="22dp"
android:layout_width="match_parent"
android:layout_marginTop="4dp"
android:layout_marginLeft="6dp"
android:layout_marginBottom="1dp"
android:layout_toRightOf="@id/main_user_image"
android:layout_alignParentTop="true"
android:textStyle="bold"
android:textSize="16dp" />
<ImageView
android:id="@+id/main_unread_count_neut_icon"
android:layout_width="11dp"
android:layout_height="11dp"
android:layout_below="@+id/main_user_name"
android:layout_toRightOf="@id/main_user_image"
android:layout_marginLeft="6dp"
android:layout_marginTop="5dp"
android:src="@drawable/g_icn_unread" />
<TextView
android:id="@+id/main_unread_count_neut_text"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_below="@+id/main_user_name"
android:layout_toRightOf="@id/main_unread_count_neut_icon"
android:layout_marginLeft="3dp"
android:textSize="14sp" />
<ImageView
android:id="@+id/main_unread_count_posi_icon"
android:layout_width="11dp"
android:layout_height="11dp"
android:layout_below="@+id/main_user_name"
android:layout_toRightOf="@id/main_unread_count_neut_text"
android:layout_marginLeft="8dp"
android:layout_marginTop="5dp"
android:src="@drawable/g_icn_focus" />
<TextView
android:id="@+id/main_unread_count_posi_text"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_below="@+id/main_user_name"
android:layout_toRightOf="@id/main_unread_count_posi_icon"
android:layout_marginLeft="3dp"
android:textSize="14sp" />
<Button
android:id="@+id/main_profile_button"
android:layout_width="34dp"
android:layout_height="34dp"
android:layout_margin="8dp"
android:background="@drawable/ic_menu_profile"
android:contentDescription="@string/menu_profile"
android:layout_alignParentTop="true"
android:layout_alignParentRight="true" />
</RelativeLayout>
<!-- The all/unread/focus toggle. Despite being at the bottom, this needs
to be defined first so that other things can be placed above it. -->
<fragment
android:id="@+id/fragment_feedintelligenceselector"
android:name="com.newsblur.fragment.FeedIntelligenceSelectorFragment"
android:layout_width="match_parent"
android:layout_height="44dp"
android:layout_alignParentBottom="true"
android:tag="feedIntelligenceSelector" />
<Button
android:id="@+id/main_menu_button"
android:layout_width="34dp"
android:layout_height="34dp"
android:layout_margin="5dp"
android:background="@drawable/ic_menu_moreoverflow"
android:contentDescription="@string/description_menu"
android:layout_alignParentBottom="true"
android:layout_alignParentRight="true" />
<Button
android:id="@+id/main_add_button"
android:layout_width="36dp"
android:layout_height="36dp"
android:layout_margin="4dp"
android:background="@drawable/ic_menu_add"
android:contentDescription="@string/menu_add_feed"
android:layout_alignParentBottom="true"
android:layout_alignParentLeft="true" />
<!-- A pane that sits behind the feed list and acts as its background; when
the list is empty, it shows through as an explainer. -->
<RelativeLayout
android:id="@+id/folderfeedlist_background"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_below="@id/main_top_bar"
android:layout_above="@id/fragment_feedintelligenceselector"
style="?listBackground">
<TextView
android:id="@+id/empty_view_text"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_alignParentBottom="true"
android:tag="feedIntelligenceSelector" />
android:layout_marginBottom="15dp"
android:gravity="center_horizontal"
style="?explainerText"
android:visibility="invisible"
android:textSize="16dp" />
<ImageView
android:id="@+id/empty_view_image"
android:layout_width="80dp"
android:layout_height="80dp"
android:contentDescription="@string/description_empty_list_image"
android:src="@drawable/world_big"
android:layout_above="@id/empty_view_text"
android:layout_marginBottom="15dp"
android:layout_centerHorizontal="true"
android:scaleType="fitCenter"
android:visibility="invisible" />
</RelativeLayout>
<!-- The scrollable and pull-able feed list. -->
<android.support.v4.widget.SwipeRefreshLayout
android:id="@+id/swipe_container"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_below="@id/main_top_bar"
android:layout_above="@id/fragment_feedintelligenceselector" >
<fragment
android:id="@+id/fragment_folderfeedlist"
android:name="com.newsblur.fragment.FolderListFragment"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:layout_above="@id/fragment_feedintelligenceselector"
android:layout_alignParentTop="true"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:tag="folderFeedListFragment" />
<View
android:id="@+id/feedintelligenceselector_border"
android:layout_width="fill_parent"
android:layout_height="1dp"
android:background="@color/midgray"
android:layout_below="@id/fragment_folderfeedlist" />
</android.support.v4.widget.SwipeRefreshLayout>
<TextView
android:id="@+id/main_sync_status"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:layout_above="@+id/fragment_feedintelligenceselector"
android:padding="2dp"
android:textSize="14sp"
android:gravity="center"
android:textColor="@color/status_overlay_text"
android:background="@color/status_overlay_background"
android:text="SYNC STATUS" />
<View
android:id="@+id/top_bar_border"
android:layout_width="fill_parent"
android:layout_height="1dp"
android:background="@color/midgray"
android:layout_above="@id/swipe_container" />
</RelativeLayout>
</android.support.v4.widget.SwipeRefreshLayout>
<View
android:id="@+id/feedintelligenceselector_border"
android:layout_width="fill_parent"
android:layout_height="1dp"
android:background="@color/midgray"
android:layout_below="@id/swipe_container" />
<TextView
android:id="@+id/main_sync_status"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:layout_above="@+id/fragment_feedintelligenceselector"
android:padding="2dp"
android:textSize="14sp"
android:gravity="center"
android:textColor="@color/status_overlay_text"
android:background="@color/status_overlay_background"
android:text="SYNC STATUS" />
</RelativeLayout>

View file

@ -4,4 +4,6 @@
android:layout_width="match_parent"
android:layout_height="match_parent"
android:divider="@null"
/>
android:background="#00000000"
android:cacheColorHint="#00000000"
/>

View file

@ -13,13 +13,25 @@
<TextView
android:id="@+id/empty_view_text"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_centerInParent="true"
android:gravity="center"
android:layout_height="wrap_content"
android:layout_alignParentTop="true"
android:layout_marginTop="50dp"
android:gravity="center_horizontal"
android:text="@string/empty_list_view_loading"
style="?defaultText"
android:textSize="13dp"
android:textStyle="italic" />
android:textSize="16dp" />
<ImageView
android:id="@+id/empty_view_image"
android:layout_width="80dp"
android:layout_height="80dp"
android:contentDescription="@string/description_empty_list_image"
android:src="@drawable/world_big"
android:layout_below="@id/empty_view_text"
android:layout_marginTop="15dp"
android:layout_centerHorizontal="true"
android:scaleType="fitCenter"
android:visibility="invisible" />
<com.newsblur.view.ProgressThrobber
android:id="@+id/empty_view_loading_throb"

View file

@ -28,53 +28,6 @@
android:textSize="14dp"
android:textStyle="bold" />
<LinearLayout
android:id="@+id/row_foldersums"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentRight="true"
android:layout_centerVertical="true"
android:layout_marginRight="6dp" >
<TextView
android:id="@+id/row_foldersumneu"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginRight="3dp"
android:layout_alignParentRight="true"
android:background="@drawable/neutral_count_rect"
android:gravity="center"
android:paddingLeft="3dp"
android:paddingRight="3dp"
android:paddingTop="1dp"
android:paddingBottom="2dp"
android:shadowColor="@color/neutral_drop_shadow"
android:shadowDy="1"
android:shadowRadius="1"
android:textColor="@color/white"
android:textSize="14sp"
android:textStyle="bold" />
<TextView
android:id="@+id/row_foldersumpos"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginRight="3dp"
android:layout_alignParentRight="true"
android:background="@drawable/positive_count_rect"
android:gravity="center"
android:paddingLeft="3dp"
android:paddingRight="3dp"
android:paddingTop="1dp"
android:paddingBottom="2dp"
android:shadowColor="@color/positive_drop_shadow"
android:shadowDy="1"
android:shadowRadius="1"
android:textColor="@color/white"
android:textSize="14sp"
android:textStyle="bold" />
</LinearLayout>
<View
android:layout_width="match_parent"
android:layout_height="1dp"

View file

@ -28,6 +28,7 @@
<attr name="itemHeaderDivider" format="string" />
<attr name="storyCommentDivider" format="string" />
<attr name="activityDetailsPager" format="string" />
<attr name="explainerText" format="string" />
<declare-styleable name="FlowLayout">
<attr name="flow" />

View file

@ -33,6 +33,8 @@
<string name="description_activity_icon">An icon illustrating the user\'s activity</string>
<string name="description_follow_button">Follow or unfollow a user</string>
<string name="description_comment_user">Comment user image</string>
<string name="description_empty_list_image">Empty list placeholder</string>
<string name="description_menu">Menu</string>
<string name="reading_shared_count">%s shares</string>
<string name="reading_comment_count">%s comments</string>
@ -147,6 +149,9 @@
<string name="empty_list_view_loading">Loading…</string>
<string name="empty_list_view_no_stories">No stories to read</string>
<string name="empty_list_view_no_unread_stories">You have no unread stories.</string>
<string name="empty_list_view_no_focus_stories">You have no unread stories in Focus mode.\n\nSwitch to All or Unread.</string>
<string name="login_registration_register">Register</string>
<string name="get_started">Let\'s get started</string>

View file

@ -259,4 +259,13 @@
<style name="storyCommentDivider.dark">
<item name="android:background">@color/dark_story_comment_divider</item>
</style>
<style name="explainerText">
<item name="android:textColor">@color/midgray</item>
</style>
<style name="explainerText.dark">
<item name="android:textColor">@color/lightgray</item>
</style>
</resources>

View file

@ -29,6 +29,7 @@
<item name="itemHeaderDivider">@style/itemHeaderDivider</item>
<item name="storyCommentDivider">@style/storyCommentDivider</item>
<item name="activityDetailsPager">@style/activityDetailsPager</item>
<item name="explainerText">@style/explainerText</item>
</style>
<style name="NewsBlurDarkTheme" parent="@android:style/Theme.Holo" >
@ -60,5 +61,6 @@
<item name="itemHeaderDivider">@style/itemHeaderDivider.dark</item>
<item name="storyCommentDivider">@style/storyCommentDivider.dark</item>
<item name="activityDetailsPager">@style/activityDetailsPager.dark</item>
<item name="explainerText">@style/explainerText.dark</item>
</style>
</resources>

View file

@ -1,7 +1,7 @@
package com.newsblur.activity;
import android.app.ActionBar;
import android.content.Intent;
import android.graphics.Bitmap;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.app.DialogFragment;
@ -15,10 +15,14 @@ import android.view.MenuItem;
import android.view.View;
import android.view.Window;
import android.widget.AbsListView;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.PopupMenu;
import android.widget.TextView;
import butterknife.ButterKnife;
import butterknife.FindView;
import butterknife.OnClick;
import com.newsblur.R;
import com.newsblur.fragment.FeedIntelligenceSelectorFragment;
@ -34,7 +38,7 @@ import com.newsblur.util.StateFilter;
import com.newsblur.util.UIUtils;
import com.newsblur.view.StateToggleButton.StateChangedListener;
public class Main extends NbActivity implements StateChangedListener, SwipeRefreshLayout.OnRefreshListener, AbsListView.OnScrollListener {
public class Main extends NbActivity implements StateChangedListener, SwipeRefreshLayout.OnRefreshListener, AbsListView.OnScrollListener, PopupMenu.OnMenuItemClickListener {
private FolderListFragment folderFeedList;
private FragmentManager fragmentManager;
@ -42,6 +46,13 @@ public class Main extends NbActivity implements StateChangedListener, SwipeRefre
private SwipeRefreshLayout swipeLayout;
private boolean wasSwipeEnabled = false;
@FindView(R.id.main_sync_status) TextView overlayStatusText;
@FindView(R.id.empty_view_image) ImageView emptyViewImage;
@FindView(R.id.empty_view_text) TextView emptyViewText;
@FindView(R.id.main_menu_button) Button menuButton;
@FindView(R.id.main_user_image) ImageView userImage;
@FindView(R.id.main_user_name) TextView userName;
@FindView(R.id.main_unread_count_neut_text) TextView unreadCountNeutText;
@FindView(R.id.main_unread_count_posi_text) TextView unreadCountPosiText;
@Override
public void onCreate(Bundle savedInstanceState) {
@ -57,7 +68,7 @@ public class Main extends NbActivity implements StateChangedListener, SwipeRefre
setContentView(R.layout.activity_main);
ButterKnife.bind(this);
getActionBar().setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD);
getActionBar().hide();
swipeLayout = (SwipeRefreshLayout)findViewById(R.id.swipe_container);
swipeLayout.setColorScheme(R.color.refresh_1, R.color.refresh_2, R.color.refresh_3, R.color.refresh_4);
@ -70,6 +81,13 @@ public class Main extends NbActivity implements StateChangedListener, SwipeRefre
// make sure the interval sync is scheduled, since we are the root Activity
BootReceiver.scheduleSyncService(this);
Bitmap userPicture = PrefsUtils.getUserImage(this);
if (userPicture != null) {
userPicture = UIUtils.roundCorners(userPicture, 5);
userImage.setImageBitmap(userPicture);
}
userName.setText(PrefsUtils.getUserDetails(this).username);
}
@Override
@ -83,6 +101,7 @@ public class Main extends NbActivity implements StateChangedListener, SwipeRefre
FeedUtils.clearReadingSession();
updateStatusIndicators();
folderFeedList.pushUnreadCounts();
triggerSync();
if (PrefsUtils.isLightThemeSelected(this) != isLightTheme) {
@ -90,70 +109,6 @@ public class Main extends NbActivity implements StateChangedListener, SwipeRefre
}
}
@Override
public boolean onPrepareOptionsMenu(Menu menu) {
MenuItem loginAsItem = menu.findItem(R.id.menu_loginas);
if (NBSyncService.isStaff == Boolean.TRUE) {
loginAsItem.setVisible(true);
} else {
loginAsItem.setVisible(false);
}
return super.onPrepareOptionsMenu(menu);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
super.onCreateOptionsMenu(menu);
MenuInflater inflater = getMenuInflater();
inflater.inflate(R.menu.main, menu);
MenuItem feedbackItem = menu.findItem(R.id.menu_feedback);
if (AppConstants.ENABLE_FEEDBACK) {
feedbackItem.setTitle(feedbackItem.getTitle() + " (v" + PrefsUtils.getVersion(this) + ")");
} else {
feedbackItem.setVisible(false);
}
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
if (item.getItemId() == R.id.menu_profile) {
Intent profileIntent = new Intent(this, Profile.class);
startActivity(profileIntent);
return true;
} else if (item.getItemId() == R.id.menu_refresh) {
NBSyncService.forceFeedsFolders();
triggerSync();
return true;
} else if (item.getItemId() == R.id.menu_add_feed) {
Intent intent = new Intent(this, SearchForFeeds.class);
startActivityForResult(intent, 0);
return true;
} else if (item.getItemId() == R.id.menu_logout) {
DialogFragment newFragment = new LogoutDialogFragment();
newFragment.show(getFragmentManager(), "dialog");
} else if (item.getItemId() == R.id.menu_settings) {
Intent settingsIntent = new Intent(this, Settings.class);
startActivity(settingsIntent);
return true;
} else if (item.getItemId() == R.id.menu_feedback) {
try {
Intent i = new Intent(Intent.ACTION_VIEW);
i.setData(Uri.parse(PrefsUtils.createFeedbackLink(this)));
startActivity(i);
} catch (Exception e) {
Log.wtf(this.getClass().getName(), "device cannot even open URLs to report feedback");
}
return true;
} else if (item.getItemId() == R.id.menu_loginas) {
DialogFragment newFragment = new LoginAsDialogFragment();
newFragment.show(getFragmentManager(), "dialog");
}
return super.onOptionsItemSelected(item);
}
@Override
public void changedState(StateFilter state) {
folderFeedList.changeState(state);
@ -170,6 +125,30 @@ public class Main extends NbActivity implements StateChangedListener, SwipeRefre
folderFeedList.startLoaders();
}
public void updateUnreadCounts(int neutCount, int posiCount) {
unreadCountNeutText.setText(Integer.toString(neutCount));
unreadCountPosiText.setText(Integer.toString(posiCount));
if ((neutCount+posiCount) <= 0) {
if (NBSyncService.isFeedCountSyncRunning()) {
emptyViewImage.setVisibility(View.INVISIBLE);
emptyViewText.setText(R.string.loading);
emptyViewText.setVisibility(View.VISIBLE);
} else {
emptyViewImage.setVisibility(View.VISIBLE);
if (folderFeedList.currentState == StateFilter.BEST) {
emptyViewText.setText(R.string.empty_list_view_no_focus_stories);
} else {
emptyViewText.setText(R.string.empty_list_view_no_unread_stories);
}
emptyViewText.setVisibility(View.VISIBLE);
}
} else {
emptyViewImage.setVisibility(View.INVISIBLE);
emptyViewText.setVisibility(View.INVISIBLE);
}
}
private void updateStatusIndicators() {
if (NBSyncService.isFeedFolderSyncRunning()) {
swipeLayout.setRefreshing(true);
@ -194,6 +173,77 @@ public class Main extends NbActivity implements StateChangedListener, SwipeRefre
triggerSync();
}
@OnClick(R.id.main_menu_button) void onClickMenuButton() {
PopupMenu pm = new PopupMenu(this, menuButton);
Menu menu = pm.getMenu();
pm.getMenuInflater().inflate(R.menu.main, menu);
MenuItem loginAsItem = menu.findItem(R.id.menu_loginas);
if (NBSyncService.isStaff == Boolean.TRUE) {
loginAsItem.setVisible(true);
} else {
loginAsItem.setVisible(false);
}
MenuItem feedbackItem = menu.findItem(R.id.menu_feedback);
if (AppConstants.ENABLE_FEEDBACK) {
feedbackItem.setTitle(feedbackItem.getTitle() + " (v" + PrefsUtils.getVersion(this) + ")");
} else {
feedbackItem.setVisible(false);
}
pm.setOnMenuItemClickListener(this);
pm.show();
}
@Override
public boolean onMenuItemClick(MenuItem item) {
if (item.getItemId() == R.id.menu_profile) {
Intent i = new Intent(this, Profile.class);
startActivity(i);
return true;
} else if (item.getItemId() == R.id.menu_refresh) {
NBSyncService.forceFeedsFolders();
triggerSync();
return true;
} else if (item.getItemId() == R.id.menu_add_feed) {
Intent i = new Intent(this, SearchForFeeds.class);
startActivity(i);
return true;
} else if (item.getItemId() == R.id.menu_logout) {
DialogFragment newFragment = new LogoutDialogFragment();
newFragment.show(getFragmentManager(), "dialog");
} else if (item.getItemId() == R.id.menu_settings) {
Intent settingsIntent = new Intent(this, Settings.class);
startActivity(settingsIntent);
return true;
} else if (item.getItemId() == R.id.menu_feedback) {
try {
Intent i = new Intent(Intent.ACTION_VIEW);
i.setData(Uri.parse(PrefsUtils.createFeedbackLink(this)));
startActivity(i);
} catch (Exception e) {
Log.wtf(this.getClass().getName(), "device cannot even open URLs to report feedback");
}
return true;
} else if (item.getItemId() == R.id.menu_loginas) {
DialogFragment newFragment = new LoginAsDialogFragment();
newFragment.show(getFragmentManager(), "dialog");
return true;
}
return false;
}
@OnClick(R.id.main_add_button) void onClickAddButton() {
Intent i = new Intent(this, SearchForFeeds.class);
startActivity(i);
}
@OnClick(R.id.main_profile_button) void onClickProfileButton() {
Intent i = new Intent(this, Profile.class);
startActivity(i);
}
@Override
public void onScrollStateChanged(AbsListView absListView, int i) {
// not required

View file

@ -678,6 +678,16 @@ public class BlurDatabaseHelper {
ContentValues values = new ContentValues();
values.put(DatabaseConstants.STORY_STARRED, starred);
synchronized (RW_MUTEX) {dbRW.update(DatabaseConstants.STORY_TABLE, values, DatabaseConstants.STORY_HASH + " = ?", new String[]{hash});}
// since local star/unstar operations are, so far, never retried or done automatically,
// we skip the complex transactional accounting of counts that read/unread operations use.
// Though this could drift by one under heavy load, the count gets refreshed on the next
// feed/folder sync. Unfortunately, we also can't count locally the way we do for unreads,
// since we never get a full set of starred stories.
String operator = (starred ? " + 1" : " - 1");
StringBuilder q = new StringBuilder("UPDATE " + DatabaseConstants.STARRED_STORY_COUNT_TABLE);
q.append(" SET ").append(DatabaseConstants.STARRED_STORY_COUNT_COUNT).append(" = ").append(DatabaseConstants.STARRED_STORY_COUNT_COUNT).append(operator);
synchronized (RW_MUTEX) {dbRW.execSQL(q.toString());}
}
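Rather than recounting starred stories, the helper nudges the cached total up or down by one and lets the next full sync fix any drift. The same counter-adjustment idea, sketched in Python against a throwaway SQLite table (the table and column names stand in for the DatabaseConstants values):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE starred_counts (count INTEGER)")
    conn.execute("INSERT INTO starred_counts (count) VALUES (10)")

    def set_story_starred(starred):
        # Bump the cached count by one instead of recounting; drift is corrected
        # by the next feed/folder sync, mirroring the Java logic above.
        operator = " + 1" if starred else " - 1"
        conn.execute("UPDATE starred_counts SET count = count" + operator)

    set_story_starred(True)
    print(conn.execute("SELECT count FROM starred_counts").fetchone()[0])  # 11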
public void setStoryShared(String hash) {

View file

@ -59,9 +59,9 @@ public class FolderListAdapter extends BaseExpandableListAdapter {
/** Positive counts for active feeds, indexed by feed ID. */
private Map<String,Integer> feedPosCounts;
/** Total neutral unreads for all feeds. */
private int totalNeutCount = 0;
public int totalNeutCount = 0;
/** Total positive unreads for all feeds. */
private int totalPosCount = 0;
public int totalPosCount = 0;
/** Folders, indexed by canonical name. */
private Map<String,Folder> folders = Collections.emptyMap();
@ -140,7 +140,6 @@ public class FolderListAdapter extends BaseExpandableListAdapter {
((ImageView) v.findViewById(R.id.row_folder_indicator)).setImageResource(isExpanded ? R.drawable.indicator_expanded : R.drawable.indicator_collapsed);
} else if (isFolderRoot(groupPosition)) {
v = inflater.inflate(R.layout.row_all_stories, null, false);
bindCountViews(v, totalNeutCount, totalPosCount, true);
} else if (isRowReadStories(groupPosition)) {
if (convertView == null) {
v = inflater.inflate(R.layout.row_read_stories, null, false);

View file

@ -29,6 +29,7 @@ import butterknife.OnGroupExpand;
import com.newsblur.R;
import com.newsblur.activity.AllStoriesItemsList;
import com.newsblur.activity.FeedItemsList;
import com.newsblur.activity.Main;
import com.newsblur.activity.ItemsList;
import com.newsblur.activity.ReadStoriesItemsList;
import com.newsblur.activity.SavedStoriesItemsList;
@ -56,6 +57,7 @@ public class FolderListFragment extends NbFragment implements OnCreateContextMen
public StateFilter currentState = StateFilter.SOME;
private SharedPreferences sharedPreferences;
@FindView(R.id.folderfeed_list) ExpandableListView list;
private Main activity;
@Override
public void onCreate(Bundle savedInstanceState) {
@ -70,6 +72,7 @@ public class FolderListFragment extends NbFragment implements OnCreateContextMen
public void onActivityCreated(Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
sharedPreferences = getActivity().getSharedPreferences(PrefConstants.PREFERENCES, 0);
activity = (Main) getActivity();
}
@Override
@ -109,6 +112,7 @@ public class FolderListFragment extends NbFragment implements OnCreateContextMen
throw new IllegalArgumentException("unknown loader created");
}
checkOpenFolderPreferences();
pushUnreadCounts();
} catch (Exception e) {
// for complex folder sets, these ops can take so long that they butt heads
// with the destruction of the fragment and adapter. crashes can ensue.
@ -123,10 +127,16 @@ public class FolderListFragment extends NbFragment implements OnCreateContextMen
public void hasUpdated() {
if (isAdded()) {
getLoaderManager().restartLoader(SOCIALFEEDS_LOADER, null, this);
getLoaderManager().restartLoader(FOLDERS_LOADER, null, this);
getLoaderManager().restartLoader(FEEDS_LOADER, null, this);
getLoaderManager().restartLoader(SAVEDCOUNT_LOADER, null, this);
try {
getLoaderManager().restartLoader(SOCIALFEEDS_LOADER, null, this);
getLoaderManager().restartLoader(FOLDERS_LOADER, null, this);
getLoaderManager().restartLoader(FEEDS_LOADER, null, this);
getLoaderManager().restartLoader(SAVEDCOUNT_LOADER, null, this);
} catch (Exception e) {
// on heavily loaded devices, the time between isAdded() going false
// and the loader subsystem shutting down can be nontrivial, causing
// IllegalStateExceptions to be thrown here.
}
}
}
@ -257,6 +267,15 @@ public class FolderListFragment extends NbFragment implements OnCreateContextMen
hasUpdated();
}
/**
* Every time unread counts are updated in the adapter, ping the Main activity with
* the new data. It is, unfortunately, quite expensive to compute given the current
* DB model, so having Main also load it would cause some lag.
*/
public void pushUnreadCounts() {
activity.updateUnreadCounts(adapter.totalNeutCount, adapter.totalPosCount);
}
@OnGroupClick(R.id.folderfeed_list) boolean onGroupClick(ExpandableListView list, View group, int groupPosition, long id) {
if (adapter.isFolderRoot(groupPosition)) {
Intent i = new Intent(getActivity(), AllStoriesItemsList.class);

View file

@ -4,6 +4,7 @@ import android.app.Activity;
import android.app.LoaderManager;
import android.content.Loader;
import android.database.Cursor;
import android.graphics.Typeface;
import android.os.Bundle;
import android.util.Log;
import android.view.ContextMenu;
@ -21,6 +22,7 @@ import android.widget.AbsListView;
import android.widget.AbsListView.OnScrollListener;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TextView;
@ -85,6 +87,7 @@ public abstract class ItemListFragment extends NbFragment implements OnScrollLis
getResources().getColor(R.color.refresh_4));
itemList.addHeaderView(headerView, null, false);
itemList.setHeaderDividersEnabled(false);
View footerView = inflater.inflate(R.layout.row_loading_throbber, null);
footerProgressView = (ProgressThrobber) footerView.findViewById(R.id.itemlist_loading_throb);
footerProgressView.setColors(getResources().getColor(R.color.refresh_1),
@ -93,6 +96,7 @@ public abstract class ItemListFragment extends NbFragment implements OnScrollLis
getResources().getColor(R.color.refresh_4));
itemList.addFooterView(footerView, null, false);
itemList.setFooterDividersEnabled(false);
itemList.setEmptyView(v.findViewById(R.id.empty_view));
setupBezelSwipeDetector(itemList);
itemList.setOnScrollListener(this);
@ -157,12 +161,17 @@ public abstract class ItemListFragment extends NbFragment implements OnScrollLis
}
View emptyView = itemList.getEmptyView();
TextView textView = (TextView) emptyView.findViewById(R.id.empty_view_text);
ImageView imageView = (ImageView) emptyView.findViewById(R.id.empty_view_image);
boolean isLoading = NBSyncService.isFeedSetSyncing(getFeedSet(), activity);
if (isLoading || (!cursorSeenYet)) {
textView.setText(R.string.empty_list_view_loading);
textView.setTypeface(null, Typeface.ITALIC);
imageView.setVisibility(View.INVISIBLE);
} else {
textView.setText(R.string.empty_list_view_no_stories);
textView.setTypeface(null, Typeface.NORMAL);
imageView.setVisibility(View.VISIBLE);
}
}

View file

@ -542,21 +542,39 @@ public class ReadingItemFragment extends NbFragment implements ClassifierDialogF
}
}
private static final Pattern altSniff1 = Pattern.compile("<img[^>]*src=(['\"])((?:(?!\\1).)*)\\1[^>]*alt=(['\"])((?:(?!\\3).)*)\\3[^>]*>", Pattern.CASE_INSENSITIVE);
private static final Pattern altSniff2 = Pattern.compile("<img[^>]*alt=(['\"])((?:(?!\\1).)*)\\1[^>]*src=(['\"])((?:(?!\\3).)*)\\3[^>]*>", Pattern.CASE_INSENSITIVE);
private static final Pattern altSniff3 = Pattern.compile("<img[^>]*src=(['\"])((?:(?!\\1).)*)\\1[^>]*title=(['\"])((?:(?!\\3).)*)\\3[^>]*>", Pattern.CASE_INSENSITIVE);
private static final Pattern altSniff4 = Pattern.compile("<img[^>]*title=(['\"])((?:(?!\\1).)*)\\1[^>]*src=(['\"])((?:(?!\\3).)*)\\3[^>]*>", Pattern.CASE_INSENSITIVE);
private void sniffAltTexts(String html) {
// Find images with alt tags and cache the text for use on long-press
// NOTE: if doing this via regex has a smell, you have a good nose! This method is far from perfect
// and may miss valid cases or truncate tags, but it works for popular feeds (read: XKCD) and doesn't
// require us to import a proper parser lib of hundreds of kilobytes just for this one feature.
imageAltTexts = new HashMap<String,String>();
// sniff for alts first
Matcher imgTagMatcher = altSniff1.matcher(html);
while (imgTagMatcher.find()) {
imageAltTexts.put(imgTagMatcher.group(2), imgTagMatcher.group(4));
}
imgTagMatcher = altSniff2.matcher(html);
while (imgTagMatcher.find()) {
imageAltTexts.put(imgTagMatcher.group(4), imgTagMatcher.group(2));
}
// then sniff for 'title' tags, so they will overwrite alts and take precedence
imgTagMatcher = altSniff3.matcher(html);
while (imgTagMatcher.find()) {
imageAltTexts.put(imgTagMatcher.group(2), imgTagMatcher.group(4));
}
imgTagMatcher = altSniff4.matcher(html);
while (imgTagMatcher.find()) {
imageAltTexts.put(imgTagMatcher.group(4), imgTagMatcher.group(2));
}
// while we are at it, create a place where we can later cache offline image remaps so that when
// we do an alt-text lookup, we can search for the right URL key.
imageUrlRemaps = new HashMap<String,String>();
Matcher imgTagMatcher1 = Pattern.compile("<img[^>]*src=\"([^\"]*)\"[^>]*alt=\"([^\"]*)\"[^>]*>", Pattern.CASE_INSENSITIVE).matcher(html);
while (imgTagMatcher1.find()) {
imageAltTexts.put(imgTagMatcher1.group(1), imgTagMatcher1.group(2));
}
Matcher imgTagMatcher2 = Pattern.compile("<img[^>]*alt=\"([^\"]*)\"[^>]*src=\"([^\"]*)\"[^>]*>", Pattern.CASE_INSENSITIVE).matcher(html);
while (imgTagMatcher2.find()) {
imageAltTexts.put(imgTagMatcher2.group(2), imgTagMatcher2.group(1));
}
}
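The four patterns only differ in attribute order (src before alt/title and the reverse), and the title passes run last so their text wins over alt text. The same sniffing idea as a short runnable Python sketch (the sample HTML is made up):

    import re

    # src-before-alt and alt-before-src, mirroring altSniff1/altSniff2 above.
    SRC_THEN_ALT = re.compile(r"<img[^>]*src=(['\"])((?:(?!\1).)*)\1[^>]*alt=(['\"])((?:(?!\3).)*)\3[^>]*>", re.I)
    ALT_THEN_SRC = re.compile(r"<img[^>]*alt=(['\"])((?:(?!\1).)*)\1[^>]*src=(['\"])((?:(?!\3).)*)\3[^>]*>", re.I)

    html = '<img src="https://imgs.example.com/comic.png" alt="hover joke goes here">'

    alts = {}
    for m in SRC_THEN_ALT.finditer(html):
        alts[m.group(2)] = m.group(4)      # url -> alt text
    for m in ALT_THEN_SRC.finditer(html):
        alts[m.group(4)] = m.group(2)      # later passes overwrite earlier ones

    print(alts)  # {'https://imgs.example.com/comic.png': 'hover joke goes here'}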
private String swapInOfflineImages(String html) {

View file

@ -133,6 +133,7 @@ public class APIResponse {
try {
T response = classOfT.newInstance();
response.message = this.errorMessage;
response.isProtocolError = true;
return ((T) response);
} catch (Exception e) {
// this should never fail unless the constructor of the base response bean fails

View file

@ -10,6 +10,10 @@ import java.util.regex.Pattern;
*/
public class NewsBlurResponse {
// not part of the response schema, but populated by the API manager to indicate
// that we never got *any* valid JSON back
public boolean isProtocolError = false;
public boolean authenticated;
public int code;
public String message;

View file

@ -324,19 +324,20 @@ public class NBSyncService extends Service {
NewsBlurResponse response = ra.doRemote(apiManager);
// if we attempted a call and it failed, do not mark the action as done
if (response != null) {
if (response.isError()) {
if (response.isUserError()) {
Log.d(this.getClass().getName(), "Discarding reading action with user error.");
} else {
continue actionsloop;
}
}
if (response == null) {
Log.e(this.getClass().getName(), "Discarding reading action with internal API error.");
dbHelper.clearAction(id);
} else if (response.isProtocolError) {
// the network failed or we got a non-200, so be sure we retry
continue actionsloop;
} else if (response.isError()) {
Log.e(this.getClass().getName(), "Discarding reading action with user error.");
dbHelper.clearAction(id);
} else {
// success!
dbHelper.clearAction(id);
FollowupActions.add(ra);
}
dbHelper.clearAction(id);
FollowupActions.add(ra);
}
} finally {
closeQuietly(c);
@ -580,10 +581,6 @@ public class NBSyncService extends Service {
if (! isStoryResponseGood(apiResponse)) return;
// if any reading activities happened during the API call, the result is now stale.
// discard it and start again
if (dbHelper.getActions(false).getCount() > 0) return;
FeedPagesSeen.put(fs, pageNumber);
totalStoriesSeen += apiResponse.stories.length;
FeedStoriesSeen.put(fs, totalStoriesSeen);
@ -721,6 +718,10 @@ public class NBSyncService extends Service {
return (HousekeepingRunning || ActionsRunning || RecountsRunning || FFSyncRunning || CleanupService.running() || UnreadsService.running() || StorySyncRunning || OriginalTextService.running() || ImagePrefetchService.running());
}
public static boolean isFeedCountSyncRunning() {
return (HousekeepingRunning || RecountsRunning || FFSyncRunning);
}
/**
* Is there a sync for a given FeedSet running?
*/

View file

@ -64,7 +64,7 @@ public class InteractionsAdapter extends ActivityDetailsAdapter {
stringBuilder.append(String.format(nowFollowingYou, userString));
stringBuilder.setSpan(linkColor, 0, activity.user.username.length(), Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
stringBuilder.setSpan(contentColor, activity.user.username.length() + 1, activity.user.username.length() + 1 + nowFollowingYou.length(), Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
stringBuilder.setSpan(contentColor, activity.user.username.length() + 1, stringBuilder.length() - 0, Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
return stringBuilder;
}

View file

@ -1,7 +1,5 @@
global
maxconn 100000
user haproxy
group haproxy
daemon
ca-base /srv/newsblur/config/certificates
crt-base /srv/newsblur/config/certificates
@ -44,7 +42,8 @@ frontend public
acl gunicorn_dead nbsrv(gunicorn) lt 1
acl nginx_dead nbsrv(nginx) lt 1
acl mx_mode nbsrv(maintenance) lt 1
acl is_unread_count url_beg /reader/feed_unread_count
monitor-uri /status
monitor fail if gunicorn_dead
monitor fail if nginx_dead
@ -61,7 +60,7 @@ frontend public
use_backend nginx if mx_mode
use_backend gunicorn_counts if { path_beg /reader/feed_unread_count }
use_backend gunicorn_counts if is_unread_count
use_backend gunicorn unless gunicorn_dead || nginx_dead
backend node_socket

View file

@ -25,5 +25,7 @@ else:
if workers <= 4:
workers = max(int(math.floor(GIGS_OF_MEMORY * 1000 / 512)), 4)
if workers > 8:
workers = 8
workers = 8
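The intent of the clamp is roughly one worker per 512 MB of RAM, floored at 4 and capped at 8. A worked example of the formula (the memory figure is illustrative):

    import math

    GIGS_OF_MEMORY = 6   # illustrative machine size
    workers = max(int(math.floor(GIGS_OF_MEMORY * 1000 / 512)), 4)
    if workers > 8:
        workers = 8
    print(workers)       # floor(6000 / 512) = 11, clamped down to 8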

View file

@ -16,7 +16,7 @@ logappend=true
slowms=100
syncdelay=15
syncdelay=5
rest = true
#profile = 2

View file

@ -152,7 +152,7 @@ else {
}
$sql = "select count(*) from pg_stat_activity ";
$sql .= " where datname = ? and current_query like '<IDLE>%'";
$sql .= " where datname = ? and state = 'idle'";
print "# $sql\n" if $debug;
$sth = $dbh->prepare($sql);
$sth->execute($dbname);

View file

@ -40,6 +40,13 @@ server {
return 200;
}
location /nginx_status {
stub_status on; # activate stub_status module
access_log off;
allow 127.0.0.1; # restrict access to local only
deny all;
}
error_page 502 @down;
location @down {
root /srv/newsblur/;
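The /nginx_status location above serves stub_status's small plain-text counters page, restricted to localhost. A hedged Python sketch of polling it from the same host (the URL, port, and stock stub_status output format are assumptions; adjust to the actual server block):

    import re
    import urllib.request

    # Assumes the server listens on localhost port 80; change as needed.
    with urllib.request.urlopen("http://127.0.0.1/nginx_status") as resp:
        body = resp.read().decode("ascii")

    # The stock output begins with a line like "Active connections: 3".
    active = int(re.search(r"Active connections:\s+(\d+)", body).group(1))
    print(active)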

106 config/postgres_hba.conf Normal file
View file

@ -0,0 +1,106 @@
# PostgreSQL Client Authentication Configuration File
# ===================================================
#
# Refer to the "Client Authentication" section in the PostgreSQL
# documentation for a complete description of this file. A short
# synopsis follows.
#
# This file controls: which hosts are allowed to connect, how clients
# are authenticated, which PostgreSQL user names they can use, which
# databases they can access. Records take one of these forms:
#
# local DATABASE USER METHOD [OPTIONS]
# host DATABASE USER ADDRESS METHOD [OPTIONS]
# hostssl DATABASE USER ADDRESS METHOD [OPTIONS]
# hostnossl DATABASE USER ADDRESS METHOD [OPTIONS]
#
# (The uppercase items must be replaced by actual values.)
#
# The first field is the connection type: "local" is a Unix-domain
# socket, "host" is either a plain or SSL-encrypted TCP/IP socket,
# "hostssl" is an SSL-encrypted TCP/IP socket, and "hostnossl" is a
# plain TCP/IP socket.
#
# DATABASE can be "all", "sameuser", "samerole", "replication", a
# database name, or a comma-separated list thereof. The "all"
# keyword does not match "replication". Access to replication
# must be enabled in a separate record (see example below).
#
# USER can be "all", a user name, a group name prefixed with "+", or a
# comma-separated list thereof. In both the DATABASE and USER fields
# you can also write a file name prefixed with "@" to include names
# from a separate file.
#
# ADDRESS specifies the set of hosts the record matches. It can be a
# host name, or it is made up of an IP address and a CIDR mask that is
# an integer (between 0 and 32 (IPv4) or 128 (IPv6) inclusive) that
# specifies the number of significant bits in the mask. A host name
# that starts with a dot (.) matches a suffix of the actual host name.
# Alternatively, you can write an IP address and netmask in separate
# columns to specify the set of hosts. Instead of a CIDR-address, you
# can write "samehost" to match any of the server's own IP addresses,
# or "samenet" to match any address in any subnet that the server is
# directly connected to.
#
# METHOD can be "trust", "reject", "md5", "password", "gss", "sspi",
# "krb5", "ident", "peer", "pam", "ldap", "radius" or "cert". Note that
# "password" sends passwords in clear text; "md5" is preferred since
# it sends encrypted passwords.
#
# OPTIONS are a set of options for the authentication in the format
# NAME=VALUE. The available options depend on the different
# authentication methods -- refer to the "Client Authentication"
# section in the documentation for a list of which options are
# available for which authentication methods.
#
# Database and user names containing spaces, commas, quotes and other
# special characters must be quoted. Quoting one of the keywords
# "all", "sameuser", "samerole" or "replication" makes the name lose
# its special character, and just match a database or username with
# that name.
#
# This file is read on server startup and when the postmaster receives
# a SIGHUP signal. If you edit the file on a running system, you have
# to SIGHUP the postmaster for the changes to take effect. You can
# use "pg_ctl reload" to do that.
# Put your actual configuration here
# ----------------------------------
#
# If you want to allow non-local connections, you need to add more
# "host" records. In that case you will also need to make PostgreSQL
# listen on a non-local interface via the listen_addresses
# configuration parameter, or via the -i or -h command line switches.
# DO NOT DISABLE!
# If you change this first entry you will need to make sure that the
# database superuser can access the database using some other method.
# Noninteractive access to all databases is required during automatic
# maintenance (custom daily cronjobs, replication, and similar tasks).
#
# Database administrative login by Unix domain socket
local all postgres peer
# TYPE DATABASE USER ADDRESS METHOD
# "local" is for Unix domain socket connections only
local all all peer
# IPv4 local connections:
host all all 127.0.0.1/32 md5
# IPv6 local connections:
host all all ::1/128 md5
# Allow replication connections from localhost, by a user with the
# replication privilege.
#local replication postgres peer
#host replication postgres 127.0.0.1/32 md5
#host replication postgres ::1/128 md5
local all all peer
local all all trust
host all all 0.0.0.0/0 trust
host newsblur newsblur 0.0.0.0/0 trust
host replication all 0.0.0.0/0 trust
host replication postgres 0.0.0.0/0 trust

View file

@ -38,15 +38,15 @@
# The default values of these variables are driven from the -D command-line
# option or PGDATA environment variable, represented here as ConfigDir.
data_directory = '/var/lib/postgresql/9.2/main' # use data in another directory
data_directory = '/var/lib/postgresql/9.4/main' # use data in another directory
# (change requires restart)
hba_file = '/etc/postgresql/9.2/main/pg_hba.conf' # host-based authentication file
hba_file = '/etc/postgresql/9.4/main/pg_hba.conf' # host-based authentication file
# (change requires restart)
ident_file = '/etc/postgresql/9.2/main/pg_ident.conf' # ident configuration file
ident_file = '/etc/postgresql/9.4/main/pg_ident.conf' # ident configuration file
# (change requires restart)
# If external_pid_file is not explicitly set, no extra PID file is written.
external_pid_file = '/var/run/postgresql/9.2-main.pid' # write an extra PID file
external_pid_file = '/var/run/postgresql/9.4-main.pid' # write an extra PID file
# (change requires restart)
@ -65,7 +65,7 @@ max_connections = 1000 # (change requires restart)
# Note: Increasing max_connections costs ~400 bytes of shared memory per
# connection slot, plus lock space (see max_locks_per_transaction).
#superuser_reserved_connections = 3 # (change requires restart)
unix_socket_directory = '/var/run/postgresql' # (change requires restart)
unix_socket_directories = '/var/run/postgresql' # (change requires restart)
#unix_socket_group = '' # (change requires restart)
#unix_socket_permissions = 0777 # begin with 0 to use octal notation
# (change requires restart)
@ -106,7 +106,7 @@ ssl = false # (change requires restart)
# - Memory -
shared_buffers = 512MB # min 128kB
shared_buffers = 16GB # min 128kB
# (change requires restart)
#temp_buffers = 8MB # min 800kB
#max_prepared_transactions = 0 # zero disables the feature
@ -125,6 +125,8 @@ maintenance_work_mem = 512MB # min 1MB
# (change requires restart)
#shared_preload_libraries = '' # (change requires restart)
huge_pages = on
# - Cost-Based Vacuum Delay -
#vacuum_cost_delay = 0ms # 0-100 milliseconds
@ -171,7 +173,7 @@ wal_buffers = 16MB # min 32kB
# - Checkpoints -
checkpoint_segments = 10 # in logfile segments, min 1, 16MB each
checkpoint_segments = 40 # in logfile segments, min 1, 16MB each
#checkpoint_timeout = 5min # range 30s-1h
#checkpoint_completion_target = 0.5 # checkpoint target duration, 0.0 - 1.0
#checkpoint_warning = 30s # 0 disables
@ -195,7 +197,7 @@ wal_keep_segments = 320 # in logfile segments, 16MB each; 0 disables
# - Standby Servers -
#hot_standby = off # "on" allows queries during recovery
hot_standby = on # "on" allows queries during recovery
# (change requires restart)
#max_standby_archive_delay = 30s # max delay before canceling queries
# when reading WAL from archive;
@ -229,7 +231,7 @@ wal_keep_segments = 320 # in logfile segments, 16MB each; 0 disables
#cpu_tuple_cost = 0.01 # same scale as above
#cpu_index_tuple_cost = 0.005 # same scale as above
#cpu_operator_cost = 0.0025 # same scale as above
effective_cache_size = 6GB
effective_cache_size = 32GB
# - Genetic Query Optimizer -
@ -338,7 +340,7 @@ effective_cache_size = 6GB
# fatal
# panic (effectively off)
#log_min_duration_statement = -1 # -1 is disabled, 0 logs all statements
log_min_duration_statement = 1000 # -1 is disabled, 0 logs all statements
# and their durations, > 0 logs only
# statements running at least this number
# of milliseconds
@ -350,13 +352,13 @@ effective_cache_size = 6GB
#debug_print_rewritten = off
#debug_print_plan = off
#debug_pretty_print = on
#log_checkpoints = off
log_checkpoints = on
#log_connections = off
#log_disconnections = off
#log_duration = off
#log_error_verbosity = default # terse, default, or verbose messages
#log_hostname = off
log_line_prefix = '%t %h' # special values:
log_line_prefix = '\033[32m%t %h \033[0m' # special values:
# %a = application name
# %u = user name
# %d = database name
@ -376,7 +378,7 @@ log_line_prefix = '%t %h' # special values:
# processes
# %% = '%'
# e.g. '<%u%%%d> '
#log_lock_waits = off # log lock waits >= deadlock_timeout
log_lock_waits = on # log lock waits >= deadlock_timeout
#log_statement = 'none' # none, ddl, mod, all
#log_temp_files = -1 # log temporary files equal or larger
# than the specified size in kilobytes;
@ -494,7 +496,7 @@ default_text_search_config = 'pg_catalog.english'
# LOCK MANAGEMENT
#------------------------------------------------------------------------------
#deadlock_timeout = 1s
deadlock_timeout = 1s
#max_locks_per_transaction = 64 # min 10
# (change requires restart)
# Note: Each lock table slot uses ~270 bytes of shared memory, and there are

View file

@ -17,4 +17,4 @@ trigger_file = '/var/lib/postgresql/9.4/main/standby.trigger'
# required for the standby server, this may not be necessary. But
# a large workload can cause segments to be recycled before the standby
# is fully synchronized, requiring you to start again from a new base backup.
restore_command = 'rsync -a db01:/var/lib/postgresql/9.2/archive/%f "%p"'
restore_command = 'rsync -a db01:/var/lib/postgresql/9.4/archive/%f "%p"'

View file

@ -1,529 +0,0 @@
# -----------------------------
# PostgreSQL configuration file
# -----------------------------
#
# This file consists of lines of the form:
#
# name = value
#
# (The "=" is optional.) Whitespace may be used. Comments are introduced with
# "#" anywhere on a line. The complete list of parameter names and allowed
# values can be found in the PostgreSQL documentation.
#
# The commented-out settings shown in this file represent the default values.
# Re-commenting a setting is NOT sufficient to revert it to the default value;
# you need to reload the server.
#
# This file is read on server startup and when the server receives a SIGHUP
# signal. If you edit the file on a running system, you have to SIGHUP the
# server for the changes to take effect, or use "pg_ctl reload". Some
# parameters, which are marked below, require a server shutdown and restart to
# take effect.
#
# Any parameter can also be given as a command-line option to the server, e.g.,
# "postgres -c log_connections=on". Some parameters can be changed at run time
# with the "SET" SQL command.
#
# Memory units: kB = kilobytes Time units: ms = milliseconds
# MB = megabytes s = seconds
# GB = gigabytes min = minutes
# h = hours
# d = days
#------------------------------------------------------------------------------
# FILE LOCATIONS
#------------------------------------------------------------------------------
# The default values of these variables are driven from the -D command-line
# option or PGDATA environment variable, represented here as ConfigDir.
data_directory = '/var/lib/postgresql/9.2/main' # use data in another directory
# (change requires restart)
hba_file = '/etc/postgresql/9.2/main/pg_hba.conf' # host-based authentication file
# (change requires restart)
ident_file = '/etc/postgresql/9.2/main/pg_ident.conf' # ident configuration file
# (change requires restart)
# If external_pid_file is not explicitly set, no extra PID file is written.
external_pid_file = '/var/run/postgresql/9.2-main.pid' # write an extra PID file
# (change requires restart)
#------------------------------------------------------------------------------
# CONNECTIONS AND AUTHENTICATION
#------------------------------------------------------------------------------
# - Connection Settings -
listen_addresses = '*' # what IP address(es) to listen on;
# comma-separated list of addresses;
# defaults to 'localhost', '*' = all
# (change requires restart)
port = 5432 # (change requires restart)
max_connections = 1000 # (change requires restart)
# Note: Increasing max_connections costs ~400 bytes of shared memory per
# connection slot, plus lock space (see max_locks_per_transaction).
#superuser_reserved_connections = 3 # (change requires restart)
unix_socket_directory = '/var/run/postgresql' # (change requires restart)
#unix_socket_group = '' # (change requires restart)
#unix_socket_permissions = 0777 # begin with 0 to use octal notation
# (change requires restart)
#bonjour = off # advertise server via Bonjour
# (change requires restart)
#bonjour_name = '' # defaults to the computer name
# (change requires restart)
# - Security and Authentication -
#authentication_timeout = 1min # 1s-600s
ssl = false # (change requires restart)
#ssl_ciphers = 'ALL:!ADH:!LOW:!EXP:!MD5:@STRENGTH' # allowed SSL ciphers
# (change requires restart)
#ssl_renegotiation_limit = 512MB # amount of data between renegotiations
#password_encryption = on
#db_user_namespace = off
# Kerberos and GSSAPI
#krb_server_keyfile = ''
#krb_srvname = 'postgres' # (Kerberos only)
#krb_caseins_users = off
# - TCP Keepalives -
# see "man 7 tcp" for details
#tcp_keepalives_idle = 0 # TCP_KEEPIDLE, in seconds;
# 0 selects the system default
#tcp_keepalives_interval = 0 # TCP_KEEPINTVL, in seconds;
# 0 selects the system default
#tcp_keepalives_count = 0 # TCP_KEEPCNT;
# 0 selects the system default
#------------------------------------------------------------------------------
# RESOURCE USAGE (except WAL)
#------------------------------------------------------------------------------
# - Memory -
shared_buffers = 512MB # min 128kB
# (change requires restart)
#temp_buffers = 8MB # min 800kB
#max_prepared_transactions = 0 # zero disables the feature
# (change requires restart)
# Note: Increasing max_prepared_transactions costs ~600 bytes of shared memory
# per transaction slot, plus lock space (see max_locks_per_transaction).
# It is not advisable to set max_prepared_transactions nonzero unless you
# actively intend to use prepared transactions.
work_mem = 5MB # min 64kB
maintenance_work_mem = 512MB # min 1MB
#max_stack_depth = 2MB # min 100kB
# - Kernel Resource Usage -
#max_files_per_process = 1000 # min 25
# (change requires restart)
#shared_preload_libraries = '' # (change requires restart)
# - Cost-Based Vacuum Delay -
#vacuum_cost_delay = 0ms # 0-100 milliseconds
#vacuum_cost_page_hit = 1 # 0-10000 credits
#vacuum_cost_page_miss = 10 # 0-10000 credits
#vacuum_cost_page_dirty = 20 # 0-10000 credits
#vacuum_cost_limit = 200 # 1-10000 credits
# - Background Writer -
#bgwriter_delay = 200ms # 10-10000ms between rounds
#bgwriter_lru_maxpages = 100 # 0-1000 max buffers written/round
#bgwriter_lru_multiplier = 2.0 # 0-10.0 multiplier on buffers scanned/round
# - Asynchronous Behavior -
#effective_io_concurrency = 1 # 1-1000. 0 disables prefetching
#------------------------------------------------------------------------------
# WRITE AHEAD LOG
#------------------------------------------------------------------------------
# - Settings -
wal_level = hot_standby # minimal, archive, or hot_standby
# (change requires restart)
#fsync = on # turns forced synchronization on or off
synchronous_commit = off # immediate fsync at commit
#wal_sync_method = fsync # the default is the first option
# supported by the operating system:
# open_datasync
# fdatasync (default on Linux)
# fsync
# fsync_writethrough
# open_sync
#full_page_writes = on # recover from partial page writes
wal_buffers = 16MB # min 32kB
# (change requires restart)
#wal_writer_delay = 200ms # 1-10000 milliseconds
#commit_delay = 0 # range 0-100000, in microseconds
#commit_siblings = 5 # range 1-1000
# - Checkpoints -
checkpoint_segments = 10 # in logfile segments, min 1, 16MB each
#checkpoint_timeout = 5min # range 30s-1h
#checkpoint_completion_target = 0.5 # checkpoint target duration, 0.0 - 1.0
#checkpoint_warning = 30s # 0 disables
# - Archiving -
archive_mode = on # allows archiving to be done
# (change requires restart)
archive_command = 'cp -f %p ../archive/%f'
# command to use to archive a logfile segment
#archive_timeout = 0 # force a logfile segment switch after this
# number of seconds; 0 disables
# - Streaming Replication -
max_wal_senders = 5 # max number of walsender processes
# (change requires restart)
#wal_sender_delay = 200ms # walsender cycle time, 1-10000 milliseconds
wal_keep_segments = 32 # in logfile segments, 16MB each; 0 disables
#vacuum_defer_cleanup_age = 0 # number of xacts by which cleanup is delayed
# - Standby Servers -
hot_standby = on # "on" allows queries during recovery
# (change requires restart)
#max_standby_archive_delay = 30s # max delay before canceling queries
# when reading WAL from archive;
# -1 allows indefinite delay
#max_standby_streaming_delay = 30s # max delay before canceling queries
# when reading streaming WAL;
# -1 allows indefinite delay
#------------------------------------------------------------------------------
# QUERY TUNING
#------------------------------------------------------------------------------
# - Planner Method Configuration -
#enable_bitmapscan = on
#enable_hashagg = on
#enable_hashjoin = on
#enable_indexscan = on
#enable_material = on
#enable_mergejoin = on
#enable_nestloop = on
#enable_seqscan = on
#enable_sort = on
#enable_tidscan = on
# - Planner Cost Constants -
#seq_page_cost = 1.0 # measured on an arbitrary scale
#random_page_cost = 4.0 # same scale as above
#cpu_tuple_cost = 0.01 # same scale as above
#cpu_index_tuple_cost = 0.005 # same scale as above
#cpu_operator_cost = 0.0025 # same scale as above
effective_cache_size = 6GB
# - Genetic Query Optimizer -
#geqo = on
#geqo_threshold = 12
#geqo_effort = 5 # range 1-10
#geqo_pool_size = 0 # selects default based on effort
#geqo_generations = 0 # selects default based on effort
#geqo_selection_bias = 2.0 # range 1.5-2.0
#geqo_seed = 0.0 # range 0.0-1.0
# - Other Planner Options -
#default_statistics_target = 100 # range 1-10000
#constraint_exclusion = partition # on, off, or partition
#cursor_tuple_fraction = 0.1 # range 0.0-1.0
#from_collapse_limit = 8
#join_collapse_limit = 8 # 1 disables collapsing of explicit
# JOIN clauses
#------------------------------------------------------------------------------
# ERROR REPORTING AND LOGGING
#------------------------------------------------------------------------------
# - Where to Log -
#log_destination = 'stderr' # Valid values are combinations of
# stderr, csvlog, syslog, and eventlog,
# depending on platform. csvlog
# requires logging_collector to be on.
# This is used when logging to stderr:
#logging_collector = off # Enable capturing of stderr and csvlog
# into log files. Required to be on for
# csvlogs.
# (change requires restart)
# These are only used if logging_collector is on:
#log_directory = 'pg_log' # directory where log files are written,
# can be absolute or relative to PGDATA
#log_filename = 'postgresql-%Y-%m-%d_%H%M%S.log' # log file name pattern,
# can include strftime() escapes
#log_truncate_on_rotation = off # If on, an existing log file of the
# same name as the new log file will be
# truncated rather than appended to.
# But such truncation only occurs on
# time-driven rotation, not on restarts
# or size-driven rotation. Default is
# off, meaning append to existing files
# in all cases.
#log_rotation_age = 1d # Automatic rotation of logfiles will
# happen after that time. 0 disables.
#log_rotation_size = 10MB # Automatic rotation of logfiles will
# happen after that much log output.
# 0 disables.
# These are relevant when logging to syslog:
#syslog_facility = 'LOCAL0'
#syslog_ident = 'postgres'
#silent_mode = off # Run server silently.
# DO NOT USE without syslog or
# logging_collector
# (change requires restart)
# - When to Log -
#client_min_messages = notice # values in order of decreasing detail:
# debug5
# debug4
# debug3
# debug2
# debug1
# log
# notice
# warning
# error
#log_min_messages = warning # values in order of decreasing detail:
# debug5
# debug4
# debug3
# debug2
# debug1
# info
# notice
# warning
# error
# log
# fatal
# panic
#log_min_error_statement = error # values in order of decreasing detail:
# debug5
# debug4
# debug3
# debug2
# debug1
# info
# notice
# warning
# error
# log
# fatal
# panic (effectively off)
#log_min_duration_statement = -1 # -1 is disabled, 0 logs all statements
# and their durations, > 0 logs only
# statements running at least this number
# of milliseconds
# - What to Log -
#debug_print_parse = off
#debug_print_rewritten = off
#debug_print_plan = off
#debug_pretty_print = on
#log_checkpoints = off
#log_connections = off
#log_disconnections = off
#log_duration = off
#log_error_verbosity = default # terse, default, or verbose messages
#log_hostname = off
log_line_prefix = '%t %h' # special values:
# %a = application name
# %u = user name
# %d = database name
# %r = remote host and port
# %h = remote host
# %p = process ID
# %t = timestamp without milliseconds
# %m = timestamp with milliseconds
# %i = command tag
# %e = SQL state
# %c = session ID
# %l = session line number
# %s = session start timestamp
# %v = virtual transaction ID
# %x = transaction ID (0 if none)
# %q = stop here in non-session
# processes
# %% = '%'
# e.g. '<%u%%%d> '
#log_lock_waits = off # log lock waits >= deadlock_timeout
#log_statement = 'none' # none, ddl, mod, all
#log_temp_files = -1 # log temporary files equal or larger
# than the specified size in kilobytes;
# -1 disables, 0 logs all temp files
#log_timezone = unknown # actually, defaults to TZ environment
# setting
#------------------------------------------------------------------------------
# RUNTIME STATISTICS
#------------------------------------------------------------------------------
# - Query/Index Statistics Collector -
#track_activities = on
track_counts = on
#track_functions = none # none, pl, all
#track_activity_query_size = 1024 # (change requires restart)
#update_process_title = on
#stats_temp_directory = 'pg_stat_tmp'
# - Statistics Monitoring -
#log_parser_stats = off
#log_planner_stats = off
#log_executor_stats = off
#log_statement_stats = off
#------------------------------------------------------------------------------
# AUTOVACUUM PARAMETERS
#------------------------------------------------------------------------------
autovacuum = on # Enable autovacuum subprocess? 'on'
# requires track_counts to also be on.
#log_autovacuum_min_duration = -1 # -1 disables, 0 logs all actions and
# their durations, > 0 logs only
# actions running at least this number
# of milliseconds.
#autovacuum_max_workers = 3 # max number of autovacuum subprocesses
# (change requires restart)
#autovacuum_naptime = 1min # time between autovacuum runs
#autovacuum_vacuum_threshold = 50 # min number of row updates before
# vacuum
#autovacuum_analyze_threshold = 50 # min number of row updates before
# analyze
#autovacuum_vacuum_scale_factor = 0.2 # fraction of table size before vacuum
#autovacuum_analyze_scale_factor = 0.1 # fraction of table size before analyze
#autovacuum_freeze_max_age = 200000000 # maximum XID age before forced vacuum
# (change requires restart)
#autovacuum_vacuum_cost_delay = 20ms # default vacuum cost delay for
# autovacuum, in milliseconds;
# -1 means use vacuum_cost_delay
#autovacuum_vacuum_cost_limit = -1 # default vacuum cost limit for
# autovacuum, -1 means use
# vacuum_cost_limit
#------------------------------------------------------------------------------
# CLIENT CONNECTION DEFAULTS
#------------------------------------------------------------------------------
# - Statement Behavior -
#search_path = '"$user",public' # schema names
#default_tablespace = '' # a tablespace name, '' uses the default
#temp_tablespaces = '' # a list of tablespace names, '' uses
# only default tablespace
#check_function_bodies = on
#default_transaction_isolation = 'read committed'
#default_transaction_read_only = off
#session_replication_role = 'origin'
#statement_timeout = 0 # in milliseconds, 0 is disabled
#vacuum_freeze_min_age = 50000000
#vacuum_freeze_table_age = 150000000
#bytea_output = 'hex' # hex, escape
#xmlbinary = 'base64'
#xmloption = 'content'
# - Locale and Formatting -
datestyle = 'iso, mdy'
#intervalstyle = 'postgres'
#timezone = unknown # actually, defaults to TZ environment
# setting
#timezone_abbreviations = 'Default' # Select the set of available time zone
# abbreviations. Currently, there are
# Default
# Australia
# India
# You can create your own file in
# share/timezonesets/.
#extra_float_digits = 0 # min -15, max 3
#client_encoding = sql_ascii # actually, defaults to database
# encoding
# These settings are initialized by initdb, but they can be changed.
lc_messages = 'en_US.UTF-8' # locale for system error message
# strings
lc_monetary = 'en_US.UTF-8' # locale for monetary formatting
lc_numeric = 'en_US.UTF-8' # locale for number formatting
lc_time = 'en_US.UTF-8' # locale for time formatting
# default configuration for text search
default_text_search_config = 'pg_catalog.english'
# - Other Defaults -
#dynamic_library_path = '$libdir'
#local_preload_libraries = ''
#------------------------------------------------------------------------------
# LOCK MANAGEMENT
#------------------------------------------------------------------------------
#deadlock_timeout = 1s
#max_locks_per_transaction = 64 # min 10
# (change requires restart)
# Note: Each lock table slot uses ~270 bytes of shared memory, and there are
# max_locks_per_transaction * (max_connections + max_prepared_transactions)
# lock table slots.
#------------------------------------------------------------------------------
# VERSION/PLATFORM COMPATIBILITY
#------------------------------------------------------------------------------
# - Previous PostgreSQL Versions -
#array_nulls = on
#backslash_quote = safe_encoding # on, off, or safe_encoding
#default_with_oids = off
#escape_string_warning = on
#lo_compat_privileges = off
#sql_inheritance = on
#standard_conforming_strings = off
#synchronize_seqscans = on
# - Other Platforms and Clients -
#transform_null_equals = off
#------------------------------------------------------------------------------
# CUSTOMIZED OPTIONS
#------------------------------------------------------------------------------
#custom_variable_classes = '' # list of custom variable class names
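The lock-table note above gives everything needed for a back-of-the-envelope sizing check. A small Python calculation using the values from this (now removed) config, with max_locks_per_transaction left at its default of 64; the ~270 bytes/slot figure is the approximation quoted in the file:

# Rough lock-table shared memory estimate, per the note in this config.
max_connections = 1000
max_prepared_transactions = 0
max_locks_per_transaction = 64          # default; commented out above
bytes_per_slot = 270                    # approximate figure from the config

slots = max_locks_per_transaction * (max_connections + max_prepared_transactions)
lock_table_bytes = slots * bytes_per_slot
# 64 * (1000 + 0) * 270 = 17,280,000 bytes, roughly 16.5 MB of shared memory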

View file

@ -1 +1 @@
slaveof db_redis 6379
slaveof db_redis_story 6379
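The Redis replica now follows db_redis_story instead of db_redis. A quick check that replication picked up the new master, sketched with redis-py; the host you connect to is whichever replica this config belongs to, and the script is illustrative rather than part of the repo:

import redis

r = redis.Redis(host='localhost', port=6379)     # the replica itself
info = r.info()
# A correctly re-pointed slave reports its new master here.
assert info['role'] == 'slave'
assert info['master_host'] == 'db_redis_story'
assert info['master_port'] == 6379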

View file

@ -1,5 +1,5 @@
BeautifulSoup==3.2.1
six==1.6.1
six==1.6
boto==2.8.0
celery==3.0.17
chardet==2.1.1
@ -27,7 +27,7 @@ mongoengine==0.8.2
nltk==2.0.5
oauth2==1.5.211
psutil==2.1.0
pyes==0.90.1
pyes==0.99.5
simplejson==3.4.0
pyflakes==0.6.1
pymongo==2.6
@ -36,8 +36,8 @@ python-gflags==2.0
pytz==2013b
raven==3.1.17
readline==6.2.4.1
redis==2.8.0
hiredis==0.1.1
redis==2.10.3
hiredis==0.2.0
requests==2.5.2
seacucumber==1.5
South==0.7.6
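Several pins move at once here (six, pyes, redis, hiredis). After a pip install -r it is worth confirming the environment really matches the pinned lines; a small illustrative check, where the requirements path is assumed and editable or commented lines are skipped:

import pkg_resources

with open('config/requirements.txt') as f:
    for line in f:
        line = line.strip()
        if not line or line.startswith('#') or line.startswith('-'):
            continue
        # Raises DistributionNotFound or VersionConflict on any mismatch.
        pkg_resources.require(line)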

fabfile.py vendored
View file

@ -436,7 +436,7 @@ def setup_python():
# sudo('python setup.py install')
with settings(warn_only=True):
sudo('su -c \'echo "import sys; sys.setdefaultencoding(\\\\"utf-8\\\\")" > /usr/lib/python2.7/sitecustomize.py\'')
sudo('echo "import sys; sys.setdefaultencoding(\"utf-8\")" | sudo tee /usr/lib/python2.7/sitecustomize.py')
sudo("chmod a+r /usr/local/lib/python2.7/dist-packages/httplib2-0.8-py2.7.egg/EGG-INFO/top_level.txt")
sudo("chmod a+r /usr/local/lib/python2.7/dist-packages/python_dateutil-2.1-py2.7.egg/EGG-INFO/top_level.txt")
sudo("chmod a+r /usr/local/lib/python2.7/dist-packages/httplib2-0.8-py2.7.egg/httplib2/cacerts.txt")
@ -445,6 +445,7 @@ def setup_python():
with settings(warn_only=True):
sudo('chown -R ubuntu.ubuntu /home/ubuntu/.python-eggs')
@parallel
def pip():
pull()
with cd(env.NEWSBLUR_PATH):
@ -467,14 +468,14 @@ def setup_supervisor():
@parallel
def setup_hosts():
put(os.path.join(env.SECRETS_PATH, 'configs/hosts'), '/etc/hosts', use_sudo=True)
sudo('echo "\n\n127.0.0.1 `hostname`" >> /etc/hosts')
sudo('echo "\n\n127.0.0.1 `hostname`" | sudo tee -a /etc/hosts')
def config_pgbouncer():
put('config/pgbouncer.conf', 'pgbouncer.conf')
sudo('mv pgbouncer.conf /etc/pgbouncer/pgbouncer.ini')
put(os.path.join(env.SECRETS_PATH, 'configs/pgbouncer_auth.conf'), 'userlist.txt')
sudo('mv userlist.txt /etc/pgbouncer/userlist.txt')
sudo('echo "START=1" > /etc/default/pgbouncer')
sudo('echo "START=1" | sudo tee /etc/default/pgbouncer')
sudo('su postgres -c "/etc/init.d/pgbouncer stop"', pty=False)
with settings(warn_only=True):
sudo('pkill -9 pgbouncer -e')
@ -491,27 +492,27 @@ def bounce_pgbouncer():
def config_monit_task():
put('config/monit_task.conf', '/etc/monit/conf.d/celery.conf', use_sudo=True)
sudo('echo "START=yes" > /etc/default/monit')
sudo('echo "START=yes" | sudo tee /etc/default/monit')
sudo('/etc/init.d/monit restart')
def config_monit_node():
put('config/monit_node.conf', '/etc/monit/conf.d/node.conf', use_sudo=True)
sudo('echo "START=yes" > /etc/default/monit')
sudo('echo "START=yes" | sudo tee /etc/default/monit')
sudo('/etc/init.d/monit restart')
def config_monit_original():
put('config/monit_original.conf', '/etc/monit/conf.d/node_original.conf', use_sudo=True)
sudo('echo "START=yes" > /etc/default/monit')
sudo('echo "START=yes" | sudo tee /etc/default/monit')
sudo('/etc/init.d/monit restart')
def config_monit_app():
put('config/monit_app.conf', '/etc/monit/conf.d/gunicorn.conf', use_sudo=True)
sudo('echo "START=yes" > /etc/default/monit')
sudo('echo "START=yes" | sudo tee /etc/default/monit')
sudo('/etc/init.d/monit restart')
def config_monit_work():
put('config/monit_work.conf', '/etc/monit/conf.d/work.conf', use_sudo=True)
sudo('echo "START=yes" > /etc/default/monit')
sudo('echo "START=yes" | sudo tee /etc/default/monit')
sudo('/etc/init.d/monit restart')
def config_monit_redis():
@ -520,7 +521,7 @@ def config_monit_redis():
put('config/monit_debug.sh', '/etc/monit/monit_debug.sh', use_sudo=True)
sudo('chmod a+x /etc/monit/monit_debug.sh')
put('config/monit_redis.conf', '/etc/monit/conf.d/redis.conf', use_sudo=True)
sudo('echo "START=yes" > /etc/default/monit')
sudo('echo "START=yes" | sudo tee /etc/default/monit')
sudo('/etc/init.d/monit restart')
def setup_mongoengine_repo():
@ -580,15 +581,11 @@ def setup_ulimit():
run('export FILEMAX=`sysctl -n fs.file-max`', pty=False)
sudo('mv /etc/security/limits.conf /etc/security/limits.conf.bak', pty=False)
sudo('touch /etc/security/limits.conf', pty=False)
sudo('chmod 666 /etc/security/limits.conf', pty=False)
run('echo "root soft nofile 100000" >> /etc/security/limits.conf', pty=False)
run('echo "root hard nofile 100000" >> /etc/security/limits.conf', pty=False)
run('echo "* soft nofile 100000" >> /etc/security/limits.conf', pty=False)
run('echo "* hard nofile 100090" >> /etc/security/limits.conf', pty=False)
sudo('chmod 644 /etc/security/limits.conf', pty=False)
sudo('chmod 666 /etc/sysctl.conf', pty=False)
run('echo "fs.file-max = 100000" >> /etc/sysctl.conf', pty=False)
sudo('chmod 644 /etc/sysctl.conf', pty=False)
run('echo "root soft nofile 100000\n" | sudo tee -a /etc/security/limits.conf', pty=False)
run('echo "root hard nofile 100000\n" | sudo tee -a /etc/security/limits.conf', pty=False)
run('echo "* soft nofile 100000\n" | sudo tee -a /etc/security/limits.conf', pty=False)
run('echo "* hard nofile 100090\n" | sudo tee -a /etc/security/limits.conf', pty=False)
run('echo "fs.file-max = 100000\n" | sudo tee -a /etc/sysctl.conf', pty=False)
sudo('sysctl -p')
sudo('ulimit -n 100000')
connections.connect(env.host_string)
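Once the new limits.conf and sysctl entries are in place and the connection is re-established, the effective descriptor limit can be verified from Python on the host; a short check, not part of the deploy itself:

import resource

# Soft/hard open-file limits as seen by the current process after the
# limits.conf change (expected to be 100000 per the lines above).
soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
assert soft >= 100000, soft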
@ -602,11 +599,11 @@ def setup_ulimit():
# sudo chmod 644 /etc/sysctl.conf
def setup_syncookies():
sudo('echo 1 > /proc/sys/net/ipv4/tcp_syncookies')
sudo('echo 1 | sudo tee /proc/sys/net/ipv4/tcp_syncookies')
sudo('sudo /sbin/sysctl -w net.ipv4.tcp_syncookies=1')
def setup_sudoers(user=None):
sudo('su - root -c "echo \\\\"%s ALL=(ALL) NOPASSWD: ALL\\\\" >> /etc/sudoers"' % (user or env.user))
sudo('echo "%s ALL=(ALL) NOPASSWD: ALL\n" | sudo tee -a /etc/sudoers"' % (user or env.user))
def setup_nginx():
NGINX_VERSION = '1.6.2'
@ -744,7 +741,7 @@ def setup_haproxy(debug=False):
else:
put(os.path.join(env.SECRETS_PATH, 'configs/haproxy.conf'),
'/etc/haproxy/haproxy.cfg', use_sudo=True)
sudo('echo "ENABLED=1" > /etc/default/haproxy')
sudo('echo "ENABLED=1" | sudo tee /etc/default/haproxy')
cert_path = "%s/config/certificates" % env.NEWSBLUR_PATH
run('cat %s/newsblur.com.crt > %s/newsblur.pem' % (cert_path, cert_path))
run('cat %s/newsblur.com.key >> %s/newsblur.pem' % (cert_path, cert_path))
@ -838,7 +835,7 @@ def setup_db_firewall():
sudo('ufw --force enable')
def setup_rabbitmq():
sudo('echo "deb http://www.rabbitmq.com/debian/ testing main" >> /etc/apt/sources.list')
sudo('echo "deb http://www.rabbitmq.com/debian/ testing main" | sudo tee -a /etc/apt/sources.list')
run('wget http://www.rabbitmq.com/rabbitmq-signing-key-public.asc')
sudo('apt-key add rabbitmq-signing-key-public.asc')
run('rm rabbitmq-signing-key-public.asc')
@ -852,16 +849,17 @@ def setup_rabbitmq():
# sudo('apt-get -y install memcached')
def setup_postgres(standby=False):
shmmax = 2300047872
sudo('su root -c "echo \"deb http://apt.postgresql.org/pub/repos/apt/ trusty-pgdg main\" > /etc/apt/sources.list.d/pgdg.list"') # You might have to run this manually
shmmax = 17672445952
hugepages = 9000
sudo('echo "deb http://apt.postgresql.org/pub/repos/apt/ trusty-pgdg main" | sudo tee /etc/apt/sources.list.d/pgdg.list')
sudo('wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -')
sudo('apt-get update')
sudo('apt-get -y install postgresql-9.4 postgresql-client-9.4 postgresql-contrib-9.4 libpq-dev')
put('config/postgresql%s.conf' % (
('_standby' if standby else ''),
), '/etc/postgresql/9.4/main/postgresql.conf', use_sudo=True)
sudo('echo "%s" > /proc/sys/kernel/shmmax' % shmmax)
sudo('echo "\nkernel.shmmax = %s" > /etc/sysctl.conf' % shmmax)
put('config/postgresql.conf', '/etc/postgresql/9.4/main/postgresql.conf', use_sudo=True)
put('config/postgresql_hba.conf', '/etc/postgresql/9.4/main/pg_hba.conf', use_sudo=True)
sudo('echo "%s" | sudo tee /proc/sys/kernel/shmmax' % shmmax)
sudo('echo "\nkernel.shmmax = %s" | sudo tee -a /etc/sysctl.conf' % shmmax)
sudo('echo "\nvm.nr_hugepages = %s\n" | sudo tee -a /etc/sysctl.conf' % hugepages)
sudo('sysctl -p')
if standby:
@ -870,6 +868,11 @@ def setup_postgres(standby=False):
sudo('/etc/init.d/postgresql stop')
sudo('/etc/init.d/postgresql start')
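The new shmmax (17,672,445,952 bytes, about 16.5 GiB) and vm.nr_hugepages = 9000 are sized together: with the usual 2 MiB huge page size, 9000 pages cover roughly 17.6 GiB, comfortably above shmmax. The arithmetic, as a quick standalone check:

shmmax = 17672445952                 # bytes, from setup_postgres above
hugepages = 9000
hugepage_size = 2 * 1024 * 1024      # 2 MiB, the common default on Linux

hugepage_bytes = hugepages * hugepage_size
# 9000 * 2 MiB = 18,874,368,000 bytes (~17.6 GiB) >= shmmax (~16.5 GiB)
assert hugepage_bytes >= shmmax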
def config_postgres(standby=False):
put('config/postgresql.conf', '/etc/postgresql/9.4/main/postgresql.conf', use_sudo=True)
sudo('/etc/init.d/postgresql reload 9.4')
def copy_postgres_to_standby(master='db01'):
# http://www.rassoc.com/gregr/weblog/2013/02/16/zero-to-postgresql-streaming-replication-in-10-mins/
@ -877,16 +880,16 @@ def copy_postgres_to_standby(master='db01'):
# Need to give postgres accounts keys in authorized_keys.
# sudo('su postgres -c "psql -c \"SELECT pg_start_backup(\'label\', true)\""', pty=False)
# sudo('su postgres -c \"rsync -a --stats --progress /var/lib/postgresql/9.2/main postgres@%s:/var/lib/postgresql/9.2/ --exclude postmaster.pid\"' % slave, pty=False)
# sudo('su postgres -c \"rsync -a --stats --progress /var/lib/postgresql/9.4/main postgres@%s:/var/lib/postgresql/9.4/ --exclude postmaster.pid\"' % slave, pty=False)
# sudo('su postgres -c "psql -c \"SELECT pg_stop_backup()\""', pty=False)
sudo('su postgres pg_basebackup -h %s -D /var/lib/postgresql/9.2/main -v -P -X fetch' % master)
sudo('cp /var/lib/postgresql/9.2/recovery.conf /var/lib/postgresql/9.2/main/')
# sudo('su postgres -c "pg_basebackup -h %s -D /var/lib/postgresql/9.4/main -v -P -X fetch"' % master)
put('config/postgresql_recovery.conf', '/var/lib/postgresql/9.4/main/recovery.conf', use_sudo=True)
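The standby bootstrap now relies on pg_basebackup plus a checked-in recovery.conf instead of rsyncing the data directory. Pulled together as one sequence, it would look roughly like the Fabric sketch below; the data-directory path follows the 9.4 layout used elsewhere in this fabfile, and the function itself is illustrative rather than the committed code:

from fabric.api import sudo, put

def bootstrap_standby(master='db01'):
    # Stop the standby, take a fresh base backup from the master,
    # drop in recovery.conf, then start replaying WAL from the master.
    sudo('/etc/init.d/postgresql stop')
    sudo('rm -rf /var/lib/postgresql/9.4/main')
    sudo('su postgres -c "pg_basebackup -h %s -D /var/lib/postgresql/9.4/main -v -P -X fetch"' % master)
    put('config/postgresql_recovery.conf',
        '/var/lib/postgresql/9.4/main/recovery.conf', use_sudo=True)
    sudo('/etc/init.d/postgresql start')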
def setup_mongo():
sudo('apt-key adv --keyserver keyserver.ubuntu.com --recv 7F0CEB10')
# sudo('echo "deb http://downloads.mongodb.org/distros/ubuntu 10.10 10gen" >> /etc/apt/sources.list.d/10gen.list')
sudo('echo "deb http://downloads-distro.mongodb.org/repo/debian-sysvinit dist 10gen" >> /etc/apt/sources.list')
sudo('echo "\ndeb http://downloads-distro.mongodb.org/repo/debian-sysvinit dist 10gen" | sudo tee -a /etc/apt/sources.list')
sudo('apt-get update')
sudo('apt-get -y install mongodb-10gen')
put('config/mongodb.%s.conf' % ('prod' if env.user != 'ubuntu' else 'ec2'),
@ -932,12 +935,12 @@ def setup_mongo_mms():
run('rm mongodb-mms-monitoring-agent_2.2.0.70-1_amd64.deb')
put(os.path.join(env.SECRETS_PATH, 'settings/mongo_mms_config.txt'),
'mongo_mms_config.txt')
sudo("echo \"\n\" >> /etc/mongodb-mms/monitoring-agent.config")
sudo('cat mongo_mms_config.txt >> /etc/mongodb-mms/monitoring-agent.config')
sudo("echo \"\n\" | sudo tee -a /etc/mongodb-mms/monitoring-agent.config")
sudo('cat mongo_mms_config.txt | sudo tee -a /etc/mongodb-mms/monitoring-agent.config')
sudo('start mongodb-mms-monitoring-agent')
def setup_redis(slave=False):
redis_version = '2.8.19'
redis_version = '3.0.3'
with cd(env.VENDOR_PATH):
run('wget http://download.redis.io/releases/redis-%s.tar.gz' % redis_version)
run('tar -xzf redis-%s.tar.gz' % redis_version)
@ -954,13 +957,13 @@ def setup_redis(slave=False):
# sudo('chmod 666 /proc/sys/vm/overcommit_memory', pty=False)
# run('echo "1" > /proc/sys/vm/overcommit_memory', pty=False)
# sudo('chmod 644 /proc/sys/vm/overcommit_memory', pty=False)
sudo("su root -c \"echo \\\"1\\\" > /proc/sys/vm/overcommit_memory\"")
sudo('chmod 666 /etc/sysctl.conf', pty=False)
run('echo "vm.overcommit_memory = 1" >> /etc/sysctl.conf', pty=False)
sudo('chmod 644 /etc/sysctl.conf', pty=False)
sudo("echo 1 | sudo tee /proc/sys/vm/overcommit_memory\"")
sudo('echo "vm.overcommit_memory = 1" | sudo tee -a /etc/sysctl.conf')
sudo("sysctl vm.overcommit_memory=1")
put('config/redis_rclocal.txt', '/etc/rc.local', use_sudo=True)
sudo("su root -c \"echo \\\"never\\\" > /sys/kernel/mm/transparent_hugepage/enabled\"")
sudo("chown root.root /etc/rc.local")
sudo("chmod a+x /etc/rc.local")
sudo('echo "never" | sudo tee /sys/kernel/mm/transparent_hugepage/enabled')
sudo('mkdir -p /var/lib/redis')
sudo('update-rc.d redis defaults')
sudo('/etc/init.d/redis stop')
@ -1067,7 +1070,7 @@ def setup_original_page_server():
sudo('supervisorctl reload')
def setup_elasticsearch():
ES_VERSION = "0.90.13"
ES_VERSION = "1.7.1"
sudo('apt-get update')
sudo('apt-get install openjdk-7-jre -y')
@ -1142,7 +1145,10 @@ def copy_spam():
# =========================
def setup_do(name, size=2, image=None):
INSTANCE_SIZE = "%sGB" % size
if int(size) == 512:
INSTANCE_SIZE = "512MB"
else:
INSTANCE_SIZE = "%sGB" % size
doapi = dop.client.Client(django_settings.DO_CLIENT_KEY, django_settings.DO_API_KEY)
sizes = dict((s.name, s.id) for s in doapi.sizes())
size_id = sizes[INSTANCE_SIZE]
@ -1446,8 +1452,8 @@ def setup_postgres_backups():
# crontab for postgres backups
crontab = """
0 4 * * * python /srv/newsblur/utils/backups/backup_psql.py
0 * * * * sudo find /var/lib/postgresql/9.2/archive -mtime +1 -exec rm {} \;
0 * * * * sudo find /var/lib/postgresql/9.2/archive -type f -mmin +180 -delete"""
0 * * * * sudo find /var/lib/postgresql/9.4/archive -mtime +1 -exec rm {} \;
0 * * * * sudo find /var/lib/postgresql/9.4/archive -type f -mmin +180 -delete"""
run('(crontab -l ; echo "%s") | sort - | uniq - | crontab -' % crontab)
run('crontab -l')

View file

@ -79,6 +79,9 @@ DATABASES = {
'USER': 'newsblur',
'PASSWORD': '',
'HOST': '127.0.0.1',
'OPTIONS': {
"autocommit": True,
},
},
}

View file

@ -7249,6 +7249,10 @@ form.opml_import_form input {
.NB-menu-manage .NB-menu-manage-story-thirdparty .NB-menu-manage-thirdparty-tumblr {
background: transparent url('/media/embed/reader/tumblr.png') no-repeat 0 0;
}
.NB-menu-manage .NB-menu-manage-story-thirdparty .NB-menu-manage-thirdparty-blogger {
background: transparent url('/media/embed/reader/blogger.png') no-repeat 0 0;
background-size: 16px;
}
.NB-menu-manage .NB-menu-manage-story-thirdparty .NB-menu-manage-thirdparty-delicious {
background: transparent url('/media/embed/reader/delicious.png') no-repeat 0 0;
}
@ -7301,6 +7305,7 @@ form.opml_import_form input {
.NB-menu-manage .NB-menu-manage-story-thirdparty.NB-menu-manage-highlight-facebook .NB-menu-manage-thirdparty-email,
.NB-menu-manage .NB-menu-manage-story-thirdparty.NB-menu-manage-highlight-readitlater .NB-menu-manage-thirdparty-email,
.NB-menu-manage .NB-menu-manage-story-thirdparty.NB-menu-manage-highlight-tumblr .NB-menu-manage-thirdparty-email,
.NB-menu-manage .NB-menu-manage-story-thirdparty.NB-menu-manage-highlight-blogger .NB-menu-manage-thirdparty-email,
.NB-menu-manage .NB-menu-manage-story-thirdparty.NB-menu-manage-highlight-delicious .NB-menu-manage-thirdparty-email,
.NB-menu-manage .NB-menu-manage-story-thirdparty.NB-menu-manage-highlight-pinboard .NB-menu-manage-thirdparty-email,
.NB-menu-manage .NB-menu-manage-story-thirdparty.NB-menu-manage-highlight-pinterest .NB-menu-manage-thirdparty-email,
@ -7325,6 +7330,9 @@ form.opml_import_form input {
.NB-menu-manage .NB-menu-manage-story-thirdparty.NB-menu-manage-highlight-tumblr .NB-menu-manage-thirdparty-tumblr {
opacity: 1;
}
.NB-menu-manage .NB-menu-manage-story-thirdparty.NB-menu-manage-highlight-blogger .NB-menu-manage-thirdparty-blogger {
opacity: 1;
}
.NB-menu-manage .NB-menu-manage-story-thirdparty.NB-menu-manage-highlight-delicious .NB-menu-manage-thirdparty-delicious {
opacity: 1;
}
@ -9544,6 +9552,10 @@ form.opml_import_form input {
.NB-modal-preferences .NB-preference-story-share label[for=NB-preference-story-share-tumblr] {
background: transparent url('/media/embed/reader/tumblr.png') no-repeat 0 0;
}
.NB-modal-preferences .NB-preference-story-share label[for=NB-preference-story-share-blogger] {
background: transparent url('/media/embed/reader/blogger.png') no-repeat 0 0;
background-size: 16px;
}
.NB-modal-preferences .NB-preference-story-share label[for=NB-preference-story-share-delicious] {
background: transparent url('/media/embed/reader/delicious.png') no-repeat 0 0;
}

Binary file not shown (new image, 3.6 KiB).

View file

@ -1388,11 +1388,6 @@
NEWSBLUR.app.story_unread_counter.remove();
}
if (NEWSBLUR.assets.view_setting(NEWSBLUR.reader.active_feed, 'layout') == 'full') {
NEWSBLUR.app.story_list.show_loading(options);
} else {
NEWSBLUR.app.story_titles.show_loading(options);
}
NEWSBLUR.app.taskbar_info.hide_stories_error();
this.iframe_scroll = null;
this.set_correct_story_view_for_feed(feed.id);
@ -1400,6 +1395,12 @@
this.switch_taskbar_view(this.story_view);
this.switch_story_layout();
if (NEWSBLUR.assets.view_setting(NEWSBLUR.reader.active_feed, 'layout') == 'full') {
NEWSBLUR.app.story_list.show_loading(options);
} else {
NEWSBLUR.app.story_titles.show_loading(options);
}
_.delay(_.bind(function() {
if (!options.delay || feed.id == self.next_feed) {
this.model.load_feed(feed.id, 1, true, $.rescope(this.post_open_feed, this),
@ -2339,6 +2340,20 @@
NEWSBLUR.assets.stories.mark_read(story, {skip_delay: true});
},
send_story_to_blogger: function(story_id) {
var story = this.model.get_story(story_id);
var url = 'https://www.blogger.com/blog-this.g';
var blogger_url = [
url,
'?n=',
encodeURIComponent(story.get('story_title')),
'&source=newsblur&b=',
encodeURIComponent(story.get('story_permalink'))
].join('');
window.open(blogger_url, '_blank');
NEWSBLUR.assets.stories.mark_read(story, {skip_delay: true});
},
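send_story_to_blogger builds Blogger's BlogThis! URL from the story title and permalink. The same construction in Python, for clarity; the n, source and b parameters are exactly the ones the JavaScript above uses:

import urllib

def blogger_share_url(title, permalink):
    # Mirrors send_story_to_blogger: title -> n, permalink -> b.
    params = urllib.urlencode({
        'n': title,
        'source': 'newsblur',
        'b': permalink,
    })
    return 'https://www.blogger.com/blog-this.g?' + params

# blogger_share_url('Example story', 'http://example.com/post')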
send_story_to_delicious: function(story_id) {
var story = this.model.get_story(story_id);
var url = 'http://www.delicious.com/save';
@ -3390,6 +3405,11 @@
}, this)).bind('mouseleave', _.bind(function(e) {
$(e.target).siblings('.NB-menu-manage-title').text('Email story').parent().removeClass('NB-menu-manage-highlight-tumblr');
}, this))),
(NEWSBLUR.Preferences['story_share_blogger'] && $.make('div', { className: 'NB-menu-manage-thirdparty-icon NB-menu-manage-thirdparty-blogger'}).bind('mouseenter', _.bind(function(e) {
$(e.target).siblings('.NB-menu-manage-title').text('Blogger').parent().addClass('NB-menu-manage-highlight-blogger');
}, this)).bind('mouseleave', _.bind(function(e) {
$(e.target).siblings('.NB-menu-manage-title').text('Email story').parent().removeClass('NB-menu-manage-highlight-blogger');
}, this))),
(NEWSBLUR.Preferences['story_share_delicious'] && $.make('div', { className: 'NB-menu-manage-thirdparty-icon NB-menu-manage-thirdparty-delicious'}).bind('mouseenter', _.bind(function(e) {
$(e.target).siblings('.NB-menu-manage-title').text('Delicious').parent().addClass('NB-menu-manage-highlight-delicious');
}, this)).bind('mouseleave', _.bind(function(e) {
@ -3455,6 +3475,8 @@
this.send_story_to_readitlater(story.id);
} else if ($target.hasClass('NB-menu-manage-thirdparty-tumblr')) {
this.send_story_to_tumblr(story.id);
} else if ($target.hasClass('NB-menu-manage-thirdparty-blogger')) {
this.send_story_to_blogger(story.id);
} else if ($target.hasClass('NB-menu-manage-thirdparty-delicious')) {
this.send_story_to_delicious(story.id);
} else if ($target.hasClass('NB-menu-manage-thirdparty-readability')) {

View file

@ -93,6 +93,8 @@ _.extend(NEWSBLUR.ReaderUserAdmin.prototype, {
$.make('dd', data.statistics.created_date),
$.make('dt', 'Last seen:'),
$.make('dd', data.statistics.last_seen_date),
$.make('dt', 'Last IP:'),
$.make('dd', data.statistics.last_seen_ip),
$.make('dt', 'Timezone:'),
$.make('dd', data.statistics.timezone),
$.make('dt', 'Email:'),

View file

@ -872,7 +872,7 @@ var classifier_prototype = {
serialize_classifier: function() {
var data = {};
$('.NB-classifier', this.$modal).each(function() {
var value = _.string.trim($('.NB-classifier-input-like', this).val());
var value = $('.NB-classifier-input-like', this).val();
if ($('.NB-classifier-input-like, .NB-classifier-input-dislike', this).is(':checked')) {
var name = $('input:checked', this).attr('name');
if (!data[name]) data[name] = [];

View file

@ -421,7 +421,8 @@ _.extend(NEWSBLUR.ReaderPreferences.prototype, {
]),
$.make('div', { className: 'NB-preference-label'}, [
'Default story order',
$.make('div', { className: 'NB-preference-sublabel' }, 'You can override this on a per-site and per-folder basis.')
$.make('div', { className: 'NB-preference-sublabel' }, 'You can override this on a per-site and per-folder basis.'),
$.make('div', { className: 'NB-clear-overrides-order NB-preference-sublabel-link NB-splash-link' }, "Clear all overrides")
])
]),
$.make('div', { className: 'NB-preference NB-preference-openfeedaction' }, [
@ -657,6 +658,10 @@ _.extend(NEWSBLUR.ReaderPreferences.prototype, {
$.make('input', { type: 'checkbox', id: 'NB-preference-story-share-tumblr', name: 'story_share_tumblr' }),
$.make('label', { 'for': 'NB-preference-story-share-tumblr' })
]),
$.make('div', { className: 'NB-preference-option', title: 'Blogger' }, [
$.make('input', { type: 'checkbox', id: 'NB-preference-story-share-blogger', name: 'story_share_blogger' }),
$.make('label', { 'for': 'NB-preference-story-share-blogger' })
]),
$.make('div', { className: 'NB-preference-option', title: 'Delicious' }, [
$.make('input', { type: 'checkbox', id: 'NB-preference-story-share-delicious', name: 'story_share_delicious' }),
$.make('label', { 'for': 'NB-preference-story-share-delicious' })
@ -1353,6 +1358,10 @@ _.extend(NEWSBLUR.ReaderPreferences.prototype, {
e.preventDefault();
self.clear_overrides('view');
});
$.targetIs(e, { tagSelector: '.NB-clear-overrides-order' }, function($t, $p) {
e.preventDefault();
self.clear_overrides('order');
});
$.targetIs(e, { tagSelector: '.NB-clear-overrides-layout' }, function($t, $p) {
e.preventDefault();
self.clear_overrides('layout');

View file

@ -755,10 +755,17 @@ NEWSBLUR.Views.StoryDetailView = Backbone.View.extend({
},
scroll_to_comments: function() {
NEWSBLUR.app.story_list.scroll_to_selected_story(this.model, {
scroll_to_comments: true,
scroll_offset: -50
});
if (_.contains(['list', 'grid'], NEWSBLUR.assets.view_setting(NEWSBLUR.reader.active_feed, 'layout'))) {
NEWSBLUR.app.story_titles.scroll_to_selected_story(this.model, {
scroll_to_comments: true,
scroll_offset: -50
});
} else {
NEWSBLUR.app.story_list.scroll_to_selected_story(this.model, {
scroll_to_comments: true,
scroll_offset: -50
});
}
}

View file

@ -159,7 +159,7 @@ NEWSBLUR.Views.StoryTitlesView = Backbone.View.extend({
show_loading: function(options) {
options = options || {};
if (NEWSBLUR.assets.flags['no_more_stories']) return;
var $story_titles = NEWSBLUR.reader.$s.$story_titles;
this.$('.NB-end-line').remove();
var $endline = $.make('div', { className: "NB-end-line NB-short" });
@ -280,6 +280,9 @@ NEWSBLUR.Views.StoryTitlesView = Backbone.View.extend({
_.contains(['list', 'grid'], NEWSBLUR.assets.view_setting(NEWSBLUR.reader.active_feed, 'layout'))) {
var container_offset = NEWSBLUR.reader.$s.$story_titles.position().top;
var scroll = story_title_view.$el.find('.NB-story-title').position().top;
if (options.scroll_to_comments) {
scroll = story_title_view.$el.find('.NB-feed-story-comments').position().top;
}
var container = NEWSBLUR.reader.$s.$story_titles.scrollTop();
var height = NEWSBLUR.reader.$s.$story_titles.outerHeight();
var position = scroll+container-height/5;

View file

@ -406,6 +406,11 @@ CELERYBEAT_SCHEDULE = {
'schedule': datetime.timedelta(minutes=1),
'options': {'queue': 'beat_feeds_task'},
},
'task-broken-feeds': {
'task': 'task-broken-feeds',
'schedule': datetime.timedelta(hours=6),
'options': {'queue': 'beat_feeds_task'},
},
'freshen-homepage': {
'task': 'freshen-homepage',
'schedule': datetime.timedelta(hours=1),
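The new task-broken-feeds beat entry fires every six hours on the beat_feeds_task queue. For the entry to resolve, a task registered under that exact name must exist; the real one lives elsewhere in the repo and is not part of this diff, but under Celery 3.0 its registration would look something like:

from celery.task import task

@task(name='task-broken-feeds')
def task_broken_feeds():
    # Placeholder body; the actual task handles feeds whose recent
    # fetches have been failing.
    pass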
@ -602,6 +607,7 @@ MONGO_DB_DEFAULTS = {
'alias': 'default',
}
MONGO_DB = dict(MONGO_DB_DEFAULTS, **MONGO_DB)
# MONGO_URI = 'mongodb://%s' % (MONGO_DB.pop('host'),)
# if MONGO_DB.get('read_preference', pymongo.ReadPreference.PRIMARY) != pymongo.ReadPreference.PRIMARY:
# MONGO_PRIMARY_DB = MONGO_DB.copy()
@ -609,6 +615,7 @@ MONGO_DB = dict(MONGO_DB_DEFAULTS, **MONGO_DB)
# MONGOPRIMARYDB = connect(MONGO_PRIMARY_DB.pop('name'), **MONGO_PRIMARY_DB)
# else:
# MONGOPRIMARYDB = MONGODB
# MONGODB = connect(MONGO_DB.pop('name'), host=MONGO_URI, **MONGO_DB)
MONGODB = connect(MONGO_DB.pop('name'), **MONGO_DB)
MONGO_ANALYTICS_DB_DEFAULTS = {
@ -617,6 +624,8 @@ MONGO_ANALYTICS_DB_DEFAULTS = {
'alias': 'nbanalytics',
}
MONGO_ANALYTICS_DB = dict(MONGO_ANALYTICS_DB_DEFAULTS, **MONGO_ANALYTICS_DB)
# MONGO_ANALYTICS_URI = 'mongodb://%s' % (MONGO_ANALYTICS_DB.pop('host'),)
# MONGOANALYTICSDB = connect(MONGO_ANALYTICS_DB.pop('name'), host=MONGO_ANALYTICS_URI, **MONGO_ANALYTICS_DB)
MONGOANALYTICSDB = connect(MONGO_ANALYTICS_DB.pop('name'), **MONGO_ANALYTICS_DB)
@ -643,13 +652,14 @@ CACHES = {
REDIS_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=0)
REDIS_ANALYTICS_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=2)
REDIS_STATISTICS_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=3)
REDIS_FEED_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=4)
REDIS_SESSION_POOL = redis.ConnectionPool(host=SESSION_REDIS_HOST, port=6379, db=5)
# REDIS_CACHE_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=6) # Duped in CACHES
REDIS_PUBSUB_POOL = redis.ConnectionPool(host=REDIS_PUBSUB['host'], port=6379, db=0)
REDIS_STORY_HASH_POOL = redis.ConnectionPool(host=REDIS_STORY['host'], port=6379, db=1)
REDIS_FEED_UPDATE_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=4)
# REDIS_STORY_HASH_POOL2 = redis.ConnectionPool(host=REDIS['host'], port=6379, db=8)
REDIS_STORY_HASH_TEMP_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=10)
# REDIS_CACHE_POOL = redis.ConnectionPool(host=REDIS['host'], port=6379, db=6) # Duped in CACHES
REDIS_SESSION_POOL = redis.ConnectionPool(host=SESSION_REDIS_HOST, port=6379, db=5)
REDIS_STORY_HASH_POOL = redis.ConnectionPool(host=REDIS_STORY['host'], port=6379, db=1)
REDIS_FEED_SUB_POOL = redis.ConnectionPool(host=REDIS_STORY['host'], port=6379, db=2)
REDIS_PUBSUB_POOL = redis.ConnectionPool(host=REDIS_PUBSUB['host'], port=6379, db=0)
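Each pool is pinned to one host and database number for one purpose (sessions, story hashes, feed updates, pubsub), so call sites share connections rather than opening their own. Consumers take a client off the pool, as the munin plugins further down do; a minimal usage sketch:

import redis
from django.conf import settings

# Reuse the pool configured above instead of creating ad hoc connections.
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
queued = r.scard('queued_feeds')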
# ==========
# = Assets =

View file

@ -85,7 +85,7 @@
<img src="/media/img/logo_512.png" class="logo">
<h1>NewsBlur is in <span class="error404">maintenance mode</span></h1>
<div class="description">
<p>Digital Ocean, NewsBlur's hosting provider, is undergoing maintenance and should be back within the next half hour. I'll keep you updated until then, but for now NewsBlur is taking a rest.</p>
<p>Switching to a new primary PostgreSQL database server. This should take no more than 2 minutes. This is another attempt to fix the intermittent 502/downtime that we've been having for the past week.</p>
<p>To pass the time, <a href="http://mlkshk.com/popular">check out what's popular on MLKSHK</a>.</p>
</div>
</div>

View file

@ -6,6 +6,8 @@
{% block content %}
<div class="NB-module">
<table class="NB-status">
<tr>
@ -18,11 +20,12 @@
<th>Premium</th>
<th>Act. Prem</th>
<th>Per Month</th>
<th>Last Month</th>
</tr>
{% for feed in feeds %}
<tr>
<td>{{ feed.pk }}</td>
<td><img class="NB-favicon" src="data:image/png;base64,{{ feed.icon.data }}" /> {{ feed.feed_title|truncatewords:4 }}</td>
<td><img class="NB-favicon" src="/rss_feeds/icon/{{ feed.pk }}" /> {{ feed.feed_title|truncatewords:4 }}</td>
<td class="NB-status-update">
{{ feed.last_update|date:"M j H:i:s" }}
<br>
@ -30,13 +33,16 @@
</td>
<td>{{ feed.min_to_decay }}</td>
<td>{{ feed.num_subscribers }}</td>
<td {% if feed.active_subscribers == 0 %}style="color: lightgrey"{% endif %}>{{ feed.active_subscribers }}</td>
<td {% if feed.premium_subscribers == 0 %}style="color: lightgrey"{% endif %}>{{ feed.premium_subscribers }}</td>
<td {% if feed.active_premium_subscribers == 0 %}style="color: lightgrey"{% endif %}>{{ feed.active_premium_subscribers }}</td>
<td {% if feed.average_stories_per_month == 0 %}style="color: lightgrey"{% endif %}>{{ feed.average_stories_per_month }}</td>
<td style="color: {% if feed.active_subscribers == 0 %}lightgrey{% else %}darkblue{% endif %}">{{ feed.active_subscribers }}</td>
<td style="color: {% if feed.premium_subscribers == 0 %}lightgrey{% else %}darkblue{% endif %}">{{ feed.premium_subscribers }}</td>
<td style="color: {% if feed.active_premium_subscribers == 0 %}lightgrey{% else %}darkblue{% endif %}">{{ feed.active_premium_subscribers }}</td>
<td style="color: {% if feed.average_stories_per_month == 0 %}lightgrey{% else %}{% endif %}">{{ feed.average_stories_per_month }}</td>
<td style="color: {% if feed.stories_last_month == 0 %}lightgrey{% else %}{% endif %}">{{ feed.stories_last_month }}</td>
</tr>
{% endfor %}
</table>
</div>
{% endblock content %}

View file

@ -15,7 +15,7 @@ db_name = 'newsblur'
db_pass = settings.DATABASES['default']['PASSWORD']
os.environ['PGPASSWORD'] = db_pass
filename = 'backup_postgresql_%s.sql.gz' % time.strftime('%Y-%m-%d-%H-%M')
cmd = '/usr/lib/postgresql/9.2/bin/pg_dump -U newsblur -h 127.0.0.1 -Fc %s > %s' % (db_name, filename)
cmd = '/usr/lib/postgresql/9.4/bin/pg_dump -U newsblur -h 127.0.0.1 -Fc %s > %s' % (db_name, filename)
print 'Backing up PostgreSQL: %s' % cmd
os.system(cmd)
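The dump now uses the 9.4 client binaries and keeps the custom (-Fc) format, so restores go through pg_restore rather than psql. A hedged counterpart sketch; database name and host mirror the backup script, the dump filename is only an example, and the script is not part of the repo:

import os

dump = 'backup_postgresql_2015-08-03-04-00.sql.gz'   # an existing -Fc dump from this script
cmd = '/usr/lib/postgresql/9.4/bin/pg_restore -U newsblur -h 127.0.0.1 -d newsblur --clean --no-owner %s' % dump
os.system(cmd)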

View file

@ -353,27 +353,40 @@ class ProcessFeed:
# the feed has changed (or it is the first time we parse it)
# saving the etag and last_modified fields
original_etag = self.feed.etag
self.feed.etag = self.fpf.get('etag')
if self.feed.etag:
self.feed.etag = self.feed.etag[:255]
# some times this is None (it never should) *sigh*
if self.feed.etag is None:
self.feed.etag = ''
if self.feed.etag != original_etag:
self.feed.save(update_fields=['etag'])
original_last_modified = self.feed.last_modified
try:
self.feed.last_modified = mtime(self.fpf.modified)
except:
self.feed.last_modified = None
pass
if self.feed.last_modified != original_last_modified:
self.feed.save(update_fields=['last_modified'])
self.fpf.entries = self.fpf.entries[:100]
original_title = self.feed.feed_title
if self.fpf.feed.get('title'):
self.feed.feed_title = strip_tags(self.fpf.feed.get('title'))
if self.feed.feed_title != original_title:
self.feed.save(update_fields=['feed_title'])
tagline = self.fpf.feed.get('tagline', self.feed.data.feed_tagline)
if tagline:
original_tagline = self.feed.data.feed_tagline
self.feed.data.feed_tagline = utf8encode(tagline)
self.feed.data.save()
if self.feed.data.feed_tagline != original_tagline:
self.feed.data.save(update_fields=['feed_tagline'])
if not self.feed.feed_link_locked:
new_feed_link = self.fpf.feed.get('link') or self.fpf.feed.get('id') or self.feed.feed_link
if new_feed_link != self.feed.feed_link:
@ -382,8 +395,7 @@ class ProcessFeed:
self.feed.save_page_history(301, "HTTP Redirect (%s to go)" % (20-len(redirects)))
if len(redirects) >= 20 or len(non_redirects) == 0:
self.feed.feed_link = new_feed_link
self.feed = self.feed.save()
self.feed.save(update_fields=['feed_link'])
# Determine if stories aren't valid and replace broken guids
guids_seen = set()
@ -430,7 +442,7 @@ class ProcessFeed:
# story_date__gte=start_date,
# story_feed_id=self.feed.pk
))
ret_values = self.feed.add_update_stories(stories, existing_stories,
verbose=self.options['verbose'],
updates_off=self.options['updates_off'])
@ -467,7 +479,7 @@ class ProcessFeed:
self.feed.title[:30]))
self.feed.is_push = False
self.feed = self.feed.save()
logging.debug(u' ---> [%-30s] ~FYParsed Feed: %snew=%s~SN~FY %sup=%s~SN same=%s%s~SN %serr=%s~SN~FY total=~SB%s' % (
self.feed.title[:30],
'~FG~SB' if ret_values['new'] else '', ret_values['new'],
@ -480,7 +492,7 @@ class ProcessFeed:
self.feed.trim_feed()
self.feed.expire_redis()
self.feed.save_feed_history(200, "OK")
if self.options['verbose']:
logging.debug(u' ---> [%-30s] ~FBTIME: feed parse in ~FM%.4ss' % (
self.feed.title[:30], time.time() - start))
@ -631,6 +643,7 @@ class Dispatcher:
if not feed: continue
feed = self.refresh_feed(feed.pk)
if ((self.options['force']) or
(random.random() > .9) or
(fetched_feed and
@ -660,7 +673,7 @@ class Dispatcher:
if (not settings.DEBUG and hasattr(settings, 'RAVEN_CLIENT') and
settings.RAVEN_CLIENT):
settings.RAVEN_CLIENT.captureException()
feed = self.refresh_feed(feed.pk)
logging.debug(u' ---> [%-30s] ~FYFetching icon: %s' % (feed.title[:30], feed.feed_link))
force = self.options['force']
@ -692,7 +705,7 @@ class Dispatcher:
feed.last_load_time = round(delta)
feed.fetched_once = True
try:
feed = feed.save()
feed = feed.save(update_fields=['last_load_time', 'fetched_once'])
except IntegrityError:
logging.debug(" ---> [%-30s] ~FRIntegrityError on feed: %s" % (feed.title[:30], feed.feed_address,))
@ -710,7 +723,7 @@ class Dispatcher:
total=total_duration, feed_code=feed_code)
self.feed_stats[ret_feed] += 1
if len(feed_queue) == 1:
return feed
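Throughout this hunk, full-row saves become save(update_fields=[...]) wherever only a column or two changed (etag, last_modified, feed_title, feed_link, and so on), so each save issues an UPDATE limited to those columns and cannot clobber fields a concurrent worker just wrote. The pattern in isolation, as a generic Django sketch rather than NewsBlur's actual model:

from django.db import models

class Feed(models.Model):
    # Illustrative fields only; the real model is much larger.
    etag = models.CharField(max_length=255, blank=True, null=True)
    last_modified = models.DateTimeField(blank=True, null=True)

def record_fetch_headers(feed, etag, last_modified):
    # Writes only the two columns that actually changed.
    feed.etag = etag
    feed.last_modified = last_modified
    feed.save(update_fields=['etag', 'last_modified'])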

View file

@ -30,12 +30,12 @@ class MongoDumpMiddleware(object):
# self.orig_rs_send_message_with_response = \
# MongoReplicaSetClient._send_message_with_response
# instrument methods to record messages
MongoClient._send_message = \
self._instrument(MongoClient._send_message)
# MongoClient._send_message = \
# self._instrument(MongoClient._send_message)
MongoClient._send_message_with_response = \
self._instrument(MongoClient._send_message_with_response)
MongoReplicaSetClient._send_message = \
self._instrument(MongoReplicaSetClient._send_message)
# MongoReplicaSetClient._send_message = \
# self._instrument(MongoReplicaSetClient._send_message)
MongoReplicaSetClient._send_message_with_response = \
self._instrument(MongoReplicaSetClient._send_message_with_response)
return None
@ -55,6 +55,8 @@ class MongoDumpMiddleware(object):
def _instrument(self, original_method):
def instrumented_method(*args, **kwargs):
# query = args[1].get_message(False, False)
# message = _mongodb_decode_wire_protocol(query[1])
message = _mongodb_decode_wire_protocol(args[1][1])
if not message or message['msg_id'] in self._used_msg_ids:
return original_method(*args, **kwargs)

View file

@ -23,16 +23,42 @@ class NBMuninGraph(MuninGraph):
from apps.rss_feeds.models import Feed, DuplicateFeed
from apps.push.models import PushSubscription
from django.conf import settings
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
from apps.statistics.models import MStatistics
exception_feeds = MStatistics.get('munin:exception_feeds')
if not exception_feeds:
exception_feeds = Feed.objects.filter(has_feed_exception=True).count()
MStatistics.set('munin:exception_feeds', exception_feeds, 60*60*12)
exception_pages = MStatistics.get('munin:exception_pages')
if not exception_pages:
exception_pages = Feed.objects.filter(has_page_exception=True).count()
MStatistics.set('munin:exception_pages', exception_pages, 60*60*12)
duplicate_feeds = MStatistics.get('munin:duplicate_feeds')
if not duplicate_feeds:
duplicate_feeds = DuplicateFeed.objects.count()
MStatistics.set('munin:duplicate_feeds', duplicate_feeds, 60*60*12)
active_feeds = MStatistics.get('munin:active_feeds')
if not active_feeds:
active_feeds = Feed.objects.filter(active_subscribers__gt=0).count()
MStatistics.set('munin:active_feeds', active_feeds, 60*60*12)
push_feeds = MStatistics.get('munin:push_feeds')
if not push_feeds:
push_feeds = PushSubscription.objects.filter(verified=True).count()
MStatistics.set('munin:push_feeds', push_feeds, 60*60*12)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
return {
'scheduled_feeds': r.zcard('scheduled_updates'),
'exception_feeds': Feed.objects.filter(has_feed_exception=True).count(),
'exception_pages': Feed.objects.filter(has_page_exception=True).count(),
'duplicate_feeds': DuplicateFeed.objects.count(),
'active_feeds': Feed.objects.filter(active_subscribers__gt=0).count(),
'push_feeds': PushSubscription.objects.filter(verified=True).count(),
'exception_feeds': exception_feeds,
'exception_pages': exception_pages,
'duplicate_feeds': duplicate_feeds,
'active_feeds': active_feeds,
'push_feeds': push_feeds,
}
if __name__ == '__main__':
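The per-metric counts (exception feeds, duplicate feeds, push feeds, and so on) are now cached in MStatistics for twelve hours instead of being recomputed on every munin poll. The get-or-compute-and-set pattern repeats for each metric; a small helper expressing it once, hypothetical rather than code from the repo:

from apps.statistics.models import MStatistics

def cached_count(key, compute, ttl=60 * 60 * 12):
    # Return the cached value when present; otherwise compute it and
    # cache it for ttl seconds (twelve hours by default).
    value = MStatistics.get(key)
    if not value:
        value = compute()
        MStatistics.set(key, value, ttl)
    return value

# cached_count('munin:duplicate_feeds', DuplicateFeed.objects.count)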

View file

@ -20,9 +20,21 @@ class NBMuninGraph(MuninGraph):
from apps.rss_feeds.models import Feed
from apps.reader.models import UserSubscription
from apps.social.models import MSocialProfile, MSocialSubscription
from apps.statistics.models import MStatistics
feeds_count = MStatistics.get('munin:feeds_count')
if not feeds_count:
feeds_count = Feed.objects.all().count()
MStatistics.set('munin:feeds_count', feeds_count, 60*60*12)
subscriptions_count = MStatistics.get('munin:subscriptions_count')
if not subscriptions_count:
subscriptions_count = UserSubscription.objects.all().count()
MStatistics.set('munin:subscriptions_count', subscriptions_count, 60*60*12)
return {
'feeds': Feed.objects.latest('pk').pk,
'subscriptions': UserSubscription.objects.latest('pk').pk,
'feeds': feeds_count,
'subscriptions': subscriptions_count,
'profiles': MSocialProfile.objects.count(),
'social_subscriptions': MSocialSubscription.objects.count(),
}

View file

@ -26,7 +26,7 @@ class NBMuninGraph(MuninGraph):
def calculate_metrics(self):
from django.conf import settings
r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
return {
'update_queue': r.scard("queued_feeds"),