Mirror of https://github.com/samuelclay/NewsBlur.git
Synced 2025-09-18 21:50:56 +00:00

Commit c6cf31e7e2: Merged in master branch.
29 changed files with 184 additions and 257 deletions
.gitignore (vendored): 2 changes
@@ -12,7 +12,7 @@ build/
 .DS_Store
 **/*.perspectivev*
 data/
-config/certificates/*
+config/certificates
 **/*.xcuserstate
-UserInterfaceState.xcuserstate
+UserInterfaceState\.xcuserstate

@@ -7,7 +7,7 @@ import re
 from django.conf import settings
 from django.db import models
-from django.utils.hashcompat import sha_constructor
+import hashlib

 from apps.push import signals
 from apps.rss_feeds.models import Feed

@@ -120,7 +120,7 @@ class PushSubscription(models.Model):
     def generate_token(self, mode):
         assert self.pk is not None, \
             'Subscription must be saved before generating token'
-        token = mode[:20] + sha_constructor('%s%i%s' % (
+        token = mode[:20] + hashlib.sha1('%s%i%s' % (
             settings.SECRET_KEY, self.pk, mode)).hexdigest()
         self.verify_token = token
         self.save()
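For reference, the replacement above swaps Django's deprecated `django.utils.hashcompat.sha_constructor` for the standard library's `hashlib.sha1`, as the rest of this commit does elsewhere. A minimal standalone sketch of the token construction, assuming a saved subscription `pk` and a stand-in secret (the `.encode()` is only needed on Python 3; the original code is Python 2):

```python
import hashlib

SECRET_KEY = 'example-secret'  # stand-in for settings.SECRET_KEY

def generate_token(mode, pk):
    # mode prefix (capped at 20 chars) + SHA-1 of secret, primary key and mode
    digest = hashlib.sha1(('%s%i%s' % (SECRET_KEY, pk, mode)).encode('utf-8')).hexdigest()
    return mode[:20] + digest

print(generate_token('subscribe', 42))
```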
@@ -356,13 +356,8 @@ class UserSubscription(models.Model):
         logging.user(request, "~FYRead story in feed: %s" % (self.feed))

         for story_id in set(story_ids):
-            try:
-                story = MStory.objects.get(story_feed_id=self.feed_id, story_guid=story_id)
-            except MStory.DoesNotExist:
-                # Story has been deleted, probably by feed_fetcher.
-                continue
-            except MStory.MultipleObjectsReturned:
-                story = MStory.objects.filter(story_feed_id=self.feed_id, story_guid=story_id)[0]
+            story, _ = MStory.find_story(story_feed_id=self.feed_id, story_id=story_id)
+            if not story: continue
             now = datetime.datetime.utcnow()
             date = now if now > story.story_date else story.story_date # For handling future stories
             m, _ = MUserStory.objects.get_or_create(story_id=story_id, user_id=self.user_id,

@@ -510,11 +505,12 @@
             duplicate_story = user_story.story
             if duplicate_story:
                 story_guid = duplicate_story.story_guid if hasattr(duplicate_story, 'story_guid') else duplicate_story.id
-                original_story = MStory.objects(story_feed_id=new_feed.pk,
-                                                story_guid=story_guid)
+                original_story, _ = MStory.find_story(story_feed_id=new_feed.pk,
+                                                      story_id=story_guid,
+                                                      original_only=True)

                 if original_story:
-                    user_story.story = original_story[0]
+                    user_story.story = original_story
                     try:
                         user_story.save()
                     except OperationError:

@@ -585,7 +585,7 @@ def load_single_feed(request, feed_id):
     last_update = relative_timesince(feed.last_update)
     time_breakdown = ("~SN~FR(~SB%.4s/%.4s/%.4s/%.4s(%s)~SN)" % (
         diff1, diff2, diff3, diff4, userstories_db and userstories_db.count() or '~SN0~SB')
-        if timediff > 0.50 else "")
+        if timediff > 1 else "")
     logging.user(request, "~FYLoading feed: ~SB%s%s (%s/%s) %s" % (
         feed.feed_title[:22], ('~SN/p%s' % page) if page > 1 else '', order, read_filter, time_breakdown))

@@ -847,11 +847,13 @@ class Feed(models.Model):
                 try:
                     existing_story = MStory.objects.get(id=existing_story.id)
                 except ValidationError:
-                    existing_story = MStory.objects.get(story_feed_id=existing_story.story_feed_id,
-                                                        story_guid=existing_story.id)
+                    existing_story, _ = MStory.find_story(existing_story.story_feed_id,
+                                                          existing_story.id,
+                                                          original_only=True)
             elif existing_story and existing_story.story_guid:
-                existing_story = MStory.objects.get(story_feed_id=existing_story.story_feed_id,
-                                                    story_guid=existing_story.story_guid)
+                existing_story, _ = MStory.find_story(existing_story.story_feed_id,
+                                                      existing_story.story_guid,
+                                                      original_only=True)
             else:
                 raise MStory.DoesNotExist
         except (MStory.DoesNotExist, OperationError):

@@ -927,7 +929,8 @@ class Feed(models.Model):

     def save_popular_tags(self, feed_tags=None, verbose=False):
         if not feed_tags:
-            all_tags = MStory.objects(story_feed_id=self.pk, story_tags__exists=True).item_frequencies('story_tags')
+            all_tags = MStory.objects(story_feed_id=self.pk,
+                                      story_tags__exists=True).item_frequencies('story_tags')
             feed_tags = sorted([(k, v) for k, v in all_tags.items() if int(v) > 0],
                                key=itemgetter(1),
                                reverse=True)[:25]

@@ -1230,7 +1233,7 @@ class Feed(models.Model):
         # .5 hours for 2 subscribers.
         # .25 hours for 3 subscribers.
         # 1 min for 10 subscribers.
-        subscriber_bonus = 6 * 60 / max(.167, max(0, self.active_subscribers)**3)
+        subscriber_bonus = 12 * 60 / max(.167, max(0, self.active_subscribers)**3)
         if self.premium_subscribers > 0:
             subscriber_bonus /= min(self.active_subscribers+self.premium_subscribers, 5)

@@ -1242,13 +1245,13 @@ class Feed(models.Model):
             slow_punishment = 2 * self.last_load_time
         elif self.last_load_time >= 200:
             slow_punishment = 6 * self.last_load_time
-        total = max(5, int(updates_per_day_delay + subscriber_bonus + slow_punishment))
+        total = max(10, int(updates_per_day_delay + subscriber_bonus + slow_punishment))

-        if self.active_premium_subscribers > 3:
+        if self.active_premium_subscribers > 5:
             total = min(total, 60) # 1 hour minimum for premiums

         if ((self.stories_last_month == 0 or self.average_stories_per_month == 0)):
-            total = total * random.randint(1, 12)
+            total = total * random.randint(1, 24)

         if self.is_push:
             total = total * 20

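Taken together, the two scheduling hunks above roughly double the fetch delay: the subscriber bonus now starts from 12 hours instead of 6, the floor rises from 5 to 10 minutes, the hourly cap only applies above 5 active premium subscribers (was 3), and quiet feeds back off by a random factor of up to 24x (was 12x). A standalone sketch of the arithmetic, with `updates_per_day_delay` and the first load-time threshold treated as illustrative stand-ins since they are not part of these hunks:

```python
import random

def next_fetch_minutes(active_subscribers, premium_subscribers, active_premium_subscribers,
                       last_load_time, stories_last_month, updates_per_day_delay=30):
    # 12 hours of bonus, shrinking with the cube of the active subscriber count
    subscriber_bonus = 12 * 60 / max(.167, max(0, active_subscribers) ** 3)
    if premium_subscribers > 0:
        subscriber_bonus /= min(active_subscribers + premium_subscribers, 5)

    slow_punishment = 0
    if last_load_time >= 1000:       # assumed threshold; only the branches below appear in the hunk
        slow_punishment = 2 * last_load_time
    elif last_load_time >= 200:
        slow_punishment = 6 * last_load_time

    total = max(10, int(updates_per_day_delay + subscriber_bonus + slow_punishment))
    if active_premium_subscribers > 5:
        total = min(total, 60)       # premium feeds still fetched at least hourly
    if stories_last_month == 0:
        total = total * random.randint(1, 24)
    return total

# e.g. a feed with 2 active subscribers, one of them premium, that loads quickly:
print(next_fetch_minutes(2, 1, 0, last_load_time=3, stories_last_month=12))
```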
@@ -1429,7 +1432,7 @@ class MFeedPage(mongo.Document):

 class MStory(mongo.Document):
     '''A feed item'''
-    story_feed_id = mongo.IntField(unique_with='story_guid')
+    story_feed_id = mongo.IntField()
     story_date = mongo.DateTimeField()
     story_title = mongo.StringField(max_length=1024)
     story_content = mongo.StringField()

@@ -1504,9 +1507,10 @@ class MStory(mongo.Document):
     def find_story(cls, story_feed_id, story_id, original_only=False):
         from apps.social.models import MSharedStory
         original_found = True

-        story = cls.objects(story_feed_id=story_feed_id,
-                            story_guid=story_id).limit(1).first()
+        guid_hash = hashlib.sha1(story_id).hexdigest()[:6]
+        story_hash = "%s:%s" % (story_feed_id, guid_hash)
+        story = cls.objects(story_hash=story_hash).limit(1).first()

         if not story:
             original_found = False

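The rewritten lookup above stops querying on `(story_feed_id, story_guid)` and instead keys stories by a compact `story_hash`: the feed id joined to the first six hex characters of the guid's SHA-1. A small sketch of just that key derivation (the MongoDB query itself is omitted):

```python
import hashlib

def story_hash(story_feed_id, story_guid):
    # first 6 hex chars of the guid's SHA-1, namespaced by the feed id
    guid_hash = hashlib.sha1(story_guid.encode('utf-8')).hexdigest()[:6]
    return "%s:%s" % (story_feed_id, guid_hash)

print(story_hash(42, "http://example.com/posts/hello-world"))  # -> "42:" + 6 hex chars
```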
@@ -11,7 +11,8 @@ from django.conf import settings
 from django.utils.text import compress_string
 from utils import log as logging
 from apps.rss_feeds.models import MFeedPage
-from utils.feed_functions import timelimit, mail_feed_error_to_admin
+from utils.feed_functions import timelimit
+# from utils.feed_functions import mail_feed_error_to_admin

 BROKEN_PAGES = [
     'tag:',

@@ -120,7 +121,10 @@ class PageImporter(object):
             logging.debug(tb)
             logging.debug('[%d] ! -------------------------' % (self.feed.id,))
             self.feed.save_page_history(500, "Error", tb)
-            mail_feed_error_to_admin(self.feed, e, local_vars=locals())
+            # mail_feed_error_to_admin(self.feed, e, local_vars=locals())
+            if (not settings.DEBUG and hasattr(settings, 'RAVEN_CLIENT') and
+                settings.RAVEN_CLIENT):
+                settings.RAVEN_CLIENT.captureException()
             if not urllib_fallback:
                 self.fetch_page(urllib_fallback=True)
             else:
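This is the first of several hunks in the commit that retire `mail_feed_error_to_admin` in favor of Sentry: the exception is only forwarded when a Raven client is configured and `DEBUG` is off. A minimal sketch of that guard, assuming `settings.RAVEN_CLIENT` holds a `raven.Client` instance as the hunk implies:

```python
from django.conf import settings

def capture_current_exception():
    # Forward the in-flight exception to Sentry, but only outside DEBUG and
    # only when a Raven client has actually been configured.
    if (not settings.DEBUG and hasattr(settings, 'RAVEN_CLIENT') and
        settings.RAVEN_CLIENT):
        settings.RAVEN_CLIENT.captureException()

try:
    1 / 0
except ZeroDivisionError:
    capture_current_exception()
```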
@@ -20,7 +20,7 @@ class TaskFeeds(Task):
             next_scheduled_update__lte=now,
             active=True,
             active_premium_subscribers__gte=1
-        ).order_by('?')[:1000]
+        ).order_by('?')[:400]
         popular_count = popular_feeds.count()

         # Regular feeds

@@ -28,7 +28,7 @@ class TaskFeeds(Task):
             next_scheduled_update__lte=now,
             active=True,
             active_subscribers__gte=1
-        ).order_by('?')[:500]
+        ).order_by('?')[:200]
         active_count = feeds.count()

         # Mistakenly inactive feeds

@@ -33,6 +33,7 @@ from utils.feed_functions import relative_timesince
 from utils.story_functions import truncate_chars, strip_tags, linkify, image_size
 from utils.scrubber import SelectiveScriptScrubber
 from utils import s3_utils
+from StringIO import StringIO

 RECOMMENDATIONS_LIMIT = 5
 IGNORE_IMAGE_SOURCES = [

@@ -1207,7 +1208,7 @@ class MSharedStory(mongo.Document):
                     ('user_id', 'story_db_id'),
                     'shared_date', 'story_guid', 'story_feed_id'],
         'index_drop_dups': True,
-        'ordering': ['shared_date'],
+        'ordering': ['-shared_date'],
         'allow_inheritance': False,
     }

@@ -1879,11 +1880,13 @@ class MSharedStory(mongo.Document):
         soup = BeautifulSoup(zlib.decompress(self.story_content_z))
         image_sources = [img.get('src') for img in soup.findAll('img')]
         image_sizes = []

         for image_source in image_sources[:10]:
             if any(ignore in image_source for ignore in IGNORE_IMAGE_SOURCES):
                 continue
-            r = requests.get(image_source, prefetch=False, headers=headers)
-            _, width, height = image_size(r.raw)
+            req = requests.get(image_source, headers=headers, stream=True)
+            datastream = StringIO(req.content[:30])
+            _, width, height = image_size(datastream)
             if width <= 16 or height <= 16:
                 continue
             image_sizes.append({'src': image_source, 'size': (width, height)})

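The image-sizing hunk above tracks the requests 1.x API change (`prefetch=False` became `stream=True`) and hands `image_size()` an in-memory buffer instead of the raw socket. The version in the hunk slices `req.content[:30]`, which still downloads the whole body; the sketch below shows a streaming variant that reads only a prefix, with `iter_content` and the byte count as assumptions rather than the commit's exact approach:

```python
import io
import requests

def fetch_image_prefix(url, nbytes=1024, headers=None):
    # Stream the response and pull only the first chunk, so large images are
    # not downloaded in full just to read their dimensions from the header.
    resp = requests.get(url, headers=headers, stream=True, timeout=10)
    prefix = next(resp.iter_content(chunk_size=nbytes), b"")
    resp.close()
    return io.BytesIO(prefix)

# The buffer can then be passed to a header parser such as the project's
# utils.story_functions.image_size() to recover (format, width, height).
datastream = fetch_image_prefix("https://example.com/some-image.png")
```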
@@ -1,5 +1,7 @@
+from bson.objectid import ObjectId
 from celery.task import Task
-from apps.social.models import MSharedStory, MSocialProfile, MSocialServices
+from apps.social.models import MSharedStory, MSocialProfile, MSocialServices, MSocialSubscription
+from django.contrib.auth.models import User
 from utils import log as logging


@@ -57,4 +59,19 @@ class SharePopularStories(Task):
         if not shared:
             shared = MSharedStory.share_popular_stories(interactive=False, days=2)


+
+class UpdateRecalcForSubscription(Task):
+
+    def run(self, subscription_user_id, shared_story_id):
+        user = User.objects.get(pk=subscription_user_id)
+        socialsubs = MSocialSubscription.objects.filter(subscription_user_id=subscription_user_id)
+        logging.debug(" ---> ~FM~SNFlipping unread recalc for ~SB%s~SN subscriptions to ~SB%s's blurblog~SN" % (
+            socialsubs.count(),
+            user.username
+        ))
+        for socialsub in socialsubs:
+            socialsub.needs_unread_recalc = True
+            socialsub.save()
+
+        shared_story = MSharedStory.objects.get(id=ObjectId(shared_story_id))
+        shared_story.publish_update_to_subscribers()
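The new `UpdateRecalcForSubscription` task moves the per-follower unread-recalc flip and the subscriber push off the web request. The views hunk further down enqueues it; a trimmed sketch of that call site:

```python
from apps.social.tasks import UpdateRecalcForSubscription

def queue_unread_recalc(user_id, shared_story_id):
    # Hand the per-subscriber recalc off to a Celery worker instead of looping
    # over MSocialSubscription rows while the share request is waiting.
    UpdateRecalcForSubscription.delay(subscription_user_id=user_id,
                                      shared_story_id=str(shared_story_id))
```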
@@ -17,6 +17,7 @@ from apps.rss_feeds.models import MStory, Feed, MStarredStory
 from apps.social.models import MSharedStory, MSocialServices, MSocialProfile, MSocialSubscription, MCommentReply
 from apps.social.models import MInteraction, MActivity, MFollowRequest
 from apps.social.tasks import PostToService, EmailCommentReplies, EmailStoryReshares
+from apps.social.tasks import UpdateRecalcForSubscription
 from apps.analyzer.models import MClassifierTitle, MClassifierAuthor, MClassifierFeed, MClassifierTag
 from apps.analyzer.models import apply_classifier_titles, apply_classifier_feeds, apply_classifier_authors, apply_classifier_tags
 from apps.analyzer.models import get_classifiers_for_user, sort_classifiers_by_feed

@@ -519,10 +520,8 @@ def mark_story_as_shared(request):
         shared_story = MSharedStory.objects.create(**story_db)
         if source_user_id:
             shared_story.set_source_user_id(int(source_user_id))
-        socialsubs = MSocialSubscription.objects.filter(subscription_user_id=request.user.pk)
-        for socialsub in socialsubs:
-            socialsub.needs_unread_recalc = True
-            socialsub.save()
+        UpdateRecalcForSubscription.delay(subscription_user_id=request.user.pk,
+                                          shared_story_id=str(shared_story.id))
         logging.user(request, "~FCSharing ~FM%s: ~SB~FB%s" % (story.story_title[:20], comments[:30]))
     else:
         shared_story.comments = comments

@@ -533,7 +532,6 @@ def mark_story_as_shared(request):

     if original_story_found:
         story.count_comments()
-    shared_story.publish_update_to_subscribers()

     story = Feed.format_story(story)
     check_all = not original_story_found

@@ -575,10 +573,10 @@ def mark_story_as_unshared(request):
     format = request.REQUEST.get('format', 'json')
     original_story_found = True

-    story = MStory.objects(story_feed_id=feed_id, story_guid=story_id).limit(1).first()
-    if not story:
-        original_story_found = False
+    story, original_story_found = MStory.find_story(story_feed_id=feed_id,
+                                                    story_id=story_id,
+                                                    original_only=True)

     shared_story = MSharedStory.objects(user_id=request.user.pk,
                                         story_feed_id=feed_id,
                                         story_guid=story_id).limit(1).first()

@@ -1208,7 +1206,7 @@ def shared_stories_rss_feed(request, user_id, username):
         user.username,
         request.META['HTTP_USER_AGENT'][:24]
     ))
-    return HttpResponse(rss.writeString('utf-8'))
+    return HttpResponse(rss.writeString('utf-8'), content_type='application/rss+xml')

 @required_params('user_id')
 @json.json_view

@@ -1322,4 +1320,4 @@ def comment(request, comment_id):
     except MSharedStory.DoesNotExist:
         raise Http404

-    return shared_story.comments_with_author()
+    return shared_story.comments_with_author()

@@ -1,136 +0,0 @@
-global
-    maxconn 4096
-    user haproxy
-    group haproxy
-    daemon
-    ca-base /srv/newsblur/config/certificates
-    crt-base /srv/newsblur/config/certificates
-    log 127.0.0.1 local0 notice
-    # log 127.0.0.1 local1 info
-
-defaults
-    log global
-    maxconn 4096
-    mode http
-    option forwardfor
-    option http-server-close
-    option log-health-checks
-    option httplog
-    option redispatch
-    option abortonclose
-    timeout connect 5s
-    timeout client 30s
-    timeout server 30s
-    timeout tunnel 1h
-    retries 3
-    errorfile 502 /srv/newsblur/templates/502.http
-    errorfile 503 /srv/newsblur/templates/502.http
-    errorfile 504 /srv/newsblur/templates/502.http
-
-frontend public
-    bind :80
-    bind :443 ssl crt newsblur.pem
-    option http-server-close
-    # Redirect all HTTP traffic to HTTPS
-    # redirect scheme https if !{ ssl_fc }
-
-    acl gunicorn_dead nbsrv(gunicorn) lt 1
-    acl nginx_dead nbsrv(nginx) lt 1
-    acl mx_mode nbsrv(maintenance) lt 1
-    monitor-uri /status
-    monitor fail if gunicorn_dead
-    monitor fail if nginx_dead
-    monitor fail if mx_mode
-
-    use_backend node if { path_beg /socket.io/ }
-    use_backend nginx if { path_beg /media/ }
-    use_backend nginx if { path_beg /static/ }
-    use_backend nginx if { path_beg /favicon }
-    use_backend nginx if { path_beg /crossdomain/ }
-    use_backend nginx if { path_beg /robots }
-    use_backend nginx if { path_beg /munin/ }
-    use_backend nginx if { path_beg /rss_feeds/icon }
-
-    use_backend nginx if mx_mode
-
-    use_backend gunicorn unless gunicorn_dead || nginx_dead
-
-backend node
-    balance roundrobin
-    # stick-table type ip size 1m expire 60m store gpc0,conn_rate(30s)
-    # server node10 198.211.109.197:8888 check inter 2000ms weight 0
-    server node11 198.211.110.131:8888 check inter 2000ms
-    # server node12 198.211.110.230:8888 check inter 2000ms
-    # server node13 192.34.61.227:8888 check inter 2000ms
-    # server node14 198.211.109.155:8888 check inter 2000ms
-    # server node15 198.211.107.87:8888 check inter 2000ms
-    # server node16 198.211.105.155:8888 check inter 2000ms
-    # server node17 198.211.104.133:8888 check inter 2000ms
-    # server node18 198.211.103.214:8888 check inter 2000ms
-    # server node19 198.211.106.22:8888 check inter 2000ms
-    # server node20 198.211.110.189:8888 check inter 2000ms
-    # server node21 198.211.106.215:8888 check inter 2000ms
-    # server node22 192.81.209.42:8888 check inter 2000ms
-    # server node23 198.211.102.245:8888 check inter 2000ms
-
-backend nginx
-    balance roundrobin
-    server nginx10 198.211.109.197:81 check inter 2000ms
-    server nginx11 198.211.110.131:80 check inter 2000ms
-    server nginx12 198.211.110.230:80 check inter 2000ms
-    server nginx13 192.34.61.227:80 check inter 2000ms
-    server nginx14 198.211.109.155:80 check inter 2000ms
-    server nginx15 198.211.107.87:80 check inter 2000ms
-    server nginx16 198.211.105.155:80 check inter 2000ms
-    server nginx17 198.211.104.133:80 check inter 2000ms
-    server nginx18 198.211.103.214:80 check inter 2000ms
-    server nginx19 198.211.106.22:80 check inter 2000ms
-    server nginx20 198.211.110.189:80 check inter 2000ms
-    server nginx21 198.211.106.215:80 check inter 2000ms
-    server nginx22 192.81.209.42:80 check inter 2000ms
-    server nginx23 198.211.102.245:80 check inter 2000ms
-    server nginx24 198.211.109.236:80 check inter 2000ms
-    server nginx25 198.211.113.54:80 check inter 2000ms
-    server nginx26 198.211.113.206:80 check inter 2000ms
-    server nginx27 198.211.113.86:80 check inter 2000ms
-    server nginx28 198.211.113.196:80 check inter 2000ms
-
-backend gunicorn
-    balance roundrobin
-    server gunicorn10 198.211.109.197:8000 check inter 2000ms
-    # server gunicorn11 198.211.110.131:8000 check inter 2000ms
-    server gunicorn12 198.211.110.230:8000 check inter 2000ms
-    server gunicorn13 192.34.61.227:8000 check inter 2000ms
-    server gunicorn14 198.211.109.155:8000 check inter 2000ms
-    server gunicorn15 198.211.107.87:8000 check inter 2000ms
-    server gunicorn16 198.211.105.155:8000 check inter 2000ms
-    server gunicorn17 198.211.104.133:8000 check inter 2000ms
-    server gunicorn18 198.211.103.214:8000 check inter 2000ms
-    server gunicorn19 198.211.106.22:8000 check inter 2000ms
-    server gunicorn20 198.211.110.189:8000 check inter 2000ms
-    server gunicorn21 198.211.106.215:8000 check inter 2000ms
-    server gunicorn22 192.81.209.42:8000 check inter 2000ms
-    server gunicorn23 198.211.102.245:8000 check inter 2000ms
-    server gunicorn24 198.211.109.236:8000 check inter 2000ms
-    server gunicorn25 198.211.113.54:8000 check inter 2000ms
-    server gunicorn26 198.211.113.206:8000 check inter 2000ms
-    server gunicorn27 198.211.113.86:8000 check inter 2000ms
-    server gunicorn28 198.211.113.196:8000 check inter 2000ms
-
-backend maintenance
-    option httpchk HEAD /maintenance HTTP/1.1\r\nHost:\ www
-    http-check expect status 404
-    http-check send-state
-    server nginx10 198.211.109.197:81 check inter 2000ms
-
-frontend stats
-    bind :1936 ssl crt newsblur.pem
-    default_backend stats
-
-backend stats
-    stats enable
-    stats hide-version
-    stats realm Haproxy\ Statistics
-    stats uri /
-    stats auth sclay:password
-    stats refresh 15s

fabfile.py (vendored): 77 changes
@@ -74,7 +74,12 @@ env.roledefs ={
             'db10.newsblur.com',
             'db11.newsblur.com',
             'db12.newsblur.com',
+            'db20.newsblur.com',
+            'db21.newsblur.com',
             ],
+    'dbdo':['198.211.115.113',
+            '198.211.115.153',
+            ],
     'task': ['task01.newsblur.com',
             'task02.newsblur.com',
             'task03.newsblur.com',

@@ -96,6 +101,9 @@ env.roledefs ={

             'ec2-54-234-211-75.compute-1.amazonaws.com',
             'ec2-50-16-97-13.compute-1.amazonaws.com',
+            'ec2-54-242-131-232.compute-1.amazonaws.com',
+            'ec2-75-101-195-131.compute-1.amazonaws.com',
+            'ec2-54-242-105-17.compute-1.amazonaws.com',
             ],
     'vps': ['task01.newsblur.com',
             'task03.newsblur.com',

@@ -339,6 +347,7 @@ def setup_common():
     setup_installs()
     setup_user()
+    setup_sudoers()
     setup_ulimit()
     setup_repo()
     setup_repo_local_settings()
     setup_local_files()

@@ -378,7 +387,7 @@ def setup_app(skip_common=False):
     deploy()
     config_monit_app()

-def setup_db(skip_common=False, role=None):
+def setup_db(skip_common=False, engine=None):
     if not skip_common:
         setup_common()
         setup_baremetal()

@@ -386,13 +395,13 @@ def setup_db(skip_common=False, role=None):
     setup_db_motd()
     copy_task_settings()
     setup_memcached()
-    if role == "postgres":
+    if engine == "postgres":
         setup_postgres(standby=False)
-    elif role == "postgres_slave":
+    elif engine == "postgres_slave":
         setup_postgres(standby=True)
-    elif role == "mongo":
+    elif engine == "mongo":
         setup_mongo()
-    elif role == "redis":
+    elif engine == "redis":
         setup_redis()
     setup_gunicorn(supervisor=False)
     setup_db_munin()

@@ -430,8 +439,9 @@ def setup_installs():
     run('curl -O http://peak.telecommunity.com/dist/ez_setup.py')
     sudo('python ez_setup.py -U setuptools && rm ez_setup.py')
     sudo('chsh %s -s /bin/zsh' % env.user)
-    sudo('mkdir -p %s' % env.VENDOR_PATH)
-    sudo('chown %s.%s %s' % (env.user, env.user, env.VENDOR_PATH))
+    with settings(warn_only=True):
+        sudo('mkdir -p %s' % env.VENDOR_PATH)
+        sudo('chown %s.%s %s' % (env.user, env.user, env.VENDOR_PATH))

 def setup_user():
     # run('useradd -c "NewsBlur" -m newsblur -s /bin/zsh')

@@ -453,8 +463,8 @@ def setup_repo():
     with settings(warn_only=True):
         run('git clone https://github.com/samuelclay/NewsBlur.git ~/newsblur')
     sudo('mkdir -p /srv')
-    with settings(warn_only=True):
-        sudo('ln -f -s /home/%s/code /srv/' % env.user)
+    # with settings(warn_only=True):
+    #     sudo('ln -f -s /home/%s/code /srv/' % env.user)
+    sudo('ln -f -s /home/%s/newsblur /srv/' % env.user)

 def setup_repo_local_settings():

@@ -489,15 +499,16 @@ def setup_libxml_code():

 def setup_psycopg():
     sudo('easy_install -U psycopg2')


 def setup_python():
     # sudo('easy_install -U pip')
-    sudo('easy_install -U fabric django==1.3.1 readline chardet pyflakes iconv celery django-celery django-celery-with-redis django-compress South django-extensions pymongo==2.2.0 stripe BeautifulSoup pyyaml nltk lxml oauth2 pytz boto seacucumber django_ses django-mailgun mongoengine redis requests django-subdomains psutil python-gflags cssutils raven pyes')
+    sudo('easy_install -U $(<%s)' %
+         os.path.join(env.NEWSBLUR_PATH, 'config/requirements.txt'))
     put('config/pystartup.py', '.pystartup')

     # with cd(os.path.join(env.NEWSBLUR_PATH, 'vendor/cjson')):
     #     sudo('python setup.py install')

     with settings(warn_only=True):
         sudo('su -c \'echo "import sys; sys.setdefaultencoding(\\\\"utf-8\\\\")" > /usr/lib/python2.7/sitecustomize.py\'')

@@ -591,15 +602,15 @@ def setup_logrotate():

 def setup_ulimit():
     # Increase File Descriptor limits.
-    run('export FILEMAX=`sysctl -n fs.file-max`')
-    sudo('mv /etc/security/limits.conf /etc/security/limits.conf.bak')
-    sudo('touch /etc/security/limits.conf')
-    sudo('chmod 666 /etc/security/limits.conf')
-    run('echo "root soft nofile $FILEMAX" >> /etc/security/limits.conf')
-    run('"root hard nofile $FILEMAX" >> /etc/security/limits.conf')
-    run('echo "* soft nofile $FILEMAX" >> /etc/security/limits.conf')
-    run('echo "* hard nofile $FILEMAX" >> /etc/security/limits.conf')
-    sudo('chmod 644 /etc/security/limits.conf')
+    run('export FILEMAX=`sysctl -n fs.file-max`', pty=False)
+    sudo('mv /etc/security/limits.conf /etc/security/limits.conf.bak', pty=False)
+    sudo('touch /etc/security/limits.conf', pty=False)
+    sudo('chmod 666 /etc/security/limits.conf', pty=False)
+    run('echo "root soft nofile $FILEMAX" >> /etc/security/limits.conf', pty=False)
+    run('echo "root hard nofile $FILEMAX" >> /etc/security/limits.conf', pty=False)
+    run('echo "* soft nofile $FILEMAX" >> /etc/security/limits.conf', pty=False)
+    run('echo "* hard nofile $FILEMAX" >> /etc/security/limits.conf', pty=False)
+    sudo('chmod 644 /etc/security/limits.conf', pty=False)

     # run('touch /home/ubuntu/.bash_profile')
     # run('echo "ulimit -n $FILEMAX" >> /home/ubuntu/.bash_profile')

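Besides adding `pty=False` so the shell redirections run without a pseudo-terminal, the new block also restores the `echo` that was missing from the old "root hard nofile" line. A compact sketch of the same idea under Fabric 1.x, reading `fs.file-max` once and looping over the scope/type pairs (the loop is a restructuring for illustration, not the commit's code):

```python
from fabric.api import run, sudo

def raise_nofile_limits():
    # Read the kernel's file-max once, then append soft/hard nofile limits for
    # root and all users; pty=False keeps the appends as plain shell commands.
    filemax = run('sysctl -n fs.file-max', pty=False)
    sudo('chmod 666 /etc/security/limits.conf', pty=False)
    for scope in ('root', '*'):
        for kind in ('soft', 'hard'):
            run('echo "%s %s nofile %s" >> /etc/security/limits.conf' % (scope, kind, filemax),
                pty=False)
    sudo('chmod 644 /etc/security/limits.conf', pty=False)
```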
@@ -720,7 +731,8 @@ def maintenance_on():
 @parallel
 def maintenance_off():
     with cd(env.NEWSBLUR_PATH):
-        run('mv templates/maintenance_on.html templates/maintenance_off.html')
+        with settings(warn_only=True):
+            run('mv templates/maintenance_on.html templates/maintenance_off.html')
         run('git checkout templates/maintenance_off.html')

 def setup_haproxy():
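`settings(warn_only=True)` turns a failed command into a warning instead of aborting the whole Fabric run, which matters here because the maintenance template may already have been moved. A minimal sketch of the pattern, assuming Fabric 1.x:

```python
from fabric.api import run, settings

def move_if_present(src, dst):
    # With warn_only, a failing `mv` (for example, src is already gone) sets
    # result.failed instead of raising and aborting the deploy.
    with settings(warn_only=True):
        result = run('mv %s %s' % (src, dst))
    if result.failed:
        print('nothing to move: %s' % src)
```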
@@ -733,7 +745,7 @@ def setup_haproxy():
     sudo('make install')
     put('config/haproxy-init', '/etc/init.d/haproxy', use_sudo=True)
     sudo('chmod u+x /etc/init.d/haproxy')
-    put('config/haproxy.conf', '/etc/haproxy/haproxy.cfg', use_sudo=True)
+    put('../secrets-newsblur/configs/haproxy.conf', '/etc/haproxy/haproxy.cfg', use_sudo=True)
     sudo('echo "ENABLED=1" > /etc/default/haproxy')
     cert_path = "%s/config/certificates" % env.NEWSBLUR_PATH
     run('cat %s/newsblur.com.crt > %s/newsblur.pem' % (cert_path, cert_path))

@@ -749,7 +761,7 @@ def config_haproxy(debug=False):
     if debug:
         put('config/debug_haproxy.conf', '/etc/haproxy/haproxy.cfg', use_sudo=True)
     else:
-        put('config/haproxy.conf', '/etc/haproxy/haproxy.cfg', use_sudo=True)
+        put('../secrets-newsblur/configs/haproxy.conf', '/etc/haproxy/haproxy.cfg', use_sudo=True)
     sudo('/etc/init.d/haproxy reload')

 # ==============

@@ -774,7 +786,7 @@ def setup_db_firewall():
     sudo('ufw allow proto tcp from 199.15.248.0/21 to any port %s ' % ','.join(map(str, ports)))

     # DigitalOcean
-    for ip in set(env.roledefs['app']):
+    for ip in set(env.roledefs['app'] + env.roledefs['dbdo']):
         if 'newsblur.com' in ip: continue
         sudo('ufw allow proto tcp from %s to any port %s' % (
             ip,

@@ -838,7 +850,7 @@ def setup_mongo():
     # sudo('echo "deb http://downloads.mongodb.org/distros/ubuntu 10.10 10gen" >> /etc/apt/sources.list.d/10gen.list')
     sudo('echo "deb http://downloads-distro.mongodb.org/repo/debian-sysvinit dist 10gen" >> /etc/apt/sources.list')
     sudo('apt-get update')
-    sudo('apt-get -y install mongodb-10gen numactl')
+    sudo('apt-get -y install mongodb-10gen')
     put('config/mongodb.%s.conf' % ('prod' if env.user != 'ubuntu' else 'ec2'),
         '/etc/mongodb.conf', use_sudo=True)
     sudo('/etc/init.d/mongodb restart')

@@ -949,8 +961,8 @@ def copy_task_settings():
 # = Setup - Digital Ocean =
 # =========================

-def setup_do(name):
-    INSTANCE_SIZE = "2GB"
+def setup_do(name, size=2):
+    INSTANCE_SIZE = "%sGB" % size
     IMAGE_NAME = "Ubuntu 12.04 x64 Server"
     doapi = dop.client.Client(django_settings.DO_CLIENT_KEY, django_settings.DO_API_KEY)
     sizes = dict((s.name, s.id) for s in doapi.sizes())

@@ -1096,4 +1108,9 @@ def delete_all_backups():

     for i, key in enumerate(bucket.get_all_keys()):
         print "deleting %s" % (key.name)
-        key.delete()
+        key.delete()
+
+def add_revsys_keys():
+    put("~/Downloads/revsys-keys.pub", "revsys_keys")
+    run('cat revsys_keys >> ~/.ssh/authorized_keys')
+    run('rm revsys_keys')

@@ -1800,6 +1800,10 @@ background: transparent;
 #story_pane .NB-feed-story:first-child .NB-feed-story-header {
   padding-top: 0;
 }
+#story_pane .NB-feed-stories pre {
+  overflow-x: auto;
+  max-width: 100%;
+}

 #story_pane .NB-feed-story-header-info {
   font-weight: bold;

@@ -1901,10 +1905,6 @@ background: transparent;
 .NB-feed-story .NB-feed-story-content div {
   max-width: 100%;
 }
-.NB-feed-story .NB-feed-story-content pre,
-.NB-feed-story .NB-feed-story-content code {
-  white-space: normal;
-}
 .NB-feed-story .NB-feed-story-content img {
   max-width: 100% !important;
   width: auto;

@@ -270,6 +270,14 @@ NEWSBLUR.ReaderKeyboard.prototype = {
                         '?'
                     ])
                 ])
-            ])
+            ]),
+            $.make('div', { className: 'NB-keyboard-group' }, [
+                $.make('div', { className: 'NB-keyboard-shortcut NB-last' }, [
+                    $.make('div', { className: 'NB-keyboard-shortcut-explanation' }, 'Add Site/Folder'),
+                    $.make('div', { className: 'NB-keyboard-shortcut-key' }, [
+                        'a'
+                    ])
+                ])
+            ])
         ]);
     },

settings.py: 11 changes
@@ -70,6 +70,7 @@ EMAIL_BACKEND = 'django_ses.SESBackend'
 CIPHER_USERNAMES = False
 DEBUG_ASSETS = DEBUG
 HOMEPAGE_USERNAME = 'popular'
+ALLOWED_HOSTS = ['.newsblur.com', '127.0.0.1']

 # ===============
 # = Enviornment =

@@ -144,12 +145,13 @@ LOGGING = {
         'mail_admins': {
             'level': 'ERROR',
             'class': 'django.utils.log.AdminEmailHandler',
+            'filters': ['require_debug_false'],
             'include_html': True,
         }
     },
     'loggers': {
         'django.request': {
-            'handlers': ['mail_admins'],
+            'handlers': ['console', 'log_file'],
             'level': 'ERROR',
             'propagate': True,
         },

@@ -168,7 +170,12 @@ LOGGING = {
             'level': 'INFO',
             'propagate': True,
         },
-    }
+    },
+    'filters': {
+        'require_debug_false': {
+            '()': 'django.utils.log.RequireDebugFalse'
+        }
+    },
 }

 # ==========================

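The two settings hunks above stop routing `django.request` errors to `mail_admins` and register Django's `RequireDebugFalse` filter so the email handler can only fire when `DEBUG` is off. A trimmed, self-contained sketch of that wiring (the `console` handler stands in for the project's `console`/`log_file` handlers):

```python
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse',  # passes records only when DEBUG is False
        },
    },
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'class': 'django.utils.log.AdminEmailHandler',
            'filters': ['require_debug_false'],
            'include_html': True,
        },
        'console': {
            'level': 'ERROR',
            'class': 'logging.StreamHandler',
        },
    },
    'loggers': {
        'django.request': {
            'handlers': ['console'],  # request errors now go to console/log file, not email
            'level': 'ERROR',
            'propagate': True,
        },
    },
}
```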
@@ -66,7 +66,7 @@
 <h1><span class="error404">404</span> · NewsBlur can't find that page</h1>
 <div class="description">
 <p>Either the URL you accessed doesn't point to anything or there is no site, story, or data with these parameters.</p>
-<p><a href="{% url index %}">Return to NewsBlur</a></p>
+<p><a href="{% url "index" %}">Return to NewsBlur</a></p>
 </div>
 </div>
 </td>

@@ -63,7 +63,7 @@
 <h1>NewsBlur experienced an error</h1>
 <div class="description">
 <p>The error has been logged and will be fixed soon so you won't have to see this message again.</p>
-<p><a href="{% url index %}">Return to NewsBlur</a></p>
+<p><a href="{% url "index" %}">Return to NewsBlur</a></p>
 </div>
 </div>
 </td>

@@ -215,7 +215,7 @@
 var self = this;
 var $submit = $('.NB-bookmarklet-button-subscribe', this.$modal);
 var folder = $('.NB-folders').val();
-var add_site_url = "http://"+this.domain+"{% url api-add-site token %}?callback=?";
+var add_site_url = "http://"+this.domain+"{% url "api-add-site" token %}?callback=?";

 $submit.addClass('NB-disabled').text('Fetching and parsing...');

@@ -272,7 +272,7 @@
 var $comments = $('textarea[name=newsblur_comment]', this.$modal);
 var $content_wrapper = $('.NB-bookmarklet-page-content-wrapper', this.$modal);
 var $content = $('.NB-bookmarklet-page-content', this.$modal);
-var check_story_url = "http://"+this.domain+"{% url api-check-share-on-site token %}?callback=?";
+var check_story_url = "http://"+this.domain+"{% url "api-check-share-on-site" token %}?callback=?";
 var data = {
     story_url: window.location.href,
     rss_url: this.get_page_rss_url()

@@ -416,7 +416,7 @@

 $.ajax({
-    url: '//'+this.domain+"{% url api-share-story token %}",
+    url: '//'+this.domain+"{% url "api-share-story" token %}",
     type: 'POST',
     data: {
         title: $(".NB-bookmarklet-page-title", this.$modal).html() || this.story_title,

@@ -72,13 +72,13 @@
     'story_share_email' : true
 };
 NEWSBLUR.URLs = {
-    'google-reader-authorize' : "{% url google-reader-authorize %}",
-    'upload-avatar' : "{% url upload-avatar %}",
-    'opml-upload' : "{% url opml-upload %}",
-    'opml-export' : "{% url opml-export %}",
+    'google-reader-authorize' : "{% url "google-reader-authorize" %}",
+    'upload-avatar' : "{% url "upload-avatar" %}",
+    'opml-upload' : "{% url "opml-upload" %}",
+    'opml-export' : "{% url "opml-export" %}",
     'domain' : "{% current_domain %}",
     'favicon' : "/rss_feeds/icon/{id}",
-    'delete-account' : "{% url profile-delete-account %}"
+    'delete-account' : "{% url "profile-delete-account" %}"
 };
 NEWSBLUR.Models = {};
 NEWSBLUR.Collections = {};

@@ -112,18 +112,18 @@

 <div class="NB-body-inner">
 <div class="NB-splash-info NB-splash-top">
-<a href="{% url index %}"><img class="NB-splash-title" src="{{ MEDIA_URL }}/img/logo_newsblur_blur.png" /></a>
+<a href="{% url "index" %}"><img class="NB-splash-title" src="{{ MEDIA_URL }}/img/logo_newsblur_blur.png" /></a>
 </div>

 {% block content %}{% endblock %}

 <div class="NB-splash-info NB-splash-bottom">
 <ul class="NB-splash-links NB-inner">
-<li class="NB-splash-link NB-splash-link-logo"><a href="{% url index %}"><img src="{{ MEDIA_URL }}/img/logo_newsblur_blur.png" style="height: 32px; width: 183px;" /></a></li>
-<li class="NB-splash-link NB-splash-link-about"><a href="{% url about %}">About</a></li>
-<li class="NB-splash-link NB-splash-link-faq"><a href="{% url faq %}">FAQ</a></li>
-<li class="NB-splash-link NB-splash-link-api"><a href="{% url api %}">API</a></li>
-<li class="NB-splash-link NB-splash-link-press"><a href="{% url press %}">Press</a></li>
+<li class="NB-splash-link NB-splash-link-logo"><a href="{% url "index" %}"><img src="{{ MEDIA_URL }}/img/logo_newsblur_blur.png" style="height: 32px; width: 183px;" /></a></li>
+<li class="NB-splash-link NB-splash-link-about"><a href="{% url "about" %}">About</a></li>
+<li class="NB-splash-link NB-splash-link-faq"><a href="{% url "faq" %}">FAQ</a></li>
+<li class="NB-splash-link NB-splash-link-api"><a href="{% url "api" %}">API</a></li>
+<li class="NB-splash-link NB-splash-link-press"><a href="{% url "press" %}">Press</a></li>
 <li class="NB-splash-link NB-splash-link-github"><a href="http://github.com/samuelclay">GitHub</a></li>
 <li class="NB-splash-link NB-splash-link-getsatisfaction"><a href="https://getsatisfaction.com/newsblur">Feedback</a></li>
 <li class="NB-splash-link NB-splash-link-blog"><a href="http://blog.newsblur.com">The Blog</a></li>

@@ -15,7 +15,7 @@
 </h5>

 <div class="NB-import-signup" style="margin-top: 24px;">
-<form method="post" action="{% url import-signup %}">
+<form method="post" action="{% url "import-signup" %}">
 <div>
 {{ signup_form.username.label_tag }}
 {{ signup_form.username }}

@@ -77,7 +77,7 @@
 <img src="/media/img/logo_512.png" class="logo">
 <h1>NewsBlur is in <span class="error404">maintenance mode</span></h1>
 <div class="description">
-<p>Doing a crazy-quick upgrade to the database. Be back in about about a tenth of a jiffy.</p>
+<p>Doing a hefty, yet quick, upgrade to the database. Be back in about about ten minutes. MongoDB obnoxiously re-created a huge index that I dropped, and I can't kill the index creation. This is taking way longer than I anticipated, but once finished (15 more minutes - 11:30AM PT?) it means I'm only a day away from a scaled-out MongoDB.</p>
 <p>To pass the time, go surf <a href="http://mlkshk.com/popular">MLKSHK's popular page</a>.</p>
 <p></p>
 </div>

@@ -52,7 +52,7 @@
 <div class="NB-module NB-module-account">
 <h5 class="NB-module-header">
 <div class="NB-module-header-right">
-<a href="{% url logout %}?next=/" class="NB-splash-link">Logout</a>
+<a href="{% url "logout" %}?next=/" class="NB-splash-link">Logout</a>
 </div>
 Welcome, <span class="NB-module-account-username">{{ user.username }}</span>
 </h5>

@@ -29,7 +29,7 @@
 {% endfor %}
 </table>
 {% if user.is_staff %}
-<form action="{% url add-feature %}" id="add-feature-form" method="post">
+<form action="{% url "add-feature" %}" id="add-feature-form" method="post">
 {{ feature_form.description }}
 <input type="submit" value="Add feature" />
 </form>

@@ -70,7 +70,7 @@
 <div class="NB-module-header-signup">Sign up</div>
 </div>
 <div class="NB-login">
-<form method="post" action="{% url login %}">
+<form method="post" action="{% url "login" %}">
 <div>
 {{ login_form.username.label_tag }}
 {{ login_form.username }}

@@ -78,7 +78,7 @@
 <div>
 <div class="NB-signup-optional">
 {% if login_form.errors and login_form.errors|length %}
-<a href="{% url profile-forgot-password %}" class="NB-splash-link">Forgot?</a>
+<a href="{% url "profile-forgot-password" %}" class="NB-splash-link">Forgot?</a>
 {% else %}
 Optional
 {% endif %}

@@ -99,7 +99,7 @@
 </div>

 <div class="NB-signup">
-<form method="post" action="{% url signup %}">
+<form method="post" action="{% url "signup" %}">
 <div>
 {{ signup_form.username.label_tag }}
 {{ signup_form.username }}

@@ -123,7 +123,7 @@
 {% endfor %}
 {% endif %}
 <div class="NB-signup-orline {% if signup_form.errors %}NB-signup-orline-reduced{% endif %}">— <span class="NB-signup-orline-or">or</span> —</div>
-<a href="{% url google-reader-authorize %}" class="NB-splash-link NB-signup-google">Import from<br /><img src="{{ MEDIA_URL }}img/welcome/Google_Reader_logo.png" width="16" height="16" style="vertical-align: bottom;"> Google Reader</a>
+<a href="{% url "google-reader-authorize" %}" class="NB-splash-link NB-signup-google">Import from<br /><img src="{{ MEDIA_URL }}img/welcome/Google_Reader_logo.png" width="16" height="16" style="vertical-align: bottom;"> Google Reader</a>
 </div>
 </div>

@@ -237,7 +237,7 @@
 <a href="http://github.com/samuelclay" title="GitHub"><img src="{{ MEDIA_URL }}img/welcome/github_favicon.png"></a>
 <a href="http://twitter.com/newsblur" title="Twitter"><img src="{{ MEDIA_URL }}img/welcome/twitter_favicon.png"></a>
 </div>
-<a href="{% url index %}"><img src="{{ MEDIA_URL }}/img/logo_newsblur_blur.png" style="height: 32px; width: 183px;" class="NB-footer-logo" title="NewsBlur" alt="NewsBlur" /></a>
+<a href="{% url "index" %}"><img src="{{ MEDIA_URL }}/img/logo_newsblur_blur.png" style="height: 32px; width: 183px;" class="NB-footer-logo" title="NewsBlur" alt="NewsBlur" /></a>
 is built in New York City and San Francisco by
 <a href="http://twitter.com/samuelclay" class="NB-splash-link">
 <img src="http://a0.twimg.com/profile_images/1382021023/Campeche_Steps_reasonably_small.jpg" class="NB-twitter-avatar">

@@ -36,7 +36,7 @@
 <div class="NB-thanks">
 <div style="color: #175700;font-size: 24px"><b><em>Muted!</em></b></div>
 <div>You will no longer receive email notifications for this story.</div>
-<div><a href="{% url index %}">Continue to NewsBlur »</a></div>
+<div><a href="{% url "index" %}">Continue to NewsBlur »</a></div>
 </div>

 </div>

@@ -5,7 +5,7 @@

 <head>
 <title>{{ social_profile.title }}</title>
-<link rel="alternate" type="application/rss+xml" href="{% url shared-stories-rss-feed social_profile.user_id social_profile.username|slugify %}" title="{{ social_profile.feed_title }} RSS feed">
+<link rel="alternate" type="application/rss+xml" href="{% url "shared-stories-rss-feed" social_profile.user_id social_profile.username|slugify %}" title="{{ social_profile.feed_title }} RSS feed">
 <link rel="shortcut icon" href="{{ social_profile.photo_url }}">
 <link rel="icon" href="{{ social_profile.photo_url }}">
 <meta name="viewport" content="initial-scale=1, maximum-scale=1.5">

@@ -148,7 +148,7 @@

 </h1>
 <div class="NB-header-feed">
-{# <a type="application/rss+xml" href="{% url shared-stories-rss-feed social_profile.user_id social_profile.username|slugify %}">RSS feed for this page</a> #}
+{# <a type="application/rss+xml" href="{% url "shared-stories-rss-feed" social_profile.user_id social_profile.username|slugify %}">RSS feed for this page</a> #}
 </div>
 </header>

@@ -229,6 +229,11 @@
       optional: true
       default: newest
       example: oldest
+    - key: read_filter
+      desc: "Show all stories or only unread stories"
+      optional: true
+      default: unread
+      example: all

 - url: /reader/mark_story_as_read
   method: POST

@@ -18,7 +18,8 @@ from apps.statistics.models import MAnalyticsFetcher
 from utils import feedparser
 from utils.story_functions import pre_process_story
 from utils import log as logging
-from utils.feed_functions import timelimit, TimeoutError, mail_feed_error_to_admin, utf8encode
+from utils.feed_functions import timelimit, TimeoutError, utf8encode
+# from utils.feed_functions import mail_feed_error_to_admin


 # Refresh feed code adapted from Feedjack.

@@ -386,7 +387,7 @@ class Dispatcher:
                 feed.save_feed_history(500, "Error", tb)
                 feed_code = 500
                 fetched_feed = None
-                mail_feed_error_to_admin(feed, e, local_vars=locals())
+                # mail_feed_error_to_admin(feed, e, local_vars=locals())
                 if (not settings.DEBUG and hasattr(settings, 'RAVEN_CLIENT') and
                     settings.RAVEN_CLIENT):
                     settings.RAVEN_CLIENT.captureException()

@@ -431,8 +432,10 @@ class Dispatcher:
                 feed.save_page_history(550, "Page Error", tb)
                 fetched_feed = None
                 page_data = None
-                mail_feed_error_to_admin(feed, e, local_vars=locals())
-                settings.RAVEN_CLIENT.captureException()
+                # mail_feed_error_to_admin(feed, e, local_vars=locals())
+                if (not settings.DEBUG and hasattr(settings, 'RAVEN_CLIENT') and
+                    settings.RAVEN_CLIENT):
+                    settings.RAVEN_CLIENT.captureException()

             feed = self.refresh_feed(feed.pk)
             logging.debug(u' ---> [%-30s] ~FYFetching icon: %s' % (feed.title[:30], feed.feed_link))

@@ -449,8 +452,10 @@ class Dispatcher:
                 logging.error(tb)
                 logging.debug('[%d] ! -------------------------' % (feed_id,))
                 # feed.save_feed_history(560, "Icon Error", tb)
-                mail_feed_error_to_admin(feed, e, local_vars=locals())
-                settings.RAVEN_CLIENT.captureException()
+                # mail_feed_error_to_admin(feed, e, local_vars=locals())
+                if (not settings.DEBUG and hasattr(settings, 'RAVEN_CLIENT') and
+                    settings.RAVEN_CLIENT):
+                    settings.RAVEN_CLIENT.captureException()
         else:
             logging.debug(u' ---> [%-30s] ~FBSkipping page fetch: (%s on %s stories) %s' % (feed.title[:30], self.feed_trans[ret_feed], feed.stories_last_month, '' if feed.has_page else ' [HAS NO PAGE]'))

@@ -1,7 +1,6 @@
-from django.utils.hashcompat import sha_constructor
+import hashlib
 from django.contrib.auth.models import User
 from django.core.cache import cache
-from django.utils.hashcompat import md5_constructor
 from django.utils.http import urlquote
 from django.http import HttpResponseForbidden
 from django.conf import settings

@@ -63,14 +62,14 @@ def get_user(request):
     return user

 def invalidate_template_cache(fragment_name, *variables):
-    args = md5_constructor(u':'.join([urlquote(var) for var in variables]))
+    args = hashlib.md5(u':'.join([urlquote(var) for var in variables]))
     cache_key = 'template.cache.%s.%s' % (fragment_name, args.hexdigest())
     cache.delete(cache_key)

 def generate_secret_token(phrase, size=12):
     """Generate a (SHA1) security hash from the provided info."""
     info = (phrase, settings.SECRET_KEY)
-    return sha_constructor("".join(info)).hexdigest()[:size]
+    return hashlib.sha1("".join(info)).hexdigest()[:size]

 def extract_user_agent(request):
     user_agent = request.environ.get('HTTP_USER_AGENT', '')
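`invalidate_template_cache` mirrors how Django's `{% cache %}` tag names its fragments, so the MD5 swap above has to preserve the key layout exactly. A small sketch of that key construction with `hashlib` (`urllib`'s `quote` stands in for `django.utils.http.urlquote`, and the `.encode()` is only needed on Python 3):

```python
import hashlib
from urllib.parse import quote  # stand-in for django.utils.http.urlquote

def template_cache_key(fragment_name, *variables):
    # fragment name plus an MD5 of the colon-joined, URL-quoted vary-on values
    args = hashlib.md5(u':'.join([quote(str(var)) for var in variables]).encode('utf-8'))
    return 'template.cache.%s.%s' % (fragment_name, args.hexdigest())

print(template_cache_key('story_comments', 42, 'samuel'))
```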