mirror of
https://github.com/viq/NewsBlur.git
synced 2025-09-18 21:43:31 +00:00
Merge branch 'jammit' into social
* jammit:
  Adding new iphone mask image.
  Moving to proper unicode string for feed fetching.
  Colorizing feed fetching.
  Colorizing feed fetching.
  Adding two error classes to requests exception handling.
  Adding two error classes to requests exception handling.
  Adding two error classes to requests exception handling.
  jQuery 1.6.1 -> 1.7.1. Fixes massive Chrome issues.
  Adding shift+u as a shortcut for marking a story unread.
  Adding more advanced exception handling for new requests module, with fallback to urllib2.
  Adding urllib fallback to requests in page fetching.
  Adding a little bit more leeway in page importer by falling back to urllib2 when requests fail.
  Adding slight bit of color to feed fetcher.
  Revert "Revert "Switching to requests from urllib2/httplib. Bring on the page errors.""

Conflicts:
  assets.yml
  local_settings.py.template
  media/js/jquery-1.7.1.js
  media/js/jquery-1.7.js
  media/js/vendor/jquery-1.7.js
Commit 8f4dd6566c
17 changed files with 433 additions and 449 deletions
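The thread running through these commits is the page fetcher's switch from urllib2/httplib to requests, keeping urllib2 as a fallback when requests raises. A minimal sketch of that retry pattern, assuming a hypothetical fetch_with_fallback (the committed version is PageImporter.fetch_page in the diff below):

    import urllib2
    import requests

    def fetch_with_fallback(url, headers, urllib_fallback=False):
        # Try requests first; on a requests-level failure, re-enter with
        # urllib_fallback=True and fetch through urllib2 instead.
        if urllib_fallback:
            request = urllib2.Request(url, headers=headers)
            return urllib2.urlopen(request).read()
        try:
            return requests.get(url, headers=headers).content
        except (requests.exceptions.RequestException, LookupError):
            return fetch_with_fallback(url, headers, urllib_fallback=True)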
django_compress.egg-info/PKG-INFO
@@ -1,16 +0,0 @@
-Metadata-Version: 1.0
-Name: django-compress
-Version: 1.0.1
-Summary: django-compress provides an automated system for compressing CSS and JavaScript files
-Home-page: http://code.google.com/p/django-compress/
-Author: Andreas Pelme
-Author-email: Andreas Pelme <andreas@pelme.se>
-License: UNKNOWN
-Description: UNKNOWN
-Platform: UNKNOWN
-Classifier: Environment :: Web Environment
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Operating System :: OS Independent
-Classifier: Programming Language :: Python
-Classifier: Topic :: Utilities
django_compress.egg-info/SOURCES.txt
@@ -1,33 +0,0 @@
-README
-setup.py
-compress/__init__.py
-compress/filter_base.py
-compress/models.py
-compress/signals.py
-compress/utils.py
-compress/conf/__init__.py
-compress/conf/settings.py
-compress/filters/__init__.py
-compress/filters/csstidy/__init__.py
-compress/filters/csstidy_python/__init__.py
-compress/filters/csstidy_python/csstidy.py
-compress/filters/csstidy_python/data.py
-compress/filters/csstidy_python/optimizer.py
-compress/filters/csstidy_python/output.py
-compress/filters/csstidy_python/tools.py
-compress/filters/jsmin/__init__.py
-compress/filters/jsmin/jsmin.py
-compress/filters/yui/__init__.py
-compress/management/__init__.py
-compress/management/commands/__init__.py
-compress/management/commands/synccompress.py
-compress/templatetags/__init__.py
-compress/templatetags/compressed.py
-compress/versioning/__init__.py
-compress/versioning/base.py
-compress/versioning/hash/__init__.py
-compress/versioning/mtime/__init__.py
-django_compress.egg-info/PKG-INFO
-django_compress.egg-info/SOURCES.txt
-django_compress.egg-info/dependency_links.txt
-django_compress.egg-info/top_level.txt
django_compress.egg-info/dependency_links.txt
@@ -1 +0,0 @@
-
django_compress.egg-info/top_level.txt
@@ -1 +0,0 @@
-compress
@@ -1 +0,0 @@
-
@@ -5,6 +5,7 @@ from apps.rss_feeds.models import Feed
 from optparse import make_option
 from utils import feed_fetcher
 from utils.management_functions import daemonize
+import django
 import socket
 import datetime
 import redis
@@ -69,12 +70,14 @@ class Command(BaseCommand):
         feeds_queue = []
         for _ in range(num_workers):
             feeds_queue.append([])
 
         i = 0
         for feed in feeds:
             feeds_queue[i%num_workers].append(feed.pk)
             i += 1
         disp.add_jobs(feeds_queue, i)
 
+        django.db.connection.close()
+
         print " ---> Fetching %s feeds..." % feeds.count()
         disp.run_jobs()
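The one functional addition here, django.db.connection.close(), lands just before the workers start in run_jobs(): a forked worker would otherwise inherit the parent's open MySQL socket, and two processes sharing one connection garble the protocol, so closing first makes each worker lazily open its own. A sketch of the idea, assuming multiprocessing workers and a hypothetical work function (not the committed Dispatcher):

    import django.db
    from multiprocessing import Process

    def run_workers(feeds_queue, work):
        # Drop the inherited connection; each child reconnects on first query.
        django.db.connection.close()
        workers = [Process(target=work, args=(chunk,)) for chunk in feeds_queue]
        for w in workers:
            w.start()
        return workers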
apps/rss_feeds/models.py
@@ -142,7 +142,7 @@ class Feed(models.Model):
         except IntegrityError, e:
             duplicate_feed = Feed.objects.filter(feed_address=self.feed_address)
             logging.debug("%s: %s" % (self.feed_address, duplicate_feed))
-            logging.debug(' ***> [%-30s] Feed deleted. Could not save: %s' % (self, e))
+            logging.debug(' ***> [%-30s] Feed deleted. Could not save: %s' % (unicode(self)[:30], e))
             if duplicate_feed:
                 merge_feeds(self.pk, duplicate_feed[0].pk)
                 return duplicate_feed[0]
@@ -277,7 +277,7 @@ class Feed(models.Model):
         try:
             feed_address = _1()
         except TimeoutError:
-            logging.debug(' ---> [%-30s] Feed address check timed out...' % (unicode(self.feed_title)[:30]))
+            logging.debug(' ---> [%-30s] Feed address check timed out...' % (unicode(self)[:30]))
             self.save_feed_history(505, 'Timeout', '')
             feed_address = None
 
@@ -769,19 +769,21 @@ class Feed(models.Model):
                                     story_feed_id=self.pk,
                                     ).order_by('-story_date')
         if stories.count() > trim_cutoff:
-            logging.debug(' ---> [%-30s] Found %s stories. Trimming to %s...' % (self, stories.count(), trim_cutoff))
+            logging.debug(' ---> [%-30s] ~FBFound %s stories. Trimming to ~SB%s~SN...' % (unicode(self)[:30], stories.count(), trim_cutoff))
             try:
                 story_trim_date = stories[trim_cutoff].story_date
             except IndexError, e:
-                logging.debug(' ***> [%-30s] Error trimming feed: %s' % (self, e))
+                logging.debug(' ***> [%-30s] ~BRError trimming feed: %s' % (unicode(self)[:30], e))
                 return
             extra_stories = MStory.objects(story_feed_id=self.pk, story_date__lte=story_trim_date)
             extra_stories_count = extra_stories.count()
             extra_stories.delete()
-            print "Deleted %s stories, %s left." % (extra_stories_count, MStory.objects(story_feed_id=self.pk).count())
+            if verbose:
+                print "Deleted %s stories, %s left." % (extra_stories_count, MStory.objects(story_feed_id=self.pk).count())
             userstories = MUserStory.objects(feed_id=self.pk, story_date__lte=story_trim_date)
             if userstories.count():
-                print "Found %s user stories. Deleting..." % userstories.count()
+                if verbose:
+                    print "Found %s user stories. Deleting..." % userstories.count()
                 userstories.delete()
 
     def get_stories(self, offset=0, limit=25, force=False, slave=False):
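Beyond routing its progress prints through the new verbose flag, the hunk above shows how trimming picks its cutoff: with stories ordered newest-first, the story at index trim_cutoff supplies the date, and everything dated at or before it is deleted (story_date__lte). A minimal sketch of just that index-to-date step, using a plain list in place of the queryset (hypothetical trim_date helper):

    def trim_date(story_dates, trim_cutoff):
        # story_dates sorted newest-first, mirroring .order_by('-story_date').
        # E.g. with 600 dates and trim_cutoff=500, the 501st-newest date is
        # returned and the ~100 older stories match story_date__lte.
        if len(story_dates) <= trim_cutoff:
            return None  # fewer stories than the cutoff; nothing to trim
        return story_dates[trim_cutoff]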
apps/rss_feeds/page_importer.py
@@ -1,18 +1,16 @@
-import urllib2, httplib
+import requests
 import re
 import urlparse
 import traceback
 import feedparser
 import time
+import urllib2
+import httplib
+from django.conf import settings
 from utils import log as logging
 from apps.rss_feeds.models import MFeedPage
 from utils.feed_functions import timelimit, mail_feed_error_to_admin
 
-HEADERS = {
-    'User-Agent': 'NewsBlur Page Fetcher - http://www.newsblur.com',
-    'Connection': 'close',
-}
-
 BROKEN_PAGES = [
     'tag:',
     'info:',
@@ -23,50 +21,76 @@ BROKEN_PAGES = [
 
 class PageImporter(object):
 
-    def __init__(self, url, feed):
-        self.url = url
+    def __init__(self, feed):
         self.feed = feed
 
+    @property
+    def headers(self):
+        s = requests.session()
+        s.config['keep_alive'] = False
+        return {
+            'User-Agent': 'NewsBlur Page Fetcher (%s subscriber%s) - %s '
+                          '(Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_1) '
+                          'AppleWebKit/534.48.3 (KHTML, like Gecko) Version/5.1 '
+                          'Safari/534.48.3)' % (
+                self.feed.num_subscribers,
+                's' if self.feed.num_subscribers != 1 else '',
+                settings.NEWSBLUR_URL
+            ),
+            'Connection': 'close',
+        }
+
     @timelimit(15)
-    def fetch_page(self):
-        if not self.url:
+    def fetch_page(self, urllib_fallback=False):
+        feed_link = self.feed.feed_link
+        if not feed_link:
             self.save_no_page()
             return
 
         try:
-            if self.url.startswith('http'):
-                request = urllib2.Request(self.url, headers=HEADERS)
-                response = urllib2.urlopen(request)
-                time.sleep(0.01) # Grrr, GIL.
-                data = response.read()
-            elif any(self.url.startswith(s) for s in BROKEN_PAGES):
+            if feed_link.startswith('www'):
+                self.feed.feed_link = 'http://' + feed_link
+            if feed_link.startswith('http'):
+                if urllib_fallback:
+                    request = urllib2.Request(feed_link, headers=self.headers)
+                    response = urllib2.urlopen(request)
+                    time.sleep(0.01) # Grrr, GIL.
+                    data = response.read()
+                else:
+                    response = requests.get(feed_link, headers=self.headers)
+                    data = response.content
+            elif any(feed_link.startswith(s) for s in BROKEN_PAGES):
                 self.save_no_page()
                 return
             else:
-                data = open(self.url, 'r').read()
+                data = open(feed_link, 'r').read()
             html = self.rewrite_page(data)
             self.save_page(html)
         except (ValueError, urllib2.URLError, httplib.BadStatusLine, httplib.InvalidURL), e:
             self.feed.save_page_history(401, "Bad URL", e)
             fp = feedparser.parse(self.feed.feed_address)
-            self.feed.feed_link = fp.feed.get('link', "")
+            feed_link = fp.feed.get('link', "")
             self.feed.save()
         except (urllib2.HTTPError), e:
             self.feed.save_page_history(e.code, e.msg, e.fp.read())
-            return
         except (httplib.IncompleteRead), e:
             self.feed.save_page_history(500, "IncompleteRead", e)
-            return
+        except (requests.exceptions.RequestException,
+                LookupError,
+                requests.packages.urllib3.exceptions.HTTPError), e:
+            logging.debug(' ***> [%-30s] Page fetch failed using requests: %s' % (self.feed, e))
+            return self.fetch_page(urllib_fallback=True)
         except Exception, e:
             logging.debug('[%d] ! -------------------------' % (self.feed.id,))
             tb = traceback.format_exc()
             logging.debug(tb)
             logging.debug('[%d] ! -------------------------' % (self.feed.id,))
             self.feed.save_page_history(500, "Error", tb)
-            mail_feed_error_to_admin(self.feed, e)
-            return
-        self.feed.save_page_history(200, "OK")
+            mail_feed_error_to_admin(self.feed, e, locals())
+            if not urllib_fallback:
+                self.fetch_page(urllib_fallback=True)
+        else:
+            self.feed.save_page_history(200, "OK")
 
     def save_no_page(self):
         self.feed.has_page = False
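The matching call shape appears at the Dispatcher call site later in this diff: the URL argument is gone because fetch_page() now reads feed.feed_link itself, and a requests-level failure re-enters the method with urllib_fallback=True, each attempt still bounded by the @timelimit(15) decorator. Usage, as at the updated call site:

    page_importer = PageImporter(feed)
    try:
        page_importer.fetch_page()
    except TimeoutError, e:
        feed.save_page_history(555, 'Timeout', '')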
assets.yml
@@ -22,7 +22,7 @@ compress_assets: on
 
 javascripts:
   common:
-    - media/js/vendor/jquery-1.7.js
+    - media/js/vendor/jquery-1.7.1.js
     - media/js/vendor/jquery.json.js
     - media/js/vendor/jquery.easing.js
     - media/js/vendor/jquery.newsblur.js
local_settings.py.template
@@ -1,27 +1,24 @@
 import logging
 import pymongo
 
-DATABASES = {
-    'default': {
-        'NAME': 'newsblur',
-        'ENGINE': 'django.db.backends.mysql',
-        'USER': 'newsblur',
-        'PASSWORD': '',
-        'HOST': '127.0.0.1',
-    },
-}
+# ===================
+# = Server Settings =
+# ===================
 
-MONGO_DB = {
-    'name': 'newsblur',
-    'host': '127.0.0.1',
-    'port': 27017,
-    'read_preference': pymongo.ReadPreference.PRIMARY,
-}
+ADMINS = (
+    ('Samuel Clay', 'samuel@ofbrooklyn.com'),
+)
+
+SERVER_EMAIL = 'server@newsblur.com'
+HELLO_EMAIL = 'hello@newsblur.com'
+NEWSBLUR_URL = 'http://www.newsblur.com'
+
+# ==================
+# = Global Settngs =
+# ==================
 
 DEBUG = True
 
 MEDIA_URL = '/media/'
 
 SECRET_KEY = 'YOUR SECRET KEY'
 
 CACHE_BACKEND = 'dummy:///'
@@ -37,6 +34,34 @@ HOMEPAGE_USERNAME = 'conesus'
 OAUTH_KEY = 'www.example.com'
 OAUTH_SECRET = 'SECRET_KEY_FROM_GOOGLE'
 
+S3_ACCESS_KEY = 'XXX'
+S3_SECRET = 'SECRET'
+S3_BACKUP_BUCKET = 'newsblur_backups'
+
+# =============
+# = Databases =
+# =============
+
+DATABASES = {
+    'default': {
+        'NAME': 'newsblur',
+        'ENGINE': 'django.db.backends.mysql',
+        'USER': 'newsblur',
+        'PASSWORD': '',
+        'HOST': '127.0.0.1',
+    },
+}
+
+MONGO_DB = {
+    'name': 'newsblur',
+    'host': '127.0.0.1',
+    'port': 27017
+}
+
+MONGODB_SLAVE = {
+    'host': '127.0.0.1'
+}
+
 # Celery RabbitMQ Broker
 BROKER_HOST = "127.0.0.1"
@@ -65,6 +90,7 @@ if len(logging._handlerList) < 1:
                         format='%(asctime)-12s: %(message)s',
                         datefmt='%b %d %H:%M:%S',
                         handler=logging.StreamHandler)
+<<<<<<< HEAD
 
 S3_ACCESS_KEY = 'XXX'
 S3_SECRET = 'SECRET'
@@ -78,3 +104,5 @@ FACEBOOK_APP_ID = '111111111111111'
 FACEBOOK_SECRET = '99999999999999999999999999999999'
 TWITTER_CONSUMER_KEY = 'ooooooooooooooooooooo'
 TWITTER_CONSUMER_SECRET = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
+=======
+>>>>>>> jammit
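Note that these two hunks commit literal merge-conflict markers (<<<<<<< HEAD, =======, >>>>>>> jammit) into local_settings.py.template. A quick hypothetical check for leftovers of that kind:

    import re

    def has_conflict_markers(path):
        # Matches the three marker shapes this diff adds to the template.
        marker = re.compile(r'^(<{7} |={7}$|>{7} )', re.MULTILINE)
        return bool(marker.search(open(path).read()))

    print has_conflict_markers('local_settings.py.template')  # True after this merge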
BIN media/img/mobile/iphone_512_mask.png (new file)
Binary file not shown. After: 40 KiB.
@@ -7080,6 +7080,13 @@
             e.preventDefault();
             self.open_river_stories();
         });
+        $document.bind('keydown', 'shift+u', function(e) {
+            e.preventDefault();
+            if (self.active_story) {
+                var story_id = self.active_story.id;
+                self.mark_story_as_unread(story_id);
+            }
+        });
     }
 
 };
File diff suppressed because it is too large

settings.py (22 changed lines)
@@ -5,6 +5,18 @@ from mongoengine import connect
 import redis
 from utils import jammit
 
+# ===================
+# = Server Settings =
+# ===================
+
+ADMINS = (
+    ('Samuel Clay', 'samuel@ofbrooklyn.com'),
+)
+
+SERVER_EMAIL = 'server@newsblur.com'
+HELLO_EMAIL = 'hello@newsblur.com'
+NEWSBLUR_URL = 'http://www.newsblur.com'
+
 # ===========================
 # = Directory Declaractions =
 # ===========================
@@ -27,14 +39,10 @@ if '/utils' not in ' '.join(sys.path):
     sys.path.append(UTILS_ROOT)
 if '/vendor' not in ' '.join(sys.path):
     sys.path.append(VENDOR_ROOT)
 
 # ===================
 # = Global Settings =
 # ===================
 
-ADMINS = (
-    ('Samuel Clay', 'samuel@ofbrooklyn.com'),
-)
-
 TEST_DEBUG = False
 SEND_BROKEN_LINK_EMAILS = False
 MANAGERS = ADMINS
@@ -159,7 +167,7 @@ LOGGING = {
 COMPRESS_JS = {
     'all': {
         'source_filenames': (
-            'js/jquery-1.6.1.js',
+            'js/jquery-1.7.1.js',
             'js/inflector.js',
             'js/jquery.json.js',
             'js/jquery.easing.js',
@@ -216,7 +224,7 @@ COMPRESS_JS = {
     },
     'mobile': {
         'source_filenames': (
-            'js/jquery-1.6.1.js',
+            'js/jquery-1.7.1.js',
             'js/mobile/jquery.mobile-1.0b1.js',
             'js/jquery.ajaxmanager.3.js',
             'js/underscore.js',
@@ -310,8 +318,6 @@ SESSION_ENGINE = "django.contrib.sessions.backends.db"
 TEST_RUNNER = "utils.testrunner.TestRunner"
 SESSION_COOKIE_NAME = 'newsblur_sessionid'
 SESSION_COOKIE_AGE = 60*60*24*365*2 # 2 years
-SERVER_EMAIL = 'server@newsblur.com'
-HELLO_EMAIL = 'hello@newsblur.com'
 
 # ===========
 # = Logging =
utils/feed_fetcher.py
@@ -23,7 +23,6 @@ import redis
 # Refresh feed code adapted from Feedjack.
 # http://feedjack.googlecode.com
 
-URL = 'http://www.newsblur.com/'
 SLOWFEED_WARNING = 10
 ENTRY_NEW, ENTRY_UPDATED, ENTRY_SAME, ENTRY_ERR = range(4)
 FEED_OK, FEED_SAME, FEED_ERRPARSE, FEED_ERRHTTP, FEED_ERREXC = range(5)
@@ -46,7 +45,7 @@ class FetchFeed:
         Uses feedparser to download the feed. Will be parsed later.
         """
         identity = self.get_identity()
-        log_msg = u'%2s ---> [%-30s] Fetching feed (%d), last update: %s' % (identity,
+        log_msg = u'%2s ---> [%-30s] ~FYFetching feed (~FB%d~FY), last update: %s' % (identity,
                                                             unicode(self.feed)[:30],
                                                             self.feed.id,
                                                             datetime.datetime.now() - self.feed.last_update)
@@ -63,7 +62,7 @@ class FetchFeed:
         USER_AGENT = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_1) AppleWebKit/534.48.3 (KHTML, like Gecko) Version/5.1 Safari/534.48.3 (NewsBlur Feed Fetcher - %s subscriber%s - %s)' % (
             self.feed.num_subscribers,
             's' if self.feed.num_subscribers != 1 else '',
-            URL
+            settings.NEWSBLUR_URL
         )
 
         self.fpf = feedparser.parse(self.feed.feed_address,
@@ -288,7 +287,7 @@ class Dispatcher:
             ENTRY_SAME: 0,
             ENTRY_ERR: 0
         }
-        start_time = datetime.datetime.utcnow()
+        start_time = time.time()
         ret_feed = FEED_ERREXC
         try:
             feed = self.refresh_feed(feed_id)
@@ -345,12 +344,12 @@ class Dispatcher:
                   (ret_feed == FEED_OK or
                    (ret_feed == FEED_SAME and feed.stories_last_month > 10)))):
 
-                logging.debug(u' ---> [%-30s] Fetching page: %s' % (unicode(feed)[:30], feed.feed_link))
-                page_importer = PageImporter(feed.feed_link, feed)
+                logging.debug(u' ---> [%-30s] ~FYFetching page: %s' % (unicode(feed)[:30], feed.feed_link))
+                page_importer = PageImporter(feed)
                 try:
                     page_importer.fetch_page()
                 except TimeoutError, e:
-                    logging.debug(' ---> [%-30s] Page fetch timed out...' % (unicode(feed)[:30]))
+                    logging.debug(' ---> [%-30s] ~FRPage fetch timed out...' % (unicode(feed)[:30]))
                     feed.save_page_history(555, 'Timeout', '')
                 except Exception, e:
                     logging.debug('[%d] ! -------------------------' % (feed_id,))
@@ -361,12 +360,12 @@ class Dispatcher:
                     fetched_feed = None
                     mail_feed_error_to_admin(feed, e)
 
-            logging.debug(u' ---> [%-30s] Fetching icon: %s' % (unicode(feed)[:30], feed.feed_link))
+            logging.debug(u' ---> [%-30s] ~FYFetching icon: %s' % (unicode(feed)[:30], feed.feed_link))
             icon_importer = IconImporter(feed, force=self.options['force'])
             try:
                 icon_importer.save()
             except TimeoutError, e:
-                logging.debug(' ---> [%-30s] Icon fetch timed out...' % (unicode(feed)[:30]))
+                logging.debug(' ---> [%-30s] ~FRIcon fetch timed out...' % (unicode(feed)[:30]))
                 feed.save_page_history(556, 'Timeout', '')
             except Exception, e:
                 logging.debug('[%d] ! -------------------------' % (feed_id,))
@@ -376,23 +375,23 @@ class Dispatcher:
                 # feed.save_feed_history(560, "Icon Error", tb)
                 mail_feed_error_to_admin(feed, e)
         else:
-            logging.debug(u' ---> [%-30s] Skipping page fetch: %s (%s on %s stories) %s' % (unicode(feed)[:30], unicode(feed.feed_link)[:30], self.feed_trans[ret_feed], feed.stories_last_month, '' if feed.has_page else ' [HAS NO PAGE]'))
+            logging.debug(u' ---> [%-30s] ~FBSkipping page fetch: (%s on %s stories) %s' % (unicode(feed)[:30], self.feed_trans[ret_feed], feed.stories_last_month, '' if feed.has_page else ' [HAS NO PAGE]'))
 
         feed = self.refresh_feed(feed_id)
-        delta = datetime.datetime.utcnow() - start_time
+        delta = time.time() - start_time
 
-        feed.last_load_time = max(1, delta.seconds)
+        feed.last_load_time = round(delta)
         feed.fetched_once = True
         try:
             feed.save()
         except IntegrityError:
-            logging.debug(" ---> [%-30s] IntegrityError on feed: %s" % (unicode(feed)[:30], feed.feed_address,))
+            logging.debug(" ---> [%-30s] ~FRIntegrityError on feed: %s" % (unicode(feed)[:30], feed.feed_address,))
 
         if ret_entries[ENTRY_NEW]:
             self.publish_to_subscribers(feed)
 
-        done_msg = (u'%2s ---> [%-30s] Processed in %s (%s) [%s]' % (
-            identity, feed.feed_title[:30], unicode(delta),
+        done_msg = (u'%2s ---> [%-30s] ~FYProcessed in ~FG~SB%.4ss~FY~SN (~FB%s~FY) [%s]' % (
+            identity, feed.feed_title[:30], delta,
             feed.pk, self.feed_trans[ret_feed],))
         logging.debug(done_msg)
 
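The timing change in this hunk swaps a timedelta for a float. delta.seconds truncates to whole seconds, which is why the old code needed max(1, ...) to avoid storing zero; time.time() arithmetic keeps sub-second resolution for the new %.4ss field in done_msg and rounds for last_load_time. A worked comparison, assuming a fetch of roughly a quarter second:

    import time
    import datetime

    start_dt = datetime.datetime.utcnow()
    start = time.time()
    time.sleep(0.25)                 # stand-in for the actual fetch work

    old_delta = datetime.datetime.utcnow() - start_dt
    print max(1, old_delta.seconds)  # 1: .seconds truncated to 0, floored to 1

    delta = time.time() - start
    print '%.4ss' % delta            # '0.25s', as the new done_msg logs it
    print round(delta)               # 0.0, stored in feed.last_load_time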
utils/feed_functions.py
@@ -176,13 +176,14 @@ def add_object_to_folder(obj, folder, folders, parent='', added=False):
             folders[k][f_k] = add_object_to_folder(obj, folder, f_v, f_k, added)
     return folders
 
-def mail_feed_error_to_admin(feed, e):
+def mail_feed_error_to_admin(feed, e, local_vars=None):
     # Mail the admins with the error
     exc_info = sys.exc_info()
     subject = 'Feed update error: %s' % repr(e)
-    message = 'Traceback:\n%s\n\Feed:\n%s' % (
+    message = 'Traceback:\n%s\n\Feed:\n%s\nLocals:\n%s' % (
         '\n'.join(traceback.format_exception(*exc_info)),
-        pprint.pformat(feed.__dict__)
+        pprint.pformat(feed.__dict__),
+        pprint.pformat(local_vars)
     )
     # print message
     mail_admins(subject, message)
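mail_feed_error_to_admin now takes an optional local_vars mapping and pretty-prints it under the traceback; the page importer above passes locals(), so the failing feed_link, response, and the rest of the frame ride along in the admin mail. The call shape, with a hypothetical fetch step:

    try:
        data = fetch(feed)  # hypothetical step that may raise
    except Exception, e:
        # locals() snapshots data/feed/e for pprint.pformat in the message.
        mail_feed_error_to_admin(feed, e, locals())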
utils/log.py
@@ -37,7 +37,7 @@ def user(u, msg):
     elif 'WP7' in user_agent:
         platform = 'WP7'
     premium = '*' if u.is_authenticated() and u.profile.is_premium else ''
-    username = cipher(u.__unicode__()) if settings.CIPHER_USERNAMES else u
+    username = cipher(unicode(u)) if settings.CIPHER_USERNAMES else u
     info(' ---> [~FB~SN%-6s~SB] [%s%s] %s' % (platform, username, premium, msg))
 
 def cipher(msg):