Merge branch 'master' into social

* master:
  Fixing starred stories not loading past the second page, due to a missing flag reset.
  Fixing a user-found bug where tooltips kept showing if the original DOM element (and with it the reference to the tooltip) was destroyed too early, leaving a tooltip floating in the air.
  Geometrically delaying fetches of bad feeds while they're still somewhat fresh.
  Fixing error codes on statistics. Also fixing a simple error on invalid feeds.
  Update media/js/newsblur/reader.js

Conflicts:
	media/js/newsblur/reader/reader.js
Samuel Clay 2012-02-24 13:01:48 -08:00
commit 408cb7e801
8 changed files with 49 additions and 44 deletions


@@ -4,7 +4,6 @@ import random
import re
import math
import mongoengine as mongo
import redis
import zlib
import urllib
import hashlib
@@ -13,7 +12,6 @@ from operator import itemgetter
# from nltk.collocations import TrigramCollocationFinder, BigramCollocationFinder, TrigramAssocMeasures, BigramAssocMeasures
from django.db import models
from django.db import IntegrityError
from django.core.cache import cache
from django.conf import settings
from django.db.models.query import QuerySet
from django.contrib.auth.models import User
@@ -290,6 +288,10 @@ class Feed(models.Model):
self.save_feed_history(505, 'Timeout', '')
feed_address = None
if feed_address:
self.feed.has_feed_exception = True
self.feed.schedule_feed_fetch_immediately()
return not not feed_address
def save_feed_history(self, status_code, message, exception=None):
@@ -307,7 +309,8 @@ class Feed(models.Model):
# for history in old_fetch_histories:
# history.delete()
if status_code not in (200, 304):
self.count_errors_in_history('feed', status_code)
errors, non_errors = self.count_errors_in_history('feed', status_code)
self.set_next_scheduled_update(error_count=len(errors), non_error_count=len(non_errors))
elif self.has_feed_exception:
self.has_feed_exception = False
self.active = True
@@ -336,8 +339,8 @@ class Feed(models.Model):
history_class.objects(feed_id=self.pk)[:50])
non_errors = [h for h in fetch_history if int(h) in (200, 304)]
errors = [h for h in fetch_history if int(h) not in (200, 304)]
if len(non_errors) == 0 and len(errors) >= 1:
if len(non_errors) == 0 and len(errors) > 1:
if exception_type == 'feed':
self.has_feed_exception = True
self.active = False
@@ -348,6 +351,10 @@ class Feed(models.Model):
elif self.exception_code > 0:
self.active = True
self.exception_code = 0
if exception_type == 'feed':
self.has_feed_exception = False
elif exception_type == 'page':
self.has_page_exception = False
self.save()
return errors, non_errors
@@ -1017,11 +1024,12 @@ class Feed(models.Model):
return total, random_factor*2
def set_next_scheduled_update(self, multiplier=1):
def set_next_scheduled_update(self, error_count=0, non_error_count=0):
total, random_factor = self.get_next_scheduled_update(force=True, verbose=False)
if multiplier > 1:
total = total * multiplier
if error_count:
logging.debug(' ---> [%-30s] ~FBScheduling feed fetch geometrically: ~SB%s errors, %s non-errors' % (unicode(self)[:30], error_count, non_error_count))
total = total * error_count
next_scheduled_update = datetime.datetime.utcnow() + datetime.timedelta(
minutes = total + random_factor)
@@ -1032,14 +1040,11 @@ class Feed(models.Model):
self.save()
def schedule_feed_fetch_immediately(self):
logging.debug(' ---> [%-30s] Scheduling feed fetch immediately...' % (unicode(self)[:30]))
self.next_scheduled_update = datetime.datetime.utcnow()
self.save()
def schedule_feed_fetch_geometrically(self):
errors, non_errors = self.count_errors_in_history('feed')
self.set_next_scheduled_update(multiplier=len(errors))
# def calculate_collocations_story_content(self,
# collocation_measures=TrigramAssocMeasures,
# collocation_finder=TrigramCollocationFinder):
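
Taken together, this hunk retires the separate schedule_feed_fetch_geometrically() helper: save_feed_history() now threads error counts straight into set_next_scheduled_update(), which multiplies the base interval by the number of recent errors. Note also the flip from len(errors) >= 1 to len(errors) > 1, which gives a feed one free error before it is flagged as an exception. A minimal sketch of the resulting backoff math, where base_minutes and the jitter are illustrative stand-ins for what get_next_scheduled_update() actually computes:

    import datetime
    import random

    def next_fetch_time(base_minutes, error_count=0):
        # Geometric backoff: each recent fetch error multiplies the base
        # interval, so a feed failing 5 times in a row waits ~5x as long.
        total = base_minutes * error_count if error_count else base_minutes
        jitter = random.randint(0, max(1, total // 4))  # stand-in for random_factor
        return datetime.datetime.utcnow() + datetime.timedelta(minutes=total + jitter)

    # A feed on a 60-minute schedule with 5 straight errors gets pushed
    # out to roughly 300+ minutes instead of 60.
    print(next_fetch_time(60, error_count=5))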


@@ -52,7 +52,7 @@ class MStatistics(mongo.Document):
def collect_statistics_feeds_fetched(cls, last_day=None):
if not last_day:
last_day = datetime.datetime.now() - datetime.timedelta(hours=24)
last_biweek = datetime.datetime.now() - datetime.timedelta(days=14)
last_month = datetime.datetime.now() - datetime.timedelta(days=30)
feeds_fetched = MFeedFetchHistory.objects.filter(fetch_date__lt=last_day).count()
cls.objects(key='feeds_fetched').update_one(upsert=True, key='feeds_fetched', value=feeds_fetched)
@@ -64,8 +64,8 @@ class MStatistics(mongo.Document):
def delete_old_history():
MFeedFetchHistory.objects(fetch_date__lt=last_day, status_code__in=[200, 304]).delete()
MPageFetchHistory.objects(fetch_date__lt=last_day, status_code__in=[200, 304]).delete()
MFeedFetchHistory.objects(fetch_date__lt=last_biweek).delete()
MPageFetchHistory.objects(fetch_date__lt=last_biweek).delete()
MFeedFetchHistory.objects(fetch_date__lt=last_month).delete()
MPageFetchHistory.objects(fetch_date__lt=last_month).delete()
try:
delete_old_history()
except TimeoutError:
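
The retention change leaves fetch history with two tiers: routine 200/304 entries are purged after 24 hours, while everything else, chiefly errors, now survives for 30 days instead of 14, so chronically bad feeds can still be diagnosed weeks later. A sketch of the two cutoffs:

    import datetime

    def history_cutoffs(now=None):
        # Successful fetches (200/304) age out in a day; errors keep a month.
        now = now or datetime.datetime.now()
        success_cutoff = now - datetime.timedelta(hours=24)
        everything_cutoff = now - datetime.timedelta(days=30)
        return success_cutoff, everything_cutoff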

fabfile.py

@@ -65,6 +65,7 @@ def task():
# = Deploy =
# ==========
@parallel
def pull():
with cd(env.NEWSBLUR_PATH):
run('git pull')
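
The only change here is the @parallel decorator: under Fabric 1.x, fab pull now forks one process per host and runs the task on every machine at once instead of serially. A minimal standalone sketch (the host names and path are hypothetical; in the real fabfile, env.NEWSBLUR_PATH is set elsewhere):

    from fabric.api import cd, env, parallel, run

    env.hosts = ['app01.example.com', 'app02.example.com']  # hypothetical hosts
    env.NEWSBLUR_PATH = '/srv/newsblur'                     # illustrative path

    @parallel
    def pull():
        # Runs concurrently on every host in env.hosts.
        with cd(env.NEWSBLUR_PATH):
            run('git pull')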


@@ -4915,7 +4915,7 @@ background: transparent;
.NB-modal-statistics .NB-statistics-history-fetch.NB-ok {
color: #135500;
}
.NB-modal-statistics .NB-statistics-history-fetch.NB-error {
.NB-modal-statistics .NB-statistics-history-fetch.NB-errorcode {
color: #6A1000;
}
.NB-modal-statistics .NB-statistics-history-fetch .NB-statistics-history-fetch-code {
@@ -6908,4 +6908,4 @@ background: transparent;
max-width: 108px;
max-height: 108px;
margin: 0 8px 0 0;
}
}


@@ -1030,7 +1030,7 @@
find_story_with_action_preference_on_open_feed: function() {
var open_feed_action = this.model.preference('open_feed_action');
console.log(["action_preference_on_open_feed", open_feed_action, this.counts['page']]);
if (this.counts['page'] != 1) return;
if (open_feed_action == 'newest') {
@@ -1173,7 +1173,7 @@
return -1;
} else if (!feedA && feedB) {
return 1;
} else if (!feedA && !feedB && a && b && !_.isNumber(a) && !_.isNumber(b) && !(a.e instanceof jQuery) && (!b.e instanceof jQuery)) {
} else if (!feedA && !feedB && a && b && !_.isNumber(a) && !_.isNumber(b) && !(a.e instanceof jQuery) && !(b.e instanceof jQuery)) {
// console.log(['a b 1', a, b, feedA, feedB]);
var folderA = _.keys(a)[0];
var folderB = _.keys(b)[0];
@@ -1188,7 +1188,7 @@
return -1;
} else if (!feedA && feedB) {
return 1;
} else if (!feedA && !feedB && a && b && !_.isNumber(a) && !_.isNumber(b) && !(a.e instanceof jQuery) && (!b.e instanceof jQuery)) {
} else if (!feedA && !feedB && a && b && !_.isNumber(a) && !_.isNumber(b) && !(a.e instanceof jQuery) && !(b.e instanceof jQuery)) {
// console.log(['a b 2', a, b]);
var folderA = _.keys(a)[0];
var folderB = _.keys(b)[0];
@@ -2064,6 +2064,7 @@
post_open_starred_stories: function(data, first_load) {
if (this.active_feed == 'starred') {
// NEWSBLUR.log(['post_open_starred_stories', data.stories.length, first_load]);
this.flags['opening_feed'] = false;
this.flags['feed_view_positions_calculated'] = false;
this.counts['feed_view_positions_timer'] = 0;
this.story_titles_clear_loading_endbar();
@@ -2876,8 +2877,9 @@
trigger: 'manual',
offsetOpposite: -1
});
$star.tipsy('enable');
$star.tipsy('show');
var tipsy = $star.data('tipsy');
tipsy.enable();
tipsy.show();
$star.animate({
'opacity': 1
@@ -2885,8 +2887,10 @@
'duration': 850,
'queue': false,
'complete': function() {
$(this).tipsy('hide');
$(this).tipsy('disable');
if (tipsy.enabled) {
tipsy.hide();
tipsy.disable();
}
}
});
@@ -2920,11 +2924,14 @@
trigger: 'manual',
offsetOpposite: -1
});
$star.tipsy('enable');
$star.tipsy('show');
var tipsy = $star.data('tipsy');
tipsy.enable();
tipsy.show();
_.delay(function() {
$star.tipsy('hide');
$star.tipsy('disable');
if (tipsy.enabled) {
tipsy.hide();
tipsy.disable();
}
}, 850);
$story_title.removeClass('NB-story-starred');


@@ -217,7 +217,7 @@ _.extend(NEWSBLUR.ReaderStatistics.prototype, {
var $history = _.map(fetches, function(fetch) {
var feed_ok = _.contains([200, 304], fetch.status_code);
var status_class = feed_ok ? ' NB-ok ' : ' NB-error ';
var status_class = feed_ok ? ' NB-ok ' : ' NB-errorcode ';
return $.make('div', { className: 'NB-statistics-history-fetch' + status_class, title: feed_ok ? '' : fetch.exception }, [
$.make('div', { className: 'NB-statistics-history-fetch-date' }, fetch.fetch_date),
$.make('div', { className: 'NB-statistics-history-fetch-message' }, [


@@ -47,11 +47,10 @@ class FetchFeed:
datetime.datetime.now() - self.feed.last_update)
logging.debug(log_msg)
self.feed.set_next_scheduled_update()
etag=self.feed.etag
modified = self.feed.last_modified.utctimetuple()[:7] if self.feed.last_modified else None
if self.options.get('force') or not self.feed.fetched_once:
if self.options.get('force') or not self.feed.fetched_once or not self.feed.known_good:
modified = None
etag = None
@@ -126,10 +125,9 @@ class ProcessFeed:
if self.fpf.status in (302, 301):
if not self.fpf.href.endswith('feedburner.com/atom.xml'):
self.feed.feed_address = self.fpf.href
if not self.feed.fetched_once:
self.feed.has_feed_exception = True
if not self.feed.known_good:
self.feed.fetched_once = True
logging.debug(" ---> [%-30s] Feed is 302'ing, but it's not new. Refetching..." % (unicode(self.feed)[:30]))
logging.debug(" ---> [%-30s] Feed is %s'ing. Refetching..." % (unicode(self.feed)[:30], self.fpf.status))
self.feed.schedule_feed_fetch_immediately()
if not self.fpf.entries:
self.feed.save()
@@ -142,9 +140,6 @@ class ProcessFeed:
fixed_feed = self.feed.check_feed_link_for_feed_address()
if not fixed_feed:
self.feed.save_feed_history(self.fpf.status, "HTTP Error")
else:
self.feed.has_feed_exception = True
self.feed.schedule_feed_fetch_geometrically()
self.feed.save()
return FEED_ERRHTTP, ret_values
@@ -156,9 +151,6 @@ class ProcessFeed:
fixed_feed = self.feed.check_feed_link_for_feed_address()
if not fixed_feed:
self.feed.save_feed_history(502, 'Non-xml feed', self.fpf.bozo_exception)
else:
self.feed.has_feed_exception = True
self.feed.schedule_feed_fetch_immediately()
self.feed.save()
return FEED_ERRPARSE, ret_values
elif self.fpf.bozo and isinstance(self.fpf.bozo_exception, xml.sax._exceptions.SAXException):
@@ -169,9 +161,6 @@ class ProcessFeed:
fixed_feed = self.feed.check_feed_link_for_feed_address()
if not fixed_feed:
self.feed.save_feed_history(503, 'SAX Exception', self.fpf.bozo_exception)
else:
self.feed.has_feed_exception = True
self.feed.schedule_feed_fetch_immediately()
self.feed.save()
return FEED_ERRPARSE, ret_values
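
On the fetcher side, conditional GETs are now skipped until a feed is known_good, so suspect feeds always get a full response rather than a cheap 304, and the per-status exception flags move out of the fetcher into save_feed_history(), which drives the backoff itself. A minimal sketch of the conditional-GET decision, where feed stands in for the model instance used above:

    import feedparser

    def conditional_fetch(feed, force=False):
        etag = feed.etag
        modified = feed.last_modified.utctimetuple()[:7] if feed.last_modified else None
        if force or not feed.fetched_once or not feed.known_good:
            # Full fetch: withhold the conditional headers until the feed
            # has been verified good at least once.
            etag = None
            modified = None
        return feedparser.parse(feed.feed_address, etag=etag, modified=modified)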


@@ -3924,7 +3924,10 @@ def parse(url_file_stream_or_string, etag=None, modified=None, agent=None, refer
break
# if no luck and we have auto-detection library, try that
if (not known_encoding) and chardet:
proposed_encoding = unicode(chardet.detect(data)['encoding'], 'ascii', 'ignore')
# import pdb; pdb.set_trace()
proposed_encoding = chardet.detect(data)['encoding']
if proposed_encoding:
proposed_encoding = unicode(proposed_encoding, 'ascii', 'ignore')
if proposed_encoding and (proposed_encoding not in tried_encodings):
tried_encodings.append(proposed_encoding)
try:
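
This last hunk fixes a crash in the vendored feedparser: chardet.detect() returns {'encoding': None} for bytes it cannot classify, and the old one-liner passed that None straight into unicode(), raising a TypeError. The guard as a standalone sketch (Python 2, matching the vendored code):

    import chardet

    def propose_encoding(data, tried_encodings):
        # chardet may fail to guess, leaving 'encoding' as None.
        proposed = chardet.detect(data)['encoding']
        if proposed:
            proposed = unicode(proposed, 'ascii', 'ignore')  # Python 2 builtin
        if proposed and proposed not in tried_encodings:
            tried_encodings.append(proposed)
            return proposed
        return None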