Correcting a double-encoding bug for story permalinks that was from way back when.

Samuel Clay committed 2012-03-27 18:37:04 -07:00
parent fd1e26a5b0
commit 9ee6487f58
5 changed files with 39 additions and 22 deletions

View file

@@ -41,8 +41,7 @@ class PushSubscriptionManager(models.Manager):
callback_path = reverse('push-callback',
args=(subscription.pk,))
except Resolver404:
raise TypeError(
'callback cannot be None if there is not a reverable URL')
raise TypeError('callback cannot be None if there is not a reverable URL')
else:
# callback = 'http://' + Site.objects.get_current() + \
callback = 'http://' + "dev.newsblur.com" + \
@@ -56,7 +55,7 @@ class PushSubscriptionManager(models.Manager):
'hub.verify_token': subscription.generate_token('subscribe'),
'hub.lease_seconds': lease_seconds,
})
import pdb; pdb.set_trace()
if response.status_code == 204:
subscription.verified = True
elif response.status_code == 202: # async verification
@@ -67,8 +66,7 @@ class PushSubscriptionManager(models.Manager):
topic, hub, error))
subscription.save()
feed.is_push = subscription.verified
feed.save()
feed.setup_push()
if subscription.verified:
signals.verified.send(sender=subscription)
return subscription
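
The hunks above tidy the PubSubHubbub (PuSH) subscribe path: the two-line raise is collapsed onto one line, and once the hub has answered, the feed's push state is refreshed through feed.setup_push() instead of being toggled and saved inline. For orientation, here is a minimal sketch of the subscribe request that the hub.* parameters in the second hunk belong to; the parameter names mirror the hunk and the PuSH 0.3 spec, while the function name, the requests client, and the defaults are illustrative assumptions, not the project's actual request helper.

    import requests  # assumed client; the real helper may use a different HTTP library

    def send_subscribe_request(hub_url, topic_url, callback_url, verify_token,
                               lease_seconds=604800):
        # PuSH 0.3 subscribe POST: the hub verifies the callback before it
        # starts delivering pings for topic_url.
        response = requests.post(hub_url, data={
            'hub.mode': 'subscribe',
            'hub.callback': callback_url,      # e.g. the reversed 'push-callback' URL
            'hub.topic': topic_url,            # the feed's own topic URL
            'hub.verify': ['async', 'sync'],   # hub may verify now or later
            'hub.verify_token': verify_token,  # echoed back during verification
            'hub.lease_seconds': lease_seconds,
        })
        # 204 means the hub verified synchronously; 202 means async verification.
        return response.status_code in (202, 204)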

View file

@@ -26,6 +26,7 @@ def push_callback(request, push_id):
verify_token=verify_token)
subscription.verified = True
subscription.set_expiration(int(lease_seconds))
subscription.feed.setup_push()
verified.send(sender=subscription)
return HttpResponse(challenge, content_type='text/plain')
@@ -57,6 +58,8 @@ def push_callback(request, push_id):
callback=request.build_absolute_uri(),
lease_seconds=seconds)
subscription.feed.update(fpf=parsed)
updated.send(sender=subscription, update=parsed)
return HttpResponse('')
return Http404
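
The callback view gains two behaviors: on a successful verification handshake it now calls subscription.feed.setup_push(), and on a content notification (a "fat ping" whose POST body is the feed document itself) it hands the parsed result straight to feed.update(fpf=parsed), so the fetcher can reuse it instead of downloading the feed again. A rough outline of that dispatch, assuming Django and feedparser; the function signature and variable names here are illustrative, not the actual view's.

    import feedparser
    from django.http import HttpResponse, Http404

    def push_callback_outline(request, subscription):
        if request.method == 'GET':
            # Verification handshake: the hub asks us to echo hub.challenge.
            mode = request.GET.get('hub.mode')
            challenge = request.GET.get('hub.challenge', '')
            if mode in ('subscribe', 'unsubscribe'):
                subscription.verified = True
                subscription.save()
                subscription.feed.setup_push()
                return HttpResponse(challenge, content_type='text/plain')
            raise Http404
        # Fat ping: the POST body is the (partial) feed document itself.
        parsed = feedparser.parse(request.body)  # raw_post_data on 2012-era Django
        subscription.feed.update(fpf=parsed)     # reuse the parse, no second fetch
        return HttpResponse('')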

View file

@@ -633,6 +633,7 @@ class Feed(models.Model):
'fake': kwargs.get('fake'),
'quick': kwargs.get('quick'),
'debug': kwargs.get('debug'),
'fpf': kwargs.get('fpf'),
}
disp = feed_fetcher.Dispatcher(options, 1)
disp.add_jobs([[self.pk]])
@@ -1069,6 +1070,16 @@ class Feed(models.Model):
return self.save()
def setup_push(self):
from apps.push.models import PushSubscription
if not self.is_push:
try:
push = self.push
except PushSubscription.DoesNotExist:
return
self.is_push = push.verified
self.save()
# def calculate_collocations_story_content(self,
# collocation_measures=TrigramAssocMeasures,
# collocation_finder=TrigramCollocationFinder):
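
The extraction flattened the new setup_push() method's indentation; read back into shape, it says roughly the following (the nesting of the last two statements is a best guess from the hunk):

    def setup_push(self):
        from apps.push.models import PushSubscription

        if not self.is_push:
            try:
                push = self.push  # the feed's related PushSubscription, if any
            except PushSubscription.DoesNotExist:
                return
            self.is_push = push.verified
            self.save()

In other words, a feed only flips to is_push once a subscription exists and the hub has verified it, which is why both the subscribe path and the callback view above call it after talking to the hub.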

View file

@@ -63,11 +63,16 @@ class FetchFeed:
's' if self.feed.num_subscribers != 1 else '',
settings.NEWSBLUR_URL
)
self.fpf = feedparser.parse(self.feed.feed_address,
agent=USER_AGENT,
etag=etag,
modified=modified)
if self.options.get('fpf'):
self.fpf = self.options.get('fpf')
logging.debug(u' ---> [%-30s] ~FM~BKFeed fetched in real-time with fat ping.' % (
unicode(self.feed)[:30]))
else:
self.fpf = feedparser.parse(self.feed.feed_address,
agent=USER_AGENT,
etag=etag,
modified=modified)
if self.options['verbose'] and getattr(self.fpf, 'status', None) == 200:
logging.debug(u' ---> [%-30s] ~FBTIME: feed fetch in ~FM%.4ss' % (
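
This is the consumer of the new 'fpf' option: when a fat ping has already delivered and parsed the feed, FetchFeed reuses that result instead of issuing another conditional GET against feed_address. Reduced to a standalone sketch (function and parameter names assumed):

    import feedparser

    def fetch_or_reuse(feed_address, options, etag=None, modified=None, agent=None):
        # Real-time path: a PuSH fat ping already handed us the parsed document.
        if options.get('fpf'):
            return options['fpf']
        # Polling path: fall back to a normal conditional fetch.
        return feedparser.parse(feed_address,
                                agent=agent,
                                etag=etag,
                                modified=modified)
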
@@ -240,7 +245,7 @@ class ProcessFeed:
hub_url = link['href']
elif link['rel'] == 'self':
self_url = link['href']
if hub_url and self_url:
if hub_url and self_url and not settings.DEBUG:
logging.debug(u' ---> [%-30s] ~BB~SK~FWSubscribing to PuSH hub: %s' % (
unicode(self.feed)[:30], hub_url))
PushSubscription.objects.subscribe(self_url, feed=self.feed, hub=hub_url)
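
The hub and topic URLs handed to subscribe() are discovered from the parsed feed's link elements, as the surrounding hunk shows; the new settings.DEBUG check skips the subscription in development, presumably because a public hub cannot reach a development machine's callback URL. A compact version of that discovery step (function name assumed):

    def discover_push_links(parsed):
        # Look for <link rel="hub"> and <link rel="self"> in the parsed feed.
        hub_url = self_url = None
        for link in parsed.feed.get('links', []):
            if link.get('rel') == 'hub':
                hub_url = link.get('href')
            elif link.get('rel') == 'self':
                self_url = link.get('href')
        return hub_url, self_url
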
@@ -314,7 +319,8 @@ class Dispatcher:
weight = "-"
quick = "-"
rand = "-"
elif self.options.get('quick') and not self.options['force'] and feed.known_good and feed.fetched_once:
elif (self.options.get('quick') and not self.options['force'] and
feed.known_good and feed.fetched_once and not feed.is_push):
weight = feed.stories_last_month * feed.num_subscribers
random_weight = random.randint(1, max(weight, 1))
quick = float(self.options['quick'])
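
The last fetcher change widens the quick-mode condition: only feeds that are known good, already fetched at least once, and now also not push-enabled are eligible for the probabilistic skip, so push feeds stay outside this sampling. The hunk ends before the comparison that turns weight, random_weight and quick into a decision, so the cutoff below is purely illustrative:

    import random

    def quick_mode_skip(feed, options, weight_cutoff=1000):
        # Only long-standing, non-push feeds are candidates for skipping.
        if not (options.get('quick') and not options['force'] and
                feed.known_good and feed.fetched_once and not feed.is_push):
            return False
        weight = feed.stories_last_month * feed.num_subscribers
        random_weight = random.randint(1, max(weight, 1))
        quick = float(options['quick'])
        # Illustrative decision: skip lighter feeds with probability `quick`.
        return random_weight < weight_cutoff and random.random() < quick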

View file

@@ -1,6 +1,5 @@
from django.utils.dateformat import DateFormat
import datetime
from django.utils.http import urlquote
from django.conf import settings
from itertools import chain
@@ -59,21 +58,21 @@ def pre_process_story(entry):
publish_date = entry.get('published_parsed', entry.get('updated_parsed'))
entry['published'] = datetime.datetime(*publish_date[:6]) if publish_date else datetime.datetime.utcnow()
entry_link = entry.get('link') or ''
protocol_index = entry_link.find("://")
if protocol_index != -1:
entry['link'] = (entry_link[:protocol_index+3]
+ urlquote(entry_link[protocol_index+3:]))
else:
entry['link'] = urlquote(entry_link)
# entry_link = entry.get('link') or ''
# protocol_index = entry_link.find("://")
# if protocol_index != -1:
# entry['link'] = (entry_link[:protocol_index+3]
# + urlquote(entry_link[protocol_index+3:]))
# else:
# entry['link'] = urlquote(entry_link)
if isinstance(entry.get('guid'), dict):
entry['guid'] = unicode(entry['guid'])
# Normalize story content/summary
if entry.get('content'):
entry['story_content'] = entry['content'][0].get('value', '')
entry['story_content'] = entry['content'][0].get('value', '').strip()
else:
entry['story_content'] = entry.get('summary', '')
entry['story_content'] = entry.get('summary', '').strip()
# Add each media enclosure as a Download link
for media_content in chain(entry.get('media_content', [])[:5], entry.get('links', [])[:5]):
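
This last file is where the double-encoding fix from the commit message lives. pre_process_story used to pass every entry link through urlquote; for publishers that already percent-encode their permalinks, that second pass turns each % into %25 (so %20 becomes %2520) and the stored permalink no longer matches the real URL. The commit comments the whole block out and drops the now-unused urlquote import, and the same hunk also strips leading and trailing whitespace from story content. A tiny demonstration of the failure mode the removed code caused (the URL is a made-up example):

    from django.utils.http import urlquote

    already_encoded = 'http://example.com/2012/03/some%20story.html'

    # What the removed block effectively did: re-quote everything after the scheme.
    protocol_index = already_encoded.find('://')
    requoted = (already_encoded[:protocol_index + 3] +
                urlquote(already_encoded[protocol_index + 3:]))

    print(requoted)
    # http://example.com/2012/03/some%2520story.html
    # The '%' itself was escaped, so the permalink comes out double-encoded.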