Merge branch 'master' into social

* master:
  Being more aggressive in determining if a URL is a feed.
  Increasing modal z-index to battle Svbtle blogs sidebar.
  Handle cases of user reading both old and new guid stories.
  Checking for a signup that is really a login when importing GR.
This commit is contained in:
Samuel Clay 2012-03-21 14:42:42 -07:00
commit f32dc6bd1c
4 changed files with 12 additions and 6 deletions

View file

@@ -193,7 +193,7 @@ def import_signup(request):
if signup_form.is_valid():
new_user = signup_form.save()
user_token = None
user_token = OAuthToken.objects.filter(user=new_user)
if not user_token:
user_uuid = request.COOKIES.get('newsblur_reader_uuid')
if user_uuid:

View file

@@ -783,8 +783,12 @@ class Feed(models.Model):
read_stories = MUserStory.objects.filter(feed_id=self.pk, story_id=old_story_guid)
for story in read_stories:
story.story_id = new_story_guid
story.save()
try:
story.save()
except OperationError:
# User read both new and old. Just toss.
pass
def save_popular_tags(self, feed_tags=None, verbose=False):
if not feed_tags:
all_tags = MStory.objects(story_feed_id=self.pk, story_tags__exists=True).item_frequencies('story_tags')

View file

@@ -186,7 +186,7 @@
maxWidth: null,
autoResize: true,
autoPosition: true,
zIndex: 1000,
zIndex: 10000,
close: true,
closeHTML: '<a class="modalCloseImg" title="Close"></a>',
closeClass: 'simplemodal-close',

View file

@@ -47,6 +47,7 @@ Also Jason Diamond, Brian Lalor for bug reporting and patches"""
_debug = 0
import sgmllib, urllib, urlparse, re, sys, robotparser
import requests
from StringIO import StringIO
from lxml import etree
@@ -75,7 +76,8 @@ class URLGatekeeper:
self.urlopener = urllib.FancyURLopener()
self.urlopener.version = "NewsBlur Feed Finder (Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_1) AppleWebKit/534.48.3 (KHTML, like Gecko) Version/5.1 Safari/534.48.3)"
_debuglog(self.urlopener.version)
self.urlopener.addheaders = [('User-agent', self.urlopener.version), ('Accept', '*')]
self.urlopener.addheaders = [('User-Agent', self.urlopener.version)]
# self.urlopener.addheaders = [('User-Agent', self.urlopener.version), ('Accept', '*')]
robotparser.URLopener.version = self.urlopener.version
robotparser.URLopener.addheaders = self.urlopener.addheaders
@@ -103,7 +105,7 @@ class URLGatekeeper:
def get(self, url, check=True):
if check and not self.can_fetch(url): return ''
try:
return self.urlopener.open(url).read()
return requests.get(url, headers=dict(self.urlopener.addheaders)).content
except:
return ''