Mirror of https://github.com/samuelclay/NewsBlur.git, synced 2025-04-13 09:42:01 +00:00
Everything is smart bytes when it comes to original pages.
This commit is contained in:
parent aac3fba63d, commit c6ce8cc36a
4 changed files with 11 additions and 9 deletions
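
The change is mechanical: every zlib.compress() call site that previously passed a raw str, or called .encode('utf-8') itself, now goes through django.utils.encoding.smart_bytes. A minimal sketch (not part of the commit) of why that matters under Python 3: zlib.compress() accepts only bytes, and smart_bytes() handles both str and bytes input, whereas .encode('utf-8') breaks if the content is already bytes.

import zlib

from django.utils.encoding import smart_bytes

# zlib.compress() accepts only bytes. smart_bytes() encodes str to UTF-8 and
# passes data that is already bytes through unchanged, so either input works.
for content in ["<p>café and other unicode text</p>", b"<p>already bytes</p>"]:
    compressed = zlib.compress(smart_bytes(content))
    print(type(content).__name__, len(compressed))

# By contrast, html.encode('utf-8') raises AttributeError when html is bytes,
# and zlib.compress(html) raises TypeError when html is str.
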
@@ -2943,10 +2943,10 @@ class MStarredStory(mongo.DynamicDocument):
     def save(self, *args, **kwargs):
         if self.story_content:
-            self.story_content_z = zlib.compress(self.story_content)
+            self.story_content_z = zlib.compress(smart_bytes(self.story_content))
             self.story_content = None
         if self.story_original_content:
-            self.story_original_content_z = zlib.compress(self.story_original_content)
+            self.story_original_content_z = zlib.compress(smart_bytes(self.story_original_content))
             self.story_original_content = None
         self.story_hash = self.feed_guid_hash
         self.starred_updated = datetime.datetime.now()

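For orientation, a simplified sketch of the compress-on-save pattern used in this hunk (a hypothetical cut-down model, not the real MStarredStory definition): the plain-text field is compressed into its *_z counterpart via smart_bytes and then cleared, so only the compressed copy persists.

import datetime
import zlib

import mongoengine as mongo
from django.utils.encoding import smart_bytes

class StarredStorySketch(mongo.DynamicDocument):
    # Hypothetical cut-down fields; the real model has many more.
    story_content = mongo.StringField()
    story_content_z = mongo.BinaryField()
    starred_updated = mongo.DateTimeField()

    def save(self, *args, **kwargs):
        if self.story_content:
            # smart_bytes() tolerates both str and bytes input.
            self.story_content_z = zlib.compress(smart_bytes(self.story_content))
            self.story_content = None
        self.starred_updated = datetime.datetime.now()
        return super().save(*args, **kwargs)
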
@@ -7,6 +7,7 @@ import urllib.request, urllib.error, urllib.parse
 import http.client
 import zlib
 from django.contrib.sites.models import Site
+from django.utils.encoding import smart_bytes
 from mongoengine.queryset import NotUniqueError
 from socket import error as SocketError
 from boto.s3.key import Key

@@ -227,7 +228,7 @@ class PageImporter(object):
         return html

     def save_story(self, html):
-        self.story.original_page_z = zlib.compress(html.encode('utf-8'))
+        self.story.original_page_z = zlib.compress(smart_bytes(html))
         try:
             self.story.save()
         except NotUniqueError:

@@ -297,11 +298,11 @@ class PageImporter(object):
                     logging.debug(' ---> [%-30s] ~FYNo change in page data: %s' % (self.feed.log_title[:30], self.feed.feed_link))
                 else:
                     # logging.debug(' ---> [%-30s] ~FYChange in page data: %s (%s/%s %s/%s)' % (self.feed.log_title[:30], self.feed.feed_link, type(html), type(feed_page.page()), len(html), len(feed_page.page())))
-                    feed_page.page_data = zlib.compress(html.encode('utf-8'))
+                    feed_page.page_data = zlib.compress(smart_bytes(html))
                     feed_page.save()
             except MFeedPage.DoesNotExist:
                 feed_page = MFeedPage.objects.create(feed_id=self.feed.pk,
-                                                     page_data=zlib.compress(html.encode('utf-8')))
+                                                     page_data=zlib.compress(smart_bytes(html)))
             return feed_page

     def save_page_node(self, html):

@@ -310,7 +311,7 @@ class PageImporter(object):
             domain,
             self.feed.pk,
         )
-        compressed_html = zlib.compress(html.encode('utf-8'))
+        compressed_html = zlib.compress(smart_bytes(html))
         response = requests.post(url, files={
             'original_page': compressed_html,
             # 'original_page': html,

@@ -153,7 +153,7 @@ class TextImporter:

         if content and len(content) > len(original_story_content):
             if self.story and not skip_save:
-                self.story.original_text_z = zlib.compress(smart_str(content).encode())
+                self.story.original_text_z = zlib.compress(smart_bytes(content))
                 try:
                     self.story.save()
                 except NotUniqueError as e:

@@ -23,6 +23,7 @@ from django.urls import reverse
 from django.template.loader import render_to_string
 from django.template.defaultfilters import slugify
 from django.core.mail import EmailMultiAlternatives
+from django.utils.encoding import smart_bytes
 from apps.reader.models import UserSubscription, RUserStory
 from apps.analyzer.models import MClassifierFeed, MClassifierAuthor, MClassifierTag, MClassifierTitle
 from apps.analyzer.models import apply_classifier_titles, apply_classifier_feeds, apply_classifier_authors, apply_classifier_tags

@@ -1514,10 +1515,10 @@ class MSharedStory(mongo.DynamicDocument):

         if self.story_content:
             self.story_content = scrubber.scrub(self.story_content)
-            self.story_content_z = zlib.compress(self.story_content)
+            self.story_content_z = zlib.compress(smart_bytes(self.story_content))
             self.story_content = None
         if self.story_original_content:
-            self.story_original_content_z = zlib.compress(self.story_original_content)
+            self.story_original_content_z = zlib.compress(smart_bytes(self.story_original_content))
             self.story_original_content = None

         self.story_guid_hash = self.guid_hash
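
A hedged usage sketch (helper names are hypothetical, not from the commit) of the round trip these fields rely on: compress with smart_bytes when saving, then decompress and decode back to str with smart_str when reading.

import zlib

from django.utils.encoding import smart_bytes, smart_str

def compress_content(content):
    # Works whether the scraped page arrived as str or bytes.
    return zlib.compress(smart_bytes(content))

def decompress_content(blob):
    # Inverse of the above: decompress, then decode back to str.
    return smart_str(zlib.decompress(blob))

original = "<html><body>Original page</body></html>"
assert decompress_content(compress_content(original)) == original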