From 753db6bcaebc4f86274fb2ef2032ae14eb257531 Mon Sep 17 00:00:00 2001
From: Samuel Clay
Date: Sat, 2 Mar 2024 08:37:20 -0500
Subject: [PATCH] Updating certs.

---
 .gitignore                           |  1 +
 docker/haproxy/haproxy.consul.cfg.j2 |  3 +-
 docker/haproxy/haproxy.staging.cfg   |  2 +-
 newsblur_web/settings.py             |  4 +-
 utils/feed_fetcher.py                | 62 ++++++++++++++++------------
 5 files changed, 43 insertions(+), 29 deletions(-)

diff --git a/.gitignore b/.gitignore
index 6a7be83b6..862b89555 100644
--- a/.gitignore
+++ b/.gitignore
@@ -69,3 +69,4 @@ media/safari/NewsBlur.safariextz
 *.tfstate*
 .terraform*
 grafana.ini
+apps/api/ip_addresses.txt

diff --git a/docker/haproxy/haproxy.consul.cfg.j2 b/docker/haproxy/haproxy.consul.cfg.j2
index 62fdb766b..77c0097ea 100644
--- a/docker/haproxy/haproxy.consul.cfg.j2
+++ b/docker/haproxy/haproxy.consul.cfg.j2
@@ -38,7 +38,8 @@ defaults

 frontend public
     bind :80
-    bind :443 ssl crt /srv/newsblur/config/certificates/newsblur.com.pem ciphers ECDHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-SHA256:ECDHE-RSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES256-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES128-SHA:AES256-SHA256:AES256-SHA no-sslv3
+    bind :443 ssl crt /srv/newsblur/config/certificates/newsblur.com.pem ciphers ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:DHE-RSA-CHACHA20-POLY1305
+    http-response add-header Strict-Transport-Security max-age=0;\ includeSubDomains
     option http-server-close

diff --git a/docker/haproxy/haproxy.staging.cfg b/docker/haproxy/haproxy.staging.cfg
index a037e54aa..b075ada5f 100644
--- a/docker/haproxy/haproxy.staging.cfg
+++ b/docker/haproxy/haproxy.staging.cfg
@@ -32,7 +32,7 @@ defaults

 frontend public
     bind :80
-    bind :443 ssl crt /srv/newsblur/config/certificates/newsblur.com.pem ciphers ECDHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-SHA256:ECDHE-RSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES256-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES128-SHA:AES256-SHA256:AES256-SHA no-sslv3
+    bind :443 ssl crt /srv/newsblur/config/certificates/newsblur.com.pem ciphers ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:DHE-RSA-CHACHA20-POLY1305
     http-response add-header Strict-Transport-Security max-age=0;\ includeSubDomains
     option http-server-close

diff --git a/newsblur_web/settings.py b/newsblur_web/settings.py
index 892326766..046b59782 100644
--- a/newsblur_web/settings.py
+++ b/newsblur_web/settings.py
@@ -618,7 +618,9 @@ except ModuleNotFoundError:

 if not started_task_or_app:
     print(" ---> Starting NewsBlur development server...")
-if "task-work" in SERVER_NAME or SERVER_NAME.startswith("task-"):
+if DOCKERBUILD:
+    CELERY_WORKER_CONCURRENCY = 2
+elif "task-work" in SERVER_NAME or SERVER_NAME.startswith("task-"):
     CELERY_WORKER_CONCURRENCY = 4
 else:
     CELERY_WORKER_CONCURRENCY = 24

diff --git a/utils/feed_fetcher.py b/utils/feed_fetcher.py
index 52e3b0ece..b4cca52e9 100644
--- a/utils/feed_fetcher.py
+++ b/utils/feed_fetcher.py
@@ -1,58 +1,63 @@
-import time
 import datetime
-import traceback
 import multiprocessing
+import time
+import traceback

 import django

 django.setup()
-import urllib.request, urllib.error, urllib.parse
-import http, http.client
+import http
+import http.client
+import urllib.error
+import urllib.parse
+import urllib.request

 http.client._MAXHEADERS = 10000

-import xml.sax
-import redis
 import random
-import pymongo
 import re
-import requests
+import xml.sax
+
 import dateutil.parser
+import feedparser
 import isodate
+import pymongo
+import redis
+import requests
 from django.conf import settings
-from django.db import IntegrityError
 from django.core.cache import cache
+from django.db import IntegrityError
 from sentry_sdk import set_user
+
+from apps.notifications.models import MUserFeedNotification
+from apps.notifications.tasks import QueueNotifications
+from apps.push.models import PushSubscription
 from apps.reader.models import UserSubscription
+from apps.rss_feeds.icon_importer import IconImporter
 from apps.rss_feeds.models import Feed, MStory
 from apps.rss_feeds.page_importer import PageImporter
-from apps.rss_feeds.icon_importer import IconImporter
-from apps.notifications.tasks import QueueNotifications
-from apps.notifications.models import MUserFeedNotification
-from apps.push.models import PushSubscription
 from apps.statistics.models import MAnalyticsFetcher, MStatistics
-import feedparser
-
 feedparser.sanitizer._HTMLSanitizer.acceptable_elements.update(['iframe'])
 feedparser.sanitizer._HTMLSanitizer.acceptable_elements.update(['text'])
-from utils.story_functions import pre_process_story, strip_tags, linkify
-from utils import log as logging
-from utils.feed_functions import timelimit, TimeoutError
-from sentry_sdk import capture_exception, flush
-from qurl import qurl
 from bs4 import BeautifulSoup
-from mongoengine import connect, connection
-from django.utils import feedgenerator
-from django.utils.html import linebreaks
-from django.utils.encoding import smart_str
-from utils import json_functions as json
 from celery.exceptions import SoftTimeLimitExceeded
-from utils.twitter_fetcher import TwitterFetcher
+from django.utils import feedgenerator
+from django.utils.encoding import smart_str
+from django.utils.html import linebreaks
+from mongoengine import connect, connection
+from qurl import qurl
+from sentry_sdk import capture_exception, flush
+
+from utils import json_functions as json
+from utils import log as logging
 from utils.facebook_fetcher import FacebookFetcher
+from utils.feed_functions import TimeoutError, timelimit
 from utils.json_fetcher import JSONFetcher
+from utils.story_functions import linkify, pre_process_story, strip_tags
+from utils.twitter_fetcher import TwitterFetcher

 # from utils.feed_functions import mail_feed_error_to_admin
@@ -321,6 +326,11 @@ class FetchFeed:
                 username = username_groups.group(1)
             except IndexError:
                 return
+        elif 'youtube.com/@' in address:
+            try:
+                username = address.split('youtube.com/@')[1]
+            except IndexError:
+                return
         elif 'youtube.com/feeds/videos.xml?user=' in address:
             try:
                 username = urllib.parse.parse_qs(urllib.parse.urlparse(address).query)['user'][0]
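
Illustration (not part of the patch): the new youtube.com/@ branch in utils/feed_fetcher.py takes everything after the "@" as the channel username. A minimal standalone sketch of that parsing follows; the helper name and the trimming of a trailing path or query string are assumptions for the example, not code from the repository.

def youtube_username_from_address(address):
    # Mirrors the @handle branch added above: split on "youtube.com/@" and keep the remainder.
    if "youtube.com/@" not in address:
        return None
    handle = address.split("youtube.com/@")[1]
    # Assumption: drop any trailing path or query params ("/videos", "?si=...").
    return handle.split("/")[0].split("?")[0] or None

# Example:
# youtube_username_from_address("https://www.youtube.com/@newsblur/videos")  -> "newsblur"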