Merge master into sictiru

This commit is contained in:
sictiru 2022-02-16 21:16:24 -08:00
parent 18361d682c
commit df6a225da2
202 changed files with 20269 additions and 7859 deletions

3
.gitignore vendored
View file

@@ -43,7 +43,8 @@ templates/maintenance_on.html
vendor/mms-agent/settings.py
apps/social/spam.py
venv*
/backups
backup
backups
config/mongodb_keyfile.key
# Docker Jinja templates

View file

@@ -14,11 +14,12 @@
"media/ios": true,
"**/*.map": true,
"ansible/playbooks/*/*": true,
"archive/*": true,
// "archive/*": true,
"logs/*": true,
// "static/*": true,
"media/fonts": true,
"static/*.css": true,
"static/*.js": true,
"static/js/*.*.js": true,
"blog/.jekyll-cache": true,
"blog/_site": true,
"docker/volumes": true,

View file

@@ -7,25 +7,31 @@ newsblur := $(shell docker ps -qf "name=newsblur_web")
#creates newsblur, but does not rebuild images or create keys
start:
- RUNWITHMAKEBUILD=True CURRENT_UID=${CURRENT_UID} CURRENT_GID=${CURRENT_GID} docker-compose up -d
- RUNWITHMAKEBUILD=True CURRENT_UID=${CURRENT_UID} CURRENT_GID=${CURRENT_GID} docker compose up -d
metrics:
- RUNWITHMAKEBUILD=True CURRENT_UID=${CURRENT_UID} CURRENT_GID=${CURRENT_GID} docker-compose -f docker-compose.yml -f docker-compose.metrics.yml up -d
- RUNWITHMAKEBUILD=True CURRENT_UID=${CURRENT_UID} CURRENT_GID=${CURRENT_GID} docker compose -f docker-compose.yml -f docker-compose.metrics.yml up -d
metrics-ps:
- RUNWITHMAKEBUILD=True docker-compose -f docker-compose.yml -f docker-compose.metrics.yml ps
- RUNWITHMAKEBUILD=True docker compose -f docker-compose.yml -f docker-compose.metrics.yml ps
rebuild:
- RUNWITHMAKEBUILD=True CURRENT_UID=${CURRENT_UID} CURRENT_GID=${CURRENT_GID} docker-compose down
- RUNWITHMAKEBUILD=True CURRENT_UID=${CURRENT_UID} CURRENT_GID=${CURRENT_GID} docker-compose up -d
- RUNWITHMAKEBUILD=True CURRENT_UID=${CURRENT_UID} CURRENT_GID=${CURRENT_GID} docker compose down
- RUNWITHMAKEBUILD=True CURRENT_UID=${CURRENT_UID} CURRENT_GID=${CURRENT_GID} docker compose up -d
collectstatic:
- rm -fr static
- docker run --rm -v $(shell pwd):/srv/newsblur newsblur/newsblur_deploy
#creates newsblur, builds new images, and creates/refreshes SSL keys
nb: pull
- RUNWITHMAKEBUILD=True CURRENT_UID=${CURRENT_UID} CURRENT_GID=${CURRENT_GID} docker-compose down
- RUNWITHMAKEBUILD=True CURRENT_UID=${CURRENT_UID} CURRENT_GID=${CURRENT_GID} docker compose down
- [[ -d config/certificates ]] && echo "keys exist" || make keys
- RUNWITHMAKEBUILD=True CURRENT_UID=${CURRENT_UID} CURRENT_GID=${CURRENT_GID} docker-compose up -d --build --remove-orphans
- RUNWITHMAKEBUILD=True docker-compose exec newsblur_web ./manage.py migrate
- RUNWITHMAKEBUILD=True docker-compose exec newsblur_web ./manage.py loaddata config/fixtures/bootstrap.json
- RUNWITHMAKEBUILD=True CURRENT_UID=${CURRENT_UID} CURRENT_GID=${CURRENT_GID} docker compose up -d --build --remove-orphans
- docker exec newsblur_web ./manage.py migrate
- docker exec newsblur_web ./manage.py loaddata config/fixtures/bootstrap.json
nbup:
- RUNWITHMAKEBUILD=True CURRENT_UID=${CURRENT_UID} CURRENT_GID=${CURRENT_GID} docker compose up -d --build --remove-orphans
coffee:
- coffee -c -w **/*.coffee
@@ -37,19 +43,19 @@ bash:
debug:
- RUNWITHMAKEBUILD=True CURRENT_UID=${CURRENT_UID} CURRENT_GID=${CURRENT_GID} docker attach ${newsblur}
log:
- RUNWITHMAKEBUILD=True docker-compose logs -f --tail 20 newsblur_web newsblur_node
- RUNWITHMAKEBUILD=True docker compose logs -f --tail 20 newsblur_web newsblur_node
logweb: log
logcelery:
- RUNWITHMAKEBUILD=True docker-compose logs -f --tail 20 task_celery
- RUNWITHMAKEBUILD=True docker compose logs -f --tail 20 task_celery
logtask: logcelery
logmongo:
- RUNWITHMAKEBUILD=True docker-compose logs -f db_mongo
- RUNWITHMAKEBUILD=True docker compose logs -f db_mongo
alllogs:
- RUNWITHMAKEBUILD=True docker-compose logs -f --tail 20
- RUNWITHMAKEBUILD=True docker compose logs -f --tail 20
logall: alllogs
# brings down containers
down:
- RUNWITHMAKEBUILD=True docker-compose -f docker-compose.yml -f docker-compose.metrics.yml down
- RUNWITHMAKEBUILD=True docker compose -f docker-compose.yml -f docker-compose.metrics.yml down
nbdown: down
jekyll:
- cd blog && bundle exec jekyll serve
@@ -58,8 +64,8 @@ jekyll_drafts:
# runs tests
test:
- RUNWITHMAKEBUILD=True CURRENT_UID=${CURRENT_UID} CURRENT_GID=${CURRENT_GID} TEST=True docker-compose -f docker-compose.yml up -d newsblur_web
- RUNWITHMAKEBUILD=True CURRENT_UID=${CURRENT_UID} CURRENT_GID=${CURRENT_GID} docker-compose exec newsblur_web bash -c "NOSE_EXCLUDE_DIRS=./vendor DJANGO_SETTINGS_MODULE=newsblur_web.test_settings python3 manage.py test -v 3 --failfast"
- RUNWITHMAKEBUILD=True CURRENT_UID=${CURRENT_UID} CURRENT_GID=${CURRENT_GID} TEST=True docker compose -f docker-compose.yml up -d newsblur_web
- RUNWITHMAKEBUILD=True CURRENT_UID=${CURRENT_UID} CURRENT_GID=${CURRENT_GID} docker compose exec newsblur_web bash -c "NOSE_EXCLUDE_DIRS=./vendor DJANGO_SETTINGS_MODULE=newsblur_web.test_settings python3 manage.py test -v 3 --failfast"
keys:
- mkdir config/certificates
@@ -98,20 +104,27 @@ pull:
- docker pull newsblur/newsblur_node
- docker pull newsblur/newsblur_monitor
local_build_web:
# - docker buildx build --load . --file=docker/newsblur_base_image.Dockerfile --tag=newsblur/newsblur_python3
- docker build . --file=docker/newsblur_base_image.Dockerfile --tag=newsblur/newsblur_python3
build_web:
- docker image build . --platform linux/amd64 --file=docker/newsblur_base_image.Dockerfile --tag=newsblur/newsblur_python3
- docker buildx build . --platform linux/amd64,linux/arm64 --file=docker/newsblur_base_image.Dockerfile --tag=newsblur/newsblur_python3
build_node:
- docker image build . --platform linux/amd64 --file=docker/node/Dockerfile --tag=newsblur/newsblur_node
- docker buildx build . --platform linux/amd64,linux/arm64 --file=docker/node/Dockerfile --tag=newsblur/newsblur_node
build_monitor:
- docker image build . --platform linux/amd64 --file=docker/monitor/Dockerfile --tag=newsblur/newsblur_monitor
build: build_web build_node build_monitor
push_web: build_web
- docker push newsblur/newsblur_python3
push_node: build_node
- docker push newsblur/newsblur_node
push_monitor: build_monitor
- docker push newsblur/newsblur_monitor
push_images: push_web push_node push_monitor
- docker buildx build . --platform linux/amd64,linux/arm64 --file=docker/monitor/Dockerfile --tag=newsblur/newsblur_monitor
build_deploy:
- docker buildx build . --platform linux/amd64,linux/arm64 --file=docker/newsblur_deploy.Dockerfile --tag=newsblur/newsblur_deploy
build: build_web build_node build_monitor build_deploy
push_web:
- docker buildx build . --push --platform linux/amd64,linux/arm64 --file=docker/newsblur_base_image.Dockerfile --tag=newsblur/newsblur_python3
push_node:
- docker buildx build . --push --platform linux/amd64,linux/arm64 --file=docker/node/Dockerfile --tag=newsblur/newsblur_node
push_monitor:
- docker buildx build . --push --platform linux/amd64,linux/arm64 --file=docker/monitor/Dockerfile --tag=newsblur/newsblur_monitor
push_deploy:
- docker buildx build . --push --platform linux/amd64,linux/arm64 --file=docker/newsblur_deploy.Dockerfile --tag=newsblur/newsblur_deploy
push_images: push_web push_node push_monitor push_deploy
push: build push_images
# Tasks

View file

@@ -7,6 +7,7 @@ private_key_file = /srv/secrets-newsblur/keys/docker.key
remote_tmp = ~/.ansible/tmp
forks = 20
interpreter_python = python3
stdout_callback = debug
[inventory]
enable_plugins = ini, constructed

View file

@@ -1,6 +1,6 @@
---
- import_playbook: playbooks/deploy_app.yml
when: "'app' in group_names"
when: "'app' in group_names or 'staging' in group_names"
- import_playbook: playbooks/deploy_www.yml
when: "'haproxy' in group_names"
- import_playbook: playbooks/deploy_node.yml

View file

@@ -19,15 +19,31 @@
-H 'Content-Type: application/json' \
-d '{"version": "{{ lookup('pipe', 'date "+%Y-%m-%d %H:%M:%S"') }}"}'
- name: Cleanup static assets before compression
run_once: yes
connection: local
file:
state: absent
path: /srv/newsblur/static
tags:
- never
- static
- name: Updating NewsBlur Deploy container
run_once: yes
connection: local
command: chdir=/srv/newsblur docker pull newsblur/newsblur_deploy
tags:
- never
- static
- name: Compressing JS/CSS assets
run_once: yes
connection: local
command: chdir=/srv/newsblur jammit -c /srv/newsblur/newsblur_web/assets.yml --base-url https://www.newsblur.com --output /srv/newsblur/static
command: chdir=/srv/newsblur docker run --rm -v /srv/newsblur:/srv/newsblur newsblur/newsblur_deploy
tags:
- never
- static
- jammit
- name: Archive JS/CSS assets for uploading
run_once: yes
@@ -39,11 +55,22 @@
- never
- static
- name: Ensure AWS dependencies installed
run_once: yes
connection: local
pip:
name:
- boto3
- botocore
tags:
- never
- static
- name: Uploading JS/CSS assets to S3
run_once: yes
connection: local
amazon.aws.aws_s3:
bucket: newsblur_backups
bucket: newsblur-backups
object: /static_py3.tgz
src: /srv/newsblur/static.tgz
mode: put
@@ -67,9 +94,9 @@
vars:
ansible_python_interpreter: /usr/bin/python3
amazon.aws.aws_s3:
bucket: newsblur_backups
bucket: newsblur-backups
object: /static_py3.tgz
dest: /srv/newsblur/static/static.tgz
dest: /srv/newsblur/static.tgz
mode: get
overwrite: different
aws_access_key: "{{ lookup('ini', 'aws_access_key_id section=default file=/srv/secrets-newsblur/keys/aws.s3.token') }}"

View file

@@ -8,6 +8,7 @@
tasks:
- name: Update Sentry release
connection: local
run_once: yes
shell: >
curl {{ sentry_task_release_webhook }}/ \
-X POST \

View file

@@ -1,7 +1,7 @@
---
- name: SETUP -> app containers
hosts: web
serial: "50%"
# serial: "50%"
vars_files:
- ../env_vars/base.yml
vars:

View file

@@ -1,6 +1,6 @@
---
- name: SETUP -> www containers
hosts: www
hosts: haproxy
vars:
- update_apt_cache: yes
- motd_role: app

View file

@@ -17,14 +17,14 @@
- name: Set backup vars
set_fact:
redis_story_filename: backup_redis_story_2021-04-13-04-00.rdb.gz
postgres_filename: backup_postgresql_2022-01-06-19-46.sql.gz
postgres_filename: backup_postgresql_2022-02-03-04-00.sql.gz
mongo_filename: backup_mongo_2021-03-15-04-00.tgz
redis_filename: backup_redis_2021-03-15-04-00.rdb.gz
tags: never, restore_postgres, restore_mongo, restore_redis, restore_redis_story
- name: Download archives
amazon.aws.aws_s3:
bucket: newsblur_backups
bucket: "newsblur-backups"
object: "{{ item.dir }}{{ item.file }}"
dest: "/srv/newsblur/backups/{{ item.file }}"
mode: get

View file

@@ -48,6 +48,7 @@
timeout: 10s
retries: 3
start_period: 30s
user: 1000:1001
volumes:
- /srv/newsblur:/srv/newsblur
- /etc/hosts:/etc/hosts

View file

@@ -1,4 +1,4 @@
#!/srv/newsblur/venv/newsblur3/bin/python
#!/usr/bin/env python
import os
import digitalocean

View file

@@ -13,8 +13,9 @@
- name: Installing Consul
become: yes
apt:
pkg: consul
state: latest
allow_downgrades: yes
pkg: consul=1.10.4
state: present
- name: Register Manager IP
run_once: yes

View file

@@ -58,9 +58,16 @@
- name: Make backup directory
become: yes
file:
path: /opt/mongo/newsblur/backup/
path: "/mnt/{{ inventory_hostname | regex_replace('db-|-', '') }}/backup/"
state: directory
mode: 0666
mode: 0777
- name: Create symlink to mounted volume for backups to live
file:
state: link
src: "/mnt/{{ inventory_hostname | regex_replace('db-|-', '') }}/backup"
path: /srv/newsblur/backup
force: yes
- name: Start db-mongo docker container
become: yes
@@ -86,7 +93,7 @@
- /srv/newsblur/ansible/roles/mongo/templates/mongo.conf:/etc/mongod.conf
- /srv/newsblur/config/mongodb_keyfile.key:/srv/newsblur/config/mongodb_keyfile.key
- /var/log/mongodb/:/var/log/mongodb/
- /opt/mongo/newsblur/backup/:/backup/
- /mnt/{{ inventory_hostname | regex_replace('db-|-', '') }}/backup/:/backup/
when: (inventory_hostname | regex_replace('[0-9]+', '')) in ['db-mongo', 'db-mongo-primary', 'db-mongo-secondary']
- name: Start db-mongo-analytics docker container
@@ -114,7 +121,7 @@
- /srv/newsblur/ansible/roles/mongo/templates/mongo.analytics.conf:/etc/mongod.conf
- /srv/newsblur/config/mongodb_keyfile.key:/srv/newsblur/config/mongodb_keyfile.key
- /var/log/mongodb/:/var/log/mongodb/
- /opt/mongo/newsblur/backup/:/backup/
- /mnt/{{ inventory_hostname | regex_replace('db-|-', '') }}/backup/:/backup/
when: (inventory_hostname | regex_replace('[0-9]+', '')) == 'db-mongo-analytics'
- name: Create mongo database user
@@ -185,12 +192,18 @@
docker run --rm -it
OUTPUT=$(eval sudo df / | head -n 2 | tail -1);
-v /srv/newsblur:/srv/newsblur
--network=newsblurnet
--network=host
--hostname {{ ansible_hostname }}
newsblur/newsblur_python3 /srv/newsblur/utils/monitor_disk_usage.py $OUTPUT
tags:
- sanity-checker
- name: Copy common secrets
copy:
src: /srv/secrets-newsblur/settings/common_settings.py
dest: /srv/newsblur/newsblur_web/local_settings.py
register: app_changed
- name: Add mongo backup
cron:
name: mongo backup
@@ -201,19 +214,21 @@
tags:
- mongo-backup
- name: Add mongo starred_stories+stories backup
cron:
name: mongo starred/shared/all stories backup
minute: "0"
hour: "5"
job: /srv/newsblur/docker/mongo/backup_mongo_stories.sh
when: '"db-mongo-secondary1" in inventory_hostname'
tags:
- mongo-backup
# - name: Add mongo starred_stories+stories backup
# cron:
# name: mongo starred/shared/all stories backup
# minute: "0"
# hour: "5"
# job: /srv/newsblur/docker/mongo/backup_mongo.sh stories
# when: '"db-mongo-secondary1" in inventory_hostname'
# tags:
# - mongo-backup
# Renaming a db-mongo3 to db-mongo2:
# - Change hostname to db-mongo2 on Digital Ocean (doctl)
# - Change hostname to db-mongo2 in /etc/hostname
# Renaming a db-mongo-primary3 to db-mongo-primary2:
# - Change hostname to db-mongo-primary2 on Digital Ocean
# - make list; doctl compute droplet-action rename <id> --droplet-name db-mongo-primary2
# - Change hostname to db-mongo-primary2 in /etc/hostname
# - make inventory
# - Symlink /mnt/mongo2 to /mnt/mongo3
# - tf state mv "digitalocean_droplet.db-mongo-primary[2]" "digitalocean_droplet.db-mongo-primary[1]"
# - tf state mv "digitalocean_volume.mongo_volume[2]" "digitalocean_volume.mongo_volume[1]"

View file

@@ -1,6 +1,6 @@
{
"service": {
"name": "db-mongo",
"name": "db-mongo-staging",
"id": "{{ inventory_hostname }}",
"tags": [
"db"

View file

@@ -15,7 +15,7 @@
become: yes
docker_container:
name: nginx
image: nginx:1.19
image: nginx:1.21
state: started
networks_cli_compatible: yes
network_mode: default

View file

@@ -110,7 +110,7 @@
- /srv/newsblur/node:/srv/node
with_items:
- container_name: imageproxy
image: willnorris/imageproxy
image: ghcr.io/willnorris/imageproxy
ports: 8088:8080
target_host: node-images
when: item.target_host in inventory_hostname

View file

@@ -14,6 +14,13 @@
state: directory
mode: 0777
- name: Ensure postgres backup directory
become: yes
file:
path: /srv/newsblur/backups
state: directory
mode: 0777
- name: Start postgres docker containers
become: yes
docker_container:
@@ -63,6 +70,12 @@
notify:
- reload consul
- name: Copy common secrets
copy:
src: /srv/secrets-newsblur/settings/common_settings.py
dest: /srv/newsblur/newsblur_web/local_settings.py
register: app_changed
- name: Add sanity checkers cronjob for disk usage
become: yes
cron:
@@ -78,19 +91,19 @@
--hostname {{ ansible_hostname }}
newsblur/newsblur_python3 /srv/newsblur/utils/monitor_disk_usage.py $OUTPUT
- name: Add postgres backup log
become: yes
file:
path: /var/log/postgres_backup.log
state: touch
mode: 0777
owner: 1000
group: 1001
- name: Add postgres backup
cron:
name: postgres backup
minute: "0"
hour: "4"
job: >-
NOW=$(eval date +%F-%H-%M);
BACKUP_FILE=backup_postgresql_${NOW}.sql;
sudo docker exec -it postgres
/usr/lib/postgresql/13/bin/pg_dump -U newsblur -h 127.0.0.1 -Fc newsblur > backup/$BACKUP_FILE;
sudo docker run --rm -it
-v /srv/newsblur:/srv/newsblur
-v /backup/:/backup/
--network=newsblurnet
newsblur/newsblur_python3
python /srv/newsblur/utils/backups/backup_psql.py
job: /srv/newsblur/docker/postgres/backup_postgres.sh 1> /var/log/postgres_backup.log 2>&1

View file

@@ -77,6 +77,7 @@
ports:
- "8000:8000"
restart_policy: unless-stopped
user: 1000:1001
volumes:
- /srv/newsblur:/srv/newsblur
- /etc/hosts:/etc/hosts

View file

@@ -128,7 +128,7 @@ def add_site(request, token):
url = request.GET['url']
folder = request.GET['folder']
new_folder = request.GET.get('new_folder')
callback = request.GET['callback']
callback = request.GET.get('callback', '')
if not url:
code = -1
@@ -217,6 +217,10 @@ def check_share_on_site(request, token):
logging.user(request.user, "~FBFinding feed (check_share_on_site): %s" % rss_url)
feed = Feed.get_feed_from_url(rss_url, create=False, fetch=False)
if not feed:
rss_url = urllib.parse.urljoin(story_url, rss_url)
logging.user(request.user, "~FBFinding feed (check_share_on_site): %s" % rss_url)
feed = Feed.get_feed_from_url(rss_url, create=False, fetch=False)
if not feed:
logging.user(request.user, "~FBFinding feed (check_share_on_site): %s" % story_url)
feed = Feed.get_feed_from_url(story_url, create=False, fetch=False)

View file

@@ -193,6 +193,9 @@ class EmailNewsletter:
if 'body-plain' in params:
return linkify(linebreaks(params['body-plain']))
if force_plain:
return self._get_content(params, force_plain=False)
def _clean_content(self, content):
original = content
scrubber = Scrubber()

View file

@@ -89,8 +89,8 @@ class Profile(models.Model):
self.secret_token = generate_secret_token(self.user.username, 12)
try:
super(Profile, self).save(*args, **kwargs)
except DatabaseError:
print(" ---> Profile not saved. Table isn't there yet.")
except DatabaseError as e:
print(f" ---> Profile not saved: {e}")
def delete_user(self, confirm=False, fast=False):
if not confirm:

View file

@@ -7,6 +7,7 @@ import re
from django.conf import settings
from django.db import models
from django.urls import reverse
import hashlib
from apps.push import signals
@@ -42,13 +43,9 @@ class PushSubscriptionManager(models.Manager):
subscription.save()
if callback is None:
# try:
# callback_path = reverse('push-callback', args=(subscription.pk,))
# except Resolver404:
# raise TypeError('callback cannot be None if there is not a reverable URL')
# else:
# # callback = 'http://' + Site.objects.get_current() + callback_path
callback = "https://push.newsblur.com/push/%s" % subscription.pk # + callback_path
callback_path = reverse('push-callback', args=(subscription.pk,))
callback = 'https://' + settings.PUSH_DOMAIN + callback_path
# callback = "https://push.newsblur.com/push/%s" % subscription.pk # + callback_path
try:
response = self._send_request(hub, {

View file

@@ -14,6 +14,7 @@ urlpatterns = [
url(r'^page/(?P<feed_id>\d+)', views.load_feed_page, name='load-feed-page'),
url(r'^refresh_feed/(?P<feed_id>\d+)', views.refresh_feed, name='refresh-feed'),
url(r'^favicons', views.load_feed_favicons, name='load-feed-favicons'),
url(r'^river_stories_widget', views.load_river_stories_widget, name='load-river-stories-widget'),
url(r'^river_stories', views.load_river_stories__redis, name='load-river-stories'),
url(r'^complete_river', views.complete_river, name='complete-river'),
url(r'^refresh_feeds', views.refresh_feeds, name='refresh-feeds'),

View file

@@ -4,7 +4,13 @@ import redis
import requests
import random
import zlib
import concurrent
import re
import ssl
import socket
import base64
import urllib.parse
import urllib.request
from django.shortcuts import get_object_or_404
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
@@ -846,10 +852,9 @@ def load_single_feed(request, feed_id):
# if not usersub and feed.num_subscribers <= 1:
# data = dict(code=-1, message="You must be subscribed to this feed.")
# time.sleep(random.randint(1, 3))
if delay and user.is_staff:
# import random
# time.sleep(random.randint(2, 7) / 10.0)
# time.sleep(random.randint(1, 10))
time.sleep(delay)
# if page == 1:
# time.sleep(1)
@@ -1454,7 +1459,7 @@ def load_river_stories__redis(request):
story_hashes = []
unread_feed_story_hashes = []
mstories = MStory.objects(story_hash__in=story_hashes).order_by(story_date_order)
mstories = MStory.objects(story_hash__in=story_hashes[:limit]).order_by(story_date_order)
stories = Feed.format_stories(mstories)
found_feed_ids = list(set([story['story_feed_id'] for story in stories]))
@@ -1596,6 +1601,57 @@ def load_river_stories__redis(request):
return data
@json.json_view
def load_river_stories_widget(request):
logging.user(request, "Widget load")
river_stories_data = json.decode(load_river_stories__redis(request).content)
timeout = 3
start = time.time()
def load_url(url):
original_url = url
url = urllib.parse.urljoin(settings.NEWSBLUR_URL, url)
scontext = ssl.SSLContext(ssl.PROTOCOL_TLS)
scontext.verify_mode = ssl.VerifyMode.CERT_NONE
try:
conn = urllib.request.urlopen(url, context=scontext, timeout=timeout)
except urllib.request.URLError:
url = url.replace('localhost', 'haproxy')
conn = urllib.request.urlopen(url, context=scontext, timeout=timeout)
except urllib.request.URLError as e:
logging.user(request.user, '"%s" not fetched in %ss: %s' % (url, (time.time() - start), e))
return None
except socket.timeout:
logging.user(request.user, '"%s" not fetched in %ss' % (url, (time.time() - start)))
return None
data = conn.read()
logging.user(request.user, '"%s" fetched in %ss' % (url, (time.time() - start)))
return dict(url=original_url, data=data)
# Find the image thumbnails and download in parallel
thumbnail_urls = []
for story in river_stories_data['stories']:
thumbnail_values = list(story['secure_image_thumbnails'].values())
if thumbnail_values:
thumbnail_urls.append(thumbnail_values[0])
with concurrent.futures.ThreadPoolExecutor(max_workers=6) as executor:
pages = executor.map(load_url, thumbnail_urls)
# Reassemble thumbnails back into stories
thumbnail_data = dict()
for page in pages:
if not page: continue
thumbnail_data[page['url']] = base64.b64encode(page['data']).decode('utf-8')
for story in river_stories_data['stories']:
thumbnail_values = list(story['secure_image_thumbnails'].values())
if thumbnail_values and thumbnail_values[0] in thumbnail_data:
story['select_thumbnail_data'] = thumbnail_data[thumbnail_values[0]]
logging.user(request, ("Elapsed Time: %ss" % (time.time() - start)))
return river_stories_data
@json.json_view
def complete_river(request):
user = get_user(request)
@@ -2199,7 +2255,11 @@ def delete_feeds_by_folder(request):
@json.json_view
def rename_feed(request):
feed = get_object_or_404(Feed, pk=int(request.POST['feed_id']))
try:
user_sub = UserSubscription.objects.get(user=request.user, feed=feed)
except UserSubscription.DoesNotExist:
return dict(code=-1, message=f"You are not subscribed to {feed.feed_title}")
feed_title = request.POST['feed_title']
logging.user(request, "~FRRenaming feed '~SB%s~SN' to: ~SB%s" % (

View file

@@ -30,9 +30,18 @@ def privacy(request):
def tos(request):
return render(request, 'static/tos.xhtml')
def webmanifest(request):
filename = settings.MEDIA_ROOT + '/extensions/edge/manifest.json'
manifest = open(filename).read()
return HttpResponse(manifest, content_type='application/manifest+json')
def apple_app_site_assoc(request):
return render(request, 'static/apple_app_site_assoc.xhtml')
def apple_developer_merchantid(request):
return render(request, 'static/apple_developer_merchantid.xhtml')
def feedback(request):
return render(request, 'static/feedback.xhtml')

Some files were not shown because too many files have changed in this diff Show more