Merge branch 'master' into sictiru

* master: (136 commits)
  Fixing missing usernames in msocialprofile, due to botched deleted user refactor.
  Removing usused imports
  Removing boto2, upgrading to boto3. Needs testing.
  Adding elasticsearch to py3, upgrading monitor, need to fix boto -> boto3.
  Adding db mongo analytics back.
  Correcting mongodb on local installs since there is no auth.
  Upping data upload limit to 5mb.
  Default username and password for analytics.
  Forcing reconnect of analytics.
  Don't reconnect analytics server.
  Removing unused mongodb connect code
  Using nbanalytics db as name
  Typo
  Mongoengine connect is so persnickity.
  Adding db name
  No port in mongo analytics
  New mongodb connect string for analytics.
  Have to use UID
  Mongo analytics needs correct user.
  Mongo analytics is its own server, with authentication.
  ...
Samuel Clay 2021-08-11 15:54:49 -04:00
commit b49e6bb7e8
130 changed files with 4672 additions and 1956 deletions

.gitignore
View file

@@ -44,6 +44,7 @@ vendor/mms-agent/settings.py
 apps/social/spam.py
 venv*
 /backups
+config/mongodb_keyfile.key
 # Docker Jinja templates
 docker/haproxy/haproxy.consul.cfg

View file

@@ -21,5 +21,7 @@
         "static/*.js": true,
         "blog/.jekyll-cache": true,
         "blog/_site": true,
+        "docker/volumes": true,
+        "requirements.txt": true, // It's just a symlink to config/requirements.txt, which has git history
     },
 }

View file

@@ -20,12 +20,14 @@ rebuild:
 #creates newsblur, builds new images, and creates/refreshes SSL keys
 nb: pull
-	- CURRENT_UID=${CURRENT_UID} CURRENT_GID=${CURRENT_GID} docker-compose down
+	- RUNWITHMAKEBUILD=True CURRENT_UID=${CURRENT_UID} CURRENT_GID=${CURRENT_GID} docker-compose down
 	- [[ -d config/certificates ]] && echo "keys exist" || make keys
 	- cd node && npm install & cd ..
 	- RUNWITHMAKEBUILD=True CURRENT_UID=${CURRENT_UID} CURRENT_GID=${CURRENT_GID} docker-compose up -d --build --remove-orphans
 	- RUNWITHMAKEBUILD=True docker-compose exec newsblur_web ./manage.py migrate
 	- RUNWITHMAKEBUILD=True docker-compose exec newsblur_web ./manage.py loaddata config/fixtures/bootstrap.json
+coffee:
+	- coffee -c -w **/*.coffee
 shell:
 	- RUNWITHMAKEBUILD=True CURRENT_UID=${CURRENT_UID} CURRENT_GID=${CURRENT_GID} docker-compose exec newsblur_web ./manage.py shell_plus
@@ -37,11 +39,15 @@ debug:
 	- CURRENT_UID=${CURRENT_UID} CURRENT_GID=${CURRENT_GID} docker attach ${newsblur}
 log:
 	- RUNWITHMAKEBUILD=True docker-compose logs -f --tail 20 newsblur_web newsblur_node
+logweb: log
+logcelery:
+	- RUNWITHMAKEBUILD=True docker-compose logs -f --tail 20 task_celery
+logtask: logcelery
 logmongo:
 	- RUNWITHMAKEBUILD=True docker-compose logs -f db_mongo
 alllogs:
 	- RUNWITHMAKEBUILD=True docker-compose logs -f --tail 20
+logall: alllogs
 # brings down containers
 down:
 	- RUNWITHMAKEBUILD=True docker-compose -f docker-compose.yml down
@@ -69,6 +75,10 @@ keys:
 # Digital Ocean / Terraform
 list:
 	- doctl -t `cat /srv/secrets-newsblur/keys/digital_ocean.token` compute droplet list
+sizes:
+	- doctl -t `cat /srv/secrets-newsblur/keys/digital_ocean.token` compute size list
+ratelimit:
+	- doctl -t `cat /srv/secrets-newsblur/keys/digital_ocean.token` account ratelimit
 ansible-deps:
 	ansible-galaxy install -p roles -r ansible/roles/requirements.yml --roles-path ansible/roles
 tfrefresh:
@@ -79,6 +89,8 @@ apply:
 	terraform -chdir=terraform apply -refresh=false -parallelism=15
 inventory:
 	- ./ansible/utils/generate_inventory.py
+oldinventory:
+	- OLD=1 ./ansible/utils/generate_inventory.py
 # Docker
 pull:

View file

@@ -11,7 +11,7 @@
     - {role: 'base', tags: 'base'}
     - {role: 'ufw', tags: 'ufw'}
     - {role: 'docker', tags: 'docker'}
-    - {role: 'repo', tags: 'repo'}
+    - {role: 'repo', tags: ['repo', 'pull']}
     - {role: 'dnsmasq', tags: 'dnsmasq'}
     - {role: 'consul', tags: 'consul'}
     - {role: 'consul-client', tags: 'consul'}

View file

@@ -7,6 +7,9 @@ git_secrets_repo: ssh://git@github.com/samuelclay/newsblur-secrets
 create_user: nb
 local_key: "{{ lookup('file', lookup('env','HOME') + '/.ssh/id_rsa.pub') }}"
 copy_local_key: "{{ lookup('file', '/srv/secrets-newsblur/keys/docker.key.pub') }}"
+mongodb_keyfile: "{{ lookup('file', '/srv/secrets-newsblur/keys/mongodb_keyfile.key') }}"
+mongodb_username: "{{ lookup('ini', 'mongodb_username section=admin file=/srv/secrets-newsblur/configs/mongodb_auth.ini') }}"
+mongodb_password: "{{ lookup('ini', 'mongodb_password section=admin file=/srv/secrets-newsblur/configs/mongodb_auth.ini') }}"
 sys_packages: [
   'git',
   'python3',

View file

@@ -29,7 +29,7 @@ groups:
   elasticsearch: inventory_hostname.startswith('db-elasticsearch')
   redis: inventory_hostname.startswith('db-redis')
   postgres: inventory_hostname.startswith('db-postgres')
-  mongo: inventory_hostname.startswith('db-mongo')
+  mongo: inventory_hostname.startswith('db-mongo') and not inventory_hostname.startswith('db-mongo-analytics')
   mongo_analytics: inventory_hostname.startswith('db-mongo-analytics')
   consul: inventory_hostname.startswith('db-consul')
   metrics: inventory_hostname.startswith('db-metrics')

View file

@@ -24,7 +24,11 @@
   rescue:
     - name: Restart celery
       become: yes
-      command: "docker start task-work"
+      command: "docker start {{ item.service_name }}"
+      when: item.service_name in inventory_hostname
+      with_items:
+        - service_name: task-celery
+        - service_name: task-work
     - name: Stop celery
       become: yes

View file

@@ -11,7 +11,7 @@
     - {role: 'base', tags: 'base'}
     - {role: 'ufw', tags: 'ufw'}
     - {role: 'docker', tags: 'docker'}
-    - {role: 'repo', tags: 'repo'}
+    - {role: 'repo', tags: ['repo', 'pull']}
     - {role: 'dnsmasq', tags: 'dnsmasq'}
     - {role: 'consul', tags: 'consul'}
     - {role: 'consul-client', tags: 'consul'}

View file

@@ -11,7 +11,7 @@
     - {role: 'base', tags: 'base'}
     - {role: 'ufw', tags: 'ufw'}
     - {role: 'docker', tags: 'docker'}
-    - {role: 'repo', tags: 'repo'}
+    - {role: 'repo', tags: ['repo', 'pull']}
     - {role: 'dnsmasq', tags: 'dnsmasq'}
     - {role: 'consul', tags: 'consul'}
     - {role: 'consul-client', tags: 'consul'}

View file

@@ -11,7 +11,7 @@
     - {role: 'base', tags: 'base'}
     - {role: 'ufw', tags: 'ufw'}
     - {role: 'docker', tags: 'docker'}
-    - {role: 'repo', tags: 'repo'}
+    - {role: 'repo', tags: ['repo', 'pull']}
     - {role: 'dnsmasq', tags: 'dnsmasq'}
     - {role: 'consul', tags: 'consul'}
     - {role: 'consul-manager', tags: 'consul'}

View file

@@ -11,7 +11,7 @@
     - {role: 'base', tags: 'base'}
     - {role: 'ufw', tags: 'ufw'}
     - {role: 'docker', tags: 'docker'}
-    - {role: 'repo', tags: 'repo'}
+    - {role: 'repo', tags: ['repo', 'pull']}
     - {role: 'dnsmasq', tags: 'dnsmasq'}
     - {role: 'consul', tags: 'consul'}
     - {role: 'consul-client', tags: 'consul'}

View file

@@ -11,7 +11,7 @@
     - {role: 'base', tags: 'base'}
     - {role: 'ufw', tags: 'ufw'}
     - {role: 'docker', tags: 'docker'}
-    - {role: 'repo', tags: 'repo'}
+    - {role: 'repo', tags: ['repo', 'pull']}
     # - {role: 'dnsmasq', tags: 'dnsmasq'}
     # - {role: 'consul', tags: 'consul'}
     # - {role: 'consul-client', tags: 'consul'}

View file

@@ -10,10 +10,10 @@
     - {role: 'base', tags: 'base'}
     - {role: 'ufw', tags: 'ufw'}
     - {role: 'docker', tags: 'docker'}
-    - {role: 'repo', tags: 'repo'}
+    - {role: 'repo', tags: ['repo', 'pull']}
     - {role: 'dnsmasq', tags: 'dnsmasq'}
     - {role: 'consul', tags: 'consul'}
     - {role: 'consul-client', tags: 'consul'}
-    - {role: 'node-exporter', tags: ['node-exporter', 'metrics']}
     - {role: 'elasticsearch', tags: 'elasticsearch'}
+    - {role: 'node-exporter', tags: ['node-exporter', 'metrics']}
     - {role: 'monitor', tags: 'monitor'}

View file

@@ -11,7 +11,7 @@
     - {role: 'base', tags: 'base'}
     - {role: 'ufw', tags: 'ufw'}
     - {role: 'docker', tags: 'docker'}
-    - {role: 'repo', tags: 'repo'}
+    - {role: 'repo', tags: ['repo', 'pull']}
     - {role: 'dnsmasq', tags: 'dnsmasq'}
     - {role: 'consul', tags: 'consul'}
     - {role: 'consul-client', tags: 'consul'}

View file

@@ -1,6 +1,6 @@
 ---
 - name: SETUP -> mongo containers
-  hosts: mongo
+  hosts: mongo, mongo_analytics
   vars:
     - update_apt_cache: yes
     - motd_role: db
@@ -11,11 +11,12 @@
     - {role: 'base', tags: 'base'}
     - {role: 'ufw', tags: 'ufw'}
     - {role: 'docker', tags: 'docker'}
-    - {role: 'repo', tags: 'repo'}
+    - {role: 'repo', tags: ['repo', 'pull']}
     - {role: 'dnsmasq', tags: 'dnsmasq'}
     - {role: 'consul', tags: 'consul'}
     - {role: 'consul-client', tags: 'consul'}
-    - {role: 'node-exporter', tags: ['node-exporter', 'metrics']}
     - {role: 'mongo', tags: 'mongo'}
+    - {role: 'node-exporter', tags: ['node-exporter', 'metrics']}
+    - {role: 'mongo-exporter', tags: 'mongo-exporter'}
     - {role: 'monitor', tags: 'monitor'}
-    - {role: 'benchmark', tags: 'benchmark'}
+    # - {role: 'benchmark', tags: 'benchmark'}

View file

@@ -11,7 +11,7 @@
     - {role: 'base', tags: 'base'}
     - {role: 'ufw', tags: 'ufw'}
     - {role: 'docker', tags: 'docker'}
-    - {role: 'repo', tags: 'repo'}
+    - {role: 'repo', tags: ['repo', 'pull']}
     - {role: 'dnsmasq', tags: 'dnsmasq'}
     - {role: 'consul', tags: 'consul'}
     - {role: 'consul-client', tags: 'consul'}

View file

@@ -11,7 +11,7 @@
     - {role: 'base', tags: 'base'}
     - {role: 'ufw', tags: 'ufw'}
     - {role: 'docker', tags: 'docker'}
-    - {role: 'repo', tags: 'repo'}
+    - {role: 'repo', tags: ['repo', 'pull']}
     - {role: 'dnsmasq', tags: 'dnsmasq'}
     - {role: 'consul', tags: 'consul'}
     - {role: 'consul-client', tags: 'consul'}

View file

@@ -10,7 +10,7 @@
     - {role: 'base', tags: 'base'}
     - {role: 'ufw', tags: 'ufw'}
     - {role: 'docker', tags: 'docker'}
-    - {role: 'repo', tags: 'repo'}
+    - {role: 'repo', tags: ['repo', 'pull']}
     - {role: 'dnsmasq', tags: 'dnsmasq'}
     - {role: 'consul', tags: 'consul'}
     - {role: 'consul-client', tags: 'consul'}

View file

@@ -12,7 +12,7 @@
     - {role: 'base', tags: 'base'}
     - {role: 'ufw', tags: 'ufw'}
     - {role: 'docker', tags: 'docker'}
-    - {role: 'repo', tags: 'repo'}
+    - {role: 'repo', tags: ['repo', 'pull']}
     - {role: 'dnsmasq', tags: 'dnsmasq'}
     - {role: 'consul', tags: 'consul'}
     - {role: 'consul-client', tags: 'consul'}

View file

@@ -10,7 +10,7 @@
     - {role: 'base', tags: 'base'}
     - {role: 'ufw', tags: 'ufw'}
     - {role: 'docker', tags: 'docker'}
-    - {role: 'repo', tags: 'repo'}
+    - {role: 'repo', tags: ['repo', 'pull']}
     - {role: 'dnsmasq', tags: 'dnsmasq'}
     - {role: 'consul', tags: 'consul'}
     - {role: 'consul-client', tags: 'consul'}

View file

@@ -11,11 +11,10 @@
     - {role: 'base', tags: 'base'}
     - {role: 'ufw', tags: 'ufw'}
     - {role: 'docker', tags: 'docker'}
-    - {role: 'repo', tags: 'repo'}
+    - {role: 'repo', tags: ['repo', 'pull']}
     - {role: 'dnsmasq', tags: 'dnsmasq'}
     - {role: 'consul', tags: 'consul'}
     - {role: 'consul-client', tags: 'consul'}
     - {role: 'node-exporter', tags: ['node-exporter', 'metrics']}
-    - {role: 'monitor', tags: 'monitor'}
     - {role: 'letsencrypt', tags: 'letsencrypt'}
     - {role: 'haproxy', tags: 'haproxy'}

View file

@@ -32,6 +32,7 @@
     image: newsblur/newsblur_python3
     state: started
     pull: yes
+    hostname: "{{ inventory_hostname }}"
     container_default_behavior: no_defaults
     env:
       DOCKERBUILD: ""
@@ -73,7 +74,7 @@
     container_default_behavior: no_defaults
     env:
       AUTOHEAL_CONTAINER_LABEL: all
-    restart_policy: always
+    restart_policy: unless-stopped
     volumes:
       - /var/run/docker.sock:/var/run/docker.sock
@@ -104,50 +105,57 @@
         - container_name: task-work
       changed_when: app_changed.changed
 
+    - name: Ensure permissions on sanity checker log
+      become: yes
+      file:
+        path: /var/log/sanity_checker.log
+        state: touch
+        mode: 0666
+
     - name: Add sanity checkers cronjob for feeds fetched
       become: yes
-      cron:
-        name: feeds_fetched_sanity_checker
-        user: root
-        cron_file: /etc/cron.hourly/feeds_fetched_sanity_checker
-        job: >-
-          docker pull newsblur/newsblur_python3:latest;
-          docker run --rm -it
-          -v /srv/newsblur/:/srv/newsblur
-          -h `cat /etc/hostname`
-          --network=newsblurnet newsblur/newsblur_python3 /srv/newsblur/utils/monitor_task_fetches.py
+      copy:
+        owner: root
+        dest: /etc/cron.d/feeds_fetched_sanity_checker
+        mode: 0744
+        content: |
+          MAILTO=""
+          SHELL=/bin/sh
+          PATH=/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin
+          0 * * * * nb sudo docker run -v /srv/newsblur/:/srv/newsblur -v /var/log/sanity_checker.log:/var/log/sanity_checker.log -h `cat /etc/hostname` --network=newsblurnet newsblur/newsblur_python3 /srv/newsblur/utils/monitor_task_fetches.py >> /var/log/sanity_checker.log
       when: "'task-work' in inventory_hostname"
       tags:
        - sanity-checker
 
     - name: Add sanity checkers cronjob for newsletter
       become: yes
-      cron:
-        name: newsletter_sanity_checker
-        user: root
-        cron_file: /etc/cron.hourly/newsletter_sanity_checker
-        job: >-
-          docker pull newsblur/newsblur_python3:latest;
-          docker run --rm -it
-          -v /srv/newsblur/:/srv/newsblur
-          -h `cat /etc/hostname`
-          --network=newsblurnet newsblur/newsblur_python3 /srv/newsblur/utils/monitor_newsletter_delivery.py
+      copy:
+        owner: root
+        dest: /etc/cron.d/newsletter_sanity_checker
+        mode: 0744
+        content: |
+          MAILTO=""
+          SHELL=/bin/sh
+          PATH=/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin
+          0 * * * * nb sudo docker run -v /srv/newsblur/:/srv/newsblur -v /var/log/sanity_checker.log:/var/log/sanity_checker.log -h `cat /etc/hostname` --network=newsblurnet newsblur/newsblur_python3 /srv/newsblur/utils/monitor_newsletter_delivery.py >> /var/log/sanity_checker.log
       when: "'task-work' in inventory_hostname"
       tags:
        - sanity-checker
 
     - name: Add sanity checkers cronjob for work queue
       become: yes
-      cron:
-        name: work_queue_sanity_checker
-        user: root
-        cron_file: /etc/cron.hourly/work_queue_sanity_checker
-        job: >-
-          docker pull newsblur/newsblur_python3:latest;
-          docker run --rm -it
-          -v /srv/newsblur/:/srv/newsblur
-          -h `cat /etc/hostname`
-          --network=newsblurnet newsblur/newsblur_python3 /srv/newsblur/utils/monitor_work_queue.py
+      copy:
+        owner: root
+        dest: /etc/cron.d/work_queue_sanity_checker
+        mode: 0744
+        content: |
+          MAILTO=""
+          SHELL=/bin/sh
+          PATH=/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin
+          0 * * * * nb sudo docker run -v /srv/newsblur/:/srv/newsblur -v /var/log/sanity_checker.log:/var/log/sanity_checker.log -h `cat /etc/hostname` --network=newsblurnet newsblur/newsblur_python3 /srv/newsblur/utils/monitor_work_queue.py >> /var/log/sanity_checker.log
       when: "'task-work' in inventory_hostname"
       tags:
        - sanity-checker

View file

@@ -7,3 +7,12 @@ server=/consul/127.0.0.1#8600
 {# dnsmasq should not needlessly read /etc/resolv.conf #}
 no-resolv
+
+interface=lo
+interface=eth0
+interface=eth1
+bind-interfaces
+
+# log-dhcp
+# log-queries
+# log-facility=/var/log/dnsmasq.log

View file

@@ -24,10 +24,7 @@
 - name: Turn off Docker iptables firewall exclusion
   become: yes
-  copy:
+  template:
+    src: daemon.json
     dest: /etc/docker/daemon.json
-    content: |
-      {
-        "iptables": false
-      }
   notify: restart docker

View file

@@ -0,0 +1,3 @@
+{
+  "iptables": false
+}

View file

@@ -1,15 +1,48 @@
 ---
+- name: Permissions for elasticsearch
+  become: yes
+  file:
+    state: directory
+    mode: 0777
+    path: /var/log/elasticsearch
+
+- name: Permissions for elasticsearch volume
+  become: yes
+  file:
+    state: directory
+    path: /srv/newsblur/docker/volumes
+    recurse: yes
+    owner: nb
+    group: nb
+
+- name: Make docker network for newsblurnet
+  become: yes
+  docker_network:
+    name: newsblurnet
+  notify: restart docker
+
 - name: Start Elasticsearch Docker container
   become: yes
   docker_container:
     name: elasticsearch
-    image: elasticsearch:1.7.6
+    image: elasticsearch:7.14.0
     state: started
+    hostname: "{{ inventory_hostname }}"
     ports:
       - '9200:9200'
     restart_policy: unless-stopped
+    container_default_behavior: no_defaults
+    networks_cli_compatible: yes
+    # network_mode: host
+    network_mode: default
+    networks:
+      - name: newsblurnet
+        aliases:
+          - elasticsearch
+    user: 1000:1001
     volumes:
       - /srv/newsblur/docker/volumes/elasticsearch:/usr/share/elasticsearch/data
+      - /var/log/elasticsearch/:/var/log/elasticsearch/
 
 - name: Register elasticsearch in consul
   tags: consul

View file

@@ -20,6 +20,7 @@
     name: grafana
     image: grafana/grafana:7.5.7
     restart_policy: unless-stopped
+    hostname: "{{ inventory_hostname }}"
     user: root
     networks_cli_compatible: yes
     network_mode: default

View file

@@ -84,6 +84,7 @@
     #   - "80:80"
     #   - "443:443"
     #   - "1936:1936"
+    hostname: "{{ inventory_hostname }}"
     restart_policy: unless-stopped
     container_default_behavior: no_defaults
     command: "haproxy -f /srv/newsblur/docker/haproxy/haproxy.consul.cfg"

View file

@@ -16,7 +16,8 @@
     networks:
       - name: newsblurnet
     env:
-      MONGODB_URI: 'mongodb://db-mongo.service.nyc1.consul:27017/admin?'
+      MONGODB_URI: 'mongodb://{{ inventory_hostname }}.node.nyc1.consul:27017/admin?'
+      # MONGODB_URI: 'mongodb://{{ mongodb_username }}:{{ mongodb_password }}@{{ inventory_hostname }}.node.nyc1.consul:27017/admin?authSource=admin'
     ports:
       - '9216:9216'
@@ -29,4 +30,4 @@
   notify:
     - reload consul
 
 - name: Command to register mongo-exporter
   command: "consul services register /etc/consul.d/mongo-exporter.json"

View file

@@ -6,12 +6,53 @@
     mode: 0777
     path: /var/log/mongodb
 
+- name: Get the volume name
+  shell: ls /dev/disk/by-id/ | grep -v part
+  register: volume_name_raw
+
+- set_fact:
+    volume_name: "{{ volume_name_raw.stdout }}"
+
+- debug:
+    msg: "{{ volume_name }}"
+
+- name: Create the mount point
+  become: yes
+  file:
+    path: "/mnt/{{ inventory_hostname | regex_replace('db-|-', '') }}"
+    state: directory
+
+- name: Mount volume read-write
+  become: yes
+  mount:
+    path: "/mnt/{{ inventory_hostname | regex_replace('db-|-', '') }}"
+    src: "/dev/disk/by-id/{{ volume_name }}"
+    fstype: xfs
+    opts: defaults,discard
+    state: mounted
+
+- name: Copy MongoDB keyfile
+  copy:
+    content: "{{ mongodb_keyfile }}"
+    dest: /srv/newsblur/config/mongodb_keyfile.key
+    owner: nb
+    mode: 0400
+  tags:
+    - keyfile
+
 - name: Make docker network for newsblurnet
   become: yes
   docker_network:
     name: newsblurnet
   notify: restart docker
 
+- name: Make backup directory
+  become: yes
+  file:
+    path: /opt/mongo/newsblur/backup/
+    state: directory
+    mode: 0666
+
 - name: Start db-mongo docker container
   become: yes
   docker_container:
@@ -19,19 +60,53 @@
     image: mongo:3.6
     state: started
     container_default_behavior: no_defaults
+    hostname: "{{ inventory_hostname }}"
     restart_policy: unless-stopped
     networks_cli_compatible: yes
-    network_mode: default
-    networks:
-      - name: newsblurnet
-    ports:
-      - "27017:27017"
+    network_mode: host
+    # network_mode: default
+    # networks:
+    #   - name: newsblurnet
+    #     aliases:
+    #       - mongo
+    # ports:
+    #   - "27017:27017"
     command: --config /etc/mongod.conf
     volumes:
       - /mnt/{{ inventory_hostname | regex_replace('db-|-', '') }}:/data/db
       - /srv/newsblur/ansible/roles/mongo/templates/mongo.conf:/etc/mongod.conf
+      - /srv/newsblur/config/mongodb_keyfile.key:/srv/newsblur/config/mongodb_keyfile.key
       - /var/log/mongodb/:/var/log/mongodb/
-      - /opt/mongo/newsblur/backup:/backup/'
+      - /opt/mongo/newsblur/backup/:/backup/
+  when: (inventory_hostname | regex_replace('[0-9]+', '')) in ['db-mongo', 'db-mongo-secondary']
+
+- name: Start db-mongo-analytics docker container
+  become: yes
+  docker_container:
+    name: mongo
+    image: mongo:3.6
+    state: started
+    container_default_behavior: no_defaults
+    hostname: "{{ inventory_hostname }}"
+    restart_policy: unless-stopped
+    networks_cli_compatible: yes
+    # network_mode: host
+    network_mode: default
+    networks:
+      - name: newsblurnet
+        aliases:
+          - mongo
+    ports:
+      - "27017:27017"
+    command: --config /etc/mongod.conf
+    user: 1000:1001
+    volumes:
+      - /mnt/{{ inventory_hostname | regex_replace('db-|-', '') }}:/data/db
+      - /srv/newsblur/ansible/roles/mongo/templates/mongo.analytics.conf:/etc/mongod.conf
+      - /srv/newsblur/config/mongodb_keyfile.key:/srv/newsblur/config/mongodb_keyfile.key
+      - /var/log/mongodb/:/var/log/mongodb/
+      - /opt/mongo/newsblur/backup/:/backup/
+  when: (inventory_hostname | regex_replace('[0-9]+', '')) == 'db-mongo-analytics'
 
 - name: Register mongo in consul
   tags: consul
@@ -39,7 +114,7 @@
   template:
     src: consul_service.json
     dest: /etc/consul.d/mongo.json
-  when: (inventory_hostname | regex_replace('[0-9]+', '')) in ['db-mongo', 'db-mongo-secondary'] or inventory_hostname.startswith('db2')
+  when: (inventory_hostname | regex_replace('[0-9]+', '')) in ['db-mongo', 'db-mongo-secondary']
   notify:
     - reload consul
@@ -49,7 +124,7 @@
   template:
     src: consul_service.analytics.json
     dest: /etc/consul.d/mongo.json
-  when: (inventory_hostname | regex_replace('[0-9]+', '')) == 'db-mongo-analytics' or inventory_hostname.startswith('db3')
+  when: (inventory_hostname | regex_replace('[0-9]+', '')) == 'db-mongo-analytics'
   notify:
     - reload consul
@@ -81,38 +156,24 @@
     name: mongo backup
     minute: "0"
     hour: "4"
-    job:
-      collections=(
-      classifier_tag
-      classifier_author
-      classifier_feed
-      classifier_title
-      userstories
-      shared_stories
-      category
-      category_site
-      sent_emails
-      social_profile
-      social_subscription
-      social_services
-      statistics
-      user_search
-      feedback
-      )
-      for collection in collections; do
-      echo Dumping $collection
-      now=$(date '+%Y-%m-%d-%H-%M')
-      docker exec -it mongo mongodump --db newsblur --collection $collection -o /backup/backup_mongo_${now}
-      echo Compressing /opt/mongo/newsblur/backup/backup_mongo_${now}.tgz
-      tar -zcf /opt/mongo/newsblur/backup/backup_mongo_${now}.tgz /opt/mongo/newsblur/backup/backup_mongo_${now})
-      done;
-      echo Uploading backups to S3
-      docker run --rm
-      -v /srv/newsblur:/srv/newsblur
-      -v /opt/mongo/newsblur/backup/:/opt/mongo/newsblur/backup/
-      --network=newsblurnet
-      newsblur/newsblur_python3:latest /srv/newsblur/utils/backups/backup_mongo.py
+    job: /srv/newsblur/docker/mongo/backup_mongo.sh
+  when: '"db-mongo-secondary1" in inventory_hostname'
+  tags:
+    - mongo-backup
+
+- name: Add mongo starred_stories+stories backup
+  cron:
+    name: mongo starred/shared/all stories backup
+    minute: "0"
+    hour: "5"
+    job: /srv/newsblur/docker/mongo/backup_mongo_stories.sh
+  when: '"db-mongo-secondary1" in inventory_hostname'
+  tags:
+    - mongo-backup
+
+# Renaming a db-mongo3 to db-mongo2:
+# - Change hostname to db-mongo2 on Digital Ocean (doctl)
+# - Change hostname to db-mongo2 in /etc/hostname
# - Symlink /mnt/mongo2 to /mnt/mongo3
+# - tf state mv "digitalocean_droplet.db-mongo-primary[2]" "digitalocean_droplet.db-mongo-primary[1]"
+# - tf state mv "digitalocean_volume.mongo_volume[2]" "digitalocean_volume.mongo_volume[1]"
@ -81,38 +156,24 @@
name: mongo backup name: mongo backup
minute: "0" minute: "0"
hour: "4" hour: "4"
job: job: /srv/newsblur/docker/mongo/backup_mongo.sh
collections=( when: '"db-mongo-secondary1" in inventory_hostname'
classifier_tag tags:
classifier_author - mongo-backup
classifier_feed
classifier_title
userstories
shared_stories
category
category_site
sent_emails
social_profile
social_subscription
social_services
statistics
user_search
feedback
)
for collection in collections; do
echo Dumping $collection
now=$(date '+%Y-%m-%d-%H-%M')
docker exec -it mongo mongodump --db newsblur --collection $collection -o /backup/backup_mongo_${now} - name: Add mongo starred_stories+stories backup
cron:
echo Compressing /opt/mongo/newsblur/backup/backup_mongo_${now}.tgz name: mongo starred/shared/all stories backup
tar -zcf /opt/mongo/newsblur/backup/backup_mongo_${now}.tgz /opt/mongo/newsblur/backup/backup_mongo_${now}) minute: "0"
hour: "5"
done; job: /srv/newsblur/docker/mongo/backup_mongo_stories.sh
when: '"db-mongo-secondary1" in inventory_hostname'
tags:
- mongo-backup
echo Uploading backups to S3 # Renaming a db-mongo3 to db-mongo2:
docker run --rm # - Change hostname to db-mongo2 on Digital Ocean (doctl)
-v /srv/newsblur:/srv/newsblur # - Change hostname to db-mongo2 in /etc/hostname
-v /opt/mongo/newsblur/backup/:/opt/mongo/newsblur/backup/ # - Symlink /mnt/mongo2 to /mnt/mongo3
--network=newsblurnet # - tf state mv "digitalocean_droplet.db-mongo-primary[2]" "digitalocean_droplet.db-mongo-primary[1]"
newsblur/newsblur_python3:latest /srv/newsblur/utils/backups/backup_mongo.py # - tf state mv "digitalocean_volume.mongo_volume[2]" "digitalocean_volume.mongo_volume[1]"

View file

@@ -8,7 +8,7 @@
   "port": 27017,
   "checks": [{
     "id": "mongo-analytics-ping",
-    "http": "{% if inventory_hostname.startswith('db-mongo') %}http://{{ ansible_ssh_host }}:5579/db_check/mongo{% else %}http://{{ ansible_ssh_host }}:5000/db_check/mongo{% endif %}",
+    "http": "http://{{ ansible_ssh_host }}:5579/db_check/mongo_analytics",
     "interval": "15s"
   }]
 }

View file

@@ -8,7 +8,7 @@
   "port": 27017,
   "checks": [{
     "id": "mongo-ping",
-    "http": "{% if inventory_hostname.startswith('db-mongo') %}http://{{ ansible_ssh_host }}:5579/db_check/mongo{% else %}http://{{ ansible_ssh_host }}:5000/db_check/mongo{% endif %}",
+    "http": "http://{{ ansible_ssh_host }}:5579/db_check/mongo",
     "interval": "15s",
     "failures_before_critical": 4
   }]

View file

@@ -0,0 +1,48 @@
+# mongod.conf
+
+# for documentation of all options, see:
+#   http://docs.mongodb.org/manual/reference/configuration-options/
+
+# Where and how to store data.
+storage:
+  dbPath: /data/db
+  journal:
+    enabled: true
+#  engine:
+#  mmapv1:
+#  wiredTiger:
+
+# where to write logging data.
+systemLog:
+  destination: file
+  logAppend: true
+  path: /var/log/mongodb/mongod.log
+
+# network interfaces
+net:
+  port: 27017
+  bindIpAll: true
+
+# how the process runs
+processManagement:
+  timeZoneInfo: /usr/share/zoneinfo
+
+security:
+  keyFile: /srv/newsblur/config/mongodb_keyfile.key
+  authorization: enabled
+  # transitionToAuth: true
+
+operationProfiling:
+  mode: slowOp
+  slowOpThresholdMs: 1000
+
+# replication:
+#   replSetName: nbset
+
+#sharding:
+
+## Enterprise-Only Options:
+
+#auditLog:
+
+#snmp:
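
For reference, a minimal sketch of how an application might connect once this config is live. Everything below is an assumption drawn from the commit messages in this merge (the nbanalytics db name, the admin authSource, credentials kept in mongodb_auth.ini), not verbatim NewsBlur settings:

    import mongoengine

    # Illustrative only: with authorization enabled, clients must supply
    # credentials and an authSource. Host and credential values are placeholders;
    # the real values come from /srv/secrets-newsblur/configs/mongodb_auth.ini.
    mongoengine.connect(
        db="nbanalytics",
        host="db-mongo-analytics.service.nyc1.consul",
        username="mongodb_username",   # placeholder
        password="mongodb_password",   # placeholder
        authentication_source="admin",
    )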

View file

@@ -27,7 +27,10 @@ net:
 processManagement:
   timeZoneInfo: /usr/share/zoneinfo
 
-# security:
+security:
+  keyFile: /srv/newsblur/config/mongodb_keyfile.key
+  authorization: enabled
+  # transitionToAuth: true
 
 operationProfiling:
   mode: slowOp

View file

@@ -15,7 +15,7 @@
   lineinfile:
     path: /srv/newsblur/newsblur_web/app_env.py
     line: 'SERVER_NAME = "{{ inventory_hostname }}"'
 
 - name: Make docker network for newsblurnet
   become: yes
   docker_network:
@@ -40,3 +40,8 @@
       - name: newsblurnet
     ports:
       - "5579:5579"
+
+- name: Restart monitor
+  become: yes
+  shell:
+    cmd: docker restart monitor

View file

@@ -5,6 +5,10 @@
     name: node-exporter
     image: prom/node-exporter
     container_default_behavior: no_defaults
+    networks_cli_compatible: yes
+    network_mode: default
+    networks:
+      - name: newsblurnet
     restart_policy: unless-stopped
     ports:
       - '9100:9100'
@@ -18,4 +22,4 @@
     dest: /etc/consul.d/node-exporter.json
   notify:
     - reload consul
   when: disable_consul_services_ie_staging is not defined

View file

@@ -1,7 +1,7 @@
 ---
 - name: Copy node secrets
   copy:
-    src: /srv/secrets-newsblur/settings/dotenv.env
+    src: /srv/secrets-newsblur/settings/node_settings.env
     dest: /srv/newsblur/node/.env
   register: app_changed
   notify: restart node
@@ -18,6 +18,44 @@
     path: /srv/newsblur/node/.env
     line: 'SERVER_NAME = "{{ inventory_hostname }}"'
 
+- name: Get the volume name
+  shell: ls /dev/disk/by-id/ | grep -v part
+  register: volume_name_raw
+  when: '"node-page" in inventory_hostname'
+
+- set_fact:
+    volume_name: "{{ volume_name_raw.stdout }}"
+  when: '"node-page" in inventory_hostname'
+
+- debug:
+    msg: "{{ volume_name }}"
+  when: '"node-page" in inventory_hostname'
+
+- name: Create the mount point
+  become: yes
+  file:
+    path: "/mnt/{{ inventory_hostname | regex_replace('-', '') }}"
+    state: directory
+  when: '"node-page" in inventory_hostname'
+
+- name: Mount volume read-write
+  become: yes
+  mount:
+    path: "/mnt/{{ inventory_hostname | regex_replace('-', '') }}"
+    src: "/dev/disk/by-id/{{ volume_name }}"
+    fstype: xfs
+    opts: defaults,discard
+    state: mounted
+  when: '"node-page" in inventory_hostname'
+
+- name: Symlink node-page volume from /srv/originals
+  become: yes
+  file:
+    dest: /srv/originals
+    src: "/mnt/{{ inventory_hostname | regex_replace('-', '') }}"
+    state: link
+  when: '"node-page" in inventory_hostname'
+
 - name: Make docker network for newsblurnet
   become: yes
   docker_network:
@@ -35,6 +73,7 @@
     pull: true
     networks_cli_compatible: yes
     network_mode: default
+    hostname: "{{ inventory_hostname }}"
     networks:
       - name: newsblurnet
     ports:
@@ -44,6 +83,8 @@
     restart_policy: unless-stopped
     volumes:
       - /srv/newsblur/node:/srv/node
+      - /srv/originals:/srv/originals
+      - "/mnt/{{ inventory_hostname | regex_replace('-', '') }}:/mnt/{{ inventory_hostname | regex_replace('-', '') }}"
   with_items:
     - node-socket
     - node-page
@@ -59,6 +100,7 @@
     image: "{{ item.image }}"
     state: started
     container_default_behavior: no_defaults
+    hostname: "{{ inventory_hostname }}"
     pull: true
     ports:
       - "{{ item.ports }}"
@@ -108,4 +150,4 @@
       -v /srv/newsblur:/srv/newsblur
       --network=newsblurnet
       --hostname {{ ansible_hostname }}
       newsblur/newsblur_python3 /srv/newsblur/utils/monitor_disk_usage.py $OUTPUT

View file

@@ -5,7 +5,7 @@
     name: postgres
     image: postgres:13.1
     state: started
-    hostname: postgres
+    hostname: "{{ inventory_hostname }}"
     env:
       POSTGRES_USER: newsblur
       POSTGRES_PASSWORD: newsblur
@@ -62,4 +62,4 @@
       -v /backup/:/backup/
       --network=newsblurnet
       newsblur/newsblur_python3
       /srv/newsblur/utils/backups/backup_psql.py $BUCKET

View file

@@ -2,7 +2,7 @@
 - name: Template file for prometheus
   vars:
-    monitor_server: "{{ 'staging.newsblur.com' if disable_consul_services_ie_staging is defined else 'beta.newsblur.com' }}"
+    monitor_server: "{{ 'staging.newsblur.com' if disable_consul_services_ie_staging is defined else 'newsblur.com' }}"
   template:
     src: /srv/newsblur/docker/prometheus/prometheus.consul.yml.j2
     dest: /srv/newsblur/docker/prometheus/prometheus.yml
@@ -42,4 +42,4 @@
     container_default_behavior: no_defaults
     volumes:
       - /srv/newsblur/docker/prometheus/prometheus.yml:/etc/prometheus/prometheus.yml
-      - /srv/newsblur/docker/volumes/prometheus_data:/prometheus
+      - /mnt/metrics/prometheus_data:/prometheus

View file

@@ -5,6 +5,7 @@
     name: redis
     image: redis:6.2.1
     state: started
+    hostname: "{{ inventory_hostname }}"
     ports:
       - 6379:6379
     restart_policy: unless-stopped

View file

@@ -30,6 +30,10 @@
   with_items:
     - 10.0.0.0/8
     - 172.18.0.0/16
+    - 172.17.0.0/16
+  tags:
+    - firewall
+    - ufw
 
 - name: Allow all access from inventory hosts old + new
   become: yes

View file

@@ -60,6 +60,7 @@
       DOCKERBUILD: ""
     state: started
     command: gunicorn --config /srv/newsblur/config/gunicorn_conf.py newsblur_web.wsgi:application
+    hostname: "{{ inventory_hostname }}"
     networks_cli_compatible: yes
     network_mode: default
     networks:

View file

@@ -10,7 +10,7 @@
 - import_playbook: playbooks/setup_postgres.yml
   when: "'postgres' in group_names"
 - import_playbook: playbooks/setup_mongo.yml
-  when: "'mongo' in group_names"
+  when: "'mongo' in group_names or 'mongo_analytics' in group_names"
 - import_playbook: playbooks/setup_redis.yml
   when: "'redis' in group_names"
 - import_playbook: playbooks/setup_elasticsearch.yml

View file

@@ -85,8 +85,8 @@ def add_site_load_script(request, token):
     starred_counts = {}
 
     def image_base64(image_name, path='icons/circular/'):
-        image_file = open(os.path.join(settings.MEDIA_ROOT, 'img/%s%s' % (path, image_name)))
-        return base64.b64encode(image_file.read())
+        image_file = open(os.path.join(settings.MEDIA_ROOT, 'img/%s%s' % (path, image_name)), 'rb')
+        return base64.b64encode(image_file.read()).decode('utf-8')
 
     accept_image = image_base64('newuser_icn_setup.png')
     error_image = image_base64('newuser_icn_sharewith_active.png')
@@ -500,7 +500,7 @@ def save_story(request, token=None):
 def ip_addresses(request):
     import digitalocean
-    doapi = digitalocean.Manager(token=settings.DO_TOKEN_FABRIC)
+    doapi = digitalocean.Manager(token=settings.DO_TOKEN_API_IPADDRESSES)
     droplets = doapi.get_all_droplets()
     addresses = '\n'.join([d.ip_address for d in droplets])
     return HttpResponse(addresses, content_type='text/plain')
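
As context for the image_base64 change above: under Python 3, base64.b64encode takes bytes and returns bytes, so the file must be opened in binary mode and the result decoded before it can be embedded in a template. A standalone sketch (the filename is a placeholder):

    import base64

    with open("newuser_icn_setup.png", "rb") as image_file:  # binary mode is required in py3
        encoded = base64.b64encode(image_file.read()).decode("utf-8")  # str, not bytes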

View file

@@ -12,6 +12,7 @@ from django.http import HttpResponse, HttpResponseRedirect
 from django.conf import settings
 from django.urls import reverse
 from django.contrib.auth import login as login_user
+from mongoengine.errors import ValidationError
 from apps.reader.forms import SignupForm
 from apps.reader.models import UserSubscription
 from apps.feed_import.models import OAuthToken
@@ -36,10 +37,10 @@ def opml_upload(request):
             xml_opml = file.read()
             try:
                 UploadedOPML.objects.create(user_id=request.user.pk, opml_file=xml_opml)
-            except (UnicodeDecodeError, InvalidStringData):
+            except (UnicodeDecodeError, ValidationError, InvalidStringData):
                 folders = None
                 code = -1
-                message = "There was a Unicode decode error when reading your OPML file."
+                message = "There was a Unicode decode error when reading your OPML file. Ensure it's a text file with a .opml or .xml extension. Is it a zip file?"
 
             opml_importer = OPMLImporter(xml_opml, request.user)
             try:
View file

@@ -12,9 +12,17 @@ class DbTimes(View):
             'sql_avg': MStatistics.get('latest_sql_avg'),
             'mongo_avg': MStatistics.get('latest_mongo_avg'),
             'redis_avg': MStatistics.get('latest_redis_avg'),
+            'redis_user_avg': MStatistics.get('latest_redis_user_avg'),
+            'redis_story_avg': MStatistics.get('latest_redis_story_avg'),
+            'redis_session_avg': MStatistics.get('latest_redis_session_avg'),
+            'redis_pubsub_avg': MStatistics.get('latest_redis_pubsub_avg'),
             'task_sql_avg': MStatistics.get('latest_task_sql_avg'),
             'task_mongo_avg': MStatistics.get('latest_task_mongo_avg'),
             'task_redis_avg': MStatistics.get('latest_task_redis_avg'),
+            'task_redis_user_avg': MStatistics.get('latest_task_redis_user_avg'),
+            'task_redis_story_avg': MStatistics.get('latest_task_redis_story_avg'),
+            'task_redis_session_avg': MStatistics.get('latest_task_redis_session_avg'),
+            'task_redis_pubsub_avg': MStatistics.get('latest_task_redis_pubsub_avg'),
         }
         chart_name = "db_times"
         chart_type = "counter"

View file

@@ -4,7 +4,6 @@ import html
 import redis
 import re
 import mongoengine as mongo
-from boto.ses.connection import BotoServerError
 from django.conf import settings
 from django.contrib.auth.models import User
 from django.contrib.sites.models import Site
@@ -22,7 +21,7 @@ from utils import mongoengine_fields
 from apns2.errors import BadDeviceToken, Unregistered
 from apns2.client import APNsClient
 from apns2.payload import Payload
-from bs4 import BeautifulSoup, Tag
+from bs4 import BeautifulSoup
 import urllib.parse
 
 class NotificationFrequency(enum.Enum):
@@ -311,11 +310,11 @@ class MUserFeedNotification(mongo.Document):
                               from_email='NewsBlur <%s>' % from_address,
                               to=[to_address])
         msg.attach_alternative(html, "text/html")
-        try:
-            msg.send()
-        except BotoServerError as e:
-            logging.user(usersub.user, '~BMStory notification by email error: ~FR%s' % e)
-            return
+        # try:
+        msg.send()
+        # except BotoServerError as e:
+        #     logging.user(usersub.user, '~BMStory notification by email error: ~FR%s' % e)
+        #     return
 
         logging.user(usersub.user, '~BMStory notification by email: ~FY~SB%s~SN~BM~FY/~SB%s' %
                      (story['story_title'][:50], usersub.feed.feed_title[:50]))

View file

@@ -13,6 +13,6 @@ class Command(BaseCommand):
             c = db_conn.cursor()
             connected = True
             print("Connected to postgres")
-        except OperationalError:
-            print("Waiting for db_postgres")
+        except OperationalError as e:
+            print(f"Waiting for db_postgres: {e}")
             time.sleep(5)

View file

@@ -146,15 +146,27 @@ class SQLLogToConsoleMiddleware:
             for query in queries:
                 if query.get('mongo'):
                     query['sql'] = "~FM%s: %s" % (query['mongo']['collection'], query['mongo']['query'])
+                elif query.get('db_redis'):
+                    query['sql'] = "~FC%s" % (query['db_redis']['query'])
                 elif query.get('redis'):
                     query['sql'] = "~FC%s" % (query['redis']['query'])
+                elif query.get('redis_user'):
+                    query['sql'] = "~FC%s" % (query['redis_user']['query'])
+                elif query.get('redis_story'):
+                    query['sql'] = "~FC%s" % (query['redis_story']['query'])
+                elif query.get('redis_session'):
+                    query['sql'] = "~FC%s" % (query['redis_session']['query'])
+                elif query.get('redis_pubsub'):
+                    query['sql'] = "~FC%s" % (query['redis_pubsub']['query'])
+                elif 'sql' not in query:
+                    logging.debug(" ***> Query log missing: %s" % query)
                 else:
                     query['sql'] = re.sub(r'SELECT (.*?) FROM', 'SELECT * FROM', query['sql'])
                     query['sql'] = re.sub(r'SELECT', '~FYSELECT', query['sql'])
                     query['sql'] = re.sub(r'INSERT', '~FGINSERT', query['sql'])
                     query['sql'] = re.sub(r'UPDATE', '~FY~SBUPDATE', query['sql'])
                     query['sql'] = re.sub(r'DELETE', '~FR~SBDELETE', query['sql'])
 
-            if settings.DEBUG and settings.DEBUG_QUERIES:
+            if settings.DEBUG and settings.DEBUG_QUERIES and not getattr(settings, 'DEBUG_QUERIES_SUMMARY_ONLY', False):
                 t = Template("{% for sql in sqllog %}{% if not forloop.first %} {% endif %}[{{forloop.counter}}] ~FC{{sql.time}}s~FW: {{sql.sql|safe}}{% if not forloop.last %}\n{% endif %}{% endfor %}")
                 logging.debug(t.render(Context({
                     'sqllog': queries,
@@ -164,9 +176,17 @@ class SQLLogToConsoleMiddleware:
             times_elapsed = {
                 'sql': sum([float(q['time'])
                             for q in queries if not q.get('mongo') and
-                            not q.get('redis')]),
+                            not q.get('redis_user') and
+                            not q.get('redis_story') and
+                            not q.get('redis_session') and
+                            not q.get('redis_pubsub') and
+                            not q.get('db_redis')]),
                 'mongo': sum([float(q['time']) for q in queries if q.get('mongo')]),
-                'redis': sum([float(q['time']) for q in queries if q.get('redis')]),
+                'db_redis': sum([float(q['time']) for q in queries if q.get('db_redis')]),
+                'redis_user': sum([float(q['time']) for q in queries if q.get('redis_user')]),
+                'redis_story': sum([float(q['time']) for q in queries if q.get('redis_story')]),
+                'redis_session': sum([float(q['time']) for q in queries if q.get('redis_session')]),
+                'redis_pubsub': sum([float(q['time']) for q in queries if q.get('redis_pubsub')]),
             }
             setattr(request, 'sql_times_elapsed', times_elapsed)
         else:
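
The getattr above introduces a new optional setting; a sketch of how it might be toggled in a local settings file (the flag defaults to False when unset, so existing behavior is unchanged):

    # local_settings.py (illustrative)
    DEBUG = True
    DEBUG_QUERIES = True
    DEBUG_QUERIES_SUMMARY_ONLY = True  # skip the per-query lines, keep the per-backend totals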

View file

@@ -1125,12 +1125,13 @@ def paypal_signup(sender, **kwargs):
     try:
         user = User.objects.get(username__iexact=ipn_obj.custom)
     except User.DoesNotExist:
-        user = User.objects.get(email__iexact=ipn_obj.payer_email)
-    except User.DoesNotExist:
-        logging.debug(" ---> Paypal subscription not found during flagging: %s/%s" % (
-            ipn_obj.payer_email,
-            ipn_obj.custom))
-        return {"code": -1, "message": "User doesn't exist."}
+        try:
+            user = User.objects.get(email__iexact=ipn_obj.payer_email)
+        except User.DoesNotExist:
+            logging.debug(" ---> Paypal subscription not found during flagging: %s/%s" % (
+                ipn_obj.payer_email,
+                ipn_obj.custom))
+            return {"code": -1, "message": "User doesn't exist."}
 
     logging.user(user, "~BC~SB~FBPaypal subscription signup")
     try:
@@ -1149,12 +1150,13 @@ def paypal_payment_history_sync(sender, **kwargs):
     try:
         user = User.objects.get(username__iexact=ipn_obj.custom)
     except User.DoesNotExist:
-        user = User.objects.get(email__iexact=ipn_obj.payer_email)
-    except User.DoesNotExist:
-        logging.debug(" ---> Paypal subscription not found during flagging: %s/%s" % (
-            ipn_obj.payer_email,
-            ipn_obj.custom))
-        return {"code": -1, "message": "User doesn't exist."}
+        try:
+            user = User.objects.get(email__iexact=ipn_obj.payer_email)
+        except User.DoesNotExist:
+            logging.debug(" ---> Paypal subscription not found during flagging: %s/%s" % (
+                ipn_obj.payer_email,
+                ipn_obj.custom))
+            return {"code": -1, "message": "User doesn't exist."}
 
     logging.user(user, "~BC~SB~FBPaypal subscription payment")
     try:
@@ -1168,13 +1170,13 @@ def paypal_payment_was_flagged(sender, **kwargs):
     try:
         user = User.objects.get(username__iexact=ipn_obj.custom)
     except User.DoesNotExist:
-        if ipn_obj.payer_email:
+        try:
             user = User.objects.get(email__iexact=ipn_obj.payer_email)
         except User.DoesNotExist:
             logging.debug(" ---> Paypal subscription not found during flagging: %s/%s" % (
                 ipn_obj.payer_email,
                 ipn_obj.custom))
             return {"code": -1, "message": "User doesn't exist."}
 
     try:
         user.profile.setup_premium_history()
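
Since the original rendering of this diff is dense, here is the shape of the corrected lookup that all three handlers now share, as a sketch (the helper name is illustrative, not part of the diff):

    from django.contrib.auth.models import User

    def find_paypal_user(ipn_obj):
        """Resolve a Paypal IPN to a user: username first, payer email as fallback."""
        try:
            return User.objects.get(username__iexact=ipn_obj.custom)
        except User.DoesNotExist:
            try:
                return User.objects.get(email__iexact=ipn_obj.payer_email)
            except User.DoesNotExist:
                return None  # callers log the miss and bail out, as above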

View file

@@ -154,7 +154,7 @@ class PushSubscription(models.Model):
         if needs_update:
             logging.debug(u'   ---> [%-30s] ~FR~BKUpdating PuSH hub/topic: %s / %s' % (
-                unicode(self.feed)[:30], hub_url, self_url))
+                self.feed, hub_url, self_url))
             expiration_time = self.lease_expires - datetime.now()
             seconds = expiration_time.days*86400 + expiration_time.seconds
             try:
@@ -163,7 +163,7 @@
                     lease_seconds=seconds)
             except TimeoutError:
                 logging.debug(u'   ---> [%-30s] ~FR~BKTimed out updating PuSH hub/topic: %s / %s' % (
-                    unicode(self.feed)[:30], hub_url, self_url))
+                    self.feed, hub_url, self_url))
 
     def __str__(self):
@@ -173,6 +173,3 @@
             verified = u'unverified'
         return u'to %s on %s: %s' % (
             self.topic, self.hub, verified)
-
-    def __str__(self):
-        return str(unicode(self))

View file

@@ -32,7 +32,7 @@ def push_callback(request, push_id):
         subscription.save()
         subscription.feed.setup_push()
 
-        logging.debug('   ---> [%-30s] [%s] ~BBVerified PuSH' % (unicode(subscription.feed)[:30], subscription.feed_id))
+        logging.debug('   ---> [%-30s] [%s] ~BBVerified PuSH' % (subscription.feed, subscription.feed_id))
 
         verified.send(sender=subscription)
@@ -46,7 +46,7 @@
         latest_push_date = datetime.datetime.strptime(latest_push, '%Y-%m-%d %H:%M:%S')
         latest_push_date_delta = datetime.datetime.now() - latest_push_date
         if latest_push_date > datetime.datetime.now() - datetime.timedelta(minutes=1):
-            logging.debug('   ---> [%-30s] ~SN~FBSkipping feed fetch, pushed %s seconds ago' % (unicode(subscription.feed)[:30], latest_push_date_delta.seconds))
+            logging.debug('   ---> [%-30s] ~SN~FBSkipping feed fetch, pushed %s seconds ago' % (subscription.feed, latest_push_date_delta.seconds))
             return HttpResponse('Slow down, you just pushed %s seconds ago...' % latest_push_date_delta.seconds, status=429)
 
     # XXX TODO: Optimize this by removing feedparser. It just needs to find out
@@ -62,7 +62,7 @@
             MFetchHistory.add(feed_id=subscription.feed_id,
                               fetch_type='push')
         else:
-            logging.debug('   ---> [%-30s] ~FBSkipping feed fetch, no actives: %s' % (unicode(subscription.feed)[:30], subscription.feed))
+            logging.debug('   ---> [%-30s] ~FBSkipping feed fetch, no actives: %s' % (subscription.feed, subscription.feed))
 
     return HttpResponse('OK')
 
 return Http404

View file

@@ -22,7 +22,8 @@ urlpatterns = [
     url(r'^starred_stories', views.load_starred_stories, name='load-starred-stories'),
     url(r'^read_stories', views.load_read_stories, name='load-read-stories'),
     url(r'^starred_story_hashes', views.starred_story_hashes, name='starred-story-hashes'),
-    url(r'^starred_rss/(?P<user_id>\d+)/(?P<secret_token>\w+)/(?P<tag_slug>[-\w]+)?/?$', views.starred_stories_rss_feed, name='starred-stories-rss-feed'),
+    url(r'^starred_rss/(?P<user_id>\d+)/(?P<secret_token>\w+)/?$', views.starred_stories_rss_feed, name='starred-stories-rss-feed'),
+    url(r'^starred_rss/(?P<user_id>\d+)/(?P<secret_token>\w+)/(?P<tag_slug>[-\w]+)?/?$', views.starred_stories_rss_feed_tag, name='starred-stories-rss-feed-tag'),
     url(r'^folder_rss/(?P<user_id>\d+)/(?P<secret_token>\w+)/(?P<unread_filter>\w+)/(?P<folder_slug>[-\w]+)?/?$', views.folder_rss_feed, name='folder-rss-feed'),
     url(r'^unread_story_hashes', views.unread_story_hashes, name='unread-story-hashes'),
     url(r'^starred_counts', views.starred_counts, name='starred-counts'),
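
With this split, the untagged route keeps the old view name while the tag-filtered route gets a new one; a sketch of how the two names resolve (IDs and tokens are placeholders, and the URL prefix depends on how this urls.py is included):

    from django.urls import reverse

    reverse('starred-stories-rss-feed', kwargs=dict(user_id=1, secret_token='abc123'))
    # -> .../starred_rss/1/abc123
    reverse('starred-stories-rss-feed-tag', kwargs=dict(user_id=1, secret_token='abc123', tag_slug='python'))
    # -> .../starred_rss/1/abc123/python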

View file

@@ -1,6 +1,5 @@
 import datetime
 import time
-import boto
 import redis
 import requests
 import random
@@ -77,7 +76,6 @@ ALLOWED_SUBDOMAINS = [
     'debug',
     'debug3',
     'nb',
-    'old',
 ]
 
 def get_subdomain(request):
@@ -891,9 +889,9 @@ def load_feed_page(request, feed_id):
     if settings.BACKED_BY_AWS['pages_on_s3'] and feed.s3_page:
         if settings.PROXY_S3_PAGES:
-            key = settings.S3_CONN.get_bucket(settings.S3_PAGES_BUCKET_NAME).get_key(feed.s3_pages_key)
+            key = settings.S3_CONN.Bucket(settings.S3_PAGES_BUCKET_NAME).Object(key=feed.s3_pages_key)
             if key:
-                compressed_data = key.get_contents_as_string()
+                compressed_data = key.get()["Body"]
                 response = HttpResponse(compressed_data, content_type="text/html; charset=utf-8")
                 response['Content-Encoding'] = 'gzip'
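
Note on the boto3 change above: `Object.get()["Body"]` returns a botocore `StreamingBody`, not bytes; the unambiguous way to get the raw gzipped payload is to call `.read()` on it, as the icon importer hunk further down does. A minimal sketch of the read path, with placeholder bucket and key names:

    # Minimal sketch of the boto3 S3 read path; bucket/key are placeholders.
    import boto3

    s3 = boto3.resource("s3")
    obj = s3.Bucket("pages-bucket").Object(key="feed/12345.html.gz")
    body = obj.get()["Body"]       # botocore StreamingBody, a file-like object
    compressed_data = body.read()  # bytes of the gzipped page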
@@ -1070,38 +1068,50 @@ def starred_story_hashes(request):
     return dict(starred_story_hashes=story_hashes)
 
-def starred_stories_rss_feed(request, user_id, secret_token, tag_slug):
+def starred_stories_rss_feed(request, user_id, secret_token):
+    return starred_stories_rss_feed_tag(request, user_id, secret_token, tag_slug=None)
+
+def starred_stories_rss_feed_tag(request, user_id, secret_token, tag_slug):
     try:
         user = User.objects.get(pk=user_id)
     except User.DoesNotExist:
         raise Http404
-    try:
-        tag_counts = MStarredStoryCounts.objects.get(user_id=user_id, slug=tag_slug)
-    except MStarredStoryCounts.MultipleObjectsReturned:
-        tag_counts = MStarredStoryCounts.objects(user_id=user_id, slug=tag_slug).first()
-    except MStarredStoryCounts.DoesNotExist:
-        raise Http404
+    if tag_slug:
+        try:
+            tag_counts = MStarredStoryCounts.objects.get(user_id=user_id, slug=tag_slug)
+        except MStarredStoryCounts.MultipleObjectsReturned:
+            tag_counts = MStarredStoryCounts.objects(user_id=user_id, slug=tag_slug).first()
+        except MStarredStoryCounts.DoesNotExist:
+            raise Http404
+    else:
+        _, starred_count = MStarredStoryCounts.user_counts(user.pk, include_total=True)
     data = {}
-    data['title'] = "Saved Stories - %s" % tag_counts.tag
+    if tag_slug:
+        data['title'] = "Saved Stories - %s" % tag_counts.tag
+    else:
+        data['title'] = "Saved Stories"
     data['link'] = "%s%s" % (
         settings.NEWSBLUR_URL,
         reverse('saved-stories-tag', kwargs=dict(tag_name=tag_slug)))
-    data['description'] = "Stories saved by %s on NewsBlur with the tag \"%s\"." % (user.username,
-                                                                                    tag_counts.tag)
+    if tag_slug:
+        data['description'] = "Stories saved by %s on NewsBlur with the tag \"%s\"." % (user.username,
+                                                                                        tag_counts.tag)
+    else:
+        data['description'] = "Stories saved by %s on NewsBlur." % (user.username)
     data['lastBuildDate'] = datetime.datetime.utcnow()
     data['generator'] = 'NewsBlur - %s' % settings.NEWSBLUR_URL
     data['docs'] = None
     data['author_name'] = user.username
     data['feed_url'] = "%s%s" % (
         settings.NEWSBLUR_URL,
-        reverse('starred-stories-rss-feed',
+        reverse('starred-stories-rss-feed-tag',
                 kwargs=dict(user_id=user_id, secret_token=secret_token, tag_slug=tag_slug)),
     )
     rss = feedgenerator.Atom1Feed(**data)
-    if not tag_counts.tag:
+    if not tag_slug or not tag_counts.tag:
         starred_stories = MStarredStory.objects(
             user_id=user.pk
         ).order_by('-starred_date').limit(25)
@@ -1131,8 +1141,8 @@ def starred_stories_rss_feed(request, user_id, secret_token, tag_slug):
     logging.user(request, "~FBGenerating ~SB%s~SN's saved story RSS feed (%s, %s stories): ~FM%s" % (
         user.username,
-        tag_counts.tag,
-        tag_counts.count,
+        tag_counts.tag if tag_slug else "[All stories]",
+        tag_counts.count if tag_slug else starred_count,
         request.META.get('HTTP_USER_AGENT', "")[:24]
     ))
     return HttpResponse(rss.writeString('utf-8'), content_type='application/rss+xml')
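
For reference, the view above builds a dict and expands it into Django's `feedgenerator.Atom1Feed`. A self-contained sketch of that API, with illustrative titles and URLs only:

    # Sketch of the django.utils.feedgenerator API the view relies on; all
    # titles and URLs here are illustrative, not NewsBlur's.
    from django.utils import feedgenerator

    feed = feedgenerator.Atom1Feed(
        title="Saved Stories",
        link="https://example.com/saved/",
        description="Stories saved by a user.",
        author_name="example-user",
        feed_url="https://example.com/reader/starred_rss/1/token/",
    )
    feed.add_item(title="A story",
                  link="https://example.com/story/1",
                  description="Story body...")
    xml = feed.writeString("utf-8")  # serialized Atom document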
@@ -2693,11 +2703,11 @@ def send_story_email(request):
                                        cc=cc,
                                        headers={'Reply-To': "%s <%s>" % (from_name, from_email)})
     msg.attach_alternative(html, "text/html")
-    try:
-        msg.send()
-    except boto.ses.connection.BotoServerError as e:
-        code = -1
-        message = "Email error: %s" % str(e)
+    # try:
+    msg.send()
+    # except boto.ses.connection.BotoServerError as e:
+    #     code = -1
+    #     message = "Email error: %s" % str(e)
     share_user_profile.save_sent_email()

View file

@@ -15,7 +15,7 @@ import base64
 import http.client
 from PIL import BmpImagePlugin, PngImagePlugin, Image
 from socket import error as SocketError
-from boto.s3.key import Key
+import boto3
 from io import BytesIO
 from django.conf import settings
 from django.http import HttpResponse
@@ -106,12 +106,14 @@ class IconImporter(object):
     def save_to_s3(self, image_str):
         expires = datetime.datetime.now() + datetime.timedelta(days=60)
         expires = expires.strftime("%a, %d %b %Y %H:%M:%S GMT")
-        k = Key(settings.S3_CONN.get_bucket(settings.S3_ICONS_BUCKET_NAME))
-        k.key = self.feed.s3_icons_key
-        k.set_metadata('Content-Type', 'image/png')
-        k.set_metadata('Expires', expires)
-        k.set_contents_from_string(base64.b64decode(image_str))
-        k.set_acl('public-read')
+        base64.b64decode(image_str)
+        settings.S3_CONN.Object(settings.S3_ICONS_BUCKET_NAME,
+                                self.feed.s3_icons_key).put(Body=base64.b64decode(image_str),
+                                                            ExtraArgs={
+                                                                'Content-Type': 'image/png',
+                                                                'Expires': expires,
+                                                                'ACL': 'public-read',
+                                                            })
         self.feed.s3_icon = True
         self.feed.save()
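
Caution on the hunk above: in boto3, `ExtraArgs` is a parameter of the managed transfer methods (`upload_file`/`upload_fileobj`), not of `Object.put()`, which takes `ContentType`, `Expires`, and `ACL` as top-level keyword arguments. A hedged sketch of both forms, with placeholder bucket and key names:

    # Both boto3 upload styles; bucket and key names are placeholders.
    import io
    import boto3

    s3 = boto3.resource("s3")

    # Direct put: metadata fields are top-level keyword arguments.
    s3.Object("icons-bucket", "feed/12345.png").put(
        Body=b"...png bytes...",
        ContentType="image/png",
        ACL="public-read")

    # Managed transfer: the same fields ride along inside ExtraArgs.
    s3.meta.client.upload_fileobj(
        io.BytesIO(b"...png bytes..."),
        "icons-bucket", "feed/12345.png",
        ExtraArgs={"ContentType": "image/png", "ACL": "public-read"})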
@@ -217,8 +219,8 @@ class IconImporter(object):
             except requests.ConnectionError:
                 pass
         elif settings.BACKED_BY_AWS.get('pages_on_s3') and self.feed.s3_page:
-            key = settings.S3_CONN.get_bucket(settings.S3_PAGES_BUCKET_NAME).get_key(self.feed.s3_pages_key)
-            compressed_content = key.get_contents_as_string()
+            key = settings.S3_CONN.Bucket(settings.S3_PAGES_BUCKET_NAME).Object(key=self.feed.s3_pages_key)
+            compressed_content = key.get()["Body"].read()
             stream = BytesIO(compressed_content)
             gz = gzip.GzipFile(fileobj=stream)
             try:

View file

@@ -1,8 +0,0 @@
-from django.core.management.base import BaseCommand
-from apps.rss_feeds.tasks import BackupMongo
-
-class Command(BaseCommand):
-    option_list = BaseCommand.option_list
-
-    def handle(self, *args, **options):
-        BackupMongo().apply()

View file

@@ -2827,6 +2827,8 @@ class MStory(mongo.Document):
                 continue
             if image_url and len(image_url) >= 1024:
                 continue
+            if 'feedburner.com' in image_url:
+                continue
             image_url = urllib.parse.urljoin(self.story_permalink, image_url)
             image_urls.append(image_url)

View file

@@ -10,7 +10,6 @@ from django.contrib.sites.models import Site
 from django.utils.encoding import smart_bytes
 from mongoengine.queryset import NotUniqueError
 from socket import error as SocketError
-from boto.s3.key import Key
 from django.conf import settings
 from django.utils.text import compress_string as compress_string_with_gzip
 from utils import log as logging
@@ -323,13 +322,16 @@ class PageImporter(object):
     def save_page_s3(self, html):
-        k = Key(settings.S3_CONN.get_bucket(settings.S3_PAGES_BUCKET_NAME))
-        k.key = self.feed.s3_pages_key
-        k.set_metadata('Content-Encoding', 'gzip')
-        k.set_metadata('Content-Type', 'text/html')
-        k.set_metadata('Access-Control-Allow-Origin', '*')
-        k.set_contents_from_string(compress_string_with_gzip(html.encode('utf-8')))
-        k.set_acl('public-read')
+        s3_object = settings.S3_CONN.Object(settings.S3_PAGES_BUCKET_NAME,
+                                            self.feed.s3_pages_key)
+        s3_object.put(Body=compress_string_with_gzip(html.encode('utf-8')),
+                      ExtraArgs={
+                          'Content-Type': 'text/html',
+                          'Content-Encoding': 'gzip',
+                          'Access-Control-Allow-Origin': '*',
+                          'Expires': expires,
+                          'ACL': 'public-read',
+                      })
         try:
             feed_page = MFeedPage.objects.get(feed_id=self.feed.pk)
@@ -345,8 +347,7 @@ class PageImporter(object):
         return True
 
     def delete_page_s3(self):
-        k = Key(settings.S3_CONN.get_bucket(settings.S3_PAGES_BUCKET_NAME))
-        k.key = self.feed.s3_pages_key
+        k = settings.S3_CONN.Bucket(settings.S3_PAGES_BUCKET_NAME).Object(key=self.feed.s3_pages_key)
         k.delete()
         self.feed.s3_page = False

View file

@@ -6,7 +6,6 @@ import redis
 from newsblur_web.celeryapp import app
 from celery.exceptions import SoftTimeLimitExceeded
 from utils import log as logging
-from utils import s3_utils as s3
 from django.conf import settings
 from apps.profile.middleware import DBProfilerMiddleware
 from utils.mongo_raw_log_middleware import MongoDumpMiddleware
@@ -189,33 +188,6 @@ def PushFeeds(feed_id, xml):
     if feed:
         feed.update(options=options)
 
-@app.task(name='backup-mongo', ignore_result=True)
-def BackupMongo():
-    COLLECTIONS = "classifier_tag classifier_author classifier_feed classifier_title userstories starred_stories shared_stories category category_site sent_emails social_profile social_subscription social_services statistics feedback"
-    date = time.strftime('%Y-%m-%d-%H-%M')
-    collections = COLLECTIONS.split(' ')
-    db_name = 'newsblur'
-    dir_name = 'backup_mongo_%s' % date
-    filename = '%s.tgz' % dir_name
-    os.mkdir(dir_name)
-    for collection in collections:
-        cmd = 'mongodump --db %s --collection %s -o %s' % (db_name, collection, dir_name)
-        logging.debug(' ---> ~FMDumping ~SB%s~SN: %s' % (collection, cmd))
-        os.system(cmd)
-    cmd = 'tar -jcf %s %s' % (filename, dir_name)
-    os.system(cmd)
-    logging.debug(' ---> ~FRUploading ~SB~FM%s~SN~FR to S3...' % filename)
-    s3.save_file_in_s3(filename)
-    shutil.rmtree(dir_name)
-    os.remove(filename)
-    logging.debug(' ---> ~FRFinished uploading ~SB~FM%s~SN~FR to S3.' % filename)
 @app.task()
 def ScheduleImmediateFetches(feed_ids, user_id=None):
     from apps.rss_feeds.models import Feed

View file

@@ -1,11 +1,11 @@
 import requests
 import urllib3
 import zlib
-from vendor import readability
 from simplejson.decoder import JSONDecodeError
 from requests.packages.urllib3.exceptions import LocationParseError
 from socket import error as SocketError
 from mongoengine.queryset import NotUniqueError
+from vendor.readability import readability
 from lxml.etree import ParserError
 from utils import log as logging
 from utils.feed_functions import timelimit, TimeoutError
@@ -57,6 +57,7 @@ class TextImporter:
         if not use_mercury or not results:
             logging.user(self.request, "~SN~FRFailed~FY to fetch ~FGoriginal text~FY with Mercury, trying readability...", warn_color=False)
             results = self.fetch_manually(skip_save=skip_save, return_document=return_document)
+
         return results
@@ -106,10 +107,18 @@ class TextImporter:
         if not resp:
             return
 
+        @timelimit(5)
+        def extract_text(resp):
+            try:
+                text = resp.text
+            except (LookupError, TypeError):
+                text = resp.content
+            return text
+
         try:
-            text = resp.text
-        except (LookupError, TypeError):
-            text = resp.content
+            text = extract_text(resp)
+        except TimeoutError:
+            logging.user(self.request, "~SN~FRFailed~FY to fetch ~FGoriginal text~FY: timed out on resp.text")
+            return
 
         # if self.debug:
         #     logging.user(self.request, "~FBOriginal text's website: %s" % text)
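
The hunk above moves the `resp.text` access into a nested function guarded by `@timelimit(5)`, because charset detection on a large response can hang. `timelimit` is imported from `utils.feed_functions`; here is a thread-based sketch with the same shape (the real implementation may differ):

    # Hedged sketch of a timelimit decorator like utils.feed_functions.timelimit;
    # NewsBlur's actual implementation may differ.
    import functools
    import threading

    class TimeoutError(Exception):
        pass

    def timelimit(seconds):
        def decorator(func):
            @functools.wraps(func)
            def wrapper(*args, **kwargs):
                result = {}

                def target():
                    result["value"] = func(*args, **kwargs)

                thread = threading.Thread(target=target, daemon=True)
                thread.start()
                thread.join(seconds)
                # In this simplified version, an exception inside func also
                # surfaces as a timeout.
                if "value" not in result:
                    raise TimeoutError("%s timed out after %ss" % (func.__name__, seconds))
                return result["value"]
            return wrapper
        return decorator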
@@ -227,6 +236,8 @@ class TextImporter:
             headers["content-type"] = "application/json"
             headers["x-api-key"] = mercury_api_key
         domain = Site.objects.get_current().domain
+        if settings.DOCKERBUILD:
+            domain = 'haproxy'
         url = f"https://{domain}/rss_feeds/original_text_fetcher?url={url}"
         try:

View file

@@ -161,6 +161,9 @@ class MSocialProfile(mongo.Document):
             profile = cls.objects.create(user_id=user_id)
             profile.save()
+        if not profile.username:
+            profile.save()
+
         return profile
     
     @property
@@ -171,6 +174,8 @@ class MSocialProfile(mongo.Document):
         return None
     
     def save(self, *args, **kwargs):
+        if not self.username:
+            self.import_user_fields()
         if not self.subscription_count:
             self.count_follows(skip_save=True)
         if self.bio and len(self.bio) > MSocialProfile.bio.max_length:
@@ -433,6 +438,11 @@ class MSocialProfile(mongo.Document):
             return [u for u in self.follower_user_ids if u != self.user_id]
         return self.follower_user_ids
     
+    def import_user_fields(self):
+        user = User.objects.get(pk=self.user_id)
+        self.username = user.username
+        self.email = user.email
+
     def count_follows(self, skip_save=False):
         self.subscription_count = UserSubscription.objects.filter(user__pk=self.user_id).count()
         self.shared_stories_count = MSharedStory.objects.filter(user_id=self.user_id).count()

View file

@@ -29,6 +29,7 @@ urlpatterns = [
     # url(r'^remove_like_reply/?$', views.remove_like_reply, name='social-remove-like-reply'),
     url(r'^comment/(?P<comment_id>\w+)/reply/(?P<reply_id>\w+)/?$', views.comment_reply, name='social-comment-reply'),
     url(r'^comment/(?P<comment_id>\w+)/?$', views.comment, name='social-comment'),
+    url(r'^rss/(?P<user_id>\d+)/?$', views.shared_stories_rss_feed, name='shared-stories-rss-feed'),
     url(r'^rss/(?P<user_id>\d+)/(?P<username>[-\w]+)?$', views.shared_stories_rss_feed, name='shared-stories-rss-feed'),
     url(r'^stories/(?P<user_id>\w+)/(?P<username>[-\w]+)?/?$', views.load_social_stories, name='load-social-stories'),
     url(r'^page/(?P<user_id>\w+)/(?P<username>[-\w]+)?/?$', views.load_social_page, name='load-social-page'),

View file

@@ -1315,7 +1315,7 @@ def shared_stories_rss_feed_noid(request):
     return index
 
 @ratelimit(minutes=1, requests=5)
-def shared_stories_rss_feed(request, user_id, username):
+def shared_stories_rss_feed(request, user_id, username=None):
     try:
         user = User.objects.get(pk=user_id)
     except User.DoesNotExist:

View file

@@ -198,12 +198,26 @@ class MStatistics(mongo.Document):
         r = redis.Redis(connection_pool=settings.REDIS_STATISTICS_POOL)
         db_times = {}
         latest_db_times = {}
-        for db in ['sql', 'mongo', 'redis', 'task_sql', 'task_mongo', 'task_redis']:
+        for db in ['sql',
+                   'mongo',
+                   'redis',
+                   'redis_user',
+                   'redis_story',
+                   'redis_session',
+                   'redis_pubsub',
+                   'task_sql',
+                   'task_mongo',
+                   'task_redis',
+                   'task_redis_user',
+                   'task_redis_story',
+                   'task_redis_session',
+                   'task_redis_pubsub',
+                   ]:
             db_times[db] = []
             for hour in range(24):
                 start_hours_ago = now - datetime.timedelta(hours=hour+1)
                 pipe = r.pipeline()
                 for m in range(60):
                     minute = start_hours_ago + datetime.timedelta(minutes=m)
@@ -239,9 +253,17 @@ class MStatistics(mongo.Document):
             ('latest_sql_avg', latest_db_times['sql']),
             ('latest_mongo_avg', latest_db_times['mongo']),
             ('latest_redis_avg', latest_db_times['redis']),
+            ('latest_redis_user_avg', latest_db_times['redis_user']),
+            ('latest_redis_story_avg', latest_db_times['redis_story']),
+            ('latest_redis_session_avg', latest_db_times['redis_session']),
+            ('latest_redis_pubsub_avg', latest_db_times['redis_pubsub']),
             ('latest_task_sql_avg', latest_db_times['task_sql']),
             ('latest_task_mongo_avg', latest_db_times['task_mongo']),
             ('latest_task_redis_avg', latest_db_times['task_redis']),
+            ('latest_task_redis_user_avg', latest_db_times['task_redis_user']),
+            ('latest_task_redis_story_avg', latest_db_times['task_redis_story']),
+            ('latest_task_redis_session_avg', latest_db_times['task_redis_session']),
+            ('latest_task_redis_pubsub_avg', latest_db_times['task_redis_pubsub']),
         )
         for key, value in values:
             cls.objects(key=key).update_one(upsert=True, set__key=key, set__value=value)
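
Each database in the expanded list gets 24 hours x 60 per-minute samples; batching the 60 reads per hour through a Redis pipeline keeps it to 24 round trips per database instead of 1,440. A sketch of the pattern, with a hypothetical key format:

    # Sketch of the pipelined per-minute read; the key format
    # 'DB:<name>:<YYYY-mm-dd-HH-MM>' is hypothetical, not NewsBlur's.
    import datetime
    import redis

    r = redis.Redis()
    now = datetime.datetime.now()
    for db in ['sql', 'mongo', 'redis_user', 'redis_story']:
        samples = []
        for hour in range(24):
            start = now - datetime.timedelta(hours=hour + 1)
            pipe = r.pipeline()  # batch 60 GETs into a single round trip
            for m in range(60):
                minute = start + datetime.timedelta(minutes=m)
                pipe.get('DB:%s:%s' % (db, minute.strftime('%Y-%m-%d-%H-%M')))
            samples.extend(float(v) for v in pipe.execute() if v is not None)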

View file

View file

@@ -28,10 +28,10 @@ class MongoReplicaSetLag(MuninMongoDBPlugin):
             member_state = member['state']
             optime = member['optime']
             if member_state == PRIMARY_STATE:
-                primary_optime = optime.time
+                primary_optime = optime['ts'].time
             elif member_state == SECONDARY_STATE:
-                if not oldest_secondary_optime or optime.time < oldest_secondary_optime:
-                    oldest_secondary_optime = optime.time
+                if not oldest_secondary_optime or optime['ts'].time < oldest_secondary_optime:
+                    oldest_secondary_optime = optime['ts'].time
         if not primary_optime or not oldest_secondary_optime:
             raise Exception("Replica set is not healthy")
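
The `optime.time` to `optime['ts'].time` change tracks MongoDB's newer `replSetGetStatus` format, where each member's `optime` is a subdocument and `ts` is a `bson.Timestamp` whose `.time` attribute is seconds since the epoch. A sketch of the lag computation against a placeholder URI:

    # Sketch of the replica-set lag computation; the URI is a placeholder.
    import pymongo

    PRIMARY_STATE, SECONDARY_STATE = 1, 2

    client = pymongo.MongoClient("mongodb://localhost")
    status = client.admin.command("replSetGetStatus")

    primary_optime = None
    oldest_secondary_optime = None
    for member in status["members"]:
        optime = member["optime"]
        if member["state"] == PRIMARY_STATE:
            primary_optime = optime["ts"].time
        elif member["state"] == SECONDARY_STATE:
            if not oldest_secondary_optime or optime["ts"].time < oldest_secondary_optime:
                oldest_secondary_optime = optime["ts"].time

    lag = primary_optime - oldest_secondary_optime  # seconds behind primary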

View file

@@ -7,9 +7,8 @@ attrs==21.1.0
 beautifulsoup4==4.9.3
 billiard==3.6.4.0
 bleach==3.2.1
-boto==2.49.0
-boto3==1.17.67
-botocore==1.20.67
+boto3==1.18.12
+botocore==1.21.12
 celery==4.4.7
 certifi==2020.12.5
 cffi==1.14.5
@@ -38,7 +37,7 @@ dopy==0.3.7
 elasticsearch==7.12.1
 factory-boy==3.2.0
 Faker==8.8.2
-feedparser==6.0.2
+feedparser>=6,<7
 filelock==3.0.12
 Flask==1.1.2
 Flask-BasicAuth==0.2.0
@@ -99,12 +98,11 @@ pytz==2020.4
 PyYAML==5.3.1
 pyzmq==22.0.3
 raven==6.10.0
-# readability-lxml==0.8.1.1 # Was vendorized due to noscript
+# readability-lxml==0.8.1.1 # Vendorized again due to 0.8.1.1 not out yet
 redis==3.5.3
 requests==2.25.0
 requests-oauthlib==1.3.0
-s3transfer==0.4.2
 scipy==1.5.4
-seacucumber==1.5.2
 sentry-sdk==1.0.0
 sgmllib3k==1.0.0
 simplejson==3.17.2

View file

@@ -21,8 +21,8 @@ services:
       nofile:
         soft: 10000
         hard: 10000
-    expose:
-      - 8000
+    ports:
+      - 8000:8000
     # only use gunicorn if the TEST env variable is not "True"
     entrypoint: /bin/sh -c newsblur_web/entrypoint.sh
     volumes:
@@ -120,7 +120,7 @@ services:
 
   db_mongo:
     container_name: db_mongo
-    image: mongo:3.6
+    image: mongo:4.0
     restart: unless-stopped
     ports:
       - 29019:29019

View file

@@ -63,6 +63,7 @@ frontend public
     use_backend node_favicon if { path_beg /rss_feeds/icon/ }
     use_backend node_text if { path_beg /rss_feeds/original_text_fetcher }
     use_backend node_images if { hdr_end(host) -i imageproxy.newsblur.com }
+    use_backend node_images if { hdr_end(host) -i imageproxy2.newsblur.com }
     use_backend node_page if { path_beg /original_page/ }
     use_backend blog if { hdr_end(host) -i blog.newsblur.com }
     use_backend blog if { hdr_end(host) -i blog2.newsblur.com }
@@ -171,14 +172,14 @@ backend mongo
     {% for host in groups.mongo %}
     server {{host}} {{host}}.node.nyc1.consul:5579
     {% endfor %}
 
-{#
 backend mongo_analytics
-    option httpchk GET /db_check/mongo
+    option httpchk GET /db_check/mongo_analytics
     default-server check inter 2000ms resolvers consul resolve-prefer ipv4 resolve-opts allow-dup-ip init-addr none
     {% for host in groups.mongo_analytics %}
     server {{host}} {{host}}.node.nyc1.consul:5579
     {% endfor %}
-#}
 
 backend db_redis_user
     option httpchk GET /db_check/redis
     server db-redis-user db-redis-user.node.nyc1.consul:5579 check inter 2000ms resolvers consul resolve-opts allow-dup-ip init-addr none

36
docker/mongo/backup_mongo.sh Executable file
View file

@@ -0,0 +1,36 @@
+#!/usr/bin/env bash
+
+collections=(
+    classifier_tag
+    classifier_author
+    classifier_feed
+    classifier_title
+    # shared_stories
+    category
+    category_site
+    sent_emails
+    social_profile
+    social_subscription
+    social_services
+    statistics
+    user_search
+    feedback
+)
+
+for collection in ${collections[@]}; do
+    now=$(date '+%Y-%m-%d-%H-%M')
+    echo "---> Dumping $collection - ${now}"
+    docker exec -it mongo mongodump -d newsblur -c $collection -o /backup/backup_mongo
+done;
+
+echo " ---> Compressing backup_mongo.tgz"
+tar -zcf /opt/mongo/newsblur/backup/backup_mongo.tgz /opt/mongo/newsblur/backup/backup_mongo
+
+echo " ---> Uploading backups to S3"
+docker run --rm -v /srv/newsblur:/srv/newsblur -v /opt/mongo/newsblur/backup/:/opt/mongo/newsblur/backup/ --network=newsblurnet newsblur/newsblur_python3:latest python /srv/newsblur/utils/backups/backup_mongo.py
+
+# Don't delete backup since the backup_mongo.py script will rm them
+## rm /opt/mongo/newsblur/backup/backup_mongo_${now}.tgz
+## rm /opt/mongo/newsblur/backup/backup_mongo_${now}
+
+echo " ---> Finished uploading backups to S3: backup_mongo.tgz"

View file

@@ -3,7 +3,7 @@ global:
 scrape_configs:
   - job_name: 'node_exporter'
     consul_sd_configs:
-      - server: 'consul.newsblur.com'
+      - server: 'consul.service.nyc1.consul:8500'
        services: ['node-exporter']
    relabel_configs:
      - source_labels: ['__meta_consul_node']
@@ -12,7 +12,7 @@ scrape_configs:
  - job_name: 'mongo_exporter'
    consul_sd_configs:
-      - server: 'consul.newsblur.com'
+      - server: 'consul.service.nyc1.consul:8500'
        services: ['mongo-exporter']
    relabel_configs:
      - source_labels: ['__meta_consul_node']
@@ -21,7 +21,7 @@ scrape_configs:
  - job_name: 'postgres_exporter'
    consul_sd_configs:
-      - server: 'consul.newsblur.com'
+      - server: 'consul.service.nyc1.consul:8500'
        services: ['postgres-exporter']
    relabel_configs:
      - source_labels: ['__meta_consul_node']
@@ -31,7 +31,7 @@ scrape_configs:
  ## config for the multiple Redis targets that the exporter will scrape
  - job_name: 'redis_exporter'
    consul_sd_configs:
-      - server: 'consul.newsblur.com:'
+      - server: 'consul.service.nyc1.consul:8500'
        services: ['redis-exporter']
    relabel_configs:
      - source_labels: ['__meta_consul_service_id']
@@ -114,4 +114,4 @@ scrape_configs:
    static_configs:
      - targets: ['{{ monitor_server }}']
    metrics_path: /monitor/users
    scheme: https

View file

@@ -8,6 +8,16 @@ import elasticsearch
 from newsblur_web import settings
 
+import sentry_sdk
+from flask import Flask
+from sentry_sdk.integrations.flask import FlaskIntegration
+
+sentry_sdk.init(
+    dsn=settings.FLASK_SENTRY_DSN,
+    integrations=[FlaskIntegration()],
+    traces_sample_rate=1.0,
+)
+
 app = Flask(__name__)
 
 PRIMARY_STATE = 1
@@ -19,7 +29,7 @@ def db_check_postgres():
         settings.DATABASES['default']['NAME'],
         settings.DATABASES['default']['USER'],
         settings.DATABASES['default']['PASSWORD'],
-        'postgres',
+        'db-postgres.service.nyc1.consul',
         settings.DATABASES['default']['PORT'],
     )
     try:
@@ -67,18 +77,23 @@ def db_check_mysql():
 @app.route("/db_check/mongo")
 def db_check_mongo():
     try:
-        client = pymongo.MongoClient('mongodb://mongo')
+        # The `mongo` hostname below is a reference to the newsblurnet docker network, where 172.18.0.0/16 is defined
+        client = pymongo.MongoClient(f"mongodb://{settings.MONGO_DB['username']}:{settings.MONGO_DB['password']}@{settings.SERVER_NAME}/?authSource=admin")
         db = client.newsblur
     except:
         abort(503)
     try:
-        stories = db.stories.count()
+        stories = db.stories.estimated_document_count()
     except (pymongo.errors.NotMasterError, pymongo.errors.ServerSelectionTimeoutError):
         abort(504)
+    except pymongo.errors.OperationFailure as e:
+        if 'Authentication failed' in str(e):
+            abort(505)
+        abort(506)
     if not stories:
-        abort(504)
+        abort(510)
     status = client.admin.command('replSetGetStatus')
     members = status['members']
@@ -88,40 +103,45 @@ def db_check_mongo():
         member_state = member['state']
         optime = member['optime']
         if member_state == PRIMARY_STATE:
-            primary_optime = optime.time
+            primary_optime = optime['ts'].time
         elif member_state == SECONDARY_STATE:
-            if not oldest_secondary_optime or optime.time < oldest_secondary_optime:
-                oldest_secondary_optime = optime.time
+            if not oldest_secondary_optime or optime['ts'].time < oldest_secondary_optime:
+                oldest_secondary_optime = optime['ts'].time
     if not primary_optime or not oldest_secondary_optime:
-        abort(505)
-    if primary_optime - oldest_secondary_optime > 100:
-        abort(506)
+        abort(511)
+    # if primary_optime - oldest_secondary_optime > 100:
+    #     abort(512)
     return str(stories)
 
-@app.route("/db_check/redis")
-def db_check_redis():
+@app.route("/db_check/mongo_analytics")
+def db_check_mongo_analytics():
     try:
-        r = redis.Redis('redis', db=0)
+        client = pymongo.MongoClient(f"mongodb://{settings.MONGO_ANALYTICS_DB['username']}:{settings.MONGO_ANALYTICS_DB['password']}@{settings.SERVER_NAME}/?authSource=admin")
+        db = client.nbanalytics
     except:
         abort(503)
     try:
-        randkey = r.randomkey()
-    except:
+        fetches = db.feed_fetches.estimated_document_count()
+    except (pymongo.errors.NotMasterError, pymongo.errors.ServerSelectionTimeoutError):
         abort(504)
-    if randkey:
-        return str(randkey)
-    else:
-        abort(505)
+    except pymongo.errors.OperationFailure as e:
+        if 'Authentication failed' in str(e):
+            abort(505)
+        abort(506)
+    if not fetches:
+        abort(510)
+    return str(fetches)
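
The rewritten checks distinguish failure modes by status code: 503 for connection errors, 504 for not-master/server-selection timeouts, 505 for authentication failures, 506 for other operation failures, and 510 for an empty collection. A sketch of probing one endpoint from the outside (host is a placeholder; 5579 is the port haproxy checks above):

    # Sketch of probing a health endpoint; host/port are placeholders.
    import requests

    CODES = {
        503: "cannot connect",
        504: "not master / server selection timeout",
        505: "authentication failed",
        506: "other operation failure",
        510: "collection empty",
    }

    resp = requests.get("http://localhost:5579/db_check/mongo", timeout=5)
    if resp.status_code == 200:
        print("ok, %s stories" % resp.text)
    else:
        print("failed: %s" % CODES.get(resp.status_code, resp.status_code))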
@app.route("/db_check/redis_user") @app.route("/db_check/redis_user")
def db_check_redis_user(): def db_check_redis_user():
try: try:
r = redis.Redis('redis', db=0) r = redis.Redis('db-redis-user.service.nyc1.consul', db=0)
except: except:
abort(503) abort(503)
@ -138,7 +158,7 @@ def db_check_redis_user():
@app.route("/db_check/redis_story") @app.route("/db_check/redis_story")
def db_check_redis_story(): def db_check_redis_story():
try: try:
r = redis.Redis('redis', db=1) r = redis.Redis('db-redis-story.service.nyc1.consul', db=1)
except: except:
abort(503) abort(503)
@ -155,7 +175,7 @@ def db_check_redis_story():
@app.route("/db_check/redis_sessions") @app.route("/db_check/redis_sessions")
def db_check_redis_sessions(): def db_check_redis_sessions():
try: try:
r = redis.Redis('redis', db=5) r = redis.Redis('db-redis-sessions.service.nyc1.consul', db=5)
except: except:
abort(503) abort(503)
@ -172,7 +192,7 @@ def db_check_redis_sessions():
@app.route("/db_check/redis_pubsub") @app.route("/db_check/redis_pubsub")
def db_check_redis_pubsub(): def db_check_redis_pubsub():
try: try:
r = redis.Redis('redis', db=1) r = redis.Redis('db-redis-pubsub.service.nyc1.consul', db=1)
except: except:
abort(503) abort(503)
@ -189,7 +209,7 @@ def db_check_redis_pubsub():
@app.route("/db_check/elasticsearch") @app.route("/db_check/elasticsearch")
def db_check_elasticsearch(): def db_check_elasticsearch():
try: try:
conn = elasticsearch.Elasticsearch('elasticsearch') conn = elasticsearch.Elasticsearch('db-elasticsearch.service.nyc1.consul')
except: except:
abort(503) abort(503)

View file

@@ -1,4 +1,4 @@
-flask==1.1.2
+flask==2.0.1
 pymongo==3.11.2
 psycopg2>=2,<3
 redis==3.5.3
@@ -6,7 +6,6 @@ elasticsearch>=7,<8
 pymysql==0.10.1
 celery>=4,<5
 Django>=3.1,<3.2
-sentry-sdk==0.20.3
+sentry-sdk[flask]
 mongoengine==0.21.0
-boto==2.49.0
-pyyaml==5.3.1
+boto3==1.18.13

View file

@@ -536,7 +536,7 @@ hr {
   background-color: #F7F8F5;
   overflow-y: auto;
   overflow-x: hidden;
-  font-size: 11px;
+  font-size: 12px;
   list-style: none;
   margin: 0;
   padding: 0;
@@ -544,7 +544,7 @@ hr {
   height: auto;
 }
 .NB-theme-feed-size-xs .NB-feedlist {
-  font-size: 10px;
+  font-size: 11px;
 }
 .NB-theme-feed-size-m .NB-feedlist {
   font-size: 12px;
@@ -726,10 +726,10 @@ hr {
   top: 1px;
 }
 .NB-theme-feed-size-s .NB-feedlist img.feed_favicon {
-  top: 3px;
+  top: 4px;
 }
 .NB-density-compact.NB-theme-feed-size-s .NB-feedlist img.feed_favicon {
-  top: 1px;
+  top: 2px;
 }
 .NB-theme-feed-size-m .NB-feedlist img.feed_favicon {
   top: 4px;
@@ -846,8 +846,7 @@ hr {
 .NB-feeds-header .NB-feedlist-collapse-icon {
   top: 4px;
 }
-.NB-theme-feed-size-xs .NB-feedlist .folder .folder_title .NB-feedlist-collapse-icon,
-.NB-theme-feed-size-s .NB-feedlist .folder .folder_title .NB-feedlist-collapse-icon {
+.NB-theme-feed-size-xs .NB-feedlist .folder .folder_title .NB-feedlist-collapse-icon {
   top: -1px;
 }
 .NB-theme-feed-size-xl .NB-feedlist .folder .folder_title .NB-feedlist-collapse-icon {
@@ -1017,9 +1016,7 @@ hr {
   padding-bottom: 2px;
 }
 .NB-theme-feed-size-s .NB-feedlist .unread_count {
-  margin-top: 2px;
-  padding-top: 3px;
-  padding-bottom: 1px;
 }
 .NB-theme-feed-size-l .NB-feedlist .unread_count {
   margin-top: 3px;
@@ -1069,7 +1066,7 @@ hr {
   padding-bottom: 1px;
 }
 .NB-theme-feed-size-s .folder_title .unread_count {
-  margin-top: -3px;
+  margin-top: -2px;
   padding-top: 2px;
   padding-bottom: 1px;
 }
@@ -1738,7 +1735,7 @@ hr {
   text-decoration: none;
   color: #272727;
   line-height: 15px;
-  font-size: 12px;
+  font-size: 13px;
   /* background-color: white;
     border-top: 1px solid #E7EDF6;
     border-bottom: 1px solid #FFF;
@@ -1749,7 +1746,7 @@ hr {
 }
 .NB-theme-feed-size-xs .NB-story-title {
-  font-size: 11px;
+  font-size: 12px;
   line-height: 14px;
 }
 .NB-theme-feed-size-m .NB-story-title {
@@ -1865,6 +1862,9 @@ hr {
   -moz-box-sizing: border-box;
   box-sizing: border-box;
 }
+.NB-story-title.read .NB-storytitles-story-image {
+  opacity: 0.6;
+}
 .NB-image-preview-large-left:not(.NB-story-layout-grid) .NB-story-title .NB-storytitles-story-image {
   right: inherit;
   left: 8px;
@@ -1924,7 +1924,7 @@ hr {
 }
 .NB-story-title.read a.story_title {
-  color: #a2a2a2;
+  color: #969696;
 }
 .NB-storytitles-title {
   overflow-wrap: break-word;
@@ -1940,10 +1940,10 @@ hr {
   font-size: 11px;
 }
 .NB-theme-feed-size-xs .NB-story-title .NB-storytitles-author {
-  font-size: 9px;
+  font-size: 10px;
 }
 .NB-theme-feed-size-s .NB-story-title .NB-storytitles-author {
-  font-size: 10px;
+  font-size: 11px;
 }
 .NB-theme-feed-size-l .NB-story-title .NB-storytitles-author {
   font-size: 12px;
@@ -2015,7 +2015,7 @@ hr {
 }
 .NB-theme-feed-size-xs .NB-storytitles-content-preview {
-  font-size: 10px;
+  font-size: 11px;
   line-height: 13px;
 }
 .NB-theme-feed-size-s .NB-storytitles-content-preview {
@@ -2108,7 +2108,7 @@ hr {
 .NB-story-title .NB-story-feed .feed_title {
   display: block;
-  font-size: 10px;
+  font-size: 11px;
   position: absolute;
   left: 20px;
   top: 0;
@@ -2121,7 +2121,7 @@ hr {
   white-space: nowrap;
 }
 .NB-theme-feed-size-xs .NB-story-title .NB-story-feed .feed_title {
-  font-size: 9px;
+  font-size: 10px;
   height: 12px;
 }
 .NB-theme-feed-size-m .NB-story-title .NB-story-feed .feed_title {
@@ -2340,9 +2340,6 @@ hr {
   grid-gap: 2rem;
   padding: 2rem;
 }
-.NB-layout-grid .NB-story-content-container {
-  background-color: white;
-}
 .NB-layout-grid .NB-end-line {
   margin: 0 -2rem -2rem;
@@ -2754,7 +2751,17 @@ hr {
 body {
   font-family: 'Whitney SSm A', 'Whitney SSm B', "Lucida Grande", Verdana, "Helvetica Neue", Helvetica, sans-serif;
 }
+.NB-theme-feed-font-whitney {
+  font-family: 'Whitney SSm A', 'Whitney SSm B', "Lucida Grande", Verdana, "Helvetica Neue", Helvetica, sans-serif;
+}
+.NB-theme-feed-font-lucida {
+  font-family: "Lucida Grande", Verdana, "Helvetica Neue", Helvetica, sans-serif;
+  /* font-family: Verdana, "Helvetica Neue", Helvetica, sans-serif; */
+}
+.NB-theme-feed-font-gotham {
+  font-family: 'Gotham Narrow A', 'Gotham Narrow B', "Helvetica Neue", Helvetica, sans-serif;
+  /* font-family: "Helvetica Neue", Helvetica, sans-serif; */
+}
 .NB-theme-sans-serif #story_pane {
   font-family: "Helvetica Neue", "Helvetica", sans-serif;
 }
@@ -3092,25 +3099,26 @@ body {
   max-width: 100%;
 }
 .NB-feed-story .NB-feed-story-content img {
-  max-width: max-content !important;
-  margin-left: -28px;
-  width: calc(100% - 56px * -1) !important;
+  max-width: 100% !important;
+  width: auto !important;
   height: auto;
   /* See http://www.newsblur.com/site/1031643/le-21me for width: auto, height: auto */
 }
 .NB-feed-story .NB-feed-story-content img.NB-medium-image {
-  max-width: max-content !important;
-  margin: 0;
-  width: auto !important;
 }
 .NB-feed-story .NB-feed-story-content img.NB-small-image {
-  max-width: max-content !important;
-  margin: 0;
-  width: auto !important;
 }
 .NB-feed-story .NB-feed-story-content img.NB-large-image {
-  max-width: max-content !important;
-  margin-left: -28px !important;
-  width: calc(100% - 56px * -1) !important;
 }
+.NB-feed-story .NB-feed-story-content img.NB-table-image.NB-large-image {
+  margin: 0;
+  width: 100% !important;
+}
 .NB-feed-story .NB-feed-story-content figure {
   margin: 0;
 }
@@ -3530,6 +3538,7 @@ body {
   overflow: hidden;
   position: relative;
   min-height: 192px;
+  background-color: white;
 }
 .NB-narrow-content .NB-story-content-container {
   min-height: 108px;
@@ -5190,40 +5199,48 @@ background: transparent;
   background: transparent url("/media/embed/icons/circular/menu_icn_settings.png") no-repeat center center;
 }
-.NB-filter-popover .segmented-control.NB-options-feed-font-size li,
-.NB-style-popover .NB-options-feed-font-size li,
+.NB-filter-popover .segmented-control.NB-options-feed-size li,
+.NB-style-popover .NB-options-feed-size li,
 .NB-style-popover .NB-options-story-font-size li {
   width: 45px;
   padding: 2px 0;
   line-height: 16px;
   font-weight: bold;
 }
-.NB-filter-popover .segmented-control.NB-options-feed-font-size li {
+.NB-filter-popover .segmented-control.NB-options-feed-size li {
   width: 50px;
 }
-.NB-filter-popover .segmented-control li.NB-options-font-size-xs,
-.NB-style-popover li.NB-options-font-size-xs {
+.NB-filter-popover .segmented-control li.NB-options-feed-size-xs,
+.NB-style-popover li.NB-options-font-size-xs,
+.NB-style-popover li.NB-options-feed-size-xs {
   font-size: 9px;
   padding: 3px 0 1px;
 }
-.NB-filter-popover .segmented-control .NB-options-font-size-s,
-.NB-style-popover .NB-options-font-size-s {
+.NB-filter-popover .segmented-control .NB-options-feed-size-s,
+.NB-style-popover .NB-options-font-size-s,
+.NB-style-popover .NB-options-feed-size-s {
   font-size: 10px;
 }
-.NB-filter-popover .segmented-control li.NB-options-font-size-m,
-.NB-style-popover li.NB-options-font-size-m {
+.NB-filter-popover .segmented-control li.NB-options-feed-size-m,
+.NB-style-popover li.NB-options-font-size-m,
+.NB-style-popover li.NB-options-feed-size-m {
   font-size: 12px;
   padding: 3px 0 1px;
 }
-.NB-filter-popover .segmented-control .NB-options-font-size-l,
-.NB-style-popover .NB-options-font-size-l {
+.NB-filter-popover .segmented-control .NB-options-feed-size-l,
+.NB-style-popover .NB-options-font-size-l,
+.NB-style-popover .NB-options-feed-size-l {
  font-size: 13px;
 }
-.NB-filter-popover .segmented-control li.NB-options-font-size-xl,
-.NB-style-popover li.NB-options-font-size-xl {
+.NB-filter-popover .segmented-control li.NB-options-feed-size-xl,
+.NB-style-popover li.NB-options-font-size-xl,
+.NB-style-popover li.NB-options-feed-size-xl {
   font-size: 15px;
 }
+.NB-filter-popover .segmented-control.NB-options-feed-font li {
+  padding: 4px 15px;
+  width: auto;
+}
 .NB-style-popover .NB-options-line-spacing {
   margin-top: 6px;
 }
@@ -7954,6 +7971,46 @@ form.opml_import_form input {
 .NB-menu-manage .NB-menu-manage-theme .segmented-control li {
   padding: 4px 8px;
 }
+.NB-menu-manage .NB-menu-manage-font .NB-menu-manage-image {
+  background: transparent url('/media/img/icons/circular/menu_icn_font.png') no-repeat 0 0;
+  background-size: 18px;
+}
+.NB-menu-manage .NB-menu-manage-font .segmented-control {
+  margin: 2px 0 0 36px;
+}
+.NB-menu-manage .NB-menu-manage-font .segmented-control li {
+  padding: 4px 8px;
+}
+.NB-menu-manage .NB-menu-manage-size .NB-menu-manage-image {
+  background: transparent url('/media/img/icons/circular/menu_icn_size.png') no-repeat 0 0;
+  background-size: 18px;
+}
+.NB-menu-manage .NB-menu-manage-size .segmented-control {
+  margin: 2px 0 0 36px;
+}
+.NB-menu-manage .NB-menu-manage-size .segmented-control li {
+  font-weight: bold;
+}
+.NB-menu-manage .NB-menu-manage-size .segmented-control li.NB-options-feed-size-xs {
+  font-size: 9px;
+  padding: 7px 12px 6px;
+}
+.NB-menu-manage .NB-menu-manage-size .segmented-control li.NB-options-feed-size-s {
+  font-size: 10px;
+  padding: 6px 12px 5px;
+}
+.NB-menu-manage .NB-menu-manage-size .segmented-control li.NB-options-feed-size-m {
+  font-size: 12px;
+  padding: 5px 12px 4px;
+}
+.NB-menu-manage .NB-menu-manage-size .segmented-control li.NB-options-feed-size-l {
+  font-size: 13px;
+  padding: 4px 12px;
+}
+.NB-menu-manage .NB-menu-manage-size .segmented-control li.NB-options-feed-size-xl {
+  font-size: 15px;
+  padding: 3px 12px 2px;
+}
 .NB-menu-manage .NB-menu-manage-account .NB-menu-manage-image {
   background: transparent url('/media/embed/icons/circular/menu_icn_profile.png') no-repeat 0 0;
   background-size: 18px;
@@ -12371,7 +12428,7 @@ form.opml_import_form input {
 .NB-modal-organizer .segmented-control li {
   padding: 2px 12px 0;
   font-size: 11px;
-  width: 50%;
+  width: 49%;
   -webkit-box-sizing: border-box;
   -moz-box-sizing: border-box;
   box-sizing: border-box;

Binary file not shown. (Added; 683 B)

Binary file not shown. (Added; 910 B)

View file

@@ -51,7 +51,7 @@ NEWSBLUR.Models.Story = Backbone.Model.extend({
         // First do a naive strip, which is faster than rendering which makes network calls
         content = content && content.replace(/<(?:.|\n)*?>/gm, ' ');
         content = content && Inflector.stripTags(content);
-        content = content && content.replaceAll(' ', ' '); // Invisible space, boo
+        content = content && content.replace(/[\u00a0\u200c]/g, ' '); // Invisible space, boo
         content = content && content.replace(/\s+/gm, ' ');
         return _.string.prune(_.string.trim(content), length || 150, "...");

View file

@@ -3051,6 +3051,10 @@
             .removeClass('NB-theme-feed-size-l')
             .removeClass('NB-theme-feed-size-xl');
         $body.addClass('NB-theme-feed-size-' + NEWSBLUR.Preferences['feed_size']);
+        $body.removeClass('NB-theme-feed-font-whitney')
+            .removeClass('NB-theme-feed-font-lucida')
+            .removeClass('NB-theme-feed-font-gotham');
+        $body.addClass('NB-theme-feed-font-' + NEWSBLUR.Preferences['feed_font']);
         $body.removeClass('NB-line-spacing-xs')
             .removeClass('NB-line-spacing-s')
@@ -3082,6 +3086,8 @@
                 NEWSBLUR.app.dashboard_rivers.right.redraw();
                 NEWSBLUR.app.story_titles.render();
             }
+
+            this.load_theme();
         },
 
         // ===================
@@ -3369,14 +3375,26 @@
             }
         },
 
+        switch_feed_font: function(feed_font) {
+            this.model.preference('feed_font', feed_font);
+            this.apply_story_styling();
+        },
+
+        switch_feed_font_size: function(feed_size) {
+            this.model.preference('feed_size', feed_size);
+            this.apply_story_styling();
+        },
+
         switch_theme: function(theme) {
             this.model.preference('theme', theme);
-            this.load_theme();
+            this.apply_story_styling();
         },
 
         load_theme: function() {
             var theme = NEWSBLUR.assets.theme();
             var auto_theme = NEWSBLUR.assets.preference('theme'); // Add auto
+            var feed_font = NEWSBLUR.assets.preference('feed_font');
+            var feed_size = NEWSBLUR.assets.preference('feed_size');
 
             if (!this.flags.watching_system_theme && window.matchMedia) {
                 var darkMediaQuery = window.matchMedia('(prefers-color-scheme: dark)');
@@ -3401,8 +3419,13 @@
                 this.flags.watching_system_theme = true;
             }
 
+            // Select theme options in manage menu on the dashboard
             $('.NB-theme-option').removeClass('NB-active');
             $('.NB-options-theme-'+auto_theme).addClass('NB-active');
+            $('.NB-feed-font-option').removeClass('NB-active');
+            $('.NB-options-feed-font-'+feed_font).addClass('NB-active');
+            $('.NB-feed-size-option').removeClass('NB-active');
+            $('.NB-options-feed-size-'+feed_size).addClass('NB-active');
 
             $("body").addClass('NB-theme-transitioning');
@@ -3540,7 +3563,7 @@
                     $.make('div', { className: 'NB-menu-manage-image' }),
                     $.make('div', { className: 'NB-menu-manage-title' }, 'Email Newsletters')
                 ]),
-                $.make('li', { className: 'NB-menu-item NB-menu-manage-import, role: "button"' }, [
+                $.make('li', { className: 'NB-menu-item NB-menu-manage-import', role: "button" }, [
                     $.make('div', { className: 'NB-menu-manage-image' }),
                     $.make('div', { className: 'NB-menu-manage-title' }, 'Import or upload sites')
                 ]),
@@ -3562,6 +3585,34 @@
                     $.make('div', { className: 'NB-menu-manage-image' }),
                     $.make('div', { className: 'NB-menu-manage-title' }, 'Preferences')
                 ]),
+                $.make('li', { className: 'NB-menu-separator' }),
+                $.make('li', { className: 'NB-menu-item NB-menu-manage-font' }, [
+                    $.make('div', { className: 'NB-menu-manage-image' }),
+                    $.make('ul', { className: 'segmented-control NB-options-feed-font' }, [
+                        $.make('li', { className: 'NB-feed-font-option NB-options-feed-font-whitney NB-theme-feed-font-whitney', role: "button" }, [
+                            $.make('div', { className: 'NB-icon' }),
+                            'Whitney'
+                        ]),
+                        $.make('li', { className: 'NB-feed-font-option NB-options-feed-font-lucida NB-theme-feed-font-lucida', role: "button" }, [
+                            $.make('div', { className: 'NB-icon' }),
+                            'Lucida Grande'
+                        ]),
+                        $.make('li', { className: 'NB-feed-font-option NB-options-feed-font-gotham NB-theme-feed-font-gotham', role: "button" }, [
+                            $.make('div', { className: 'NB-icon' }),
+                            'Gotham'
+                        ])
+                    ])
+                ]),
+                $.make('li', { className: 'NB-menu-item NB-menu-manage-size' }, [
+                    $.make('div', { className: 'NB-menu-manage-image' }),
+                    $.make('ul', { className: 'segmented-control NB-options-feed-size' }, [
+                        $.make('li', { className: 'NB-feed-size-option NB-options-feed-size-xs', role: "button" }, 'XS'),
+                        $.make('li', { className: 'NB-feed-size-option NB-options-feed-size-s', role: "button" }, 'S'),
+                        $.make('li', { className: 'NB-feed-size-option NB-options-feed-size-m', role: "button" }, 'M'),
+                        $.make('li', { className: 'NB-feed-size-option NB-options-feed-size-l', role: "button" }, 'L'),
+                        $.make('li', { className: 'NB-feed-size-option NB-options-feed-size-xl', role: "button" }, 'XL')
+                    ])
+                ]),
                 $.make('li', { className: 'NB-menu-item NB-menu-manage-theme' }, [
                     $.make('div', { className: 'NB-menu-manage-image' }),
                     $.make('ul', { className: 'segmented-control NB-options-theme' }, [
@@ -3585,6 +3636,16 @@
             $(".NB-options-theme-light", $manage_menu).toggleClass('NB-active', theme == 'light');
             $(".NB-options-theme-dark", $manage_menu).toggleClass('NB-active', theme == 'dark');
             $(".NB-options-theme-auto", $manage_menu).toggleClass('NB-active', theme == 'auto');
+
+            var feed_font = this.model.preference('feed_font');
+            $(".NB-options-feed-font-whitney", $manage_menu).toggleClass('NB-active', feed_font == 'whitney');
+            $(".NB-options-feed-font-lucida", $manage_menu).toggleClass('NB-active', feed_font == 'lucida');
+            $(".NB-options-feed-font-gotham", $manage_menu).toggleClass('NB-active', feed_font == 'gotham');
+
+            var feed_size = this.model.preference('feed_size');
+            $(".NB-options-feed-size-xs", $manage_menu).toggleClass('NB-active', feed_size == 'xs');
+            $(".NB-options-feed-size-s", $manage_menu).toggleClass('NB-active', feed_size == 's');
+            $(".NB-options-feed-size-m", $manage_menu).toggleClass('NB-active', feed_size == 'm');
+            $(".NB-options-feed-size-l", $manage_menu).toggleClass('NB-active', feed_size == 'l');
+            $(".NB-options-feed-size-xl", $manage_menu).toggleClass('NB-active', feed_size == 'xl');
         } else if (type == 'feed') {
             var feed = this.model.get_feed(feed_id);
             if (!feed) return;
@@ -6486,6 +6547,44 @@
             e.preventDefault();
             self.switch_theme('auto');
         });
+        $.targetIs(e, { tagSelector: '.NB-menu-manage-font' }, function($t, $p){
+            e.preventDefault();
+        });
+        $.targetIs(e, { tagSelector: '.NB-options-feed-font-whitney' }, function($t, $p){
+            e.preventDefault();
+            self.switch_feed_font('whitney');
+        });
+        $.targetIs(e, { tagSelector: '.NB-options-feed-font-lucida' }, function($t, $p){
+            e.preventDefault();
+            self.switch_feed_font('lucida');
+        });
+        $.targetIs(e, { tagSelector: '.NB-options-feed-font-gotham' }, function($t, $p){
+            e.preventDefault();
+            self.switch_feed_font('gotham');
+        });
+        $.targetIs(e, { tagSelector: '.NB-menu-manage-size' }, function($t, $p){
+            e.preventDefault();
+        });
+        $.targetIs(e, { tagSelector: '.NB-options-feed-size-xs' }, function($t, $p){
+            e.preventDefault();
+            self.switch_feed_font_size('xs');
+        });
+        $.targetIs(e, { tagSelector: '.NB-options-feed-size-s' }, function($t, $p){
+            e.preventDefault();
+            self.switch_feed_font_size('s');
+        });
+        $.targetIs(e, { tagSelector: '.NB-options-feed-size-m' }, function($t, $p){
+            e.preventDefault();
+            self.switch_feed_font_size('m');
+        });
+        $.targetIs(e, { tagSelector: '.NB-options-feed-size-l' }, function($t, $p){
+            e.preventDefault();
+            self.switch_feed_font_size('l');
+        });
+        $.targetIs(e, { tagSelector: '.NB-options-feed-size-xl' }, function($t, $p){
+            e.preventDefault();
+            self.switch_feed_font_size('xl');
+        });
         $.targetIs(e, { tagSelector: '.NB-menu-manage-logout' }, function($t, $p){
             e.preventDefault();
             e.stopPropagation();

View file

@@ -7,7 +7,7 @@ NEWSBLUR.ReaderFeedException = function(feed_id, options) {
     this.options = $.extend({}, defaults, options);
     this.model = NEWSBLUR.assets;
-    this.feed_id = feed_id;
+    this.feed_id = _.isString(feed_id) && _.string.startsWith(feed_id, 'feed:') ? parseInt(feed_id.replace('feed:', ''), 10) : feed_id;
     this.feed = this.model.get_feed(feed_id);
     this.folder_title = this.options.folder_title;
     this.folder = this.folder_title && NEWSBLUR.assets.get_folder(this.folder_title);
@@ -24,7 +24,7 @@ _.extend(NEWSBLUR.ReaderFeedException.prototype, {
         if (this.folder) {
             NEWSBLUR.Modal.prototype.initialize_folder.call(this, this.folder_title);
         } else {
             NEWSBLUR.Modal.prototype.initialize_feed.call(this, this.feed_id);
         }
         this.make_modal();
         if (this.feed) {

View file

@@ -200,7 +200,7 @@ _.extend(NEWSBLUR.ReaderOrganizer.prototype, {
         var $error = $(".NB-error-move", this.$modal);
         var $delete = $(".NB-action-delete", this.$modal);
         var count = this.feedlist.folder_view.highlighted_count_unique_folders();
-        console.log(['change_selection', count]);
+        // console.log(['change_selection', count]);
         $title.text(count ? count + " selected" : "Select");
         $error.text('');
@@ -133,12 +133,26 @@ NEWSBLUR.FeedOptionsPopover = NEWSBLUR.ReaderPopover.extend({
$.make('img', { className: 'NB-icon', src: NEWSBLUR.Globals['MEDIA_URL']+'img/reader/image_preview_small_right.png' })
])
])),
-$.make('ul', { className: 'segmented-control NB-options-feed-font-size' }, [
-$.make('li', { className: 'NB-view-setting-option NB-options-font-size-xs', role: "button" }, 'XS'),
-$.make('li', { className: 'NB-view-setting-option NB-options-font-size-s', role: "button" }, 'S'),
-$.make('li', { className: 'NB-view-setting-option NB-options-font-size-m NB-active', role: "button" }, 'M'),
-$.make('li', { className: 'NB-view-setting-option NB-options-font-size-l', role: "button" }, 'L'),
-$.make('li', { className: 'NB-view-setting-option NB-options-font-size-xl', role: "button" }, 'XL')
+$.make('ul', { className: 'segmented-control NB-options-feed-font' }, [
+$.make('li', { className: 'NB-view-setting-option NB-view-setting-feed-font-whitney NB-theme-feed-font-whitney', role: "button" }, [
+$.make('div', { className: 'NB-icon' }),
+'Whitney'
+]),
+$.make('li', { className: 'NB-view-setting-option NB-view-setting-feed-font-lucida NB-theme-feed-font-lucida', role: "button" }, [
+$.make('div', { className: 'NB-icon' }),
+'Lucida Grande'
+]),
+$.make('li', { className: 'NB-view-setting-option NB-view-setting-feed-font-gotham NB-theme-feed-font-gotham', role: "button" }, [
+$.make('div', { className: 'NB-icon' }),
+'Gotham'
+])
+]),
+$.make('ul', { className: 'segmented-control NB-options-feed-size' }, [
+$.make('li', { className: 'NB-view-setting-option NB-options-feed-size-xs', role: "button" }, 'XS'),
+$.make('li', { className: 'NB-view-setting-option NB-options-feed-size-s', role: "button" }, 'S'),
+$.make('li', { className: 'NB-view-setting-option NB-options-feed-size-m NB-active', role: "button" }, 'M'),
+$.make('li', { className: 'NB-view-setting-option NB-options-feed-size-l', role: "button" }, 'L'),
+$.make('li', { className: 'NB-view-setting-option NB-options-feed-size-xl', role: "button" }, 'XL')
])
]),
(is_feed && $.make('div', { className: 'NB-popover-section' }, [
@@ -185,7 +199,8 @@ NEWSBLUR.FeedOptionsPopover = NEWSBLUR.ReaderPopover.extend({
var image_preview = NEWSBLUR.assets.preference('image_preview');
var content_preview = NEWSBLUR.assets.preference('show_content_preview');
var infrequent = parseInt(NEWSBLUR.assets.preference('infrequent_stories_per_month'), 10);
-var feed_font_size = NEWSBLUR.assets.preference('feed_size');
+var feed_size = NEWSBLUR.assets.preference('feed_size');
+var feed_font = NEWSBLUR.assets.preference('feed_font');
var $oldest = this.$('.NB-view-setting-order-oldest');
var $newest = this.$('.NB-view-setting-order-newest');
@@ -224,8 +239,10 @@ NEWSBLUR.FeedOptionsPopover = NEWSBLUR.ReaderPopover.extend({
$image_preview_sr.toggleClass('NB-active', image_preview == "small-right");
$image_preview_ll.toggleClass('NB-active', image_preview == "large-left");
$image_preview_lr.toggleClass('NB-active', image_preview == "1" || image_preview == "large-right");
-this.$('.NB-options-feed-font-size li').removeClass('NB-active');
-this.$('.NB-options-feed-font-size .NB-options-font-size-'+feed_font_size).addClass('NB-active');
+this.$('.NB-options-feed-size li').removeClass('NB-active');
+this.$('.NB-options-feed-size .NB-options-feed-size-'+feed_size).addClass('NB-active');
+this.$('.NB-options-feed-font .NB-view-setting-option').removeClass('NB-active');
+this.$('.NB-options-feed-font .NB-view-setting-feed-font-'+feed_font).addClass('NB-active');
var frequencies = [5, 15, 30, 60, 90];
for (var f in frequencies) {
@@ -312,16 +329,22 @@ NEWSBLUR.FeedOptionsPopover = NEWSBLUR.ReaderPopover.extend({
} else if ($target.hasClass("NB-view-setting-infrequent-90")) {
NEWSBLUR.assets.preference('infrequent_stories_per_month', 90);
NEWSBLUR.reader.reload_feed();
-} else if ($target.hasClass("NB-options-font-size-xs")) {
+} else if ($target.hasClass("NB-options-feed-size-xs")) {
this.update_feed_font_size('xs');
-} else if ($target.hasClass("NB-options-font-size-s")) {
+} else if ($target.hasClass("NB-options-feed-size-s")) {
this.update_feed_font_size('s');
-} else if ($target.hasClass("NB-options-font-size-m")) {
+} else if ($target.hasClass("NB-options-feed-size-m")) {
this.update_feed_font_size('m');
-} else if ($target.hasClass("NB-options-font-size-l")) {
+} else if ($target.hasClass("NB-options-feed-size-l")) {
this.update_feed_font_size('l');
-} else if ($target.hasClass("NB-options-font-size-xl")) {
+} else if ($target.hasClass("NB-options-feed-size-xl")) {
this.update_feed_font_size('xl');
+} else if ($target.hasClass("NB-view-setting-feed-font-whitney")) {
+this.update_feed_font('whitney');
+} else if ($target.hasClass("NB-view-setting-feed-font-lucida")) {
+this.update_feed_font('lucida');
+} else if ($target.hasClass("NB-view-setting-feed-font-gotham")) {
+this.update_feed_font('gotham');
}

if (NEWSBLUR.reader.flags.search) {
@@ -336,6 +359,11 @@ NEWSBLUR.FeedOptionsPopover = NEWSBLUR.ReaderPopover.extend({
NEWSBLUR.reader.apply_story_styling();
},

+update_feed_font: function(setting) {
+NEWSBLUR.assets.preference('feed_font', setting);
+NEWSBLUR.reader.apply_story_styling();
+},
+
update_feed: function(setting) {
var changed = NEWSBLUR.assets.view_setting(this.options.feed_id, setting);
if (!changed) return;
@@ -97,7 +97,6 @@ NEWSBLUR.Views.StoryDetailView = Backbone.View.extend({
this.render_comments();
this.attach_handlers();
// if (!this.model.get('image_urls') || (this.model.get('image_urls') && this.model.get('image_urls').length == 0)) {
-this.watch_images_load();
// }

return this;
@@ -127,21 +126,40 @@ NEWSBLUR.Views.StoryDetailView = Backbone.View.extend({
this.attach_fitvid_handler();
this.render_starred_tags();
this.apply_starred_story_selections();
+this.watch_images_load();
},
watch_images_load: function () {
-var pane_width = NEWSBLUR.reader.$s.$story_pane.width() - 28*2; // 28px to compensate for both margins
+var pane_width;
+if (this.options.inline_story_title) {
+pane_width = this.$el.width();
+}
+if (!pane_width) {
+pane_width = NEWSBLUR.reader.$s.$story_pane.width()
+}
+if (!pane_width) {
+pane_width = NEWSBLUR.reader.$s.$story_titles.width();
+}
+pane_width = pane_width - (28 + 2); // 28px to compensate for both margins
+var has_tables = this.$("table").length;
this.$el.imagesLoaded(_.bind(function() {
var largest = 0;
var $largest;
// console.log(["Images loaded", this.model.get('story_title').substr(0, 30), this.$("img")]);
this.$("img").each(function() {
-// console.log(["Largest?", this.width, largest, this.src]);
+// console.log(["Largest?", this.width, this.naturalWidth, this.height, this.naturalHeight, largest, pane_width, this.src]);
if (this.width > 60 && this.width > largest) {
largest = this.width;
$largest = $(this);
}
+$(this).removeClass('NB-large-image').removeClass('NB-medium-image').removeClass('NB-small-image');
+if (pane_width >= 900) return;
+if (has_tables) {
+// Can't even calculate widths because with tables, nothing fits
+$(this).addClass('NB-table-image');
+}
if (this.naturalWidth >= pane_width && this.naturalHeight >= 50) {
$(this).addClass('NB-large-image');
} else if (this.naturalWidth >= 100 && this.naturalHeight >= 50) {
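The image classification above can be read independently of the DOM. A minimal sketch of the same decision rules, in Python for illustration only (the function name is hypothetical; the medium branch is truncated in the hunk, so 'NB-medium-image' is inferred from the removeClass call above):

    def classify_image(natural_width, natural_height, pane_width, has_tables):
        """Mirror of the watch_images_load() sizing rules, for illustration."""
        classes = []
        if pane_width >= 900:
            return classes  # wide panes get no size class at all
        if has_tables:
            classes.append('NB-table-image')  # widths are unreliable inside tables
        if natural_width >= pane_width and natural_height >= 50:
            classes.append('NB-large-image')
        elif natural_width >= 100 and natural_height >= 50:
            classes.append('NB-medium-image')  # inferred: branch body is truncated in the hunk
        return classes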
@@ -19,7 +19,7 @@ NEWSBLUR.StoryOptionsPopover = NEWSBLUR.ReaderPopover.extend({
events: {
"click .NB-font-family-option": "change_font_family",
"click .NB-story-font-size-option": "change_story_font_size",
-"click .NB-feed-font-size-option": "change_feed_font_size",
+"click .NB-feed-size-option": "change_feed_font_size",
"click .NB-view-setting-option": "change_view_setting",
"click .NB-line-spacing-option": "change_line_spacing",
"click .NB-story-titles-pane-option": "change_story_titles_pane",
@@ -159,12 +159,12 @@ NEWSBLUR.StoryOptionsPopover = NEWSBLUR.ReaderPopover.extend({
$.make('li', { className: 'NB-line-spacing-option NB-options-line-spacing-xl', role: "button" }, $.make('div', { className: 'NB-icon' }))
]),
$.make('div', { className: 'NB-popover-section-title' }, 'Feed title styling'),
-$.make('ul', { className: 'segmented-control NB-options-feed-font-size' }, [
-$.make('li', { className: 'NB-feed-font-size-option NB-options-font-size-xs', role: "button" }, 'XS'),
-$.make('li', { className: 'NB-feed-font-size-option NB-options-font-size-s', role: "button" }, 'S'),
-$.make('li', { className: 'NB-feed-font-size-option NB-options-font-size-m NB-active', role: "button" }, 'M'),
-$.make('li', { className: 'NB-feed-font-size-option NB-options-font-size-l', role: "button" }, 'L'),
-$.make('li', { className: 'NB-feed-font-size-option NB-options-font-size-xl', role: "button" }, 'XL')
+$.make('ul', { className: 'segmented-control NB-options-feed-size' }, [
+$.make('li', { className: 'NB-feed-size-option NB-options-feed-size-xs', role: "button" }, 'XS'),
+$.make('li', { className: 'NB-feed-size-option NB-options-feed-size-s', role: "button" }, 'S'),
+$.make('li', { className: 'NB-feed-size-option NB-options-feed-size-m NB-active', role: "button" }, 'M'),
+$.make('li', { className: 'NB-feed-size-option NB-options-feed-size-l', role: "button" }, 'L'),
+$.make('li', { className: 'NB-feed-size-option NB-options-feed-size-xl', role: "button" }, 'XL')
]),
(this.options.show_contentpreview && $.make('ul', { className: 'segmented-control NB-menu-manage-view-setting-contentpreview' }, [
$.make('li', { className: 'NB-view-setting-option NB-view-setting-contentpreview-title', role: "button" }, 'Title only'),
@@ -209,10 +209,10 @@ NEWSBLUR.StoryOptionsPopover = NEWSBLUR.ReaderPopover.extend({
this.$('.NB-options-font-family-'+font_family).addClass('NB-active');

this.$('.NB-view-setting-option').removeClass('NB-active');
-this.$('.NB-feed-font-size-option').removeClass('NB-active');
+this.$('.NB-feed-size-option').removeClass('NB-active');
this.$('.NB-story-font-size-option').removeClass('NB-active');
this.$('.NB-options-story-font-size .NB-options-font-size-'+story_font_size).addClass('NB-active');
-this.$('.NB-options-feed-font-size .NB-options-font-size-'+feed_font_size).addClass('NB-active');
+this.$('.NB-options-feed-size .NB-options-feed-size-'+feed_font_size).addClass('NB-active');
this.$('.NB-line-spacing-option').removeClass('NB-active');
this.$('.NB-options-line-spacing-'+line_spacing).addClass('NB-active');
@@ -296,15 +296,15 @@ NEWSBLUR.StoryOptionsPopover = NEWSBLUR.ReaderPopover.extend({
change_feed_font_size: function(e) {
var $target = $(e.target);

-if ($target.hasClass("NB-options-font-size-xs")) {
+if ($target.hasClass("NB-options-feed-size-xs")) {
this.update_feed_font_size('xs');
-} else if ($target.hasClass("NB-options-font-size-s")) {
+} else if ($target.hasClass("NB-options-feed-size-s")) {
this.update_feed_font_size('s');
-} else if ($target.hasClass("NB-options-font-size-m")) {
+} else if ($target.hasClass("NB-options-feed-size-m")) {
this.update_feed_font_size('m');
-} else if ($target.hasClass("NB-options-font-size-l")) {
+} else if ($target.hasClass("NB-options-feed-size-l")) {
this.update_feed_font_size('l');
-} else if ($target.hasClass("NB-options-font-size-xl")) {
+} else if ($target.hasClass("NB-options-feed-size-xl")) {
this.update_feed_font_size('xl');
}
@@ -21,7 +21,8 @@ DOCKERBUILD = True
DEBUG = False
DEBUG = True
DEBUG_ASSETS = True
-# DEBUG_QUERIES = True
+DEBUG_QUERIES = True
+DEBUG_QUERIES_SUMMARY_ONLY = True

MEDIA_URL = '/media/'
IMAGES_URL = '/imageproxy'
SECRET_KEY = 'YOUR SECRET KEY'
@@ -94,8 +95,7 @@ MONGO_DB = {
}
MONGO_ANALYTICS_DB = {
    'name': 'nbanalytics',
-    'host': 'db_mongo',
-    'port': 29019,
+    'host': 'db_mongo:29019',
}

MONGODB_SLAVE = {
@@ -34,7 +34,7 @@ from sentry_sdk.integrations.celery import CeleryIntegration
import django.http
import re
from mongoengine import connect
-from boto.s3.connection import S3Connection, OrdinaryCallingFormat
+import boto3
from utils import jammit

# ===================
@@ -94,6 +94,8 @@ AUTO_PREMIUM_NEW_USERS = True
AUTO_ENABLE_NEW_USERS = True
ENFORCE_SIGNUP_CAPTCHA = False
PAYPAL_TEST = False
+DATA_UPLOAD_MAX_MEMORY_SIZE = 5242880 # 5 MB
+FILE_UPLOAD_MAX_MEMORY_SIZE = 5242880 # 5 MB

# Uncomment below to force all feeds to store this many stories. Default is to cut
# off at 25 stories for single subscriber non-premium feeds and 500 for popular feeds.
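The two Django limits cover different paths: DATA_UPLOAD_MAX_MEMORY_SIZE caps the non-file request body before Django raises RequestDataTooBig, while FILE_UPLOAD_MAX_MEMORY_SIZE is the threshold above which an uploaded file is streamed to disk instead of held in memory. Raising both to the same value keeps larger uploads (presumably OPML imports and the like) working. A trivial sanity check of the constant:

    FIVE_MB = 5 * 1024 * 1024
    assert FIVE_MB == 5242880  # the value set for both limits above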
@@ -119,6 +121,7 @@ MIDDLEWARE = (
    'apps.profile.middleware.ServerHostnameMiddleware',
    'oauth2_provider.middleware.OAuth2TokenMiddleware',
    # 'debug_toolbar.middleware.DebugToolbarMiddleware',
+    'utils.request_introspection_middleware.DumpRequestMiddleware',
    'apps.profile.middleware.DBProfilerMiddleware',
    'apps.profile.middleware.SQLLogToConsoleMiddleware',
    'utils.mongo_raw_log_middleware.MongoDumpMiddleware',
@@ -232,6 +235,10 @@ LOGGING = {
            'level': 'DEBUG',
            'propagate': True,
        },
+        'subdomains.middleware': {
+            'level': 'ERROR',
+            'propagate': False,
+        }
    },
    'filters': {
        'require_debug_false': {
@@ -559,8 +566,11 @@ S3_AVATARS_BUCKET_NAME = 'avatars.newsblur.com'

if DOCKERBUILD:
    from newsblur_web.docker_local_settings import *
-from newsblur_web.local_settings import *
+else:
+    try:
+        from newsblur_web.local_settings import *
+    except ModuleNotFoundError:
+        pass

try:
    from newsblur_web.task_env import *
@@ -579,9 +589,11 @@ if not DEBUG:
        'django_ses',
    )

    sentry_sdk.init(
        dsn=SENTRY_DSN,
        integrations=[DjangoIntegration(), RedisIntegration(), CeleryIntegration()],
+        server_name=SERVER_NAME,

        # Set traces_sample_rate to 1.0 to capture 100%
        # of transactions for performance monitoring.
@@ -686,10 +698,11 @@ MONGO_ANALYTICS_DB_DEFAULTS = {
    'alias': 'nbanalytics',
}
MONGO_ANALYTICS_DB = dict(MONGO_ANALYTICS_DB_DEFAULTS, **MONGO_ANALYTICS_DB)
-MONGO_ANALYTICS_DB_NAME = MONGO_ANALYTICS_DB.pop('name')
-# MONGO_ANALYTICS_URI = 'mongodb://%s' % (MONGO_ANALYTICS_DB.pop('host'),)
-# MONGOANALYTICSDB = connect(MONGO_ANALYTICS_DB.pop('name'), host=MONGO_ANALYTICS_URI, **MONGO_ANALYTICS_DB)
-MONGOANALYTICSDB = connect(MONGO_ANALYTICS_DB_NAME, **MONGO_ANALYTICS_DB)
+if 'username' in MONGO_ANALYTICS_DB:
+    MONGOANALYTICSDB = connect(db=MONGO_ANALYTICS_DB['name'], host=f"mongodb://{MONGO_ANALYTICS_DB['username']}:{MONGO_ANALYTICS_DB['password']}@{MONGO_ANALYTICS_DB['host']}/?authSource=admin", alias="nbanalytics")
+else:
+    MONGOANALYTICSDB = connect(db=MONGO_ANALYTICS_DB['name'], host=f"mongodb://{MONGO_ANALYTICS_DB['host']}/", alias="nbanalytics")
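The two branches differ only in the connection URI: the analytics server now lives on its own host with authentication, while local Docker installs have no auth on db_mongo. A sketch of the resulting URIs, with illustrative credentials (mongoengine's connect() accepts a full mongodb:// URI via host=):

    creds = {'name': 'nbanalytics', 'host': 'db_mongo:29019',
             'username': 'newsblur', 'password': 'hunter2'}  # illustrative values only

    # Authenticated (production): credentials are checked against the admin database.
    uri = f"mongodb://{creds['username']}:{creds['password']}@{creds['host']}/?authSource=admin"

    # Unauthenticated (local installs, which have no auth on db_mongo).
    uri_local = f"mongodb://{creds['host']}/"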
# =========
# = Redis =
@@ -759,21 +772,17 @@ accept_content = ['pickle', 'json', 'msgpack', 'yaml']

JAMMIT = jammit.JammitAssets(ROOT_DIR)

-if DEBUG:
-    MIDDLEWARE += ('utils.request_introspection_middleware.DumpRequestMiddleware',)
-    # MIDDLEWARE += ('utils.exception_middleware.ConsoleExceptionMiddleware',)

# =======
# = AWS =
# =======

S3_CONN = None
if BACKED_BY_AWS.get('pages_on_s3') or BACKED_BY_AWS.get('icons_on_s3'):
-    S3_CONN = S3Connection(S3_ACCESS_KEY, S3_SECRET, calling_format=OrdinaryCallingFormat())
-    # if BACKED_BY_AWS.get('pages_on_s3'):
-    #     S3_PAGES_BUCKET = S3_CONN.get_bucket(S3_PAGES_BUCKET_NAME)
-    # if BACKED_BY_AWS.get('icons_on_s3'):
-    #     S3_ICONS_BUCKET = S3_CONN.get_bucket(S3_ICONS_BUCKET_NAME)
+    boto_session = boto3.Session(
+        aws_access_key_id=S3_ACCESS_KEY,
+        aws_secret_access_key=S3_SECRET,
+    )
+    S3_CONN = boto_session.resource('s3')

django.http.request.host_validation_re = re.compile(r"^([a-z0-9.-_\-]+|\[[a-f0-9]*:[a-f0-9:]+\])(:\d+)?$")
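The boto2 S3Connection is replaced by a boto3 Session plus its high-level s3 resource, so bucket objects are obtained lazily instead of via get_bucket(). A hedged sketch of the equivalent calls (bucket and file names are illustrative, not from the repo):

    import boto3

    session = boto3.Session(aws_access_key_id='AKIA...', aws_secret_access_key='...')  # illustrative keys
    s3 = session.resource('s3')

    # boto2: bucket = conn.get_bucket('backups'); Key(bucket).set_contents_from_filename(path)
    # boto3 equivalent, as used elsewhere in this commit:
    s3.Bucket('backups').upload_file('/tmp/backup.tgz', 'mongo/backup.tgz')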
@@ -6,6 +6,8 @@ favicons = (app) =>
ENV_DEV = process.env.NODE_ENV == 'development' or process.env.NODE_ENV == 'development'
ENV_PROD = process.env.NODE_ENV == 'production'
ENV_DOCKER = process.env.NODE_ENV == 'docker'
+MONGODB_USERNAME = process.env.MONGODB_USERNAME
+MONGODB_PASSWORD = process.env.MONGODB_PASSWORD
MONGODB_SERVER = "db_mongo"
if ENV_DEV
MONGODB_SERVER = 'localhost'
@@ -27,7 +29,7 @@ favicons = (app) =>
log.debug "Running as production server"
if ENV_PROD
-url = "mongodb://#{MONGODB_SERVER}:#{MONGODB_PORT}/newsblur?replicaSet=nbset&readPreference=secondaryPreferred"
+url = "mongodb://#{MONGODB_USERNAME}:#{MONGODB_PASSWORD}@#{MONGODB_SERVER}:#{MONGODB_PORT}/newsblur?replicaSet=nbset&readPreference=secondaryPreferred&authSource=admin"
else
url = "mongodb://#{MONGODB_SERVER}:#{MONGODB_PORT}/newsblur"
@@ -7,11 +7,13 @@
log = require('./log.js');

favicons = (app) => {
-var ENV_DEBUG, ENV_DEV, ENV_DOCKER, ENV_PROD, MONGODB_PORT, MONGODB_SERVER, url;
+var ENV_DEBUG, ENV_DEV, ENV_DOCKER, ENV_PROD, MONGODB_PASSWORD, MONGODB_PORT, MONGODB_SERVER, MONGODB_USERNAME, url;
ENV_DEBUG = process.env.NODE_ENV === 'debug';
ENV_DEV = process.env.NODE_ENV === 'development' || process.env.NODE_ENV === 'development';
ENV_PROD = process.env.NODE_ENV === 'production';
ENV_DOCKER = process.env.NODE_ENV === 'docker';
+MONGODB_USERNAME = process.env.MONGODB_USERNAME;
+MONGODB_PASSWORD = process.env.MONGODB_PASSWORD;
MONGODB_SERVER = "db_mongo";
if (ENV_DEV) {
MONGODB_SERVER = 'localhost';
@@ -33,7 +35,7 @@
log.debug("Running as production server");
}
if (ENV_PROD) {
-url = `mongodb://${MONGODB_SERVER}:${MONGODB_PORT}/newsblur?replicaSet=nbset&readPreference=secondaryPreferred`;
+url = `mongodb://${MONGODB_USERNAME}:${MONGODB_PASSWORD}@${MONGODB_SERVER}:${MONGODB_PORT}/newsblur?replicaSet=nbset&readPreference=secondaryPreferred&authSource=admin`;
} else {
url = `mongodb://${MONGODB_SERVER}:${MONGODB_PORT}/newsblur`;
}
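Both the CoffeeScript source and its compiled JavaScript now embed credentials, pulled from the environment, and authSource=admin in the production connection string. The same URI shape, sketched in Python with placeholder values for comparison:

    username, password, server, port = "newsblur", "hunter2", "db_mongo", 27017  # placeholders only
    url = (f"mongodb://{username}:{password}@{server}:{port}/newsblur"
           f"?replicaSet=nbset&readPreference=secondaryPreferred&authSource=admin")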
node/package-lock.json (generated; 2,827 changes — diff suppressed because it is too large)
@@ -73,6 +73,7 @@
'story_layout' : 'split',
'collapsed_folders' : [],
'story_styling' : 'sans-serif',
+'feed_font' : 'whitney',
'feed_size' : 'm',
'story_size' : 'm',
'story_line_spacing' : 'm',
@@ -85,10 +85,7 @@
<img src="/media/img/logo_512.png" class="logo">
<h1>NewsBlur is in <span class="error404">maintenance mode</span></h1>
<div class="description">
-<p>3:00a ET: Snapshotting is done, backup has been verified (woohoo!), and now the MongoDB cluster is syncing. Should be about an hour from now and all will be well.</p>
-<p>12:15a ET: Snapshotting looks about half-way done. This unplanned downtime has made it to the top of Hacker News. If you'd like to comment or learn more details, <a href="https://news.ycombinator.com/item?id=27613217">I posted on the NewsBlur thread</a>.</p>
-<p>10:35p ET: Looks like the snapshot will take 10 hours to make. Ordinarily this wouldn't be a problem because the service is running and a snapshot would be made on a secondary DB. But all of the Mongo DBs faithfully deleted their data, so I'm taking a snapshot of a recent good backup. Once done, I can replicate the DB and we'll be back.</p>
-<p>9:54p ET: Holy moly, when I switched to a new Mongo DB server, a hacker deleted all of NewsBlur's mongo data and is now holding NewsBlur's data hostage. I'm dipping into a backup from a few hours ago and will keep you all updated.</p>
+<p>Performing some much needed maintenance to the MongoDB server. This should take between 5 and 10 minutes.</p>
<p>To pass the time, <a href="http://mltshp.com/popular">check out what's popular on MLTSHP</a>.</p>
</div>
</div>
@@ -13,18 +13,53 @@
<ul class="NB-menu-manage NB-menu-manage-notop">
<li class="NB-menu-item NB-menu-manage-theme">
<div class="NB-menu-manage-image"></div>
<ul class="segmented-control NB-options-theme">
<li class="NB-taskbar-button NB-theme-option NB-options-theme-light">
<span class="NB-task-title">Light</span>
</li>
<li class="NB-taskbar-button NB-theme-option NB-options-theme-dark">
<span class="NB-task-title">Dark</span>
</li>
<li class="NB-taskbar-button NB-theme-option NB-options-theme-auto">
<span class="NB-task-title">Auto</span>
</li>
</ul>
</li>
<li class="NB-menu-item NB-menu-manage-size">
<div class="NB-menu-manage-image"></div>
<ul class="segmented-control NB-options-size">
<li class="NB-taskbar-button NB-feed-size-option NB-options-feed-size-xs">
<span class="NB-task-title">XS</span>
</li>
<li class="NB-taskbar-button NB-feed-size-option NB-options-feed-size-s">
<span class="NB-task-title">S</span>
</li>
<li class="NB-taskbar-button NB-feed-size-option NB-options-feed-size-m">
<span class="NB-task-title">M</span>
</li>
<li class="NB-taskbar-button NB-feed-size-option NB-options-feed-size-l">
<span class="NB-task-title">L</span>
</li>
<li class="NB-taskbar-button NB-feed-size-option NB-options-feed-size-xl">
<span class="NB-task-title">XL</span>
</li>
</ul>
</li>
<li class="NB-menu-item NB-menu-manage-font">
<div class="NB-menu-manage-image"></div>
<ul class="segmented-control NB-options-font">
<li class="NB-taskbar-button NB-feed-font-option NB-options-feed-font-whitney NB-theme-feed-font-whitney">
<span class="NB-task-title">Whitney</span>
</li>
<li class="NB-taskbar-button NB-feed-font-option NB-options-feed-font-lucida NB-theme-feed-font-lucida">
<span class="NB-task-title">Lucida Grande</span>
</li>
<li class="NB-taskbar-button NB-feed-font-option NB-options-feed-font-gotham NB-theme-feed-font-gotham">
<span class="NB-task-title">Gotham</span>
</li>
</ul>
</li>
<li class="NB-menu-separator"></li>
<li class="NB-menu-item NB-menu-manage-preferences"> <li class="NB-menu-item NB-menu-manage-preferences">
<div class="NB-menu-manage-image"></div> <div class="NB-menu-manage-image"></div>
<div class="NB-menu-manage-title"> <div class="NB-menu-manage-title">
@@ -117,7 +117,7 @@ resource "digitalocean_droplet" "app-push" {
}

resource "digitalocean_droplet" "app-refresh" {
-count = 2
+count = 8
image = var.droplet_os
name = "app-refresh${count.index+1}"
region = var.droplet_region
@@ -254,12 +254,24 @@ resource "digitalocean_droplet" "node-images" {
}
}
resource "digitalocean_volume" "node_page_volume" {
count = 0
region = "nyc1"
name = "nodepage"
size = 100
initial_filesystem_type = "ext4"
description = "Original Pages for NewsBlur"
}
resource "digitalocean_droplet" "node-page" { resource "digitalocean_droplet" "node-page" {
image = var.droplet_os image = var.droplet_os
name = "node-page" name = "node-page"
region = var.droplet_region region = var.droplet_region
size = var.droplet_size size = var.droplet_size
ssh_keys = [digitalocean_ssh_key.default.fingerprint] ssh_keys = [digitalocean_ssh_key.default.fingerprint]
# volume_ids = [digitalocean_volume.node_page_volume.0.id]
volume_ids = ["70b5a115-eb5c-11eb-81b7-0a58ac144312"] # 100GB volume created outside TF. Remove when upgrading to 200GB
provisioner "local-exec" { provisioner "local-exec" {
command = "/srv/newsblur/ansible/utils/generate_inventory.py; sleep 120" command = "/srv/newsblur/ansible/utils/generate_inventory.py; sleep 120"
} }
@@ -275,7 +287,7 @@ resource "digitalocean_droplet" "db-elasticsearch" {
image = var.droplet_os
name = "db-elasticsearch"
region = var.droplet_region
-size = var.droplet_size
+size = var.elasticsearch_droplet_size
ssh_keys = [digitalocean_ssh_key.default.fingerprint]
provisioner "local-exec" {
command = "/srv/newsblur/ansible/utils/generate_inventory.py; sleep 120"
@@ -374,7 +386,7 @@ resource "digitalocean_droplet" "db-postgres" {
}

resource "digitalocean_volume" "mongo_volume" {
-count = 1
+count = 2
region = "nyc1"
name = "mongo${count.index+1}"
size = 400
@@ -383,7 +395,7 @@ resource "digitalocean_volume" "mongo_volume" {
}

resource "digitalocean_droplet" "db-mongo-primary" {
-count = 1
+count = 2
image = var.droplet_os
name = "db-mongo${count.index+1}"
region = var.droplet_region

@@ -429,28 +441,49 @@ resource "digitalocean_droplet" "db-mongo-secondary" {
}
}
# resource "digitalocean_droplet" "db-mongo-analytics" { resource "digitalocean_volume" "mongo_analytics_volume" {
# image = var.droplet_os count = 1
# name = "db-mongo-analytics" region = "nyc1"
# region = var.droplet_region name = "mongoanalytics"
# size = var.droplet_size size = 100
# ssh_keys = [digitalocean_ssh_key.default.fingerprint] initial_filesystem_type = "xfs"
# provisioner "local-exec" { description = "Storage for NewsBlur MongoDB Analytics"
# command = "/srv/newsblur/ansible/utils/generate_inventory.py; sleep 120" }
# }
# provisioner "local-exec" { resource "digitalocean_droplet" "db-mongo-analytics" {
# command = "cd ..; ansible-playbook -l ${self.name} ansible/playbooks/setup_root.yml" image = var.droplet_os
# } name = "db-mongo-analytics"
# provisioner "local-exec" { region = var.droplet_region
# command = "cd ..; ansible-playbook -l ${self.name} ansible/setup.yml" size = var.mongo_analytics_droplet_size
# } volume_ids = [digitalocean_volume.mongo_analytics_volume.0.id]
# } ssh_keys = [digitalocean_ssh_key.default.fingerprint]
provisioner "local-exec" {
command = "/srv/newsblur/ansible/utils/generate_inventory.py; sleep 120"
}
provisioner "local-exec" {
command = "cd ..; ansible-playbook -l ${self.name} ansible/playbooks/setup_root.yml"
}
provisioner "local-exec" {
command = "cd ..; ansible-playbook -l ${self.name} ansible/setup.yml"
}
}
resource "digitalocean_volume" "metrics_volume" {
count = 0
region = "nyc1"
name = "metrics"
size = 100
initial_filesystem_type = "xfs"
description = "Storage for NewsBlur Prometheus metrics"
}
resource "digitalocean_droplet" "db-metrics" { resource "digitalocean_droplet" "db-metrics" {
image = var.droplet_os image = var.droplet_os
name = "db-metrics" name = "db-metrics"
region = var.droplet_region region = var.droplet_region
size = var.droplet_size size = var.metrics_droplet_size
# volume_ids = [digitalocean_volume.metrics_volume.0.id]
volume_ids = ["f815908f-e1b7-11eb-a10f-0a58ac145428"] # 100GB volume created outside TF. Remove when upgrading to 200GB
ssh_keys = [digitalocean_ssh_key.default.fingerprint] ssh_keys = [digitalocean_ssh_key.default.fingerprint]
provisioner "local-exec" { provisioner "local-exec" {
command = "/srv/newsblur/ansible/utils/generate_inventory.py; sleep 120" command = "/srv/newsblur/ansible/utils/generate_inventory.py; sleep 120"
@@ -1,3 +1,5 @@
+# doctl compute size list
+
variable "droplet_region" {
type = string
default = "nyc1"
@@ -29,7 +31,22 @@ variable "mongo_droplet_size" {
default = "m-4vcpu-32gb"
}

+variable "metrics_droplet_size" {
+type = string
+default = "s-1vcpu-2gb"
+}
+
variable "mongo_secondary_droplet_size" {
type = string
default = "m-2vcpu-16gb"
}
variable "mongo_analytics_droplet_size" {
type = string
default = "s-2vcpu-4gb"
}
variable "elasticsearch_droplet_size" {
type = string
default = "m3-2vcpu-16gb"
}
utils/backups/backup_mongo.py (136 changes; mode changed: normal file → executable file)
@@ -1,19 +1,127 @@
#!/usr/bin/python3
+from datetime import datetime, timedelta
import os
-import shutil
-from newsblur_web import settings
+import re
+import logging
+import mimetypes
import boto3
+import shutil
+from boto3.s3.transfer import S3Transfer
+from newsblur_web import settings

-filenames = [f for f in os.listdir('/opt/mongo/newsblur/backup/') if '.tgz' in f]
-for filename in filenames:
-    print('Uploading %s to S3...' % filename)
-    try:
-        s3 = boto3.resource('s3')
-        bucket = s3.Bucket(settings.get('S3_BACKUP_BUCKET'))
-        bucket.upload_file(filename, name="mongo/%s" % (filename))
-    except Exception as e:
-        print(" ****> Exceptions: %s" % e)
-    shutil.rmtree(filename[:-4])
-    os.remove(filename)
+logger = logging.getLogger(__name__)
+
+def main():
+    BACKUP_DIR = '/opt/mongo/newsblur/backup/'
+    filenames = [f for f in os.listdir(BACKUP_DIR) if '.tgz' in f]
+    for filename in filenames:
+        file_path = os.path.join(BACKUP_DIR, filename)
+        basename = os.path.basename(file_path)
+        key_base, key_ext = list(splitext(basename))
+        key_prefix = "".join(['mongo/', key_base])
+        key_datestamp = datetime.utcnow().strftime("_%Y-%m-%d-%H-%M")
+        key = "".join([key_prefix, key_datestamp, key_ext])
+        print("Uploading {0} to {1}".format(file_path, key))
+        upload(file_path, settings.S3_BACKUP_BUCKET, key)
+        print('Rotating file on S3 with key prefix {0} and extension {1}'.format(key_prefix, key_ext))
+        rotate(key_prefix, key_ext, settings.S3_BACKUP_BUCKET)
+        # shutil.rmtree(filename[:-4])
+        # os.remove(filename)
+
+def upload_rotate(file_path, s3_bucket, s3_key_prefix, aws_key=None, aws_secret=None):
+    '''
+    Upload file_path to s3 bucket with prefix
+    Ex. upload_rotate('/tmp/file-2015-01-01.tar.bz2', 'backups', 'foo.net/')
+    would upload file to bucket backups with key=foo.net/file-2015-01-01.tar.bz2
+    and then rotate all files starting with foo.net/file and with extension .tar.bz2
+    Timestamps need to be present between the file root and the extension and in the same format as strftime("%Y-%m-%d").
+    Ex file-2015-12-28.tar.bz2
+    '''
+    key = ''.join([s3_key_prefix, os.path.basename(file_path)])
+    logger.debug("Uploading {0} to {1}".format(file_path, key))
+    upload(file_path, s3_bucket, key, aws_access_key_id=aws_key, aws_secret_access_key=aws_secret)
+    file_root, file_ext = splitext(os.path.basename(file_path))
+    # strip timestamp from file_base
+    regex = '(?P<filename>.*)-(?P<year>[\d]+?)-(?P<month>[\d]+?)-(?P<day>[\d]+?)'
+    match = re.match(regex, file_root)
+    if not match:
+        raise Exception('File does not contain a timestamp')
+    key_prefix = ''.join([s3_key_prefix, match.group('filename')])
+    logger.debug('Rotating files on S3 with key prefix {0} and extension {1}'.format(key_prefix, file_ext))
+    rotate(key_prefix, file_ext, s3_bucket, aws_key=aws_key, aws_secret=aws_secret)
+
+def rotate(key_prefix, key_ext, bucket_name, daily_backups=7, weekly_backups=4, aws_key=None, aws_secret=None):
+    """ Delete old files we've uploaded to S3 according to grandfather, father, son strategy """
+    session = boto3.Session(
+        aws_access_key_id=aws_key,
+        aws_secret_access_key=aws_secret
+    )
+    s3 = session.resource('s3')
+    bucket = s3.Bucket(bucket_name)
+    keys = bucket.objects.filter(Prefix=key_prefix)
+    regex = '{0}_(?P<year>[\d]+?)-(?P<month>[\d]+?)-(?P<day>[\d]+?)-(?P<hour>[\d]+?)-(?P<minute>[\d]+?){1}'.format(key_prefix, key_ext)
+    backups = []
+    for key in keys:
+        match = re.match(regex, str(key.key))
+        if not match:
+            continue
+        year = int(match.group('year'))
+        month = int(match.group('month'))
+        day = int(match.group('day'))
+        hour = int(match.group('hour'))
+        minute = int(match.group('minute'))
+        key_date = datetime(year, month, day, hour, minute)
+        backups[:0] = [key_date]
+    backups = sorted(backups, reverse=True)
+    if len(backups) > daily_backups+1 and backups[daily_backups] - backups[daily_backups+1] < timedelta(days=7):
+        key = bucket.Object("{0}{1}{2}".format(key_prefix, backups[daily_backups].strftime("_%Y-%m-%d-%H-%M"), key_ext))
+        logger.debug("[not] deleting daily {0}".format(key))
+        # key.delete()
+        del backups[daily_backups]
+    month_offset = daily_backups + weekly_backups
+    if len(backups) > month_offset+1 and backups[month_offset] - backups[month_offset+1] < timedelta(days=30):
+        key = bucket.Object("{0}{1}{2}".format(key_prefix, backups[month_offset].strftime("_%Y-%m-%d-%H-%M"), key_ext))
+        logger.debug("[not] deleting weekly {0}".format(key))
+        # key.delete()
+        del backups[month_offset]
+
+def splitext(filename):
+    """ Return the filename and extension according to the first dot in the filename.
+    This helps date stamping .tar.bz2 or .ext.gz files properly.
+    """
+    index = filename.find('.')
+    if index == 0:
+        index = 1+filename[1:].find('.')
+    if index == -1:
+        return filename, ''
+    return filename[:index], filename[index:]
+    return os.path.splitext(filename)
+
+def upload(source_path, bucketname, keyname, acl='private', guess_mimetype=True, aws_access_key_id=None, aws_secret_access_key=None):
+    client = boto3.client('s3', 'us-west-2', aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key)
+    transfer = S3Transfer(client)
+    # Upload /tmp/myfile to s3://bucket/key
+    extra_args = {
+        'ACL': acl,
+    }
+    if guess_mimetype:
+        mtype = mimetypes.guess_type(keyname)[0] or 'application/octet-stream'
+        extra_args['ContentType'] = mtype
+    transfer.upload_file(source_path, bucketname, keyname, extra_args=extra_args)
+
+if __name__ == "__main__":
+    main()
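The rewritten script datestamps each key and then applies a grandfather-father-son rotation: keep daily_backups recent dailies and weekly_backups weeklies, discarding a backup when the gap to its neighbor is still under a week (or a month). A toy check of the key naming the script now produces, using the same strftime format (values illustrative):

    from datetime import datetime

    key_prefix, key_ext = 'mongo/backup_mongo', '.tgz'
    stamp = datetime(2021, 8, 11, 4, 0).strftime('_%Y-%m-%d-%H-%M')
    assert key_prefix + stamp + key_ext == 'mongo/backup_mongo_2021-08-11-04-00.tgz'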
@@ -1,39 +0,0 @@
#!/bin/sh
MONGODB_SHELL='mongo'
DUMP_UTILITY='mongodump'
DB_NAME='newsblur'
COLLECTIONS="classifier_tag classifier_author classifier_feed classifier_title userstories starred_stories"
date_now=`date +%Y_%m_%d_%H_%M`
dir_name='backup_mongo_'${date_now}
file_name='backup_mongo_'${date_now}'.bz2'
log() {
echo $1
}
do_cleanup(){
rm -rf backup_mongo_*
log 'cleaning up....'
}
do_backup(){
log 'snapshotting the db and creating archive'
# ${MONGODB_SHELL} admin fsync_lock.js
for collection in $COLLECTIONS
do
${DUMP_UTILITY} --db ${DB_NAME} --collection $collection -o ${dir_name}
done
tar -jcf $file_name ${dir_name}
# ${MONGODB_SHELL} admin fsync_unlock.js
log 'data backd up and created snapshot'
}
save_in_s3(){
log 'saving the backup archive in amazon S3' && \
python s3.py set ${file_name} && \
log 'data backup saved in amazon s3'
}
do_backup && save_in_s3 && do_cleanup
@@ -1,63 +0,0 @@
from boto.s3.connection import S3Connection
from boto.s3.connection import OrdinaryCallingFormat
from boto.s3.key import Key
import os
import sys
if '/srv/newsblur' not in ' '.join(sys.path):
sys.path.append("/srv/newsblur")
os.environ['DJANGO_SETTINGS_MODULE'] = 'newsblur_web.settings'
from django.conf import settings
ACCESS_KEY = settings.S3_ACCESS_KEY
SECRET = settings.S3_SECRET
BUCKET_NAME = settings.S3_BACKUP_BUCKET # Note that you need to create this bucket first
def save_file_in_s3(filename, name=None):
conn = S3Connection(ACCESS_KEY, SECRET, calling_format=OrdinaryCallingFormat())
bucket = conn.get_bucket(BUCKET_NAME)
k = Key(bucket)
k.key = name or filename
k.set_contents_from_filename(filename)
def get_file_from_s3(filename):
conn = S3Connection(ACCESS_KEY, SECRET, calling_format=OrdinaryCallingFormat())
bucket = conn.get_bucket(BUCKET_NAME)
k = Key(bucket)
k.key = filename
k.get_contents_to_filename(filename)
def list_backup_in_s3():
conn = S3Connection(ACCESS_KEY, SECRET, calling_format=OrdinaryCallingFormat())
bucket = conn.get_bucket(BUCKET_NAME)
for i, key in enumerate(bucket.get_all_keys()):
print("[%s] %s" % (i, key.name))
def delete_all_backups():
#FIXME: validate filename exists
conn = S3Connection(ACCESS_KEY, SECRET, calling_format=OrdinaryCallingFormat())
bucket = conn.get_bucket(BUCKET_NAME)
for i, key in enumerate(bucket.get_all_keys()):
print("deleting %s" % (key.name))
key.delete()
if __name__ == '__main__':
import sys
if len(sys.argv) < 3:
print('Usage: %s <get/set/list/delete> <backup_filename>' % (sys.argv[0]))
else:
if sys.argv[1] == 'set':
save_file_in_s3(sys.argv[2])
elif sys.argv[1] == 'get':
get_file_from_s3(sys.argv[2])
elif sys.argv[1] == 'list':
list_backup_in_s3()
elif sys.argv[1] == 'delete':
delete_all_backups()
else:
print('Usage: %s <get/set/list/delete> <backup_filename>' % (sys.argv[0]))
@@ -16,12 +16,12 @@ def mongo_max_replication_lag(connection):
        member_state = member['state']
        optime = member['optime']
        if member_state == PRIMARY_STATE:
-            primary_optime = optime.time
+            primary_optime = optime['ts'].time
        elif member_state == SECONDARY_STATE:
-            if not oldest_secondary_optime or optime.time < oldest_secondary_optime:
-                oldest_secondary_optime = optime.time
+            if not oldest_secondary_optime or optime['ts'].time < oldest_secondary_optime:
+                oldest_secondary_optime = optime['ts'].time

    if not primary_optime or not oldest_secondary_optime:
        return 0

    return primary_optime - oldest_secondary_optime
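The monitoring fix tracks a MongoDB change: newer replica-set status reports a member's optime as a subdocument containing a 'ts' Timestamp (plus the election term 't') rather than a bare Timestamp, so the seconds value must now be read from optime['ts']. A sketch of the two shapes, with illustrative values:

    from bson.timestamp import Timestamp

    # Older replSetGetStatus: member['optime'] was a bare Timestamp.
    old_member = {'state': 1, 'optime': Timestamp(1628700000, 1)}
    lag_source_old = old_member['optime'].time

    # Newer replSetGetStatus: optime is a subdocument with 'ts' and the term 't'.
    new_member = {'state': 1, 'optime': {'ts': Timestamp(1628700000, 1), 't': 68}}
    lag_source_new = new_member['optime']['ts'].time

    assert lag_source_old == lag_source_new == 1628700000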
@@ -28,7 +28,10 @@ from apps.notifications.tasks import QueueNotifications
from apps.notifications.models import MUserFeedNotification
from apps.push.models import PushSubscription
from apps.statistics.models import MAnalyticsFetcher, MStatistics

import feedparser
+
+feedparser.sanitizer._HTMLSanitizer.acceptable_elements.update(['iframe'])
+
from utils.story_functions import pre_process_story, strip_tags, linkify
from utils import log as logging
from utils.feed_functions import timelimit, TimeoutError
@@ -166,7 +169,8 @@ class FetchFeed:
                len(smart_str(raw_feed.content)),
                raw_feed.headers))
        except Exception as e:
-            logging.debug(" ***> [%-30s] ~FRFeed failed to fetch with request, trying feedparser: %s" % (self.feed.log_title[:30], str(e)[:100]))
+            logging.debug(" ***> [%-30s] ~FRFeed failed to fetch with request, trying feedparser: %s" % (self.feed.log_title[:30], str(e)))
+            # raise e

        if not self.fpf or self.options.get('force_fp', False):
            try:
@@ -661,12 +665,18 @@ class FeedFetcherWorker:
        """Update feed, since it may have changed"""
        return Feed.get_by_id(feed_id)

-    def process_feed_wrapper(self, feed_queue):
+    def reset_database_connections(self):
        connection._connections = {}
        connection._connection_settings = {}
        connection._dbs = {}
        settings.MONGODB = connect(settings.MONGO_DB_NAME, **settings.MONGO_DB)
-        settings.MONGOANALYTICSDB = connect(settings.MONGO_ANALYTICS_DB_NAME, **settings.MONGO_ANALYTICS_DB)
+        if 'username' in settings.MONGO_ANALYTICS_DB:
+            settings.MONGOANALYTICSDB = connect(db=settings.MONGO_ANALYTICS_DB['name'], host=f"mongodb://{settings.MONGO_ANALYTICS_DB['username']}:{settings.MONGO_ANALYTICS_DB['password']}@{settings.MONGO_ANALYTICS_DB['host']}/?authSource=admin", alias="nbanalytics")
+        else:
+            settings.MONGOANALYTICSDB = connect(db=settings.MONGO_ANALYTICS_DB['name'], host=f"mongodb://{settings.MONGO_ANALYTICS_DB['host']}/", alias="nbanalytics")
+
+    def process_feed_wrapper(self, feed_queue):
+        self.reset_database_connections()

        delta = None
        current_process = multiprocessing.current_process()
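Extracting reset_database_connections() lets each fetcher worker drop mongoengine's cached connection registries and reconnect, since database sockets inherited across a fork are not safe to reuse in the child. A minimal sketch of the pattern, under the assumption of a single default database (the helper name here is illustrative):

    from mongoengine import connect, connection

    def reset_connections(db_name, **kwargs):
        # mongoengine caches clients in these module-level dicts; clearing them
        # forces the child process to open fresh sockets instead of reusing the
        # parent's inherited ones.
        connection._connections = {}
        connection._connection_settings = {}
        connection._dbs = {}
        return connect(db_name, **kwargs)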
@@ -176,6 +176,10 @@ def format_relative_date(date, future=False):
    return "%s day%s %s" % (days, '' if days == 1 else 's', '' if future else 'ago')

def add_object_to_folder(obj, in_folder, folders, parent='', added=False):
+    if parent.startswith('river:'):
+        parent = parent.replace('river:', '')
+    if in_folder.startswith('river:'):
+        in_folder = in_folder.replace('river:', '')
    obj_identifier = obj
    if isinstance(obj, dict):
        obj_identifier = list(obj.keys())[0]
@@ -20,7 +20,7 @@ class JSONFetcher:
            logging.debug(' ***> [%-30s] ~FRJSON fetch failed: %s' %
                          (self.feed.log_title[:30], address))
            return

        data = {}
        data['title'] = json_feed.get('title', '[Untitled]')
        data['link'] = json_feed.get('home_page_url', "")

@@ -43,11 +43,16 @@ class JSONFetcher:
            pubdate = item.get('date_published', None)
            if pubdate:
                date_published = dateutil.parser.parse(pubdate)
+            authors = item.get('authors', item.get('author', {}))
+            if isinstance(authors, list):
+                author_name = ', '.join([author.get('name', "") for author in authors])
+            else:
+                author_name = authors.get('name', "")

            story = {
                'title': item.get('title', ""),
                'link': item.get('external_url', item.get('url', "")),
                'description': item.get('content_html', item.get('content_text', "")),
-                'author_name': item.get('authors', item.get('author', {})).get('name', ""),
+                'author_name': author_name,
                'categories': item.get('tags', []),
                'unique_id': str(item.get('id', item.get('url', ""))),
                'pubdate': date_published,
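The author fix tracks the JSON Feed spec: version 1 used a single author object, while 1.1 introduced an authors array, so the old chained .get('name') call crashed on list input. A self-contained sketch of the new behavior on both input shapes (the items are illustrative):

    def author_name(item):
        authors = item.get('authors', item.get('author', {}))
        if isinstance(authors, list):
            return ', '.join([author.get('name', "") for author in authors])
        return authors.get('name', "")

    assert author_name({'author': {'name': 'Samuel'}}) == 'Samuel'
    assert author_name({'authors': [{'name': 'A'}, {'name': 'B'}]}) == 'A, B'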