---
- name: Copy common secrets
  copy:
    src: /srv/secrets-newsblur/settings/common_settings.py
    dest: /srv/newsblur/newsblur_web/local_settings.py
  register: app_changed

- block:
    - name: Copy task env
      copy:
        src: /srv/secrets-newsblur/settings/task_env.py
        dest: /srv/newsblur/newsblur_web/task_env.py
  rescue:
    - debug:
        msg: Missing task_env.py, ignoring...

- name: Add SERVER_NAME to task env
  lineinfile:
    path: /srv/newsblur/newsblur_web/task_env.py
    line: 'SERVER_NAME = "{{ inventory_hostname }}"'

- name: Make docker network for newsblurnet
  become: yes
  docker_network:
    name: newsblurnet
  notify: restart docker

- name: "Start docker containers"
  become: yes
  docker_container:
    name: "{{ item.container_name }}"
    image: newsblur/newsblur_python3
    state: started
    pull: yes
    container_default_behavior: no_defaults
    env:
      DOCKERBUILD: ""
    restart_policy: always
    networks_cli_compatible: yes
    network_mode: default
    networks:
      - name: newsblurnet
    command: "{{ item.command }}"
    healthcheck:
      # test: celery inspect ping -A newsblur_web -d celery@$HOSTNAME
      # Healthy as long as newsblur.log has been written to within the last 2 minutes
      test: bash -c "(($(date +%s) - $(stat /srv/newsblur/logs/newsblur.log -c %Y) < 120)) && exit 0 || exit 1"
      interval: 60s
      timeout: 10s
      retries: 3
      start_period: 30s
    volumes:
      - /srv/newsblur:/srv/newsblur
      - /etc/hosts:/etc/hosts
  when: "item.container_name in inventory_hostname"
  with_items:
    - container_name: task-celery
      command: "celery worker -A newsblur_web --loglevel=INFO -Q new_feeds,push_feeds,update_feeds,search_indexer"
    - container_name: task-work
      command: "celery worker -A newsblur_web --loglevel=INFO -B -s /srv/newsblur/logs/celerybeat-schedule.db -Q beat_feeds_task,work_queue,cron_queue"
  tags:
    - start

# Autoheal restarts any container whose healthcheck reports unhealthy
- name: "Start autoheal container"
  become: yes
  docker_container:
    name: autoheal
    image: willfarrell/autoheal
    state: started
    pull: yes
    container_default_behavior: no_defaults
    env:
      AUTOHEAL_CONTAINER_LABEL: all
    restart_policy: always
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock

- name: Register celery_task in consul
  tags: consul
  become: yes
  template:
    src: consul_service.json
    dest: /etc/consul.d/celery_task.json
  notify:
    - reload consul
  when: "item.service_name in inventory_hostname"
  with_items:
    - service_name: task-celery
    - service_name: task-work

- name: Setup logrotate
  become: yes
  copy:
    src: logrotate.conf
    dest: /etc/logrotate.d/newsblur
    mode: "0755"

# Fires the per-container reload handler only when common_settings.py changed
- name: Reload celery
  debug:
    msg: Reloading celery
  notify: "reload {{ item.container_name }}"
  when: app_changed.changed and item.container_name in inventory_hostname
  with_items:
    - container_name: task-celery
    - container_name: task-work
  changed_when: app_changed.changed

- name: Add sanity checkers cronjob for feeds fetched
  become: yes
  cron:
    name: feeds_fetched_sanity_checker
    user: root
    cron_file: /etc/cron.hourly/feeds_fetched_sanity_checker
    job: docker pull newsblur/newsblur_python3:latest; docker run --rm -v /srv/newsblur/:/srv/newsblur -h `cat /etc/hostname` --network=newsblurnet newsblur/newsblur_python3 /srv/newsblur/utils/monitor_task_fetches.py
  when: "'task-work' in inventory_hostname"
  tags:
    - sanity-checker

- name: Add sanity checkers cronjob for newsletter
  become: yes
  cron:
    name: newsletter_sanity_checker
    user: root
    cron_file: /etc/cron.hourly/newsletter_sanity_checker
    job: docker pull newsblur/newsblur_python3:latest; docker run --rm -v /srv/newsblur/:/srv/newsblur -h `cat /etc/hostname` --network=newsblurnet newsblur/newsblur_python3 /srv/newsblur/utils/monitor_newsletter_delivery.py
  when: "'task-work' in inventory_hostname"
  tags:
    - sanity-checker

- name: Add sanity checkers cronjob for work queue
  become: yes
  cron:
    name: work_queue_sanity_checker
    user: root
    cron_file: /etc/cron.hourly/work_queue_sanity_checker
    job: docker pull newsblur/newsblur_python3:latest; docker run --rm -v /srv/newsblur/:/srv/newsblur -h `cat /etc/hostname` --network=newsblurnet newsblur/newsblur_python3 /srv/newsblur/utils/monitor_work_queue.py
  when: "'task-work' in inventory_hostname"
  tags:
    - sanity-checker