Mirror of https://github.com/samuelclay/NewsBlur.git, synced 2025-04-13 09:42:01 +00:00
Fixing elasticsearch to allow consul to assume it's OK.
parent a2866ac7e3
commit 1065c964fd

10 changed files with 32 additions and 11 deletions

Makefile (+4)
@@ -197,6 +197,10 @@ mongodump:
mongorestore:
	- cp -fr docker/volumes/mongodump docker/volumes/db_mongo/
	- docker exec -it db_mongo mongorestore --port 29019 -d newsblur /data/db/mongodump/newsblur
index_feeds:
	- docker exec -it newsblur_web ./manage.py index_feeds
index_stories:
	- docker exec -it newsblur_web ./manage.py index_stories -R

# performance tests
perf-cli:
@@ -23,7 +23,6 @@
  state: present

- name: Start Elasticsearch Docker container
  become: yes
  docker_container:
    name: elasticsearch
    image: elasticsearch:7.14.0
@@ -41,7 +40,6 @@
      - name: newsblurnet
        aliases:
          - elasticsearch
    user: 1000:1001
    volumes:
      - /srv/newsblur/docker/volumes/elasticsearch:/usr/share/elasticsearch/data
      - /var/log/elasticsearch/:/var/log/elasticsearch/
@@ -1,13 +1,13 @@
 {
   "service": {
-    "name": "db-elasticsearch",
+    "name": "db-elasticsearch-staging",
     "tags": [
       "db"
     ],
     "port": 9200,
     "checks": [{
       "id": "es-ping",
-      "http": "http://{{ ansible_host }}:5579/db_check/elasticsearch",
+      "http": "http://{{ ansible_host }}:5579/db_check/elasticsearch?consul=1",
       "interval": "15s",
       "failures_before_critical": 4
     }]
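Note: with this service definition, consul polls the db_check endpoint every 15 seconds and only flips the service to critical after four consecutive failures, so a transient Elasticsearch hiccup keeps the node in rotation for roughly a minute. A rough sketch of that check loop in Python, using the URL and thresholds from the JSON above (the probe itself and its hostname are illustrative, not consul's actual implementation):

    import time
    import requests

    CHECK_URL = "http://db-elasticsearch-staging:5579/db_check/elasticsearch?consul=1"  # illustrative host
    INTERVAL = 15                  # "interval": "15s"
    FAILURES_BEFORE_CRITICAL = 4   # "failures_before_critical": 4

    def consul_like_probe():
        """Roughly mimic consul's HTTP check: 2xx counts as passing, anything else as failing."""
        failures = 0
        while True:
            try:
                resp = requests.get(CHECK_URL, timeout=10)
                failures = 0 if resp.ok else failures + 1
            except requests.RequestException:
                failures += 1
            if failures >= FAILURES_BEFORE_CRITICAL:
                print("service would now be marked critical")
            time.sleep(INTERVAL)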
@@ -1262,7 +1262,7 @@ def folder_rss_feed(request, user_id, secret_token, unread_filter, folder_slug):
             story['story_feed_id'],
             feed_title,
         )
-        # story_content = re.sub(r'[\x00-\x08\x0B-\x0C\x0E-\x1F]', '', story_content)
+        story_content = re.sub(r'[\x00-\x08\x0B-\x0C\x0E-\x1F]', '', story_content)
         story_title = "%s%s" % (("%s: " % feed_title) if feed_title else "", story['story_title'])
         story_data = {
             'title': story_title,
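Note: re-enabling that re.sub matters for the RSS output, because C0 control characters other than tab, newline, and carriage return are not allowed in XML 1.0, and leaving them in story content produces feeds that strict parsers reject. A quick illustration of what the pattern strips (the sample string is made up):

    import re

    # Same character class as the diff: C0 control chars except \t (\x09), \n (\x0A), \r (\x0D),
    # which are the only control characters XML 1.0 permits.
    CONTROL_CHARS = re.compile(r'[\x00-\x08\x0B-\x0C\x0E-\x1F]')

    sample = "Daring\x00 Fireball\x0b linked\tthis\n"   # made-up story content with stray control bytes
    print(repr(CONTROL_CHARS.sub('', sample)))          # 'Daring Fireball linked\tthis\n'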
@@ -91,7 +91,7 @@ def load_feed_favicon(request, feed_id):
 def feed_autocomplete(request):
     query = request.GET.get('term') or request.GET.get('query')
     version = int(request.GET.get('v', 1))
-    format = request.GET.get('format', 'autocomplete')
+    autocomplete_format = request.GET.get('format', 'autocomplete')
 
     # user = get_user(request)
     # if True or not user.profile.is_premium:
@@ -126,7 +126,7 @@ def feed_autocomplete(request):
     feeds = [feed for feed in feeds if feed and not feed.branch_from_feed]
     feeds = [feed for feed in feeds if all([x not in feed.feed_address for x in IGNORE_AUTOCOMPLETE])]
 
-    if format == 'autocomplete':
+    if autocomplete_format == 'autocomplete':
         feeds = [{
             'id': feed.pk,
             'value': feed.feed_address,
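Note: the rename from format to autocomplete_format avoids rebinding a Python built-in; once the name is shadowed, the built-in format() is unreachable for the rest of the function. A small, self-contained illustration of the hazard (function and variable names are made up):

    def render_price_shadowed(request_args):
        # Rebinding the name "format" hides the built-in format() used below.
        format = request_args.get('format', 'autocomplete')
        try:
            return format(1234.5, ',.2f')   # TypeError: 'str' object is not callable
        except TypeError as e:
            return f"shadowed built-in: {e}"

    def render_price(request_args):
        autocomplete_format = request_args.get('format', 'autocomplete')  # no shadowing
        return format(1234.5, ',.2f')       # '1,234.50'

    print(render_price_shadowed({'format': 'full'}))
    print(render_price({'format': 'full'}))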
@@ -496,6 +496,7 @@ class SearchFeed:
 
     @classmethod
     def index_name(cls):
+        # feeds-index
         return "%s-index" % cls.name
 
     @classmethod
@@ -1,5 +1,9 @@
 http.port: 9200
 http.cors.enabled: true
 http.cors.allow-origin: "*"
 http.cors.allow-headers: X-Requested-With,X-Auth-Token,Content-Type,Content-Length,Authorization
 http.cors.allow-credentials: true
+xpack.security.enabled: false
+
+discovery.type: single-node
+
@@ -7,3 +11,4 @@ cluster.routing.allocation.disk.threshold_enabled: false

cluster.name: "docker-cluster"
network.host: 0.0.0.0
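Note: with discovery.type: single-node and xpack.security.enabled: false, the dockerized node forms a one-node cluster and answers unauthenticated HTTP on 9200, which is what the consul check relies on. A quick sanity check against a node running with this config (the localhost URL is an assumption about where it is reachable):

    import requests

    ES_URL = "http://localhost:9200"   # port from http.port above; host assumed local

    # A single-node cluster with security disabled answers unauthenticated HTTP.
    health = requests.get(f"{ES_URL}/_cluster/health", timeout=5).json()
    print(health["cluster_name"], health["status"], health["number_of_nodes"])
    # e.g. "docker-cluster yellow 1" (yellow is normal with no replica shards)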
@@ -53,8 +53,8 @@ services:
 
   imageproxy:
     container_name: imageproxy
-    image: ghcr.io/willnorris/imageproxy:latest
-    # image: yusukeito/imageproxy:v0.11.2 # Switch this with the willnorris/imageproxy above if you want arm64
+    # image: ghcr.io/willnorris/imageproxy:latest
+    image: yusukeito/imageproxy:v0.11.2 # Switch this with the willnorris/imageproxy above if you want arm64
     user: "${CURRENT_UID}:${CURRENT_GID}"
     entrypoint: /app/imageproxy -addr 0.0.0.0:8088 -cache /tmp/imageproxy -verbose
     restart: unless-stopped
@@ -122,9 +122,17 @@ services:
       - 9200:9200
       - 9300:9300
     volumes:
-      - ./docker/volumes/elasticsearch:/elasticsearch/dat
+      - ./docker/volumes/elasticsearch:/usr/share/elasticsearch/data
+      - ./config/elasticsearch/elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml
+
+  dejavu:
+    container_name: dejavu
+    image: appbaseio/dejavu:3.6.0
+    restart: unless-stopped
+    ports:
+      - 1358:1358
 
 
   db_mongo:
     container_name: db_mongo
     image: mongo:4.0
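Note: the new dejavu container is a browser-based Elasticsearch UI on port 1358; because it runs entirely in the browser, it queries Elasticsearch on 9200 directly, which is why the elasticsearch.yml above opens up CORS. A small sketch of the CORS handshake dejavu depends on, with URLs assumed from the compose file above:

    import requests

    ES_URL = "http://localhost:9200"          # db_elasticsearch port mapping above
    DEJAVU_ORIGIN = "http://localhost:1358"   # dejavu port mapping above

    # Simulate the browser preflight dejavu performs before querying Elasticsearch.
    resp = requests.options(
        f"{ES_URL}/feeds-index/_search",
        headers={
            "Origin": DEJAVU_ORIGIN,
            "Access-Control-Request-Method": "POST",
            "Access-Control-Request-Headers": "content-type",
        },
        timeout=5,
    )
    print(resp.status_code, resp.headers.get("access-control-allow-origin"))
    # With http.cors.allow-origin: "*" the allow-origin header should come back set.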
@@ -239,6 +239,9 @@ def db_check_elasticsearch():
     except:
         abort(Response("Can't connect to db", 503))
 
+    if request.args.get('consul') == '1':
+        return str(1)
+
     if conn.indices.exists('feeds-index'):
         return str("Index exists, but didn't try search")
     # query = pyes.query.TermQuery("title", "daring fireball")
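Note: taken together with the consul check above, the new branch makes the endpoint's contract explicit: when consul calls with ?consul=1, any successful connection to Elasticsearch is enough to report healthy, and the index inspection is skipped. A minimal sketch of an endpoint with that shape, assuming Flask and the elasticsearch client; only the lines shown in the hunk are taken from the diff, the connection setup and the final fallback are illustrative:

    from flask import Flask, Response, abort, request
    from elasticsearch import Elasticsearch

    app = Flask(__name__)

    @app.route("/db_check/elasticsearch")
    def db_check_elasticsearch():
        try:
            conn = Elasticsearch("http://db_elasticsearch:9200")   # hostname is an assumption
            conn.info()                                            # force a round-trip to the node
        except Exception:
            abort(Response("Can't connect to db", 503))

        # consul only wants to know the node is reachable, so answer before
        # poking at indices; this is the behaviour the ?consul=1 branch adds.
        if request.args.get("consul") == "1":
            return str(1)

        if conn.indices.exists(index="feeds-index"):
            return "Index exists, but didn't try search"
        abort(Response("Index missing", 503))                      # illustrative fallback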
@@ -329,8 +329,10 @@ resource "digitalocean_droplet" "node-page" {
 }
 
 resource "digitalocean_droplet" "db-elasticsearch" {
+  count = 2
   image = var.droplet_os
-  name = "db-elasticsearch"
+  # name = "db-elasticsearch"
+  name = contains([0], count.index) ? "db-elasticsearch" : "db-elasticsearch${count.index}"
   region = var.droplet_region
   size = var.elasticsearch_droplet_size
   ssh_keys = [digitalocean_ssh_key.default.fingerprint]
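Note: count = 2 plus the contains([0], count.index) ternary keeps the first droplet's name stable while numbering the extras, so existing references to db-elasticsearch don't change when a second node is added. The same naming rule written out in Python for clarity (the function name is just for illustration):

    def droplet_name(index: int) -> str:
        # Mirrors the Terraform expression:
        # contains([0], count.index) ? "db-elasticsearch" : "db-elasticsearch${count.index}"
        return "db-elasticsearch" if index == 0 else f"db-elasticsearch{index}"

    print([droplet_name(i) for i in range(2)])
    # ['db-elasticsearch', 'db-elasticsearch1']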