Merge branch 'master' into dejal

David Sinclair 2022-08-01 16:58:29 -07:00
commit 5df59872de
14 changed files with 126 additions and 63 deletions

View file

@@ -35,4 +35,6 @@
"files.associations": {
"*.yml": "ansible"
},
"nrf-connect.toolchain.path": "${nrf-connect.toolchain:1.9.1}",
"C_Cpp.default.configurationProvider": "nrf-connect",
}

View file

@@ -125,4 +125,18 @@ You got the downtime message either through email or SMS. This is the order of o
crack are automatically fixed after 24 hours, but if many feeds fall through due to a bad
deploy or electrical failure, you'll want to accelerate that check by just draining the
tasked feeds pool, adding those feeds back into the queue. This command is idempotent.
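The drain command itself sits outside this hunk. As a rough illustration only (hypothetical key names, assuming the tasked and queued feed pools are redis sorted sets), draining amounts to folding one pool back into the other and clearing it:

```python
import redis

# Hypothetical sketch, not the actual command: move every feed id from the
# tasked pool back into the queued pool, then clear the tasked pool.
# Running it a second time adds nothing new, which is what makes it idempotent.
r = redis.Redis()
r.zunionstore('queued_feeds', ['queued_feeds', 'tasked_feeds'])
r.delete('tasked_feeds')
```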
## Python 3
### Switching to a new redis server
When the new redis server is connected to the primary redis server:
# db-redis-story2 = moving to new server
# db-redis-story = old server, about to be shut down
make celery_stop
make maintenance_on
apd -l db-redis-story2 -t replicaofnoone
aps -l db-redis-story,db-redis-story2 -t consul
make maintenance_off
make task
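For context, promoting `db-redis-story2` out of replica mode comes down to redis's `REPLICAOF NO ONE`; the `apd ... -t replicaofnoone` play presumably wraps it. A minimal redis-py equivalent, assuming direct access to the new host on the default port:

```python
import redis

# Promote the new server: with no arguments, slaveof() sends SLAVEOF NO ONE,
# which stops replication and lets db-redis-story2 accept writes as a primary.
r = redis.Redis(host='db-redis-story2', port=6379)
r.slaveof()
```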

View file

@@ -5,32 +5,25 @@ newsblur := $(shell docker ps -qf "name=newsblur_web")
.PHONY: node
#creates newsblur, but does not rebuild images or create keys
start:
- RUNWITHMAKEBUILD=True CURRENT_UID=${CURRENT_UID} CURRENT_GID=${CURRENT_GID} docker compose up -d
nb: pull bounce migrate bootstrap collectstatic
metrics:
- RUNWITHMAKEBUILD=True CURRENT_UID=${CURRENT_UID} CURRENT_GID=${CURRENT_GID} docker compose -f docker-compose.yml -f docker-compose.metrics.yml up -d
metrics-ps:
- RUNWITHMAKEBUILD=True docker compose -f docker-compose.yml -f docker-compose.metrics.yml ps
rebuild:
- RUNWITHMAKEBUILD=True CURRENT_UID=${CURRENT_UID} CURRENT_GID=${CURRENT_GID} docker compose down
- RUNWITHMAKEBUILD=True CURRENT_UID=${CURRENT_UID} CURRENT_GID=${CURRENT_GID} docker compose up -d
collectstatic:
- rm -fr static
- docker pull newsblur/newsblur_deploy
- docker run --rm -v $(shell pwd):/srv/newsblur newsblur/newsblur_deploy
#creates newsblur, builds new images, and creates/refreshes SSL keys
nb: pull
bounce:
- RUNWITHMAKEBUILD=True CURRENT_UID=${CURRENT_UID} CURRENT_GID=${CURRENT_GID} docker compose down
- [[ -d config/certificates ]] && echo "keys exist" || make keys
- RUNWITHMAKEBUILD=True CURRENT_UID=${CURRENT_UID} CURRENT_GID=${CURRENT_GID} docker compose up -d --build --remove-orphans
- docker exec newsblur_web ./manage.py migrate
bootstrap:
- docker exec newsblur_web ./manage.py loaddata config/fixtures/bootstrap.json
nbup:
- RUNWITHMAKEBUILD=True CURRENT_UID=${CURRENT_UID} CURRENT_GID=${CURRENT_GID} docker compose up -d --build --remove-orphans
coffee:

View file

@@ -5,6 +5,7 @@ from django.shortcuts import render
from django.views import View
from apps.profile.models import Profile, RNewUserQueue
+ from apps.statistics.models import MStatistics
class Users(View):
@@ -12,16 +13,33 @@ class Users(View):
last_year = datetime.datetime.utcnow() - datetime.timedelta(days=365)
last_month = datetime.datetime.utcnow() - datetime.timedelta(days=30)
last_day = datetime.datetime.utcnow() - datetime.timedelta(minutes=60*24)
+ expiration_sec = 60*60 # 1 hour
data = {
- 'all': User.objects.count(),
- 'yearly': Profile.objects.filter(last_seen_on__gte=last_year).count(),
- 'monthly': Profile.objects.filter(last_seen_on__gte=last_month).count(),
- 'daily': Profile.objects.filter(last_seen_on__gte=last_day).count(),
- 'premium': Profile.objects.filter(is_premium=True).count(),
- 'archive': Profile.objects.filter(is_archive=True).count(),
- 'pro': Profile.objects.filter(is_pro=True).count(),
- 'queued': RNewUserQueue.user_count(),
+ 'all': MStatistics.get('munin:users_count',
+     lambda: User.objects.count(),
+     set_default=True, expiration_sec=expiration_sec),
+ 'yearly': MStatistics.get('munin:users_yearly',
+     lambda: Profile.objects.filter(last_seen_on__gte=last_year).count(),
+     set_default=True, expiration_sec=expiration_sec),
+ 'monthly': MStatistics.get('munin:users_monthly',
+     lambda: Profile.objects.filter(last_seen_on__gte=last_month).count(),
+     set_default=True, expiration_sec=expiration_sec),
+ 'daily': MStatistics.get('munin:users_daily',
+     lambda: Profile.objects.filter(last_seen_on__gte=last_day).count(),
+     set_default=True, expiration_sec=expiration_sec),
+ 'premium': MStatistics.get('munin:users_premium',
+     lambda: Profile.objects.filter(is_premium=True).count(),
+     set_default=True, expiration_sec=expiration_sec),
+ 'archive': MStatistics.get('munin:users_archive',
+     lambda: Profile.objects.filter(is_archive=True).count(),
+     set_default=True, expiration_sec=expiration_sec),
+ 'pro': MStatistics.get('munin:users_pro',
+     lambda: Profile.objects.filter(is_pro=True).count(),
+     set_default=True, expiration_sec=expiration_sec),
+ 'queued': MStatistics.get('munin:users_queued',
+     lambda: RNewUserQueue.user_count(),
+     set_default=True, expiration_sec=expiration_sec),
}
chart_name = "users"
chart_type = "counter"

View file

@@ -173,10 +173,10 @@ class UserSubscription(models.Model):
max_score = current_time
if read_filter == 'unread':
min_score = read_dates[feed_id]
- if needs_unread_recalc[feed_id]:
-     pipeline.sdiffstore(unread_stories_key, stories_key, read_stories_key)
-     # pipeline.expire(unread_stories_key, unread_cutoff_diff.days*24*60*60)
-     pipeline.expire(unread_stories_key, 1*60*60) # 1 hour
+ # if needs_unread_recalc[feed_id]:
+ #     pipeline.sdiffstore(unread_stories_key, stories_key, read_stories_key)
+ #     # pipeline.expire(unread_stories_key, unread_cutoff_diff.days*24*60*60)
+ #     pipeline.expire(unread_stories_key, 1*60*60) # 1 hour
else:
min_score = 0
@@ -189,7 +189,7 @@ class UserSubscription(models.Model):
ranked_stories_key = unread_ranked_stories_key
if read_filter == 'unread':
if needs_unread_recalc[feed_id]:
- pipeline.zinterstore(unread_ranked_stories_key, [sorted_stories_key, unread_stories_key], aggregate="MAX")
+ pipeline.zdiffstore(unread_ranked_stories_key, [sorted_stories_key, read_stories_key])
# pipeline.expire(unread_ranked_stories_key, unread_cutoff_diff.days*24*60*60)
pipeline.expire(unread_ranked_stories_key, 1*60*60) # 1 hour
if order == 'oldest':
@@ -234,16 +234,39 @@ class UserSubscription(models.Model):
story_hashes.extend(hashes)
if store_stories_key:
- r.zunionstore(store_stories_key, unread_ranked_stories_keys, aggregate="MAX")
+ chunk_count = 0
+ chunk_size = 1000
+ if len(unread_ranked_stories_keys) < chunk_size:
+     r.zunionstore(store_stories_key, unread_ranked_stories_keys)
+ else:
+     pipeline = r.pipeline()
+     for unread_ranked_stories_keys_group in chunks(unread_ranked_stories_keys, chunk_size):
+         pipeline.zunionstore(f"{store_stories_key}-chunk{chunk_count}", unread_ranked_stories_keys_group, aggregate="MAX")
+         chunk_count += 1
+     pipeline.execute()
+     r.zunionstore(store_stories_key, [f"{store_stories_key}-chunk{i}" for i in range(chunk_count)], aggregate="MAX")
+     pipeline = r.pipeline()
+     for i in range(chunk_count):
+         pipeline.delete(f"{store_stories_key}-chunk{i}")
+     pipeline.execute()
if not store_stories_key:
return story_hashes
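The chunking above avoids issuing one ZUNIONSTORE over thousands of keys when a user follows many feeds: each group of 1,000 keys is unioned into a temporary key, the temporaries are unioned into the final key, and then deleted. The same idea as a standalone helper (a sketch, assuming a redis-py client; the helper name is made up):

```python
def chunked_zunionstore(r, dest_key, source_keys, chunk_size=1000):
    """Union many sorted sets into dest_key without one oversized command."""
    if len(source_keys) < chunk_size:
        r.zunionstore(dest_key, source_keys, aggregate="MAX")
        return
    pipe = r.pipeline()
    chunk_keys = []
    for start in range(0, len(source_keys), chunk_size):
        chunk_key = f"{dest_key}-chunk{start // chunk_size}"
        pipe.zunionstore(chunk_key, source_keys[start:start + chunk_size], aggregate="MAX")
        chunk_keys.append(chunk_key)
    pipe.execute()
    r.zunionstore(dest_key, chunk_keys, aggregate="MAX")
    r.delete(*chunk_keys)  # drop the temporary chunk keys
```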
def get_stories(self, offset=0, limit=6, order='newest', read_filter='all', cutoff_date=None):
-     story_hashes = UserSubscription.story_hashes(self.user.pk, feed_ids=[self.feed.pk],
-                                                  order=order, read_filter=read_filter,
-                                                  offset=offset, limit=limit,
-                                                  cutoff_date=cutoff_date)
+     r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
+     unread_ranked_stories_key = 'zU:%s:%s' % (self.user_id, self.feed_id)
+     if offset and r.exists(unread_ranked_stories_key):
+         byscorefunc = r.zrevrange
+         if order == "oldest":
+             byscorefunc = r.zrange
+         story_hashes = byscorefunc(unread_ranked_stories_key, start=offset, end=offset+limit)[:limit]
+     else:
+         story_hashes = UserSubscription.story_hashes(self.user.pk, feed_ids=[self.feed.pk],
+                                                      order=order, read_filter=read_filter,
+                                                      offset=offset, limit=limit,
+                                                      cutoff_date=cutoff_date)
story_date_order = "%sstory_date" % ('' if order == 'oldest' else '-')
mstories = MStory.objects(story_hash__in=story_hashes).order_by(story_date_order)
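`get_stories` now reuses the ranked set that `story_hashes` leaves behind in `zU:<user_id>:<feed_id>` (it expires after an hour): any page after the first is read straight out of redis instead of recomputing unread stories. Roughly, with made-up ids:

```python
# Page 3 of a feed, 6 stories per page, newest first; assumes the cached
# zU: key from a previous call still exists in redis.
offset, limit = 12, 6
hashes = r.zrevrange('zU:42:1764', start=offset, end=offset + limit)[:limit]
```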

View file

@@ -671,8 +671,8 @@ def load_single_feed(request, feed_id):
# User must be subscribed to a newsletter in order to read it
raise Http404
- if page > 200:
-     logging.user(request, "~BR~FK~SBOver page 200 on single feed: %s" % page)
+ if page > 400:
+     logging.user(request, "~BR~FK~SBOver page 400 on single feed: %s" % page)
assert False
if query:
@@ -1029,7 +1029,7 @@ def load_starred_stories(request):
story['long_parsed_date'] = format_story_link_date__long(story_date, nowtz)
starred_date = localtime_for_timezone(story['starred_date'], user.profile.timezone)
story['starred_date'] = format_story_link_date__long(starred_date, nowtz)
- story['starred_timestamp'] = starred_date.strftime('%s')
+ story['starred_timestamp'] = int(starred_date.timestamp())
story['read_status'] = 1
story['starred'] = True
story['intelligence'] = {
@@ -1354,7 +1354,7 @@ def load_read_stories(request):
starred_date = localtime_for_timezone(starred_story['starred_date'],
user.profile.timezone)
story['starred_date'] = format_story_link_date__long(starred_date, now)
- story['starred_timestamp'] = starred_date.strftime('%s')
+ story['starred_timestamp'] = int(starred_date.timestamp())
if story['story_hash'] in shared_stories:
story['shared'] = True
story['shared_comments'] = strip_tags(shared_stories[story['story_hash']]['comments'])
@@ -1533,7 +1533,7 @@ def load_river_stories__redis(request):
starred_date = localtime_for_timezone(starred_stories[story['story_hash']]['starred_date'],
user.profile.timezone)
story['starred_date'] = format_story_link_date__long(starred_date, now)
- story['starred_timestamp'] = starred_date.strftime('%s')
+ story['starred_timestamp'] = int(starred_date.timestamp())
story['user_tags'] = starred_stories[story['story_hash']]['user_tags']
story['user_notes'] = starred_stories[story['story_hash']]['user_notes']
story['highlights'] = starred_stories[story['story_hash']]['highlights']
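The three `starred_timestamp` edits above replace `strftime('%s')`, which is a libc extension rather than documented Python (it returns a string and formats via the machine's local time, ignoring the datetime's tzinfo), with the standard `datetime.timestamp()`. For example:

```python
from datetime import datetime, timezone

starred_date = datetime(2022, 8, 1, 12, 0, tzinfo=timezone.utc)
int(starred_date.timestamp())   # 1659355200 on any platform, as an int
# starred_date.strftime('%s')   # platform-dependent string; '%s' is not
#                               # guaranteed by Python and ignores tzinfo
```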

View file

@@ -1547,10 +1547,10 @@ class Feed(models.Model):
self.save_popular_authors(feed_authors=feed_authors[:-1])
@classmethod
- def trim_old_stories(cls, start=0, verbose=True, dryrun=False, total=0):
+ def trim_old_stories(cls, start=0, verbose=True, dryrun=False, total=0, end=None):
now = datetime.datetime.now()
month_ago = now - datetime.timedelta(days=settings.DAYS_OF_STORY_HASHES)
- feed_count = Feed.objects.latest('pk').pk
+ feed_count = end or Feed.objects.latest('pk').pk
for feed_id in range(start, feed_count):
if feed_id % 1000 == 0:
@@ -3089,7 +3089,7 @@ class MStarredStory(mongo.DynamicDocument):
story_tags = mongo.ListField(mongo.StringField(max_length=250))
user_notes = mongo.StringField()
user_tags = mongo.ListField(mongo.StringField(max_length=128))
- highlights = mongo.ListField(mongo.StringField(max_length=1024))
+ highlights = mongo.ListField(mongo.StringField(max_length=16384))
image_urls = mongo.ListField(mongo.StringField(max_length=1024))
meta = {

View file

@@ -28,12 +28,18 @@ class MStatistics(mongo.Document):
return "%s: %s" % (self.key, self.value)
@classmethod
- def get(cls, key, default=None):
+ def get(cls, key, default=None, set_default=False, expiration_sec=None):
obj = cls.objects.filter(key=key).first()
if not obj:
+     if set_default:
+         default = default()
+         cls.set(key, default, expiration_sec=expiration_sec)
return default
if obj.expiration_date and obj.expiration_date < datetime.datetime.now():
obj.delete()
+     if set_default:
+         default = default()
+         cls.set(key, default, expiration_sec=expiration_sec)
return default
return obj.value
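With `set_default=True`, `default` is expected to be a zero-argument callable: it only runs on a miss (key absent or past its `expiration_date`), and the computed value is written back with the given TTL via `cls.set`. The monitor views above use it like this:

```python
# Compute-on-miss with a one-hour TTL: the lambda is evaluated only when
# no fresh cached value exists, then stored via MStatistics.set().
users_count = MStatistics.get(
    'munin:users_count',
    lambda: User.objects.count(),
    set_default=True,
    expiration_sec=60 * 60,
)
```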

View file

@@ -28,7 +28,7 @@ django-paypal==1.1.2
django-qurl==0.1.1
django-pipeline>=2,<3
django-prometheus>=2,<3
- django-redis-cache==3.0.0
+ django-redis==5.2.0
django-redis-sessions==0.6.1
django-ses==1.0.3
django-timezone-field==4.1.1
@@ -101,7 +101,7 @@ PyYAML==5.3.1
pyzmq==22.0.3
raven==6.10.0
# readability-lxml==0.8.1.1 # Was vendorized due to noscript # Vendorized again due to 0.8.1.1 not out yet
- redis==3.5.3
+ redis>=4,<5
requests==2.25.0
requests-oauthlib==1.3.0
scipy==1.5.4

Binary file not shown.

Image changed: 37 KiB before, 335 KiB after.

View file

@@ -10,8 +10,24 @@ NEWSBLUR.ReaderMarkRead = function(options) {
1: 1,
2: 3,
3: 7,
- 4: 14
+ 4: 14,
+ 5: 30
};
+ if (NEWSBLUR.Globals.is_archive) {
+     this.values = {
+         0: 0,
+         1: 1,
+         2: 3,
+         3: 7,
+         4: 14,
+         5: 30,
+         6: 60,
+         7: 90,
+         8: 120,
+         9: 180,
+         10: 365,
+     };
+ }
this.options = $.extend({}, defaults, options);
this.model = NEWSBLUR.assets;
this.runner();
@@ -65,7 +81,7 @@ _.extend(NEWSBLUR.ReaderMarkRead.prototype, {
$slider.slider({
range: 'min',
min: 0,
- max: 4,
+ max: Object.keys(this.values).length-1,
step: 1,
value: _.indexOf(_.values(this.values), this.options['days']),
slide: function(e, ui) {
@@ -142,4 +158,4 @@ _.extend(NEWSBLUR.ReaderMarkRead.prototype, {
});
}
- });
+ });

View file

@@ -58,12 +58,8 @@ PRO_MINUTES_BETWEEN_FETCHES = 15
CACHES = {
'default': {
-     'BACKEND': 'redis_cache.RedisCache',
-     'LOCATION': 'db_redis:6579',
-     'OPTIONS': {
-         'DB': 6,
-         'PARSER_CLASS': 'redis.connection.HiredisParser'
-     },
+     'BACKEND': 'django_redis.cache.RedisCache',
+     'LOCATION': 'redis://db_redis:6579/6',
},
}

View file

@@ -786,13 +786,8 @@ SESSION_REDIS = {
CACHES = {
'default': {
-     'BACKEND': 'redis_cache.RedisCache',
-     'LOCATION': '%s:%s' % (REDIS_USER['host'], REDIS_PORT),
-     'OPTIONS': {
-         'DB': 6,
-         'PARSER_CLASS': 'redis.connection.HiredisParser',
-         'SERIALIZER_CLASS': 'redis_cache.serializers.PickleSerializer'
-     },
+     'BACKEND': 'django_redis.cache.RedisCache',
+     'LOCATION': 'redis://%s:%s/6' % (REDIS_USER['host'], REDIS_PORT),
},
}
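Both settings files make the same move as the requirements swap from django-redis-cache to django-redis: the backend takes a single `redis://host:port/db` URL in `LOCATION`, so the DB index 6 folds into the URL path and the old `OPTIONS` dict goes away. If the hiredis parser from the old config were still wanted, it would presumably be re-added along these lines (a sketch based on django-redis's documented options):

```python
CACHES = {
    'default': {
        'BACKEND': 'django_redis.cache.RedisCache',
        'LOCATION': 'redis://db_redis:6579/6',  # db index now lives in the URL
        'OPTIONS': {
            'PARSER_CLASS': 'redis.connection.HiredisParser',  # optional
        },
    },
}
```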

View file

@@ -66,11 +66,11 @@
<div class="NB-about-who-name">Andrei Dan</div>
<div class="NB-acount-who-github"><a href="https://github.com/sictiru">@sictiru</a></div>
</li>
- <li>
+ {% comment %} <li>
<img src="{{ MEDIA_URL }}img/static/Mark.jpg">
<div class="NB-about-who-name">Mark Anderson</div>
<div class="NB-acount-who-github"><a href="https://github.com/manderson23">@manderson23</a></div>
- </li>
+ </li> {% endcomment %}
</ul>
</div>
</div>
@@ -83,11 +83,11 @@
<div class="NB-about-who-name">David Sinclair</div>
<div class="NB-acount-who-github"><a href="https://github.com/Dejal">@Dejal</a></div>
</li>
- <li>
+ {% comment %} <li>
<img src="{{ MEDIA_URL }}img/static/Nicholas.jpg">
<div class="NB-about-who-name">Nicholas Riley</div>
<div class="NB-acount-who-github"><a href="https://github.com/nriley">@nriley</a></div>
- </li>
+ </li> {% endcomment %}
</ul>
</div>
</div>