Mirror of
https://github.com/samuelclay/NewsBlur.git
synced 2025-08-31 21:41:33 +00:00
Merge branch 'master' into circular
* master: Upping feed fetches. Adding meta refresh to maintenance page. Adding work to fabfile. Changing deploy to deploy_code.
This commit is contained in:
commit
976a5f15d1
5 changed files with 100 additions and 8 deletions
|
@ -379,7 +379,15 @@ class Feed(models.Model):
|
|||
# for feed_ids in (feeds[pos:pos + queue_size] for pos in xrange(0, len(feeds), queue_size)):
|
||||
for feed_id in feeds:
|
||||
UpdateFeeds.apply_async(args=(feed_id,), queue='update_feeds')
|
||||
|
||||
|
||||
@classmethod
def drain_task_feeds(cls, empty=False):
    """Empty the ``tasked_feeds`` sorted set in Redis.

    When ``empty`` is False (the default), feeds currently marked as
    tasked are first pushed back onto the ``queued_feeds`` set so they
    get picked up for fetching again; when True they are simply dropped.
    """
    r = redis.Redis(connection_pool=settings.REDIS_FEED_POOL)
    if not empty:
        tasked_feeds = r.zrange('tasked_feeds', 0, -1)
        # Guard: redis-py raises ResponseError/DataError on SADD with
        # zero members, so skip the requeue when nothing was tasked.
        if tasked_feeds:
            r.sadd('queued_feeds', *tasked_feeds)
    # Clear the whole sorted set either way.
    # NOTE(review): placement outside the `if` reconstructed from a
    # whitespace-mangled source — confirm against upstream.
    r.zremrangebyrank('tasked_feeds', 0, -1)
|
||||
|
||||
def update_all_statistics(self, full=True, force=False):
|
||||
self.count_subscribers()
|
||||
|
||||
|
|
|
@ -34,7 +34,7 @@ class TaskFeeds(Task):
|
|||
|
||||
# Regular feeds
|
||||
if tasked_feeds_size < 2000:
|
||||
feeds = r.srandmember('queued_feeds', 1500)
|
||||
feeds = r.srandmember('queued_feeds', 2000)
|
||||
Feed.task_feeds(feeds, verbose=True)
|
||||
active_count = len(feeds)
|
||||
else:
|
||||
|
|
13
fabfile.py
vendored
13
fabfile.py
vendored
|
@ -65,6 +65,10 @@ def server():
|
|||
def app():
    """Fabric role selector: base server setup, then scope to app hosts."""
    server()
    env.roles = ['app']
|
||||
|
||||
def work():
    """Fabric role selector: base server setup, then scope to work hosts."""
    server()
    env.roles = ['work']
|
||||
|
||||
def dev():
|
||||
server()
|
||||
|
@ -117,10 +121,13 @@ def post_deploy():
|
|||
|
||||
@parallel
|
||||
def deploy(fast=False):
|
||||
deploy_code(copy_assets=True, fast=fast)
|
||||
deploy_code(copy_assets=False, fast=fast)
|
||||
|
||||
def deploy_full():
|
||||
deploy_code(copy_assets=True, full=True)
|
||||
def deploy_web(fast=False):
|
||||
deploy_code(copy_assets=True, fast=fast, full=False)
|
||||
|
||||
def deploy_full(fast=False):
|
||||
deploy_code(copy_assets=True, fast=fast, full=True)
|
||||
|
||||
@parallel
|
||||
def deploy_code(copy_assets=False, full=False, fast=False):
|
||||
|
|
|
@ -7,7 +7,9 @@
|
|||
<META HTTP-EQUIV="CACHE-CONTROL" CONTENT="NO-CACHE" />
|
||||
<META HTTP-EQUIV="EXPIRES" CONTENT="0" />
|
||||
<META HTTP-EQUIV="PRAGMA" CONTENT="NO-CACHE" />
|
||||
|
||||
|
||||
<meta http-equiv="refresh" content="30">
|
||||
|
||||
<meta name="robots" content="noindex"/>
|
||||
|
||||
<style>
|
||||
|
@ -77,8 +79,9 @@
|
|||
<img src="/media/img/logo_512.png" class="logo">
|
||||
<h1>NewsBlur is in <span class="error404">maintenance mode</span></h1>
|
||||
<div class="description">
|
||||
<p>This will take approximately 5-10 minutes. I'm upgrading Redis, which is throwing faults due to memory issues.</p>
|
||||
<p>To pass the time, go surf <a href="http://mlkshk.com/popular">MLKSHK's popular page</a>.</p>
|
||||
<p>This will take approximately 2-3 minutes. I'm upgrading the MongoDB server. This is standard issue maintenance and should cause no harm to the interstellar continuum.</p>
|
||||
<p>Ohh, and if you're reading this, make sure you reload NewsBlur (which you can do right now) to grab the latest real-time upgrades.</p>
|
||||
<p>To pass the time, go surf <a href="http://mlkshk.com/popular" target="_blank">MLKSHK's popular page</a>.</p>
|
||||
<p></p>
|
||||
</div>
|
||||
</div>
|
||||
|
|
74
utils/zgrep.py
Executable file
74
utils/zgrep.py
Executable file
|
@ -0,0 +1,74 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
from optparse import OptionParser
|
||||
import os
|
||||
import select
|
||||
import subprocess
|
||||
import sys
|
||||
import yaml
|
||||
|
||||
|
||||
IGNORE_HOSTS = [
|
||||
'push',
|
||||
]
|
||||
|
||||
def main(role="app", role2="dev", command=None, path=None):
    """Tail (or grep) a log file across every host in two server roles.

    Opens one ssh subprocess per host and multiplexes their stdout onto
    ours via select(). Hosts whose name contains an IGNORE_HOSTS entry
    are skipped.

    :param role: first role key to read from hosts.yml
    :param role2: second role key to read from hosts.yml
    :param command: remote command prefix (defaults to ``tail -f``)
    :param path: remote file path (defaults to the newsblur log)
    """
    if not path:
        path = "/srv/newsblur/logs/newsblur.log"
    if not command:
        command = "tail -f"

    hosts_path = os.path.expanduser(os.path.join('../secrets-newsblur/configs/hosts.yml'))
    # safe_load + context manager: hosts.yml is plain data, so the
    # arbitrary-object yaml.load is unnecessary, and the original
    # yaml.load(open(...)) leaked the file handle.
    with open(hosts_path) as hosts_file:
        hosts = yaml.safe_load(hosts_file)

    # A role may be a dict of {name: [addresses]}; normalize to a list of
    # "address:name" strings, taking the last address for each host.
    for r in [role, role2]:
        if isinstance(hosts[r], dict):
            hosts[r] = ["%s:%s" % (hosts[r][k][-1], k) for k in hosts[r].keys()]

    streams = []
    for hostname in set(hosts[role] + hosts[role2]):
        if any(h in hostname for h in IGNORE_HOSTS):
            continue
        if ':' in hostname:
            hostname, address = hostname.split(':', 1)
        else:
            address = hostname
        if 'ec2' in hostname:
            # EC2 boxes need the explicit keypair; others rely on ssh config.
            ssh_args = ["ssh", "-i", os.path.expanduser("~/.ec2/sclay.pem"),
                        address, "%s %s" % (command, path)]
        else:
            ssh_args = ["ssh", address, "%s %s" % (command, path)]
        s = subprocess.Popen(ssh_args, stdout=subprocess.PIPE)
        s.name = hostname
        streams.append(s)

    try:
        i = 0
        while True:
            i += 1
            # Once every host has closed its pipe there is nothing left to
            # watch — without this, select() on an empty list blocks forever.
            if not streams:
                break
            readable, _, _ = select.select(
                [stream.stdout.fileno() for stream in streams], [], [])
            for fileno in readable:
                for stream in streams:
                    if stream.stdout.fileno() != fileno:
                        continue
                    data = os.read(fileno, 4096)
                    if not data:
                        # EOF from this host; stop watching its pipe.
                        streams.remove(stream)
                        break
                    # `"%s" % (data)` was a no-op copy; write the bytes read.
                    sys.stdout.write(data)
                    break
            if i > 1000:
                break
    except KeyboardInterrupt:
        # print() with parens works on both Python 2 and 3.
        print(" --- End of Logging ---")
||||
|
||||
|
||||
if __name__ == "__main__":
    parser = OptionParser()
    parser.add_option("-f", "--find", dest="find")
    parser.add_option("-p", "--path", dest="path")
    (options, args) = parser.parse_args()

    path = options.path
    find = options.find
    # Only build a zgrep command when a pattern was actually given;
    # the original produced `zgrep "None"` when -f was omitted. Passing
    # None lets main() fall back to its default ("tail -f").
    command = "zgrep \"%s\"" % find if find else None
    main(role="app", role2="dev", command=command, path=path)
|
Loading…
Add table
Reference in a new issue