Mirror of https://github.com/samuelclay/NewsBlur.git (synced 2025-09-18 21:50:56 +00:00)
Merge branch 'master' into 5.1
* master:
  Adding search to all hosts in fabfile.
  Android v4.7.0.
  iOS: Work around iOS 9 issue where training menu doesn’t appear the first time you select text.
  Adding logrotate for nginx.
  Premium accounts get feeds fetched at least once every 12 hours (for feeds that publish nothing).
  Flat feeds also gets user_id.
  Adding user_id to /reader/feeds.
  Boosting oauth authorization code expiration to 1 hour (from 60 seconds) to aid development.
  Adding MX DNS check on new users.
  Fixing ValueError when trimming on feeds that have bogus dates.
  Change find-next-unread to search forward then backward rather than forward in a loop.
commit 191658a353

10 changed files with 62 additions and 17 deletions
@@ -10,6 +10,7 @@ from apps.profile.tasks import EmailNewUser
 from apps.social.models import MActivity
 from apps.profile.models import blank_authenticate, RNewUserQueue
 from utils import log as logging
+from dns.resolver import query, NXDOMAIN

 class LoginForm(forms.Form):
     username = forms.CharField(label=_("Username or Email"), max_length=30,
@@ -115,6 +116,12 @@ class SignupForm(forms.Form):
         if any([banned in email for banned in ['mailwire24', 'mailbox9', 'scintillamail', 'bluemailboxes', 'devmailing']]):
             logging.info(" ***> [%s] Spammer signup banned: %s/%s" % (username, password, email))
             raise forms.ValidationError('Seriously, fuck off spammer.')
+        try:
+            domain = email.rsplit('@', 1)[-1]
+            if not query(domain, 'MX'):
+                raise forms.ValidationError('Sorry, that email is invalid.')
+        except NXDOMAIN:
+            raise forms.ValidationError('Sorry, that email is invalid.')
         exists = User.objects.filter(username__iexact=username).count()
         if exists:
             user_auth = authenticate(username=username, password=password)
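For context, a minimal sketch of the MX lookup this signup check performs, using the dnspython query/NXDOMAIN imports added above. The helper name and sample address are illustrative, and other resolver errors (timeouts, NoAnswer) are deliberately not handled, matching the diff:

    from dns.resolver import query, NXDOMAIN

    def email_domain_has_mx(email):
        # Look up MX records for everything after the last '@'.
        domain = email.rsplit('@', 1)[-1]
        try:
            return bool(query(domain, 'MX'))
        except NXDOMAIN:
            # The domain does not exist at all, so the address cannot receive mail.
            return False

    # email_domain_has_mx('someone@example.com')  -> True for a domain that publishes MX records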
@@ -297,6 +297,7 @@ def load_feeds(request):
         'social_services': social_services,
         'user_profile': user.profile,
         "is_staff": user.is_staff,
+        'user_id': user.pk,
         'folders': json.decode(folders.folders),
         'starred_count': starred_count,
         'starred_counts': starred_counts,
@@ -391,6 +392,7 @@ def load_feeds_flat(request):
         "social_profile": social_profile,
         "social_services": social_services,
         "user": user.username,
+        "user_id": user.pk,
         "is_staff": user.is_staff,
         "user_profile": user.profile,
         "iphone_version": iphone_version,
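For illustration, a sketch of how a client could read the new field; the /reader/feeds endpoint and the user_id key come from the commit message and the two hunks above, while the requests usage and host are assumptions and a real call would need an authenticated session:

    import requests

    # Authentication (session cookie) omitted for brevity.
    resp = requests.get('https://www.newsblur.com/reader/feeds')
    payload = resp.json()
    print(payload.get('user_id'), payload.get('is_staff'))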
@@ -1270,11 +1270,11 @@ class Feed(models.Model):
         self.save_popular_authors(feed_authors=feed_authors[:-1])

     @classmethod
-    def trim_old_stories(cls, start=0, verbose=True, dryrun=False):
+    def trim_old_stories(cls, start=0, verbose=True, dryrun=False, total=0):
         now = datetime.datetime.now()
         month_ago = now - datetime.timedelta(days=settings.DAYS_OF_STORY_HASHES)
         feed_count = Feed.objects.latest('pk').pk
-        total = 0
+
         for feed_id in xrange(start, feed_count):
             if feed_id % 1000 == 0:
                 print "\n\n -------------------------- %s (%s deleted so far) --------------------------\n\n" % (feed_id, total)
@@ -1337,7 +1337,11 @@ class Feed(models.Model):
         if read_stories_last_month == 0:
             original_cutoff = cutoff
             cutoff = min(cutoff, 25)
-            logging.debug(" ---> [%-30s] ~FBTrimming down to ~SB%s (instead of %s)~SN stories (~FM%s~FB)" % (self, cutoff, original_cutoff, self.last_story_date.strftime("%Y-%m-%d") if self.last_story_date else "No last story date"))
+            try:
+                logging.debug(" ---> [%-30s] ~FBTrimming down to ~SB%s (instead of %s)~SN stories (~FM%s~FB)" % (self, cutoff, original_cutoff, self.last_story_date.strftime("%Y-%m-%d") if self.last_story_date else "No last story date"))
+            except ValueError, e:
+                logging.debug(" ***> [%-30s] Error trimming: %s" % (self, e))
+                pass

         return cutoff

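For context, the ValueError this guards against: under Python 2, datetime.strftime() rejects years before 1900, which a feed carrying a bogus last_story_date can produce. A minimal sketch with an illustrative date:

    import datetime

    bogus_date = datetime.datetime(1, 1, 1)  # e.g. parsed from a feed with a nonsense date
    try:
        bogus_date.strftime("%Y-%m-%d")
    except ValueError as e:
        print(e)  # Python 2: strftime() requires year >= 1900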
@@ -1669,9 +1673,9 @@ class Feed(models.Model):
         if len(fetch_history['push_history']):
             total = total * 12

-        # 24 hour max for premiums, 48 hour max for free
+        # 12 hour max for premiums, 48 hour max for free
         if subs >= 1:
-            total = min(total, 60*24*1)
+            total = min(total, 60*12*1)
         else:
             total = min(total, 60*24*2)

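A worked example of the new cap, under the assumption (from the surrounding code and the merge message) that total holds the computed minutes between fetches and subs counts premium subscribers; the function and parameter names are simplified stand-ins:

    def cap_fetch_interval(total_minutes, premium_subs):
        if premium_subs >= 1:
            return min(total_minutes, 60*12)    # at most 12 hours between fetches
        else:
            return min(total_minutes, 60*24*2)  # at most 48 hours for free-only feeds

    # A silent feed with a week-long computed interval and one premium subscriber:
    # cap_fetch_interval(60*24*7, 1) -> 720 minutes, i.e. 12 hours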
@@ -1,8 +1,8 @@
 <?xml version="1.0" encoding="utf-8"?>
 <manifest xmlns:android="http://schemas.android.com/apk/res/android"
     package="com.newsblur"
-    android:versionCode="115"
-    android:versionName="4.7.0b2" >
+    android:versionCode="116"
+    android:versionName="4.7.0" >

     <uses-sdk
         android:minSdkVersion="14"
@@ -691,14 +691,15 @@ public abstract class Reading extends NbActivity implements OnPageChangeListener

         boolean unreadFound = false;
         // start searching just after the current story
-        int candidate = pager.getCurrentItem() + 1;
+        int currentIndex = pager.getCurrentItem();
+        int candidate = currentIndex + 1;
         unreadSearch:while (!unreadFound) {
-            // if we've reached the end of the list, loop back to the beginning
+            // if we've reached the end of the list, start searching backward from the current story
             if (candidate >= readingAdapter.getCount()) {
-                candidate = 0;
+                candidate = currentIndex - 1;
             }
-            // if we have looped all the way around to the story we are on, there aren't any left
-            if (candidate == pager.getCurrentItem()) {
+            // if we have looked all the way back to the first story, there aren't any left
+            if (candidate < 0) {
                 break unreadSearch;
             }
             Story story = readingAdapter.getStory(candidate);
@@ -710,7 +711,13 @@ public abstract class Reading extends NbActivity implements OnPageChangeListener
             // iterate through the stories in our cursor until we find an unread one
             if (story != null) {
                 if (story.read) {
-                    candidate++;
+                    if (candidate > currentIndex ) {
+                        // if we are still searching past the current story, search forward
+                        candidate++;
+                    } else {
+                        // if we hit the end and re-started before the current story, search backward
+                        candidate--;
+                    }
                     continue unreadSearch;
                 } else {
                     unreadFound = true;
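A minimal Python sketch of the new search order in the two Java hunks above: scan forward from the current story, and once the end is reached, scan backward from just before it. The story list and 'read' field here are illustrative:

    def find_next_unread(stories, current_index):
        candidate = current_index + 1
        while True:
            if candidate >= len(stories):
                # reached the end: restart just before the current story, going backward
                candidate = current_index - 1
            if candidate < 0:
                # looked all the way back to the first story; nothing unread remains
                return None
            if not stories[candidate]['read']:
                return candidate
            # keep moving in whichever direction we are currently searching
            candidate += 1 if candidate > current_index else -1

    # stories 0..4, currently on index 2, only index 1 unread -> returns 1
    stories = [{'read': True}, {'read': False}, {'read': True}, {'read': True}, {'read': True}]
    print(find_next_unread(stories, 2))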
@@ -48,6 +48,10 @@
     [self hideGradientBackground:webView];
     [self.webView.scrollView setDelaysContentTouches:YES];
     [self.webView.scrollView setDecelerationRate:UIScrollViewDecelerationRateNormal];
+
+    // Work around iOS 9 issue where menu doesn't appear the first time
+    // http://stackoverflow.com/questions/32685198/
+    [self.webView becomeFirstResponder];
 }

 - (void) hideGradientBackground:(UIView*)theView
 {
@@ -614,4 +618,10 @@ shouldStartLoadWithRequest:(NSURLRequest *)request
     [appDelegate.trainerViewController changeTitle:sender score:-1];
 }

+// Work around iOS 9 issue where menu doesn't appear the first time
+// http://stackoverflow.com/questions/32685198/
+- (BOOL)canBecomeFirstResponder {
+    return YES;
+}
+
 @end
9 config/logrotate.nginx.conf Normal file

@@ -0,0 +1,9 @@
+/usr/local/nginx/logs/*.log {
+  weekly
+  rotate 10
+  copytruncate
+  delaycompress
+  compress
+  notifempty
+  missingok
+}
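Note on the new config: copytruncate rotates the log in place (copy, then truncate the original), so nginx can keep writing to its open file descriptor without being signaled to reopen its logs; delaycompress keeps the most recent rotation uncompressed for one cycle, and notifempty/missingok skip empty or absent logs. This reading of the directives is standard logrotate behavior, not something stated in the commit itself.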
@@ -15,6 +15,7 @@ django-ses==0.4.1
 django-subdomains==2.0.3
 Django>=1.5,<1.6
 python-digitalocean==1.6
+dnspython==1.12.0
 Fabric==1.8.3
 gunicorn==19.1.1
 # psycopg2==2.5.2
10 fabfile.py vendored
@@ -586,11 +586,14 @@ def setup_logrotate(clear=True):
+        run('find /srv/newsblur/logs/*.log | xargs tee')
     put('config/logrotate.conf', '/etc/logrotate.d/newsblur', use_sudo=True)
     put('config/logrotate.mongo.conf', '/etc/logrotate.d/mongodb', use_sudo=True)
-    sudo('chown root.root /etc/logrotate.d/{newsblur,mongodb}')
-    sudo('chmod 644 /etc/logrotate.d/{newsblur,mongodb}')
+    put('config/logrotate.nginx.conf', '/etc/logrotate.d/nginx', use_sudo=True)
+    sudo('chown root.root /etc/logrotate.d/{newsblur,mongodb,nginx}')
+    sudo('chmod 644 /etc/logrotate.d/{newsblur,mongodb,nginx}')
     with settings(warn_only=True):
         sudo('chown sclay.sclay /srv/newsblur/logs/*.log')
         sudo('logrotate -f /etc/logrotate.d/newsblur')
+        sudo('logrotate -f /etc/logrotate.d/nginx')
         sudo('logrotate -f /etc/logrotate.d/mongodb')

 def setup_ulimit():
     # Increase File Descriptor limits.
@@ -846,6 +849,7 @@ def setup_db_firewall():
                   env.roledefs['work'] +
                   env.roledefs['push'] +
                   env.roledefs['www'] +
+                  env.roledefs['search'] +
                   env.roledefs['node']):
        sudo('ufw allow proto tcp from %s to any port %s' % (
            ip,
@@ -1233,7 +1237,7 @@ def setup_do(name, size=2, image=None):
     env.host_string = host
     time.sleep(20)
-    add_user_to_do()
+    do()
     assign_digitalocean_roledefs()

 def do_name(name):
     if re.search(r"[0-9]", name):
@@ -128,7 +128,8 @@ OAUTH2_PROVIDER = {
         'ifttt': 'Pair your NewsBlur account with other IFTTT channels.',
     },
     'CLIENT_ID_GENERATOR_CLASS': 'oauth2_provider.generators.ClientIdGenerator',
-    'ACCESS_TOKEN_EXPIRE_SECONDS': 60*60*24*365*10 # 10 years
+    'ACCESS_TOKEN_EXPIRE_SECONDS': 60*60*24*365*10, # 10 years
+    'AUTHORIZATION_CODE_EXPIRE_SECONDS': 60*60, # 1 hour
 }

 # ===========
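For reference, 60*60 is 3,600 seconds, i.e. the one-hour authorization-code lifetime named in the merge message, up from the 60-second value it mentions; the access-token lifetime (60*60*24*365*10, roughly ten years) is unchanged apart from the added trailing comma. This OAUTH2_PROVIDER dict is django-oauth-toolkit's settings block.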