Merge branch 'master' into bookmarklet

* master: (376 commits)
  Removing redis db accidentally committed.
  Removing non-ipad changes. -SC
  Adding source attribution in river blurblog. Also changing default view for new users to Feed.
  Cleaning up feed calculation in feed fetching.
  Turning on mongodb replication lag watcher.
  Forcing the feed fetcher to not count scores when there's replica lag. God this had better work.
  Collecting replication lag statistics, to be used to count unreads.
  Typo
  Removing all removal of read stories, so that they stick around for blurblogs. No good answer for this one.
  Fixing comments on river.
  final v1.5
  Moving read date to use shared date.
  final bug squashes
  semi final changes before launch
  Adding signup activity for iPad and web.
  make addsite a popover
  Adding monit for celery on db. Fixing push subscription issue with changing feed ids.
  polishing up first time user experience and fixing sharing nulls
  ftux ui
  adding in first time user experience fixes
  ...
This commit is contained in:
Samuel Clay 2012-08-17 16:10:19 -07:00
commit 0a1c974c41
709 changed files with 78234 additions and 10786 deletions

View file

114
apps/categories/models.py Normal file
View file

@ -0,0 +1,114 @@
import mongoengine as mongo
from itertools import groupby
from apps.rss_feeds.models import Feed
from apps.reader.models import UserSubscription, UserSubscriptionFolders
from utils import json_functions as json
from utils.feed_functions import add_object_to_folder
class MCategory(mongo.Document):
    """A curated category of sites offered to users (e.g. first-time users).

    Keeps a denormalized list of feed ids so a category can be serialized
    without touching MCategorySite; the list is rebuilt by reload_sites().
    """
    title = mongo.StringField()
    description = mongo.StringField()
    # Denormalized from MCategorySite rows; refreshed by reload_sites().
    feed_ids = mongo.ListField(mongo.IntField())

    meta = {
        'collection': 'category',
        'indexes': ['title'],
        'allow_inheritance': False,
        'index_drop_dups': True,
    }

    def __unicode__(self):
        return "%s: %s sites" % (self.title, len(self.feed_ids))

    @classmethod
    def add(cls, title, description):
        # Create and persist a new category document.
        return cls.objects.create(title=title, description=description)

    @classmethod
    def serialize(cls, category=None):
        """Return ``{'categories': [...], 'feeds': {pk: canonical}}``.

        Serializes every category, or only the one whose title equals
        ``category`` when given.  The 'feeds' dict covers the union of all
        feed ids across the serialized categories.
        """
        categories = cls.objects.all()
        if category:
            categories = categories.filter(title=category)

        data = dict(categories=[], feeds={})
        feed_ids = set()
        # NOTE: the loop variable shadows the ``category`` parameter, which
        # is no longer needed after the filter above.
        for category in categories:
            category_output = {
                'title': category.title,
                'description': category.description,
                'feed_ids': category.feed_ids,
            }
            data['categories'].append(category_output)
            feed_ids.update(list(category.feed_ids))

        feeds = Feed.objects.filter(pk__in=feed_ids)
        for feed in feeds:
            data['feeds'][feed.pk] = feed.canonical()

        return data

    @classmethod
    def reload_sites(cls, category_title=None):
        """Rebuild each category's denormalized ``feed_ids`` list from the
        MCategorySite membership rows (optionally limited to one title)."""
        category_sites = MCategorySite.objects.all()
        if category_title:
            category_sites = category_sites.filter(category_title=category_title)

        # itertools.groupby only groups adjacent items, so the sites must be
        # pre-sorted by the same key used for grouping.
        category_groups = groupby(sorted(category_sites, key=lambda c: c.category_title), key=lambda c: c.category_title)
        for category_title, sites in category_groups:
            # Raises DoesNotExist if a site references an unknown category
            # title -- presumably titles are kept in sync; verify.
            category = cls.objects.get(title=category_title)
            category.feed_ids = [site.feed_id for site in sites]
            category.save()
            print " ---> Reloaded category: %s" % category

    @classmethod
    def subscribe(cls, user_id, category_title):
        """Subscribe ``user_id`` to every feed in the category, and file all
        of those feeds into a top-level folder named after the category."""
        category = cls.objects.get(title=category_title)

        for feed_id in category.feed_ids:
            # get_or_create makes this idempotent for already-subscribed feeds.
            us, _ = UserSubscription.objects.get_or_create(
                feed_id=feed_id,
                user_id=user_id,
                defaults={
                    'needs_unread_recalc': True,
                    'active': True,
                }
            )

        usf, created = UserSubscriptionFolders.objects.get_or_create(
            user_id=user_id,
            defaults={'folders': '[]'}
        )
        # Create an empty folder at the root ('') named after the category;
        # add_folder presumably persists the change -- confirm against
        # UserSubscriptionFolders before relying on it.
        usf.add_folder('', category.title)

        # Place every category feed inside that folder in the user's
        # JSON-encoded folder structure, then save once.
        folders = json.decode(usf.folders)
        for feed_id in category.feed_ids:
            folders = add_object_to_folder(feed_id, category.title, folders)
        usf.folders = json.encode(folders)
        usf.save()
class MCategorySite(mongo.Document):
    """Membership row mapping a single feed into a category (by title)."""
    feed_id = mongo.IntField()
    category_title = mongo.StringField()

    meta = {
        'collection': 'category_site',
        'indexes': ['feed_id', 'category_title'],
        'allow_inheritance': False,
        'index_drop_dups': True,
    }

    def __unicode__(self):
        # Resolves the feed for display; raises Feed.DoesNotExist if the
        # referenced feed id is stale.
        feed = Feed.objects.get(pk=self.feed_id)
        return "%s: %s" % (self.category_title, feed)

    @classmethod
    def add(cls, category_title, feed_id):
        """Add ``feed_id`` to the category named ``category_title``.

        Only rebuilds the category's denormalized feed list when the
        pairing is actually new; duplicates are reported and ignored.
        """
        category_site, created = cls.objects.get_or_create(category_title=category_title,
                                                           feed_id=feed_id)

        if not created:
            print " ---> Site is already in category: %s" % category_site
        else:
            MCategory.reload_sites(category_title)

16
apps/categories/tests.py Normal file
View file

@ -0,0 +1,16 @@
"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
from django.test import TestCase
class SimpleTest(TestCase):
    """Placeholder test case left over from the Django app template."""

    def test_basic_addition(self):
        """Tests that 1 + 1 always equals 2."""
        total = 1 + 1
        self.assertEqual(total, 2)

7
apps/categories/urls.py Normal file
View file

@ -0,0 +1,7 @@
from django.conf.urls.defaults import url, patterns
from apps.categories import views
# Route table for the categories app.  Included under a prefix by the
# project-level urlconf, so r'^/?$' matches the app root.
urlpatterns = patterns('',
    url(r'^/?$', views.all_categories, name='all-categories'),
    url(r'^subscribe/?$', views.subscribe, name='categories-subscribe'),
)

37
apps/categories/views.py Normal file
View file

@ -0,0 +1,37 @@
from apps.categories.models import MCategory
from apps.reader.models import UserSubscriptionFolders
from utils import json_functions as json
from utils.user_functions import ajax_login_required
@json.json_view
def all_categories(request):
    """Serialize every category (with its feeds) as a JSON response."""
    return MCategory.serialize()
@ajax_login_required
@json.json_view
def subscribe(request):
    """Subscribe the logged-in user to every feed in the requested categories.

    Expects one or more ``category`` parameters naming existing category
    titles; rejects the request if none are given or any title is unknown.
    """
    user = request.user
    known_titles = [c['title'] for c in MCategory.serialize()['categories']]
    requested_titles = request.REQUEST.getlist('category')

    # Reject the whole request if it is empty or names any unknown category.
    has_invalid = any(title not in known_titles for title in requested_titles)
    if not requested_titles or has_invalid:
        message = "Choose one or more of these categories: %s" % ', '.join(known_titles)
        return dict(code=-1, message=message)

    for title in requested_titles:
        MCategory.subscribe(user.pk, title)

    usf = UserSubscriptionFolders.objects.get(user=user.pk)
    noun = 'category' if len(requested_titles) == 1 else 'categories'
    return dict(code=1,
                message="Subscribed to %s %s" % (len(requested_titles), noun),
                folders=json.decode(usf.folders))

View file

@ -2,7 +2,7 @@ import datetime
import pickle
import base64
from utils import log as logging
from oauth2client.client import OAuth2WebServerFlow
from oauth2client.client import OAuth2WebServerFlow, FlowExchangeError
import uuid
from django.contrib.sites.models import Site
# from django.db import IntegrityError
@ -83,7 +83,6 @@ def opml_export(request):
def reader_authorize(request):
# is_modal = request.GET.get('modal', False)
domain = Site.objects.get_current().domain
STEP2_URI = "http://%s%s" % (
(domain + '.com') if not domain.endswith('.com') else domain,
@ -96,6 +95,7 @@ def reader_authorize(request):
scope="http://www.google.com/reader/api",
redirect_uri=STEP2_URI,
user_agent='NewsBlur Pro, www.newsblur.com',
approval_prompt="force",
)
logging.user(request, "~BB~FW~SBAuthorize Google Reader import - %s" % (
request.META['REMOTE_ADDR'],
@ -137,9 +137,14 @@ def reader_callback(request):
user_agent='NewsBlur Pro, www.newsblur.com',
)
FLOW.redirect_uri = STEP2_URI
is_modal = request.GET.get('modal', False)
credential = FLOW.step2_exchange(request.REQUEST)
try:
credential = FLOW.step2_exchange(request.REQUEST)
except FlowExchangeError:
logging.info(" ***> [%s] Bad token from Google Reader." % (request.user,))
return render_to_response('social/social_connect.xhtml', {
'error': 'There was an error trying to import from Google Reader. Trying again will probably fix the issue.'
}, context_instance=RequestContext(request))
user_token = None
if request.user.is_authenticated():
@ -160,19 +165,6 @@ def reader_callback(request):
user_token.credential = base64.b64encode(pickle.dumps(credential))
user_token.session_id = request.session.session_key
user_token.save()
#
# try:
# if not user_token.access_token:
# raise IntegrityError
# user_token.save()
# except IntegrityError:
# if is_modal:
# return render_to_response('social/social_connect.xhtml', {
# 'error': 'There was an error trying to import from Google Reader. Trying again will probably fix the issue.'
# }, context_instance=RequestContext(request))
# logging.info(" ***> [%s] Bad token from Google Reader. Re-authenticating." % (request.user,))
# return HttpResponseRedirect(reverse('google-reader-authorize'))
# Fetch imported feeds on next page load
request.session['import_from_google_reader'] = True
@ -180,17 +172,15 @@ def reader_callback(request):
logging.user(request, "~BB~FW~SBFinishing Google Reader import - %s" % (request.META['REMOTE_ADDR'],))
if request.user.is_authenticated():
if is_modal or True:
return render_to_response('social/social_connect.xhtml', {}, context_instance=RequestContext(request))
else:
return HttpResponseRedirect(reverse('index'))
return render_to_response('social/social_connect.xhtml', {}, context_instance=RequestContext(request))
return HttpResponseRedirect(reverse('import-signup'))
@json.json_view
def import_from_google_reader(request):
code = 0
feed_count = 0
if request.user.is_authenticated():
reader_importer = GoogleReaderImporter(request.user)
auto_active = bool(request.REQUEST.get('auto_active') or False)
@ -203,8 +193,10 @@ def import_from_google_reader(request):
code = 1
if 'import_from_google_reader' in request.session:
del request.session['import_from_google_reader']
return dict(code=code)
feed_count = UserSubscription.objects.filter(user=request.user).count()
return dict(code=code, feed_count=feed_count)
def import_signup(request):
if request.method == "POST":

View file

@ -41,7 +41,7 @@ def set_preference(request):
setattr(request.user.profile, preference_name, preference_value)
elif preference_name in SPECIAL_PREFERENCES:
if preference_name == 'autofollow_friends':
social_services = MSocialServices.objects.get(user_id=request.user.pk)
social_services, _ = MSocialServices.objects.get_or_create(user_id=request.user.pk)
social_services.autofollow = preference_value
social_services.save()
elif preference_name == 'dashboard_date':

View file

@ -29,6 +29,7 @@ class PushSubscriptionManager(models.Manager):
lease_seconds = getattr(settings, 'PUBSUBHUBBUB_LEASE_SECONDS',
DEFAULT_LEASE_SECONDS)
feed = Feed.get_by_id(feed.pk)
subscription, created = self.get_or_create(feed=feed)
signals.pre_subscribe.send(sender=subscription, created=created)
subscription.set_expiration(lease_seconds)

View file

@ -6,6 +6,7 @@ from django.contrib.auth import authenticate
from django.db.models import Q
from apps.reader.models import Feature
from apps.profile.tasks import EmailNewUser
from apps.social.models import MActivity
from utils import log as logging
class LoginForm(forms.Form):
@ -126,7 +127,9 @@ class SignupForm(forms.Form):
new_user.save()
new_user = authenticate(username=username,
password=password)
MActivity.new_signup(user_id=new_user.pk)
if new_user.email:
EmailNewUser.delay(user_id=new_user.pk)

View file

@ -1,5 +1,6 @@
import sys
from django.db import models
from django.contrib.auth.models import User
from apps.rss_feeds.models import DuplicateFeed
from utils import log as logging
@ -26,6 +27,8 @@ class UserSubscriptionManager(models.Manager):
elif 'feed_id' in kwargs:
kwargs['feed_id'] = feed.pk
user = kwargs.get('user')
if isinstance(user, int):
user = User.objects.get(pk=user)
logging.debug(" ---> [%s] ~BRFound dupe UserSubscription: ~SB%s (%s)" % (user and user.username, feed, feed_id))
return super(UserSubscriptionManager, self).get(*args, **kwargs)
else:

View file

@ -191,7 +191,8 @@ class UserSubscription(models.Model):
return story_guids
@classmethod
def add_subscription(cls, user, feed_address, folder=None, bookmarklet=False, auto_active=True):
def add_subscription(cls, user, feed_address, folder=None, bookmarklet=False, auto_active=True,
skip_fetch=False):
feed = None
us = None
@ -235,7 +236,7 @@ class UserSubscription(models.Model):
us.active = True
us.save()
if feed.last_update < datetime.datetime.utcnow() - datetime.timedelta(days=1):
if not skip_fetch and feed.last_update < datetime.datetime.utcnow() - datetime.timedelta(days=1):
feed = feed.update()
from apps.social.models import MActivity

View file

@ -34,6 +34,7 @@ except:
pass
from apps.social.models import MSharedStory, MSocialProfile, MSocialServices
from apps.social.models import MSocialSubscription, MActivity
from apps.categories.models import MCategory
from apps.social.views import load_social_page
from utils import json_functions as json
from utils.user_functions import get_user, ajax_login_required
@ -46,12 +47,6 @@ from utils.view_functions import get_argument_or_404, render_to, is_true
from utils.ratelimit import ratelimit
from vendor.timezones.utilities import localtime_for_timezone
from pymongo.helpers import OperationFailure
from operator import itemgetter
from utils.story_functions import bunch
from utils.story_functions import story_score
SINGLE_DAY = 60*60*24
@never_cache
@render_to('reader/feeds.xhtml')
@ -234,6 +229,10 @@ def load_feeds(request):
user.profile.dashboard_date = datetime.datetime.now()
user.profile.save()
categories = None
if not user_subs:
categories = MCategory.serialize()
data = {
'feeds': feeds.values() if version == 2 else feeds,
'social_feeds': social_feeds,
@ -241,6 +240,7 @@ def load_feeds(request):
'social_services': social_services,
'folders': json.decode(folders.folders),
'starred_count': starred_count,
'categories': categories
}
return data
@ -317,6 +317,10 @@ def load_feeds_flat(request):
social_feeds = MSocialSubscription.feeds(**social_params)
social_profile = MSocialProfile.profile(user.pk)
categories = None
if not user_subs:
categories = MCategory.serialize()
logging.user(request, "~FBLoading ~SB%s~SN/~SB%s~SN feeds/socials ~FMflat~FB. %s" % (
len(feeds.keys()), len(social_feeds), '~SBUpdating counts.' if update_counts else ''))
@ -328,6 +332,7 @@ def load_feeds_flat(request):
"user": user.username,
"user_profile": user.profile,
"iphone_version": iphone_version,
"categories": categories,
}
return data
@ -587,163 +592,6 @@ def load_starred_stories(request):
return dict(stories=stories, feeds=unsub_feeds)
@json.json_view
def load_river_stories(request):
limit = 18
offset = int(request.REQUEST.get('offset', 0))
start = time.time()
user = get_user(request)
feed_ids = [int(feed_id) for feed_id in request.REQUEST.getlist('feeds') if feed_id]
original_feed_ids = list(feed_ids)
page = int(request.REQUEST.get('page', 1))
read_stories_count = int(request.REQUEST.get('read_stories_count', 0))
days_to_keep_unreads = datetime.timedelta(days=settings.DAYS_OF_UNREAD)
now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone)
if not feed_ids:
logging.user(request, "~FCLoading empty river stories: page %s" % (page))
return dict(stories=[])
# Fetch all stories at and before the page number.
# Not a single page, because reading stories can move them up in the unread order.
# `read_stories_count` is an optimization, works best when all 25 stories before have been read.
offset = (page-1) * limit - read_stories_count
limit = page * limit - read_stories_count
# Read stories to exclude
read_stories = MUserStory.objects(user_id=user.pk,
feed_id__in=feed_ids
).only('story_id').hint([('user_id', 1), ('feed_id', 1), ('story_id', 1)])
read_stories = [rs.story_id for rs in read_stories]
# Determine mark_as_read dates for all feeds to ignore all stories before this date.
feed_counts = {}
feed_last_reads = {}
for feed_id in feed_ids:
try:
usersub = UserSubscription.objects.get(feed__pk=feed_id, user=user)
except UserSubscription.DoesNotExist:
continue
if not usersub: continue
feed_counts[feed_id] = (usersub.unread_count_negative * 1 +
usersub.unread_count_neutral * 10 +
usersub.unread_count_positive * 20)
feed_last_reads[feed_id] = int(time.mktime(usersub.mark_read_date.timetuple()))
feed_counts = sorted(feed_counts.items(), key=itemgetter(1))[:40]
feed_ids = [f[0] for f in feed_counts]
feed_last_reads = dict([(str(feed_id), feed_last_reads[feed_id]) for feed_id in feed_ids
if feed_id in feed_last_reads])
feed_counts = dict(feed_counts)
# After excluding read stories, all that's left are stories
# past the mark_read_date. Everything returned is guaranteed to be unread.
mstories = MStory.objects(
story_guid__nin=read_stories,
story_feed_id__in=feed_ids,
# story_date__gte=start - days_to_keep_unreads
).map_reduce("""function() {
var d = feed_last_reads[this[~story_feed_id]];
if (this[~story_date].getTime()/1000 > d) {
emit(this[~id], this);
}
}""",
"""function(key, values) {
return values[0];
}""",
output='inline',
scope={
'feed_last_reads': feed_last_reads
}
)
try:
mstories = [story.value for story in mstories if story and story.value]
except OperationFailure, e:
return dict(error=str(e), code=-1)
mstories = sorted(mstories, cmp=lambda x, y: cmp(story_score(y, days_to_keep_unreads),
story_score(x, days_to_keep_unreads)))
# Prune the river to only include a set number of stories per feed
# story_feed_counts = defaultdict(int)
# mstories_pruned = []
# for story in mstories:
# print story['story_title'], story_feed_counts[story['story_feed_id']]
# if story_feed_counts[story['story_feed_id']] >= 3: continue
# mstories_pruned.append(story)
# story_feed_counts[story['story_feed_id']] += 1
stories = []
for i, story in enumerate(mstories):
if i < offset: continue
if i >= limit: break
stories.append(bunch(story))
stories = Feed.format_stories(stories)
found_feed_ids = list(set([story['story_feed_id'] for story in stories]))
# Find starred stories
# try:
if found_feed_ids:
starred_stories = MStarredStory.objects(
user_id=user.pk,
story_feed_id__in=found_feed_ids
).only('story_guid', 'starred_date')
starred_stories = dict([(story.story_guid, story.starred_date)
for story in starred_stories])
else:
starred_stories = {}
# except OperationFailure:
# logging.info(" ***> Starred stories failure")
# starred_stories = {}
# Intelligence classifiers for all feeds involved
if found_feed_ids:
classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk,
feed_id__in=found_feed_ids))
classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk,
feed_id__in=found_feed_ids))
classifier_titles = list(MClassifierTitle.objects(user_id=user.pk,
feed_id__in=found_feed_ids))
classifier_tags = list(MClassifierTag.objects(user_id=user.pk,
feed_id__in=found_feed_ids))
else:
classifier_feeds = []
classifier_authors = []
classifier_titles = []
classifier_tags = []
classifiers = sort_classifiers_by_feed(user=user, feed_ids=found_feed_ids,
classifier_feeds=classifier_feeds,
classifier_authors=classifier_authors,
classifier_titles=classifier_titles,
classifier_tags=classifier_tags)
# Just need to format stories
for story in stories:
story_date = localtime_for_timezone(story['story_date'], user.profile.timezone)
story['short_parsed_date'] = format_story_link_date__short(story_date, now)
story['long_parsed_date'] = format_story_link_date__long(story_date, now)
story['read_status'] = 0
if story['id'] in starred_stories:
story['starred'] = True
starred_date = localtime_for_timezone(starred_stories[story['id']], user.profile.timezone)
story['starred_date'] = format_story_link_date__long(starred_date, now)
story['intelligence'] = {
'feed': apply_classifier_feeds(classifier_feeds, story['story_feed_id']),
'author': apply_classifier_authors(classifier_authors, story),
'tags': apply_classifier_tags(classifier_tags, story),
'title': apply_classifier_titles(classifier_titles, story),
}
diff = time.time() - start
timediff = round(float(diff), 2)
logging.user(request, "~FYLoading ~FCriver stories~FY: ~SBp%s~SN (%s/%s "
"stories, ~SN%s/%s/%s feeds)" %
(page, len(stories), len(mstories), len(found_feed_ids),
len(feed_ids), len(original_feed_ids)))
return dict(stories=stories, classifiers=classifiers, elapsed_time=timediff)
@json.json_view
def load_river_stories__redis(request):
limit = 12
@ -769,6 +617,7 @@ def load_river_stories__redis(request):
mstories = MStory.objects(id__in=story_ids).order_by(story_date_order)
stories = Feed.format_stories(mstories)
found_feed_ids = list(set([story['story_feed_id'] for story in stories]))
stories, user_profiles = MSharedStory.stories_with_comments_and_profiles(stories, user.pk)
# Find starred stories
if found_feed_ids:
@ -826,7 +675,10 @@ def load_river_stories__redis(request):
(page, len(stories), len(mstories), len(found_feed_ids),
len(feed_ids), len(original_feed_ids)))
return dict(stories=stories, classifiers=classifiers, elapsed_time=timediff)
return dict(stories=stories,
classifiers=classifiers,
elapsed_time=timediff,
user_profiles=user_profiles)
@ajax_login_required
@ -1043,6 +895,7 @@ def add_url(request):
code = 0
url = request.POST['url']
auto_active = is_true(request.POST.get('auto_active', 1))
skip_fetch = is_true(request.POST.get('skip_fetch', False))
if not url:
code = -1
@ -1050,7 +903,8 @@ def add_url(request):
else:
folder = request.POST.get('folder', '')
code, message, _ = UserSubscription.add_subscription(user=request.user, feed_address=url,
folder=folder, auto_active=auto_active)
folder=folder, auto_active=auto_active,
skip_fetch=skip_fetch)
return dict(code=code, message=message)

View file

@ -1,6 +1,7 @@
from django.core.management.base import BaseCommand
from django.conf import settings
from optparse import make_option
from apps.rss_feeds.tasks import TaskFeeds
import datetime
@ -13,45 +14,4 @@ class Command(BaseCommand):
)
def handle(self, *args, **options):
from apps.rss_feeds.models import Feed
settings.LOG_TO_STREAM = True
now = datetime.datetime.utcnow()
# Active feeds
feeds = Feed.objects.filter(
next_scheduled_update__lte=now,
active=True
).exclude(
active_subscribers=0
).order_by('?')
if options['all']:
feeds = Feed.objects.all()
Feed.task_feeds(feeds)
# Mistakenly inactive feeds
day = now - datetime.timedelta(days=1)
feeds = Feed.objects.filter(
last_update__lte=day,
queued_date__lte=day,
min_to_decay__lte=60*24,
active_subscribers__gte=1,
active=True
).order_by('?')
if feeds: Feed.task_feeds(feeds)
week = now - datetime.timedelta(days=7)
feeds = Feed.objects.filter(
last_update__lte=week,
queued_date__lte=day,
active_subscribers__gte=1,
active=True
).order_by('?')
if feeds: Feed.task_feeds(feeds)
# feeds = Feed.objects.filter(
# last_update__lte=day,
# active_subscribers__gte=1,
# active=False,
# known_good=True
# ).order_by('?')
# if feeds: Feed.task_feeds(feeds)
TaskFeeds.apply()

View file

@ -0,0 +1,87 @@
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Feed.errors_since_good'
db.add_column('feeds', 'errors_since_good',
self.gf('django.db.models.fields.IntegerField')(default=0),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Feed.errors_since_good'
db.delete_column('feeds', 'errors_since_good')
models = {
'rss_feeds.duplicatefeed': {
'Meta': {'object_name': 'DuplicateFeed'},
'duplicate_address': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'duplicate_feed_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'db_index': 'True'}),
'duplicate_link': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'db_index': 'True'}),
'feed': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'duplicate_addresses'", 'to': "orm['rss_feeds.Feed']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'rss_feeds.feed': {
'Meta': {'ordering': "['feed_title']", 'object_name': 'Feed', 'db_table': "'feeds'"},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'active_premium_subscribers': ('django.db.models.fields.IntegerField', [], {'default': '-1', 'db_index': 'True'}),
'active_subscribers': ('django.db.models.fields.IntegerField', [], {'default': '-1', 'db_index': 'True'}),
'average_stories_per_month': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'branch_from_feed': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['rss_feeds.Feed']", 'null': 'True', 'blank': 'True'}),
'creation': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'days_to_trim': ('django.db.models.fields.IntegerField', [], {'default': '90'}),
'errors_since_good': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'etag': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'exception_code': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'favicon_color': ('django.db.models.fields.CharField', [], {'max_length': '6', 'null': 'True', 'blank': 'True'}),
'favicon_not_found': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'feed_address': ('django.db.models.fields.URLField', [], {'max_length': '255', 'db_index': 'True'}),
'feed_address_locked': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'feed_link': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'feed_link_locked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'feed_title': ('django.db.models.fields.CharField', [], {'default': "'[Untitled]'", 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'fetched_once': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'has_feed_exception': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'has_page': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'has_page_exception': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'hash_address_and_link': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_push': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'known_good': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'last_load_time': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_update': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'min_to_decay': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'next_scheduled_update': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'num_subscribers': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
'premium_subscribers': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
'queued_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'stories_last_month': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'rss_feeds.feeddata': {
'Meta': {'object_name': 'FeedData'},
'feed': ('utils.fields.AutoOneToOneField', [], {'related_name': "'data'", 'unique': 'True', 'to': "orm['rss_feeds.Feed']"}),
'feed_classifier_counts': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'feed_tagline': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'popular_authors': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'null': 'True', 'blank': 'True'}),
'popular_tags': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'story_count_history': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'rss_feeds.feedloadtime': {
'Meta': {'object_name': 'FeedLoadtime'},
'date_accessed': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'feed': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['rss_feeds.Feed']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'loadtime': ('django.db.models.fields.FloatField', [], {})
}
}
complete_apps = ['rss_feeds']

View file

@ -40,6 +40,7 @@ BROKEN_PAGE_URLS = [
'stackoverflow.com',
'stackexchange.com',
'twitter.com',
'rankexploits',
]
class Feed(models.Model):
@ -63,6 +64,7 @@ class Feed(models.Model):
has_page_exception = models.BooleanField(default=False, db_index=True)
has_page = models.BooleanField(default=True)
exception_code = models.IntegerField(default=0)
errors_since_good = models.IntegerField(default=0)
min_to_decay = models.IntegerField(default=0)
days_to_trim = models.IntegerField(default=90)
creation = models.DateField(auto_now_add=True)
@ -364,9 +366,11 @@ class Feed(models.Model):
# for history in old_fetch_histories:
# history.delete()
if status_code not in (200, 304):
errors, non_errors = self.count_errors_in_history('feed', status_code)
self.set_next_scheduled_update(error_count=len(errors), non_error_count=len(non_errors))
elif self.has_feed_exception:
self.errors_since_good += 1
self.count_errors_in_history('feed', status_code)
self.set_next_scheduled_update()
elif self.has_feed_exception or self.errors_since_good:
self.errors_since_good = 0
self.has_feed_exception = False
self.active = True
self.save()
@ -398,9 +402,10 @@ class Feed(models.Model):
errors = [h for h in fetch_history if int(h) not in (200, 304)]
if len(non_errors) == 0 and len(errors) > 1:
self.active = True
if exception_type == 'feed':
self.has_feed_exception = True
self.active = False
# self.active = False # No longer, just geometrically fetch
elif exception_type == 'page':
self.has_page_exception = True
self.exception_code = status_code or int(errors[0])
@ -683,6 +688,7 @@ class Feed(models.Model):
'single_threaded': kwargs.get('single_threaded', True),
'force': kwargs.get('force'),
'compute_scores': kwargs.get('compute_scores', True),
'mongodb_replication_lag': kwargs.get('mongodb_replication_lag', None),
'fake': kwargs.get('fake'),
'quick': kwargs.get('quick'),
'debug': kwargs.get('debug'),
@ -846,11 +852,12 @@ class Feed(models.Model):
def save_popular_tags(self, feed_tags=None, verbose=False):
if not feed_tags:
all_tags = MStory.objects(story_feed_id=self.pk, story_tags__exists=True).item_frequencies('story_tags')
feed_tags = sorted([(k, v) for k, v in all_tags.items() if isinstance(v, float) and int(v) > 1],
feed_tags = sorted([(k, v) for k, v in all_tags.items() if int(v) > 0],
key=itemgetter(1),
reverse=True)[:25]
popular_tags = json.encode(feed_tags)
if verbose:
print "Found %s tags: %s" % (len(feed_tags), popular_tags)
# TODO: This len() bullshit will be gone when feeds move to mongo
# On second thought, it might stay, because we don't want
@ -886,8 +893,6 @@ class Feed(models.Model):
self.save_popular_authors(feed_authors=feed_authors[:-1])
def trim_feed(self, verbose=False):
from apps.reader.models import MUserStory
DAYS_OF_UNREAD = datetime.datetime.now() - datetime.timedelta(days=settings.DAYS_OF_UNREAD)
trim_cutoff = 500
if self.active_subscribers <= 1 and self.premium_subscribers < 1:
trim_cutoff = 100
@ -901,31 +906,29 @@ class Feed(models.Model):
trim_cutoff = 400
elif self.active_subscribers <= 25 and self.premium_subscribers < 5:
trim_cutoff = 450
stories = MStory.objects(
story_feed_id=self.pk,
).order_by('-story_date')
if stories.count() > trim_cutoff:
logging.debug(' ---> [%-30s] ~FBFound %s stories. Trimming to ~SB%s~SN...' % (unicode(self)[:30], stories.count(), trim_cutoff))
logging.debug(' ---> [%-30s] ~FBFound %s stories. Trimming to ~SB%s~SN...' %
(unicode(self)[:30], stories.count(), trim_cutoff))
try:
story_trim_date = stories[trim_cutoff].story_date
except IndexError, e:
logging.debug(' ***> [%-30s] ~BRError trimming feed: %s' % (unicode(self)[:30], e))
return
extra_stories = MStory.objects(story_feed_id=self.pk, story_date__lte=story_trim_date)
extra_stories = MStory.objects(story_feed_id=self.pk,
story_date__lte=story_trim_date)
extra_stories_count = extra_stories.count()
for story in extra_stories:
story.delete()
if verbose:
print "Deleted %s stories, %s left." % (extra_stories_count, MStory.objects(story_feed_id=self.pk).count())
# Can't use the story_trim_date because some users may have shared stories from
# this feed, but the trim date isn't past the two weeks of unreads.
userstories = MUserStory.objects(feed_id=self.pk, story_date__lte=DAYS_OF_UNREAD)
if userstories.count():
logging.debug(" ---> [%-30s] ~FBFound %s user stories. Deleting..." % (unicode(self)[:30], userstories.count()))
for userstory in userstories:
userstory.delete()
existing_story_count = MStory.objects(story_feed_id=self.pk).count()
print "Deleted %s stories, %s left." % (extra_stories_count,
existing_story_count)
def get_stories(self, offset=0, limit=25, force=False):
stories_db = MStory.objects(story_feed_id=self.pk)[offset:offset+limit]
stories = self.format_stories(stories_db, self.pk)
@ -1141,12 +1144,12 @@ class Feed(models.Model):
return total, random_factor*2
def set_next_scheduled_update(self, error_count=0, non_error_count=0):
def set_next_scheduled_update(self):
total, random_factor = self.get_next_scheduled_update(force=True, verbose=False)
if error_count:
total = total * error_count
logging.debug(' ---> [%-30s] ~FBScheduling feed fetch geometrically: ~SB%s/%s errors. Time: %s min' % (unicode(self)[:30], error_count, non_error_count, total))
if self.errors_since_good:
total = total * self.errors_since_good
logging.debug(' ---> [%-30s] ~FBScheduling feed fetch geometrically: ~SB%s errors. Time: %s min' % (unicode(self)[:30], self.errors_since_good, total))
next_scheduled_update = datetime.datetime.utcnow() + datetime.timedelta(
minutes = total + random_factor)

View file

@ -26,18 +26,16 @@ class TaskFeeds(Task):
last_update__lte=day,
queued_date__lte=day,
min_to_decay__lte=60*24,
active_subscribers__gte=1,
active=True
).order_by('?')
active_subscribers__gte=1
).order_by('?')[:20]
if feeds: Feed.task_feeds(feeds)
week = now - datetime.timedelta(days=7)
feeds = Feed.objects.filter(
last_update__lte=week,
queued_date__lte=day,
active_subscribers__gte=1,
active=True
).order_by('?')
active_subscribers__gte=1
).order_by('?')[:20]
if feeds: Feed.task_feeds(feeds)
@ -50,9 +48,14 @@ class UpdateFeeds(Task):
from apps.rss_feeds.models import Feed
from apps.statistics.models import MStatistics
mongodb_replication_lag = int(MStatistics.get('mongodb_replication_lag', 0))
compute_scores = bool(mongodb_replication_lag < 250)
options = {
'fake': bool(MStatistics.get('fake_fetch')),
'quick': float(MStatistics.get('quick_fetch', 0)),
'compute_scores': compute_scores,
'mongodb_replication_lag': mongodb_replication_lag,
}
if not isinstance(feed_pks, list):
@ -90,9 +93,15 @@ class PushFeeds(Task):
def run(self, feed_id, xml, **kwargs):
from apps.rss_feeds.models import Feed
from apps.statistics.models import MStatistics
mongodb_replication_lag = int(MStatistics.get('mongodb_replication_lag', 0))
compute_scores = bool(mongodb_replication_lag < 250)
options = {
'feed_xml': xml
'feed_xml': xml,
'compute_scores': compute_scores,
'mongodb_replication_lag': mongodb_replication_lag,
}
feed = Feed.objects.get(pk=feed_id)
feed.update(options=options)

View file

@ -172,10 +172,7 @@ class MSocialProfile(mongo.Document):
@classmethod
def profile(cls, user_id, include_follows=True):
try:
profile = cls.objects.get(user_id=user_id)
except cls.DoesNotExist:
return {}
profile = cls.get_user(user_id)
return profile.to_json(include_follows=True)
@classmethod
@ -375,9 +372,11 @@ class MSocialProfile(mongo.Document):
return socialsub
def is_following_user(self, user_id):
# XXX TODO: Outsource to redis
return user_id in self.following_user_ids
def is_followed_by_user(self, user_id):
# XXX TODO: Outsource to redis
return user_id in self.follower_user_ids
def unfollow_user(self, user_id):
@ -797,7 +796,7 @@ class MSocialSubscription(mongo.Document):
feed_id = story.story_feed_id
m = MUserStory(user_id=self.user_id,
feed_id=feed_id, read_date=date,
story_id=story.story_guid, story_date=story.story_date)
story_id=story.story_guid, story_date=story.shared_date)
try:
m.save()
except OperationError:
@ -1373,6 +1372,8 @@ class MSharedStory(mongo.Document):
story['share_count_friends'] = len(friends_with_shares)
story['friend_user_ids'] = list(set(story['commented_by_friends'] + story['shared_by_friends']))
story['public_user_ids'] = list(set(story['commented_by_public'] + story['shared_by_public']))
if not story['share_user_ids']:
story['share_user_ids'] = story['friend_user_ids'] + story['public_user_ids']
if story.get('source_user_id'):
profile_user_ids.add(story['source_user_id'])
@ -1660,12 +1661,16 @@ class MSocialServices(mongo.Document):
}
}
@classmethod
def get_user(cls, user_id):
profile, created = cls.objects.get_or_create(user_id=user_id)
if created:
profile.save()
return profile
@classmethod
def profile(cls, user_id):
try:
profile = cls.objects.get(user_id=user_id)
except cls.DoesNotExist:
return {}
profile = cls.get_user(user_id=user_id)
return profile.to_json()
def twitter_api(self):
@ -1681,12 +1686,21 @@ class MSocialServices(mongo.Document):
return graph
def sync_twitter_friends(self):
user = User.objects.get(pk=self.user_id)
logging.user(user, "~BB~FRTwitter import starting...")
api = self.twitter_api()
if not api:
logging.user(user, "~BB~FRTwitter import ~SBfailed~SN: no api access.")
self.syncing_twitter = False
self.save()
return
friend_ids = list(unicode(friend.id) for friend in tweepy.Cursor(api.friends).items())
if not friend_ids:
logging.user(user, "~BB~FRTwitter import ~SBfailed~SN: no friend_ids.")
self.syncing_twitter = False
self.save()
return
twitter_user = api.me()
@ -1694,54 +1708,22 @@ class MSocialServices(mongo.Document):
self.twitter_username = twitter_user.screen_name
self.twitter_friend_ids = friend_ids
self.twitter_refreshed_date = datetime.datetime.utcnow()
self.syncing_twitter = False
self.save()
self.follow_twitter_friends()
profile = MSocialProfile.get_user(self.user_id)
profile.location = profile.location or twitter_user.location
profile.bio = profile.bio or twitter_user.description
profile.website = profile.website or twitter_user.url
profile.save()
profile.count_follows()
if not profile.photo_url or not profile.photo_service:
self.set_photo('twitter')
def sync_facebook_friends(self):
self.syncing_facebook = False
self.save()
graph = self.facebook_api()
if not graph:
return
friends = graph.get_connections("me", "friends")
if not friends:
return
facebook_friend_ids = [unicode(friend["id"]) for friend in friends["data"]]
self.facebook_friend_ids = facebook_friend_ids
self.facebook_refresh_date = datetime.datetime.utcnow()
self.facebook_picture_url = "//graph.facebook.com/%s/picture" % self.facebook_uid
self.save()
self.follow_facebook_friends()
facebook_user = graph.request('me', args={'fields':'website,bio,location'})
profile = MSocialProfile.get_user(self.user_id)
profile.location = profile.location or (facebook_user.get('location') and facebook_user['location']['name'])
profile.bio = profile.bio or facebook_user.get('bio')
if not profile.website and facebook_user.get('website'):
profile.website = facebook_user.get('website').split()[0]
profile.save()
profile.count_follows()
if not profile.photo_url or not profile.photo_service:
self.set_photo('facebook')
self.follow_twitter_friends()
def follow_twitter_friends(self):
self.syncing_twitter = False
self.save()
social_profile = MSocialProfile.get_user(self.user_id)
following = []
followers = 0
@ -1770,6 +1752,44 @@ class MSocialServices(mongo.Document):
return following
def sync_facebook_friends(self):
user = User.objects.get(pk=self.user_id)
logging.user(user, "~BB~FRFacebook import starting...")
graph = self.facebook_api()
if not graph:
logging.user(user, "~BB~FRFacebook import ~SBfailed~SN: no api access.")
self.syncing_facebook = False
self.save()
return
friends = graph.get_connections("me", "friends")
if not friends:
logging.user(user, "~BB~FRFacebook import ~SBfailed~SN: no friend_ids.")
self.syncing_facebook = False
self.save()
return
facebook_friend_ids = [unicode(friend["id"]) for friend in friends["data"]]
self.facebook_friend_ids = facebook_friend_ids
self.facebook_refresh_date = datetime.datetime.utcnow()
self.facebook_picture_url = "//graph.facebook.com/%s/picture" % self.facebook_uid
self.syncing_facebook = False
self.save()
facebook_user = graph.request('me', args={'fields':'website,bio,location'})
profile = MSocialProfile.get_user(self.user_id)
profile.location = profile.location or (facebook_user.get('location') and facebook_user['location']['name'])
profile.bio = profile.bio or facebook_user.get('bio')
if not profile.website and facebook_user.get('website'):
profile.website = facebook_user.get('website').split()[0]
profile.save()
profile.count_follows()
if not profile.photo_url or not profile.photo_service:
self.set_photo('facebook')
self.follow_facebook_friends()
def follow_facebook_friends(self):
social_profile = MSocialProfile.get_user(self.user_id)
following = []
@ -1886,13 +1906,18 @@ class MInteraction(mongo.Document):
}
@classmethod
def user(cls, user_id, page=1, limit=None):
def user(cls, user_id, page=1, limit=None, categories=None):
user_profile = Profile.objects.get(user=user_id)
dashboard_date = user_profile.dashboard_date or user_profile.last_seen_on
page = max(1, page)
limit = int(limit) if limit else 4
offset = (page-1) * limit
interactions_db = cls.objects.filter(user_id=user_id)[offset:offset+limit+1]
interactions_db = cls.objects.filter(user_id=user_id)
if categories:
interactions_db = interactions_db.filter(category__in=categories)
interactions_db = interactions_db[offset:offset+limit+1]
has_next_page = len(interactions_db) > limit
interactions_db = interactions_db[offset:offset+limit]
with_user_ids = [i.with_user_id for i in interactions_db if i.with_user_id]
@ -2078,7 +2103,7 @@ class MActivity(mongo.Document):
}
@classmethod
def user(cls, user_id, page=1, limit=4, public=False):
def user(cls, user_id, page=1, limit=4, public=False, categories=None):
user_profile = Profile.objects.get(user=user_id)
dashboard_date = user_profile.dashboard_date or user_profile.last_seen_on
page = max(1, page)
@ -2086,10 +2111,12 @@ class MActivity(mongo.Document):
offset = (page-1) * limit
activities_db = cls.objects.filter(user_id=user_id)
if categories:
activities_db = activities_db.filter(category__in=categories)
if public:
activities_db = activities_db.filter(category__nin=['star', 'feedsub'])
activities_db = activities_db[offset:offset+limit+1]
has_next_page = len(activities_db) > limit
activities_db = activities_db[offset:offset+limit]
with_user_ids = [a.with_user_id for a in activities_db if a.with_user_id]
@ -2221,3 +2248,8 @@ class MActivity(mongo.Document):
a.delete()
@classmethod
def new_signup(cls, user_id):
cls.objects.get_or_create(user_id=user_id,
with_user_id=user_id,
category="signup")

View file

@ -1,4 +1,5 @@
from django import template
from django.conf import settings
from apps.social.models import MSocialProfile
register = template.Library()
@ -27,6 +28,7 @@ def render_story_share(context, story):
def render_story_comments(context, story):
user = context['user']
user_social_profile = context.get('user_social_profile')
MEDIA_URL = settings.MEDIA_URL
if not user_social_profile and user.is_authenticated():
user_social_profile = MSocialProfile.objects.get(user_id=user.pk)
@ -34,6 +36,7 @@ def render_story_comments(context, story):
'user': user,
'user_social_profile': user_social_profile,
'story': story,
'MEDIA_URL': MEDIA_URL,
}
@register.inclusion_tag('social/story_comment.xhtml', takes_context=True)

View file

@ -141,9 +141,6 @@ def load_social_stories(request, user_id, username=None):
story['starred_date'] = format_story_link_date__long(starred_date, now)
if story['id'] in shared_stories:
story['shared'] = True
shared_date = localtime_for_timezone(shared_stories[story['id']]['shared_date'],
user.profile.timezone)
story['shared_date'] = format_story_link_date__long(shared_date, now)
story['shared_comments'] = strip_tags(shared_stories[story['id']]['comments'])
story['intelligence'] = {
@ -329,9 +326,14 @@ def load_social_page(request, user_id, username=None, **kwargs):
if page: offset = limit * (int(page) - 1)
user_social_profile = None
user_social_services = None
user_following_social_profile = None
if user.is_authenticated():
user_social_profile = MSocialProfile.get_user(user.pk)
user_social_services = MSocialServices.get_user(user.pk)
user_following_social_profile = user_social_profile.is_following_user(social_user_id)
social_profile = MSocialProfile.get_user(social_user_id)
params = dict(user_id=social_user.pk)
if feed_id:
params['story_feed_id'] = feed_id
@ -350,7 +352,9 @@ def load_social_page(request, user_id, username=None, **kwargs):
"feeds": {},
"social_user": social_user,
"social_profile": social_profile,
"user_social_services": user_social_services,
'user_social_profile' : json.encode(user_social_profile and user_social_profile.page()),
'user_following_social_profile': user_following_social_profile,
}
template = 'social/social_page.xhtml'
return render_to_response(template, params, context_instance=RequestContext(request))
@ -386,6 +390,9 @@ def load_social_page(request, user_id, username=None, **kwargs):
'stories' : stories,
'user_social_profile' : user_social_profile,
'user_social_profile_page' : json.encode(user_social_profile and user_social_profile.page()),
'user_social_services' : user_social_services,
'user_social_services_page' : json.encode(user_social_services and user_social_services.to_json()),
'user_following_social_profile': user_following_social_profile,
'social_profile': social_profile,
'feeds' : feeds,
'user_profile' : hasattr(user, 'profile') and user.profile,
@ -735,6 +742,7 @@ def shared_stories_public(request, username):
def profile(request):
user = get_user(request.user)
user_id = request.GET.get('user_id', user.pk)
categories = request.GET.getlist('category')
include_activities_html = request.REQUEST.get('include_activities_html', None)
user_profile = MSocialProfile.get_user(user_id)
@ -743,7 +751,7 @@ def profile(request):
profile_ids = set(user_profile['followers_youknow'] + user_profile['followers_everybody'] +
user_profile['following_youknow'] + user_profile['following_everybody'])
profiles = MSocialProfile.profiles(profile_ids)
activities, _ = MActivity.user(user_id, page=1, public=True)
activities, _ = MActivity.user(user_id, page=1, public=True, categories=categories)
logging.user(request, "~BB~FRLoading social profile: %s" % user_profile['username'])
payload = {
@ -1095,11 +1103,13 @@ def load_social_settings(request, social_user_id, username=None):
@ajax_login_required
def load_interactions(request):
user_id = request.REQUEST.get('user_id', None)
categories = request.GET.getlist('category')
if not user_id:
user_id = get_user(request).pk
page = max(1, int(request.REQUEST.get('page', 1)))
limit = request.REQUEST.get('limit')
interactions, has_next_page = MInteraction.user(user_id, page=page, limit=limit)
interactions, has_next_page = MInteraction.user(user_id, page=page, limit=limit,
categories=categories)
format = request.REQUEST.get('format', None)
data = {
@ -1117,6 +1127,7 @@ def load_interactions(request):
@ajax_login_required
def load_activities(request):
user_id = request.REQUEST.get('user_id', None)
categories = request.GET.getlist('category')
if user_id:
user_id = int(user_id)
user = User.objects.get(pk=user_id)
@ -1127,7 +1138,8 @@ def load_activities(request):
public = user_id != request.user.pk
page = max(1, int(request.REQUEST.get('page', 1)))
limit = request.REQUEST.get('limit', 4)
activities, has_next_page = MActivity.user(user_id, page=page, limit=limit, public=public)
activities, has_next_page = MActivity.user(user_id, page=page, limit=limit, public=public,
categories=categories)
format = request.REQUEST.get('format', None)
data = {

View file

@ -2,12 +2,13 @@ import datetime
import mongoengine as mongo
import urllib2
from django.db.models import Avg, Count
from django.conf import settings
from apps.rss_feeds.models import MFeedFetchHistory, MPageFetchHistory, MFeedPushHistory
from apps.rss_feeds.models import FeedLoadtime
from apps.social.models import MSharedStory
from apps.profile.models import Profile
from utils import json_functions as json
from utils import db_functions
class MStatistics(mongo.Document):
key = mongo.StringField(unique=True)
@ -67,6 +68,8 @@ class MStatistics(mongo.Document):
print "Sites loaded: %s" % (datetime.datetime.now() - now)
cls.collect_statistics_stories_shared(last_day)
print "Stories shared: %s" % (datetime.datetime.now() - now)
cls.collect_statistics_for_db()
print "DB Stats: %s" % (datetime.datetime.now() - now)
@classmethod
def collect_statistics_feeds_fetched(cls, last_day=None):
@ -174,6 +177,11 @@ class MStatistics(mongo.Document):
for key, value in values:
cls.objects(key=key).update_one(upsert=True, set__key=key, set__value=value)
@classmethod
def collect_statistics_for_db(cls):
lag = db_functions.mongo_max_replication_lag(settings.MONGODB)
cls.set('mongodb_replication_lag', lag)
@classmethod
def delete_old_stats(cls):
now = datetime.datetime.now()

12
config/monit_db.conf Normal file
View file

@ -0,0 +1,12 @@
set daemon 120
set logfile /var/log/monit.log
set eventqueue
basedir /var/monit # set the base directory where events will be stored
slots 100 # optionally limit the queue size
# If no feeds have been queued in the last 10 minutes, something is wrong
check file newsblur.log with path /home/sclay/newsblur/logs/newsblur.log
if timestamp > 10 minutes then exec "/usr/bin/supervisorctl restart celery"
as uid root and gid root

11
fabfile.py vendored
View file

@ -295,7 +295,7 @@ def setup_task():
enable_celery_supervisor()
setup_gunicorn(supervisor=False)
update_gunicorn()
config_monit()
config_monit_task()
# ==================
# = Setup - Common =
@ -410,8 +410,13 @@ def bounce_pgbouncer():
run('sudo /etc/init.d/pgbouncer start', pty=False)
run('sleep 2')
def config_monit():
put('config/monit.conf', '/etc/monit/conf.d/celery.conf', use_sudo=True)
def config_monit_task():
put('config/monit_task.conf', '/etc/monit/conf.d/celery.conf', use_sudo=True)
sudo('echo "startup=1" > /etc/default/monit')
sudo('/etc/init.d/monit restart')
def config_monit_db():
put('config/monit_db.conf', '/etc/monit/conf.d/celery.conf', use_sudo=True)
sudo('echo "startup=1" > /etc/default/monit')
sudo('/etc/init.d/monit restart')

View file

@ -54,7 +54,7 @@ body.NB-theme-sans-serif #story_pane {
}
body.NB-theme-serif #story_pane .NB-feed-story-content {
font-family: Palatino Linotype, Georgia, URW Palladio L, Century Schoolbook L, serif;
font-family: "Palatino Linotype", Georgia, "URW Palladio L", "Century Schoolbook L", serif;
font-size: 14px;
line-height: 20px;
}
@ -760,7 +760,6 @@ body.NB-theme-serif #story_pane .NB-feed-story-content {
margin: 3px 1px 0;
background-color: #8eb6e8;
display: none;
-moz-border-radius: 4px;
border-radius: 4px;
}
.unread_count_positive {
@ -1031,7 +1030,6 @@ background: transparent;
text-transform: uppercase;
display: none;
border-radius: 3px;
-moz-border-radius: 3px;
-webkit-transition: all .12s ease-out;
-moz-transition: all .12s ease-out;
@ -1205,7 +1203,6 @@ background: transparent;
text-transform: uppercase;
overflow: hidden;
border-radius: 3px;
-moz-border-radius: 3px;
}
#story_titles .NB-story-title-indicator:hover {
@ -1511,9 +1508,9 @@ background: transparent;
position: static;
margin: 0 0 4px 0;
color: #888C9B;
top: none;
left: none;
right: none;
top: 0;
left: 0;
right: 0;
display: block;
font-size: 9px;
clear: both;
@ -1781,9 +1778,10 @@ background: transparent;
#story_pane .NB-feed-stories .NB-feed-story .NB-feed-story-content div {
max-width: 100%;
}
#story_pane .NB-feed-stories .NB-feed-story img {
max-width: 100%;
height: auto;
#story_pane .NB-feed-stories .NB-feed-story .NB-feed-story-content img {
max-width: 100% !important;
width: auto !important;
height: auto !important;
}
#story_pane .NB-feed-story {
position: relative;
@ -1930,8 +1928,6 @@ background: transparent;
color: #9D9A95;
text-shadow: 0 1px 0 #E9E9E9;
border-radius: 4px;
-moz-border-radius: 4px;
-webkit-border-radius: 4px;
cursor: pointer;
}
@ -2039,12 +2035,15 @@ background: transparent;
max-width: 700px;
min-height: 66px;
}
#story_pane .NB-feed-story-view-narrow .NB-feed-story-content {
margin-right: 28px;
}
#story_pane .NB-feed-story-content ins {
text-decoration: underline;
color: #27452D;
}
#story_pane .NB-feed-story-content del {
display: block;
display: inline;
color: #661616;
}
.NB-pref-hide-changes #story_pane .NB-feed-story-content ins {
@ -2062,6 +2061,9 @@ background: transparent;
border-bottom: 1px solid #353535;
clear: both;
}
#story_pane .NB-feed-story-view-narrow .NB-feed-story-comments {
margin-right: 28px;
}
#story_pane .NB-story-comment {
border-top: 1px solid #A6A6A6;
background-color: #FCFCFC;
@ -2446,6 +2448,16 @@ background: transparent;
width: 172px;
}
.NB-feed-story-view-narrow .NB-feed-story-sideoptions-container {
width: auto;
position: static;
right: 0;
bottom: 0;
margin: 18px 28px 24px 28px;
clear: both;
overflow: hidden;
}
.NB-feed-story-sideoptions-container {
}
@ -2457,13 +2469,21 @@ background: transparent;
white-space: nowrap;
font-weight: bold;
text-shadow: 0 1px 0 #000;
margin-top: 1px;
border-bottom: 1px solid #A9A9A9;
}
.NB-feed-story-view-narrow .NB-sideoption {
float: left;
margin: 8px 8px 0 0;
border: 1px solid #C0C0C0;
}
.NB-sideoption .NB-sideoption-icon {
width: 24px;
padding: 4px;
height: 100%;
float: right;
background: #EBF6DA url('/media/embed/icons/silk/time.png') no-repeat 8px 4px;
background: #B0C4DE url('/media/embed/icons/silk/time.png') no-repeat 8px 4px;
border-left: 1px solid #92A6C0;
-webkit-transition: all .12s ease-out;
-moz-transition: all .12s ease-out;
-o-transition: all .12s ease-out;
@ -2480,6 +2500,8 @@ background: transparent;
padding: 4px 36px 4px 6px;
color: white;
text-shadow: none;
background-color: #DCDCDC;
border-left: 1px solid #DCDCDC;
-webkit-transition: all .12s ease-out;
-moz-transition: all .12s ease-out;
-o-transition: all .12s ease-out;
@ -2489,6 +2511,9 @@ background: transparent;
-o-user-select: none;
user-select: none;
}
.NB-feed-story-view-narrow .NB-sideoption .NB-sideoption-title {
padding: 4px 42px 4px 8px;
}
.NB-sideoption:hover {
cursor: pointer;
}
@ -2504,14 +2529,24 @@ background: transparent;
color: #D7DDF7;
opacity: 1;
text-shadow: 0 1px 0 #101010;
border-left: 1px solid #2B478C;
}
.NB-sideoption-share-wrapper {
background-color: #EBF6DA;
background-color: #EBF6FA;
height: 0;
overflow: hidden;
}
.NB-menu-manage .NB-sideoption-share-wrapper {
background-color: #EBF6DA;
}
.NB-feed-story-view-narrow .NB-sideoption-share-wrapper {
clear: both;
width: 100%;
margin: 0;
}
.NB-sideoption-share {
padding: 4px 12px 6px;
border: 1px solid #DBE6EA;
}
.NB-sideoption-share .NB-sideoption-share-title {
text-transform: uppercase;
@ -2530,27 +2565,29 @@ background: transparent;
width: 16px;
height: 16px;
margin: 0 6px 0 0;
opacity: .4;
opacity: 1;
cursor: pointer;
-webkit-filter: grayscale(100%);
filter: gray;
}
.NB-sideoption-share .NB-sideoption-share-crosspost-twitter {
background: transparent url('/media/embed/reader/twitter_icon_gray.png') no-repeat 0 0;
}
.NB-sideoption-share .NB-sideoption-share-crosspost-facebook {
background: transparent url('/media/embed/reader/facebook_icon_gray.png') no-repeat 0 0;
}
.NB-sideoption-share .NB-sideoption-share-crosspost-twitter:hover,
.NB-sideoption-share .NB-sideoption-share-crosspost-facebook:hover {
opacity: .7;
-webkit-filter: none;
filter: none;
}
.NB-sideoption-share .NB-sideoption-share-crosspost-twitter.NB-active,
.NB-sideoption-share .NB-sideoption-share-crosspost-facebook.NB-active {
opacity: 1;
-webkit-filter: none;
filter: none;
}
.NB-sideoption-share .NB-sideoption-share-crosspost-twitter {
.NB-sideoption-share .NB-sideoption-share-crosspost-twitter.NB-active,
.NB-sideoption-share .NB-sideoption-share-crosspost-twitter:hover {
background: transparent url('/media/embed/reader/twitter_icon.png') no-repeat 0 0;
}
.NB-sideoption-share .NB-sideoption-share-crosspost-facebook {
.NB-sideoption-share .NB-sideoption-share-crosspost-facebook.NB-active,
.NB-sideoption-share .NB-sideoption-share-crosspost-facebook:hover {
background: transparent url('/media/embed/reader/facebook_icon.png') no-repeat 0 0;
}
.NB-sideoption-share .NB-sideoption-share-crosspost-text {
@ -2575,9 +2612,9 @@ background: transparent;
width: 92%;
background-color: #639510;
cursor: pointer;
-moz-box-shadow: 2px 2px 0 #95AB76;
-webkit-box-shadow: 2px 2px 0 #95AB76;
box-shadow: 2px 2px 0 #95AB76;
-moz-box-shadow: 2px 2px 0 #C9DBE2;
-webkit-box-shadow: 2px 2px 0 #C9DBE2;
box-shadow: 2px 2px 0 #C9DBE2;
text-shadow: 0 1px 0 #101010;
}
.NB-sideoption-share .NB-sideoption-share-save.NB-saving {
@ -2941,8 +2978,6 @@ background: transparent;
width: 20px;
height: 20px;
border: 1px solid #818181;
-webkit-border-radius: 0;
-moz-border-radius: 0;
border-radius: 0;
-webkit-border-top-right-radius: 6px;
-webkit-border-bottom-right-radius: 6px;
@ -3527,7 +3562,6 @@ form.opml_import_form input {
.NB-classifier {
border-radius: 14px;
-moz-border-radius: 14px;
}
.NB-classifier .NB-modal-loading {
@ -6330,8 +6364,6 @@ form.opml_import_form input {
text-align: center;
background: #222222;
border: 3px solid #ffffff;
-webkit-border-radius: 23px;
-moz-border-radius: 23px;
border-radius: 23px;
opacity: 0.5;
filter: alpha(opacity=50);
@ -6893,8 +6925,6 @@ form.opml_import_form input {
font-family: 'Lucida Grande', Verdana, sans-serif;
font-weight: bold;
font-size: 11px;
-webkit-border-radius: 8px;
-moz-border-radius: 8px;
border-radius: 8px;
color: #fff;
background-color: #626262;
@ -7002,8 +7032,6 @@ form.opml_import_form input {
.NB-keyboard-shortcut-key {
border-radius: 6px;
-webkit-border-radius: 6px;
-moz-border-radius: 6px;
border-top: 1px solid #717171;
border-left: 1px solid #717171;
border-bottom: 1px solid #303030;
@ -7061,7 +7089,6 @@ form.opml_import_form input {
margin: 12px 0 12px;
padding: 12px;
background-color: #F6F6F6;
-moz-border-radius: 4px;
border-radius: 4px;
}
@ -7128,7 +7155,7 @@ form.opml_import_form input {
font-family: "Lucida Sans", "Lucida Grande", Verdana, Arial, Helvetica, sans-serif;
}
.NB-modal-preferences .NB-preference-story-styling .NB-preference-story-styling-serif {
font-family: Palatino Linotype, Georgia, URW Palladio L, Century Schoolbook L, serif;
font-family: "Palatino Linotype", Georgia, "URW Palladio L", "Century Schoolbook L", serif;
}
.NB-modal-preferences .NB-preference-window input {
margin-top: 4px;
@ -8432,7 +8459,9 @@ form.opml_import_form input {
overflow: hidden;
cursor: pointer;
}
.NB-interaction.NB-disabled {
cursor: default;
}
.NB-interaction.NB-highlighted {
background-color: #FFFFF6;
border-bottom: 1px solid #F0F0E6;
@ -8441,7 +8470,7 @@ form.opml_import_form input {
border-bottom: 1px solid transparent;
margin-bottom: 0;
}
.NB-interaction:hover {
.NB-interaction:hover:not(.NB-disabled) {
background-color: #F3F6FD;
background-image: -moz-linear-gradient(top, #F5F7FB, #EBF1FE); /* FF3.6 */
background-image: -webkit-gradient(linear, left top, left bottom, from(#F5F7FB), to(#EBF1FE)); /* Saf4+, Chrome */
@ -8449,7 +8478,7 @@ form.opml_import_form input {
border: 1px solid #C3CFE2;
border-bottom: 1px solid #B9C5DC;
}
.NB-interaction:active {
.NB-interaction:active:not(.NB-disabled) {
background-color: #FBE5C7;
background-image: -moz-linear-gradient(top, #FBE5C7, #F7D9AD); /* FF3.6 */
background-image: -webkit-gradient(linear, left top, left bottom, from(#FBE5C7), to(#F7D9AD)); /* Saf4+, Chrome */
@ -8459,7 +8488,7 @@ form.opml_import_form input {
.NB-interaction .NB-splash-link:hover {
color: #405BA8;
}
.NB-interaction:active .NB-interaction-content .NB-splash-link {
.NB-interaction:active:not(.NB-disabled) .NB-interaction-content .NB-splash-link {
color: #A85B40;
}
.NB-interaction-photo {
@ -8478,7 +8507,7 @@ form.opml_import_form input {
text-transform: uppercase;
padding: 5px 0 3px 4px;
}
.NB-interaction:hover .NB-interaction-date {
.NB-interaction:hover:not(.NB-disabled) .NB-interaction-date {
color: #808080;
}
.NB-interaction-title {

View file

@ -230,6 +230,17 @@ header {
padding-right: 6px;
float: left;
}
.NB-header .NB-header-following-user {
color: #878787;
}
.NB-header .NB-header-following-user .NB-follow-user {
color: #C06411;
font-weight: bold;
}
.NB-header .NB-header-following-user .NB-follow-user:hover {
color: #AF000F;
cursor: pointer;
}
.NB-header .NB-header-stat:last-child {
border-bottom: none;
}
@ -596,7 +607,6 @@ header {
position: absolute;
left: 28px;
top: 10px;
cursor: pointer;
}
.NB-story-comment .NB-user-avatar.NB-story-comment-reshare {
top: 26px;
@ -786,8 +796,9 @@ header {
color: #404040;
font-size: 13px;
height: 28px;
margin: 8px 0 0;
margin: 8px 0 4px;
border: 1px solid #C0C0C0;
box-shadow: 2px 2px 0 #E3E8EB;
z-index: 1;
position: relative;
font-family: "Helvetica Neue", Helvetica, Arial, sans-serif;
@ -796,15 +807,17 @@ header {
.NB-story-comment-input-form .NB-story-comment-buttons {
position: absolute;
right: 0;
left: 0;
bottom: 0;
width: 100%;
display: none;
}
.NB-story-comment-input-form .NB-modal-submit-button {
float: right;
float: left;
font-size: 12px;
line-height: 19px;
padding: 2px 16px;
margin: 4px 12px 4px 0;
z-index: 0;
opacity: 0;
-webkit-transition: opacity .44s ease-out;
@ -817,6 +830,48 @@ header {
opacity: 1;
}
.NB-story-comment-crossposts .NB-story-comment-crosspost-twitter,
.NB-story-comment-crossposts .NB-story-comment-crosspost-facebook {
float: left;
width: 16px;
height: 16px;
margin: 10px 0 0 6px;
cursor: pointer;
}
.NB-story-comment-crossposts .NB-story-comment-crosspost-twitter:hover,
.NB-story-comment-crossposts .NB-story-comment-crosspost-facebook:hover {
opacity: .7;
}
.NB-story-comment-crossposts .NB-story-comment-crosspost-twitter.NB-active,
.NB-story-comment-crossposts .NB-story-comment-crosspost-facebook.NB-active {
opacity: 1;
}
.NB-story-comment-crossposts .NB-story-comment-crosspost-twitter {
background: transparent url('/media/embed/reader/twitter_icon_gray.png') no-repeat 0 0;
}
.NB-story-comment-crossposts .NB-story-comment-crosspost-facebook {
background: transparent url('/media/embed/reader/facebook_icon_gray.png') no-repeat 0 0;
}
.NB-story-comment-crossposts .NB-story-comment-crosspost-twitter.NB-active,
.NB-story-comment-crossposts .NB-story-comment-crosspost-twitter:hover {
background: transparent url('/media/embed/reader/twitter_icon.png') no-repeat 0 0;
}
.NB-story-comment-crossposts .NB-story-comment-crosspost-facebook.NB-active,
.NB-story-comment-crossposts .NB-story-comment-crosspost-facebook:hover {
background: transparent url('/media/embed/reader/facebook_icon.png') no-repeat 0 0;
}
.NB-story-comment-crossposts .NB-story-comment-crosspost-text {
font-size: 9px;
text-transform: uppercase;
text-align: left;
color: #7483A2;
text-shadow: 0 1px 0 #FBFBFB;
line-height: 16px;
float: left;
margin: 10px 0 0 8px;
width: 200px;
}
/* =================== */
/* = Story Share Bar = */
/* =================== */
@ -883,6 +938,7 @@ header {
font-size: 10px;
/* padding: 8px 14px 0px;*/
padding: 8px 28px 0px;
position: relative;
overflow: hidden;
height: 26px;
-webkit-transition: all .12s ease-out;
@ -890,14 +946,14 @@ header {
-o-transition: all .12s ease-out;
-ms-transition: all .12s ease-out;
background-image: -webkit-gradient(
background-image: url('/media/embed/reader/background-control-light.png'), -webkit-gradient(
linear,
left bottom,
left top,
color-stop(0.10, #EBEBEC),
color-stop(0.84, #F5F5F5)
);
background-image: -moz-linear-gradient(
background-image: url('/media/embed/reader/background-control-light.png'), -moz-linear-gradient(
center bottom,
#EBEBEC 10%,
#F5F5F5 84%
@ -1066,78 +1122,6 @@ header {
.NB-sideoption-share {
padding: 4px 12px 6px;
}
.NB-sideoption-share .NB-sideoption-share-title {
text-transform: uppercase;
font-size: 10px;
text-align: left;
text-shadow: 0 1px 0 #F6F6F6;
color: #202020;
}
.NB-sideoption-share .NB-sideoption-share-crosspost {
margin-right: -4px;
}
.NB-sideoption-share .NB-sideoption-share-crosspost-twitter,
.NB-sideoption-share .NB-sideoption-share-crosspost-facebook {
float: right;
width: 16px;
height: 16px;
margin: 0 0 0 6px;
opacity: .4;
cursor: pointer;
-webkit-filter: grayscale(100%);
}
.NB-sideoption-share .NB-sideoption-share-crosspost-twitter:hover,
.NB-sideoption-share .NB-sideoption-share-crosspost-facebook:hover {
opacity: .7;
-webkit-filter: none;
}
.NB-sideoption-share .NB-sideoption-share-crosspost-twitter.NB-active,
.NB-sideoption-share .NB-sideoption-share-crosspost-facebook.NB-active {
opacity: 1;
-webkit-filter: none;
}
.NB-sideoption-share .NB-sideoption-share-crosspost-twitter {
background: transparent url('/media/embed/reader/twitter_icon.png') no-repeat 0 0;
}
.NB-sideoption-share .NB-sideoption-share-crosspost-facebook {
background: transparent url('/media/embed/reader/facebook_icon.png') no-repeat 0 0;
}
.NB-sideoption-share .NB-sideoption-share-comments {
width: 100%;
height: 52px;
}
.NB-sideoption-share .NB-sideoption-share-save {
font-size: 10px;
font-weight: bold;
text-transform: uppercase;
color: white;
padding: 2px 6px;
margin: 2px 0;
width: 92%;
background-color: #639510;
cursor: pointer;
-moz-box-shadow: 2px 2px 0 #95AB76;
-webkit-box-shadow: 2px 2px 0 #95AB76;
box-shadow: 2px 2px 0 #95AB76;
text-shadow: 0 1px 0 #101010;
}
.NB-sideoption-share .NB-sideoption-share-save.NB-saving {
background-color: #b5b4bB;
text-shadow: none;
}
.NB-sideoption-share .NB-sideoption-share-unshare {
color: #404040;
text-shadow: 0 1px 0 #E0E0E0;
line-height: 1;
font-size: 11px;
padding: 2px 6px;
margin: 6px 0;
width: 92%;
font-weight: normal;
-moz-box-shadow: 1px 1px 0 #95AB76;
-webkit-box-shadow: 1px 1px 0 #95AB76;
box-shadow: 1px 1px 0 #95AB76;
}
.NB-sideoption-share .NB-error {
font-size: 10px;
color: #6A1000;

Binary file not shown.

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.9 KiB

Some files were not shown because too many files have changed in this diff Show more