2011-12-21 09:43:17 -08:00
import datetime
2012-07-19 23:29:11 -07:00
import time
2011-12-14 09:52:06 -08:00
import zlib
2011-12-22 13:36:03 -08:00
import hashlib
2012-01-09 13:55:26 -08:00
import redis
2012-02-16 18:36:52 -08:00
import re
2011-12-14 09:52:06 -08:00
import mongoengine as mongo
2012-05-15 16:35:09 -07:00
import random
2012-09-10 17:41:01 -07:00
import requests
2012-02-13 11:07:32 -08:00
from collections import defaultdict
2012-09-10 17:41:01 -07:00
from BeautifulSoup import BeautifulSoup
2013-07-25 11:10:56 -07:00
from mongoengine . queryset import Q
2011-12-21 09:43:17 -08:00
from django . conf import settings
2011-12-22 13:36:03 -08:00
from django . contrib . auth . models import User
2012-02-16 18:36:52 -08:00
from django . contrib . sites . models import Site
2012-01-31 10:15:11 -08:00
from django . core . urlresolvers import reverse
2012-04-16 15:44:22 -07:00
from django . template . loader import render_to_string
2012-05-02 12:23:12 -07:00
from django . template . defaultfilters import slugify
2012-04-16 15:44:22 -07:00
from django . core . mail import EmailMultiAlternatives
2013-05-10 16:11:30 -07:00
from apps . reader . models import UserSubscription , RUserStory
2012-02-08 12:20:05 -08:00
from apps . analyzer . models import MClassifierFeed , MClassifierAuthor , MClassifierTag , MClassifierTitle
from apps . analyzer . models import apply_classifier_titles , apply_classifier_feeds , apply_classifier_authors , apply_classifier_tags
2012-03-19 17:16:59 -07:00
from apps . rss_feeds . models import Feed , MStory
2013-01-08 18:33:30 -08:00
from apps . rss_feeds . text_importer import TextImporter
2012-07-09 13:55:29 -07:00
from apps . profile . models import Profile , MSentEmail
2011-12-20 11:49:49 -08:00
from vendor import facebook
from vendor import tweepy
2013-03-02 12:48:31 -08:00
from vendor import appdotnet
2012-08-06 17:52:33 -07:00
from vendor import pynliner
2011-12-24 00:30:37 -08:00
from utils import log as logging
2012-02-13 11:07:32 -08:00
from utils import json_functions as json
2013-10-07 13:36:10 -07:00
from utils . feed_functions import relative_timesince , chunks
2012-09-10 17:41:01 -07:00
from utils . story_functions import truncate_chars , strip_tags , linkify , image_size
from utils . scrubber import SelectiveScriptScrubber
2013-01-08 14:11:59 -08:00
from utils import s3_utils
2013-03-19 17:49:56 -07:00
from StringIO import StringIO
2011-12-14 09:52:06 -08:00
2012-05-15 16:35:09 -07:00
# Maximum number of suggested users returned by MSocialProfile.recommended_users().
RECOMMENDATIONS_LIMIT = 5

# Image hosts to skip when picking story thumbnails (tracking pixels, not content).
IGNORE_IMAGE_SOURCES = [
    "http://feeds.feedburner.com"
]
2012-08-20 14:51:32 -07:00
class MRequestInvite(mongo.Document):
    """A request for an invite to the social (blurblog) beta.

    Stores the requester's email address and whether/when the invite
    email was sent.
    """
    email = mongo.EmailField()
    request_date = mongo.DateTimeField(default=datetime.datetime.now)
    invite_sent = mongo.BooleanField(default=False)
    invite_sent_date = mongo.DateTimeField()

    meta = {
        'collection': 'social_invites',
        'allow_inheritance': False,
    }

    def __unicode__(self):
        return "%s%s" % (self.email, '*' if self.invite_sent else '')

    @classmethod
    def blast(cls):
        """Send an invite email to every requester who hasn't received one yet."""
        # Bug fix: this filtered on a non-existent `email_sent` field;
        # the model's flag is `invite_sent`.
        invites = cls.objects.filter(invite_sent=False)
        print('---> Found %s invites...' % invites.count())
        for invite in invites:
            try:
                invite.send_email()
            # Narrowed from a bare `except:` which would also swallow
            # KeyboardInterrupt/SystemExit.
            except Exception:
                # Bug fix: referenced non-existent `invite.username`.
                print('***> Could not send invite to: %s. Deleting.' % invite.email)
                invite.delete()

    def send_email(self):
        """Render and send the beta-invite email, then mark the invite as sent."""
        # Bug fix: this method referenced a non-existent `self.username`;
        # the requester's address lives in `self.email`. Try to match an
        # existing account by username first, then by email.
        user = User.objects.filter(username__iexact=self.email)
        if not user:
            user = User.objects.filter(email__iexact=self.email)
        if user:
            user = user[0]
            email = user.email or self.email
        else:
            # No matching account: build a minimal stand-in dict for the templates.
            user = {
                'username': self.email,
                'profile': {
                    'autologin_url': '/',
                },
            }
            email = self.email
        params = {
            'user': user,
        }
        text = render_to_string('mail/email_social_beta.txt', params)
        html = render_to_string('mail/email_social_beta.xhtml', params)
        subject = "Psst, you're in..."
        msg = EmailMultiAlternatives(subject, text,
                                     from_email='NewsBlur <%s>' % settings.HELLO_EMAIL,
                                     to=['<%s>' % (email)])
        msg.attach_alternative(html, "text/html")
        msg.send()
        # Bug fix: `email_sent` is not a field on this document; record the
        # send on `invite_sent`/`invite_sent_date` instead.
        self.invite_sent = True
        self.invite_sent_date = datetime.datetime.now()
        self.save()
        logging.debug("---> ~BB~FM~SBSending email for social beta: %s" % self.email)
2012-01-04 09:42:35 -08:00
class MSocialProfile(mongo.Document):
    """Public social profile of a NewsBlur user: identity, blurblog
    customization, the follow graph, and cached sharing statistics."""
    user_id = mongo.IntField(unique=True)  # matches django User.pk
    username = mongo.StringField(max_length=30, unique=True)
    email = mongo.StringField()
    bio = mongo.StringField(max_length=160)
    blurblog_title = mongo.StringField(max_length=256)
    custom_bgcolor = mongo.StringField(max_length=50)
    custom_css = mongo.StringField()
    photo_url = mongo.StringField()
    photo_service = mongo.StringField()
    location = mongo.StringField(max_length=40)
    website = mongo.StringField(max_length=200)
    bb_permalink_direct = mongo.BooleanField()
    # Cached counters, recomputed by count_follows()/count_stories().
    subscription_count = mongo.IntField(default=0)
    shared_stories_count = mongo.IntField(default=0)
    following_count = mongo.IntField(default=0)
    follower_count = mongo.IntField(default=0)
    # Follow graph. unfollowed_user_ids records deliberate unfollows (so
    # auto-follow won't re-add them); requested_follow_user_ids holds pending
    # requests to protected accounts.
    following_user_ids = mongo.ListField(mongo.IntField())
    follower_user_ids = mongo.ListField(mongo.IntField())
    unfollowed_user_ids = mongo.ListField(mongo.IntField())
    requested_follow_user_ids = mongo.ListField(mongo.IntField())
    popular_publishers = mongo.StringField()  # JSON blob, see save_popular_publishers()
    stories_last_month = mongo.IntField(default=0)
    average_stories_per_month = mongo.IntField(default=0)
    story_count_history = mongo.ListField()
    feed_classifier_counts = mongo.DictField()
    favicon_color = mongo.StringField(max_length=6)  # presumably a hex color without '#' -- confirm
    protected = mongo.BooleanField()  # follows require approval (see follow_user)
    private = mongo.BooleanField()

    meta = {
        'collection': 'social_profile',
        'indexes': ['user_id', 'following_user_ids', 'follower_user_ids',
                    'unfollowed_user_ids', 'requested_follow_user_ids'],
        'allow_inheritance': False,
        'index_drop_dups': True,
    }
def __unicode__ ( self ) :
2012-03-13 16:58:50 -07:00
return " %s [ %s ] following %s / %s , shared %s " % ( self . username , self . user_id ,
self . following_count , self . follower_count , self . shared_stories_count )
2012-01-04 09:42:35 -08:00
2012-07-25 23:57:10 -07:00
@classmethod
def get_user ( cls , user_id ) :
profile , created = cls . objects . get_or_create ( user_id = user_id )
if created :
profile . save ( )
return profile
2012-01-04 09:42:35 -08:00
def save ( self , * args , * * kwargs ) :
if not self . username :
2012-03-07 15:01:44 -08:00
self . import_user_fields ( )
2012-01-04 09:42:35 -08:00
if not self . subscription_count :
2012-06-27 16:46:30 -07:00
self . count_follows ( skip_save = True )
2012-03-07 15:01:44 -08:00
if self . bio and len ( self . bio ) > MSocialProfile . bio . max_length :
self . bio = self . bio [ : 80 ]
2012-07-31 23:42:43 -07:00
if self . bio :
self . bio = strip_tags ( self . bio )
if self . website :
self . website = strip_tags ( self . website )
if self . location :
self . location = strip_tags ( self . location )
if self . custom_css :
self . custom_css = strip_tags ( self . custom_css )
2012-01-04 09:42:35 -08:00
super ( MSocialProfile , self ) . save ( * args , * * kwargs )
2012-03-07 15:01:44 -08:00
if self . user_id not in self . following_user_ids :
2012-10-24 19:28:54 -07:00
self . follow_user ( self . user_id , force = True )
2012-06-27 16:46:30 -07:00
self . count_follows ( )
2013-05-29 19:37:50 -07:00
return self
2012-06-28 10:14:42 -07:00
@property
def blurblog_url ( self ) :
return " http:// %s . %s " % (
self . username_slug ,
2012-07-29 22:31:40 -07:00
Site . objects . get_current ( ) . domain . replace ( ' www. ' , ' ' ) )
2012-02-13 11:07:32 -08:00
2013-04-22 15:24:38 -07:00
@property
def blurblog_rss ( self ) :
return " %s %s " % ( self . blurblog_url , reverse ( ' shared-stories-rss-feed ' ,
kwargs = { ' user_id ' : self . user_id ,
' username ' : self . username_slug } ) )
2013-07-25 11:10:56 -07:00
def find_stories ( self , query , offset = 0 , limit = 25 ) :
stories_db = MSharedStory . objects (
Q ( user_id = self . user_id ) &
( Q ( story_title__icontains = query ) |
2013-07-30 12:01:45 -07:00
Q ( story_author_name__icontains = query ) |
Q ( story_tags__icontains = query ) )
2013-07-25 11:10:56 -07:00
) . order_by ( ' -shared_date ' ) [ offset : offset + limit ]
stories = Feed . format_stories ( stories_db )
return stories
2012-05-15 16:35:09 -07:00
def recommended_users ( self ) :
r = redis . Redis ( connection_pool = settings . REDIS_POOL )
following_key = " F: %s :F " % ( self . user_id )
social_follow_key = " FF: %s :F " % ( self . user_id )
profile_user_ids = [ ]
# Find potential twitter/fb friends
services = MSocialServices . objects . get ( user_id = self . user_id )
facebook_user_ids = [ u . user_id for u in
MSocialServices . objects . filter ( facebook_uid__in = services . facebook_friend_ids ) . only ( ' user_id ' ) ]
twitter_user_ids = [ u . user_id for u in
MSocialServices . objects . filter ( twitter_uid__in = services . twitter_friend_ids ) . only ( ' user_id ' ) ]
social_user_ids = facebook_user_ids + twitter_user_ids
# Find users not currently followed by this user
r . delete ( social_follow_key )
2012-05-15 16:45:16 -07:00
nonfriend_user_ids = [ ]
2012-05-15 16:43:53 -07:00
if social_user_ids :
2012-05-15 17:00:42 -07:00
r . sadd ( social_follow_key , * social_user_ids )
2012-05-15 16:43:53 -07:00
nonfriend_user_ids = r . sdiff ( social_follow_key , following_key )
profile_user_ids = [ int ( f ) for f in nonfriend_user_ids ]
r . delete ( social_follow_key )
2012-05-15 16:35:09 -07:00
# Not enough? Grab popular users.
if len ( nonfriend_user_ids ) < RECOMMENDATIONS_LIMIT :
2013-05-13 19:07:52 -07:00
homepage_user = User . objects . get ( username = ' popular ' )
2012-05-15 16:35:09 -07:00
suggested_users_list = r . sdiff ( " F: %s :F " % homepage_user . pk , following_key )
suggested_users_list = [ int ( f ) for f in suggested_users_list ]
suggested_user_ids = [ ]
slots_left = min ( len ( suggested_users_list ) , RECOMMENDATIONS_LIMIT - len ( nonfriend_user_ids ) )
for slot in range ( slots_left ) :
suggested_user_ids . append ( random . choice ( suggested_users_list ) )
profile_user_ids . extend ( suggested_user_ids )
# Sort by shared story count
2013-06-12 13:52:43 -07:00
profiles = MSocialProfile . profiles ( profile_user_ids ) . order_by ( ' -shared_stories_count ' ) [ : RECOMMENDATIONS_LIMIT ]
return profiles
2012-05-02 12:23:12 -07:00
@property
def username_slug ( self ) :
return slugify ( self . username )
2012-04-19 12:46:51 -07:00
2012-02-13 11:07:32 -08:00
def count_stories ( self ) :
# Popular Publishers
self . save_popular_publishers ( )
    def save_popular_publishers(self, feed_publishers=None):
        """Cache a JSON list of the feeds this user shares from most often.

        Tallies the user's 500 most recent shared stories per feed, keeps the
        top 20 (with titles), and stores them JSON-encoded on
        ``popular_publishers``. Because that field must stay under 1024
        chars, the method recurses with the least-shared entry dropped until
        the encoded payload fits.
        """
        if not feed_publishers:
            publishers = defaultdict(int)
            for story in MSharedStory.objects(user_id=self.user_id).only('story_feed_id')[:500]:
                publishers[story.story_feed_id] += 1
            feed_titles = dict((f.id, f.feed_title)
                               for f in Feed.objects.filter(pk__in=publishers.keys()).only('id', 'feed_title'))
            # Drop feeds that no longer exist (no title found), rank by count.
            feed_publishers = sorted([{'id': k, 'feed_title': feed_titles[k], 'story_count': v}
                                      for k, v in publishers.items()
                                      if k in feed_titles],
                                     key=lambda f: f['story_count'],
                                     reverse=True)[:20]

        popular_publishers = json.encode(feed_publishers)
        if len(popular_publishers) < 1023:
            self.popular_publishers = popular_publishers
            self.save()
            return
        if len(popular_publishers) > 1:
            # Too long to store: retry with the lowest-ranked publisher dropped.
            # NOTE(review): this condition tests the JSON *string* length, not
            # the list length, so recursion only terminates via the < 1023
            # branch above -- looks intentional-ish but confirm.
            self.save_popular_publishers(feed_publishers=feed_publishers[:-1])
2012-01-04 09:42:35 -08:00
2012-01-20 09:38:41 -08:00
@classmethod
2012-07-28 19:53:38 -07:00
def profile ( cls , user_id , include_follows = True ) :
2012-08-13 17:06:16 -07:00
profile = cls . get_user ( user_id )
2013-06-12 13:52:43 -07:00
return profile . canonical ( include_follows = True )
2012-03-07 18:35:17 -08:00
2012-01-04 09:42:35 -08:00
@classmethod
def profiles ( cls , user_ids ) :
profiles = cls . objects . filter ( user_id__in = user_ids )
return profiles
2012-01-20 09:38:41 -08:00
@classmethod
def profile_feeds ( cls , user_ids ) :
2012-05-08 18:26:38 -07:00
profiles = cls . objects . filter ( user_id__in = user_ids )
2012-01-21 16:12:54 -08:00
profiles = dict ( ( p . user_id , p . feed ( ) ) for p in profiles )
2012-01-20 09:38:41 -08:00
return profiles
2012-01-04 09:42:35 -08:00
2012-01-10 10:20:49 -08:00
@classmethod
2012-01-20 09:38:41 -08:00
def sync_all_redis ( cls ) :
2012-01-10 10:20:49 -08:00
for profile in cls . objects . all ( ) :
2012-07-25 14:03:15 -07:00
profile . sync_redis ( force = True )
2012-01-20 09:38:41 -08:00
2012-07-25 14:03:15 -07:00
def sync_redis ( self , force = False ) :
self . following_user_ids = list ( set ( self . following_user_ids ) )
self . save ( )
2012-01-20 09:38:41 -08:00
for user_id in self . following_user_ids :
2012-07-25 14:03:15 -07:00
self . follow_user ( user_id , force = force )
2012-02-10 11:57:37 -08:00
self . follow_user ( self . user_id )
2012-01-31 10:15:11 -08:00
@property
def title ( self ) :
2012-07-10 00:17:45 -07:00
return self . blurblog_title if self . blurblog_title else self . username + " ' s blurblog "
2012-01-31 10:15:11 -08:00
2012-01-21 16:12:54 -08:00
def feed ( self ) :
2013-06-12 13:52:43 -07:00
params = self . canonical ( compact = True )
2012-01-21 16:12:54 -08:00
params . update ( {
2012-01-31 10:15:11 -08:00
' feed_title ' : self . title ,
2012-05-08 18:26:38 -07:00
' page_url ' : reverse ( ' load-social-page ' , kwargs = { ' user_id ' : self . user_id , ' username ' : self . username_slug } ) ,
' shared_stories_count ' : self . shared_stories_count ,
2012-01-31 10:15:11 -08:00
} )
return params
def page ( self ) :
2013-06-12 13:52:43 -07:00
params = self . canonical ( include_follows = True )
2012-01-31 10:15:11 -08:00
params . update ( {
' feed_title ' : self . title ,
' custom_css ' : self . custom_css ,
2012-01-21 16:12:54 -08:00
} )
return params
2012-04-16 11:21:52 -07:00
@property
def profile_photo_url ( self ) :
if self . photo_url :
return self . photo_url
return settings . MEDIA_URL + ' img/reader/default_profile_photo.png '
2012-06-27 23:57:57 -07:00
2012-07-06 19:54:33 -07:00
@property
def large_photo_url ( self ) :
2012-07-09 13:55:29 -07:00
photo_url = self . email_photo_url
2012-07-06 19:54:33 -07:00
if ' graph.facebook.com ' in photo_url :
return photo_url + ' ?type=large '
elif ' twimg ' in photo_url :
return photo_url . replace ( ' _normal ' , ' ' )
2013-01-08 14:11:59 -08:00
elif ' /avatars/ ' in photo_url :
return photo_url . replace ( ' thumbnail_ ' , ' large_ ' )
2012-07-06 19:54:33 -07:00
return photo_url
2012-06-27 23:57:57 -07:00
@property
def email_photo_url ( self ) :
if self . photo_url :
if self . photo_url . startswith ( ' // ' ) :
self . photo_url = ' http: ' + self . photo_url
return self . photo_url
2012-07-29 22:31:40 -07:00
domain = Site . objects . get_current ( ) . domain
2012-06-27 23:57:57 -07:00
return ' http:// ' + domain + settings . MEDIA_URL + ' img/reader/default_profile_photo.png '
2012-01-21 16:12:54 -08:00
2013-06-12 13:52:43 -07:00
    def canonical(self, compact=False, include_follows=False, common_follows_with_user=None,
                  include_settings=False, include_following_user=None):
        """Serialize this profile to a plain dict for API responses.

        :param compact: emit only the core identity/feed fields.
        :param include_follows: add follower/following id lists (capped at 48).
        :param common_follows_with_user: user_id whose follower/following
            overlap with this profile should be computed and included.
        :param include_settings: add blurblog customization settings.
        :param include_following_user: user_id from whose perspective the
            followed_by_you/following_you flags are computed; defaults to
            ``common_follows_with_user`` when only that is given.
        """
        domain = Site.objects.get_current().domain
        # Core fields, always present.
        params = {
            'id': 'social:%s' % self.user_id,
            'user_id': self.user_id,
            'username': self.username,
            'photo_url': self.email_photo_url,
            'large_photo_url': self.large_photo_url,
            'location': self.location,
            'num_subscribers': self.follower_count,
            'feed_title': self.title,
            'feed_address': "http://%s%s" % (domain, reverse('shared-stories-rss-feed',
                                    kwargs={'user_id': self.user_id, 'username': self.username_slug})),
            'feed_link': self.blurblog_url,
            'protected': self.protected,
            'private': self.private,
        }
        if not compact:
            params.update({
                'large_photo_url': self.large_photo_url,
                'bio': self.bio,
                'website': self.website,
                'shared_stories_count': self.shared_stories_count,
                'following_count': self.following_count,
                'follower_count': self.follower_count,
                'popular_publishers': json.decode(self.popular_publishers),
                'stories_last_month': self.stories_last_month,
                'average_stories_per_month': self.average_stories_per_month,
            })
        if include_settings:
            params.update({
                'custom_css': self.custom_css,
                'custom_bgcolor': self.custom_bgcolor,
                'bb_permalink_direct': self.bb_permalink_direct,
            })
        if include_follows:
            params.update({
                'photo_service': self.photo_service,
                'following_user_ids': self.following_user_ids_without_self[:48],
                'follower_user_ids': self.follower_user_ids_without_self[:48],
            })
        if common_follows_with_user:
            FOLLOWERS_LIMIT = 128
            # Overlap is computed from the *other* user's point of view.
            with_user = MSocialProfile.get_user(common_follows_with_user)
            followers_youknow, followers_everybody = with_user.common_follows(self.user_id, direction='followers')
            following_youknow, following_everybody = with_user.common_follows(self.user_id, direction='following')
            params['followers_youknow'] = followers_youknow[:FOLLOWERS_LIMIT]
            params['followers_everybody'] = followers_everybody[:FOLLOWERS_LIMIT]
            params['following_youknow'] = following_youknow[:FOLLOWERS_LIMIT]
            params['following_everybody'] = following_everybody[:FOLLOWERS_LIMIT]
            # Whether that user has a pending follow request to this
            # (protected) profile.
            params['requested_follow'] = common_follows_with_user in self.requested_follow_user_ids
        if include_following_user or common_follows_with_user:
            if not include_following_user:
                include_following_user = common_follows_with_user
            # No follow flags for a user's own profile.
            if include_following_user != self.user_id:
                params['followed_by_you'] = bool(self.is_followed_by_user(include_following_user))
                params['following_you'] = self.is_following_user(include_following_user)

        return params
2012-06-27 16:46:30 -07:00
@property
def following_user_ids_without_self ( self ) :
if self . user_id in self . following_user_ids :
return [ u for u in self . following_user_ids if u != self . user_id ]
return self . following_user_ids
@property
def follower_user_ids_without_self ( self ) :
if self . user_id in self . follower_user_ids :
return [ u for u in self . follower_user_ids if u != self . user_id ]
return self . follower_user_ids
2012-03-07 15:01:44 -08:00
def import_user_fields ( self , skip_save = False ) :
2012-01-04 09:42:35 -08:00
user = User . objects . get ( pk = self . user_id )
self . username = user . username
self . email = user . email
2012-06-27 16:46:30 -07:00
def count_follows ( self , skip_save = False ) :
2012-01-04 09:42:35 -08:00
self . subscription_count = UserSubscription . objects . filter ( user__pk = self . user_id ) . count ( )
self . shared_stories_count = MSharedStory . objects . filter ( user_id = self . user_id ) . count ( )
2012-06-27 16:46:30 -07:00
self . following_count = len ( self . following_user_ids_without_self )
self . follower_count = len ( self . follower_user_ids_without_self )
2012-01-04 09:42:35 -08:00
if not skip_save :
self . save ( )
2012-07-25 14:03:15 -07:00
    def follow_user(self, user_id, check_unfollowed=False, force=False):
        """Make this user follow ``user_id``, updating mongo, redis, the
        followee's profile, and the social subscription.

        :param check_unfollowed: no-op if this user deliberately unfollowed
            ``user_id`` before.
        :param force: bypass the protected-account request flow and the
            already-following early return (used for the implicit self-follow
            and resyncs).
        :returns: the MSocialSubscription on a completed follow, or None when
            the follow became a pending request or was a no-op.
        """
        r = redis.Redis(connection_pool=settings.REDIS_POOL)
        if check_unfollowed and user_id in self.unfollowed_user_ids:
            return
        if self.user_id == user_id:
            # Self-follow: reuse this profile instead of re-fetching it.
            followee = self
        else:
            followee = MSocialProfile.get_user(user_id)

        logging.debug("---> ~FB~SB%s~SN (%s) following %s" % (self.username, self.user_id, user_id))

        if not followee.protected or force:
            if user_id not in self.following_user_ids:
                self.following_user_ids.append(user_id)
            elif not force:
                # Already following and not forced: nothing to do.
                return

        if user_id in self.unfollowed_user_ids:
            self.unfollowed_user_ids.remove(user_id)
        self.count_follows()
        self.save()

        if followee.protected and user_id != self.user_id and not force:
            # Protected account: record a pending follow request instead.
            if self.user_id not in followee.requested_follow_user_ids:
                followee.requested_follow_user_ids.append(self.user_id)
                MFollowRequest.add(self.user_id, user_id)
        elif self.user_id not in followee.follower_user_ids:
            followee.follower_user_ids.append(self.user_id)
        followee.count_follows()
        followee.save()

        if followee.protected and user_id != self.user_id and not force:
            # Notify the protected followee about the request, then stop --
            # the actual follow happens if/when they approve.
            from apps.social.tasks import EmailFollowRequest
            EmailFollowRequest.apply_async(kwargs=dict(follower_user_id=self.user_id,
                                                       followee_user_id=user_id),
                                           countdown=settings.SECONDS_TO_DELAY_CELERY_EMAILS)
            return

        # Mirror the follow into redis: F:<uid>:F = following set,
        # F:<uid>:f = followers set.
        following_key = "F:%s:F" % (self.user_id)
        r.sadd(following_key, user_id)
        follower_key = "F:%s:f" % (user_id)
        r.sadd(follower_key, self.user_id)

        if user_id != self.user_id:
            # Record interaction/activity entries, but not for self-follows.
            MInteraction.new_follow(follower_user_id=self.user_id, followee_user_id=user_id)
            MActivity.new_follow(follower_user_id=self.user_id, followee_user_id=user_id)

        socialsub, _ = MSocialSubscription.objects.get_or_create(user_id=self.user_id,
                                                                 subscription_user_id=user_id)
        socialsub.needs_unread_recalc = True
        socialsub.save()

        # Clear any (now satisfied) pending follow request.
        MFollowRequest.remove(self.user_id, user_id)

        if not force:
            from apps.social.tasks import EmailNewFollower
            EmailNewFollower.apply_async(kwargs=dict(follower_user_id=self.user_id,
                                                     followee_user_id=user_id),
                                         countdown=settings.SECONDS_TO_DELAY_CELERY_EMAILS)

        return socialsub
2012-01-04 09:42:35 -08:00
2012-03-07 15:01:44 -08:00
def is_following_user ( self , user_id ) :
2012-08-15 18:35:55 -07:00
# XXX TODO: Outsource to redis
2012-03-07 15:01:44 -08:00
return user_id in self . following_user_ids
2012-07-12 01:21:09 -07:00
def is_followed_by_user ( self , user_id ) :
2012-08-15 18:35:55 -07:00
# XXX TODO: Outsource to redis
2012-07-12 01:21:09 -07:00
return user_id in self . follower_user_ids
2012-03-07 15:01:44 -08:00
2012-01-04 09:42:35 -08:00
    def unfollow_user(self, user_id):
        """Remove this user's follow of ``user_id`` from mongo and redis,
        remembering the unfollow so auto-follows won't re-add it.

        Returns False when no MSocialSubscription existed to delete;
        otherwise returns None.
        """
        r = redis.Redis(connection_pool=settings.REDIS_POOL)
        if not isinstance(user_id, int):
            user_id = int(user_id)
        if user_id == self.user_id:
            # Only unfollow other people, not yourself.
            return
        if user_id in self.following_user_ids:
            self.following_user_ids.remove(user_id)
        if user_id not in self.unfollowed_user_ids:
            # Record the deliberate unfollow (checked by follow_user's
            # check_unfollowed flag).
            self.unfollowed_user_ids.append(user_id)
        self.count_follows()
        self.save()

        followee = MSocialProfile.get_user(user_id)
        if self.user_id in followee.follower_user_ids:
            followee.follower_user_ids.remove(self.user_id)
            followee.count_follows()
            followee.save()
        if self.user_id in followee.requested_follow_user_ids:
            # Also withdraw any pending follow request to a protected account.
            followee.requested_follow_user_ids.remove(self.user_id)
            followee.count_follows()
            followee.save()
            MFollowRequest.remove(self.user_id, user_id)

        # Mirror the removal in redis (F:<uid>:F = following, F:<uid>:f = followers).
        following_key = "F:%s:F" % (self.user_id)
        r.srem(following_key, user_id)
        follower_key = "F:%s:f" % (user_id)
        r.srem(follower_key, self.user_id)

        try:
            MSocialSubscription.objects.get(user_id=self.user_id, subscription_user_id=user_id).delete()
        except MSocialSubscription.DoesNotExist:
            return False
2012-02-22 09:11:35 -08:00
    def common_follows(self, user_id, direction='followers'):
        """Split ``user_id``'s followers (or followings, per ``direction``)
        into two lists of user ids: those shared with this profile's set and
        those not, using the redis follow sets.

        Redis key shapes: F:<uid>:F = who <uid> follows; F:<uid>:f = who
        follows <uid>.
        """
        r = redis.Redis(connection_pool=settings.REDIS_POOL)
        # NOTE(review): both branches of this conditional yield 'F', so
        # my_followers is always this user's *following* set regardless of
        # direction -- one branch was presumably meant to be 'f'; confirm
        # intent before changing, since callers depend on current behavior.
        my_followers = "F:%s:%s" % (self.user_id, 'F' if direction == 'followers' else 'F')
        their_followers = "F:%s:%s" % (user_id, 'f' if direction == 'followers' else 'F')
        follows_inter = r.sinter(their_followers, my_followers)
        follows_diff = r.sdiff(their_followers, my_followers)
        follows_inter = [int(f) for f in follows_inter]
        follows_diff = [int(f) for f in follows_diff]

        # Never include the target user themselves in either list.
        if user_id in follows_inter:
            follows_inter.remove(user_id)
        if user_id in follows_diff:
            follows_diff.remove(user_id)

        return follows_inter, follows_diff
2012-06-27 23:57:57 -07:00
def send_email_for_new_follower ( self , follower_user_id ) :
user = User . objects . get ( pk = self . user_id )
2012-07-30 21:39:21 -07:00
if follower_user_id not in self . follower_user_ids :
2012-08-11 13:53:00 -07:00
logging . user ( user , " ~FMNo longer being followed by %s " % follower_user_id )
2012-07-30 21:39:21 -07:00
return
if not user . email :
2012-08-11 13:53:00 -07:00
logging . user ( user , " ~FMNo email to send to, skipping. " )
2012-07-30 21:39:21 -07:00
return
elif not user . profile . send_emails :
2012-08-11 13:53:00 -07:00
logging . user ( user , " ~FMDisabled emails, skipping. " )
2012-07-30 21:39:21 -07:00
return
if self . user_id == follower_user_id :
2012-06-27 23:57:57 -07:00
return
2012-07-09 13:55:29 -07:00
emails_sent = MSentEmail . objects . filter ( receiver_user_id = user . pk ,
sending_user_id = follower_user_id ,
email_type = ' new_follower ' )
day_ago = datetime . datetime . now ( ) - datetime . timedelta ( days = 1 )
for email in emails_sent :
if email . date_sent > day_ago :
2012-08-11 13:53:00 -07:00
logging . user ( user , " ~SK~FMNot sending new follower email, already sent before. NBD. " )
2012-07-10 15:37:39 -07:00
return
2012-07-09 13:55:29 -07:00
2012-07-25 23:57:10 -07:00
follower_profile = MSocialProfile . get_user ( follower_user_id )
2012-06-27 23:57:57 -07:00
common_followers , _ = self . common_follows ( follower_user_id , direction = ' followers ' )
common_followings , _ = self . common_follows ( follower_user_id , direction = ' following ' )
2012-07-03 12:53:56 -07:00
if self . user_id in common_followers :
common_followers . remove ( self . user_id )
if self . user_id in common_followings :
common_followings . remove ( self . user_id )
2012-06-27 23:57:57 -07:00
common_followers = MSocialProfile . profiles ( common_followers )
common_followings = MSocialProfile . profiles ( common_followings )
data = {
' user ' : user ,
' follower_profile ' : follower_profile ,
' common_followers ' : common_followers ,
' common_followings ' : common_followings ,
}
text = render_to_string ( ' mail/email_new_follower.txt ' , data )
html = render_to_string ( ' mail/email_new_follower.xhtml ' , data )
subject = " %s is now following your Blurblog on NewsBlur! " % follower_profile . username
msg = EmailMultiAlternatives ( subject , text ,
from_email = ' NewsBlur < %s > ' % settings . HELLO_EMAIL ,
to = [ ' %s < %s > ' % ( user . username , user . email ) ] )
msg . attach_alternative ( html , " text/html " )
msg . send ( )
2012-07-09 13:55:29 -07:00
MSentEmail . record ( receiver_user_id = user . pk , sending_user_id = follower_user_id ,
email_type = ' new_follower ' )
2012-06-27 23:57:57 -07:00
2012-11-20 09:41:58 -08:00
logging . user ( user , " ~BB~FM~SBSending email for new follower: %s " % follower_profile . username )
2012-10-24 16:09:47 -07:00
def send_email_for_follow_request ( self , follower_user_id ) :
user = User . objects . get ( pk = self . user_id )
if follower_user_id not in self . requested_follow_user_ids :
logging . user ( user , " ~FMNo longer being followed by %s " % follower_user_id )
return
if not user . email :
logging . user ( user , " ~FMNo email to send to, skipping. " )
return
elif not user . profile . send_emails :
logging . user ( user , " ~FMDisabled emails, skipping. " )
return
if self . user_id == follower_user_id :
return
emails_sent = MSentEmail . objects . filter ( receiver_user_id = user . pk ,
sending_user_id = follower_user_id ,
email_type = ' follow_request ' )
day_ago = datetime . datetime . now ( ) - datetime . timedelta ( days = 1 )
for email in emails_sent :
if email . date_sent > day_ago :
logging . user ( user , " ~SK~FMNot sending follow request email, already sent before. NBD. " )
return
follower_profile = MSocialProfile . get_user ( follower_user_id )
common_followers , _ = self . common_follows ( follower_user_id , direction = ' followers ' )
common_followings , _ = self . common_follows ( follower_user_id , direction = ' following ' )
if self . user_id in common_followers :
common_followers . remove ( self . user_id )
if self . user_id in common_followings :
common_followings . remove ( self . user_id )
common_followers = MSocialProfile . profiles ( common_followers )
common_followings = MSocialProfile . profiles ( common_followings )
data = {
' user ' : user ,
' follower_profile ' : follower_profile ,
' common_followers ' : common_followers ,
' common_followings ' : common_followings ,
}
text = render_to_string ( ' mail/email_follow_request.txt ' , data )
html = render_to_string ( ' mail/email_follow_request.xhtml ' , data )
subject = " %s has requested to follow your Blurblog on NewsBlur " % follower_profile . username
msg = EmailMultiAlternatives ( subject , text ,
from_email = ' NewsBlur < %s > ' % settings . HELLO_EMAIL ,
to = [ ' %s < %s > ' % ( user . username , user . email ) ] )
msg . attach_alternative ( html , " text/html " )
msg . send ( )
MSentEmail . record ( receiver_user_id = user . pk , sending_user_id = follower_user_id ,
email_type = ' follow_request ' )
2012-11-20 09:41:58 -08:00
logging . user ( user , " ~BB~FM~SBSending email for follow request: %s " % follower_profile . username )
2012-07-05 18:29:38 -07:00
2012-02-16 18:36:52 -08:00
def save_feed_story_history_statistics(self):
    """
    Fills in missing months between earlier occurances and now.

    Save format: [('YYYY-MM, #), ...]
    Example output: [(2010-12, 123), (2011-01, 146)]

    Aggregates this user's shared stories by month via a MongoDB map/reduce,
    then stores a zero-filled, edge-trimmed month series on
    self.story_count_history and the mean on self.average_stories_per_month.
    """
    now = datetime.datetime.utcnow()
    min_year = now.year
    total = 0
    month_count = 0

    # Count stories, aggregate by year and month. Map Reduce!
    # NOTE: getMonth() is 0-based in JS, hence the +1; keys come out
    # unpadded ("2011-1"), which matches the u'%s-%s' lookup key below.
    map_f = """
        function() {
            var date = (this.shared_date.getFullYear()) + "-" + (this.shared_date.getMonth()+1);
            emit(date, 1);
        }
    """
    reduce_f = """
        function(key, values) {
            var total = 0;
            for (var i=0; i < values.length; i++) {
                total += values[i];
            }
            return total;
        }
    """
    dates = {}
    # Inline output keeps results in memory instead of writing a collection.
    res = MSharedStory.objects(user_id=self.user_id).map_reduce(map_f, reduce_f, output='inline')
    for r in res:
        dates[r.key] = r.value
        # Track the earliest year seen so the fill loop below starts there.
        year = int(re.findall(r"(\d{4})-\d{1,2}", r.key)[0])
        if year < min_year:
            min_year = year

    # Assemble a list with 0's filled in for missing months,
    # trimming left and right 0's.
    months = []
    start = False  # flips True at the first non-empty month (left trim)
    for year in range(min_year, now.year + 1):
        for month in range(1, 12 + 1):
            # Skip months in the future relative to utcnow.
            if datetime.datetime(year, month, 1) < now:
                key = u'%s-%s' % (year, month)
                if dates.get(key) or start:
                    start = True
                    months.append((key, dates.get(key, 0)))
                    total += dates.get(key, 0)
                    month_count += 1

    self.story_count_history = months
    # Python 2 integer division; max(1, ...) guards the no-stories case.
    self.average_stories_per_month = total / max(1, month_count)
    self.save()
def save_classifier_counts(self):
    """Aggregate this user's classifier votes per facet and cache them.

    For each classifier type (title/author/tag/feed), a map/reduce totals
    the positive and negative scores per facet value; results are sorted
    most-liked-first and stored on self.feed_classifier_counts.
    """
    def calculate_scores(cls, facet):
        # Emit each classifier's score split into pos/neg buckets, keyed by
        # the facet value (e.g. the tag name or feed id).
        map_f = """
            function() {
                emit(this["%s"], {
                    pos: this.score>0 ? this.score : 0,
                    neg: this.score<0 ? Math.abs(this.score) : 0
                });
            }
        """ % (facet)
        reduce_f = """
            function(key, values) {
                var result = {pos: 0, neg: 0};
                values.forEach(function(value) {
                    result.pos += value.pos;
                    result.neg += value.neg;
                });
                return result;
            }
        """
        scores = []
        res = cls.objects(social_user_id=self.user_id).map_reduce(map_f, reduce_f, output='inline')
        for r in res:
            # Mongo returns floats; coerce pos/neg back to ints.
            facet_values = dict([(k, int(v)) for k, v in r.value.iteritems()])
            facet_values[facet] = r.key
            scores.append(facet_values)
        # Sort so net-positive facets come first (neg - pos ascending).
        scores = sorted(scores, key=lambda v: v['neg'] - v['pos'])

        return scores

    scores = {}
    for cls, facet in [(MClassifierTitle, 'title'),
                       (MClassifierAuthor, 'author'),
                       (MClassifierTag, 'tag'),
                       (MClassifierFeed, 'feed_id')]:
        scores[facet] = calculate_scores(cls, facet)
        # Expose feed classifiers under 'feed', and drop empty facets entirely.
        if facet == 'feed_id' and scores[facet]:
            scores['feed'] = scores[facet]
            del scores['feed_id']
        elif not scores[facet]:
            del scores[facet]

    if scores:
        self.feed_classifier_counts = scores
        self.save()
2012-01-20 09:38:41 -08:00
class MSocialSubscription(mongo.Document):
    """A user's subscription to another user's blurblog (their shared stories).

    Mirrors UserSubscription, but the "feed" is another user's shared stories,
    identified by ``subscription_user_id`` rather than a feed id. Unread counts
    are cached on the document and recalculated lazily when
    ``needs_unread_recalc`` is set.
    """
    # NOTE(review): evaluated once at import time; used as the default for the
    # read-date fields below, so it goes stale in long-lived processes.
    UNREAD_CUTOFF = datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD)

    user_id = mongo.IntField()  # the follower
    subscription_user_id = mongo.IntField(unique_with='user_id')  # the followed user
    # Fixed: was default=datetime.datetime.utcnow() (called at import time),
    # which stamped every new subscription with the process start time.
    # Passing the callable evaluates it per-document instead.
    follow_date = mongo.DateTimeField(default=datetime.datetime.utcnow)
    last_read_date = mongo.DateTimeField(default=UNREAD_CUTOFF)
    mark_read_date = mongo.DateTimeField(default=UNREAD_CUTOFF)
    unread_count_neutral = mongo.IntField(default=0)
    unread_count_positive = mongo.IntField(default=0)
    unread_count_negative = mongo.IntField(default=0)
    unread_count_updated = mongo.DateTimeField()
    oldest_unread_story_date = mongo.DateTimeField()
    needs_unread_recalc = mongo.BooleanField(default=False)
    feed_opens = mongo.IntField(default=0)
    is_trained = mongo.BooleanField(default=False)

    meta = {
        'collection': 'social_subscription',
        'indexes': [('user_id', 'subscription_user_id')],
        'allow_inheritance': False,
    }

    def __unicode__(self):
        user = User.objects.get(pk=self.user_id)
        subscription_user = User.objects.get(pk=self.subscription_user_id)
        return "Socialsub %s:%s" % (user, subscription_user)

    @classmethod
    def feeds(cls, user_id=None, subscription_user_id=None, calculate_all_scores=False,
              update_counts=False, *args, **kwargs):
        """Return the list of social-feed dicts for a user's subscriptions.

        Each entry merges the subscription's unread counts (canonical()) with
        the followed user's profile feed info. Subscriptions to users with no
        shared stories are omitted.
        """
        params = {
            'user_id': user_id,
        }
        if subscription_user_id:
            params["subscription_user_id"] = subscription_user_id
        social_subs = cls.objects.filter(**params)

        social_feeds = []
        if social_subs:
            if calculate_all_scores:
                for s in social_subs: s.calculate_feed_scores()

            # Fetch user profiles of subscriptions
            social_user_ids = [sub.subscription_user_id for sub in social_subs]
            social_profiles = MSocialProfile.profile_feeds(social_user_ids)
            for social_sub in social_subs:
                user_id = social_sub.subscription_user_id
                if social_profiles[user_id]['shared_stories_count'] <= 0:
                    continue
                if update_counts and social_sub.needs_unread_recalc:
                    social_sub.calculate_feed_scores()

                # Combine subscription read counts with feed/user info
                feed = dict(social_sub.canonical().items() + social_profiles[user_id].items())
                social_feeds.append(feed)

        return social_feeds

    @classmethod
    def feeds_with_updated_counts(cls, user, social_feed_ids=None):
        """Return {'social:<id>': {ps, nt, ng, id, [shared_stories_count]}}.

        Recalculates any subscription whose cached counts are stale relative
        to the user's unread cutoff.
        """
        feeds = {}

        # Get social subscriptions for user
        user_subs = cls.objects.filter(user_id=user.pk)
        if social_feed_ids:
            social_user_ids = [int(f.replace('social:', '')) for f in social_feed_ids]
            user_subs = user_subs.filter(subscription_user_id__in=social_user_ids)
            profiles = MSocialProfile.objects.filter(user_id__in=social_user_ids)
            profiles = dict((p.user_id, p) for p in profiles)

        for sub in user_subs:
            # Count unreads if subscription is stale.
            if (sub.needs_unread_recalc or
                (sub.unread_count_updated and
                 sub.unread_count_updated < user.profile.unread_cutoff) or
                (sub.oldest_unread_story_date and
                 sub.oldest_unread_story_date < user.profile.unread_cutoff)):
                sub = sub.calculate_feed_scores(force=True, silent=True)

            feed_id = "social:%s" % sub.subscription_user_id
            feeds[feed_id] = {
                'ps': sub.unread_count_positive,
                'nt': sub.unread_count_neutral,
                'ng': sub.unread_count_negative,
                'id': feed_id,
            }
            # profiles only exists when social_feed_ids was given (guarded below).
            if social_feed_ids and sub.subscription_user_id in profiles:
                feeds[feed_id]['shared_stories_count'] = profiles[sub.subscription_user_id].shared_stories_count

        return feeds

    def canonical(self):
        """API-facing dict of this subscription's cached counts."""
        return {
            'user_id': self.user_id,
            'subscription_user_id': self.subscription_user_id,
            'nt': self.unread_count_neutral,
            'ps': self.unread_count_positive,
            'ng': self.unread_count_negative,
            'is_trained': self.is_trained,
            'feed_opens': self.feed_opens,
        }

    @classmethod
    def subs_for_users(cls, user_id, subscription_user_ids=None, read_filter="unread"):
        """Return this user's social subscriptions, trimmed for story fetching.

        Fixed: the query filtered on ``user`` and projected ``feed``, neither
        of which is a field on this document (copied from UserSubscription).
        It now filters on ``user_id`` and projects the fields that
        story_hashes() actually reads: subscription_user_id, mark_read_date,
        is_trained.
        """
        socialsubs = cls.objects
        if read_filter == "unread":
            # Only subscriptions that can possibly have unread stories.
            socialsubs = socialsubs.filter(Q(unread_count_neutral__gt=0) |
                                           Q(unread_count_positive__gt=0))
        if not subscription_user_ids:
            socialsubs = socialsubs.filter(user_id=user_id)\
                                   .only('subscription_user_id', 'mark_read_date', 'is_trained')
        else:
            socialsubs = socialsubs.filter(user_id=user_id,
                                           subscription_user_id__in=subscription_user_ids)\
                                   .only('subscription_user_id', 'mark_read_date', 'is_trained')

        return socialsubs

    @classmethod
    def story_hashes(cls, user_id, relative_user_id, subscription_user_ids=None, socialsubs=None,
                     read_filter="unread", order="newest",
                     include_timestamps=False, group_by_user=True, cutoff_date=None):
        """Collect story hashes across many social subscriptions via Redis.

        Returns a dict keyed by subscription_user_id when group_by_user, else
        a flat list. With include_timestamps, entries are (hash, score) pairs.
        """
        r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
        pipeline = r.pipeline()
        story_hashes = {} if group_by_user else []

        if not socialsubs:
            socialsubs = cls.subs_for_users(relative_user_id,
                                            subscription_user_ids=subscription_user_ids,
                                            read_filter=read_filter)
        # Recompute from socialsubs so ordering matches the pipeline results
        # consumed via feed_counter below.
        subscription_user_ids = [sub.subscription_user_id for sub in socialsubs]
        if not subscription_user_ids:
            return story_hashes

        read_dates = dict((us.subscription_user_id,
                           int(us.mark_read_date.strftime('%s'))) for us in socialsubs)
        current_time = int(time.time() + 60*60*24)
        if not cutoff_date:
            cutoff_date = datetime.datetime.now() - datetime.timedelta(days=settings.DAYS_OF_STORY_HASHES)
        unread_timestamp = int(time.mktime(cutoff_date.timetuple())) - 1000
        feed_counter = 0

        # Batch 20 subscriptions per pipeline round-trip.
        for sub_user_id_group in chunks(subscription_user_ids, 20):
            pipeline = r.pipeline()
            for sub_user_id in sub_user_id_group:
                stories_key               = 'B:%s' % (sub_user_id)
                sorted_stories_key        = 'zB:%s' % (sub_user_id)
                read_stories_key          = 'RS:%s' % (user_id)
                read_social_stories_key   = 'RS:%s:B:%s' % (user_id, sub_user_id)
                unread_stories_key        = 'UB:%s:%s' % (user_id, sub_user_id)
                unread_ranked_stories_key = 'zUB:%s:%s' % (user_id, sub_user_id)
                expire_unread_stories_key = False

                max_score = current_time
                if read_filter == 'unread':
                    # +1 for the intersection b/w zF and F, which carries an implicit score of 1.
                    min_score = read_dates[sub_user_id] + 1
                    # Shared minus read (globally and per-blurblog).
                    pipeline.sdiffstore(unread_stories_key, stories_key, read_stories_key)
                    pipeline.sdiffstore(unread_stories_key, unread_stories_key, read_social_stories_key)
                    expire_unread_stories_key = True
                else:
                    min_score = unread_timestamp
                    unread_stories_key = stories_key

                if order == 'oldest':
                    byscorefunc = pipeline.zrangebyscore
                else:
                    byscorefunc = pipeline.zrevrangebyscore
                    min_score, max_score = max_score, min_score

                pipeline.zinterstore(unread_ranked_stories_key, [sorted_stories_key, unread_stories_key])
                byscorefunc(unread_ranked_stories_key, min_score, max_score, withscores=include_timestamps)
                # Temp keys are deleted in the same pipeline; the range result
                # above was already queued.
                pipeline.delete(unread_ranked_stories_key)
                if expire_unread_stories_key:
                    pipeline.delete(unread_stories_key)

            results = pipeline.execute()

            # Only the byscorefunc replies are lists; skip the bookkeeping replies.
            for hashes in results:
                if not isinstance(hashes, list): continue
                if group_by_user:
                    story_hashes[subscription_user_ids[feed_counter]] = hashes
                    feed_counter += 1
                else:
                    story_hashes.extend(hashes)

        return story_hashes

    def get_stories(self, offset=0, limit=6, order='newest', read_filter='all',
                    withscores=False, hashes_only=False, cutoff_date=None,
                    mark_read_complement=False):
        """Fetch story hashes for this single subscription from Redis.

        mark_read_complement flips the 'newest' window to start at the
        mark-read time (used by mark_feed_read to find what to mark).
        """
        r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
        ignore_user_stories = False

        stories_key             = 'B:%s' % (self.subscription_user_id)
        read_stories_key        = 'RS:%s' % (self.user_id)
        read_social_stories_key = 'RS:%s:B:%s' % (self.user_id, self.subscription_user_id)
        unread_stories_key      = 'UB:%s:%s' % (self.user_id, self.subscription_user_id)

        if not r.exists(stories_key):
            return []
        elif read_filter != 'unread' or not r.exists(read_stories_key):
            # Nothing read (or reading everything): use the raw shared set.
            ignore_user_stories = True
            unread_stories_key = stories_key
        else:
            r.sdiffstore(unread_stories_key, stories_key, read_stories_key)
            r.sdiffstore(unread_stories_key, unread_stories_key, read_social_stories_key)

        sorted_stories_key = 'zB:%s' % (self.subscription_user_id)
        unread_ranked_stories_key = 'z%sUB:%s:%s' % ('h' if hashes_only else '',
                                                     self.user_id, self.subscription_user_id)
        r.zinterstore(unread_ranked_stories_key, [sorted_stories_key, unread_stories_key])

        now = datetime.datetime.now()
        current_time = int(time.time() + 60*60*24)
        mark_read_time = int(time.mktime(self.mark_read_date.timetuple())) + 1
        if cutoff_date:
            mark_read_time = int(time.mktime(cutoff_date.timetuple())) + 1

        if order == 'oldest':
            byscorefunc = r.zrangebyscore
            min_score = mark_read_time
            max_score = current_time
        else:  # newest
            byscorefunc = r.zrevrangebyscore
            min_score = current_time
            if mark_read_complement:
                min_score = mark_read_time
            unread_cutoff = cutoff_date
            if not unread_cutoff:
                unread_cutoff = now - datetime.timedelta(days=settings.DAYS_OF_UNREAD)
            max_score = int(time.mktime(unread_cutoff.timetuple())) - 1

        story_ids = byscorefunc(unread_ranked_stories_key, min_score,
                                max_score, start=offset, num=limit,
                                withscores=withscores)

        if withscores:
            story_ids = [(s[0], int(s[1])) for s in story_ids]

        r.expire(unread_ranked_stories_key, 1*60*60)

        if not ignore_user_stories:
            r.delete(unread_stories_key)

        return story_ids

    @classmethod
    def feed_stories(cls, user_id, social_user_ids, offset=0, limit=6,
                     order='newest', read_filter='all', relative_user_id=None, cache=True,
                     socialsubs=None, cutoff_date=None):
        """Return (story_hashes, story_dates, unread_story_hashes) for a river
        of social feeds, caching the ranked sets in Redis for paging.
        """
        rt = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_TEMP_POOL)

        if not relative_user_id:
            relative_user_id = user_id

        if order == 'oldest':
            range_func = rt.zrange
        else:
            range_func = rt.zrevrange

        if not isinstance(social_user_ids, list):
            social_user_ids = [social_user_ids]

        ranked_stories_keys = 'zU:%s:social' % (user_id)
        unread_ranked_stories_keys = 'zhU:%s:social' % (user_id)
        if (offset and cache and
            rt.exists(ranked_stories_keys) and
            rt.exists(unread_ranked_stories_keys)):
            # Page out of the cached sorted sets.
            story_hashes_and_dates = range_func(ranked_stories_keys, offset, limit, withscores=True)
            # Fixed: guard against an empty page; zip(*[]) would blow up on
            # the tuple unpack (the uncached path below already guards this).
            if not story_hashes_and_dates:
                return [], [], []
            story_hashes, story_dates = zip(*story_hashes_and_dates)
            if read_filter == "unread":
                unread_story_hashes = story_hashes
            else:
                unread_story_hashes = range_func(unread_ranked_stories_keys, 0, offset + limit)
            return story_hashes, story_dates, unread_story_hashes
        else:
            rt.delete(ranked_stories_keys)
            rt.delete(unread_ranked_stories_keys)

        story_hashes = cls.story_hashes(user_id, relative_user_id,
                                        subscription_user_ids=social_user_ids,
                                        read_filter=read_filter, order=order,
                                        include_timestamps=True,
                                        group_by_user=False,
                                        socialsubs=socialsubs,
                                        cutoff_date=cutoff_date)
        if not story_hashes:
            return [], [], []

        pipeline = rt.pipeline()
        for story_hash_group in chunks(story_hashes, 100):
            pipeline.zadd(ranked_stories_keys, **dict(story_hash_group))
        pipeline.execute()
        story_hashes_and_dates = range_func(ranked_stories_keys, offset, limit, withscores=True)
        if not story_hashes_and_dates:
            return [], [], []
        story_hashes, story_dates = zip(*story_hashes_and_dates)

        if read_filter == "unread":
            unread_feed_story_hashes = story_hashes
        else:
            # Separate pass for unreads so the caller can badge read state.
            unread_story_hashes = cls.story_hashes(user_id, relative_user_id,
                                                   subscription_user_ids=social_user_ids,
                                                   read_filter="unread", order=order,
                                                   include_timestamps=True,
                                                   group_by_user=False,
                                                   socialsubs=socialsubs,
                                                   cutoff_date=cutoff_date)
            if unread_story_hashes:
                pipeline = rt.pipeline()
                for unread_story_hash_group in chunks(unread_story_hashes, 100):
                    pipeline.zadd(unread_ranked_stories_keys, **dict(unread_story_hash_group))
                pipeline.execute()
            unread_feed_story_hashes = range_func(unread_ranked_stories_keys, offset, limit)

        rt.expire(ranked_stories_keys, 60*60)
        rt.expire(unread_ranked_stories_keys, 60*60)

        return story_hashes, story_dates, unread_feed_story_hashes

    def mark_story_ids_as_read(self, story_hashes, feed_id=None, mark_all_read=False, request=None):
        """Mark the given story hashes read, dirtying counts on this sub, on
        friends' social subs sharing the same stories, and on the original
        feed subscription.
        """
        data = dict(code=0, payload=story_hashes)
        r = redis.Redis(connection_pool=settings.REDIS_POOL)

        if not request:
            request = User.objects.get(pk=self.user_id)

        if not self.needs_unread_recalc and not mark_all_read:
            self.needs_unread_recalc = True
            self.save()

        sub_username = MSocialProfile.get_user(self.subscription_user_id).username

        if len(story_hashes) > 1:
            logging.user(request, "~FYRead %s stories in social subscription: %s" % (len(story_hashes), sub_username))
        else:
            logging.user(request, "~FYRead story in social subscription: %s" % (sub_username))

        for story_hash in set(story_hashes):
            # Fixed: the loop used to rebind the feed_id parameter itself, so
            # after the first hash every subsequent hash was attributed to the
            # first story's feed. A per-hash local keeps each hash on its own feed.
            hash_feed_id = feed_id
            if hash_feed_id is not None:
                story_hash = MStory.ensure_story_hash(story_hash, story_feed_id=hash_feed_id)
            else:
                hash_feed_id, _ = MStory.split_story_hash(story_hash)

            # Find other social feeds with this story to update their counts
            friend_key = "F:%s:F" % (self.user_id)
            share_key = "S:%s" % (story_hash)
            friends_with_shares = [int(f) for f in r.sinter(share_key, friend_key)]

            RUserStory.mark_read(self.user_id, hash_feed_id, story_hash, social_user_ids=friends_with_shares)

            if self.user_id in friends_with_shares:
                friends_with_shares.remove(self.user_id)
            if friends_with_shares:
                socialsubs = MSocialSubscription.objects.filter(
                    user_id=self.user_id,
                    subscription_user_id__in=friends_with_shares)
                for socialsub in socialsubs:
                    if not socialsub.needs_unread_recalc and not mark_all_read:
                        socialsub.needs_unread_recalc = True
                        socialsub.save()

            # Also count on original subscription
            usersubs = UserSubscription.objects.filter(user=self.user_id, feed=hash_feed_id)
            if usersubs:
                usersub = usersubs[0]
                if not usersub.needs_unread_recalc:
                    usersub.needs_unread_recalc = True
                    usersub.save()

        return data

    @classmethod
    def mark_unsub_story_ids_as_read(cls, user_id, social_user_id, story_ids, feed_id=None,
                                     request=None):
        """Mark stories read from a blurblog the user is NOT subscribed to
        (e.g. the global shared-stories river).
        """
        data = dict(code=0, payload=story_ids)
        r = redis.Redis(connection_pool=settings.REDIS_POOL)

        if not request:
            request = User.objects.get(pk=user_id)

        if len(story_ids) > 1:
            logging.user(request, "~FYRead %s social stories from global" % (len(story_ids)))
        else:
            logging.user(request, "~FYRead social story from global")

        for story_id in set(story_ids):
            try:
                story = MSharedStory.objects.get(user_id=social_user_id,
                                                 story_guid=story_id)
            except MSharedStory.DoesNotExist:
                # Story may have been unshared; best-effort skip.
                continue

            # Find other social feeds with this story to update their counts
            friend_key = "F:%s:F" % (user_id)
            share_key = "S:%s" % (story.story_hash)
            friends_with_shares = [int(f) for f in r.sinter(share_key, friend_key)]

            RUserStory.mark_read(user_id, story.story_feed_id, story.story_hash,
                                 social_user_ids=friends_with_shares)

            # Also count on original subscription
            usersubs = UserSubscription.objects.filter(user=user_id, feed=story.story_feed_id)
            if usersubs:
                usersub = usersubs[0]
                if not usersub.needs_unread_recalc:
                    usersub.needs_unread_recalc = True
                    usersub.save()
                # XXX TODO: Real-time notification, just for this user

        return data

    def mark_feed_read(self, cutoff_date=None):
        """Mark this whole blurblog read up to cutoff_date (default: now)."""
        user_profile = Profile.objects.get(user_id=self.user_id)
        recount = True

        if cutoff_date:
            cutoff_date = cutoff_date + datetime.timedelta(seconds=1)
        else:
            # Use the latest story to get last read time.
            latest_shared_story = MSharedStory.objects(user_id=self.subscription_user_id,
                                                       shared_date__gte=user_profile.unread_cutoff
                                                       ).order_by('-shared_date').only('shared_date').first()
            if latest_shared_story:
                cutoff_date = latest_shared_story['shared_date'] + datetime.timedelta(seconds=1)
            else:
                # Nothing shared within the window; zero the counts directly.
                cutoff_date = datetime.datetime.utcnow()
                recount = False

        self.last_read_date = cutoff_date
        self.mark_read_date = cutoff_date
        self.oldest_unread_story_date = cutoff_date
        if not recount:
            self.unread_count_negative = 0
            self.unread_count_positive = 0
            self.unread_count_neutral = 0
            self.unread_count_updated = datetime.datetime.utcnow()
            self.needs_unread_recalc = False
        else:
            self.needs_unread_recalc = True

        # Manually mark all shared stories as read.
        unread_story_hashes = self.get_stories(read_filter='unread', limit=500, hashes_only=True,
                                               mark_read_complement=True)
        self.mark_story_ids_as_read(unread_story_hashes, mark_all_read=True)

        self.save()

    def calculate_feed_scores(self, force=False, silent=False):
        """Recompute cached unread counts by running the user's classifiers
        over this blurblog's unread stories. Returns self.
        """
        if not self.needs_unread_recalc and not force:
            return self

        now = datetime.datetime.now()
        user_profile = Profile.objects.get(user_id=self.user_id)

        # Inactive users don't get fresh counts.
        if user_profile.last_seen_on < user_profile.unread_cutoff:
            # if not silent:
            #     logging.info('   ---> [%s] SKIPPING Computing scores: %s (1 week+)' % (self.user, self.feed))
            return self

        feed_scores = dict(negative=0, neutral=0, positive=0)

        # Two weeks in age. If mark_read_date is older, mark old stories as read.
        date_delta = user_profile.unread_cutoff
        if date_delta < self.mark_read_date:
            date_delta = self.mark_read_date
        else:
            self.mark_read_date = date_delta

        unread_story_hashes = self.get_stories(read_filter='unread', limit=500, hashes_only=True,
                                               cutoff_date=user_profile.unread_cutoff)
        stories_db = MSharedStory.objects(user_id=self.subscription_user_id,
                                          story_hash__in=unread_story_hashes)
        story_feed_ids = set()
        for s in stories_db:
            story_feed_ids.add(s['story_feed_id'])
        story_feed_ids = list(story_feed_ids)

        usersubs = UserSubscription.objects.filter(user__pk=self.user_id, feed__pk__in=story_feed_ids)
        usersubs_map = dict((sub.feed_id, sub) for sub in usersubs)

        oldest_unread_story_date = now
        unread_stories_db = []
        # Set for O(1) membership in the loop below.
        unread_story_hashes_set = set(unread_story_hashes)

        for story in stories_db:
            if story['story_hash'] not in unread_story_hashes_set:
                continue
            feed_id = story.story_feed_id
            # Already read via the original feed subscription? Skip it.
            if usersubs_map.get(feed_id) and story.shared_date < usersubs_map[feed_id].mark_read_date:
                continue
            unread_stories_db.append(story)
            if story.shared_date < oldest_unread_story_date:
                oldest_unread_story_date = story.shared_date

        stories = Feed.format_stories(unread_stories_db)

        classifier_feeds = list(MClassifierFeed.objects(user_id=self.user_id, social_user_id=self.subscription_user_id))
        classifier_authors = list(MClassifierAuthor.objects(user_id=self.user_id, social_user_id=self.subscription_user_id))
        classifier_titles = list(MClassifierTitle.objects(user_id=self.user_id, social_user_id=self.subscription_user_id))
        classifier_tags = list(MClassifierTag.objects(user_id=self.user_id, social_user_id=self.subscription_user_id))

        # Merge with feed specific classifiers
        if story_feed_ids:
            classifier_feeds = classifier_feeds + list(MClassifierFeed.objects(user_id=self.user_id,
                                                                               feed_id__in=story_feed_ids))
            classifier_authors = classifier_authors + list(MClassifierAuthor.objects(user_id=self.user_id,
                                                                                     feed_id__in=story_feed_ids))
            classifier_titles = classifier_titles + list(MClassifierTitle.objects(user_id=self.user_id,
                                                                                  feed_id__in=story_feed_ids))
            classifier_tags = classifier_tags + list(MClassifierTag.objects(user_id=self.user_id,
                                                                            feed_id__in=story_feed_ids))

        for story in stories:
            scores = {
                'feed': apply_classifier_feeds(classifier_feeds, story['story_feed_id'],
                                               social_user_ids=self.subscription_user_id),
                'author': apply_classifier_authors(classifier_authors, story),
                'tags': apply_classifier_tags(classifier_tags, story),
                'title': apply_classifier_titles(classifier_titles, story),
            }
            # Author/tag/title outrank the feed-level classifier.
            max_score = max(scores['author'], scores['tags'], scores['title'])
            min_score = min(scores['author'], scores['tags'], scores['title'])

            if max_score > 0:
                feed_scores['positive'] += 1
            elif min_score < 0:
                feed_scores['negative'] += 1
            else:
                if scores['feed'] > 0:
                    feed_scores['positive'] += 1
                elif scores['feed'] < 0:
                    feed_scores['negative'] += 1
                else:
                    feed_scores['neutral'] += 1

        self.unread_count_positive = feed_scores['positive']
        self.unread_count_neutral = feed_scores['neutral']
        self.unread_count_negative = feed_scores['negative']
        self.unread_count_updated = datetime.datetime.now()
        self.oldest_unread_story_date = oldest_unread_story_date
        self.needs_unread_recalc = False

        self.save()

        if (self.unread_count_positive == 0 and
            self.unread_count_neutral == 0):
            self.mark_feed_read()

        if not silent:
            logging.info(' ---> [%s] Computing social scores: %s (%s/%s/%s)' % (user_profile, self.subscription_user_id, feed_scores['negative'], feed_scores['neutral'], feed_scores['positive']))

        return self

    @classmethod
    def mark_dirty_sharing_story(cls, user_id, story_feed_id, story_guid_hash):
        """Flag for recount every social sub of user_id whose followed friend
        shared the given story. Returns the dirtied subs, or None.
        """
        r = redis.Redis(connection_pool=settings.REDIS_POOL)

        friends_key = "F:%s:F" % (user_id)
        share_key = "S:%s:%s" % (story_feed_id, story_guid_hash)
        following_user_ids = r.sinter(friends_key, share_key)
        following_user_ids = [int(f) for f in following_user_ids]
        if not following_user_ids:
            return None

        social_subs = cls.objects.filter(user_id=user_id, subscription_user_id__in=following_user_ids)
        for social_sub in social_subs:
            social_sub.needs_unread_recalc = True
            social_sub.save()
        return social_subs
2012-03-22 19:37:19 -07:00
class MCommentReply(mongo.EmbeddedDocument):
    """A single reply to a shared-story comment, embedded in MSharedStory.replies."""

    reply_id = mongo.ObjectIdField()                  # acts as the id (see meta['id_field'])
    user_id = mongo.IntField()                        # author of the reply
    publish_date = mongo.DateTimeField()
    comments = mongo.StringField()                    # reply body text
    email_sent = mongo.BooleanField(default=False)    # notification email already dispatched
    liking_users = mongo.ListField(mongo.IntField())  # ids of users who liked this reply

    meta = {
        'ordering': ['publish_date'],
        'id_field': 'reply_id',
        'allow_inheritance': False,
    }

    def canonical(self):
        """Return the API-facing dict for this reply.

        Exposes both a humanized 'publish_date' and the raw 'date'; the
        email/liking bookkeeping fields are deliberately omitted.
        """
        return {
            'reply_id': self.reply_id,
            'user_id': self.user_id,
            'publish_date': relative_timesince(self.publish_date),
            'date': self.publish_date,
            'comments': self.comments,
        }
2012-02-02 17:43:17 -08:00
class MSharedStory(mongo.Document):
    """A story shared by a user to their blurblog, along with the sharer's
    comment and any replies/likes it has collected."""
    user_id = mongo.IntField()
    shared_date = mongo.DateTimeField()
    comments = mongo.StringField()
    has_comments = mongo.BooleanField(default=False)
    has_replies = mongo.BooleanField(default=False)
    replies = mongo.ListField(mongo.EmbeddedDocumentField(MCommentReply))
    # User this share was re-shared from, if any (see set_source_user_id).
    source_user_id = mongo.IntField()
    # "<feed_id>:<guid_hash>" -- set in save() from feed_guid_hash.
    story_hash = mongo.StringField()
    story_feed_id = mongo.IntField()
    story_date = mongo.DateTimeField()
    story_title = mongo.StringField(max_length=1024)
    story_content = mongo.StringField()
    # zlib-compressed story_content; the uncompressed field is cleared on save.
    story_content_z = mongo.BinaryField()
    story_original_content = mongo.StringField()
    # zlib-compressed story_original_content, same scheme as above.
    story_original_content_z = mongo.BinaryField()
    # zlib-compressed extracted full text (see TextImporter).
    original_text_z = mongo.BinaryField()
    story_content_type = mongo.StringField(max_length=255)
    story_author_name = mongo.StringField()
    story_permalink = mongo.StringField()
    story_guid = mongo.StringField(unique_with=('user_id',))
    # sha1(story_guid)[:6], kept in sync by save().
    story_guid_hash = mongo.StringField(max_length=6)
    image_urls = mongo.ListField(mongo.StringField(max_length=1024))
    story_tags = mongo.ListField(mongo.StringField(max_length=250))
    # Services ('twitter'/'facebook'/'appdotnet') already cross-posted to.
    posted_to_services = mongo.ListField(mongo.StringField(max_length=20))
    # Users who opted out of reply-notification emails for this share.
    mute_email_users = mongo.ListField(mongo.IntField())
    liking_users = mongo.ListField(mongo.IntField())
    emailed_reshare = mongo.BooleanField(default=False)
    # Reply ids whose notification emails have already been sent.
    emailed_replies = mongo.ListField(mongo.ObjectIdField())
    image_count = mongo.IntField()
    image_sizes = mongo.ListField(mongo.DictField())

    meta = {
        'collection': 'shared_stories',
        'indexes': [('user_id', '-shared_date'), ('user_id', 'story_feed_id'),
                    'shared_date', 'story_guid', 'story_feed_id'],
        'index_drop_dups': True,
        'ordering': ['-shared_date'],
        'allow_inheritance': False,
    }
2012-07-27 18:58:35 -07:00
def __unicode__(self):
    """Render as "username: title (feed_id)[: comment]" for logs/shell."""
    user = User.objects.get(pk=self.user_id)
    comment_sep = ": " if self.has_comments else ""
    return "%s: %s (%s) %s%s" % (user.username,
                                 self.story_title[:20],
                                 self.story_feed_id,
                                 comment_sep,
                                 self.comments[:20])
2012-07-27 18:58:35 -07:00
2012-02-02 17:43:17 -08:00
@property
def guid_hash(self):
    """First six hex characters of the SHA-1 of the story guid."""
    digest = hashlib.sha1(self.story_guid).hexdigest()
    return digest[:6]
2012-08-24 18:07:44 -07:00
2013-04-13 22:31:05 -07:00
@property
def feed_guid_hash(self):
    """Story hash in "<feed_id>:<guid_hash>" form; feed id "0" when unknown."""
    feed_id = self.story_feed_id or "0"
    return "%s:%s" % (feed_id, self.guid_hash)
2013-04-13 22:31:05 -07:00
2013-06-12 13:52:43 -07:00
def canonical(self):
    """Return the share as a plain dict, decompressing the story content."""
    content = zlib.decompress(self.story_content_z) if self.story_content_z else self.story_content_z
    return {
        "user_id": self.user_id,
        "shared_date": self.shared_date,
        "story_title": self.story_title,
        "story_content": content,
        "comments": self.comments,
    }
2012-03-22 19:37:19 -07:00
2012-02-02 17:43:17 -08:00
def save(self, *args, **kwargs):
    """Normalize and persist the share, then fan out its side effects.

    Scrubs/compresses content, derives hashes, sanitizes user text, saves
    the document, refreshes the author's follow counts, mirrors the share
    into redis, and records an activity entry. Returns self.
    """
    scrubber = SelectiveScriptScrubber()
    if self.story_content:
        self.story_content = scrubber.scrub(self.story_content)
        # Store compressed only; the plain field is cleared to save space.
        self.story_content_z = zlib.compress(self.story_content)
        self.story_content = None
    if self.story_original_content:
        self.story_original_content_z = zlib.compress(self.story_original_content)
        self.story_original_content = None

    self.story_guid_hash = hashlib.sha1(self.story_guid).hexdigest()[:6]
    self.story_title = strip_tags(self.story_title)
    # Denormalized "<feed_id>:<guid_hash>" used by redis blurblog sets.
    self.story_hash = self.feed_guid_hash

    # User-entered text: strip markup, then auto-link bare URLs.
    self.comments = linkify(strip_tags(self.comments))
    for reply in self.replies:
        reply.comments = linkify(strip_tags(reply.comments))

    self.shared_date = self.shared_date or datetime.datetime.utcnow()
    self.has_replies = bool(len(self.replies))

    super(MSharedStory, self).save(*args, **kwargs)

    author = MSocialProfile.get_user(self.user_id)
    author.count_follows()

    self.sync_redis()

    MActivity.new_shared_story(user_id=self.user_id, source_user_id=self.source_user_id,
                               story_title=self.story_title,
                               comments=self.comments, story_feed_id=self.story_feed_id,
                               story_id=self.story_guid, share_date=self.shared_date)
    return self
2012-04-16 11:21:52 -07:00
2012-02-02 17:43:17 -08:00
def delete(self, *args, **kwargs):
    """Remove the share: its activity entry, redis keys, then the document."""
    # Pull the share off the user's activity feed first.
    MActivity.remove_shared_story(user_id=self.user_id,
                                  story_feed_id=self.story_feed_id,
                                  story_id=self.story_guid)
    # Clear the share/comment sets and blurblog hashes.
    self.remove_from_redis()
    super(MSharedStory, self).delete(*args, **kwargs)
2012-07-24 17:16:01 -07:00
2012-09-10 11:55:42 -07:00
def unshare_story(self):
    """Delete this share and force followers' unread counts to recalculate."""
    stale_subs = MSocialSubscription.objects.filter(subscription_user_id=self.user_id,
                                                    needs_unread_recalc=False)
    for stale_sub in stale_subs:
        stale_sub.needs_unread_recalc = True
        stale_sub.save()

    self.delete()
2012-08-24 14:34:53 -07:00
@classmethod
def get_shared_stories_from_site(cls, feed_id, user_id, story_url, limit=3):
    """Gather shares related to a story URL on a site.

    Returns (your_story, same_stories, other_stories): the caller's own
    share of the story (or None), other users' shares of the same story,
    and up to `limit` recent shares of different stories from the feed.
    """
    your_story = cls.objects.filter(story_feed_id=feed_id,
                                    story_permalink=story_url,
                                    user_id=user_id).limit(1).first()

    same_stories = []
    for share in cls.objects.filter(story_feed_id=feed_id,
                                    story_permalink=story_url,
                                    user_id__ne=user_id
                                    ).order_by('-shared_date'):
        same_stories.append({
            "user_id": share.user_id,
            "comments": share.comments,
            "relative_date": relative_timesince(share.shared_date),
            "blurblog_permalink": share.blurblog_permalink(),
        })

    other_stories = []
    if feed_id:
        recent_shares = cls.objects.filter(story_feed_id=feed_id,
                                           story_permalink__ne=story_url
                                           ).order_by('-shared_date').limit(limit)
        other_stories = [{
            "user_id": share.user_id,
            "story_title": share.story_title,
            "story_permalink": share.story_permalink,
            "comments": share.comments,
            "relative_date": relative_timesince(share.shared_date),
            "blurblog_permalink": share.blurblog_permalink(),
        } for share in recent_shares]

    return your_story, same_stories, other_stories
2013-07-05 16:53:03 -07:00
2012-08-06 17:52:33 -07:00
def set_source_user_id(self, source_user_id):
    """Credit the user this story was re-shared from.

    Walks up the chain of re-shares to find the original sharer (with
    cycle protection), saves the attribution, and records a reshare
    interaction for the source user. No-op when sharing your own story.
    """
    if source_user_id == self.user_id:
        return

    if source_user_id:
        # Iteratively follow parent shares of the same story until we hit
        # a share with no source, or a user we've already visited (cycle).
        seen_user_ids = []
        while True:
            parent_shared_story = MSharedStory.objects.filter(user_id=source_user_id,
                                                              story_guid=self.story_guid,
                                                              story_feed_id=self.story_feed_id).limit(1)
            if not (parent_shared_story and parent_shared_story[0].source_user_id):
                break
            parent_user_id = parent_shared_story[0].source_user_id
            if parent_user_id in seen_user_ids:
                break
            seen_user_ids.append(parent_user_id)
            source_user_id = parent_user_id

    if source_user_id == self.user_id:
        return
    elif not self.source_user_id or source_user_id != self.source_user_id:
        self.source_user_id = source_user_id
        logging.debug(" ---> Re-share from %s." % source_user_id)
        self.save()

        MInteraction.new_reshared_story(user_id=self.source_user_id,
                                        reshare_user_id=self.user_id,
                                        comments=self.comments,
                                        story_title=self.story_title,
                                        story_feed_id=self.story_feed_id,
                                        story_id=self.story_guid)
2012-07-05 18:29:38 -07:00
def mute_for_user(self, user_id):
    """Stop emailing `user_id` about activity on this share; saves on change."""
    if user_id in self.mute_email_users:
        return
    self.mute_email_users.append(user_id)
    self.save()
2012-05-09 14:48:10 -07:00
2012-03-30 14:56:16 -07:00
@classmethod
def switch_feed(cls, original_feed_id, duplicate_feed_id):
    """Re-point shares from a duplicate feed onto the original feed."""
    shared_stories = cls.objects.filter(story_feed_id=duplicate_feed_id)
    logging.info(" ---> %s shared stories" % shared_stories.count())
    for shared_story in shared_stories:
        shared_story.story_feed_id = original_feed_id
        shared_story.save()
2012-02-02 17:43:17 -08:00
2012-03-19 17:16:59 -07:00
@classmethod
def collect_popular_stories(cls, cutoff=None, days=None, shared_feed_ids=None):
    """Map-reduce recent shares into popular stories.

    Groups the last `days` days (default 3) of shares by story_hash and
    keeps stories shared by at least `cutoff` users (default 6), skipping
    feeds listed in `shared_feed_ids` (stringified feed ids).
    Returns (stories_by_hash, cutoff).
    """
    if not days:
        days = 3
    if not cutoff:
        cutoff = 6
    if not shared_feed_ids:
        shared_feed_ids = []
    # shared_stories_count = sum(json.decode(MStatistics.get('stories_shared')))
    # cutoff = cutoff or max(math.floor(.025 * shared_stories_count), 3)
    today = datetime.datetime.now() - datetime.timedelta(days=days)

    # Emit one record per share, keyed by story hash.
    map_f = """
        function() {
            emit(this.story_hash, {
                'story_hash': this.story_hash,
                'feed_id': this.story_feed_id,
                'title': this.story_title,
                'count': 1
            });
        }
    """
    # Sum share counts per story (feed_id/title taken from any emit).
    reduce_f = """
        function(key, values) {
            var r = {'story_hash': key, 'count': 0};
            for (var i=0; i < values.length; i++) {
                r.feed_id = values[i].feed_id;
                r.title = values[i].title;
                r.count += values[i].count;
            }
            return r;
        }
    """
    # Drop stories under the cutoff, from excluded feeds, or whose titles
    # contain fewer than 5 printable-ASCII characters.
    finalize_f = """
        function(key, value) {
            if (value.count >= %(cutoff)s && [%(shared_feed_ids)s].indexOf(value.feed_id) == -1) {
                var english_title = value.title.replace(/[^\\062-\\177]/g, "");
                if (english_title.length < 5) return;

                return value;
            }
        }
    """ % {'cutoff': cutoff, 'shared_feed_ids': ','.join(shared_feed_ids)}
    res = cls.objects(shared_date__gte=today).map_reduce(map_f, reduce_f,
                                                         finalize_f=finalize_f,
                                                         output='inline')
    stories = dict([(r.key, r.value) for r in res if r.value])
    return stories, cutoff
2012-04-25 20:00:52 -07:00
@classmethod
def share_popular_stories(cls, cutoff=None, days=None, interactive=True):
    """Re-share widely-shared stories as the 'popular' account.

    Collects stories shared by at least `cutoff` users in the last `days`
    days, optionally confirms each one on the console, shares them as the
    popular user (posting to twitter), and pings that blurblog's real-time
    subscribers. Returns the number of stories shared.
    """
    publish_new_stories = False
    popular_profile = MSocialProfile.objects.get(username='popular')
    popular_user = User.objects.get(pk=popular_profile.user_id)
    week_ago = datetime.datetime.now() - datetime.timedelta(days=7)
    # Feed ids the popular account already shared from this week, kept as
    # strings because they are interpolated into the map-reduce JS.
    shared_feed_ids = [str(s.story_feed_id)
                       for s in MSharedStory.objects(user_id=popular_profile.user_id,
                                                     shared_date__gte=week_ago).only('story_feed_id')]
    shared_stories_today, cutoff = cls.collect_popular_stories(cutoff=cutoff, days=days,
                                                               shared_feed_ids=shared_feed_ids)
    shared = 0

    for story_hash, story_info in shared_stories_today.items():
        story, _ = MStory.find_story(story_info['feed_id'], story_info['story_hash'])
        if not story:
            logging.user(popular_user, "~FRPopular stories, story not found: %s" % story_info)
            continue
        # Compare as strings: shared_feed_ids holds stringified feed ids.
        # (The old int-vs-str comparison never matched the week's shares.)
        if str(story.story_feed_id) in shared_feed_ids:
            logging.user(popular_user, "~FRPopular stories, story feed just shared: %s" % story_info)
            continue

        if interactive:
            feed = Feed.get_by_id(story.story_feed_id)
            accept_story = raw_input("%s / %s [Y/n]: " % (story.story_title, feed.title))
            if accept_story in ['n', 'N']: continue

        # Copy the story document, minus per-user fields, as the defaults
        # for the popular user's share.
        story_db = dict([(k, v) for k, v in story._data.items()
                         if k is not None and v is not None])
        story_db.pop('user_id', None)
        story_db.pop('id', None)
        story_db.pop('comments', None)
        story_db.pop('replies', None)
        story_db['has_comments'] = False
        story_db['has_replies'] = False
        story_db['shared_date'] = datetime.datetime.now()
        story_values = {
            'user_id': popular_profile.user_id,
            'story_guid': story_db['story_guid'],
            'defaults': story_db,
        }
        shared_story, created = MSharedStory.objects.get_or_create(**story_values)
        if created:
            shared_story.post_to_service('twitter')
            shared += 1
            # Stringified to stay consistent with the week-ago list above.
            shared_feed_ids.append(str(story.story_feed_id))
            publish_new_stories = True
            logging.user(popular_user, "~FCSharing: ~SB~FM%s (%s shares, %s min)" % (
                story.story_title[:50],
                story_info['count'],
                cutoff))

    if publish_new_stories:
        socialsubs = MSocialSubscription.objects.filter(subscription_user_id=popular_user.pk)
        for socialsub in socialsubs:
            socialsub.needs_unread_recalc = True
            socialsub.save()
        shared_story.publish_update_to_subscribers()

    return shared
2012-04-27 14:37:06 -07:00
2012-02-02 17:43:17 -08:00
@classmethod
def sync_all_redis(cls, drop=False):
    """Rebuild redis state for every shared story.

    With drop=True, first deletes all comment ("C:*") and share ("S:*")
    keys, then re-syncs each story's share/comment sets and blurblog
    story hashes. Maintenance task; iterates the whole collection.
    """
    r = redis.Redis(connection_pool=settings.REDIS_POOL)
    h = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
    # h2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2)
    if drop:
        # NOTE(review): r.keys() scans the whole keyspace; acceptable for
        # an offline maintenance run, not for production hot paths.
        for key_name in ["C", "S"]:
            keys = r.keys("%s:*" % key_name)
            print " ---> Removing %s keys named %s:*" % (len(keys), key_name)
            for key in keys:
                r.delete(key)
    for story in cls.objects.all():
        story.sync_redis_shares(r=r)
        story.sync_redis_story(r=h)
2012-07-24 17:16:01 -07:00
def sync_redis(self):
    """Mirror this share into redis: share/comment sets, then the user's
    blurblog story-hash sets."""
    self.sync_redis_shares()
    self.sync_redis_story()
2013-04-13 22:31:05 -07:00
def sync_redis_shares(self, r=None):
    """Register this share in the story's redis share set, and keep the
    comment set in step with has_comments."""
    if not r:
        r = redis.Redis(connection_pool=settings.REDIS_POOL)

    share_key = "S:%s:%s" % (self.story_feed_id, self.guid_hash)
    comment_key = "C:%s:%s" % (self.story_feed_id, self.guid_hash)

    r.sadd(share_key, self.user_id)
    # Comment-set membership mirrors has_comments exactly.
    comment_op = r.sadd if self.has_comments else r.srem
    comment_op(comment_key, self.user_id)
2012-04-03 19:24:02 -07:00
2013-08-14 14:32:50 -07:00
def sync_redis_story(self, r=None):
    """Add this share to the user's blurblog story-hash set and
    shared-date-ordered zset, refreshing their expiry windows.

    (Writes to the secondary story-hash pool are currently disabled.)
    """
    if not r:
        r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)

    blurblog_key = 'B:%s' % self.user_id
    blurblog_zkey = 'zB:%s' % self.user_id
    shared_timestamp = time.mktime(self.shared_date.timetuple())
    expire_seconds = settings.DAYS_OF_STORY_HASHES * 24 * 60 * 60

    r.sadd(blurblog_key, self.feed_guid_hash)
    r.zadd(blurblog_zkey, self.feed_guid_hash, shared_timestamp)
    r.expire(blurblog_key, expire_seconds)
    r.expire(blurblog_zkey, expire_seconds)
2012-07-19 23:29:11 -07:00
def remove_from_redis(self):
    """Erase every redis trace of this share: the story's share/comment
    sets and the user's blurblog hash set and zset."""
    r = redis.Redis(connection_pool=settings.REDIS_POOL)
    r.srem("S:%s:%s" % (self.story_feed_id, self.guid_hash), self.user_id)
    r.srem("C:%s:%s" % (self.story_feed_id, self.guid_hash), self.user_id)

    h = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL)
    h.srem('B:%s' % self.user_id, self.feed_guid_hash)
    h.zrem('zB:%s' % self.user_id, self.feed_guid_hash)
2012-07-19 23:29:11 -07:00
2012-04-03 19:24:02 -07:00
def publish_update_to_subscribers(self):
    """Ping real-time subscribers of this user's blurblog about a new story.

    Best-effort: a down redis pubsub server is logged, never raised.
    """
    feed_id = "social:%s" % self.user_id
    try:
        r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
        listeners_count = r.publish(feed_id, 'story:new')
        if listeners_count:
            logging.debug(" ---> ~FMPublished to %s subscribers" % (listeners_count))
    except redis.ConnectionError:
        logging.debug(" ***> ~BMRedis is unavailable for real-time.")
2012-03-12 18:11:13 -07:00
def comments_with_author(self):
    """Return the sharer's comment thread as a plain dict (ids only; see
    attach_users_to_comment for attaching full profiles)."""
    return {
        'id': self.id,
        'user_id': self.user_id,
        'comments': self.comments,
        'shared_date': relative_timesince(self.shared_date),
        'date': self.shared_date,
        'replies': [reply.canonical() for reply in self.replies],
        'liking_users': self.liking_users,
        'source_user_id': self.source_user_id,
    }
2012-07-15 17:59:19 -07:00
def comment_with_author_and_profiles(self):
    """Return (comment, profiles): the comment dict plus compact profiles
    for the commenter, all repliers, likers, and the source user."""
    comment = self.comments_with_author()

    profile_user_ids = set([comment['user_id']])
    profile_user_ids.update(reply['user_id'] for reply in comment['replies'])
    profile_user_ids.update(comment['liking_users'])
    if comment['source_user_id']:
        profile_user_ids.add(comment['source_user_id'])

    profiles = MSocialProfile.objects.filter(user_id__in=list(profile_user_ids))
    profiles = [profile.canonical(compact=True) for profile in profiles]

    return comment, profiles
2012-02-02 17:43:17 -08:00
@classmethod
def stories_with_comments_and_profiles(cls, stories, user_id, check_all=False):
    """Annotate story dicts with share/comment counts split by friendship.

    For each story dict, fills friend/public comment lists, reply counts,
    sharer id lists, and count fields from redis sets keyed on
    (story_feed_id, guid_hash), where "friends" means members of the
    viewer's "F:<user_id>:F" set. Returns (stories, profiles) with compact
    profiles for every referenced user. With check_all=True, recomputes
    even when the cached comment/share counts are zero.
    """
    r = redis.Redis(connection_pool=settings.REDIS_POOL)
    friend_key = "F:%s:F" % (user_id)
    profile_user_ids = set()
    for story in stories:
        story['friend_comments'] = []
        story['public_comments'] = []
        story['reply_count'] = 0
        if check_all or story['comment_count']:
            comment_key = "C:%s:%s" % (story['story_feed_id'], story['guid_hash'])
            story['comment_count'] = r.scard(comment_key)
            friends_with_comments = [int(f) for f in r.sinter(comment_key, friend_key)]
            sharer_user_ids = [int(f) for f in r.smembers(comment_key)]
            shared_stories = []
            if sharer_user_ids:
                params = {
                    'story_hash': story['story_hash'],
                    'user_id__in': sharer_user_ids,
                }
                shared_stories = cls.objects.filter(**params)
            # Split each commenter's thread into friend vs public buckets.
            for shared_story in shared_stories:
                comments = shared_story.comments_with_author()
                story['reply_count'] += len(comments['replies'])
                if shared_story.user_id in friends_with_comments:
                    story['friend_comments'].append(comments)
                else:
                    story['public_comments'].append(comments)
                if comments.get('source_user_id'):
                    profile_user_ids.add(comments['source_user_id'])
                if comments.get('liking_users'):
                    profile_user_ids = profile_user_ids.union(comments['liking_users'])
            all_comments = story['friend_comments'] + story['public_comments']
            profile_user_ids = profile_user_ids.union([reply['user_id']
                                                       for c in all_comments
                                                       for reply in c['replies']])
            if story.get('source_user_id'):
                profile_user_ids.add(story['source_user_id'])
            story['comment_count_friends'] = len(friends_with_comments)
            story['comment_count_public'] = story['comment_count'] - len(friends_with_comments)

        if check_all or story['share_count']:
            share_key = "S:%s:%s" % (story['story_feed_id'], story['guid_hash'])
            story['share_count'] = r.scard(share_key)
            friends_with_shares = [int(f) for f in r.sinter(share_key, friend_key)]
            nonfriend_user_ids = [int(f) for f in r.sdiff(share_key, friend_key)]
            profile_user_ids.update(nonfriend_user_ids)
            profile_user_ids.update(friends_with_shares)

            # Sharers who also commented are listed under commented_by_*,
            # not shared_by_*, so the two lists stay disjoint.
            story['commented_by_public'] = [c['user_id'] for c in story['public_comments']]
            story['commented_by_friends'] = [c['user_id'] for c in story['friend_comments']]
            story['shared_by_public'] = list(set(nonfriend_user_ids) -
                                             set(story['commented_by_public']))
            story['shared_by_friends'] = list(set(friends_with_shares) -
                                              set(story['commented_by_friends']))
            story['share_count_public'] = story['share_count'] - len(friends_with_shares)
            story['share_count_friends'] = len(friends_with_shares)
            story['friend_user_ids'] = list(set(story['commented_by_friends'] + story['shared_by_friends']))
            story['public_user_ids'] = list(set(story['commented_by_public'] + story['shared_by_public']))
            if not story['share_user_ids']:
                story['share_user_ids'] = story['friend_user_ids'] + story['public_user_ids']
            if story.get('source_user_id'):
                profile_user_ids.add(story['source_user_id'])

    profiles = MSocialProfile.objects.filter(user_id__in=list(profile_user_ids))
    profiles = [profile.canonical(compact=True) for profile in profiles]

    # Toss public comments by private profiles
    # NOTE(review): this loop shadows the `user_id` parameter, and
    # profiles_dict[user_id] would KeyError if a commenter's profile were
    # missing from the fetch above -- confirm both are intentional.
    profiles_dict = dict((profile['user_id'], profile) for profile in profiles)
    for story in stories:
        commented_by_public = story.get('commented_by_public') or [c['user_id'] for c in story['public_comments']]
        for user_id in commented_by_public:
            if profiles_dict[user_id]['private']:
                story['public_comments'] = [c for c in story['public_comments'] if c['user_id'] != user_id]
                story['comment_count_public'] -= 1

    return stories, profiles
2012-06-27 23:57:57 -07:00
2012-07-01 18:34:34 -07:00
@staticmethod
def attach_users_to_stories ( stories , profiles ) :
profiles = dict ( [ ( p [ ' user_id ' ] , p ) for p in profiles ] )
for s , story in enumerate ( stories ) :
for u , user_id in enumerate ( story [ ' shared_by_friends ' ] ) :
2013-07-05 17:18:01 -07:00
if user_id not in profiles : continue
2012-07-01 18:34:34 -07:00
stories [ s ] [ ' shared_by_friends ' ] [ u ] = profiles [ user_id ]
for u , user_id in enumerate ( story [ ' shared_by_public ' ] ) :
2013-07-05 17:18:01 -07:00
if user_id not in profiles : continue
2012-07-01 18:34:34 -07:00
stories [ s ] [ ' shared_by_public ' ] [ u ] = profiles [ user_id ]
2012-07-03 12:53:56 -07:00
for comment_set in [ ' friend_comments ' , ' public_comments ' ] :
for c , comment in enumerate ( story [ comment_set ] ) :
2013-07-05 17:18:01 -07:00
if comment [ ' user_id ' ] not in profiles : continue
2012-07-03 12:53:56 -07:00
stories [ s ] [ comment_set ] [ c ] [ ' user ' ] = profiles [ comment [ ' user_id ' ] ]
if comment [ ' source_user_id ' ] :
stories [ s ] [ comment_set ] [ c ] [ ' source_user ' ] = profiles [ comment [ ' source_user_id ' ] ]
for r , reply in enumerate ( comment [ ' replies ' ] ) :
2013-07-05 17:18:01 -07:00
if reply [ ' user_id ' ] not in profiles : continue
stories [ s ] [ comment_set ] [ c ] [ ' replies ' ] [ r ] [ ' user ' ] = profiles [ reply [ ' user_id ' ] ]
2013-01-10 22:26:52 -08:00
stories [ s ] [ comment_set ] [ c ] [ ' liking_user_ids ' ] = list ( comment [ ' liking_users ' ] )
for u , user_id in enumerate ( comment [ ' liking_users ' ] ) :
2013-07-05 17:18:01 -07:00
if user_id not in profiles : continue
2013-01-10 22:26:52 -08:00
stories [ s ] [ comment_set ] [ c ] [ ' liking_users ' ] [ u ] = profiles [ user_id ]
2012-07-03 17:00:09 -07:00
2012-07-01 18:34:34 -07:00
return stories
2012-07-03 17:00:09 -07:00
@staticmethod
def attach_users_to_comment ( comment , profiles ) :
profiles = dict ( [ ( p [ ' user_id ' ] , p ) for p in profiles ] )
2013-07-05 17:18:01 -07:00
if comment [ ' user_id ' ] not in profiles : return comment
2012-07-03 17:00:09 -07:00
comment [ ' user ' ] = profiles [ comment [ ' user_id ' ] ]
2013-07-05 17:18:01 -07:00
2012-07-03 17:00:09 -07:00
if comment [ ' source_user_id ' ] :
comment [ ' source_user ' ] = profiles [ comment [ ' source_user_id ' ] ]
2013-07-05 17:18:01 -07:00
2012-07-03 17:00:09 -07:00
for r , reply in enumerate ( comment [ ' replies ' ] ) :
2013-08-21 12:38:03 -07:00
if reply [ ' user_id ' ] not in profiles : continue
2012-07-03 17:00:09 -07:00
comment [ ' replies ' ] [ r ] [ ' user ' ] = profiles [ reply [ ' user_id ' ] ]
2013-01-10 22:26:52 -08:00
comment [ ' liking_user_ids ' ] = list ( comment [ ' liking_users ' ] )
for u , user_id in enumerate ( comment [ ' liking_users ' ] ) :
2013-08-21 12:38:03 -07:00
if user_id not in profiles : continue
2013-01-10 22:26:52 -08:00
comment [ ' liking_users ' ] [ u ] = profiles [ user_id ]
2012-07-03 17:00:09 -07:00
return comment
2012-07-15 17:59:19 -07:00
def add_liking_user(self, user_id):
    """Record a like from `user_id`, saving only when it's new."""
    if user_id in self.liking_users:
        return
    self.liking_users.append(user_id)
    self.save()
def remove_liking_user(self, user_id):
    """Withdraw `user_id`'s like, saving only when it was present."""
    if user_id not in self.liking_users:
        return
    self.liking_users.remove(user_id)
    self.save()
2012-07-01 18:34:34 -07:00
2012-06-27 23:57:57 -07:00
def blurblog_permalink(self):
    """Canonical URL of this share on the sharer's blurblog."""
    profile = MSocialProfile.get_user(self.user_id)
    title_slug = slugify(self.story_title)[:20]
    return "%s/story/%s/%s" % (profile.blurblog_url,
                               title_slug,
                               self.guid_hash[:6])
2012-07-05 18:29:38 -07:00
2012-11-26 13:52:18 -08:00
def generate_post_to_service_message(self, include_url=True):
    """Build the short message cross-posted to twitter/facebook/app.net.

    Uses the sharer's comment when present (truncated to 116 chars plus
    the blurblog link); otherwise falls back to the story title with the
    feed title appended (truncated to 92 chars plus the link). With
    include_url=False, returns just the comment or title.
    """
    message = strip_tags(self.comments)
    if not message or len(message) < 1:
        # No comment: fall back to the story title.
        message = self.story_title
        if include_url:
            message = truncate_chars(message, 92)
            feed = Feed.get_by_id(self.story_feed_id)
            message += " (%s)" % truncate_chars(feed.feed_title, 18)
        if include_url:
            message += " " + self.blurblog_permalink()
    elif include_url:
        message = truncate_chars(message, 116)
        message += " " + self.blurblog_permalink()

    return message
def post_to_service(self, service):
    """Cross-post this share to a connected service ('twitter',
    'facebook', or 'appdotnet'), at most once per service.

    Records the service in posted_to_services only when the post succeeds.
    """
    user = User.objects.get(pk=self.user_id)

    if service in self.posted_to_services:
        logging.user(user, "~BM~FRAlready posted to %s." % (service))
        return

    social_service = MSocialServices.objects.get(user_id=self.user_id)

    message = self.generate_post_to_service_message()
    logging.user(user, "~BM~FGPosting to %s: ~SB%s" % (service, message))

    # Dispatch on the service name; unknown services post nothing.
    posters = {
        'twitter': social_service.post_to_twitter,
        'facebook': social_service.post_to_facebook,
        'appdotnet': social_service.post_to_appdotnet,
    }
    poster = posters.get(service)
    posted = poster(self) if poster else False

    if posted:
        self.posted_to_services.append(service)
        self.save()
2012-07-05 18:29:38 -07:00
def notify_user_ids(self, include_parent=True):
    """Return ids of participants to email about activity on this share:
    every replier (and optionally the sharer), minus muted users."""
    user_ids = set(reply.user_id for reply in self.replies
                   if reply.user_id not in self.mute_email_users)
    if include_parent and self.user_id not in self.mute_email_users:
        user_ids.add(self.user_id)
    return list(user_ids)
2012-07-27 18:58:35 -07:00
def reply_for_id(self, reply_id):
    """Return the reply with the given id, or None when not found."""
    matches = (reply for reply in self.replies if reply.reply_id == reply_id)
    return next(matches, None)
    def send_emails_for_new_reply(self, reply_id):
        """Email every participant in this shared story's comment thread about
        a newly posted reply.

        Skips the reply's own author, users in mute_email_users, users with no
        email address, and users who disabled emails. Idempotent per reply:
        sent reply ids are tracked in self.emailed_replies.
        """
        if reply_id in self.emailed_replies:
            logging.debug(" ***> Already sent reply email: %s on %s" % (reply_id, self))
            return

        reply = self.reply_for_id(reply_id)
        if not reply:
            logging.debug(" ***> Reply doesn't exist: %s on %s" % (reply_id, self))
            return

        notify_user_ids = self.notify_user_ids()
        # Don't notify the reply's author about their own reply.
        if reply.user_id in notify_user_ids:
            notify_user_ids.remove(reply.user_id)
        reply_user = User.objects.get(pk=reply.user_id)
        reply_user_profile = MSocialProfile.get_user(reply.user_id)
        sent_emails = 0

        story_feed = Feed.get_by_id(self.story_feed_id)
        # Build the comment payload once, with every participant's compact
        # profile attached, for the email templates.
        comment = self.comments_with_author()
        profile_user_ids = set([comment['user_id']])
        reply_user_ids = list(r['user_id'] for r in comment['replies'])
        profile_user_ids = profile_user_ids.union(reply_user_ids)
        if self.source_user_id:
            profile_user_ids.add(self.source_user_id)
        profiles = MSocialProfile.objects.filter(user_id__in=list(profile_user_ids))
        profiles = [profile.canonical(compact=True) for profile in profiles]
        comment = MSharedStory.attach_users_to_comment(comment, profiles)

        for user_id in notify_user_ids:
            user = User.objects.get(pk=user_id)
            if not user.email or not user.profile.send_emails:
                if not user.email:
                    logging.user(user, "~FMNo email to send to, skipping.")
                elif not user.profile.send_emails:
                    logging.user(user, "~FMDisabled emails, skipping.")
                continue

            # One-click unsubscribe link for this story's notifications,
            # keyed by the recipient's secret token.
            mute_url = "http://%s%s" % (
                Site.objects.get_current().domain,
                reverse('social-mute-story', kwargs={
                    'secret_token': user.profile.secret_token,
                    'shared_story_id': self.id,
                })
            )
            data = {
                'reply_user_profile': reply_user_profile,
                'comment': comment,
                'shared_story': self,
                'story_feed': story_feed,
                'mute_url': mute_url,
            }
            # Newlines in a title would break the email subject header.
            story_title = self.story_title.replace('\n', ' ')

            text = render_to_string('mail/email_reply.txt', data)
            # Inline the CSS so the HTML part renders in email clients.
            html = pynliner.fromString(render_to_string('mail/email_reply.xhtml', data))
            subject = "%s replied to you on \"%s\" on NewsBlur" % (reply_user.username, story_title)
            msg = EmailMultiAlternatives(subject, text,
                                         from_email='NewsBlur <%s>' % settings.HELLO_EMAIL,
                                         to=['%s <%s>' % (user.username, user.email)])
            msg.attach_alternative(html, "text/html")
            msg.send()
            sent_emails += 1
            logging.user(reply_user, "~BB~FM~SBSending %s/%s email%s for new reply: %s" % (
                sent_emails, len(notify_user_ids),
                '' if len(notify_user_ids) == 1 else 's',
                self.story_title[:30]))

        # Mark the reply as emailed only after the send loop completes.
        self.emailed_replies.append(reply.reply_id)
        self.save()
2012-07-05 19:50:02 -07:00
    def send_email_for_reshare(self):
        """Email the original sharer when their shared story is re-shared.

        Sent at most once per share, tracked by self.emailed_reshare. Skips
        sending when the original sharer has no email or disabled emails.
        """
        if self.emailed_reshare:
            # NOTE(review): message says "reply" but this is the re-share path.
            logging.debug(" ***> Already sent reply email: %s" % self)
            return

        reshare_user = User.objects.get(pk=self.user_id)
        reshare_user_profile = MSocialProfile.get_user(self.user_id)
        original_user = User.objects.get(pk=self.source_user_id)
        original_shared_story = MSharedStory.objects.get(user_id=self.source_user_id,
                                                         story_guid=self.story_guid)

        if not original_user.email or not original_user.profile.send_emails:
            if not original_user.email:
                logging.user(original_user, "~FMNo email to send to, skipping.")
            elif not original_user.profile.send_emails:
                logging.user(original_user, "~FMDisabled emails, skipping.")
            return

        story_feed = Feed.get_by_id(self.story_feed_id)
        # Build the comment payload with every participant's compact profile
        # attached, for the email templates.
        comment = self.comments_with_author()
        profile_user_ids = set([comment['user_id']])
        reply_user_ids = [reply['user_id'] for reply in comment['replies']]
        profile_user_ids = profile_user_ids.union(reply_user_ids)
        if self.source_user_id:
            profile_user_ids.add(self.source_user_id)
        profiles = MSocialProfile.objects.filter(user_id__in=list(profile_user_ids))
        profiles = [profile.canonical(compact=True) for profile in profiles]
        comment = MSharedStory.attach_users_to_comment(comment, profiles)

        # The mute link targets the ORIGINAL share (the recipient's own),
        # keyed by the recipient's secret token.
        mute_url = "http://%s%s" % (
            Site.objects.get_current().domain,
            reverse('social-mute-story', kwargs={
                'secret_token': original_user.profile.secret_token,
                'shared_story_id': original_shared_story.id,
            })
        )
        data = {
            'comment': comment,
            'shared_story': self,
            'reshare_user_profile': reshare_user_profile,
            'original_shared_story': original_shared_story,
            'story_feed': story_feed,
            'mute_url': mute_url,
        }
        # Newlines in a title would break the email subject header.
        story_title = self.story_title.replace('\n', ' ')

        text = render_to_string('mail/email_reshare.txt', data)
        # Inline the CSS so the HTML part renders in email clients.
        html = pynliner.fromString(render_to_string('mail/email_reshare.xhtml', data))
        subject = "%s re-shared \"%s\" from you on NewsBlur" % (reshare_user.username, story_title)
        msg = EmailMultiAlternatives(subject, text,
                                     from_email='NewsBlur <%s>' % settings.HELLO_EMAIL,
                                     to=['%s <%s>' % (original_user.username, original_user.email)])
        msg.attach_alternative(html, "text/html")
        msg.send()

        self.emailed_reshare = True
        self.save()
        logging.user(reshare_user, "~BB~FM~SBSending %s email for story re-share: %s" % (
            original_user.username,
            self.story_title[:30]))
2012-09-10 17:41:01 -07:00
    def calculate_image_sizes(self, force=False):
        """Fetch dimensions for up to 10 images referenced in this shared
        story's content and cache them, largest area first, on
        self.image_sizes / self.image_count.

        Returns the list of {'src': url, 'size': (w, h)} dicts (empty list if
        none qualified), or None when the story has no content. A previously
        computed result is returned unless force=True.
        """
        if not self.story_content_z:
            return
        if not force and self.image_count:
            return self.image_sizes

        headers = {
            'User-Agent': 'NewsBlur Image Fetcher - %s '
                          '(Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_1) '
                          'AppleWebKit/534.48.3 (KHTML, like Gecko) Version/5.1 '
                          'Safari/534.48.3)' % (
                settings.NEWSBLUR_URL
            ),
        }
        soup = BeautifulSoup(zlib.decompress(self.story_content_z))
        image_sources = [img.get('src') for img in soup.findAll('img')]
        image_sizes = []

        for image_source in image_sources[:10]:
            # IGNORE_IMAGE_SOURCES is a module-level blocklist (ads, beacons).
            if any(ignore in image_source for ignore in IGNORE_IMAGE_SOURCES):
                continue
            req = requests.get(image_source, headers=headers, stream=True)
            # Only the first 30 bytes are passed to the vendor image_size
            # helper — presumably enough to parse dimensions from the image
            # header; TODO confirm for all supported formats.
            datastream = StringIO(req.content[:30])
            _, width, height = image_size(datastream)
            # Skip tracking pixels and tiny icons.
            if width <= 16 or height <= 16:
                continue
            image_sizes.append({'src': image_source, 'size': (width, height)})
        if image_sizes:
            image_sizes = sorted(image_sizes, key=lambda i: i['size'][0] * i['size'][1],
                                 reverse=True)
            self.image_sizes = image_sizes
        self.image_count = len(image_sizes)
        self.save()

        logging.debug(" ---> ~SN~FGFetched image sizes on shared story: ~SB%s images" % self.image_count)

        return image_sizes
2013-01-28 15:43:00 -08:00
def fetch_original_text ( self , force = False , request = None ) :
original_text_z = self . original_text_z
2013-07-15 11:06:50 -07:00
feed = Feed . get_by_id ( self . story_feed_id )
2012-09-10 17:41:01 -07:00
2013-01-28 15:43:00 -08:00
if not original_text_z or force :
2013-07-15 11:06:50 -07:00
ti = TextImporter ( self , feed , request = request )
2013-01-28 15:43:00 -08:00
original_text = ti . fetch ( )
else :
logging . user ( request , " ~FYFetching ~FGoriginal~FY story text, ~SBfound. " )
original_text = zlib . decompress ( original_text_z )
2012-09-10 17:41:01 -07:00
2013-01-28 15:43:00 -08:00
return original_text
2012-02-02 17:43:17 -08:00
2011-12-20 11:49:49 -08:00
class MSocialServices ( mongo . Document ) :
user_id = mongo . IntField ( )
2011-12-22 13:36:03 -08:00
autofollow = mongo . BooleanField ( default = True )
2011-12-20 11:49:49 -08:00
twitter_uid = mongo . StringField ( )
twitter_access_key = mongo . StringField ( )
twitter_access_secret = mongo . StringField ( )
twitter_friend_ids = mongo . ListField ( mongo . StringField ( ) )
twitter_picture_url = mongo . StringField ( )
twitter_username = mongo . StringField ( )
twitter_refresh_date = mongo . DateTimeField ( )
facebook_uid = mongo . StringField ( )
facebook_access_token = mongo . StringField ( )
facebook_friend_ids = mongo . ListField ( mongo . StringField ( ) )
facebook_picture_url = mongo . StringField ( )
facebook_refresh_date = mongo . DateTimeField ( )
2013-03-02 12:48:31 -08:00
appdotnet_uid = mongo . StringField ( )
appdotnet_access_token = mongo . StringField ( )
appdotnet_friend_ids = mongo . ListField ( mongo . StringField ( ) )
appdotnet_picture_url = mongo . StringField ( )
appdotnet_refresh_date = mongo . DateTimeField ( )
2011-12-24 00:30:37 -08:00
upload_picture_url = mongo . StringField ( )
2012-07-10 15:24:54 -07:00
syncing_twitter = mongo . BooleanField ( default = False )
syncing_facebook = mongo . BooleanField ( default = False )
2013-03-02 12:48:31 -08:00
syncing_appdotnet = mongo . BooleanField ( default = False )
2011-12-20 11:49:49 -08:00
meta = {
' collection ' : ' social_services ' ,
2013-03-02 12:48:31 -08:00
' indexes ' : [ ' user_id ' , ' twitter_friend_ids ' , ' facebook_friend_ids ' , ' twitter_uid ' , ' facebook_uid ' , ' appdotnet_uid ' ] ,
2011-12-20 11:49:49 -08:00
' allow_inheritance ' : False ,
}
2011-12-25 20:50:59 -08:00
def __unicode__ ( self ) :
2012-03-07 15:01:44 -08:00
user = User . objects . get ( pk = self . user_id )
2013-03-02 12:48:31 -08:00
return " %s (Twitter: %s , FB: %s , ADN: %s ) " % ( user . username , self . twitter_uid , self . facebook_uid , self . appdotnet_uid )
2011-12-22 13:36:03 -08:00
2013-06-12 13:52:43 -07:00
def canonical ( self ) :
2011-12-22 13:36:03 -08:00
user = User . objects . get ( pk = self . user_id )
return {
2011-12-25 12:27:05 -08:00
' twitter ' : {
' twitter_username ' : self . twitter_username ,
' twitter_picture_url ' : self . twitter_picture_url ,
' twitter_uid ' : self . twitter_uid ,
2012-07-10 15:24:54 -07:00
' syncing ' : self . syncing_twitter ,
2011-12-25 12:27:05 -08:00
} ,
' facebook ' : {
' facebook_uid ' : self . facebook_uid ,
' facebook_picture_url ' : self . facebook_picture_url ,
2012-07-10 15:24:54 -07:00
' syncing ' : self . syncing_facebook ,
2011-12-25 12:27:05 -08:00
} ,
2013-03-02 12:48:31 -08:00
' appdotnet ' : {
' appdotnet_uid ' : self . appdotnet_uid ,
' appdotnet_picture_url ' : self . appdotnet_picture_url ,
' syncing ' : self . syncing_appdotnet ,
} ,
2011-12-25 12:27:05 -08:00
' gravatar ' : {
2012-10-18 14:54:07 -07:00
' gravatar_picture_url ' : " https://www.gravatar.com/avatar/ " + \
2013-05-23 14:37:15 -04:00
hashlib . md5 ( user . email . lower ( ) ) . hexdigest ( )
2011-12-25 12:27:05 -08:00
} ,
' upload ' : {
' upload_picture_url ' : self . upload_picture_url
2011-12-22 13:36:03 -08:00
}
}
2011-12-20 11:49:49 -08:00
2012-08-14 00:26:42 -07:00
@classmethod
def get_user ( cls , user_id ) :
2012-08-31 13:51:24 -07:00
try :
profile , created = cls . objects . get_or_create ( user_id = user_id )
except cls . MultipleObjectsReturned :
dupes = cls . objects . filter ( user_id = user_id )
logging . debug ( " ---> ~FRDeleting dupe social services. %s found. " % dupes . count ( ) )
for dupe in dupes [ 1 : ] :
dupe . delete ( )
profile = dupes [ 0 ]
created = False
2012-08-14 00:26:42 -07:00
if created :
profile . save ( )
return profile
2012-06-27 23:57:57 -07:00
@classmethod
def profile ( cls , user_id ) :
2012-08-14 00:26:42 -07:00
profile = cls . get_user ( user_id = user_id )
2013-06-12 13:52:43 -07:00
return profile . canonical ( )
2013-01-08 14:11:59 -08:00
def save_uploaded_photo ( self , photo ) :
photo_body = photo . read ( )
filename = photo . name
s3 = s3_utils . S3Store ( )
image_name = s3 . save_profile_picture ( self . user_id , filename , photo_body )
if image_name :
self . upload_picture_url = " https://s3.amazonaws.com/ %s /avatars/ %s /thumbnail_ %s " % (
settings . S3_AVATARS_BUCKET_NAME ,
self . user_id ,
image_name ,
)
self . save ( )
return image_name and self . upload_picture_url
2012-06-27 23:57:57 -07:00
2011-12-21 09:43:17 -08:00
def twitter_api ( self ) :
twitter_consumer_key = settings . TWITTER_CONSUMER_KEY
twitter_consumer_secret = settings . TWITTER_CONSUMER_SECRET
auth = tweepy . OAuthHandler ( twitter_consumer_key , twitter_consumer_secret )
auth . set_access_token ( self . twitter_access_key , self . twitter_access_secret )
api = tweepy . API ( auth )
return api
def facebook_api ( self ) :
2011-12-20 11:49:49 -08:00
graph = facebook . GraphAPI ( self . facebook_access_token )
2011-12-21 09:43:17 -08:00
return graph
2013-03-02 12:48:31 -08:00
def appdotnet_api ( self ) :
adn_api = appdotnet . Appdotnet ( access_token = self . appdotnet_access_token )
return adn_api
2011-12-21 09:43:17 -08:00
def sync_twitter_friends ( self ) :
2012-08-11 21:01:33 -07:00
user = User . objects . get ( pk = self . user_id )
2012-11-20 09:41:58 -08:00
logging . user ( user , " ~BG~FMTwitter import starting... " )
2012-08-12 11:59:39 -07:00
2011-12-21 09:43:17 -08:00
api = self . twitter_api ( )
if not api :
2012-11-20 09:41:58 -08:00
logging . user ( user , " ~BG~FMTwitter import ~SBfailed~SN: no api access. " )
2012-08-11 20:51:12 -07:00
self . syncing_twitter = False
self . save ( )
2011-12-21 09:43:17 -08:00
return
2013-06-12 13:52:43 -07:00
twitter_user = api . me ( )
self . twitter_picture_url = twitter_user . profile_image_url
self . twitter_username = twitter_user . screen_name
self . twitter_refreshed_date = datetime . datetime . utcnow ( )
self . syncing_twitter = False
self . save ( )
2012-07-25 23:57:10 -07:00
profile = MSocialProfile . get_user ( self . user_id )
2011-12-25 20:50:59 -08:00
profile . location = profile . location or twitter_user . location
profile . bio = profile . bio or twitter_user . description
profile . website = profile . website or twitter_user . url
profile . save ( )
2012-06-27 16:46:30 -07:00
profile . count_follows ( )
2012-08-11 20:51:12 -07:00
2011-12-25 20:50:59 -08:00
if not profile . photo_url or not profile . photo_service :
self . set_photo ( ' twitter ' )
2013-06-12 14:26:09 -07:00
try :
friend_ids = list ( unicode ( friend . id ) for friend in tweepy . Cursor ( api . friends ) . items ( ) )
except tweepy . TweepError , e :
logging . user ( user , " ~BG~FMTwitter import ~SBfailed~SN: %s " % e )
return
if not friend_ids :
logging . user ( user , " ~BG~FMTwitter import ~SBfailed~SN: no friend_ids. " )
self . twitter_friend_ids = friend_ids
self . save ( )
2011-12-25 20:50:59 -08:00
2013-06-12 13:59:08 -07:00
following = self . follow_twitter_friends ( )
if not following :
logging . user ( user , " ~BG~FMTwitter import finished. " )
2012-08-12 11:59:39 -07:00
def follow_twitter_friends ( self ) :
social_profile = MSocialProfile . get_user ( self . user_id )
following = [ ]
followers = 0
if not self . autofollow :
return following
# Follow any friends already on NewsBlur
user_social_services = MSocialServices . objects . filter ( twitter_uid__in = self . twitter_friend_ids )
for user_social_service in user_social_services :
followee_user_id = user_social_service . user_id
socialsub = social_profile . follow_user ( followee_user_id )
if socialsub :
following . append ( followee_user_id )
2012-08-31 18:12:28 -07:00
# Friends already on NewsBlur should follow back
# following_users = MSocialServices.objects.filter(twitter_friend_ids__contains=self.twitter_uid)
# for following_user in following_users:
# if following_user.autofollow:
# following_user_profile = MSocialProfile.get_user(following_user.user_id)
# following_user_profile.follow_user(self.user_id, check_unfollowed=True)
# followers += 1
2012-08-12 11:59:39 -07:00
user = User . objects . get ( pk = self . user_id )
2012-11-20 09:41:58 -08:00
logging . user ( user , " ~BG~FMTwitter import: %s users, now following ~SB %s ~SN with ~SB %s ~SN follower-backs " % ( len ( self . twitter_friend_ids ) , len ( following ) , followers ) )
2012-08-12 11:59:39 -07:00
return following
2012-08-11 20:51:12 -07:00
def sync_facebook_friends ( self ) :
2012-08-11 21:01:33 -07:00
user = User . objects . get ( pk = self . user_id )
2012-11-20 09:41:58 -08:00
logging . user ( user , " ~BG~FMFacebook import starting... " )
2012-08-12 11:59:39 -07:00
2011-12-21 09:43:17 -08:00
graph = self . facebook_api ( )
2011-12-20 11:49:49 -08:00
if not graph :
2012-11-20 09:41:58 -08:00
logging . user ( user , " ~BG~FMFacebook import ~SBfailed~SN: no api access. " )
2012-08-11 20:51:12 -07:00
self . syncing_facebook = False
self . save ( )
2011-12-20 11:49:49 -08:00
return
friends = graph . get_connections ( " me " , " friends " )
if not friends :
2012-11-20 09:41:58 -08:00
logging . user ( user , " ~BG~FMFacebook import ~SBfailed~SN: no friend_ids. " )
2012-08-11 20:51:12 -07:00
self . syncing_facebook = False
self . save ( )
2011-12-20 11:49:49 -08:00
return
2011-12-21 09:43:17 -08:00
facebook_friend_ids = [ unicode ( friend [ " id " ] ) for friend in friends [ " data " ] ]
2011-12-20 11:49:49 -08:00
self . facebook_friend_ids = facebook_friend_ids
2011-12-21 09:43:17 -08:00
self . facebook_refresh_date = datetime . datetime . utcnow ( )
2011-12-22 13:36:03 -08:00
self . facebook_picture_url = " //graph.facebook.com/ %s /picture " % self . facebook_uid
2012-08-11 21:01:33 -07:00
self . syncing_facebook = False
2011-12-22 13:36:03 -08:00
self . save ( )
2011-12-24 00:30:37 -08:00
2011-12-25 20:50:59 -08:00
facebook_user = graph . request ( ' me ' , args = { ' fields ' : ' website,bio,location ' } )
2012-07-25 23:57:10 -07:00
profile = MSocialProfile . get_user ( self . user_id )
2011-12-25 20:50:59 -08:00
profile . location = profile . location or ( facebook_user . get ( ' location ' ) and facebook_user [ ' location ' ] [ ' name ' ] )
profile . bio = profile . bio or facebook_user . get ( ' bio ' )
2012-08-11 11:52:00 -07:00
if not profile . website and facebook_user . get ( ' website ' ) :
profile . website = facebook_user . get ( ' website ' ) . split ( ) [ 0 ]
2011-12-25 20:50:59 -08:00
profile . save ( )
2012-06-27 16:46:30 -07:00
profile . count_follows ( )
2011-12-25 20:50:59 -08:00
if not profile . photo_url or not profile . photo_service :
self . set_photo ( ' facebook ' )
2012-07-10 15:24:54 -07:00
2012-08-12 11:59:39 -07:00
self . follow_facebook_friends ( )
2011-12-24 00:30:37 -08:00
def follow_facebook_friends ( self ) :
2012-07-25 23:57:10 -07:00
social_profile = MSocialProfile . get_user ( self . user_id )
2011-12-24 00:30:37 -08:00
following = [ ]
followers = 0
2012-07-11 00:43:53 -07:00
if not self . autofollow :
return following
# Follow any friends already on NewsBlur
user_social_services = MSocialServices . objects . filter ( facebook_uid__in = self . facebook_friend_ids )
for user_social_service in user_social_services :
followee_user_id = user_social_service . user_id
socialsub = social_profile . follow_user ( followee_user_id )
if socialsub :
2012-03-07 15:01:44 -08:00
following . append ( followee_user_id )
2012-07-11 00:43:53 -07:00
# Friends already on NewsBlur should follow back
2012-08-31 18:12:28 -07:00
# following_users = MSocialServices.objects.filter(facebook_friend_ids__contains=self.facebook_uid)
# for following_user in following_users:
# if following_user.autofollow:
# following_user_profile = MSocialProfile.get_user(following_user.user_id)
# following_user_profile.follow_user(self.user_id, check_unfollowed=True)
# followers += 1
2011-12-24 00:30:37 -08:00
user = User . objects . get ( pk = self . user_id )
2012-11-20 09:41:58 -08:00
logging . user ( user , " ~BG~FMFacebook import: %s users, now following ~SB %s ~SN with ~SB %s ~SN follower-backs " % ( len ( self . facebook_friend_ids ) , len ( following ) , followers ) )
2011-12-24 00:30:37 -08:00
return following
2013-03-02 12:48:31 -08:00
def sync_appdotnet_friends ( self ) :
user = User . objects . get ( pk = self . user_id )
logging . user ( user , " ~BG~FMApp.net import starting... " )
api = self . appdotnet_api ( )
if not api :
logging . user ( user , " ~BG~FMApp.net import ~SBfailed~SN: no api access. " )
self . syncing_appdotnet = False
self . save ( )
return
2011-12-24 00:30:37 -08:00
2013-03-02 12:48:31 -08:00
friend_ids = [ ]
has_more_friends = True
before_id = None
since_id = None
while has_more_friends :
friends_resp = api . getUserFollowingIds ( self . appdotnet_uid ,
before_id = before_id ,
since_id = since_id )
friends = json . decode ( friends_resp )
before_id = friends [ ' meta ' ] . get ( ' min_id ' )
since_id = friends [ ' meta ' ] . get ( ' max_id ' )
has_more_friends = friends [ ' meta ' ] . get ( ' more ' )
friend_ids . extend ( [ fid for fid in friends [ ' data ' ] ] )
if not friend_ids :
logging . user ( user , " ~BG~FMApp.net import ~SBfailed~SN: no friend_ids. " )
self . syncing_appdotnet = False
self . save ( )
return
adn_user = json . decode ( api . getUser ( self . appdotnet_uid ) ) [ ' data ' ]
self . appdotnet_picture_url = adn_user [ ' avatar_image ' ] [ ' url ' ]
self . appdotnet_username = adn_user [ ' username ' ]
self . appdotnet_friend_ids = friend_ids
self . appdotnet_refreshed_date = datetime . datetime . utcnow ( )
self . syncing_appdotnet = False
self . save ( )
profile = MSocialProfile . get_user ( self . user_id )
profile . bio = profile . bio or adn_user [ ' description ' ] [ ' text ' ]
profile . save ( )
profile . count_follows ( )
if not profile . photo_url or not profile . photo_service :
self . set_photo ( ' appdotnet ' )
self . follow_appdotnet_friends ( )
def follow_appdotnet_friends ( self ) :
social_profile = MSocialProfile . get_user ( self . user_id )
following = [ ]
followers = 0
if not self . autofollow :
return following
# Follow any friends already on NewsBlur
user_social_services = MSocialServices . objects . filter ( appdotnet_uid__in = self . appdotnet_friend_ids )
for user_social_service in user_social_services :
followee_user_id = user_social_service . user_id
socialsub = social_profile . follow_user ( followee_user_id )
if socialsub :
following . append ( followee_user_id )
# Friends already on NewsBlur should follow back
# following_users = MSocialServices.objects.filter(appdotnet_friend_ids__contains=self.appdotnet_uid)
# for following_user in following_users:
# if following_user.autofollow:
# following_user_profile = MSocialProfile.get_user(following_user.user_id)
# following_user_profile.follow_user(self.user_id, check_unfollowed=True)
# followers += 1
user = User . objects . get ( pk = self . user_id )
logging . user ( user , " ~BG~FMApp.net import: %s users, now following ~SB %s ~SN with ~SB %s ~SN follower-backs " % ( len ( self . appdotnet_friend_ids ) , len ( following ) , followers ) )
return following
2011-12-22 13:36:03 -08:00
def disconnect_twitter ( self ) :
self . twitter_uid = None
self . save ( )
2013-03-02 12:48:31 -08:00
2011-12-22 13:36:03 -08:00
def disconnect_facebook ( self ) :
self . facebook_uid = None
2011-12-21 09:43:17 -08:00
self . save ( )
2013-03-02 12:48:31 -08:00
def disconnect_appdotnet ( self ) :
self . appdotnet_uid = None
self . save ( )
2011-12-25 20:50:59 -08:00
def set_photo ( self , service ) :
2012-07-25 23:57:10 -07:00
profile = MSocialProfile . get_user ( self . user_id )
2012-04-04 20:54:54 -07:00
if service == ' nothing ' :
service = None
2011-12-25 20:50:59 -08:00
profile . photo_service = service
2012-04-04 20:54:54 -07:00
if not service :
profile . photo_url = None
elif service == ' twitter ' :
2011-12-25 20:50:59 -08:00
profile . photo_url = self . twitter_picture_url
elif service == ' facebook ' :
profile . photo_url = self . facebook_picture_url
elif service == ' upload ' :
profile . photo_url = self . upload_picture_url
elif service == ' gravatar ' :
user = User . objects . get ( pk = self . user_id )
2012-10-18 14:54:07 -07:00
profile . photo_url = " https://www.gravatar.com/avatar/ " + \
2011-12-25 20:50:59 -08:00
hashlib . md5 ( user . email ) . hexdigest ( )
profile . save ( )
2012-04-04 20:54:54 -07:00
return profile
2012-06-27 23:57:57 -07:00
2012-11-26 13:52:18 -08:00
def post_to_twitter ( self , shared_story ) :
message = shared_story . generate_post_to_service_message ( )
2013-03-02 13:37:43 -08:00
2012-06-27 23:57:57 -07:00
try :
api = self . twitter_api ( )
api . update_status ( status = message )
except tweepy . TweepError , e :
print e
return
2013-03-02 13:37:43 -08:00
2012-06-27 23:57:57 -07:00
return True
2012-11-26 13:52:18 -08:00
def post_to_facebook ( self , shared_story ) :
message = shared_story . generate_post_to_service_message ( include_url = False )
shared_story . calculate_image_sizes ( )
content = zlib . decompress ( shared_story . story_content_z ) [ : 1024 ]
2012-10-16 13:40:27 -07:00
2012-06-27 23:57:57 -07:00
try :
api = self . facebook_api ( )
2012-10-16 13:40:27 -07:00
# api.put_wall_post(message=message)
api . put_object ( ' me ' , ' %s :share ' % settings . FACEBOOK_NAMESPACE ,
2012-11-26 13:52:18 -08:00
link = shared_story . blurblog_permalink ( ) ,
2012-10-16 13:40:27 -07:00
type = " link " ,
2012-11-26 13:52:18 -08:00
name = shared_story . story_title ,
2012-10-16 13:40:27 -07:00
description = content ,
2012-11-26 17:01:19 -08:00
website = shared_story . blurblog_permalink ( ) ,
2012-11-27 09:06:39 -08:00
message = message ,
2012-10-16 13:40:27 -07:00
)
2012-06-27 23:57:57 -07:00
except facebook . GraphAPIError , e :
print e
return
2013-03-02 13:37:43 -08:00
return True
2012-04-12 11:18:56 -07:00
2013-03-02 13:37:43 -08:00
def post_to_appdotnet ( self , shared_story ) :
message = shared_story . generate_post_to_service_message ( )
try :
api = self . appdotnet_api ( )
api . createPost ( text = message , links = [ {
' text ' : shared_story . story_title ,
' url ' : shared_story . blurblog_permalink ( )
} ] )
except Exception , e :
print e
return
2012-06-27 23:57:57 -07:00
return True
2013-03-02 13:37:43 -08:00
2012-04-12 11:18:56 -07:00
class MInteraction(mongo.Document):
    # An entry on a user's interactions timeline: something another user did
    # involving this user (follow, comment reply, comment like, ...).
    user_id = mongo.IntField()                   # owner of the timeline
    date = mongo.DateTimeField(default=datetime.datetime.now)
    category = mongo.StringField()               # e.g. 'follow', 'comment_reply', 'comment_like'
    title = mongo.StringField()
    content = mongo.StringField()
    with_user_id = mongo.IntField()              # the other user in the interaction
    # Dynamic because it holds values like "social:<user_id>" strings as well
    # as numeric feed ids.
    feed_id = mongo.DynamicField()
    story_feed_id = mongo.IntField()
    content_id = mongo.StringField()             # typically a story id

    meta = {
        'collection': 'interactions',
        'indexes': [('user_id', '-date'), 'category', 'with_user_id'],
        'allow_inheritance': False,
        'index_drop_dups': True,
        'ordering': ['-date'],                   # newest first by default
    }
def __unicode__ ( self ) :
user = User . objects . get ( pk = self . user_id )
with_user = self . with_user_id and User . objects . get ( pk = self . with_user_id )
return " < %s > %s on %s : %s - %s " % ( user . username , with_user and with_user . username , self . date ,
self . category , self . content and self . content [ : 20 ] )
2013-06-12 13:52:43 -07:00
def canonical ( self ) :
2012-04-19 19:09:31 -07:00
return {
' date ' : self . date ,
' category ' : self . category ,
' title ' : self . title ,
' content ' : self . content ,
' with_user_id ' : self . with_user_id ,
2012-07-28 16:41:17 -07:00
' feed_id ' : self . feed_id ,
' story_feed_id ' : self . story_feed_id ,
2012-04-19 19:09:31 -07:00
' content_id ' : self . content_id ,
}
2013-02-04 16:16:03 -08:00
@classmethod
def publish_update_to_subscribers ( self , user_id ) :
user = User . objects . get ( pk = user_id )
try :
r = redis . Redis ( connection_pool = settings . REDIS_POOL )
listeners_count = r . publish ( user . username , ' interaction:new ' )
if listeners_count :
logging . debug ( " ---> ~FMPublished to %s subscribers " % ( listeners_count ) )
except redis . ConnectionError :
logging . debug ( " ***> ~BMRedis is unavailable for real-time. " )
2012-04-12 11:18:56 -07:00
    @classmethod
    def user(cls, user_id, page=1, limit=None, categories=None):
        """Return (interactions, has_next_page) for a user's interactions
        timeline, newest first (collection default ordering is '-date').

        Interactions dated after the user's dashboard_date (or last_seen_on)
        are flagged `is_new`. `categories` optionally restricts the
        interaction types; `page`/`limit` paginate (default 4 per page).
        """
        user_profile = Profile.objects.get(user=user_id)
        dashboard_date = user_profile.dashboard_date or user_profile.last_seen_on
        page = max(1, page)
        limit = int(limit) if limit else 4
        offset = (page-1) * limit

        interactions_db = cls.objects.filter(user_id=user_id)
        if categories:
            interactions_db = interactions_db.filter(category__in=categories)
        # Fetch one extra row to detect whether another page exists.
        # NOTE(review): this relies on mongoengine queryset slicing being
        # absolute (setting skip/limit) so the second slice below re-applies
        # the window rather than slicing already-sliced results — confirm
        # before refactoring.
        interactions_db = interactions_db[offset:offset+limit+1]
        has_next_page = len(interactions_db) > limit
        interactions_db = interactions_db[offset:offset+limit]

        with_user_ids = [i.with_user_id for i in interactions_db if i.with_user_id]
        social_profiles = dict((p.user_id, p) for p in MSocialProfile.objects.filter(user_id__in=with_user_ids))

        interactions = []
        for interaction_db in interactions_db:
            interaction = interaction_db.canonical()
            social_profile = social_profiles.get(interaction_db.with_user_id)
            if social_profile:
                interaction['photo_url'] = social_profile.profile_photo_url
            interaction['with_user'] = social_profiles.get(interaction_db.with_user_id)
            interaction['time_since'] = relative_timesince(interaction_db.date)
            interaction['date'] = interaction_db.date
            interaction['is_new'] = interaction_db.date > dashboard_date
            interactions.append(interaction)

        return interactions, has_next_page
2013-01-30 18:28:37 -08:00
@classmethod
def user_unread_count ( cls , user_id ) :
user_profile = Profile . objects . get ( user = user_id )
dashboard_date = user_profile . dashboard_date or user_profile . last_seen_on
interactions_count = cls . objects . filter ( user_id = user_id , date__gte = dashboard_date ) . count ( )
return interactions_count
2012-04-12 11:18:56 -07:00
    @classmethod
    def new_follow(cls, follower_user_id, followee_user_id):
        """Record a 'follow' interaction on the followee's timeline, de-duping
        any duplicates from racing requests, then push a real-time update."""
        params = {
            'user_id': followee_user_id,
            'with_user_id': follower_user_id,
            'category': 'follow',
        }
        try:
            cls.objects.get_or_create(**params)
        except cls.MultipleObjectsReturned:
            # Keep the newest duplicate, delete the rest.
            dupes = cls.objects.filter(**params).order_by('-date')
            logging.debug(" ---> ~FRDeleting dupe follow interactions. %s found." % dupes.count())
            for dupe in dupes[1:]:
                dupe.delete()

        cls.publish_update_to_subscribers(followee_user_id)
2012-04-12 11:18:56 -07:00
@classmethod
2012-07-28 16:41:17 -07:00
def new_comment_reply ( cls , user_id , reply_user_id , reply_content , story_id , story_feed_id , story_title = None , original_message = None ) :
2012-05-01 18:15:58 -07:00
params = {
' user_id ' : user_id ,
' with_user_id ' : reply_user_id ,
' category ' : ' comment_reply ' ,
2013-01-03 17:56:01 -08:00
' content ' : linkify ( strip_tags ( reply_content ) ) ,
2012-07-28 16:41:17 -07:00
' feed_id ' : " social: %s " % user_id ,
' story_feed_id ' : story_feed_id ,
2012-07-28 12:37:16 -07:00
' title ' : story_title ,
2012-05-01 18:15:58 -07:00
' content_id ' : story_id ,
}
if original_message :
params [ ' content ' ] = original_message
original = cls . objects . filter ( * * params ) . limit ( 1 )
if original :
original = original [ 0 ]
2013-01-03 17:56:01 -08:00
original . content = linkify ( strip_tags ( reply_content ) )
2012-05-01 18:15:58 -07:00
original . save ( )
else :
original_message = None
if not original_message :
cls . objects . create ( * * params )
2013-02-04 16:16:03 -08:00
cls . publish_update_to_subscribers ( user_id )
2012-07-28 22:36:50 -07:00
@classmethod
def remove_comment_reply ( cls , user_id , reply_user_id , reply_content , story_id , story_feed_id ) :
params = {
' user_id ' : user_id ,
' with_user_id ' : reply_user_id ,
' category ' : ' comment_reply ' ,
2013-01-03 17:56:01 -08:00
' content ' : linkify ( strip_tags ( reply_content ) ) ,
2012-07-28 22:36:50 -07:00
' feed_id ' : " social: %s " % user_id ,
' story_feed_id ' : story_feed_id ,
' content_id ' : story_id ,
}
original = cls . objects . filter ( * * params )
original . delete ( )
2013-02-04 16:16:03 -08:00
cls . publish_update_to_subscribers ( user_id )
2012-07-15 19:29:05 -07:00
@classmethod
2012-07-28 16:41:17 -07:00
def new_comment_like ( cls , liking_user_id , comment_user_id , story_id , story_title , comments ) :
2012-07-28 13:16:18 -07:00
cls . objects . get_or_create ( user_id = comment_user_id ,
with_user_id = liking_user_id ,
category = " comment_like " ,
2012-07-28 16:41:17 -07:00
feed_id = " social: %s " % comment_user_id ,
2012-07-28 13:16:18 -07:00
content_id = story_id ,
defaults = {
" title " : story_title ,
" content " : comments ,
} )
2013-02-04 16:16:03 -08:00
cls . publish_update_to_subscribers ( comment_user_id )
2012-04-12 11:18:56 -07:00
@classmethod
2012-07-28 16:41:17 -07:00
def new_reply_reply ( cls , user_id , comment_user_id , reply_user_id , reply_content , story_id , story_feed_id , story_title = None , original_message = None ) :
2012-05-01 18:15:58 -07:00
params = {
' user_id ' : user_id ,
' with_user_id ' : reply_user_id ,
' category ' : ' reply_reply ' ,
2013-01-03 17:56:01 -08:00
' content ' : linkify ( strip_tags ( reply_content ) ) ,
2012-07-28 16:41:17 -07:00
' feed_id ' : " social: %s " % comment_user_id ,
' story_feed_id ' : story_feed_id ,
2012-07-28 12:37:16 -07:00
' title ' : story_title ,
2012-05-01 18:15:58 -07:00
' content_id ' : story_id ,
}
if original_message :
params [ ' content ' ] = original_message
original = cls . objects . filter ( * * params ) . limit ( 1 )
if original :
original = original [ 0 ]
original . content = reply_content
original . save ( )
else :
original_message = None
if not original_message :
cls . objects . create ( * * params )
2013-02-04 16:16:03 -08:00
cls . publish_update_to_subscribers ( user_id )
2012-07-28 22:36:50 -07:00
@classmethod
def remove_reply_reply ( cls , user_id , comment_user_id , reply_user_id , reply_content , story_id , story_feed_id ) :
params = {
' user_id ' : user_id ,
' with_user_id ' : reply_user_id ,
' category ' : ' reply_reply ' ,
2013-01-03 17:56:01 -08:00
' content ' : linkify ( strip_tags ( reply_content ) ) ,
2012-07-28 22:36:50 -07:00
' feed_id ' : " social: %s " % comment_user_id ,
' story_feed_id ' : story_feed_id ,
' content_id ' : story_id ,
}
original = cls . objects . filter ( * * params )
original . delete ( )
2013-02-04 16:16:03 -08:00
cls . publish_update_to_subscribers ( user_id )
2012-05-09 14:48:10 -07:00
@classmethod
def new_reshared_story ( cls , user_id , reshare_user_id , comments , story_title , story_feed_id , story_id , original_comments = None ) :
params = {
' user_id ' : user_id ,
' with_user_id ' : reshare_user_id ,
' category ' : ' story_reshare ' ,
' content ' : comments ,
' title ' : story_title ,
2012-07-28 16:41:17 -07:00
' feed_id ' : " social: %s " % reshare_user_id ,
' story_feed_id ' : story_feed_id ,
2012-05-09 14:48:10 -07:00
' content_id ' : story_id ,
}
if original_comments :
params [ ' content ' ] = original_comments
original = cls . objects . filter ( * * params ) . limit ( 1 )
if original :
2013-02-04 16:16:03 -08:00
interaction = original [ 0 ]
interaction . content = comments
interaction . save ( )
2012-05-09 14:48:10 -07:00
else :
original_comments = None
if not original_comments :
cls . objects . create ( * * params )
2013-02-04 16:16:03 -08:00
cls . publish_update_to_subscribers ( user_id )
2012-04-16 11:21:52 -07:00
class MActivity(mongo.Document):
    """A single entry in a user's activity stream.

    Categories include 'star', 'feedsub', 'follow', 'comment_reply',
    'comment_like', 'sharedstory', and 'signup' (see the classmethods
    below). feed_id is a DynamicField because it holds either an int or a
    "social:<user_id>" string.

    Fixes relative to the previous revision:
      * user(): the second slice double-applied `offset` to a list that
        had already been offset, returning wrong/empty pages for page > 1.
      * new_shared_story(): assigned `a.source_user_id`, which is not a
        declared field on this document, so the guarded update was never
        persisted; the guard compares with_user_id, which is the intended
        target.
    """
    user_id = mongo.IntField()
    date = mongo.DateTimeField(default=datetime.datetime.now)
    category = mongo.StringField()
    title = mongo.StringField()
    content = mongo.StringField()
    with_user_id = mongo.IntField()
    feed_id = mongo.DynamicField()   # int feed id or "social:<user_id>"
    story_feed_id = mongo.IntField()
    content_id = mongo.StringField()

    meta = {
        'collection': 'activities',
        'indexes': [('user_id', '-date'), 'category', 'with_user_id'],
        'allow_inheritance': False,
        'index_drop_dups': True,
        'ordering': ['-date'],
    }

    def __unicode__(self):
        user = User.objects.get(pk=self.user_id)
        return "<%s> %s - %s" % (user.username, self.category, self.content and self.content[:20])

    def canonical(self):
        """Return a plain-dict representation for API/templates.

        with_user_id falls back to user_id; feed_id and story_feed_id fall
        back to each other, since older rows may only have one set.
        """
        return {
            'date': self.date,
            'category': self.category,
            'title': self.title,
            'content': self.content,
            'user_id': self.user_id,
            'with_user_id': self.with_user_id or self.user_id,
            'feed_id': self.feed_id or self.story_feed_id,
            'story_feed_id': self.story_feed_id or self.feed_id,
            'content_id': self.content_id,
        }

    @classmethod
    def user(cls, user_id, page=1, limit=4, public=False, categories=None):
        """Return (activities, has_next_page) for one page of a user's stream.

        Fetches limit+1 rows to detect whether a next page exists. Public
        views exclude the private 'star' and 'feedsub' categories.
        """
        user_profile = Profile.objects.get(user=user_id)
        dashboard_date = user_profile.dashboard_date or user_profile.last_seen_on
        page = max(1, page)
        limit = int(limit)
        offset = (page - 1) * limit

        activities_db = cls.objects.filter(user_id=user_id)
        if categories:
            activities_db = activities_db.filter(category__in=categories)
        if public:
            activities_db = activities_db.filter(category__nin=['star', 'feedsub'])
        # Grab one extra row past the page to detect a next page.
        activities_db = activities_db[offset:offset + limit + 1]

        has_next_page = len(activities_db) > limit
        # FIX: the list is already offset; re-applying `offset` here broke
        # pagination for page > 1.
        activities_db = activities_db[:limit]

        with_user_ids = [a.with_user_id for a in activities_db if a.with_user_id]
        social_profiles = dict((p.user_id, p) for p in MSocialProfile.objects.filter(user_id__in=with_user_ids))
        activities = []
        for activity_db in activities_db:
            activity = activity_db.canonical()
            activity['date'] = activity_db.date
            activity['time_since'] = relative_timesince(activity_db.date)
            social_profile = social_profiles.get(activity_db.with_user_id)
            if social_profile:
                activity['photo_url'] = social_profile.profile_photo_url
            activity['is_new'] = activity_db.date > dashboard_date
            activity['with_user'] = social_profiles.get(activity_db.with_user_id or activity_db.user_id)
            activities.append(activity)

        return activities, has_next_page

    @classmethod
    def new_starred_story(cls, user_id, story_title, story_feed_id, story_id):
        """Record that the user starred a story (idempotent)."""
        cls.objects.get_or_create(user_id=user_id,
                                  category='star',
                                  story_feed_id=story_feed_id,
                                  content_id=story_id,
                                  defaults=dict(content=story_title))

    @classmethod
    def new_feed_subscription(cls, user_id, feed_id, feed_title):
        """Record that the user subscribed to a feed, purging duplicates."""
        params = {
            "user_id": user_id,
            "category": 'feedsub',
            "feed_id": feed_id,
        }
        try:
            cls.objects.get_or_create(defaults=dict(content=feed_title), **params)
        except cls.MultipleObjectsReturned:
            # Keep only the newest activity for this subscription.
            dupes = cls.objects.filter(**params).order_by('-date')
            logging.debug(" ---> ~FRDeleting dupe feed subscription activities. %s found." % dupes.count())
            for dupe in dupes[1:]:
                dupe.delete()

    @classmethod
    def new_follow(cls, follower_user_id, followee_user_id):
        """Record on the follower's stream that they followed someone."""
        params = {
            'user_id': follower_user_id,
            'with_user_id': followee_user_id,
            'category': 'follow',
        }
        try:
            cls.objects.get_or_create(**params)
        except cls.MultipleObjectsReturned:
            # Keep only the newest activity for this follow.
            dupes = cls.objects.filter(**params).order_by('-date')
            logging.debug(" ---> ~FRDeleting dupe follow activities. %s found." % dupes.count())
            for dupe in dupes[1:]:
                dupe.delete()

    @classmethod
    def new_comment_reply(cls, user_id, comment_user_id, reply_content, story_id, story_feed_id, story_title=None, original_message=None):
        """Record (or update after an edit) the user's reply to a comment.

        When original_message is given, the activity holding that prior
        content is updated in place; otherwise a new activity is created.
        """
        params = {
            'user_id': user_id,
            'with_user_id': comment_user_id,
            'category': 'comment_reply',
            'content': linkify(strip_tags(reply_content)),
            'feed_id': "social:%s" % comment_user_id,
            'story_feed_id': story_feed_id,
            'title': story_title,
            'content_id': story_id,
        }
        if original_message:
            # Find the pre-edit activity by its original content.
            params['content'] = original_message
            original = cls.objects.filter(**params).limit(1)
            if original:
                original = original[0]
                original.content = linkify(strip_tags(reply_content))
                original.save()
            else:
                original_message = None
        if not original_message:
            cls.objects.create(**params)

    @classmethod
    def remove_comment_reply(cls, user_id, comment_user_id, reply_content, story_id, story_feed_id):
        """Delete the activity recorded for a now-removed comment reply."""
        params = {
            'user_id': user_id,
            'with_user_id': comment_user_id,
            'category': 'comment_reply',
            'content': linkify(strip_tags(reply_content)),
            'feed_id': "social:%s" % comment_user_id,
            'story_feed_id': story_feed_id,
            'content_id': story_id,
        }
        original = cls.objects.filter(**params)
        original.delete()

    @classmethod
    def new_comment_like(cls, liking_user_id, comment_user_id, story_id, story_title, comments):
        """Record on the liker's stream that they liked a comment (idempotent)."""
        cls.objects.get_or_create(user_id=liking_user_id,
                                  with_user_id=comment_user_id,
                                  category="comment_like",
                                  feed_id="social:%s" % comment_user_id,
                                  content_id=story_id,
                                  defaults={
                                      "title": story_title,
                                      "content": comments,
                                  })

    @classmethod
    def new_shared_story(cls, user_id, source_user_id, story_title, comments, story_feed_id, story_id, share_date=None):
        """Record that the user shared a story, updating content/source/date
        on reshare and purging any duplicate rows."""
        data = {
            "user_id": user_id,
            "category": 'sharedstory',
            "feed_id": "social:%s" % user_id,
            "story_feed_id": story_feed_id,
            "content_id": story_id,
        }
        try:
            a, _ = cls.objects.get_or_create(defaults={
                'with_user_id': source_user_id,
                'title': story_title,
                'content': comments,
            }, **data)
        except cls.MultipleObjectsReturned:
            dupes = cls.objects.filter(**data)
            logging.debug(" ---> ~FRDeleting dupe shared story activities. %s found." % dupes.count())
            a = dupes[0]
            for dupe in dupes[1:]:
                dupe.delete()

        if a.content != comments:
            a.content = comments
            a.save()
        if source_user_id and a.with_user_id != source_user_id:
            # FIX: was `a.source_user_id = source_user_id` — not a declared
            # field, so the save never persisted the change.
            a.with_user_id = source_user_id
            a.save()
        if share_date:
            a.date = share_date
            a.save()

    @classmethod
    def remove_shared_story(cls, user_id, story_feed_id, story_id):
        """Delete the activity for an unshared story; no-op if absent."""
        try:
            a = cls.objects.get(user_id=user_id,
                                category='sharedstory',
                                feed_id="social:%s" % user_id,
                                story_feed_id=story_feed_id,
                                content_id=story_id)
        except cls.DoesNotExist:
            return
        a.delete()

    @classmethod
    def new_signup(cls, user_id):
        """Record the user's signup as their first activity (idempotent)."""
        cls.objects.get_or_create(user_id=user_id,
                                  with_user_id=user_id,
                                  category="signup")
2012-10-24 18:52:24 -07:00
class MFollowRequest(mongo.Document):
    """A pending request from follower_user_id to follow followee_user_id.

    The (follower, followee) pair is unique; add/remove are both
    idempotent.
    """
    follower_user_id = mongo.IntField(unique_with='followee_user_id')
    followee_user_id = mongo.IntField()
    date = mongo.DateTimeField(default=datetime.datetime.now)

    meta = {
        'collection': 'follow_request',
        'indexes': ['follower_user_id', 'followee_user_id'],
        'ordering': ['-date'],
        'allow_inheritance': False,
        'index_drop_dups': True,
    }

    @classmethod
    def add(cls, follower_user_id, followee_user_id):
        """Create the follow request if it does not already exist."""
        cls.objects.get_or_create(follower_user_id=follower_user_id,
                                  followee_user_id=followee_user_id)

    @classmethod
    def remove(cls, follower_user_id, followee_user_id):
        """Delete the follow request; a no-op when none exists."""
        pending = cls.objects.filter(follower_user_id=follower_user_id,
                                     followee_user_id=followee_user_id)
        pending.delete()