Fixing Twitter sharing by upgrading to API v1.1.
parent d26066389e
commit 85120d3f99
20 changed files with 552 additions and 350 deletions
@@ -99,7 +99,7 @@ def add_site_load_script(request, token):
         'token': token,
         'folders': (usf and usf.folders) or [],
         'user': profile and profile.user or {},
-        'user_profile': user_profile and json.encode(user_profile.to_json()) or {},
+        'user_profile': user_profile and json.encode(user_profile.canonical()) or {},
         'accept_image': accept_image,
         'error_image': error_image,
         'add_image': add_image,

@@ -56,7 +56,7 @@ class Profile(models.Model):
     def __unicode__(self):
         return "%s <%s> (Premium: %s)" % (self.user, self.user.email, self.is_premium)
 
-    def to_json(self):
+    def canonical(self):
         return {
             'is_premium': self.is_premium,
             'preferences': json.decode(self.preferences),

@@ -752,7 +752,7 @@ class PaymentHistory(models.Model):
     class Meta:
         ordering = ['-payment_date']
 
-    def to_json(self):
+    def canonical(self):
         return {
             'payment_date': self.payment_date.strftime('%Y-%m-%d'),
             'payment_amount': self.payment_amount,

@@ -272,6 +272,10 @@ class Feed(models.Model):
 
     @classmethod
     def schedule_feed_fetches_immediately(cls, feed_ids):
+        if settings.DEBUG:
+            logging.info(" ---> ~SN~FMSkipping the scheduling immediate fetch of ~SB%s~SN feeds (in DEBUG)..." %
+                         len(feed_ids))
+            return
         logging.info(" ---> ~SN~FMScheduling immediate fetch of ~SB%s~SN feeds..." %
                      len(feed_ids))
 
@@ -1973,7 +1977,7 @@ class DuplicateFeed(models.Model):
     def __unicode__(self):
         return "%s: %s / %s" % (self.feed, self.duplicate_address, self.duplicate_link)
 
-    def to_json(self):
+    def canonical(self):
         return {
             'duplicate_address': self.duplicate_address,
             'duplicate_link': self.duplicate_link,

@@ -215,9 +215,9 @@ class MSocialProfile(mongo.Document):
         profile_user_ids.extend(suggested_user_ids)
 
         # Sort by shared story count
-        profiles = MSocialProfile.profiles(profile_user_ids).order_by('-shared_stories_count')
-
-        return profiles[:RECOMMENDATIONS_LIMIT]
+        profiles = MSocialProfile.profiles(profile_user_ids).order_by('-shared_stories_count')[:RECOMMENDATIONS_LIMIT]
+
+        return profiles
 
     @property
     def username_slug(self):

@@ -252,7 +252,7 @@ class MSocialProfile(mongo.Document):
     @classmethod
     def profile(cls, user_id, include_follows=True):
         profile = cls.get_user(user_id)
-        return profile.to_json(include_follows=True)
+        return profile.canonical(include_follows=True)
 
     @classmethod
     def profiles(cls, user_ids):
@@ -284,7 +284,7 @@ class MSocialProfile(mongo.Document):
         return self.blurblog_title if self.blurblog_title else self.username + "'s blurblog"
 
     def feed(self):
-        params = self.to_json(compact=True)
+        params = self.canonical(compact=True)
         params.update({
             'feed_title': self.title,
             'page_url': reverse('load-social-page', kwargs={'user_id': self.user_id, 'username': self.username_slug}),

@@ -293,7 +293,7 @@ class MSocialProfile(mongo.Document):
         return params
 
     def page(self):
-        params = self.to_json(include_follows=True)
+        params = self.canonical(include_follows=True)
         params.update({
             'feed_title': self.title,
             'custom_css': self.custom_css,

@@ -326,8 +326,8 @@ class MSocialProfile(mongo.Document):
         domain = Site.objects.get_current().domain
         return 'http://' + domain + settings.MEDIA_URL + 'img/reader/default_profile_photo.png'
 
-    def to_json(self, compact=False, include_follows=False, common_follows_with_user=None,
-                include_settings=False, include_following_user=None):
+    def canonical(self, compact=False, include_follows=False, common_follows_with_user=None,
+                include_settings=False, include_following_user=None):
         domain = Site.objects.get_current().domain
         params = {
             'id': 'social:%s' % self.user_id,
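
For reference, a short sketch (not part of the diff) of how callers use the renamed method; `json` here is NewsBlur's utils.json_functions wrapper, as in the view code above:

    profile = MSocialProfile.get_user(42)            # 42 is a placeholder user id
    profile_dict = profile.canonical(compact=True)   # plain dict, formerly to_json()
    payload = json.encode(profile_dict)              # handled by json_encode() later in this diff
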
@@ -801,7 +801,7 @@ class MSocialSubscription(mongo.Document):
                 social_sub.calculate_feed_scores()
 
             # Combine subscription read counts with feed/user info
-            feed = dict(social_sub.to_json().items() + social_profiles[user_id].items())
+            feed = dict(social_sub.canonical().items() + social_profiles[user_id].items())
             social_feeds.append(feed)
 
         return social_feeds

@@ -841,7 +841,7 @@ class MSocialSubscription(mongo.Document):
 
         return feeds
 
-    def to_json(self):
+    def canonical(self):
         return {
             'user_id': self.user_id,
             'subscription_user_id': self.subscription_user_id,

@@ -1185,7 +1185,7 @@ class MCommentReply(mongo.EmbeddedDocument):
     email_sent = mongo.BooleanField(default=False)
     liking_users = mongo.ListField(mongo.IntField())
 
-    def to_json(self):
+    def canonical(self):
         reply = {
             'reply_id': self.reply_id,
             'user_id': self.user_id,

@@ -1260,7 +1260,7 @@ class MSharedStory(mongo.Document):
     def feed_guid_hash(self):
         return "%s:%s" % (self.story_feed_id or "0", self.guid_hash)
 
-    def to_json(self):
+    def canonical(self):
         return {
             "user_id": self.user_id,
             "shared_date": self.shared_date,

@@ -1587,7 +1587,7 @@ class MSharedStory(mongo.Document):
             'comments': self.comments,
             'shared_date': relative_timesince(self.shared_date),
             'date': self.shared_date,
-            'replies': [reply.to_json() for reply in self.replies],
+            'replies': [reply.canonical() for reply in self.replies],
             'liking_users': self.liking_users,
             'source_user_id': self.source_user_id,
         }

@@ -1602,7 +1602,7 @@ class MSharedStory(mongo.Document):
             if comment['source_user_id']:
                 profile_user_ids.add(comment['source_user_id'])
         profiles = MSocialProfile.objects.filter(user_id__in=list(profile_user_ids))
-        profiles = [profile.to_json(compact=True) for profile in profiles]
+        profiles = [profile.canonical(compact=True) for profile in profiles]
 
         return comment, profiles
 

@@ -1670,7 +1670,7 @@ class MSharedStory(mongo.Document):
                 profile_user_ids.add(story['source_user_id'])
 
         profiles = MSocialProfile.objects.filter(user_id__in=list(profile_user_ids))
-        profiles = [profile.to_json(compact=True) for profile in profiles]
+        profiles = [profile.canonical(compact=True) for profile in profiles]
 
         # Toss public comments by private profiles
         profiles_dict = dict((profile['user_id'], profile) for profile in profiles)

@@ -1811,7 +1811,7 @@ class MSharedStory(mongo.Document):
         if self.source_user_id:
             profile_user_ids.add(self.source_user_id)
         profiles = MSocialProfile.objects.filter(user_id__in=list(profile_user_ids))
-        profiles = [profile.to_json(compact=True) for profile in profiles]
+        profiles = [profile.canonical(compact=True) for profile in profiles]
         comment = MSharedStory.attach_users_to_comment(comment, profiles)
 
         for user_id in notify_user_ids:

@@ -1883,7 +1883,7 @@ class MSharedStory(mongo.Document):
         if self.source_user_id:
             profile_user_ids.add(self.source_user_id)
         profiles = MSocialProfile.objects.filter(user_id__in=list(profile_user_ids))
-        profiles = [profile.to_json(compact=True) for profile in profiles]
+        profiles = [profile.canonical(compact=True) for profile in profiles]
         comment = MSharedStory.attach_users_to_comment(comment, profiles)
 
         mute_url = "http://%s%s" % (
@@ -2005,7 +2005,7 @@ class MSocialServices(mongo.Document):
         user = User.objects.get(pk=self.user_id)
         return "%s (Twitter: %s, FB: %s, ADN: %s)" % (user.username, self.twitter_uid, self.facebook_uid, self.appdotnet_uid)
 
-    def to_json(self):
+    def canonical(self):
         user = User.objects.get(pk=self.user_id)
         return {
             'twitter': {

@@ -2052,7 +2052,7 @@ class MSocialServices(mongo.Document):
     @classmethod
     def profile(cls, user_id):
         profile = cls.get_user(user_id=user_id)
-        return profile.to_json()
+        return profile.canonical()
 
     def save_uploaded_photo(self, photo):
         photo_body = photo.read()
@@ -2096,20 +2096,26 @@ class MSocialServices(mongo.Document):
            self.syncing_twitter = False
            self.save()
            return

        twitter_user = api.me()
        self.twitter_picture_url = twitter_user.profile_image_url
        self.twitter_username = twitter_user.screen_name
        self.twitter_refreshed_date = datetime.datetime.utcnow()
        self.syncing_twitter = False
        self.save()

        try:
            friend_ids = list(unicode(friend.id) for friend in tweepy.Cursor(api.friends).items())
        except tweepy.TweepError, e:
            logging.user(user, "~BG~FMTwitter import ~SBfailed~SN: %s" % e)
            return

        friend_ids = list(unicode(friend.id) for friend in tweepy.Cursor(api.friends).items())
        if not friend_ids:
            logging.user(user, "~BG~FMTwitter import ~SBfailed~SN: no friend_ids.")
            self.syncing_twitter = False
            self.save()
            return

        twitter_user = api.me()
        self.twitter_picture_url = twitter_user.profile_image_url
        self.twitter_username = twitter_user.screen_name
        self.twitter_friend_ids = friend_ids
        self.twitter_refreshed_date = datetime.datetime.utcnow()
        self.syncing_twitter = False
        self.save()

        profile = MSocialProfile.get_user(self.user_id)
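
A standalone sketch of the friend import above, outside the model; the OAuth credentials are placeholders and the vendored tweepy is assumed to be importable as `tweepy`:

    import tweepy

    auth = tweepy.OAuthHandler('CONSUMER_KEY', 'CONSUMER_SECRET')
    auth.set_access_token('ACCESS_TOKEN', 'ACCESS_TOKEN_SECRET')
    api = tweepy.API(auth)

    try:
        # /friends/list is cursor-paginated under API v1.1
        friend_ids = [unicode(friend.id) for friend in tweepy.Cursor(api.friends).items()]
    except tweepy.TweepError, e:
        friend_ids = []   # e.g. revoked tokens or rate limiting
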
@ -2406,7 +2412,7 @@ class MInteraction(mongo.Document):
|
|||
return "<%s> %s on %s: %s - %s" % (user.username, with_user and with_user.username, self.date,
|
||||
self.category, self.content and self.content[:20])
|
||||
|
||||
def to_json(self):
|
||||
def canonical(self):
|
||||
return {
|
||||
'date': self.date,
|
||||
'category': self.category,
|
||||
|
@ -2449,7 +2455,7 @@ class MInteraction(mongo.Document):
|
|||
|
||||
interactions = []
|
||||
for interaction_db in interactions_db:
|
||||
interaction = interaction_db.to_json()
|
||||
interaction = interaction_db.canonical()
|
||||
social_profile = social_profiles.get(interaction_db.with_user_id)
|
||||
if social_profile:
|
||||
interaction['photo_url'] = social_profile.profile_photo_url
|
||||
|
@ -2637,7 +2643,7 @@ class MActivity(mongo.Document):
|
|||
user = User.objects.get(pk=self.user_id)
|
||||
return "<%s> %s - %s" % (user.username, self.category, self.content and self.content[:20])
|
||||
|
||||
def to_json(self):
|
||||
def canonical(self):
|
||||
return {
|
||||
'date': self.date,
|
||||
'category': self.category,
|
||||
|
@ -2671,7 +2677,7 @@ class MActivity(mongo.Document):
|
|||
social_profiles = dict((p.user_id, p) for p in MSocialProfile.objects.filter(user_id__in=with_user_ids))
|
||||
activities = []
|
||||
for activity_db in activities_db:
|
||||
activity = activity_db.to_json()
|
||||
activity = activity_db.canonical()
|
||||
activity['date'] = activity_db.date
|
||||
activity['time_since'] = relative_timesince(activity_db.date)
|
||||
social_profile = social_profiles.get(activity_db.with_user_id)
|
||||
|
|
|
@ -441,7 +441,7 @@ def load_social_page(request, user_id, username=None, **kwargs):
|
|||
'user_social_profile' : user_social_profile,
|
||||
'user_social_profile_page' : json.encode(user_social_profile and user_social_profile.page()),
|
||||
'user_social_services' : user_social_services,
|
||||
'user_social_services_page' : json.encode(user_social_services and user_social_services.to_json()),
|
||||
'user_social_services_page' : json.encode(user_social_services and user_social_services.canonical()),
|
||||
'user_following_social_profile': user_following_social_profile,
|
||||
'social_profile': social_profile,
|
||||
'feeds' : feeds,
|
||||
|
@ -827,7 +827,7 @@ def profile(request):
|
|||
if not user_profile.private or user_profile.is_followed_by_user(user.pk):
|
||||
activities, _ = MActivity.user(user_id, page=1, public=True, categories=categories)
|
||||
|
||||
user_profile = user_profile.to_json(include_follows=True, common_follows_with_user=user.pk)
|
||||
user_profile = user_profile.canonical(include_follows=True, common_follows_with_user=user.pk)
|
||||
profile_ids = set(user_profile['followers_youknow'] + user_profile['followers_everybody'] +
|
||||
user_profile['following_youknow'] + user_profile['following_everybody'])
|
||||
profiles = MSocialProfile.profiles(profile_ids)
|
||||
|
@ -841,7 +841,7 @@ def profile(request):
|
|||
'following_youknow': user_profile['following_youknow'],
|
||||
'following_everybody': user_profile['following_everybody'],
|
||||
'requested_follow': user_profile['requested_follow'],
|
||||
'profiles': dict([(p.user_id, p.to_json(compact=True)) for p in profiles]),
|
||||
'profiles': dict([(p.user_id, p.canonical(compact=True)) for p in profiles]),
|
||||
'activities': activities,
|
||||
}
|
||||
|
||||
|
@ -864,7 +864,7 @@ def load_user_profile(request):
|
|||
|
||||
return {
|
||||
'services': social_services,
|
||||
'user_profile': social_profile.to_json(include_follows=True, include_settings=True),
|
||||
'user_profile': social_profile.canonical(include_follows=True, include_settings=True),
|
||||
}
|
||||
|
||||
@ajax_login_required
|
||||
|
@ -889,7 +889,7 @@ def save_user_profile(request):
|
|||
|
||||
logging.user(request, "~BB~FRSaving social profile")
|
||||
|
||||
return dict(code=1, user_profile=profile.to_json(include_follows=True))
|
||||
return dict(code=1, user_profile=profile.canonical(include_follows=True))
|
||||
|
||||
|
||||
@ajax_login_required
|
||||
|
@ -909,7 +909,7 @@ def upload_avatar(request):
|
|||
"code": 1 if image_url else -1,
|
||||
"uploaded": image_url,
|
||||
"services": social_services,
|
||||
"user_profile": profile.to_json(include_follows=True),
|
||||
"user_profile": profile.canonical(include_follows=True),
|
||||
}
|
||||
|
||||
@ajax_login_required
|
||||
|
@ -925,7 +925,7 @@ def save_blurblog_settings(request):
|
|||
|
||||
logging.user(request, "~BB~FRSaving blurblog settings")
|
||||
|
||||
return dict(code=1, user_profile=profile.to_json(include_follows=True, include_settings=True))
|
||||
return dict(code=1, user_profile=profile.canonical(include_follows=True, include_settings=True))
|
||||
|
||||
@json.json_view
|
||||
def load_follow_requests(request):
|
||||
|
@ -933,7 +933,7 @@ def load_follow_requests(request):
|
|||
follow_request_users = MFollowRequest.objects.filter(followee_user_id=user.pk)
|
||||
follow_request_user_ids = [f.follower_user_id for f in follow_request_users]
|
||||
request_profiles = MSocialProfile.profiles(follow_request_user_ids)
|
||||
request_profiles = [p.to_json(include_following_user=user.pk) for p in request_profiles]
|
||||
request_profiles = [p.canonical(include_following_user=user.pk) for p in request_profiles]
|
||||
|
||||
if len(request_profiles):
|
||||
logging.user(request, "~BB~FRLoading Follow Requests (%s requests)" % (
|
||||
|
@ -952,9 +952,8 @@ def load_user_friends(request):
|
|||
following_profiles = MSocialProfile.profiles(social_profile.following_user_ids)
|
||||
follower_profiles = MSocialProfile.profiles(social_profile.follower_user_ids)
|
||||
recommended_users = social_profile.recommended_users()
|
||||
|
||||
following_profiles = [p.to_json(include_following_user=user.pk) for p in following_profiles]
|
||||
follower_profiles = [p.to_json(include_following_user=user.pk) for p in follower_profiles]
|
||||
following_profiles = [p.canonical(include_following_user=user.pk) for p in following_profiles]
|
||||
follower_profiles = [p.canonical(include_following_user=user.pk) for p in follower_profiles]
|
||||
|
||||
logging.user(request, "~BB~FRLoading Friends (%s following, %s followers)" % (
|
||||
social_profile.following_count,
|
||||
|
@ -964,7 +963,7 @@ def load_user_friends(request):
|
|||
return {
|
||||
'services': social_services,
|
||||
'autofollow': social_services.autofollow,
|
||||
'user_profile': social_profile.to_json(include_follows=True),
|
||||
'user_profile': social_profile.canonical(include_follows=True),
|
||||
'following_profiles': following_profiles,
|
||||
'follower_profiles': follower_profiles,
|
||||
'recommended_users': recommended_users,
|
||||
|
@ -1006,8 +1005,8 @@ def follow(request):
|
|||
logging.user(request, "~BB~FRFollowing: ~SB%s" % follow_profile.username)
|
||||
|
||||
return {
|
||||
"user_profile": profile.to_json(include_follows=True),
|
||||
"follow_profile": follow_profile.to_json(common_follows_with_user=request.user.pk),
|
||||
"user_profile": profile.canonical(include_follows=True),
|
||||
"follow_profile": follow_profile.canonical(common_follows_with_user=request.user.pk),
|
||||
"follow_subscription": follow_subscription,
|
||||
}
|
||||
|
||||
|
@ -1036,8 +1035,8 @@ def unfollow(request):
|
|||
logging.user(request, "~BB~FRUnfollowing: ~SB%s" % unfollow_profile.username)
|
||||
|
||||
return {
|
||||
'user_profile': profile.to_json(include_follows=True),
|
||||
'unfollow_profile': unfollow_profile.to_json(common_follows_with_user=request.user.pk),
|
||||
'user_profile': profile.canonical(include_follows=True),
|
||||
'unfollow_profile': unfollow_profile.canonical(common_follows_with_user=request.user.pk),
|
||||
}
|
||||
|
||||
|
||||
|
@ -1087,7 +1086,7 @@ def find_friends(request):
|
|||
if not profiles:
|
||||
profiles = MSocialProfile.objects.filter(location__icontains=query)[:limit]
|
||||
|
||||
profiles = [p.to_json(include_following_user=request.user.pk) for p in profiles]
|
||||
profiles = [p.canonical(include_following_user=request.user.pk) for p in profiles]
|
||||
profiles = sorted(profiles, key=lambda p: -1 * p['shared_stories_count'])
|
||||
|
||||
return dict(profiles=profiles)
|
||||
|
@ -1240,7 +1239,7 @@ def social_feed_trainer(request):
|
|||
user = get_user(request)
|
||||
|
||||
social_profile.count_stories()
|
||||
classifier = social_profile.to_json()
|
||||
classifier = social_profile.canonical()
|
||||
classifier['classifiers'] = get_classifiers_for_user(user, social_user_id=classifier['id'])
|
||||
classifier['num_subscribers'] = social_profile.follower_count
|
||||
classifier['feed_tags'] = []
|
||||
|
@ -1279,7 +1278,7 @@ def load_social_statistics(request, social_user_id, username=None):
|
|||
def load_social_settings(request, social_user_id, username=None):
|
||||
social_profile = MSocialProfile.get_user(social_user_id)
|
||||
|
||||
return social_profile.to_json()
|
||||
return social_profile.canonical()
|
||||
|
||||
@ajax_login_required
|
||||
def load_interactions(request):
|
||||
|
|
5 fabfile.py (vendored)
|
@ -99,6 +99,10 @@ def server():
|
|||
def do(split=False):
|
||||
server()
|
||||
droplets = do_roledefs(split=split)
|
||||
if split:
|
||||
for roledef, hosts in env.roledefs.items():
|
||||
if roledef not in droplets:
|
||||
droplets[roledef] = hosts
|
||||
return droplets
|
||||
|
||||
def app():
|
||||
|
@ -649,7 +653,6 @@ def setup_db_firewall():
|
|||
sudo('ufw allow 80')
|
||||
|
||||
# DigitalOcean
|
||||
pprint(env)
|
||||
for ip in set(env.roledefs['app'] +
|
||||
env.roledefs['db'] +
|
||||
env.roledefs['dev'] +
|
||||
|
|
|
@ -325,6 +325,7 @@ _.extend(NEWSBLUR.ReaderFriends.prototype, {
|
|||
},
|
||||
|
||||
post_connect: function(data) {
|
||||
data = data || {};
|
||||
console.log(["post_connect", data, this, this.connect_window_timer]);
|
||||
clearInterval(this.connect_window_timer);
|
||||
$('.NB-error', this.$modal).remove();
|
||||
|
|
|
@ -52,8 +52,9 @@
|
|||
var opener = parent && parent.window.opener || window.opener;
|
||||
var next = "{{ next|safe }}";
|
||||
if (next) {
|
||||
console.log(["Forwarding to next", next]);
|
||||
console.log(["Forwarding to next", next, opener]);
|
||||
setTimeout(function() {
|
||||
console.log(["Forwarding to next", next, opener]);
|
||||
window.location.href = next;
|
||||
}, 1000);
|
||||
} else if (opener && opener.NEWSBLUR) {
|
||||
|
|
|
@@ -8,7 +8,7 @@ from django.conf import settings
 from django.http import HttpResponse, HttpResponseForbidden, Http404
 from django.core.mail import mail_admins
 from django.db.models.query import QuerySet
-from mongoengine.queryset import QuerySet as MongoQuerySet
+from mongoengine.queryset.queryset import QuerySet as MongoQuerySet
 from bson.objectid import ObjectId
 import sys
 import datetime

@@ -37,10 +37,8 @@ def json_encode(data, *args, **kwargs):
         # Opps, we used to check if it is of type list, but that fails
         # i.e. in the case of django.newforms.utils.ErrorList, which extends
         # the type "list". Oh man, that was a dumb mistake!
-        if hasattr(data, 'to_json'):
-            ret = _any(data.to_json())
-        elif hasattr(data, 'canonical'):
-            ret = data.canonical()
+        if hasattr(data, 'canonical'):
+            ret = _any(data.canonical())
         elif isinstance(data, list):
            ret = _list(data)
         elif isinstance(data, set):

@@ -69,6 +67,8 @@ def json_encode(data, *args, **kwargs):
             ret = force_unicode(data)
         elif isinstance(data, datetime.datetime) or isinstance(data, datetime.date):
             ret = str(data)
+        elif hasattr(data, 'to_json'):
+            ret = data.to_json()
         else:
             ret = data
         return ret
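
A small sketch of the dispatch above; the module path is an assumption about where json_encode lives:

    import datetime
    from utils import json_functions as json   # assumed import path

    class Widget(object):
        def canonical(self):
            return {'id': 1, 'ok': True}

    payload = json.encode(Widget())                      # serialized through canonical()
    stamp = json.encode(datetime.datetime(2013, 4, 1))   # datetimes fall back to str()
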
|
|
@ -46,7 +46,6 @@ def create_streams_for_roles(role, role2, command=None, path=None):
|
|||
path = "/srv/newsblur/logs/newsblur.log"
|
||||
if not command:
|
||||
command = "tail -f"
|
||||
|
||||
for hostname in (hosts[role] + hosts[role2]):
|
||||
if isinstance(hostname, dict):
|
||||
address = hostname['address']
|
||||
|
|
4 vendor/tweepy/__init__.py (vendored)
|
@@ -5,11 +5,11 @@
 """
 Tweepy Twitter API library
 """
-__version__ = '1.8'
+__version__ = '2.0'
 __author__ = 'Joshua Roesslein'
 __license__ = 'MIT'
 
-from tweepy.models import Status, User, DirectMessage, Friendship, SavedSearch, SearchResult, ModelFactory
+from tweepy.models import Status, User, DirectMessage, Friendship, SavedSearch, SearchResults, ModelFactory, Category
 from tweepy.error import TweepError
 from tweepy.api import API
 from tweepy.cache import Cache, MemoryCache, FileCache
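
With the version bump the package exports SearchResults and Category in place of SearchResult; a quick sanity-check sketch (assumes vendor/ is on the Python path):

    import tweepy
    from tweepy.models import SearchResults, Category

    assert tweepy.__version__ == '2.0'
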
|
416 vendor/tweepy/api.py (vendored)
|
@@ -7,7 +7,7 @@ import mimetypes
 
 from tweepy.binder import bind_api
 from tweepy.error import TweepError
-from tweepy.parsers import ModelParser, RawParser
+from tweepy.parsers import ModelParser
 from tweepy.utils import list_to_csv
 
 

@@ -16,9 +16,9 @@ class API(object):
 
     def __init__(self, auth_handler=None,
             host='api.twitter.com', search_host='search.twitter.com',
-            cache=None, secure=False, api_root='/1', search_root='',
-            retry_count=0, retry_delay=0, retry_errors=None,
-            parser=None):
+            cache=None, secure=True, api_root='/1.1', search_root='',
+            retry_count=0, retry_delay=0, retry_errors=None, timeout=60,
+            parser=None, compression=False):
         self.auth = auth_handler
         self.host = host
         self.search_host = search_host
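
A minimal sketch of the new constructor defaults; the credential strings are placeholders:

    import tweepy

    auth = tweepy.OAuthHandler('CONSUMER_KEY', 'CONSUMER_SECRET')
    auth.set_access_token('ACCESS_TOKEN', 'ACCESS_TOKEN_SECRET')
    # secure=True and api_root='/1.1' are now the defaults, so this client talks to
    # https://api.twitter.com/1.1; the request timeout and gzip compression are opt-in.
    api = tweepy.API(auth, timeout=30, compression=True)
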
@ -26,31 +26,18 @@ class API(object):
|
|||
self.search_root = search_root
|
||||
self.cache = cache
|
||||
self.secure = secure
|
||||
self.compression = compression
|
||||
self.retry_count = retry_count
|
||||
self.retry_delay = retry_delay
|
||||
self.retry_errors = retry_errors
|
||||
self.timeout = timeout
|
||||
self.parser = parser or ModelParser()
|
||||
|
||||
""" statuses/public_timeline """
|
||||
public_timeline = bind_api(
|
||||
path = '/statuses/public_timeline.json',
|
||||
payload_type = 'status', payload_list = True,
|
||||
allowed_param = []
|
||||
)
|
||||
|
||||
""" statuses/home_timeline """
|
||||
home_timeline = bind_api(
|
||||
path = '/statuses/home_timeline.json',
|
||||
payload_type = 'status', payload_list = True,
|
||||
allowed_param = ['since_id', 'max_id', 'count', 'page'],
|
||||
require_auth = True
|
||||
)
|
||||
|
||||
""" statuses/friends_timeline """
|
||||
friends_timeline = bind_api(
|
||||
path = '/statuses/friends_timeline.json',
|
||||
payload_type = 'status', payload_list = True,
|
||||
allowed_param = ['since_id', 'max_id', 'count', 'page'],
|
||||
allowed_param = ['since_id', 'max_id', 'count'],
|
||||
require_auth = True
|
||||
)
|
||||
|
||||
|
@ -59,14 +46,14 @@ class API(object):
|
|||
path = '/statuses/user_timeline.json',
|
||||
payload_type = 'status', payload_list = True,
|
||||
allowed_param = ['id', 'user_id', 'screen_name', 'since_id',
|
||||
'max_id', 'count', 'page', 'include_rts']
|
||||
'max_id', 'count', 'include_rts']
|
||||
)
|
||||
|
||||
""" statuses/mentions """
|
||||
mentions = bind_api(
|
||||
path = '/statuses/mentions.json',
|
||||
mentions_timeline = bind_api(
|
||||
path = '/statuses/mentions_timeline.json',
|
||||
payload_type = 'status', payload_list = True,
|
||||
allowed_param = ['since_id', 'max_id', 'count', 'page'],
|
||||
allowed_param = ['since_id', 'max_id', 'count'],
|
||||
require_auth = True
|
||||
)
|
||||
|
||||
|
@ -84,7 +71,7 @@ class API(object):
|
|||
payload_type = 'relation', payload_list = True,
|
||||
allowed_param = ['id'],
|
||||
require_auth = False
|
||||
)
|
||||
)
|
||||
|
||||
"""/statuses/:id/retweeted_by/ids.format"""
|
||||
retweeted_by_ids = bind_api(
|
||||
|
@ -94,27 +81,11 @@ class API(object):
|
|||
require_auth = True
|
||||
)
|
||||
|
||||
""" statuses/retweeted_by_me """
|
||||
retweeted_by_me = bind_api(
|
||||
path = '/statuses/retweeted_by_me.json',
|
||||
payload_type = 'status', payload_list = True,
|
||||
allowed_param = ['since_id', 'max_id', 'count', 'page'],
|
||||
require_auth = True
|
||||
)
|
||||
|
||||
""" statuses/retweeted_to_me """
|
||||
retweeted_to_me = bind_api(
|
||||
path = '/statuses/retweeted_to_me.json',
|
||||
payload_type = 'status', payload_list = True,
|
||||
allowed_param = ['since_id', 'max_id', 'count', 'page'],
|
||||
require_auth = True
|
||||
)
|
||||
|
||||
""" statuses/retweets_of_me """
|
||||
retweets_of_me = bind_api(
|
||||
path = '/statuses/retweets_of_me.json',
|
||||
payload_type = 'status', payload_list = True,
|
||||
allowed_param = ['since_id', 'max_id', 'count', 'page'],
|
||||
allowed_param = ['since_id', 'max_id', 'count'],
|
||||
require_auth = True
|
||||
)
|
||||
|
||||
|
@ -136,8 +107,8 @@ class API(object):
|
|||
|
||||
""" statuses/destroy """
|
||||
destroy_status = bind_api(
|
||||
path = '/statuses/destroy.json',
|
||||
method = 'DELETE',
|
||||
path = '/statuses/destroy/{id}.json',
|
||||
method = 'POST',
|
||||
payload_type = 'status',
|
||||
allowed_param = ['id'],
|
||||
require_auth = True
|
||||
|
@ -167,6 +138,13 @@ class API(object):
|
|||
allowed_param = ['id', 'user_id', 'screen_name']
|
||||
)
|
||||
|
||||
''' statuses/oembed '''
|
||||
get_oembed = bind_api(
|
||||
path = '/statuses/oembed.json',
|
||||
payload_type = 'json',
|
||||
allowed_param = ['id', 'url', 'maxwidth', 'hide_media', 'omit_script', 'align', 'related', 'lang']
|
||||
)
|
||||
|
||||
""" Perform bulk look up of users from user ID or screenname """
|
||||
def lookup_users(self, user_ids=None, screen_names=None):
|
||||
return self._lookup_users(list_to_csv(user_ids), list_to_csv(screen_names))
|
||||
|
@ -175,7 +153,6 @@ class API(object):
|
|||
path = '/users/lookup.json',
|
||||
payload_type = 'user', payload_list = True,
|
||||
allowed_param = ['user_id', 'screen_name'],
|
||||
require_auth = True
|
||||
)
|
||||
|
||||
""" Get the authenticated user """
|
||||
|
@ -190,25 +167,35 @@ class API(object):
|
|||
allowed_param = ['q', 'per_page', 'page']
|
||||
)
|
||||
|
||||
""" statuses/friends """
|
||||
friends = bind_api(
|
||||
path = '/statuses/friends.json',
|
||||
""" users/suggestions/:slug """
|
||||
suggested_users = bind_api(
|
||||
path = '/users/suggestions/{slug}.json',
|
||||
payload_type = 'user', payload_list = True,
|
||||
allowed_param = ['id', 'user_id', 'screen_name', 'page', 'cursor']
|
||||
require_auth = True,
|
||||
allowed_param = ['slug', 'lang']
|
||||
)
|
||||
|
||||
""" statuses/followers """
|
||||
followers = bind_api(
|
||||
path = '/statuses/followers.json',
|
||||
payload_type = 'user', payload_list = True,
|
||||
allowed_param = ['id', 'user_id', 'screen_name', 'page', 'cursor']
|
||||
""" users/suggestions """
|
||||
suggested_categories = bind_api(
|
||||
path = '/users/suggestions.json',
|
||||
payload_type = 'category', payload_list = True,
|
||||
allowed_param = ['lang'],
|
||||
require_auth = True
|
||||
)
|
||||
|
||||
""" users/suggestions/:slug/members """
|
||||
suggested_users_tweets = bind_api(
|
||||
path = '/users/suggestions/{slug}/members.json',
|
||||
payload_type = 'status', payload_list = True,
|
||||
allowed_param = ['slug'],
|
||||
require_auth = True
|
||||
)
|
||||
|
||||
""" direct_messages """
|
||||
direct_messages = bind_api(
|
||||
path = '/direct_messages.json',
|
||||
payload_type = 'direct_message', payload_list = True,
|
||||
allowed_param = ['since_id', 'max_id', 'count', 'page'],
|
||||
allowed_param = ['since_id', 'max_id', 'count'],
|
||||
require_auth = True
|
||||
)
|
||||
|
||||
|
@ -264,13 +251,6 @@ class API(object):
|
|||
require_auth = True
|
||||
)
|
||||
|
||||
""" friendships/exists """
|
||||
exists_friendship = bind_api(
|
||||
path = '/friendships/exists.json',
|
||||
payload_type = 'json',
|
||||
allowed_param = ['user_a', 'user_b']
|
||||
)
|
||||
|
||||
""" friendships/show """
|
||||
show_friendship = bind_api(
|
||||
path = '/friendships/show.json',
|
||||
|
@ -279,6 +259,18 @@ class API(object):
|
|||
'target_id', 'target_screen_name']
|
||||
)
|
||||
|
||||
""" Perform bulk look up of friendships from user ID or screenname """
|
||||
def lookup_friendships(self, user_ids=None, screen_names=None):
|
||||
return self._lookup_friendships(list_to_csv(user_ids), list_to_csv(screen_names))
|
||||
|
||||
_lookup_friendships = bind_api(
|
||||
path = '/friendships/lookup.json',
|
||||
payload_type = 'relationship', payload_list = True,
|
||||
allowed_param = ['user_id', 'screen_name'],
|
||||
require_auth = True
|
||||
)
|
||||
|
||||
|
||||
""" friends/ids """
|
||||
friends_ids = bind_api(
|
||||
path = '/friends/ids.json',
|
||||
|
@ -286,6 +278,13 @@ class API(object):
|
|||
allowed_param = ['id', 'user_id', 'screen_name', 'cursor']
|
||||
)
|
||||
|
||||
""" friends/list """
|
||||
friends = bind_api(
|
||||
path = '/friends/list.json',
|
||||
payload_type = 'user', payload_list = True,
|
||||
allowed_param = ['id', 'user_id', 'screen_name', 'cursor']
|
||||
)
|
||||
|
||||
""" friendships/incoming """
|
||||
friendships_incoming = bind_api(
|
||||
path = '/friendships/incoming.json',
|
||||
|
@ -307,14 +306,22 @@ class API(object):
|
|||
allowed_param = ['id', 'user_id', 'screen_name', 'cursor']
|
||||
)
|
||||
|
||||
""" followers/list """
|
||||
followers = bind_api(
|
||||
path = '/followers/list.json',
|
||||
payload_type = 'user', payload_list = True,
|
||||
allowed_param = ['id', 'user_id', 'screen_name', 'cursor']
|
||||
)
|
||||
|
||||
""" account/verify_credentials """
|
||||
def verify_credentials(self):
|
||||
def verify_credentials(self, **kargs):
|
||||
try:
|
||||
return bind_api(
|
||||
path = '/account/verify_credentials.json',
|
||||
payload_type = 'user',
|
||||
require_auth = True
|
||||
)(self)
|
||||
require_auth = True,
|
||||
allowed_param = ['include_entities', 'skip_status'],
|
||||
)(self, **kargs)
|
||||
except TweepError, e:
|
||||
if e.response and e.response.status == 401:
|
||||
return False
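
A short usage sketch of the updated call, reusing an `api` client built as in the constructor example; skip_status is one of the new v1.1 parameters:

    me = api.verify_credentials(skip_status='true')   # returns False if Twitter answers 401
    if me is False:
        pass   # stored tokens are no longer valid; prompt the user to reconnect
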
|
||||
|
@ -322,8 +329,9 @@ class API(object):
|
|||
|
||||
""" account/rate_limit_status """
|
||||
rate_limit_status = bind_api(
|
||||
path = '/account/rate_limit_status.json',
|
||||
path = '/application/rate_limit_status.json',
|
||||
payload_type = 'json',
|
||||
allowed_param = ['resources'],
|
||||
use_cache = False
|
||||
)
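
An example of the relocated endpoint, again assuming an authenticated `api` client; the response shape follows Twitter's documented v1.1 resource families:

    limits = api.rate_limit_status(resources='friends,statuses')
    friends_bucket = limits['resources']['friends']['/friends/list']
    # friends_bucket -> {'limit': 15, 'remaining': ..., 'reset': ...}
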
|
||||
|
||||
|
@ -379,14 +387,14 @@ class API(object):
|
|||
|
||||
""" favorites """
|
||||
favorites = bind_api(
|
||||
path = '/favorites.json',
|
||||
path = '/favorites/list.json',
|
||||
payload_type = 'status', payload_list = True,
|
||||
allowed_param = ['id', 'page']
|
||||
allowed_param = ['screen_name', 'user_id', 'max_id', 'count', 'since_id', 'max_id']
|
||||
)
|
||||
|
||||
""" favorites/create """
|
||||
create_favorite = bind_api(
|
||||
path = '/favorites/create/{id}.json',
|
||||
path = '/favorites/create.json',
|
||||
method = 'POST',
|
||||
payload_type = 'status',
|
||||
allowed_param = ['id'],
|
||||
|
@ -395,31 +403,13 @@ class API(object):
|
|||
|
||||
""" favorites/destroy """
|
||||
destroy_favorite = bind_api(
|
||||
path = '/favorites/destroy/{id}.json',
|
||||
method = 'DELETE',
|
||||
path = '/favorites/destroy.json',
|
||||
method = 'POST',
|
||||
payload_type = 'status',
|
||||
allowed_param = ['id'],
|
||||
require_auth = True
|
||||
)
|
||||
|
||||
""" notifications/follow """
|
||||
enable_notifications = bind_api(
|
||||
path = '/notifications/follow.json',
|
||||
method = 'POST',
|
||||
payload_type = 'user',
|
||||
allowed_param = ['id', 'user_id', 'screen_name'],
|
||||
require_auth = True
|
||||
)
|
||||
|
||||
""" notifications/leave """
|
||||
disable_notifications = bind_api(
|
||||
path = '/notifications/leave.json',
|
||||
method = 'POST',
|
||||
payload_type = 'user',
|
||||
allowed_param = ['id', 'user_id', 'screen_name'],
|
||||
require_auth = True
|
||||
)
|
||||
|
||||
""" blocks/create """
|
||||
create_block = bind_api(
|
||||
path = '/blocks/create.json',
|
||||
|
@ -438,45 +428,33 @@ class API(object):
|
|||
require_auth = True
|
||||
)
|
||||
|
||||
""" blocks/exists """
|
||||
def exists_block(self, *args, **kargs):
|
||||
try:
|
||||
bind_api(
|
||||
path = '/blocks/exists.json',
|
||||
allowed_param = ['id', 'user_id', 'screen_name'],
|
||||
require_auth = True
|
||||
)(self, *args, **kargs)
|
||||
except TweepError:
|
||||
return False
|
||||
return True
|
||||
|
||||
""" blocks/blocking """
|
||||
blocks = bind_api(
|
||||
path = '/blocks/blocking.json',
|
||||
path = '/blocks/list.json',
|
||||
payload_type = 'user', payload_list = True,
|
||||
allowed_param = ['page'],
|
||||
allowed_param = ['cursor'],
|
||||
require_auth = True
|
||||
)
|
||||
|
||||
""" blocks/blocking/ids """
|
||||
blocks_ids = bind_api(
|
||||
path = '/blocks/blocking/ids.json',
|
||||
path = '/blocks/ids.json',
|
||||
payload_type = 'json',
|
||||
require_auth = True
|
||||
)
|
||||
|
||||
""" report_spam """
|
||||
report_spam = bind_api(
|
||||
path = '/report_spam.json',
|
||||
path = '/users/report_spam.json',
|
||||
method = 'POST',
|
||||
payload_type = 'user',
|
||||
allowed_param = ['id', 'user_id', 'screen_name'],
|
||||
allowed_param = ['user_id', 'screen_name'],
|
||||
require_auth = True
|
||||
)
|
||||
|
||||
""" saved_searches """
|
||||
saved_searches = bind_api(
|
||||
path = '/saved_searches.json',
|
||||
path = '/saved_searches/list.json',
|
||||
payload_type = 'saved_search', payload_list = True,
|
||||
require_auth = True
|
||||
)
|
||||
|
@ -501,7 +479,7 @@ class API(object):
|
|||
""" saved_searches/destroy """
|
||||
destroy_saved_search = bind_api(
|
||||
path = '/saved_searches/destroy/{id}.json',
|
||||
method = 'DELETE',
|
||||
method = 'POST',
|
||||
payload_type = 'saved_search',
|
||||
allowed_param = ['id'],
|
||||
require_auth = True
|
||||
|
@ -517,163 +495,142 @@ class API(object):
|
|||
return False
|
||||
return True
|
||||
|
||||
def create_list(self, *args, **kargs):
|
||||
return bind_api(
|
||||
path = '/%s/lists.json' % self.auth.get_username(),
|
||||
method = 'POST',
|
||||
payload_type = 'list',
|
||||
allowed_param = ['name', 'mode', 'description'],
|
||||
require_auth = True
|
||||
)(self, *args, **kargs)
|
||||
create_list = bind_api(
|
||||
path = '/lists/create.json',
|
||||
method = 'POST',
|
||||
payload_type = 'list',
|
||||
allowed_param = ['name', 'mode', 'description'],
|
||||
require_auth = True
|
||||
)
|
||||
|
||||
def destroy_list(self, slug):
|
||||
return bind_api(
|
||||
path = '/%s/lists/%s.json' % (self.auth.get_username(), slug),
|
||||
method = 'DELETE',
|
||||
payload_type = 'list',
|
||||
require_auth = True
|
||||
)(self)
|
||||
destroy_list = bind_api(
|
||||
path = '/lists/destroy.json',
|
||||
method = 'POST',
|
||||
payload_type = 'list',
|
||||
allowed_param = ['owner_screen_name', 'owner_id', 'list_id', 'slug'],
|
||||
require_auth = True
|
||||
)
|
||||
|
||||
def update_list(self, slug, *args, **kargs):
|
||||
return bind_api(
|
||||
path = '/%s/lists/%s.json' % (self.auth.get_username(), slug),
|
||||
method = 'POST',
|
||||
payload_type = 'list',
|
||||
allowed_param = ['name', 'mode', 'description'],
|
||||
require_auth = True
|
||||
)(self, *args, **kargs)
|
||||
update_list = bind_api(
|
||||
path = '/lists/update.json',
|
||||
method = 'POST',
|
||||
payload_type = 'list',
|
||||
allowed_param = ['list_id', 'slug', 'name', 'mode', 'description', 'owner_screen_name', 'owner_id'],
|
||||
require_auth = True
|
||||
)
|
||||
|
||||
lists = bind_api(
|
||||
path = '/{user}/lists.json',
|
||||
lists_all = bind_api(
|
||||
path = '/lists/list.json',
|
||||
payload_type = 'list', payload_list = True,
|
||||
allowed_param = ['user', 'cursor'],
|
||||
allowed_param = ['screen_name', 'user_id'],
|
||||
require_auth = True
|
||||
)
|
||||
|
||||
lists_memberships = bind_api(
|
||||
path = '/{user}/lists/memberships.json',
|
||||
path = '/lists/memberships.json',
|
||||
payload_type = 'list', payload_list = True,
|
||||
allowed_param = ['user', 'cursor'],
|
||||
allowed_param = ['screen_name', 'user_id', 'filter_to_owned_lists', 'cursor'],
|
||||
require_auth = True
|
||||
)
|
||||
|
||||
lists_subscriptions = bind_api(
|
||||
path = '/{user}/lists/subscriptions.json',
|
||||
path = '/lists/subscriptions.json',
|
||||
payload_type = 'list', payload_list = True,
|
||||
allowed_param = ['user', 'cursor'],
|
||||
allowed_param = ['screen_name', 'user_id', 'cursor'],
|
||||
require_auth = True
|
||||
)
|
||||
|
||||
list_timeline = bind_api(
|
||||
path = '/{owner}/lists/{slug}/statuses.json',
|
||||
path = '/lists/statuses.json',
|
||||
payload_type = 'status', payload_list = True,
|
||||
allowed_param = ['owner', 'slug', 'since_id', 'max_id', 'per_page', 'page']
|
||||
allowed_param = ['owner_screen_name', 'slug', 'owner_id', 'list_id', 'since_id', 'max_id', 'count']
|
||||
)
|
||||
|
||||
get_list = bind_api(
|
||||
path = '/{owner}/lists/{slug}.json',
|
||||
path = '/lists/show.json',
|
||||
payload_type = 'list',
|
||||
allowed_param = ['owner', 'slug']
|
||||
allowed_param = ['owner_screen_name', 'owner_id', 'slug', 'list_id']
|
||||
)
|
||||
|
||||
def add_list_member(self, slug, *args, **kargs):
|
||||
return bind_api(
|
||||
path = '/%s/%s/members.json' % (self.auth.get_username(), slug),
|
||||
method = 'POST',
|
||||
payload_type = 'list',
|
||||
allowed_param = ['id'],
|
||||
require_auth = True
|
||||
)(self, *args, **kargs)
|
||||
|
||||
def remove_list_member(self, slug, *args, **kargs):
|
||||
return bind_api(
|
||||
path = '/%s/%s/members.json' % (self.auth.get_username(), slug),
|
||||
method = 'DELETE',
|
||||
payload_type = 'list',
|
||||
allowed_param = ['id'],
|
||||
require_auth = True
|
||||
)(self, *args, **kargs)
|
||||
|
||||
list_members = bind_api(
|
||||
path = '/{owner}/{slug}/members.json',
|
||||
payload_type = 'user', payload_list = True,
|
||||
allowed_param = ['owner', 'slug', 'cursor']
|
||||
)
|
||||
|
||||
def is_list_member(self, owner, slug, user_id):
|
||||
try:
|
||||
return bind_api(
|
||||
path = '/%s/%s/members/%s.json' % (owner, slug, user_id),
|
||||
payload_type = 'user'
|
||||
)(self)
|
||||
except TweepError:
|
||||
return False
|
||||
|
||||
subscribe_list = bind_api(
|
||||
path = '/{owner}/{slug}/subscribers.json',
|
||||
add_list_member = bind_api(
|
||||
path = '/lists/members/create.json',
|
||||
method = 'POST',
|
||||
payload_type = 'list',
|
||||
allowed_param = ['owner', 'slug'],
|
||||
allowed_param = ['screen_name', 'user_id', 'owner_screen_name', 'owner_id', 'slug', 'list_id'],
|
||||
require_auth = True
|
||||
)
|
||||
|
||||
remove_list_member = bind_api(
|
||||
path = '/lists/members/destroy.json',
|
||||
method = 'POST',
|
||||
payload_type = 'list',
|
||||
allowed_param = ['screen_name', 'user_id', 'owner_screen_name', 'owner_id', 'slug', 'list_id'],
|
||||
require_auth = True
|
||||
)
|
||||
|
||||
list_members = bind_api(
|
||||
path = '/lists/members.json',
|
||||
payload_type = 'user', payload_list = True,
|
||||
allowed_param = ['owner_screen_name', 'slug', 'list_id', 'owner_id', 'cursor']
|
||||
)
|
||||
|
||||
show_list_member = bind_api(
|
||||
path = '/lists/members/show.json',
|
||||
payload_type = 'user',
|
||||
allowed_param = ['list_id', 'slug', 'user_id', 'screen_name', 'owner_screen_name', 'owner_id']
|
||||
)
|
||||
|
||||
subscribe_list = bind_api(
|
||||
path = '/lists/subscribers/create.json',
|
||||
method = 'POST',
|
||||
payload_type = 'list',
|
||||
allowed_param = ['owner_screen_name', 'slug', 'owner_id', 'list_id'],
|
||||
require_auth = True
|
||||
)
|
||||
|
||||
unsubscribe_list = bind_api(
|
||||
path = '/{owner}/{slug}/subscribers.json',
|
||||
method = 'DELETE',
|
||||
path = '/lists/subscribers/destroy.json',
|
||||
method = 'POST',
|
||||
payload_type = 'list',
|
||||
allowed_param = ['owner', 'slug'],
|
||||
allowed_param = ['owner_screen_name', 'slug', 'owner_id', 'list_id'],
|
||||
require_auth = True
|
||||
)
|
||||
|
||||
list_subscribers = bind_api(
|
||||
path = '/{owner}/{slug}/subscribers.json',
|
||||
path = '/lists/subscribers.json',
|
||||
payload_type = 'user', payload_list = True,
|
||||
allowed_param = ['owner', 'slug', 'cursor']
|
||||
allowed_param = ['owner_screen_name', 'slug', 'owner_id', 'list_id', 'cursor']
|
||||
)
|
||||
|
||||
def is_subscribed_list(self, owner, slug, user_id):
|
||||
try:
|
||||
return bind_api(
|
||||
path = '/%s/%s/subscribers/%s.json' % (owner, slug, user_id),
|
||||
payload_type = 'user'
|
||||
)(self)
|
||||
except TweepError:
|
||||
return False
|
||||
show_list_subscriber = bind_api(
|
||||
path = '/lists/subscribers/show.json',
|
||||
payload_type = 'user',
|
||||
allowed_param = ['owner_screen_name', 'slug', 'screen_name', 'owner_id', 'list_id', 'user_id']
|
||||
)
|
||||
|
||||
""" trends/available """
|
||||
trends_available = bind_api(
|
||||
path = '/trends/available.json',
|
||||
payload_type = 'json'
|
||||
)
|
||||
|
||||
trends_place = bind_api(
|
||||
path = '/trends/place.json',
|
||||
payload_type = 'json',
|
||||
allowed_param = ['id', 'exclude']
|
||||
)
|
||||
|
||||
trends_closest = bind_api(
|
||||
path = '/trends/closest.json',
|
||||
payload_type = 'json',
|
||||
allowed_param = ['lat', 'long']
|
||||
)
|
||||
|
||||
""" trends/location """
|
||||
trends_location = bind_api(
|
||||
path = '/trends/{woeid}.json',
|
||||
payload_type = 'json',
|
||||
allowed_param = ['woeid']
|
||||
)
|
||||
|
||||
""" search """
|
||||
search = bind_api(
|
||||
search_api = True,
|
||||
path = '/search.json',
|
||||
payload_type = 'search_result', payload_list = True,
|
||||
allowed_param = ['q', 'lang', 'locale', 'rpp', 'page', 'since_id', 'geocode', 'show_user', 'max_id', 'since', 'until', 'result_type']
|
||||
)
|
||||
search.pagination_mode = 'page'
|
||||
|
||||
""" trends """
|
||||
trends = bind_api(
|
||||
path = '/trends.json',
|
||||
payload_type = 'json'
|
||||
)
|
||||
|
||||
""" trends/current """
|
||||
trends_current = bind_api(
|
||||
path = '/trends/current.json',
|
||||
payload_type = 'json',
|
||||
allowed_param = ['exclude']
|
||||
path = '/search/tweets.json',
|
||||
payload_type = 'search_results',
|
||||
allowed_param = ['q', 'lang', 'locale', 'since_id', 'geocode', 'show_user', 'max_id', 'since', 'until', 'result_type']
|
||||
)
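
A quick sketch of the reworked search call (query string is a placeholder, `api` as in the earlier example):

    results = api.search(q='newsblur', result_type='recent')
    status_ids = [status.id for status in results]   # SearchResults is a list of Status models
    newest_id = results.max_id                       # search metadata rides on the result set
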
|
||||
|
||||
""" trends/daily """
|
||||
|
@ -693,24 +650,31 @@ class API(object):
|
|||
""" geo/reverse_geocode """
|
||||
reverse_geocode = bind_api(
|
||||
path = '/geo/reverse_geocode.json',
|
||||
payload_type = 'json',
|
||||
payload_type = 'place', payload_list = True,
|
||||
allowed_param = ['lat', 'long', 'accuracy', 'granularity', 'max_results']
|
||||
)
|
||||
|
||||
""" geo/nearby_places """
|
||||
nearby_places = bind_api(
|
||||
path = '/geo/nearby_places.json',
|
||||
payload_type = 'json',
|
||||
allowed_param = ['lat', 'long', 'ip', 'accuracy', 'granularity', 'max_results']
|
||||
)
|
||||
|
||||
""" geo/id """
|
||||
geo_id = bind_api(
|
||||
path = '/geo/id/{id}.json',
|
||||
payload_type = 'json',
|
||||
payload_type = 'place',
|
||||
allowed_param = ['id']
|
||||
)
|
||||
|
||||
""" geo/search """
|
||||
geo_search = bind_api(
|
||||
path = '/geo/search.json',
|
||||
payload_type = 'place', payload_list = True,
|
||||
allowed_param = ['lat', 'long', 'query', 'ip', 'granularity', 'accuracy', 'max_results', 'contained_within']
|
||||
)
|
||||
|
||||
""" geo/similar_places """
|
||||
geo_similar_places = bind_api(
|
||||
path = '/geo/similar_places.json',
|
||||
payload_type = 'place', payload_list = True,
|
||||
allowed_param = ['lat', 'long', 'name', 'contained_within']
|
||||
)
|
||||
|
||||
""" Internal use only """
|
||||
@staticmethod
|
||||
def _pack_image(filename, max_size):
|
||||
|
@ -719,7 +683,7 @@ class API(object):
|
|||
try:
|
||||
if os.path.getsize(filename) > (max_size * 1024):
|
||||
raise TweepError('File is too big, must be less than 700kb.')
|
||||
except os.error, e:
|
||||
except os.error:
|
||||
raise TweepError('Unable to access file')
|
||||
|
||||
# image must be gif, jpeg, or png
|
||||
|
|
6 vendor/tweepy/auth.py (vendored)
|
@ -138,9 +138,9 @@ class OAuthHandler(AuthHandler):
|
|||
oauth_consumer=self._consumer,
|
||||
http_method='POST', http_url=url,
|
||||
parameters = {
|
||||
'x_auth_mode': 'client_auth',
|
||||
'x_auth_username': username,
|
||||
'x_auth_password': password
|
||||
'x_auth_mode': 'client_auth',
|
||||
'x_auth_username': username,
|
||||
'x_auth_password': password
|
||||
}
|
||||
)
|
||||
request.sign_request(self._sigmethod, self._consumer, None)
|
||||
|
|
25 vendor/tweepy/binder.py (vendored)
|
@ -6,6 +6,8 @@ import httplib
|
|||
import urllib
|
||||
import time
|
||||
import re
|
||||
from StringIO import StringIO
|
||||
import gzip
|
||||
|
||||
from tweepy.error import TweepError
|
||||
from tweepy.utils import convert_to_utf8_str
|
||||
|
@ -63,7 +65,7 @@ def bind_api(**config):
|
|||
# Manually set Host header to fix an issue in python 2.5
|
||||
# or older where Host is set including the 443 port.
|
||||
# This causes Twitter to issue 301 redirect.
|
||||
# See Issue http://github.com/joshthecoder/tweepy/issues/#issue/12
|
||||
# See Issue https://github.com/tweepy/tweepy/issues/12
|
||||
self.headers['Host'] = self.host
|
||||
|
||||
def build_parameters(self, args, kargs):
|
||||
|
@ -128,11 +130,10 @@ def bind_api(**config):
|
|||
retries_performed = 0
|
||||
while retries_performed < self.retry_count + 1:
|
||||
# Open connection
|
||||
# FIXME: add timeout
|
||||
if self.api.secure:
|
||||
conn = httplib.HTTPSConnection(self.host)
|
||||
conn = httplib.HTTPSConnection(self.host, timeout=self.api.timeout)
|
||||
else:
|
||||
conn = httplib.HTTPConnection(self.host)
|
||||
conn = httplib.HTTPConnection(self.host, timeout=self.api.timeout)
|
||||
|
||||
# Apply authentication
|
||||
if self.api.auth:
|
||||
|
@ -141,6 +142,10 @@ def bind_api(**config):
|
|||
self.method, self.headers, self.parameters
|
||||
)
|
||||
|
||||
# Request compression if configured
|
||||
if self.api.compression:
|
||||
self.headers['Accept-encoding'] = 'gzip'
|
||||
|
||||
# Execute request
|
||||
try:
|
||||
conn.request(self.method, url, headers=self.headers, body=self.post_data)
|
||||
|
@ -168,7 +173,14 @@ def bind_api(**config):
|
|||
raise TweepError(error_msg, resp)
|
||||
|
||||
# Parse the response payload
|
||||
result = self.api.parser.parse(self, resp.read())
|
||||
body = resp.read()
|
||||
if resp.getheader('Content-Encoding', '') == 'gzip':
|
||||
try:
|
||||
zipper = gzip.GzipFile(fileobj=StringIO(body))
|
||||
body = zipper.read()
|
||||
except Exception, e:
|
||||
raise TweepError('Failed to decompress data: %s' % e)
|
||||
result = self.api.parser.parse(self, body)
|
||||
|
||||
conn.close()
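
Compression is opt-in on the client; a minimal sketch of turning it on so the gzip branch above is exercised (auth handler as in the earlier examples):

    import tweepy

    api = tweepy.API(auth, compression=True)   # adds Accept-encoding: gzip to requests
    timeline = api.home_timeline(count=50)     # body is transparently decompressed before parsing
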
|
||||
|
||||
|
@ -188,6 +200,9 @@ def bind_api(**config):
|
|||
# Set pagination mode
|
||||
if 'cursor' in APIMethod.allowed_param:
|
||||
_call.pagination_mode = 'cursor'
|
||||
elif 'max_id' in APIMethod.allowed_param and \
|
||||
'since_id' in APIMethod.allowed_param:
|
||||
_call.pagination_mode = 'id'
|
||||
elif 'page' in APIMethod.allowed_param:
|
||||
_call.pagination_mode = 'page'
|
||||
|
||||
|
|
43 vendor/tweepy/cache.py (vendored)
|
@ -3,6 +3,7 @@
|
|||
# See LICENSE for details.
|
||||
|
||||
import time
|
||||
import datetime
|
||||
import threading
|
||||
import os
|
||||
|
||||
|
@ -289,7 +290,7 @@ class MemCacheCache(Cache):
|
|||
key: which entry to get
|
||||
timeout: override timeout with this value [optional]. DOES NOT WORK HERE
|
||||
"""
|
||||
return self.client.get(key, key)
|
||||
return self.client.get(key)
|
||||
|
||||
def count(self):
|
||||
"""Get count of entries currently stored in cache. RETURN 0"""
|
||||
|
@ -381,3 +382,43 @@ class RedisCache(Cache):
|
|||
keys = self.client.smembers(self.keys_container)
|
||||
for key in keys:
|
||||
self.delete_entry(key)
|
||||
|
||||
|
||||
class MongodbCache(Cache):
|
||||
"""A simple pickle-based MongoDB cache sytem."""
|
||||
|
||||
def __init__(self, db, timeout=3600, collection='tweepy_cache'):
|
||||
"""Should receive a "database" cursor from pymongo."""
|
||||
Cache.__init__(self, timeout)
|
||||
self.timeout = timeout
|
||||
self.col = db[collection]
|
||||
self.col.create_index('created', expireAfterSeconds=timeout)
|
||||
|
||||
def store(self, key, value):
|
||||
from bson.binary import Binary
|
||||
|
||||
now = datetime.datetime.utcnow()
|
||||
blob = Binary(pickle.dumps(value))
|
||||
|
||||
self.col.insert({'created': now, '_id': key, 'value': blob})
|
||||
|
||||
def get(self, key, timeout=None):
|
||||
if timeout:
|
||||
raise NotImplementedError
|
||||
obj = self.col.find_one({'_id': key})
|
||||
if obj:
|
||||
return pickle.loads(obj['value'])
|
||||
|
||||
def count(self):
|
||||
return self.col.find({}).count()
|
||||
|
||||
def delete_entry(self, key):
|
||||
return self.col.remove({'_id': key})
|
||||
|
||||
def cleanup(self):
|
||||
"""MongoDB will automatically clear expired keys."""
|
||||
pass
|
||||
|
||||
def flush(self):
|
||||
self.col.drop()
|
||||
self.col.create_index('created', expireAfterSeconds=self.timeout)
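
A hedged wiring example for the new cache, assuming a reachable local MongoDB and the auth handler from the earlier sketches:

    import tweepy
    from pymongo import MongoClient
    from tweepy.cache import MongodbCache

    db = MongoClient()['newsblur']   # placeholder database name
    api = tweepy.API(auth, cache=MongodbCache(db, timeout=600))
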
|
||||
|
|
35 vendor/tweepy/cursor.py (vendored)
|
@ -11,8 +11,12 @@ class Cursor(object):
|
|||
if hasattr(method, 'pagination_mode'):
|
||||
if method.pagination_mode == 'cursor':
|
||||
self.iterator = CursorIterator(method, args, kargs)
|
||||
else:
|
||||
elif method.pagination_mode == 'id':
|
||||
self.iterator = IdIterator(method, args, kargs)
|
||||
elif method.pagination_mode == 'page':
|
||||
self.iterator = PageIterator(method, args, kargs)
|
||||
else:
|
||||
raise TweepError('Invalid pagination mode.')
|
||||
else:
|
||||
raise TweepError('This method does not perform pagination')
|
||||
|
||||
|
@ -74,6 +78,35 @@ class CursorIterator(BaseIterator):
|
|||
self.count -= 1
|
||||
return data
|
||||
|
||||
class IdIterator(BaseIterator):
|
||||
|
||||
def __init__(self, method, args, kargs):
|
||||
BaseIterator.__init__(self, method, args, kargs)
|
||||
self.max_id = kargs.get('max_id')
|
||||
self.since_id = kargs.get('since_id')
|
||||
|
||||
def next(self):
|
||||
"""Fetch a set of items with IDs less than current set."""
|
||||
# max_id is inclusive so decrement by one
|
||||
# to avoid requesting duplicate items.
|
||||
max_id = self.since_id - 1 if self.max_id else None
|
||||
data = self.method(max_id = max_id, *self.args, **self.kargs)
|
||||
if len(data) == 0:
|
||||
raise StopIteration
|
||||
self.max_id = data.max_id
|
||||
self.since_id = data.since_id
|
||||
return data
|
||||
|
||||
def prev(self):
|
||||
"""Fetch a set of items with IDs greater than current set."""
|
||||
since_id = self.max_id
|
||||
data = self.method(since_id = since_id, *self.args, **self.kargs)
|
||||
if len(data) == 0:
|
||||
raise StopIteration
|
||||
self.max_id = data.max_id
|
||||
self.since_id = data.since_id
|
||||
return data
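
A usage sketch of id-based pagination with the iterator above (screen name and counts are placeholders, `api` as before):

    import tweepy

    pages = tweepy.Cursor(api.user_timeline, screen_name='example', count=200).pages(3)
    for page in pages:
        newest, oldest = page.since_id, page.max_id   # ResultSet bookkeeping the IdIterator relies on
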
|
||||
|
||||
class PageIterator(BaseIterator):
|
||||
|
||||
def __init__(self, method, args, kargs):
|
||||
|
|
1 vendor/tweepy/error.py (vendored)
|
@@ -8,6 +8,7 @@ class TweepError(Exception):
     def __init__(self, reason, response=None):
         self.reason = unicode(reason)
+        self.response = response
         Exception.__init__(self, reason)
 
     def __str__(self):
         return self.reason
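
A hedged sketch of what the stored response enables for callers, mirroring the verify_credentials() handling earlier in this diff (`api` as in the earlier examples):

    import tweepy

    try:
        api.me()
    except tweepy.TweepError, e:
        if e.response is not None and e.response.status == 401:
            pass   # tokens were revoked; fall back to reconnecting the account
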
|
154 vendor/tweepy/models.py (vendored)
|
@ -9,7 +9,27 @@ from tweepy.utils import parse_datetime, parse_html_value, parse_a_href, \
|
|||
|
||||
class ResultSet(list):
|
||||
"""A list like object that holds results from a Twitter API query."""
|
||||
def __init__(self, max_id=None, since_id=None):
|
||||
super(ResultSet, self).__init__()
|
||||
self._max_id = max_id
|
||||
self._since_id = since_id
|
||||
|
||||
@property
|
||||
def max_id(self):
|
||||
if self._max_id:
|
||||
return self._max_id
|
||||
ids = self.ids()
|
||||
return max(ids) if ids else None
|
||||
|
||||
@property
|
||||
def since_id(self):
|
||||
if self._since_id:
|
||||
return self._since_id
|
||||
ids = self.ids()
|
||||
return min(ids) if ids else None
|
||||
|
||||
def ids(self):
|
||||
return [item.id for item in self if hasattr(item, 'id')]
|
||||
|
||||
class Model(object):
|
||||
|
||||
|
@@ -62,6 +82,11 @@ class Status(Model):
                    setattr(status, 'source_url', None)
            elif k == 'retweeted_status':
                setattr(status, k, Status.parse(api, v))
            elif k == 'place':
                if v is not None:
                    setattr(status, k, Place.parse(api, v))
                else:
                    setattr(status, k, None)
            else:
                setattr(status, k, v)
        return status

@@ -178,6 +203,16 @@ class Friendship(Model):
        return source, target


class Category(Model):

    @classmethod
    def parse(cls, api, json):
        category = cls(api)
        for k, v in json.items():
            setattr(category, k, v)
        return category


class SavedSearch(Model):

    @classmethod

@@ -194,34 +229,18 @@ class SavedSearch(Model):
        return self._api.destroy_saved_search(self.id)


class SearchResult(Model):
class SearchResults(ResultSet):

    @classmethod
    def parse(cls, api, json):
        result = cls()
        for k, v in json.items():
            if k == 'created_at':
                setattr(result, k, parse_search_datetime(v))
            elif k == 'source':
                setattr(result, k, parse_html_value(unescape_html(v)))
            else:
                setattr(result, k, v)
        return result
        metadata = json['search_metadata']
        results = SearchResults(metadata.get('max_id'), metadata.get('since_id'))
        results.refresh_url = metadata.get('refresh_url')
        results.completed_in = metadata.get('completed_in')
        results.query = metadata.get('query')

    @classmethod
    def parse_list(cls, api, json_list, result_set=None):
        results = ResultSet()
        results.max_id = json_list.get('max_id')
        results.since_id = json_list.get('since_id')
        results.refresh_url = json_list.get('refresh_url')
        results.next_page = json_list.get('next_page')
        results.results_per_page = json_list.get('results_per_page')
        results.page = json_list.get('page')
        results.completed_in = json_list.get('completed_in')
        results.query = json_list.get('query')

        for obj in json_list['results']:
            results.append(cls.parse(api, obj))
        for status in json['statuses']:
            results.append(Status.parse(api, status))
        return results
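The rewritten parser reflects the v1.1 search payload, which wraps the tweets in 'statuses' and moves paging data into 'search_metadata'. An illustrative payload, trimmed to the keys the new SearchResults.parse reads (all values invented):

    payload = {
        'statuses': [
            {'id': 250075927172759552, 'text': '...'},
        ],
        'search_metadata': {
            'max_id': 250126199840518145,
            'since_id': 250075927172759552,
            'refresh_url': '?since_id=250126199840518145&q=newsblur',
            'completed_in': 0.035,
            'query': 'newsblur',
        },
    }

    # SearchResults.parse(api, payload) returns a list-like SearchResults whose
    # max_id/since_id come from search_metadata rather than from the statuses.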
@@ -233,6 +252,8 @@ class List(Model):
        for k,v in json.items():
            if k == 'user':
                setattr(lst, k, User.parse(api, v))
            elif k == 'created_at':
                setattr(lst, k, parse_datetime(v))
            else:
                setattr(lst, k, v)
        return lst

@@ -240,7 +261,9 @@ class List(Model):
    @classmethod
    def parse_list(cls, api, json_list, result_set=None):
        results = ResultSet()
        for obj in json_list['lists']:
        if isinstance(json_list, dict):
            json_list = json_list['lists']
        for obj in json_list:
            results.append(cls.parse(api, obj))
        return results

@@ -290,6 +313,17 @@ class Relation(Model):
            setattr(result, k, v)
        return result

class Relationship(Model):
    @classmethod
    def parse(cls, api, json):
        result = cls(api)
        for k,v in json.items():
            if k == 'connections':
                setattr(result, 'is_following', 'following' in v)
                setattr(result, 'is_followed_by', 'followed_by' in v)
            else:
                setattr(result, k, v)
        return result

class JSONModel(Model):

@@ -308,6 +342,70 @@ class IDModel(Model):
        return json['ids']


class BoundingBox(Model):

    @classmethod
    def parse(cls, api, json):
        result = cls(api)
        if json is not None:
            for k, v in json.items():
                setattr(result, k, v)
        return result

    def origin(self):
        """
        Return longitude, latitude of southwest (bottom, left) corner of
        bounding box, as a tuple.

        This assumes that bounding box is always a rectangle, which
        appears to be the case at present.
        """
        return tuple(self.coordinates[0][0])

    def corner(self):
        """
        Return longitude, latitude of northeast (top, right) corner of
        bounding box, as a tuple.

        This assumes that bounding box is always a rectangle, which
        appears to be the case at present.
        """
        return tuple(self.coordinates[0][2])


class Place(Model):

    @classmethod
    def parse(cls, api, json):
        place = cls(api)
        for k, v in json.items():
            if k == 'bounding_box':
                # bounding_box value may be null (None.)
                # Example: "United States" (id=96683cc9126741d1)
                if v is not None:
                    t = BoundingBox.parse(api, v)
                else:
                    t = v
                setattr(place, k, t)
            elif k == 'contained_within':
                # contained_within is a list of Places.
                setattr(place, k, Place.parse_list(api, v))
            else:
                setattr(place, k, v)
        return place

    @classmethod
    def parse_list(cls, api, json_list):
        if isinstance(json_list, list):
            item_list = json_list
        else:
            item_list = json_list['result']['places']

        results = ResultSet()
        for obj in item_list:
            results.append(cls.parse(api, obj))
        return results

class ModelFactory(object):
    """
    Used by parsers for creating instances

@@ -320,10 +418,14 @@ class ModelFactory(object):
    direct_message = DirectMessage
    friendship = Friendship
    saved_search = SavedSearch
    search_result = SearchResult
    search_results = SearchResults
    category = Category
    list = List
    relation = Relation
    relationship = Relationship

    json = JSONModel
    ids = IDModel
    place = Place
    bounding_box = BoundingBox
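For reference, a small sketch of the new geo models in isolation, assuming the vendored module is importable as `tweepy.models`; the place payload is invented but follows the corner ordering the docstrings above rely on (index 0 southwest, index 2 northeast):

    from tweepy.models import Place

    place_json = {
        'name': 'San Francisco',
        'bounding_box': {
            'type': 'Polygon',
            'coordinates': [[
                [-122.51, 37.70],    # southwest
                [-122.35, 37.70],
                [-122.35, 37.83],    # northeast
                [-122.51, 37.83],
            ]],
        },
    }

    place = Place.parse(None, place_json)        # no API handle needed just to parse
    print place.bounding_box.origin()            # (-122.51, 37.7)
    print place.bounding_box.corner()            # (-122.35, 37.83)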
vendor/tweepy/streaming.py (vendored, 78 changed lines)

@@ -6,16 +6,15 @@ import httplib
from socket import timeout
from threading import Thread
from time import sleep
import urllib

from tweepy.models import Status
from tweepy.api import API
from tweepy.error import TweepError

from tweepy.utils import import_simplejson
from tweepy.utils import import_simplejson, urlencode_noplus
json = import_simplejson()

STREAM_VERSION = 1
STREAM_VERSION = '1.1'


class StreamListener(object):

@@ -23,6 +22,15 @@ class StreamListener(object):
    def __init__(self, api=None):
        self.api = api or API()

    def on_connect(self):
        """Called once connected to streaming server.

        This will be invoked once a successful response
        is received from the server. Allows the listener
        to perform some work prior to entering the read loop.
        """
        pass

    def on_data(self, data):
        """Called when raw data is received from connection.

@@ -115,6 +123,7 @@ class Stream(object):
                    sleep(self.retry_time)
                else:
                    error_counter = 0
                    self.listener.on_connect()
                    self._read_loop(resp)
            except timeout:
                if self.listener.on_timeout() == False:

@@ -135,23 +144,34 @@ class Stream(object):
        if exception:
            raise

    def _data(self, data):
        if self.listener.on_data(data) is False:
            self.running = False

    def _read_loop(self, resp):
        while self.running:
            if resp.isclosed():
                break

            # read length
            data = ''
            while True:
        while self.running and not resp.isclosed():

            # Note: keep-alive newlines might be inserted before each length value.
            # read until we get a digit...
            c = '\n'
            while c == '\n' and self.running and not resp.isclosed():
                c = resp.read(1)
                if c == '\n':
                    break
                data += c
            data = data.strip()
            delimited_string = c

            # read data and pass into listener
            if self.listener.on_data(data) is False:
                self.running = False
            # read rest of delimiter length..
            d = ''
            while d != '\n' and self.running and not resp.isclosed():
                d = resp.read(1)
                delimited_string += d

            # read the next twitter status object
            if delimited_string.strip().isdigit():
                next_status_obj = resp.read( int(delimited_string) )
                self._data(next_status_obj)

        if resp.isclosed():
            self.on_closed(resp)

    def _start(self, async):
        self.running = True
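The rewritten read loop expects 'delimited=length' framing: each JSON message is preceded by its byte length on a line of its own, with optional keep-alive newlines in between. A standalone sketch of that framing against an in-memory buffer (illustrative only, not the vendored Stream code):

    from StringIO import StringIO

    def read_delimited(stream):
        """Yield messages from a delimited=length framed stream."""
        while True:
            char = stream.read(1)
            while char == '\n':                  # skip keep-alive newlines
                char = stream.read(1)
            length = ''
            while char and char != '\n':         # accumulate the length digits
                length += char
                char = stream.read(1)
            if not length.isdigit():             # closed connection or garbage
                return
            yield stream.read(int(length))       # the JSON message itself

    buf = StringIO('\n\n17\n{"text": "hello"}\n16\n{"text": "bye!"}')
    for message in read_delimited(buf):
        print message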
@@ -160,20 +180,23 @@ class Stream(object):
        else:
            self._run()

    def on_closed(self, resp):
        """ Called when the response has been closed by Twitter """
        pass

    def userstream(self, count=None, async=False, secure=True):
        self.parameters = {'delimited': 'length'}
        if self.running:
            raise TweepError('Stream object already connected!')
        self.url = '/2/user.json'
        self.url = '/2/user.json?delimited=length'
        self.host='userstream.twitter.com'
        if count:
            self.url += '&count=%s' % count
        self._start(async)

    def firehose(self, count=None, async=False):
        self.parameters = {'delimited': 'length'}
        if self.running:
            raise TweepError('Stream object already connected!')
        self.url = '/%i/statuses/firehose.json?delimited=length' % STREAM_VERSION
        self.url = '/%s/statuses/firehose.json?delimited=length' % STREAM_VERSION
        if count:
            self.url += '&count=%s' % count
        self._start(async)

@@ -182,24 +205,25 @@ class Stream(object):
        self.parameters = {'delimited': 'length'}
        if self.running:
            raise TweepError('Stream object already connected!')
        self.url = '/%i/statuses/retweet.json?delimited=length' % STREAM_VERSION
        self.url = '/%s/statuses/retweet.json?delimited=length' % STREAM_VERSION
        self._start(async)

    def sample(self, count=None, async=False):
        self.parameters = {'delimited': 'length'}
        if self.running:
            raise TweepError('Stream object already connected!')
        self.url = '/%i/statuses/sample.json?delimited=length' % STREAM_VERSION
        self.url = '/%s/statuses/sample.json?delimited=length' % STREAM_VERSION
        if count:
            self.url += '&count=%s' % count
        self._start(async)

    def filter(self, follow=None, track=None, async=False, locations=None, count = None):
    def filter(self, follow=None, track=None, async=False, locations=None,
               count = None, stall_warnings=False, languages=None):
        self.parameters = {}
        self.headers['Content-type'] = "application/x-www-form-urlencoded"
        if self.running:
            raise TweepError('Stream object already connected!')
        self.url = '/%i/statuses/filter.json?delimited=length' % STREAM_VERSION
        self.url = '/%s/statuses/filter.json?delimited=length' % STREAM_VERSION
        if follow:
            self.parameters['follow'] = ','.join(map(str, follow))
        if track:

@@ -209,7 +233,11 @@ class Stream(object):
            self.parameters['locations'] = ','.join(['%.2f' % l for l in locations])
        if count:
            self.parameters['count'] = count
        self.body = urllib.urlencode(self.parameters)
        if stall_warnings:
            self.parameters['stall_warnings'] = stall_warnings
        if languages:
            self.parameters['language'] = ','.join(map(str, languages))
        self.body = urlencode_noplus(self.parameters)
        self.parameters['delimited'] = 'length'
        self._start(async)
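Taken together, a hedged sketch of how a consumer might use the new listener hook and filter() arguments; the Stream constructor and OAuthHandler usage follow tweepy's documented API of this era and may not match this vendored copy exactly, and the credentials are placeholders:

    from tweepy import OAuthHandler
    from tweepy.streaming import Stream, StreamListener

    class LoggingListener(StreamListener):
        def on_connect(self):
            print 'stream connected'          # new hook: runs before the read loop

        def on_data(self, data):
            print data
            return True                       # returning False stops the stream

    auth = OAuthHandler('consumer_key', 'consumer_secret')        # placeholders
    auth.set_access_token('access_token', 'access_token_secret')  # placeholders

    stream = Stream(auth, LoggingListener())
    stream.filter(track=['newsblur', 'rss'],
                  languages=['en'],           # sent to Twitter as the `language` parameter
                  stall_warnings=True)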
vendor/tweepy/utils.py (vendored, 5 changed lines)

@@ -7,6 +7,7 @@ import time
import htmlentitydefs
import re
import locale
from urllib import quote


def parse_datetime(string):

@@ -96,3 +97,7 @@ def list_to_csv(item_list):
    if item_list:
        return ','.join([str(i) for i in item_list])

def urlencode_noplus(query):
    return '&'.join(['%s=%s' % (quote(str(k)), quote(str(v))) \
        for k, v in query.iteritems()])
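The helper exists because urllib.urlencode encodes spaces as '+', which can break phrase tracks on the streaming filter endpoint, while quote() produces '%20' instead. A small comparison, with the helper restated so the snippet stands alone:

    import urllib
    from urllib import quote

    def urlencode_noplus(query):
        return '&'.join(['%s=%s' % (quote(str(k)), quote(str(v)))
                         for k, v in query.iteritems()])

    params = {'track': 'samuel clay'}
    print urllib.urlencode(params)      # track=samuel+clay
    print urlencode_noplus(params)      # track=samuel%20clay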