Mirror of https://github.com/samuelclay/NewsBlur.git, synced 2025-09-18 21:50:56 +00:00
Adding stathat stats for feed fetching and parsing.
commit eec1563549
parent 5e981635d1
2 changed files with 37 additions and 1 deletion
utils/feed_fetcher.py

@@ -17,6 +17,9 @@ from utils import feedparser
 from utils.story_functions import pre_process_story
 from utils import log as logging
 from utils.feed_functions import timelimit, TimeoutError, mail_feed_error_to_admin, utf8encode
+from vendor.stathat import StatHat
+
+stathat = StatHat()
 
 # Refresh feed code adapted from Feedjack.
 # http://feedjack.googlecode.com
@@ -41,6 +44,7 @@ class FetchFeed:
         """
         Uses feedparser to download the feed. Will be parsed later.
         """
+        start = time.time()
         identity = self.get_identity()
         log_msg = u'%2s ---> [%-30s] ~FYFetching feed (~FB%d~FY), last update: %s' % (identity,
                                                             unicode(self.feed)[:30],
@@ -65,7 +69,10 @@ class FetchFeed:
                                              agent=USER_AGENT,
                                              etag=etag,
                                              modified=modified)
-        
+
+        if self.fpf.status == 200:
+            stathat.ez_post_value('newsblur', 'Fetcher: feed fetch time', time.time() - start)
+
         return FEED_OK, self.fpf
     
     def get_identity(self):
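Note the pattern in the hunk above: capture time.time() before the network call, then post the elapsed seconds only when the fetch came back with HTTP 200, so error responses and 304 Not Modified replies don't skew the stat. The parse, unread-count, and total-time hunks below repeat the same shape. As a sketch only (the helper name stathat_timer is hypothetical; this commit inlines the pattern instead), the shape could be factored into a context manager built on the module-level stathat instance from the first hunk:

import time
from contextlib import contextmanager

@contextmanager
def stathat_timer(stat_name, should_post=lambda: True):
    # Hypothetical helper, not part of this commit: times the wrapped
    # block and posts the elapsed seconds to StatHat's EZ API, but only
    # when the should_post predicate holds (e.g. status == 200).
    start = time.time()
    yield
    if should_post():
        stathat.ez_post_value('newsblur', stat_name, time.time() - start)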
@@ -95,6 +102,7 @@ class ProcessFeed:
     def process(self):
         """ Downloads and parses a feed.
         """
+        start = time.time()
         self.refresh_feed()
         
         ret_values = {
@@ -230,6 +238,8 @@ class ProcessFeed:
             self.feed.trim_feed()
             self.feed.save_feed_history(200, "OK")
             
+            stathat.ez_post_value('newsblur', 'Fetcher: feed parse time', time.time() - start)
+            
             return FEED_OK, ret_values
         
 
@@ -312,6 +322,7 @@ class Dispatcher:
                 feed = self.refresh_feed(feed_id)
                 
                 if ret_entries.get(ENTRY_NEW) or self.options['force']:
+                    start = time.time()
                     if not feed.known_good:
                         feed.known_good = True
                         feed.save()
@@ -320,6 +331,7 @@ class Dispatcher:
                         self.count_unreads_for_subscribers(feed)
                     except TimeoutError:
                         logging.debug('   ---> [%-30s] Unread count took too long...' % (unicode(feed)[:30],))
+                    stathat.ez_post_value('newsblur', 'Fetcher: unread count time', time.time() - start)
                     cache.delete('feed_stories:%s-%s-%s' % (feed.id, 0, 25))
                 # if ret_entries.get(ENTRY_NEW) or ret_entries.get(ENTRY_UPDATED) or self.options['force']:
                 #     feed.get_stories(force=True)
@@ -405,6 +417,7 @@ class Dispatcher:
                 identity, feed.feed_title[:30], delta,
                 feed.pk, self.feed_trans[ret_feed],))
             logging.debug(done_msg)
+            stathat.ez_post_value('newsblur', 'Fetcher: total processing time', delta)
             
             self.feed_stats[ret_feed] += 1
             for key, val in ret_entries.items():
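One operational note on the four stats above: the vendored client (next file) performs a synchronous urllib2.urlopen on every post, so each instrumented fetch now spends an extra blocking HTTP round-trip to api.stathat.com, and a StatHat outage would surface as exceptions inside the fetcher. A common mitigation, shown here only as a hypothetical sketch (post_value_async is not in this commit), is to make the post fire-and-forget on a daemon thread:

import threading

def post_value_async(stat_name, value):
    # Hypothetical wrapper, not part of this commit: posts the stat off
    # the fetcher's critical path and treats failures as best-effort.
    def _post():
        try:
            stathat.ez_post_value('newsblur', stat_name, value)
        except Exception:
            pass  # stats must never break feed fetching
    thread = threading.Thread(target=_post)
    thread.setDaemon(True)
    thread.start()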
vendor/stathat.py (vendored, new file, 23 lines)
@@ -0,0 +1,23 @@
+import urllib
+import urllib2
+
+class StatHat:
+    
+    def http_post(self, path, data):
+        pdata = urllib.urlencode(data)
+        req = urllib2.Request('http://api.stathat.com' + path, pdata)
+        resp = urllib2.urlopen(req)
+        return resp.read()
+    
+    def post_value(self, user_key, stat_key, value):
+        return self.http_post('/v', {'key': stat_key, 'ukey': user_key, 'value': value})
+    
+    def post_count(self, user_key, stat_key, count):
+        return self.http_post('/c', {'key': stat_key, 'ukey': user_key, 'count': count})
+    
+    def ez_post_value(self, email, stat_name, value):
+        return self.http_post('/ez', {'email': email, 'stat': stat_name, 'value': value})
+    
+    def ez_post_count(self, email, stat_name, count):
+        return self.http_post('/ez', {'email': email, 'stat': stat_name, 'count': count})
+
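The client above is Python 2 code (urllib + urllib2), matching the rest of the codebase, and the /ez endpoint identifies the account via the 'email' field, which is why the fetcher passes 'newsblur' as the first argument to ez_post_value. For reference, a rough Python 3 equivalent of the EZ-value call, standard library only and offered as a sketch rather than an official client, would be:

from urllib.parse import urlencode
from urllib.request import Request, urlopen

def ez_post_value(email, stat_name, value):
    # Python 3 sketch of the vendored client's EZ-value post above.
    data = urlencode({'email': email, 'stat': stat_name, 'value': value})
    req = Request('http://api.stathat.com/ez', data.encode('ascii'))
    with urlopen(req) as resp:
        return resp.read()

It is called the same way as the vendored version, e.g. ez_post_value('newsblur', 'Fetcher: feed fetch time', 1.5).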