Mirror of https://github.com/viq/NewsBlur.git, synced 2025-09-18 21:43:31 +00:00

Commit 2b8f4bff95 (parent f7574e6d11)
Normalizing logging across all views and models.

9 changed files with 47 additions and 52 deletions
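
Every hunk below applies the same pattern: the acting user moves to the front of the message in brackets, with no colon after the closing bracket, so log lines can be grepped and aligned by user; error-ish lines use a " ***> " arrow instead of " ---> ". A minimal sketch of the convention — the log_user_action helper is illustrative only and is not part of this commit:

    import logging

    def log_user_action(user, action, detail=None, error=False):
        # Hypothetical helper showing the normalized format: user first,
        # in brackets, no trailing colon, then the action and its detail.
        arrow = " ***> " if error else " ---> "
        if detail is None:
            logging.info("%s[%s] %s" % (arrow, user, action))
        else:
            logging.info("%s[%s] %s: %s" % (arrow, user, action, detail))

    # Before: logging.info(" ---> [%s]: Feed training: %s" % (request.user, feed))
    # After:  logging.info(" ---> [%s] Feed training: %s" % (request.user, feed))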
@@ -77,7 +77,7 @@ def save_classifier(request):
     _save_classifier(ClassifierTitle, 'title')
     _save_classifier(ClassifierFeed, 'publisher')
 
-    logging.info(" ---> [%s]: Feed training: %s" % (request.user, feed))
+    logging.info(" ---> [%s] Feed training: %s" % (request.user, feed))
 
     response = dict(code=code, message=message, payload=payload)
     return response
@@ -87,10 +87,9 @@ class GoogleReaderImporter(Importer):
         folders = defaultdict(list)
         for item in self.feeds:
             folders = self.process_item(item, folders)
-        logging.info(" ---> Google Reader import: [%s]" % self.user)
         # print dict(folders)
         self.rearrange_folders(folders)
-        logging.info(" ---> [%s]: %s" % (self.user, self.subscription_folders))
+        logging.info(" ---> [%s] Google Reader import: %s" % (self.user, self.subscription_folders))
         UserSubscriptionFolders.objects.create(user=self.user,
                                                folders=json.encode(self.subscription_folders))
 
@@ -43,7 +43,7 @@ def opml_upload(request):
 
 
 def reader_authorize(request):
-    logging.info(" ---> [%s]: Authorize Google Reader import (%s) - %s" % (
+    logging.info(" ---> [%s] Authorize Google Reader import (%s) - %s" % (
         request.user,
         request.session.session_key,
         request.META['REMOTE_ADDR'],
@@ -85,7 +85,7 @@ def reader_callback(request):
     access_token_url = 'https://www.google.com/accounts/OAuthGetAccessToken'
     consumer = oauth.Consumer(settings.OAUTH_KEY, settings.OAUTH_SECRET)
 
-    logging.info(" ---> [%s]: Google Reader callback (%s) - %s" % (
+    logging.info(" ---> [%s] Google Reader callback (%s) - %s" % (
         request.user,
         request.session.session_key,
         request.META['REMOTE_ADDR']
@@ -10,7 +10,7 @@ class LastSeenMiddleware(object):
             and request.user.is_authenticated()):
             hour_ago = datetime.datetime.now() - datetime.timedelta(minutes=60)
             if request.user.profile.last_seen_on < hour_ago:
-                logging.info(" ---> Repeat visitor: %s" % request.user)
+                logging.info(" ---> [%s] Repeat visitor: %s" % (request.user, request.user.profile.last_seen_on))
             request.user.profile.last_seen_on = datetime.datetime.now()
             request.user.profile.last_seen_ip = request.META['REMOTE_ADDR']
             request.user.profile.save()
@@ -23,7 +23,7 @@ class LoginForm(forms.Form):
         if username:
             self.user_cache = authenticate(username=username, password=password)
             if self.user_cache is None:
-                print " ---> *** Bad Login: %s" % username
+                print " ***> [%s] Bad Login" % username
                 raise forms.ValidationError(_("Whoopsy-daisy. Try again."))
             elif not self.user_cache.is_active:
                 raise forms.ValidationError(_("This account is inactive."))
@@ -58,13 +58,13 @@ class UserSubscription(models.Model):
     def calculate_feed_scores(self, silent=False):
         if not self.feed.fetched_once:
             if not silent:
-                logging.info(' ---> [%s]: NOT Computing scores: %s' % (self.user, self.feed))
+                logging.info(' ---> [%s] NOT Computing scores: %s' % (self.user, self.feed))
             self.needs_unread_recalc = False
             self.save()
             return
 
         if not silent:
-            logging.info(' ---> [%s]: Computing scores: %s' % (self.user, self.feed))
+            logging.info(' ---> [%s] Computing scores: %s' % (self.user, self.feed))
         feed_scores = dict(negative=0, neutral=0, positive=0)
 
         # Two weeks in age. If mark_read_date is older, mark old stories as read.
@@ -66,7 +66,7 @@ def login(request):
         form = LoginForm(request.POST, prefix='login')
         if form.is_valid():
             login_user(request, form.get_user())
-            logging.info(" ---> Login: %s" % form.get_user())
+            logging.info(" ---> [%s] Login" % form.get_user())
             return HttpResponseRedirect(reverse('index'))
 
     return index(request)
@@ -78,7 +78,7 @@ def signup(request):
         if form.is_valid():
             new_user = form.save()
             login_user(request, new_user)
-            logging.info(" ---> NEW SIGNUP: %s" % new_user)
+            logging.info(" ---> [%s] NEW SIGNUP" % new_user)
             return HttpResponseRedirect(reverse('index'))
 
     return index(request)
@@ -93,9 +93,10 @@ def logout(request):
 
 @json.json_view
 def load_feeds(request):
-    user = get_user(request)
-    feeds = {}
+    user = get_user(request)
+    feeds = {}
+    not_yet_fetched = False
 
 
     try:
         folders = UserSubscriptionFolders.objects.get(user=user)
@@ -111,6 +112,7 @@ def load_feeds(request):
             # > 200 means that we counted enough, just move to refresh during live.
             sub.calculate_feed_scores()
             updated_count += 1
+
         feeds[sub.feed.pk] = {
             'id': sub.feed.pk,
             'feed_title': sub.feed.feed_title,
@@ -120,6 +122,7 @@ def load_feeds(request):
             'ng': sub.unread_count_negative,
             'updated': format_relative_date(sub.feed.last_update)
         }
 
         if not sub.feed.fetched_once:
+            not_yet_fetched = True
             feeds[sub.feed.pk]['not_yet_fetched'] = True
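
Taken together, the three load_feeds hunks above thread a single flag through the view: it is initialized once per request, set when any subscribed feed has never been fetched, and mirrored into that feed's payload. A condensed sketch of the flow, with the queryset handling elided and the response shape assumed:

    # Condensed, assumption-laden sketch of the not_yet_fetched flow above.
    def load_feeds_sketch(user_subs):
        feeds = {}
        not_yet_fetched = False                    # initialized once per request
        for sub in user_subs:
            feeds[sub.feed.pk] = {
                'id': sub.feed.pk,
                'feed_title': sub.feed.feed_title,
            }
            if not sub.feed.fetched_once:
                not_yet_fetched = True             # at least one feed still pending
                feeds[sub.feed.pk]['not_yet_fetched'] = True
        # The view presumably returns the flag so the client knows to poll again.
        return feeds, not_yet_fetched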
@@ -231,10 +234,10 @@ def load_single_feed(request):
         usersub = UserSubscription.objects.get(user=user, feed=feed)
     except UserSubscription.DoesNotExist:
         # FIXME: Why is this happening for `conesus` when logged into another account?!
-        logging.info(" ***> UserSub DNE, creating: %s %s" % (user, feed))
+        logging.info(" ***> [%s] UserSub DNE, creating: %s" % (user, feed))
         usersub = UserSubscription.objects.create(user=user, feed=feed)
 
-    logging.info("Loading feed: [%s] %s" % (request.user, feed.feed_title))
+    logging.info(" ---> [%s] Loading feed: %s" % (request.user, feed.feed_title))
 
     if stories:
         last_read_date = stories[-1]['story_date']
@@ -312,7 +315,7 @@ def mark_all_as_read(request):
         sub.mark_read_date = read_date
         sub.save()
 
-    logging.info(" ---> Marking all as read [%s]: %s days" % (request.user, days,))
+    logging.info(" ---> [%s] Marking all as read: %s days" % (request.user, days,))
     return dict(code=code)
 
 @ajax_login_required
@@ -321,7 +324,7 @@ def mark_story_as_read(request):
     story_ids = request.REQUEST['story_id'].split(',')
     feed_id = int(request.REQUEST['feed_id'])
 
-    usersub = UserSubscription.objects.get(user=request.user, feed=feed_id)
+    usersub = UserSubscription.objects.select_related('feed').get(user=request.user, feed=feed_id)
     if not usersub.needs_unread_recalc:
         usersub.needs_unread_recalc = True
         usersub.save()
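
The select_related('feed') in the hunk above exists so the logging.debug call in the next hunk can print usersub.feed without issuing a second query. Note that select_related is a queryset method, so it must be chained on the manager before .get() runs; .get() returns a model instance, which has no select_related. A minimal illustration, reusing the names from the hunk:

    # The JOIN must be requested before the row is fetched:
    usersub = UserSubscription.objects.select_related('feed').get(
        user=request.user, feed=feed_id)
    # usersub.feed is now populated from the same query, so later accesses
    # (such as the logging.debug call below) trigger no extra SELECT.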
@@ -329,7 +332,7 @@ def mark_story_as_read(request):
     data = dict(code=0, payload=story_ids)
 
     for story_id in story_ids:
-        logging.debug("Marked Read: [%s] %s (%s)" % (request.user, story_id, feed_id))
+        logging.debug(" ---> [%s] Read story in feed: %s" % (request.user, usersub.feed))
         m = UserStory(story_id=int(story_id), user=request.user, feed_id=feed_id)
         try:
             m.save()
@@ -354,7 +357,7 @@ def mark_feed_as_read(request):
     else:
         code = 1
 
-    logging.info(" ---> Marking feed as read [%s]: %s" % (request.user, feed,))
+    logging.info(" ---> [%s] Marking feed as read: %s" % (request.user, feed,))
     # UserStory.objects.filter(user=request.user, feed=feed_id).delete()
     return dict(code=code)
 
@@ -407,6 +410,8 @@ def add_url(request):
     folder = request.POST['folder']
     feed = None
+
+    logging.info(" ---> [%s] Adding URL: %s (in %s)" % (request.user, url, folder))
 
     if url:
         url = urlnorm.normalize(url)
         feed = Feed.objects.filter(Q(feed_address=url)
@@ -466,6 +471,8 @@ def add_folder(request):
     folder = request.POST['folder']
     parent_folder = request.POST['parent_folder']
+
+    logging.info(" ---> [%s] Adding Folder: %s (in %s)" % (request.user, folder, parent_folder))
 
     if folder:
         code = 1
         message = ""
@@ -500,7 +507,7 @@ def delete_feed(request):
     for k, folder in enumerate(old_folders):
         if isinstance(folder, int):
             if folder == feed_id:
-                logging.info(" ---> [%s] DEL'ED: %s'th item: %s folders/feeds" % (request.user, k, len(old_folders)))
+                logging.info(" ---> [%s] Delete folder: %s'th item: %s folders/feeds" % (request.user, k, len(old_folders)))
                 # folders.remove(folder)
             else:
                 new_folders.append(folder)
@@ -536,6 +543,7 @@ def add_feature(request):
 @json.json_view
 def load_features(request):
     page = int(request.POST.get('page', 0))
+    logging.info(" ---> [%s] Browse features: Page #%s" % (request.user, page+1))
     features = Feature.objects.all()[page*3:(page+1)*3+1].values()
     features = [{'description': f['description'], 'date': f['date'].strftime("%b %d, %Y")} for f in features]
     return features
@@ -547,7 +555,7 @@ def save_feed_order(request):
         # Test that folders can be JSON decoded
         folders_list = json.decode(folders)
         assert folders_list is not None
-        logging.info(" ---> [%s]: Feed re-ordering: %s folders/feeds" % (request.user, len(folders_list)))
+        logging.info(" ---> [%s] Feed re-ordering: %s folders/feeds" % (request.user, len(folders_list)))
         user_sub_folders = UserSubscriptionFolders.objects.get(user=request.user)
         user_sub_folders.folders = folders
         user_sub_folders.save()
@@ -571,7 +579,7 @@ def get_feeds_trainer(request):
         classifier['feed_authors'] = json.decode(us.feed.popular_authors) if us.feed.popular_authors else []
         classifiers.append(classifier)
 
-    logging.info(" ---> [%s] Loading Trainer: %s" % (request.user, len(classifiers)))
+    logging.info(" ---> [%s] Loading Trainer: %s feeds" % (request.user, len(classifiers)))
 
     return classifiers
 
@@ -11,7 +11,6 @@ from utils import feedparser
 from django.db.models import Q
 from django.db import IntegrityError
 from utils.story_functions import pre_process_story
-from utils.feed_functions import fetch_address_from_page
 from utils import log as logging
 import sys
 import time
@@ -47,10 +46,7 @@ class FetchFeed:
     def fetch(self):
         """ Downloads and parses a feed.
         """
-        current_process = multiprocessing.current_process()
-        identity = "X"
-        if current_process._identity:
-            identity = current_process._identity[0]
+        identity = self.get_identity()
         log_msg = u'%2s ---> Fetching %s (%d)' % (identity,
                                                   self.feed.feed_title,
                                                   self.feed.id)
@@ -70,13 +66,18 @@ class FetchFeed:
                                  agent=USER_AGENT,
                                  etag=self.feed.etag,
                                  modified=modified)
-
-        # feed_xml, _ = FeedXML.objects.get_or_create(feed=self.feed)
-        # feed_xml.rss_xml = self.fpf
-        # feed_xml.save()
-
+
         return FEED_OK, self.fpf
 
+    def get_identity(self):
+        identity = "X"
+
+        current_process = multiprocessing.current_process()
+        if current_process._identity:
+            identity = current_process._identity[0]
+
+        return identity
+
 class ProcessFeed:
     def __init__(self, feed, fpf, options):
         self.feed = feed
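
_identity is an undocumented internal of multiprocessing.Process: a tuple such as (3,) for the third spawned worker, and empty in the main process, which is why get_identity falls back to "X". A standalone sketch of the same lookup (current_process().name would be the documented alternative):

    import multiprocessing

    def report_identity():
        # Empty tuple in the main process; (1,), (2,), ... in spawned workers.
        identity = "X"
        current_process = multiprocessing.current_process()
        if current_process._identity:
            identity = current_process._identity[0]
        print("identity: %s" % identity)

    if __name__ == '__main__':
        report_identity()                                     # -> identity: X
        p = multiprocessing.Process(target=report_identity)   # -> identity: 1
        p.start()
        p.join()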
@@ -309,6 +310,13 @@ class Dispatcher:
                 self.feed_stats[ret_feed] += 1
                 for key, val in ret_entries.items():
                     self.entry_stats[key] += val
+
+            time_taken = datetime.datetime.now() - self.time_start
+            history = FeedUpdateHistory(
+                number_of_feeds=len(feed_queue),
+                seconds_taken=time_taken.seconds
+            )
+            history.save()
             if not self.options['single_threaded']:
                 logging.debug("---> DONE WITH PROCESS: %s" % current_process.name)
                 sys.exit()
@@ -348,12 +356,6 @@ class Dispatcher:
                 for key in self.entry_keys)
         ))
         logging.debug(done)
-        time_taken = datetime.datetime.now() - self.time_start
-        history = FeedUpdateHistory(
-            number_of_feeds=self.feeds_count,
-            seconds_taken=time_taken.seconds
-        )
-        history.save()
         return
 
 
@@ -1,6 +1,4 @@
 import datetime
-import time
-import sys
 from django.utils.translation import ungettext
 from utils import feedfinder
 
@@ -16,18 +14,6 @@ def encode(tstr):
     # it's already UTF8.. sigh
     return tstr.decode('utf-8').encode('utf-8')
 
-def prints(tstr):
-    """ lovely unicode
-    """
-    sys.stdout.write('%s\n' % (tstr.encode(sys.getdefaultencoding(),
-                                           'replace')))
-    sys.stdout.flush()
-
-def mtime(ttime):
-    """ datetime auxiliar function.
-    """
-    return datetime.datetime.fromtimestamp(time.mktime(ttime))
-
 # From: http://www.poromenos.org/node/87
 def levenshtein_distance(first, second):
     """Find the Levenshtein distance between two strings."""