Merge branch 'master' of github.com:samuelclay/NewsBlur

Samuel Clay 2010-10-11 10:29:03 -04:00
commit a77e170206
15 changed files with 42 additions and 42 deletions
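
Every hunk below makes the same one-token change: datetime.datetime.now() becomes datetime.datetime.utcnow(). Both return naive datetimes (no tzinfo attached), but now() uses the server's local timezone while utcnow() uses UTC, so timestamps written with one and compared against the other are silently skewed by the local UTC offset. A minimal sketch of the difference, assuming a server whose local zone is not UTC:

import datetime

# now() -> naive local time; utcnow() -> naive UTC. Neither carries tzinfo,
# so nothing stops the two from being mixed -- the skew is silent.
local = datetime.datetime.now()
utc = datetime.datetime.utcnow()
print(local - utc)  # roughly the local UTC offset, e.g. -1 day, 20:00:00 on US/Eastern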

View file

@@ -94,7 +94,7 @@ class OPMLImporter(Importer):
             user=self.user,
             defaults={
                 'needs_unread_recalc': True,
-                'mark_read_date': datetime.datetime.now() - datetime.timedelta(days=1)
+                'mark_read_date': datetime.datetime.utcnow() - datetime.timedelta(days=1)
             }
         )
         folders.append(feed_db.pk)
@@ -161,7 +161,7 @@ class GoogleReaderImporter(Importer):
             user=self.user,
             defaults={
                 'needs_unread_recalc': True,
-                'mark_read_date': datetime.datetime.now() - datetime.timedelta(days=1)
+                'mark_read_date': datetime.datetime.utcnow() - datetime.timedelta(days=1)
             }
         )
         if not category: category = "Root"

View file

@@ -8,10 +8,10 @@ class LastSeenMiddleware(object):
                 and not request.is_ajax()
                 and hasattr(request, 'user')
                 and request.user.is_authenticated()):
-            hour_ago = datetime.datetime.now() - datetime.timedelta(minutes=60)
+            hour_ago = datetime.datetime.utcnow() - datetime.timedelta(minutes=60)
             if request.user.profile.last_seen_on < hour_ago:
                 logging.info(" ---> [%s] Repeat visitor: %s" % (request.user, request.user.profile.last_seen_on))
-            request.user.profile.last_seen_on = datetime.datetime.now()
+            request.user.profile.last_seen_on = datetime.datetime.utcnow()
             request.user.profile.last_seen_ip = request.META['REMOTE_ADDR']
             request.user.profile.save()
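
Note (an observation about the migration, not part of the diff): last_seen_on values already stored by the old now() code stay in local time until rewritten, so on a non-UTC server the hour comparison above is skewed by the UTC offset during the transition. A hypothetical illustration for a UTC-4 server:

import datetime

# A timestamp written moments ago by the old local-time code still looks
# more than an hour old next to a UTC cutoff (the offset is an assumption).
offset = datetime.timedelta(hours=4)
last_seen_local = datetime.datetime.utcnow() - offset  # what old now() stored
hour_ago = datetime.datetime.utcnow() - datetime.timedelta(minutes=60)
print(last_seen_local < hour_ago)  # True: a false "repeat visitor"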

View file

@@ -108,6 +108,6 @@ class FeatureForm(forms.Form):
     def save(self):
         feature = Feature(description=self.cleaned_data['description'],
-                          date=datetime.datetime.now() + datetime.timedelta(minutes=1))
+                          date=datetime.datetime.utcnow() + datetime.timedelta(minutes=1))
         feature.save()
         return feature

View file

@@ -64,7 +64,7 @@ class UserSubscription(models.Model):
             self.needs_unread_recalc = False
             self.save()
             return
-        now = datetime.datetime.now()
+        now = datetime.datetime.utcnow()
         if not silent:
             logging.info(' ---> [%s] Computing scores: %s' % (self.user, self.feed))
         feed_scores = dict(negative=0, neutral=0, positive=0)

View file

@@ -29,7 +29,7 @@ class Command(BaseCommand):
             daemonize()
         settings.LOG_TO_STREAM = True
-        now = datetime.datetime.now()
+        now = datetime.datetime.utcnow()
        if options['skip']:
             feeds = Feed.objects.filter(next_scheduled_update__lte=now,

View file

@@ -17,7 +17,7 @@ class Command(BaseCommand):
     def handle(self, *args, **options):
         settings.LOG_TO_STREAM = True
-        now = datetime.datetime.now()
+        now = datetime.datetime.utcnow()
         feeds = Feed.objects.filter(
             next_scheduled_update__lte=now,
@@ -36,7 +36,7 @@ class Command(BaseCommand):
         feed_queue = []
         size = 12
         for f in feeds:
-            f.queued_date = datetime.datetime.now()
+            f.queued_date = datetime.datetime.utcnow()
             f.set_next_scheduled_update()
         for feed_queue in (feeds[pos:pos + size] for pos in xrange(0, len(feeds), size)):
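
The last line above fans the feed queryset out in batches of size (12 feeds per batch) using a slicing generator. The idiom in isolation, as a runnable sketch:

# Slice a sequence into fixed-size chunks; the final chunk may be short.
def chunks(seq, size):
    return (seq[pos:pos + size] for pos in xrange(0, len(seq), size))

for batch in chunks(range(30), 12):
    print(batch)  # [0..11], then [12..23], then [24..29]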

View file

@@ -27,7 +27,7 @@ from utils.diff import HTMLDiff
 from utils import log as logging
 ENTRY_NEW, ENTRY_UPDATED, ENTRY_SAME, ENTRY_ERR = range(4)
-SUBSCRIBER_EXPIRE = datetime.datetime.now() - datetime.timedelta(days=21)
+SUBSCRIBER_EXPIRE = datetime.datetime.utcnow() - datetime.timedelta(days=21)
 class Feed(models.Model):
     feed_address = models.URLField(max_length=255, verify_exists=True, unique=True)
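
Worth noting: SUBSCRIBER_EXPIRE is a module-level constant, evaluated once at import time, so it goes stale in any long-running process; the switch to utcnow() fixes the timezone, not the staleness. A sketch of the per-use alternative (illustrative only, not what this commit does):

import datetime

def subscriber_expire():
    # Recomputed on each call, so the 21-day cutoff tracks the current time.
    return datetime.datetime.utcnow() - datetime.timedelta(days=21)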
@@ -67,11 +67,11 @@ class Feed(models.Model):
         if self.feed_tagline and len(self.feed_tagline) > 1024:
             self.feed_tagline = self.feed_tagline[:1024]
         if not self.last_update:
-            self.last_update = datetime.datetime.now()
+            self.last_update = datetime.datetime.utcnow()
         if not self.next_scheduled_update:
-            self.next_scheduled_update = datetime.datetime.now()
+            self.next_scheduled_update = datetime.datetime.utcnow()
         if not self.queued_date:
-            self.queued_date = datetime.datetime.now()
+            self.queued_date = datetime.datetime.utcnow()
         try:
@@ -108,7 +108,7 @@ class Feed(models.Model):
         if feed_address:
             try:
                 self.feed_address = feed_address
-                self.next_scheduled_update = datetime.datetime.now()
+                self.next_scheduled_update = datetime.datetime.utcnow()
                 self.has_feed_exception = False
                 self.active = True
                 self.save()
@@ -126,7 +126,7 @@ class Feed(models.Model):
             status_code=int(status_code),
             message=message,
             exception=exception,
-            fetch_date=datetime.datetime.now()).save()
+            fetch_date=datetime.datetime.utcnow()).save()
         old_fetch_histories = MFeedFetchHistory.objects(feed_id=self.pk).order_by('-fetch_date')[5:]
         for history in old_fetch_histories:
             history.delete()
@@ -144,7 +144,7 @@ class Feed(models.Model):
             status_code=int(status_code),
             message=message,
             exception=exception,
-            fetch_date=datetime.datetime.now()).save()
+            fetch_date=datetime.datetime.utcnow()).save()
         old_fetch_histories = MPageFetchHistory.objects(feed_id=self.pk).order_by('-fetch_date')[5:]
         for history in old_fetch_histories:
             history.delete()
@@ -195,7 +195,7 @@ class Feed(models.Model):
         # self.save_feed_story_history_statistics(lock)
     def save_feed_stories_last_month(self, verbose=False, lock=None):
-        month_ago = datetime.datetime.now() - datetime.timedelta(days=30)
+        month_ago = datetime.datetime.utcnow() - datetime.timedelta(days=30)
         stories_last_month = MStory.objects(story_feed_id=self.pk,
                                             story_date__gte=month_ago).count()
         self.stories_last_month = stories_last_month
@@ -213,7 +213,7 @@ class Feed(models.Model):
         Save format: [('YYYY-MM, #), ...]
         Example output: [(2010-12, 123), (2011-01, 146)]
         """
-        now = datetime.datetime.now()
+        now = datetime.datetime.utcnow()
         min_year = now.year
         total = 0
         month_count = 0
@@ -609,7 +609,7 @@ class Feed(models.Model):
     def set_next_scheduled_update(self, lock=None):
         total, random_factor = self.get_next_scheduled_update()
-        next_scheduled_update = datetime.datetime.now() + datetime.timedelta(
+        next_scheduled_update = datetime.datetime.utcnow() + datetime.timedelta(
             minutes = total + random_factor)
         self.next_scheduled_update = next_scheduled_update
@@ -617,7 +617,7 @@ class Feed(models.Model):
         self.save(lock=lock)
     def schedule_feed_fetch_immediately(self, lock=None):
-        self.next_scheduled_update = datetime.datetime.now()
+        self.next_scheduled_update = datetime.datetime.utcnow()
         self.save(lock=lock)
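
set_next_scheduled_update above schedules the next fetch total + random_factor minutes out; the random factor spreads fetches so feeds scheduled together don't all come due at once. A self-contained sketch of that jitter idea (the function name and the ~25% jitter formula are illustrative, not NewsBlur's actual calculation):

import datetime
import random

def next_fetch_time(base_minutes):
    # Add up to ~25% random jitter so queued feeds fan out over time.
    jitter = random.randint(0, max(1, base_minutes // 4))
    return datetime.datetime.utcnow() + datetime.timedelta(minutes=base_minutes + jitter)

print(next_fetch_time(60))  # a moment 60-75 minutes from now, in UTC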

View file

@@ -41,7 +41,7 @@ def exception_retry(request):
     reset_fetch = json.decode(request.POST['reset_fetch'])
     feed = get_object_or_404(Feed, pk=feed_id)
-    feed.next_scheduled_update = datetime.datetime.now()
+    feed.next_scheduled_update = datetime.datetime.utcnow()
     feed.has_page_exception = False
     feed.has_feed_exception = False
     if reset_fetch:
@@ -71,14 +71,14 @@ def exception_change_feed_address(request):
     feed.active = True
     feed.fetched_once = False
     feed.feed_address = feed_address
-    feed.next_scheduled_update = datetime.datetime.now()
+    feed.next_scheduled_update = datetime.datetime.utcnow()
     retry_feed = feed
     try:
         feed.save()
     except IntegrityError:
         original_feed = Feed.objects.get(feed_address=feed_address)
         retry_feed = original_feed
-        original_feed.next_scheduled_update = datetime.datetime.now()
+        original_feed.next_scheduled_update = datetime.datetime.utcnow()
         original_feed.has_feed_exception = False
         original_feed.active = True
         original_feed.save()
@@ -111,13 +111,13 @@ def exception_change_feed_link(request):
     feed.fetched_once = False
     feed.feed_link = feed_link
     feed.feed_address = feed_address
-    feed.next_scheduled_update = datetime.datetime.now()
+    feed.next_scheduled_update = datetime.datetime.utcnow()
     try:
         feed.save()
     except IntegrityError:
         original_feed = Feed.objects.get(feed_address=feed_address)
         retry_feed = original_feed
-        original_feed.next_scheduled_update = datetime.datetime.now()
+        original_feed.next_scheduled_update = datetime.datetime.utcnow()
         original_feed.has_page_exception = False
         original_feed.active = True
         original_feed.save()

View file

@@ -111,7 +111,7 @@ class ProcessFeed:
         # logging.debug(u' ---> [%d] Processing %s' % (self.feed.id, self.feed.feed_title))
         self.feed.fetched_once = True
-        self.feed.last_update = datetime.datetime.now()
+        self.feed.last_update = datetime.datetime.utcnow()
         if hasattr(self.fpf, 'status'):
             if self.options['verbose']:
@@ -181,7 +181,7 @@ class ProcessFeed:
         self.feed.feed_title = self.fpf.feed.get('title', self.feed.feed_title)
         self.feed.feed_tagline = self.fpf.feed.get('tagline', self.feed.feed_tagline)
         self.feed.feed_link = self.fpf.feed.get('link', self.feed.feed_link)
-        self.feed.last_update = datetime.datetime.now()
+        self.feed.last_update = datetime.datetime.utcnow()
         guids = []
         for entry in self.fpf.entries:
@@ -199,8 +199,8 @@ class ProcessFeed:
             self.lock.release()
         # Compare new stories to existing stories, adding and updating
-        # start_date = datetime.datetime.now()
-        # end_date = datetime.datetime.now()
+        # start_date = datetime.datetime.utcnow()
+        # end_date = datetime.datetime.utcnow()
         story_guids = []
         for entry in self.fpf.entries:
             story = pre_process_story(entry)
@@ -254,7 +254,7 @@ class Dispatcher:
                            FEED_ERREXC:'exception'}
         self.feed_keys = sorted(self.feed_trans.keys())
         self.num_threads = num_threads
-        self.time_start = datetime.datetime.now()
+        self.time_start = datetime.datetime.utcnow()
         self.workers = []
     def refresh_feed(self, feed_id):
@@ -282,7 +282,7 @@ class Dispatcher:
             ENTRY_SAME: 0,
             ENTRY_ERR: 0
         }
-        start_time = datetime.datetime.now()
+        start_time = datetime.datetime.utcnow()
         feed = self.refresh_feed(feed_id)
@@ -341,7 +341,7 @@ class Dispatcher:
             page_importer.fetch_page()
             feed = self.refresh_feed(feed_id)
-            delta = datetime.datetime.now() - start_time
+            delta = datetime.datetime.utcnow() - start_time
             feed.last_load_time = max(1, delta.seconds)
             feed.fetched_once = True
@@ -359,7 +359,7 @@ class Dispatcher:
         for key, val in ret_entries.items():
             self.entry_stats[key] += val
-        time_taken = datetime.datetime.now() - self.time_start
+        time_taken = datetime.datetime.utcnow() - self.time_start
         history = FeedUpdateHistory(
             number_of_feeds=len(feed_queue),
             seconds_taken=time_taken.seconds
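
A caveat on the duration reads above (delta.seconds and time_taken.seconds): timedelta.seconds is only the seconds component (0-86399), not the total duration, so anything spanning more than a day under-reports. A quick demonstration:

import datetime

d = datetime.timedelta(days=1, seconds=5)
print(d.seconds)                   # 5 -- just the component
print(d.days * 86400 + d.seconds)  # 86405 -- the total (or total_seconds() on Python 2.7+)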

View file

@@ -112,7 +112,7 @@ def _do_timesince(d, chunks, now=None):
         d = datetime.datetime(d.year, d.month, d.day)
     if not now:
-        now = datetime.datetime.now()
+        now = datetime.datetime.utcnow()
     # ignore microsecond part of 'd' since we removed it from 'now'
     delta = now - (d - datetime.timedelta(0, 0, d.microsecond))
@@ -143,7 +143,7 @@ def relative_timeuntil(value):
         (60, lambda n: ungettext('minute', 'minutes', n))
     )
-    now = datetime.datetime.now()
+    now = datetime.datetime.utcnow()
     return _do_timesince(now, chunks, value)
@@ -151,7 +151,7 @@ def format_relative_date(date, future=False):
     if not date or date < datetime.datetime(2010, 1, 1):
         return "Soon"
-    now = datetime.datetime.now()
+    now = datetime.datetime.utcnow()
     diff = abs(now - date)
     if diff < datetime.timedelta(minutes=60):
         minutes = diff.seconds / 60
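
The hunks above touch _do_timesince, which walks a table of (seconds, unit-name) chunks from largest to smallest and reports the first unit that fits the elapsed time. A compressed standalone sketch of that idea (simplified; the real helper pluralizes via ungettext and accepts a now override):

import datetime

CHUNKS = ((86400, 'day'), (3600, 'hour'), (60, 'minute'))

def rough_timesince(delta):
    since = delta.days * 86400 + delta.seconds
    for seconds, name in CHUNKS:
        count = since // seconds
        if count:
            return '%d %s%s' % (count, name, '' if count == 1 else 's')
    return 'moments'

print(rough_timesince(datetime.timedelta(minutes=90)))  # 1 hour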

View file

@@ -15,7 +15,7 @@ graph_config = {
     'page_success.label': 'Page Success',
 }
-last_day = datetime.datetime.now() - datetime.timedelta(days=1)
+last_day = datetime.datetime.utcnow() - datetime.timedelta(days=1)
 metrics = {
     'feed_errors': MFeedFetchHistory.objects(fetch_date__gte=last_day, status_code__nin=[200, 304]).count(),

View file

@@ -16,7 +16,7 @@ graph_config = {
     'feeds_loaded_hour.label': 'Feeds Loaded (Hour)',
 }
-hour_ago = datetime.datetime.now() - datetime.timedelta(minutes=60)
+hour_ago = datetime.datetime.utcnow() - datetime.timedelta(minutes=60)
 averages = dict(avg=Avg('loadtime'), max=Max('loadtime'), min=Min('loadtime'), count=Count('loadtime'))
 hour = FeedLoadtime.objects.filter(date_accessed__gte=hour_ago).aggregate(**averages)

View file

@@ -11,7 +11,7 @@ graph_config = {
     'feeds_fetched.label': 'Fetched feeds last hour',
 }
-hour_ago = datetime.datetime.now() - datetime.timedelta(hours=1)
+hour_ago = datetime.datetime.utcnow() - datetime.timedelta(hours=1)
 metrics = {
     'update_queue': Feed.objects.filter(queued_date__gte=hour_ago).count(),

View file

@@ -14,8 +14,8 @@ graph_config = {
     'daily.label': 'daily',
 }
-last_month = datetime.datetime.now() - datetime.timedelta(days=30)
-last_day = datetime.datetime.now() - datetime.timedelta(minutes=60*24)
+last_month = datetime.datetime.utcnow() - datetime.timedelta(days=30)
+last_day = datetime.datetime.utcnow() - datetime.timedelta(minutes=60*24)
 metrics = {
     'all': User.objects.count(),

View file

@@ -25,7 +25,7 @@ def format_story_link_date__long(date):
 def _extract_date_tuples(date):
     parsed_date = DateFormat(date)
     date_tuple = datetime.datetime.timetuple(date)[:3]
-    today_tuple = datetime.datetime.timetuple(datetime.datetime.now())[:3]
+    today_tuple = datetime.datetime.timetuple(datetime.datetime.utcnow())[:3]
     today = datetime.datetime.today()
     yesterday_tuple = datetime.datetime.timetuple(today - datetime.timedelta(1))[:3]
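
One inconsistency this hunk leaves behind: today_tuple now comes from utcnow(), but the next line still derives yesterday_tuple from datetime.datetime.today(), which is local time (equivalent to now()). Near midnight in either zone the two can land on different calendar days:

import datetime

# today() is local, utcnow() is UTC -- near midnight the date parts differ.
print(datetime.datetime.timetuple(datetime.datetime.utcnow())[:3])
print(datetime.datetime.timetuple(datetime.datetime.today())[:3])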