Fixing infinite mongoengine/mongodb connections by reusing the correct mongo connection (instead of creating new ones every time).

Samuel Clay 2010-10-11 13:19:42 -04:00
parent 50ca857c3e
commit 3a51d79657
2 changed files with 7 additions and 14 deletions
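
For context: after this change, every call site reuses a single per-process handle exposed as settings.MONGODB (imported via from django.conf import settings), instead of opening a fresh mongoengine connection inside each Dispatcher job. The settings module itself is not part of this diff, so the snippet below is only a minimal sketch of how such a shared handle could be built; it reuses the same connect() call that this commit removes from Dispatcher, and the MONGO_DB values shown are hypothetical.

    # settings.py -- illustrative sketch only; this file is not part of the commit.
    import mongoengine

    # Hypothetical connection details; the real values live in the project's settings.
    MONGO_DB = {'NAME': 'newsblur', 'HOST': '127.0.0.1', 'PORT': 27017}

    # Connect once, at import time, and expose the handle for reuse. This mirrors
    # the per-job connect() removed below; feed code now reuses it as
    # settings.MONGODB.stories.find(...) / settings.MONGODB.stories.update(...).
    MONGODB = mongoengine.connection.connect(db=MONGO_DB['NAME'], host=MONGO_DB['HOST'], port=MONGO_DB['PORT'])

With one handle per process, the fetcher no longer needs a db argument threaded through ProcessFeed and Feed.add_update_stories, which is exactly what the two files below remove.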

View file

@@ -1,4 +1,3 @@
-import settings
 import difflib
 import datetime
 import hashlib
@@ -14,6 +13,7 @@ from nltk.collocations import TrigramCollocationFinder, BigramCollocationFinder,
 from django.db import models
 from django.db import IntegrityError
 from django.core.cache import cache
+from django.conf import settings
 from mongoengine.queryset import OperationError
 from utils import json
 from utils import feedfinder
@@ -295,7 +295,7 @@ class Feed(models.Model):
         disp.add_jobs([[self.pk]])
         disp.run_jobs()
 
-    def add_update_stories(self, stories, existing_stories, db):
+    def add_update_stories(self, stories, existing_stories):
         ret_values = {
             ENTRY_NEW:0,
             ENTRY_UPDATED:0,
@@ -367,7 +367,7 @@ class Feed(models.Model):
                 existing_story['story_guid'] = story.get('guid') or story.get('id') or story.get('link')
                 existing_story['story_tags'] = story_tags
                 try:
-                    db.stories.update({'_id': existing_story['_id']}, existing_story)
+                    settings.MONGODB.stories.update({'_id': existing_story['_id']}, existing_story)
                     ret_values[ENTRY_UPDATED] += 1
                     cache.set('updated_feed:%s' % self.id, 1)
                 except (IntegrityError, OperationError):

View file

@@ -17,7 +17,6 @@ import multiprocessing
 import urllib2
 import xml.sax
 import socket
-import mongoengine
 
 # Refresh feed code adapted from Feedjack.
 # http://feedjack.googlecode.com
@@ -81,12 +80,11 @@ class FetchFeed:
         return identity
 
 class ProcessFeed:
-    def __init__(self, feed_id, fpf, db, options):
+    def __init__(self, feed_id, fpf, options):
         self.feed_id = feed_id
         self.options = options
         self.fpf = fpf
         self.lock = multiprocessing.Lock()
-        self.db = db
         self.entry_trans = {
             ENTRY_NEW:'new',
             ENTRY_UPDATED:'updated',
@@ -209,7 +207,7 @@ class ProcessFeed:
             # if story.get('published') > end_date:
             #     end_date = story.get('published')
             story_guids.append(story.get('guid') or story.get('link'))
-        existing_stories = self.db.stories.find({
+        existing_stories = settings.MONGODB.stories.find({
             'story_feed_id': self.feed.pk,
             # 'story_date': {'$gte': start_date},
             'story_guid': {'$in': story_guids}
@@ -219,7 +217,7 @@ class ProcessFeed:
         # | (Q(story_guid__in=story_guids)),
         # story_feed=self.feed
         # ).order_by('-story_date')
-        ret_values = self.feed.add_update_stories(self.fpf.entries, existing_stories, self.db)
+        ret_values = self.feed.add_update_stories(self.fpf.entries, existing_stories)
 
         logging.debug(u' ---> [%-30s] Parsed Feed: %s' % (
             unicode(self.feed)[:30],
@@ -266,12 +264,7 @@ class Dispatcher:
         """
         delta = None
-        MONGO_DB = settings.MONGO_DB
-        db = mongoengine.connection.connect(db=MONGO_DB['NAME'], host=MONGO_DB['HOST'], port=MONGO_DB['PORT'])
         current_process = multiprocessing.current_process()
         identity = "X"
         if current_process._identity:
             identity = current_process._identity[0]
@@ -291,7 +284,7 @@ class Dispatcher:
                 ret_feed, fetched_feed = ffeed.fetch()
 
                 if ((fetched_feed and ret_feed == FEED_OK) or self.options['force']):
-                    pfeed = ProcessFeed(feed_id, fetched_feed, db, self.options)
+                    pfeed = ProcessFeed(feed_id, fetched_feed, self.options)
                     ret_feed, ret_entries = pfeed.process()
 
                     feed = self.refresh_feed(feed_id)