Mirror of https://github.com/samuelclay/NewsBlur.git (synced 2025-08-05 16:58:59 +00:00)

Commit 5c80b15e89 (parent 8508bdc49e): "Preserving order on searches. Also using elasticsearch's paging."

3 changed files with 14 additions and 26 deletions.
|
```diff
@@ -530,7 +530,7 @@ def load_single_feed(request, feed_id):
         if user.profile.is_premium:
             user_search = MUserSearch.get_user(user.pk)
             user_search.touch_search_date()
-            stories = feed.find_stories(query, offset=offset, limit=limit)
+            stories = feed.find_stories(query, order=order, offset=offset, limit=limit)
         else:
             stories = []
             message = "You must be a premium subscriber to search."
```
```diff
@@ -925,7 +925,7 @@ def load_river_stories__redis(request):
         usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids,
                                                    read_filter='all')
         feed_ids = [sub.feed_id for sub in usersubs]
-        stories = Feed.find_feed_stories(feed_ids, query, offset=offset, limit=limit)
+        stories = Feed.find_feed_stories(feed_ids, query, order=order, offset=offset, limit=limit)
         mstories = stories
         unread_feed_story_hashes = UserSubscription.story_hashes(user.pk, feed_ids=feed_ids,
                                                                  read_filter="unread", order=order,
```
|
|
```diff
@@ -1193,35 +1193,22 @@ class Feed(models.Model):
         return stories

     @classmethod
-    def find_feed_stories(cls, feed_ids, query, offset=0, limit=25):
-        story_ids = SearchStory.query(feed_ids=feed_ids, query=query)
+    def find_feed_stories(cls, feed_ids, query, order="newest", offset=0, limit=25):
+        story_ids = SearchStory.query(feed_ids=feed_ids, query=query, order=order,
+                                      offset=offset, limit=limit)
         stories_db = MStory.objects(
             story_hash__in=story_ids
-        ).order_by('-story_date')[offset:offset+limit]
-
-        # stories_db = MStory.objects(
-        #     Q(story_feed_id__in=feed_ids) &
-        #     (Q(story_title__icontains=query) |
-        #      Q(story_author_name__icontains=query) |
-        #      Q(story_tags__icontains=query))
-        # ).order_by('-story_date')[offset:offset+limit]
+        ).order_by('-story_date' if order == "newest" else 'story_date')[offset:offset+limit]
         stories = cls.format_stories(stories_db)

         return stories

-    def find_stories(self, query, offset=0, limit=25):
-        story_ids = SearchStory.query(feed_ids=[self.pk], query=query)
+    def find_stories(self, query, order="newest", offset=0, limit=25):
+        story_ids = SearchStory.query(feed_ids=[self.pk], query=query, order=order,
+                                      offset=offset, limit=limit)
         stories_db = MStory.objects(
             story_hash__in=story_ids
-        ).order_by('-story_date')[offset:offset+limit]
-
-        # stories_db = MStory.objects(
-        #     Q(story_feed_id=self.pk) &
-        #     (Q(story_title__icontains=query) |
-        #      Q(story_author_name__icontains=query) |
-        #      Q(story_tags__icontains=query))
-        # ).order_by('-story_date')[offset:offset+limit]
-
+        ).order_by('-story_date' if order == "newest" else 'story_date')[offset:offset+limit]
         stories = self.format_stories(stories_db, self.pk)

         return stories
```
|
```diff
@@ -156,14 +156,15 @@ class SearchStory:
         cls.ES.delete("%s-index" % cls.name, "%s-type" % cls.name, story_hash)

     @classmethod
-    def query(cls, feed_ids, query):
+    def query(cls, feed_ids, query, order, offset, limit):
         cls.ES.indices.refresh()

+        sort = "date:desc" if order == "newest" else "date:asc"
         string_q = pyes.query.StringQuery(query, default_operator="AND")
         feed_q = pyes.query.TermsQuery('feed_id', feed_ids)
         q = pyes.query.BoolQuery(must=[string_q, feed_q])
         results = cls.ES.search(q, indices=cls.index_name(), doc_types=[cls.type_name()],
-                                partial_fields={})
+                                partial_fields={}, sort=sort, start=offset, size=offset+limit)
         logging.info("~FGSearch ~FCstories~FG for: ~SB%s (across %s feed%s)" %
                      (query, len(feed_ids), 's' if len(feed_ids) != 1 else ''))
```
(End of diff.)