Automatic syncing of new unreads in sync service.

dosiecki 2014-06-05 04:04:14 -07:00
parent 3b5360f2ec
commit de1b15404e
6 changed files with 133 additions and 3 deletions

BlurDatabaseHelper.java

@@ -4,9 +4,20 @@ import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.text.TextUtils;
import com.newsblur.domain.Classifier;
import com.newsblur.domain.Comment;
import com.newsblur.domain.Feed;
import com.newsblur.domain.FeedResult;
import com.newsblur.domain.Reply;
import com.newsblur.domain.SocialFeed;
import com.newsblur.domain.Story;
import com.newsblur.domain.UserProfile;
import com.newsblur.network.domain.StoriesResponse;
import com.newsblur.util.AppConstants;
import java.util.ArrayList;
import java.util.List;
/**
@@ -48,6 +59,7 @@ public class BlurDatabaseHelper {
}
private void bulkInsertValues(String table, List<ContentValues> valuesList) {
if (valuesList.size() < 1) return;
dbRW.beginTransaction();
try {
for(ContentValues values: valuesList) {
@@ -76,4 +88,63 @@ public class BlurDatabaseHelper {
dbRW.delete(DatabaseConstants.STARRED_STORY_COUNT_TABLE, null, null);
dbRW.insert(DatabaseConstants.STARRED_STORY_COUNT_TABLE, null, values);
}
public List<String> getStoryHashesForFeed(String feedId) {
String q = "SELECT " + DatabaseConstants.STORY_HASH +
" FROM " + DatabaseConstants.STORY_TABLE +
" WHERE " + DatabaseConstants.STORY_FEED_ID + " = ?";
Cursor c = dbRO.rawQuery(q, new String[]{feedId});
List<String> hashes = new ArrayList<String>(c.getCount());
while (c.moveToNext()) {
hashes.add(c.getString(c.getColumnIndexOrThrow(DatabaseConstants.STORY_HASH)));
}
c.close();
return hashes;
}
public void insertStories(StoriesResponse apiResponse) {
// handle users
List<ContentValues> userValues = new ArrayList<ContentValues>(apiResponse.users.length);
for (UserProfile user : apiResponse.users) {
userValues.add(user.getValues());
}
bulkInsertValues(DatabaseConstants.USER_TABLE, userValues);
// TODO: StoriesResponse can only handle classifiers from /reader/feed, not /reader/river_stories,
// so we can't yet make a generic digester
// handle story content
List<ContentValues> storyValues = new ArrayList<ContentValues>(apiResponse.stories.length);
for (Story story : apiResponse.stories) {
storyValues.add(story.getValues());
}
bulkInsertValues(DatabaseConstants.STORY_TABLE, storyValues);
// handle comments
List<ContentValues> commentValues = new ArrayList<ContentValues>();
List<ContentValues> replyValues = new ArrayList<ContentValues>();
for (Story story : apiResponse.stories) {
for (Comment comment : story.publicComments) {
comment.storyId = story.id;
comment.id = TextUtils.concat(story.id, story.feedId, comment.userId).toString();
commentValues.add(comment.getValues());
for (Reply reply : comment.replies) {
reply.commentId = comment.id;
replyValues.add(reply.getValues());
}
}
for (Comment comment : story.friendsComments) {
comment.storyId = story.id;
comment.id = TextUtils.concat(story.id, story.feedId, comment.userId).toString();
commentValues.add(comment.getValues());
for (Reply reply : comment.replies) {
reply.commentId = comment.id;
replyValues.add(reply.getValues());
}
}
}
bulkInsertValues(DatabaseConstants.COMMENT_TABLE, commentValues);
bulkInsertValues(DatabaseConstants.REPLY_TABLE, replyValues);
}
}
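Roughly how these two new helpers fit together: the sync service (further down in this commit) asks the DB which hashes it already has, fetches only the missing stories by hash, and hands the response back for insertion. A minimal sketch; the feed ID, variable names, and the source of serverHashes are made up for illustration:

    // illustrative only: "42" and serverHashes are placeholders; serverHashes would come
    // from the unread-hashes endpoint (URL_UNREAD_HASHES) for that feed
    List<String> existingHashes = dbHelper.getStoryHashesForFeed("42");
    List<String> missingHashes = new ArrayList<String>();
    for (String hash : serverHashes) {
        if (!existingHashes.contains(hash)) missingHashes.add(hash); // skip stories already stored
    }
    StoriesResponse response = apiManager.getStoriesByHash(missingHashes);
    if (!response.isError()) dbHelper.insertStories(response);       // stories, comments and replies in one pass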

APIConstants.java

@@ -46,6 +46,7 @@ public class APIConstants {
public static final String URL_UNREAD_HASHES = NEWSBLUR_URL + "/reader/unread_story_hashes";
public static final String PARAMETER_FEEDS = "f";
public static final String PARAMETER_H = "h";
public static final String PARAMETER_PASSWORD = "password";
public static final String PARAMETER_USER_ID = "user_id";
public static final String PARAMETER_USERNAME = "username";

APIManager.java

@@ -251,6 +251,15 @@ public class APIManager {
return storiesResponse;
}
public StoriesResponse getStoriesByHash(List<String> storyHashes) {
ValueMultimap values = new ValueMultimap();
for (String hash : storyHashes) {
values.put(APIConstants.PARAMETER_H, hash);
}
APIResponse response = get(APIConstants.URL_RIVER_STORIES, values);
return (StoriesResponse) response.getResponse(gson, StoriesResponse.class);
}
public StoriesResponse getStoriesForFeeds(String[] feedIds, int pageNumber, StoryOrder order, ReadFilter filter) {
final ValueMultimap values = new ValueMultimap();
for (String feedId : feedIds) {

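Each hash is added to the ValueMultimap under the same key (PARAMETER_H, i.e. "h"), so the whole batch presumably goes out as a single request with the parameter repeated. A minimal sketch with made-up hash values, assuming URL_RIVER_STORIES resolves to /reader/river_stories:

    List<String> hashes = Arrays.asList("123:ab01cd", "123:9f2e77");  // story hashes are illustrative
    StoriesResponse response = apiManager.getStoriesByHash(hashes);
    // assumed request shape:
    //   GET <NEWSBLUR_URL>/reader/river_stories?h=123:ab01cd&h=123:9f2e77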
BootReceiver.java

@@ -26,10 +26,14 @@ public class BootReceiver extends BroadcastReceiver {
public static void scheduleSyncService(Context context) {
Log.d(BootReceiver.class.getName(), "scheduling sync service");
// wake up to check if a sync is needed about twice as often as one actually is, to ensure
// we never fall more than about half a cycle behind.
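// e.g. with AUTO_SYNC_TIME_MILLIS at 10 minutes (see AppConstants below), the alarm fires every
// ~5 minutes, so a due sync starts at most about half a cycle late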
long interval = (AppConstants.AUTO_SYNC_TIME_MILLIS / 2L);
AlarmManager alarmManager = (AlarmManager) context.getSystemService(Context.ALARM_SERVICE);
Intent i = new Intent(context, ServiceScheduleReceiver.class);
PendingIntent pi = PendingIntent.getBroadcast(context, 0, i, PendingIntent.FLAG_CANCEL_CURRENT);
alarmManager.setInexactRepeating(AlarmManager.ELAPSED_REALTIME_WAKEUP, AppConstants.AUTO_SYNC_TIME_MILLIS, AppConstants.AUTO_SYNC_TIME_MILLIS, pi);
alarmManager.setInexactRepeating(AlarmManager.ELAPSED_REALTIME_WAKEUP, interval, interval, pi);
}
}

NBSyncService.java

@@ -15,12 +15,16 @@ import com.newsblur.database.DatabaseConstants;
import com.newsblur.domain.SocialFeed;
import com.newsblur.network.APIManager;
import com.newsblur.network.domain.FeedFolderResponse;
import com.newsblur.network.domain.StoriesResponse;
import com.newsblur.network.domain.UnreadStoryHashesResponse;
import com.newsblur.util.AppConstants;
import com.newsblur.util.PrefsUtils;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map.Entry;
import java.util.Set;
/**
* A background service to handle synchronisation with the NB servers.
@@ -45,6 +49,8 @@ public class NBSyncService extends Service {
private APIManager apiManager;
private BlurDatabaseHelper dbHelper;
private Set<String> storyHashQueue;
@Override
public void onCreate() {
super.onCreate();
@@ -52,6 +58,7 @@ public class NBSyncService extends Service {
apiManager = new APIManager(this);
PrefsUtils.checkForUpgrade(this);
dbHelper = new BlurDatabaseHelper(this);
storyHashQueue = new HashSet<String>();
}
/**
@@ -108,6 +115,10 @@ public class NBSyncService extends Service {
SyncRunning = false;
NbActivity.updateAllActivities();
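// second step of the sync: fetch full content for any new unread story hashes queued up in the first step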
syncUnreads();
NbActivity.updateAllActivities();
} catch (Exception e) {
Log.e(this.getClass().getName(), "Sync error.", e);
} finally {
@@ -195,9 +206,40 @@ public class NBSyncService extends Service {
// ignore unreads from orphaned feeds
if (debugFeedIds.contains(feedId)) {
Log.d(this.getClass().getName(), "feed " + feedId + " has unreads: " + entry.getValue().length);
// only fetch the reported unreads if we don't already have them
List<String> existingHashes = dbHelper.getStoryHashesForFeed(feedId);
Log.d(this.getClass().getName(), "feed " + feedId + " originally had stories: " + existingHashes.size());
for (String newHash : entry.getValue()) {
if (!existingHashes.contains(newHash)) {
Log.d(this.getClass().getName(), "found new unread hash: " + newHash);
storyHashQueue.add(newHash);
}
}
}
}
}
/**
* The second step of syncing: fetch new unread stories.
*/
private void syncUnreads() {
unreadsyncloop: while (storyHashQueue.size() > 0) {
List<String> hashBatch = new ArrayList<String>(AppConstants.UNREAD_FETCH_BATCH_SIZE);
batchloop: for (String hash : storyHashQueue) {
hashBatch.add(hash);
if (hashBatch.size() >= AppConstants.UNREAD_FETCH_BATCH_SIZE) break batchloop;
}
for (String hash : hashBatch) {
storyHashQueue.remove(hash);
}
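// e.g. with UNREAD_FETCH_BATCH_SIZE at 5, a dozen queued hashes would be fetched in three batches of 5, 5 and 2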
Log.d(this.getClass().getName(), "fetching batch of unreads with size: " + hashBatch.size());
StoriesResponse response = apiManager.getStoriesByHash(hashBatch);
if (response.isError()) {
Log.e(this.getClass().getName(), "error fetching unreads batch, abandoning sync.");
break unreadsyncloop;
}
dbHelper.insertStories(response);
}
}
public static boolean isSyncRunning() {

AppConstants.java

@@ -30,7 +30,7 @@ public class AppConstants {
public static final String LAST_SYNC_TIME = "LAST_SYNC_TIME";
// how long to wait before auto-syncing the feed/folder list
public static final long AUTO_SYNC_TIME_MILLIS = 5L * 60L * 1000L;
public static final long AUTO_SYNC_TIME_MILLIS = 10L * 60L * 1000L;
// how many total attempts to make at a single API call
public static final int MAX_API_TRIES = 3;
@@ -46,5 +46,8 @@ public class AppConstants {
public static final int READING_STORY_PRELOAD = 5;
// max stories to keep in the DB unless actively being read (i.e. for offline reading)
public static final int MAX_STORIES_STORED = 100;
public static final int MAX_STORIES_STORED = 1000;
// how many unread stories to fetch via hash at a time
public static final int UNREAD_FETCH_BATCH_SIZE = 5;
}