Fix DB optimization crashes, remove O(n) search tagging, instrument live memory.

dosiecki 2016-01-29 03:13:05 -08:00
parent 73662bc912
commit 575e2d50f9
9 changed files with 53 additions and 58 deletions

View file

@@ -206,11 +206,10 @@ public abstract class ItemsList extends NbActivity implements StoryOrderChangedL
if (overlayStatusText != null) {
String syncStatus = NBSyncService.getSyncStatusMessage(this, true);
if (AppConstants.VERBOSE_LOG) {
if (syncStatus == null) syncStatus = "";
syncStatus = syncStatus + UIUtils.getMemoryUsageDebug(this);
}
if (syncStatus != null) {
if (AppConstants.VERBOSE_LOG) {
syncStatus = syncStatus + UIUtils.getMemoryUsageDebug(this);
}
overlayStatusText.setText(syncStatus);
overlayStatusText.setVisibility(View.VISIBLE);
} else {

View file

@@ -169,11 +169,10 @@ public class Main extends NbActivity implements StateChangedListener, SwipeRefre
if (overlayStatusText != null) {
String syncStatus = NBSyncService.getSyncStatusMessage(this, false);
if (AppConstants.VERBOSE_LOG) {
if (syncStatus == null) syncStatus = "";
syncStatus = syncStatus + UIUtils.getMemoryUsageDebug(this);
}
if (syncStatus != null) {
if (AppConstants.VERBOSE_LOG) {
syncStatus = syncStatus + UIUtils.getMemoryUsageDebug(this);
}
overlayStatusText.setText(syncStatus);
overlayStatusText.setVisibility(View.VISIBLE);
} else {

View file

@@ -395,6 +395,9 @@ public abstract class Reading extends NbActivity implements OnPageChangeListener
if (overlayStatusText != null) {
String syncStatus = NBSyncService.getSyncStatusMessage(this, true);
if (syncStatus != null) {
if (AppConstants.VERBOSE_LOG) {
syncStatus = syncStatus + UIUtils.getMemoryUsageDebug(this);
}
overlayStatusText.setText(syncStatus);
overlayStatusText.setVisibility(View.VISIBLE);
} else {
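
All three activity hunks above make the same change: when AppConstants.VERBOSE_LOG is on, the sync status overlay is suffixed with a live memory readout, and the null guard is reordered so the readout shows even when there is no sync message. UIUtils.getMemoryUsageDebug() itself is not part of this diff; a minimal sketch of what such a helper might look like, assuming it reports VM and native heap usage (only the method name is taken from the call sites, the body is a guess):

import android.content.Context;
import android.os.Debug;

// Hypothetical stand-in for UIUtils.getMemoryUsageDebug(), which the hunks
// above call but do not show. Reports VM and native heap usage in KB.
public class MemoryDebugSketch {
    public static String getMemoryUsageDebug(Context context) {
        // context is unused here; the parameter just mirrors the call sites
        Runtime rt = Runtime.getRuntime();
        long vmKb = (rt.totalMemory() - rt.freeMemory()) / 1024L;
        long nativeKb = Debug.getNativeHeapAllocatedSize() / 1024L;
        return " [mem: " + vmKb + "KB vm / " + nativeKb + "KB native]";
    }
}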

View file

@@ -266,7 +266,10 @@ public class BlurDatabaseHelper {
StateFilter intelState = PrefsUtils.getStateFilter(context);
long startTime = System.currentTimeMillis();
synchronized (RW_MUTEX) {
dbRW.beginTransactionNonExclusive();
// do not attempt to use beginTransactionNonExclusive() to reduce lock time for this very heavy set
// of calls. most versions of Android incorrectly implement the underlying SQLite calls and will
// result in crashes that poison the DB beyond repair
dbRW.beginTransaction();
try {
// to insert classifiers, we need to determine the feed ID of the stories in this
@@ -330,7 +333,7 @@ public class BlurDatabaseHelper {
for (ContentValues values : classifierValues) {
values.put(DatabaseConstants.CLASSIFIER_ID, classifierFeedId);
}
synchronized (RW_MUTEX) {dbRW.delete(DatabaseConstants.CLASSIFIER_TABLE, DatabaseConstants.CLASSIFIER_ID + " = ?", new String[] { classifierFeedId });}
dbRW.delete(DatabaseConstants.CLASSIFIER_TABLE, DatabaseConstants.CLASSIFIER_ID + " = ?", new String[] { classifierFeedId });
bulkInsertValuesExtSync(DatabaseConstants.CLASSIFIER_TABLE, classifierValues);
}
}
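
The two hunks above are the crash fix from the commit subject. The bulk insert switches from beginTransactionNonExclusive() to a plain exclusive transaction, since many Android builds mishandle the non-exclusive SQLite mode, and the classifier delete drops its own synchronized block because the whole transaction already runs under RW_MUTEX. A minimal sketch of the resulting pattern, with placeholder names (db, table, rows) rather than the actual NewsBlur method:

import android.content.ContentValues;
import android.database.sqlite.SQLiteDatabase;
import java.util.List;

public class TransactionSketch {
    // Exclusive transaction, always closed in a finally block so a thrown
    // exception can never leave the database locked. Illustrative only.
    static void bulkWrite(SQLiteDatabase db, String table, List<ContentValues> rows) {
        db.beginTransaction(); // deliberately not beginTransactionNonExclusive()
        try {
            for (ContentValues row : rows) {
                db.insertWithOnConflict(table, null, row, SQLiteDatabase.CONFLICT_REPLACE);
            }
            db.setTransactionSuccessful(); // commit
        } finally {
            db.endTransaction(); // rolls back if not marked successful
        }
    }
}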
@@ -656,10 +659,10 @@ public class BlurDatabaseHelper {
*/
public int getLocalUnreadCount(FeedSet fs, StateFilter stateFilter) {
StringBuilder sel = new StringBuilder();
String[] selArgs = null;
selArgs = getLocalStorySelectionAndArgs(sel, fs, stateFilter, ReadFilter.UNREAD);
ArrayList<String> selArgs = new ArrayList<String>();
getLocalStorySelectionAndArgs(sel, selArgs, fs, stateFilter, ReadFilter.UNREAD);
Cursor c = dbRO.rawQuery(sel.toString(), selArgs);
Cursor c = dbRO.rawQuery(sel.toString(), selArgs.toArray(new String[selArgs.size()]));
int count = c.getCount();
c.close();
return count;
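
This hunk shows the shape of the selection-args refactor used throughout the commit: the old code reassigned a String[] local and depended on the helper's return value, while the new code hands the helper a mutable ArrayList and converts to String[] only at query time. The distinction matters because reassigning an array parameter inside a Java method is invisible to the caller. A self-contained illustration with invented names:

import java.util.ArrayList;
import java.util.List;

public class OutParamSketch {
    static void reassign(String[] args) { args = new String[] {"lost"}; } // caller never sees this
    static void mutate(List<String> args) { args.add("kept"); }           // caller does see this

    public static void main(String[] unused) {
        String[] arr = new String[] {};
        reassign(arr);
        List<String> list = new ArrayList<String>();
        mutate(list);
        System.out.println(arr.length + " vs " + list); // prints "0 vs [kept]"
    }
}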
@@ -783,15 +786,6 @@ public class BlurDatabaseHelper {
synchronized (RW_MUTEX) {dbRW.insertOrThrow(DatabaseConstants.STORY_TEXT_TABLE, null, values);}
}
/**
* Clears the search_hit flag for all stories.
*/
public void clearReadingSession() {
ContentValues values = new ContentValues();
values.put(DatabaseConstants.STORY_SEARCHIT, false);
synchronized (RW_MUTEX) {dbRW.update(DatabaseConstants.STORY_TABLE, values, null, null);}
}
public Loader<Cursor> getSocialFeedsLoader(final StateFilter stateFilter) {
return new QueryCursorLoader(context) {
protected Cursor createCursor() {return getSocialFeedsCursor(stateFilter, cancellationSignal);}
@@ -901,59 +895,57 @@ public class BlurDatabaseHelper {
// a selection filter that will be used to pull active story hashes from the stories table into the reading session table
StringBuilder sel = new StringBuilder();
// any selection args that need to be used within the inner select statement
String[] selArgs = null;
ArrayList<String> selArgs = new ArrayList<String>();
selArgs = getLocalStorySelectionAndArgs(sel, fs, stateFilter, readFilter);
getLocalStorySelectionAndArgs(sel, selArgs, fs, stateFilter, readFilter);
// use the inner select statement to push the active hashes into the session table
StringBuilder q = new StringBuilder("INSERT INTO " + DatabaseConstants.READING_SESSION_TABLE);
q.append(" (" + DatabaseConstants.READING_SESSION_STORY_HASH + ") ");
q.append(sel);
Log.d(this.getClass().getName(), String.format("DB rawQuery: '%s' with args: %s", q.toString(), java.util.Arrays.toString(selArgs)));
dbRW.execSQL(q.toString(), selArgs);
dbRW.execSQL(q.toString(), selArgs.toArray(new String[selArgs.size()]));
}
/**
* Gets hashes of already-fetched stories that satisfy the given FeedSet and filters. Can be used
* either to populate a reading session or to count local unreads.
*/
private String[] getLocalStorySelectionAndArgs(StringBuilder sel, FeedSet fs, StateFilter stateFilter, ReadFilter readFilter) {
String[] selArgs = new String[]{};
private void getLocalStorySelectionAndArgs(StringBuilder sel, List<String> selArgs, FeedSet fs, StateFilter stateFilter, ReadFilter readFilter) {
sel.append("SELECT " + DatabaseConstants.STORY_HASH);
if (fs.getSingleFeed() != null) {
sel.append(" FROM " + DatabaseConstants.STORY_TABLE);
sel.append(" WHERE " + DatabaseConstants.STORY_FEED_ID + " = ?");
DatabaseConstants.appendStorySelection(sel, readFilter, stateFilter, (fs.getSearchQuery() != null));
selArgs = new String[]{fs.getSingleFeed()};
selArgs.add(fs.getSingleFeed());
DatabaseConstants.appendStorySelection(sel, selArgs, readFilter, stateFilter, fs.getSearchQuery());
} else if (fs.getMultipleFeeds() != null) {
sel.append(" FROM " + DatabaseConstants.STORY_TABLE);
sel.append(" WHERE " + DatabaseConstants.STORY_TABLE + "." + DatabaseConstants.STORY_FEED_ID + " IN ( ");
sel.append(TextUtils.join(",", fs.getMultipleFeeds()) + ")");
DatabaseConstants.appendStorySelection(sel, readFilter, stateFilter, (fs.getSearchQuery() != null));
DatabaseConstants.appendStorySelection(sel, selArgs, readFilter, stateFilter, fs.getSearchQuery());
} else if (fs.getSingleSocialFeed() != null) {
sel.append(" FROM " + DatabaseConstants.SOCIALFEED_STORY_MAP_TABLE);
sel.append(DatabaseConstants.JOIN_STORIES_ON_SOCIALFEED_MAP);
sel.append(" WHERE " + DatabaseConstants.SOCIALFEED_STORY_MAP_TABLE + "." + DatabaseConstants.SOCIALFEED_STORY_USER_ID + " = ? ");
DatabaseConstants.appendStorySelection(sel, readFilter, stateFilter, (fs.getSearchQuery() != null));
selArgs = new String[]{fs.getSingleSocialFeed().getKey()};
selArgs.add(fs.getSingleSocialFeed().getKey());
DatabaseConstants.appendStorySelection(sel, selArgs, readFilter, stateFilter, fs.getSearchQuery());
} else if (fs.isAllNormal()) {
sel.append(" FROM " + DatabaseConstants.STORY_TABLE);
sel.append(" WHERE 1");
DatabaseConstants.appendStorySelection(sel, readFilter, stateFilter, (fs.getSearchQuery() != null));
DatabaseConstants.appendStorySelection(sel, selArgs, readFilter, stateFilter, fs.getSearchQuery());
} else if (fs.isAllSocial()) {
sel.append(" FROM " + DatabaseConstants.SOCIALFEED_STORY_MAP_TABLE);
sel.append(DatabaseConstants.JOIN_STORIES_ON_SOCIALFEED_MAP);
DatabaseConstants.appendStorySelection(sel, readFilter, stateFilter, (fs.getSearchQuery() != null));
DatabaseConstants.appendStorySelection(sel, selArgs, readFilter, stateFilter, fs.getSearchQuery());
} else if (fs.isAllRead()) {
@@ -964,20 +956,17 @@ public class BlurDatabaseHelper {
sel.append(" FROM " + DatabaseConstants.STORY_TABLE);
sel.append(" WHERE (" + DatabaseConstants.STORY_STARRED + " = 1)");
if (fs.getSearchQuery() != null) {
sel.append(" AND (" + DatabaseConstants.STORY_TABLE + "." + DatabaseConstants.STORY_SEARCHIT + " = 1)");
}
DatabaseConstants.appendStorySelection(sel, selArgs, ReadFilter.ALL, StateFilter.ALL, fs.getSearchQuery());
} else if (fs.isGlobalShared()) {
sel.append(" FROM " + DatabaseConstants.SOCIALFEED_STORY_MAP_TABLE);
sel.append(DatabaseConstants.JOIN_STORIES_ON_SOCIALFEED_MAP);
DatabaseConstants.appendStorySelection(sel, readFilter, stateFilter, (fs.getSearchQuery() != null));
DatabaseConstants.appendStorySelection(sel, selArgs, readFilter, stateFilter, fs.getSearchQuery());
} else {
throw new IllegalStateException("Asked to get stories for FeedSet of unknown type.");
}
return selArgs;
}
public void clearClassifiersForFeed(String feedId) {
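
The session hunk above pushes the active story hashes into the reading_session table with a single INSERT ... SELECT instead of round-tripping hashes through the app, and every caller-supplied value now travels in selArgs. A sketch of the statement being assembled; reading_session, session_story_hash, and story_hash are the values shown in DatabaseConstants below, while the story table name, feed id column, and bound value are stand-ins:

import java.util.ArrayList;
import java.util.List;

public class SessionInsertSketch {
    public static void main(String[] args) {
        // inner select: which story hashes are active (the feed id is an invented value)
        List<String> selArgs = new ArrayList<String>();
        StringBuilder sel = new StringBuilder("SELECT story_hash FROM story WHERE feed_id = ?");
        selArgs.add("42");
        // outer insert: push them into the session table in one statement
        StringBuilder q = new StringBuilder("INSERT INTO reading_session");
        q.append(" (session_story_hash) ").append(sel);
        // the diff runs dbRW.execSQL(q.toString(), selArgs.toArray(new String[selArgs.size()]))
        System.out.println(q + "  args=" + selArgs);
    }
}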

View file

@@ -96,7 +96,7 @@ public class DatabaseConstants {
public static final String STORY_HASH = "story_hash";
public static final String STORY_IMAGE_URLS = "image_urls";
public static final String STORY_LAST_READ_DATE = "last_read_date";
public static final String STORY_SEARCHIT = "search_hit";
public static final String STORY_SEARCH_HIT = "search_hit";
public static final String READING_SESSION_TABLE = "reading_session";
public static final String READING_SESSION_STORY_HASH = "session_story_hash";
@@ -237,7 +237,7 @@ public class DatabaseConstants {
STORY_TITLE + TEXT + ", " +
STORY_IMAGE_URLS + TEXT + ", " +
STORY_LAST_READ_DATE + INTEGER + ", " +
STORY_SEARCHIT + INTEGER + " DEFAULT 0" +
STORY_SEARCH_HIT + TEXT +
")";
static final String READING_SESSION_SQL = "CREATE TABLE " + READING_SESSION_TABLE + " (" +
@@ -303,7 +303,7 @@ public class DatabaseConstants {
STORY_INTELLIGENCE_AUTHORS, STORY_INTELLIGENCE_FEED, STORY_INTELLIGENCE_TAGS, STORY_INTELLIGENCE_TOTAL,
STORY_INTELLIGENCE_TITLE, STORY_PERMALINK, STORY_READ, STORY_STARRED, STORY_STARRED_DATE, STORY_TAGS, STORY_TITLE,
STORY_SOCIAL_USER_ID, STORY_SOURCE_USER_ID, STORY_SHARED_USER_IDS, STORY_FRIEND_USER_IDS, STORY_HASH,
STORY_LAST_READ_DATE, STORY_SEARCHIT,
STORY_LAST_READ_DATE,
};
private static final String STORY_COLUMNS =
@@ -330,7 +330,7 @@ public class DatabaseConstants {
* Appends to the given story query any selection clauses required to satisfy the specified
* filter parameters.
*/
public static void appendStorySelection(StringBuilder q, ReadFilter readFilter, StateFilter stateFilter, boolean requireQueryHit) {
public static void appendStorySelection(StringBuilder q, List<String> selArgs, ReadFilter readFilter, StateFilter stateFilter, String requireQueryHit) {
if (readFilter == ReadFilter.UNREAD) {
q.append(" AND (" + STORY_READ + " = 0)");
}
@@ -340,8 +340,9 @@ public class DatabaseConstants {
q.append(" AND " + stateSelection);
}
if (requireQueryHit) {
q.append(" AND (" + STORY_TABLE + "." + STORY_SEARCHIT + " = 1)");
if (requireQueryHit != null) {
q.append(" AND (" + STORY_TABLE + "." + STORY_SEARCH_HIT + " = ?)");
selArgs.add(requireQueryHit);
}
}
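
Together with the schema change above (search_hit goes from an INTEGER flag with DEFAULT 0 to a TEXT column holding the query), appendStorySelection() stops testing a global boolean and instead binds the caller's query string as a selection arg. This is the "remove O(n) search tagging" half of the commit subject: nothing has to sweep a flag off every story row after a search, because rows tagged for one query simply never match another. A sketch of the new clause under invented surroundings:

import java.util.ArrayList;
import java.util.List;

public class SearchClauseSketch {
    // Mirrors the new appendStorySelection() contract: the query string is
    // bound per query, not tested as a global boolean. Table name invented.
    static void appendSearchHit(StringBuilder q, List<String> selArgs, String searchQuery) {
        if (searchQuery != null) {
            q.append(" AND (story.search_hit = ?)");
            selArgs.add(searchQuery);
        }
    }

    public static void main(String[] args) {
        StringBuilder q = new StringBuilder("SELECT story_hash FROM story WHERE 1");
        List<String> selArgs = new ArrayList<String>();
        appendSearchHit(q, selArgs, "newsblur"); // old clause was: AND (search_hit = 1)
        System.out.println(q + "  args=" + selArgs);
    }
}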

View file

@@ -89,7 +89,8 @@ public class Story implements Serializable {
// not yet vended by the API, but tracked locally and fudged (see SyncService) for remote stories
public long lastReadTimestamp = 0L;
public boolean isSearchHit = false;
// non-API; set only once, when the story is pushed to the DB, so queries can select on it
public String searchHit = "";
public ContentValues getValues() {
final ContentValues values = new ContentValues();
@@ -118,7 +119,7 @@ public class Story implements Serializable {
values.put(DatabaseConstants.STORY_HASH, storyHash);
values.put(DatabaseConstants.STORY_IMAGE_URLS, TextUtils.join(",", imageUrls));
values.put(DatabaseConstants.STORY_LAST_READ_DATE, lastReadTimestamp);
values.put(DatabaseConstants.STORY_SEARCHIT, isSearchHit);
values.put(DatabaseConstants.STORY_SEARCH_HIT, searchHit);
return values;
}
@@ -149,7 +150,6 @@ public class Story implements Serializable {
story.id = cursor.getString(cursor.getColumnIndex(DatabaseConstants.STORY_ID));
story.storyHash = cursor.getString(cursor.getColumnIndex(DatabaseConstants.STORY_HASH));
story.lastReadTimestamp = cursor.getLong(cursor.getColumnIndex(DatabaseConstants.STORY_LAST_READ_DATE));
story.isSearchHit = cursor.getInt(cursor.getColumnIndex(DatabaseConstants.STORY_SEARCHIT)) > 0;
return story;
}

View file

@@ -695,7 +695,7 @@ public class NBSyncService extends Service {
// If this set of stories was found in response to the active search query, note
// them as such in the DB so the UI can filter for them
for (Story story : apiResponse.stories) {
story.isSearchHit = true;
story.searchHit = fs.getSearchQuery();
}
}
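
This one-line change is the write half of the search_hit rework: together with the Story hunk above, each story fetched for a search is stamped with the query it matched, getValues() persists that string into the search_hit column, and fromCursor() no longer reads it back since it is only consulted inside SQL. A toy model of the tagging step; Story here is a stand-in class, not the one from the diff:

import java.util.ArrayList;
import java.util.List;

public class SearchTagSketch {
    static class Story { String storyHash; String searchHit = ""; }

    public static void main(String[] args) {
        List<Story> fetched = new ArrayList<Story>();
        Story s = new Story();
        s.storyHash = "feedid:hash";
        fetched.add(s);
        String activeQuery = "android"; // fs.getSearchQuery() in the diff
        for (Story story : fetched) {
            story.searchHit = activeQuery; // was: story.isSearchHit = true
        }
        System.out.println(fetched.get(0).storyHash + " tagged '" + activeQuery + "'");
    }
}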

View file

@@ -50,24 +50,26 @@ public class UnreadsService extends SubService {
// values are the actual story hash, which will be extracted once we have processed
// all hashes.
NavigableMap<String,String> sortingMap = new TreeMap<String,String>();
Log.d(this.getClass().getName(), "getting unreads from API");
UnreadStoryHashesResponse unreadHashes = parent.apiManager.getUnreadStoryHashes();
// note all the stories we thought were unread before. if any fail to appear in
// the API request for unreads, we will mark them as read
List<String> oldUnreadHashes = parent.dbHelper.getUnreadStoryHashes();
Log.d(this.getClass().getName(), "getting unreads from DB");
// process the API response, both noting no-longer-unread stories and populating
// the sorting map we will use to build the fetch list for step two
for (Entry<String, List<String[]>> entry : unreadHashes.unreadHashes.entrySet()) {
Log.d(this.getClass().getName(), "building read state diff");
feedloop: for (Entry<String, List<String[]>> entry : unreadHashes.unreadHashes.entrySet()) {
String feedId = entry.getKey();
// ignore unreads from orphaned feeds
if( ! parent.orphanFeedIds.contains(feedId)) {
if(parent.orphanFeedIds.contains(feedId)) continue feedloop;
for (String[] newHash : entry.getValue()) {
// only fetch the reported unreads if we don't already have them
List<String> existingHashes = parent.dbHelper.getStoryHashesForFeed(feedId);
for (String[] newHash : entry.getValue()) {
if (!existingHashes.contains(newHash[0])) {
sortingMap.put(newHash[1]+newHash[0], newHash[0]);
}
if (!oldUnreadHashes.contains(newHash[0])) {
sortingMap.put(newHash[1]+newHash[0], newHash[0]);
} else {
oldUnreadHashes.remove(newHash[0]);
}
}
@@ -79,6 +81,7 @@ public class UnreadsService extends SubService {
// if the user reads newest-first by default, reverse the download order
sortingMap = sortingMap.descendingMap();
}
Log.d(this.getClass().getName(), "setting queue");
StoryHashQueue.clear();
for (Map.Entry<String,String> entry : sortingMap.entrySet()) {
StoryHashQueue.add(entry.getValue());
@@ -86,6 +89,7 @@ public class UnreadsService extends SubService {
// any stories that we previously thought to be unread but were not found in the
// list, mark them read now
Log.d(this.getClass().getName(), "updating unread states for old stories");
parent.dbHelper.markStoryHashesRead(oldUnreadHashes);
}
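
The UnreadsService rewrite drops the per-feed getStoryHashesForFeed() database query in favor of a single diffing pass over the hashes we already considered unread: hashes the API reports that we don't know get queued for fetching, known ones are crossed off the old list, and whatever remains in oldUnreadHashes must have been read elsewhere and is marked read. A self-contained sketch of that pass with invented inputs:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.NavigableMap;
import java.util.TreeMap;

public class UnreadDiffSketch {
    public static void main(String[] args) {
        // hashes we thought were unread before this sync
        List<String> oldUnreadHashes = new ArrayList<String>(Arrays.asList("a", "b", "c"));
        // {hash, sort key} pairs the API currently reports as unread
        String[][] apiUnreads = { { "b", "100" }, { "d", "101" } };
        NavigableMap<String, String> sortingMap = new TreeMap<String, String>();
        for (String[] newHash : apiUnreads) {
            if (!oldUnreadHashes.contains(newHash[0])) {
                sortingMap.put(newHash[1] + newHash[0], newHash[0]); // new unread: fetch it
            } else {
                oldUnreadHashes.remove(newHash[0]); // still unread: nothing to do
            }
        }
        System.out.println("fetch: " + sortingMap.values()); // [d]
        System.out.println("mark read: " + oldUnreadHashes); // [a, c]
    }
}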

View file

@@ -108,9 +108,9 @@ public class FeedUtils {
new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... arg) {
// TODO: this reset might no longer be necessary on every FeedSet switch; skipping it could save a lot of API calls
NBSyncService.resetFeeds();
try {
dbHelper.clearReadingSession();
dbHelper.clearStorySession();
} catch (Exception e) {
; // this one call can evade the on-upgrade DB wipe and throw exceptions
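
FeedUtils now calls clearStorySession() in place of the deleted clearReadingSession(), which had to UPDATE the search_hit flag on every story row. The new method's body is not part of this diff; given the reading_session table added in DatabaseConstants, a plausible, purely hypothetical implementation would just truncate that small table:

import android.database.sqlite.SQLiteDatabase;

public class SessionClearSketch {
    private final Object RW_MUTEX = new Object();
    private SQLiteDatabase dbRW; // assume opened elsewhere

    // Hypothetical body for clearStorySession(): the diff renames the call
    // but does not show the implementation. One DELETE on the small session
    // table replaces an UPDATE across the entire story table.
    public void clearStorySession() {
        synchronized (RW_MUTEX) {
            dbRW.delete("reading_session", null, null);
        }
    }
}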