mirror of https://github.com/samuelclay/NewsBlur.git
synced 2025-09-18 21:50:56 +00:00

Better algorithm for image cache cleanup. (#627)

parent a8530da58a
commit fc3b3dad19

6 changed files with 34 additions and 7 deletions

BlurDatabaseHelper.java

@@ -209,6 +209,18 @@ public class BlurDatabaseHelper {
         return hashes;
     }
 
+    public Set<String> getAllStoryImages() {
+        Cursor c = dbRO.query(DatabaseConstants.STORY_TABLE, new String[]{DatabaseConstants.STORY_IMAGE_URLS}, null, null, null, null, null);
+        Set<String> urls = new HashSet<String>(c.getCount());
+        while (c.moveToNext()) {
+            for (String url : TextUtils.split(c.getString(c.getColumnIndexOrThrow(DatabaseConstants.STORY_IMAGE_URLS)), ",")) {
+                urls.add(url);
+            }
+        }
+        c.close();
+        return urls;
+    }
+
     public void insertStories(StoriesResponse apiResponse, NBSyncService.ActivationMode actMode, long modeCutoff) {
         // to insert classifiers, we need to determine the feed ID of the stories in this
         // response, so sniff one out.
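
This helper is the database half of the new cleanup: it pulls the comma-joined image_urls column from every story row and flattens it into one set of URLs, which NBSyncService hands to ImageCache.cleanup() below. A minimal, non-Android sketch of that flattening step, using plain String.split in place of Android's TextUtils.split (TextUtils.split returns an empty array for an empty string, which String.split does not, so the sketch guards empty rows explicitly):

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;

    public class FlattenSketch {
        // Mimics getAllStoryImages(): each row holds zero or more URLs joined
        // with ",". Empty rows contribute nothing, matching TextUtils.split("").
        static Set<String> flatten(Iterable<String> imageUrlRows) {
            Set<String> urls = new HashSet<String>();
            for (String row : imageUrlRows) {
                if (row == null || row.length() == 0) continue;
                urls.addAll(Arrays.asList(row.split(",")));
            }
            return urls;
        }

        public static void main(String[] args) {
            System.out.println(flatten(Arrays.asList(
                    "http://a.example/1.jpg,http://b.example/2.png",
                    "",
                    "http://a.example/1.jpg"))); // duplicate kept once
        }
    }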

DatabaseConstants.java

@@ -98,6 +98,7 @@ public class DatabaseConstants {
     public static final String STORY_TAGS = "tags";
     public static final String STORY_HASH = "story_hash";
     public static final String STORY_ACTIVE = "active";
+    public static final String STORY_IMAGE_URLS = "image_urls";
 
     public static final String STORY_TEXT_TABLE = "storytext";
     public static final String STORY_TEXT_STORY_HASH = "story_hash";

@@ -228,7 +229,8 @@ public class DatabaseConstants {
         STORY_STARRED + INTEGER + ", " +
         STORY_STARRED_DATE + INTEGER + ", " +
         STORY_TITLE + TEXT + ", " +
-        STORY_ACTIVE + INTEGER + " DEFAULT 0" +
+        STORY_ACTIVE + INTEGER + " DEFAULT 0, " +
+        STORY_IMAGE_URLS + TEXT +
         ")";
 
     static final String STORY_TEXT_SQL = "CREATE TABLE " + STORY_TEXT_TABLE + " (" +
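
The one-character change on the STORY_ACTIVE line is easy to miss but load-bearing: STORY_ACTIVE used to be the last column in the CREATE TABLE string, so it carried no trailing comma. With STORY_IMAGE_URLS appended after it, the comma becomes mandatory or the two column definitions fuse into invalid SQL. A sketch of the assembled tail, assuming the INTEGER and TEXT constants expand to " INTEGER" and " TEXT" (their definitions sit outside this diff):

    public class DdlTailSketch {
        // Assumed expansions; the real constants are defined elsewhere in
        // DatabaseConstants and are not shown in this diff.
        static final String INTEGER = " INTEGER";
        static final String TEXT = " TEXT";

        public static void main(String[] args) {
            String tail = "active" + INTEGER + " DEFAULT 0, " +
                          "image_urls" + TEXT +
                          ")";
            // prints: active INTEGER DEFAULT 0, image_urls TEXT)
            // without the added comma it would read: ... DEFAULT 0image_urls TEXT)
            System.out.println(tail);
        }
    }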

@@ -309,7 +311,8 @@ public class DatabaseConstants {
         STORY_AUTHORS, STORY_COMMENT_COUNT, STORY_CONTENT, STORY_SHORT_CONTENT, STORY_TIMESTAMP, STORY_SHARED_DATE, STORY_SHORTDATE, STORY_LONGDATE,
         STORY_TABLE + "." + STORY_FEED_ID, STORY_TABLE + "." + STORY_ID, STORY_INTELLIGENCE_AUTHORS, STORY_INTELLIGENCE_FEED, STORY_INTELLIGENCE_TAGS,
         STORY_INTELLIGENCE_TITLE, STORY_PERMALINK, STORY_READ, STORY_STARRED, STORY_STARRED_DATE, STORY_SHARE_COUNT, STORY_TAGS, STORY_TITLE,
-        STORY_SOCIAL_USER_ID, STORY_SOURCE_USER_ID, STORY_SHARED_USER_IDS, STORY_FRIEND_USER_IDS, STORY_PUBLIC_USER_IDS, STORY_SUM_TOTAL, STORY_HASH
+        STORY_SOCIAL_USER_ID, STORY_SOURCE_USER_ID, STORY_SHARED_USER_IDS, STORY_FRIEND_USER_IDS, STORY_PUBLIC_USER_IDS, STORY_SUM_TOTAL, STORY_HASH,
+        STORY_IMAGE_URLS
     };
 
     public static final String MULTIFEED_STORIES_QUERY_BASE =

Story.java

@@ -121,6 +121,7 @@ public class Story implements Serializable {
         values.put(DatabaseConstants.STORY_STARRED_DATE, starredTimestamp);
         values.put(DatabaseConstants.STORY_FEED_ID, feedId);
         values.put(DatabaseConstants.STORY_HASH, storyHash);
+        values.put(DatabaseConstants.STORY_IMAGE_URLS, TextUtils.join(",", imageUrls));
         return values;
     }
 

@@ -155,6 +156,7 @@ public class Story implements Serializable {
         story.feedId = cursor.getString(cursor.getColumnIndex(DatabaseConstants.STORY_FEED_ID));
         story.id = cursor.getString(cursor.getColumnIndex(DatabaseConstants.STORY_ID));
         story.storyHash = cursor.getString(cursor.getColumnIndex(DatabaseConstants.STORY_HASH));
+        story.imageUrls = TextUtils.split(cursor.getString(cursor.getColumnIndex(DatabaseConstants.STORY_IMAGE_URLS)), ",");
         return story;
     }
 
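
Together these two hunks form the round-trip for the new column: toContentValues() joins the story's image URLs with commas before insert, and fromCursor() splits them back out on read. The encoding is pragmatic rather than bulletproof, since a comma is legal inside a URL; a URL containing one would split into bogus fragments, at worst making a cached file look orphaned so it gets deleted and re-fetched. A round-trip sketch in plain Java, with String.join/split standing in for Android's TextUtils:

    import java.util.Arrays;

    public class RoundTripSketch {
        public static void main(String[] args) {
            String[] imageUrls = { "http://a.example/1.jpg", "http://b.example/2.png" };

            String stored = String.join(",", imageUrls); // as in Story.toContentValues()
            String[] restored = stored.split(",");       // as in Story.fromCursor()

            System.out.println(Arrays.equals(imageUrls, restored)); // true
        }
    }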

ImagePrefetchService.java

@@ -21,7 +21,7 @@ public class ImagePrefetchService extends SubService {
 
     private static volatile boolean Running = false;
 
-    private ImageCache imageCache;
+    ImageCache imageCache;
 
     /** URLs of images contained in recently fetched stories that are candidates for prefetch. */
     static Set<String> ImageQueue;

@@ -61,8 +61,7 @@ public class ImagePrefetchService extends SubService {
                 gotWork();
             }
         }
-        // TODO: do this in a cleanup thread
-        imageCache.cleanup();
+
     }
 
     public void addUrl(String url) {

NBSyncService.java

@@ -347,6 +347,7 @@ public class NBSyncService extends Service {
             NbActivity.updateAllActivities(false);
             dbHelper.cleanupStories(PrefsUtils.isKeepOldStories(this));
             dbHelper.cleanupStoryText();
+            imagePrefetchService.imageCache.cleanup(dbHelper.getAllStoryImages());
             CleanupRunning = false;
             NbActivity.updateAllActivities(false);
 

ImageCache.java

@@ -7,6 +7,8 @@ import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.net.URL;
+import java.util.HashSet;
+import java.util.Set;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 

@@ -88,12 +90,20 @@ public class ImageCache {
         return fileName;
     }
-    public void cleanup() {
+    public void cleanup(Set<String> currentImages) {
+        // if there appear to be zero images in the system, a DB rebuild probably just
+        // occurred, so don't trust that data for cleanup
+        if (currentImages.size() == 0) return;
+
+        Set<String> currentFiles = new HashSet<String>(currentImages.size());
+        for (String url : currentImages) currentFiles.add(getFileName(url));
+
         File[] files = cacheDir.listFiles();
         if (files == null) return;
         for (File f : files) {
             long timestamp = f.lastModified();
-            if (System.currentTimeMillis() > (timestamp + MAX_FILE_AGE_MILLIS)) {
+            if ((System.currentTimeMillis() > (timestamp + MAX_FILE_AGE_MILLIS)) ||
+                (!currentFiles.contains(f.getName()))) {
                 Log.d(this.getClass().getName(), "deleting " + f.getName());
                 f.delete();
             }
         }
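
The rewritten predicate is the heart of the commit: a cached file is now deleted when it is past the age limit or when no current story references it, whereas the old rule (formerly invoked from ImagePrefetchService after every prefetch pass) used age alone and could hold orphaned files for the full MAX_FILE_AGE_MILLIS. The empty-set guard keeps a momentarily empty story table, such as one mid-rebuild, from flushing the whole cache. The decision isolated as a standalone predicate, with an assumed age constant and the set of live file names built by the caller above:

    import java.io.File;
    import java.util.Set;

    public class EvictionSketch {
        // Assumed value; the real constant lives in ImageCache.
        static final long MAX_FILE_AGE_MILLIS = 30L * 24 * 60 * 60 * 1000;

        // Delete when too old OR no longer referenced; the pre-commit rule
        // was the age check alone.
        static boolean shouldDelete(File f, Set<String> currentFiles) {
            boolean tooOld = System.currentTimeMillis() > (f.lastModified() + MAX_FILE_AGE_MILLIS);
            boolean orphaned = !currentFiles.contains(f.getName());
            return tooOld || orphaned;
        }
    }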