Small code cleanups for feed manager; don't fetch new feeds twice.

This commit is contained in:
akwizgran
2022-12-07 10:51:33 +00:00
parent 8b9140f477
commit 1a2f85f701
4 changed files with 53 additions and 64 deletions

View File

@@ -41,7 +41,6 @@ import static java.util.logging.Logger.getLogger;
import static org.briarproject.bramble.util.LogUtils.logDuration; import static org.briarproject.bramble.util.LogUtils.logDuration;
import static org.briarproject.bramble.util.LogUtils.logException; import static org.briarproject.bramble.util.LogUtils.logException;
import static org.briarproject.bramble.util.LogUtils.now; import static org.briarproject.bramble.util.LogUtils.now;
import static org.briarproject.briar.util.HtmlUtils.ARTICLE;
@NotNullByDefault @NotNullByDefault
abstract class BaseViewModel extends DbViewModel implements EventListener { abstract class BaseViewModel extends DbViewModel implements EventListener {
@@ -115,7 +114,7 @@ abstract class BaseViewModel extends DbViewModel implements EventListener {
@DatabaseExecutor @DatabaseExecutor
private String getPostText(Transaction txn, MessageId m) private String getPostText(Transaction txn, MessageId m)
throws DbException { throws DbException {
return HtmlUtils.clean(blogManager.getPostText(txn, m), ARTICLE); return HtmlUtils.cleanArticle(blogManager.getPostText(txn, m));
} }
LiveData<LiveResult<BlogPostItem>> loadBlogPost(GroupId g, MessageId m) { LiveData<LiveResult<BlogPostItem>> loadBlogPost(GroupId g, MessageId m) {

View File

@@ -31,7 +31,6 @@ import org.briarproject.bramble.api.sync.GroupId;
import org.briarproject.bramble.api.system.Clock; import org.briarproject.bramble.api.system.Clock;
import org.briarproject.bramble.api.system.TaskScheduler; import org.briarproject.bramble.api.system.TaskScheduler;
import org.briarproject.bramble.api.system.Wakeful; import org.briarproject.bramble.api.system.Wakeful;
import org.briarproject.bramble.util.StringUtils;
import org.briarproject.briar.api.blog.Blog; import org.briarproject.briar.api.blog.Blog;
import org.briarproject.briar.api.blog.BlogManager; import org.briarproject.briar.api.blog.BlogManager;
import org.briarproject.briar.api.blog.BlogManager.RemoveBlogHook; import org.briarproject.briar.api.blog.BlogManager.RemoveBlogHook;
@@ -47,12 +46,12 @@ import java.security.GeneralSecurityException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Comparator; import java.util.Comparator;
import java.util.Date; import java.util.Date;
import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.concurrent.Executor; import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicBoolean;
import java.util.logging.Logger; import java.util.logging.Logger;
import javax.annotation.Nullable;
import javax.annotation.concurrent.ThreadSafe; import javax.annotation.concurrent.ThreadSafe;
import javax.inject.Inject; import javax.inject.Inject;
@@ -64,14 +63,15 @@ import okhttp3.ResponseBody;
import static java.util.Collections.sort; import static java.util.Collections.sort;
import static java.util.logging.Level.WARNING; import static java.util.logging.Level.WARNING;
import static org.briarproject.bramble.util.LogUtils.logException; import static org.briarproject.bramble.util.LogUtils.logException;
import static org.briarproject.bramble.util.StringUtils.isNullOrEmpty;
import static org.briarproject.bramble.util.StringUtils.truncateUtf8;
import static org.briarproject.briar.api.blog.BlogConstants.MAX_BLOG_POST_TEXT_LENGTH; import static org.briarproject.briar.api.blog.BlogConstants.MAX_BLOG_POST_TEXT_LENGTH;
import static org.briarproject.briar.api.feed.FeedConstants.FETCH_DELAY_INITIAL; import static org.briarproject.briar.api.feed.FeedConstants.FETCH_DELAY_INITIAL;
import static org.briarproject.briar.api.feed.FeedConstants.FETCH_INTERVAL; import static org.briarproject.briar.api.feed.FeedConstants.FETCH_INTERVAL;
import static org.briarproject.briar.api.feed.FeedConstants.FETCH_UNIT; import static org.briarproject.briar.api.feed.FeedConstants.FETCH_UNIT;
import static org.briarproject.briar.api.feed.FeedConstants.KEY_FEEDS; import static org.briarproject.briar.api.feed.FeedConstants.KEY_FEEDS;
import static org.briarproject.briar.util.HtmlUtils.ARTICLE; import static org.briarproject.briar.util.HtmlUtils.cleanAll;
import static org.briarproject.briar.util.HtmlUtils.STRIP_ALL; import static org.briarproject.briar.util.HtmlUtils.cleanArticle;
import static org.briarproject.briar.util.HtmlUtils.clean;
@ThreadSafe @ThreadSafe
@NotNullByDefault @NotNullByDefault
@@ -160,12 +160,12 @@ class FeedManagerImpl implements FeedManager, EventListener, OpenDatabaseHook,
@Override @Override
public Feed addFeed(String url) throws DbException, IOException { public Feed addFeed(String url) throws DbException, IOException {
// fetch syndication feed to get its metadata // fetch feed to get posts and metadata
SyndFeed f = fetchSyndFeed(url); SyndFeed sf = fetchSyndFeed(url);
Feed feed = feedFactory.createFeed(url, f); Feed feed = feedFactory.createFeed(url, sf);
// store feed and new blog // store feed metadata and new blog
Transaction txn = db.startTransaction(false); Transaction txn = db.startTransaction(false);
try { try {
blogManager.addBlog(txn, feed.getBlog()); blogManager.addBlog(txn, feed.getBlog());
@@ -177,10 +177,11 @@ class FeedManagerImpl implements FeedManager, EventListener, OpenDatabaseHook,
db.endTransaction(txn); db.endTransaction(txn);
} }
// fetch feed again and post entries // post entries
Feed updatedFeed = fetchFeed(feed); long lastEntryTime = postFeedEntries(feed, sf.getEntries());
Feed updatedFeed = feedFactory.createFeed(feed, sf, lastEntryTime);
// store feed again to also store last added entry // store feed metadata again to also store last entry time
txn = db.startTransaction(false); txn = db.startTransaction(false);
try { try {
List<Feed> feeds = getFeeds(txn); List<Feed> feeds = getFeeds(txn);
@@ -215,10 +216,12 @@ class FeedManagerImpl implements FeedManager, EventListener, OpenDatabaseHook,
// delete blog's RSS feed if we have it // delete blog's RSS feed if we have it
boolean found = false; boolean found = false;
List<Feed> feeds = getFeeds(txn); List<Feed> feeds = getFeeds(txn);
for (Feed f : feeds) { Iterator<Feed> it = feeds.iterator();
while (it.hasNext()) {
Feed f = it.next();
if (f.getBlogId().equals(b.getId())) { if (f.getBlogId().equals(b.getId())) {
it.remove();
found = true; found = true;
feeds.remove(f);
break; break;
} }
} }
@@ -248,7 +251,7 @@ class FeedManagerImpl implements FeedManager, EventListener, OpenDatabaseHook,
return feeds; return feeds;
} }
private void storeFeeds(@Nullable Transaction txn, List<Feed> feeds) private void storeFeeds(Transaction txn, List<Feed> feeds)
throws DbException { throws DbException {
BdfList feedList = new BdfList(); BdfList feedList = new BdfList();
@@ -257,19 +260,14 @@ class FeedManagerImpl implements FeedManager, EventListener, OpenDatabaseHook,
} }
BdfDictionary gm = BdfDictionary.of(new BdfEntry(KEY_FEEDS, feedList)); BdfDictionary gm = BdfDictionary.of(new BdfEntry(KEY_FEEDS, feedList));
try { try {
if (txn == null) { clientHelper.mergeGroupMetadata(txn, getLocalGroup().getId(), gm);
clientHelper.mergeGroupMetadata(getLocalGroup().getId(), gm);
} else {
clientHelper.mergeGroupMetadata(txn, getLocalGroup().getId(),
gm);
}
} catch (FormatException e) { } catch (FormatException e) {
throw new DbException(e); throw new DbException(e);
} }
} }
private void storeFeeds(List<Feed> feeds) throws DbException { private void storeFeeds(List<Feed> feeds) throws DbException {
storeFeeds(null, feeds); db.transaction(false, txn -> storeFeeds(txn, feeds));
} }
/** /**
@@ -299,7 +297,11 @@ class FeedManagerImpl implements FeedManager, EventListener, OpenDatabaseHook,
List<Feed> newFeeds = new ArrayList<>(feeds.size()); List<Feed> newFeeds = new ArrayList<>(feeds.size());
for (Feed feed : feeds) { for (Feed feed : feeds) {
try { try {
newFeeds.add(fetchFeed(feed)); // fetch and clean feed
SyndFeed sf = fetchSyndFeed(feed.getUrl());
// sort and add new entries
long lastEntryTime = postFeedEntries(feed, sf.getEntries());
newFeeds.add(feedFactory.createFeed(feed, sf, lastEntryTime));
} catch (IOException | DbException e) { } catch (IOException | DbException e) {
logException(LOG, WARNING, e); logException(LOG, WARNING, e);
newFeeds.add(feed); newFeeds.add(feed);
@@ -318,42 +320,25 @@ class FeedManagerImpl implements FeedManager, EventListener, OpenDatabaseHook,
private SyndFeed fetchSyndFeed(String url) throws IOException { private SyndFeed fetchSyndFeed(String url) throws IOException {
// fetch feed // fetch feed
InputStream stream = getFeedInputStream(url); InputStream stream = getFeedInputStream(url);
SyndFeed f = getSyndFeed(stream); SyndFeed sf = getSyndFeed(stream);
stream.close(); stream.close();
if (f.getEntries().size() == 0)
throw new IOException("Feed has no entries");
// clean title // clean title
String title = String title = sf.getTitle();
StringUtils.isNullOrEmpty(f.getTitle()) ? null : f.getTitle(); if (title != null) title = cleanAll(title);
if (title != null) title = clean(title, STRIP_ALL); sf.setTitle(isNullOrEmpty(title) ? null : title);
f.setTitle(title);
// clean description // clean description
String description = String description = sf.getDescription();
StringUtils.isNullOrEmpty(f.getDescription()) ? null : if (description != null) description = cleanAll(description);
f.getDescription(); sf.setDescription(isNullOrEmpty(description) ? null : description);
if (description != null) description = clean(description, STRIP_ALL);
f.setDescription(description);
// clean author // clean author
String author = String author = sf.getAuthor();
StringUtils.isNullOrEmpty(f.getAuthor()) ? null : f.getAuthor(); if (author != null) author = cleanAll(author);
if (author != null) author = clean(author, STRIP_ALL); sf.setAuthor(isNullOrEmpty(author) ? null : author);
f.setAuthor(author);
return f; return sf;
}
private Feed fetchFeed(Feed feed) throws IOException, DbException {
// fetch and clean feed
SyndFeed f = fetchSyndFeed(feed.getUrl());
// sort and add new entries
long lastEntryTime = postFeedEntries(feed, f.getEntries());
return feedFactory.createFeed(feed, f, lastEntryTime);
} }
private InputStream getFeedInputStream(String url) throws IOException { private InputStream getFeedInputStream(String url) throws IOException {
@@ -417,7 +402,7 @@ class FeedManagerImpl implements FeedManager, EventListener, OpenDatabaseHook,
// build post text // build post text
StringBuilder b = new StringBuilder(); StringBuilder b = new StringBuilder();
if (!StringUtils.isNullOrEmpty(entry.getTitle())) { if (!isNullOrEmpty(entry.getTitle())) {
b.append("<h1>").append(entry.getTitle()).append("</h1>"); b.append("<h1>").append(entry.getTitle()).append("</h1>");
} }
for (SyndContent content : entry.getContents()) { for (SyndContent content : entry.getContents()) {
@@ -430,7 +415,7 @@ class FeedManagerImpl implements FeedManager, EventListener, OpenDatabaseHook,
b.append(entry.getDescription().getValue()); b.append(entry.getDescription().getValue());
} }
b.append("<p>"); b.append("<p>");
if (!StringUtils.isNullOrEmpty(entry.getAuthor())) { if (!isNullOrEmpty(entry.getAuthor())) {
b.append("-- ").append(entry.getAuthor()); b.append("-- ").append(entry.getAuthor());
} }
if (entry.getPublishedDate() != null) { if (entry.getPublishedDate() != null) {
@@ -442,7 +427,7 @@ class FeedManagerImpl implements FeedManager, EventListener, OpenDatabaseHook,
} }
b.append("</p>"); b.append("</p>");
String link = entry.getLink(); String link = entry.getLink();
if (!StringUtils.isNullOrEmpty(link)) { if (!isNullOrEmpty(link)) {
b.append("<a href=\"").append(link).append("\">").append(link) b.append("<a href=\"").append(link).append("\">").append(link)
.append("</a>"); .append("</a>");
} }
@@ -472,8 +457,8 @@ class FeedManagerImpl implements FeedManager, EventListener, OpenDatabaseHook,
} }
private String getPostText(String text) { private String getPostText(String text) {
text = clean(text, ARTICLE); text = cleanArticle(text);
return StringUtils.truncateUtf8(text, MAX_BLOG_POST_TEXT_LENGTH); return truncateUtf8(text, MAX_BLOG_POST_TEXT_LENGTH);
} }
private Comparator<SyndEntry> getEntryComparator() { private Comparator<SyndEntry> getEntryComparator() {

View File

@@ -7,12 +7,15 @@ import org.jsoup.safety.Safelist;
@NotNullByDefault @NotNullByDefault
public class HtmlUtils { public class HtmlUtils {
public static Safelist STRIP_ALL = Safelist.none(); private static final Safelist STRIP_ALL = Safelist.none();
public static Safelist ARTICLE = Safelist.basic() private static final Safelist ARTICLE = Safelist.basic()
.addTags("h1", "h2", "h3", "h4", "h5", "h6"); .addTags("h1", "h2", "h3", "h4", "h5", "h6");
public static String clean(String s, Safelist list) { public static String cleanAll(String s) {
return Jsoup.clean(s, list); return Jsoup.clean(s, STRIP_ALL);
} }
public static String cleanArticle(String s) {
return Jsoup.clean(s, ARTICLE);
}
} }

View File

@@ -175,11 +175,13 @@ public class FeedManagerImplTest extends BrambleMockTestCase {
} }
private void expectStoreFeed(BdfList feedList) throws Exception { private void expectStoreFeed(BdfList feedList) throws Exception {
Transaction txn = new Transaction(null, false);
BdfDictionary feedDict = BdfDictionary feedDict =
BdfDictionary.of(new BdfEntry(KEY_FEEDS, feedList)); BdfDictionary.of(new BdfEntry(KEY_FEEDS, feedList));
expectGetLocalGroup(); expectGetLocalGroup();
context.checking(new Expectations() {{ context.checking(new DbExpectations() {{
oneOf(clientHelper).mergeGroupMetadata(localGroupId, feedDict); oneOf(db).transaction(with(false), withDbRunnable(txn));
oneOf(clientHelper).mergeGroupMetadata(txn, localGroupId, feedDict);
if (feedList.size() == 1) { if (feedList.size() == 1) {
oneOf(feedFactory).feedToBdfDictionary(feed); oneOf(feedFactory).feedToBdfDictionary(feed);
will(returnValue(feedList.getDictionary(0))); will(returnValue(feedList.getDictionary(0)));