mirror of
https://code.briarproject.org/briar/briar.git
synced 2026-02-11 18:29:05 +01:00
Merge branch '1822-rss-feeds-backend' into 'master'
Resolve "Import RSS feeds shared by other apps" See merge request briar/briar!1763
This commit is contained in:
@@ -33,7 +33,7 @@ dependencies {
|
||||
testImplementation "org.jmock:jmock:$jmock_version"
|
||||
testImplementation "org.jmock:jmock-junit4:$jmock_version"
|
||||
testImplementation "org.jmock:jmock-imposters:$jmock_version"
|
||||
testImplementation "com.squareup.okhttp3:mockwebserver:4.9.3"
|
||||
testImplementation "com.squareup.okhttp3:mockwebserver:$mockwebserver_version"
|
||||
|
||||
testAnnotationProcessor "com.google.dagger:dagger-compiler:$dagger_version"
|
||||
|
||||
|
||||
@@ -41,7 +41,6 @@ import static java.util.logging.Logger.getLogger;
|
||||
import static org.briarproject.bramble.util.LogUtils.logDuration;
|
||||
import static org.briarproject.bramble.util.LogUtils.logException;
|
||||
import static org.briarproject.bramble.util.LogUtils.now;
|
||||
import static org.briarproject.briar.util.HtmlUtils.ARTICLE;
|
||||
|
||||
@NotNullByDefault
|
||||
abstract class BaseViewModel extends DbViewModel implements EventListener {
|
||||
@@ -115,7 +114,7 @@ abstract class BaseViewModel extends DbViewModel implements EventListener {
|
||||
@DatabaseExecutor
|
||||
private String getPostText(Transaction txn, MessageId m)
|
||||
throws DbException {
|
||||
return HtmlUtils.clean(blogManager.getPostText(txn, m), ARTICLE);
|
||||
return HtmlUtils.cleanArticle(blogManager.getPostText(txn, m));
|
||||
}
|
||||
|
||||
LiveData<LiveResult<BlogPostItem>> loadBlogPost(GroupId g, MessageId m) {
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
package org.briarproject.briar.android.blog;
|
||||
|
||||
import android.os.Bundle;
|
||||
import android.widget.Toast;
|
||||
|
||||
import org.briarproject.briar.R;
|
||||
import org.briarproject.briar.android.activity.ActivityComponent;
|
||||
@@ -16,10 +15,6 @@ import androidx.annotation.Nullable;
|
||||
import androidx.fragment.app.FragmentManager;
|
||||
import androidx.lifecycle.ViewModelProvider;
|
||||
|
||||
import static org.briarproject.briar.android.blog.RssFeedViewModel.ImportResult.EXISTS;
|
||||
import static org.briarproject.briar.android.blog.RssFeedViewModel.ImportResult.FAILED;
|
||||
import static org.briarproject.briar.android.blog.RssFeedViewModel.ImportResult.IMPORTED;
|
||||
|
||||
@MethodsNotNullByDefault
|
||||
@ParametersNotNullByDefault
|
||||
public class RssFeedActivity extends BriarActivity
|
||||
@@ -50,13 +45,13 @@ public class RssFeedActivity extends BriarActivity
|
||||
viewModel.getImportResult().observeEvent(this, this::onImportResult);
|
||||
}
|
||||
|
||||
private void onImportResult(RssFeedViewModel.ImportResult result) {
|
||||
if (result == IMPORTED) {
|
||||
private void onImportResult(boolean result) {
|
||||
if (result) {
|
||||
FragmentManager fm = getSupportFragmentManager();
|
||||
if (fm.findFragmentByTag(RssFeedImportFragment.TAG) != null) {
|
||||
onBackPressed();
|
||||
}
|
||||
} else if (result == FAILED) {
|
||||
} else {
|
||||
String url = viewModel.getUrlFailedImport();
|
||||
if (url == null) {
|
||||
throw new AssertionError();
|
||||
@@ -65,9 +60,6 @@ public class RssFeedActivity extends BriarActivity
|
||||
RssFeedImportFailedDialogFragment.newInstance(url);
|
||||
dialog.show(getSupportFragmentManager(),
|
||||
RssFeedImportFailedDialogFragment.TAG);
|
||||
} else if (result == EXISTS) {
|
||||
Toast.makeText(this, R.string.blogs_rss_feeds_import_exists,
|
||||
Toast.LENGTH_LONG).show();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -28,8 +28,7 @@ class RssFeedAdapter extends ListAdapter<Feed, RssFeedAdapter.FeedViewHolder> {
|
||||
super(new DiffUtil.ItemCallback<Feed>() {
|
||||
@Override
|
||||
public boolean areItemsTheSame(Feed a, Feed b) {
|
||||
return a.getUrl().equals(b.getUrl()) &&
|
||||
a.getBlogId().equals(b.getBlogId()) &&
|
||||
return a.getBlogId().equals(b.getBlogId()) &&
|
||||
a.getAdded() == b.getAdded();
|
||||
}
|
||||
|
||||
@@ -86,8 +85,8 @@ class RssFeedAdapter extends ListAdapter<Feed, RssFeedAdapter.FeedViewHolder> {
|
||||
delete.setOnClickListener(v -> listener.onDeleteClick(item));
|
||||
|
||||
// Author
|
||||
if (item.getRssAuthor() != null) {
|
||||
author.setText(item.getRssAuthor());
|
||||
if (item.getProperties().getAuthor() != null) {
|
||||
author.setText(item.getProperties().getAuthor());
|
||||
author.setVisibility(VISIBLE);
|
||||
authorLabel.setVisibility(VISIBLE);
|
||||
} else {
|
||||
@@ -100,8 +99,8 @@ class RssFeedAdapter extends ListAdapter<Feed, RssFeedAdapter.FeedViewHolder> {
|
||||
updated.setText(formatDate(ctx, item.getUpdated()));
|
||||
|
||||
// Description
|
||||
if (item.getDescription() != null) {
|
||||
description.setText(item.getDescription());
|
||||
if (item.getProperties().getDescription() != null) {
|
||||
description.setText(item.getProperties().getDescription());
|
||||
description.setVisibility(VISIBLE);
|
||||
} else {
|
||||
description.setVisibility(GONE);
|
||||
|
||||
@@ -22,7 +22,7 @@ import org.briarproject.nullsafety.NotNullByDefault;
|
||||
import java.io.IOException;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.util.Collections;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.Executor;
|
||||
import java.util.logging.Logger;
|
||||
@@ -38,13 +38,9 @@ import static java.util.logging.Logger.getLogger;
|
||||
import static org.briarproject.bramble.util.LogUtils.logDuration;
|
||||
import static org.briarproject.bramble.util.LogUtils.logException;
|
||||
import static org.briarproject.bramble.util.LogUtils.now;
|
||||
import static org.briarproject.briar.android.blog.RssFeedViewModel.ImportResult.EXISTS;
|
||||
import static org.briarproject.briar.android.blog.RssFeedViewModel.ImportResult.FAILED;
|
||||
import static org.briarproject.briar.android.blog.RssFeedViewModel.ImportResult.IMPORTED;
|
||||
|
||||
@NotNullByDefault
|
||||
class RssFeedViewModel extends DbViewModel {
|
||||
enum ImportResult {IMPORTED, FAILED, EXISTS}
|
||||
|
||||
private static final Logger LOG =
|
||||
getLogger(RssFeedViewModel.class.getName());
|
||||
@@ -60,7 +56,7 @@ class RssFeedViewModel extends DbViewModel {
|
||||
private volatile String urlFailedImport = null;
|
||||
private final MutableLiveData<Boolean> isImporting =
|
||||
new MutableLiveData<>(false);
|
||||
private final MutableLiveEvent<ImportResult> importResult =
|
||||
private final MutableLiveEvent<Boolean> importResult =
|
||||
new MutableLiveEvent<>();
|
||||
|
||||
@Inject
|
||||
@@ -101,7 +97,6 @@ class RssFeedViewModel extends DbViewModel {
|
||||
private List<Feed> loadFeeds(Transaction txn) throws DbException {
|
||||
long start = now();
|
||||
List<Feed> feeds = feedManager.getFeeds(txn);
|
||||
Collections.sort(feeds);
|
||||
logDuration(LOG, "Loading feeds", start);
|
||||
return feeds;
|
||||
}
|
||||
@@ -125,7 +120,7 @@ class RssFeedViewModel extends DbViewModel {
|
||||
});
|
||||
}
|
||||
|
||||
LiveEvent<ImportResult> getImportResult() {
|
||||
LiveEvent<Boolean> getImportResult() {
|
||||
return importResult;
|
||||
}
|
||||
|
||||
@@ -138,21 +133,23 @@ class RssFeedViewModel extends DbViewModel {
|
||||
urlFailedImport = null;
|
||||
ioExecutor.execute(() -> {
|
||||
try {
|
||||
if (exists(url)) {
|
||||
importResult.postEvent(EXISTS);
|
||||
return;
|
||||
}
|
||||
Feed feed = feedManager.addFeed(url);
|
||||
List<Feed> updated = addListItem(getList(feeds), feed);
|
||||
if (updated != null) {
|
||||
Collections.sort(updated);
|
||||
feeds.postValue(new LiveResult<>(updated));
|
||||
// Update the feed if it was already present
|
||||
List<Feed> feedList = getList(feeds);
|
||||
if (feedList == null) feedList = new ArrayList<>();
|
||||
List<Feed> updated = updateListItems(feedList,
|
||||
f -> f.equals(feed), f -> feed);
|
||||
// Add the feed if it wasn't already present
|
||||
if (updated == null) {
|
||||
feedList.add(feed);
|
||||
updated = feedList;
|
||||
}
|
||||
importResult.postEvent(IMPORTED);
|
||||
feeds.postValue(new LiveResult<>(updated));
|
||||
importResult.postEvent(true);
|
||||
} catch (DbException | IOException e) {
|
||||
logException(LOG, WARNING, e);
|
||||
urlFailedImport = url;
|
||||
importResult.postEvent(FAILED);
|
||||
importResult.postEvent(false);
|
||||
} finally {
|
||||
isImporting.postValue(false);
|
||||
}
|
||||
@@ -163,16 +160,4 @@ class RssFeedViewModel extends DbViewModel {
|
||||
String getUrlFailedImport() {
|
||||
return urlFailedImport;
|
||||
}
|
||||
|
||||
private boolean exists(String url) {
|
||||
List<Feed> list = getList(feeds);
|
||||
if (list != null) {
|
||||
for (Feed feed : list) {
|
||||
if (url.equals(feed.getUrl())) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -520,7 +520,6 @@
|
||||
<string name="blogs_rss_feeds_import_button">Import</string>
|
||||
<string name="blogs_rss_feeds_import_hint">Enter the URL of the RSS feed</string>
|
||||
<string name="blogs_rss_feeds_import_error">We are sorry! There was an error importing your feed.</string>
|
||||
<string name="blogs_rss_feeds_import_exists">That feed is already imported.</string>
|
||||
<string name="blogs_rss_feeds">RSS Feeds</string>
|
||||
<string name="blogs_rss_feeds_manage_imported">Imported:</string>
|
||||
<string name="blogs_rss_feeds_manage_author">Author:</string>
|
||||
|
||||
@@ -5,47 +5,27 @@ import org.briarproject.bramble.api.sync.GroupId;
|
||||
import org.briarproject.briar.api.blog.Blog;
|
||||
import org.briarproject.nullsafety.NotNullByDefault;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
import javax.annotation.concurrent.Immutable;
|
||||
|
||||
@Immutable
|
||||
@NotNullByDefault
|
||||
public class Feed implements Comparable<Feed> {
|
||||
public class Feed {
|
||||
|
||||
private final String url;
|
||||
private final Blog blog;
|
||||
private final LocalAuthor localAuthor;
|
||||
@Nullable
|
||||
private final String description, rssAuthor;
|
||||
private final RssProperties properties;
|
||||
private final long added, updated, lastEntryTime;
|
||||
|
||||
public Feed(String url, Blog blog, LocalAuthor localAuthor,
|
||||
@Nullable String description, @Nullable String rssAuthor,
|
||||
public Feed(Blog blog, LocalAuthor localAuthor, RssProperties properties,
|
||||
long added, long updated, long lastEntryTime) {
|
||||
this.url = url;
|
||||
this.blog = blog;
|
||||
this.localAuthor = localAuthor;
|
||||
this.description = description;
|
||||
this.rssAuthor = rssAuthor;
|
||||
this.properties = properties;
|
||||
this.added = added;
|
||||
this.updated = updated;
|
||||
this.lastEntryTime = lastEntryTime;
|
||||
}
|
||||
|
||||
public Feed(String url, Blog blog, LocalAuthor localAuthor,
|
||||
@Nullable String description, @Nullable String rssAuthor,
|
||||
long added) {
|
||||
this(url, blog, localAuthor, description, rssAuthor, added, 0L, 0L);
|
||||
}
|
||||
|
||||
public Feed(String url, Blog blog, LocalAuthor localAuthor, long added) {
|
||||
this(url, blog, localAuthor, null, null, added, 0L, 0L);
|
||||
}
|
||||
|
||||
public String getUrl() {
|
||||
return url;
|
||||
}
|
||||
|
||||
public GroupId getBlogId() {
|
||||
return blog.getId();
|
||||
}
|
||||
@@ -62,14 +42,8 @@ public class Feed implements Comparable<Feed> {
|
||||
return blog.getName();
|
||||
}
|
||||
|
||||
@Nullable
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
public String getRssAuthor() {
|
||||
return rssAuthor;
|
||||
public RssProperties getProperties() {
|
||||
return properties;
|
||||
}
|
||||
|
||||
public long getAdded() {
|
||||
@@ -95,12 +69,7 @@ public class Feed implements Comparable<Feed> {
|
||||
}
|
||||
|
||||
@Override
|
||||
public int compareTo(Feed o) {
|
||||
if (this == o) return 0;
|
||||
long aTime = getAdded(), bTime = o.getAdded();
|
||||
if (aTime > bTime) return -1;
|
||||
if (aTime < bTime) return 1;
|
||||
return 0;
|
||||
public int hashCode() {
|
||||
return blog.hashCode();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -22,6 +22,9 @@ public interface FeedConstants {
|
||||
String KEY_FEED_PRIVATE_KEY = "feedPrivateKey";
|
||||
String KEY_FEED_DESC = "feedDesc";
|
||||
String KEY_FEED_RSS_AUTHOR = "feedRssAuthor";
|
||||
String KEY_FEED_RSS_TITLE = "feedRssTitle";
|
||||
String KEY_FEED_RSS_LINK = "feedRssLink";
|
||||
String KEY_FEED_RSS_URI = "feedRssUri";
|
||||
String KEY_FEED_ADDED = "feedAdded";
|
||||
String KEY_FEED_UPDATED = "feedUpdated";
|
||||
String KEY_FEED_LAST_ENTRY = "feedLastEntryTime";
|
||||
|
||||
@@ -6,6 +6,7 @@ import org.briarproject.bramble.api.sync.ClientId;
|
||||
import org.briarproject.nullsafety.NotNullByDefault;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.List;
|
||||
|
||||
@NotNullByDefault
|
||||
@@ -22,10 +23,17 @@ public interface FeedManager {
|
||||
int MAJOR_VERSION = 0;
|
||||
|
||||
/**
|
||||
* Adds an RSS feed as a new dedicated blog.
|
||||
* Adds an RSS feed as a new dedicated blog, or updates the existing blog
|
||||
* if a blog for the feed already exists.
|
||||
*/
|
||||
Feed addFeed(String url) throws DbException, IOException;
|
||||
|
||||
/**
|
||||
* Adds an RSS feed as a new dedicated blog, or updates the existing blog
|
||||
* if a blog for the feed already exists.
|
||||
*/
|
||||
Feed addFeed(InputStream in) throws DbException, IOException;
|
||||
|
||||
/**
|
||||
* Removes an RSS feed.
|
||||
*/
|
||||
|
||||
@@ -0,0 +1,86 @@
|
||||
package org.briarproject.briar.api.feed;
|
||||
|
||||
import org.briarproject.nullsafety.NotNullByDefault;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
import javax.annotation.concurrent.Immutable;
|
||||
|
||||
/**
|
||||
* The properties of an RSS feed, which may have been imported from a URL
|
||||
* or a file.
|
||||
*/
|
||||
@Immutable
|
||||
@NotNullByDefault
|
||||
public class RssProperties {
|
||||
|
||||
@Nullable
|
||||
private final String url, title, description, author, link, uri;
|
||||
|
||||
public RssProperties(@Nullable String url, @Nullable String title,
|
||||
@Nullable String description, @Nullable String author,
|
||||
@Nullable String link, @Nullable String uri) {
|
||||
this.url = url;
|
||||
this.title = title;
|
||||
this.description = description;
|
||||
this.author = author;
|
||||
this.link = link;
|
||||
this.uri = uri;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the URL from which the RSS feed was imported, or null if the
|
||||
* feed was imported from a file.
|
||||
*/
|
||||
@Nullable
|
||||
public String getUrl() {
|
||||
return url;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the title property of the RSS feed, or null if no title was
|
||||
* specified.
|
||||
*/
|
||||
@Nullable
|
||||
public String getTitle() {
|
||||
return title;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the description property of the RSS feed, or null if no
|
||||
* description was specified.
|
||||
*/
|
||||
@Nullable
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the author property of the RSS feed, or null if no author was
|
||||
* specified.
|
||||
*/
|
||||
@Nullable
|
||||
public String getAuthor() {
|
||||
return author;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the link property of the RSS feed, or null if no link was
|
||||
* specified. This is usually the URL of a webpage where the equivalent
|
||||
* content can be viewed in a browser.
|
||||
*/
|
||||
@Nullable
|
||||
public String getLink() {
|
||||
return link;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the URI property of the RSS feed, or null if no URI was
|
||||
* specified. This may be a URL from which the feed can be downloaded,
|
||||
* or it may be an opaque identifier such as a number that serves to
|
||||
* distinguish this feed from other feeds produced by the same creator.
|
||||
*/
|
||||
@Nullable
|
||||
public String getUri() {
|
||||
return uri;
|
||||
}
|
||||
}
|
||||
@@ -28,6 +28,7 @@ dependencies {
|
||||
testImplementation "org.jmock:jmock:$jmock_version"
|
||||
testImplementation "org.jmock:jmock-junit4:$jmock_version"
|
||||
testImplementation "org.jmock:jmock-imposters:$jmock_version"
|
||||
testImplementation "com.squareup.okhttp3:mockwebserver:$mockwebserver_version"
|
||||
|
||||
testAnnotationProcessor "com.google.dagger:dagger-compiler:$dagger_version"
|
||||
|
||||
|
||||
@@ -6,20 +6,22 @@ import org.briarproject.bramble.api.FormatException;
|
||||
import org.briarproject.bramble.api.data.BdfDictionary;
|
||||
import org.briarproject.briar.api.feed.Feed;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
|
||||
interface FeedFactory {
|
||||
|
||||
/**
|
||||
* Create a new feed based on the feed url
|
||||
* and the metadata of an existing {@link SyndFeed}.
|
||||
*/
|
||||
Feed createFeed(String url, SyndFeed feed);
|
||||
Feed createFeed(@Nullable String url, SyndFeed sf);
|
||||
|
||||
/**
|
||||
* Creates a new updated feed, based on the given existing feed,
|
||||
* new metadata from the given {@link SyndFeed}
|
||||
* and the time of the last feed entry.
|
||||
*/
|
||||
Feed createFeed(Feed feed, SyndFeed f, long lastEntryTime);
|
||||
Feed updateFeed(Feed feed, SyndFeed sf, long lastEntryTime);
|
||||
|
||||
/**
|
||||
* De-serializes a {@link BdfDictionary} into a {@link Feed}.
|
||||
|
||||
@@ -13,20 +13,25 @@ import org.briarproject.bramble.api.identity.Author;
|
||||
import org.briarproject.bramble.api.identity.AuthorFactory;
|
||||
import org.briarproject.bramble.api.identity.LocalAuthor;
|
||||
import org.briarproject.bramble.api.system.Clock;
|
||||
import org.briarproject.bramble.util.StringUtils;
|
||||
import org.briarproject.briar.api.blog.Blog;
|
||||
import org.briarproject.briar.api.blog.BlogFactory;
|
||||
import org.briarproject.briar.api.feed.Feed;
|
||||
import org.briarproject.briar.api.feed.RssProperties;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
import javax.inject.Inject;
|
||||
|
||||
import static org.briarproject.bramble.api.identity.AuthorConstants.MAX_AUTHOR_NAME_LENGTH;
|
||||
import static org.briarproject.bramble.util.StringUtils.truncateUtf8;
|
||||
import static org.briarproject.briar.api.feed.FeedConstants.KEY_FEED_ADDED;
|
||||
import static org.briarproject.briar.api.feed.FeedConstants.KEY_FEED_AUTHOR;
|
||||
import static org.briarproject.briar.api.feed.FeedConstants.KEY_FEED_DESC;
|
||||
import static org.briarproject.briar.api.feed.FeedConstants.KEY_FEED_LAST_ENTRY;
|
||||
import static org.briarproject.briar.api.feed.FeedConstants.KEY_FEED_PRIVATE_KEY;
|
||||
import static org.briarproject.briar.api.feed.FeedConstants.KEY_FEED_RSS_AUTHOR;
|
||||
import static org.briarproject.briar.api.feed.FeedConstants.KEY_FEED_RSS_LINK;
|
||||
import static org.briarproject.briar.api.feed.FeedConstants.KEY_FEED_RSS_TITLE;
|
||||
import static org.briarproject.briar.api.feed.FeedConstants.KEY_FEED_RSS_URI;
|
||||
import static org.briarproject.briar.api.feed.FeedConstants.KEY_FEED_UPDATED;
|
||||
import static org.briarproject.briar.api.feed.FeedConstants.KEY_FEED_URL;
|
||||
|
||||
@@ -47,29 +52,33 @@ class FeedFactoryImpl implements FeedFactory {
|
||||
}
|
||||
|
||||
@Override
|
||||
public Feed createFeed(String url, SyndFeed syndFeed) {
|
||||
String title = syndFeed.getTitle();
|
||||
public Feed createFeed(@Nullable String url, SyndFeed sf) {
|
||||
String title = sf.getTitle();
|
||||
if (title == null) title = "RSS";
|
||||
else title = StringUtils.truncateUtf8(title, MAX_AUTHOR_NAME_LENGTH);
|
||||
else title = truncateUtf8(title, MAX_AUTHOR_NAME_LENGTH);
|
||||
|
||||
LocalAuthor localAuthor = authorFactory.createLocalAuthor(title);
|
||||
Blog blog = blogFactory.createFeedBlog(localAuthor);
|
||||
long added = clock.currentTimeMillis();
|
||||
|
||||
return new Feed(url, blog, localAuthor, added);
|
||||
RssProperties properties = new RssProperties(url, sf.getTitle(),
|
||||
sf.getDescription(), sf.getAuthor(), sf.getLink(), sf.getUri());
|
||||
return new Feed(blog, localAuthor, properties, added, 0, 0);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Feed createFeed(Feed feed, SyndFeed f, long lastEntryTime) {
|
||||
public Feed updateFeed(Feed feed, SyndFeed sf, long lastEntryTime) {
|
||||
long updated = clock.currentTimeMillis();
|
||||
return new Feed(feed.getUrl(), feed.getBlog(), feed.getLocalAuthor(),
|
||||
f.getDescription(), f.getAuthor(), feed.getAdded(), updated,
|
||||
lastEntryTime);
|
||||
String url = feed.getProperties().getUrl();
|
||||
// Update the RSS properties
|
||||
RssProperties properties = new RssProperties(url, sf.getTitle(),
|
||||
sf.getDescription(), sf.getAuthor(), sf.getLink(), sf.getUri());
|
||||
return new Feed(feed.getBlog(), feed.getLocalAuthor(), properties,
|
||||
feed.getAdded(), updated, lastEntryTime);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Feed createFeed(BdfDictionary d) throws FormatException {
|
||||
String url = d.getString(KEY_FEED_URL);
|
||||
|
||||
BdfList authorList = d.getList(KEY_FEED_AUTHOR);
|
||||
PrivateKey privateKey =
|
||||
@@ -80,14 +89,21 @@ class FeedFactoryImpl implements FeedFactory {
|
||||
author.getPublicKey(), privateKey);
|
||||
Blog blog = blogFactory.createFeedBlog(localAuthor);
|
||||
|
||||
String desc = d.getOptionalString(KEY_FEED_DESC);
|
||||
String url = d.getOptionalString(KEY_FEED_URL);
|
||||
String description = d.getOptionalString(KEY_FEED_DESC);
|
||||
String rssAuthor = d.getOptionalString(KEY_FEED_RSS_AUTHOR);
|
||||
String title = d.getOptionalString(KEY_FEED_RSS_TITLE);
|
||||
String link = d.getOptionalString(KEY_FEED_RSS_LINK);
|
||||
String uri = d.getOptionalString(KEY_FEED_RSS_URI);
|
||||
RssProperties properties = new RssProperties(url, title, description,
|
||||
rssAuthor, link, uri);
|
||||
|
||||
long added = d.getLong(KEY_FEED_ADDED, 0L);
|
||||
long updated = d.getLong(KEY_FEED_UPDATED, 0L);
|
||||
long lastEntryTime = d.getLong(KEY_FEED_LAST_ENTRY, 0L);
|
||||
|
||||
return new Feed(url, blog, localAuthor, desc, rssAuthor, added,
|
||||
updated, lastEntryTime);
|
||||
return new Feed(blog, localAuthor, properties, added, updated,
|
||||
lastEntryTime);
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -95,17 +111,25 @@ class FeedFactoryImpl implements FeedFactory {
|
||||
LocalAuthor localAuthor = feed.getLocalAuthor();
|
||||
BdfList authorList = clientHelper.toList(localAuthor);
|
||||
BdfDictionary d = BdfDictionary.of(
|
||||
new BdfEntry(KEY_FEED_URL, feed.getUrl()),
|
||||
new BdfEntry(KEY_FEED_AUTHOR, authorList),
|
||||
new BdfEntry(KEY_FEED_PRIVATE_KEY, localAuthor.getPrivateKey()),
|
||||
new BdfEntry(KEY_FEED_ADDED, feed.getAdded()),
|
||||
new BdfEntry(KEY_FEED_UPDATED, feed.getUpdated()),
|
||||
new BdfEntry(KEY_FEED_LAST_ENTRY, feed.getLastEntryTime())
|
||||
);
|
||||
if (feed.getDescription() != null)
|
||||
d.put(KEY_FEED_DESC, feed.getDescription());
|
||||
if (feed.getRssAuthor() != null)
|
||||
d.put(KEY_FEED_RSS_AUTHOR, feed.getRssAuthor());
|
||||
RssProperties properties = feed.getProperties();
|
||||
if (properties.getUrl() != null)
|
||||
d.put(KEY_FEED_URL, properties.getUrl());
|
||||
if (properties.getTitle() != null)
|
||||
d.put(KEY_FEED_RSS_TITLE, properties.getTitle());
|
||||
if (properties.getDescription() != null)
|
||||
d.put(KEY_FEED_DESC, properties.getDescription());
|
||||
if (properties.getAuthor() != null)
|
||||
d.put(KEY_FEED_RSS_AUTHOR, properties.getAuthor());
|
||||
if (properties.getLink() != null)
|
||||
d.put(KEY_FEED_RSS_LINK, properties.getLink());
|
||||
if (properties.getUri() != null)
|
||||
d.put(KEY_FEED_RSS_URI, properties.getUri());
|
||||
return d;
|
||||
}
|
||||
|
||||
|
||||
@@ -31,7 +31,6 @@ import org.briarproject.bramble.api.sync.GroupId;
|
||||
import org.briarproject.bramble.api.system.Clock;
|
||||
import org.briarproject.bramble.api.system.TaskScheduler;
|
||||
import org.briarproject.bramble.api.system.Wakeful;
|
||||
import org.briarproject.bramble.util.StringUtils;
|
||||
import org.briarproject.briar.api.blog.Blog;
|
||||
import org.briarproject.briar.api.blog.BlogManager;
|
||||
import org.briarproject.briar.api.blog.BlogManager.RemoveBlogHook;
|
||||
@@ -39,6 +38,7 @@ import org.briarproject.briar.api.blog.BlogPost;
|
||||
import org.briarproject.briar.api.blog.BlogPostFactory;
|
||||
import org.briarproject.briar.api.feed.Feed;
|
||||
import org.briarproject.briar.api.feed.FeedManager;
|
||||
import org.briarproject.briar.api.feed.RssProperties;
|
||||
import org.briarproject.nullsafety.NotNullByDefault;
|
||||
|
||||
import java.io.IOException;
|
||||
@@ -47,7 +47,11 @@ import java.security.GeneralSecurityException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Comparator;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.ListIterator;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.Executor;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
import java.util.logging.Logger;
|
||||
@@ -61,17 +65,21 @@ import okhttp3.Request;
|
||||
import okhttp3.Response;
|
||||
import okhttp3.ResponseBody;
|
||||
|
||||
import static java.util.Collections.singletonList;
|
||||
import static java.util.Collections.sort;
|
||||
import static java.util.logging.Level.WARNING;
|
||||
import static java.util.logging.Logger.getLogger;
|
||||
import static org.briarproject.bramble.util.IoUtils.tryToClose;
|
||||
import static org.briarproject.bramble.util.LogUtils.logException;
|
||||
import static org.briarproject.bramble.util.StringUtils.isNullOrEmpty;
|
||||
import static org.briarproject.bramble.util.StringUtils.truncateUtf8;
|
||||
import static org.briarproject.briar.api.blog.BlogConstants.MAX_BLOG_POST_TEXT_LENGTH;
|
||||
import static org.briarproject.briar.api.feed.FeedConstants.FETCH_DELAY_INITIAL;
|
||||
import static org.briarproject.briar.api.feed.FeedConstants.FETCH_INTERVAL;
|
||||
import static org.briarproject.briar.api.feed.FeedConstants.FETCH_UNIT;
|
||||
import static org.briarproject.briar.api.feed.FeedConstants.KEY_FEEDS;
|
||||
import static org.briarproject.briar.util.HtmlUtils.ARTICLE;
|
||||
import static org.briarproject.briar.util.HtmlUtils.STRIP_ALL;
|
||||
import static org.briarproject.briar.util.HtmlUtils.clean;
|
||||
import static org.briarproject.briar.util.HtmlUtils.cleanAll;
|
||||
import static org.briarproject.briar.util.HtmlUtils.cleanArticle;
|
||||
|
||||
@ThreadSafe
|
||||
@NotNullByDefault
|
||||
@@ -79,7 +87,7 @@ class FeedManagerImpl implements FeedManager, EventListener, OpenDatabaseHook,
|
||||
RemoveBlogHook {
|
||||
|
||||
private static final Logger LOG =
|
||||
Logger.getLogger(FeedManagerImpl.class.getName());
|
||||
getLogger(FeedManagerImpl.class.getName());
|
||||
|
||||
private final TaskScheduler scheduler;
|
||||
private final Executor ioExecutor;
|
||||
@@ -89,6 +97,7 @@ class FeedManagerImpl implements FeedManager, EventListener, OpenDatabaseHook,
|
||||
private final BlogManager blogManager;
|
||||
private final BlogPostFactory blogPostFactory;
|
||||
private final FeedFactory feedFactory;
|
||||
private final FeedMatcher feedMatcher;
|
||||
private final Clock clock;
|
||||
private final WeakSingletonProvider<OkHttpClient> httpClientProvider;
|
||||
private final AtomicBoolean fetcherStarted = new AtomicBoolean(false);
|
||||
@@ -104,6 +113,7 @@ class FeedManagerImpl implements FeedManager, EventListener, OpenDatabaseHook,
|
||||
BlogManager blogManager,
|
||||
BlogPostFactory blogPostFactory,
|
||||
FeedFactory feedFactory,
|
||||
FeedMatcher feedMatcher,
|
||||
WeakSingletonProvider<OkHttpClient> httpClientProvider,
|
||||
Clock clock) {
|
||||
this.scheduler = scheduler;
|
||||
@@ -114,6 +124,7 @@ class FeedManagerImpl implements FeedManager, EventListener, OpenDatabaseHook,
|
||||
this.blogManager = blogManager;
|
||||
this.blogPostFactory = blogPostFactory;
|
||||
this.feedFactory = feedFactory;
|
||||
this.feedMatcher = feedMatcher;
|
||||
this.httpClientProvider = httpClientProvider;
|
||||
this.clock = clock;
|
||||
}
|
||||
@@ -160,37 +171,49 @@ class FeedManagerImpl implements FeedManager, EventListener, OpenDatabaseHook,
|
||||
|
||||
@Override
|
||||
public Feed addFeed(String url) throws DbException, IOException {
|
||||
// fetch syndication feed to get its metadata
|
||||
SyndFeed f = fetchSyndFeed(url);
|
||||
// fetch feed to get posts and metadata
|
||||
SyndFeed sf = fetchAndCleanFeed(url);
|
||||
return addFeed(url, sf);
|
||||
}
|
||||
|
||||
Feed feed = feedFactory.createFeed(url, f);
|
||||
@Override
|
||||
public Feed addFeed(InputStream in) throws DbException, IOException {
|
||||
// fetch feed to get posts and metadata
|
||||
SyndFeed sf = fetchAndCleanFeed(in);
|
||||
return addFeed(null, sf);
|
||||
}
|
||||
|
||||
// store feed and new blog
|
||||
Transaction txn = db.startTransaction(false);
|
||||
try {
|
||||
private Feed addFeed(@Nullable String url, SyndFeed sf) throws DbException {
|
||||
// extract properties from the feed
|
||||
RssProperties properties = new RssProperties(url, sf.getTitle(),
|
||||
sf.getDescription(), sf.getAuthor(), sf.getLink(), sf.getUri());
|
||||
|
||||
// check whether the properties match an existing feed
|
||||
List<Feed> candidates = db.transactionWithResult(true, this::getFeeds);
|
||||
Feed matched = feedMatcher.findMatchingFeed(properties, candidates);
|
||||
|
||||
Feed feed;
|
||||
if (matched == null) {
|
||||
LOG.info("Adding new feed");
|
||||
feed = feedFactory.createFeed(url, sf);
|
||||
// store feed metadata and new blog
|
||||
db.transaction(false, txn -> {
|
||||
blogManager.addBlog(txn, feed.getBlog());
|
||||
List<Feed> feeds = getFeeds(txn);
|
||||
feeds.add(feed);
|
||||
storeFeeds(txn, feeds);
|
||||
db.commitTransaction(txn);
|
||||
} finally {
|
||||
db.endTransaction(txn);
|
||||
});
|
||||
} else {
|
||||
LOG.info("New feed matches an existing feed");
|
||||
feed = matched;
|
||||
}
|
||||
|
||||
// fetch feed again and post entries
|
||||
Feed updatedFeed = fetchFeed(feed);
|
||||
// post entries
|
||||
long lastEntryTime = postFeedEntries(feed, sf.getEntries());
|
||||
Feed updatedFeed = feedFactory.updateFeed(feed, sf, lastEntryTime);
|
||||
|
||||
// store feed again to also store last added entry
|
||||
txn = db.startTransaction(false);
|
||||
try {
|
||||
List<Feed> feeds = getFeeds(txn);
|
||||
feeds.remove(feed);
|
||||
feeds.add(updatedFeed);
|
||||
storeFeeds(txn, feeds);
|
||||
db.commitTransaction(txn);
|
||||
} finally {
|
||||
db.endTransaction(txn);
|
||||
}
|
||||
// store feed metadata again to also store last entry time
|
||||
updateFeeds(singletonList(updatedFeed));
|
||||
|
||||
return updatedFeed;
|
||||
}
|
||||
@@ -198,14 +221,9 @@ class FeedManagerImpl implements FeedManager, EventListener, OpenDatabaseHook,
|
||||
@Override
|
||||
public void removeFeed(Feed feed) throws DbException {
|
||||
LOG.info("Removing RSS feed...");
|
||||
Transaction txn = db.startTransaction(false);
|
||||
try {
|
||||
// this will call removingBlog() where the feed itself gets removed
|
||||
blogManager.removeBlog(txn, feed.getBlog());
|
||||
db.commitTransaction(txn);
|
||||
} finally {
|
||||
db.endTransaction(txn);
|
||||
}
|
||||
db.transaction(false, txn ->
|
||||
blogManager.removeBlog(txn, feed.getBlog()));
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -215,10 +233,12 @@ class FeedManagerImpl implements FeedManager, EventListener, OpenDatabaseHook,
|
||||
// delete blog's RSS feed if we have it
|
||||
boolean found = false;
|
||||
List<Feed> feeds = getFeeds(txn);
|
||||
for (Feed f : feeds) {
|
||||
Iterator<Feed> it = feeds.iterator();
|
||||
while (it.hasNext()) {
|
||||
Feed f = it.next();
|
||||
if (f.getBlogId().equals(b.getId())) {
|
||||
it.remove();
|
||||
found = true;
|
||||
feeds.remove(f);
|
||||
break;
|
||||
}
|
||||
}
|
||||
@@ -248,7 +268,7 @@ class FeedManagerImpl implements FeedManager, EventListener, OpenDatabaseHook,
|
||||
return feeds;
|
||||
}
|
||||
|
||||
private void storeFeeds(@Nullable Transaction txn, List<Feed> feeds)
|
||||
private void storeFeeds(Transaction txn, List<Feed> feeds)
|
||||
throws DbException {
|
||||
|
||||
BdfList feedList = new BdfList();
|
||||
@@ -257,19 +277,29 @@ class FeedManagerImpl implements FeedManager, EventListener, OpenDatabaseHook,
|
||||
}
|
||||
BdfDictionary gm = BdfDictionary.of(new BdfEntry(KEY_FEEDS, feedList));
|
||||
try {
|
||||
if (txn == null) {
|
||||
clientHelper.mergeGroupMetadata(getLocalGroup().getId(), gm);
|
||||
} else {
|
||||
clientHelper.mergeGroupMetadata(txn, getLocalGroup().getId(),
|
||||
gm);
|
||||
}
|
||||
clientHelper.mergeGroupMetadata(txn, getLocalGroup().getId(), gm);
|
||||
} catch (FormatException e) {
|
||||
throw new DbException(e);
|
||||
}
|
||||
}
|
||||
|
||||
private void storeFeeds(List<Feed> feeds) throws DbException {
|
||||
storeFeeds(null, feeds);
|
||||
/**
|
||||
* Updates the given feeds in the stored list of feeds, without affecting
|
||||
* any other feeds in the list or re-adding any of the given feeds that
|
||||
* have been removed from the list.
|
||||
*/
|
||||
private void updateFeeds(List<Feed> updatedFeeds) throws DbException {
|
||||
Map<GroupId, Feed> updatedMap = new HashMap<>();
|
||||
for (Feed feed : updatedFeeds) updatedMap.put(feed.getBlogId(), feed);
|
||||
db.transaction(false, txn -> {
|
||||
List<Feed> feeds = getFeeds(txn);
|
||||
ListIterator<Feed> it = feeds.listIterator();
|
||||
while (it.hasNext()) {
|
||||
Feed updated = updatedMap.get(it.next().getBlogId());
|
||||
if (updated != null) it.set(updated);
|
||||
}
|
||||
storeFeeds(txn, feeds);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -295,65 +325,69 @@ class FeedManagerImpl implements FeedManager, EventListener, OpenDatabaseHook,
|
||||
return;
|
||||
}
|
||||
|
||||
if (feeds.isEmpty()) {
|
||||
LOG.info("No RSS feeds to update");
|
||||
return;
|
||||
}
|
||||
|
||||
// Fetch and update all feeds
|
||||
List<Feed> newFeeds = new ArrayList<>(feeds.size());
|
||||
List<Feed> updatedFeeds = new ArrayList<>(feeds.size());
|
||||
for (Feed feed : feeds) {
|
||||
try {
|
||||
newFeeds.add(fetchFeed(feed));
|
||||
String url = feed.getProperties().getUrl();
|
||||
if (url == null) continue;
|
||||
// fetch and clean feed
|
||||
SyndFeed sf = fetchAndCleanFeed(url);
|
||||
// sort and add new entries
|
||||
long lastEntryTime = postFeedEntries(feed, sf.getEntries());
|
||||
updatedFeeds.add(
|
||||
feedFactory.updateFeed(feed, sf, lastEntryTime));
|
||||
} catch (IOException | DbException e) {
|
||||
logException(LOG, WARNING, e);
|
||||
newFeeds.add(feed);
|
||||
}
|
||||
}
|
||||
|
||||
// Store updated feeds
|
||||
try {
|
||||
storeFeeds(newFeeds);
|
||||
updateFeeds(updatedFeeds);
|
||||
} catch (DbException e) {
|
||||
logException(LOG, WARNING, e);
|
||||
}
|
||||
LOG.info("Done updating RSS feeds");
|
||||
}
|
||||
|
||||
private SyndFeed fetchSyndFeed(String url) throws IOException {
|
||||
// fetch feed
|
||||
InputStream stream = getFeedInputStream(url);
|
||||
SyndFeed f = getSyndFeed(stream);
|
||||
stream.close();
|
||||
|
||||
if (f.getEntries().size() == 0)
|
||||
throw new IOException("Feed has no entries");
|
||||
|
||||
// clean title
|
||||
String title =
|
||||
StringUtils.isNullOrEmpty(f.getTitle()) ? null : f.getTitle();
|
||||
if (title != null) title = clean(title, STRIP_ALL);
|
||||
f.setTitle(title);
|
||||
|
||||
// clean description
|
||||
String description =
|
||||
StringUtils.isNullOrEmpty(f.getDescription()) ? null :
|
||||
f.getDescription();
|
||||
if (description != null) description = clean(description, STRIP_ALL);
|
||||
f.setDescription(description);
|
||||
|
||||
// clean author
|
||||
String author =
|
||||
StringUtils.isNullOrEmpty(f.getAuthor()) ? null : f.getAuthor();
|
||||
if (author != null) author = clean(author, STRIP_ALL);
|
||||
f.setAuthor(author);
|
||||
|
||||
return f;
|
||||
private SyndFeed fetchAndCleanFeed(String url) throws IOException {
|
||||
return fetchAndCleanFeed(getFeedInputStream(url));
|
||||
}
|
||||
|
||||
private Feed fetchFeed(Feed feed) throws IOException, DbException {
|
||||
// fetch and clean feed
|
||||
SyndFeed f = fetchSyndFeed(feed.getUrl());
|
||||
private SyndFeed fetchAndCleanFeed(InputStream in) throws IOException {
|
||||
SyndFeed sf;
|
||||
try {
|
||||
sf = getSyndFeed(in);
|
||||
} finally {
|
||||
tryToClose(in, LOG, WARNING);
|
||||
}
|
||||
|
||||
// sort and add new entries
|
||||
long lastEntryTime = postFeedEntries(feed, f.getEntries());
|
||||
// clean title
|
||||
String title = sf.getTitle();
|
||||
if (title != null) title = cleanAll(title);
|
||||
sf.setTitle(isNullOrEmpty(title) ? "RSS" : title);
|
||||
|
||||
return feedFactory.createFeed(feed, f, lastEntryTime);
|
||||
// clean description
|
||||
String description = sf.getDescription();
|
||||
if (description != null) description = cleanAll(description);
|
||||
sf.setDescription(isNullOrEmpty(description) ? null : description);
|
||||
|
||||
// clean author
|
||||
String author = sf.getAuthor();
|
||||
if (author != null) author = cleanAll(author);
|
||||
sf.setAuthor(isNullOrEmpty(author) ? null : author);
|
||||
|
||||
// set other relevant fields to null if empty
|
||||
if ("".equals(sf.getLink())) sf.setLink(null);
|
||||
if ("".equals(sf.getUri())) sf.setUri(null);
|
||||
|
||||
return sf;
|
||||
}
|
||||
|
||||
private InputStream getFeedInputStream(String url) throws IOException {
|
||||
@@ -380,12 +414,11 @@ class FeedManagerImpl implements FeedManager, EventListener, OpenDatabaseHook,
|
||||
}
|
||||
}
|
||||
|
||||
long postFeedEntries(Feed feed, List<SyndEntry> entries)
|
||||
private long postFeedEntries(Feed feed, List<SyndEntry> entries)
|
||||
throws DbException {
|
||||
|
||||
return db.transactionWithResult(false, txn -> {
|
||||
long lastEntryTime = feed.getLastEntryTime();
|
||||
Transaction txn = db.startTransaction(false);
|
||||
try {
|
||||
//noinspection Java8ListSort
|
||||
sort(entries, getEntryComparator());
|
||||
for (SyndEntry entry : entries) {
|
||||
@@ -404,11 +437,8 @@ class FeedManagerImpl implements FeedManager, EventListener, OpenDatabaseHook,
|
||||
if (entryTime > lastEntryTime) lastEntryTime = entryTime;
|
||||
}
|
||||
}
|
||||
db.commitTransaction(txn);
|
||||
} finally {
|
||||
db.endTransaction(txn);
|
||||
}
|
||||
return lastEntryTime;
|
||||
});
|
||||
}
|
||||
|
||||
private void postEntry(Transaction txn, Feed feed, SyndEntry entry) {
|
||||
@@ -417,7 +447,7 @@ class FeedManagerImpl implements FeedManager, EventListener, OpenDatabaseHook,
|
||||
// build post text
|
||||
StringBuilder b = new StringBuilder();
|
||||
|
||||
if (!StringUtils.isNullOrEmpty(entry.getTitle())) {
|
||||
if (!isNullOrEmpty(entry.getTitle())) {
|
||||
b.append("<h1>").append(entry.getTitle()).append("</h1>");
|
||||
}
|
||||
for (SyndContent content : entry.getContents()) {
|
||||
@@ -430,7 +460,7 @@ class FeedManagerImpl implements FeedManager, EventListener, OpenDatabaseHook,
|
||||
b.append(entry.getDescription().getValue());
|
||||
}
|
||||
b.append("<p>");
|
||||
if (!StringUtils.isNullOrEmpty(entry.getAuthor())) {
|
||||
if (!isNullOrEmpty(entry.getAuthor())) {
|
||||
b.append("-- ").append(entry.getAuthor());
|
||||
}
|
||||
if (entry.getPublishedDate() != null) {
|
||||
@@ -442,7 +472,7 @@ class FeedManagerImpl implements FeedManager, EventListener, OpenDatabaseHook,
|
||||
}
|
||||
b.append("</p>");
|
||||
String link = entry.getLink();
|
||||
if (!StringUtils.isNullOrEmpty(link)) {
|
||||
if (!isNullOrEmpty(link)) {
|
||||
b.append("<a href=\"").append(link).append("\">").append(link)
|
||||
.append("</a>");
|
||||
}
|
||||
@@ -472,8 +502,8 @@ class FeedManagerImpl implements FeedManager, EventListener, OpenDatabaseHook,
|
||||
}
|
||||
|
||||
private String getPostText(String text) {
|
||||
text = clean(text, ARTICLE);
|
||||
return StringUtils.truncateUtf8(text, MAX_BLOG_POST_TEXT_LENGTH);
|
||||
text = cleanArticle(text);
|
||||
return truncateUtf8(text, MAX_BLOG_POST_TEXT_LENGTH);
|
||||
}
|
||||
|
||||
private Comparator<SyndEntry> getEntryComparator() {
|
||||
|
||||
@@ -0,0 +1,22 @@
|
||||
package org.briarproject.briar.feed;
|
||||
|
||||
import org.briarproject.briar.api.feed.Feed;
|
||||
import org.briarproject.briar.api.feed.RssProperties;
|
||||
import org.briarproject.nullsafety.NotNullByDefault;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
import javax.annotation.concurrent.ThreadSafe;
|
||||
|
||||
@ThreadSafe
|
||||
@NotNullByDefault
|
||||
interface FeedMatcher {
|
||||
|
||||
/**
|
||||
* Returns the best match for the given candidate from the given list of
|
||||
* feeds, or null if there are no matches.
|
||||
*/
|
||||
@Nullable
|
||||
Feed findMatchingFeed(RssProperties candidate, List<Feed> feeds);
|
||||
}
|
||||
@@ -0,0 +1,69 @@
|
||||
package org.briarproject.briar.feed;
|
||||
|
||||
import org.briarproject.briar.api.feed.Feed;
|
||||
import org.briarproject.briar.api.feed.RssProperties;
|
||||
import org.briarproject.nullsafety.NotNullByDefault;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
import javax.inject.Inject;
|
||||
|
||||
@NotNullByDefault
|
||||
class FeedMatcherImpl implements FeedMatcher {
|
||||
|
||||
private static final int MIN_MATCHING_FIELDS = 2;
|
||||
|
||||
@Inject
|
||||
FeedMatcherImpl() {
|
||||
}
|
||||
|
||||
@Nullable
|
||||
@Override
|
||||
public Feed findMatchingFeed(RssProperties candidate, List<Feed> feeds) {
|
||||
// First pass: if the candidate was imported from a URL and we have
|
||||
// a feed that was imported from the same URL then it's a match
|
||||
String url = candidate.getUrl();
|
||||
if (url != null) {
|
||||
for (Feed f : feeds) {
|
||||
if (url.equals(f.getProperties().getUrl())) return f;
|
||||
}
|
||||
}
|
||||
// Second pass: if the candidate matches at least MIN_MATCHING_FIELDS
|
||||
// out of the title, description, author, link and URI, then return the
|
||||
// feed with the highest number of matching fields
|
||||
int bestScore = 0;
|
||||
Feed bestFeed = null;
|
||||
String title = candidate.getTitle();
|
||||
String description = candidate.getDescription();
|
||||
String author = candidate.getAuthor();
|
||||
String link = candidate.getLink();
|
||||
String uri = candidate.getUri();
|
||||
for (Feed f : feeds) {
|
||||
int score = 0;
|
||||
RssProperties p = f.getProperties();
|
||||
if (title != null && title.equals(p.getTitle())) {
|
||||
score++;
|
||||
}
|
||||
if (description != null && description.equals(p.getDescription())) {
|
||||
score++;
|
||||
}
|
||||
if (author != null && author.equals(p.getAuthor())) {
|
||||
score++;
|
||||
}
|
||||
if (link != null && link.equals(p.getLink())) {
|
||||
score++;
|
||||
}
|
||||
if (uri != null && uri.equals(p.getUri())) {
|
||||
score++;
|
||||
}
|
||||
if (score > bestScore) {
|
||||
bestScore = score;
|
||||
bestFeed = f;
|
||||
}
|
||||
}
|
||||
if (bestScore >= MIN_MATCHING_FIELDS) return bestFeed;
|
||||
// No match
|
||||
return null;
|
||||
}
|
||||
}
|
||||
@@ -38,4 +38,9 @@ public class FeedModule {
|
||||
FeedFactory provideFeedFactory(FeedFactoryImpl feedFactory) {
|
||||
return feedFactory;
|
||||
}
|
||||
|
||||
@Provides
|
||||
FeedMatcher provideFeedMatcher(FeedMatcherImpl feedMatcher) {
|
||||
return feedMatcher;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,12 +7,15 @@ import org.jsoup.safety.Safelist;
|
||||
@NotNullByDefault
|
||||
public class HtmlUtils {
|
||||
|
||||
public static Safelist STRIP_ALL = Safelist.none();
|
||||
public static Safelist ARTICLE = Safelist.basic()
|
||||
private static final Safelist STRIP_ALL = Safelist.none();
|
||||
private static final Safelist ARTICLE = Safelist.basic()
|
||||
.addTags("h1", "h2", "h3", "h4", "h5", "h6");
|
||||
|
||||
public static String clean(String s, Safelist list) {
|
||||
return Jsoup.clean(s, list);
|
||||
public static String cleanAll(String s) {
|
||||
return Jsoup.clean(s, STRIP_ALL);
|
||||
}
|
||||
|
||||
public static String cleanArticle(String s) {
|
||||
return Jsoup.clean(s, ARTICLE);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,159 @@
|
||||
package org.briarproject.briar.feed;
|
||||
|
||||
import org.briarproject.bramble.api.client.ClientHelper;
|
||||
import org.briarproject.bramble.api.data.BdfDictionary;
|
||||
import org.briarproject.bramble.api.data.BdfEntry;
|
||||
import org.briarproject.bramble.api.data.BdfList;
|
||||
import org.briarproject.bramble.api.identity.AuthorFactory;
|
||||
import org.briarproject.bramble.api.identity.LocalAuthor;
|
||||
import org.briarproject.bramble.api.sync.Group;
|
||||
import org.briarproject.bramble.api.system.Clock;
|
||||
import org.briarproject.bramble.test.BrambleMockTestCase;
|
||||
import org.briarproject.briar.api.blog.Blog;
|
||||
import org.briarproject.briar.api.blog.BlogFactory;
|
||||
import org.briarproject.briar.api.feed.Feed;
|
||||
import org.briarproject.briar.api.feed.RssProperties;
|
||||
import org.jmock.Expectations;
|
||||
import org.junit.Test;
|
||||
|
||||
import static org.briarproject.bramble.test.TestUtils.getGroup;
|
||||
import static org.briarproject.bramble.test.TestUtils.getLocalAuthor;
|
||||
import static org.briarproject.bramble.util.StringUtils.getRandomString;
|
||||
import static org.briarproject.briar.api.blog.BlogManager.CLIENT_ID;
|
||||
import static org.briarproject.briar.api.blog.BlogManager.MAJOR_VERSION;
|
||||
import static org.briarproject.briar.api.feed.FeedConstants.KEY_FEED_ADDED;
|
||||
import static org.briarproject.briar.api.feed.FeedConstants.KEY_FEED_AUTHOR;
|
||||
import static org.briarproject.briar.api.feed.FeedConstants.KEY_FEED_DESC;
|
||||
import static org.briarproject.briar.api.feed.FeedConstants.KEY_FEED_LAST_ENTRY;
|
||||
import static org.briarproject.briar.api.feed.FeedConstants.KEY_FEED_PRIVATE_KEY;
|
||||
import static org.briarproject.briar.api.feed.FeedConstants.KEY_FEED_RSS_AUTHOR;
|
||||
import static org.briarproject.briar.api.feed.FeedConstants.KEY_FEED_RSS_LINK;
|
||||
import static org.briarproject.briar.api.feed.FeedConstants.KEY_FEED_RSS_TITLE;
|
||||
import static org.briarproject.briar.api.feed.FeedConstants.KEY_FEED_RSS_URI;
|
||||
import static org.briarproject.briar.api.feed.FeedConstants.KEY_FEED_UPDATED;
|
||||
import static org.briarproject.briar.api.feed.FeedConstants.KEY_FEED_URL;
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertNull;
|
||||
|
||||
public class FeedFactoryImplTest extends BrambleMockTestCase {
|
||||
|
||||
private final AuthorFactory authorFactory =
|
||||
context.mock(AuthorFactory.class);
|
||||
private final BlogFactory blogFactory = context.mock(BlogFactory.class);
|
||||
private final ClientHelper clientHelper = context.mock(ClientHelper.class);
|
||||
private final Clock clock = context.mock(Clock.class);
|
||||
|
||||
private final LocalAuthor localAuthor = getLocalAuthor();
|
||||
private final Group blogGroup = getGroup(CLIENT_ID, MAJOR_VERSION);
|
||||
private final Blog blog = new Blog(blogGroup, localAuthor, true);
|
||||
private final BdfList authorList = BdfList.of("foo");
|
||||
private final long added = 123, updated = 234, lastEntryTime = 345;
|
||||
|
||||
private final String url = getRandomString(123);
|
||||
private final String description = getRandomString(123);
|
||||
private final String rssAuthor = getRandomString(123);
|
||||
private final String title = getRandomString(123);
|
||||
private final String link = getRandomString(123);
|
||||
private final String uri = getRandomString(123);
|
||||
|
||||
private final FeedFactoryImpl feedFactory = new FeedFactoryImpl(
|
||||
authorFactory, blogFactory, clientHelper, clock);
|
||||
|
||||
@Test
|
||||
public void testSerialiseAndDeserialiseWithoutOptionalFields()
|
||||
throws Exception {
|
||||
RssProperties propertiesBefore = new RssProperties(null, null, null,
|
||||
null, null, null);
|
||||
Feed before = new Feed(blog, localAuthor, propertiesBefore, added,
|
||||
updated, lastEntryTime);
|
||||
|
||||
|
||||
context.checking(new Expectations() {{
|
||||
oneOf(clientHelper).toList(localAuthor);
|
||||
will(returnValue(authorList));
|
||||
}});
|
||||
|
||||
BdfDictionary dict = feedFactory.feedToBdfDictionary(before);
|
||||
|
||||
BdfDictionary expectedDict = BdfDictionary.of(
|
||||
new BdfEntry(KEY_FEED_AUTHOR, authorList),
|
||||
new BdfEntry(KEY_FEED_PRIVATE_KEY, localAuthor.getPrivateKey()),
|
||||
new BdfEntry(KEY_FEED_ADDED, added),
|
||||
new BdfEntry(KEY_FEED_UPDATED, updated),
|
||||
new BdfEntry(KEY_FEED_LAST_ENTRY, lastEntryTime)
|
||||
);
|
||||
assertEquals(expectedDict, dict);
|
||||
|
||||
context.checking(new Expectations() {{
|
||||
oneOf(clientHelper).parseAndValidateAuthor(authorList);
|
||||
will(returnValue(localAuthor));
|
||||
oneOf(blogFactory).createFeedBlog(localAuthor);
|
||||
will(returnValue(blog));
|
||||
}});
|
||||
|
||||
Feed after = feedFactory.createFeed(dict);
|
||||
RssProperties afterProperties = after.getProperties();
|
||||
|
||||
assertNull(afterProperties.getUrl());
|
||||
assertNull(afterProperties.getTitle());
|
||||
assertNull(afterProperties.getDescription());
|
||||
assertNull(afterProperties.getAuthor());
|
||||
assertNull(afterProperties.getLink());
|
||||
assertNull(afterProperties.getUri());
|
||||
assertEquals(added, after.getAdded());
|
||||
assertEquals(updated, after.getUpdated());
|
||||
assertEquals(lastEntryTime, after.getLastEntryTime());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSerialiseAndDeserialiseWithOptionalFields()
|
||||
throws Exception {
|
||||
RssProperties propertiesBefore = new RssProperties(url, title,
|
||||
description, rssAuthor, link, uri);
|
||||
Feed before = new Feed(blog, localAuthor, propertiesBefore, added,
|
||||
updated, lastEntryTime);
|
||||
|
||||
|
||||
context.checking(new Expectations() {{
|
||||
oneOf(clientHelper).toList(localAuthor);
|
||||
will(returnValue(authorList));
|
||||
}});
|
||||
|
||||
BdfDictionary dict = feedFactory.feedToBdfDictionary(before);
|
||||
|
||||
BdfDictionary expectedDict = BdfDictionary.of(
|
||||
new BdfEntry(KEY_FEED_AUTHOR, authorList),
|
||||
new BdfEntry(KEY_FEED_PRIVATE_KEY, localAuthor.getPrivateKey()),
|
||||
new BdfEntry(KEY_FEED_ADDED, added),
|
||||
new BdfEntry(KEY_FEED_UPDATED, updated),
|
||||
new BdfEntry(KEY_FEED_LAST_ENTRY, lastEntryTime),
|
||||
new BdfEntry(KEY_FEED_URL, url),
|
||||
new BdfEntry(KEY_FEED_RSS_TITLE, title),
|
||||
new BdfEntry(KEY_FEED_DESC, description),
|
||||
new BdfEntry(KEY_FEED_RSS_AUTHOR, rssAuthor),
|
||||
new BdfEntry(KEY_FEED_RSS_LINK, link),
|
||||
new BdfEntry(KEY_FEED_RSS_URI, uri)
|
||||
);
|
||||
assertEquals(expectedDict, dict);
|
||||
|
||||
context.checking(new Expectations() {{
|
||||
oneOf(clientHelper).parseAndValidateAuthor(authorList);
|
||||
will(returnValue(localAuthor));
|
||||
oneOf(blogFactory).createFeedBlog(localAuthor);
|
||||
will(returnValue(blog));
|
||||
}});
|
||||
|
||||
Feed after = feedFactory.createFeed(dict);
|
||||
RssProperties afterProperties = after.getProperties();
|
||||
|
||||
assertEquals(url, afterProperties.getUrl());
|
||||
assertEquals(title, afterProperties.getTitle());
|
||||
assertEquals(description, afterProperties.getDescription());
|
||||
assertEquals(rssAuthor, afterProperties.getAuthor());
|
||||
assertEquals(link, afterProperties.getLink());
|
||||
assertEquals(uri, afterProperties.getUri());
|
||||
assertEquals(added, after.getAdded());
|
||||
assertEquals(updated, after.getUpdated());
|
||||
assertEquals(lastEntryTime, after.getLastEntryTime());
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,6 @@
|
||||
package org.briarproject.briar.feed;
|
||||
|
||||
import com.rometools.rome.feed.synd.SyndEntry;
|
||||
import com.rometools.rome.feed.synd.SyndEntryImpl;
|
||||
import com.rometools.rome.feed.synd.SyndFeed;
|
||||
|
||||
import org.briarproject.bramble.api.WeakSingletonProvider;
|
||||
import org.briarproject.bramble.api.client.ClientHelper;
|
||||
@@ -25,28 +24,31 @@ import org.briarproject.briar.api.blog.BlogManager;
|
||||
import org.briarproject.briar.api.blog.BlogPost;
|
||||
import org.briarproject.briar.api.blog.BlogPostFactory;
|
||||
import org.briarproject.briar.api.feed.Feed;
|
||||
import org.briarproject.briar.api.feed.RssProperties;
|
||||
import org.jmock.Expectations;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.net.UnknownHostException;
|
||||
import java.util.ArrayList;
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.Executor;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import javax.net.SocketFactory;
|
||||
|
||||
import okhttp3.Dns;
|
||||
import okhttp3.OkHttpClient;
|
||||
import okhttp3.mockwebserver.MockResponse;
|
||||
import okhttp3.mockwebserver.MockWebServer;
|
||||
|
||||
import static java.util.concurrent.TimeUnit.MILLISECONDS;
|
||||
import static java.util.Collections.singletonList;
|
||||
import static okhttp3.mockwebserver.SocketPolicy.DISCONNECT_DURING_RESPONSE_BODY;
|
||||
import static org.briarproject.bramble.test.TestUtils.getGroup;
|
||||
import static org.briarproject.bramble.test.TestUtils.getLocalAuthor;
|
||||
import static org.briarproject.bramble.test.TestUtils.getMessage;
|
||||
import static org.briarproject.bramble.util.StringUtils.UTF_8;
|
||||
import static org.briarproject.briar.api.feed.FeedConstants.KEY_FEEDS;
|
||||
import static org.briarproject.briar.api.feed.FeedManager.CLIENT_ID;
|
||||
import static org.briarproject.briar.api.feed.FeedManager.MAJOR_VERSION;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
|
||||
public class FeedManagerImplTest extends BrambleMockTestCase {
|
||||
|
||||
@@ -60,14 +62,10 @@ public class FeedManagerImplTest extends BrambleMockTestCase {
|
||||
private final BlogPostFactory blogPostFactory =
|
||||
context.mock(BlogPostFactory.class);
|
||||
private final FeedFactory feedFactory = context.mock(FeedFactory.class);
|
||||
private final FeedMatcher feedMatcher = context.mock(FeedMatcher.class);
|
||||
private final Clock clock = context.mock(Clock.class);
|
||||
private final Dns noDnsLookups = context.mock(Dns.class);
|
||||
|
||||
private final OkHttpClient client = new OkHttpClient.Builder()
|
||||
.socketFactory(SocketFactory.getDefault())
|
||||
.dns(noDnsLookups)
|
||||
.connectTimeout(60_000, MILLISECONDS)
|
||||
.build();
|
||||
private final OkHttpClient client = new OkHttpClient.Builder().build();
|
||||
private final WeakSingletonProvider<OkHttpClient> httpClientProvider =
|
||||
new WeakSingletonProvider<OkHttpClient>() {
|
||||
@Override
|
||||
@@ -83,14 +81,20 @@ public class FeedManagerImplTest extends BrambleMockTestCase {
|
||||
private final GroupId blogGroupId = blogGroup.getId();
|
||||
private final LocalAuthor localAuthor = getLocalAuthor();
|
||||
private final Blog blog = new Blog(blogGroup, localAuthor, true);
|
||||
private final Feed feed =
|
||||
new Feed("http://example.org", blog, localAuthor, 0);
|
||||
private final BdfDictionary feedDict = new BdfDictionary();
|
||||
private final Message message = getMessage(blogGroupId);
|
||||
private final BlogPost blogPost = new BlogPost(message, null, localAuthor);
|
||||
|
||||
private final long now = System.currentTimeMillis();
|
||||
// Round the publication date to a whole second to avoid rounding errors
|
||||
private final long pubDate = now / 1000 * 1000 - 1000;
|
||||
private final SimpleDateFormat sdf =
|
||||
new SimpleDateFormat("EEE, dd MMM yy HH:mm:ss Z");
|
||||
private final String pubDateString = sdf.format(new Date(pubDate));
|
||||
|
||||
private final FeedManagerImpl feedManager =
|
||||
new FeedManagerImpl(scheduler, ioExecutor, db, contactGroupFactory,
|
||||
clientHelper, blogManager, blogPostFactory, feedFactory,
|
||||
httpClientProvider, clock);
|
||||
feedMatcher, httpClientProvider, clock);
|
||||
|
||||
	@Test
	public void testFetchFeedsReturnsEarlyIfTorIsNotActive() {
@@ -99,55 +103,223 @@ public class FeedManagerImplTest extends BrambleMockTestCase {
	}

	@Test
	public void testEmptyFetchFeeds() throws Exception {
		BdfList feedList = new BdfList();
		expectGetFeeds(feedList);
		expectStoreFeed(feedList);
	public void testFetchFeedsEmptyList() throws Exception {
		// The list of feeds is empty
		expectGetFeeds();

		feedManager.setTorActive(true);
		feedManager.fetchFeeds();
	}

	@Test
	public void testFetchFeedsIoException() throws Exception {
		BdfDictionary feedDict = new BdfDictionary();
		BdfList feedList = BdfList.of(feedDict);
		// Fetching the feed will fail
		MockWebServer server = new MockWebServer();
		String url = server.url("/").toString();
		server.enqueue(new MockResponse()
				.setBody(" ")
				.setSocketPolicy(DISCONNECT_DURING_RESPONSE_BODY));

		expectGetFeeds(feedList);
		context.checking(new Expectations() {{
			oneOf(noDnsLookups).lookup("example.org");
			will(throwException(new UnknownHostException()));
		}});
		expectStoreFeed(feedList);
		Feed feed = createFeed(url, blog);

		expectGetFeeds(feed);
		expectGetAndStoreFeeds(feed);

		feedManager.setTorActive(true);
		feedManager.fetchFeeds();
	}

	@Test
	public void testPostFeedEntriesEmptyDate() throws Exception {
		Transaction txn = new Transaction(null, false);
		List<SyndEntry> entries = new ArrayList<>();
		entries.add(new SyndEntryImpl());
		SyndEntry entry = new SyndEntryImpl();
		entry.setUpdatedDate(new Date());
		entries.add(entry);
		String text = "<p>(" + entry.getUpdatedDate().toString() + ")</p>";
		Message msg = getMessage(blogGroupId);
		BlogPost post = new BlogPost(msg, null, localAuthor);
	public void testFetchFeedsEmptyResponseBody() throws Exception {
		// Fetching the feed will succeed, but parsing the empty body will fail
		MockWebServer server = new MockWebServer();
		String url = server.url("/").toString();
		server.enqueue(new MockResponse());

		context.checking(new Expectations() {{
			oneOf(db).startTransaction(false);
			will(returnValue(txn));
			oneOf(clock).currentTimeMillis();
			will(returnValue(42L));
			oneOf(blogPostFactory).createBlogPost(feed.getBlogId(), 42L, null,
					localAuthor, text);
			will(returnValue(post));
			oneOf(blogManager).addLocalPost(txn, post);
			oneOf(db).commitTransaction(txn);
			oneOf(db).endTransaction(txn);
		Feed feed = createFeed(url, blog);

		expectGetFeeds(feed);
		expectGetAndStoreFeeds(feed);

		feedManager.setTorActive(true);
		feedManager.fetchFeeds();
	}

	@Test
	public void testFetchFeedsNoEntries() throws Exception {
		// Fetching and parsing the feed will succeed; there are no entries
		String feedXml = createRssFeedXml();

		MockWebServer server = new MockWebServer();
		String url = server.url("/").toString();
		server.enqueue(new MockResponse().setBody(feedXml));

		Feed feed = createFeed(url, blog);

		expectGetFeeds(feed);
		expectUpdateFeedNoEntries(feed);
		expectGetAndStoreFeeds(feed);

		feedManager.setTorActive(true);
		feedManager.fetchFeeds();
	}

	@Test
	public void testFetchFeedsOneEntry() throws Exception {
		// Fetching and parsing the feed will succeed; there is one entry
		String entryXml =
				"<item><pubDate>" + pubDateString + "</pubDate></item>";
		String feedXml = createRssFeedXml(entryXml);

		MockWebServer server = new MockWebServer();
		String url = server.url("/").toString();
		server.enqueue(new MockResponse().setBody(feedXml));

		Feed feed = createFeed(url, blog);

		expectGetFeeds(feed);
		expectUpdateFeedOneEntry(feed);
		expectGetAndStoreFeeds(feed);

		feedManager.setTorActive(true);
		feedManager.fetchFeeds();
	}

	@Test
	public void testAddNewFeedFromUrl() throws Exception {
		// Fetching and parsing the feed will succeed; there are no entries
		String feedXml = createRssFeedXml();

		MockWebServer server = new MockWebServer();
		String url = server.url("/").toString();
		server.enqueue(new MockResponse().setBody(feedXml));

		Feed newFeed = createFeed(url, blog);

		Group existingBlogGroup = getGroup(BlogManager.CLIENT_ID,
				BlogManager.MAJOR_VERSION);
		Blog existingBlog = new Blog(existingBlogGroup, localAuthor, true);
		Feed existingFeed = createFeed("http://example.com", existingBlog);

		expectGetFeeds(existingFeed);

		context.checking(new DbExpectations() {{
			// The added feed doesn't match any existing feed
			oneOf(feedMatcher).findMatchingFeed(with(any(RssProperties.class)),
					with(singletonList(existingFeed)));
			will(returnValue(null));
			// Create the new feed
			oneOf(feedFactory).createFeed(with(url), with(any(SyndFeed.class)));
			will(returnValue(newFeed));
			// Add the new feed to the list of feeds
			Transaction txn = new Transaction(null, false);
			oneOf(db).transaction(with(false), withDbRunnable(txn));
			oneOf(blogManager).addBlog(txn, blog);
			expectGetFeeds(txn, existingFeed);
			expectStoreFeeds(txn, existingFeed, newFeed);
		}});
		feedManager.postFeedEntries(feed, entries);

		expectUpdateFeedNoEntries(newFeed);
		expectGetAndStoreFeeds(existingFeed, newFeed);

		feedManager.addFeed(url);
	}

	@Test
	public void testAddExistingFeedFromUrl() throws Exception {
		// Fetching and parsing the feed will succeed; there are no entries
		String feedXml = createRssFeedXml();

		MockWebServer server = new MockWebServer();
		String url = server.url("/").toString();
		server.enqueue(new MockResponse().setBody(feedXml));

		Feed newFeed = createFeed(url, blog);

		expectGetFeeds(newFeed);

		context.checking(new DbExpectations() {{
			// The added feed matches an existing feed
			oneOf(feedMatcher).findMatchingFeed(with(any(RssProperties.class)),
					with(singletonList(newFeed)));
			will(returnValue(newFeed));
		}});

		expectUpdateFeedNoEntries(newFeed);
		expectGetAndStoreFeeds(newFeed);

		feedManager.addFeed(url);
	}

	@Test
	public void testAddNewFeedFromInputStream() throws Exception {
		// Reading and parsing the feed will succeed; there are no entries
		String feedXml = createRssFeedXml();
		Feed newFeed = createFeed(null, blog);

		Group existingBlogGroup = getGroup(BlogManager.CLIENT_ID,
				BlogManager.MAJOR_VERSION);
		Blog existingBlog = new Blog(existingBlogGroup, localAuthor, true);
		Feed existingFeed = createFeed(null, existingBlog);

		expectGetFeeds(existingFeed);

		context.checking(new DbExpectations() {{
			// The added feed doesn't match any existing feed
			oneOf(feedMatcher).findMatchingFeed(with(any(RssProperties.class)),
					with(singletonList(existingFeed)));
			will(returnValue(null));
			// Create the new feed
			oneOf(feedFactory).createFeed(with(nullValue(String.class)),
					with(any(SyndFeed.class)));
			will(returnValue(newFeed));
			// Add the new feed to the list of feeds
			Transaction txn = new Transaction(null, false);
			oneOf(db).transaction(with(false), withDbRunnable(txn));
			oneOf(blogManager).addBlog(txn, blog);
			expectGetFeeds(txn, existingFeed);
			expectStoreFeeds(txn, existingFeed, newFeed);
		}});

		expectUpdateFeedNoEntries(newFeed);
		expectGetAndStoreFeeds(existingFeed, newFeed);

		feedManager.addFeed(new ByteArrayInputStream(feedXml.getBytes(UTF_8)));
	}

	@Test
	public void testAddExistingFeedFromInputStream() throws Exception {
		// Reading and parsing the feed will succeed; there are no entries
		String feedXml = createRssFeedXml();
		Feed newFeed = createFeed(null, blog);

		expectGetFeeds(newFeed);

		context.checking(new DbExpectations() {{
			// The added feed matches an existing feed
			oneOf(feedMatcher).findMatchingFeed(with(any(RssProperties.class)),
					with(singletonList(newFeed)));
			will(returnValue(newFeed));
		}});

		expectUpdateFeedNoEntries(newFeed);
		expectGetAndStoreFeeds(newFeed);

		feedManager.addFeed(new ByteArrayInputStream(feedXml.getBytes(UTF_8)));
	}

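	// Creates a feed for the given blog whose only RSS property is its URL
	// (which may be null); the added, updated and last-entry times are zero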
	private Feed createFeed(String url, Blog blog) {
		RssProperties properties = new RssProperties(url,
				null, null, null, null, null);
		return new Feed(blog, localAuthor, properties, 0, 0, 0);
	}

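	// Wraps the given entry fragments in a minimal RSS 2.0 document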
	private String createRssFeedXml(String... entries) {
		StringBuilder sb = new StringBuilder();
		sb.append("<rss version='2.0'><channel>");
		for (String entry : entries) sb.append(entry);
		sb.append("</channel></rss>");
		return sb.toString();
	}

	private void expectGetLocalGroup() {
@@ -158,33 +330,88 @@ public class FeedManagerImplTest extends BrambleMockTestCase {
		}});
	}

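	// Expects the feed list to be loaded within a new read-only transaction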
	private void expectGetFeeds(BdfList feedList) throws Exception {
	private void expectGetFeeds(Feed... feeds) throws Exception {
		Transaction txn = new Transaction(null, true);
		context.checking(new DbExpectations() {{
			oneOf(db).transactionWithResult(with(true), withDbCallable(txn));
		}});
		expectGetFeeds(txn, feeds);
	}

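	// Expects the feed dictionaries to be read from the local group's
	// metadata and parsed into the given feeds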
	private void expectGetFeeds(Transaction txn, Feed... feeds)
			throws Exception {
		BdfList feedList = new BdfList();
		for (int i = 0; i < feeds.length; i++) {
			feedList.add(new BdfDictionary());
		}
		BdfDictionary feedsDict =
				BdfDictionary.of(new BdfEntry(KEY_FEEDS, feedList));
		expectGetLocalGroup();
		context.checking(new DbExpectations() {{
			oneOf(db).transactionWithResult(with(true), withDbCallable(txn));

		context.checking(new Expectations() {{
			oneOf(clientHelper).getGroupMetadataAsDictionary(txn, localGroupId);
			will(returnValue(feedsDict));
			if (feedList.size() == 1) {
				oneOf(feedFactory).createFeed(feedDict);
				will(returnValue(feed));
			for (int i = 0; i < feeds.length; i++) {
				oneOf(feedFactory).createFeed(feedList.getDictionary(i));
				will(returnValue(feeds[i]));
			}
		}});
	}

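	// Expects the given feeds to be serialised and merged into the local
	// group's metadata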
	private void expectStoreFeed(BdfList feedList) throws Exception {
	private void expectStoreFeeds(Transaction txn, Feed... feeds)
			throws Exception {
		BdfList feedList = new BdfList();
		for (int i = 0; i < feeds.length; i++) {
			feedList.add(new BdfDictionary());
		}
		BdfDictionary feedDict =
				BdfDictionary.of(new BdfEntry(KEY_FEEDS, feedList));
		expectGetLocalGroup();

		context.checking(new Expectations() {{
			oneOf(clientHelper).mergeGroupMetadata(localGroupId, feedDict);
			if (feedList.size() == 1) {
				oneOf(feedFactory).feedToBdfDictionary(feed);
				will(returnValue(feedList.getDictionary(0)));
			oneOf(clientHelper).mergeGroupMetadata(txn, localGroupId, feedDict);
			for (int i = 0; i < feeds.length; i++) {
				oneOf(feedFactory).feedToBdfDictionary(feeds[i]);
				will(returnValue(feedList.getDictionary(i)));
			}
		}});
	}

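	// Expects the feeds to be loaded and stored again within a single
	// read-write transaction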
	private void expectGetAndStoreFeeds(Feed... feeds) throws Exception {
		context.checking(new DbExpectations() {{
			Transaction txn = new Transaction(null, false);
			oneOf(db).transaction(with(false), withDbRunnable(txn));
			expectGetFeeds(txn, feeds);
			expectStoreFeeds(txn, feeds);
		}});
	}

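	// Expects the feed to be updated after a fetch that finds no new entries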
	private void expectUpdateFeedNoEntries(Feed feed) throws Exception {
		Transaction txn = new Transaction(null, false);

		context.checking(new DbExpectations() {{
			oneOf(db).transactionWithResult(with(false), withDbCallable(txn));
			oneOf(feedFactory).updateFeed(with(feed), with(any(SyndFeed.class)),
					with(0L));
			will(returnValue(feed));
		}});
	}

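	// Expects a local blog post to be created for the single entry and the
	// feed to be updated with the entry's publication date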
	private void expectUpdateFeedOneEntry(Feed feed) throws Exception {
		Transaction txn = new Transaction(null, false);
		String body = "<p>(" + new Date(pubDate) + ")</p>";

		context.checking(new DbExpectations() {{
			oneOf(db).transactionWithResult(with(false), withDbCallable(txn));
			oneOf(clock).currentTimeMillis();
			will(returnValue(now));
			oneOf(blogPostFactory).createBlogPost(blogGroupId, pubDate, null,
					localAuthor, body);
			will(returnValue(blogPost));
			oneOf(blogManager).addLocalPost(txn, blogPost);
			oneOf(feedFactory).updateFeed(with(feed), with(any(SyndFeed.class)),
					with(pubDate));
			will(returnValue(feed));
		}});
	}
}

@@ -5,30 +5,43 @@ import org.briarproject.bramble.api.identity.IdentityManager;
import org.briarproject.bramble.api.lifecycle.LifecycleManager;
import org.briarproject.bramble.test.BrambleTestCase;
import org.briarproject.bramble.test.TestDatabaseConfigModule;
import org.briarproject.bramble.test.TestUtils;
import org.briarproject.briar.api.blog.Blog;
import org.briarproject.briar.api.blog.BlogManager;
import org.briarproject.briar.api.blog.BlogPostHeader;
import org.briarproject.briar.api.feed.Feed;
import org.briarproject.briar.api.feed.FeedManager;
import org.briarproject.nullsafety.NullSafety;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.File;
import java.io.FileInputStream;
import java.util.Collection;

import javax.annotation.Nullable;

import static org.briarproject.bramble.test.TestUtils.deleteTestDirectory;
import static org.briarproject.bramble.test.TestUtils.getSecretKey;
import static org.briarproject.bramble.test.TestUtils.getTestDirectory;
import static org.briarproject.nullsafety.NullSafety.requireNonNull;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

public class FeedManagerIntegrationTest extends BrambleTestCase {

	private static final String FEED_PATH =
			"src/test/resources/briarproject.org_news_index.xml";
	private static final String FEED_URL =
			"https://briarproject.org/news/index.xml";
	private static final String FEED_TITLE = "News - Briar";

	private LifecycleManager lifecycleManager;
	private FeedManager feedManager;
	private BlogManager blogManager;
	private final File testDir = TestUtils.getTestDirectory();
	private final File testDir = getTestDirectory();
	private final File testFile = new File(testDir, "feedTest");

	@Before
@@ -38,7 +51,8 @@ public class FeedManagerIntegrationTest extends BrambleTestCase {
				DaggerFeedManagerIntegrationTestComponent.builder()
						.testDatabaseConfigModule(
								new TestDatabaseConfigModule(testFile)).build();
		FeedManagerIntegrationTestComponent.Helper.injectEagerSingletons(component);
		FeedManagerIntegrationTestComponent.Helper
				.injectEagerSingletons(component);
		component.inject(this);

		IdentityManager identityManager = component.getIdentityManager();
@@ -54,17 +68,30 @@ public class FeedManagerIntegrationTest extends BrambleTestCase {
	}

	@Test
	public void testFeedImportAndRemoval() throws Exception {
	public void testFeedImportAndRemovalFromUrl() throws Exception {
		testFeedImportAndRemoval(FEED_URL, null);
	}

	@Test
	public void testFeedImportAndRemovalFromFile() throws Exception {
		testFeedImportAndRemoval(null, FEED_PATH);
	}

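	// Imports the feed from either the given URL or the given local file,
	// checks the imported blog and its posts, then removes the feed again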
	private void testFeedImportAndRemoval(@Nullable String url,
			@Nullable String path) throws Exception {
		// initially, there's only the one personal blog
		Collection<Blog> blogs = blogManager.getBlogs();
		assertEquals(1, blogs.size());
		Blog personalBlog = blogs.iterator().next();

		// add feed into a dedicated blog
		String url = "https://www.schneier.com/blog/atom.xml";
		if (url == null) {
			feedManager.addFeed(new FileInputStream(requireNonNull(path)));
		} else {
			feedManager.addFeed(url);
		}

		// then there's the feed's blog now
		// now there's the feed's blog too
		blogs = blogManager.getBlogs();
		assertEquals(2, blogs.size());
		Blog feedBlog = null;
@@ -80,15 +107,16 @@ public class FeedManagerIntegrationTest extends BrambleTestCase {
		assertTrue(feed.getLastEntryTime() > 0);
		assertTrue(feed.getAdded() > 0);
		assertTrue(feed.getUpdated() > 0);
		assertEquals(url, feed.getUrl());
		assertTrue(NullSafety.equals(url, feed.getProperties().getUrl()));
		assertEquals(feedBlog, feed.getBlog());
		assertEquals("Schneier on Security", feed.getTitle());
		assertEquals(FEED_TITLE, feed.getTitle());
		assertEquals(feed.getTitle(), feed.getBlog().getName());
		assertEquals(feed.getTitle(), feed.getLocalAuthor().getName());

		// check the feed entries have been added to the blog as expected
		Collection<BlogPostHeader> headers =
				blogManager.getPostHeaders(feedBlog.getId());
		assertFalse(headers.isEmpty());
		for (BlogPostHeader header : headers) {
			assertTrue(header.isRssFeed());
		}
@@ -105,6 +133,6 @@ public class FeedManagerIntegrationTest extends BrambleTestCase {
	public void tearDown() throws Exception {
		lifecycleManager.stopServices();
		lifecycleManager.waitForShutdown();
		TestUtils.deleteTestDirectory(testDir);
		deleteTestDirectory(testDir);
	}
}

@@ -0,0 +1,179 @@
package org.briarproject.briar.feed;

import org.briarproject.bramble.api.identity.LocalAuthor;
import org.briarproject.bramble.api.sync.ClientId;
import org.briarproject.bramble.test.BrambleTestCase;
import org.briarproject.briar.api.blog.Blog;
import org.briarproject.briar.api.feed.Feed;
import org.briarproject.briar.api.feed.RssProperties;
import org.junit.Test;

import java.util.Random;

import static java.util.Arrays.asList;
import static org.briarproject.bramble.test.TestUtils.getClientId;
import static org.briarproject.bramble.test.TestUtils.getGroup;
import static org.briarproject.bramble.test.TestUtils.getLocalAuthor;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;

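// These tests exercise the matching rules that the FeedMatcherImpl under test
// appears to implement: a candidate whose URL equals an existing feed's URL is
// matched on the URL alone; otherwise at least two non-null RSS fields must
// match, and the feed with the most matching fields is chosen.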
public class FeedMatcherImplTest extends BrambleTestCase {

	private static final String URL = "url";
	private static final String TITLE = "title";
	private static final String DESCRIPTION = "description";
	private static final String AUTHOR = "author";
	private static final String LINK = "link";
	private static final String URI = "uri";

	private final Random random = new Random();
	private final ClientId clientId = getClientId();
	private final LocalAuthor localAuthor = getLocalAuthor();
	private final FeedMatcher matcher = new FeedMatcherImpl();

	@Test
	public void testFeedWithMatchingUrlIsChosen() {
		RssProperties candidate = new RssProperties(URL,
				TITLE, DESCRIPTION, AUTHOR, LINK, URI);
		// The first feed has a different/null URL but matching RSS fields
		Feed feed1 = createFeed(new RssProperties(nope(),
				TITLE, DESCRIPTION, AUTHOR, LINK, URI));
		// The second feed has a matching URL but different/null RSS fields
		Feed feed2 = createFeed(new RssProperties(URL,
				nope(), nope(), nope(), nope(), nope()));

		Feed match = matcher.findMatchingFeed(candidate, asList(feed1, feed2));

		// The matcher should choose the feed with the matching URL
		assertNotNull(match);
		assertSame(feed2, match);
	}

	@Test
	public void testNullUrlIsNotMatched() {
		// The candidate has a null URL
		RssProperties candidate = new RssProperties(null,
				TITLE, DESCRIPTION, AUTHOR, LINK, URI);
		// The first feed has a non-null URL and matching RSS fields
		Feed feed1 = createFeed(new RssProperties(URL,
				TITLE, DESCRIPTION, AUTHOR, LINK, URI));
		// The second feed has a null URL and different/null RSS fields
		Feed feed2 = createFeed(new RssProperties(null,
				nope(), nope(), nope(), nope(), nope()));

		Feed match = matcher.findMatchingFeed(candidate, asList(feed1, feed2));

		// The matcher should choose the feed with the matching RSS fields
		assertNotNull(match);
		assertSame(feed1, match);
	}

	@Test
	public void testDoesNotMatchOneRssField() {
		testDoesNotMatchRssFields(TITLE, nope(), nope(), nope(), nope());
		testDoesNotMatchRssFields(nope(), DESCRIPTION, nope(), nope(), nope());
		testDoesNotMatchRssFields(nope(), nope(), AUTHOR, nope(), nope());
		testDoesNotMatchRssFields(nope(), nope(), nope(), LINK, nope());
		testDoesNotMatchRssFields(nope(), nope(), nope(), nope(), URL);
	}

	private void testDoesNotMatchRssFields(String title, String description,
			String author, String link, String uri) {
		RssProperties candidate = new RssProperties(null,
				TITLE, DESCRIPTION, AUTHOR, LINK, URL);
		// The first feed has no matching RSS fields
		Feed feed1 = createFeed(new RssProperties(null,
				nope(), nope(), nope(), nope(), nope()));
		// The second feed has the given RSS fields
		Feed feed2 = createFeed(new RssProperties(null,
				title, description, author, link, uri));

		Feed match = matcher.findMatchingFeed(candidate, asList(feed1, feed2));

		// The matcher should not choose either of the feeds
		assertNull(match);
	}

	@Test
	public void testMatchesTwoRssFields() {
		testMatchesRssFields(TITLE, DESCRIPTION, nope(), nope(), nope());
		testMatchesRssFields(nope(), DESCRIPTION, AUTHOR, nope(), nope());
		testMatchesRssFields(nope(), nope(), AUTHOR, LINK, nope());
		testMatchesRssFields(nope(), nope(), nope(), LINK, URI);
	}

	private void testMatchesRssFields(String title, String description,
			String author, String link, String uri) {
		RssProperties candidate = new RssProperties(null,
				TITLE, DESCRIPTION, AUTHOR, LINK, URI);
		// The first feed has no matching RSS fields
		Feed feed1 = createFeed(new RssProperties(null,
				nope(), nope(), nope(), nope(), nope()));
		// The second feed has the given RSS fields
		Feed feed2 = createFeed(new RssProperties(null,
				title, description, author, link, uri));
		// The third feed has one matching RSS field
		Feed feed3 = createFeed(new RssProperties(null,
				TITLE, nope(), nope(), nope(), nope()));

		FeedMatcher matcher = new FeedMatcherImpl();
		Feed match = matcher.findMatchingFeed(candidate,
				asList(feed1, feed2, feed3));

		// The matcher should choose the second feed
		assertSame(feed2, match);
	}

	@Test
	public void testFeedWithMostMatchingRssFieldsIsChosen() {
		RssProperties candidate = new RssProperties(null,
				TITLE, DESCRIPTION, AUTHOR, LINK, URI);
		// The first feed has no matching RSS fields
		Feed feed1 = createFeed(new RssProperties(null,
				nope(), nope(), nope(), nope(), nope()));
		// The second feed has three matching RSS fields
		Feed feed2 = createFeed(new RssProperties(null,
				TITLE, DESCRIPTION, AUTHOR, nope(), nope()));
		// The third feed has two matching RSS fields
		Feed feed3 = createFeed(new RssProperties(null,
				TITLE, DESCRIPTION, nope(), nope(), nope()));

		Feed match = matcher.findMatchingFeed(candidate,
				asList(feed1, feed2, feed3));

		// The matcher should choose the second feed
		assertSame(feed2, match);
	}

	@Test
	public void testNullRssFieldsAreNotMatched() {
		// The candidate has a null URL and null RSS fields
		RssProperties candidate = new RssProperties(null,
				null, null, null, null, null);
		// The first feed has a null URL and non-null RSS fields
		Feed feed1 = createFeed(new RssProperties(null,
				TITLE, DESCRIPTION, AUTHOR, LINK, URI));
		// The second feed has a non-null URL and null RSS fields
		Feed feed2 = createFeed(new RssProperties(URL,
				null, null, null, null, null));

		Feed match = matcher.findMatchingFeed(candidate, asList(feed1, feed2));

		// The matcher should not choose either of the feeds
		assertNull(match);
	}

	/**
	 * Returns an RSS field that doesn't match the default, either because it's
	 * null or because it's a different non-null value.
	 */
	private String nope() {
		return random.nextBoolean() ? null : "x";
	}

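	// Creates a feed with the given RSS properties and a newly created blog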
	private Feed createFeed(RssProperties properties) {
		Blog blog = new Blog(getGroup(clientId, 123), localAuthor, true);
		return new Feed(blog, localAuthor, properties, 0, 0, 0);
	}
}
@@ -0,0 +1,95 @@
<feed xmlns="http://www.w3.org/2005/Atom">

  <title>News - Briar</title>

  <link href="https://briarproject.org/news/index.xml" rel="self"/>
  <link href="https://briarproject.org/news/"/>
  <updated>2022-12-05T12:00:00+00:00</updated>
  <id>https://briarproject.org/news/</id>
  <author>
    <name>The Briar Team</name>
  </author>
  <generator>Hugo -- gohugo.io</generator>
  <entry>
    <title type="html"><![CDATA[Briar Desktop got another round of funding]]></title>
    <link href="https://briarproject.org/news/2022-briar-desktop-nlnet-funding/"/>
    <id>https://briarproject.org/news/2022-briar-desktop-nlnet-funding/</id>
    <published>2022-12-05T12:00:00+00:00</published>
    <updated>2022-12-05T12:00:00+00:00</updated>
    <summary><![CDATA[Briar Desktop got another round of funding So far, Briar is only available as an Android app, which is preventing some organizations that work in repressive environments from using it as a secure communications tool and considering it as a more secure alternative to email. To remedy that, we have started working on a desktop app in September 2021 that is supposed to work on three major operating systems: Linux, macOS and Windows.]]></summary>

  </entry>
  <entry>
    <title type="html"><![CDATA[Briar is available on Google Play again]]></title>
    <link href="https://briarproject.org/news/2022-briar-removed-from-google-play/"/>
    <id>https://briarproject.org/news/2022-briar-removed-from-google-play/</id>
    <published>2022-02-28T13:20:00+00:00</published>
    <updated>2022-02-28T13:20:00+00:00</updated>
    <summary><![CDATA[Status update Update (February 28, 13:20 UTC): Briar is available on Google Play again.
Briar was briefly removed from Google Play because we didn’t provide Google’s review team with a username and password for testing the app. We provided Google with a username and password for testing and the app is now available again.
About Briar Briar is a messaging app designed for activists, journalists, and anyone else who needs a safe, easy and robust way to communicate.]]></summary>

  </entry>
  <entry>
    <title type="html"><![CDATA[Briar 1.4 released - offline app sharing, message transfer via SD cards and USB sticks]]></title>
    <link href="https://briarproject.org/news/2021-briar-1.4-released/"/>
    <id>https://briarproject.org/news/2021-briar-1.4-released/</id>
    <published>2021-11-15T00:00:00+00:00</published>
    <updated>2021-11-15T00:00:00+00:00</updated>
    <summary><![CDATA[Press release The Briar Project released version 1.4 of its Android app today. This release adds a couple of new features, highlighted below.
First of all, users can now share the app offline. Prior to this release, the only way to get the app was to to download it from the internet, which requires an internet connection. Now, it is possible to share the app offline to others who don’t have it installed.]]></summary>

  </entry>
  <entry>
    <title type="html"><![CDATA[Briar 1.3 released - image attachments, profile images and disappearing messages]]></title>
    <link href="https://briarproject.org/news/2021-briar-1.3-released/"/>
    <id>https://briarproject.org/news/2021-briar-1.3-released/</id>
    <published>2021-06-07T00:00:00+00:00</published>
    <updated>2021-06-07T00:00:00+00:00</updated>
    <summary><![CDATA[Press release The Briar Project released version 1.3 of its Android app today. Thanks to support from eQualit.ie, this release adds several new features that have been requested by many users over the years.
With today’s release, users can upload profile pictures that will be visible only to their contacts.
Lots of people have asked for a way to send images via Briar. We listened! This release adds the ability to send images in private conversations.]]></summary>

  </entry>
  <entry>
    <title type="html"><![CDATA[Briar 1.2 released, contacts can now be added by exchanging links]]></title>
    <link href="https://briarproject.org/news/2019-briar-1.2-released-remote-contacts/"/>
    <id>https://briarproject.org/news/2019-briar-1.2-released-remote-contacts/</id>
    <published>2019-11-06T00:00:00+00:00</published>
    <updated>2019-11-06T00:00:00+00:00</updated>
    <summary><![CDATA[Press Release The Briar Project released version 1.2 of its Android app today. This release allows users to add each other securely by exchanging links. Previously users needed to meet in person or ask a mutual contact to introduce them.
Most messenger apps find your contacts by uploading your phone’s contact list to a server. Since Briar is designed to protect metadata and contact relationships, it instead uses the Tor network to connect directly and securely to the person you’re adding, without revealing your contact list to anyone.]]></summary>

  </entry>
  <entry>
    <title type="html"><![CDATA[Briar 1.1 released with dark theme, new emoji and more]]></title>
    <link href="https://briarproject.org/news/2018-briar-1.1-released/"/>
    <id>https://briarproject.org/news/2018-briar-1.1-released/</id>
    <published>2018-09-14T00:00:00+00:00</published>
    <updated>2018-09-14T00:00:00+00:00</updated>
    <summary><![CDATA[Press Release The Briar Project released version 1.1 of its Android app today. This release adds new features following the app’s first public release in May.
Thanks to support from the Open Technology Fund, the new release has a dark theme designed by Ura Design. Users can switch between the light and dark themes, or use an automatic mode that activates the dark theme at night. The conversation screen has also been redesigned, with rounded message bubbles and a new color scheme.]]></summary>

  </entry>
  <entry>
    <title type="html"><![CDATA[Briar - Secure P2P Messenger Releases First Version, Receives New Funding]]></title>
    <link href="https://briarproject.org/news/2018-1.0-released-new-funding/"/>
    <id>https://briarproject.org/news/2018-1.0-released-new-funding/</id>
    <published>2018-05-09T00:00:00+00:00</published>
    <updated>2018-05-09T00:00:00+00:00</updated>
    <summary><![CDATA[Press Release The peer-to-peer messenger Briar released its first stable version today. It is available for Android devices from Google Play or F-Droid. This release follows a security audit and a 10 month public beta period during which many bugs were fixed and lots of feedback was received. The Briar Project wishes to thank all beta testers for their contributions.
The development of Briar will continue with help from the Open Technology Fund, which has previously supported the project as part of its mission to promote internet freedom worldwide.]]></summary>

  </entry>
  <entry>
    <title type="html"><![CDATA[Briar - Darknet Messenger Releases Beta, Passes Security Audit]]></title>
    <link href="https://briarproject.org/news/2017-beta-released-security-audit/"/>
    <id>https://briarproject.org/news/2017-beta-released-security-audit/</id>
    <published>2017-07-21T00:00:00+00:00</published>
    <updated>2017-07-21T00:00:00+00:00</updated>
    <summary><![CDATA[Press Release After extensive private beta tests, the first public beta of Briar was released today. Briar is a secure messaging app for Android.
Unlike other popular apps, Briar does not require servers to work. It connects users directly using a peer-to-peer network. This makes it resistant to censorship and allows it to work even without internet access.
The app encrypts all data end-to-end and also hides metadata about who is communicating.]]></summary>

  </entry>
</feed>
@@ -39,6 +39,7 @@ buildscript {
		bouncy_castle_version = '1.71'
		junit_version = "4.13.2"
		jmock_version = '2.12.0'
		mockwebserver_version = '4.9.3'
	}
	dependencies {
		classpath 'com.android.tools.build:gradle:7.2.2'
