mirror of https://github.com/miniflux/v2.git synced 2025-09-15 18:57:04 +00:00

feat: support force refresh in feed edit and feed entries page

Author: njzy, 2023-08-08 14:12:41 +00:00 (committed by Frédéric Guillot)
Parent: 3060946cc1
Commit: 79c91d71c8
30 changed files with 88 additions and 23 deletions
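
The two excerpts below thread a new forceRefresh boolean from the feed handler into the entry processor, so the caller decides whether existing entries are re-fetched. The UI side named in the commit title (the feed edit form and the feed entries page) is not part of this excerpt; as a rough, hypothetical sketch only, a caller might derive the flag from the request before invoking RefreshFeed with the extra argument. The helper name and the query parameter below are illustrative assumptions, not code from this commit:

package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
)

// parseForceRefresh is a hypothetical helper: it derives the new forceRefresh
// flag from the incoming request, which a UI handler could then pass along as
// handler.RefreshFeed(store, userID, feedID, forceRefresh).
// The "forceRefresh" query parameter is an assumption for illustration only.
func parseForceRefresh(r *http.Request) bool {
	return r.URL.Query().Get("forceRefresh") == "1"
}

func main() {
	r := httptest.NewRequest(http.MethodGet, "/feed/refresh?forceRefresh=1", nil)
	fmt.Println(parseForceRefresh(r)) // true
}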

@@ -83,7 +83,7 @@ func CreateFeed(store *storage.Storage, userID int64, feedCreationRequest *model
 	subscription.WithClientResponse(response)
 	subscription.CheckedNow()
-	processor.ProcessFeedEntries(store, subscription, user)
+	processor.ProcessFeedEntries(store, subscription, user, true)
 	if storeErr := store.CreateFeed(subscription); storeErr != nil {
 		return nil, storeErr
@@ -104,7 +104,7 @@ func CreateFeed(store *storage.Storage, userID int64, feedCreationRequest *model
 }
 // RefreshFeed refreshes a feed.
-func RefreshFeed(store *storage.Storage, userID, feedID int64) error {
+func RefreshFeed(store *storage.Storage, userID, feedID int64, forceRefresh bool) error {
 	defer timer.ExecutionTime(time.Now(), fmt.Sprintf("[RefreshFeed] feedID=%d", feedID))
 	user, storeErr := store.UserByID(userID)
 	if storeErr != nil {
@@ -173,10 +173,11 @@ func RefreshFeed(store *storage.Storage, userID, feedID int64) error {
 		}
 		originalFeed.Entries = updatedFeed.Entries
-		processor.ProcessFeedEntries(store, originalFeed, user)
+		processor.ProcessFeedEntries(store, originalFeed, user, forceRefresh)
-		// We don't update existing entries when the crawler is enabled (we crawl only inexisting entries).
-		if storeErr := store.RefreshFeedEntries(originalFeed.UserID, originalFeed.ID, originalFeed.Entries, !originalFeed.Crawler); storeErr != nil {
+		// We don't update existing entries when the crawler is enabled (we crawl only inexisting entries). Unless it is forced to refresh
+		updateExistingEntries := forceRefresh || !originalFeed.Crawler
+		if storeErr := store.RefreshFeedEntries(originalFeed.UserID, originalFeed.ID, originalFeed.Entries, updateExistingEntries); storeErr != nil {
 			originalFeed.WithError(storeErr.Error())
 			store.UpdateFeedError(originalFeed)
 			return storeErr
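
The behavioral core of this hunk is that a forced refresh overrides the crawler's usual rule of leaving existing entries untouched. A minimal standalone sketch of that decision, with illustrative names rather than the Miniflux API:

package main

import "fmt"

// updateExistingEntries mirrors the expression introduced above in RefreshFeed:
// existing entries are rewritten when the refresh is forced, or when the
// crawler is disabled for the feed.
func updateExistingEntries(forceRefresh, crawlerEnabled bool) bool {
	return forceRefresh || !crawlerEnabled
}

func main() {
	for _, force := range []bool{false, true} {
		for _, crawler := range []bool{false, true} {
			fmt.Printf("forceRefresh=%-5v crawler=%-5v -> update existing entries: %v\n",
				force, crawler, updateExistingEntries(force, crawler))
		}
	}
}

The only combination that still skips the update is crawler enabled without a forced refresh, which is exactly the behaviour before this commit.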

@@ -38,7 +38,7 @@ var (
 )
 // ProcessFeedEntries downloads original web page for entries and apply filters.
-func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, user *model.User) {
+func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, user *model.User, forceRefresh bool) {
 	var filteredEntries model.Entries
 	// array used for bulk push
@@ -56,7 +56,7 @@ func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, user *model.Us
 		url := getUrlFromEntry(feed, entry)
 		entryIsNew := !store.EntryURLExists(feed.ID, entry.URL)
-		if feed.Crawler && entryIsNew {
+		if feed.Crawler && (entryIsNew || forceRefresh) {
 			logger.Debug("[Processor] Crawling entry %q from feed %q", url, feed.FeedURL)
 			startTime := time.Now()
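
In the processor, the crawl condition widens from "crawler enabled and the entry is new" to also cover forced refreshes, so a force refresh re-downloads the original page even for entries that are already stored. A self-contained restatement of just that predicate (names are illustrative, not the Miniflux processor API):

package main

import "fmt"

// shouldCrawl restates the condition changed in ProcessFeedEntries above:
// the original page is fetched when the crawler is enabled and the entry is
// new, or when the refresh was forced.
func shouldCrawl(crawlerEnabled, entryIsNew, forceRefresh bool) bool {
	return crawlerEnabled && (entryIsNew || forceRefresh)
}

func main() {
	fmt.Println(shouldCrawl(true, false, true))  // true: force refresh re-crawls an existing entry
	fmt.Println(shouldCrawl(true, false, false)) // false: existing entry, no force
	fmt.Println(shouldCrawl(false, true, true))  // false: crawler disabled, nothing is fetched
}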