Mirror of https://github.com/miniflux/v2.git

Simplify feed entries filtering

- Rename processor package to filter
- Remove boilerplate code
Frédéric Guillot 2018-10-14 22:33:19 -07:00
parent 234b3710d4
commit b8f874a37d
4 changed files with 45 additions and 86 deletions


@@ -14,9 +14,9 @@ import (
 	"miniflux.app/logger"
 	"miniflux.app/model"
 	"miniflux.app/reader/browser"
+	"miniflux.app/reader/filter"
 	"miniflux.app/reader/icon"
 	"miniflux.app/reader/parser"
-	"miniflux.app/reader/processor"
 	"miniflux.app/storage"
 	"miniflux.app/timer"
 )
@@ -63,9 +63,7 @@ func (h *Handler) CreateFeed(userID, categoryID int64, url string, crawler bool,
 	subscription.WithClientResponse(response)
 	subscription.CheckedNow()
 
-	feedProcessor := processor.NewFeedProcessor(userID, h.store, subscription)
-	feedProcessor.WithCrawler(crawler)
-	feedProcessor.Process()
+	filter.Apply(h.store, subscription)
 
 	if storeErr := h.store.CreateFeed(subscription); storeErr != nil {
 		return nil, storeErr
@@ -108,22 +106,18 @@ func (h *Handler) RefreshFeed(userID, feedID int64) error {
 	if response.IsModified(originalFeed.EtagHeader, originalFeed.LastModifiedHeader) {
 		logger.Debug("[Handler:RefreshFeed] Feed #%d has been modified", feedID)
 
-		subscription, parseErr := parser.ParseFeed(response.String())
+		updatedFeed, parseErr := parser.ParseFeed(response.String())
 		if parseErr != nil {
 			originalFeed.WithError(parseErr.Localize(printer))
 			h.store.UpdateFeed(originalFeed)
 			return parseErr
 		}
 
-		feedProcessor := processor.NewFeedProcessor(userID, h.store, subscription)
-		feedProcessor.WithScraperRules(originalFeed.ScraperRules)
-		feedProcessor.WithUserAgent(originalFeed.UserAgent)
-		feedProcessor.WithRewriteRules(originalFeed.RewriteRules)
-		feedProcessor.WithCrawler(originalFeed.Crawler)
-		feedProcessor.Process()
+		originalFeed.Entries = updatedFeed.Entries
+		filter.Apply(h.store, originalFeed)
 
-		// Note: We don't update existing entries when the crawler is enabled (we crawl only inexisting entries).
-		if storeErr := h.store.UpdateEntries(originalFeed.UserID, originalFeed.ID, subscription.Entries, !originalFeed.Crawler); storeErr != nil {
+		// We don't update existing entries when the crawler is enabled (we crawl only inexisting entries).
+		if storeErr := h.store.UpdateEntries(originalFeed.UserID, originalFeed.ID, originalFeed.Entries, !originalFeed.Crawler); storeErr != nil {
 			return storeErr
 		}
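
Both call sites now hand the whole feed to filter.Apply(store, feed) instead of constructing a FeedProcessor and copying the crawler flag and the scraper, user-agent, and rewrite rules over one by one. Below is a minimal sketch of what such an entry point could look like, assuming only what the diff shows: the package path miniflux.app/reader/filter and the Apply(store, feed) call shape. The function body and its per-entry steps are hypothetical illustrations, not the actual implementation.

// Package filter post-processes the entries of a freshly parsed feed.
// Sketch only: the Apply signature mirrors the call sites in the diff,
// while everything inside the function body is assumed for illustration.
package filter

import (
	"miniflux.app/model"
	"miniflux.app/storage"
)

// Apply replaces the former processor.NewFeedProcessor(...).Process() chain.
// Because it receives the whole *model.Feed, callers no longer have to copy
// the crawler flag or the scraper/rewrite rules field by field.
func Apply(store *storage.Storage, feed *model.Feed) {
	for _, entry := range feed.Entries {
		// Hypothetical per-entry work: skip entries already known to the
		// store when the crawler is enabled, fetch full content, apply the
		// feed's rewrite rules, and so on.
		_ = entry
	}
	_ = store // placeholder so the sketch compiles without the real steps
}

Passing the feed itself keeps the filtering configuration next to the entries it applies to, which is what lets the RefreshFeed hunk shrink from six setter calls to two lines.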