Mirror of https://github.com/miniflux/v2.git

Add feed option to ignore HTTP cache

commit 6c6ca69141 (parent 57f62b4797)
Author: Frédéric Guillot
Date:   2020-06-05 21:50:59 -07:00

23 changed files with 105 additions and 52 deletions

@@ -105,8 +105,12 @@ func (h *Handler) RefreshFeed(userID, feedID int64) error {
 	request := client.New(originalFeed.FeedURL)
 	request.WithCredentials(originalFeed.Username, originalFeed.Password)
-	request.WithCacheHeaders(originalFeed.EtagHeader, originalFeed.LastModifiedHeader)
 	request.WithUserAgent(originalFeed.UserAgent)
+
+	if !originalFeed.IgnoreHTTPCache {
+		request.WithCacheHeaders(originalFeed.EtagHeader, originalFeed.LastModifiedHeader)
+	}
+
 	response, requestErr := browser.Exec(request)
 	if requestErr != nil {
 		originalFeed.WithError(requestErr.Localize(printer))
@@ -114,7 +118,7 @@ func (h *Handler) RefreshFeed(userID, feedID int64) error {
 		return requestErr
 	}

-	if response.IsModified(originalFeed.EtagHeader, originalFeed.LastModifiedHeader) {
+	if originalFeed.IgnoreHTTPCache || response.IsModified(originalFeed.EtagHeader, originalFeed.LastModifiedHeader) {
 		logger.Debug("[Handler:RefreshFeed] Feed #%d has been modified", feedID)

 		updatedFeed, parseErr := parser.ParseFeed(response.BodyAsString())
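
Taken together, the two hunks above make RefreshFeed skip HTTP conditional-request handling when the new IgnoreHTTPCache flag is set on a feed: the stored ETag/Last-Modified validators are no longer sent with the request, and the response is always treated as modified. A minimal sketch of that logic using only net/http follows; fetchFeed and its parameters are illustrative stand-ins, not the actual miniflux client API.

	package main

	import (
		"fmt"
		"io"
		"net/http"
	)

	// fetchFeed sends the If-None-Match / If-Modified-Since validators only
	// when ignoreHTTPCache is false. All names here are illustrative, not
	// the miniflux client API.
	func fetchFeed(url, etag, lastModified string, ignoreHTTPCache bool) (string, error) {
		req, err := http.NewRequest(http.MethodGet, url, nil)
		if err != nil {
			return "", err
		}

		if !ignoreHTTPCache {
			if etag != "" {
				req.Header.Set("If-None-Match", etag)
			}
			if lastModified != "" {
				req.Header.Set("If-Modified-Since", lastModified)
			}
		}

		resp, err := http.DefaultClient.Do(req)
		if err != nil {
			return "", err
		}
		defer resp.Body.Close()

		// Without the validators the server can never answer 304 Not Modified,
		// so the feed is always re-downloaded and re-parsed.
		if resp.StatusCode == http.StatusNotModified {
			return "", nil // cached copy is still fresh; nothing to parse
		}

		body, err := io.ReadAll(resp.Body)
		if err != nil {
			return "", err
		}
		return string(body), nil
	}

	func main() {
		// Hypothetical feed URL; the validators would come from a previous fetch.
		body, err := fetchFeed("https://example.org/feed.xml", `"abc123"`, "", true)
		if err != nil {
			fmt.Println("fetch failed:", err)
			return
		}
		fmt.Printf("fetched %d bytes\n", len(body))
	}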

@@ -16,6 +16,8 @@ import (
 // ProcessFeedEntries downloads original web page for entries and apply filters.
 func ProcessFeedEntries(store *storage.Storage, feed *model.Feed) {
 	for _, entry := range feed.Entries {
+		logger.Debug("[Feed #%d] Processing entry %s", feed.ID, entry.URL)
+
 		if feed.Crawler {
 			if !store.EntryURLExists(feed.ID, entry.URL) {
 				content, err := scraper.Fetch(entry.URL, feed.ScraperRules, feed.UserAgent)
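
This second hunk only adds a debug line, but it sits at the top of the per-entry crawl loop: each entry is logged, and the full page is scraped only for crawler-enabled feeds whose entry URL is not already stored. A minimal sketch of that guard, with an in-memory map standing in for store.EntryURLExists and simplified stand-in types for model.Feed and its entries:

	package main

	import "log"

	// Illustrative stand-ins for the model.Entry / model.Feed types.
	type entry struct{ URL string }

	type feed struct {
		ID      int64
		Crawler bool
		Entries []*entry
	}

	// seenURLs plays the role of store.EntryURLExists: entries already in
	// the database keep their stored content instead of being scraped again.
	var seenURLs = map[string]bool{
		"https://example.org/old-post": true,
	}

	func processFeedEntries(f *feed) {
		for _, e := range f.Entries {
			log.Printf("[Feed #%d] Processing entry %s", f.ID, e.URL)
			if f.Crawler && !seenURLs[e.URL] {
				// scraper.Fetch would download the full page content here.
				log.Printf("[Feed #%d] Scraping %s", f.ID, e.URL)
			}
		}
	}

	func main() {
		processFeedEntries(&feed{
			ID:      1,
			Crawler: true,
			Entries: []*entry{
				{URL: "https://example.org/old-post"},
				{URL: "https://example.org/new-post"},
			},
		})
	}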