
refactor(reader): move the fetcher outside of a loop

There is no need to rebuild the fetcher for every item; creating it once is enough.
jvoisin 2025-08-02 15:47:17 +02:00 committed by Frédéric Guillot
parent d3ad460c9d
commit 5affd78f4f
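
The pattern here is ordinary loop-invariant hoisting: the request builder is configured entirely from feed-level settings, so its construction can move above the loop. A minimal sketch of the idea in Go, using hypothetical stand-in names rather than Miniflux's actual fetcher types:

package main

import "fmt"

// requestBuilder is a stand-in for fetcher.RequestBuilder; all names in
// this sketch are illustrative, not Miniflux's real API.
type requestBuilder struct{ userAgent string }

func newRequestBuilder(ua string) *requestBuilder {
	return &requestBuilder{userAgent: ua}
}

func processEntries(entries []string) {
	// Hoisted: the configuration depends only on feed-level settings,
	// which are constant across iterations, so build the builder once.
	builder := newRequestBuilder("Miniflux/2.x")

	for _, entry := range entries {
		// Every iteration reuses the same builder instead of rebuilding it.
		fmt.Printf("fetching %s as %q\n", entry, builder.userAgent)
	}
}

func main() {
	processEntries([]string{"https://example.org/a", "https://example.org/b"})
}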

@@ -50,6 +50,17 @@ func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, userID int64,
 		slog.Int64("feed_id", feed.ID),
 	)
+	requestBuilder := fetcher.NewRequestBuilder()
+	requestBuilder.WithUserAgent(feed.UserAgent, config.Opts.HTTPClientUserAgent())
+	requestBuilder.WithCookie(feed.Cookie)
+	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
+	requestBuilder.WithProxyRotator(proxyrotator.ProxyRotatorInstance)
+	requestBuilder.WithCustomFeedProxyURL(feed.ProxyURL)
+	requestBuilder.WithCustomApplicationProxyURL(config.Opts.HTTPClientProxyURL())
+	requestBuilder.UseCustomApplicationProxyURL(feed.FetchViaProxy)
+	requestBuilder.IgnoreTLSErrors(feed.AllowSelfSignedCertificates)
+	requestBuilder.DisableHTTP2(feed.DisableHTTP2)
+
 	// Processing older entries first ensures that their creation timestamp is lower than newer entries.
 	for _, entry := range slices.Backward(feed.Entries) {
 		slog.Debug("Processing entry",
@@ -95,17 +106,6 @@ func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, userID int64,
 			startTime := time.Now()
-			requestBuilder := fetcher.NewRequestBuilder()
-			requestBuilder.WithUserAgent(feed.UserAgent, config.Opts.HTTPClientUserAgent())
-			requestBuilder.WithCookie(feed.Cookie)
-			requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
-			requestBuilder.WithProxyRotator(proxyrotator.ProxyRotatorInstance)
-			requestBuilder.WithCustomFeedProxyURL(feed.ProxyURL)
-			requestBuilder.WithCustomApplicationProxyURL(config.Opts.HTTPClientProxyURL())
-			requestBuilder.UseCustomApplicationProxyURL(feed.FetchViaProxy)
-			requestBuilder.IgnoreTLSErrors(feed.AllowSelfSignedCertificates)
-			requestBuilder.DisableHTTP2(feed.DisableHTTP2)
-
 			scrapedPageBaseURL, extractedContent, scraperErr := scraper.ScrapeWebsite(
 				requestBuilder,
 				entry.URL,
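
Note on the design choice: the refactor assumes fetcher.RequestBuilder is safe to reuse across requests, since every option it is given (user agent, cookie, timeout, proxy configuration, TLS and HTTP/2 flags) comes from the feed or the global config and does not vary per entry; only the URL passed to scraper.ScrapeWebsite changes inside the loop.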