Mirror of https://github.com/miniflux/v2.git, synced 2025-08-11 17:51:01 +00:00
refactor(reader): move the fetcher outside of a loop
There is no need to rebuild the fetcher for every item; creating it once is enough.
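The change is the usual hoisting of loop-invariant work: the request configuration depends only on the feed and on global options, not on the individual entry, so it can be built once before the loop and reused for every entry. A minimal, self-contained sketch of the pattern (the `requestConfig` type and its methods below are illustrative stand-ins, not Miniflux's actual `fetcher.RequestBuilder` API):

	package main

	import (
		"fmt"
		"time"
	)

	// requestConfig is a hypothetical stand-in for a request builder whose
	// settings do not depend on the item being processed.
	type requestConfig struct {
		userAgent string
		timeout   time.Duration
	}

	func newRequestConfig(userAgent string, timeout time.Duration) *requestConfig {
		return &requestConfig{userAgent: userAgent, timeout: timeout}
	}

	func (c *requestConfig) fetch(url string) string {
		// A real implementation would perform an HTTP request here.
		return fmt.Sprintf("fetched %s (ua=%q, timeout=%s)", url, c.userAgent, c.timeout)
	}

	func main() {
		entries := []string{"https://example.org/a", "https://example.org/b"}

		// Build the configuration once, outside the loop: none of its inputs
		// change per entry, so rebuilding it on every iteration is wasted work.
		cfg := newRequestConfig("Mozilla/5.0 (Miniflux)", 30*time.Second)

		for _, url := range entries {
			fmt.Println(cfg.fetch(url))
		}
	}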
Parent: d3ad460c9d
Commit: 5affd78f4f

1 changed file with 11 additions and 11 deletions
@@ -50,6 +50,17 @@ func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, userID int64,
 		slog.Int64("feed_id", feed.ID),
 	)
 
+	requestBuilder := fetcher.NewRequestBuilder()
+	requestBuilder.WithUserAgent(feed.UserAgent, config.Opts.HTTPClientUserAgent())
+	requestBuilder.WithCookie(feed.Cookie)
+	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
+	requestBuilder.WithProxyRotator(proxyrotator.ProxyRotatorInstance)
+	requestBuilder.WithCustomFeedProxyURL(feed.ProxyURL)
+	requestBuilder.WithCustomApplicationProxyURL(config.Opts.HTTPClientProxyURL())
+	requestBuilder.UseCustomApplicationProxyURL(feed.FetchViaProxy)
+	requestBuilder.IgnoreTLSErrors(feed.AllowSelfSignedCertificates)
+	requestBuilder.DisableHTTP2(feed.DisableHTTP2)
+
 	// Processing older entries first ensures that their creation timestamp is lower than newer entries.
 	for _, entry := range slices.Backward(feed.Entries) {
 		slog.Debug("Processing entry",
@@ -95,17 +106,6 @@ func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, userID int64,
 
 			startTime := time.Now()
 
-			requestBuilder := fetcher.NewRequestBuilder()
-			requestBuilder.WithUserAgent(feed.UserAgent, config.Opts.HTTPClientUserAgent())
-			requestBuilder.WithCookie(feed.Cookie)
-			requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
-			requestBuilder.WithProxyRotator(proxyrotator.ProxyRotatorInstance)
-			requestBuilder.WithCustomFeedProxyURL(feed.ProxyURL)
-			requestBuilder.WithCustomApplicationProxyURL(config.Opts.HTTPClientProxyURL())
-			requestBuilder.UseCustomApplicationProxyURL(feed.FetchViaProxy)
-			requestBuilder.IgnoreTLSErrors(feed.AllowSelfSignedCertificates)
-			requestBuilder.DisableHTTP2(feed.DisableHTTP2)
-
 			scrapedPageBaseURL, extractedContent, scraperErr := scraper.ScrapeWebsite(
 				requestBuilder,
 				entry.URL,