mirror of
https://github.com/miniflux/v2.git
synced 2025-06-27 16:36:00 +00:00
perf(reader): optimize RemoveTrackingParameters
A bit more than 10% of processor.ProcessFeedEntries' CPU time is spent in urlcleaner.RemoveTrackingParameters — specifically in calling url.Parse — so let's hoist that operation out of it and perform it once before calling urlcleaner.RemoveTrackingParameters multiple times.

Co-authored-by: Frédéric Guillot <f@miniflux.net>
This commit is contained in:
parent
0caadf82f2
commit
7c857bdc72
4 changed files with 23 additions and 28 deletions
|
@ -5,6 +5,7 @@ package processor // import "miniflux.app/v2/internal/reader/processor"
|
|||
|
||||
import (
|
||||
"log/slog"
|
||||
"net/url"
|
||||
"regexp"
|
||||
"time"
|
||||
|
||||
|
@ -36,6 +37,10 @@ func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, userID int64,
|
|||
return
|
||||
}
|
||||
|
||||
// The errors are handled in RemoveTrackingParameters.
|
||||
parsedFeedURL, _ := url.Parse(feed.FeedURL)
|
||||
parsedSiteURL, _ := url.Parse(feed.SiteURL)
|
||||
|
||||
// Process older entries first
|
||||
for i := len(feed.Entries) - 1; i >= 0; i-- {
|
||||
entry := feed.Entries[i]
|
||||
|
@ -52,7 +57,8 @@ func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, userID int64,
|
|||
continue
|
||||
}
|
||||
|
||||
if cleanedURL, err := urlcleaner.RemoveTrackingParameters(feed.FeedURL, feed.SiteURL, entry.URL); err == nil {
|
||||
parsedInputUrl, _ := url.Parse(entry.URL)
|
||||
if cleanedURL, err := urlcleaner.RemoveTrackingParameters(parsedFeedURL, parsedSiteURL, parsedInputUrl); err == nil {
|
||||
entry.URL = cleanedURL
|
||||
}
|
||||
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue