Mirror of https://github.com/miniflux/v2.git

Implement structured logging using log/slog package

Frédéric Guillot 2023-09-24 16:32:09 -07:00
parent 54cb8fa028
commit c0e954f19d
77 changed files with 1868 additions and 892 deletions
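
The migration pattern is the same at every call site: a printf-style call to the internal logger package is replaced by a log/slog call that carries typed key/value attributes (user_id, entry_id, feed_url, and so on) instead of interpolating values into the message string. A minimal, self-contained sketch of the pattern, shown with an illustrative JSON handler (the handler setup and all attribute values below are assumptions for demonstration; this commit only changes call sites):

package main

import (
	"log/slog"
	"os"
)

func main() {
	// Illustrative setup only: send the default logger to a JSON handler
	// and lower the level so Debug records are emitted.
	slog.SetDefault(slog.New(slog.NewJSONHandler(os.Stderr, &slog.HandlerOptions{
		Level: slog.LevelDebug,
	})))

	// Before: logger.Debug("[Processor] Processing entry %q from feed %q", entryURL, feedURL)
	// After: the same event as discrete, machine-parseable attributes.
	slog.Debug("Processing entry",
		slog.Int64("entry_id", 42),                              // hypothetical value
		slog.String("entry_url", "https://example.org/post"),    // hypothetical value
		slog.Int64("feed_id", 7),                                // hypothetical value
		slog.String("feed_url", "https://example.org/feed.xml"), // hypothetical value
	)
}

Because each field is a separate attribute, downstream tooling can filter or aggregate on feed_id or entry_id without parsing a formatted string.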


@@ -6,6 +6,7 @@ package processor
 import (
 	"errors"
 	"fmt"
+	"log/slog"
 	"math"
 	"regexp"
 	"strconv"
@@ -15,7 +16,6 @@ import (
 	"miniflux.app/v2/internal/config"
 	"miniflux.app/v2/internal/http/client"
-	"miniflux.app/v2/internal/logger"
 	"miniflux.app/v2/internal/metric"
 	"miniflux.app/v2/internal/model"
 	"miniflux.app/v2/internal/reader/browser"
@@ -43,7 +43,13 @@ func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, user *model.Us
 	for i := len(feed.Entries) - 1; i >= 0; i-- {
 		entry := feed.Entries[i]
-		logger.Debug("[Processor] Processing entry %q from feed %q", entry.URL, feed.FeedURL)
+		slog.Debug("Processing entry",
+			slog.Int64("user_id", user.ID),
+			slog.Int64("entry_id", entry.ID),
+			slog.String("entry_url", entry.URL),
+			slog.Int64("feed_id", feed.ID),
+			slog.String("feed_url", feed.FeedURL),
+		)
 		if isBlockedEntry(feed, entry) || !isAllowedEntry(feed, entry) {
 			continue
@@ -52,7 +58,13 @@ func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, user *model.Us
 		url := getUrlFromEntry(feed, entry)
 		entryIsNew := !store.EntryURLExists(feed.ID, entry.URL)
 		if feed.Crawler && (entryIsNew || forceRefresh) {
-			logger.Debug("[Processor] Crawling entry %q from feed %q", url, feed.FeedURL)
+			slog.Debug("Scraping entry",
+				slog.Int64("user_id", user.ID),
+				slog.Int64("entry_id", entry.ID),
+				slog.String("entry_url", entry.URL),
+				slog.Int64("feed_id", feed.ID),
+				slog.String("feed_url", feed.FeedURL),
+			)
 			startTime := time.Now()
 			content, scraperErr := scraper.Fetch(
@@ -73,7 +85,14 @@ func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, user *model.Us
 			}
 			if scraperErr != nil {
-				logger.Error(`[Processor] Unable to crawl this entry: %q => %v`, entry.URL, scraperErr)
+				slog.Warn("Unable to scrape entry",
+					slog.Int64("user_id", user.ID),
+					slog.Int64("entry_id", entry.ID),
+					slog.String("entry_url", entry.URL),
+					slog.Int64("feed_id", feed.ID),
+					slog.String("feed_url", feed.FeedURL),
+					slog.Any("error", scraperErr),
+				)
 			} else if content != "" {
 				// We replace the entry content only if the scraper doesn't return any error.
 				entry.Content = content
@@ -96,7 +115,13 @@ func isBlockedEntry(feed *model.Feed, entry *model.Entry) bool {
 	if feed.BlocklistRules != "" {
 		match, _ := regexp.MatchString(feed.BlocklistRules, entry.Title)
 		if match {
-			logger.Debug("[Processor] Blocking entry %q from feed %q based on rule %q", entry.Title, feed.FeedURL, feed.BlocklistRules)
+			slog.Debug("Blocking entry based on rule",
+				slog.Int64("entry_id", entry.ID),
+				slog.String("entry_url", entry.URL),
+				slog.Int64("feed_id", feed.ID),
+				slog.String("feed_url", feed.FeedURL),
+				slog.String("rule", feed.BlocklistRules),
+			)
 			return true
 		}
 	}
@@ -107,7 +132,13 @@ func isAllowedEntry(feed *model.Feed, entry *model.Entry) bool {
 	if feed.KeeplistRules != "" {
 		match, _ := regexp.MatchString(feed.KeeplistRules, entry.Title)
 		if match {
-			logger.Debug("[Processor] Allow entry %q from feed %q based on rule %q", entry.Title, feed.FeedURL, feed.KeeplistRules)
+			slog.Debug("Allow entry based on rule",
+				slog.Int64("entry_id", entry.ID),
+				slog.String("entry_url", entry.URL),
+				slog.Int64("feed_id", feed.ID),
+				slog.String("feed_url", feed.FeedURL),
+				slog.String("rule", feed.KeeplistRules),
+			)
 			return true
 		}
 		return false
@@ -160,9 +191,22 @@ func getUrlFromEntry(feed *model.Feed, entry *model.Entry) string {
 		if len(parts) >= 3 {
 			re := regexp.MustCompile(parts[1])
 			url = re.ReplaceAllString(entry.URL, parts[2])
-			logger.Debug(`[Processor] Rewriting entry URL %s to %s`, entry.URL, url)
+			slog.Debug("Rewriting entry URL",
+				slog.Int64("entry_id", entry.ID),
+				slog.String("original_entry_url", entry.URL),
+				slog.String("rewritten_entry_url", url),
+				slog.Int64("feed_id", feed.ID),
+				slog.String("feed_url", feed.FeedURL),
+			)
 		} else {
-			logger.Debug("[Processor] Cannot find search and replace terms for replace rule %s", feed.UrlRewriteRules)
+			slog.Debug("Cannot find search and replace terms for replace rule",
+				slog.Int64("entry_id", entry.ID),
+				slog.String("original_entry_url", entry.URL),
+				slog.String("rewritten_entry_url", url),
+				slog.Int64("feed_id", feed.ID),
+				slog.String("feed_url", feed.FeedURL),
+				slog.String("url_rewrite_rules", feed.UrlRewriteRules),
+			)
 		}
 	}
 	return url
@@ -173,7 +217,14 @@ func updateEntryReadingTime(store *storage.Storage, feed *model.Feed, entry *mod
 		if entryIsNew {
 			watchTime, err := fetchYouTubeWatchTime(entry.URL)
 			if err != nil {
-				logger.Error("[Processor] Unable to fetch YouTube watch time: %q => %v", entry.URL, err)
+				slog.Warn("Unable to fetch YouTube watch time",
+					slog.Int64("user_id", user.ID),
+					slog.Int64("entry_id", entry.ID),
+					slog.String("entry_url", entry.URL),
+					slog.Int64("feed_id", feed.ID),
+					slog.String("feed_url", feed.FeedURL),
+					slog.Any("error", err),
+				)
 			}
 			entry.ReadingTime = watchTime
 		} else {
@@ -185,7 +236,14 @@ func updateEntryReadingTime(store *storage.Storage, feed *model.Feed, entry *mod
 		if entryIsNew {
 			watchTime, err := fetchOdyseeWatchTime(entry.URL)
 			if err != nil {
-				logger.Error("[Processor] Unable to fetch Odysee watch time: %q => %v", entry.URL, err)
+				slog.Warn("Unable to fetch Odysee watch time",
+					slog.Int64("user_id", user.ID),
+					slog.Int64("entry_id", entry.ID),
+					slog.String("entry_url", entry.URL),
+					slog.Int64("feed_id", feed.ID),
+					slog.String("feed_url", feed.FeedURL),
+					slog.Any("error", err),
+				)
 			}
 			entry.ReadingTime = watchTime
 		} else {
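
Two smaller decisions recur in the hunks above: per-entry failures (scraping, YouTube and Odysee watch-time lookups) are downgraded from Error to Warn, since they do not abort feed processing, and error values are attached with slog.Any("error", err) instead of being formatted with %v. A short sketch of how such a record renders, assuming the default text handler (the URL and error value are hypothetical):

package main

import (
	"errors"
	"log/slog"
)

func main() {
	// Hypothetical failure, standing in for scraperErr in the diff above.
	scraperErr := errors.New("connection refused")

	// Warn passes the default handler's Info threshold, so it is printed.
	slog.Warn("Unable to scrape entry",
		slog.String("entry_url", "https://example.org/post"), // hypothetical value
		slog.Any("error", scraperErr),
	)
	// Output looks something like:
	// 2023/09/24 16:32:09 WARN Unable to scrape entry entry_url=https://example.org/post error="connection refused"
}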