
Implement structured logging using log/slog package

Frédéric Guillot 2023-09-24 16:32:09 -07:00
parent 54cb8fa028
commit c0e954f19d
77 changed files with 1868 additions and 892 deletions
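
The pattern is the same across the changed files: printf-style logger.Debug/logger.Error calls are replaced by leveled log/slog calls that carry typed key/value attributes (slog.String, slog.Int64, slog.Bool, slog.Any) instead of interpolating values into the message. Below is a minimal, self-contained sketch of that migration; the JSON handler setup is an assumption for illustration only, since the logger configuration added by this commit is outside this excerpt.

package main

import (
	"errors"
	"log/slog"
	"os"
)

func main() {
	// Assumed setup for this sketch: a JSON handler writing to stderr at debug level.
	// The actual handler configuration added by this commit is not shown in this excerpt.
	slog.SetDefault(slog.New(slog.NewJSONHandler(os.Stderr, &slog.HandlerOptions{
		Level: slog.LevelDebug,
	})))

	userID, feedID := int64(42), int64(7)
	intErr := errors.New("connection refused")

	// Before: logger.Error("[RefreshFeed] Fetching integrations for user %d failed: %v; ...", userID, intErr)
	// After: a leveled call with structured attributes, as in the handler changes below.
	slog.Error("Fetching integrations failed; the refresh process will go on, but no integrations will run this time",
		slog.Int64("user_id", userID),
		slog.Int64("feed_id", feedID),
		slog.Any("error", intErr),
	)
}

With the JSON handler, each record is emitted as a single JSON object containing time, level, msg, and the attribute keys, which is what makes names such as user_id, feed_id, and entry_url filterable downstream.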

View file

@@ -6,11 +6,11 @@ package atom // import "miniflux.app/v2/internal/reader/atom"
import (
"encoding/base64"
"html"
"log/slog"
"strings"
"time"
"miniflux.app/v2/internal/crypto"
"miniflux.app/v2/internal/logger"
"miniflux.app/v2/internal/model"
"miniflux.app/v2/internal/reader/date"
"miniflux.app/v2/internal/reader/sanitizer"
@@ -126,7 +126,11 @@ func (a *atom03Entry) entryDate() time.Time {
if dateText != "" {
result, err := date.Parse(dateText)
if err != nil {
logger.Error("atom: %v", err)
slog.Warn("Unable to parse date from Atom 0.3 feed",
slog.String("date", dateText),
slog.String("id", a.ID),
slog.Any("error", err),
)
return time.Now()
}

View file

@@ -6,12 +6,12 @@ package atom // import "miniflux.app/v2/internal/reader/atom"
import (
"encoding/xml"
"html"
"log/slog"
"strconv"
"strings"
"time"
"miniflux.app/v2/internal/crypto"
"miniflux.app/v2/internal/logger"
"miniflux.app/v2/internal/model"
"miniflux.app/v2/internal/reader/date"
"miniflux.app/v2/internal/reader/media"
@@ -144,7 +144,11 @@ func (a *atom10Entry) entryDate() time.Time {
if dateText != "" {
result, err := date.Parse(dateText)
if err != nil {
logger.Error("atom: %v (entry ID = %s)", err, a.ID)
slog.Warn("Unable to parse date from Atom 0.3 feed",
slog.String("date", dateText),
slog.String("id", a.ID),
slog.Any("error", err),
)
return time.Now()
}

View file

@@ -4,22 +4,19 @@
package handler // import "miniflux.app/v2/internal/reader/handler"
import (
"fmt"
"time"
"log/slog"
"miniflux.app/v2/internal/config"
"miniflux.app/v2/internal/errors"
"miniflux.app/v2/internal/http/client"
"miniflux.app/v2/internal/integration"
"miniflux.app/v2/internal/locale"
"miniflux.app/v2/internal/logger"
"miniflux.app/v2/internal/model"
"miniflux.app/v2/internal/reader/browser"
"miniflux.app/v2/internal/reader/icon"
"miniflux.app/v2/internal/reader/parser"
"miniflux.app/v2/internal/reader/processor"
"miniflux.app/v2/internal/storage"
"miniflux.app/v2/internal/timer"
)
var (
@@ -30,7 +27,10 @@ var (
// CreateFeed fetch, parse and store a new feed.
func CreateFeed(store *storage.Storage, userID int64, feedCreationRequest *model.FeedCreationRequest) (*model.Feed, error) {
defer timer.ExecutionTime(time.Now(), fmt.Sprintf("[CreateFeed] FeedURL=%s", feedCreationRequest.FeedURL))
slog.Debug("Begin feed creation process",
slog.Int64("user_id", userID),
slog.String("feed_url", feedCreationRequest.FeedURL),
)
user, storeErr := store.UserByID(userID)
if storeErr != nil {
@@ -90,7 +90,11 @@ func CreateFeed(store *storage.Storage, userID int64, feedCreationRequest *model
return nil, storeErr
}
logger.Debug("[CreateFeed] Feed saved with ID: %d", subscription.ID)
slog.Debug("Created feed",
slog.Int64("user_id", userID),
slog.Int64("feed_id", subscription.ID),
slog.String("feed_url", subscription.FeedURL),
)
checkFeedIcon(
store,
@@ -106,7 +110,12 @@ func CreateFeed(store *storage.Storage, userID int64, feedCreationRequest *model
// RefreshFeed refreshes a feed.
func RefreshFeed(store *storage.Storage, userID, feedID int64, forceRefresh bool) error {
defer timer.ExecutionTime(time.Now(), fmt.Sprintf("[RefreshFeed] feedID=%d", feedID))
slog.Debug("Begin feed refresh process",
slog.Int64("user_id", userID),
slog.Int64("feed_id", feedID),
slog.Bool("force_refresh", forceRefresh),
)
user, storeErr := store.UserByID(userID)
if storeErr != nil {
return storeErr
@@ -164,7 +173,10 @@ func RefreshFeed(store *storage.Storage, userID, feedID int64, forceRefresh bool
}
if originalFeed.IgnoreHTTPCache || response.IsModified(originalFeed.EtagHeader, originalFeed.LastModifiedHeader) {
logger.Debug("[RefreshFeed] Feed #%d has been modified", feedID)
slog.Debug("Feed modified",
slog.Int64("user_id", userID),
slog.Int64("feed_id", feedID),
)
updatedFeed, parseErr := parser.ParseFeed(response.EffectiveURL, response.BodyAsString())
if parseErr != nil {
@@ -187,7 +199,11 @@ func RefreshFeed(store *storage.Storage, userID, feedID int64, forceRefresh bool
userIntegrations, intErr := store.Integration(userID)
if intErr != nil {
logger.Error("[RefreshFeed] Fetching integrations for user %d failed: %v; the refresh process will go on, but no integrations will run this time.", userID, intErr)
slog.Error("Fetching integrations failed; the refresh process will go on, but no integrations will run this time",
slog.Int64("user_id", userID),
slog.Int64("feed_id", feedID),
slog.Any("error", intErr),
)
} else if userIntegrations != nil && len(newEntries) > 0 {
go integration.PushEntries(originalFeed, newEntries, userIntegrations)
}
@@ -206,7 +222,10 @@ func RefreshFeed(store *storage.Storage, userID, feedID int64, forceRefresh bool
originalFeed.AllowSelfSignedCertificates,
)
} else {
logger.Debug("[RefreshFeed] Feed #%d not modified", feedID)
slog.Debug("Feed not modified",
slog.Int64("user_id", userID),
slog.Int64("feed_id", feedID),
)
}
originalFeed.ResetErrorCounter()
@@ -224,12 +243,26 @@ func checkFeedIcon(store *storage.Storage, feedID int64, websiteURL, feedIconURL
if !store.HasIcon(feedID) {
icon, err := icon.FindIcon(websiteURL, feedIconURL, userAgent, fetchViaProxy, allowSelfSignedCertificates)
if err != nil {
logger.Debug(`[CheckFeedIcon] %v (feedID=%d websiteURL=%s)`, err, feedID, websiteURL)
slog.Warn("Unable to find feed icon",
slog.Int64("feed_id", feedID),
slog.String("website_url", websiteURL),
slog.String("feed_icon_url", feedIconURL),
slog.Any("error", err),
)
} else if icon == nil {
logger.Debug(`[CheckFeedIcon] No icon found (feedID=%d websiteURL=%s)`, feedID, websiteURL)
slog.Debug("No icon found",
slog.Int64("feed_id", feedID),
slog.String("website_url", websiteURL),
slog.String("feed_icon_url", feedIconURL),
)
} else {
if err := store.CreateFeedIcon(feedID, icon); err != nil {
logger.Debug(`[CheckFeedIcon] %v (feedID=%d websiteURL=%s)`, err, feedID, websiteURL)
slog.Error("Unable to store feed icon",
slog.Int64("feed_id", feedID),
slog.String("website_url", websiteURL),
slog.String("feed_icon_url", feedIconURL),
slog.Any("error", err),
)
}
}
}

View file

@@ -13,7 +13,6 @@ import (
"miniflux.app/v2/internal/config"
"miniflux.app/v2/internal/crypto"
"miniflux.app/v2/internal/http/client"
"miniflux.app/v2/internal/logger"
"miniflux.app/v2/internal/model"
"miniflux.app/v2/internal/urllib"
@@ -65,7 +64,6 @@ func generateIconURL(websiteURL, feedIconURL string) (iconURL string, err error)
func fetchHTMLDocumentAndFindIconURL(websiteURL, userAgent string, fetchViaProxy, allowSelfSignedCertificates bool) (string, error) {
rootURL := urllib.RootURL(websiteURL)
logger.Debug("[FindIcon] Find icon from HTML webpage: %s", rootURL)
clt := client.NewClientWithConfig(rootURL, config.Opts)
clt.WithUserAgent(userAgent)

View file

@@ -4,11 +4,11 @@
package json // import "miniflux.app/v2/internal/reader/json"
import (
"log/slog"
"strings"
"time"
"miniflux.app/v2/internal/crypto"
"miniflux.app/v2/internal/logger"
"miniflux.app/v2/internal/model"
"miniflux.app/v2/internal/reader/date"
"miniflux.app/v2/internal/reader/sanitizer"
@@ -110,7 +110,11 @@ func (j *jsonItem) GetDate() time.Time {
if value != "" {
d, err := date.Parse(value)
if err != nil {
logger.Error("json: %v", err)
slog.Warn("Unable to parse date from JSON feed",
slog.String("date", value),
slog.String("url", j.URL),
slog.Any("error", err),
)
return time.Now()
}

View file

@@ -4,11 +4,9 @@
package opml // import "miniflux.app/v2/internal/reader/opml"
import (
"errors"
"fmt"
"io"
"miniflux.app/v2/internal/logger"
"miniflux.app/v2/internal/model"
"miniflux.app/v2/internal/storage"
)
@@ -53,21 +51,18 @@ func (h *Handler) Import(userID int64, data io.Reader) error {
if subscription.CategoryName == "" {
category, err = h.store.FirstCategory(userID)
if err != nil {
logger.Error("[OPML:Import] %v", err)
return errors.New("unable to find first category")
return fmt.Errorf("opml: unable to find first category: %w", err)
}
} else {
category, err = h.store.CategoryByTitle(userID, subscription.CategoryName)
if err != nil {
logger.Error("[OPML:Import] %v", err)
return errors.New("unable to search category by title")
return fmt.Errorf("opml: unable to search category by title: %w", err)
}
if category == nil {
category, err = h.store.CreateCategory(userID, &model.CategoryRequest{Title: subscription.CategoryName})
if err != nil {
logger.Error("[OPML:Import] %v", err)
return fmt.Errorf(`unable to create this category: %q`, subscription.CategoryName)
return fmt.Errorf(`opml: unable to create this category: %q`, subscription.CategoryName)
}
}
}

View file

@@ -7,10 +7,9 @@ import (
"bufio"
"bytes"
"encoding/xml"
"log/slog"
"sort"
"time"
"miniflux.app/v2/internal/logger"
)
// Serialize returns a SubcriptionList in OPML format.
@@ -23,7 +22,9 @@ func Serialize(subscriptions SubcriptionList) string {
encoder := xml.NewEncoder(writer)
encoder.Indent("", " ")
if err := encoder.Encode(opmlDocument); err != nil {
logger.Error("[OPML:Serialize] %v", err)
slog.Error("Unable to serialize OPML document",
slog.Any("error", err),
)
return ""
}

View file

@@ -6,6 +6,7 @@ package processor
import (
"errors"
"fmt"
"log/slog"
"math"
"regexp"
"strconv"
@@ -15,7 +16,6 @@ import (
"miniflux.app/v2/internal/config"
"miniflux.app/v2/internal/http/client"
"miniflux.app/v2/internal/logger"
"miniflux.app/v2/internal/metric"
"miniflux.app/v2/internal/model"
"miniflux.app/v2/internal/reader/browser"
@@ -43,7 +43,13 @@ func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, user *model.Us
for i := len(feed.Entries) - 1; i >= 0; i-- {
entry := feed.Entries[i]
logger.Debug("[Processor] Processing entry %q from feed %q", entry.URL, feed.FeedURL)
slog.Debug("Processing entry",
slog.Int64("user_id", user.ID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
slog.Int64("feed_id", feed.ID),
slog.String("feed_url", feed.FeedURL),
)
if isBlockedEntry(feed, entry) || !isAllowedEntry(feed, entry) {
continue
@@ -52,7 +58,13 @@ func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, user *model.Us
url := getUrlFromEntry(feed, entry)
entryIsNew := !store.EntryURLExists(feed.ID, entry.URL)
if feed.Crawler && (entryIsNew || forceRefresh) {
logger.Debug("[Processor] Crawling entry %q from feed %q", url, feed.FeedURL)
slog.Debug("Scraping entry",
slog.Int64("user_id", user.ID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
slog.Int64("feed_id", feed.ID),
slog.String("feed_url", feed.FeedURL),
)
startTime := time.Now()
content, scraperErr := scraper.Fetch(
@@ -73,7 +85,14 @@ func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, user *model.Us
}
if scraperErr != nil {
logger.Error(`[Processor] Unable to crawl this entry: %q => %v`, entry.URL, scraperErr)
slog.Warn("Unable to scrape entry",
slog.Int64("user_id", user.ID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
slog.Int64("feed_id", feed.ID),
slog.String("feed_url", feed.FeedURL),
slog.Any("error", scraperErr),
)
} else if content != "" {
// We replace the entry content only if the scraper doesn't return any error.
entry.Content = content
@@ -96,7 +115,13 @@ func isBlockedEntry(feed *model.Feed, entry *model.Entry) bool {
if feed.BlocklistRules != "" {
match, _ := regexp.MatchString(feed.BlocklistRules, entry.Title)
if match {
logger.Debug("[Processor] Blocking entry %q from feed %q based on rule %q", entry.Title, feed.FeedURL, feed.BlocklistRules)
slog.Debug("Blocking entry based on rule",
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
slog.Int64("feed_id", feed.ID),
slog.String("feed_url", feed.FeedURL),
slog.String("rule", feed.BlocklistRules),
)
return true
}
}
@@ -107,7 +132,13 @@ func isAllowedEntry(feed *model.Feed, entry *model.Entry) bool {
if feed.KeeplistRules != "" {
match, _ := regexp.MatchString(feed.KeeplistRules, entry.Title)
if match {
logger.Debug("[Processor] Allow entry %q from feed %q based on rule %q", entry.Title, feed.FeedURL, feed.KeeplistRules)
slog.Debug("Allow entry based on rule",
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
slog.Int64("feed_id", feed.ID),
slog.String("feed_url", feed.FeedURL),
slog.String("rule", feed.KeeplistRules),
)
return true
}
return false
@@ -160,9 +191,22 @@ func getUrlFromEntry(feed *model.Feed, entry *model.Entry) string {
if len(parts) >= 3 {
re := regexp.MustCompile(parts[1])
url = re.ReplaceAllString(entry.URL, parts[2])
logger.Debug(`[Processor] Rewriting entry URL %s to %s`, entry.URL, url)
slog.Debug("Rewriting entry URL",
slog.Int64("entry_id", entry.ID),
slog.String("original_entry_url", entry.URL),
slog.String("rewritten_entry_url", url),
slog.Int64("feed_id", feed.ID),
slog.String("feed_url", feed.FeedURL),
)
} else {
logger.Debug("[Processor] Cannot find search and replace terms for replace rule %s", feed.UrlRewriteRules)
slog.Debug("Cannot find search and replace terms for replace rule",
slog.Int64("entry_id", entry.ID),
slog.String("original_entry_url", entry.URL),
slog.String("rewritten_entry_url", url),
slog.Int64("feed_id", feed.ID),
slog.String("feed_url", feed.FeedURL),
slog.String("url_rewrite_rules", feed.UrlRewriteRules),
)
}
}
return url
@@ -173,7 +217,14 @@ func updateEntryReadingTime(store *storage.Storage, feed *model.Feed, entry *mod
if entryIsNew {
watchTime, err := fetchYouTubeWatchTime(entry.URL)
if err != nil {
logger.Error("[Processor] Unable to fetch YouTube watch time: %q => %v", entry.URL, err)
slog.Warn("Unable to fetch YouTube watch time",
slog.Int64("user_id", user.ID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
slog.Int64("feed_id", feed.ID),
slog.String("feed_url", feed.FeedURL),
slog.Any("error", err),
)
}
entry.ReadingTime = watchTime
} else {
@@ -185,7 +236,14 @@ func updateEntryReadingTime(store *storage.Storage, feed *model.Feed, entry *mod
if entryIsNew {
watchTime, err := fetchOdyseeWatchTime(entry.URL)
if err != nil {
logger.Error("[Processor] Unable to fetch Odysee watch time: %q => %v", entry.URL, err)
slog.Warn("Unable to fetch Odysee watch time",
slog.Int64("user_id", user.ID),
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
slog.Int64("feed_id", feed.ID),
slog.String("feed_url", feed.FeedURL),
slog.Any("error", err),
)
}
entry.ReadingTime = watchTime
} else {

View file

@@ -6,11 +6,11 @@ package rdf // import "miniflux.app/v2/internal/reader/rdf"
import (
"encoding/xml"
"html"
"log/slog"
"strings"
"time"
"miniflux.app/v2/internal/crypto"
"miniflux.app/v2/internal/logger"
"miniflux.app/v2/internal/model"
"miniflux.app/v2/internal/reader/date"
"miniflux.app/v2/internal/reader/dublincore"
@@ -100,7 +100,11 @@ func (r *rdfItem) entryDate() time.Time {
if r.DublinCoreDate != "" {
result, err := date.Parse(r.DublinCoreDate)
if err != nil {
logger.Error("rdf: %v (entry link = %s)", err, r.Link)
slog.Warn("Unable to parse date from RDF feed",
slog.String("date", r.DublinCoreDate),
slog.String("link", r.Link),
slog.Any("error", err),
)
return time.Now()
}

View file

@@ -7,12 +7,11 @@ import (
"bytes"
"fmt"
"io"
"log/slog"
"math"
"regexp"
"strings"
"miniflux.app/v2/internal/logger"
"github.com/PuerkitoBio/goquery"
"golang.org/x/net/html"
)
@@ -83,10 +82,12 @@ func ExtractContent(page io.Reader) (string, error) {
removeUnlikelyCandidates(document)
candidates := getCandidates(document)
logger.Debug("[Readability] Candidates: %v", candidates)
topCandidate := getTopCandidate(document, candidates)
logger.Debug("[Readability] TopCandidate: %v", topCandidate)
slog.Debug("Readability parsing",
slog.Any("candidates", candidates),
slog.Any("topCandidate", topCandidate),
)
output := getArticle(topCandidate, candidates)
return output, nil

View file

@@ -7,12 +7,12 @@ import (
"encoding/base64"
"fmt"
"html"
"log/slog"
"net/url"
"regexp"
"strings"
"miniflux.app/v2/internal/config"
"miniflux.app/v2/internal/logger"
"github.com/PuerkitoBio/goquery"
"github.com/yuin/goldmark"
@@ -359,7 +359,9 @@ func addHackerNewsLinksUsing(entryContent, app string) string {
open_with_hack := `<a href="` + url + `">Open with HACK</a>`
a.Parent().AppendHtml(" " + open_with_hack)
} else {
logger.Error("[openHackerNewsLinksWith] unknown app provided: %q", app)
slog.Warn("Unknown app provided for openHackerNewsLinksWith rewrite rule",
slog.String("app", app),
)
return
}
})

View file

@@ -4,11 +4,11 @@
package rewrite // import "miniflux.app/v2/internal/reader/rewrite"
import (
"log/slog"
"strconv"
"strings"
"text/scanner"
"miniflux.app/v2/internal/logger"
"miniflux.app/v2/internal/model"
"miniflux.app/v2/internal/urllib"
)
@@ -28,7 +28,10 @@ func Rewriter(entryURL string, entry *model.Entry, customRewriteRules string) {
rules := parseRules(rulesList)
rules = append(rules, rule{name: "add_pdf_download_link"})
logger.Debug(`[Rewrite] Applying rules %v for %q`, rules, entryURL)
slog.Debug("Rewrite rules applied",
slog.Any("rules", rules),
slog.String("entry_url", entryURL),
)
for _, rule := range rules {
applyRule(entryURL, entry, rule)
@@ -89,21 +92,30 @@ func applyRule(entryURL string, entry *model.Entry, rule rule) {
if len(rule.args) >= 2 {
entry.Content = replaceCustom(entry.Content, rule.args[0], rule.args[1])
} else {
logger.Debug("[Rewrite] Cannot find search and replace terms for replace rule %s", rule)
slog.Warn("Cannot find search and replace terms for replace rule",
slog.Any("rule", rule),
slog.String("entry_url", entryURL),
)
}
case "replace_title":
// Format: replace_title("search-term"|"replace-term")
if len(rule.args) >= 2 {
entry.Title = replaceCustom(entry.Title, rule.args[0], rule.args[1])
} else {
logger.Debug("[Rewrite] Cannot find search and replace terms for replace rule %s", rule)
slog.Warn("Cannot find search and replace terms for replace_title rule",
slog.Any("rule", rule),
slog.String("entry_url", entryURL),
)
}
case "remove":
// Format: remove("#selector > .element, .another")
if len(rule.args) >= 1 {
entry.Content = removeCustom(entry.Content, rule.args[0])
} else {
logger.Debug("[Rewrite] Cannot find selector for remove rule %s", rule)
slog.Warn("Cannot find selector for remove rule",
slog.Any("rule", rule),
slog.String("entry_url", entryURL),
)
}
case "add_castopod_episode":
entry.Content = addCastopodEpisode(entryURL, entry.Content)

View file

@@ -6,13 +6,13 @@ package rss // import "miniflux.app/v2/internal/reader/rss"
import (
"encoding/xml"
"html"
"log/slog"
"path"
"strconv"
"strings"
"time"
"miniflux.app/v2/internal/crypto"
"miniflux.app/v2/internal/logger"
"miniflux.app/v2/internal/model"
"miniflux.app/v2/internal/reader/date"
"miniflux.app/v2/internal/reader/dublincore"
@@ -216,7 +216,11 @@ func (r *rssItem) entryDate() time.Time {
if value != "" {
result, err := date.Parse(value)
if err != nil {
logger.Error("rss: %v (entry GUID = %s)", err, r.GUID)
slog.Warn("Unable to parse date from RSS feed",
slog.String("date", value),
slog.String("guid", r.GUID.Data),
slog.Any("error", err),
)
return time.Now()
}

View file

@@ -7,11 +7,11 @@ import (
"errors"
"fmt"
"io"
"log/slog"
"strings"
"miniflux.app/v2/internal/config"
"miniflux.app/v2/internal/http/client"
"miniflux.app/v2/internal/logger"
"miniflux.app/v2/internal/reader/readability"
"miniflux.app/v2/internal/urllib"
@@ -55,10 +55,15 @@ func Fetch(websiteURL, rules, userAgent string, cookie string, allowSelfSignedCe
var content string
if sameSite && rules != "" {
logger.Debug(`[Scraper] Using rules %q for %q`, rules, websiteURL)
slog.Debug("Extracting content with custom rules",
"url", websiteURL,
"rules", rules,
)
content, err = scrapContent(response.Body, rules)
} else {
logger.Debug(`[Scraper] Using readability for %q`, websiteURL)
slog.Debug("Extracting content with readability",
"url", websiteURL,
)
content, err = readability.ExtractContent(response.Body)
}