
fix: address minor issues detected by Go linters

Author: Frédéric Guillot
Date: 2025-03-24 20:42:30 -07:00
Parent: febb7b1748
Commit: 31f0afe1ac

10 changed files with 31 additions and 25 deletions

@@ -33,12 +33,10 @@ jobs:
         with:
           args: >
             --timeout 10m
-            --disable errcheck,staticcheck
+            --disable errcheck
             --enable sqlclosecheck,misspell,whitespace,gocritic
-      - uses: dominikh/staticcheck-action@v1.3.1
-        with:
-          version: "latest"
-          install-go: false
+      - name: Run gofmt linter
+        run: gofmt -d -e .
   commitlint:
     name: Commit Linter

@@ -189,11 +189,15 @@ func (p *Parser) parseLines(lines []string) (err error) {
         case "PROXY_PRIVATE_KEY":
             slog.Warn("The PROXY_PRIVATE_KEY environment variable is deprecated, use MEDIA_PROXY_PRIVATE_KEY instead")
             randomKey := make([]byte, 16)
-            rand.Read(randomKey)
+            if _, err := rand.Read(randomKey); err != nil {
+                return fmt.Errorf("config: unable to generate random key: %w", err)
+            }
             p.opts.mediaProxyPrivateKey = parseBytes(value, randomKey)
         case "MEDIA_PROXY_PRIVATE_KEY":
             randomKey := make([]byte, 16)
-            rand.Read(randomKey)
+            if _, err := rand.Read(randomKey); err != nil {
+                return fmt.Errorf("config: unable to generate random key: %w", err)
+            }
             p.opts.mediaProxyPrivateKey = parseBytes(value, randomKey)
         case "MEDIA_PROXY_CUSTOM_URL":
             p.opts.mediaProxyCustomURL = parseString(value, defaultMediaProxyURL)
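
The pattern here is the standard errcheck remediation: crypto/rand.Read returns (n int, err error), and the error is now checked and wrapped instead of silently dropped. A minimal standalone sketch of that pattern (newRandomKey is a hypothetical helper, not Miniflux code):

package main

import (
    "crypto/rand"
    "fmt"
    "log"
)

// newRandomKey illustrates the errcheck-friendly pattern used in the diff:
// the error returned by rand.Read is checked and wrapped rather than ignored.
func newRandomKey(size int) ([]byte, error) {
    key := make([]byte, size)
    if _, err := rand.Read(key); err != nil {
        return nil, fmt.Errorf("unable to generate random key: %w", err)
    }
    return key, nil
}

func main() {
    key, err := newRandomKey(16)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Printf("generated key: %x\n", key)
}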

@@ -36,7 +36,7 @@ func (s WebAuthnSession) String() string {
     if s.SessionData == nil {
         return "{}"
     }
-    return fmt.Sprintf("{Challenge: %s, UserID: %x}", s.SessionData.Challenge, s.SessionData.UserID)
+    return fmt.Sprintf("{Challenge: %s, UserID: %x}", s.Challenge, s.UserID)
 }
 
 type WebAuthnCredential struct {
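
The shorter selectors rely on Go's field promotion: SessionData is embedded in WebAuthnSession, so s.Challenge resolves to s.SessionData.Challenge. A self-contained sketch of that promotion (Session and SessionData below are simplified stand-ins, not the real webauthn types):

package main

import "fmt"

// SessionData stands in for an embedded struct such as webauthn.SessionData.
type SessionData struct {
    Challenge string
    UserID    []byte
}

// Session embeds *SessionData, so its fields are promoted: s.Challenge and
// s.SessionData.Challenge refer to the same field.
type Session struct {
    *SessionData
}

func (s Session) String() string {
    if s.SessionData == nil {
        return "{}"
    }
    return fmt.Sprintf("{Challenge: %s, UserID: %x}", s.Challenge, s.UserID)
}

func main() {
    fmt.Println(Session{&SessionData{Challenge: "abc", UserID: []byte{0x01, 0x02}}})
}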

@@ -116,12 +116,12 @@ type Atom03Content struct {
 func (a *Atom03Content) Content() string {
     content := ""
 
-    switch {
-    case a.Mode == "xml":
+    switch a.Mode {
+    case "xml":
         content = a.InnerXML
-    case a.Mode == "escaped":
+    case "escaped":
         content = a.CharData
-    case a.Mode == "base64":
+    case "base64":
         b, err := base64.StdEncoding.DecodeString(a.CharData)
         if err == nil {
             content = string(b)
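
The rewrite above turns an expressionless switch whose cases all compare a.Mode into a tagged switch on a.Mode; behaviour is unchanged. A minimal standalone sketch of the same shape (contentFor and its arguments are illustrative only, not Miniflux code):

package main

import (
    "encoding/base64"
    "fmt"
)

// contentFor mirrors the shape of the rewrite: a tagged switch on mode
// instead of "switch { case mode == ... }".
func contentFor(mode, xml, text string) string {
    switch mode {
    case "xml":
        return xml
    case "escaped":
        return text
    case "base64":
        if b, err := base64.StdEncoding.DecodeString(text); err == nil {
            return string(b)
        }
    }
    return ""
}

func main() {
    encoded := base64.StdEncoding.EncodeToString([]byte("hello"))
    fmt.Println(contentFor("base64", "", encoded)) // prints "hello"
}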

@@ -77,7 +77,9 @@ func (h *Handler) Import(userID int64, data io.Reader) error {
                 Category: category,
             }
 
-            h.store.CreateFeed(feed)
+            if err := h.store.CreateFeed(feed); err != nil {
+                return fmt.Errorf(`opml: unable to create this feed: %q`, subscription.FeedURL)
+            }
         }
     }
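
Same errcheck theme as the config change: the return value of CreateFeed is no longer ignored, and the failing feed URL is surfaced in the error. A rough standalone sketch of that shape (createFeed and importFeeds are hypothetical stand-ins, not the Miniflux storage API):

package main

import (
    "errors"
    "fmt"
)

// createFeed stands in for a store call that returns only an error.
func createFeed(feedURL string) error {
    if feedURL == "" {
        return errors.New("empty feed URL")
    }
    return nil
}

// importFeeds checks the error and reports which feed failed, mirroring the fix above.
func importFeeds(feedURLs []string) error {
    for _, feedURL := range feedURLs {
        if err := createFeed(feedURL); err != nil {
            return fmt.Errorf("unable to create this feed: %q", feedURL)
        }
    }
    return nil
}

func main() {
    fmt.Println(importFeeds([]string{"https://example.org/feed.xml", ""}))
}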

@@ -39,7 +39,7 @@ func (r *RSSAdapter) BuildFeed(baseURL string) *model.Feed {
     }
 
     // Try to find the feed URL from the Atom links.
-    for _, atomLink := range r.rss.Channel.AtomLinks.Links {
+    for _, atomLink := range r.rss.Channel.Links {
         atomLinkHref := strings.TrimSpace(atomLink.Href)
         if atomLinkHref != "" && atomLink.Rel == "self" {
             if absoluteFeedURL, err := urllib.AbsoluteURL(feed.FeedURL, atomLinkHref); err == nil {
@@ -189,7 +189,7 @@ func findEntryURL(rssItem *RSSItem) string {
         }
     }
 
-    for _, atomLink := range rssItem.AtomLinks.Links {
+    for _, atomLink := range rssItem.Links {
         if atomLink.Href != "" && (strings.EqualFold(atomLink.Rel, "alternate") || atomLink.Rel == "") {
             return strings.TrimSpace(atomLink.Href)
         }
@@ -253,8 +253,8 @@ func findEntryAuthor(rssItem *RSSItem) string {
         author = rssItem.ItunesAuthor
     case rssItem.DublinCoreCreator != "":
         author = rssItem.DublinCoreCreator
-    case rssItem.AtomAuthor.PersonName() != "":
-        author = rssItem.AtomAuthor.PersonName()
+    case rssItem.PersonName() != "":
+        author = rssItem.PersonName()
     case strings.Contains(rssItem.Author.Inner, "<![CDATA["):
         author = rssItem.Author.Data
     default:

@@ -283,9 +283,10 @@ func isPixelTracker(tagName string, attributes []html.Attribute) bool {
     for _, attribute := range attributes {
         if attribute.Val == "1" {
-            if attribute.Key == "height" {
+            switch attribute.Key {
+            case "height":
                 hasHeight = true
-            } else if attribute.Key == "width" {
+            case "width":
                 hasWidth = true
             }
         }
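
Another mechanical cleanup: an if/else-if chain comparing attribute.Key becomes a switch on attribute.Key, the kind of rewrite linters such as gocritic suggest. A small self-contained sketch (the attribute type is a simplified stand-in for html.Attribute):

package main

import "fmt"

// attribute is a simplified stand-in for html.Attribute from golang.org/x/net/html.
type attribute struct {
    Key string
    Val string
}

// isOneByOne mirrors the rewritten logic: a switch on the attribute key
// replaces the previous if/else-if chain, without changing behaviour.
func isOneByOne(attributes []attribute) bool {
    hasHeight, hasWidth := false, false
    for _, attr := range attributes {
        if attr.Val == "1" {
            switch attr.Key {
            case "height":
                hasHeight = true
            case "width":
                hasWidth = true
            }
        }
    }
    return hasHeight && hasWidth
}

func main() {
    fmt.Println(isOneByOne([]attribute{{Key: "width", Val: "1"}, {Key: "height", Val: "1"}})) // true
}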

@@ -49,10 +49,10 @@ func parseImageCandidate(input string) (*ImageCandidate, error) {
     parts := strings.Split(strings.TrimSpace(input), " ")
     nbParts := len(parts)
 
-    switch {
-    case nbParts == 1:
+    switch nbParts {
+    case 1:
         return &ImageCandidate{ImageURL: parts[0]}, nil
-    case nbParts == 2:
+    case 2:
         if !isValidWidthOrDensityDescriptor(parts[1]) {
             return nil, fmt.Errorf(`srcset: invalid descriptor`)
         }

@@ -318,9 +318,10 @@ func (f *FeedQueryBuilder) fetchFeedCounter() (unreadCounters map[int64]int, rea
             return nil, nil, fmt.Errorf(`store: unable to fetch feed counter row: %w`, err)
         }
 
-        if status == model.EntryStatusRead {
+        switch status {
+        case model.EntryStatusRead:
             readCounters[feedID] = count
-        } else if status == model.EntryStatusUnread {
+        case model.EntryStatusUnread:
             unreadCounters[feedID] = count
         }
     }

@@ -235,7 +235,7 @@ func (h *handler) finishLogin(w http.ResponseWriter, r *http.Request) {
         return
     }
 
-    sessionData.SessionData.UserID = parsedResponse.Response.UserHandle
+    sessionData.UserID = parsedResponse.Response.UserHandle
     webAuthUser := WebAuthnUser{user, parsedResponse.Response.UserHandle, storedCredentials}
 
     // Since go-webauthn v0.11.0, the backup eligibility flag is strictly validated, but Miniflux does not store this flag.