mirror of
https://github.com/miniflux/v2.git
synced 2025-06-27 16:36:00 +00:00
Add global block and keep filters
This commit is contained in:
parent
c4278821cb
commit
1a81866bb9
30 changed files with 457 additions and 50 deletions
|
@@ -10,6 +10,7 @@ import (
|
|||
"regexp"
|
||||
"slices"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"miniflux.app/v2/internal/config"
|
||||
|
@@ -51,7 +52,7 @@ func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, user *model.Us
|
|||
slog.Int64("feed_id", feed.ID),
|
||||
slog.String("feed_url", feed.FeedURL),
|
||||
)
|
||||
if isBlockedEntry(feed, entry) || !isAllowedEntry(feed, entry) || !isRecentEntry(entry) {
|
||||
if isBlockedEntry(feed, entry, user) || !isAllowedEntry(feed, entry, user) || !isRecentEntry(entry) {
|
||||
continue
|
||||
}
|
||||
|
||||
|
@@ -121,7 +122,46 @@ func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, user *model.Us
|
|||
feed.Entries = filteredEntries
|
||||
}
|
||||
|
||||
func isBlockedEntry(feed *model.Feed, entry *model.Entry) bool {
|
||||
func isBlockedEntry(feed *model.Feed, entry *model.Entry, user *model.User) bool {
|
||||
if user.BlockFilterEntryRules != "" {
|
||||
rules := strings.Split(user.BlockFilterEntryRules, "\n")
|
||||
for _, rule := range rules {
|
||||
parts := strings.SplitN(rule, "=", 2)
|
||||
|
||||
var match bool
|
||||
switch parts[0] {
|
||||
case "EntryTitle":
|
||||
match, _ = regexp.MatchString(parts[1], entry.Title)
|
||||
case "EntryURL":
|
||||
match, _ = regexp.MatchString(parts[1], entry.URL)
|
||||
case "EntryCommentsURL":
|
||||
match, _ = regexp.MatchString(parts[1], entry.CommentsURL)
|
||||
case "EntryContent":
|
||||
match, _ = regexp.MatchString(parts[1], entry.Content)
|
||||
case "EntryAuthor":
|
||||
match, _ = regexp.MatchString(parts[1], entry.Author)
|
||||
case "EntryTag":
|
||||
containsTag := slices.ContainsFunc(entry.Tags, func(tag string) bool {
|
||||
match, _ = regexp.MatchString(parts[1], tag)
|
||||
return match
|
||||
})
|
||||
if containsTag {
|
||||
match = true
|
||||
}
|
||||
}
|
||||
|
||||
if match {
|
||||
slog.Debug("Blocking entry based on rule",
|
||||
slog.String("entry_url", entry.URL),
|
||||
slog.Int64("feed_id", feed.ID),
|
||||
slog.String("feed_url", feed.FeedURL),
|
||||
slog.String("rule", rule),
|
||||
)
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if feed.BlocklistRules == "" {
|
||||
return false
|
||||
}
|
||||
|
@@ -152,7 +192,47 @@ func isBlockedEntry(feed *model.Feed, entry *model.Entry) bool {
|
|||
return false
|
||||
}
|
||||
|
||||
func isAllowedEntry(feed *model.Feed, entry *model.Entry) bool {
|
||||
func isAllowedEntry(feed *model.Feed, entry *model.Entry, user *model.User) bool {
|
||||
if user.KeepFilterEntryRules != "" {
|
||||
rules := strings.Split(user.KeepFilterEntryRules, "\n")
|
||||
for _, rule := range rules {
|
||||
parts := strings.SplitN(rule, "=", 2)
|
||||
|
||||
var match bool
|
||||
switch parts[0] {
|
||||
case "EntryTitle":
|
||||
match, _ = regexp.MatchString(parts[1], entry.Title)
|
||||
case "EntryURL":
|
||||
match, _ = regexp.MatchString(parts[1], entry.URL)
|
||||
case "EntryCommentsURL":
|
||||
match, _ = regexp.MatchString(parts[1], entry.CommentsURL)
|
||||
case "EntryContent":
|
||||
match, _ = regexp.MatchString(parts[1], entry.Content)
|
||||
case "EntryAuthor":
|
||||
match, _ = regexp.MatchString(parts[1], entry.Author)
|
||||
case "EntryTag":
|
||||
containsTag := slices.ContainsFunc(entry.Tags, func(tag string) bool {
|
||||
match, _ = regexp.MatchString(parts[1], tag)
|
||||
return match
|
||||
})
|
||||
if containsTag {
|
||||
match = true
|
||||
}
|
||||
}
|
||||
|
||||
if match {
|
||||
slog.Debug("Allowing entry based on rule",
|
||||
slog.String("entry_url", entry.URL),
|
||||
slog.Int64("feed_id", feed.ID),
|
||||
slog.String("feed_url", feed.FeedURL),
|
||||
slog.String("rule", rule),
|
||||
)
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
if feed.KeeplistRules == "" {
|
||||
return true
|
||||
}
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue