Mirror of https://github.com/miniflux/v2.git, synced 2025-08-11 17:51:01 +00:00
Add generic webhook integration
commit 48f6885f44 (parent 32d33104a4)
39 changed files with 527 additions and 324 deletions
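The commit title describes a generic webhook integration: once a refresh has stored new entries, Miniflux can hand them to a user-configured HTTP endpoint. Only a subset of the 39 changed files appears below, and the webhook client itself is not among them, so the following is only a minimal sketch of what such a client might look like, written as if it lived inside the Miniflux module. The package name, the Client/NewClient/SendEntries identifiers, and the payload shape are assumptions, not taken from this diff.

package webhook

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"

	"miniflux.app/v2/internal/model"
)

// Client is a hypothetical generic webhook client: it serializes newly
// created entries to JSON and posts them to a user-configured URL.
type Client struct {
	webhookURL string
}

func NewClient(webhookURL string) *Client {
	return &Client{webhookURL: webhookURL}
}

// SendEntries posts the feed and its new entries as a single JSON document.
func (c *Client) SendEntries(feed *model.Feed, entries model.Entries) error {
	payload, err := json.Marshal(map[string]interface{}{
		"feed":    feed,
		"entries": entries,
	})
	if err != nil {
		return fmt.Errorf("webhook: unable to encode payload: %w", err)
	}

	resp, err := http.Post(c.webhookURL, "application/json", bytes.NewReader(payload))
	if err != nil {
		return fmt.Errorf("webhook: unable to send request: %w", err)
	}
	defer resp.Body.Close()

	if resp.StatusCode >= 400 {
		return fmt.Errorf("webhook: endpoint returned status %d", resp.StatusCode)
	}
	return nil
}

Delivery failures are reported to the caller rather than retried, which matches the fire-and-forget dispatch visible in the handler change further down.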
@@ -86,7 +86,7 @@ type atom03Entry struct {
 }
 
 func (a *atom03Entry) Transform() *model.Entry {
-    entry := new(model.Entry)
+    entry := model.NewEntry()
     entry.URL = a.Links.originalLink()
     entry.Date = a.entryDate()
     entry.Author = a.Author.String()

@@ -95,7 +95,7 @@ type atom10Entry struct {
 }
 
 func (a *atom10Entry) Transform() *model.Entry {
-    entry := new(model.Entry)
+    entry := model.NewEntry()
     entry.URL = a.Links.originalLink()
     entry.Date = a.entryDate()
     entry.Author = a.Authors.String()

@@ -219,7 +219,7 @@ func (a *atom10Entry) entryEnclosures() model.EnclosureList {
 }
 
 func (r *atom10Entry) entryCategories() []string {
-    var categoryList []string
+    categoryList := make([]string, 0)
 
     for _, atomCategory := range r.Categories {
         if strings.TrimSpace(atomCategory.Label) != "" {
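Every Transform implementation switches from new(model.Entry) to model.NewEntry(), and the category helpers switch from a nil slice to make([]string, 0). The constructor itself is not part of this excerpt; presumably it initializes the slice-valued fields so that entries serialized to JSON, for example in a webhook payload, carry [] instead of null. A minimal sketch under that assumption, with simplified stand-in types rather than the real model definitions:

package model

// Enclosure, EnclosureList, and Entry are simplified stand-ins; only the
// slice-valued fields relevant to this change are shown.
type Enclosure struct{ URL string }
type EnclosureList []*Enclosure

type Entry struct {
	Tags       []string
	Enclosures EnclosureList
}

// NewEntry is a hypothetical reconstruction: unlike new(Entry), it returns an
// entry whose slice fields are initialized, so they marshal to [] rather than null.
func NewEntry() *Entry {
	return &Entry{
		Tags:       make([]string, 0),
		Enclosures: make(EnclosureList, 0),
	}
}

The same reasoning would explain the make([]string, 0) change in entryCategories: callers receive an empty list rather than nil when an item has no categories.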
@@ -10,6 +10,7 @@ import (
     "miniflux.app/v2/internal/config"
     "miniflux.app/v2/internal/errors"
     "miniflux.app/v2/internal/http/client"
+    "miniflux.app/v2/internal/integration"
     "miniflux.app/v2/internal/locale"
     "miniflux.app/v2/internal/logger"
     "miniflux.app/v2/internal/model"

@@ -177,15 +178,24 @@ func RefreshFeed(store *storage.Storage, userID, feedID int64, forceRefresh bool
 
         // We don't update existing entries when the crawler is enabled (we crawl only inexisting entries). Unless it is forced to refresh
         updateExistingEntries := forceRefresh || !originalFeed.Crawler
-        if storeErr := store.RefreshFeedEntries(originalFeed.UserID, originalFeed.ID, originalFeed.Entries, updateExistingEntries); storeErr != nil {
+        newEntries, storeErr := store.RefreshFeedEntries(originalFeed.UserID, originalFeed.ID, originalFeed.Entries, updateExistingEntries)
+        if storeErr != nil {
            originalFeed.WithError(storeErr.Error())
            store.UpdateFeedError(originalFeed)
            return storeErr
        }
 
+        userIntegrations, intErr := store.Integration(userID)
+        if intErr != nil {
+            logger.Error("[RefreshFeed] Fetching integrations for user %d failed: %v; the refresh process will go on, but no integrations will run this time.", userID, intErr)
+        } else if userIntegrations != nil && len(newEntries) > 0 {
+            go integration.PushEntries(originalFeed, newEntries, userIntegrations)
+        }
+
         // We update caching headers only if the feed has been modified,
         // because some websites don't return the same headers when replying with a 304.
         originalFeed.WithClientResponse(response)
 
         checkFeedIcon(
             store,
             originalFeed.ID,
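With this change the storage layer reports which entries were actually created, and RefreshFeed pushes only those to the user's integrations in a single background goroutine, so delivery can never block or fail the refresh itself. integration.PushEntries is not shown in this excerpt; judging from the call site it receives the feed, the new entries, and the user's integration settings. A rough sketch of what the dispatch side might look like, assuming an Integration model with WebhookEnabled/WebhookURL fields and reusing the hypothetical webhook client sketched above (the field names and the import path are assumptions):

package integration

import (
	"miniflux.app/v2/internal/integration/webhook" // assumed location of the client sketched above
	"miniflux.app/v2/internal/logger"
	"miniflux.app/v2/internal/model"
)

// PushEntries fans newly created entries out to every service the user has
// enabled. Only the webhook branch is sketched here; other services would be
// dispatched the same way.
func PushEntries(feed *model.Feed, entries model.Entries, userIntegrations *model.Integration) {
	if userIntegrations == nil || len(entries) == 0 {
		return
	}

	if userIntegrations.WebhookEnabled {
		client := webhook.NewClient(userIntegrations.WebhookURL)
		if err := client.SendEntries(feed, entries); err != nil {
			// Log and move on: a broken endpoint must not affect the refresh.
			logger.Error("[Integration] Webhook delivery failed: %v", err)
		}
	}
}

Because RefreshFeed already runs the call in a goroutine, the dispatcher itself can stay synchronous and simple.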
@@ -181,7 +181,7 @@ func (j *jsonItem) GetEnclosures() model.EnclosureList {
 }
 
 func (j *jsonItem) Transform() *model.Entry {
-    entry := new(model.Entry)
+    entry := model.NewEntry()
     entry.URL = j.URL
     entry.Date = j.GetDate()
     entry.Author = j.GetAuthor()

@@ -189,7 +189,10 @@ func (j *jsonItem) Transform() *model.Entry {
     entry.Content = j.GetContent()
     entry.Title = strings.TrimSpace(j.GetTitle())
     entry.Enclosures = j.GetEnclosures()
-    entry.Tags = j.Tags
+
+    if len(j.Tags) > 0 {
+        entry.Tags = j.Tags
+    }
 
     return entry
 }
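The new guard only assigns entry.Tags when the JSON Feed item actually has tags, so an item without tags keeps whatever the constructor set up (presumably an empty slice) instead of being overwritten with nil. For a JSON payload the difference is visible: a nil slice marshals to null, an initialized empty slice to []. A small self-contained illustration (the Entry type here is a stand-in, not the Miniflux model):

package main

import (
	"encoding/json"
	"fmt"
)

type Entry struct {
	Tags []string `json:"tags"`
}

func main() {
	var nilTags Entry                           // Tags is nil
	emptyTags := Entry{Tags: make([]string, 0)} // Tags is an empty, non-nil slice

	a, _ := json.Marshal(nilTags)
	b, _ := json.Marshal(emptyTags)

	fmt.Println(string(a)) // {"Tags":null}  (field tag makes it {"tags":null})
	fmt.Println(string(b)) // {"tags":[]}
}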
@@ -13,8 +13,6 @@ import (
     "time"
     "unicode/utf8"
 
-    "miniflux.app/v2/internal/integration"
-
     "miniflux.app/v2/internal/config"
     "miniflux.app/v2/internal/http/client"
     "miniflux.app/v2/internal/logger"

@@ -41,9 +39,6 @@
 func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, user *model.User, forceRefresh bool) {
     var filteredEntries model.Entries
 
-    // array used for bulk push
-    entriesToPush := model.Entries{}
-
     // Process older entries first
     for i := len(feed.Entries) - 1; i >= 0; i-- {
         entry := feed.Entries[i]

@@ -90,32 +85,10 @@ func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, user *model.Us
         // The sanitizer should always run at the end of the process to make sure unsafe HTML is filtered.
         entry.Content = sanitizer.Sanitize(url, entry.Content)
 
-        if entryIsNew {
-            intg, err := store.Integration(feed.UserID)
-            if err != nil {
-                logger.Error("[Processor] Get integrations for user %d failed: %v; the refresh process will go on, but no integrations will run this time.", feed.UserID, err)
-            } else if intg != nil {
-                localEntry := entry
-                go func() {
-                    integration.PushEntry(localEntry, feed, intg)
-                }()
-                entriesToPush = append(entriesToPush, localEntry)
-            }
-        }
-
         updateEntryReadingTime(store, feed, entry, entryIsNew, user)
         filteredEntries = append(filteredEntries, entry)
     }
 
-    intg, err := store.Integration(feed.UserID)
-    if err != nil {
-        logger.Error("[Processor] Get integrations for user %d failed: %v; the refresh process will go on, but no integrations will run this time.", feed.UserID, err)
-    } else if intg != nil && len(entriesToPush) > 0 {
-        go func() {
-            integration.PushEntries(entriesToPush, intg)
-        }()
-    }
-
     feed.Entries = filteredEntries
 }
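The deleted processor code pushed each new entry to integrations from inside the processing loop, one goroutine per entry, which is why it copied the loop variable first (localEntry := entry) before launching the goroutine; with the bulk push moved to RefreshFeed, neither the copy nor the per-entry goroutine is needed, and the end-of-loop bulk push that used to live here disappears as well. The pitfall that copy guarded against is ordinary Go loop-variable capture (for Go versions before 1.22), easy to reproduce in isolation:

package main

import (
	"fmt"
	"sync"
)

func main() {
	items := []string{"a", "b", "c"}
	var wg sync.WaitGroup

	for _, item := range items {
		localItem := item // copy the loop variable before handing it to the goroutine
		wg.Add(1)
		go func() {
			defer wg.Done()
			// Without the copy, goroutines could all observe the final value of
			// item under Go versions before 1.22.
			fmt.Println(localItem)
		}()
	}
	wg.Wait()
}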
@@ -65,7 +65,7 @@ type rdfItem struct {
 }
 
 func (r *rdfItem) Transform() *model.Entry {
-    entry := new(model.Entry)
+    entry := model.NewEntry()
     entry.Title = r.entryTitle()
     entry.Author = r.entryAuthor()
     entry.URL = r.entryURL()

@@ -190,7 +190,7 @@ type rssItem struct {
 }
 
 func (r *rssItem) Transform() *model.Entry {
-    entry := new(model.Entry)
+    entry := model.NewEntry()
     entry.URL = r.entryURL()
     entry.CommentsURL = r.entryCommentsURL()
     entry.Date = r.entryDate()

@@ -388,7 +388,7 @@ func (r *rssItem) entryEnclosures() model.EnclosureList {
 }
 
 func (r *rssItem) entryCategories() []string {
-    var categoryList []string
+    categoryList := make([]string, 0)
 
     for _, rssCategory := range r.Categories {
         if strings.Contains(rssCategory.Inner, "<![CDATA[") {