Mirror of https://github.com/miniflux/v2.git

Add the possibility to enable crawler for feeds

Frédéric Guillot 2017-12-12 19:19:36 -08:00
parent 33445e5b68
commit ef097f02fe
22 changed files with 77 additions and 25 deletions
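
Taken together, the hunks below thread one boolean through the stack: a "crawler" checkbox on the subscription and feed forms, a Crawler field on the two form structs, and a matching field on the feed model that is presumably consulted when the feed is refreshed. The model file is part of the 22 changed files but not among the hunks shown here; the following is a minimal, illustrative sketch of the presumed field, with the neighbouring fields taken from the FeedForm.Merge hunk further down and everything else elided.

package main

import "fmt"

// Feed is a reduced sketch, not the actual miniflux2 model: Crawler is the
// field this commit presumably adds, the other fields are the ones referenced
// by FeedForm.Merge below, and the remaining fields are elided.
type Feed struct {
	ID                int64
	FeedURL           string
	Title             string
	ScraperRules      string
	RewriteRules      string
	Crawler           bool
	ParsingErrorCount int
	ParsingErrorMsg   string
}

func main() {
	feed := Feed{FeedURL: "https://example.org/feed.xml", Crawler: true}
	fmt.Printf("%+v\n", feed)
}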

View file

@@ -8,6 +8,8 @@ import (
"errors"
"log"
"github.com/miniflux/miniflux2/reader/sanitizer"
"github.com/miniflux/miniflux2/integration"
"github.com/miniflux/miniflux2/model"
"github.com/miniflux/miniflux2/reader/scraper"
@@ -46,10 +48,10 @@ func (c *Controller) FetchContent(ctx *core.Context, request *core.Request, resp
return
}
entry.Content = content
entry.Content = sanitizer.Sanitize(entry.URL, content)
c.store.UpdateEntryContent(entry)
response.JSON().Created(map[string]string{"content": content})
response.JSON().Created(map[string]string{"content": entry.Content})
}
// SaveEntry send the link to external services.

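In this hunk the scraped content is now passed through the sanitizer, relative to the entry URL, before it is stored, and the sanitized version rather than the raw download is what gets returned to the client. Below is a minimal, self-contained sketch of that flow; the types and helper names are stand-ins for miniflux's model, sanitizer and storage layer, not the project's actual code.

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/http/httptest"
)

// Entry is a reduced stand-in for model.Entry.
type Entry struct {
	ID      int64
	URL     string
	Content string
}

// sanitizeHTML stands in for sanitizer.Sanitize(baseURL, input); the real
// function cleans up the markup relative to the entry URL.
func sanitizeHTML(baseURL, input string) string { return input }

// saveEntry stands in for store.UpdateEntryContent.
func saveEntry(e *Entry) error { return nil }

// fetchContent mirrors the flow above: sanitize the scraped HTML first,
// persist it, then return the sanitized version instead of the raw download.
func fetchContent(w http.ResponseWriter, entry *Entry, scraped string) {
	entry.Content = sanitizeHTML(entry.URL, scraped)
	if err := saveEntry(entry); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]string{"content": entry.Content})
}

func main() {
	rec := httptest.NewRecorder()
	entry := &Entry{ID: 1, URL: "https://example.org/article"}
	fetchContent(rec, entry, "<p>full article body</p>")
	fmt.Print(rec.Body.String())
}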
View file

@@ -222,6 +222,7 @@ func (c *Controller) getFeedFormTemplateArgs(ctx *core.Context, user *model.User
Title: feed.Title,
ScraperRules: feed.ScraperRules,
RewriteRules: feed.RewriteRules,
Crawler: feed.Crawler,
CategoryID: feed.Category.ID,
}
} else {

View file

@@ -80,7 +80,7 @@ func (c *Controller) SubmitSubscription(ctx *core.Context, request *core.Request
"errorMessage": "Unable to find any subscription.",
}))
case n == 1:
feed, err := c.feedHandler.CreateFeed(user.ID, subscriptionForm.CategoryID, subscriptions[0].URL)
feed, err := c.feedHandler.CreateFeed(user.ID, subscriptionForm.CategoryID, subscriptions[0].URL, subscriptionForm.Crawler)
if err != nil {
response.HTML().Render("add_subscription", args.Merge(tplParams{
"form": subscriptionForm,
@@ -117,7 +117,7 @@ func (c *Controller) ChooseSubscription(ctx *core.Context, request *core.Request
return
}
feed, err := c.feedHandler.CreateFeed(user.ID, subscriptionForm.CategoryID, subscriptionForm.URL)
feed, err := c.feedHandler.CreateFeed(user.ID, subscriptionForm.CategoryID, subscriptionForm.URL, subscriptionForm.Crawler)
if err != nil {
response.HTML().Render("add_subscription", args.Merge(tplParams{
"form": subscriptionForm,

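Both call sites above now pass subscriptionForm.Crawler as an extra argument, which implies the feed handler's CreateFeed gained a trailing crawler parameter; that handler file is among the 22 changed files but not shown in these hunks. A small self-contained sketch of the presumed shape, with discovery, parsing and persistence elided and all types reduced to stand-ins:

package main

import "fmt"

// Feed is a reduced stand-in for model.Feed.
type Feed struct {
	UserID     int64
	CategoryID int64
	FeedURL    string
	Crawler    bool
}

// Handler is a stand-in for the feed handler used by the controllers above.
type Handler struct{}

// CreateFeed sketches the presumed new signature: the crawler flag submitted
// with the form is threaded through and stored on the new feed, so the
// refresh path can later decide whether to fetch full article content.
func (h *Handler) CreateFeed(userID, categoryID int64, url string, crawler bool) (*Feed, error) {
	// discovery, parsing and persistence are elided in this sketch
	return &Feed{UserID: userID, CategoryID: categoryID, FeedURL: url, Crawler: crawler}, nil
}

func main() {
	h := &Handler{}
	feed, err := h.CreateFeed(1, 2, "https://example.org/feed.xml", true)
	if err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", feed)
}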
View file

@@ -19,6 +19,7 @@ type FeedForm struct {
Title string
ScraperRules string
RewriteRules string
Crawler bool
CategoryID int64
}
@@ -38,6 +39,7 @@ func (f FeedForm) Merge(feed *model.Feed) *model.Feed {
feed.FeedURL = f.FeedURL
feed.ScraperRules = f.ScraperRules
feed.RewriteRules = f.RewriteRules
feed.Crawler = f.Crawler
feed.ParsingErrorCount = 0
feed.ParsingErrorMsg = ""
return feed
@@ -56,6 +58,7 @@ func NewFeedForm(r *http.Request) *FeedForm {
Title: r.FormValue("title"),
ScraperRules: r.FormValue("scraper_rules"),
RewriteRules: r.FormValue("rewrite_rules"),
Crawler: r.FormValue("crawler") == "1",
CategoryID: int64(categoryID),
}
}

View file

@@ -15,6 +15,7 @@ import (
type SubscriptionForm struct {
URL string
CategoryID int64
Crawler bool
}
// Validate makes sure the form values are valid.
@@ -35,6 +36,7 @@ func NewSubscriptionForm(r *http.Request) *SubscriptionForm {
return &SubscriptionForm{
URL: r.FormValue("url"),
Crawler: r.FormValue("crawler") == "1",
CategoryID: int64(categoryID),
}
}
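
Both forms read the flag with r.FormValue("crawler") == "1" rather than a boolean parser because an unchecked HTML checkbox is simply absent from the submitted form data: there is no "false" value to parse. The standard-library example below (URLs and names are illustrative, not taken from the templates in this commit) shows the comparison yielding true for a checked box and false for a missing one.

package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
	"net/url"
	"strings"
)

// crawlerEnabled mirrors the check used by FeedForm and SubscriptionForm above.
func crawlerEnabled(r *http.Request) bool {
	return r.FormValue("crawler") == "1"
}

func main() {
	checked := url.Values{"url": {"https://example.org/feed.xml"}, "crawler": {"1"}}
	unchecked := url.Values{"url": {"https://example.org/feed.xml"}}

	for _, form := range []url.Values{checked, unchecked} {
		r := httptest.NewRequest("POST", "/subscriptions", strings.NewReader(form.Encode()))
		r.Header.Set("Content-Type", "application/x-www-form-urlencoded")
		fmt.Println(crawlerEnabled(r)) // prints true, then false
	}
}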