Mirror of https://github.com/miniflux/v2.git, synced 2025-09-15 18:57:04 +00:00
Add option to allow self-signed or invalid certificates
parent c3f871b49b
commit ec3c604a83
35 changed files with 388 additions and 227 deletions
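
This commit introduces a per-feed option to accept self-signed or otherwise invalid TLS certificates and, in the hunks below, wires it through the web UI: the feed edit form, the subscription forms, and the handlers that create feeds. The HTTP-client change that actually honors the flag is among the other touched files and is not shown here. For orientation only, skipping certificate verification with Go's standard library follows the pattern sketched below; newFeedClient and the URL are illustrative, not Miniflux's actual API:

```go
package main

import (
    "crypto/tls"
    "fmt"
    "net/http"
)

// newFeedClient is a hypothetical helper: when allowSelfSigned is true,
// the returned client skips TLS certificate verification, so self-signed
// or expired certificates no longer abort the fetch.
func newFeedClient(allowSelfSigned bool) *http.Client {
    transport := http.DefaultTransport.(*http.Transport).Clone()
    if allowSelfSigned {
        // InsecureSkipVerify disables both chain and hostname validation,
        // which is why the option is opt-in and scoped to a single feed.
        transport.TLSClientConfig = &tls.Config{InsecureSkipVerify: true}
    }
    return &http.Client{Transport: transport}
}

func main() {
    client := newFeedClient(true)
    resp, err := client.Get("https://self-signed.example.org/feed.xml")
    if err != nil {
        fmt.Println("fetch failed:", err)
        return
    }
    defer resp.Body.Close()
    fmt.Println("status:", resp.Status)
}
```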

@@ -12,15 +12,18 @@ import (
     "miniflux.app/model"
     "miniflux.app/proxy"
     "miniflux.app/reader/processor"
+    "miniflux.app/storage"
 )
 
 func (h *handler) fetchContent(w http.ResponseWriter, r *http.Request) {
+    loggedUserID := request.UserID(r)
     entryID := request.RouteInt64Param(r, "entryID")
-    builder := h.store.NewEntryQueryBuilder(request.UserID(r))
-    builder.WithEntryID(entryID)
-    builder.WithoutStatus(model.EntryStatusRemoved)
+
+    entryBuilder := h.store.NewEntryQueryBuilder(loggedUserID)
+    entryBuilder.WithEntryID(entryID)
+    entryBuilder.WithoutStatus(model.EntryStatusRemoved)
 
-    entry, err := builder.GetEntry()
+    entry, err := entryBuilder.GetEntry()
     if err != nil {
         json.ServerError(w, r, err)
         return
@@ -31,7 +34,20 @@ func (h *handler) fetchContent(w http.ResponseWriter, r *http.Request) {
         return
     }
 
-    if err := processor.ProcessEntryWebPage(entry); err != nil {
+    feedBuilder := storage.NewFeedQueryBuilder(h.store, loggedUserID)
+    feedBuilder.WithFeedID(entry.FeedID)
+    feed, err := feedBuilder.GetFeed()
+    if err != nil {
+        json.ServerError(w, r, err)
+        return
+    }
+
+    if feed == nil {
+        json.NotFound(w, r)
+        return
+    }
+
+    if err := processor.ProcessEntryWebPage(feed, entry); err != nil {
         json.ServerError(w, r, err)
         return
     }
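
Besides renaming the entry query builder, this first file changes ProcessEntryWebPage from taking only the entry to taking the feed and the entry, which is why the handler now loads the feed first and returns 404 when it no longer exists. Presumably the processor needs the feed so the scraper can apply per-feed fetch settings such as the new certificate flag. A minimal stand-in illustrating the widened call shape (simplified types, not the real model package):

```go
package main

import "fmt"

// Feed and Entry are stand-ins for miniflux.app/model types, kept only to
// show the new two-argument call shape from the hunk above.
type Feed struct {
    ID                          int64
    AllowSelfSignedCertificates bool
}

type Entry struct {
    FeedID int64
    URL    string
}

// processEntryWebPage mirrors the assumed new signature: with the feed in
// hand, the scraper can honor per-feed options while downloading the page.
func processEntryWebPage(feed *Feed, entry *Entry) error {
    fmt.Printf("scraping %s (skip TLS verification: %v)\n",
        entry.URL, feed.AllowSelfSignedCertificates)
    return nil
}

func main() {
    feed := &Feed{ID: 1, AllowSelfSignedCertificates: true}
    entry := &Entry{FeedID: feed.ID, URL: "https://example.org/article"}
    _ = processEntryWebPage(feed, entry)
}
```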

@@ -41,21 +41,22 @@ func (h *handler) showEditFeedPage(w http.ResponseWriter, r *http.Request) {
     }
 
     feedForm := form.FeedForm{
-        SiteURL:         feed.SiteURL,
-        FeedURL:         feed.FeedURL,
-        Title:           feed.Title,
-        ScraperRules:    feed.ScraperRules,
-        RewriteRules:    feed.RewriteRules,
-        BlocklistRules:  feed.BlocklistRules,
-        KeeplistRules:   feed.KeeplistRules,
-        Crawler:         feed.Crawler,
-        UserAgent:       feed.UserAgent,
-        CategoryID:      feed.Category.ID,
-        Username:        feed.Username,
-        Password:        feed.Password,
-        IgnoreHTTPCache: feed.IgnoreHTTPCache,
-        FetchViaProxy:   feed.FetchViaProxy,
-        Disabled:        feed.Disabled,
+        SiteURL:                     feed.SiteURL,
+        FeedURL:                     feed.FeedURL,
+        Title:                       feed.Title,
+        ScraperRules:                feed.ScraperRules,
+        RewriteRules:                feed.RewriteRules,
+        BlocklistRules:              feed.BlocklistRules,
+        KeeplistRules:               feed.KeeplistRules,
+        Crawler:                     feed.Crawler,
+        UserAgent:                   feed.UserAgent,
+        CategoryID:                  feed.Category.ID,
+        Username:                    feed.Username,
+        Password:                    feed.Password,
+        IgnoreHTTPCache:             feed.IgnoreHTTPCache,
+        AllowSelfSignedCertificates: feed.AllowSelfSignedCertificates,
+        FetchViaProxy:               feed.FetchViaProxy,
+        Disabled:                    feed.Disabled,
     }
 
     sess := session.New(h.store, request.SessionID(r))

@@ -13,21 +13,22 @@ import (
 
 // FeedForm represents a feed form in the UI
 type FeedForm struct {
-    FeedURL         string
-    SiteURL         string
-    Title           string
-    ScraperRules    string
-    RewriteRules    string
-    BlocklistRules  string
-    KeeplistRules   string
-    Crawler         bool
-    UserAgent       string
-    CategoryID      int64
-    Username        string
-    Password        string
-    IgnoreHTTPCache bool
-    FetchViaProxy   bool
-    Disabled        bool
+    FeedURL                     string
+    SiteURL                     string
+    Title                       string
+    ScraperRules                string
+    RewriteRules                string
+    BlocklistRules              string
+    KeeplistRules               string
+    Crawler                     bool
+    UserAgent                   string
+    CategoryID                  int64
+    Username                    string
+    Password                    string
+    IgnoreHTTPCache             bool
+    AllowSelfSignedCertificates bool
+    FetchViaProxy               bool
+    Disabled                    bool
 }
 
 // Merge updates the fields of the given feed.
@@ -47,6 +48,7 @@ func (f FeedForm) Merge(feed *model.Feed) *model.Feed {
     feed.Username = f.Username
     feed.Password = f.Password
     feed.IgnoreHTTPCache = f.IgnoreHTTPCache
+    feed.AllowSelfSignedCertificates = f.AllowSelfSignedCertificates
     feed.FetchViaProxy = f.FetchViaProxy
     feed.Disabled = f.Disabled
     return feed
@@ -59,20 +61,21 @@ func NewFeedForm(r *http.Request) *FeedForm {
         categoryID = 0
     }
     return &FeedForm{
-        FeedURL:         r.FormValue("feed_url"),
-        SiteURL:         r.FormValue("site_url"),
-        Title:           r.FormValue("title"),
-        ScraperRules:    r.FormValue("scraper_rules"),
-        UserAgent:       r.FormValue("user_agent"),
-        RewriteRules:    r.FormValue("rewrite_rules"),
-        BlocklistRules:  r.FormValue("blocklist_rules"),
-        KeeplistRules:   r.FormValue("keeplist_rules"),
-        Crawler:         r.FormValue("crawler") == "1",
-        CategoryID:      int64(categoryID),
-        Username:        r.FormValue("feed_username"),
-        Password:        r.FormValue("feed_password"),
-        IgnoreHTTPCache: r.FormValue("ignore_http_cache") == "1",
-        FetchViaProxy:   r.FormValue("fetch_via_proxy") == "1",
-        Disabled:        r.FormValue("disabled") == "1",
+        FeedURL:                     r.FormValue("feed_url"),
+        SiteURL:                     r.FormValue("site_url"),
+        Title:                       r.FormValue("title"),
+        ScraperRules:                r.FormValue("scraper_rules"),
+        UserAgent:                   r.FormValue("user_agent"),
+        RewriteRules:                r.FormValue("rewrite_rules"),
+        BlocklistRules:              r.FormValue("blocklist_rules"),
+        KeeplistRules:               r.FormValue("keeplist_rules"),
+        Crawler:                     r.FormValue("crawler") == "1",
+        CategoryID:                  int64(categoryID),
+        Username:                    r.FormValue("feed_username"),
+        Password:                    r.FormValue("feed_password"),
+        IgnoreHTTPCache:             r.FormValue("ignore_http_cache") == "1",
+        AllowSelfSignedCertificates: r.FormValue("allow_self_signed_certificates") == "1",
+        FetchViaProxy:               r.FormValue("fetch_via_proxy") == "1",
+        Disabled:                    r.FormValue("disabled") == "1",
     }
 }
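
Both form constructors read the new checkbox with the repository's existing r.FormValue(name) == "1" convention. That works because browsers omit unchecked checkboxes from the submitted payload entirely, so the lookup yields an empty string and the flag stays false, while a checked box arrives with value "1". A small self-contained demonstration of that behavior:

```go
package main

import (
    "fmt"
    "net/url"
)

func main() {
    // A checked checkbox is submitted as name=1; an unchecked one is
    // simply absent from the form body, so Get returns "".
    checked, _ := url.ParseQuery("allow_self_signed_certificates=1")
    unchecked, _ := url.ParseQuery("")

    fmt.Println(checked.Get("allow_self_signed_certificates") == "1")   // true
    fmt.Println(unchecked.Get("allow_self_signed_certificates") == "1") // false
}
```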

@@ -14,17 +14,18 @@ import (
 
 // SubscriptionForm represents the subscription form.
 type SubscriptionForm struct {
-    URL            string
-    CategoryID     int64
-    Crawler        bool
-    FetchViaProxy  bool
-    UserAgent      string
-    Username       string
-    Password       string
-    ScraperRules   string
-    RewriteRules   string
-    BlocklistRules string
-    KeeplistRules  string
+    URL                         string
+    CategoryID                  int64
+    Crawler                     bool
+    FetchViaProxy               bool
+    AllowSelfSignedCertificates bool
+    UserAgent                   string
+    Username                    string
+    Password                    string
+    ScraperRules                string
+    RewriteRules                string
+    BlocklistRules              string
+    KeeplistRules               string
 }
 
 // Validate makes sure the form values are valid.
@@ -56,16 +57,17 @@ func NewSubscriptionForm(r *http.Request) *SubscriptionForm {
     }
 
     return &SubscriptionForm{
-        URL:            r.FormValue("url"),
-        CategoryID:     int64(categoryID),
-        Crawler:        r.FormValue("crawler") == "1",
-        FetchViaProxy:  r.FormValue("fetch_via_proxy") == "1",
-        UserAgent:      r.FormValue("user_agent"),
-        Username:       r.FormValue("feed_username"),
-        Password:       r.FormValue("feed_password"),
-        ScraperRules:   r.FormValue("scraper_rules"),
-        RewriteRules:   r.FormValue("rewrite_rules"),
-        BlocklistRules: r.FormValue("blocklist_rules"),
-        KeeplistRules:  r.FormValue("keeplist_rules"),
+        URL:                         r.FormValue("url"),
+        CategoryID:                  int64(categoryID),
+        Crawler:                     r.FormValue("crawler") == "1",
+        AllowSelfSignedCertificates: r.FormValue("allow_self_signed_certificates") == "1",
+        FetchViaProxy:               r.FormValue("fetch_via_proxy") == "1",
+        UserAgent:                   r.FormValue("user_agent"),
+        Username:                    r.FormValue("feed_username"),
+        Password:                    r.FormValue("feed_password"),
+        ScraperRules:                r.FormValue("scraper_rules"),
+        RewriteRules:                r.FormValue("rewrite_rules"),
+        BlocklistRules:              r.FormValue("blocklist_rules"),
+        KeeplistRules:               r.FormValue("keeplist_rules"),
     }
 }

@@ -50,17 +50,18 @@ func (h *handler) showChooseSubscriptionPage(w http.ResponseWriter, r *http.Requ
     }
 
     feed, err := feedHandler.CreateFeed(h.store, user.ID, &model.FeedCreationRequest{
-        CategoryID:     subscriptionForm.CategoryID,
-        FeedURL:        subscriptionForm.URL,
-        Crawler:        subscriptionForm.Crawler,
-        UserAgent:      subscriptionForm.UserAgent,
-        Username:       subscriptionForm.Username,
-        Password:       subscriptionForm.Password,
-        ScraperRules:   subscriptionForm.ScraperRules,
-        RewriteRules:   subscriptionForm.RewriteRules,
-        BlocklistRules: subscriptionForm.BlocklistRules,
-        KeeplistRules:  subscriptionForm.KeeplistRules,
-        FetchViaProxy:  subscriptionForm.FetchViaProxy,
+        CategoryID:                  subscriptionForm.CategoryID,
+        FeedURL:                     subscriptionForm.URL,
+        Crawler:                     subscriptionForm.Crawler,
+        AllowSelfSignedCertificates: subscriptionForm.AllowSelfSignedCertificates,
+        UserAgent:                   subscriptionForm.UserAgent,
+        Username:                    subscriptionForm.Username,
+        Password:                    subscriptionForm.Password,
+        ScraperRules:                subscriptionForm.ScraperRules,
+        RewriteRules:                subscriptionForm.RewriteRules,
+        BlocklistRules:              subscriptionForm.BlocklistRules,
+        KeeplistRules:               subscriptionForm.KeeplistRules,
+        FetchViaProxy:               subscriptionForm.FetchViaProxy,
     })
     if err != nil {
         view.Set("form", subscriptionForm)

@@ -58,6 +58,7 @@ func (h *handler) submitSubscription(w http.ResponseWriter, r *http.Request) {
         subscriptionForm.Username,
         subscriptionForm.Password,
         subscriptionForm.FetchViaProxy,
+        subscriptionForm.AllowSelfSignedCertificates,
     )
     if findErr != nil {
         logger.Error("[UI:SubmitSubscription] %s", findErr)
@@ -77,17 +78,18 @@ func (h *handler) submitSubscription(w http.ResponseWriter, r *http.Request) {
         html.OK(w, r, v.Render("add_subscription"))
     case n == 1:
         feed, err := feedHandler.CreateFeed(h.store, user.ID, &model.FeedCreationRequest{
-            CategoryID:     subscriptionForm.CategoryID,
-            FeedURL:        subscriptions[0].URL,
-            Crawler:        subscriptionForm.Crawler,
-            UserAgent:      subscriptionForm.UserAgent,
-            Username:       subscriptionForm.Username,
-            Password:       subscriptionForm.Password,
-            ScraperRules:   subscriptionForm.ScraperRules,
-            RewriteRules:   subscriptionForm.RewriteRules,
-            BlocklistRules: subscriptionForm.BlocklistRules,
-            KeeplistRules:  subscriptionForm.KeeplistRules,
-            FetchViaProxy:  subscriptionForm.FetchViaProxy,
+            CategoryID:                  subscriptionForm.CategoryID,
+            FeedURL:                     subscriptions[0].URL,
+            Crawler:                     subscriptionForm.Crawler,
+            AllowSelfSignedCertificates: subscriptionForm.AllowSelfSignedCertificates,
+            UserAgent:                   subscriptionForm.UserAgent,
+            Username:                    subscriptionForm.Username,
+            Password:                    subscriptionForm.Password,
+            ScraperRules:                subscriptionForm.ScraperRules,
+            RewriteRules:                subscriptionForm.RewriteRules,
+            BlocklistRules:              subscriptionForm.BlocklistRules,
+            KeeplistRules:               subscriptionForm.KeeplistRules,
+            FetchViaProxy:               subscriptionForm.FetchViaProxy,
         })
         if err != nil {
             v.Set("form", subscriptionForm)
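
Taken together, the UI hunks move the flag from the submitted form into model.FeedCreationRequest (and, via FeedForm.Merge, onto an existing feed), from where the fetching layer can pick it up. A compressed sketch of that flow, using simplified stand-in types rather than the real ones:

```go
package main

import "fmt"

// Stand-ins for the structures touched above, reduced to the new field.
type SubscriptionForm struct{ AllowSelfSignedCertificates bool }
type FeedCreationRequest struct{ AllowSelfSignedCertificates bool }
type Feed struct{ AllowSelfSignedCertificates bool }

func main() {
    // 1. NewSubscriptionForm: the checkbox value becomes a bool on the form.
    formValue := "1" // as submitted by a checked checkbox
    form := SubscriptionForm{AllowSelfSignedCertificates: formValue == "1"}

    // 2. The subscription handlers copy the bool into the creation request.
    req := FeedCreationRequest{AllowSelfSignedCertificates: form.AllowSelfSignedCertificates}

    // 3. CreateFeed persists it on the feed, where the fetcher reads it
    //    before each download (that client-side change is not shown here).
    feed := Feed{AllowSelfSignedCertificates: req.AllowSelfSignedCertificates}
    fmt.Println("skip TLS verification for this feed:", feed.AllowSelfSignedCertificates)
}
```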