Mirror of https://github.com/miniflux/v2.git, synced 2025-08-16 18:01:37 +00:00
Add option to allow self-signed or invalid certificates

Commit: ec3c604a83
Parent: c3f871b49b
35 changed files with 388 additions and 227 deletions
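The flag threaded through the hunks below is consumed by the shared HTTP client, which is outside this excerpt. In Go, accepting self-signed or otherwise invalid certificates normally comes down to setting InsecureSkipVerify on the transport's TLS configuration, so the option presumably maps onto something like the following sketch; the buildClient helper name and the example URL are illustrative, not code from this commit.

// A minimal sketch, not the client package from this commit: it only shows how
// an AllowSelfSignedCertificates flag typically maps onto Go TLS settings.
package main

import (
    "crypto/tls"
    "fmt"
    "net/http"
    "time"
)

func buildClient(allowSelfSignedCertificates bool) *http.Client {
    return &http.Client{
        Timeout: 30 * time.Second,
        Transport: &http.Transport{
            // InsecureSkipVerify disables certificate chain and hostname checks,
            // which is what lets self-signed or expired certificates through.
            // It stays false unless the feed explicitly opts in.
            TLSClientConfig: &tls.Config{InsecureSkipVerify: allowSelfSignedCertificates},
        },
    }
}

func main() {
    clt := buildClient(true)
    response, err := clt.Get("https://self-signed.example.org/feed.xml")
    if err != nil {
        fmt.Println("request failed:", err)
        return
    }
    defer response.Body.Close()
    fmt.Println("status:", response.Status)
}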
@@ -39,6 +39,7 @@ func CreateFeed(store *storage.Storage, userID int64, feedCreationRequest *model
     request := client.NewClientWithConfig(feedCreationRequest.FeedURL, config.Opts)
     request.WithCredentials(feedCreationRequest.Username, feedCreationRequest.Password)
     request.WithUserAgent(feedCreationRequest.UserAgent)
+    request.AllowSelfSignedCertificates = feedCreationRequest.AllowSelfSignedCertificates
 
     if feedCreationRequest.FetchViaProxy {
         request.WithProxy()
@@ -65,6 +66,7 @@ func CreateFeed(store *storage.Storage, userID int64, feedCreationRequest *model
     subscription.Crawler = feedCreationRequest.Crawler
     subscription.Disabled = feedCreationRequest.Disabled
     subscription.IgnoreHTTPCache = feedCreationRequest.IgnoreHTTPCache
+    subscription.AllowSelfSignedCertificates = feedCreationRequest.AllowSelfSignedCertificates
     subscription.FetchViaProxy = feedCreationRequest.FetchViaProxy
     subscription.ScraperRules = feedCreationRequest.ScraperRules
     subscription.RewriteRules = feedCreationRequest.RewriteRules
@@ -82,7 +84,13 @@ func CreateFeed(store *storage.Storage, userID int64, feedCreationRequest *model
 
     logger.Debug("[CreateFeed] Feed saved with ID: %d", subscription.ID)
 
-    checkFeedIcon(store, subscription.ID, subscription.SiteURL, feedCreationRequest.FetchViaProxy)
+    checkFeedIcon(
+        store,
+        subscription.ID,
+        subscription.SiteURL,
+        feedCreationRequest.FetchViaProxy,
+        feedCreationRequest.AllowSelfSignedCertificates,
+    )
     return subscription, nil
 }
 
@@ -116,6 +124,7 @@ func RefreshFeed(store *storage.Storage, userID, feedID int64) error {
     request := client.NewClientWithConfig(originalFeed.FeedURL, config.Opts)
     request.WithCredentials(originalFeed.Username, originalFeed.Password)
     request.WithUserAgent(originalFeed.UserAgent)
+    request.AllowSelfSignedCertificates = originalFeed.AllowSelfSignedCertificates
 
     if !originalFeed.IgnoreHTTPCache {
         request.WithCacheHeaders(originalFeed.EtagHeader, originalFeed.LastModifiedHeader)
@@ -162,7 +171,13 @@ func RefreshFeed(store *storage.Storage, userID, feedID int64) error {
         // We update caching headers only if the feed has been modified,
         // because some websites don't return the same headers when replying with a 304.
         originalFeed.WithClientResponse(response)
-        checkFeedIcon(store, originalFeed.ID, originalFeed.SiteURL, originalFeed.FetchViaProxy)
+        checkFeedIcon(
+            store,
+            originalFeed.ID,
+            originalFeed.SiteURL,
+            originalFeed.FetchViaProxy,
+            originalFeed.AllowSelfSignedCertificates,
+        )
     } else {
         logger.Debug("[RefreshFeed] Feed #%d not modified", feedID)
     }
@@ -178,9 +193,9 @@ func RefreshFeed(store *storage.Storage, userID, feedID int64) error {
     return nil
 }
 
-func checkFeedIcon(store *storage.Storage, feedID int64, websiteURL string, fetchViaProxy bool) {
+func checkFeedIcon(store *storage.Storage, feedID int64, websiteURL string, fetchViaProxy, allowSelfSignedCertificates bool) {
     if !store.HasIcon(feedID) {
-        icon, err := icon.FindIcon(websiteURL, fetchViaProxy)
+        icon, err := icon.FindIcon(websiteURL, fetchViaProxy, allowSelfSignedCertificates)
         if err != nil {
             logger.Debug(`[CheckFeedIcon] %v (feedID=%d websiteURL=%s)`, err, feedID, websiteURL)
         } else if icon == nil {
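The hunks above read AllowSelfSignedCertificates from both the stored feed and the feed creation request, which implies a new boolean field on each of those model structs elsewhere in the commit. The declarations are not part of this excerpt; a rough sketch, with the struct names, field placement, and JSON tags assumed:

// Illustrative only: the surrounding fields and the JSON tag names are assumptions.
package model

type Feed struct {
    // ... existing fields such as Crawler, Disabled, IgnoreHTTPCache, FetchViaProxy ...
    AllowSelfSignedCertificates bool `json:"allow_self_signed_certificates"`
}

type FeedCreationRequest struct {
    // ... existing fields ...
    AllowSelfSignedCertificates bool `json:"allow_self_signed_certificates"`
}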
@@ -21,12 +21,14 @@ import (
 )
 
 // FindIcon try to find the website's icon.
-func FindIcon(websiteURL string, fetchViaProxy bool) (*model.Icon, error) {
+func FindIcon(websiteURL string, fetchViaProxy, allowSelfSignedCertificates bool) (*model.Icon, error) {
     rootURL := url.RootURL(websiteURL)
     clt := client.NewClientWithConfig(rootURL, config.Opts)
+    clt.AllowSelfSignedCertificates = allowSelfSignedCertificates
     if fetchViaProxy {
         clt.WithProxy()
     }
 
     response, err := clt.Get()
     if err != nil {
         return nil, fmt.Errorf("unable to download website index page: %v", err)
@@ -46,7 +48,7 @@ func FindIcon(websiteURL string, fetchViaProxy bool) (*model.Icon, error) {
     }
 
     logger.Debug("[FindIcon] Fetching icon => %s", iconURL)
-    icon, err := downloadIcon(iconURL, fetchViaProxy)
+    icon, err := downloadIcon(iconURL, fetchViaProxy, allowSelfSignedCertificates)
     if err != nil {
         return nil, err
     }
@@ -89,8 +91,9 @@ func parseDocument(websiteURL string, data io.Reader) (string, error) {
     return iconURL, nil
 }
 
-func downloadIcon(iconURL string, fetchViaProxy bool) (*model.Icon, error) {
+func downloadIcon(iconURL string, fetchViaProxy, allowSelfSignedCertificates bool) (*model.Icon, error) {
     clt := client.NewClientWithConfig(iconURL, config.Opts)
+    clt.AllowSelfSignedCertificates = allowSelfSignedCertificates
     if fetchViaProxy {
         clt.WithProxy()
     }
@@ -50,7 +50,12 @@ func ProcessFeedEntries(store *storage.Storage, feed *model.Feed) {
             logger.Debug("[Processor] Crawling entry %q from feed %q", entry.URL, feed.FeedURL)
 
             startTime := time.Now()
-            content, scraperErr := scraper.Fetch(entry.URL, feed.ScraperRules, feed.UserAgent)
+            content, scraperErr := scraper.Fetch(
+                entry.URL,
+                feed.ScraperRules,
+                feed.UserAgent,
+                feed.AllowSelfSignedCertificates,
+            )
 
             if config.Opts.HasMetricsCollector() {
                 status := "success"
@@ -118,9 +123,15 @@ func isAllowedEntry(feed *model.Feed, entry *model.Entry) bool {
 }
 
 // ProcessEntryWebPage downloads the entry web page and apply rewrite rules.
-func ProcessEntryWebPage(entry *model.Entry) error {
+func ProcessEntryWebPage(feed *model.Feed, entry *model.Entry) error {
     startTime := time.Now()
-    content, scraperErr := scraper.Fetch(entry.URL, entry.Feed.ScraperRules, entry.Feed.UserAgent)
+    content, scraperErr := scraper.Fetch(
+        entry.URL,
+        entry.Feed.ScraperRules,
+        entry.Feed.UserAgent,
+        feed.AllowSelfSignedCertificates,
+    )
 
     if config.Opts.HasMetricsCollector() {
         status := "success"
         if scraperErr != nil {
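ProcessEntryWebPage now takes the feed as an explicit argument, so its call sites (updated elsewhere among the 35 changed files, not shown in this excerpt) must pass the entry's feed through. A hypothetical call, assuming the package is named processor and that feed and entry were already loaded from storage:

// Hypothetical call site; the surrounding error handling is illustrative.
if err := processor.ProcessEntryWebPage(feed, entry); err != nil {
    return err
}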
@@ -20,11 +20,10 @@ import (
 )
 
 // Fetch downloads a web page and returns relevant contents.
-func Fetch(websiteURL, rules, userAgent string) (string, error) {
+func Fetch(websiteURL, rules, userAgent string, allowSelfSignedCertificates bool) (string, error) {
     clt := client.NewClientWithConfig(websiteURL, config.Opts)
-    if userAgent != "" {
-        clt.WithUserAgent(userAgent)
-    }
+    clt.WithUserAgent(userAgent)
+    clt.AllowSelfSignedCertificates = allowSelfSignedCertificates
 
     response, err := clt.Get()
     if err != nil {
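Besides threading the new flag through, this hunk drops the empty-string guard around WithUserAgent. That is only safe if the client treats an empty user agent as "use the default"; the client package is not part of this excerpt, so the snippet below only illustrates that assumed pattern with generic, stand-in names and values.

// Generic illustration of the "empty means default" pattern assumed above;
// not the actual client code from this commit.
package main

import "fmt"

const defaultUserAgent = "ExampleReader/1.0"

func effectiveUserAgent(userAgent string) string {
    if userAgent == "" {
        return defaultUserAgent
    }
    return userAgent
}

func main() {
    fmt.Println(effectiveUserAgent(""))           // falls back to the default
    fmt.Println(effectiveUserAgent("MyReader/1")) // custom value wins
}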
@@ -27,13 +27,14 @@ var (
 )
 
 // FindSubscriptions downloads and try to find one or more subscriptions from an URL.
-func FindSubscriptions(websiteURL, userAgent, username, password string, fetchViaProxy bool) (Subscriptions, *errors.LocalizedError) {
+func FindSubscriptions(websiteURL, userAgent, username, password string, fetchViaProxy, allowSelfSignedCertificates bool) (Subscriptions, *errors.LocalizedError) {
     websiteURL = findYoutubeChannelFeed(websiteURL)
     websiteURL = parseYoutubeVideoPage(websiteURL)
 
     clt := client.NewClientWithConfig(websiteURL, config.Opts)
     clt.WithCredentials(username, password)
     clt.WithUserAgent(userAgent)
+    clt.AllowSelfSignedCertificates = allowSelfSignedCertificates
 
     if fetchViaProxy {
         clt.WithProxy()
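FindSubscriptions gains the same boolean, so subscription discovery call sites (not shown in this excerpt) now forward the per-feed setting as well. A hypothetical call, assuming the package is named subscription and that the calling handler has a request struct with matching fields:

// Hypothetical call site; the request variable and its fields are assumptions.
subscriptions, findErr := subscription.FindSubscriptions(
    discoveryRequest.URL,
    discoveryRequest.UserAgent,
    discoveryRequest.Username,
    discoveryRequest.Password,
    discoveryRequest.FetchViaProxy,
    discoveryRequest.AllowSelfSignedCertificates,
)
if findErr != nil {
    return findErr
}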