Mirror of https://github.com/miniflux/v2.git, synced 2025-06-27 16:36:00 +00:00
Add option to allow self-signed or invalid certificates

parent c3f871b49b
commit ec3c604a83

35 changed files with 388 additions and 227 deletions
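This changeset threads a new per-feed flag, AllowSelfSignedCertificates, from the entry processor down into scraper.Fetch. In Go's standard library, accepting a self-signed or otherwise invalid certificate comes down to setting InsecureSkipVerify on the transport's tls.Config. The sketch below shows that wiring under assumed names; newHTTPClient, the timeout, and the example URL are illustrative, not code from this commit:

package main

import (
	"crypto/tls"
	"log"
	"net/http"
	"time"
)

// newHTTPClient returns a client that optionally skips TLS certificate
// verification, which is what accepting a self-signed or invalid
// certificate amounts to in net/http.
func newHTTPClient(allowSelfSignedCertificates bool) *http.Client {
	return &http.Client{
		Transport: &http.Transport{
			TLSClientConfig: &tls.Config{
				// InsecureSkipVerify disables both chain and hostname
				// checks, so it should stay an opt-in, per-feed setting.
				InsecureSkipVerify: allowSelfSignedCertificates,
			},
		},
		Timeout: 30 * time.Second, // illustrative value, not from this commit
	}
}

func main() {
	client := newHTTPClient(true)
	resp, err := client.Get("https://self-signed.example.com/feed.xml")
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	log.Println(resp.Status)
}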
@@ -50,7 +50,12 @@ func ProcessFeedEntries(store *storage.Storage, feed *model.Feed) {
 			logger.Debug("[Processor] Crawling entry %q from feed %q", entry.URL, feed.FeedURL)
 
 			startTime := time.Now()
-			content, scraperErr := scraper.Fetch(entry.URL, feed.ScraperRules, feed.UserAgent)
+			content, scraperErr := scraper.Fetch(
+				entry.URL,
+				feed.ScraperRules,
+				feed.UserAgent,
+				feed.AllowSelfSignedCertificates,
+			)
 
 			if config.Opts.HasMetricsCollector() {
 				status := "success"
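The next hunk threads the same flag through ProcessEntryWebPage. Since the flag lives on the feed model, that function also gains the owning feed as a new parameter.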
@@ -118,9 +123,15 @@ func isAllowedEntry(feed *model.Feed, entry *model.Entry) bool {
 }
 
 // ProcessEntryWebPage downloads the entry web page and apply rewrite rules.
-func ProcessEntryWebPage(entry *model.Entry) error {
+func ProcessEntryWebPage(feed *model.Feed, entry *model.Entry) error {
 	startTime := time.Now()
-	content, scraperErr := scraper.Fetch(entry.URL, entry.Feed.ScraperRules, entry.Feed.UserAgent)
+	content, scraperErr := scraper.Fetch(
+		entry.URL,
+		entry.Feed.ScraperRules,
+		entry.Feed.UserAgent,
+		feed.AllowSelfSignedCertificates,
+	)
+
 	if config.Opts.HasMetricsCollector() {
 		status := "success"
 		if scraperErr != nil {
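Because the second hunk changes ProcessEntryWebPage's signature, every caller in the other changed files has to pass the feed alongside the entry. A hypothetical updated call site (the surrounding handler is assumed, not shown in this excerpt):

	// Hypothetical call site: the feed now travels with the entry so the
	// scraper can honor its AllowSelfSignedCertificates setting.
	if err := processor.ProcessEntryWebPage(feed, entry); err != nil {
		return err
	}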
|
Loading…
Add table
Add a link
Reference in a new issue