1
0
Fork 0
mirror of https://github.com/miniflux/v2.git synced 2025-06-27 16:36:00 +00:00

Add option to allow self-signed or invalid certificates

This commit is contained in:
Frédéric Guillot 2021-02-21 13:42:49 -08:00 committed by fguillot
parent c3f871b49b
commit ec3c604a83
35 changed files with 388 additions and 227 deletions

View file

@ -32,6 +32,7 @@ func (h *handler) discoverSubscriptions(w http.ResponseWriter, r *http.Request)
subscriptionDiscoveryRequest.Username, subscriptionDiscoveryRequest.Username,
subscriptionDiscoveryRequest.Password, subscriptionDiscoveryRequest.Password,
subscriptionDiscoveryRequest.FetchViaProxy, subscriptionDiscoveryRequest.FetchViaProxy,
subscriptionDiscoveryRequest.AllowSelfSignedCertificates,
) )
if finderErr != nil { if finderErr != nil {
json.ServerError(w, r, finderErr) json.ServerError(w, r, finderErr)

View file

@ -145,7 +145,9 @@ func Parse() {
} }
if flagMigrate { if flagMigrate {
database.Migrate(db) if err := database.Migrate(db); err != nil {
logger.Fatal(`%v`, err)
}
return return
} }

View file

@ -112,6 +112,7 @@ type Feed struct {
ParsingErrorCount int `json:"parsing_error_count,omitempty"` ParsingErrorCount int `json:"parsing_error_count,omitempty"`
Disabled bool `json:"disabled"` Disabled bool `json:"disabled"`
IgnoreHTTPCache bool `json:"ignore_http_cache"` IgnoreHTTPCache bool `json:"ignore_http_cache"`
AllowSelfSignedCertificates bool `json:"allow_self_signed_certificates"`
FetchViaProxy bool `json:"fetch_via_proxy"` FetchViaProxy bool `json:"fetch_via_proxy"`
ScraperRules string `json:"scraper_rules"` ScraperRules string `json:"scraper_rules"`
RewriteRules string `json:"rewrite_rules"` RewriteRules string `json:"rewrite_rules"`
@ -134,6 +135,7 @@ type FeedCreationRequest struct {
Crawler bool `json:"crawler"` Crawler bool `json:"crawler"`
Disabled bool `json:"disabled"` Disabled bool `json:"disabled"`
IgnoreHTTPCache bool `json:"ignore_http_cache"` IgnoreHTTPCache bool `json:"ignore_http_cache"`
AllowSelfSignedCertificates bool `json:"allow_self_signed_certificates"`
FetchViaProxy bool `json:"fetch_via_proxy"` FetchViaProxy bool `json:"fetch_via_proxy"`
ScraperRules string `json:"scraper_rules"` ScraperRules string `json:"scraper_rules"`
RewriteRules string `json:"rewrite_rules"` RewriteRules string `json:"rewrite_rules"`
@ -157,6 +159,7 @@ type FeedModificationRequest struct {
CategoryID *int64 `json:"category_id"` CategoryID *int64 `json:"category_id"`
Disabled *bool `json:"disabled"` Disabled *bool `json:"disabled"`
IgnoreHTTPCache *bool `json:"ignore_http_cache"` IgnoreHTTPCache *bool `json:"ignore_http_cache"`
AllowSelfSignedCertificates *bool `json:"allow_self_signed_certificates"`
FetchViaProxy *bool `json:"fetch_via_proxy"` FetchViaProxy *bool `json:"fetch_via_proxy"`
} }

View file

@ -10,6 +10,7 @@ import (
var schemaVersion = len(migrations) var schemaVersion = len(migrations)
// Order is important. Add new migrations at the end of the list.
var migrations = []func(tx *sql.Tx) error{ var migrations = []func(tx *sql.Tx) error{
func(tx *sql.Tx) (err error) { func(tx *sql.Tx) (err error) {
sql := ` sql := `
@ -514,4 +515,10 @@ var migrations = []func(tx *sql.Tx) error{
`) `)
return err return err
}, },
func(tx *sql.Tx) (err error) {
_, err = tx.Exec(`
ALTER TABLE feeds ADD COLUMN allow_self_signed_certificates boolean not null default false
`)
return err
},
} }

View file

@ -6,6 +6,7 @@ package client // import "miniflux.app/http/client"
import ( import (
"bytes" "bytes"
"crypto/tls"
"crypto/x509" "crypto/x509"
"encoding/json" "encoding/json"
"fmt" "fmt"
@ -53,6 +54,7 @@ type Client struct {
ClientTimeout int ClientTimeout int
ClientMaxBodySize int64 ClientMaxBodySize int64
ClientProxyURL string ClientProxyURL string
AllowSelfSignedCertificates bool
} }
// New initializes a new HTTP client. // New initializes a new HTTP client.
@ -87,13 +89,14 @@ func (c *Client) String() string {
} }
return fmt.Sprintf( return fmt.Sprintf(
`InputURL=%q RequestURL=%q ETag=%s LastModified=%s Auth=%v UserAgent=%q`, `InputURL=%q RequestURL=%q ETag=%s LastMod=%s Auth=%v UserAgent=%q Verify=%v`,
c.inputURL, c.inputURL,
c.requestURL, c.requestURL,
etagHeader, etagHeader,
lastModifiedHeader, lastModifiedHeader,
c.requestAuthorizationHeader != "" || (c.requestUsername != "" && c.requestPassword != ""), c.requestAuthorizationHeader != "" || (c.requestUsername != "" && c.requestPassword != ""),
c.requestUserAgent, c.requestUserAgent,
!c.AllowSelfSignedCertificates,
) )
} }
@ -288,6 +291,10 @@ func (c *Client) buildClient() http.Client {
IdleConnTimeout: 10 * time.Second, IdleConnTimeout: 10 * time.Second,
} }
if c.AllowSelfSignedCertificates {
transport.TLSClientConfig = &tls.Config{InsecureSkipVerify: true}
}
if c.doNotFollowRedirects { if c.doNotFollowRedirects {
client.CheckRedirect = func(req *http.Request, via []*http.Request) error { client.CheckRedirect = func(req *http.Request, via []*http.Request) error {
return http.ErrUseLastResponse return http.ErrUseLastResponse

View file

@ -267,6 +267,7 @@
"form.feed.label.blocklist_rules": "Blockierregeln", "form.feed.label.blocklist_rules": "Blockierregeln",
"form.feed.label.keeplist_rules": "Erlaubnisregeln", "form.feed.label.keeplist_rules": "Erlaubnisregeln",
"form.feed.label.ignore_http_cache": "Ignoriere HTTP-cache", "form.feed.label.ignore_http_cache": "Ignoriere HTTP-cache",
"form.feed.label.allow_self_signed_certificates": "Erlaube selbstsignierte oder ungültige Zertifikate",
"form.feed.label.fetch_via_proxy": "Über Proxy abrufen", "form.feed.label.fetch_via_proxy": "Über Proxy abrufen",
"form.feed.label.disabled": "Dieses Abonnement nicht aktualisieren", "form.feed.label.disabled": "Dieses Abonnement nicht aktualisieren",
"form.category.label.title": "Titel", "form.category.label.title": "Titel",

View file

@ -267,6 +267,7 @@
"form.feed.label.blocklist_rules": "Block Rules", "form.feed.label.blocklist_rules": "Block Rules",
"form.feed.label.keeplist_rules": "Keep Rules", "form.feed.label.keeplist_rules": "Keep Rules",
"form.feed.label.ignore_http_cache": "Ignore HTTP cache", "form.feed.label.ignore_http_cache": "Ignore HTTP cache",
"form.feed.label.allow_self_signed_certificates": "Allow self-signed or invalid certificates",
"form.feed.label.fetch_via_proxy": "Fetch via proxy", "form.feed.label.fetch_via_proxy": "Fetch via proxy",
"form.feed.label.disabled": "Do not refresh this feed", "form.feed.label.disabled": "Do not refresh this feed",
"form.category.label.title": "Title", "form.category.label.title": "Title",

View file

@ -267,6 +267,7 @@
"form.feed.label.blocklist_rules": "Reglas de Filtrado(Bloquear)", "form.feed.label.blocklist_rules": "Reglas de Filtrado(Bloquear)",
"form.feed.label.keeplist_rules": "Reglas de Filtrado(Permitir)", "form.feed.label.keeplist_rules": "Reglas de Filtrado(Permitir)",
"form.feed.label.ignore_http_cache": "Ignorar caché HTTP", "form.feed.label.ignore_http_cache": "Ignorar caché HTTP",
"form.feed.label.allow_self_signed_certificates": "Permitir certificados autofirmados o no válidos",
"form.feed.label.fetch_via_proxy": "Buscar a través de proxy", "form.feed.label.fetch_via_proxy": "Buscar a través de proxy",
"form.feed.label.disabled": "No actualice este feed", "form.feed.label.disabled": "No actualice este feed",
"form.category.label.title": "Título", "form.category.label.title": "Título",

View file

@ -267,6 +267,7 @@
"form.feed.label.blocklist_rules": "Règles de blocage", "form.feed.label.blocklist_rules": "Règles de blocage",
"form.feed.label.keeplist_rules": "Règles d'autorisation", "form.feed.label.keeplist_rules": "Règles d'autorisation",
"form.feed.label.ignore_http_cache": "Ignorer le cache HTTP", "form.feed.label.ignore_http_cache": "Ignorer le cache HTTP",
"form.feed.label.allow_self_signed_certificates": "Autoriser les certificats auto-signés ou non valides",
"form.feed.label.fetch_via_proxy": "Récupérer via proxy", "form.feed.label.fetch_via_proxy": "Récupérer via proxy",
"form.feed.label.disabled": "Ne pas actualiser ce flux", "form.feed.label.disabled": "Ne pas actualiser ce flux",
"form.category.label.title": "Titre", "form.category.label.title": "Titre",

View file

@ -267,6 +267,7 @@
"form.feed.label.blocklist_rules": "Regole di blocco", "form.feed.label.blocklist_rules": "Regole di blocco",
"form.feed.label.keeplist_rules": "Regole di autorizzazione", "form.feed.label.keeplist_rules": "Regole di autorizzazione",
"form.feed.label.ignore_http_cache": "Ignora cache HTTP", "form.feed.label.ignore_http_cache": "Ignora cache HTTP",
"form.feed.label.allow_self_signed_certificates": "Consenti certificati autofirmati o non validi",
"form.feed.label.fetch_via_proxy": "Recuperare tramite proxy", "form.feed.label.fetch_via_proxy": "Recuperare tramite proxy",
"form.feed.label.disabled": "Non aggiornare questo feed", "form.feed.label.disabled": "Non aggiornare questo feed",
"form.category.label.title": "Titolo", "form.category.label.title": "Titolo",

View file

@ -267,6 +267,7 @@
"form.feed.label.blocklist_rules": "ブロックルール", "form.feed.label.blocklist_rules": "ブロックルール",
"form.feed.label.keeplist_rules": "許可規則", "form.feed.label.keeplist_rules": "許可規則",
"form.feed.label.ignore_http_cache": "HTTPキャッシュを無視", "form.feed.label.ignore_http_cache": "HTTPキャッシュを無視",
"form.feed.label.allow_self_signed_certificates": "自己署名証明書または無効な証明書を許可する",
"form.feed.label.fetch_via_proxy": "プロキシ経由でフェッチ", "form.feed.label.fetch_via_proxy": "プロキシ経由でフェッチ",
"form.feed.label.disabled": "このフィードを更新しない", "form.feed.label.disabled": "このフィードを更新しない",
"form.category.label.title": "タイトル", "form.category.label.title": "タイトル",

View file

@ -267,6 +267,7 @@
"form.feed.label.blocklist_rules": "Blokkeer regels", "form.feed.label.blocklist_rules": "Blokkeer regels",
"form.feed.label.keeplist_rules": "toestemmingsregels", "form.feed.label.keeplist_rules": "toestemmingsregels",
"form.feed.label.ignore_http_cache": "Negeer HTTP-cache", "form.feed.label.ignore_http_cache": "Negeer HTTP-cache",
"form.feed.label.allow_self_signed_certificates": "Sta zelfondertekende of ongeldige certificaten toe",
"form.feed.label.fetch_via_proxy": "Ophalen via proxy", "form.feed.label.fetch_via_proxy": "Ophalen via proxy",
"form.feed.label.disabled": "Vernieuw deze feed niet", "form.feed.label.disabled": "Vernieuw deze feed niet",
"form.category.label.title": "Naam", "form.category.label.title": "Naam",

View file

@ -269,6 +269,7 @@
"form.feed.label.blocklist_rules": "Zasady blokowania", "form.feed.label.blocklist_rules": "Zasady blokowania",
"form.feed.label.keeplist_rules": "Zasady zezwoleń", "form.feed.label.keeplist_rules": "Zasady zezwoleń",
"form.feed.label.ignore_http_cache": "Zignoruj pamięć podręczną HTTP", "form.feed.label.ignore_http_cache": "Zignoruj pamięć podręczną HTTP",
"form.feed.label.allow_self_signed_certificates": "Zezwalaj na certyfikaty z podpisem własnym lub nieprawidłowe certyfikaty",
"form.feed.label.fetch_via_proxy": "Pobierz przez proxy", "form.feed.label.fetch_via_proxy": "Pobierz przez proxy",
"form.feed.label.disabled": "Nie odświeżaj tego kanału", "form.feed.label.disabled": "Nie odświeżaj tego kanału",
"form.category.label.title": "Tytuł", "form.category.label.title": "Tytuł",

View file

@ -267,6 +267,7 @@
"form.feed.label.blocklist_rules": "Regras de bloqueio", "form.feed.label.blocklist_rules": "Regras de bloqueio",
"form.feed.label.keeplist_rules": "Regras de permissão", "form.feed.label.keeplist_rules": "Regras de permissão",
"form.feed.label.ignore_http_cache": "Ignorar cache HTTP", "form.feed.label.ignore_http_cache": "Ignorar cache HTTP",
"form.feed.label.allow_self_signed_certificates": "Permitir certificados autoassinados ou inválidos",
"form.feed.label.disabled": "Não atualizar esta fonte", "form.feed.label.disabled": "Não atualizar esta fonte",
"form.feed.label.fetch_via_proxy": "Buscar via proxy", "form.feed.label.fetch_via_proxy": "Buscar via proxy",
"form.category.label.title": "Título", "form.category.label.title": "Título",

View file

@ -269,6 +269,7 @@
"form.feed.label.blocklist_rules": "Правила блокировки", "form.feed.label.blocklist_rules": "Правила блокировки",
"form.feed.label.keeplist_rules": "правила разрешений", "form.feed.label.keeplist_rules": "правила разрешений",
"form.feed.label.ignore_http_cache": "Игнорировать HTTP-кеш", "form.feed.label.ignore_http_cache": "Игнорировать HTTP-кеш",
"form.feed.label.allow_self_signed_certificates": "Разрешить самоподписанные или недействительные сертификаты",
"form.feed.label.fetch_via_proxy": "Получить через прокси", "form.feed.label.fetch_via_proxy": "Получить через прокси",
"form.feed.label.disabled": "Не обновлять этот канал", "form.feed.label.disabled": "Не обновлять этот канал",
"form.category.label.title": "Название", "form.category.label.title": "Название",

View file

@ -265,6 +265,7 @@
"form.feed.label.blocklist_rules": "封锁规则", "form.feed.label.blocklist_rules": "封锁规则",
"form.feed.label.keeplist_rules": "许可规则", "form.feed.label.keeplist_rules": "许可规则",
"form.feed.label.ignore_http_cache": "忽略HTTP缓存", "form.feed.label.ignore_http_cache": "忽略HTTP缓存",
"form.feed.label.allow_self_signed_certificates": "允许自签名或无效的证书",
"form.feed.label.fetch_via_proxy": "通过代理获取", "form.feed.label.fetch_via_proxy": "通过代理获取",
"form.feed.label.disabled": "请勿刷新此Feed", "form.feed.label.disabled": "请勿刷新此Feed",
"form.category.label.title": "标题", "form.category.label.title": "标题",

View file

@ -45,6 +45,7 @@ type Feed struct {
Password string `json:"password"` Password string `json:"password"`
Disabled bool `json:"disabled"` Disabled bool `json:"disabled"`
IgnoreHTTPCache bool `json:"ignore_http_cache"` IgnoreHTTPCache bool `json:"ignore_http_cache"`
AllowSelfSignedCertificates bool `json:"allow_self_signed_certificates"`
FetchViaProxy bool `json:"fetch_via_proxy"` FetchViaProxy bool `json:"fetch_via_proxy"`
Category *Category `json:"category,omitempty"` Category *Category `json:"category,omitempty"`
Entries Entries `json:"entries,omitempty"` Entries Entries `json:"entries,omitempty"`
@ -125,6 +126,7 @@ type FeedCreationRequest struct {
Crawler bool `json:"crawler"` Crawler bool `json:"crawler"`
Disabled bool `json:"disabled"` Disabled bool `json:"disabled"`
IgnoreHTTPCache bool `json:"ignore_http_cache"` IgnoreHTTPCache bool `json:"ignore_http_cache"`
AllowSelfSignedCertificates bool `json:"allow_self_signed_certificates"`
FetchViaProxy bool `json:"fetch_via_proxy"` FetchViaProxy bool `json:"fetch_via_proxy"`
ScraperRules string `json:"scraper_rules"` ScraperRules string `json:"scraper_rules"`
RewriteRules string `json:"rewrite_rules"` RewriteRules string `json:"rewrite_rules"`
@ -148,6 +150,7 @@ type FeedModificationRequest struct {
CategoryID *int64 `json:"category_id"` CategoryID *int64 `json:"category_id"`
Disabled *bool `json:"disabled"` Disabled *bool `json:"disabled"`
IgnoreHTTPCache *bool `json:"ignore_http_cache"` IgnoreHTTPCache *bool `json:"ignore_http_cache"`
AllowSelfSignedCertificates *bool `json:"allow_self_signed_certificates"`
FetchViaProxy *bool `json:"fetch_via_proxy"` FetchViaProxy *bool `json:"fetch_via_proxy"`
} }
@ -209,6 +212,10 @@ func (f *FeedModificationRequest) Patch(feed *Feed) {
feed.IgnoreHTTPCache = *f.IgnoreHTTPCache feed.IgnoreHTTPCache = *f.IgnoreHTTPCache
} }
if f.AllowSelfSignedCertificates != nil {
feed.AllowSelfSignedCertificates = *f.AllowSelfSignedCertificates
}
if f.FetchViaProxy != nil { if f.FetchViaProxy != nil {
feed.FetchViaProxy = *f.FetchViaProxy feed.FetchViaProxy = *f.FetchViaProxy
} }

View file

@ -11,4 +11,5 @@ type SubscriptionDiscoveryRequest struct {
Username string `json:"username"` Username string `json:"username"`
Password string `json:"password"` Password string `json:"password"`
FetchViaProxy bool `json:"fetch_via_proxy"` FetchViaProxy bool `json:"fetch_via_proxy"`
AllowSelfSignedCertificates bool `json:"allow_self_signed_certificates"`
} }

View file

@ -39,6 +39,7 @@ func CreateFeed(store *storage.Storage, userID int64, feedCreationRequest *model
request := client.NewClientWithConfig(feedCreationRequest.FeedURL, config.Opts) request := client.NewClientWithConfig(feedCreationRequest.FeedURL, config.Opts)
request.WithCredentials(feedCreationRequest.Username, feedCreationRequest.Password) request.WithCredentials(feedCreationRequest.Username, feedCreationRequest.Password)
request.WithUserAgent(feedCreationRequest.UserAgent) request.WithUserAgent(feedCreationRequest.UserAgent)
request.AllowSelfSignedCertificates = feedCreationRequest.AllowSelfSignedCertificates
if feedCreationRequest.FetchViaProxy { if feedCreationRequest.FetchViaProxy {
request.WithProxy() request.WithProxy()
@ -65,6 +66,7 @@ func CreateFeed(store *storage.Storage, userID int64, feedCreationRequest *model
subscription.Crawler = feedCreationRequest.Crawler subscription.Crawler = feedCreationRequest.Crawler
subscription.Disabled = feedCreationRequest.Disabled subscription.Disabled = feedCreationRequest.Disabled
subscription.IgnoreHTTPCache = feedCreationRequest.IgnoreHTTPCache subscription.IgnoreHTTPCache = feedCreationRequest.IgnoreHTTPCache
subscription.AllowSelfSignedCertificates = feedCreationRequest.AllowSelfSignedCertificates
subscription.FetchViaProxy = feedCreationRequest.FetchViaProxy subscription.FetchViaProxy = feedCreationRequest.FetchViaProxy
subscription.ScraperRules = feedCreationRequest.ScraperRules subscription.ScraperRules = feedCreationRequest.ScraperRules
subscription.RewriteRules = feedCreationRequest.RewriteRules subscription.RewriteRules = feedCreationRequest.RewriteRules
@ -82,7 +84,13 @@ func CreateFeed(store *storage.Storage, userID int64, feedCreationRequest *model
logger.Debug("[CreateFeed] Feed saved with ID: %d", subscription.ID) logger.Debug("[CreateFeed] Feed saved with ID: %d", subscription.ID)
checkFeedIcon(store, subscription.ID, subscription.SiteURL, feedCreationRequest.FetchViaProxy) checkFeedIcon(
store,
subscription.ID,
subscription.SiteURL,
feedCreationRequest.FetchViaProxy,
feedCreationRequest.AllowSelfSignedCertificates,
)
return subscription, nil return subscription, nil
} }
@ -116,6 +124,7 @@ func RefreshFeed(store *storage.Storage, userID, feedID int64) error {
request := client.NewClientWithConfig(originalFeed.FeedURL, config.Opts) request := client.NewClientWithConfig(originalFeed.FeedURL, config.Opts)
request.WithCredentials(originalFeed.Username, originalFeed.Password) request.WithCredentials(originalFeed.Username, originalFeed.Password)
request.WithUserAgent(originalFeed.UserAgent) request.WithUserAgent(originalFeed.UserAgent)
request.AllowSelfSignedCertificates = originalFeed.AllowSelfSignedCertificates
if !originalFeed.IgnoreHTTPCache { if !originalFeed.IgnoreHTTPCache {
request.WithCacheHeaders(originalFeed.EtagHeader, originalFeed.LastModifiedHeader) request.WithCacheHeaders(originalFeed.EtagHeader, originalFeed.LastModifiedHeader)
@ -162,7 +171,13 @@ func RefreshFeed(store *storage.Storage, userID, feedID int64) error {
// We update caching headers only if the feed has been modified, // We update caching headers only if the feed has been modified,
// because some websites don't return the same headers when replying with a 304. // because some websites don't return the same headers when replying with a 304.
originalFeed.WithClientResponse(response) originalFeed.WithClientResponse(response)
checkFeedIcon(store, originalFeed.ID, originalFeed.SiteURL, originalFeed.FetchViaProxy) checkFeedIcon(
store,
originalFeed.ID,
originalFeed.SiteURL,
originalFeed.FetchViaProxy,
originalFeed.AllowSelfSignedCertificates,
)
} else { } else {
logger.Debug("[RefreshFeed] Feed #%d not modified", feedID) logger.Debug("[RefreshFeed] Feed #%d not modified", feedID)
} }
@ -178,9 +193,9 @@ func RefreshFeed(store *storage.Storage, userID, feedID int64) error {
return nil return nil
} }
func checkFeedIcon(store *storage.Storage, feedID int64, websiteURL string, fetchViaProxy bool) { func checkFeedIcon(store *storage.Storage, feedID int64, websiteURL string, fetchViaProxy, allowSelfSignedCertificates bool) {
if !store.HasIcon(feedID) { if !store.HasIcon(feedID) {
icon, err := icon.FindIcon(websiteURL, fetchViaProxy) icon, err := icon.FindIcon(websiteURL, fetchViaProxy, allowSelfSignedCertificates)
if err != nil { if err != nil {
logger.Debug(`[CheckFeedIcon] %v (feedID=%d websiteURL=%s)`, err, feedID, websiteURL) logger.Debug(`[CheckFeedIcon] %v (feedID=%d websiteURL=%s)`, err, feedID, websiteURL)
} else if icon == nil { } else if icon == nil {

View file

@ -21,12 +21,14 @@ import (
) )
// FindIcon try to find the website's icon. // FindIcon try to find the website's icon.
func FindIcon(websiteURL string, fetchViaProxy bool) (*model.Icon, error) { func FindIcon(websiteURL string, fetchViaProxy, allowSelfSignedCertificates bool) (*model.Icon, error) {
rootURL := url.RootURL(websiteURL) rootURL := url.RootURL(websiteURL)
clt := client.NewClientWithConfig(rootURL, config.Opts) clt := client.NewClientWithConfig(rootURL, config.Opts)
clt.AllowSelfSignedCertificates = allowSelfSignedCertificates
if fetchViaProxy { if fetchViaProxy {
clt.WithProxy() clt.WithProxy()
} }
response, err := clt.Get() response, err := clt.Get()
if err != nil { if err != nil {
return nil, fmt.Errorf("unable to download website index page: %v", err) return nil, fmt.Errorf("unable to download website index page: %v", err)
@ -46,7 +48,7 @@ func FindIcon(websiteURL string, fetchViaProxy bool) (*model.Icon, error) {
} }
logger.Debug("[FindIcon] Fetching icon => %s", iconURL) logger.Debug("[FindIcon] Fetching icon => %s", iconURL)
icon, err := downloadIcon(iconURL, fetchViaProxy) icon, err := downloadIcon(iconURL, fetchViaProxy, allowSelfSignedCertificates)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -89,8 +91,9 @@ func parseDocument(websiteURL string, data io.Reader) (string, error) {
return iconURL, nil return iconURL, nil
} }
func downloadIcon(iconURL string, fetchViaProxy bool) (*model.Icon, error) { func downloadIcon(iconURL string, fetchViaProxy, allowSelfSignedCertificates bool) (*model.Icon, error) {
clt := client.NewClientWithConfig(iconURL, config.Opts) clt := client.NewClientWithConfig(iconURL, config.Opts)
clt.AllowSelfSignedCertificates = allowSelfSignedCertificates
if fetchViaProxy { if fetchViaProxy {
clt.WithProxy() clt.WithProxy()
} }

View file

@ -50,7 +50,12 @@ func ProcessFeedEntries(store *storage.Storage, feed *model.Feed) {
logger.Debug("[Processor] Crawling entry %q from feed %q", entry.URL, feed.FeedURL) logger.Debug("[Processor] Crawling entry %q from feed %q", entry.URL, feed.FeedURL)
startTime := time.Now() startTime := time.Now()
content, scraperErr := scraper.Fetch(entry.URL, feed.ScraperRules, feed.UserAgent) content, scraperErr := scraper.Fetch(
entry.URL,
feed.ScraperRules,
feed.UserAgent,
feed.AllowSelfSignedCertificates,
)
if config.Opts.HasMetricsCollector() { if config.Opts.HasMetricsCollector() {
status := "success" status := "success"
@ -118,9 +123,15 @@ func isAllowedEntry(feed *model.Feed, entry *model.Entry) bool {
} }
// ProcessEntryWebPage downloads the entry web page and apply rewrite rules. // ProcessEntryWebPage downloads the entry web page and apply rewrite rules.
func ProcessEntryWebPage(entry *model.Entry) error { func ProcessEntryWebPage(feed *model.Feed, entry *model.Entry) error {
startTime := time.Now() startTime := time.Now()
content, scraperErr := scraper.Fetch(entry.URL, entry.Feed.ScraperRules, entry.Feed.UserAgent) content, scraperErr := scraper.Fetch(
entry.URL,
entry.Feed.ScraperRules,
entry.Feed.UserAgent,
feed.AllowSelfSignedCertificates,
)
if config.Opts.HasMetricsCollector() { if config.Opts.HasMetricsCollector() {
status := "success" status := "success"
if scraperErr != nil { if scraperErr != nil {

View file

@ -20,11 +20,10 @@ import (
) )
// Fetch downloads a web page and returns relevant contents. // Fetch downloads a web page and returns relevant contents.
func Fetch(websiteURL, rules, userAgent string) (string, error) { func Fetch(websiteURL, rules, userAgent string, allowSelfSignedCertificates bool) (string, error) {
clt := client.NewClientWithConfig(websiteURL, config.Opts) clt := client.NewClientWithConfig(websiteURL, config.Opts)
if userAgent != "" {
clt.WithUserAgent(userAgent) clt.WithUserAgent(userAgent)
} clt.AllowSelfSignedCertificates = allowSelfSignedCertificates
response, err := clt.Get() response, err := clt.Get()
if err != nil { if err != nil {

View file

@ -27,13 +27,14 @@ var (
) )
// FindSubscriptions downloads and try to find one or more subscriptions from an URL. // FindSubscriptions downloads and try to find one or more subscriptions from an URL.
func FindSubscriptions(websiteURL, userAgent, username, password string, fetchViaProxy bool) (Subscriptions, *errors.LocalizedError) { func FindSubscriptions(websiteURL, userAgent, username, password string, fetchViaProxy, allowSelfSignedCertificates bool) (Subscriptions, *errors.LocalizedError) {
websiteURL = findYoutubeChannelFeed(websiteURL) websiteURL = findYoutubeChannelFeed(websiteURL)
websiteURL = parseYoutubeVideoPage(websiteURL) websiteURL = parseYoutubeVideoPage(websiteURL)
clt := client.NewClientWithConfig(websiteURL, config.Opts) clt := client.NewClientWithConfig(websiteURL, config.Opts)
clt.WithCredentials(username, password) clt.WithCredentials(username, password)
clt.WithUserAgent(userAgent) clt.WithUserAgent(userAgent)
clt.AllowSelfSignedCertificates = allowSelfSignedCertificates
if fetchViaProxy { if fetchViaProxy {
clt.WithProxy() clt.WithProxy()

View file

@ -201,10 +201,11 @@ func (s *Storage) CreateFeed(feed *model.Feed) error {
blocklist_rules, blocklist_rules,
keeplist_rules, keeplist_rules,
ignore_http_cache, ignore_http_cache,
allow_self_signed_certificates,
fetch_via_proxy fetch_via_proxy
) )
VALUES VALUES
($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18) ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18, $19)
RETURNING RETURNING
id id
` `
@ -227,6 +228,7 @@ func (s *Storage) CreateFeed(feed *model.Feed) error {
feed.BlocklistRules, feed.BlocklistRules,
feed.KeeplistRules, feed.KeeplistRules,
feed.IgnoreHTTPCache, feed.IgnoreHTTPCache,
feed.AllowSelfSignedCertificates,
feed.FetchViaProxy, feed.FetchViaProxy,
).Scan(&feed.ID) ).Scan(&feed.ID)
if err != nil { if err != nil {
@ -283,9 +285,10 @@ func (s *Storage) UpdateFeed(feed *model.Feed) (err error) {
disabled=$18, disabled=$18,
next_check_at=$19, next_check_at=$19,
ignore_http_cache=$20, ignore_http_cache=$20,
fetch_via_proxy=$21 allow_self_signed_certificates=$21,
fetch_via_proxy=$22
WHERE WHERE
id=$22 AND user_id=$23 id=$23 AND user_id=$24
` `
_, err = s.db.Exec(query, _, err = s.db.Exec(query,
feed.FeedURL, feed.FeedURL,
@ -308,6 +311,7 @@ func (s *Storage) UpdateFeed(feed *model.Feed) (err error) {
feed.Disabled, feed.Disabled,
feed.NextCheckAt, feed.NextCheckAt,
feed.IgnoreHTTPCache, feed.IgnoreHTTPCache,
feed.AllowSelfSignedCertificates,
feed.FetchViaProxy, feed.FetchViaProxy,
feed.ID, feed.ID,
feed.UserID, feed.UserID,

View file

@ -162,6 +162,7 @@ func (f *FeedQueryBuilder) GetFeeds() (model.Feeds, error) {
f.username, f.username,
f.password, f.password,
f.ignore_http_cache, f.ignore_http_cache,
f.allow_self_signed_certificates,
f.fetch_via_proxy, f.fetch_via_proxy,
f.disabled, f.disabled,
f.category_id, f.category_id,
@ -220,6 +221,7 @@ func (f *FeedQueryBuilder) GetFeeds() (model.Feeds, error) {
&feed.Username, &feed.Username,
&feed.Password, &feed.Password,
&feed.IgnoreHTTPCache, &feed.IgnoreHTTPCache,
&feed.AllowSelfSignedCertificates,
&feed.FetchViaProxy, &feed.FetchViaProxy,
&feed.Disabled, &feed.Disabled,
&feed.Category.ID, &feed.Category.ID,

View file

@ -30,6 +30,8 @@
<summary>{{ t "page.add_feed.legend.advanced_options" }}</summary> <summary>{{ t "page.add_feed.legend.advanced_options" }}</summary>
<div class="details-content"> <div class="details-content">
<label><input type="checkbox" name="crawler" value="1" {{ if .form.Crawler }}checked{{ end }}> {{ t "form.feed.label.crawler" }}</label> <label><input type="checkbox" name="crawler" value="1" {{ if .form.Crawler }}checked{{ end }}> {{ t "form.feed.label.crawler" }}</label>
<label><input type="checkbox" name="allow_self_signed_certificates" value="1" {{ if .form.AllowSelfSignedCertificates }}checked{{ end }}> {{ t "form.feed.label.allow_self_signed_certificates" }}</label>
{{ if .hasProxyConfigured }} {{ if .hasProxyConfigured }}
<label><input type="checkbox" name="fetch_via_proxy" value="1" {{ if .form.FetchViaProxy }}checked{{ end }}> {{ t "form.feed.label.fetch_via_proxy" }}</label> <label><input type="checkbox" name="fetch_via_proxy" value="1" {{ if .form.FetchViaProxy }}checked{{ end }}> {{ t "form.feed.label.fetch_via_proxy" }}</label>
{{ end }} {{ end }}

View file

@ -20,6 +20,9 @@
{{ if .form.Crawler }} {{ if .form.Crawler }}
<input type="hidden" name="crawler" value="1"> <input type="hidden" name="crawler" value="1">
{{ end }} {{ end }}
{{ if .form.AllowSelfSignedCertificates }}
<input type="hidden" name="allow_self_signed_certificates" value="1">
{{ end }}
<h3>{{ t "page.add_feed.choose_feed" }}</h3> <h3>{{ t "page.add_feed.choose_feed" }}</h3>

View file

@ -79,6 +79,7 @@
<label><input type="checkbox" name="crawler" value="1" {{ if .form.Crawler }}checked{{ end }}> {{ t "form.feed.label.crawler" }}</label> <label><input type="checkbox" name="crawler" value="1" {{ if .form.Crawler }}checked{{ end }}> {{ t "form.feed.label.crawler" }}</label>
<label><input type="checkbox" name="ignore_http_cache" value="1" {{ if .form.IgnoreHTTPCache }}checked{{ end }}> {{ t "form.feed.label.ignore_http_cache" }}</label> <label><input type="checkbox" name="ignore_http_cache" value="1" {{ if .form.IgnoreHTTPCache }}checked{{ end }}> {{ t "form.feed.label.ignore_http_cache" }}</label>
<label><input type="checkbox" name="allow_self_signed_certificates" value="1" {{ if .form.AllowSelfSignedCertificates }}checked{{ end }}> {{ t "form.feed.label.allow_self_signed_certificates" }}</label>
{{ if .hasProxyConfigured }} {{ if .hasProxyConfigured }}
<label><input type="checkbox" name="fetch_via_proxy" value="1" {{ if .form.FetchViaProxy }}checked{{ end }}> {{ t "form.feed.label.fetch_via_proxy" }}</label> <label><input type="checkbox" name="fetch_via_proxy" value="1" {{ if .form.FetchViaProxy }}checked{{ end }}> {{ t "form.feed.label.fetch_via_proxy" }}</label>
{{ end }} {{ end }}

View file

@ -171,6 +171,37 @@ func TestCreateFeedWithCrawlerEnabled(t *testing.T) {
} }
} }
func TestCreateFeedWithSelfSignedCertificatesAllowed(t *testing.T) {
client := createClient(t)
categories, err := client.Categories()
if err != nil {
t.Fatal(err)
}
feedID, err := client.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testFeedURL,
CategoryID: categories[0].ID,
AllowSelfSignedCertificates: true,
})
if err != nil {
t.Fatal(err)
}
if feedID == 0 {
t.Fatalf(`Invalid feed ID, got %q`, feedID)
}
feed, err := client.Feed(feedID)
if err != nil {
t.Fatal(err)
}
if !feed.AllowSelfSignedCertificates {
t.Error(`The feed should have self-signed certificates enabled`)
}
}
func TestCreateFeedWithScraperRule(t *testing.T) { func TestCreateFeedWithScraperRule(t *testing.T) {
client := createClient(t) client := createClient(t)
@ -375,6 +406,31 @@ func TestUpdateFeedCrawler(t *testing.T) {
} }
} }
func TestUpdateFeedAllowSelfSignedCertificates(t *testing.T) {
client := createClient(t)
feed, _ := createFeed(t, client)
selfSigned := true
updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{AllowSelfSignedCertificates: &selfSigned})
if err != nil {
t.Fatal(err)
}
if updatedFeed.AllowSelfSignedCertificates != selfSigned {
t.Fatalf(`Wrong AllowSelfSignedCertificates value, got "%v" instead of "%v"`, updatedFeed.AllowSelfSignedCertificates, selfSigned)
}
selfSigned = false
updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{AllowSelfSignedCertificates: &selfSigned})
if err != nil {
t.Fatal(err)
}
if updatedFeed.AllowSelfSignedCertificates != selfSigned {
t.Fatalf(`Wrong AllowSelfSignedCertificates value, got "%v" instead of "%v"`, updatedFeed.AllowSelfSignedCertificates, selfSigned)
}
}
func TestUpdateFeedScraperRules(t *testing.T) { func TestUpdateFeedScraperRules(t *testing.T) {
client := createClient(t) client := createClient(t)
feed, _ := createFeed(t, client) feed, _ := createFeed(t, client)

View file

@ -12,15 +12,18 @@ import (
"miniflux.app/model" "miniflux.app/model"
"miniflux.app/proxy" "miniflux.app/proxy"
"miniflux.app/reader/processor" "miniflux.app/reader/processor"
"miniflux.app/storage"
) )
func (h *handler) fetchContent(w http.ResponseWriter, r *http.Request) { func (h *handler) fetchContent(w http.ResponseWriter, r *http.Request) {
loggedUserID := request.UserID(r)
entryID := request.RouteInt64Param(r, "entryID") entryID := request.RouteInt64Param(r, "entryID")
builder := h.store.NewEntryQueryBuilder(request.UserID(r))
builder.WithEntryID(entryID)
builder.WithoutStatus(model.EntryStatusRemoved)
entry, err := builder.GetEntry() entryBuilder := h.store.NewEntryQueryBuilder(loggedUserID)
entryBuilder.WithEntryID(entryID)
entryBuilder.WithoutStatus(model.EntryStatusRemoved)
entry, err := entryBuilder.GetEntry()
if err != nil { if err != nil {
json.ServerError(w, r, err) json.ServerError(w, r, err)
return return
@ -31,7 +34,20 @@ func (h *handler) fetchContent(w http.ResponseWriter, r *http.Request) {
return return
} }
if err := processor.ProcessEntryWebPage(entry); err != nil { feedBuilder := storage.NewFeedQueryBuilder(h.store, loggedUserID)
feedBuilder.WithFeedID(entry.FeedID)
feed, err := feedBuilder.GetFeed()
if err != nil {
json.ServerError(w, r, err)
return
}
if feed == nil {
json.NotFound(w, r)
return
}
if err := processor.ProcessEntryWebPage(feed, entry); err != nil {
json.ServerError(w, r, err) json.ServerError(w, r, err)
return return
} }

View file

@ -54,6 +54,7 @@ func (h *handler) showEditFeedPage(w http.ResponseWriter, r *http.Request) {
Username: feed.Username, Username: feed.Username,
Password: feed.Password, Password: feed.Password,
IgnoreHTTPCache: feed.IgnoreHTTPCache, IgnoreHTTPCache: feed.IgnoreHTTPCache,
AllowSelfSignedCertificates: feed.AllowSelfSignedCertificates,
FetchViaProxy: feed.FetchViaProxy, FetchViaProxy: feed.FetchViaProxy,
Disabled: feed.Disabled, Disabled: feed.Disabled,
} }

View file

@ -26,6 +26,7 @@ type FeedForm struct {
Username string Username string
Password string Password string
IgnoreHTTPCache bool IgnoreHTTPCache bool
AllowSelfSignedCertificates bool
FetchViaProxy bool FetchViaProxy bool
Disabled bool Disabled bool
} }
@ -47,6 +48,7 @@ func (f FeedForm) Merge(feed *model.Feed) *model.Feed {
feed.Username = f.Username feed.Username = f.Username
feed.Password = f.Password feed.Password = f.Password
feed.IgnoreHTTPCache = f.IgnoreHTTPCache feed.IgnoreHTTPCache = f.IgnoreHTTPCache
feed.AllowSelfSignedCertificates = f.AllowSelfSignedCertificates
feed.FetchViaProxy = f.FetchViaProxy feed.FetchViaProxy = f.FetchViaProxy
feed.Disabled = f.Disabled feed.Disabled = f.Disabled
return feed return feed
@ -72,6 +74,7 @@ func NewFeedForm(r *http.Request) *FeedForm {
Username: r.FormValue("feed_username"), Username: r.FormValue("feed_username"),
Password: r.FormValue("feed_password"), Password: r.FormValue("feed_password"),
IgnoreHTTPCache: r.FormValue("ignore_http_cache") == "1", IgnoreHTTPCache: r.FormValue("ignore_http_cache") == "1",
AllowSelfSignedCertificates: r.FormValue("allow_self_signed_certificates") == "1",
FetchViaProxy: r.FormValue("fetch_via_proxy") == "1", FetchViaProxy: r.FormValue("fetch_via_proxy") == "1",
Disabled: r.FormValue("disabled") == "1", Disabled: r.FormValue("disabled") == "1",
} }

View file

@ -18,6 +18,7 @@ type SubscriptionForm struct {
CategoryID int64 CategoryID int64
Crawler bool Crawler bool
FetchViaProxy bool FetchViaProxy bool
AllowSelfSignedCertificates bool
UserAgent string UserAgent string
Username string Username string
Password string Password string
@ -59,6 +60,7 @@ func NewSubscriptionForm(r *http.Request) *SubscriptionForm {
URL: r.FormValue("url"), URL: r.FormValue("url"),
CategoryID: int64(categoryID), CategoryID: int64(categoryID),
Crawler: r.FormValue("crawler") == "1", Crawler: r.FormValue("crawler") == "1",
AllowSelfSignedCertificates: r.FormValue("allow_self_signed_certificates") == "1",
FetchViaProxy: r.FormValue("fetch_via_proxy") == "1", FetchViaProxy: r.FormValue("fetch_via_proxy") == "1",
UserAgent: r.FormValue("user_agent"), UserAgent: r.FormValue("user_agent"),
Username: r.FormValue("feed_username"), Username: r.FormValue("feed_username"),

View file

@ -53,6 +53,7 @@ func (h *handler) showChooseSubscriptionPage(w http.ResponseWriter, r *http.Requ
CategoryID: subscriptionForm.CategoryID, CategoryID: subscriptionForm.CategoryID,
FeedURL: subscriptionForm.URL, FeedURL: subscriptionForm.URL,
Crawler: subscriptionForm.Crawler, Crawler: subscriptionForm.Crawler,
AllowSelfSignedCertificates: subscriptionForm.AllowSelfSignedCertificates,
UserAgent: subscriptionForm.UserAgent, UserAgent: subscriptionForm.UserAgent,
Username: subscriptionForm.Username, Username: subscriptionForm.Username,
Password: subscriptionForm.Password, Password: subscriptionForm.Password,

View file

@ -58,6 +58,7 @@ func (h *handler) submitSubscription(w http.ResponseWriter, r *http.Request) {
subscriptionForm.Username, subscriptionForm.Username,
subscriptionForm.Password, subscriptionForm.Password,
subscriptionForm.FetchViaProxy, subscriptionForm.FetchViaProxy,
subscriptionForm.AllowSelfSignedCertificates,
) )
if findErr != nil { if findErr != nil {
logger.Error("[UI:SubmitSubscription] %s", findErr) logger.Error("[UI:SubmitSubscription] %s", findErr)
@ -80,6 +81,7 @@ func (h *handler) submitSubscription(w http.ResponseWriter, r *http.Request) {
CategoryID: subscriptionForm.CategoryID, CategoryID: subscriptionForm.CategoryID,
FeedURL: subscriptions[0].URL, FeedURL: subscriptions[0].URL,
Crawler: subscriptionForm.Crawler, Crawler: subscriptionForm.Crawler,
AllowSelfSignedCertificates: subscriptionForm.AllowSelfSignedCertificates,
UserAgent: subscriptionForm.UserAgent, UserAgent: subscriptionForm.UserAgent,
Username: subscriptionForm.Username, Username: subscriptionForm.Username,
Password: subscriptionForm.Password, Password: subscriptionForm.Password,