Add HTTP proxy option for subscriptions

Author: Kebin Liu, 2020-09-10 14:28:54 +08:00, committed by GitHub
parent 0f258fd55b
commit cf7712acea
40 changed files with 201 additions and 65 deletions
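
The diff below threads a new fetchViaProxy flag from feed creation and refresh down to the HTTP client. The client's WithProxy method itself is outside this excerpt; what follows is a minimal sketch of what such a builder method could look like in Go, assuming the proxy address comes from application configuration (the Client, proxyURL, and build names here are illustrative, not miniflux's actual API):

package client

import (
	"net/http"
	"net/url"
)

// Client is an illustrative request builder; the real miniflux
// client has a different shape.
type Client struct {
	requestURL string
	useProxy   bool
	proxyURL   string // e.g. supplied by an HTTP_CLIENT_PROXY-style setting
}

// WithProxy marks this request to be sent through the configured proxy.
func (c *Client) WithProxy() *Client {
	c.useProxy = true
	return c
}

// build returns an *http.Client whose transport routes through the
// proxy only when WithProxy was called and a proxy URL is configured.
func (c *Client) build() (*http.Client, error) {
	transport := &http.Transport{}
	if c.useProxy && c.proxyURL != "" {
		u, err := url.Parse(c.proxyURL)
		if err != nil {
			return nil, err
		}
		transport.Proxy = http.ProxyURL(u)
	}
	return &http.Client{Transport: transport}, nil
}

Keeping the proxy behind an opt-in flag means only feeds explicitly marked fetchViaProxy are routed through it; all other traffic keeps using a direct transport.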


@@ -34,7 +34,7 @@ type Handler struct {
 }
 
 // CreateFeed fetch, parse and store a new feed.
-func (h *Handler) CreateFeed(userID, categoryID int64, url string, crawler bool, userAgent, username, password, scraperRules, rewriteRules string) (*model.Feed, error) {
+func (h *Handler) CreateFeed(userID, categoryID int64, url string, crawler bool, userAgent, username, password, scraperRules, rewriteRules string, fetchViaProxy bool) (*model.Feed, error) {
 	defer timer.ExecutionTime(time.Now(), fmt.Sprintf("[Handler:CreateFeed] feedUrl=%s", url))
 
 	if !h.store.CategoryExists(userID, categoryID) {
@@ -44,6 +44,11 @@ func (h *Handler) CreateFeed(userID, categoryID int64, url string, crawler bool,
 	request := client.New(url)
 	request.WithCredentials(username, password)
 	request.WithUserAgent(userAgent)
+
+	if fetchViaProxy {
+		request.WithProxy()
+	}
+
 	response, requestErr := browser.Exec(request)
 	if requestErr != nil {
 		return nil, requestErr
@@ -60,7 +65,7 @@ func (h *Handler) CreateFeed(userID, categoryID int64, url string, crawler bool,
 	subscription.UserID = userID
 	subscription.WithCategoryID(categoryID)
-	subscription.WithBrowsingParameters(crawler, userAgent, username, password, scraperRules, rewriteRules)
+	subscription.WithBrowsingParameters(crawler, userAgent, username, password, scraperRules, rewriteRules, fetchViaProxy)
 	subscription.WithClientResponse(response)
 	subscription.CheckedNow()
@@ -72,7 +77,7 @@ func (h *Handler) CreateFeed(userID, categoryID int64, url string, crawler bool,
 	logger.Debug("[Handler:CreateFeed] Feed saved with ID: %d", subscription.ID)
 
-	checkFeedIcon(h.store, subscription.ID, subscription.SiteURL)
+	checkFeedIcon(h.store, subscription.ID, subscription.SiteURL, fetchViaProxy)
 	return subscription, nil
 }
@@ -111,6 +116,10 @@ func (h *Handler) RefreshFeed(userID, feedID int64) error {
 		request.WithCacheHeaders(originalFeed.EtagHeader, originalFeed.LastModifiedHeader)
 	}
 
+	if originalFeed.FetchViaProxy {
+		request.WithProxy()
+	}
+
 	response, requestErr := browser.Exec(request)
 	if requestErr != nil {
 		originalFeed.WithError(requestErr.Localize(printer))
@@ -141,7 +150,7 @@ func (h *Handler) RefreshFeed(userID, feedID int64) error {
 		// We update caching headers only if the feed has been modified,
 		// because some websites don't return the same headers when replying with a 304.
 		originalFeed.WithClientResponse(response)
-		checkFeedIcon(h.store, originalFeed.ID, originalFeed.SiteURL)
+		checkFeedIcon(h.store, originalFeed.ID, originalFeed.SiteURL, originalFeed.FetchViaProxy)
 	} else {
 		logger.Debug("[Handler:RefreshFeed] Feed #%d not modified", feedID)
 	}
@@ -162,9 +171,9 @@ func NewFeedHandler(store *storage.Storage) *Handler {
 	return &Handler{store}
 }
 
-func checkFeedIcon(store *storage.Storage, feedID int64, websiteURL string) {
+func checkFeedIcon(store *storage.Storage, feedID int64, websiteURL string, fetchViaProxy bool) {
 	if !store.HasIcon(feedID) {
-		icon, err := icon.FindIcon(websiteURL)
+		icon, err := icon.FindIcon(websiteURL, fetchViaProxy)
 		if err != nil {
 			logger.Debug("CheckFeedIcon: %v (feedID=%d websiteURL=%s)", err, feedID, websiteURL)
 		} else if icon == nil {

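The WithBrowsingParameters change above implies that the feed model gained a FetchViaProxy field (the RefreshFeed hunk reads originalFeed.FetchViaProxy). The model side is not shown in this excerpt; here is a sketch of it, assuming field names that mirror the parameter names (only FetchViaProxy is confirmed by the diff):

package model

// Feed is reduced to the fields relevant here; the real struct has more.
type Feed struct {
	Crawler       bool
	UserAgent     string
	Username      string
	Password      string
	ScraperRules  string
	RewriteRules  string
	FetchViaProxy bool
}

// WithBrowsingParameters stores the fetch-time options on the feed.
// FetchViaProxy is confirmed by the handler diff above; the other
// field names are assumed to mirror the parameter names.
func (f *Feed) WithBrowsingParameters(crawler bool, userAgent, username, password, scraperRules, rewriteRules string, fetchViaProxy bool) {
	f.Crawler = crawler
	f.UserAgent = userAgent
	f.Username = username
	f.Password = password
	f.ScraperRules = scraperRules
	f.RewriteRules = rewriteRules
	f.FetchViaProxy = fetchViaProxy
}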

@@ -21,9 +21,12 @@ import (
 )
 
 // FindIcon try to find the website's icon.
-func FindIcon(websiteURL string) (*model.Icon, error) {
+func FindIcon(websiteURL string, fetchViaProxy bool) (*model.Icon, error) {
 	rootURL := url.RootURL(websiteURL)
 	clt := client.New(rootURL)
+	if fetchViaProxy {
+		clt.WithProxy()
+	}
 	response, err := clt.Get()
 	if err != nil {
 		return nil, fmt.Errorf("unable to download website index page: %v", err)
@@ -43,7 +46,7 @@ func FindIcon(websiteURL string) (*model.Icon, error) {
 	}
 
 	logger.Debug("[FindIcon] Fetching icon => %s", iconURL)
-	icon, err := downloadIcon(iconURL)
+	icon, err := downloadIcon(iconURL, fetchViaProxy)
 	if err != nil {
 		return nil, err
 	}
@@ -86,8 +89,11 @@ func parseDocument(websiteURL string, data io.Reader) (string, error) {
 	return iconURL, nil
 }
 
-func downloadIcon(iconURL string) (*model.Icon, error) {
+func downloadIcon(iconURL string, fetchViaProxy bool) (*model.Icon, error) {
 	clt := client.New(iconURL)
+	if fetchViaProxy {
+		clt.WithProxy()
+	}
 	response, err := clt.Get()
 	if err != nil {
 		return nil, fmt.Errorf("unable to download iconURL: %v", err)


@@ -26,13 +26,18 @@ var (
 )
 
 // FindSubscriptions downloads and try to find one or more subscriptions from an URL.
-func FindSubscriptions(websiteURL, userAgent, username, password string) (Subscriptions, *errors.LocalizedError) {
+func FindSubscriptions(websiteURL, userAgent, username, password string, fetchViaProxy bool) (Subscriptions, *errors.LocalizedError) {
 	websiteURL = findYoutubeChannelFeed(websiteURL)
 	websiteURL = parseYoutubeVideoPage(websiteURL)
 
 	request := client.New(websiteURL)
 	request.WithCredentials(username, password)
 	request.WithUserAgent(userAgent)
+
+	if fetchViaProxy {
+		request.WithProxy()
+	}
+
 	response, err := browser.Exec(request)
 	if err != nil {
 		return nil, err
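
With the new signature, discovery callers must state explicitly whether the lookup should go through the proxy. A hypothetical call site, with an assumed import path and illustrative URL and error handling:

package main

import (
	"fmt"
	"log"

	"miniflux.app/reader/subscription" // import path assumed
)

func main() {
	// Hypothetical call site: discover feeds for a site, routing the
	// discovery request through the configured HTTP proxy.
	subscriptions, lerr := subscription.FindSubscriptions(
		"https://example.org/", // websiteURL (illustrative)
		"",                     // userAgent: fall back to the default
		"",                     // username: no HTTP Basic Auth
		"",                     // password
		true,                   // fetchViaProxy
	)
	if lerr != nil {
		log.Fatal(lerr)
	}
	for _, s := range subscriptions {
		fmt.Println(s.URL) // assumes Subscription exposes a URL field
	}
}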