mirror of
https://github.com/miniflux/v2.git
synced 2025-09-15 18:57:04 +00:00
Add the possibility to override default user agent for each feed
This commit is contained in:
parent
1d335390c2
commit
2538eea177
29 changed files with 129 additions and 22 deletions
|
@ -37,7 +37,7 @@ type Handler struct {
|
|||
}
|
||||
|
||||
// CreateFeed fetch, parse and store a new feed.
|
||||
func (h *Handler) CreateFeed(userID, categoryID int64, url string, crawler bool, username, password string) (*model.Feed, error) {
|
||||
func (h *Handler) CreateFeed(userID, categoryID int64, url string, crawler bool, userAgent, username, password string) (*model.Feed, error) {
|
||||
defer timer.ExecutionTime(time.Now(), fmt.Sprintf("[Handler:CreateFeed] feedUrl=%s", url))
|
||||
|
||||
if !h.store.CategoryExists(userID, categoryID) {
|
||||
|
@ -46,6 +46,7 @@ func (h *Handler) CreateFeed(userID, categoryID int64, url string, crawler bool,
|
|||
|
||||
clt := client.New(url)
|
||||
clt.WithCredentials(username, password)
|
||||
clt.WithUserAgent(userAgent)
|
||||
response, err := clt.Get()
|
||||
if err != nil {
|
||||
if _, ok := err.(*errors.LocalizedError); ok {
|
||||
|
@ -87,6 +88,7 @@ func (h *Handler) CreateFeed(userID, categoryID int64, url string, crawler bool,
|
|||
subscription.FeedURL = response.EffectiveURL
|
||||
subscription.UserID = userID
|
||||
subscription.Crawler = crawler
|
||||
subscription.UserAgent = userAgent
|
||||
subscription.Username = username
|
||||
subscription.Password = password
|
||||
|
||||
|
@ -136,6 +138,7 @@ func (h *Handler) RefreshFeed(userID, feedID int64) error {
|
|||
clt := client.New(originalFeed.FeedURL)
|
||||
clt.WithCredentials(originalFeed.Username, originalFeed.Password)
|
||||
clt.WithCacheHeaders(originalFeed.EtagHeader, originalFeed.LastModifiedHeader)
|
||||
clt.WithUserAgent(originalFeed.UserAgent)
|
||||
response, err := clt.Get()
|
||||
if err != nil {
|
||||
var customErr errors.LocalizedError
|
||||
|
@ -196,6 +199,7 @@ func (h *Handler) RefreshFeed(userID, feedID int64) error {
|
|||
|
||||
feedProcessor := processor.NewFeedProcessor(userID, h.store, subscription)
|
||||
feedProcessor.WithScraperRules(originalFeed.ScraperRules)
|
||||
feedProcessor.WithUserAgent(originalFeed.UserAgent)
|
||||
feedProcessor.WithRewriteRules(originalFeed.RewriteRules)
|
||||
feedProcessor.WithCrawler(originalFeed.Crawler)
|
||||
feedProcessor.Process()
|
||||
|
|
|
@ -21,6 +21,7 @@ type FeedProcessor struct {
|
|||
scraperRules string
|
||||
rewriteRules string
|
||||
crawler bool
|
||||
userAgent string
|
||||
}
|
||||
|
||||
// WithCrawler enables the crawler.
|
||||
|
@ -33,6 +34,11 @@ func (f *FeedProcessor) WithScraperRules(rules string) {
|
|||
f.scraperRules = rules
|
||||
}
|
||||
|
||||
// WithUserAgent sets the User-Agent header for fetching article content.
|
||||
func (f *FeedProcessor) WithUserAgent(userAgent string) {
|
||||
f.userAgent = userAgent
|
||||
}
|
||||
|
||||
// WithRewriteRules adds rewrite rules to the processing.
|
||||
func (f *FeedProcessor) WithRewriteRules(rules string) {
|
||||
f.rewriteRules = rules
|
||||
|
@ -45,7 +51,7 @@ func (f *FeedProcessor) Process() {
|
|||
if f.store.EntryURLExists(f.userID, entry.URL) {
|
||||
logger.Debug(`[FeedProcessor] Do not crawl existing entry URL: "%s"`, entry.URL)
|
||||
} else {
|
||||
content, err := scraper.Fetch(entry.URL, f.scraperRules)
|
||||
content, err := scraper.Fetch(entry.URL, f.scraperRules, f.userAgent)
|
||||
if err != nil {
|
||||
logger.Error("[FeedProcessor] %v", err)
|
||||
} else {
|
||||
|
|
|
@ -19,8 +19,12 @@ import (
|
|||
)
|
||||
|
||||
// Fetch downloads a web page and returns relevant contents.
|
||||
func Fetch(websiteURL, rules string) (string, error) {
|
||||
func Fetch(websiteURL, rules, userAgent string) (string, error) {
|
||||
clt := client.New(websiteURL)
|
||||
if userAgent != "" {
|
||||
clt.WithUserAgent(userAgent)
|
||||
}
|
||||
|
||||
response, err := clt.Get()
|
||||
if err != nil {
|
||||
return "", err
|
||||
|
|
|
@ -29,11 +29,12 @@ var (
|
|||
)
|
||||
|
||||
// FindSubscriptions downloads and tries to find one or more subscriptions from a URL.
|
||||
func FindSubscriptions(websiteURL, username, password string) (Subscriptions, error) {
|
||||
func FindSubscriptions(websiteURL, userAgent, username, password string) (Subscriptions, error) {
|
||||
defer timer.ExecutionTime(time.Now(), fmt.Sprintf("[FindSubscriptions] url=%s", websiteURL))
|
||||
|
||||
clt := client.New(websiteURL)
|
||||
clt.WithCredentials(username, password)
|
||||
clt.WithUserAgent(userAgent)
|
||||
response, err := clt.Get()
|
||||
if err != nil {
|
||||
if _, ok := err.(errors.LocalizedError); ok {
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue