1
0
Fork 0
mirror of https://github.com/miniflux/v2.git synced 2025-08-06 17:41:00 +00:00

Refactor feed creation to allow setting most fields via API

Allow API clients to create disabled feeds or define fields like "ignore_http_cache".
This commit is contained in:
Frédéric Guillot 2021-01-02 16:33:41 -08:00 committed by fguillot
parent ab82c4b300
commit f0610bdd9c
26 changed files with 370 additions and 264 deletions

View file

@@ -7,7 +7,6 @@ package api // import "miniflux.app/api"
import (
"net/http"
"miniflux.app/reader/feed"
"miniflux.app/storage"
"miniflux.app/worker"
@@ -15,8 +14,8 @@ import (
)
// Serve declares API routes for the application.
func Serve(router *mux.Router, store *storage.Storage, pool *worker.Pool, feedHandler *feed.Handler) {
handler := &handler{store, pool, feedHandler}
func Serve(router *mux.Router, store *storage.Storage, pool *worker.Pool) {
handler := &handler{store, pool}
sr := router.PathPrefix("/v1").Subrouter()
middleware := newMiddleware(store)

View file

@@ -11,6 +11,7 @@ import (
"miniflux.app/http/request"
"miniflux.app/http/response/json"
feedHandler "miniflux.app/reader/handler"
)
func (h *handler) createFeed(w http.ResponseWriter, r *http.Request) {
@@ -42,20 +43,22 @@ func (h *handler) createFeed(w http.ResponseWriter, r *http.Request) {
return
}
feed, err := h.feedHandler.CreateFeed(
userID,
feedInfo.CategoryID,
feedInfo.FeedURL,
feedInfo.Crawler,
feedInfo.UserAgent,
feedInfo.Username,
feedInfo.Password,
feedInfo.ScraperRules,
feedInfo.RewriteRules,
feedInfo.BlocklistRules,
feedInfo.KeeplistRules,
feedInfo.FetchViaProxy,
)
feed, err := feedHandler.CreateFeed(h.store, &feedHandler.FeedCreationArgs{
UserID: userID,
CategoryID: feedInfo.CategoryID,
FeedURL: feedInfo.FeedURL,
UserAgent: feedInfo.UserAgent,
Username: feedInfo.Username,
Password: feedInfo.Password,
Crawler: feedInfo.Crawler,
Disabled: feedInfo.Disabled,
IgnoreHTTPCache: feedInfo.IgnoreHTTPCache,
FetchViaProxy: feedInfo.FetchViaProxy,
ScraperRules: feedInfo.ScraperRules,
RewriteRules: feedInfo.RewriteRules,
BlocklistRules: feedInfo.BlocklistRules,
KeeplistRules: feedInfo.KeeplistRules,
})
if err != nil {
json.ServerError(w, r, err)
return
@@ -73,7 +76,7 @@ func (h *handler) refreshFeed(w http.ResponseWriter, r *http.Request) {
return
}
err := h.feedHandler.RefreshFeed(userID, feedID)
err := feedHandler.RefreshFeed(h.store, userID, feedID)
if err != nil {
json.ServerError(w, r, err)
return

View file

@@ -5,13 +5,11 @@
package api // import "miniflux.app/api"
import (
"miniflux.app/reader/feed"
"miniflux.app/storage"
"miniflux.app/worker"
)
type handler struct {
store *storage.Storage
pool *worker.Pool
feedHandler *feed.Handler
store *storage.Storage
pool *worker.Pool
}

View file

@@ -48,17 +48,19 @@ type feedCreationResponse struct {
}
type feedCreationRequest struct {
FeedURL string `json:"feed_url"`
CategoryID int64 `json:"category_id"`
UserAgent string `json:"user_agent"`
Username string `json:"username"`
Password string `json:"password"`
Crawler bool `json:"crawler"`
FetchViaProxy bool `json:"fetch_via_proxy"`
ScraperRules string `json:"scraper_rules"`
RewriteRules string `json:"rewrite_rules"`
BlocklistRules string `json:"blocklist_rules"`
KeeplistRules string `json:"keeplist_rules"`
FeedURL string `json:"feed_url"`
CategoryID int64 `json:"category_id"`
UserAgent string `json:"user_agent"`
Username string `json:"username"`
Password string `json:"password"`
Crawler bool `json:"crawler"`
Disabled bool `json:"disabled"`
IgnoreHTTPCache bool `json:"ignore_http_cache"`
FetchViaProxy bool `json:"fetch_via_proxy"`
ScraperRules string `json:"scraper_rules"`
RewriteRules string `json:"rewrite_rules"`
BlocklistRules string `json:"blocklist_rules"`
KeeplistRules string `json:"keeplist_rules"`
}
func decodeFeedCreationRequest(r io.ReadCloser) (*feedCreationRequest, error) {