Refactor feed creation to allow setting most fields via API

Allow API clients to create disabled feeds or define fields like "ignore_http_cache".
Frédéric Guillot 2021-01-02 16:33:41 -08:00 committed by fguillot
parent ab82c4b300
commit f0610bdd9c
26 changed files with 370 additions and 264 deletions
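
For illustration, a minimal sketch of what this change enables through the bundled Go client (the base URL, credentials, and the miniflux.app/client import path are assumptions; the request type is defined further down in this diff):

package main

import (
	"fmt"
	"log"

	miniflux "miniflux.app/client"
)

func main() {
	// Placeholder endpoint and credentials.
	client := miniflux.New("https://miniflux.example.org", "admin", "secret")

	// Fields like Disabled and IgnoreHTTPCache can now be set at
	// creation time instead of requiring a follow-up UpdateFeed call.
	feedID, err := client.CreateFeed(&miniflux.FeedCreationRequest{
		FeedURL:         "https://example.org/feed.xml",
		CategoryID:      1,
		Disabled:        true,
		IgnoreHTTPCache: true,
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("created feed", feedID)
}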

View File

@ -7,7 +7,6 @@ package api // import "miniflux.app/api"
import (
"net/http"
"miniflux.app/reader/feed"
"miniflux.app/storage"
"miniflux.app/worker"
@ -15,8 +14,8 @@ import (
)
// Serve declares API routes for the application.
func Serve(router *mux.Router, store *storage.Storage, pool *worker.Pool, feedHandler *feed.Handler) {
handler := &handler{store, pool, feedHandler}
func Serve(router *mux.Router, store *storage.Storage, pool *worker.Pool) {
handler := &handler{store, pool}
sr := router.PathPrefix("/v1").Subrouter()
middleware := newMiddleware(store)

View File

@ -11,6 +11,7 @@ import (
"miniflux.app/http/request"
"miniflux.app/http/response/json"
feedHandler "miniflux.app/reader/handler"
)
func (h *handler) createFeed(w http.ResponseWriter, r *http.Request) {
@ -42,20 +43,22 @@ func (h *handler) createFeed(w http.ResponseWriter, r *http.Request) {
return
}
feed, err := h.feedHandler.CreateFeed(
userID,
feedInfo.CategoryID,
feedInfo.FeedURL,
feedInfo.Crawler,
feedInfo.UserAgent,
feedInfo.Username,
feedInfo.Password,
feedInfo.ScraperRules,
feedInfo.RewriteRules,
feedInfo.BlocklistRules,
feedInfo.KeeplistRules,
feedInfo.FetchViaProxy,
)
feed, err := feedHandler.CreateFeed(h.store, &feedHandler.FeedCreationArgs{
UserID: userID,
CategoryID: feedInfo.CategoryID,
FeedURL: feedInfo.FeedURL,
UserAgent: feedInfo.UserAgent,
Username: feedInfo.Username,
Password: feedInfo.Password,
Crawler: feedInfo.Crawler,
Disabled: feedInfo.Disabled,
IgnoreHTTPCache: feedInfo.IgnoreHTTPCache,
FetchViaProxy: feedInfo.FetchViaProxy,
ScraperRules: feedInfo.ScraperRules,
RewriteRules: feedInfo.RewriteRules,
BlocklistRules: feedInfo.BlocklistRules,
KeeplistRules: feedInfo.KeeplistRules,
})
if err != nil {
json.ServerError(w, r, err)
return
@ -73,7 +76,7 @@ func (h *handler) refreshFeed(w http.ResponseWriter, r *http.Request) {
return
}
err := h.feedHandler.RefreshFeed(userID, feedID)
err := feedHandler.RefreshFeed(h.store, userID, feedID)
if err != nil {
json.ServerError(w, r, err)
return

View File

@ -5,13 +5,11 @@
package api // import "miniflux.app/api"
import (
"miniflux.app/reader/feed"
"miniflux.app/storage"
"miniflux.app/worker"
)
type handler struct {
store *storage.Storage
pool *worker.Pool
feedHandler *feed.Handler
store *storage.Storage
pool *worker.Pool
}

View File

@ -48,17 +48,19 @@ type feedCreationResponse struct {
}
type feedCreationRequest struct {
FeedURL string `json:"feed_url"`
CategoryID int64 `json:"category_id"`
UserAgent string `json:"user_agent"`
Username string `json:"username"`
Password string `json:"password"`
Crawler bool `json:"crawler"`
FetchViaProxy bool `json:"fetch_via_proxy"`
ScraperRules string `json:"scraper_rules"`
RewriteRules string `json:"rewrite_rules"`
BlocklistRules string `json:"blocklist_rules"`
KeeplistRules string `json:"keeplist_rules"`
FeedURL string `json:"feed_url"`
CategoryID int64 `json:"category_id"`
UserAgent string `json:"user_agent"`
Username string `json:"username"`
Password string `json:"password"`
Crawler bool `json:"crawler"`
Disabled bool `json:"disabled"`
IgnoreHTTPCache bool `json:"ignore_http_cache"`
FetchViaProxy bool `json:"fetch_via_proxy"`
ScraperRules string `json:"scraper_rules"`
RewriteRules string `json:"rewrite_rules"`
BlocklistRules string `json:"blocklist_rules"`
KeeplistRules string `json:"keeplist_rules"`
}
func decodeFeedCreationRequest(r io.ReadCloser) (*feedCreationRequest, error) {
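
As a rough illustration of the wire format, a POST /v1/feeds body exercising the new flags might look like the following (URL and category ID are placeholders; field names follow the json tags declared above):

// Hypothetical request body for POST /v1/feeds; omitted boolean
// fields default to false, matching the struct's zero values.
const exampleFeedCreationBody = `{
	"feed_url": "https://example.org/feed.xml",
	"category_id": 1,
	"disabled": true,
	"ignore_http_cache": true
}`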

View File

@ -139,7 +139,9 @@ func Parse() {
// Run migrations and start the daemon.
if config.Opts.RunMigrations() {
database.Migrate(db)
if err := database.Migrate(db); err != nil {
logger.Fatal(`%v`, err)
}
}
if err := database.IsSchemaUpToDate(db); err != nil {

View File

@ -15,7 +15,6 @@ import (
"miniflux.app/config"
"miniflux.app/logger"
"miniflux.app/metric"
"miniflux.app/reader/feed"
"miniflux.app/service/httpd"
"miniflux.app/service/scheduler"
"miniflux.app/storage"
@ -29,8 +28,7 @@ func startDaemon(store *storage.Storage) {
signal.Notify(stop, os.Interrupt)
signal.Notify(stop, syscall.SIGTERM)
feedHandler := feed.NewFeedHandler(store)
pool := worker.NewPool(feedHandler, config.Opts.WorkerPoolSize())
pool := worker.NewPool(store, config.Opts.WorkerPoolSize())
if config.Opts.HasSchedulerService() && !config.Opts.HasMaintenanceMode() {
scheduler.Serve(store, pool)
@ -38,7 +36,7 @@ func startDaemon(store *storage.Storage) {
var httpServer *http.Server
if config.Opts.HasHTTPService() {
httpServer = httpd.Serve(store, pool, feedHandler)
httpServer = httpd.Serve(store, pool)
}
if config.Opts.HasMetricsCollector() {

View File

@ -116,7 +116,7 @@ func (c *Client) CreateUser(username, password string, isAdmin bool) (*User, err
}
// UpdateUser updates a user in the system.
func (c *Client) UpdateUser(userID int64, userChanges *UserModification) (*User, error) {
func (c *Client) UpdateUser(userID int64, userChanges *UserModificationRequest) (*User, error) {
body, err := c.request.Put(fmt.Sprintf("/v1/users/%d", userID), userChanges)
if err != nil {
return nil, err
@ -285,11 +285,8 @@ func (c *Client) Feed(feedID int64) (*Feed, error) {
}
// CreateFeed creates a new feed.
func (c *Client) CreateFeed(url string, categoryID int64) (int64, error) {
body, err := c.request.Post("/v1/feeds", map[string]interface{}{
"feed_url": url,
"category_id": categoryID,
})
func (c *Client) CreateFeed(feedCreationRequest *FeedCreationRequest) (int64, error) {
body, err := c.request.Post("/v1/feeds", feedCreationRequest)
if err != nil {
return 0, err
}
@ -309,7 +306,7 @@ func (c *Client) CreateFeed(url string, categoryID int64) (int64, error) {
}
// UpdateFeed updates a feed.
func (c *Client) UpdateFeed(feedID int64, feedChanges *FeedModification) (*Feed, error) {
func (c *Client) UpdateFeed(feedID int64, feedChanges *FeedModificationRequest) (*Feed, error) {
body, err := c.request.Put(fmt.Sprintf("/v1/feeds/%d", feedID), feedChanges)
if err != nil {
return nil, err

View File

@ -49,8 +49,8 @@ type UserCreationRequest struct {
OpenIDConnectID string `json:"openid_connect_id"`
}
// UserModification represents the request to update a user.
type UserModification struct {
// UserModificationRequest represents the request to update a user.
type UserModificationRequest struct {
Username *string `json:"username"`
Password *string `json:"password"`
IsAdmin *bool `json:"is_admin"`
@ -110,6 +110,9 @@ type Feed struct {
LastModifiedHeader string `json:"last_modified_header,omitempty"`
ParsingErrorMsg string `json:"parsing_error_message,omitempty"`
ParsingErrorCount int `json:"parsing_error_count,omitempty"`
Disabled bool `json:"disabled"`
IgnoreHTTPCache bool `json:"ignore_http_cache"`
FetchViaProxy bool `json:"fetch_via_proxy"`
ScraperRules string `json:"scraper_rules"`
RewriteRules string `json:"rewrite_rules"`
BlocklistRules string `json:"blocklist_rules"`
@ -121,8 +124,25 @@ type Feed struct {
Category *Category `json:"category,omitempty"`
}
// FeedModification represents changes for a feed.
type FeedModification struct {
// FeedCreationRequest represents the request to create a feed.
type FeedCreationRequest struct {
FeedURL string `json:"feed_url"`
CategoryID int64 `json:"category_id"`
UserAgent string `json:"user_agent"`
Username string `json:"username"`
Password string `json:"password"`
Crawler bool `json:"crawler"`
Disabled bool `json:"disabled"`
IgnoreHTTPCache bool `json:"ignore_http_cache"`
FetchViaProxy bool `json:"fetch_via_proxy"`
ScraperRules string `json:"scraper_rules"`
RewriteRules string `json:"rewrite_rules"`
BlocklistRules string `json:"blocklist_rules"`
KeeplistRules string `json:"keeplist_rules"`
}
// FeedModificationRequest represents the request to update a feed.
type FeedModificationRequest struct {
FeedURL *string `json:"feed_url"`
SiteURL *string `json:"site_url"`
Title *string `json:"title"`
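
Because of the rename, client code that updates feeds needs a mechanical change; a sketch (the title value is a placeholder):

title := "My Renamed Feed"
// The type was renamed from FeedModification to FeedModificationRequest;
// the fields themselves are unchanged, so only the type name moves.
updatedFeed, err := client.UpdateFeed(feedID, &miniflux.FeedModificationRequest{
	Title: &title,
})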

View File

@ -10,8 +10,6 @@ import (
// Postgresql driver import
_ "github.com/lib/pq"
"miniflux.app/logger"
)
// NewConnectionPool configures the database connection pool.
@ -28,7 +26,7 @@ func NewConnectionPool(dsn string, minConnections, maxConnections int) (*sql.DB,
}
// Migrate executes database migrations.
func Migrate(db *sql.DB) {
func Migrate(db *sql.DB) error {
var currentVersion int
db.QueryRow(`SELECT version FROM schema_version`).Scan(&currentVersion)
@ -41,28 +39,30 @@ func Migrate(db *sql.DB) {
tx, err := db.Begin()
if err != nil {
logger.Fatal("[Migration v%d] %v", newVersion, err)
return fmt.Errorf("[Migration v%d] %v", newVersion, err)
}
if err := migrations[version](tx); err != nil {
tx.Rollback()
logger.Fatal("[Migration v%d] %v", newVersion, err)
return fmt.Errorf("[Migration v%d] %v", newVersion, err)
}
if _, err := tx.Exec(`DELETE FROM schema_version`); err != nil {
tx.Rollback()
logger.Fatal("[Migration v%d] %v", newVersion, err)
return fmt.Errorf("[Migration v%d] %v", newVersion, err)
}
if _, err := tx.Exec(`INSERT INTO schema_version (version) VALUES ($1)`, newVersion); err != nil {
tx.Rollback()
logger.Fatal("[Migration v%d] %v", newVersion, err)
return fmt.Errorf("[Migration v%d] %v", newVersion, err)
}
if err := tx.Commit(); err != nil {
logger.Fatal("[Migration v%d] %v", newVersion, err)
return fmt.Errorf("[Migration v%d] %v", newVersion, err)
}
}
return nil
}
// IsSchemaUpToDate checks if the database schema is up to date.

View File

@ -73,19 +73,6 @@ func (f *Feed) WithCategoryID(categoryID int64) {
f.Category = &Category{ID: categoryID}
}
// WithBrowsingParameters defines browsing parameters.
func (f *Feed) WithBrowsingParameters(crawler bool, userAgent, username, password, scraperRules, rewriteRules, blocklistRules, keeplistRules string, fetchViaProxy bool) {
f.Crawler = crawler
f.UserAgent = userAgent
f.Username = username
f.Password = password
f.ScraperRules = scraperRules
f.RewriteRules = rewriteRules
f.FetchViaProxy = fetchViaProxy
f.BlocklistRules = blocklistRules
f.KeeplistRules = keeplistRules
}
// WithError adds a new error message and increments the error counter.
func (f *Feed) WithError(message string) {
f.ParsingErrorCount++

View File

@ -46,57 +46,6 @@ func TestFeedCategorySetter(t *testing.T) {
}
}
func TestFeedBrowsingParams(t *testing.T) {
feed := &Feed{}
feed.WithBrowsingParameters(
true,
"Custom User Agent",
"Username",
"Secret",
"Scraper Rule",
"Rewrite Rule",
"Block Rule",
"Allow Rule",
true,
)
if !feed.Crawler {
t.Error(`The crawler must be activated`)
}
if feed.UserAgent != "Custom User Agent" {
t.Error(`The user agent must be set`)
}
if feed.Username != "Username" {
t.Error(`The username must be set`)
}
if feed.Password != "Secret" {
t.Error(`The password must be set`)
}
if feed.ScraperRules != "Scraper Rule" {
t.Errorf(`The scraper rules must be set`)
}
if feed.RewriteRules != "Rewrite Rule" {
t.Errorf(`The rewrite rules must be set`)
}
if feed.BlocklistRules != "Block Rule" {
t.Errorf(`The block list rules must be set`)
}
if feed.KeeplistRules != "Allow Rule" {
t.Errorf(`The keep list rules must be set`)
}
if !feed.FetchViaProxy {
t.Errorf(`The fetch via proxy is no set`)
}
}
func TestFeedErrorCounter(t *testing.T) {
feed := &Feed{}
feed.WithError("Some Error")

View File

@ -1,10 +0,0 @@
// Copyright 2018 Frédéric Guillot. All rights reserved.
// Use of this source code is governed by the Apache 2.0
// license that can be found in the LICENSE file.
/*
Package feed handles feed updates and creation.
*/
package feed // import "miniflux.app/reader/feed"

View File

@ -2,7 +2,7 @@
// Use of this source code is governed by the Apache 2.0
// license that can be found in the LICENSE file.
package feed // import "miniflux.app/reader/feed"
package handler // import "miniflux.app/reader/handler"
import (
"fmt"
@ -28,24 +28,37 @@ var (
errCategoryNotFound = "Category not found for this user"
)
// Handler contains all the logic to create and refresh feeds.
type Handler struct {
store *storage.Storage
// FeedCreationArgs represents the arguments required to create a new feed.
type FeedCreationArgs struct {
UserID int64
CategoryID int64
FeedURL string
UserAgent string
Username string
Password string
Crawler bool
Disabled bool
IgnoreHTTPCache bool
FetchViaProxy bool
ScraperRules string
RewriteRules string
BlocklistRules string
KeeplistRules string
}
// CreateFeed fetches, parses, and stores a new feed.
func (h *Handler) CreateFeed(userID, categoryID int64, url string, crawler bool, userAgent, username, password, scraperRules, rewriteRules, blocklistRules, keeplistRules string, fetchViaProxy bool) (*model.Feed, error) {
defer timer.ExecutionTime(time.Now(), fmt.Sprintf("[Handler:CreateFeed] feedUrl=%s", url))
func CreateFeed(store *storage.Storage, args *FeedCreationArgs) (*model.Feed, error) {
defer timer.ExecutionTime(time.Now(), fmt.Sprintf("[CreateFeed] FeedURL=%s", args.FeedURL))
if !h.store.CategoryExists(userID, categoryID) {
if !store.CategoryExists(args.UserID, args.CategoryID) {
return nil, errors.NewLocalizedError(errCategoryNotFound)
}
request := client.NewClientWithConfig(url, config.Opts)
request.WithCredentials(username, password)
request.WithUserAgent(userAgent)
request := client.NewClientWithConfig(args.FeedURL, config.Opts)
request.WithCredentials(args.Username, args.Password)
request.WithUserAgent(args.UserAgent)
if fetchViaProxy {
if args.FetchViaProxy {
request.WithProxy()
}
@ -54,7 +67,7 @@ func (h *Handler) CreateFeed(userID, categoryID int64, url string, crawler bool,
return nil, requestErr
}
if h.store.FeedURLExists(userID, response.EffectiveURL) {
if store.FeedURLExists(args.UserID, response.EffectiveURL) {
return nil, errors.NewLocalizedError(errDuplicate, response.EffectiveURL)
}
@ -63,31 +76,41 @@ func (h *Handler) CreateFeed(userID, categoryID int64, url string, crawler bool,
return nil, parseErr
}
subscription.UserID = userID
subscription.WithCategoryID(categoryID)
subscription.WithBrowsingParameters(crawler, userAgent, username, password, scraperRules, rewriteRules, blocklistRules, keeplistRules, fetchViaProxy)
subscription.UserID = args.UserID
subscription.UserAgent = args.UserAgent
subscription.Username = args.Username
subscription.Password = args.Password
subscription.Crawler = args.Crawler
subscription.Disabled = args.Disabled
subscription.IgnoreHTTPCache = args.IgnoreHTTPCache
subscription.FetchViaProxy = args.FetchViaProxy
subscription.ScraperRules = args.ScraperRules
subscription.RewriteRules = args.RewriteRules
subscription.BlocklistRules = args.BlocklistRules
subscription.KeeplistRules = args.KeeplistRules
subscription.WithCategoryID(args.CategoryID)
subscription.WithClientResponse(response)
subscription.CheckedNow()
processor.ProcessFeedEntries(h.store, subscription)
processor.ProcessFeedEntries(store, subscription)
if storeErr := h.store.CreateFeed(subscription); storeErr != nil {
if storeErr := store.CreateFeed(subscription); storeErr != nil {
return nil, storeErr
}
logger.Debug("[Handler:CreateFeed] Feed saved with ID: %d", subscription.ID)
logger.Debug("[CreateFeed] Feed saved with ID: %d", subscription.ID)
checkFeedIcon(h.store, subscription.ID, subscription.SiteURL, fetchViaProxy)
checkFeedIcon(store, subscription.ID, subscription.SiteURL, args.FetchViaProxy)
return subscription, nil
}
// RefreshFeed refreshes a feed.
func (h *Handler) RefreshFeed(userID, feedID int64) error {
defer timer.ExecutionTime(time.Now(), fmt.Sprintf("[Handler:RefreshFeed] feedID=%d", feedID))
userLanguage := h.store.UserLanguage(userID)
func RefreshFeed(store *storage.Storage, userID, feedID int64) error {
defer timer.ExecutionTime(time.Now(), fmt.Sprintf("[RefreshFeed] feedID=%d", feedID))
userLanguage := store.UserLanguage(userID)
printer := locale.NewPrinter(userLanguage)
originalFeed, storeErr := h.store.FeedByID(userID, feedID)
originalFeed, storeErr := store.FeedByID(userID, feedID)
if storeErr != nil {
return storeErr
}
@ -99,7 +122,7 @@ func (h *Handler) RefreshFeed(userID, feedID int64) error {
weeklyEntryCount := 0
if config.Opts.PollingScheduler() == model.SchedulerEntryFrequency {
var weeklyCountErr error
weeklyEntryCount, weeklyCountErr = h.store.WeeklyFeedEntryCount(userID, feedID)
weeklyEntryCount, weeklyCountErr = store.WeeklyFeedEntryCount(userID, feedID)
if weeklyCountErr != nil {
return weeklyCountErr
}
@ -123,71 +146,66 @@ func (h *Handler) RefreshFeed(userID, feedID int64) error {
response, requestErr := browser.Exec(request)
if requestErr != nil {
originalFeed.WithError(requestErr.Localize(printer))
h.store.UpdateFeedError(originalFeed)
store.UpdateFeedError(originalFeed)
return requestErr
}
if h.store.AnotherFeedURLExists(userID, originalFeed.ID, response.EffectiveURL) {
if store.AnotherFeedURLExists(userID, originalFeed.ID, response.EffectiveURL) {
storeErr := errors.NewLocalizedError(errDuplicate, response.EffectiveURL)
originalFeed.WithError(storeErr.Error())
h.store.UpdateFeedError(originalFeed)
store.UpdateFeedError(originalFeed)
return storeErr
}
if originalFeed.IgnoreHTTPCache || response.IsModified(originalFeed.EtagHeader, originalFeed.LastModifiedHeader) {
logger.Debug("[Handler:RefreshFeed] Feed #%d has been modified", feedID)
logger.Debug("[RefreshFeed] Feed #%d has been modified", feedID)
updatedFeed, parseErr := parser.ParseFeed(response.EffectiveURL, response.BodyAsString())
if parseErr != nil {
originalFeed.WithError(parseErr.Localize(printer))
h.store.UpdateFeedError(originalFeed)
store.UpdateFeedError(originalFeed)
return parseErr
}
originalFeed.Entries = updatedFeed.Entries
processor.ProcessFeedEntries(h.store, originalFeed)
processor.ProcessFeedEntries(store, originalFeed)
// We don't update existing entries when the crawler is enabled (we crawl only entries that don't exist yet).
if storeErr := h.store.RefreshFeedEntries(originalFeed.UserID, originalFeed.ID, originalFeed.Entries, !originalFeed.Crawler); storeErr != nil {
if storeErr := store.RefreshFeedEntries(originalFeed.UserID, originalFeed.ID, originalFeed.Entries, !originalFeed.Crawler); storeErr != nil {
originalFeed.WithError(storeErr.Error())
h.store.UpdateFeedError(originalFeed)
store.UpdateFeedError(originalFeed)
return storeErr
}
// We update caching headers only if the feed has been modified,
// because some websites don't return the same headers when replying with a 304.
originalFeed.WithClientResponse(response)
checkFeedIcon(h.store, originalFeed.ID, originalFeed.SiteURL, originalFeed.FetchViaProxy)
checkFeedIcon(store, originalFeed.ID, originalFeed.SiteURL, originalFeed.FetchViaProxy)
} else {
logger.Debug("[Handler:RefreshFeed] Feed #%d not modified", feedID)
logger.Debug("[RefreshFeed] Feed #%d not modified", feedID)
}
originalFeed.ResetErrorCounter()
if storeErr := h.store.UpdateFeed(originalFeed); storeErr != nil {
if storeErr := store.UpdateFeed(originalFeed); storeErr != nil {
originalFeed.WithError(storeErr.Error())
h.store.UpdateFeedError(originalFeed)
store.UpdateFeedError(originalFeed)
return storeErr
}
return nil
}
// NewFeedHandler returns a feed handler.
func NewFeedHandler(store *storage.Storage) *Handler {
return &Handler{store}
}
func checkFeedIcon(store *storage.Storage, feedID int64, websiteURL string, fetchViaProxy bool) {
if !store.HasIcon(feedID) {
icon, err := icon.FindIcon(websiteURL, fetchViaProxy)
if err != nil {
logger.Debug("CheckFeedIcon: %v (feedID=%d websiteURL=%s)", err, feedID, websiteURL)
logger.Debug(`[CheckFeedIcon] %v (feedID=%d websiteURL=%s)`, err, feedID, websiteURL)
} else if icon == nil {
logger.Debug("CheckFeedIcon: No icon found (feedID=%d websiteURL=%s)", feedID, websiteURL)
logger.Debug(`[CheckFeedIcon] No icon found (feedID=%d websiteURL=%s)`, feedID, websiteURL)
} else {
if err := store.CreateFeedIcon(feedID, icon); err != nil {
logger.Debug("CheckFeedIcon: %v (feedID=%d websiteURL=%s)", err, feedID, websiteURL)
logger.Debug(`[CheckFeedIcon] %v (feedID=%d websiteURL=%s)`, err, feedID, websiteURL)
}
}
}
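
With the Handler struct gone, callers reach the same logic through package-level functions, passing the storage handle explicitly. A condensed sketch mirroring the call sites elsewhere in this diff (identifiers are placeholders; feedHandler is the miniflux.app/reader/handler import alias used above):

// store is a *storage.Storage; userID and categoryID are placeholders.
feed, err := feedHandler.CreateFeed(store, &feedHandler.FeedCreationArgs{
	UserID:     userID,
	CategoryID: categoryID,
	FeedURL:    "https://example.org/feed.xml",
	Disabled:   true,
})
if err != nil {
	return err
}
logger.Debug("[CreateFeed] feed #%d created", feed.ID)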

View File

@ -18,7 +18,6 @@ import (
"miniflux.app/fever"
"miniflux.app/http/request"
"miniflux.app/logger"
"miniflux.app/reader/feed"
"miniflux.app/storage"
"miniflux.app/ui"
"miniflux.app/version"
@ -30,7 +29,7 @@ import (
)
// Serve starts a new HTTP server.
func Serve(store *storage.Storage, pool *worker.Pool, feedHandler *feed.Handler) *http.Server {
func Serve(store *storage.Storage, pool *worker.Pool) *http.Server {
certFile := config.Opts.CertFile()
keyFile := config.Opts.CertKeyFile()
certDomain := config.Opts.CertDomain()
@ -40,7 +39,7 @@ func Serve(store *storage.Storage, pool *worker.Pool, feedHandler *feed.Handler)
ReadTimeout: 300 * time.Second,
WriteTimeout: 300 * time.Second,
IdleTimeout: 300 * time.Second,
Handler: setupHandler(store, feedHandler, pool),
Handler: setupHandler(store, pool),
}
switch {
@ -164,7 +163,7 @@ func startHTTPServer(server *http.Server) {
}()
}
func setupHandler(store *storage.Storage, feedHandler *feed.Handler, pool *worker.Pool) *mux.Router {
func setupHandler(store *storage.Storage, pool *worker.Pool) *mux.Router {
router := mux.NewRouter()
if config.Opts.BasePath() != "" {
@ -182,8 +181,8 @@ func setupHandler(store *storage.Storage, feedHandler *feed.Handler, pool *worke
router.Use(middleware)
fever.Serve(router, store)
api.Serve(router, store, pool, feedHandler)
ui.Serve(router, store, pool, feedHandler)
api.Serve(router, store, pool)
ui.Serve(router, store, pool)
router.HandleFunc("/healthcheck", func(w http.ResponseWriter, r *http.Request) {
w.Write([]byte("OK"))

View File

@ -465,10 +465,11 @@ func (s *Storage) CreateFeed(feed *model.Feed) error {
rewrite_rules,
blocklist_rules,
keeplist_rules,
ignore_http_cache,
fetch_via_proxy
)
VALUES
($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17)
($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18)
RETURNING
id
`
@ -490,6 +491,7 @@ func (s *Storage) CreateFeed(feed *model.Feed) error {
feed.RewriteRules,
feed.BlocklistRules,
feed.KeeplistRules,
feed.IgnoreHTTPCache,
feed.FetchViaProxy,
).Scan(&feed.ID)
if err != nil {

View File

@ -100,7 +100,10 @@ func TestFilterEntriesByCategory(t *testing.T) {
t.Fatal(err)
}
feedID, err := client.CreateFeed(testFeedURL, category.ID)
feedID, err := client.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testFeedURL,
CategoryID: category.ID,
})
if err != nil {
t.Fatal(err)
}
@ -134,7 +137,10 @@ func TestFilterEntriesByStatuses(t *testing.T) {
t.Fatal(err)
}
feedID, err := client.CreateFeed(testFeedURL, category.ID)
feedID, err := client.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testFeedURL,
CategoryID: category.ID,
})
if err != nil {
t.Fatal(err)
}
@ -181,7 +187,10 @@ func TestSearchEntries(t *testing.T) {
t.Fatal(err)
}
feedID, err := client.CreateFeed(testFeedURL, categories[0].ID)
feedID, err := client.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testFeedURL,
CategoryID: categories[0].ID,
})
if err != nil {
t.Fatal(err)
}

View File

@ -26,7 +26,10 @@ func TestCannotCreateDuplicatedFeed(t *testing.T) {
client := createClient(t)
feed, category := createFeed(t, client)
_, err := client.CreateFeed(feed.FeedURL, category.ID)
_, err := client.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: feed.FeedURL,
CategoryID: category.ID,
})
if err == nil {
t.Fatal(`Duplicated feeds should not be allowed`)
}
@ -34,19 +37,145 @@ func TestCannotCreateDuplicatedFeed(t *testing.T) {
func TestCreateFeedWithInexistingCategory(t *testing.T) {
client := createClient(t)
_, err := client.CreateFeed(testFeedURL, -1)
_, err := client.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testFeedURL,
CategoryID: -1,
})
if err == nil {
t.Fatal(`Feeds should not be created with inexisting category`)
}
}
func TestCreateDisabledFeed(t *testing.T) {
client := createClient(t)
categories, err := client.Categories()
if err != nil {
t.Fatal(err)
}
feedID, err := client.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testFeedURL,
CategoryID: categories[0].ID,
Disabled: true,
})
if err != nil {
t.Fatal(err)
}
if feedID == 0 {
t.Fatalf(`Invalid feed ID, got %q`, feedID)
}
feed, err := client.Feed(feedID)
if err != nil {
t.Fatal(err)
}
if !feed.Disabled {
t.Error(`The feed should be disabled`)
}
}
func TestCreateFeedWithDisabledCache(t *testing.T) {
client := createClient(t)
categories, err := client.Categories()
if err != nil {
t.Fatal(err)
}
feedID, err := client.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testFeedURL,
CategoryID: categories[0].ID,
IgnoreHTTPCache: true,
})
if err != nil {
t.Fatal(err)
}
if feedID == 0 {
t.Fatalf(`Invalid feed ID, got %q`, feedID)
}
feed, err := client.Feed(feedID)
if err != nil {
t.Fatal(err)
}
if !feed.IgnoreHTTPCache {
t.Error(`The feed should be ignoring HTTP cache`)
}
}
func TestCreateFeedWithCrawlerEnabled(t *testing.T) {
client := createClient(t)
categories, err := client.Categories()
if err != nil {
t.Fatal(err)
}
feedID, err := client.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testFeedURL,
CategoryID: categories[0].ID,
Crawler: true,
})
if err != nil {
t.Fatal(err)
}
if feedID == 0 {
t.Fatalf(`Invalid feed ID, got %q`, feedID)
}
feed, err := client.Feed(feedID)
if err != nil {
t.Fatal(err)
}
if !feed.Crawler {
t.Error(`The feed should have the scraper enabled`)
}
}
func TestCreateFeedWithScraperRule(t *testing.T) {
client := createClient(t)
categories, err := client.Categories()
if err != nil {
t.Fatal(err)
}
feedID, err := client.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testFeedURL,
CategoryID: categories[0].ID,
ScraperRules: "article",
})
if err != nil {
t.Fatal(err)
}
if feedID == 0 {
t.Fatalf(`Invalid feed ID, got %q`, feedID)
}
feed, err := client.Feed(feedID)
if err != nil {
t.Fatal(err)
}
if feed.ScraperRules != "article" {
t.Error(`The feed should have the custom scraper rule saved`)
}
}
func TestUpdateFeedURL(t *testing.T) {
client := createClient(t)
feed, _ := createFeed(t, client)
url := "test"
updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModification{FeedURL: &url})
updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{FeedURL: &url})
if err != nil {
t.Fatal(err)
}
@ -56,7 +185,7 @@ func TestUpdateFeedURL(t *testing.T) {
}
url = ""
updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModification{FeedURL: &url})
updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{FeedURL: &url})
if err != nil {
t.Fatal(err)
}
@ -71,7 +200,7 @@ func TestUpdateFeedSiteURL(t *testing.T) {
feed, _ := createFeed(t, client)
url := "test"
updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModification{SiteURL: &url})
updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{SiteURL: &url})
if err != nil {
t.Fatal(err)
}
@ -81,7 +210,7 @@ func TestUpdateFeedSiteURL(t *testing.T) {
}
url = ""
updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModification{SiteURL: &url})
updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{SiteURL: &url})
if err != nil {
t.Fatal(err)
}
@ -96,7 +225,7 @@ func TestUpdateFeedTitle(t *testing.T) {
feed, _ := createFeed(t, client)
newTitle := "My new feed"
updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModification{Title: &newTitle})
updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{Title: &newTitle})
if err != nil {
t.Fatal(err)
}
@ -106,7 +235,7 @@ func TestUpdateFeedTitle(t *testing.T) {
}
newTitle = ""
updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModification{Title: &newTitle})
updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{Title: &newTitle})
if err != nil {
t.Fatal(err)
}
@ -121,7 +250,7 @@ func TestUpdateFeedCrawler(t *testing.T) {
feed, _ := createFeed(t, client)
crawler := true
updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModification{Crawler: &crawler})
updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{Crawler: &crawler})
if err != nil {
t.Fatal(err)
}
@ -135,7 +264,7 @@ func TestUpdateFeedCrawler(t *testing.T) {
}
crawler = false
updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModification{Crawler: &crawler})
updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{Crawler: &crawler})
if err != nil {
t.Fatal(err)
}
@ -150,7 +279,7 @@ func TestUpdateFeedScraperRules(t *testing.T) {
feed, _ := createFeed(t, client)
scraperRules := "test"
updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModification{ScraperRules: &scraperRules})
updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{ScraperRules: &scraperRules})
if err != nil {
t.Fatal(err)
}
@ -160,7 +289,7 @@ func TestUpdateFeedScraperRules(t *testing.T) {
}
scraperRules = ""
updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModification{ScraperRules: &scraperRules})
updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{ScraperRules: &scraperRules})
if err != nil {
t.Fatal(err)
}
@ -175,7 +304,7 @@ func TestUpdateFeedRewriteRules(t *testing.T) {
feed, _ := createFeed(t, client)
rewriteRules := "test"
updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModification{RewriteRules: &rewriteRules})
updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{RewriteRules: &rewriteRules})
if err != nil {
t.Fatal(err)
}
@ -185,7 +314,7 @@ func TestUpdateFeedRewriteRules(t *testing.T) {
}
rewriteRules = ""
updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModification{RewriteRules: &rewriteRules})
updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{RewriteRules: &rewriteRules})
if err != nil {
t.Fatal(err)
}
@ -200,7 +329,7 @@ func TestUpdateFeedKeeplistRules(t *testing.T) {
feed, _ := createFeed(t, client)
keeplistRules := "test"
updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModification{KeeplistRules: &keeplistRules})
updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{KeeplistRules: &keeplistRules})
if err != nil {
t.Fatal(err)
}
@ -210,7 +339,7 @@ func TestUpdateFeedKeeplistRules(t *testing.T) {
}
keeplistRules = ""
updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModification{KeeplistRules: &keeplistRules})
updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{KeeplistRules: &keeplistRules})
if err != nil {
t.Fatal(err)
}
@ -225,7 +354,7 @@ func TestUpdateFeedUserAgent(t *testing.T) {
feed, _ := createFeed(t, client)
userAgent := "test"
updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModification{UserAgent: &userAgent})
updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{UserAgent: &userAgent})
if err != nil {
t.Fatal(err)
}
@ -235,7 +364,7 @@ func TestUpdateFeedUserAgent(t *testing.T) {
}
userAgent = ""
updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModification{UserAgent: &userAgent})
updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{UserAgent: &userAgent})
if err != nil {
t.Fatal(err)
}
@ -250,7 +379,7 @@ func TestUpdateFeedUsername(t *testing.T) {
feed, _ := createFeed(t, client)
username := "test"
updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModification{Username: &username})
updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{Username: &username})
if err != nil {
t.Fatal(err)
}
@ -260,7 +389,7 @@ func TestUpdateFeedUsername(t *testing.T) {
}
username = ""
updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModification{Username: &username})
updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{Username: &username})
if err != nil {
t.Fatal(err)
}
@ -275,7 +404,7 @@ func TestUpdateFeedPassword(t *testing.T) {
feed, _ := createFeed(t, client)
password := "test"
updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModification{Password: &password})
updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{Password: &password})
if err != nil {
t.Fatal(err)
}
@ -285,7 +414,7 @@ func TestUpdateFeedPassword(t *testing.T) {
}
password = ""
updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModification{Password: &password})
updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{Password: &password})
if err != nil {
t.Fatal(err)
}
@ -304,7 +433,7 @@ func TestUpdateFeedCategory(t *testing.T) {
t.Fatal(err)
}
updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModification{CategoryID: &newCategory.ID})
updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{CategoryID: &newCategory.ID})
if err != nil {
t.Fatal(err)
}
@ -314,7 +443,7 @@ func TestUpdateFeedCategory(t *testing.T) {
}
categoryID := int64(0)
updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModification{CategoryID: &categoryID})
updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{CategoryID: &categoryID})
if err != nil {
t.Fatal(err)
}

View File

@ -51,7 +51,10 @@ func createFeed(t *testing.T, client *miniflux.Client) (*miniflux.Feed, *miniflu
t.Fatal(err)
}
feedID, err := client.CreateFeed(testFeedURL, categories[0].ID)
feedID, err := client.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testFeedURL,
CategoryID: categories[0].ID,
})
if err != nil {
t.Fatal(err)
}

View File

@ -261,7 +261,7 @@ func TestUpdateUserTheme(t *testing.T) {
}
theme := "dark_serif"
user, err = client.UpdateUser(user.ID, &miniflux.UserModification{Theme: &theme})
user, err = client.UpdateUser(user.ID, &miniflux.UserModificationRequest{Theme: &theme})
if err != nil {
t.Fatal(err)
}
@ -282,7 +282,7 @@ func TestUpdateUserFields(t *testing.T) {
stylesheet := "body { color: red }"
swipe := false
entriesPerPage := 5
user, err = client.UpdateUser(user.ID, &miniflux.UserModification{
user, err = client.UpdateUser(user.ID, &miniflux.UserModificationRequest{
Stylesheet: &stylesheet,
EntrySwipe: &swipe,
EntriesPerPage: &entriesPerPage,
@ -313,7 +313,7 @@ func TestUpdateUserThemeWithInvalidValue(t *testing.T) {
}
theme := "something that doesn't exists"
_, err = client.UpdateUser(user.ID, &miniflux.UserModification{Theme: &theme})
_, err = client.UpdateUser(user.ID, &miniflux.UserModificationRequest{Theme: &theme})
if err == nil {
t.Fatal(`Updating a user Theme with an invalid value should raise an error`)
}
@ -388,7 +388,7 @@ func TestCannotUpdateUserAsNonAdmin(t *testing.T) {
entriesPerPage := 10
userAClient := miniflux.New(testBaseURL, usernameA, testStandardPassword)
userAAfterUpdate, err := userAClient.UpdateUser(userA.ID, &miniflux.UserModification{EntriesPerPage: &entriesPerPage})
userAAfterUpdate, err := userAClient.UpdateUser(userA.ID, &miniflux.UserModificationRequest{EntriesPerPage: &entriesPerPage})
if err != nil {
t.Fatal(`Standard users should be able to update themselves`)
}
@ -398,13 +398,13 @@ func TestCannotUpdateUserAsNonAdmin(t *testing.T) {
}
isAdmin := true
_, err = userAClient.UpdateUser(userA.ID, &miniflux.UserModification{IsAdmin: &isAdmin})
_, err = userAClient.UpdateUser(userA.ID, &miniflux.UserModificationRequest{IsAdmin: &isAdmin})
if err == nil {
t.Fatal(`Standard users should not be able to become admin`)
}
userBClient := miniflux.New(testBaseURL, usernameB, testStandardPassword)
_, err = userBClient.UpdateUser(userA.ID, &miniflux.UserModification{})
_, err = userBClient.UpdateUser(userA.ID, &miniflux.UserModificationRequest{})
if err == nil {
t.Fatal(`Standard users should not be able to update other users`)
}
@ -414,7 +414,7 @@ func TestCannotUpdateUserAsNonAdmin(t *testing.T) {
}
stylesheet := "test"
userC, err := adminClient.UpdateUser(userA.ID, &miniflux.UserModification{Stylesheet: &stylesheet})
userC, err := adminClient.UpdateUser(userA.ID, &miniflux.UserModificationRequest{Stylesheet: &stylesheet})
if err != nil {
t.Fatal(`Admin users should be able to update any users`)
}

View File

@ -11,11 +11,12 @@ import (
"miniflux.app/http/response/html"
"miniflux.app/http/route"
"miniflux.app/logger"
feedHandler "miniflux.app/reader/handler"
)
func (h *handler) refreshFeed(w http.ResponseWriter, r *http.Request) {
feedID := request.RouteInt64Param(r, "feedID")
if err := h.feedHandler.RefreshFeed(request.UserID(r), feedID); err != nil {
if err := feedHandler.RefreshFeed(h.store, request.UserID(r), feedID); err != nil {
logger.Error("[UI:RefreshFeed] %v", err)
}

View File

@ -5,7 +5,6 @@
package ui // import "miniflux.app/ui"
import (
"miniflux.app/reader/feed"
"miniflux.app/storage"
"miniflux.app/template"
"miniflux.app/worker"
@ -14,9 +13,8 @@ import (
)
type handler struct {
router *mux.Router
store *storage.Storage
tpl *template.Engine
pool *worker.Pool
feedHandler *feed.Handler
router *mux.Router
store *storage.Storage
tpl *template.Engine
pool *worker.Pool
}

View File

@ -11,6 +11,7 @@ import (
"miniflux.app/http/request"
"miniflux.app/http/response/html"
"miniflux.app/http/route"
feedHandler "miniflux.app/reader/handler"
"miniflux.app/ui/form"
"miniflux.app/ui/session"
"miniflux.app/ui/view"
@ -47,20 +48,20 @@ func (h *handler) showChooseSubscriptionPage(w http.ResponseWriter, r *http.Requ
return
}
feed, err := h.feedHandler.CreateFeed(
user.ID,
subscriptionForm.CategoryID,
subscriptionForm.URL,
subscriptionForm.Crawler,
subscriptionForm.UserAgent,
subscriptionForm.Username,
subscriptionForm.Password,
subscriptionForm.ScraperRules,
subscriptionForm.RewriteRules,
subscriptionForm.BlocklistRules,
subscriptionForm.KeeplistRules,
subscriptionForm.FetchViaProxy,
)
feed, err := feedHandler.CreateFeed(h.store, &feedHandler.FeedCreationArgs{
UserID: user.ID,
CategoryID: subscriptionForm.CategoryID,
FeedURL: subscriptionForm.URL,
Crawler: subscriptionForm.Crawler,
UserAgent: subscriptionForm.UserAgent,
Username: subscriptionForm.Username,
Password: subscriptionForm.Password,
ScraperRules: subscriptionForm.ScraperRules,
RewriteRules: subscriptionForm.RewriteRules,
BlocklistRules: subscriptionForm.BlocklistRules,
KeeplistRules: subscriptionForm.KeeplistRules,
FetchViaProxy: subscriptionForm.FetchViaProxy,
})
if err != nil {
view.Set("form", subscriptionForm)
view.Set("errorMessage", err)

View File

@ -12,6 +12,7 @@ import (
"miniflux.app/http/response/html"
"miniflux.app/http/route"
"miniflux.app/logger"
feedHandler "miniflux.app/reader/handler"
"miniflux.app/reader/subscription"
"miniflux.app/ui/form"
"miniflux.app/ui/session"
@ -74,20 +75,20 @@ func (h *handler) submitSubscription(w http.ResponseWriter, r *http.Request) {
v.Set("errorMessage", "error.subscription_not_found")
html.OK(w, r, v.Render("add_subscription"))
case n == 1:
feed, err := h.feedHandler.CreateFeed(
user.ID,
subscriptionForm.CategoryID,
subscriptions[0].URL,
subscriptionForm.Crawler,
subscriptionForm.UserAgent,
subscriptionForm.Username,
subscriptionForm.Password,
subscriptionForm.ScraperRules,
subscriptionForm.RewriteRules,
subscriptionForm.BlocklistRules,
subscriptionForm.KeeplistRules,
subscriptionForm.FetchViaProxy,
)
feed, err := feedHandler.CreateFeed(h.store, &feedHandler.FeedCreationArgs{
UserID: user.ID,
CategoryID: subscriptionForm.CategoryID,
FeedURL: subscriptions[0].URL,
Crawler: subscriptionForm.Crawler,
UserAgent: subscriptionForm.UserAgent,
Username: subscriptionForm.Username,
Password: subscriptionForm.Password,
ScraperRules: subscriptionForm.ScraperRules,
RewriteRules: subscriptionForm.RewriteRules,
BlocklistRules: subscriptionForm.BlocklistRules,
KeeplistRules: subscriptionForm.KeeplistRules,
FetchViaProxy: subscriptionForm.FetchViaProxy,
})
if err != nil {
v.Set("form", subscriptionForm)
v.Set("errorMessage", err)

View File

@ -8,19 +8,18 @@ import (
"net/http"
"miniflux.app/config"
"miniflux.app/reader/feed"
"miniflux.app/storage"
"miniflux.app/template"
"miniflux.app/worker"
"github.com/gorilla/mux"
"github.com/mitchellh/go-server-timing"
servertiming "github.com/mitchellh/go-server-timing"
)
// Serve declares all routes for the user interface.
func Serve(router *mux.Router, store *storage.Storage, pool *worker.Pool, feedHandler *feed.Handler) {
func Serve(router *mux.Router, store *storage.Storage, pool *worker.Pool) {
middleware := newMiddleware(router, store)
handler := &handler{router, store, template.NewEngine(router), pool, feedHandler}
handler := &handler{router, store, template.NewEngine(router), pool}
uiRouter := router.NewRoute().Subrouter()
uiRouter.Use(middleware.handleUserSession)

View File

@ -6,7 +6,7 @@ package worker // import "miniflux.app/worker"
import (
"miniflux.app/model"
"miniflux.app/reader/feed"
"miniflux.app/storage"
)
// Pool handles a pool of workers.
@ -22,13 +22,13 @@ func (p *Pool) Push(jobs model.JobList) {
}
// NewPool creates a pool of background workers.
func NewPool(feedHandler *feed.Handler, nbWorkers int) *Pool {
func NewPool(store *storage.Storage, nbWorkers int) *Pool {
workerPool := &Pool{
queue: make(chan model.Job),
}
for i := 0; i < nbWorkers; i++ {
worker := &Worker{id: i, feedHandler: feedHandler}
worker := &Worker{id: i, store: store}
go worker.Run(workerPool.queue)
}

View File

@ -11,13 +11,14 @@ import (
"miniflux.app/logger"
"miniflux.app/metric"
"miniflux.app/model"
"miniflux.app/reader/feed"
feedHandler "miniflux.app/reader/handler"
"miniflux.app/storage"
)
// Worker refreshes a feed in the background.
type Worker struct {
id int
feedHandler *feed.Handler
id int
store *storage.Storage
}
// Run waits for a job and refreshes the given feed.
@ -29,7 +30,7 @@ func (w *Worker) Run(c chan model.Job) {
logger.Debug("[Worker #%d] Received feed #%d for user #%d", w.id, job.FeedID, job.UserID)
startTime := time.Now()
refreshErr := w.feedHandler.RefreshFeed(job.UserID, job.FeedID)
refreshErr := feedHandler.RefreshFeed(w.store, job.UserID, job.FeedID)
if config.Opts.HasMetricsCollector() {
status := "success"