// SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package handler // import "miniflux.app/v2/internal/reader/handler"
import (
"log/slog"
"miniflux.app/v2/internal/config"
"miniflux.app/v2/internal/errors"
"miniflux.app/v2/internal/http/client"
"miniflux.app/v2/internal/integration"
"miniflux.app/v2/internal/locale"
"miniflux.app/v2/internal/model"
"miniflux.app/v2/internal/reader/browser"
"miniflux.app/v2/internal/reader/icon"
"miniflux.app/v2/internal/reader/parser"
"miniflux.app/v2/internal/reader/processor"
"miniflux.app/v2/internal/storage"
)
// Localizable error message templates used by this package.
var (
	errDuplicate        = "This feed already exists (%s)"
	errNotFound         = "Feed %d not found"
	errCategoryNotFound = "Category not found for this user"
)
// CreateFeed fetch, parse and store a new feed.
2021-01-04 22:49:28 +01:00
func CreateFeed(store *storage.Storage, userID int64, feedCreationRequest *model.FeedCreationRequest) (*model.Feed, error) {
slog.Debug("Begin feed creation process",
slog.Int64("user_id", userID),
slog.String("feed_url", feedCreationRequest.FeedURL),
)
2017-11-20 06:10:04 +01:00
2021-08-30 16:53:05 +02:00
user, storeErr := store.UserByID(userID)
if storeErr != nil {
return nil, storeErr
}
2021-01-04 22:49:28 +01:00
if !store.CategoryIDExists(userID, feedCreationRequest.CategoryID) {
2017-11-25 07:29:20 +01:00
return nil, errors.NewLocalizedError(errCategoryNotFound)
}
2021-01-04 22:49:28 +01:00
request := client.NewClientWithConfig(feedCreationRequest.FeedURL, config.Opts)
request.WithCredentials(feedCreationRequest.Username, feedCreationRequest.Password)
request.WithUserAgent(feedCreationRequest.UserAgent)
2021-03-23 04:27:58 +01:00
request.WithCookie(feedCreationRequest.Cookie)
request.AllowSelfSignedCertificates = feedCreationRequest.AllowSelfSignedCertificates
2021-01-04 22:49:28 +01:00
if feedCreationRequest.FetchViaProxy {
request.WithProxy()
}
response, requestErr := browser.Exec(request)
if requestErr != nil {
return nil, requestErr
}
2021-01-04 22:49:28 +01:00
if store.FeedURLExists(userID, response.EffectiveURL) {
2017-11-20 06:10:04 +01:00
return nil, errors.NewLocalizedError(errDuplicate, response.EffectiveURL)
}
subscription, parseErr := parser.ParseFeed(response.EffectiveURL, response.BodyAsString())
if parseErr != nil {
return nil, parseErr
}
2021-01-04 22:49:28 +01:00
subscription.UserID = userID
subscription.UserAgent = feedCreationRequest.UserAgent
2021-03-23 04:27:58 +01:00
subscription.Cookie = feedCreationRequest.Cookie
2021-01-04 22:49:28 +01:00
subscription.Username = feedCreationRequest.Username
subscription.Password = feedCreationRequest.Password
subscription.Crawler = feedCreationRequest.Crawler
subscription.Disabled = feedCreationRequest.Disabled
subscription.IgnoreHTTPCache = feedCreationRequest.IgnoreHTTPCache
subscription.AllowSelfSignedCertificates = feedCreationRequest.AllowSelfSignedCertificates
2021-01-04 22:49:28 +01:00
subscription.FetchViaProxy = feedCreationRequest.FetchViaProxy
subscription.ScraperRules = feedCreationRequest.ScraperRules
subscription.RewriteRules = feedCreationRequest.RewriteRules
subscription.BlocklistRules = feedCreationRequest.BlocklistRules
subscription.KeeplistRules = feedCreationRequest.KeeplistRules
subscription.UrlRewriteRules = feedCreationRequest.UrlRewriteRules
2021-01-04 22:49:28 +01:00
subscription.WithCategoryID(feedCreationRequest.CategoryID)
subscription.WithClientResponse(response)
subscription.CheckedNow()
2017-11-20 06:10:04 +01:00
processor.ProcessFeedEntries(store, subscription, user, true)
if storeErr := store.CreateFeed(subscription); storeErr != nil {
return nil, storeErr
2017-11-20 06:10:04 +01:00
}
slog.Debug("Created feed",
slog.Int64("user_id", userID),
slog.Int64("feed_id", subscription.ID),
slog.String("feed_url", subscription.FeedURL),
)
2017-11-20 06:10:04 +01:00
checkFeedIcon(
store,
subscription.ID,
subscription.SiteURL,
subscription.IconURL,
feedCreationRequest.UserAgent,
feedCreationRequest.FetchViaProxy,
feedCreationRequest.AllowSelfSignedCertificates,
)
2017-11-20 06:10:04 +01:00
return subscription, nil
}
2020-09-28 01:01:06 +02:00
// RefreshFeed refreshes a feed.
func RefreshFeed(store *storage.Storage, userID, feedID int64, forceRefresh bool) error {
slog.Debug("Begin feed refresh process",
slog.Int64("user_id", userID),
slog.Int64("feed_id", feedID),
slog.Bool("force_refresh", forceRefresh),
)
2021-08-30 16:53:05 +02:00
user, storeErr := store.UserByID(userID)
if storeErr != nil {
return storeErr
}
printer := locale.NewPrinter(user.Language)
2017-11-20 06:10:04 +01:00
originalFeed, storeErr := store.FeedByID(userID, feedID)
if storeErr != nil {
return storeErr
2017-11-20 06:10:04 +01:00
}
if originalFeed == nil {
return errors.NewLocalizedError(errNotFound, feedID)
}
weeklyEntryCount := 0
if config.Opts.PollingScheduler() == model.SchedulerEntryFrequency {
var weeklyCountErr error
weeklyEntryCount, weeklyCountErr = store.WeeklyFeedEntryCount(userID, feedID)
if weeklyCountErr != nil {
return weeklyCountErr
}
}
originalFeed.CheckedNow()
originalFeed.ScheduleNextCheck(weeklyEntryCount)
2018-02-09 03:16:54 +01:00
request := client.NewClientWithConfig(originalFeed.FeedURL, config.Opts)
request.WithCredentials(originalFeed.Username, originalFeed.Password)
request.WithUserAgent(originalFeed.UserAgent)
2021-03-23 04:27:58 +01:00
request.WithCookie(originalFeed.Cookie)
request.AllowSelfSignedCertificates = originalFeed.AllowSelfSignedCertificates
2020-06-06 06:50:59 +02:00
if !originalFeed.IgnoreHTTPCache {
request.WithCacheHeaders(originalFeed.EtagHeader, originalFeed.LastModifiedHeader)
}
if originalFeed.FetchViaProxy {
request.WithProxy()
}
response, requestErr := browser.Exec(request)
if requestErr != nil {
originalFeed.WithError(requestErr.Localize(printer))
store.UpdateFeedError(originalFeed)
return requestErr
2017-11-20 06:10:04 +01:00
}
if store.AnotherFeedURLExists(userID, originalFeed.ID, response.EffectiveURL) {
storeErr := errors.NewLocalizedError(errDuplicate, response.EffectiveURL)
originalFeed.WithError(storeErr.Error())
store.UpdateFeedError(originalFeed)
return storeErr
}
2020-06-06 06:50:59 +02:00
if originalFeed.IgnoreHTTPCache || response.IsModified(originalFeed.EtagHeader, originalFeed.LastModifiedHeader) {
slog.Debug("Feed modified",
slog.Int64("user_id", userID),
slog.Int64("feed_id", feedID),
)
updatedFeed, parseErr := parser.ParseFeed(response.EffectiveURL, response.BodyAsString())
if parseErr != nil {
originalFeed.WithError(parseErr.Localize(printer))
store.UpdateFeedError(originalFeed)
return parseErr
2017-11-20 06:10:04 +01:00
}
originalFeed.Entries = updatedFeed.Entries
processor.ProcessFeedEntries(store, originalFeed, user, forceRefresh)
// We don't update existing entries when the crawler is enabled (we crawl only inexisting entries). Unless it is forced to refresh
updateExistingEntries := forceRefresh || !originalFeed.Crawler
2023-09-09 07:45:17 +02:00
newEntries, storeErr := store.RefreshFeedEntries(originalFeed.UserID, originalFeed.ID, originalFeed.Entries, updateExistingEntries)
if storeErr != nil {
originalFeed.WithError(storeErr.Error())
store.UpdateFeedError(originalFeed)
return storeErr
2017-11-20 06:10:04 +01:00
}
2023-09-09 07:45:17 +02:00
userIntegrations, intErr := store.Integration(userID)
if intErr != nil {
slog.Error("Fetching integrations failed; the refresh process will go on, but no integrations will run this time",
slog.Int64("user_id", userID),
slog.Int64("feed_id", feedID),
slog.Any("error", intErr),
)
2023-09-09 07:45:17 +02:00
} else if userIntegrations != nil && len(newEntries) > 0 {
go integration.PushEntries(originalFeed, newEntries, userIntegrations)
}
// We update caching headers only if the feed has been modified,
// because some websites don't return the same headers when replying with a 304.
originalFeed.WithClientResponse(response)
2023-09-09 07:45:17 +02:00
checkFeedIcon(
store,
originalFeed.ID,
originalFeed.SiteURL,
updatedFeed.IconURL,
originalFeed.UserAgent,
originalFeed.FetchViaProxy,
originalFeed.AllowSelfSignedCertificates,
)
2017-11-20 06:10:04 +01:00
} else {
slog.Debug("Feed not modified",
slog.Int64("user_id", userID),
slog.Int64("feed_id", feedID),
)
2017-11-20 06:10:04 +01:00
}
originalFeed.ResetErrorCounter()
2018-12-15 22:04:38 +01:00
if storeErr := store.UpdateFeed(originalFeed); storeErr != nil {
2018-12-15 22:04:38 +01:00
originalFeed.WithError(storeErr.Error())
store.UpdateFeedError(originalFeed)
2018-12-15 22:04:38 +01:00
return storeErr
}
return nil
2017-11-20 06:10:04 +01:00
}
// checkFeedIcon looks up and stores an icon for the feed when it has none yet.
// All failures are logged and swallowed: icons are a cosmetic, best-effort feature.
func checkFeedIcon(store *storage.Storage, feedID int64, websiteURL, feedIconURL, userAgent string, fetchViaProxy, allowSelfSignedCertificates bool) {
	if store.HasIcon(feedID) {
		return
	}

	// Renamed from "icon" to avoid shadowing the icon package.
	feedIcon, err := icon.FindIcon(websiteURL, feedIconURL, userAgent, fetchViaProxy, allowSelfSignedCertificates)
	switch {
	case err != nil:
		slog.Warn("Unable to find feed icon",
			slog.Int64("feed_id", feedID),
			slog.String("website_url", websiteURL),
			slog.String("feed_icon_url", feedIconURL),
			slog.Any("error", err),
		)
	case feedIcon == nil:
		slog.Debug("No icon found",
			slog.Int64("feed_id", feedID),
			slog.String("website_url", websiteURL),
			slog.String("feed_icon_url", feedIconURL),
		)
	default:
		if storeErr := store.CreateFeedIcon(feedID, feedIcon); storeErr != nil {
			slog.Error("Unable to store feed icon",
				slog.Int64("feed_id", feedID),
				slog.String("website_url", websiteURL),
				slog.String("feed_icon_url", feedIconURL),
				slog.Any("error", storeErr),
			)
		}
	}
}