Compare commits

...

8 Commits

39 changed files with 447 additions and 55 deletions

go.mod
View File

@@ -5,6 +5,7 @@ module miniflux.app/v2
require (
github.com/PuerkitoBio/goquery v1.9.1
github.com/abadojack/whatlanggo v1.0.1
github.com/andybalholm/brotli v1.1.0
github.com/coreos/go-oidc/v3 v3.10.0
github.com/go-webauthn/webauthn v0.10.2
github.com/gorilla/mux v1.8.1

go.sum
View File

@@ -2,6 +2,8 @@ github.com/PuerkitoBio/goquery v1.9.1 h1:mTL6XjbJTZdpfL+Gwl5U2h1l9yEkJjhmlTeV9VP
github.com/PuerkitoBio/goquery v1.9.1/go.mod h1:cW1n6TmIMDoORQU5IU/P1T3tGFunOeXEpGP2WHRwkbY=
github.com/abadojack/whatlanggo v1.0.1 h1:19N6YogDnf71CTHm3Mp2qhYfkRdyvbgwWdd2EPxJRG4=
github.com/abadojack/whatlanggo v1.0.1/go.mod h1:66WiQbSbJBIlOZMsvbKe5m6pzQovxCH9B/K8tQB2uoc=
github.com/andybalholm/brotli v1.1.0 h1:eLKJA0d02Lf0mVpIDgYnqXcUn0GqVmEFny3VuID1U3M=
github.com/andybalholm/brotli v1.1.0/go.mod h1:sms7XGricyQI9K10gOSf56VKKWS4oLer58Q+mhRPtnY=
github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss=
github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=

View File

@@ -12,6 +12,8 @@ import (
"net/http"
"strings"
"time"
"github.com/andybalholm/brotli"
)
const compressionThreshold = 1024
@@ -110,8 +112,15 @@ func (b *Builder) writeHeaders() {
func (b *Builder) compress(data []byte) {
if b.enableCompression && len(data) > compressionThreshold {
acceptEncoding := b.r.Header.Get("Accept-Encoding")
switch {
case strings.Contains(acceptEncoding, "br"):
b.headers["Content-Encoding"] = "br"
b.writeHeaders()
brotliWriter := brotli.NewWriterV2(b.w, brotli.DefaultCompression)
defer brotliWriter.Close()
brotliWriter.Write(data)
return
case strings.Contains(acceptEncoding, "gzip"):
b.headers["Content-Encoding"] = "gzip"
b.writeHeaders()
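
The new "br" case prefers Brotli whenever the client advertises it in Accept-Encoding, keeping gzip as the fallback. A minimal standalone sketch of the same negotiation, assuming the github.com/andybalholm/brotli v1.1.0 API used above (NewWriterV2 with DefaultCompression):

package main

import (
	"net/http"
	"strings"

	"github.com/andybalholm/brotli"
)

// compressHandler mirrors the switch above: prefer Brotli when the client
// advertises "br", otherwise send the body uncompressed (the real builder
// falls through to gzip).
func compressHandler(w http.ResponseWriter, r *http.Request) {
	data := []byte(strings.Repeat("miniflux ", 512))
	if strings.Contains(r.Header.Get("Accept-Encoding"), "br") {
		w.Header().Set("Content-Encoding", "br")
		brotliWriter := brotli.NewWriterV2(w, brotli.DefaultCompression)
		defer brotliWriter.Close()
		brotliWriter.Write(data)
		return
	}
	w.Write(data)
}

func main() {
	http.HandleFunc("/", compressHandler)
	http.ListenAndServe("127.0.0.1:8080", nil)
}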

View File

@@ -228,7 +228,7 @@ func TestBuildResponseWithCachingAndEtag(t *testing.T) {
}
}
func TestBuildResponseWithGzipCompression(t *testing.T) {
func TestBuildResponseWithBrotliCompression(t *testing.T) {
body := strings.Repeat("a", compressionThreshold+1)
r, err := http.NewRequest("GET", "/", nil)
r.Header.Set("Accept-Encoding", "gzip, deflate, br")
@@ -245,6 +245,30 @@ func TestBuildResponseWithGzipCompression(t *testing.T) {
handler.ServeHTTP(w, r)
resp := w.Result()
expected := "br"
actual := resp.Header.Get("Content-Encoding")
if actual != expected {
t.Fatalf(`Unexpected header value, got %q instead of %q`, actual, expected)
}
}
func TestBuildResponseWithGzipCompression(t *testing.T) {
body := strings.Repeat("a", compressionThreshold+1)
r, err := http.NewRequest("GET", "/", nil)
r.Header.Set("Accept-Encoding", "gzip, deflate")
if err != nil {
t.Fatal(err)
}
w := httptest.NewRecorder()
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
New(w, r).WithBody(body).Write()
})
handler.ServeHTTP(w, r)
resp := w.Result()
expected := "gzip"
actual := resp.Header.Get("Content-Encoding")
if actual != expected {

View File

@@ -258,6 +258,7 @@
"alert.no_bookmark": "Es existiert derzeit kein Lesezeichen.",
"alert.no_category": "Es ist keine Kategorie vorhanden.",
"alert.no_category_entry": "Es befindet sich kein Artikel in dieser Kategorie.",
"alert.no_tag_entry": "Es gibt keine Artikel, die diesem Tag entsprechen.",
"alert.no_feed_entry": "Es existiert kein Artikel für dieses Abonnement.",
"alert.no_feed": "Es sind keine Abonnements vorhanden.",
"alert.no_feed_in_category": "Für diese Kategorie gibt es kein Abonnement.",

View File

@@ -258,6 +258,7 @@
"alert.no_bookmark": "Δεν υπάρχει σελιδοδείκτης αυτή τη στιγμή.",
"alert.no_category": "Δεν υπάρχει κατηγορία.",
"alert.no_category_entry": "Δεν υπάρχουν άρθρα σε αυτήν την κατηγορία.",
"alert.no_tag_entry": "Δεν υπάρχουν αντικείμενα που να ταιριάζουν με αυτή την ετικέτα.",
"alert.no_feed_entry": "Δεν υπάρχουν άρθρα για αυτήν τη ροή.",
"alert.no_feed": "Δεν έχετε συνδρομές.",
"alert.no_feed_in_category": "Δεν υπάρχει συνδρομή για αυτήν την κατηγορία.",

View File

@@ -258,6 +258,7 @@
"alert.no_bookmark": "There are no starred entries.",
"alert.no_category": "There is no category.",
"alert.no_category_entry": "There are no entries in this category.",
"alert.no_tag_entry": "There are no entries matching this tag.",
"alert.no_feed_entry": "There are no entries for this feed.",
"alert.no_feed": "You dont have any feeds.",
"alert.no_feed_in_category": "There is no feed for this category.",

View File

@@ -258,6 +258,7 @@
"alert.no_bookmark": "No hay marcador en este momento.",
"alert.no_category": "No hay categoría.",
"alert.no_category_entry": "No hay artículos en esta categoría.",
"alert.no_tag_entry": "No hay artículos con esta etiqueta.",
"alert.no_feed_entry": "No hay artículos para esta fuente.",
"alert.no_feed": "No tienes fuentes.",
"alert.no_feed_in_category": "No hay fuentes para esta categoría.",

View File

@@ -258,6 +258,7 @@
"alert.no_bookmark": "Tällä hetkellä ei ole kirjanmerkkiä.",
"alert.no_category": "Ei ole kategoriaa.",
"alert.no_category_entry": "Tässä kategoriassa ei ole artikkeleita.",
"alert.no_tag_entry": "Tätä tunnistetta vastaavia merkintöjä ei ole.",
"alert.no_feed_entry": "Tässä syötteessä ei ole artikkeleita.",
"alert.no_feed": "Sinulla ei ole tilauksia.",
"alert.no_feed_in_category": "Tälle kategorialle ei ole tilausta.",

View File

@@ -258,6 +258,7 @@
"alert.no_bookmark": "Il n'y a aucun favoris pour le moment.",
"alert.no_category": "Il n'y a aucune catégorie.",
"alert.no_category_entry": "Il n'y a aucun article dans cette catégorie.",
"alert.no_tag_entry": "Il n'y a aucun article correspondant à ce tag.",
"alert.no_feed_entry": "Il n'y a aucun article pour cet abonnement.",
"alert.no_feed": "Vous n'avez aucun abonnement.",
"alert.no_feed_in_category": "Il n'y a pas d'abonnement pour cette catégorie.",

View File

@@ -258,6 +258,7 @@
"alert.no_bookmark": "इस समय कोई बुकमार्क नहीं है",
"alert.no_category": "कोई श्रेणी नहीं है।",
"alert.no_category_entry": "इस श्रेणी में कोई विषय-वस्तु नहीं है।",
"alert.no_tag_entry": "इस टैग से मेल खाती कोई प्रविष्टियाँ नहीं हैं।",
"alert.no_feed_entry": "इस फ़ीड के लिए कोई विषय-वस्तु नहीं है।",
"alert.no_feed": "आपके पास कोई सदस्यता नहीं है।",
"alert.no_feed_in_category": "इस श्रेणी के लिए कोई सदस्यता नहीं है।",

View File

@@ -248,6 +248,7 @@
"alert.no_bookmark": "Tidak ada markah.",
"alert.no_category": "Tidak ada kategori.",
"alert.no_category_entry": "Tidak ada artikel di kategori ini.",
"alert.no_tag_entry": "Tidak ada entri yang cocok dengan tag ini.",
"alert.no_feed_entry": "Tidak ada artikel di umpan ini.",
"alert.no_feed": "Anda tidak memiliki langganan.",
"alert.no_feed_in_category": "Tidak ada langganan untuk kategori ini.",

View File

@@ -258,6 +258,7 @@
"alert.no_bookmark": "Nessun preferito disponibile.",
"alert.no_category": "Nessuna categoria disponibile.",
"alert.no_category_entry": "Questa categoria non contiene alcun articolo.",
"alert.no_tag_entry": "Non ci sono voci corrispondenti a questo tag.",
"alert.no_feed_entry": "Questo feed non contiene alcun articolo.",
"alert.no_feed": "Nessun feed disponibile.",
"alert.no_feed_in_category": "Non esiste un abbonamento per questa categoria.",

View File

@@ -248,6 +248,7 @@
"alert.no_bookmark": "現在星付きはありません。",
"alert.no_category": "カテゴリが存在しません。",
"alert.no_category_entry": "このカテゴリには記事がありません。",
"alert.no_tag_entry": "このタグに一致するエントリーはありません。",
"alert.no_feed_entry": "このフィードには記事がありません。",
"alert.no_feed": "何も購読していません。",
"alert.no_feed_in_category": "このカテゴリには購読中のフィードがありません。",

View File

@@ -258,6 +258,7 @@
"alert.no_bookmark": "Er zijn op dit moment geen favorieten.",
"alert.no_category": "Er zijn geen categorieën.",
"alert.no_category_entry": "Deze categorie bevat geen feeds.",
"alert.no_tag_entry": "Er zijn geen items die overeenkomen met deze tag.",
"alert.no_feed_entry": "Er zijn geen artikelen in deze feed.",
"alert.no_feed": "Je hebt nog geen feeds geabboneerd staan.",
"alert.no_feed_in_category": "Er is geen abonnement voor deze categorie.",

View File

@@ -268,6 +268,7 @@
"alert.no_bookmark": "Obecnie nie ma żadnych zakładek.",
"alert.no_category": "Nie ma żadnej kategorii!",
"alert.no_category_entry": "W tej kategorii nie ma żadnych artykułów",
"alert.no_tag_entry": "Nie ma wpisów pasujących do tego tagu.",
"alert.no_feed_entry": "Nie ma artykułu dla tego kanału.",
"alert.no_feed": "Nie masz żadnej subskrypcji.",
"alert.no_feed_in_category": "Nie ma subskrypcji dla tej kategorii.",

View File

@@ -258,6 +258,7 @@
"alert.no_bookmark": "Não há favorito neste momento.",
"alert.no_category": "Não há categoria.",
"alert.no_category_entry": "Não há itens nesta categoria.",
"alert.no_tag_entry": "Não há itens que correspondam a esta etiqueta.",
"alert.no_feed_entry": "Não há itens nessa fonte.",
"alert.no_feed": "Não há inscrições.",
"alert.no_feed_in_category": "Não há inscrições nessa categoria.",

View File

@@ -268,6 +268,7 @@
"alert.no_bookmark": "Избранное отсутствует.",
"alert.no_category": "Категории отсутствуют.",
"alert.no_category_entry": "В этой категории нет статей.",
"alert.no_tag_entry": "Нет записей, соответствующих этому тегу.",
"alert.no_feed_entry": "В этой подписке отсутствуют статьи.",
"alert.no_feed": "У вас нет ни одной подписки.",
"alert.no_feed_in_category": "Для этой категории нет подписки.",

View File

@@ -18,6 +18,7 @@
"alert.no_bookmark": "Yıldızlanmış makale yok.",
"alert.no_category": "Hiç kategori yok.",
"alert.no_category_entry": "Bu kategoride hiç makele yok.",
"alert.no_tag_entry": "Bu etiketle eşleşen hiçbir giriş yok.",
"alert.no_feed": "Hiç beslemeniz yok.",
"alert.no_feed_entry": "Bu besleme için makele yok.",
"alert.no_feed_in_category": "Bu kategori için besleme yok.",

View File

@@ -268,6 +268,7 @@
"alert.no_bookmark": "Наразі закладки відсутні.",
"alert.no_category": "Немає категорії.",
"alert.no_category_entry": "У цій категорії немає записів.",
"alert.no_tag_entry": "Немає записів, що відповідають цьому тегу.",
"alert.no_feed_entry": "У цій стрічці немає записів.",
"alert.no_feed": "У вас немає підписок.",
"alert.no_feed_in_category": "У цій категорії немає підписок.",

View File

@@ -248,6 +248,7 @@
"alert.no_bookmark": "目前没有收藏",
"alert.no_category": "目前没有分类",
"alert.no_category_entry": "该分类下没有文章",
"alert.no_tag_entry": "没有与此标签匹配的条目。",
"alert.no_feed_entry": "该源中没有文章",
"alert.no_feed": "目前没有源",
"alert.no_history": "目前没有历史",

View File

@@ -248,6 +248,7 @@
"alert.no_bookmark": "目前沒有收藏",
"alert.no_category": "目前沒有分類",
"alert.no_category_entry": "該分類下沒有文章",
"alert.no_tag_entry": "沒有與此標籤相符的條目。",
"alert.no_feed_entry": "該Feed中沒有文章",
"alert.no_feed": "目前沒有Feed",
"alert.no_history": "目前沒有歷史",

View File

@@ -108,7 +108,7 @@ func (f *Feed) CheckedNow() {
}
// ScheduleNextCheck sets "next_check_at" of a feed based on the scheduler selected in the configuration.
func (f *Feed) ScheduleNextCheck(weeklyCount int, newTTL int) {
func (f *Feed) ScheduleNextCheck(weeklyCount int, newTTL int, rateLimited bool) {
f.TTL = newTTL
// Default to the global config Polling Frequency.
var intervalMinutes int
@@ -124,6 +124,9 @@ func (f *Feed) ScheduleNextCheck(weeklyCount int, newTTL int) {
default:
intervalMinutes = config.Opts.SchedulerRoundRobinMinInterval()
}
if rateLimited {
intervalMinutes += (12 * 60)
}
// If the feed has a TTL defined, we use it to make sure we don't check it too often.
if newTTL > intervalMinutes && newTTL > 0 {
intervalMinutes = newTTL
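
The net effect of the new flag: a rate-limited feed's next poll is pushed out by an extra 12 hours on top of whatever interval the scheduler picked, and a larger feed TTL can still override the result. A worked sketch with a hypothetical 60-minute base interval:

// Hypothetical numbers, not Miniflux defaults.
intervalMinutes := 60          // the scheduler's choice
if rateLimited {
	intervalMinutes += 12 * 60 // 60 + 720 = 780 minutes, about 13 hours
}
// A feed TTL above 780 minutes would still raise intervalMinutes,
// as the TTL check above shows.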

View File

@@ -15,6 +15,7 @@ import (
const (
largeWeeklyCount = 10080
noNewTTL = 0
noRateLimited = false
)
func TestFeedCategorySetter(t *testing.T) {
@@ -89,7 +90,7 @@ func TestFeedScheduleNextCheckDefault(t *testing.T) {
timeBefore := time.Now()
feed := &Feed{}
weeklyCount := 10
feed.ScheduleNextCheck(weeklyCount, noNewTTL)
feed.ScheduleNextCheck(weeklyCount, noNewTTL, noRateLimited)
if feed.NextCheckAt.IsZero() {
t.Error(`The next_check_at must be set`)
@@ -115,7 +116,7 @@ func TestFeedScheduleNextCheckRoundRobinMinInterval(t *testing.T) {
timeBefore := time.Now()
feed := &Feed{}
weeklyCount := 100
feed.ScheduleNextCheck(weeklyCount, noNewTTL)
feed.ScheduleNextCheck(weeklyCount, noNewTTL, noRateLimited)
if feed.NextCheckAt.IsZero() {
t.Error(`The next_check_at must be set`)
@@ -144,7 +145,7 @@ func TestFeedScheduleNextCheckEntryFrequencyMaxInterval(t *testing.T) {
feed := &Feed{}
// Use a very small weekly count to trigger the max interval
weeklyCount := 1
feed.ScheduleNextCheck(weeklyCount, noNewTTL)
feed.ScheduleNextCheck(weeklyCount, noNewTTL, noRateLimited)
if feed.NextCheckAt.IsZero() {
t.Error(`The next_check_at must be set`)
@@ -173,7 +174,7 @@ func TestFeedScheduleNextCheckEntryFrequencyMaxIntervalZeroWeeklyCount(t *testin
feed := &Feed{}
// Use a very small weekly count to trigger the max interval
weeklyCount := 0
feed.ScheduleNextCheck(weeklyCount, noNewTTL)
feed.ScheduleNextCheck(weeklyCount, noNewTTL, noRateLimited)
if feed.NextCheckAt.IsZero() {
t.Error(`The next_check_at must be set`)
@@ -202,7 +203,7 @@ func TestFeedScheduleNextCheckEntryFrequencyMinInterval(t *testing.T) {
feed := &Feed{}
// Use a very large weekly count to trigger the min interval
weeklyCount := largeWeeklyCount
feed.ScheduleNextCheck(weeklyCount, noNewTTL)
feed.ScheduleNextCheck(weeklyCount, noNewTTL, noRateLimited)
if feed.NextCheckAt.IsZero() {
t.Error(`The next_check_at must be set`)
@@ -228,7 +229,7 @@ func TestFeedScheduleNextCheckEntryFrequencyFactor(t *testing.T) {
timeBefore := time.Now()
feed := &Feed{}
weeklyCount := 7
feed.ScheduleNextCheck(weeklyCount, noNewTTL)
feed.ScheduleNextCheck(weeklyCount, noNewTTL, noRateLimited)
if feed.NextCheckAt.IsZero() {
t.Error(`The next_check_at must be set`)
@@ -260,7 +261,7 @@ func TestFeedScheduleNextCheckEntryFrequencySmallNewTTL(t *testing.T) {
weeklyCount := largeWeeklyCount
// TTL is smaller than minInterval.
newTTL := minInterval / 2
feed.ScheduleNextCheck(weeklyCount, newTTL)
feed.ScheduleNextCheck(weeklyCount, newTTL, noRateLimited)
if feed.NextCheckAt.IsZero() {
t.Error(`The next_check_at must be set`)
@@ -296,7 +297,7 @@ func TestFeedScheduleNextCheckEntryFrequencyLargeNewTTL(t *testing.T) {
weeklyCount := largeWeeklyCount
// TTL is larger than minInterval.
newTTL := minInterval * 2
feed.ScheduleNextCheck(weeklyCount, newTTL)
feed.ScheduleNextCheck(weeklyCount, newTTL, noRateLimited)
if feed.NextCheckAt.IsZero() {
t.Error(`The next_check_at must be set`)
@@ -309,3 +310,25 @@ func TestFeedScheduleNextCheckEntryFrequencyLargeNewTTL(t *testing.T) {
t.Error(`The next_check_at should be after timeBefore + entry frequency min interval`)
}
}
func TestFeedScheduleNextCheckRateLimited(t *testing.T) {
var err error
parser := config.NewParser()
config.Opts, err = parser.ParseEnvironmentVariables()
if err != nil {
t.Fatalf(`Parsing failure: %v`, err)
}
feed := &Feed{}
weeklyCount := 10
rateLimited := true
feed.ScheduleNextCheck(weeklyCount, noNewTTL, rateLimited)
if feed.NextCheckAt.IsZero() {
t.Error(`The next_check_at must be set`)
}
if feed.NextCheckAt.Before(time.Now().Add(time.Minute * time.Duration(60*12))) {
t.Error(`The next_check_at should not be before now + 12 hours`)
}
}

View File

@@ -0,0 +1,55 @@
package fetcher
import (
"compress/gzip"
"io"
"github.com/andybalholm/brotli"
)
type brotliReadCloser struct {
body io.ReadCloser
brotliReader io.Reader
}
func NewBrotliReadCloser(body io.ReadCloser) *brotliReadCloser {
return &brotliReadCloser{
body: body,
brotliReader: brotli.NewReader(body),
}
}
func (b *brotliReadCloser) Read(p []byte) (n int, err error) {
return b.brotliReader.Read(p)
}
func (b *brotliReadCloser) Close() error {
return b.body.Close()
}
type gzipReadCloser struct {
body io.ReadCloser
gzipReader io.Reader
gzipErr error
}
func NewGzipReadCloser(body io.ReadCloser) *gzipReadCloser {
return &gzipReadCloser{body: body}
}
func (gz *gzipReadCloser) Read(p []byte) (n int, err error) {
if gz.gzipReader == nil {
if gz.gzipErr == nil {
gz.gzipReader, gz.gzipErr = gzip.NewReader(gz.body)
}
if gz.gzipErr != nil {
return 0, gz.gzipErr
}
}
return gz.gzipReader.Read(p)
}
func (gz *gzipReadCloser) Close() error {
return gz.body.Close()
}
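
Two details in this new file are easy to miss: brotli.NewReader cannot fail at construction, so brotliReadCloser wraps the body eagerly, whereas gzip.NewReader reads the gzip header immediately and may return an error, so gzipReadCloser defers construction to the first Read and caches any failure in gzipErr. A usage fragment, with resp standing in for a hypothetical *http.Response:

// Pick a decoder by Content-Encoding; both wrappers close the underlying
// body in Close(), so callers can defer it uniformly.
var body io.ReadCloser = resp.Body
switch strings.ToLower(resp.Header.Get("Content-Encoding")) {
case "br":
	body = NewBrotliReadCloser(resp.Body)
case "gzip":
	body = NewGzipReadCloser(resp.Body)
}
defer body.Close()
payload, err := io.ReadAll(body)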

View File

@@ -169,6 +169,7 @@ func (r *RequestBuilder) ExecuteRequest(requestURL string) (*http.Response, erro
}
req.Header = r.headers
req.Header.Set("Accept-Encoding", "br, gzip")
req.Header.Set("Accept", defaultAcceptHeader)
req.Header.Set("Connection", "close")

View File

@@ -8,10 +8,12 @@ import (
"errors"
"fmt"
"io"
"log/slog"
"net"
"net/http"
"net/url"
"os"
"strings"
"miniflux.app/v2/internal/locale"
)
@@ -65,18 +67,41 @@ func (r *ResponseHandler) IsModified(lastEtagValue, lastModifiedValue string) bo
return true
}
func (r *ResponseHandler) IsRateLimited() bool {
return r.httpResponse.StatusCode == http.StatusTooManyRequests
}
func (r *ResponseHandler) Close() {
if r.httpResponse != nil && r.httpResponse.Body != nil && r.clientErr == nil {
r.httpResponse.Body.Close()
}
}
func (r *ResponseHandler) getReader(maxBodySize int64) io.ReadCloser {
contentEncoding := strings.ToLower(r.httpResponse.Header.Get("Content-Encoding"))
slog.Debug("Request response",
slog.String("effective_url", r.EffectiveURL()),
slog.String("content_length", r.httpResponse.Header.Get("Content-Length")),
slog.String("content_encoding", contentEncoding),
slog.String("content_type", r.httpResponse.Header.Get("Content-Type")),
)
reader := r.httpResponse.Body
switch contentEncoding {
case "br":
reader = NewBrotliReadCloser(r.httpResponse.Body)
case "gzip":
reader = NewGzipReadCloser(r.httpResponse.Body)
}
return http.MaxBytesReader(nil, reader, maxBodySize)
}
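// Note the wrapping order above: the decoder wraps the raw body, and
// http.MaxBytesReader wraps the decoder, so maxBodySize caps the
// decompressed size rather than the on-wire size. The reader chain is:
//   raw body -> content decoder -> size limiter (outermost)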
func (r *ResponseHandler) Body(maxBodySize int64) io.ReadCloser {
return http.MaxBytesReader(nil, r.httpResponse.Body, maxBodySize)
return r.getReader(maxBodySize)
}
func (r *ResponseHandler) ReadBody(maxBodySize int64) ([]byte, *locale.LocalizedErrorWrapper) {
limitedReader := http.MaxBytesReader(nil, r.httpResponse.Body, maxBodySize)
limitedReader := r.getReader(maxBodySize)
buffer, err := io.ReadAll(limitedReader)
if err != nil && err != io.EOF {

View File

@@ -198,12 +198,13 @@ func CreateFeed(store *storage.Storage, userID int64, feedCreationRequest *model
}
// RefreshFeed refreshes a feed.
func RefreshFeed(store *storage.Storage, userID, feedID int64, forceRefresh bool) *locale.LocalizedErrorWrapper {
func RefreshFeed(store *storage.Storage, userID, feedID int64, forceRefresh bool) (localizedError *locale.LocalizedErrorWrapper) {
slog.Debug("Begin feed refresh process",
slog.Int64("user_id", userID),
slog.Int64("feed_id", feedID),
slog.Bool("force_refresh", forceRefresh),
)
localizedError = nil
user, storeErr := store.UserByID(userID)
if storeErr != nil {
@@ -221,6 +222,7 @@ func RefreshFeed(store *storage.Storage, userID, feedID int64, forceRefresh bool
weeklyEntryCount := 0
newTTL := 0
rateLimited := false
if config.Opts.PollingScheduler() == model.SchedulerEntryFrequency {
var weeklyCountErr error
weeklyEntryCount, weeklyCountErr = store.WeeklyFeedEntryCount(userID, feedID)
@@ -230,7 +232,31 @@ func RefreshFeed(store *storage.Storage, userID, feedID int64, forceRefresh bool
}
originalFeed.CheckedNow()
originalFeed.ScheduleNextCheck(weeklyEntryCount, newTTL)
// Commit the result to the database at the end of this function.
// If we encountered an error before entering the deferred function, localizedError will not be nil.
defer func() {
originalFeed.ScheduleNextCheck(weeklyEntryCount, newTTL, rateLimited)
slog.Debug("Updated next check date",
slog.Int64("user_id", userID),
slog.Int64("feed_id", feedID),
slog.Int("weeklyEntryCount", weeklyEntryCount),
slog.Int("ttl", newTTL),
slog.Bool("rateLimited", rateLimited),
slog.Time("new_next_check_at", originalFeed.NextCheckAt),
)
if localizedError == nil {
// We have not encountered any error before entering this deferred function.
originalFeed.ResetErrorCounter()
if storeErr := store.UpdateFeed(originalFeed); storeErr != nil {
// Update the return value when there is an error.
localizedError = locale.NewLocalizedErrorWrapper(storeErr, "error.database_error", storeErr)
}
}
if localizedError != nil {
originalFeed.WithTranslatedErrorMessage(localizedError.Translate(user.Language))
store.UpdateFeedError(originalFeed)
}
}()
requestBuilder := fetcher.NewRequestBuilder()
requestBuilder.WithUsernameAndPassword(originalFeed.Username, originalFeed.Password)
@@ -251,17 +277,18 @@ func RefreshFeed(store *storage.Storage, userID, feedID int64, forceRefresh bool
responseHandler := fetcher.NewResponseHandler(requestBuilder.ExecuteRequest(originalFeed.FeedURL))
defer responseHandler.Close()
if localizedError := responseHandler.LocalizedError(); localizedError != nil {
rateLimited = responseHandler.IsRateLimited()
if rateLimited {
slog.Warn("Feed is rate limited (429 status code)", slog.String("feed_url", originalFeed.FeedURL))
}
if localizedError = responseHandler.LocalizedError(); localizedError != nil {
slog.Warn("Unable to fetch feed", slog.String("feed_url", originalFeed.FeedURL), slog.Any("error", localizedError.Error()))
originalFeed.WithTranslatedErrorMessage(localizedError.Translate(user.Language))
store.UpdateFeedError(originalFeed)
return localizedError
}
if store.AnotherFeedURLExists(userID, originalFeed.ID, responseHandler.EffectiveURL()) {
localizedError := locale.NewLocalizedErrorWrapper(ErrDuplicatedFeed, "error.duplicated_feed")
originalFeed.WithTranslatedErrorMessage(localizedError.Translate(user.Language))
store.UpdateFeedError(originalFeed)
localizedError = locale.NewLocalizedErrorWrapper(ErrDuplicatedFeed, "error.duplicated_feed")
return localizedError
}
@@ -279,27 +306,16 @@ func RefreshFeed(store *storage.Storage, userID, feedID int64, forceRefresh bool
updatedFeed, parseErr := parser.ParseFeed(responseHandler.EffectiveURL(), bytes.NewReader(responseBody))
if parseErr != nil {
localizedError := locale.NewLocalizedErrorWrapper(parseErr, "error.unable_to_parse_feed", parseErr)
if errors.Is(parseErr, parser.ErrFeedFormatNotDetected) {
localizedError = locale.NewLocalizedErrorWrapper(parseErr, "error.feed_format_not_detected", parseErr)
} else {
localizedError = locale.NewLocalizedErrorWrapper(parseErr, "error.unable_to_parse_feed", parseErr)
}
originalFeed.WithTranslatedErrorMessage(localizedError.Translate(user.Language))
store.UpdateFeedError(originalFeed)
return localizedError
}
// If the feed has a TTL defined, we use it to make sure we don't check it too often.
newTTL = updatedFeed.TTL
// Set the next check at with updated arguments.
originalFeed.ScheduleNextCheck(weeklyEntryCount, newTTL)
slog.Debug("Updated next check date",
slog.Int64("user_id", userID),
slog.Int64("feed_id", feedID),
slog.Int("ttl", newTTL),
slog.Time("new_next_check_at", originalFeed.NextCheckAt),
)
originalFeed.Entries = updatedFeed.Entries
processor.ProcessFeedEntries(store, originalFeed, user, forceRefresh)
@@ -308,9 +324,7 @@ func RefreshFeed(store *storage.Storage, userID, feedID int64, forceRefresh bool
updateExistingEntries := forceRefresh || !originalFeed.Crawler
newEntries, storeErr := store.RefreshFeedEntries(originalFeed.UserID, originalFeed.ID, originalFeed.Entries, updateExistingEntries)
if storeErr != nil {
localizedError := locale.NewLocalizedErrorWrapper(storeErr, "error.database_error", storeErr)
originalFeed.WithTranslatedErrorMessage(localizedError.Translate(user.Language))
store.UpdateFeedError(originalFeed)
localizedError = locale.NewLocalizedErrorWrapper(storeErr, "error.database_error", storeErr)
return localizedError
}
@@ -344,16 +358,7 @@ func RefreshFeed(store *storage.Storage, userID, feedID int64, forceRefresh bool
)
}
originalFeed.ResetErrorCounter()
if storeErr := store.UpdateFeed(originalFeed); storeErr != nil {
localizedError := locale.NewLocalizedErrorWrapper(storeErr, "error.database_error", storeErr)
originalFeed.WithTranslatedErrorMessage(localizedError.Translate(user.Language))
store.UpdateFeedError(originalFeed)
return localizedError
}
return nil
return localizedError
}
func checkFeedIcon(store *storage.Storage, requestBuilder *fetcher.RequestBuilder, feedID int64, websiteURL, feedIconURL string) {

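The refactor above turns RefreshFeed's return into a named value so that a single deferred function can persist the new next_check_at and record either success or failure; error paths now simply return, replacing the repeated UpdateFeedError blocks the diff removes. The pattern in isolation, as a sketch with hypothetical fetch/commit/recordError helpers rather than Miniflux code:

func refresh() (err error) {
	defer func() {
		if err == nil {
			err = commit() // a failing commit becomes the return value
		}
		if err != nil {
			recordError(err) // persist the error we are about to return
		}
	}()
	if err = fetch(); err != nil {
		return err // the deferred function still runs and records it
	}
	return nil
}

func fetch() error      { return nil } // placeholder helpers
func commit() error     { return nil }
func recordError(error) {}
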
View File

@@ -58,6 +58,15 @@ func (e *EntryPaginationBuilder) WithStatus(status string) {
}
}
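// WithTags filters the entries by the given tags.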
func (e *EntryPaginationBuilder) WithTags(tags []string) {
if len(tags) > 0 {
for _, tag := range tags {
e.conditions = append(e.conditions, fmt.Sprintf("LOWER($%d) = ANY(LOWER(e.tags::text)::text[])", len(e.args)+1))
e.args = append(e.args, tag)
}
}
}
// WithGloballyVisible adds global visibility to the condition.
func (e *EntryPaginationBuilder) WithGloballyVisible() {
e.conditions = append(e.conditions, "not c.hide_globally")

View File

@@ -160,7 +160,7 @@ func (e *EntryQueryBuilder) WithStatuses(statuses []string) *EntryQueryBuilder {
func (e *EntryQueryBuilder) WithTags(tags []string) *EntryQueryBuilder {
if len(tags) > 0 {
for _, cat := range tags {
e.conditions = append(e.conditions, fmt.Sprintf("$%d = ANY(e.tags)", len(e.args)+1))
e.conditions = append(e.conditions, fmt.Sprintf("LOWER($%d) = ANY(LOWER(e.tags::text)::text[])", len(e.args)+1))
e.args = append(e.args, cat)
}
}
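
The rewritten condition makes tag matching case-insensitive on the Postgres text[] column: LOWER() only accepts text, so the array is cast to its literal text form, lowercased, and cast back. A comment-style sketch of what the builder emits for a single bound tag (assuming e.tags is text[]):

// For $1 = "news" the generated condition is:
//   LOWER($1) = ANY(LOWER(e.tags::text)::text[])
// e.tags::text yields the array literal, e.g. '{Go,News}'; LOWER(...)
// gives '{go,news}'; ::text[] casts it back to an array so ANY()
// compares against lowercased elements.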

View File

@@ -8,6 +8,7 @@ import (
"html/template"
"math"
"net/mail"
"net/url"
"slices"
"strings"
"time"
@@ -91,8 +92,9 @@ func (f *funcMap) Map() template.FuncMap {
"nonce": func() string {
return crypto.GenerateRandomStringHex(16)
},
"deRef": func(i *int) int { return *i },
"duration": duration,
"deRef": func(i *int) int { return *i },
"duration": duration,
"urlEncode": url.PathEscape,
// These functions are overridden at runtime after parsing.
"elapsed": func(timezone string, t time.Time) string {

View File

@@ -135,7 +135,7 @@
{{ if .entry.Tags }}
<div class="entry-tags">
{{ t "entry.tags.label" }}
{{range $i, $e := .entry.Tags}}{{if $i}}, {{end}}<strong>{{ $e }}</strong>{{end}}
{{range $i, $e := .entry.Tags}}{{if $i}}, {{end}}<a href="{{ route "tagEntriesAll" "tagName" (urlEncode $e) }}"><strong>{{ $e }}</strong></a>{{end}}
</div>
{{ end }}
<div class="entry-date">

View File

@@ -0,0 +1,52 @@
{{ define "title"}}{{ .tagName }} ({{ .total }}){{ end }}
{{ define "page_header"}}
<section class="page-header" aria-labelledby="page-header-title page-header-title-count">
<h1 id="page-header-title" dir="auto">
{{ .tagName }}
<span aria-hidden="true"> ({{ .total }})</span>
</h1>
<span id="page-header-title-count" class="sr-only">{{ plural "page.tag_entry_count" .total .total }}</span>
</section>
{{ end }}
{{ define "content"}}
{{ if not .entries }}
<p role="alert" class="alert alert-info">{{ t "alert.no_tag_entry" }}</p>
{{ else }}
<div class="pagination-top">
{{ template "pagination" .pagination }}
</div>
<div class="items">
{{ range .entries }}
<article
class="item entry-item {{ if $.user.EntrySwipe }}entry-swipe{{ end }} item-status-{{ .Status }}"
data-id="{{ .ID }}"
aria-labelledby="entry-title-{{ .ID }}"
tabindex="-1"
>
<header class="item-header" dir="auto">
<h2 id="entry-title-{{ .ID }}" class="item-title">
<a href="{{ route "tagEntry" "entryID" .ID "tagName" (urlEncode $.tagName) }}">
{{ if ne .Feed.Icon.IconID 0 }}
<img src="{{ route "icon" "iconID" .Feed.Icon.IconID }}" width="16" height="16" loading="lazy" alt="">
{{ end }}
{{ .Title }}
</a>
</h2>
<span class="category">
<a href="{{ route "categoryEntries" "categoryID" .Feed.Category.ID }}">
{{ .Feed.Category.Title }}
</a>
</span>
</header>
{{ template "item_meta" dict "user" $.user "entry" . "hasSaveEntry" $.hasSaveEntry }}
</article>
{{ end }}
</div>
<div class="pagination-bottom">
{{ template "pagination" .pagination }}
</div>
{{ end }}
{{ end }}

View File

@@ -8,6 +8,7 @@ import (
"net/http"
"time"
"miniflux.app/v2/internal/config"
"miniflux.app/v2/internal/http/request"
"miniflux.app/v2/internal/http/response/html"
"miniflux.app/v2/internal/http/route"
@@ -32,8 +33,9 @@ func (h *handler) refreshCategory(w http.ResponseWriter, r *http.Request) int64
sess := session.New(h.store, request.SessionID(r))
// Avoid accidental and excessive refreshes.
if time.Now().UTC().Unix()-request.LastForceRefresh(r) < 1800 {
sess.NewFlashErrorMessage(printer.Print("alert.too_many_feeds_refresh"))
if time.Now().UTC().Unix()-request.LastForceRefresh(r) < int64(config.Opts.ForceRefreshInterval())*60 {
interval := config.Opts.ForceRefreshInterval()
sess.NewFlashErrorMessage(printer.Plural("alert.too_many_feeds_refresh", interval, interval))
} else {
// We allow the end-user to force refresh all its feeds in this category
// without taking into consideration the number of errors.

internal/ui/entry_tag.go
View File

@@ -0,0 +1,90 @@
// SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package ui // import "miniflux.app/v2/internal/ui"
import (
"net/http"
"net/url"
"miniflux.app/v2/internal/http/request"
"miniflux.app/v2/internal/http/response/html"
"miniflux.app/v2/internal/http/route"
"miniflux.app/v2/internal/model"
"miniflux.app/v2/internal/storage"
"miniflux.app/v2/internal/ui/session"
"miniflux.app/v2/internal/ui/view"
)
func (h *handler) showTagEntryPage(w http.ResponseWriter, r *http.Request) {
user, err := h.store.UserByID(request.UserID(r))
if err != nil {
html.ServerError(w, r, err)
return
}
tagName, err := url.PathUnescape(request.RouteStringParam(r, "tagName"))
if err != nil {
html.ServerError(w, r, err)
return
}
entryID := request.RouteInt64Param(r, "entryID")
builder := h.store.NewEntryQueryBuilder(user.ID)
builder.WithTags([]string{tagName})
builder.WithEntryID(entryID)
builder.WithoutStatus(model.EntryStatusRemoved)
entry, err := builder.GetEntry()
if err != nil {
html.ServerError(w, r, err)
return
}
if entry == nil {
html.NotFound(w, r)
return
}
if user.MarkReadOnView && entry.Status == model.EntryStatusUnread {
err = h.store.SetEntriesStatus(user.ID, []int64{entry.ID}, model.EntryStatusRead)
if err != nil {
html.ServerError(w, r, err)
return
}
entry.Status = model.EntryStatusRead
}
entryPaginationBuilder := storage.NewEntryPaginationBuilder(h.store, user.ID, entry.ID, user.EntryOrder, user.EntryDirection)
entryPaginationBuilder.WithTags([]string{tagName})
prevEntry, nextEntry, err := entryPaginationBuilder.Entries()
if err != nil {
html.ServerError(w, r, err)
return
}
nextEntryRoute := ""
if nextEntry != nil {
nextEntryRoute = route.Path(h.router, "tagEntry", "tagName", url.PathEscape(tagName), "entryID", nextEntry.ID)
}
prevEntryRoute := ""
if prevEntry != nil {
prevEntryRoute = route.Path(h.router, "tagEntry", "tagName", url.PathEscape(tagName), "entryID", prevEntry.ID)
}
sess := session.New(h.store, request.SessionID(r))
view := view.New(h.tpl, r, sess)
view.Set("entry", entry)
view.Set("prevEntry", prevEntry)
view.Set("nextEntry", nextEntry)
view.Set("nextEntryRoute", nextEntryRoute)
view.Set("prevEntryRoute", prevEntryRoute)
view.Set("user", user)
view.Set("countUnread", h.store.CountUnreadEntries(user.ID))
view.Set("countErrorFeeds", h.store.CountUserFeedsWithErrors(user.ID))
view.Set("hasSaveEntry", h.store.HasSaveEntry(user.ID))
html.OK(w, r, view.Render("entry"))
}

View File

@@ -29,7 +29,9 @@ func (h *handler) showIcon(w http.ResponseWriter, r *http.Request) {
b.WithHeader("Content-Security-Policy", `default-src 'self'`)
b.WithHeader("Content-Type", icon.MimeType)
b.WithBody(icon.Content)
b.WithoutCompression()
if icon.MimeType != "image/svg+xml" {
b.WithoutCompression()
}
b.Write()
})
}

View File

@@ -31,12 +31,12 @@ func (h *handler) showAppIcon(w http.ResponseWriter, r *http.Request) {
switch filepath.Ext(filename) {
case ".png":
b.WithoutCompression()
b.WithHeader("Content-Type", "image/png")
case ".svg":
b.WithHeader("Content-Type", "image/svg+xml")
}
b.WithoutCompression()
b.WithBody(blob)
b.Write()
})

View File

@@ -0,0 +1,65 @@
// SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package ui // import "miniflux.app/v2/internal/ui"
import (
"net/http"
"net/url"
"miniflux.app/v2/internal/http/request"
"miniflux.app/v2/internal/http/response/html"
"miniflux.app/v2/internal/http/route"
"miniflux.app/v2/internal/model"
"miniflux.app/v2/internal/ui/session"
"miniflux.app/v2/internal/ui/view"
)
func (h *handler) showTagEntriesAllPage(w http.ResponseWriter, r *http.Request) {
user, err := h.store.UserByID(request.UserID(r))
if err != nil {
html.ServerError(w, r, err)
return
}
tagName, err := url.PathUnescape(request.RouteStringParam(r, "tagName"))
if err != nil {
html.ServerError(w, r, err)
return
}
offset := request.QueryIntParam(r, "offset", 0)
builder := h.store.NewEntryQueryBuilder(user.ID)
builder.WithoutStatus(model.EntryStatusRemoved)
builder.WithTags([]string{tagName})
builder.WithSorting("status", "asc")
builder.WithSorting(user.EntryOrder, user.EntryDirection)
builder.WithOffset(offset)
builder.WithLimit(user.EntriesPerPage)
entries, err := builder.GetEntries()
if err != nil {
html.ServerError(w, r, err)
return
}
count, err := builder.CountEntries()
if err != nil {
html.ServerError(w, r, err)
return
}
sess := session.New(h.store, request.SessionID(r))
view := view.New(h.tpl, r, sess)
view.Set("tagName", tagName)
view.Set("total", count)
view.Set("entries", entries)
view.Set("pagination", getPagination(route.Path(h.router, "tagEntriesAll", "tagName", url.PathEscape(tagName)), count, offset, user.EntriesPerPage))
view.Set("user", user)
view.Set("countUnread", h.store.CountUnreadEntries(user.ID))
view.Set("countErrorFeeds", h.store.CountUserFeedsWithErrors(user.ID))
view.Set("hasSaveEntry", h.store.HasSaveEntry(user.ID))
view.Set("showOnlyUnreadEntries", false)
html.OK(w, r, view.Render("tag_entries"))
}

View File

@@ -93,6 +93,10 @@ func Serve(router *mux.Router, store *storage.Storage, pool *worker.Pool) {
uiRouter.HandleFunc("/category/{categoryID}/remove", handler.removeCategory).Name("removeCategory").Methods(http.MethodPost)
uiRouter.HandleFunc("/category/{categoryID}/mark-all-as-read", handler.markCategoryAsRead).Name("markCategoryAsRead").Methods(http.MethodPost)
// Tag pages.
uiRouter.HandleFunc("/tags/{tagName}/entries/all", handler.showTagEntriesAllPage).Name("tagEntriesAll").Methods(http.MethodGet)
uiRouter.HandleFunc("/tags/{tagName}/entry/{entryID}", handler.showTagEntryPage).Name("tagEntry").Methods(http.MethodGet)
// Entry pages.
uiRouter.HandleFunc("/entry/status", handler.updateEntriesStatus).Name("updateEntriesStatus").Methods(http.MethodPost)
uiRouter.HandleFunc("/entry/save/{entryID}", handler.saveEntry).Name("saveEntry").Methods(http.MethodPost)