Compile block/keep regex only once per feed

No need to compile them once for matching on the url,
once per tag, once per title, once per author, … one time is enough.
It also simplifies error handling: while regexp compilation can fail,
matching can't.
This commit is contained in:
jvoisin 2024-03-17 14:38:13 +01:00 committed by Frédéric Guillot
parent 00dabc1d3c
commit 02a074ed26
1 changed files with 46 additions and 39 deletions

View File

@ -116,58 +116,65 @@ func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, user *model.Us
} }
func isBlockedEntry(feed *model.Feed, entry *model.Entry) bool { func isBlockedEntry(feed *model.Feed, entry *model.Entry) bool {
if feed.BlocklistRules != "" { if feed.BlocklistRules == "" {
containsBlockedTag := slices.ContainsFunc(entry.Tags, func(tag string) bool { return false
return matchField(feed.BlocklistRules, tag) }
})
if matchField(feed.BlocklistRules, entry.URL) || matchField(feed.BlocklistRules, entry.Title) || matchField(feed.BlocklistRules, entry.Author) || containsBlockedTag { compiledBlocklist, err := regexp.Compile(feed.BlocklistRules)
slog.Debug("Blocking entry based on rule", if err != nil {
slog.Int64("entry_id", entry.ID), slog.Debug("Failed on regexp compilation",
slog.String("entry_url", entry.URL), slog.String("pattern", feed.BlocklistRules),
slog.Int64("feed_id", feed.ID), slog.Any("error", err),
slog.String("feed_url", feed.FeedURL), )
slog.String("rule", feed.BlocklistRules), return false
) }
return true
} containsBlockedTag := slices.ContainsFunc(entry.Tags, func(tag string) bool {
return compiledBlocklist.MatchString(tag)
})
if compiledBlocklist.MatchString(entry.URL) || compiledBlocklist.MatchString(entry.Title) || compiledBlocklist.MatchString(entry.Author) || containsBlockedTag {
slog.Debug("Blocking entry based on rule",
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
slog.Int64("feed_id", feed.ID),
slog.String("feed_url", feed.FeedURL),
slog.String("rule", feed.BlocklistRules),
)
return true
} }
return false return false
} }
func isAllowedEntry(feed *model.Feed, entry *model.Entry) bool { func isAllowedEntry(feed *model.Feed, entry *model.Entry) bool {
if feed.KeeplistRules != "" { if feed.KeeplistRules == "" {
containsAllowedTag := slices.ContainsFunc(entry.Tags, func(tag string) bool { return true
return matchField(feed.KeeplistRules, tag)
})
if matchField(feed.KeeplistRules, entry.URL) || matchField(feed.KeeplistRules, entry.Title) || matchField(feed.KeeplistRules, entry.Author) || containsAllowedTag {
slog.Debug("Allow entry based on rule",
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
slog.Int64("feed_id", feed.ID),
slog.String("feed_url", feed.FeedURL),
slog.String("rule", feed.KeeplistRules),
)
return true
}
return false
} }
return true
}
func matchField(pattern, value string) bool { compiledKeeplist, err := regexp.Compile(feed.KeeplistRules)
match, err := regexp.MatchString(pattern, value)
if err != nil { if err != nil {
slog.Debug("Failed on regexp match", slog.Debug("Failed on regexp compilation",
slog.String("pattern", pattern), slog.String("pattern", feed.KeeplistRules),
slog.String("value", value),
slog.Bool("match", match),
slog.Any("error", err), slog.Any("error", err),
) )
return false
} }
return match containsAllowedTag := slices.ContainsFunc(entry.Tags, func(tag string) bool {
return compiledKeeplist.MatchString(tag)
})
if compiledKeeplist.MatchString(entry.URL) || compiledKeeplist.MatchString(entry.Title) || compiledKeeplist.MatchString(entry.Author) || containsAllowedTag {
slog.Debug("Allow entry based on rule",
slog.Int64("entry_id", entry.ID),
slog.String("entry_url", entry.URL),
slog.Int64("feed_id", feed.ID),
slog.String("feed_url", feed.FeedURL),
slog.String("rule", feed.KeeplistRules),
)
return true
}
return false
} }
// ProcessEntryWebPage downloads the entry web page and apply rewrite rules. // ProcessEntryWebPage downloads the entry web page and apply rewrite rules.