From 863a5b36482b5f2749fcd8d7f6084af640f2090a Mon Sep 17 00:00:00 2001 From: jvoisin Date: Mon, 18 Mar 2024 17:06:30 +0100 Subject: [PATCH] Simplify removeDuplicates Use a sort+compact construct instead of doing it by hand with a hashmap. The time complexity is now O(n log n) instead of O(n), and the auxiliary space drops from O(n) (hashmap plus result slice) to around O(log n) (sort recursion). Note that the new implementation sorts the slice in place, so the input is mutated and insertion order is no longer preserved; this shouldn't matter anyway, since removeDuplicates is only called to deduplicate tags. --- internal/storage/entry.go | 15 ++++----------- 1 file changed, 4 insertions(+), 11 deletions(-) diff --git a/internal/storage/entry.go b/internal/storage/entry.go index b8468550..c9037094 100644 --- a/internal/storage/entry.go +++ b/internal/storage/entry.go @@ -8,6 +8,7 @@ import ( "errors" "fmt" "log/slog" + "slices" "time" "miniflux.app/v2/internal/crypto" @@ -615,15 +616,7 @@ func (s *Storage) UnshareEntry(userID int64, entryID int64) (err error) { return } -// removeDuplicate removes duplicate entries from a slice -func removeDuplicates[T string | int](sliceList []T) []T { - allKeys := make(map[T]bool) - list := []T{} - for _, item := range sliceList { - if _, value := allKeys[item]; !value { - allKeys[item] = true - list = append(list, item) - } - } - return list +func removeDuplicates(l []string) []string { + slices.Sort(l) + return slices.Compact(l) }