diff --git a/api/subscription.go b/api/subscription.go
index 246dde7f..f391dc9d 100644
--- a/api/subscription.go
+++ b/api/subscription.go
@@ -29,6 +29,7 @@ func (h *handler) discoverSubscriptions(w http.ResponseWriter, r *http.Request)
 	subscriptions, finderErr := subscription.FindSubscriptions(
 		subscriptionDiscoveryRequest.URL,
 		subscriptionDiscoveryRequest.UserAgent,
+		subscriptionDiscoveryRequest.Cookie,
 		subscriptionDiscoveryRequest.Username,
 		subscriptionDiscoveryRequest.Password,
 		subscriptionDiscoveryRequest.FetchViaProxy,
diff --git a/client/model.go b/client/model.go
index 0b9cdeb9..535f5062 100644
--- a/client/model.go
+++ b/client/model.go
@@ -122,6 +122,7 @@ type Feed struct {
 	KeeplistRules  string    `json:"keeplist_rules"`
 	Crawler        bool      `json:"crawler"`
 	UserAgent      string    `json:"user_agent"`
+	Cookie         string    `json:"cookie"`
 	Username       string    `json:"username"`
 	Password       string    `json:"password"`
 	Category       *Category `json:"category,omitempty"`
@@ -132,6 +133,7 @@ type FeedCreationRequest struct {
 	FeedURL    string `json:"feed_url"`
 	CategoryID int64  `json:"category_id"`
 	UserAgent  string `json:"user_agent"`
+	Cookie     string `json:"cookie"`
 	Username   string `json:"username"`
 	Password   string `json:"password"`
 	Crawler    bool   `json:"crawler"`
@@ -156,6 +158,7 @@ type FeedModificationRequest struct {
 	KeeplistRules *string `json:"keeplist_rules"`
 	Crawler       *bool   `json:"crawler"`
 	UserAgent     *string `json:"user_agent"`
+	Cookie        *string `json:"cookie"`
 	Username      *string `json:"username"`
 	Password      *string `json:"password"`
 	CategoryID    *int64  `json:"category_id"`
diff --git a/database/migrations.go b/database/migrations.go
index c7dfc8d9..4bad07a4 100644
--- a/database/migrations.go
+++ b/database/migrations.go
@@ -529,4 +529,9 @@ var migrations = []func(tx *sql.Tx) error{
 		_, err = tx.Exec(sql)
 		return err
 	},
+	func(tx *sql.Tx) (err error) {
+		sql := `ALTER TABLE feeds ADD COLUMN cookie text default ''`
+		_, err = tx.Exec(sql)
+		return err
+	},
 }
diff --git a/http/client/client.go b/http/client/client.go
index 8b3b169a..dc5cb76c 100644
--- a/http/client/client.go
+++ b/http/client/client.go
@@ -47,6 +47,7 @@ type Client struct {
 	requestUsername  string
 	requestPassword  string
 	requestUserAgent string
+	requestCookie    string
 
 	useProxy             bool
 	doNotFollowRedirects bool
@@ -142,6 +143,14 @@ func (c *Client) WithUserAgent(userAgent string) *Client {
 	return c
 }
 
+// WithCookie defines the Cookie header value to use for HTTP requests.
+func (c *Client) WithCookie(cookie string) *Client {
+	if cookie != "" {
+		c.requestCookie = cookie
+	}
+	return c
+}
+
 // Get performs a GET HTTP request.
 func (c *Client) Get() (*Response, error) {
 	request, err := c.buildRequest(http.MethodGet, nil)
@@ -336,6 +345,10 @@ func (c *Client) buildHeaders() http.Header {
 		headers.Add("Authorization", c.requestAuthorizationHeader)
 	}
 
+	if c.requestCookie != "" {
+		headers.Add("Cookie", c.requestCookie)
+	}
+
 	headers.Add("Connection", "close")
 	return headers
 }
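In isolation, the new client option is a pass-through: whatever string is stored with WithCookie is sent verbatim as a single Cookie header, and an empty string leaves the request untouched. A minimal caller-side sketch, assuming config.Opts has already been initialized by the application; the package name, feed URL, user agent, and cookie value below are placeholders, not part of this change:

package example

import (
	"fmt"

	"miniflux.app/config"
	"miniflux.app/http/client"
)

// fetchWithCookie is a hypothetical helper that only illustrates how the new
// WithCookie option chains with the existing builder-style methods.
func fetchWithCookie() error {
	clt := client.NewClientWithConfig("https://example.org/feed.xml", config.Opts)
	clt.WithUserAgent("Mozilla/5.0 (compatible; Miniflux)")
	clt.WithCookie("sessionid=abc123; theme=dark") // emitted as-is in a single Cookie header
	response, err := clt.Get()
	if err != nil {
		return err
	}
	fmt.Println(response.StatusCode)
	return nil
}

Because WithCookie ignores empty strings, feeds without a configured cookie keep sending exactly the headers they send today.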
diff --git a/locale/translations/de_DE.json b/locale/translations/de_DE.json
index c5eccd8c..15c1d73d 100644
--- a/locale/translations/de_DE.json
+++ b/locale/translations/de_DE.json
@@ -266,6 +266,7 @@
     "form.feed.label.feed_username": "Benutzername des Abonnements",
     "form.feed.label.feed_password": "Passwort des Abonnements",
     "form.feed.label.user_agent": "Standardbenutzeragenten überschreiben",
+    "form.feed.label.cookie": "Cookies setzen",
     "form.feed.label.scraper_rules": "Extraktionsregeln",
     "form.feed.label.rewrite_rules": "Umschreiberegeln",
     "form.feed.label.blocklist_rules": "Blockierregeln",
diff --git a/locale/translations/en_US.json b/locale/translations/en_US.json
index 53dc9efb..f5fce3a6 100644
--- a/locale/translations/en_US.json
+++ b/locale/translations/en_US.json
@@ -266,6 +266,7 @@
     "form.feed.label.feed_username": "Feed Username",
     "form.feed.label.feed_password": "Feed Password",
     "form.feed.label.user_agent": "Override Default User Agent",
+    "form.feed.label.cookie": "Set Cookies",
     "form.feed.label.scraper_rules": "Scraper Rules",
     "form.feed.label.rewrite_rules": "Rewrite Rules",
     "form.feed.label.blocklist_rules": "Block Rules",
diff --git a/locale/translations/es_ES.json b/locale/translations/es_ES.json
index 4716a1d6..058ba192 100644
--- a/locale/translations/es_ES.json
+++ b/locale/translations/es_ES.json
@@ -266,6 +266,7 @@
     "form.feed.label.feed_username": "Nombre de usuario de fuente",
     "form.feed.label.feed_password": "Contraseña de fuente",
     "form.feed.label.user_agent": "Invalidar el agente de usuario predeterminado",
+    "form.feed.label.cookie": "Configurar las cookies",
     "form.feed.label.scraper_rules": "Reglas de raspador",
     "form.feed.label.rewrite_rules": "Reglas de reescribir",
     "form.feed.label.blocklist_rules": "Reglas de Filtrado(Bloquear)",
diff --git a/locale/translations/fr_FR.json b/locale/translations/fr_FR.json
index 4b990173..bc38a322 100644
--- a/locale/translations/fr_FR.json
+++ b/locale/translations/fr_FR.json
@@ -266,6 +266,7 @@
     "form.feed.label.feed_username": "Nom d'utilisateur du flux",
     "form.feed.label.feed_password": "Mot de passe du flux",
     "form.feed.label.user_agent": "Remplacer l'agent utilisateur par défaut",
+    "form.feed.label.cookie": "Définir les cookies",
     "form.feed.label.scraper_rules": "Règles pour récupérer le contenu original",
     "form.feed.label.rewrite_rules": "Règles de réécriture",
     "form.feed.label.blocklist_rules": "Règles de blocage",
diff --git a/locale/translations/it_IT.json b/locale/translations/it_IT.json
index 3003c4d8..ce9a4cb8 100644
--- a/locale/translations/it_IT.json
+++ b/locale/translations/it_IT.json
@@ -266,6 +266,7 @@
     "form.feed.label.feed_username": "Nome utente del feed",
     "form.feed.label.feed_password": "Password del feed",
     "form.feed.label.user_agent": "Usa user agent personalizzato",
+    "form.feed.label.cookie": "Imposta i cookie",
     "form.feed.label.scraper_rules": "Regole di estrazione del contenuto",
     "form.feed.label.rewrite_rules": "Regole di impaginazione del contenuto",
     "form.feed.label.blocklist_rules": "Regole di blocco",
diff --git a/locale/translations/ja_JP.json b/locale/translations/ja_JP.json
index 235035b6..788716a4 100644
--- a/locale/translations/ja_JP.json
+++ b/locale/translations/ja_JP.json
@@ -266,6 +266,7 @@
     "form.feed.label.feed_username": "フィードのユーザー名",
     "form.feed.label.feed_password": "フィードのパスワード",
     "form.feed.label.user_agent": "ディフォルトの User Agent を上書きする",
+    "form.feed.label.cookie": "クッキーの設定",
     "form.feed.label.scraper_rules": "スクラップルール",
     "form.feed.label.rewrite_rules": "Rewrite ルール",
     "form.feed.label.blocklist_rules": "ブロックルール",
diff --git a/locale/translations/nl_NL.json b/locale/translations/nl_NL.json
index 71c547b3..1743a14d 100644
--- a/locale/translations/nl_NL.json
+++ b/locale/translations/nl_NL.json
@@ -266,6 +266,7 @@
     "form.feed.label.feed_username": "Feed-gebruikersnaam",
     "form.feed.label.feed_password": "Feed wachtwoord",
     "form.feed.label.user_agent": "Standaard User Agent overschrijven",
+    "form.feed.label.cookie": "Cookies instellen",
     "form.feed.label.scraper_rules": "Scraper regels",
     "form.feed.label.rewrite_rules": "Rewrite regels",
     "form.feed.label.blocklist_rules": "Blokkeer regels",
diff --git a/locale/translations/pl_PL.json b/locale/translations/pl_PL.json
index ac8313c0..3b59e246 100644
--- a/locale/translations/pl_PL.json
+++ b/locale/translations/pl_PL.json
@@ -268,6 +268,7 @@
     "form.feed.label.feed_username": "Subskrypcję nazwa użytkownika",
     "form.feed.label.feed_password": "Subskrypcję Hasło",
     "form.feed.label.user_agent": "Zastąp domyślny agent użytkownika",
+    "form.feed.label.cookie": "Ustawianie ciasteczek",
     "form.feed.label.scraper_rules": "Zasady ekstrakcji",
     "form.feed.label.rewrite_rules": "Reguły zapisu",
     "form.feed.label.blocklist_rules": "Zasady blokowania",
diff --git a/locale/translations/pt_BR.json b/locale/translations/pt_BR.json
index 3f0da761..8e6d361c 100644
--- a/locale/translations/pt_BR.json
+++ b/locale/translations/pt_BR.json
@@ -266,6 +266,7 @@
     "form.feed.label.feed_username": "Nome de usuário da fonte",
     "form.feed.label.feed_password": "Senha da fonte",
     "form.feed.label.user_agent": "Sobrescrever o agente de usuário (user-agent) padrão",
+    "form.feed.label.cookie": "Definir Cookies",
     "form.feed.label.scraper_rules": "Regras do scraper",
     "form.feed.label.rewrite_rules": "Regras para o Rewrite",
     "form.feed.label.blocklist_rules": "Regras de bloqueio",
diff --git a/locale/translations/ru_RU.json b/locale/translations/ru_RU.json
index 03f96fc0..cad1d952 100644
--- a/locale/translations/ru_RU.json
+++ b/locale/translations/ru_RU.json
@@ -268,6 +268,7 @@
     "form.feed.label.feed_username": "Имя пользователя подписки",
     "form.feed.label.feed_password": "Пароль подписки",
     "form.feed.label.user_agent": "Переопределить User Agent по умолчанию",
+    "form.feed.label.cookie": "Установить куки",
     "form.feed.label.scraper_rules": "Правила Scraper",
     "form.feed.label.rewrite_rules": "Правила Rewrite",
     "form.feed.label.blocklist_rules": "Правила блокировки",
diff --git a/locale/translations/tr_TR.json b/locale/translations/tr_TR.json
index 8276e597..1093a32b 100644
--- a/locale/translations/tr_TR.json
+++ b/locale/translations/tr_TR.json
@@ -266,6 +266,7 @@
     "form.feed.label.feed_username": "Besleme Kullanıcı Adı",
     "form.feed.label.feed_password": "Besleme Parolası",
     "form.feed.label.user_agent": "Varsayılan User Agent'i Geçersiz Kıl",
+    "form.feed.label.cookie": "Çerezleri Ayarla",
     "form.feed.label.scraper_rules": "Scrapper Kuralları",
     "form.feed.label.rewrite_rules": "Yeniden Yazma Kuralları",
     "form.feed.label.blocklist_rules": "Engelleme Kuralları",
diff --git a/locale/translations/zh_CN.json b/locale/translations/zh_CN.json
index 0f4f8838..71e1a2df 100644
--- a/locale/translations/zh_CN.json
+++ b/locale/translations/zh_CN.json
@@ -264,6 +264,7 @@
     "form.feed.label.feed_username": "源用户名",
     "form.feed.label.feed_password": "源密码",
     "form.feed.label.user_agent": "覆盖默认 User-Agent",
+    "form.feed.label.cookie": "设置 Cookies",
     "form.feed.label.scraper_rules": "Scraper 规则",
     "form.feed.label.rewrite_rules": "重写规则",
     "form.feed.label.blocklist_rules": "封锁规则",
diff --git a/model/feed.go b/model/feed.go
index 5f3b622c..9ed668a3 100644
--- a/model/feed.go
+++ b/model/feed.go
@@ -41,6 +41,7 @@ type Feed struct {
 	BlocklistRules string `json:"blocklist_rules"`
 	KeeplistRules  string `json:"keeplist_rules"`
 	UserAgent      string `json:"user_agent"`
+	Cookie         string `json:"cookie"`
 	Username       string `json:"username"`
 	Password       string `json:"password"`
 	Disabled       bool   `json:"disabled"`
@@ -121,6 +122,7 @@ type FeedCreationRequest struct {
 	FeedURL    string `json:"feed_url"`
 	CategoryID int64  `json:"category_id"`
 	UserAgent  string `json:"user_agent"`
+	Cookie     string `json:"cookie"`
 	Username   string `json:"username"`
 	Password   string `json:"password"`
 	Crawler    bool   `json:"crawler"`
@@ -145,6 +147,7 @@ type FeedModificationRequest struct {
 	KeeplistRules *string `json:"keeplist_rules"`
 	Crawler       *bool   `json:"crawler"`
 	UserAgent     *string `json:"user_agent"`
+	Cookie        *string `json:"cookie"`
 	Username      *string `json:"username"`
 	Password      *string `json:"password"`
 	CategoryID    *int64  `json:"category_id"`
@@ -192,6 +195,10 @@ func (f *FeedModificationRequest) Patch(feed *Feed) {
 		feed.UserAgent = *f.UserAgent
 	}
 
+	if f.Cookie != nil {
+		feed.Cookie = *f.Cookie
+	}
+
 	if f.Username != nil {
 		feed.Username = *f.Username
 	}
diff --git a/model/subscription.go b/model/subscription.go
index 8fddd9ed..e1157cb5 100644
--- a/model/subscription.go
+++ b/model/subscription.go
@@ -8,6 +8,7 @@ package model // import "miniflux.app/model"
 type SubscriptionDiscoveryRequest struct {
 	URL           string `json:"url"`
 	UserAgent     string `json:"user_agent"`
+	Cookie        string `json:"cookie"`
 	Username      string `json:"username"`
 	Password      string `json:"password"`
 	FetchViaProxy bool   `json:"fetch_via_proxy"`
diff --git a/reader/handler/handler.go b/reader/handler/handler.go
index c003b970..49f0a5b7 100644
--- a/reader/handler/handler.go
+++ b/reader/handler/handler.go
@@ -39,6 +39,7 @@ func CreateFeed(store *storage.Storage, userID int64, feedCreationRequest *model
 	request := client.NewClientWithConfig(feedCreationRequest.FeedURL, config.Opts)
 	request.WithCredentials(feedCreationRequest.Username, feedCreationRequest.Password)
 	request.WithUserAgent(feedCreationRequest.UserAgent)
+	request.WithCookie(feedCreationRequest.Cookie)
 	request.AllowSelfSignedCertificates = feedCreationRequest.AllowSelfSignedCertificates
 
 	if feedCreationRequest.FetchViaProxy {
@@ -61,6 +62,7 @@ func CreateFeed(store *storage.Storage, userID int64, feedCreationRequest *model
 
 	subscription.UserID = userID
 	subscription.UserAgent = feedCreationRequest.UserAgent
+	subscription.Cookie = feedCreationRequest.Cookie
 	subscription.Username = feedCreationRequest.Username
 	subscription.Password = feedCreationRequest.Password
 	subscription.Crawler = feedCreationRequest.Crawler
@@ -124,6 +126,7 @@ func RefreshFeed(store *storage.Storage, userID, feedID int64) error {
 	request := client.NewClientWithConfig(originalFeed.FeedURL, config.Opts)
 	request.WithCredentials(originalFeed.Username, originalFeed.Password)
 	request.WithUserAgent(originalFeed.UserAgent)
+	request.WithCookie(originalFeed.Cookie)
 	request.AllowSelfSignedCertificates = originalFeed.AllowSelfSignedCertificates
 
 	if !originalFeed.IgnoreHTTPCache {
diff --git a/reader/processor/processor.go b/reader/processor/processor.go
index 696fbec6..c9999978 100644
--- a/reader/processor/processor.go
+++ b/reader/processor/processor.go
@@ -54,6 +54,7 @@ func ProcessFeedEntries(store *storage.Storage, feed *model.Feed) {
 				entry.URL,
 				feed.ScraperRules,
 				feed.UserAgent,
+				feed.Cookie,
 				feed.AllowSelfSignedCertificates,
 			)
 
@@ -115,6 +116,7 @@ func ProcessEntryWebPage(feed *model.Feed, entry *model.Entry) error {
 		entry.URL,
 		entry.Feed.ScraperRules,
 		entry.Feed.UserAgent,
+		entry.Feed.Cookie,
 		feed.AllowSelfSignedCertificates,
 	)
 
diff --git a/reader/scraper/scraper.go b/reader/scraper/scraper.go
index 37cf29ec..7b482b10 100644
--- a/reader/scraper/scraper.go
+++ b/reader/scraper/scraper.go
@@ -20,9 +20,10 @@ import (
 )
 
 // Fetch downloads a web page and returns relevant contents.
-func Fetch(websiteURL, rules, userAgent string, allowSelfSignedCertificates bool) (string, error) {
+func Fetch(websiteURL, rules, userAgent string, cookie string, allowSelfSignedCertificates bool) (string, error) {
 	clt := client.NewClientWithConfig(websiteURL, config.Opts)
 	clt.WithUserAgent(userAgent)
+	clt.WithCookie(cookie)
 	clt.AllowSelfSignedCertificates = allowSelfSignedCertificates
 
 	response, err := clt.Get()
diff --git a/reader/subscription/finder.go b/reader/subscription/finder.go
index ee755a9e..f70ffa03 100644
--- a/reader/subscription/finder.go
+++ b/reader/subscription/finder.go
@@ -27,13 +27,14 @@ var (
 )
 
 // FindSubscriptions downloads and try to find one or more subscriptions from an URL.
-func FindSubscriptions(websiteURL, userAgent, username, password string, fetchViaProxy, allowSelfSignedCertificates bool) (Subscriptions, *errors.LocalizedError) {
+func FindSubscriptions(websiteURL, userAgent, cookie, username, password string, fetchViaProxy, allowSelfSignedCertificates bool) (Subscriptions, *errors.LocalizedError) {
 	websiteURL = findYoutubeChannelFeed(websiteURL)
 	websiteURL = parseYoutubeVideoPage(websiteURL)
 
 	clt := client.NewClientWithConfig(websiteURL, config.Opts)
 	clt.WithCredentials(username, password)
 	clt.WithUserAgent(userAgent)
+	clt.WithCookie(cookie)
 	clt.AllowSelfSignedCertificates = allowSelfSignedCertificates
 
 	if fetchViaProxy {
@@ -62,7 +63,7 @@ func FindSubscriptions(websiteURL, userAgent, username, password string, fetchVi
 		return subscriptions, err
 	}
 
-	return tryWellKnownUrls(websiteURL, userAgent, username, password)
+	return tryWellKnownUrls(websiteURL, userAgent, cookie, username, password)
 }
 
 func parseWebPage(websiteURL string, data io.Reader) (Subscriptions, *errors.LocalizedError) {
@@ -138,7 +139,7 @@ func parseYoutubeVideoPage(websiteURL string) string {
 	return websiteURL
 }
 
-func tryWellKnownUrls(websiteURL, userAgent, username, password string) (Subscriptions, *errors.LocalizedError) {
+func tryWellKnownUrls(websiteURL, userAgent, cookie, username, password string) (Subscriptions, *errors.LocalizedError) {
 	var subscriptions Subscriptions
 	knownURLs := map[string]string{
 		"/atom.xml": "atom",
@@ -161,6 +162,7 @@ func tryWellKnownUrls(websiteURL, userAgent, cookie, username, password string) (Subscri
 		clt := client.NewClientWithConfig(fullURL, config.Opts)
 		clt.WithCredentials(username, password)
 		clt.WithUserAgent(userAgent)
+		clt.WithCookie(cookie)
 
 		// Some websites redirects unknown URLs to the home page.
 		// As result, the list of known URLs is returned to the subscription list.
diff --git a/storage/entry_query_builder.go b/storage/entry_query_builder.go
index cbe1dad2..b87b2e7f 100644
--- a/storage/entry_query_builder.go
+++ b/storage/entry_query_builder.go
@@ -242,6 +242,7 @@ func (e *EntryQueryBuilder) GetEntries() (model.Entries, error) {
 			f.rewrite_rules,
 			f.crawler,
 			f.user_agent,
+			f.cookie,
 			fi.icon_id,
 			u.timezone
 		FROM
@@ -303,6 +304,7 @@
 			&entry.Feed.RewriteRules,
 			&entry.Feed.Crawler,
 			&entry.Feed.UserAgent,
+			&entry.Feed.Cookie,
 			&iconID,
 			&tz,
 		)
diff --git a/storage/feed.go b/storage/feed.go
index bd366b1d..b3126ab8 100644
--- a/storage/feed.go
+++ b/storage/feed.go
@@ -195,6 +195,7 @@ func (s *Storage) CreateFeed(feed *model.Feed) error {
 			last_modified_header,
 			crawler,
 			user_agent,
+			cookie,
 			username,
 			password,
 			disabled,
@@ -207,7 +208,7 @@
 			fetch_via_proxy
 		)
 		VALUES
-			($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18, $19)
+			($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18, $19, $20)
 		RETURNING
 			id
 	`
@@ -222,6 +223,7 @@ func (s *Storage) CreateFeed(feed *model.Feed) error {
 		feed.LastModifiedHeader,
 		feed.Crawler,
 		feed.UserAgent,
+		feed.Cookie,
 		feed.Username,
 		feed.Password,
 		feed.Disabled,
@@ -282,15 +284,16 @@ func (s *Storage) UpdateFeed(feed *model.Feed) (err error) {
 			keeplist_rules=$13,
 			crawler=$14,
 			user_agent=$15,
-			username=$16,
-			password=$17,
-			disabled=$18,
-			next_check_at=$19,
-			ignore_http_cache=$20,
-			allow_self_signed_certificates=$21,
-			fetch_via_proxy=$22
+			cookie=$16,
+			username=$17,
+			password=$18,
+			disabled=$19,
+			next_check_at=$20,
+			ignore_http_cache=$21,
+			allow_self_signed_certificates=$22,
+			fetch_via_proxy=$23
 		WHERE
-			id=$23 AND user_id=$24
+			id=$24 AND user_id=$25
 	`
 	_, err = s.db.Exec(query,
 		feed.FeedURL,
@@ -308,6 +311,7 @@
 		feed.KeeplistRules,
 		feed.Crawler,
 		feed.UserAgent,
+		feed.Cookie,
 		feed.Username,
 		feed.Password,
 		feed.Disabled,
diff --git a/storage/feed_query_builder.go b/storage/feed_query_builder.go
index 0400a701..66547c87 100644
--- a/storage/feed_query_builder.go
+++ b/storage/feed_query_builder.go
@@ -159,6 +159,7 @@ func (f *FeedQueryBuilder) GetFeeds() (model.Feeds, error) {
 			f.keeplist_rules,
 			f.crawler,
 			f.user_agent,
+			f.cookie,
 			f.username,
 			f.password,
 			f.ignore_http_cache,
@@ -218,6 +219,7 @@
 			&feed.KeeplistRules,
 			&feed.Crawler,
 			&feed.UserAgent,
+			&feed.Cookie,
 			&feed.Username,
 			&feed.Password,
 			&feed.IgnoreHTTPCache,
diff --git a/template/templates/views/add_subscription.html b/template/templates/views/add_subscription.html
index de9f45b2..e4392229 100644
--- a/template/templates/views/add_subscription.html
+++ b/template/templates/views/add_subscription.html
@@ -31,7 +31,7 @@
-
+
 {{ if .hasProxyConfigured }}
 {{ end }}
@@ -39,6 +39,9 @@
+
+
+
diff --git a/template/templates/views/choose_subscription.html b/template/templates/views/choose_subscription.html
index 957aac3d..bbe2d9e4 100644
--- a/template/templates/views/choose_subscription.html
+++ b/template/templates/views/choose_subscription.html
@@ -10,6 +10,7 @@
+
diff --git a/template/templates/views/edit_feed.html b/template/templates/views/edit_feed.html
index 62e339db..017759c0 100644
--- a/template/templates/views/edit_feed.html
+++ b/template/templates/views/edit_feed.html
@@ -58,6 +58,9 @@
+
+
+
@@ -66,7 +69,7 @@
-
+
diff --git a/tests/feed_test.go b/tests/feed_test.go
index aa9be393..14b4be11 100644
--- a/tests/feed_test.go
+++ b/tests/feed_test.go
@@ -531,6 +531,31 @@ func TestUpdateFeedUserAgent(t *testing.T) {
 	}
 }
 
+func TestUpdateFeedCookie(t *testing.T) {
+	client := createClient(t)
+	feed, _ := createFeed(t, client)
+
+	cookie := "test"
+	updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{Cookie: &cookie})
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	if updatedFeed.Cookie != cookie {
+		t.Fatalf(`Wrong Cookie value, got "%v" instead of "%v"`, updatedFeed.Cookie, cookie)
+	}
+
+	cookie = ""
+	updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{Cookie: &cookie})
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	if updatedFeed.Cookie != cookie {
+		t.Fatalf(`Wrong Cookie value, got "%v" instead of "%v"`, updatedFeed.Cookie, cookie)
+	}
+}
+
 func TestUpdateFeedUsername(t *testing.T) {
 	client := createClient(t)
 	feed, _ := createFeed(t, client)
diff --git a/ui/feed_edit.go b/ui/feed_edit.go
index 0e5cbd78..46421864 100644
--- a/ui/feed_edit.go
+++ b/ui/feed_edit.go
@@ -50,6 +50,7 @@ func (h *handler) showEditFeedPage(w http.ResponseWriter, r *http.Request) {
 		KeeplistRules: feed.KeeplistRules,
 		Crawler:       feed.Crawler,
 		UserAgent:     feed.UserAgent,
+		Cookie:        feed.Cookie,
 		CategoryID:    feed.Category.ID,
 		Username:      feed.Username,
 		Password:      feed.Password,
diff --git a/ui/form/feed.go b/ui/form/feed.go
index 0f3ded75..0a937420 100644
--- a/ui/form/feed.go
+++ b/ui/form/feed.go
@@ -22,6 +22,7 @@ type FeedForm struct {
 	KeeplistRules  string
 	Crawler        bool
 	UserAgent      string
+	Cookie         string
 	CategoryID     int64
 	Username       string
 	Password       string
@@ -43,6 +44,7 @@ func (f FeedForm) Merge(feed *model.Feed) *model.Feed {
 	feed.KeeplistRules = f.KeeplistRules
 	feed.Crawler = f.Crawler
 	feed.UserAgent = f.UserAgent
+	feed.Cookie = f.Cookie
 	feed.ParsingErrorCount = 0
 	feed.ParsingErrorMsg = ""
 	feed.Username = f.Username
@@ -66,6 +68,7 @@ func NewFeedForm(r *http.Request) *FeedForm {
 		Title:          r.FormValue("title"),
 		ScraperRules:   r.FormValue("scraper_rules"),
 		UserAgent:      r.FormValue("user_agent"),
+		Cookie:         r.FormValue("cookie"),
 		RewriteRules:   r.FormValue("rewrite_rules"),
 		BlocklistRules: r.FormValue("blocklist_rules"),
 		KeeplistRules:  r.FormValue("keeplist_rules"),
diff --git a/ui/form/subscription.go b/ui/form/subscription.go
index 8b425b3c..769dfb2e 100644
--- a/ui/form/subscription.go
+++ b/ui/form/subscription.go
@@ -20,6 +20,7 @@ type SubscriptionForm struct {
 	FetchViaProxy               bool
 	AllowSelfSignedCertificates bool
 	UserAgent                   string
+	Cookie                      string
 	Username                    string
 	Password                    string
 	ScraperRules                string
@@ -63,6 +64,7 @@ func NewSubscriptionForm(r *http.Request) *SubscriptionForm {
 		AllowSelfSignedCertificates: r.FormValue("allow_self_signed_certificates") == "1",
 		FetchViaProxy:               r.FormValue("fetch_via_proxy") == "1",
 		UserAgent:                   r.FormValue("user_agent"),
+		Cookie:                      r.FormValue("cookie"),
 		Username:                    r.FormValue("feed_username"),
 		Password:                    r.FormValue("feed_password"),
 		ScraperRules:                r.FormValue("scraper_rules"),
diff --git a/ui/subscription_choose.go b/ui/subscription_choose.go
index b8bded99..bc752efb 100644
--- a/ui/subscription_choose.go
+++ b/ui/subscription_choose.go
@@ -55,6 +55,7 @@ func (h *handler) showChooseSubscriptionPage(w http.ResponseWriter, r *http.Requ
 		Crawler:                     subscriptionForm.Crawler,
 		AllowSelfSignedCertificates: subscriptionForm.AllowSelfSignedCertificates,
 		UserAgent:                   subscriptionForm.UserAgent,
+		Cookie:                      subscriptionForm.Cookie,
 		Username:                    subscriptionForm.Username,
 		Password:                    subscriptionForm.Password,
 		ScraperRules:                subscriptionForm.ScraperRules,
diff --git a/ui/subscription_submit.go b/ui/subscription_submit.go
index 91f3d365..e8ad2939 100644
--- a/ui/subscription_submit.go
+++ b/ui/subscription_submit.go
@@ -55,6 +55,7 @@ func (h *handler) submitSubscription(w http.ResponseWriter, r *http.Request) {
 	subscriptions, findErr := subscription.FindSubscriptions(
 		subscriptionForm.URL,
 		subscriptionForm.UserAgent,
+		subscriptionForm.Cookie,
 		subscriptionForm.Username,
 		subscriptionForm.Password,
 		subscriptionForm.FetchViaProxy,
@@ -83,6 +84,7 @@ func (h *handler) submitSubscription(w http.ResponseWriter, r *http.Request) {
 		Crawler:                     subscriptionForm.Crawler,
 		AllowSelfSignedCertificates: subscriptionForm.AllowSelfSignedCertificates,
 		UserAgent:                   subscriptionForm.UserAgent,
+		Cookie:                      subscriptionForm.Cookie,
 		Username:                    subscriptionForm.Username,
 		Password:                    subscriptionForm.Password,
 		ScraperRules:                subscriptionForm.ScraperRules,
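End to end, the new field behaves like user_agent: it is persisted per feed, exposed over the REST API, and editable from the UI forms. A short sketch against the Go API client, mirroring the integration test above; the endpoint, credentials, and feed ID are placeholders, and the constructor call assumes the client package's usual miniflux.New pattern:

package main

import (
	"fmt"
	"log"

	miniflux "miniflux.app/client"
)

func main() {
	// Placeholders: endpoint, credentials, and the feed ID (42) are illustrative only.
	client := miniflux.New("https://miniflux.example.org", "admin", "secret")

	cookie := "sessionid=abc123; consent=1" // stored on the feed and replayed on every fetch
	feed, err := client.UpdateFeed(42, &miniflux.FeedModificationRequest{Cookie: &cookie})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(feed.Cookie)

	// Clearing the cookie again is an explicit empty string, not a nil pointer.
	empty := ""
	if _, err := client.UpdateFeed(42, &miniflux.FeedModificationRequest{Cookie: &empty}); err != nil {
		log.Fatal(err)
	}
}

On the server side the value flows through FeedModificationRequest.Patch into the feeds.cookie column added by the migration, and from there into client.WithCookie during feed refreshes and scraping.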