From 01f678c3b1f4f8d1214f3373fd4402772ffe50c0 Mon Sep 17 00:00:00 2001
From: hulb
Date: Sat, 28 Aug 2021 17:30:04 +0800
Subject: [PATCH] add proxy arg in scraper.Fetch

---
 reader/processor/processor.go | 2 ++
 reader/scraper/scraper.go     | 5 ++++-
 2 files changed, 6 insertions(+), 1 deletion(-)

diff --git a/reader/processor/processor.go b/reader/processor/processor.go
index c9999978..c36a2046 100644
--- a/reader/processor/processor.go
+++ b/reader/processor/processor.go
@@ -56,6 +56,7 @@ func ProcessFeedEntries(store *storage.Storage, feed *model.Feed) {
 		feed.UserAgent,
 		feed.Cookie,
 		feed.AllowSelfSignedCertificates,
+		feed.FetchViaProxy,
 	)
 
 	if config.Opts.HasMetricsCollector() {
@@ -118,6 +119,7 @@ func ProcessEntryWebPage(feed *model.Feed, entry *model.Entry) error {
 		entry.Feed.UserAgent,
 		entry.Feed.Cookie,
 		feed.AllowSelfSignedCertificates,
+		feed.FetchViaProxy,
 	)
 
 	if config.Opts.HasMetricsCollector() {
diff --git a/reader/scraper/scraper.go b/reader/scraper/scraper.go
index 7b482b10..dba31223 100644
--- a/reader/scraper/scraper.go
+++ b/reader/scraper/scraper.go
@@ -20,10 +20,13 @@ import (
 )
 
 // Fetch downloads a web page and returns relevant contents.
-func Fetch(websiteURL, rules, userAgent string, cookie string, allowSelfSignedCertificates bool) (string, error) {
+func Fetch(websiteURL, rules, userAgent string, cookie string, allowSelfSignedCertificates, useProxy bool) (string, error) {
 	clt := client.NewClientWithConfig(websiteURL, config.Opts)
 	clt.WithUserAgent(userAgent)
 	clt.WithCookie(cookie)
+	if useProxy {
+		clt.WithProxy()
+	}
 	clt.AllowSelfSignedCertificates = allowSelfSignedCertificates
 
 	response, err := clt.Get()