added proxy support for binary usage

parent dca81beea2
commit a4cebb28b0
2 changed files with 41 additions and 13 deletions

@@ -37,4 +37,6 @@ notifiers:
     path: /config/notifications.log
 database:
   dsn: "/config/watch.db" # for docker usage
   prune: "@every 1h"
+proxy:
+  proxy_url: http://proxy.com:1234
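The new proxy block is optional. Below is a minimal sketch of how the setting is picked up through viper, assuming the config file above has already been located; the path passed to SetConfigFile is illustrative only and not taken from this commit.

package main

import (
	"fmt"

	"github.com/spf13/viper"
)

func main() {
	// Illustrative path; the real binary loads its config elsewhere at startup.
	viper.SetConfigFile("/config/config.yaml")
	if err := viper.ReadInConfig(); err != nil {
		fmt.Println("could not read config:", err)
		return
	}

	// proxy.proxy_url is the key this commit introduces.
	if viper.IsSet("proxy.proxy_url") {
		fmt.Println("proxy configured:", viper.GetString("proxy.proxy_url"))
	} else {
		fmt.Println("no proxy configured, requests go out directly")
	}
}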
scraping.go (50 changed lines)

@@ -8,6 +8,7 @@ import (
 	"log"
 	"math"
 	"net/http"
+	"net/url"
 	"regexp"
 	"strconv"
 	"strings"
@@ -16,6 +17,7 @@ import (
 	"github.com/andybalholm/cascadia"
 	"github.com/antchfx/htmlquery"
 	"github.com/robfig/cron/v3"
+	"github.com/spf13/viper"
 	"github.com/tidwall/gjson"
 	lualibs "github.com/vadv/gopher-lua-libs"
 	lua "github.com/yuin/gopher-lua"
@@ -239,54 +241,78 @@ func getFilterResult(filters []Filter, filter *Filter, watch *Watch, web *Web, d
 }
 
 func getFilterResultURL(filter *Filter, urlCache map[string]string, debug bool) {
-	url := filter.Var1
-	val, exists := urlCache[url]
+	fetchURL := filter.Var1
+	val, exists := urlCache[fetchURL]
 	if debug && exists {
 		filter.Results = append(filter.Results, val)
 		return
 	}
 
-	resp, err := http.Get(url)
+	var httpClient *http.Client
+	if viper.IsSet("proxy.proxy_url") {
+		proxyUrl, err := url.Parse(viper.GetString("proxy.proxy_url"))
+		if err != nil {
+			log.Println("Could not parse proxy url, check config")
+			filter.log("Could not parse proxy url, check config")
+			return
+		}
+		httpClient = &http.Client{Transport: &http.Transport{Proxy: http.ProxyURL(proxyUrl)}}
+	} else {
+		httpClient = &http.Client{}
+	}
+	resp, err := httpClient.Get(fetchURL)
 	if err != nil {
-		filter.log("Could not fetch url: ", url, " - ", err)
+		filter.log("Could not fetch url: ", fetchURL, " - ", err)
 		return
 	}
 	body, err := ioutil.ReadAll(resp.Body)
 	if err != nil {
-		filter.log("Could not fetch url: ", url, " - ", err)
+		filter.log("Could not fetch url: ", fetchURL, " - ", err)
 		return
 	}
 	str := string(body)
 	filter.Results = append(filter.Results, str)
 	if debug {
-		urlCache[url] = str
+		urlCache[fetchURL] = str
 	}
 }
 
 func getFilterResultURLs(filter *Filter, urlCache map[string]string, debug bool) {
 	for _, parent := range filter.Parents {
 		for _, result := range parent.Results {
-			url := result
-			val, exists := urlCache[url]
+			fetchURL := result
+			val, exists := urlCache[fetchURL]
 			if debug && exists {
 				filter.Results = append(filter.Results, val)
 				continue
 			}
 
-			resp, err := http.Get(url)
+			var httpClient *http.Client
+			if viper.IsSet("proxy.proxy_url") {
+				proxyUrl, err := url.Parse(viper.GetString("proxy.proxy_url"))
+				if err != nil {
+					log.Println("Could not parse proxy url, check config")
+					filter.log("Could not parse proxy url, check config")
+					return
+				}
+				httpClient = &http.Client{Transport: &http.Transport{Proxy: http.ProxyURL(proxyUrl)}}
+			} else {
+				httpClient = &http.Client{}
+			}
+			resp, err := httpClient.Get(fetchURL)
 			if err != nil {
-				filter.log("Could not fetch url: ", url, " - ", err)
+				filter.log("Could not fetch url: ", fetchURL, " - ", err)
 				continue
 			}
 			body, err := ioutil.ReadAll(resp.Body)
 			if err != nil {
-				filter.log("Could not fetch url: ", url, " - ", err)
+				filter.log("Could not fetch url: ", fetchURL, " - ", err)
 				continue
 			}
 			str := string(body)
 			filter.Results = append(filter.Results, str)
 			if debug {
-				urlCache[url] = str
+				urlCache[fetchURL] = str
 			}
 		}
 	}
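Both functions now build the HTTP client with the same inline logic. The following is a minimal, self-contained sketch of that logic pulled into a shared helper; the function name newHTTPClient is hypothetical and not part of this commit, the viper key matches the config added above, and the proxy and target URLs in main are examples only.

package main

import (
	"fmt"
	"io/ioutil"
	"log"
	"net/http"
	"net/url"

	"github.com/spf13/viper"
)

// newHTTPClient returns a client that routes requests through proxy.proxy_url
// when the key is set, and a plain default client otherwise.
// Hypothetical helper mirroring the inline code added in scraping.go.
func newHTTPClient() (*http.Client, error) {
	if !viper.IsSet("proxy.proxy_url") {
		return &http.Client{}, nil
	}
	proxyURL, err := url.Parse(viper.GetString("proxy.proxy_url"))
	if err != nil {
		return nil, fmt.Errorf("could not parse proxy url, check config: %w", err)
	}
	return &http.Client{Transport: &http.Transport{Proxy: http.ProxyURL(proxyURL)}}, nil
}

func main() {
	// Stand-in for the value loaded from the config file above.
	viper.Set("proxy.proxy_url", "http://proxy.com:1234")

	client, err := newHTTPClient()
	if err != nil {
		log.Fatal(err)
	}
	resp, err := client.Get("http://example.com") // example target URL
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(len(body), "bytes fetched via", viper.GetString("proxy.proxy_url"))
}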