refactor to separate url fetching into function

BroodjeAap 2022-12-31 11:01:10 +00:00
parent 0c4431a73e
commit de7bb7c57a


@@ -247,30 +247,10 @@ func getFilterResultURL(filter *Filter, urlCache map[string]string, debug bool)
         filter.Results = append(filter.Results, val)
         return
     }
+    str, err := getURLContent(filter, fetchURL)
-    var httpClient *http.Client
-    if viper.IsSet("proxy.proxy_url") {
-        proxyUrl, err := url.Parse(viper.GetString("proxy.proxy_url"))
     if err != nil {
-        log.Println("Could not parse proxy url, check config")
-        filter.log("Could not parse proxy url, check config")
         return
     }
-        httpClient = &http.Client{Transport: &http.Transport{Proxy: http.ProxyURL(proxyUrl)}}
-    } else {
-        httpClient = &http.Client{}
-    }
-    resp, err := httpClient.Get(fetchURL)
-    if err != nil {
-        filter.log("Could not fetch url: ", fetchURL, " - ", err)
-        return
-    }
-    body, err := ioutil.ReadAll(resp.Body)
-    if err != nil {
-        filter.log("Could not fetch url: ", fetchURL, " - ", err)
-        return
-    }
-    str := string(body)
     filter.Results = append(filter.Results, str)
     if debug {
         urlCache[fetchURL] = str
@@ -287,13 +267,26 @@ func getFilterResultURLs(filter *Filter, urlCache map[string]string, debug bool)
                 continue
             }
+            str, err := getURLContent(filter, fetchURL)
+            if err != nil {
+                continue
+            }
+            filter.Results = append(filter.Results, str)
+            if debug {
+                urlCache[fetchURL] = str
+            }
+        }
+    }
+}
+
+func getURLContent(filter *Filter, fetchURL string) (string, error) {
     var httpClient *http.Client
     if viper.IsSet("proxy.proxy_url") {
         proxyUrl, err := url.Parse(viper.GetString("proxy.proxy_url"))
         if err != nil {
             log.Println("Could not parse proxy url, check config")
             filter.log("Could not parse proxy url, check config")
-            return
+            return "", err
         }
         httpClient = &http.Client{Transport: &http.Transport{Proxy: http.ProxyURL(proxyUrl)}}
     } else {
@@ -302,20 +295,14 @@ func getFilterResultURLs(filter *Filter, urlCache map[string]string, debug bool)
     resp, err := httpClient.Get(fetchURL)
     if err != nil {
         filter.log("Could not fetch url: ", fetchURL, " - ", err)
-        continue
+        return "", err
     }
     body, err := ioutil.ReadAll(resp.Body)
     if err != nil {
         filter.log("Could not fetch url: ", fetchURL, " - ", err)
-        continue
+        return "", err
-    }
-    str := string(body)
-    filter.Results = append(filter.Results, str)
-    if debug {
-        urlCache[fetchURL] = str
-    }
-}
     }
+    return string(body), nil
 }

 func getFilterResultXPath(filter *Filter) {
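
For reference, here is the extracted helper as it reads once the hunks above are applied, packaged as a minimal, self-contained sketch. The Filter stub and its log method are assumptions made only so the snippet compiles on its own; the real type lives elsewhere in this repository. The body of getURLContent is assembled directly from the diff, and main() is a hypothetical usage example, not project code.

package main

import (
	"fmt"
	"io/ioutil"
	"log"
	"net/http"
	"net/url"

	"github.com/spf13/viper"
)

// Minimal stand-in for the project's Filter type (assumption): only the
// pieces that getURLContent touches are sketched here.
type Filter struct {
	Results []string
	Logs    []string
}

func (f *Filter) log(args ...interface{}) {
	f.Logs = append(f.Logs, fmt.Sprint(args...))
}

// getURLContent, assembled from the hunks above: builds an http.Client
// (optionally behind the configured proxy), fetches fetchURL and returns
// the response body as a string.
func getURLContent(filter *Filter, fetchURL string) (string, error) {
	var httpClient *http.Client
	if viper.IsSet("proxy.proxy_url") {
		proxyUrl, err := url.Parse(viper.GetString("proxy.proxy_url"))
		if err != nil {
			log.Println("Could not parse proxy url, check config")
			filter.log("Could not parse proxy url, check config")
			return "", err
		}
		httpClient = &http.Client{Transport: &http.Transport{Proxy: http.ProxyURL(proxyUrl)}}
	} else {
		httpClient = &http.Client{}
	}
	resp, err := httpClient.Get(fetchURL)
	if err != nil {
		filter.log("Could not fetch url: ", fetchURL, " - ", err)
		return "", err
	}
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		filter.log("Could not fetch url: ", fetchURL, " - ", err)
		return "", err
	}
	return string(body), nil
}

func main() {
	// Usage mirroring the multi-URL call site: skip to the next URL on error.
	filter := &Filter{}
	for _, fetchURL := range []string{"https://example.com"} {
		str, err := getURLContent(filter, fetchURL)
		if err != nil {
			continue
		}
		filter.Results = append(filter.Results, str)
	}
	fmt.Println(len(filter.Results), "result(s) fetched")
}

Returning ("", err) instead of logging and bailing out inside the helper lets each caller keep its own control flow: getFilterResultURL returns on a failed fetch, while getFilterResultURLs continues with the next URL.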