added url caching for viewing/editing watches

BroodjeAap 2022-10-01 11:24:30 +00:00
parent 5b31802ceb
commit 94856b6f00
2 changed files with 22 additions and 10 deletions
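The change threads an in-memory map[string]string through the filter pipeline so that a URL fetched for one filter can be reused by later filters, controlled by the new useCache/setCache flags. A minimal sketch of that lookup-or-fetch-and-store pattern; cachedGet, the example URL, and the main() wiring are illustrative stand-ins, not code from this repository:

```go
package main

import (
	"fmt"
	"io"
	"log"
	"net/http"
)

// cachedGet fetches url, consulting cache first when useCache is true and
// storing the fresh body when setCache is true. It mirrors the lookup/store
// flow added to getFilterResultURL in this commit, but is only a sketch.
func cachedGet(url string, cache map[string]string, useCache, setCache bool) (string, error) {
	if body, ok := cache[url]; useCache && ok {
		return body, nil // cache hit: no HTTP request is made
	}
	resp, err := http.Get(url)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	raw, err := io.ReadAll(resp.Body)
	if err != nil {
		return "", err
	}
	body := string(raw)
	if setCache {
		cache[url] = body
	}
	return body, nil
}

func main() {
	cache := make(map[string]string, 5)
	for i := 0; i < 2; i++ {
		// The second iteration is served from the cache.
		body, err := cachedGet("https://example.com", cache, true, true)
		if err != nil {
			log.Fatal(err)
		}
		fmt.Println("iteration", i, "got", len(body), "bytes")
	}
}
```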


@@ -22,7 +22,8 @@ var newWatchHTML = filepath.Join("templates", "newWatch.html")
 type Web struct {
     //Bot *tgbotapi.BotAPI
-    db *gorm.DB
+    urlCache map[string]string
+    db       *gorm.DB
 }
 func (web Web) index(c *gin.Context) {
@@ -76,7 +77,7 @@ func (web Web) watchView(c *gin.Context) {
     web.db.Model(&FilterOutput{}).Where("watch_id = ?", watch.ID).Find(&values)
     buildFilterTree(filters, connections)
-    processFilters(filters, web.db)
+    processFilters(filters, web.db, web.urlCache, true, true)
     c.HTML(http.StatusOK, "watchView", gin.H{
         "Watch": watch,
@@ -174,7 +175,8 @@ func main() {
     web := Web{
         //bot,
-        db,
+        db:       db,
+        urlCache: make(map[string]string, 5),
     }
     router := gin.Default()


@@ -19,7 +19,7 @@ import (
     "gorm.io/gorm"
 )
-func processFilters(filters []Filter, db *gorm.DB) {
+func processFilters(filters []Filter, db *gorm.DB, urlCache map[string]string, useCache bool, setCache bool) {
     processedMap := make(map[uint]bool, len(filters))
     for len(filters) > 0 {
         filter := &filters[0]
@@ -35,20 +35,20 @@ func processFilters(filters []Filter, db *gorm.DB) {
             filters = append(filters, *filter)
             continue
         }
-        getFilterResult(filter, db)
+        getFilterResult(filter, db, urlCache, useCache, setCache)
         processedMap[filter.ID] = true
     }
 }
-func getFilterResult(filter *Filter, db *gorm.DB) {
+func getFilterResult(filter *Filter, db *gorm.DB, urlCache map[string]string, useCache bool, setCache bool) {
     switch {
     case filter.Type == "gurl":
         {
-            getFilterResultURL(filter)
+            getFilterResultURL(filter, urlCache, useCache, setCache)
         }
     case filter.Type == "gurls":
         {
-            getFilterResultURL(filter)
+            getFilterResultURL(filter, urlCache, useCache, setCache)
         }
     case filter.Type == "xpath":
         {
@@ -145,8 +145,14 @@ func getFilterResult(filter *Filter, db *gorm.DB) {
     }
 }
-func getFilterResultURL(filter *Filter) {
+func getFilterResultURL(filter *Filter, urlCache map[string]string, useCache bool, setCache bool) {
     url := filter.Var1
+    val, exists := urlCache[url]
+    if useCache && exists {
+        filter.Results = append(filter.Results, val)
+        return
+    }
     resp, err := http.Get(url)
     if err != nil {
         log.Println("Could not fetch url", url)
@@ -157,7 +163,11 @@ func getFilterResultURL(filter *Filter) {
         log.Println("Could not fetch url", url)
         log.Println("Reason:", err)
     }
-    filter.Results = append(filter.Results, string(body))
+    str := string(body)
+    filter.Results = append(filter.Results, str)
+    if setCache {
+        urlCache[url] = str
+    }
 }
 func getFilterResultXPath(filter *Filter) {
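getFilterResultURL depends on the repository's Filter type, so the following self-contained sketch uses a stand-in filter struct and an httptest server to show the observable effect of the change: with useCache and setCache enabled, two filters pointing at the same URL result in a single HTTP request. The names filter and fetchInto are illustrative, not from the repository; fetchInto simply mirrors the cached fetch that getFilterResultURL now performs.

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"net/http/httptest"
)

// filter stands in for the repository's Filter type: Var1 holds the URL,
// Results collects fetched bodies. Illustrative only.
type filter struct {
	Var1    string
	Results []string
}

// fetchInto mirrors the cached fetch added to getFilterResultURL above.
func fetchInto(f *filter, urlCache map[string]string, useCache, setCache bool) {
	url := f.Var1
	if val, ok := urlCache[url]; useCache && ok {
		f.Results = append(f.Results, val) // reuse the cached body
		return
	}
	resp, err := http.Get(url)
	if err != nil {
		fmt.Println("Could not fetch url", url, "reason:", err)
		return
	}
	defer resp.Body.Close()
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		fmt.Println("Could not read body of", url, "reason:", err)
		return
	}
	str := string(body)
	f.Results = append(f.Results, str)
	if setCache {
		urlCache[url] = str
	}
}

func main() {
	// Count how many requests actually reach the test server.
	requests := 0
	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		requests++
		fmt.Fprint(w, "hello")
	}))
	defer srv.Close()

	urlCache := make(map[string]string, 5)
	a := &filter{Var1: srv.URL}
	b := &filter{Var1: srv.URL}
	fetchInto(a, urlCache, true, true)
	fetchInto(b, urlCache, true, true) // second filter is served from the cache

	fmt.Println("HTTP requests made:", requests) // prints 1
	fmt.Println("results:", a.Results, b.Results) // both ["hello"]
}
```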