fix minor things

This commit is contained in:
Cyberes 2023-12-12 18:00:43 -07:00
parent 8d08f04a4f
commit d16eaf614e
5 changed files with 6 additions and 15 deletions

View File

@ -6,7 +6,7 @@ import (
"net/http"
)
// WriteJsonResponse formats/prettifies the JSON response and handles any errors during transmission.
// WriteJsonResponse formats/prettifies or minifies the JSON response and handles any errors during transmission.
func WriteJsonResponse(response any, minified bool, w http.ResponseWriter, r *http.Request) {
var jsonResponse []byte
var err error

View File

@ -7,7 +7,6 @@ import (
"crazyfs/elastic"
"crypto/sha256"
"crypto/subtle"
"encoding/json"
"net/http"
)
@ -36,11 +35,7 @@ func AdminCacheInfo(w http.ResponseWriter, r *http.Request) {
}
w.Header().Set("Cache-Control", "no-store")
w.Header().Set("Content-Type", "application/json")
err := json.NewEncoder(w).Encode(response)
if err != nil {
log.Errorf("AdminCacheInfo - Failed to serialize JSON: %s", err)
return
}
helpers.WriteJsonResponse(response, false, w, r)
return
}
}

View File

@ -7,7 +7,6 @@ import (
"crazyfs/config"
"crypto/sha256"
"crypto/subtle"
"encoding/json"
"net/http"
)
@ -41,12 +40,7 @@ func AdminCrawlsInfo(w http.ResponseWriter, r *http.Request) {
}
w.Header().Set("Cache-Control", "no-store")
w.Header().Set("Content-Type", "application/json")
err := json.NewEncoder(w).Encode(response)
if err != nil {
log.Errorf("AdminCrawlsInfo - Failed to serialize JSON: %s", err)
helpers.Return500Msg(w)
return
}
helpers.WriteJsonResponse(response, false, w, r)
return
}
}

View File

@ -114,7 +114,7 @@ func main() {
cache.InitialCrawl()
duration := time.Since(start).Round(time.Second)
keys := SharedCache.Cache.Keys()
config.InitialCrawlElapsed = int(duration)
config.InitialCrawlElapsed = int(duration.Seconds())
log.Infof("Initial crawl completed in %s. %d items added to the cache.", duration, len(keys))
}

View File

@ -1,3 +1,5 @@
- Some way for the add to elastic callback to skip existing keys if not doing a full search
Later:
- Add a wildcard option to restricted_download_paths to block all sub-directories
- Add a dict to each restricted_download_paths item to specify how many directory levels deep the block should be applied