remove profiling
parent 82636792ea
commit 17c96e45c3
@@ -40,6 +40,7 @@ func (dc *DirectoryCrawler) processPath(fullPath string, info os.FileInfo) error
 	if found {
 		// Remove the old version of the directory from the parent's Children field
 		newChildren, foundOldDir := removeOldDir(parentItem.Children, relPath)
+
 		// Add the new version of the directory to the parent's Children field only if it wasn't found
 		if !foundOldDir {
 			parentItem.Children = append(newChildren, relPath)
@@ -15,7 +15,7 @@ import (
 	lru "github.com/hashicorp/golang-lru/v2"
 	"github.com/sirupsen/logrus"
 	"net/http"
-	_ "net/http/pprof"
+	//_ "net/http/pprof" // for profiling
 	"os"
 	"path/filepath"
 	"time"
@@ -153,9 +153,10 @@ func main() {
 		}
 	}
 
-	go func() {
-		log.Println(http.ListenAndServe("localhost:6060", nil))
-	}()
+	// For profiling
+	//go func() {
+	// log.Println(http.ListenAndServe("0.0.0.0:6060", nil))
+	//}()
 
 	select {}
 }
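Context for the change above: the blank import of net/http/pprof registers the /debug/pprof/* handlers on http.DefaultServeMux as a side effect, and the now-commented goroutine served that default mux on port 6060. If profiling ever needs to come back, re-enabling it is roughly the reverse of this commit; a minimal sketch (startPprof is a hypothetical helper name, and it assumes binding to localhost rather than 0.0.0.0):

```go
package main

import (
	"log"
	"net/http"
	_ "net/http/pprof" // side effect: registers /debug/pprof/* on http.DefaultServeMux
)

// startPprof serves the default mux (and with it the pprof handlers) in the
// background, bound to localhost so the debug endpoints are not exposed publicly.
func startPprof() {
	go func() {
		log.Println(http.ListenAndServe("localhost:6060", nil))
	}()
}
```

Profiles could then be pulled with, for example, go tool pprof http://localhost:6060/debug/pprof/heap.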
@@ -13,12 +13,11 @@ func ElasticsearchThread(sharedCache *lru.Cache[string, *CacheItem.Item]) {
 	createCrazyfsIndex()
 
 	// Test connection to Elastic.
-	esContents, err := getPathsFromIndex()
+	esSize, err := getElasticSize()
 	if err != nil {
 		logElasticConnError(err)
 		return
 	}
-	esSize := len(esContents)
 	log.Infof(`ELASTIC - index "%s" contains %d items.`, config.GetConfig().ElasticsearchIndex, esSize)
 
 	var wg sync.WaitGroup
@@ -53,6 +52,8 @@ func ElasticsearchThread(sharedCache *lru.Cache[string, *CacheItem.Item]) {
 	}
 }
 
+// TODO: make this use workers instead of starting a million threads
+// TODO: have the workers exit when the sync job is finished
 func syncElasticsearch(sharedCache *lru.Cache[string, *CacheItem.Item], wg *sync.WaitGroup, sem chan bool, fullSync bool) {
 	var syncType string
 	var esContents []string
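The two new TODOs describe the intended direction: a bounded pool of workers draining a queue instead of one goroutine per path, with the workers exiting once the queue closes. A rough sketch of that pattern; the names syncWithWorkers, workerCount, paths, and addToElasticsearch are placeholders, not the project's actual API:

```go
package main

import "sync"

// syncWithWorkers illustrates the worker-pool shape the TODOs describe:
// a fixed number of goroutines consume from a channel and all exit once
// the channel is closed after the last path has been queued.
func syncWithWorkers(paths []string, workerCount int, addToElasticsearch func(string)) {
	jobs := make(chan string)
	var wg sync.WaitGroup

	for i := 0; i < workerCount; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for p := range jobs {
				addToElasticsearch(p)
			}
		}()
	}

	for _, p := range paths {
		jobs <- p
	}
	close(jobs) // lets the range loops finish, so the workers exit with the sync job
	wg.Wait()
}
```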
@@ -90,7 +91,6 @@ func syncElasticsearch(sharedCache *lru.Cache[string, *CacheItem.Item], wg *sync
 			}
 		} else {
 			deleteFromElasticsearch(key) // clean up
-			//log.Debugf(`ELASTIC - skipping adding "%s"`, key)
 		}
 	}
 	<-sem
@@ -10,6 +10,14 @@ import (
 	"time"
 )
 
+func getElasticSize() (int, error) {
+	esContents, err := getPathsFromIndex()
+	if err != nil {
+		return -1, nil
+	}
+	return len(esContents), nil
+}
+
 func getPathsFromIndex() ([]string, error) {
 	// This may take a bit if the index is very large, so avoid calling this.
 
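One detail of the new helper worth flagging: when getPathsFromIndex fails it returns -1, nil, so the err != nil branch in ElasticsearchThread (logElasticConnError plus the early return) can never fire. A sketch of the same helper with the error propagated instead of swallowed, assuming it lives alongside getPathsFromIndex in the same package:

```go
// Same shape as the helper added above, but returning the underlying error
// so the caller's connection-error handling can actually trigger.
func getElasticSize() (int, error) {
	esContents, err := getPathsFromIndex()
	if err != nil {
		return -1, err
	}
	return len(esContents), nil
}
```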