use ResponseItem instead of CacheItem.Item when doing HTTP stuff

Cyberes 2024-03-15 19:29:00 -06:00
parent 115bd60ec5
commit 7fa7c362d5
4 changed files with 47 additions and 58 deletions

View File

@@ -13,6 +13,7 @@ mkdir -p "$SCRIPT_DIR/dist"
cd "$SCRIPT_DIR/src" || exit 1
go build -v -trimpath -ldflags "-s -w -X main.VersionDate=$(date -u --iso-8601=minutes) -X main.Version=v$VERSION" -o "$SCRIPT_DIR/dist/crazyfs"
if [ $? -eq 0 ]; then
chmod +x "$SCRIPT_DIR/dist/crazyfs"
echo "Finished building -> $SCRIPT_DIR/dist/crazyfs"
fi

View File

@@ -7,7 +7,7 @@ crawl_mode_crawl_interval: 3600 # seconds
watch_interval: 2 # seconds
directory_crawlers: 10
crawl_workers: 200
crawl_workers: 1000
cache_size: 100000000
cache_time: 30 # minutes
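
The functional change here is the worker bump from 200 to 1000. As a rough sketch (illustrative only, not the project's code, and assuming crawl_workers sizes a goroutine pool), a setting like this typically feeds a bounded worker pool:

package main

import (
	"fmt"
	"sync"
)

// crawlAll fans paths out to a fixed number of workers; crawlWorkers
// stands in for the crawl_workers config value. All names are illustrative.
func crawlAll(paths []string, crawlWorkers int) {
	jobs := make(chan string)
	var wg sync.WaitGroup
	for i := 0; i < crawlWorkers; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for p := range jobs {
				fmt.Println("crawling", p) // stand-in for the real crawl work
			}
		}()
	}
	for _, p := range paths {
		jobs <- p
	}
	close(jobs)
	wg.Wait()
}

func main() {
	crawlAll([]string{"/a", "/b", "/c"}, 4)
}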

View File

@@ -16,17 +16,20 @@ func init() {
log = logging.GetLogger()
}
// ResponseItem is what is returned by the HTTP API as a JSON object.
// We don't return a `CacheItem.Item` because having a separate `ResponseItem`
// object allows us to customize the structure without messing with the original item.
type ResponseItem struct {
Path string `json:"path"`
Name string `json:"name"`
Size int64 `json:"size"`
Extension *string `json:"extension"`
Modified string `json:"modified"`
Mode uint32 `json:"mode"`
IsDir bool `json:"isDir"`
IsSymlink bool `json:"isSymlink"`
Type *string `json:"type"`
Children []*CacheItem.Item `json:"children"`
MimeType *string `json:"type"`
Encoding *string `json:"encoding"`
Children []*ResponseItem `json:"children"`
Content string `json:"content,omitempty"`
Cached int64 `json:"cached"`
}
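
For a concrete picture of the new wire format, here is a minimal sketch using a trimmed copy of the struct and made-up values. Note that MimeType serializes under the "type" key, content disappears when empty thanks to omitempty, and an empty (non-nil) Children slice encodes as [] rather than null:

package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed copy of ResponseItem, just to demonstrate the JSON shape.
type ResponseItem struct {
	Path     string          `json:"path"`
	Name     string          `json:"name"`
	Size     int64           `json:"size"`
	IsDir    bool            `json:"isDir"`
	MimeType *string         `json:"type"`
	Children []*ResponseItem `json:"children"`
	Content  string          `json:"content,omitempty"`
	Cached   int64           `json:"cached"`
}

func main() {
	mime := "text/plain"
	item := &ResponseItem{
		Path:     "docs/readme.txt",
		Name:     "readme.txt",
		Size:     42,
		MimeType: &mime,
		Children: []*ResponseItem{}, // empty but non-nil, so JSON gets [] instead of null
		Cached:   1710555000,
	}
	out, _ := json.MarshalIndent(item, "", "  ")
	fmt.Println(string(out))
}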
@@ -60,26 +63,25 @@ func NewResponseItem(cacheItem *CacheItem.Item) *ResponseItem {
Size: cacheItem.Size,
Extension: cacheItem.Extension,
Modified: cacheItem.Modified,
Mode: cacheItem.Mode,
IsDir: cacheItem.IsDir,
IsSymlink: cacheItem.IsSymlink,
Cached: cacheItem.Cached,
Children: make([]*CacheItem.Item, len(cacheItem.Children)),
Type: cacheItem.MimeType,
Children: make([]*ResponseItem, len(cacheItem.Children)),
MimeType: cacheItem.MimeType,
}
// Grab the children from the cache and add them to this new newResponseItem
if len(cacheItem.Children) > 0 { // avoid a null entry for the children key in the JSON
var children []*CacheItem.Item
// Grab the children from the cache and add them to newResponseItem.
if len(cacheItem.Children) > 0 { // avoid a null entry for the children key in the JSON.
var children []*ResponseItem
for _, child := range cacheItem.Children {
childItem, found := SharedCache.Cache.Get(child)
// Do a quick crawl since the path could have been modified, since the last crawl.
// If the path wasn't found, do a quick crawl since the path could have been modified since the last crawl.
// This can also be triggered if we encounter a broken symlink. We don't check for broken symlinks when scanning
// because that would be an extra os.Lstat() call in processPath().
if !found {
crawlRelPath := filepath.Join(config.GetConfig().RootDir, child)
log.Debugf(`CRAWLER - "%s" ("%s") not in cache, crawling`, child, crawlRelPath)
log.Debugf(`CRAWLER - "%s" ("%s") not in cache, crawling.`, child, crawlRelPath)
dc := DirectoryCrawler.NewDirectoryCrawler()
item, err := dc.CrawlNoRecursion(crawlRelPath)
if err != nil {
@@ -87,21 +89,19 @@ func NewResponseItem(cacheItem *CacheItem.Item) *ResponseItem {
continue // skip this child
}
if item == nil {
log.Debugf("NewResponseItem - CrawlNoRecursion - not found %s - likely broken symlink", child)
log.Debugf(`NewResponseItem - CrawlNoRecursion - not found: "%s". Likely broken symlink`, child)
continue
}
// Update the `childItem` var with the newly cached item.
childItem = item
childItem = item // Update the `childItem` var with the newly cached item.
}
if childItem != nil {
copiedChildItem := &CacheItem.Item{
if childItem != nil { // Double-check that we actually got an item back.
copiedChildItem := &ResponseItem{
Path: childItem.Path,
Name: childItem.Name,
Size: childItem.Size,
Extension: childItem.Extension,
Modified: childItem.Modified,
Mode: childItem.Mode,
IsDir: childItem.IsDir,
IsSymlink: childItem.IsSymlink,
Cached: childItem.Cached,
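
The field-by-field copy above is the point of the exercise: ListDir (below) later nils out Children on the response, and doing that through a pointer pulled straight from the cache would corrupt the cached entry. A toy illustration of the aliasing bug the copy avoids (hypothetical types, not the project's code):

package main

import "fmt"

type Item struct {
	Name     string
	Children []*Item
}

func main() {
	cached := &Item{Name: "dir", Children: []*Item{{Name: "file"}}}

	// Aliasing the cached pointer: clearing Children here also clears
	// them for every future reader of the cache.
	alias := cached
	alias.Children = nil
	fmt.Println(len(cached.Children)) // 0 -- the cached entry is corrupted

	// Copying first keeps the cached entry intact.
	cached.Children = []*Item{{Name: "file"}}
	copied := *cached
	copied.Children = nil
	fmt.Println(len(cached.Children)) // 1
}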

View File

@@ -1,7 +1,6 @@
package file
import (
"crazyfs/CacheItem"
"crazyfs/ResponseItem"
"crazyfs/SharedCache"
"crazyfs/api/helpers"
@@ -44,29 +43,27 @@ func ListDir(w http.ResponseWriter, r *http.Request) {
return
}
relPath := file.StripRootDir(fullPath)
// Try to get the data from the cache
relPath := file.StripRootDir(fullPath)
cacheItem, found := SharedCache.Cache.Get(relPath)
if !found {
cacheItem = helpers.HandleFileNotFound(relPath, fullPath, w)
}
if cacheItem == nil {
return // The errors have already been handled in handleFileNotFound() so we're good to just exit
// The errors have already been handled in HandleFileNotFound(), so we're good to just exit.
return
}
// Create a copy of the cached Item, so we don't modify the Item in the cache
item := ResponseItem.NewResponseItem(cacheItem)
// Get the MIME type of the file if the 'mime' argument is present
mime := r.URL.Query().Get("mime")
if mime != "" {
if item.IsDir && !config.GetConfig().HttpAllowDirMimeParse {
if cacheItem.IsDir && !config.GetConfig().HttpAllowDirMimeParse {
helpers.Return403Msg("not allowed to analyze the mime of directories", w)
return
} else {
// Only update the mime in the cache if it hasn't been set already.
// TODO: need to make sure that when a re-crawl is triggered, the MimeType is set back to nil
if item.Type == nil {
if cacheItem.MimeType == nil {
fileExists, mimeType, ext, err := file.GetMimeType(fullPath, true, nil)
if !fileExists {
helpers.ReturnFake404Msg("file not found", w)
@@ -84,6 +81,9 @@ func ListDir(w http.ResponseWriter, r *http.Request) {
}
}
// Create a copy of the cached Item, so we don't modify the Item in the cache
item := ResponseItem.NewResponseItem(cacheItem)
response := map[string]interface{}{}
// Pagination
@@ -114,7 +114,7 @@ func ListDir(w http.ResponseWriter, r *http.Request) {
}
if folderSorting == "folders" {
var dirs, files []*CacheItem.Item
var dirs, files []*ResponseItem.ResponseItem
for _, child := range item.Children {
if child.IsDir {
dirs = append(dirs, child)
@@ -126,18 +126,18 @@ func ListDir(w http.ResponseWriter, r *http.Request) {
}
// Set the children to an empty array so that the JSON encoder doesn't return it as nil
var paginatedChildren []*CacheItem.Item // this var is either the full CacheItem list or a paginated list depending on the query args
var paginatedChildren []*ResponseItem.ResponseItem // this var is either the full ResponseItem list or a paginated list, depending on the query args
if item.Children != nil {
paginatedChildren = item.Children
} else {
paginatedChildren = make([]*CacheItem.Item, 0)
paginatedChildren = make([]*ResponseItem.ResponseItem, 0)
}
pageParam := r.URL.Query().Get("page")
if pageParam != "" {
page, err := strconv.Atoi(pageParam)
if err != nil || page < 1 || page > totalPages {
// Don't return an error, just trunucate things
// Don't return an error, just truncate things
page = totalPages
}
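
The hunk above clamps an out-of-range page to the last page instead of returning an error. A minimal sketch of the slicing a page parameter like this typically drives (a hypothetical helper, not the project's code):

package main

import "fmt"

// paginate returns the 1-indexed page of items, clamping an out-of-range
// page to the last page instead of erroring, mirroring the truncation above.
func paginate[T any](items []T, page, perPage int) []T {
	if perPage < 1 || len(items) == 0 {
		return items
	}
	totalPages := (len(items) + perPage - 1) / perPage
	if page < 1 || page > totalPages {
		page = totalPages
	}
	start := (page - 1) * perPage
	end := start + perPage
	if end > len(items) {
		end = len(items)
	}
	return items[start:end]
}

func main() {
	files := []string{"a", "b", "c", "d", "e"}
	fmt.Println(paginate(files, 2, 2))  // [c d]
	fmt.Println(paginate(files, 99, 2)) // clamped to the last page: [e]
}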
@@ -155,24 +155,12 @@ func ListDir(w http.ResponseWriter, r *http.Request) {
}
}
// Erase the children of the children, so we aren't displaying things recursively.
// Erase the children of the children so we aren't displaying things recursively.
for i := range paginatedChildren {
paginatedChildren[i].Children = nil
}
response["item"] = map[string]interface{}{
"path": item.Path,
"name": item.Name,
"size": item.Size,
"extension": item.Extension,
"modified": item.Modified,
"mode": item.Mode,
"isDir": item.IsDir,
"isSymlink": item.IsSymlink,
"cached": item.Cached,
"children": paginatedChildren,
"type": item.Type,
}
item.Children = paginatedChildren
response["item"] = item
helpers.WriteJsonResponse(response, true, w, r)
}