fix crash when trying to handle a file as a directory
commit 9e35ee218c
parent 6001894b0b
@@ -15,6 +15,8 @@ dist/
 *.so
 *.dylib
 
+pkg/
+
 # Test binary, built with `go test -c`
 *.test
 
@@ -0,0 +1,16 @@
+- [ ] Clean up code
+
+## Config
+
+- [ ] Add a wildcard option to restricted_download_paths to block all sub-directories
+- [ ] Add a dict to each restricted_download_paths item to specify how many levels recursive the block should be applied
+
+## API
+
+- [ ] Convert JSON to camelCase
+- [ ] On the client health page, show the time the initial crawl started
+- [ ] New sort options:
+  - [ ] Modified
+  - [ ] Size
+- [ ] Admin endpoint to fetch the last `n` modified files
+- [ ] Document API
@@ -144,6 +144,10 @@ func APIList(w http.ResponseWriter, r *http.Request) {
 }
 
 func generateListing(cacheItem *cacheitem.Item, paginationLimit int, sortType string, pageParam string) (*responseitem.ResponseItem, [][]*responseitem.ResponseItem, string) {
+	if !cacheItem.IsDir {
+		return nil, nil, "path is not a directory"
+	}
+
 	if sortType == "" {
 		panic("sortType was an empty string")
 	}
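The new guard reports the failure through `generateListing`'s string result instead of letting a file path fall through to directory-only code. Below is a minimal, self-contained sketch of how a caller such as `APIList` might surface that value as an HTTP error; the `Item` type and the handler are stand-ins, since the real `cacheitem`/`responseitem` types and the `APIList` body are not part of this diff:

```go
package main

import "net/http"

// Item is a stand-in for cacheitem.Item; only IsDir matters here.
type Item struct{ IsDir bool }

// generateListing mirrors the new early return: a file path is rejected
// before any directory-only pagination logic can run.
func generateListing(ci *Item) (any, string) {
	if !ci.IsDir {
		return nil, "path is not a directory"
	}
	return struct{}{}, ""
}

func main() {
	http.HandleFunc("/api/file/list", func(w http.ResponseWriter, r *http.Request) {
		// Pretend the requested path resolved to a file, not a directory.
		item, errStr := generateListing(&Item{IsDir: false})
		if errStr != "" {
			http.Error(w, errStr, http.StatusBadRequest) // clean 400 instead of a crash
			return
		}
		_ = item // render the directory listing here
	})
	http.ListenAndServe("127.0.0.1:8080", nil)
}
```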
@@ -196,6 +200,16 @@ func generateListing(cacheItem *cacheitem.Item, paginationLimit int, sortType st
 		pages = append(pages, item.Children[i:end])
 	}
 
+	// Handle case when there are fewer children than the paginationLimit
+	if len(pages) == 0 && len(item.Children) > 0 {
+		pages = append(pages, item.Children)
+	}
+
+	// Handle case when there are no pages or children
+	if len(pages) == 0 {
+		return nil, nil, "no pages available"
+	}
+
 	paginatedChildren := pages[page]
 
 	// Erase the children of the children so we aren't displaying things recursively.
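For context on the crash this hunk addresses: when a directory holds fewer children than `paginationLimit` (or none at all), the page-splitting loop can leave `pages` empty, and `pages[page]` then panics with an index out of range. A runnable sketch of the failure mode and the two new guards, with plain strings standing in for `*responseitem.ResponseItem` and a simplified splitting loop (assumptions, not the project's exact code):

```go
package main

import "fmt"

// paginate condenses the logic above: split children into pages, then apply
// the two new guards before indexing. Strings replace the real item type.
func paginate(children []string, limit, page int) ([]string, string) {
	var pages [][]string
	for i := 0; i+limit <= len(children); i += limit {
		pages = append(pages, children[i:i+limit])
	}
	// Guard 1: fewer children than the limit means the loop made no pages.
	if len(pages) == 0 && len(children) > 0 {
		pages = append(pages, children)
	}
	// Guard 2: no children at all; without this, pages[page] panics.
	if len(pages) == 0 {
		return nil, "no pages available"
	}
	return pages[page], ""
}

func main() {
	fmt.Println(paginate([]string{"a", "b"}, 50, 0)) // [a b] -- guard 1
	fmt.Println(paginate(nil, 50, 0))                // [] no pages available -- guard 2
}
```

Note that the guards only cover the empty-`pages` case; an out-of-range `page` value is presumably caught when `pageParam` is parsed, which this sketch assumes.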
todo.txt
@@ -1,8 +0,0 @@
-- on the client health page, show the time the initial crawl started
-
-
-Later:
-- Add a wildcard option to restricted_download_paths to block all sub-directories
-- Add a dict to each restricted_download_paths item to specify how many levels recursive the block should be applied
-- add a "last modified" to "sort" https://chub-archive.evulid.cc/api/file/list?path=/chub.ai/characters&page=1&limit=50&sort=folders
-- add an admin endpoint to fetch the last n modified files.