updated to better index files
parent 8d6c468a0c · commit 126561a464
Dockerfile (12 lines changed)

@@ -7,24 +7,16 @@ RUN npm run build
FROM golang:alpine as base
WORKDIR /app
COPY ./src/backend ./
RUN go build -ldflags='-w -s' -o filebrowser .
RUN go build -ldflags="-w -s" -o filebrowser .

FROM alpine:latest
RUN apk --no-cache add \
    ca-certificates \
    mailcap \
    curl

HEALTHCHECK --start-period=2s --interval=5s --timeout=3s \
    CMD curl -f http://localhost/health || exit 1

mailcap
VOLUME /srv
EXPOSE 80

WORKDIR /app

COPY --from=base /app/docker_config.json ./.filebrowser.json
COPY --from=base /app/filebrowser ./filebrowser
COPY --from=nbuild /app/dist/ ./frontend/dist/

ENTRYPOINT [ "./filebrowser" ]
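The new HEALTHCHECK probes http://localhost/health with curl, so the backend is expected to answer on that route with a 2xx status. A minimal sketch of such an endpoint, with hypothetical handler names; how filebrowser actually registers its routes is not shown in this diff:

package main

import (
    "fmt"
    "net/http"
)

// healthHandler is a hypothetical stand-in for whatever endpoint the image's
// HEALTHCHECK hits: any 2xx response makes `curl -f` exit 0.
func healthHandler(w http.ResponseWriter, r *http.Request) {
    w.WriteHeader(http.StatusOK)
    fmt.Fprintln(w, "OK")
}

func main() {
    http.HandleFunc("/health", healthHandler)
    // :80 matches EXPOSE 80 and the URL used by the healthcheck.
    http.ListenAndServe(":80", nil)
}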
README.md (12 lines changed)

@@ -5,18 +5,18 @@
This fork makes the following significant changes to filebrowser for origin:

1. [x] Improves search to use index instead of filesystem.
   - [x] lightning fast
   - [ ] realtime results as you type
   - [x] Lightning fast
   - [x] Realtime results as you type
   - [ ] Works with file type filter
1. [ ] Preview enhancements
   - preview default view is constrained to files subwindow,
     which can be toggled to fullscreen.
1. [ ] Updated node version and dependencies
1. [ ] Updated version and dependencies
   - [ ] uses latest npm and node version
   - [ ] removes deprecated npm packages
1. [ ] Improved routing
   - fixed bugs in original version
   - [x] Updates golang dependencies
1. [ ] Added authentication type
   - Using bearer token with remote authentication server
   - [ ] Using bearer token with remote authentication server

## About
@@ -13,6 +13,7 @@ import (
    "path/filepath"
    "strings"
    "syscall"
    "strconv"

    homedir "github.com/mitchellh/go-homedir"
    "github.com/spf13/afero"

@@ -49,13 +50,9 @@ func init() {
    rootCmd.SetVersionTemplate("File Browser version {{printf \"%s\" .Version}}\n")

    flags := rootCmd.Flags()
    // initialize indexing and schedule indexing every n minutes
    indexInterval, err := flags.GetUint32("indexing-interval") //nolint:govet
    if err != nil {
        log.Println(err)
        indexInterval = 60
    }
    go search.InitializeIndex(indexInterval)
    // initialize indexing and schedule indexing every n minutes (default 5)
    indexingInterval := getEnvVariableAsUint32("INDEXING_INTERVAL")
    go search.InitializeIndex(indexingInterval)

    persistent := rootCmd.PersistentFlags()

@@ -68,6 +65,15 @@ func init() {
    addServerFlags(flags)
}

func getEnvVariableAsUint32(key string) uint32 {
    valueStr := os.Getenv(key)
    value, err := strconv.ParseUint(valueStr, 10, 32)
    if err != nil {
        return 5 // default value every 5 minutes
    }
    return uint32(value)
}

func addServerFlags(flags *pflag.FlagSet) {
    flags.StringP("address", "a", "127.0.0.1", "address to listen on")
    flags.StringP("log", "l", "stdout", "log output")

@@ -77,7 +83,6 @@ func addServerFlags(flags *pflag.FlagSet) {
    flags.StringP("root", "r", ".", "root to prepend to relative paths")
    flags.String("socket", "", "socket to listen to (cannot be used with address, port, cert nor key flags)")
    flags.Uint32("socket-perm", 0666, "unix socket file permissions") //nolint:gomnd
    flags.Uint32("indexing-interval", 60, "how frequently to index files, in minutes") //nolint:gomnd
    flags.StringP("baseurl", "b", "", "base url")
    flags.String("cache-dir", "", "file cache directory (disabled if empty)")
    flags.Int("img-processors", 4, "image processors count") //nolint:gomnd
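For context, the removed --indexing-interval flag is replaced by an INDEXING_INTERVAL environment variable parsed by getEnvVariableAsUint32 above, which falls back to 5 minutes when the variable is unset or not a valid uint32. A small self-contained sketch of that behaviour; the docker-style usage in the comment is an assumption:

package main

import (
    "fmt"
    "os"
    "strconv"
)

// Same parsing rule as getEnvVariableAsUint32 above: fall back to 5 (minutes)
// when the variable is missing or not a valid uint32.
func getEnvVariableAsUint32(key string) uint32 {
    value, err := strconv.ParseUint(os.Getenv(key), 10, 32)
    if err != nil {
        return 5
    }
    return uint32(value)
}

func main() {
    // Hypothetical usage, e.g. `docker run -e INDEXING_INTERVAL=10 ...`
    os.Setenv("INDEXING_INTERVAL", "10")
    fmt.Println(getEnvVariableAsUint32("INDEXING_INTERVAL")) // 10
    os.Unsetenv("INDEXING_INTERVAL")
    fmt.Println(getEnvVariableAsUint32("INDEXING_INTERVAL")) // 5 (default)
}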
Binary file not shown.
@@ -8,8 +8,9 @@ import (
var searchHandler = withUser(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
    response := []map[string]interface{}{}
    query := r.URL.Query().Get("query")

    files, dirs := search.IndexedSearch(query,r.URL.Path)
    var files []string
    var dirs []string
    files, dirs = search.IndexedSearch(query,r.URL.Path,&files,&dirs)
    for _,v := range(files){
        response = append(response, map[string]interface{}{
            "dir": false,

@@ -22,6 +23,8 @@ var searchHandler = withUser(func(w http.ResponseWriter, r *http.Request, d *dat
            "path": v,
        })
    }
    files = files[:0]
    dirs = dirs[:0]
    // err := search.Search(d.user.Fs, r.URL.Path, query, d, func(path string, f os.FileInfo) error {
    //     response = append(response, map[string]interface{}{
    //         "dir": f.IsDir(),
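The handler above flattens the IndexedSearch results into a JSON array of objects with "dir" and "path" keys. A hedged illustration of the payload shape — the paths are invented, and the directory loop is assumed to mirror the file loop with "dir": true, since only its tail is visible in this hunk:

package main

import (
    "encoding/json"
    "fmt"
)

func main() {
    // Hypothetical response for a query such as "readme": the handler above
    // appends one map per matching file, and (presumably) one per matching dir.
    response := []map[string]interface{}{
        {"dir": false, "path": "docs/readme.md"},
        {"dir": true, "path": "docs"},
    }
    out, _ := json.Marshal(response)
    fmt.Println(string(out))
    // [{"dir":false,"path":"docs/readme.md"},{"dir":true,"path":"docs"}]
}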
@@ -5,43 +5,55 @@ import (
    "os"
    "path/filepath"
    "strings"
    "sync"
    "sort"
    "time"
)

type PathInfo struct {
    DirPathNames []string
    FilePathNames []string
    LastIndexed time.Time
}
var rootPath = "/srv/"
var indexes map[string]PathInfo
var (
    rootPath = "/srv" // DO NOT include trailing slash
    indexes map[string][]string
    mutex sync.RWMutex
    lastIndexed time.Time
)

func InitializeIndex(intervalMinutes uint32) {
    // Initialize the indexes map
    indexes = make(map[string]PathInfo)
    indexes = make(map[string][]string)
    var numFiles, numDirs int
    log.Println("Indexing files...")
    lastIndexedStart := time.Now()
    // Call the function to index files and directories
    err := indexFiles(rootPath, 1)
    totalNumFiles, totalNumDirs, err := indexFiles(rootPath,&numFiles,&numDirs)
    if err != nil {
        log.Fatal(err)
    }
    lastIndexed = lastIndexedStart
    go indexingScheduler(intervalMinutes)
    log.Println("Successfully indexed files.")
    log.Println("Files found :",totalNumFiles)
    log.Println("Directories found :",totalNumDirs)
}

func indexingScheduler(intervalMinutes uint32) {
    log.Printf("Indexing scheduler will run every %v minutes",intervalMinutes)
    for {
        time.Sleep(time.Duration(intervalMinutes) * time.Minute)
        err := indexFiles(rootPath, 1)
        var numFiles, numDirs int
        lastIndexedStart := time.Now()
        totalNumFiles, totalNumDirs, err := indexFiles(rootPath,&numFiles,&numDirs)
        if err != nil {
            log.Fatal(err)
        }
        lastIndexed = lastIndexedStart
        if totalNumFiles+totalNumDirs > 0 {
            log.Println("re-indexing found changes and updated the index.")
        }
    }
}

// Define a function to recursively index files and directories
func indexFiles(path string, depth int) error {
func indexFiles(path string, numFiles *int, numDirs *int) (int,int,error) {
    // Check if the current directory has been modified since last indexing
    dir, err := os.Open(path)
    if err != nil {
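The index is now a flat map[string][]string guarded by an RWMutex: each key is a directory path relative to rootPath ("/" for the root itself) and the value is the list of entry names found there. A standalone sketch of what the map holds after indexing a small tree; the paths are invented and add mimics addToIndex from the next hunk in simplified form:

package main

import (
    "fmt"
    "sync"
)

// Mirrors the new package-level state: one flat map keyed by directory path
// (relative to rootPath), holding the entry names found in that directory.
var (
    indexes = make(map[string][]string)
    mutex   sync.RWMutex
)

// add mimics addToIndex (next hunk) in simplified form: append an entry name
// under its parent directory while holding the write lock.
func add(dir, name string) {
    mutex.Lock()
    defer mutex.Unlock()
    indexes[dir] = append(indexes[dir], name)
}

func main() {
    // Hypothetical result of indexing /srv containing docs/readme.md and docs/img/logo.png.
    add("/", "docs")
    add("docs", "readme.md")
    add("docs", "img")
    add("docs/img", "logo.png")
    fmt.Println(indexes)
    // map[/:[docs] docs:[readme.md img] docs/img:[logo.png]]
}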
@@ -51,89 +63,99 @@ func indexFiles(path string, depth int) error {
    defer dir.Close()
    dirInfo, err := dir.Stat()
    if err != nil {
        return err
        return *numFiles,*numDirs,err
    }
    // Compare the last modified time of the directory with the last indexed time
    if dirInfo.ModTime().Before(indexes[path].LastIndexed) {
        return nil
    }
    // Check if the directory path is more than 3 levels deep
    if depth > 3 {
        // Index the directory and its subdirectories
        err = indexEverythingFlattened(path)
        if err != nil {
            return err
        }
        return err
    if dirInfo.ModTime().Before(lastIndexed) {
        return *numFiles,*numDirs,nil
    }
    // Read the directory contents
    files, err := dir.Readdir(-1)
    if err != nil {
        return err
        return *numFiles,*numDirs,err
    }
    // Iterate over the files and directories
    for _, file := range files {
        filePath := filepath.Join(path, file.Name())
        if file.IsDir() {
            // Recursively index subdirectories
            err = indexFiles(filePath, depth+1)
        } else {
            addToIndex(path, filePath, file.ModTime(),file.IsDir())
            *numDirs++
            indexFiles(path+"/"+file.Name(),numFiles,numDirs)
        }
        *numFiles++
        addToIndex(path, file.Name())
    }
    return nil
    return *numFiles,*numDirs,nil
}

func indexEverythingFlattened(path string) error {
    // Index the directory and its subdirectories
    err := filepath.Walk(path, func(filePath string, info os.FileInfo, err error) error {
        if err != nil {
            return err
func addToIndex(path string, fileName string) {
    mutex.Lock()
    defer mutex.Unlock()
    path = strings.TrimPrefix(path,rootPath+"/")
    path = strings.TrimSuffix(path,"/")
    if path == rootPath {
        path = "/"
    }
        addToIndex(path, filePath, info.ModTime(),info.IsDir())
        return nil
    })
    return err
}

func addToIndex(path string, filePath string, lastModified time.Time, isDir bool) {
    filePath = strings.TrimPrefix(filePath, rootPath)
    currentTime := time.Now()
    info, exists := indexes[path]
    if !exists {
        info = PathInfo{}
        info = []string{}
    }
    if isDir {
        info.DirPathNames = append(info.DirPathNames, filePath)
    }else{
        info.FilePathNames = append(info.FilePathNames, filePath)
    }
    info.LastIndexed = currentTime
    info = append(info, fileName)
    indexes[path] = info
}

func searchAllIndexes(searchTerm string,isDir bool,scope string) []string {
    var matchingResults []string
func SearchAllIndexes(searchTerm string, scope string, files []string, dirs []string) ([]string, []string) {
    mutex.RLock()
    defer mutex.RUnlock()

    var matchingFiles []string
    var matchingDirs []string

    // Iterate over the indexes
    for _, subFiles := range indexes {
        searchItems := subFiles.FilePathNames
        if isDir {
            searchItems = subFiles.DirPathNames
        }
    for dirName, v := range indexes {
        searchItems := v
        // Iterate over the path names
        for _, pathName := range searchItems {
            if dirName != "/" {
                pathName = dirName+"/"+pathName
            }
            // Check if the path name contains the search term
            if !containsSearchTerm(pathName, searchTerm) {
                continue
            }
            pathName = scopedPathNameFilter(pathName,scope)
            pathName = scopedPathNameFilter(pathName, scope)
            if pathName == "" {
                continue
            }
            matchingResults = append(matchingResults, pathName)
            matchingFiles = append(matchingFiles, pathName)
        }
        // Check if the path name contains the search term
        if !containsSearchTerm(dirName, searchTerm) {
            continue
        }
    return matchingResults
        pathName := scopedPathNameFilter(dirName, scope)
        if pathName == "" {
            continue
        }
        matchingDirs = append(matchingDirs, pathName)
    }

    // Sort the strings based on the number of elements after splitting by "/"
    sort.Slice(matchingFiles, func(i, j int) bool {
        parts1 := strings.Split(matchingFiles[i], "/")
        parts2 := strings.Split(matchingFiles[j], "/")
        return len(parts1) < len(parts2)
    })
    // Sort the strings based on the number of elements after splitting by "/"
    sort.Slice(matchingDirs, func(i, j int) bool {
        parts1 := strings.Split(matchingDirs[i], "/")
        parts2 := strings.Split(matchingDirs[j], "/")
        return len(parts1) < len(parts2)
    })

    // Copy the matching files and dirs to the final slices
    files = append([]string{}, matchingFiles...)
    dirs = append([]string{}, matchingDirs...)

    return files, dirs
}

func scopedPathNameFilter(pathName string, scope string) string {
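SearchAllIndexes walks that map, joins each entry name to its directory, filters by search term and scope, and returns files and directories sorted so that shallower paths come first. A simplified, self-contained stand-in; containsSearchTerm and scopedPathNameFilter are not shown in this diff, so plain substring matching stands in for them here:

package main

import (
    "fmt"
    "sort"
    "strings"
)

// Simplified stand-in for SearchAllIndexes: substring matching replaces
// containsSearchTerm and the scope filter, which are not part of this diff.
func search(indexes map[string][]string, term string) []string {
    var matches []string
    for dir, names := range indexes {
        for _, name := range names {
            path := name
            if dir != "/" {
                path = dir + "/" + name
            }
            if strings.Contains(strings.ToLower(path), strings.ToLower(term)) {
                matches = append(matches, path)
            }
        }
    }
    // Shallower paths first, matching the sort.Slice calls above.
    sort.Slice(matches, func(i, j int) bool {
        return len(strings.Split(matches[i], "/")) < len(strings.Split(matches[j], "/"))
    })
    return matches
}

func main() {
    indexes := map[string][]string{
        "/":        {"readme.md", "docs"},
        "docs":     {"readme.md", "img"},
        "docs/img": {"readme.png"},
    }
    fmt.Println(search(indexes, "readme"))
    // [readme.md docs/readme.md docs/img/readme.png]
}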
@@ -16,10 +16,9 @@ type searchOptions struct {
    Terms []string
}

func IndexedSearch(query string,scope string) (files []string, dirs []string) {
    fileList := searchAllIndexes(query,false,scope)
    dirList := searchAllIndexes(query,true,scope)
    return fileList, dirList
func IndexedSearch(query string, scope string, files *[]string, dirs *[]string) ([]string, []string) {
    *files, *dirs = SearchAllIndexes(query, scope, *files, *dirs)
    return *files, *dirs
}

// Search searches for a query in a fs.
@@ -2,7 +2,7 @@ package version

var (
    // Version is the current File Browser version.
    Version = "(untracked)"
    Version = "(0.1.0)"
    // CommitSHA is the commit sha.
    CommitSHA = "(unknown)"
)
@@ -14,7 +14,7 @@
<input
    type="text"
    @keyup.exact="keyup"
    @keyup.enter="submit"
    @input="submit"
    ref="input"
    :autofocus="active"
    v-model.trim="value"

@@ -47,7 +47,7 @@
    </div>
</template>
</template>
<ul v-show="results.length > 0">
<ul v-show="filteredResults.length > 0">
    <li v-for="(s, k) in filteredResults" :key="k">
        <router-link @click.native="close" :to="s.url">
            <i v-if="s.dir" class="material-icons">folder</i>

@@ -163,7 +163,7 @@ export default {
        return;
    }

    this.results.length = 0;
    this.results.length === 0;
},
init(string) {
    this.value = `${string} `;

@@ -177,7 +177,7 @@ export default {
async submit(event) {
    event.preventDefault();

    if (this.value === "") {
    if (this.value === "" || this.value.length < 3) {
        return;
    }
@@ -99,7 +99,7 @@
    v-else
    rel="noopener noreferrer"
    target="_blank"
    href="https://github.com/filebrowser/filebrowser"
    href="https://github.com/gtsteffaniak/filebrowser"
    >File Browser</a
>
<span> {{ version }}</span>

@@ -156,8 +156,8 @@ export default {
try {
    let usage = await api.usage(path);
    usageStats = {
        used: prettyBytes(usage.used, { binary: true }),
        total: prettyBytes(usage.total, { binary: true }),
        used: prettyBytes(usage.used/1024, { binary: true }),
        total: prettyBytes(usage.total/1024, { binary: true }),
        usedPercentage: Math.round((usage.used / usage.total) * 100),
    };
} catch (error) {
@@ -152,7 +152,7 @@
    "images": "الصور",
    "music": "الموسيقى",
    "pdf": "PDF",
    "pressToSearch": "Press enter to search...",
    "pressToSearch": "No results found in indexed search.",
    "search": "البحث...",
    "typeToSearch": "Type to search...",
    "types": "الأنواع",

@@ -162,7 +162,7 @@
    "images": "Images",
    "music": "Music",
    "pdf": "PDF",
    "pressToSearch": "Press enter to search...",
    "pressToSearch": "No results found in indexed search.",
    "search": "Search...",
    "typeToSearch": "Type to search...",
    "types": "Types",

@@ -152,7 +152,7 @@
    "images": "画像",
    "music": "音楽",
    "pdf": "PDF",
    "pressToSearch": "Press enter to search...",
    "pressToSearch": "No results found in indexed search.",
    "search": "検索...",
    "typeToSearch": "Type to search...",
    "types": "種類",