diff --git a/.gitignore b/.gitignore
index da3c6c32..9162da20 100644
--- a/.gitignore
+++ b/.gitignore
@@ -13,6 +13,7 @@ rice-box.go
 /backend/*.cov
 /backend/test_config.yaml
 /backend/srv
+/backend/http/dist
 
 .DS_Store
 node_modules
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 13f053ea..c8ca3190 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,33 @@ All notable changes to this project will be documented in this file. For commit
 guidelines, please refer to [Standard Version](https://github.com/conventional-changelog/standard-version).
 
+## v0.3.0
+
+ This release focuses on the API, making it more accessible for developers to use its functions without the UI.
+
+ **New Features**:
+ - You can now create long-lived API tokens from the user settings page to interact with the API.
+ - These tokens have the same permissions as your user.
+ - Helpful Swagger page for API usage.
+ - Some APIs were refactored for friendlier usage, moving some attributes to parameters and checking first for an API token, then falling back to the stored cookie if none is found. This allows all API requests from the Swagger page to work without a token.
+ - Added file size to the search preview! Should have been in the last release... sorry!
+
+ **Notes**:
+ - Replaced the backend HTTP framework with the Go standard library.
+ - The right-click context menu can now target the item that was right-clicked, to fully address https://github.com/gtsteffaniak/filebrowser/issues/214
+ - Adjusted the settings menu for mobile; it now always shows all available cards rather than grayed-out cards that need to be clicked.
+ - Longer and more cryptographically secure share links, based on UUIDs rather than base64.
+
+ **Bugfixes**:
+ - Fixed UI bug with password-protected shares.
+ - Fixed baseurl-related bugs https://github.com/gtsteffaniak/filebrowser/pull/228 Thanks @SimLV
+ - Fixed empty directory load issue.
+ - Fixed image preview cutoff on mobile.
+ - Fixed issue introduced in v0.2.10 where new files and folders were not showing up in the UI.
+ - Fixed preview issue where the preview would not load after viewing video files.
+ - Fixed sorting issue where files were not sorted by name by default.
+ - Fixed copy file prompt issue.
+
 ## v0.2.10
 
 **New Features**:
@@ -15,9 +42,9 @@ All notable changes to this project will be documented in this file. For commit
 - Fixed issue searching "smaller than" actually returned files "larger than"
 
 **Notes**:
- - Memory usage from index is reduced by ~40%
- - Indexing time has increased 2x due to the extra processing time required to calculate directory sizes.
- - File size calcuations use 1024 base vs previous 1000 base (matching windows explorer)
+ - Memory usage from index is reduced by ~40%
+ - Indexing time has increased 2x due to the extra processing time required to calculate directory sizes.
+ - File size calculations use 1024 base vs previous 1000 base (matching windows explorer)
 
 ## v0.2.9
@@ -40,7 +67,7 @@ All notable changes to this project will be documented in this file.
For commit ## v0.2.8 -- **Feature**: New gallary view scaling options (closes [#141](https://github.com/gtsteffaniak/filebrowser/issues/141)) +- **Feature**: New gallery view scaling options (closes [#141](https://github.com/gtsteffaniak/filebrowser/issues/141)) - **Change**: Refactored backend files functions - **Change**: Improved UI response to filesystem changes - **Change**: Added frontend tests for deployment integrity diff --git a/Dockerfile b/Dockerfile index 49a31790..f6137d1e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,8 +1,10 @@ -FROM golang:1.22-alpine AS base +FROM golang:1.23-alpine AS base ARG VERSION ARG REVISION WORKDIR /app COPY ./backend ./ +#RUN swag init --output swagger/docs +RUN ln -s swagger /usr/local/go/src/ RUN go build -ldflags="-w -s \ -X 'github.com/gtsteffaniak/filebrowser/version.Version=${VERSION}' \ -X 'github.com/gtsteffaniak/filebrowser/version.CommitSHA=${REVISION}'" \ @@ -19,5 +21,7 @@ FROM alpine:latest ENV FILEBROWSER_NO_EMBEDED="true" RUN apk --no-cache add ca-certificates mailcap COPY --from=base /app/filebrowser* ./ -COPY --from=nbuild /app/dist/ ./frontend/dist/ +# exposing default port for auto discovery. +EXPOSE 80 +COPY --from=nbuild /app/dist/ ./http/dist/ ENTRYPOINT [ "./filebrowser" ] diff --git a/Dockerfile.playwright b/Dockerfile.playwright index c8ffaf2a..a792362e 100644 --- a/Dockerfile.playwright +++ b/Dockerfile.playwright @@ -1,4 +1,4 @@ -FROM golang:1.22-alpine AS base +FROM golang:1.23-alpine AS base WORKDIR /app COPY ./backend ./ RUN go build -ldflags="-w -s" -o filebrowser . diff --git a/README.md b/README.md index 164c09bf..034a7a3f 100644 --- a/README.md +++ b/README.md @@ -10,41 +10,43 @@

 > [!WARNING]
-> Starting with v0.2.0, *ALL* configuration is done via `filebrowser.yaml`
-> Configuration file.
-> Starting with v0.2.4 *ALL* share links need to be re-created (due to
-> security fix).
+> Starting with `v0.3.0`, API routes have been slightly altered for friendlier usage outside of the UI.
+> If on Windows, please use Docker. The Windows binary is unstable and may not work.
 
-FileBrowser Quantum is a fork of the filebrowser opensource project with the
-following changes:
+FileBrowser Quantum is a fork of the file browser open-source project with the following changes:
 
 1. [x] Efficiently indexed files
    - Real-time search results as you type
    - Search Works with more type filters
    - Enhanced interactive results page.
- 2. [x] Revamped and simplified GUI navbar and sidebar menu.
+   - File/folder sizes are shown in the response
+ 1. [x] Revamped and simplified GUI navbar and sidebar menu.
    - Additional compact view mode as well as refreshed view mode styles.
- 3. [x] Revamped and simplified configuration via `filebrowser.yml` config file.
- 4. [x] Faster listing browsing
+ 1. [x] Revamped and simplified configuration via `filebrowser.yml` config file.
+ 1. [x] Better listing browsing
    - Switching view modes is instant
+   - Folder sizes are shown as well
    - Changing Sort order is instant
    - The entire directory is loaded in 1/3 the time
+ 1. Developer API support (see the example request below)
+   - Can create long-lived API tokens.
+   - Helpful Swagger page available at the `/swagger` endpoint.
 
 ## About
 
-FileBrowser Quantum provides a file managing interface within a specified directory
+FileBrowser Quantum provides a file-managing interface within a specified directory
 and can be used to upload, delete, preview, rename, and edit your files. It allows
 the creation of multiple users and each user can have its directory. This
 repository is a fork of the original [filebrowser](https://github.com/filebrowser/filebrowser)
 with a collection of changes that make this program work better in terms of
-aesthetics and performance. Improved search, simplified ui
+aesthetics and performance. Improved search, simplified UI
 (without removing features) and more secure and up-to-date build are just a few
 examples.
 
-FileBrowser Quantum differs significantly to the original.
+FileBrowser Quantum differs significantly from the original.
 There are hundreds of thousands of lines changed and they are generally
 no longer compatible with each other. This has been intentional -- the focus
 of this fork is on a few key principles:
@@ -68,10 +70,9 @@ action panel. If the action is available based on context, it will show up as
 a popup menu.
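The long-lived API tokens listed above are used as standard bearer tokens. Below is a minimal sketch of a request with `curl`; the token value and the resource path are placeholders rather than confirmed route names, so check the `/swagger` page of your instance for the exact routes and parameters:

```
# Hypothetical example: list a directory with a long-lived API token.
# Replace the path and query parameters with the routes documented at /swagger.
curl -H "Authorization: Bearer $FILEBROWSER_API_TOKEN" \
  "http://localhost:80/api/resources?path=/"
```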

 [screenshot table diff: four `<img>` lines replaced by three; the image tags themselves were not captured in this excerpt]

## Install @@ -89,7 +90,6 @@ docker run -it -v /path/to/folder:/srv -p 80:80 gtstef/filebrowser - with local storage ``` -version: '3.7' services: filebrowser: volumes: @@ -105,7 +105,6 @@ services: - with network share ``` -version: '3.7' services: filebrowser: volumes: @@ -121,7 +120,7 @@ volumes: driver_opts: type: cifs o: "username=admin,password=password,rw" # enter valid info here - device: "//192.168.1.100/share/" # enter valid hinfo here + device: "//192.168.1.100/share/" # enter valid info here ``` @@ -135,10 +134,24 @@ Not using docker (not recommended), download your binary from releases and run w There are very few commands available. There are 3 actions done via command line: -1. Running the program, as shown on install step. Only argument used is the config file, if you choose to override default "filebrowser.yaml" +1. Running the program, as shown on the install step. The only argument used is the config file, if you choose to override default "filebrowser.yaml" 2. Checking the version info via `./filebrowser version` 3. Updating the DB, which currently only supports adding users via `./filebrowser set -u username,password [-a] [-s "example/scope"]` +## API Usage + +FileBrowser Quantum comes with a swagger page that can be accessed from the API section of settings or by going to `/swagger` to see the full list: + +![image](https://github.com/user-attachments/assets/12abd1f6-21d3-4437-98ed-9b0da6cf2c73) + +You use the token as a bearer token. For example in postman: + +Successful Request: +image +Failed Request +image + + ## Configuration All configuration is now done via a single configuration file: @@ -149,11 +162,12 @@ View the [Configuration Help Page](./docs/configuration.md) for available configuration options and other help. -## Migration from filebrowser/filebrowser +## Migration from the original filebrowser If you currently use the original filebrowser but want to try using this. -I recommend you start fresh without reusing the database. If you want to -migrate your existing database to FileBrowser Quantum, visit the [migration +I would recommend that you start fresh without reusing the database. However, +If you want to migrate your existing database to FileBrowser Quantum, +visit the [migration readme](./docs/migration.md) ## Comparison Chart @@ -185,7 +199,8 @@ Multiple users | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | Single sign-on support | ❌ | ❌ | ❌ | ✅ | ✅ | ✅ | LDAP sign-on support | ❌ | ❌ | ❌ | ✅ | ✅ | ✅ | 2FA sign-on support | ❌ | ❌ | ❌ | ✅ | ✅ | ✅ | -Long-live API key support | ❌ | ❌ | ✅ | ✅ | ✅ | ✅ | +Long-live API key support | ✅ | ❌ | ✅ | ✅ | ✅ | ✅ | +API documentation page | ✅ | ❌ | ✅ | ✅ | ❌ | ✅ | Mobile App | ❌ | ❌ | ❌ | ❌ | ✅ | ❌ | open source? | ✅ | ✅ | ✅ | ✅ | ❌ | ✅ | tags support | ❌ | ❌ | ❌ | ✅ | ❌ | ✅ | diff --git a/backend/auth/auth.go b/backend/auth/auth.go index a1121c6f..3df1a955 100644 --- a/backend/auth/auth.go +++ b/backend/auth/auth.go @@ -9,7 +9,7 @@ import ( // Auther is the authentication interface. type Auther interface { // Auth is called to authenticate a request. - Auth(r *http.Request, usr users.Store) (*users.User, error) + Auth(r *http.Request, userStore *users.Storage) (*users.User, error) // LoginPage indicates if this auther needs a login page. LoginPage() bool } diff --git a/backend/auth/hook.go b/backend/auth/hook.go index 3351d507..75297b95 100644 --- a/backend/auth/hook.go +++ b/backend/auth/hook.go @@ -30,7 +30,7 @@ type HookAuth struct { } // Auth authenticates the user via a json in content body. 
-func (a *HookAuth) Auth(r *http.Request, usr users.Store) (*users.User, error) { +func (a *HookAuth) Auth(r *http.Request, usr *users.Storage) (*users.User, error) { var cred hookCred if r.Body == nil { @@ -51,7 +51,6 @@ func (a *HookAuth) Auth(r *http.Request, usr users.Store) (*users.User, error) { if err != nil { return nil, err } - switch action { case "auth": u, err := a.SaveUser() @@ -187,7 +186,7 @@ func (a *HookAuth) SaveUser() (*users.User, error) { func (a *HookAuth) GetUser(d *users.User) *users.User { // adds all permissions when user is admin isAdmin := d.Perm.Admin - perms := settings.Permissions{ + perms := users.Permissions{ Admin: isAdmin, Execute: isAdmin || d.Perm.Execute, Create: isAdmin || d.Perm.Create, diff --git a/backend/auth/json.go b/backend/auth/json.go index e05547de..cc082597 100644 --- a/backend/auth/json.go +++ b/backend/auth/json.go @@ -23,7 +23,7 @@ type JSONAuth struct { } // Auth authenticates the user via a json in content body. -func (a JSONAuth) Auth(r *http.Request, usr users.Store) (*users.User, error) { +func (a JSONAuth) Auth(r *http.Request, userStore *users.Storage) (*users.User, error) { config := &settings.Config var cred jsonCred @@ -47,8 +47,7 @@ func (a JSONAuth) Auth(r *http.Request, usr users.Store) (*users.User, error) { return nil, os.ErrPermission } } - - u, err := usr.Get(config.Server.Root, cred.Username) + u, err := userStore.Get(config.Server.Root, cred.Username) if err != nil || !users.CheckPwd(cred.Password, u.Password) { return nil, os.ErrPermission } diff --git a/backend/auth/none.go b/backend/auth/none.go index 0d6a9691..bc2a53c4 100644 --- a/backend/auth/none.go +++ b/backend/auth/none.go @@ -14,7 +14,7 @@ const MethodNoAuth = "noauth" type NoAuth struct{} // Auth uses authenticates user 1. -func (a NoAuth) Auth(r *http.Request, usr users.Store) (*users.User, error) { +func (a NoAuth) Auth(r *http.Request, usr *users.Storage) (*users.User, error) { return usr.Get(settings.Config.Server.Root, uint(1)) } diff --git a/backend/auth/proxy.go b/backend/auth/proxy.go index 522618b0..a5e6cd1a 100644 --- a/backend/auth/proxy.go +++ b/backend/auth/proxy.go @@ -19,7 +19,7 @@ type ProxyAuth struct { } // Auth authenticates the user via an HTTP header. 
-func (a ProxyAuth) Auth(r *http.Request, usr users.Store) (*users.User, error) { +func (a ProxyAuth) Auth(r *http.Request, usr *users.Storage) (*users.User, error) { username := r.Header.Get(a.Header) user, err := usr.Get(settings.Config.Server.Root, username) if err == errors.ErrNotExist { diff --git a/backend/cmd/dist/.gitignore b/backend/cmd/dist/.gitignore deleted file mode 100644 index 86d0cb27..00000000 --- a/backend/cmd/dist/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -# Ignore everything in this directory -* -# Except this file -!.gitignore \ No newline at end of file diff --git a/backend/cmd/root.go b/backend/cmd/root.go index f5a892fa..68f33cd8 100644 --- a/backend/cmd/root.go +++ b/backend/cmd/root.go @@ -1,20 +1,11 @@ package cmd import ( - "crypto/tls" "flag" "fmt" - "io/fs" "log" - "net" - "net/http" "os" - "os/signal" - "strconv" "strings" - "syscall" - - "embed" "github.com/gtsteffaniak/filebrowser/diskcache" "github.com/gtsteffaniak/filebrowser/files" @@ -22,29 +13,15 @@ import ( "github.com/gtsteffaniak/filebrowser/img" "github.com/gtsteffaniak/filebrowser/settings" "github.com/gtsteffaniak/filebrowser/storage" + "github.com/gtsteffaniak/filebrowser/swagger/docs" + "github.com/swaggo/swag" + "github.com/gtsteffaniak/filebrowser/users" - "github.com/gtsteffaniak/filebrowser/utils" "github.com/gtsteffaniak/filebrowser/version" ) -//go:embed dist/* -var assets embed.FS - -var ( - nonEmbededFS = os.Getenv("FILEBROWSER_NO_EMBEDED") == "true" -) - -type dirFS struct { - http.Dir -} - -func (d dirFS) Open(name string) (fs.File, error) { - return d.Dir.Open(name) -} - func getStore(config string) (*storage.Storage, bool) { // Use the config file (global flag) - log.Printf("Using Config file : %v", config) settings.Initialize(config) store, hasDB, err := storage.InitializeDb(settings.Config.Server.Database) if err != nil { @@ -146,12 +123,16 @@ func StartFilebrowser() { database = fmt.Sprintf("Creating new database : %v", settings.Config.Server.Database) } log.Printf("Initializing FileBrowser Quantum (%v)\n", version.Version) - log.Println("Embeded frontend :", !nonEmbededFS) + log.Printf("Using Config file : %v", configPath) + log.Println("Embeded frontend :", os.Getenv("FILEBROWSER_NO_EMBEDED") != "true") log.Println(database) log.Println("Sources :", settings.Config.Server.Root) - log.Print("Indexing interval : ", indexingInterval) + log.Println("Indexing interval :", indexingInterval) serverConfig := settings.Config.Server + swagInfo := docs.SwaggerInfo + swagInfo.BasePath = serverConfig.BaseURL + swag.Register(docs.SwaggerInfo.InstanceName(), swagInfo) // initialize indexing and schedule indexing ever n minutes (default 5) go files.InitializeIndex(serverConfig.IndexingInterval, serverConfig.Indexing) if err := rootCMD(store, &serverConfig); err != nil { @@ -159,13 +140,6 @@ func StartFilebrowser() { } } -func cleanupHandler(listener net.Listener, c chan os.Signal) { //nolint:interfacer - sig := <-c - log.Printf("Caught signal %s: shutting down.", sig) - listener.Close() - os.Exit(0) -} - func rootCMD(store *storage.Storage, serverConfig *settings.Server) error { if serverConfig.NumImageProcessors < 1 { log.Fatal("Image resize workers count could not be < 1") @@ -186,57 +160,7 @@ func rootCMD(store *storage.Storage, serverConfig *settings.Server) error { // No-op cache if no cacheDir is specified fileCache = diskcache.NewNoOp() } + fbhttp.StartHttp(imgSvc, store, fileCache) - fbhttp.SetupEnv(store, serverConfig, fileCache) - - _, err := os.Stat(serverConfig.Root) - 
utils.CheckErr(fmt.Sprint("cmd os.Stat ", serverConfig.Root), err) - var listener net.Listener - address := serverConfig.Address + ":" + strconv.Itoa(serverConfig.Port) - switch { - case serverConfig.Socket != "": - listener, err = net.Listen("unix", serverConfig.Socket) - utils.CheckErr("net.Listen", err) - err = os.Chmod(serverConfig.Socket, os.FileMode(0666)) // socket-perm - utils.CheckErr("os.Chmod", err) - case serverConfig.TLSKey != "" && serverConfig.TLSCert != "": - cer, err := tls.LoadX509KeyPair(serverConfig.TLSCert, serverConfig.TLSKey) //nolint:govet - utils.CheckErr("tls.LoadX509KeyPair", err) - listener, err = tls.Listen("tcp", address, &tls.Config{ - MinVersion: tls.VersionTLS12, - Certificates: []tls.Certificate{cer}}, - ) - utils.CheckErr("tls.Listen", err) - default: - listener, err = net.Listen("tcp", address) - utils.CheckErr("net.Listen", err) - } - sigc := make(chan os.Signal, 1) - signal.Notify(sigc, os.Interrupt, syscall.SIGTERM) - go cleanupHandler(listener, sigc) - if !nonEmbededFS { - assetsFs, err := fs.Sub(assets, "dist") - if err != nil { - log.Fatal("Could not embed frontend. Does backend/cmd/dist exist? Must be built and exist first") - } - handler, err := fbhttp.NewHandler(imgSvc, assetsFs) - utils.CheckErr("fbhttp.NewHandler", err) - defer listener.Close() - log.Println("Listening on", listener.Addr().String()) - //nolint: gosec - if err := http.Serve(listener, handler); err != nil { - log.Fatalf("Could not start server on port %d: %v", serverConfig.Port, err) - } - } else { - assetsFs := dirFS{Dir: http.Dir("frontend/dist")} - handler, err := fbhttp.NewHandler(imgSvc, assetsFs) - utils.CheckErr("fbhttp.NewHandler", err) - defer listener.Close() - log.Println("Listening on", listener.Addr().String()) - //nolint: gosec - if err := http.Serve(listener, handler); err != nil { - log.Fatalf("Could not start server on port %d: %v", serverConfig.Port, err) - } - } return nil } diff --git a/backend/cmd/rules.go b/backend/cmd/rules.go index bfa2e6a1..dec58a68 100644 --- a/backend/cmd/rules.go +++ b/backend/cmd/rules.go @@ -6,7 +6,6 @@ import ( "github.com/spf13/cobra" "github.com/spf13/pflag" - "github.com/gtsteffaniak/filebrowser/rules" "github.com/gtsteffaniak/filebrowser/settings" "github.com/gtsteffaniak/filebrowser/storage" "github.com/gtsteffaniak/filebrowser/users" @@ -66,7 +65,7 @@ func getUserIdentifier(flags *pflag.FlagSet) interface{} { return nil } -func printRules(rulez []rules.Rule, id interface{}) { +func printRules(rulez []users.Rule, id interface{}) { for id, rule := range rulez { fmt.Printf("(%d) ", id) diff --git a/backend/cmd/rules_add.go b/backend/cmd/rules_add.go index d7fa4064..27d8ba36 100644 --- a/backend/cmd/rules_add.go +++ b/backend/cmd/rules_add.go @@ -5,7 +5,6 @@ import ( "github.com/spf13/cobra" - "github.com/gtsteffaniak/filebrowser/rules" "github.com/gtsteffaniak/filebrowser/settings" "github.com/gtsteffaniak/filebrowser/storage" "github.com/gtsteffaniak/filebrowser/users" @@ -32,13 +31,13 @@ var rulesAddCmd = &cobra.Command{ regexp.MustCompile(exp) } - rule := rules.Rule{ + rule := users.Rule{ Allow: allow, Regex: regex, } if regex { - rule.Regexp = &rules.Regexp{Raw: exp} + rule.Regexp = &users.Regexp{Raw: exp} } else { rule.Path = exp } diff --git a/backend/filebrowser b/backend/filebrowser new file mode 100755 index 00000000..81ec32af Binary files /dev/null and b/backend/filebrowser differ diff --git a/backend/files/file.go b/backend/files/file.go index 6d362dc6..e254221f 100644 --- a/backend/files/file.go +++ 
b/backend/files/file.go @@ -19,8 +19,9 @@ import ( "unicode/utf8" "github.com/gtsteffaniak/filebrowser/errors" - "github.com/gtsteffaniak/filebrowser/rules" + "github.com/gtsteffaniak/filebrowser/fileutils" "github.com/gtsteffaniak/filebrowser/settings" + "github.com/gtsteffaniak/filebrowser/users" ) var ( @@ -29,34 +30,33 @@ var ( ) type ReducedItem struct { - Name string `json:"name"` - Size int64 `json:"size"` - ModTime time.Time `json:"modified"` - IsDir bool `json:"isDir,omitempty"` - Type string `json:"type"` + Name string `json:"name"` + Size int64 `json:"size"` + ModTime time.Time `json:"modified"` + Type string `json:"type"` + Mode os.FileMode `json:"-"` + Content string `json:"content,omitempty"` } // FileInfo describes a file. // reduced item is non-recursive reduced "Items", used to pass flat items array type FileInfo struct { - Items []*FileInfo `json:"-"` - ReducedItems []ReducedItem `json:"items,omitempty"` - Path string `json:"path,omitempty"` - Name string `json:"name"` - Size int64 `json:"size"` - Extension string `json:"-"` - ModTime time.Time `json:"modified"` - CacheTime time.Time `json:"-"` - Mode os.FileMode `json:"-"` - IsDir bool `json:"isDir,omitempty"` - IsSymlink bool `json:"isSymlink,omitempty"` - Type string `json:"type"` - Subtitles []string `json:"subtitles,omitempty"` - Content string `json:"content,omitempty"` - Checksums map[string]string `json:"checksums,omitempty"` - Token string `json:"token,omitempty"` - NumDirs int `json:"numDirs"` - NumFiles int `json:"numFiles"` + Files []ReducedItem `json:"-"` + Dirs map[string]*FileInfo `json:"-"` + Path string `json:"path"` + Name string `json:"name"` + Items []ReducedItem `json:"items"` + Size int64 `json:"size"` + Extension string `json:"-"` + ModTime time.Time `json:"modified"` + CacheTime time.Time `json:"-"` + Mode os.FileMode `json:"-"` + IsSymlink bool `json:"isSymlink,omitempty"` + Type string `json:"type"` + Subtitles []string `json:"subtitles,omitempty"` + Content string `json:"content,omitempty"` + Checksums map[string]string `json:"checksums,omitempty"` + Token string `json:"token,omitempty"` } // FileOptions are the options when getting a file info. @@ -67,58 +67,18 @@ type FileOptions struct { Expand bool ReadHeader bool Token string - Checker rules.Checker + Checker users.Checker Content bool } -// Legacy file info method, only called on non-indexed directories. -// Once indexing completes for the first time, NewFileInfo is never called. -func NewFileInfo(opts FileOptions) (*FileInfo, error) { - - index := GetIndex(rootPath) - if !opts.Checker.Check(opts.Path) { - return nil, os.ErrPermission - } - file, err := stat(opts) - if err != nil { - return nil, err - } - if opts.Expand { - if file.IsDir { - if err = file.readListing(opts.Path, opts.Checker, opts.ReadHeader); err != nil { - return nil, err - } - cleanedItems := []ReducedItem{} - for _, item := range file.Items { - // This is particularly useful for root of index, while indexing hasn't finished. - // adds the directory sizes for directories that have been indexed already. 
- if item.IsDir { - adjustedPath := index.makeIndexPath(opts.Path+"/"+item.Name, true) - info, _ := index.GetMetadataInfo(adjustedPath) - item.Size = info.Size - } - cleanedItems = append(cleanedItems, ReducedItem{ - Name: item.Name, - Size: item.Size, - IsDir: item.IsDir, - ModTime: item.ModTime, - Type: item.Type, - }) - } - - file.Items = nil - file.ReducedItems = cleanedItems - return file, nil - } - err = file.detectType(opts.Path, opts.Modify, opts.Content, true) - if err != nil { - return nil, err - } - } - return file, err +func (f FileOptions) Components() (string, string) { + return filepath.Dir(f.Path), filepath.Base(f.Path) } func FileInfoFaster(opts FileOptions) (*FileInfo, error) { + index := GetIndex(rootPath) + opts.Path = index.makeIndexPath(opts.Path) + // Lock access for the specific path pathMutex := getMutex(opts.Path) pathMutex.Lock() @@ -126,68 +86,94 @@ func FileInfoFaster(opts FileOptions) (*FileInfo, error) { if !opts.Checker.Check(opts.Path) { return nil, os.ErrPermission } - index := GetIndex(rootPath) - adjustedPath := index.makeIndexPath(opts.Path, opts.IsDir) - if opts.IsDir { - info, exists := index.GetMetadataInfo(adjustedPath) - if exists && !opts.Content { - // Let's not refresh if less than a second has passed - if time.Since(info.CacheTime) > time.Second { - go RefreshFileInfo(opts) //nolint:errcheck - } - // refresh cache after - return &info, nil - } - } - // don't bother caching content - if opts.Content { - file, err := NewFileInfo(opts) - return file, err - } - err := RefreshFileInfo(opts) + _, isDir, err := GetRealPath(opts.Path) if err != nil { - file, err := NewFileInfo(opts) - return file, err + return nil, err } - info, exists := index.GetMetadataInfo(adjustedPath + "/" + filepath.Base(opts.Path)) - if !exists || info.Name == "" { - return NewFileInfo(opts) + opts.IsDir = isDir + // check if the file exists in the index + info, exists := index.GetReducedMetadata(opts.Path, opts.IsDir) + if exists { + // Let's not refresh if less than a second has passed + if time.Since(info.CacheTime) > time.Second { + RefreshFileInfo(opts) //nolint:errcheck + } + if opts.Content { + content := "" + content, err = getContent(opts.Path) + if err != nil { + return info, err + } + info.Content = content + } + return info, nil } - return &info, nil + err = RefreshFileInfo(opts) + if err != nil { + return nil, err + } + info, exists = index.GetReducedMetadata(opts.Path, opts.IsDir) + if !exists { + return nil, err + } + if opts.Content { + content, err := getContent(opts.Path) + if err != nil { + return info, err + } + info.Content = content + } + return info, nil } func RefreshFileInfo(opts FileOptions) error { - if !opts.Checker.Check(opts.Path) { - return fmt.Errorf("permission denied: %s", opts.Path) + refreshOptions := FileOptions{ + Path: opts.Path, + IsDir: opts.IsDir, + Token: opts.Token, } index := GetIndex(rootPath) - adjustedPath := index.makeIndexPath(opts.Path, opts.IsDir) - file, err := stat(opts) + + if !refreshOptions.IsDir { + refreshOptions.Path = index.makeIndexPath(filepath.Dir(refreshOptions.Path)) + refreshOptions.IsDir = true + } else { + refreshOptions.Path = index.makeIndexPath(refreshOptions.Path) + } + + current, exists := index.GetMetadataInfo(refreshOptions.Path, true) + + file, err := stat(refreshOptions) if err != nil { - return fmt.Errorf("File/folder does not exist to refresh data: %s", opts.Path) + return fmt.Errorf("file/folder does not exist to refresh data: %s", refreshOptions.Path) } - _ = file.detectType(opts.Path, true, 
opts.Content, opts.ReadHeader) - if file.IsDir { - err := file.readListing(opts.Path, opts.Checker, opts.ReadHeader) - if err != nil { - return fmt.Errorf("Dir info could not be read: %s", opts.Path) - } - } - result := index.UpdateFileMetadata(adjustedPath, *file) + + //utils.PrintStructFields(*file) + result := index.UpdateMetadata(file) if !result { - return fmt.Errorf("File/folder does not exist in metadata: %s", adjustedPath) + return fmt.Errorf("file/folder does not exist in metadata: %s", refreshOptions.Path) + } + if !exists { + return nil + } + if current.Size != file.Size { + index.recursiveUpdateDirSizes(filepath.Dir(refreshOptions.Path), file, current.Size) } return nil } func stat(opts FileOptions) (*FileInfo, error) { - info, err := os.Lstat(opts.Path) + realPath, _, err := GetRealPath(rootPath, opts.Path) + if err != nil { + return nil, err + } + info, err := os.Lstat(realPath) if err != nil { return nil, err } file := &FileInfo{ Path: opts.Path, - Name: info.Name(), + Name: filepath.Base(opts.Path), ModTime: info.ModTime(), Mode: info.Mode(), Size: info.Size(), @@ -195,32 +181,98 @@ func stat(opts FileOptions) (*FileInfo, error) { Token: opts.Token, } if info.IsDir() { - file.IsDir = true - } - if info.Mode()&os.ModeSymlink != 0 { - file.IsSymlink = true - targetInfo, err := os.Stat(opts.Path) - if err == nil { - file.Size = targetInfo.Size() - file.IsDir = targetInfo.IsDir() + // Open and read directory contents + dir, err := os.Open(realPath) + if err != nil { + return nil, err } - } + defer dir.Close() + dirInfo, err := dir.Stat() + if err != nil { + return nil, err + } + index := GetIndex(rootPath) + // Check cached metadata to decide if refresh is needed + cachedParentDir, exists := index.GetMetadataInfo(opts.Path, true) + if exists && dirInfo.ModTime().Before(cachedParentDir.CacheTime) { + return cachedParentDir, nil + } + + // Read directory contents and process + files, err := dir.Readdir(-1) + if err != nil { + return nil, err + } + + file.Files = []ReducedItem{} + file.Dirs = map[string]*FileInfo{} + + var totalSize int64 + for _, item := range files { + itemPath := filepath.Join(realPath, item.Name()) + + if item.IsDir() { + itemInfo := &FileInfo{ + Name: item.Name(), + ModTime: item.ModTime(), + Mode: item.Mode(), + } + + if exists { + // if directory size was already cached use that. + cachedDir, ok := cachedParentDir.Dirs[item.Name()] + if ok { + itemInfo.Size = cachedDir.Size + } + } + file.Dirs[item.Name()] = itemInfo + totalSize += itemInfo.Size + } else { + itemInfo := ReducedItem{ + Name: item.Name(), + Size: item.Size(), + ModTime: item.ModTime(), + Mode: item.Mode(), + } + if IsSymlink(item.Mode()) { + itemInfo.Type = "symlink" + info, err := os.Stat(itemPath) + if err == nil { + itemInfo.Name = info.Name() + itemInfo.ModTime = info.ModTime() + itemInfo.Size = info.Size() + itemInfo.Mode = info.Mode() + } else { + file.Type = "invalid_link" + } + } + if file.Type != "invalid_link" { + err := itemInfo.detectType(itemPath, true, opts.Content, opts.ReadHeader) + if err != nil { + fmt.Printf("failed to detect type for %v: %v \n", itemPath, err) + } + file.Files = append(file.Files, itemInfo) + } + totalSize += itemInfo.Size + + } + } + + file.Size = totalSize + } return file, nil } // Checksum checksums a given File for a given User, using a specific // algorithm. The checksums data is saved on File object. 
func (i *FileInfo) Checksum(algo string) error { - if i.IsDir { - return errors.ErrIsDirectory - } if i.Checksums == nil { i.Checksums = map[string]string{} } - - reader, err := os.Open(i.Path) + fullpath := filepath.Join(i.Path, i.Name) + reader, err := os.Open(fullpath) if err != nil { return err } @@ -265,10 +317,7 @@ func GetRealPath(relativePath ...string) (string, bool, error) { // Convert relative path to absolute path absolutePath, err := filepath.Abs(joinedPath) if err != nil { - return "", false, err - } - if !Exists(absolutePath) { - return absolutePath, false, nil // return without error + return absolutePath, false, fmt.Errorf("could not get real path: %v, %s", combined, err) } // Resolve symlinks and get the real path return resolveSymlinks(absolutePath) @@ -279,8 +328,48 @@ func DeleteFiles(absPath string, opts FileOptions) error { if err != nil { return err } - opts.Path = filepath.Dir(absPath) err = RefreshFileInfo(opts) + if err != nil { + return err + } + return nil +} + +func MoveResource(realsrc, realdst string, isSrcDir bool) error { + err := fileutils.MoveFile(realsrc, realdst) + if err != nil { + return err + } + // refresh info for source and dest + err = RefreshFileInfo(FileOptions{ + Path: realsrc, + IsDir: isSrcDir, + }) + if err != nil { + return errors.ErrEmptyKey + } + refreshConfig := FileOptions{Path: realdst, IsDir: true} + if !isSrcDir { + refreshConfig.Path = filepath.Dir(realdst) + } + err = RefreshFileInfo(refreshConfig) + if err != nil { + return errors.ErrEmptyKey + } + return nil +} + +func CopyResource(realsrc, realdst string, isSrcDir bool) error { + err := fileutils.CopyFile(realsrc, realdst) + if err != nil { + return err + } + + refreshConfig := FileOptions{Path: realdst, IsDir: true} + if !isSrcDir { + refreshConfig.Path = filepath.Dir(realdst) + } + err = RefreshFileInfo(refreshConfig) if err != nil { return errors.ErrEmptyKey } @@ -288,12 +377,12 @@ func DeleteFiles(absPath string, opts FileOptions) error { } func WriteDirectory(opts FileOptions) error { + realPath, _, _ := GetRealPath(rootPath, opts.Path) // Ensure the parent directories exist - err := os.MkdirAll(opts.Path, 0775) + err := os.MkdirAll(realPath, 0775) if err != nil { return err } - opts.Path = filepath.Dir(opts.Path) err = RefreshFileInfo(opts) if err != nil { return errors.ErrEmptyKey @@ -339,7 +428,7 @@ func resolveSymlinks(path string) (string, bool, error) { // Get the file info info, err := os.Lstat(path) if err != nil { - return "", false, err + return path, false, fmt.Errorf("could not stat path: %v, %s", path, err) } // Check if it's a symlink @@ -347,7 +436,7 @@ func resolveSymlinks(path string) (string, bool, error) { // Read the symlink target target, err := os.Readlink(path) if err != nil { - return "", false, err + return path, false, err } // Resolve the target relative to the symlink's directory @@ -360,78 +449,83 @@ func resolveSymlinks(path string) (string, bool, error) { } // addContent reads and sets content based on the file type. 
-func (i *FileInfo) addContent(path string) error { - if !i.IsDir { - content, err := os.ReadFile(path) - if err != nil { - return err - } - stringContent := string(content) - if !utf8.ValidString(stringContent) { - return nil - } - if stringContent == "" { - i.Content = "empty-file-x6OlSil" - return nil - } - i.Content = stringContent +func getContent(path string) (string, error) { + realPath, _, err := GetRealPath(rootPath, path) + if err != nil { + return "", err } - return nil + + content, err := os.ReadFile(realPath) + if err != nil { + return "", err + } + stringContent := string(content) + if !utf8.ValidString(stringContent) { + return "", fmt.Errorf("file is not utf8 encoded") + } + if stringContent == "" { + return "empty-file-x6OlSil", nil + } + return stringContent, nil } // detectType detects the file type. -func (i *FileInfo) detectType(path string, modify, saveContent, readHeader bool) error { - if i.IsDir { - return nil - } - if IsNamedPipe(i.Mode) { - i.Type = "blob" - if saveContent { - return i.addContent(path) +func (i *ReducedItem) detectType(path string, modify, saveContent, readHeader bool) error { + name := i.Name + var contentErr error + var contentString string + if saveContent { + contentString, contentErr = getContent(path) + if contentErr == nil { + i.Content = contentString } - return nil } + if IsNamedPipe(i.Mode) { + i.Type = "blob" + return contentErr + } + + ext := filepath.Ext(name) var buffer []byte if readHeader { - buffer = i.readFirstBytes() - mimetype := mime.TypeByExtension(i.Extension) + buffer = i.readFirstBytes(path) + mimetype := mime.TypeByExtension(ext) if mimetype == "" { http.DetectContentType(buffer) } } - ext := filepath.Ext(i.Name) for _, fileType := range AllFiletypeOptions { if IsMatchingType(ext, fileType) { i.Type = fileType } - switch i.Type { case "text": if !modify { i.Type = "textImmutable" } if saveContent { - return i.addContent(path) + return contentErr } case "video": - parentDir := strings.TrimRight(path, i.Name) - i.detectSubtitles(parentDir) + // TODO add back somewhere else, not during metadata fetch + //parentDir := strings.TrimRight(path, name) + //i.detectSubtitles(parentDir) case "doc": if ext == ".pdf" { i.Type = "pdf" return nil } if saveContent { - return i.addContent(path) + return nil } } } if i.Type == "" { i.Type = "blob" if saveContent { - return i.addContent(path) + return contentErr } } @@ -439,8 +533,8 @@ func (i *FileInfo) detectType(path string, modify, saveContent, readHeader bool) } // readFirstBytes reads the first bytes of the file. -func (i *FileInfo) readFirstBytes() []byte { - file, err := os.Open(i.Path) +func (i *ReducedItem) readFirstBytes(path string) []byte { + file, err := os.Open(path) if err != nil { i.Type = "blob" return nil @@ -458,113 +552,42 @@ func (i *FileInfo) readFirstBytes() []byte { } // detectSubtitles detects subtitles for video files. 
-func (i *FileInfo) detectSubtitles(parentDir string) { - if i.Type != "video" { - return - } - i.Subtitles = []string{} - ext := filepath.Ext(i.Name) - dir, err := os.Open(parentDir) - if err != nil { - // Directory must have been deleted, remove it from the index - return - } - defer dir.Close() // Ensure directory handle is closed - - files, err := dir.Readdir(-1) - if err != nil { - return - } - - base := strings.TrimSuffix(i.Name, ext) - subtitleExts := []string{".vtt", ".txt", ".srt", ".lrc"} - - for _, f := range files { - if f.IsDir() || !strings.HasPrefix(f.Name(), base) { - continue - } - - for _, subtitleExt := range subtitleExts { - if strings.HasSuffix(f.Name(), subtitleExt) { - i.Subtitles = append(i.Subtitles, filepath.Join(parentDir, f.Name())) - break - } - } - } -} - -// readListing reads the contents of a directory and fills the listing. -func (i *FileInfo) readListing(path string, checker rules.Checker, readHeader bool) error { - dir, err := os.Open(i.Path) - if err != nil { - return err - } - defer dir.Close() - - files, err := dir.Readdir(-1) - if err != nil { - return err - } - - listing := &FileInfo{ - Items: []*FileInfo{}, - NumDirs: 0, - NumFiles: 0, - } - - for _, f := range files { - name := f.Name() - fPath := filepath.Join(i.Path, name) - - if !checker.Check(fPath) { - continue - } - - isSymlink, isInvalidLink := false, false - if IsSymlink(f.Mode()) { - isSymlink = true - info, err := os.Stat(fPath) - if err == nil { - f = info - } else { - isInvalidLink = true - } - } - - file := &FileInfo{ - Name: name, - Size: f.Size(), - ModTime: f.ModTime(), - Mode: f.Mode(), - } - if f.IsDir() { - file.IsDir = true - } - if isSymlink { - file.IsSymlink = true - } - - if file.IsDir { - listing.NumDirs++ - } else { - listing.NumFiles++ - - if isInvalidLink { - file.Type = "invalid_link" - } else { - err := file.detectType(path, true, false, readHeader) - if err != nil { - return err - } - } - } - - listing.Items = append(listing.Items, file) - } - - i.Items = listing.Items - return nil -} +//func (i *FileInfo) detectSubtitles(path string) { +// if i.Type != "video" { +// return +// } +// parentDir := filepath.Dir(path) +// fileName := filepath.Base(path) +// i.Subtitles = []string{} +// ext := filepath.Ext(fileName) +// dir, err := os.Open(parentDir) +// if err != nil { +// // Directory must have been deleted, remove it from the index +// return +// } +// defer dir.Close() // Ensure directory handle is closed +// +// files, err := dir.Readdir(-1) +// if err != nil { +// return +// } +// +// base := strings.TrimSuffix(fileName, ext) +// subtitleExts := []string{".vtt", ".txt", ".srt", ".lrc"} +// +// for _, f := range files { +// if f.IsDir() || !strings.HasPrefix(f.Name(), base) { +// continue +// } +// +// for _, subtitleExt := range subtitleExts { +// if strings.HasSuffix(f.Name(), subtitleExt) { +// i.Subtitles = append(i.Subtitles, filepath.Join(parentDir, f.Name())) +// break +// } +// } +// } +//} func IsNamedPipe(mode os.FileMode) bool { return mode&os.ModeNamedPipe != 0 diff --git a/backend/files/file_test.go b/backend/files/file_test.go index 281d06dd..77bfbe60 100644 --- a/backend/files/file_test.go +++ b/backend/files/file_test.go @@ -63,14 +63,11 @@ func Test_GetRealPath(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - realPath, isDir, err := GetRealPath(tt.paths...) + realPath, isDir, _ := GetRealPath(tt.paths...) 
adjustedRealPath := strings.TrimPrefix(realPath, trimPrefix) if tt.want.path != adjustedRealPath || tt.want.isDir != isDir { t.Errorf("expected %v:%v but got: %v:%v", tt.want.path, tt.want.isDir, adjustedRealPath, isDir) } - if err != nil { - t.Error("got error", err) - } }) } } diff --git a/backend/files/indexing.go b/backend/files/indexing.go index 505c8e47..7c1859e0 100644 --- a/backend/files/indexing.go +++ b/backend/files/indexing.go @@ -13,7 +13,7 @@ import ( type Index struct { Root string - Directories map[string]FileInfo + Directories map[string]*FileInfo NumDirs int NumFiles int inProgress bool @@ -43,7 +43,7 @@ func indexingScheduler(intervalMinutes uint32) { // Set the indexing flag to indicate that indexing is in progress si.resetCount() // Perform the indexing operation - err := si.indexFiles(si.Root) + err := si.indexFiles("/") // Reset the indexing flag to indicate that indexing has finished si.inProgress = false // Update the LastIndexed time @@ -64,15 +64,13 @@ func indexingScheduler(intervalMinutes uint32) { } // Define a function to recursively index files and directories -func (si *Index) indexFiles(path string) error { - // Ensure path is cleaned and normalized - adjustedPath := si.makeIndexPath(path, true) +func (si *Index) indexFiles(adjustedPath string) error { + realPath := strings.TrimRight(si.Root, "/") + adjustedPath // Open the directory - dir, err := os.Open(path) + dir, err := os.Open(realPath) if err != nil { - // If the directory can't be opened (e.g., deleted), remove it from the index - si.RemoveDirectory(adjustedPath) + si.RemoveDirectory(adjustedPath) // Remove if it can't be opened return err } defer dir.Close() @@ -82,7 +80,7 @@ func (si *Index) indexFiles(path string) error { return err } - // Check if the directory is already up-to-date + // Skip directories that haven't been modified since the last index if dirInfo.ModTime().Before(si.LastIndexed) { return nil } @@ -93,90 +91,73 @@ func (si *Index) indexFiles(path string) error { return err } - // Recursively process files and directories - fileInfos := []*FileInfo{} var totalSize int64 var numDirs, numFiles int + fileInfos := []ReducedItem{} + dirInfos := map[string]*FileInfo{} + combinedPath := adjustedPath + "/" + if adjustedPath == "/" { + combinedPath = "/" + } + // Process each file and directory in the current directory for _, file := range files { - parentInfo := &FileInfo{ - Name: file.Name(), - Size: file.Size(), + itemInfo := &FileInfo{ ModTime: file.ModTime(), - IsDir: file.IsDir(), } - childInfo, err := si.InsertInfo(path, parentInfo) - if err != nil { - // Log error, but continue processing other files - continue - } - - // Accumulate directory size and items - totalSize += childInfo.Size - if childInfo.IsDir { + if file.IsDir() { + itemInfo.Name = file.Name() + itemInfo.Path = combinedPath + file.Name() + // Recursively index the subdirectory + err := si.indexFiles(itemInfo.Path) + if err != nil { + log.Printf("Failed to index directory %s: %v", itemInfo.Path, err) + continue + } + // Fetch the metadata for the subdirectory after indexing + subDirInfo, exists := si.GetMetadataInfo(itemInfo.Path, true) + if exists { + itemInfo.Size = subDirInfo.Size + totalSize += subDirInfo.Size // Add subdirectory size to the total + } + dirInfos[itemInfo.Name] = itemInfo numDirs++ } else { + itemInfo := &ReducedItem{ + Name: file.Name(), + ModTime: file.ModTime(), + Size: file.Size(), + Mode: file.Mode(), + } + _ = itemInfo.detectType(combinedPath+file.Name(), true, false, false) + fileInfos = 
append(fileInfos, *itemInfo) + totalSize += itemInfo.Size numFiles++ } - _ = childInfo.detectType(path, true, false, false) - fileInfos = append(fileInfos, childInfo) } // Create FileInfo for the current directory dirFileInfo := &FileInfo{ - Items: fileInfos, - Name: filepath.Base(path), - Size: totalSize, - ModTime: dirInfo.ModTime(), - CacheTime: time.Now(), - IsDir: true, - NumDirs: numDirs, - NumFiles: numFiles, + Path: adjustedPath, + Files: fileInfos, + Dirs: dirInfos, + Size: totalSize, + ModTime: dirInfo.ModTime(), } - // Add directory to index - si.mu.Lock() - si.Directories[adjustedPath] = *dirFileInfo + // Update the current directory metadata in the index + si.UpdateMetadata(dirFileInfo) si.NumDirs += numDirs si.NumFiles += numFiles - si.mu.Unlock() + return nil } -// InsertInfo function to handle adding a file or directory into the index -func (si *Index) InsertInfo(parentPath string, file *FileInfo) (*FileInfo, error) { - filePath := filepath.Join(parentPath, file.Name) - - // Check if it's a directory and recursively index it - if file.IsDir { - // Recursively index directory - err := si.indexFiles(filePath) - if err != nil { - return nil, err - } - - // Return directory info from the index - adjustedPath := si.makeIndexPath(filePath, true) - si.mu.RLock() - dirInfo := si.Directories[adjustedPath] - si.mu.RUnlock() - return &dirInfo, nil +func (si *Index) makeIndexPath(subPath string) string { + if strings.HasPrefix(subPath, "./") { + subPath = strings.TrimPrefix(subPath, ".") } - - // Create FileInfo for regular files - fileInfo := &FileInfo{ - Path: filePath, - Name: file.Name, - Size: file.Size, - ModTime: file.ModTime, - IsDir: false, - } - - return fileInfo, nil -} - -func (si *Index) makeIndexPath(subPath string, isDir bool) string { - if si.Root == subPath { + if strings.HasPrefix(subPath, ".") || si.Root == subPath { return "/" } // clean path @@ -185,14 +166,39 @@ func (si *Index) makeIndexPath(subPath string, isDir bool) string { adjustedPath := strings.TrimPrefix(subPath, si.Root) // remove trailing slash adjustedPath = strings.TrimSuffix(adjustedPath, "/") - // add leading slash for root of index - if adjustedPath == "" { - adjustedPath = "/" - } else if !isDir { - adjustedPath = filepath.Dir(adjustedPath) - } if !strings.HasPrefix(adjustedPath, "/") { adjustedPath = "/" + adjustedPath } return adjustedPath } + +//func getParentPath(path string) string { +// // Trim trailing slash for consistency +// path = strings.TrimSuffix(path, "/") +// if path == "" || path == "/" { +// return "" // Root has no parent +// } +// +// lastSlash := strings.LastIndex(path, "/") +// if lastSlash == -1 { +// return "/" // Parent of a top-level directory +// } +// return path[:lastSlash] +//} + +func (si *Index) recursiveUpdateDirSizes(parentDir string, childInfo *FileInfo, previousSize int64) { + childDirName := filepath.Base(childInfo.Path) + if parentDir == childDirName { + return + } + dir, exists := si.GetMetadataInfo(parentDir, true) + if !exists { + return + } + dir.Dirs[childDirName] = childInfo + newSize := dir.Size - previousSize + childInfo.Size + dir.Size += newSize + si.UpdateMetadata(dir) + dir, _ = si.GetMetadataInfo(parentDir, true) + si.recursiveUpdateDirSizes(filepath.Dir(parentDir), dir, newSize) +} diff --git a/backend/files/indexing_test.go b/backend/files/indexing_test.go index 0758f1f4..7500285a 100644 --- a/backend/files/indexing_test.go +++ b/backend/files/indexing_test.go @@ -2,8 +2,8 @@ package files import ( "encoding/json" - "fmt" "math/rand" + 
"path/filepath" "reflect" "testing" "time" @@ -23,27 +23,26 @@ func BenchmarkFillIndex(b *testing.B) { func (si *Index) createMockData(numDirs, numFilesPerDir int) { for i := 0; i < numDirs; i++ { - dirName := generateRandomPath(rand.Intn(3) + 1) - files := []*FileInfo{} // Slice of FileInfo + dirPath := generateRandomPath(rand.Intn(3) + 1) + files := []ReducedItem{} // Slice of FileInfo // Simulating files and directories with FileInfo for j := 0; j < numFilesPerDir; j++ { - newFile := &FileInfo{ + newFile := ReducedItem{ Name: "file-" + getRandomTerm() + getRandomExtension(), - IsDir: false, Size: rand.Int63n(1000), // Random size ModTime: time.Now().Add(-time.Duration(rand.Intn(100)) * time.Hour), // Random mod time + Type: "blob", } files = append(files, newFile) } - - // Simulate inserting files into index - for _, file := range files { - _, err := si.InsertInfo(dirName, file) - if err != nil { - fmt.Println("Error inserting file:", err) - } + dirInfo := &FileInfo{ + Name: filepath.Base(dirPath), + Path: dirPath, + Files: files, } + + si.UpdateMetadata(dirInfo) } } diff --git a/backend/files/search.go b/backend/files/search.go index 24e45b74..e77b2b01 100644 --- a/backend/files/search.go +++ b/backend/files/search.go @@ -1,12 +1,12 @@ package files import ( - "math/rand" "path/filepath" "sort" "strings" "sync" - "time" + + "github.com/gtsteffaniak/filebrowser/utils" ) var ( @@ -14,123 +14,116 @@ var ( maxSearchResults = 100 ) -func (si *Index) Search(search string, scope string, sourceSession string) ([]string, map[string]map[string]bool) { +type searchResult struct { + Path string `json:"path"` + Type string `json:"type"` + Size int64 `json:"size"` +} + +func (si *Index) Search(search string, scope string, sourceSession string) []searchResult { // Remove slashes - scope = strings.TrimLeft(scope, "/") - scope = strings.TrimRight(scope, "/") - runningHash := generateRandomHash(4) + scope = si.makeIndexPath(scope) + runningHash := utils.GenerateRandomHash(4) sessionInProgress.Store(sourceSession, runningHash) // Store the value in the sync.Map searchOptions := ParseSearch(search) - fileListTypes := make(map[string]map[string]bool) - matching := []string{} + results := make(map[string]searchResult, 0) count := 0 + directories := si.getDirsInScope(scope) for _, searchTerm := range searchOptions.Terms { if searchTerm == "" { continue } + if count > maxSearchResults { + break + } si.mu.Lock() - for dirName, dir := range si.Directories { - isDir := true - files := []string{} - for _, item := range dir.Items { - if !item.IsDir { - files = append(files, item.Name) - } - } - value, found := sessionInProgress.Load(sourceSession) - if !found || value != runningHash { - si.mu.Unlock() - return []string{}, map[string]map[string]bool{} + for _, dirName := range directories { + si.mu.Unlock() + dir, found := si.GetReducedMetadata(dirName, true) + si.mu.Lock() + if !found { + continue } if count > maxSearchResults { break } - pathName := scopedPathNameFilter(dirName, scope, isDir) - if pathName == "" { - continue // path not matched + reducedDir := ReducedItem{ + Name: filepath.Base(dirName), + Type: "directory", + Size: dir.Size, } - fileTypes := map[string]bool{} - si.mu.Unlock() - matches, fileType := si.containsSearchTerm(dirName, searchTerm, *searchOptions, isDir, fileTypes) - si.mu.Lock() + + matches := reducedDir.containsSearchTerm(searchTerm, searchOptions) if matches { - fileListTypes[pathName] = fileType - matching = append(matching, pathName) + scopedPath := 
strings.TrimPrefix(strings.TrimPrefix(dirName, scope), "/") + "/" + results[scopedPath] = searchResult{Path: scopedPath, Type: "directory", Size: dir.Size} count++ } - isDir = false - for _, file := range files { - if file == "" { - continue + + // search files first + for _, item := range dir.Items { + + fullPath := dirName + "/" + item.Name + if item.Type == "directory" { + fullPath += "/" } value, found := sessionInProgress.Load(sourceSession) if !found || value != runningHash { - return []string{}, map[string]map[string]bool{} + si.mu.Unlock() + return []searchResult{} } - if count > maxSearchResults { break } - fullName := strings.TrimLeft(pathName+file, "/") - fileTypes := map[string]bool{} - si.mu.Unlock() - matches, fileType := si.containsSearchTerm(fullName, searchTerm, *searchOptions, isDir, fileTypes) - si.mu.Lock() - if !matches { - continue + matches := item.containsSearchTerm(searchTerm, searchOptions) + if matches { + scopedPath := strings.TrimPrefix(strings.TrimPrefix(fullPath, scope), "/") + results[scopedPath] = searchResult{Path: scopedPath, Type: item.Type, Size: item.Size} + count++ } - fileListTypes[fullName] = fileType - matching = append(matching, fullName) - count++ } } si.mu.Unlock() } + + // Sort keys based on the number of elements in the path after splitting by "/" + sortedKeys := make([]searchResult, 0, len(results)) + for _, v := range results { + sortedKeys = append(sortedKeys, v) + } // Sort the strings based on the number of elements after splitting by "/" - sort.Slice(matching, func(i, j int) bool { - parts1 := strings.Split(matching[i], "/") - parts2 := strings.Split(matching[j], "/") + sort.Slice(sortedKeys, func(i, j int) bool { + parts1 := strings.Split(sortedKeys[i].Path, "/") + parts2 := strings.Split(sortedKeys[j].Path, "/") return len(parts1) < len(parts2) }) - return matching, fileListTypes + return sortedKeys } -func scopedPathNameFilter(pathName string, scope string, isDir bool) string { - pathName = strings.TrimLeft(pathName, "/") - pathName = strings.TrimRight(pathName, "/") - if strings.HasPrefix(pathName, scope) || scope == "" { - pathName = strings.TrimPrefix(pathName, scope) - pathName = strings.TrimLeft(pathName, "/") - if isDir { - pathName = pathName + "/" - } - } else { - pathName = "" // return not matched - } - return pathName -} +// returns true if the file name contains the search term +// returns file type if the file name contains the search term +// returns size of file/dir if the file name contains the search term +func (fi ReducedItem) containsSearchTerm(searchTerm string, options *SearchOptions) bool { -func (si *Index) containsSearchTerm(pathName string, searchTerm string, options SearchOptions, isDir bool, fileTypes map[string]bool) (bool, map[string]bool) { + fileTypes := map[string]bool{} largerThan := int64(options.LargerThan) * 1024 * 1024 smallerThan := int64(options.SmallerThan) * 1024 * 1024 conditions := options.Conditions - fileName := filepath.Base(pathName) - adjustedPath := si.makeIndexPath(pathName, isDir) + lowerFileName := strings.ToLower(fi.Name) // Convert to lowercase if not exact match if !conditions["exact"] { - fileName = strings.ToLower(fileName) searchTerm = strings.ToLower(searchTerm) } // Check if the file name contains the search term - if !strings.Contains(fileName, searchTerm) { - return false, map[string]bool{} + if !strings.Contains(lowerFileName, searchTerm) { + return false } // Initialize file size and fileTypes map var fileSize int64 - extension := filepath.Ext(fileName) + extension := 
filepath.Ext(lowerFileName) // Collect file types for _, k := range AllFiletypeOptions { @@ -138,31 +131,9 @@ func (si *Index) containsSearchTerm(pathName string, searchTerm string, options fileTypes[k] = true } } + isDir := fi.Type == "directory" fileTypes["dir"] = isDir - // Get file info if needed for size-related conditions - if largerThan > 0 || smallerThan > 0 { - fileInfo, exists := si.GetMetadataInfo(adjustedPath) - if !exists { - return false, fileTypes - } else if !isDir { - // Look for specific file in ReducedItems - for _, item := range fileInfo.ReducedItems { - lower := strings.ToLower(item.Name) - if strings.Contains(lower, searchTerm) { - if item.Size == 0 { - return false, fileTypes - } - fileSize = item.Size - break - } - } - } else { - fileSize = fileInfo.Size - } - if fileSize == 0 { - return false, fileTypes - } - } + fileSize = fi.Size // Evaluate all conditions for t, v := range conditions { @@ -173,33 +144,35 @@ func (si *Index) containsSearchTerm(pathName string, searchTerm string, options case "larger": if largerThan > 0 { if fileSize <= largerThan { - return false, fileTypes + return false } } case "smaller": if smallerThan > 0 { if fileSize >= smallerThan { - return false, fileTypes + return false } } default: // Handle other file type conditions notMatchType := v != fileTypes[t] if notMatchType { - return false, fileTypes + return false } } } - return true, fileTypes + return true } -func generateRandomHash(length int) string { - const charset = "abcdefghijklmnopqrstuvwxyz0123456789" - rand.New(rand.NewSource(time.Now().UnixNano())) - result := make([]byte, length) - for i := range result { - result[i] = charset[rand.Intn(len(charset))] +func (si *Index) getDirsInScope(scope string) []string { + newList := []string{} + si.mu.Lock() + defer si.mu.Unlock() + for k := range si.Directories { + if strings.HasPrefix(k, scope) || scope == "" { + newList = append(newList, k) + } } - return string(result) + return newList } diff --git a/backend/files/search_test.go b/backend/files/search_test.go index a78f0968..cc33e3a0 100644 --- a/backend/files/search_test.go +++ b/backend/files/search_test.go @@ -88,22 +88,26 @@ func TestSearchWhileIndexing(t *testing.T) { func TestSearchIndexes(t *testing.T) { index := Index{ - Directories: map[string]FileInfo{ - "test": {Items: []*FileInfo{{Name: "audio1.wav"}}}, - "test/path": {Items: []*FileInfo{{Name: "file.txt"}}}, - "new/test": {Items: []*FileInfo{ - {Name: "audio.wav"}, - {Name: "video.mp4"}, - {Name: "video.MP4"}, - }}, - "new/test/path": {Items: []*FileInfo{{Name: "archive.zip"}}}, - "/firstDir": {Items: []*FileInfo{ - {Name: "archive.zip", Size: 100}, - {Name: "thisIsDir", IsDir: true, Size: 2 * 1024 * 1024}, + Directories: map[string]*FileInfo{ + "/test": {Files: []ReducedItem{{Name: "audio1.wav", Type: "audio"}}}, + "/test/path": {Files: []ReducedItem{{Name: "file.txt", Type: "text"}}}, + "/new/test": {Files: []ReducedItem{ + {Name: "audio.wav", Type: "audio"}, + {Name: "video.mp4", Type: "video"}, + {Name: "video.MP4", Type: "video"}, }}, + "/new/test/path": {Files: []ReducedItem{{Name: "archive.zip", Type: "archive"}}}, + "/firstDir": { + Files: []ReducedItem{ + {Name: "archive.zip", Size: 100, Type: "archive"}, + }, + Dirs: map[string]*FileInfo{ + "thisIsDir": {Name: "thisIsDir", Size: 2 * 1024 * 1024}, + }, + }, "/firstDir/thisIsDir": { - Items: []*FileInfo{ - {Name: "hi.txt"}, + Files: []ReducedItem{ + {Name: "hi.txt", Type: "text"}, }, Size: 2 * 1024 * 1024, }, @@ -113,112 +117,106 @@ func TestSearchIndexes(t 
*testing.T) { tests := []struct { search string scope string - expectedResult []string - expectedTypes map[string]map[string]bool + expectedResult []searchResult }{ { - search: "audio", - scope: "/new/", - expectedResult: []string{"test/audio.wav"}, - expectedTypes: map[string]map[string]bool{ - "test/audio.wav": {"audio": true, "dir": false}, + search: "audio", + scope: "/new/", + expectedResult: []searchResult{ + { + Path: "test/audio.wav", + Type: "audio", + Size: 0, + }, }, }, { - search: "test", - scope: "/", - expectedResult: []string{"test/", "new/test/"}, - expectedTypes: map[string]map[string]bool{ - "test/": {"dir": true}, - "new/test/": {"dir": true}, + search: "test", + scope: "/", + expectedResult: []searchResult{ + { + Path: "test/", + Type: "directory", + Size: 0, + }, + { + Path: "new/test/", + Type: "directory", + Size: 0, + }, }, }, { - search: "archive", - scope: "/", - expectedResult: []string{"firstDir/archive.zip", "new/test/path/archive.zip"}, - expectedTypes: map[string]map[string]bool{ - "new/test/path/archive.zip": {"archive": true, "dir": false}, - "firstDir/archive.zip": {"archive": true, "dir": false}, + search: "archive", + scope: "/", + expectedResult: []searchResult{ + { + Path: "firstDir/archive.zip", + Type: "archive", + Size: 100, + }, + { + Path: "new/test/path/archive.zip", + Type: "archive", + Size: 0, + }, }, }, { - search: "arch", - scope: "/firstDir", - expectedResult: []string{"archive.zip"}, - expectedTypes: map[string]map[string]bool{ - "archive.zip": {"archive": true, "dir": false}, + search: "arch", + scope: "/firstDir", + expectedResult: []searchResult{ + { + Path: "archive.zip", + Type: "archive", + Size: 100, + }, }, }, { - search: "isdir", - scope: "/", - expectedResult: []string{"firstDir/thisIsDir/"}, - expectedTypes: map[string]map[string]bool{ - "firstDir/thisIsDir/": {"dir": true}, + search: "isdir", + scope: "/", + expectedResult: []searchResult{ + { + Path: "firstDir/thisIsDir/", + Type: "directory", + Size: 2097152, + }, }, }, { - search: "dir type:largerThan=1", - scope: "/", - expectedResult: []string{"firstDir/thisIsDir/"}, - expectedTypes: map[string]map[string]bool{ - "firstDir/thisIsDir/": {"dir": true}, + search: "IsDir type:largerThan=1", + scope: "/", + expectedResult: []searchResult{ + { + Path: "firstDir/thisIsDir/", + Type: "directory", + Size: 2097152, + }, }, }, { search: "video", scope: "/", - expectedResult: []string{ - "new/test/video.mp4", - "new/test/video.MP4", - }, - expectedTypes: map[string]map[string]bool{ - "new/test/video.MP4": {"video": true, "dir": false}, - "new/test/video.mp4": {"video": true, "dir": false}, + expectedResult: []searchResult{ + { + Path: "new/test/video.MP4", + Type: "video", + Size: 0, + }, + { + Path: "new/test/video.mp4", + Type: "video", + Size: 0, + }, }, }, } for _, tt := range tests { t.Run(tt.search, func(t *testing.T) { - actualResult, actualTypes := index.Search(tt.search, tt.scope, "") - assert.Equal(t, tt.expectedResult, actualResult) - assert.Equal(t, tt.expectedTypes, actualTypes) - }) - } -} - -func Test_scopedPathNameFilter(t *testing.T) { - tests := []struct { - name string - args struct { - pathName string - scope string - isDir bool // Assuming isDir should be included in args - } - want string - }{ - { - name: "scope test", - args: struct { - pathName string - scope string - isDir bool - }{ - pathName: "/", - scope: "/", - isDir: false, - }, - want: "", // Update this with the expected result - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) 
{ - if got := scopedPathNameFilter(tt.args.pathName, tt.args.scope, tt.args.isDir); got != tt.want { - t.Errorf("scopedPathNameFilter() = %v, want %v", got, tt.want) - } + result := index.Search(tt.search, tt.scope, "") + assert.Equal(t, tt.expectedResult, result) }) } } diff --git a/backend/files/sync.go b/backend/files/sync.go index 050e262f..7eb35ba1 100644 --- a/backend/files/sync.go +++ b/backend/files/sync.go @@ -2,72 +2,89 @@ package files import ( "log" + "path/filepath" + "sort" "time" "github.com/gtsteffaniak/filebrowser/settings" ) // UpdateFileMetadata updates the FileInfo for the specified directory in the index. -func (si *Index) UpdateFileMetadata(adjustedPath string, info FileInfo) bool { +func (si *Index) UpdateMetadata(info *FileInfo) bool { si.mu.Lock() defer si.mu.Unlock() - dir, exists := si.Directories[adjustedPath] - if !exists { - si.Directories[adjustedPath] = FileInfo{} - } - return si.SetFileMetadata(adjustedPath, dir) -} - -// SetFileMetadata sets the FileInfo for the specified directory in the index. -// internal use only -func (si *Index) SetFileMetadata(adjustedPath string, info FileInfo) bool { - _, exists := si.Directories[adjustedPath] - if !exists { - return false - } info.CacheTime = time.Now() - si.Directories[adjustedPath] = info + si.Directories[info.Path] = info return true } // GetMetadataInfo retrieves the FileInfo from the specified directory in the index. -func (si *Index) GetMetadataInfo(adjustedPath string) (FileInfo, bool) { +func (si *Index) GetReducedMetadata(target string, isDir bool) (*FileInfo, bool) { si.mu.RLock() - dir, exists := si.Directories[adjustedPath] - si.mu.RUnlock() - if !exists { - return dir, exists + defer si.mu.RUnlock() + checkDir := si.makeIndexPath(target) + if !isDir { + checkDir = si.makeIndexPath(filepath.Dir(target)) + } + dir, exists := si.Directories[checkDir] + if !exists { + return nil, false + } + if !isDir { + if checkDir == "/" { + checkDir = "" + } + + baseName := filepath.Base(target) + for _, item := range dir.Files { + if item.Name == baseName { + return &FileInfo{ + Name: item.Name, + Size: item.Size, + ModTime: item.ModTime, + Type: item.Type, + Path: checkDir + "/" + item.Name, + }, true + } + } + return nil, false } - // remove recursive items, we only want this directories direct files cleanedItems := []ReducedItem{} - for _, item := range dir.Items { + for name, item := range dir.Dirs { cleanedItems = append(cleanedItems, ReducedItem{ - Name: item.Name, + Name: name, Size: item.Size, - IsDir: item.IsDir, ModTime: item.ModTime, - Type: item.Type, + Type: "directory", }) } - dir.Items = nil - dir.ReducedItems = cleanedItems - realPath, _, _ := GetRealPath(adjustedPath) - dir.Path = realPath - return dir, exists + cleanedItems = append(cleanedItems, dir.Files...) + sort.Slice(cleanedItems, func(i, j int) bool { + return cleanedItems[i].Name < cleanedItems[j].Name + }) + dirname := filepath.Base(dir.Path) + if dirname == "." { + dirname = "/" + } + // construct file info + return &FileInfo{ + Name: dirname, + Type: "directory", + Items: cleanedItems, + ModTime: dir.ModTime, + Size: dir.Size, + }, true } -// SetDirectoryInfo sets the directory information in the index. -func (si *Index) SetDirectoryInfo(adjustedPath string, dir FileInfo) { - si.mu.Lock() - si.Directories[adjustedPath] = dir - si.mu.Unlock() -} - -// SetDirectoryInfo sets the directory information in the index. 
-func (si *Index) GetDirectoryInfo(adjustedPath string) (FileInfo, bool) { +// GetMetadataInfo retrieves the FileInfo from the specified directory in the index. +func (si *Index) GetMetadataInfo(target string, isDir bool) (*FileInfo, bool) { si.mu.RLock() - dir, exists := si.Directories[adjustedPath] - si.mu.RUnlock() + defer si.mu.RUnlock() + checkDir := si.makeIndexPath(target) + if !isDir { + checkDir = si.makeIndexPath(filepath.Dir(target)) + } + dir, exists := si.Directories[checkDir] return dir, exists } @@ -108,11 +125,12 @@ func GetIndex(root string) *Index { } newIndex := &Index{ Root: rootPath, - Directories: map[string]FileInfo{}, + Directories: map[string]*FileInfo{}, NumDirs: 0, NumFiles: 0, inProgress: false, } + newIndex.Directories["/"] = &FileInfo{} indexesMutex.Lock() indexes = append(indexes, newIndex) indexesMutex.Unlock() diff --git a/backend/files/sync_test.go b/backend/files/sync_test.go index 6727e4d7..36333750 100644 --- a/backend/files/sync_test.go +++ b/backend/files/sync_test.go @@ -32,9 +32,9 @@ func TestGetFileMetadataSize(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - fileInfo, _ := testIndex.GetMetadataInfo(tt.adjustedPath) + fileInfo, _ := testIndex.GetReducedMetadata(tt.adjustedPath, true) // Iterate over fileInfo.Items to look for expectedName - for _, item := range fileInfo.ReducedItems { + for _, item := range fileInfo.Items { // Assert the existence and the name if item.Name == tt.expectedName { assert.Equal(t, tt.expectedSize, item.Size) @@ -53,28 +53,29 @@ func TestGetFileMetadata(t *testing.T) { adjustedPath string expectedName string expectedExists bool + isDir bool }{ { name: "testpath exists", - adjustedPath: "/testpath", + adjustedPath: "/testpath/testfile.txt", expectedName: "testfile.txt", expectedExists: true, }, { name: "testpath not exists", - adjustedPath: "/testpath", + adjustedPath: "/testpath/nonexistent.txt", expectedName: "nonexistent.txt", expectedExists: false, }, { name: "File exists in /anotherpath", - adjustedPath: "/anotherpath", + adjustedPath: "/anotherpath/afile.txt", expectedName: "afile.txt", expectedExists: true, }, { name: "File does not exist in /anotherpath", - adjustedPath: "/anotherpath", + adjustedPath: "/anotherpath/nonexistentfile.txt", expectedName: "nonexistentfile.txt", expectedExists: false, }, @@ -83,20 +84,33 @@ func TestGetFileMetadata(t *testing.T) { adjustedPath: "/nonexistentpath", expectedName: "", expectedExists: false, + isDir: true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - fileInfo, _ := testIndex.GetMetadataInfo(tt.adjustedPath) + fileInfo, _ := testIndex.GetReducedMetadata(tt.adjustedPath, tt.isDir) + if fileInfo == nil { + found := false + assert.Equal(t, tt.expectedExists, found) + return + } found := false - // Iterate over fileInfo.Items to look for expectedName - for _, item := range fileInfo.ReducedItems { - // Assert the existence and the name - if item.Name == tt.expectedName { + if tt.isDir { + // Iterate over fileInfo.Items to look for expectedName + for _, item := range fileInfo.Items { + // Assert the existence and the name + if item.Name == tt.expectedName { + found = true + break + } + } + } else { + if fileInfo.Name == tt.expectedName { found = true - break } } + assert.Equal(t, tt.expectedExists, found) }) } @@ -104,42 +118,42 @@ func TestGetFileMetadata(t *testing.T) { // Test for UpdateFileMetadata func TestUpdateFileMetadata(t *testing.T) { - index := &Index{ - Directories: map[string]FileInfo{ - "/testpath": { - 
Path: "/testpath", - Name: "testpath", - IsDir: true, - ReducedItems: []ReducedItem{ - {Name: "testfile.txt"}, - {Name: "anotherfile.txt"}, - }, - }, + info := &FileInfo{ + Path: "/testpath", + Name: "testpath", + Type: "directory", + Files: []ReducedItem{ + {Name: "testfile.txt"}, + {Name: "anotherfile.txt"}, }, } - info := FileInfo{Name: "testfile.txt"} + index := &Index{ + Directories: map[string]*FileInfo{ + "/testpath": info, + }, + } - success := index.UpdateFileMetadata("/testpath", info) + success := index.UpdateMetadata(info) if !success { t.Fatalf("expected UpdateFileMetadata to succeed") } - dir, exists := index.Directories["/testpath"] - if !exists || dir.ReducedItems[0].Name != "testfile.txt" { - t.Fatalf("expected testfile.txt to be updated in the directory metadata") + fileInfo, exists := index.GetReducedMetadata("/testpath/testfile.txt", false) + if !exists || fileInfo.Name != "testfile.txt" { + t.Fatalf("expected testfile.txt to be updated in the directory metadata:%v %v", exists, info.Name) } } // Test for GetDirMetadata func TestGetDirMetadata(t *testing.T) { t.Parallel() - _, exists := testIndex.GetMetadataInfo("/testpath") + _, exists := testIndex.GetReducedMetadata("/testpath", true) if !exists { t.Fatalf("expected GetDirMetadata to return initialized metadata map") } - _, exists = testIndex.GetMetadataInfo("/nonexistent") + _, exists = testIndex.GetReducedMetadata("/nonexistent", true) if exists { t.Fatalf("expected GetDirMetadata to return false for nonexistent directory") } @@ -148,51 +162,37 @@ func TestGetDirMetadata(t *testing.T) { // Test for SetDirectoryInfo func TestSetDirectoryInfo(t *testing.T) { index := &Index{ - Directories: map[string]FileInfo{ + Directories: map[string]*FileInfo{ "/testpath": { - Path: "/testpath", - Name: "testpath", - IsDir: true, - Items: []*FileInfo{ + Path: "/testpath", + Name: "testpath", + Type: "directory", + Items: []ReducedItem{ {Name: "testfile.txt"}, {Name: "anotherfile.txt"}, }, }, }, } - dir := FileInfo{ - Path: "/newPath", - Name: "newPath", - IsDir: true, - Items: []*FileInfo{ + dir := &FileInfo{ + Path: "/newPath", + Name: "newPath", + Type: "directory", + Items: []ReducedItem{ {Name: "testfile.txt"}, }, } - index.SetDirectoryInfo("/newPath", dir) + index.UpdateMetadata(dir) storedDir, exists := index.Directories["/newPath"] if !exists || storedDir.Items[0].Name != "testfile.txt" { t.Fatalf("expected SetDirectoryInfo to store directory info correctly") } } -// Test for GetDirectoryInfo -func TestGetDirectoryInfo(t *testing.T) { - t.Parallel() - dir, exists := testIndex.GetDirectoryInfo("/testpath") - if !exists || dir.Items[0].Name != "testfile.txt" { - t.Fatalf("expected GetDirectoryInfo to return correct directory info") - } - - _, exists = testIndex.GetDirectoryInfo("/nonexistent") - if exists { - t.Fatalf("expected GetDirectoryInfo to return false for nonexistent directory") - } -} - // Test for RemoveDirectory func TestRemoveDirectory(t *testing.T) { index := &Index{ - Directories: map[string]FileInfo{ + Directories: map[string]*FileInfo{ "/testpath": {}, }, } @@ -234,28 +234,26 @@ func init() { NumFiles: 10, NumDirs: 5, inProgress: false, - Directories: map[string]FileInfo{ + Directories: map[string]*FileInfo{ "/testpath": { - Path: "/testpath", - Name: "testpath", - IsDir: true, - NumDirs: 1, - NumFiles: 2, - Items: []*FileInfo{ + Path: "/testpath", + Name: "testpath", + Type: "directory", + Files: []ReducedItem{ {Name: "testfile.txt", Size: 100}, {Name: "anotherfile.txt", Size: 100}, }, }, "/anotherpath": { - 
Path: "/anotherpath", - Name: "anotherpath", - IsDir: true, - NumDirs: 1, - NumFiles: 1, - Items: []*FileInfo{ - {Name: "directory", IsDir: true, Size: 100}, + Path: "/anotherpath", + Name: "anotherpath", + Type: "directory", + Files: []ReducedItem{ {Name: "afile.txt", Size: 100}, }, + Dirs: map[string]*FileInfo{ + "directory": {Name: "directory", Type: "directory", Size: 100}, + }, }, }, } diff --git a/backend/fileutils/copy.go b/backend/fileutils/copy.go index 6f3d09c7..a0bb6d12 100644 --- a/backend/fileutils/copy.go +++ b/backend/fileutils/copy.go @@ -6,7 +6,7 @@ import ( ) // Copy copies a file or folder from one place to another. -func Copy(src, dst string) error { +func CopyHelper(src, dst string) error { src = filepath.Clean(src) if src == "" { return os.ErrNotExist diff --git a/backend/frontend/dist b/backend/frontend/dist deleted file mode 120000 index e4608c09..00000000 --- a/backend/frontend/dist +++ /dev/null @@ -1 +0,0 @@ -../../frontend/dist \ No newline at end of file diff --git a/backend/frontend/package-lock.json b/backend/frontend/package-lock.json deleted file mode 100644 index aba25f73..00000000 --- a/backend/frontend/package-lock.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "name": "frontend", - "lockfileVersion": 3, - "requires": true, - "packages": {} -} diff --git a/backend/go.mod b/backend/go.mod index bfa86b2a..7a2d36db 100644 --- a/backend/go.mod +++ b/backend/go.mod @@ -7,10 +7,9 @@ require ( github.com/disintegration/imaging v1.6.2 github.com/dsoprea/go-exif/v3 v3.0.1 github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568 - github.com/goccy/go-yaml v1.12.0 - github.com/golang-jwt/jwt/v4 v4.5.0 + github.com/goccy/go-yaml v1.14.3 + github.com/golang-jwt/jwt/v4 v4.5.1 github.com/google/go-cmp v0.6.0 - github.com/gorilla/mux v1.8.1 github.com/marusama/semaphore/v2 v2.5.0 github.com/mholt/archiver/v3 v3.5.1 github.com/shirou/gopsutil/v3 v3.24.5 @@ -18,39 +17,45 @@ require ( github.com/spf13/cobra v1.8.1 github.com/spf13/pflag v1.0.5 github.com/stretchr/testify v1.9.0 - github.com/tomasen/realip v0.0.0-20180522021738-f0c99a92ddce - golang.org/x/crypto v0.26.0 - golang.org/x/image v0.19.0 - golang.org/x/text v0.17.0 + github.com/swaggo/http-swagger v1.3.4 + github.com/swaggo/swag v1.16.4 + golang.org/x/crypto v0.29.0 + golang.org/x/image v0.22.0 + golang.org/x/text v0.20.0 ) require ( - github.com/andybalholm/brotli v1.1.0 // indirect + github.com/KyleBanks/depth v1.2.1 // indirect + github.com/andybalholm/brotli v1.1.1 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5 // indirect github.com/dsoprea/go-logging v0.0.0-20200710184922-b02d349568dd // indirect github.com/dsoprea/go-utility/v2 v2.0.0-20221003172846-a3e1774ef349 // indirect - github.com/fatih/color v1.17.0 // indirect github.com/go-errors/errors v1.5.1 // indirect github.com/go-ole/go-ole v1.3.0 // indirect + github.com/go-openapi/jsonpointer v0.21.0 // indirect + github.com/go-openapi/jsonreference v0.21.0 // indirect + github.com/go-openapi/spec v0.21.0 // indirect + github.com/go-openapi/swag v0.23.0 // indirect github.com/golang/geo v0.0.0-20230421003525-6adc56603217 // indirect github.com/golang/snappy v0.0.4 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect - github.com/klauspost/compress v1.17.9 // indirect + github.com/josharian/intern v1.0.0 // indirect + github.com/klauspost/compress v1.17.11 // indirect github.com/klauspost/pgzip v1.2.6 // indirect - github.com/mattn/go-colorable v0.1.13 // indirect - 
github.com/mattn/go-isatty v0.0.20 // indirect + github.com/mailru/easyjson v0.7.7 // indirect github.com/nwaples/rardecode v1.1.3 // indirect github.com/pierrec/lz4/v4 v4.1.21 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 // indirect + github.com/swaggo/files v1.0.1 // indirect github.com/ulikunitz/xz v0.5.12 // indirect github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 // indirect github.com/yusufpapurcu/wmi v1.2.4 // indirect go.etcd.io/bbolt v1.3.11 // indirect - golang.org/x/net v0.28.0 // indirect - golang.org/x/sys v0.24.0 // indirect - golang.org/x/xerrors v0.0.0-20240716161551-93cc26a95ae9 // indirect + golang.org/x/net v0.31.0 // indirect + golang.org/x/sys v0.27.0 // indirect + golang.org/x/tools v0.27.0 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/backend/go.sum b/backend/go.sum index 1aaf45c9..b10f10aa 100644 --- a/backend/go.sum +++ b/backend/go.sum @@ -1,10 +1,12 @@ github.com/DataDog/zstd v1.4.1 h1:3oxKN3wbHibqx897utPC2LTQU4J+IHWWJO+glkAkpFM= github.com/DataDog/zstd v1.4.1/go.mod h1:1jcaCB/ufaK+sKp1NBhlGmpz41jOoPQ35bpF36t7BBo= +github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc= +github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE= github.com/Sereal/Sereal v0.0.0-20190618215532-0b8ac451a863 h1:BRrxwOZBolJN4gIwvZMJY1tzqBvQgpaZiQRuIDD40jM= github.com/Sereal/Sereal v0.0.0-20190618215532-0b8ac451a863/go.mod h1:D0JMgToj/WdxCgd30Kc1UcA9E+WdZoJqeVOuYW7iTBM= github.com/andybalholm/brotli v1.0.1/go.mod h1:loMXtMfwqflxFJPmdbJO0a3KNoPuLBgiu3qAvBg8x/Y= -github.com/andybalholm/brotli v1.1.0 h1:eLKJA0d02Lf0mVpIDgYnqXcUn0GqVmEFny3VuID1U3M= -github.com/andybalholm/brotli v1.1.0/go.mod h1:sms7XGricyQI9K10gOSf56VKKWS4oLer58Q+mhRPtnY= +github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA= +github.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA= github.com/asdine/storm/v3 v3.2.1 h1:I5AqhkPK6nBZ/qJXySdI7ot5BlXSZ7qvDY1zAn5ZJac= github.com/asdine/storm/v3 v3.2.1/go.mod h1:LEpXwGt4pIqrE/XcTvCnZHT5MgZCV6Ub9q7yQzOFWr0= github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= @@ -32,8 +34,6 @@ github.com/dsoprea/go-utility/v2 v2.0.0-20221003142440-7a1927d49d9d/go.mod h1:LV github.com/dsoprea/go-utility/v2 v2.0.0-20221003160719-7bc88537c05e/go.mod h1:VZ7cB0pTjm1ADBWhJUOHESu4ZYy9JN+ZPqjfiW09EPU= github.com/dsoprea/go-utility/v2 v2.0.0-20221003172846-a3e1774ef349 h1:DilThiXje0z+3UQ5YjYiSRRzVdtamFpvBQXKwMglWqw= github.com/dsoprea/go-utility/v2 v2.0.0-20221003172846-a3e1774ef349/go.mod h1:4GC5sXji84i/p+irqghpPFZBF8tRN/Q7+700G0/DLe8= -github.com/fatih/color v1.17.0 h1:GlRw1BRJxkpqUCBKzKOw098ed57fEsKeNjpTe3cSjK4= -github.com/fatih/color v1.17.0/go.mod h1:YZ7TlrGPkiz6ku9fK3TLD/pl3CpsiFyu8N92HLgmosI= github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568 h1:BHsljHzVlRcyQhjrss6TZTdY2VfCqZPbv5k3iBFa2ZQ= github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc= github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q= @@ -45,16 +45,18 @@ github.com/go-errors/errors v1.5.1/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3Bop github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE= github.com/go-ole/go-ole 
v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78= -github.com/go-playground/locales v0.13.0 h1:HyWk6mgj5qFqCT5fjGBuRArbVDfE4hi8+e8ceBS/t7Q= -github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8= -github.com/go-playground/universal-translator v0.17.0 h1:icxd5fm+REJzpZx7ZfpaD876Lmtgy7VtROAbHHXk8no= -github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA= -github.com/go-playground/validator/v10 v10.4.1 h1:pH2c5ADXtd66mxoE0Zm9SUhxE20r7aM3F26W0hOn+GE= -github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4= -github.com/goccy/go-yaml v1.12.0 h1:/1WHjnMsI1dlIBQutrvSMGZRQufVO3asrHfTwfACoPM= -github.com/goccy/go-yaml v1.12.0/go.mod h1:wKnAMd44+9JAAnGQpWVEgBzGt3YuTaQ4uXoHvE4m7WU= -github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg= -github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= +github.com/go-openapi/jsonpointer v0.21.0 h1:YgdVicSA9vH5RiHs9TZW5oyafXZFc6+2Vc1rr/O9oNQ= +github.com/go-openapi/jsonpointer v0.21.0/go.mod h1:IUyH9l/+uyhIYQ/PXVA41Rexl+kOkAPDdXEYns6fzUY= +github.com/go-openapi/jsonreference v0.21.0 h1:Rs+Y7hSXT83Jacb7kFyjn4ijOuVGSvOdF2+tg1TRrwQ= +github.com/go-openapi/jsonreference v0.21.0/go.mod h1:LmZmgsrTkVg9LG4EaHeY8cBDslNPMo06cago5JNLkm4= +github.com/go-openapi/spec v0.21.0 h1:LTVzPc3p/RzRnkQqLRndbAzjY0d0BCL72A6j3CdL9ZY= +github.com/go-openapi/spec v0.21.0/go.mod h1:78u6VdPw81XU44qEWGhtr982gJ5BWg2c0I5XwVMotYk= +github.com/go-openapi/swag v0.23.0 h1:vsEVJDUo2hPJ2tu0/Xc+4noaxyEffXNIs3cOULZ+GrE= +github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ= +github.com/goccy/go-yaml v1.14.3 h1:8tVD+aqqPLWisSEhM+6wWoiURWXCx6BwaTKS6ZeITgM= +github.com/goccy/go-yaml v1.14.3/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA= +github.com/golang-jwt/jwt/v4 v4.5.1 h1:JdqV9zKUdtaa9gdPlywC3aeoEsR681PlKC+4F5gQgeo= +github.com/golang-jwt/jwt/v4 v4.5.1/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= github.com/golang/geo v0.0.0-20190916061304-5b978397cfec/go.mod h1:QZ0nwyI2jOfgRAoBvP+ab5aRr7c9x7lhGEJrKvBwjWI= github.com/golang/geo v0.0.0-20200319012246-673a6f80352d/go.mod h1:QZ0nwyI2jOfgRAoBvP+ab5aRr7c9x7lhGEJrKvBwjWI= github.com/golang/geo v0.0.0-20210211234256-740aa86cb551/go.mod h1:QZ0nwyI2jOfgRAoBvP+ab5aRr7c9x7lhGEJrKvBwjWI= @@ -71,34 +73,31 @@ github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEW github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY= -github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= github.com/jessevdk/go-flags v1.5.0/go.mod h1:Fw0T6WPc1dYxT4mKEZRfG5kJhaTDP9pj1c2EWnYs/m4= +github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= github.com/klauspost/compress v1.4.1/go.mod 
h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= github.com/klauspost/compress v1.11.4/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= -github.com/klauspost/compress v1.17.9 h1:6KIumPrER1LHsvBVuDa0r5xaG0Es51mhhB9BQB2qeMA= -github.com/klauspost/compress v1.17.9/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= +github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc= +github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0= github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= github.com/klauspost/pgzip v1.2.5/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs= github.com/klauspost/pgzip v1.2.6 h1:8RXeL5crjEUFnR2/Sn6GJNWtSQ3Dk8pq4CL3jvdDyjU= github.com/klauspost/pgzip v1.2.6/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs= -github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= -github.com/leodido/go-urn v1.2.0 h1:hpXL4XnriNwQ/ABnpepYM/1vCLWNDfUNts8dX3xTG6Y= -github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= +github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/marusama/semaphore/v2 v2.5.0 h1:o/1QJD9DBYOWRnDhPwDVAXQn6mQYD0gZaS1Tpx6DJGM= github.com/marusama/semaphore/v2 v2.5.0/go.mod h1:z9nMiNUekt/LTpTUQdpp+4sJeYqUGpwMHfW0Z8V8fnQ= -github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= -github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= -github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= -github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= -github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mholt/archiver/v3 v3.5.1 h1:rDjOBX9JSF5BvoJGvjqK479aL70qh9DIpZCl+k7Clwo= github.com/mholt/archiver/v3 v3.5.1/go.mod h1:e3dqJ7H78uzsRSEACH1joayhuSyhnonssnDhppzS1L4= github.com/nwaples/rardecode v1.1.0/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0= @@ -111,6 +110,8 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 h1:o4JXh1EVt9k/+g42oCprj/FisM4qX9L3sZB3upGN2ZU= github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= +github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M= +github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/shirou/gopsutil/v3 
v3.24.5 h1:i0t8kL+kQTvpAYToeuiVk3TgDeKOFioZO3Ztz/iZ9pI= github.com/shirou/gopsutil/v3 v3.24.5/go.mod h1:bsoOS1aStSs9ErQ1WWfxllSeS1K5D+U30r2NfcubMVk= @@ -123,8 +124,12 @@ github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= -github.com/tomasen/realip v0.0.0-20180522021738-f0c99a92ddce h1:fb190+cK2Xz/dvi9Hv8eCYJYvIGUTN2/KLq1pT6CjEc= -github.com/tomasen/realip v0.0.0-20180522021738-f0c99a92ddce/go.mod h1:o8v6yHRoik09Xen7gje4m9ERNah1d1PPsVq1VEx9vE4= +github.com/swaggo/files v1.0.1 h1:J1bVJ4XHZNq0I46UU90611i9/YzdrF7x92oX1ig5IdE= +github.com/swaggo/files v1.0.1/go.mod h1:0qXmMNH6sXNf+73t65aKeB+ApmgxdnkQzVTAj2uaMUg= +github.com/swaggo/http-swagger v1.3.4 h1:q7t/XLx0n15H1Q9/tk3Y9L4n210XzJF5WtnDX64a5ww= +github.com/swaggo/http-swagger v1.3.4/go.mod h1:9dAh0unqMBAlbp1uE2Uc2mQTxNMU/ha4UbucIg1MFkQ= +github.com/swaggo/swag v1.16.4 h1:clWJtd9LStiG3VeijiCfOVODP6VpHtKdQy9ELFG3s1A= +github.com/swaggo/swag v1.16.4/go.mod h1:VBsHJRsDvfYvqoiMKnsdwhNV9LEMHgEDZcyVYX0sxPg= github.com/ulikunitz/xz v0.5.8/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= github.com/ulikunitz/xz v0.5.9/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= github.com/ulikunitz/xz v0.5.12 h1:37Nm15o69RwBkXM0J6A5OlE67RZTfzUxTj8fB3dfcsc= @@ -133,60 +138,83 @@ github.com/vmihailenco/msgpack v4.0.4+incompatible h1:dSLoQfGFAo3F6OoNhwUmLwVgaU github.com/vmihailenco/msgpack v4.0.4+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 h1:nIPpBwaJSVYIxUFsDv3M8ofmx9yWTog9BfvIu0q41lo= github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8/go.mod h1:HUYIGzjTL3rfEspMxjDjgmT5uz5wzYJKVo23qUhYTos= +github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU= +github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0= github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= go.etcd.io/bbolt v1.3.4/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ= go.etcd.io/bbolt v1.3.11 h1:yGEzV1wPz2yVCLsD8ZAiGHhHVlczyC9d1rP43/VCRJ0= go.etcd.io/bbolt v1.3.11/go.mod h1:dksAq7YMXoljX0xu6VF5DMZGbhYYoLUalEiSySYAS4I= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.26.0 h1:RrRspgV4mU+YwB4FYnuBoKsUapNIL5cohGAmSH3azsw= -golang.org/x/crypto v0.26.0/go.mod h1:GY7jblb9wI+FOo5y8/S2oY4zWP07AkOJ4+jxCqdqn54= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.29.0 h1:L5SG1JTTXupVV3n6sUqMTeWbjAyfPwoda2DLX8J8FrQ= +golang.org/x/crypto v0.29.0/go.mod h1:+F4F4N5hv6v38hfeYwTdx20oUvLLc+QfrE9Ax9HtgRg= golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= -golang.org/x/image v0.19.0 h1:D9FX4QWkLfkeqaC62SonffIIuYdOk/UE2XKUBgRIBIQ= -golang.org/x/image v0.19.0/go.mod h1:y0zrRqlQRWQ5PXaYCOMLTW2fpsxZ8Qh9I/ohnInJEys= +golang.org/x/image v0.22.0 h1:UtK5yLUzilVrkjMAZAZ34DXGpASN8i8pj8g+O+yd10g= +golang.org/x/image v0.22.0/go.mod 
h1:9hPFhljd4zZ1GNSIZJ49sqbp45GKK9t6w+iXvGqZUz4= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20191105084925-a882066a44e0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200320220750-118fecf932d8/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.0.0-20221002022538-bcab6841153b/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= -golang.org/x/net v0.28.0 h1:a9JDOJc5GMUJ0+UDqmLT86WiEy7iWyIhz8gz8E4e5hE= -golang.org/x/net v0.28.0/go.mod h1:yqtgsTWOOnlGLG9GFRrK3++bGOUEkNBoHZc8MEDWPNg= -golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= -golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.31.0 h1:68CPQngjLL0r2AlUKiSxtQFKvzRVbnzLwMUn5SzcLHo= +golang.org/x/net v0.31.0/go.mod h1:P4fl1q7dY2hnZFxEk4pPSkDHF+QqjitcnDjUQyMM+pM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220928140112-f11e5e49a4ec/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.24.0 h1:Twjiwq9dn6R1fQcyiK+wQyHWfaz/BJB+YIpzU/Cv3Xg= -golang.org/x/sys v0.24.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.17.0 h1:XtiM5bkSOt+ewxlOE/aE/AKEHibwj/6gvWMl9Rsh0Qc= -golang.org/x/text v0.17.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.20.0 h1:gK/Kv2otX8gz+wn7Rmb3vT96ZwuoxnQlY+HlJVj7Qug= +golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.27.0 h1:qEKojBykQkQ4EynWy4S8Weg69NumxKdn40Fce3uc/8o= +golang.org/x/tools v0.27.0/go.mod h1:sUi0ZgbwW9ZPAq26Ekut+weQPR5eIM6GQLQ1Yjm1H0Q= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20240716161551-93cc26a95ae9 h1:LLhsEBxRTBLuKlQxFBYUOU8xyFgXv6cOTp2HASDlsDk= -golang.org/x/xerrors v0.0.0-20240716161551-93cc26a95ae9/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90= google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8= google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod 
h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= diff --git a/backend/http/api.go b/backend/http/api.go new file mode 100644 index 00000000..6f12acfb --- /dev/null +++ b/backend/http/api.go @@ -0,0 +1,121 @@ +package http + +import ( + "fmt" + "net/http" + "strconv" + "strings" + "time" + + "github.com/gtsteffaniak/filebrowser/users" +) + +func createApiKeyHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) { + name := r.URL.Query().Get("name") + durationStr := r.URL.Query().Get("days") + permissionsStr := r.URL.Query().Get("permissions") + + if name == "" { + return http.StatusInternalServerError, fmt.Errorf("api name must be valid") + } + if durationStr == "" { + return http.StatusInternalServerError, fmt.Errorf("api duration must be valid") + } + if permissionsStr == "" { + return http.StatusInternalServerError, fmt.Errorf("api permissions must be valid") + } + // Parse permissions from the query parameter + permissions := users.Permissions{ + Api: strings.Contains(permissionsStr, "api") && d.user.Perm.Api, + Admin: strings.Contains(permissionsStr, "admin") && d.user.Perm.Admin, + Execute: strings.Contains(permissionsStr, "execute") && d.user.Perm.Execute, + Create: strings.Contains(permissionsStr, "create") && d.user.Perm.Create, + Rename: strings.Contains(permissionsStr, "rename") && d.user.Perm.Rename, + Modify: strings.Contains(permissionsStr, "modify") && d.user.Perm.Modify, + Delete: strings.Contains(permissionsStr, "delete") && d.user.Perm.Delete, + Share: strings.Contains(permissionsStr, "share") && d.user.Perm.Share, + Download: strings.Contains(permissionsStr, "download") && d.user.Perm.Download, + } + + // Convert the duration string to an int64 + durationInt, err := strconv.ParseInt(durationStr, 10, 64) // Base 10 and bit size of 64 + if err != nil { + return http.StatusBadRequest, fmt.Errorf("invalid duration value: %w", err) + } + + // Here we assume the duration is in seconds; convert to time.Duration + duration := time.Duration(durationInt) * time.Hour * 24 + + // get request body like: + token, err := makeSignedTokenAPI(d.user, name, duration, permissions) + if err != nil { + if strings.Contains(err.Error(), "key already exists with same name") { + return http.StatusConflict, err + } + return http.StatusInternalServerError, err + } + response := HttpResponse{ + Message: "here is your token!", + Token: token.Key, + } + return renderJSON(w, r, response) +} + +func deleteApiKeyHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) { + name := r.URL.Query().Get("name") + + keyInfo, ok := d.user.ApiKeys[name] + if !ok { + return http.StatusNotFound, fmt.Errorf("api key not found") + } + // Perform the user update + err := store.Users.DeleteApiKey(d.user.ID, name) + if err != nil { + return http.StatusNotFound, err + } + + revokeAPIKey(keyInfo.Key) // add to blacklist + response := HttpResponse{ + Message: "successfully deleted api key from user", + } + return renderJSON(w, r, response) +} + +type AuthTokenMin struct { + Key string `json:"key"` + Name string `json:"name"` + Created int64 `json:"created"` + Expires int64 `json:"expires"` + Permissions users.Permissions `json:"Permissions"` +} + +func listApiKeysHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) { + key := 
r.URL.Query().Get("key") + + if key != "" { + keyInfo, ok := d.user.ApiKeys[key] + if !ok { + return http.StatusNotFound, fmt.Errorf("api key not found") + } + modifiedKey := AuthTokenMin{ + Key: keyInfo.Key, + Name: key, + Created: keyInfo.Created, + Expires: keyInfo.Expires, + Permissions: keyInfo.Permissions, + } + return renderJSON(w, r, modifiedKey) + } + + modifiedList := map[string]AuthTokenMin{} + for key, value := range d.user.ApiKeys { + modifiedList[key] = AuthTokenMin{ + Key: value.Key, + Created: value.Created, + Expires: value.Expires, + Permissions: value.Permissions, + } + } + + return renderJSON(w, r, modifiedList) +} diff --git a/backend/http/auth.go b/backend/http/auth.go index 694cb97f..abf34674 100644 --- a/backend/http/auth.go +++ b/backend/http/auth.go @@ -2,10 +2,12 @@ package http import ( "encoding/json" + "fmt" "log" "net/http" "os" "strings" + "sync" "time" "github.com/golang-jwt/jwt/v4" @@ -14,91 +16,77 @@ import ( "github.com/gtsteffaniak/filebrowser/errors" "github.com/gtsteffaniak/filebrowser/settings" "github.com/gtsteffaniak/filebrowser/users" + "github.com/gtsteffaniak/filebrowser/utils" ) -type authToken struct { - User users.User `json:"user"` - jwt.RegisteredClaims -} +var ( + revokedApiKeyList map[string]bool + revokeMu sync.Mutex +) -type extractor []string +// first checks for cookie +// then checks for header Authorization as Bearer token +// then checks for query parameter +func extractToken(r *http.Request) (string, error) { + hasToken := false + tokenObj, err := r.Cookie("auth") + if err == nil { + hasToken = true + token := tokenObj.Value + // Checks if the token isn't empty and if it contains two dots. + // The former prevents incompatibility with URLs that previously + // used basic auth. + if token != "" && strings.Count(token, ".") == 2 { + return token, nil + } + } -func (e extractor) ExtractToken(r *http.Request) (string, error) { - token, _ := request.HeaderExtractor{"X-Auth"}.ExtractToken(r) - - // Checks if the token isn't empty and if it contains two dots. - // The former prevents incompatibility with URLs that previously - // used basic auth. 
- if token != "" && strings.Count(token, ".") == 2 { - return token, nil + // Check for Authorization header + authHeader := r.Header.Get("Authorization") + if authHeader != "" { + hasToken = true + // Split the header to get "Bearer {token}" + parts := strings.Split(authHeader, " ") + if len(parts) == 2 && parts[0] == "Bearer" { + token := parts[1] + return token, nil + } } auth := r.URL.Query().Get("auth") - if auth != "" && strings.Count(auth, ".") == 2 { - return auth, nil + if auth != "" { + hasToken = true + if strings.Count(auth, ".") == 2 { + return auth, nil + } } - if r.Method == http.MethodGet { - cookie, _ := r.Cookie("auth") - if cookie != nil && strings.Count(cookie.Value, ".") == 2 { - return cookie.Value, nil - } + if hasToken { + return "", fmt.Errorf("invalid token provided") } return "", request.ErrNoTokenInRequest } -func withUser(fn handleFunc) handleFunc { - return func(w http.ResponseWriter, r *http.Request, d *data) (int, error) { - keyFunc := func(token *jwt.Token) (interface{}, error) { - return d.settings.Auth.Key, nil - } - - var tk authToken - token, err := request.ParseFromRequest(r, &extractor{}, keyFunc, request.WithClaims(&tk)) - - if err != nil || !token.Valid { - return http.StatusUnauthorized, nil - } - - expired := !tk.VerifyExpiresAt(time.Now().Add(time.Hour), true) - updated := tk.IssuedAt != nil && tk.IssuedAt.Unix() < d.store.Users.LastUpdate(tk.User.ID) - - if expired || updated { - w.Header().Add("X-Renew-Token", "true") - } - - d.user, err = d.store.Users.Get(d.server.Root, tk.User.ID) - if err != nil { - return http.StatusInternalServerError, err - } - return fn(w, r, d) - } -} - -func withAdmin(fn handleFunc) handleFunc { - return withUser(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) { - if !d.user.Perm.Admin { - return http.StatusForbidden, nil - } - - return fn(w, r, d) - }) -} - -var loginHandler = func(w http.ResponseWriter, r *http.Request, d *data) (int, error) { - auther, err := d.store.Auth.Get(d.settings.Auth.Method) +func loginHandler(w http.ResponseWriter, r *http.Request) { + // Get the authentication method from the settings + auther, err := store.Auth.Get(config.Auth.Method) if err != nil { - return http.StatusInternalServerError, err + http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError) + return } - - user, err := auther.Auth(r, d.store.Users) + // Authenticate the user based on the request + user, err := auther.Auth(r, store.Users) if err == os.ErrPermission { - return http.StatusForbidden, nil + http.Error(w, http.StatusText(http.StatusForbidden), http.StatusForbidden) + return } else if err != nil { - return http.StatusInternalServerError, err - } else { - return printToken(w, r, d, user) + http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError) + return + } + status, err := printToken(w, r, user) // Pass the data object + if err != nil { + http.Error(w, http.StatusText(status), status) } } @@ -107,73 +95,115 @@ type signupBody struct { Password string `json:"password"` } -var signupHandler = func(w http.ResponseWriter, r *http.Request, d *data) (int, error) { +func signupHandler(w http.ResponseWriter, r *http.Request) { if !settings.Config.Auth.Signup { - return http.StatusMethodNotAllowed, nil + http.Error(w, http.StatusText(http.StatusMethodNotAllowed), http.StatusMethodNotAllowed) + return } if r.Body == nil { - return http.StatusBadRequest, nil + http.Error(w, http.StatusText(http.StatusBadRequest), 
http.StatusBadRequest)
+		return
 	}
 	info := &signupBody{}
 	err := json.NewDecoder(r.Body).Decode(info)
 	if err != nil {
-		return http.StatusBadRequest, err
+		http.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)
+		return
 	}
 	if info.Password == "" || info.Username == "" {
-		return http.StatusBadRequest, nil
+		http.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)
+		return
 	}
-	user := users.ApplyDefaults(users.User{})
+	user := settings.ApplyUserDefaults(users.User{})
 	user.Username = info.Username
 	user.Password = info.Password
-	userHome, err := d.settings.MakeUserDir(user.Username, user.Scope, d.server.Root)
+	userHome, err := config.MakeUserDir(user.Username, user.Scope, config.Server.Root)
 	if err != nil {
 		log.Printf("create user: failed to mkdir user home dir: [%s]", userHome)
-		return http.StatusInternalServerError, err
+		http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
+		return
 	}
 	user.Scope = userHome
 	log.Printf("new user: %s, home dir: [%s].", user.Username, userHome)
-	err = d.store.Users.Save(&user)
+	err = store.Users.Save(&user)
 	if err == errors.ErrExist {
-		return http.StatusConflict, err
+		http.Error(w, http.StatusText(http.StatusConflict), http.StatusConflict)
+		return
 	} else if err != nil {
-		return http.StatusInternalServerError, err
+		http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
+		return
 	}
-
-	return http.StatusOK, nil
 }
-var renewHandler = withUser(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
-	return printToken(w, r, d, d.user)
-})
+func renewHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) {
+	// the middleware has already authenticated the user, so just issue a fresh web token
+	return printToken(w, r, d.user)
+}
-func printToken(w http.ResponseWriter, _ *http.Request, d *data, user *users.User) (int, error) {
-	duration, err := time.ParseDuration(settings.Config.Auth.TokenExpirationTime)
-	if err != nil {
-		duration = time.Hour * 2
-	}
-	claims := &authToken{
-		User: *user,
-		RegisteredClaims: jwt.RegisteredClaims{
-			IssuedAt: jwt.NewNumericDate(time.Now()),
-			ExpiresAt: jwt.NewNumericDate(time.Now().Add(duration)),
-			Issuer: "File Browser",
-		},
-	}
-
-	token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)
-	signed, err := token.SignedString(d.settings.Auth.Key)
+func printToken(w http.ResponseWriter, _ *http.Request, user *users.User) (int, error) {
+	signed, err := makeSignedTokenAPI(user, "WEB_TOKEN_"+utils.GenerateRandomHash(4), time.Hour*2, user.Perm)
 	if err != nil {
+		if strings.Contains(err.Error(), "key already exists with same name") {
+			return http.StatusConflict, err
+		}
 		return http.StatusInternalServerError, err
 	}
-	w.Header().Set("Content-Type", "text/plain")
-	if _, err := w.Write([]byte(signed)); err != nil {
+	if _, err := w.Write([]byte(signed.Key)); err != nil {
 		return http.StatusInternalServerError, err
 	}
 	return 0, nil
 }
+
+// isRevokedApiKey reports whether the given token has been revoked.
+func isRevokedApiKey(key string) bool {
+	revokeMu.Lock()
+	_, exists := revokedApiKeyList[key]
+	revokeMu.Unlock()
+	return exists
+}
+
+// revokeAPIKey adds the token to the revocation blacklist so it is rejected on future requests.
+func revokeAPIKey(key string) {
+	revokeMu.Lock()
+	if revokedApiKeyList == nil {
+		revokedApiKeyList = map[string]bool{}
+	}
+	revokedApiKeyList[key] = true
+	revokeMu.Unlock()
+}
+
+func makeSignedTokenAPI(user *users.User, name string, duration time.Duration, perms users.Permissions) (users.AuthToken, error) {
+	_, ok := user.ApiKeys[name]
+	if ok {
+		return users.AuthToken{}, fmt.Errorf("key already exists with same name %v ", name)
+	}
+	now := time.Now()
+	expires := now.Add(duration)
+	claim := users.AuthToken{
+		Permissions: perms,
+		Created: 
now.Unix(), + Expires: expires.Unix(), + Name: name, + BelongsTo: user.ID, + RegisteredClaims: jwt.RegisteredClaims{ + IssuedAt: jwt.NewNumericDate(now), + ExpiresAt: jwt.NewNumericDate(expires), + Issuer: "FileBrowser Quantum", + }, + } + token := jwt.NewWithClaims(jwt.SigningMethodHS256, claim) + tokenString, err := token.SignedString(config.Auth.Key) + if err != nil { + return claim, err + } + claim.Key = tokenString + if strings.HasPrefix(name, "WEB_TOKEN") { + // don't add to api tokens, its a short lived web token + return claim, err + } + // Perform the user update + err = store.Users.AddApiKey(user.ID, name, claim) + if err != nil { + return claim, err + } + return claim, err +} diff --git a/backend/http/data.go b/backend/http/data.go deleted file mode 100644 index 564c7b40..00000000 --- a/backend/http/data.go +++ /dev/null @@ -1,75 +0,0 @@ -package http - -import ( - "log" - "net/http" - "strconv" - - "github.com/tomasen/realip" - - "github.com/gtsteffaniak/filebrowser/runner" - "github.com/gtsteffaniak/filebrowser/settings" - "github.com/gtsteffaniak/filebrowser/storage" - "github.com/gtsteffaniak/filebrowser/users" -) - -type handleFunc func(w http.ResponseWriter, r *http.Request, d *data) (int, error) - -type data struct { - *runner.Runner - settings *settings.Settings - server *settings.Server - store *storage.Storage - user *users.User - raw interface{} -} - -// Check implements rules.Checker. -func (d *data) Check(path string) bool { - - allow := true - for _, rule := range d.settings.Rules { - if rule.Matches(path) { - allow = rule.Allow - } - } - for _, rule := range d.user.Rules { - if rule.Matches(path) { - allow = rule.Allow - } - } - - return allow -} - -func handle(fn handleFunc, prefix string, store *storage.Storage, server *settings.Server) http.Handler { - handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.Header().Set("Cache-Control", "no-cache, no-store, must-revalidate") - - settings, err := store.Settings.Get() - if err != nil { - log.Fatalf("ERROR: couldn't get settings: %v\n", err) - return - } - - status, err := fn(w, r, &data{ - Runner: &runner.Runner{Enabled: server.EnableExec, Settings: settings}, - store: store, - settings: settings, - server: server, - }) - - if status >= 400 || err != nil { - clientIP := realip.FromRequest(r) - log.Printf("%s: %v %s %v", r.URL.Path, status, clientIP, err) - } - - if status != 0 { - txt := http.StatusText(status) - http.Error(w, strconv.Itoa(status)+" "+txt, status) - return - } - }) - - return stripPrefix(prefix, handler) -} diff --git a/backend/http/embed/.gitignore b/backend/http/embed/.gitignore new file mode 100644 index 00000000..f59ec20a --- /dev/null +++ b/backend/http/embed/.gitignore @@ -0,0 +1 @@ +* \ No newline at end of file diff --git a/backend/http/http.go b/backend/http/http.go deleted file mode 100644 index e6e566a5..00000000 --- a/backend/http/http.go +++ /dev/null @@ -1,85 +0,0 @@ -package http - -import ( - "io/fs" - "net/http" - - "github.com/gorilla/mux" - - "github.com/gtsteffaniak/filebrowser/settings" - "github.com/gtsteffaniak/filebrowser/storage" -) - -type modifyRequest struct { - What string `json:"what"` // Answer to: what data type? - Which []string `json:"which"` // Answer to: which fields? 
-} - -var ( - store *storage.Storage - server *settings.Server - fileCache FileCache -) - -func SetupEnv(storage *storage.Storage, s *settings.Server, cache FileCache) { - store = storage - server = s - fileCache = cache -} - -func NewHandler( - imgSvc ImgService, - assetsFs fs.FS, -) (http.Handler, error) { - server.Clean() - - r := mux.NewRouter() - r.Use(func(next http.Handler) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.Header().Set("Content-Security-Policy", `default-src 'self'; style-src 'unsafe-inline';`) - next.ServeHTTP(w, r) - }) - }) - index, static := getStaticHandlers(store, server, assetsFs) - // NOTE: This fixes the issue where it would redirect if people did not put a - // trailing slash in the end. I hate this decision since this allows some awful - // URLs https://www.gorillatoolkit.org/pkg/mux#Router.SkipClean - r = r.SkipClean(true) - monkey := func(fn handleFunc, prefix string) http.Handler { - return handle(fn, prefix, store, server) - } - r.HandleFunc("/health", healthHandler) - r.PathPrefix("/static").Handler(static) - r.NotFoundHandler = index - api := r.PathPrefix("/api").Subrouter() - api.Handle("/login", monkey(loginHandler, "")) - api.Handle("/signup", monkey(signupHandler, "")) - api.Handle("/renew", monkey(renewHandler, "")) - users := api.PathPrefix("/users").Subrouter() - users.Handle("", monkey(usersGetHandler, "")).Methods("GET") - users.Handle("", monkey(userPostHandler, "")).Methods("POST") - users.Handle("/{id:[0-9]+}", monkey(userPutHandler, "")).Methods("PUT") - users.Handle("/{id:[0-9]+}", monkey(userGetHandler, "")).Methods("GET") - users.Handle("/{id:[0-9]+}", monkey(userDeleteHandler, "")).Methods("DELETE") - api.PathPrefix("/resources").Handler(monkey(resourceGetHandler, "/api/resources")).Methods("GET") - api.PathPrefix("/resources").Handler(monkey(resourceDeleteHandler(fileCache), "/api/resources")).Methods("DELETE") - api.PathPrefix("/resources").Handler(monkey(resourcePostHandler(fileCache), "/api/resources")).Methods("POST") - api.PathPrefix("/resources").Handler(monkey(resourcePutHandler, "/api/resources")).Methods("PUT") - api.PathPrefix("/resources").Handler(monkey(resourcePatchHandler(fileCache), "/api/resources")).Methods("PATCH") - api.PathPrefix("/usage").Handler(monkey(diskUsage, "/api/usage")).Methods("GET") - api.Path("/shares").Handler(monkey(shareListHandler, "/api/shares")).Methods("GET") - api.PathPrefix("/share").Handler(monkey(shareGetsHandler, "/api/share")).Methods("GET") - api.PathPrefix("/share").Handler(monkey(sharePostHandler, "/api/share")).Methods("POST") - api.PathPrefix("/share").Handler(monkey(shareDeleteHandler, "/api/share")).Methods("DELETE") - api.Handle("/settings", monkey(settingsGetHandler, "")).Methods("GET") - api.Handle("/settings", monkey(settingsPutHandler, "")).Methods("PUT") - api.PathPrefix("/raw").Handler(monkey(rawHandler, "/api/raw")).Methods("GET") - api.PathPrefix("/preview/{size}/{path:.*}"). 
- Handler(monkey(previewHandler(imgSvc, fileCache, server.EnableThumbnails, server.ResizePreview), "/api/preview")).Methods("GET") - api.PathPrefix("/search").Handler(monkey(searchHandler, "/api/search")).Methods("GET") - public := api.PathPrefix("/public").Subrouter() - public.Handle("/publicUser", monkey(publicUserGetHandler, "")).Methods("GET") - public.PathPrefix("/dl").Handler(monkey(publicDlHandler, "/api/public/dl/")).Methods("GET") - public.PathPrefix("/share").Handler(monkey(publicShareHandler, "/api/public/share/")).Methods("GET") - return stripPrefix(server.BaseURL, r), nil -} diff --git a/backend/http/middleware.go b/backend/http/middleware.go new file mode 100644 index 00000000..2b8054d2 --- /dev/null +++ b/backend/http/middleware.go @@ -0,0 +1,291 @@ +package http + +import ( + "encoding/json" + "fmt" + "log" + "net/http" + "path/filepath" + "time" + + "github.com/golang-jwt/jwt/v4" + "github.com/gtsteffaniak/filebrowser/files" + "github.com/gtsteffaniak/filebrowser/runner" + "github.com/gtsteffaniak/filebrowser/users" +) + +type requestContext struct { + user *users.User + *runner.Runner + raw interface{} +} + +type HttpResponse struct { + Status int `json:"status,omitempty"` + Message string `json:"message,omitempty"` + Token string `json:"token,omitempty"` +} + +// Updated handleFunc to match the new signature +type handleFunc func(w http.ResponseWriter, r *http.Request, data *requestContext) (int, error) + +// Middleware to handle file requests by hash and pass it to the handler +func withHashFileHelper(fn handleFunc) handleFunc { + return func(w http.ResponseWriter, r *http.Request, data *requestContext) (int, error) { + path := r.URL.Query().Get("path") + hash := r.URL.Query().Get("hash") + data.user = &users.PublicUser + + // Get the file link by hash + link, err := store.Share.GetByHash(hash) + if err != nil { + return http.StatusNotFound, err + } + // Authenticate the share request if needed + var status int + if link.Hash != "" { + status, err = authenticateShareRequest(r, link) + if err != nil || status != http.StatusOK { + return status, err + } + } + // Retrieve the user (using the public user by default) + user := &users.PublicUser + + // Get file information with options + file, err := files.FileInfoFaster(files.FileOptions{ + Path: filepath.Join(user.Scope, link.Path+"/"+path), + Modify: user.Perm.Modify, + Expand: true, + ReadHeader: config.Server.TypeDetectionByHeader, + Checker: user, // Call your checker function here + Token: link.Token, + }) + if err != nil { + return errToStatus(err), err + } + + // Set the file info in the `data` object + data.raw = file + + // Call the next handler with the data + return fn(w, r, data) + } +} + +// Middleware to ensure the user is an admin +func withAdminHelper(fn handleFunc) handleFunc { + return withUserHelper(func(w http.ResponseWriter, r *http.Request, data *requestContext) (int, error) { + // Ensure the user has admin permissions + if !data.user.Perm.Admin { + return http.StatusForbidden, nil + } + + // Proceed to the actual handler if the user is admin + return fn(w, r, data) + }) +} + +// Middleware to retrieve and authenticate user +func withUserHelper(fn handleFunc) handleFunc { + return func(w http.ResponseWriter, r *http.Request, data *requestContext) (int, error) { + keyFunc := func(token *jwt.Token) (interface{}, error) { + return config.Auth.Key, nil + } + tokenString, err := extractToken(r) + if err != nil { + return http.StatusUnauthorized, err + } + + var tk users.AuthToken + token, err := 
jwt.ParseWithClaims(tokenString, &tk, keyFunc) + if err != nil { + return http.StatusUnauthorized, fmt.Errorf("error processing token, %v", err) + } + if !token.Valid { + return http.StatusUnauthorized, fmt.Errorf("invalid token") + } + if isRevokedApiKey(tk.Key) || tk.Expires < time.Now().Unix() { + return http.StatusUnauthorized, fmt.Errorf("token expired or revoked") + } + // Check if the token is about to expire and send a header to renew it + if tk.Expires < time.Now().Add(time.Hour).Unix() { + w.Header().Add("X-Renew-Token", "true") + } + // Retrieve the user from the store and store it in the context + data.user, err = store.Users.Get(config.Server.Root, tk.BelongsTo) + if err != nil { + return http.StatusInternalServerError, err + } + // Call the handler function, passing in the context + return fn(w, r, data) + } +} + +// Middleware to ensure the user is either the requested user or an admin +func withSelfOrAdminHelper(fn handleFunc) handleFunc { + return withUserHelper(func(w http.ResponseWriter, r *http.Request, data *requestContext) (int, error) { + // Check if the current user is the same as the requested user or if they are an admin + if !data.user.Perm.Admin { + return http.StatusForbidden, nil + } + // Call the actual handler function with the updated context + return fn(w, r, data) + }) +} + +func wrapHandler(fn handleFunc) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + data := &requestContext{ + Runner: &runner.Runner{Enabled: config.Server.EnableExec, Settings: config}, + } + + // Call the actual handler function and get status code and error + status, err := fn(w, r, data) + + // Handle the error case if there is one + if err != nil { + // Create an error response in JSON format + response := &HttpResponse{ + Status: status, // Use the status code from the middleware + Message: err.Error(), + } + + // Set the content type to JSON and status code + w.Header().Set("Content-Type", "application/json; charset=utf-8") + w.WriteHeader(status) + + // Marshal the error response to JSON + errorBytes, marshalErr := json.Marshal(response) + if marshalErr != nil { + log.Printf("Error marshalling error response: %v", marshalErr) + http.Error(w, "Internal Server Error", http.StatusInternalServerError) + return + } + + // Write the JSON error response + if _, writeErr := w.Write(errorBytes); writeErr != nil { + log.Printf("Error writing error response: %v", writeErr) + } + return + } + + // No error, proceed to write status if non-zero + if status != 0 { + w.WriteHeader(status) + } + } +} + +func withPermShareHelper(fn handleFunc) handleFunc { + return withUserHelper(func(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) { + if !d.user.Perm.Share { + return http.StatusForbidden, nil + } + return fn(w, r, d) + }) +} + +func withPermShare(fn handleFunc) http.HandlerFunc { + return wrapHandler(withPermShareHelper(fn)) +} + +// Example of wrapping specific middleware functions for use with http.HandleFunc +func withHashFile(fn handleFunc) http.HandlerFunc { + return wrapHandler(withHashFileHelper(fn)) +} + +func withAdmin(fn handleFunc) http.HandlerFunc { + return wrapHandler(withAdminHelper(fn)) +} + +func withUser(fn handleFunc) http.HandlerFunc { + return wrapHandler(withUserHelper(fn)) +} + +func withSelfOrAdmin(fn handleFunc) http.HandlerFunc { + return wrapHandler(withSelfOrAdminHelper(fn)) +} + +func muxWithMiddleware(mux *http.ServeMux) *http.ServeMux { + wrappedMux := http.NewServeMux() + wrappedMux.Handle("/", 
LoggingMiddleware(mux)) + return wrappedMux +} + +// ResponseWriterWrapper wraps the standard http.ResponseWriter to capture the status code +type ResponseWriterWrapper struct { + http.ResponseWriter + StatusCode int + wroteHeader bool +} + +// WriteHeader captures the status code and ensures it's only written once +func (w *ResponseWriterWrapper) WriteHeader(statusCode int) { + if !w.wroteHeader { // Prevent WriteHeader from being called multiple times + if statusCode == 0 { + statusCode = http.StatusInternalServerError + } + w.StatusCode = statusCode + w.ResponseWriter.WriteHeader(statusCode) + w.wroteHeader = true + } +} + +// Write is the method to write the response body and ensure WriteHeader is called +func (w *ResponseWriterWrapper) Write(b []byte) (int, error) { + if !w.wroteHeader { // Default to 200 if WriteHeader wasn't called explicitly + w.WriteHeader(http.StatusOK) + } + return w.ResponseWriter.Write(b) +} + +// LoggingMiddleware logs each request and its status code +func LoggingMiddleware(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + start := time.Now() + + // Wrap the ResponseWriter to capture the status code + wrappedWriter := &ResponseWriterWrapper{ResponseWriter: w, StatusCode: http.StatusOK} + + // Call the next handler + next.ServeHTTP(wrappedWriter, r) + + // Determine the color based on the status code + color := "\033[32m" // Default green color + if wrappedWriter.StatusCode >= 300 && wrappedWriter.StatusCode < 500 { + color = "\033[33m" // Yellow for client errors (4xx) + } else if wrappedWriter.StatusCode >= 500 { + color = "\033[31m" // Red for server errors (5xx) + } + // Capture the full URL path including the query parameters + fullURL := r.URL.Path + if r.URL.RawQuery != "" { + fullURL += "?" 
+ r.URL.RawQuery + } + + // Log the request and its status code + log.Printf("%s%-7s | %3d | %-15s | %-12s | \"%s\"%s", + color, + r.Method, + wrappedWriter.StatusCode, // Now capturing the correct status + r.RemoteAddr, + time.Since(start).String(), + fullURL, + "\033[0m", // Reset color + ) + }) +} + +func renderJSON(w http.ResponseWriter, _ *http.Request, data interface{}) (int, error) { + marsh, err := json.Marshal(data) + if err != nil { + return http.StatusInternalServerError, err + } + w.Header().Set("Content-Type", "application/json; charset=utf-8") + if _, err := w.Write(marsh); err != nil { + return http.StatusInternalServerError, err + } + + return 0, nil +} diff --git a/backend/http/middleware_test.go b/backend/http/middleware_test.go new file mode 100644 index 00000000..31264c98 --- /dev/null +++ b/backend/http/middleware_test.go @@ -0,0 +1,252 @@ +package http + +import ( + "net/http" + "net/http/httptest" + "path/filepath" + "testing" + "time" + + "github.com/asdine/storm/v3" + "github.com/gtsteffaniak/filebrowser/diskcache" + "github.com/gtsteffaniak/filebrowser/img" + "github.com/gtsteffaniak/filebrowser/settings" + "github.com/gtsteffaniak/filebrowser/share" + "github.com/gtsteffaniak/filebrowser/storage" + "github.com/gtsteffaniak/filebrowser/storage/bolt" + "github.com/gtsteffaniak/filebrowser/users" + "github.com/gtsteffaniak/filebrowser/utils" +) + +func setupTestEnv(t *testing.T) { + dbPath := filepath.Join(t.TempDir(), "db") + db, err := storm.Open(dbPath) + if err != nil { + t.Fatal(err) + } + authStore, userStore, shareStore, settingsStore, err := bolt.NewStorage(db) + if err != nil { + t.Fatal(err) + } + store = &storage.Storage{ + Auth: authStore, + Users: userStore, + Share: shareStore, + Settings: settingsStore, + } + fileCache = diskcache.NewNoOp() // mocked + imgSvc = img.New(1) // mocked + config = &settings.Config // mocked +} + +func TestWithAdminHelper(t *testing.T) { + setupTestEnv(t) + // Mock a user who has admin permissions + adminUser := &users.User{ + ID: 1, + Username: "admin", + Perm: users.Permissions{Admin: true}, // Ensure the user is an admin + } + nonAdminUser := &users.User{ + ID: 2, + Username: "non-admin", + Perm: users.Permissions{Admin: false}, // Non-admin user + } + // Save the users to the mock database + if err := store.Users.Save(adminUser); err != nil { + t.Fatal("failed to save admin user:", err) + } + if err := store.Users.Save(nonAdminUser); err != nil { + t.Fatal("failed to save non-admin user:", err) + } + // Test cases for different scenarios + testCases := []struct { + name string + expectedStatusCode int + user *users.User + }{ + { + name: "Admin access allowed", + expectedStatusCode: http.StatusOK, // Admin should be able to access + user: adminUser, + }, + { + name: "Non-admin access forbidden", + expectedStatusCode: http.StatusForbidden, // Non-admin should be forbidden + user: nonAdminUser, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + // Mock the context with the current user + data := &requestContext{ + user: tc.user, + } + token, err := makeSignedTokenAPI(tc.user, "WEB_TOKEN_"+utils.GenerateRandomHash(4), time.Hour*2, tc.user.Perm) + if err != nil { + t.Fatalf("Error making token for request: %v", err) + } + + // Wrap the usersGetHandler with the middleware + handler := withAdminHelper(mockHandler) + + // Create a response recorder to capture the handler's output + recorder := httptest.NewRecorder() + // Create the request and apply the token as a cookie + req, err := 
http.NewRequest(http.MethodGet, "/users", http.NoBody) + if err != nil { + t.Fatalf("Error creating request: %v", err) + } + req.AddCookie(&http.Cookie{ + Name: "auth", + Value: token.Key, + }) + + // Call the handler with the test request and mock context + status, err := handler(recorder, req, data) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + // Verify the status code + if status != tc.expectedStatusCode { + t.Errorf("\"%v\" expected status code %d, got %d", tc.name, tc.expectedStatusCode, status) + } + }) + } +} + +func TestPublicShareHandlerAuthentication(t *testing.T) { + setupTestEnv(t) + + const passwordBcrypt = "$2y$10$TFAmdCbyd/mEZDe5fUeZJu.MaJQXRTwdqb/IQV.eTn6dWrF58gCSe" // bcrypt hashed password + + testCases := []struct { + name string + share *share.Link + token string + password string + extraHeaders map[string]string + expectedStatusCode int + }{ + { + name: "Public share, no auth required", + share: &share.Link{ + Hash: "public_hash", + }, + expectedStatusCode: 0, // zero means 200 on helpers + }, + { + name: "Private share, no auth provided", + share: &share.Link{ + Hash: "private_hash", + UserID: 1, + PasswordHash: passwordBcrypt, + Token: "123", + }, + expectedStatusCode: http.StatusUnauthorized, + }, + { + name: "Private share, valid token", + share: &share.Link{ + Hash: "token_hash", + UserID: 1, + PasswordHash: passwordBcrypt, + Token: "123", + }, + token: "123", + expectedStatusCode: 0, // zero means 200 on helpers + }, + { + name: "Private share, invalid password", + share: &share.Link{ + Hash: "pw_hash", + UserID: 1, + PasswordHash: passwordBcrypt, + Token: "123", + }, + extraHeaders: map[string]string{ + "X-SHARE-PASSWORD": "wrong-password", + }, + expectedStatusCode: http.StatusUnauthorized, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + // Save the share in the mock store + if err := store.Share.Save(tc.share); err != nil { + t.Fatal("failed to save share:", err) + } + + // Create a response recorder to capture handler output + recorder := httptest.NewRecorder() + + // Wrap the handler with authentication middleware + handler := withHashFileHelper(publicShareHandler) + if err := store.Settings.Save(&settings.Settings{ + Auth: settings.Auth{ + Key: []byte("key"), + }, + }); err != nil { + t.Fatalf("failed to save settings: %v", err) + } + + // Prepare the request with query parameters and optional headers + req := newTestRequest(t, tc.share.Hash, tc.token, tc.password, tc.extraHeaders) + + // Serve the request + status, err := handler(recorder, req, &requestContext{}) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + // Check if the response matches the expected status code + if status != tc.expectedStatusCode { + t.Errorf("expected status code %d, got %d", tc.expectedStatusCode, status) + } + }) + } +} + +// Helper function to create a new HTTP request with optional parameters +func newTestRequest(t *testing.T, hash, token, password string, headers map[string]string) *http.Request { + req := newHTTPRequest(t, hash, func(r *http.Request) { + // Set query parameters based on provided values + q := r.URL.Query() + q.Set("path", "/") + q.Set("hash", hash) + if token != "" { + q.Set("token", token) + } + if password != "" { + q.Set("password", password) + } + r.URL.RawQuery = q.Encode() + + // Set any extra headers if provided + for key, value := range headers { + r.Header.Set(key, value) + } + }) + return req +} + +func mockHandler(w http.ResponseWriter, r *http.Request, d *requestContext) 
(int, error) { + return http.StatusOK, nil // mock response +} + +// Modify newHTTPRequest to accept the hash and use it in the URL path. +func newHTTPRequest(t *testing.T, hash string, requestModifiers ...func(*http.Request)) *http.Request { + t.Helper() + url := "/public/share/" + hash + "/" // Dynamically include the hash in the URL path + r, err := http.NewRequest(http.MethodGet, url, http.NoBody) + if err != nil { + t.Fatalf("failed to create request: %v", err) + } + for _, modify := range requestModifiers { + modify(r) + } + return r +} diff --git a/backend/http/preview.go b/backend/http/preview.go index 61d932cb..3b3d050b 100644 --- a/backend/http/preview.go +++ b/backend/http/preview.go @@ -1,4 +1,3 @@ -//go:generate go-enum --sql --marshal --names --file $GOFILE package http import ( @@ -8,21 +7,12 @@ import ( "io" "net/http" "os" - - "github.com/gorilla/mux" + "path/filepath" "github.com/gtsteffaniak/filebrowser/files" "github.com/gtsteffaniak/filebrowser/img" ) -/* -ENUM( -thumb -big -) -*/ -type PreviewSize int - type ImgService interface { FormatFromExtension(ext string) (img.Format, error) Resize(ctx context.Context, in io.Reader, width, height int, out io.Writer, options ...img.Option) error @@ -34,82 +24,92 @@ type FileCache interface { Delete(ctx context.Context, key string) error } -func previewHandler(imgSvc ImgService, fileCache FileCache, enableThumbnails, resizePreview bool) handleFunc { - return withUser(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) { +// previewHandler handles the preview request for images. +// @Summary Get image preview +// @Description Returns a preview image based on the requested path and size. +// @Tags Resources +// @Accept json +// @Produce json +// @Param path query string true "File path of the image to preview" +// @Param size query string false "Preview size ('small' or 'large'). Default is based on server config." 
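Stepping back from the tests for a moment: the refactored middleware above composes handlers that return (int, error), enriches them with with*Helper wrappers, and finally converts them into standard http.HandlerFuncs that render any error as a JSON body. A condensed sketch of that composition pattern follows, using simplified placeholder names and omitting real authentication; it is an illustration of the approach, not the project's code.

```go
package main

import (
	"encoding/json"
	"log"
	"net/http"
)

// handleFunc mirrors the signature used above: handlers return a status
// code and an error instead of writing error responses themselves.
type handleFunc func(w http.ResponseWriter, r *http.Request, user string) (int, error)

// withUser is a stand-in for withUserHelper: it resolves the caller and
// passes it down the chain (real token validation omitted here).
func withUser(fn handleFunc) handleFunc {
	return func(w http.ResponseWriter, r *http.Request, _ string) (int, error) {
		user := "demo-user" // a real implementation would validate a JWT here
		return fn(w, r, user)
	}
}

// wrap converts the enriched handleFunc into a plain http.HandlerFunc,
// rendering returned errors as JSON, much like wrapHandler above.
func wrap(fn handleFunc) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		status, err := fn(w, r, "")
		if err != nil {
			w.Header().Set("Content-Type", "application/json; charset=utf-8")
			w.WriteHeader(status)
			_ = json.NewEncoder(w).Encode(map[string]interface{}{
				"status":  status,
				"message": err.Error(),
			})
			return
		}
		if status != 0 {
			w.WriteHeader(status)
		}
	}
}

func main() {
	mux := http.NewServeMux()
	mux.HandleFunc("GET /api/hello", wrap(withUser(func(w http.ResponseWriter, r *http.Request, user string) (int, error) {
		if _, err := w.Write([]byte("hello, " + user)); err != nil {
			return http.StatusInternalServerError, err
		}
		return 0, nil
	})))
	log.Println("listening on :8080")
	log.Fatal(http.ListenAndServe(":8080", mux))
}
```

Returning (status, error) from every handler keeps error rendering in one place rather than scattering http.Error calls across handlers, which is the design choice the middleware above makes.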
+// @Success 200 {file} file "Preview image content" +// @Failure 202 {object} map[string]string "Download permissions required" +// @Failure 400 {object} map[string]string "Invalid request path" +// @Failure 404 {object} map[string]string "File not found" +// @Failure 415 {object} map[string]string "Unsupported file type for preview" +// @Failure 500 {object} map[string]string "Internal server error" +// @Router /api/preview [get] +func previewHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) { + path := r.URL.Query().Get("path") + previewSize := r.URL.Query().Get("size") + if previewSize != "small" { + previewSize = "large" + } + + if path == "" { + return http.StatusBadRequest, fmt.Errorf("invalid request path") + } + file, err := files.FileInfoFaster(files.FileOptions{ + Path: filepath.Join(d.user.Scope, path), + Modify: d.user.Perm.Modify, + Expand: true, + ReadHeader: config.Server.TypeDetectionByHeader, + Checker: d.user, + }) + if err != nil { + return errToStatus(err), err + } + realPath, _, err := files.GetRealPath(file.Path) + if err != nil { + return http.StatusInternalServerError, err + } + file.Path = realPath + if file.Type == "directory" { + return http.StatusBadRequest, fmt.Errorf("can't create preview for directory") + } + setContentDisposition(w, r, file) + if file.Type != "image" { + return http.StatusNotImplemented, fmt.Errorf("can't create preview for %s type", file.Type) + } + + if (previewSize == "large" && !config.Server.ResizePreview) || + (previewSize == "small" && !config.Server.EnableThumbnails) { if !d.user.Perm.Download { return http.StatusAccepted, nil } - vars := mux.Vars(r) - - previewSize, err := ParsePreviewSize(vars["size"]) - if err != nil { - return http.StatusBadRequest, err - } - - file, err := files.FileInfoFaster(files.FileOptions{ - Path: "/" + vars["path"], - Modify: d.user.Perm.Modify, - Expand: true, - ReadHeader: d.server.TypeDetectionByHeader, - Checker: d, - }) - - if err != nil { - return errToStatus(err), err - } - setContentDisposition(w, r, file) - - switch file.Type { - case "image": - return handleImagePreview(w, r, imgSvc, fileCache, file, previewSize, enableThumbnails, resizePreview) - default: - return http.StatusNotImplemented, fmt.Errorf("can't create preview for %s type", file.Type) - } - }) -} - -func handleImagePreview( - w http.ResponseWriter, - r *http.Request, - imgSvc ImgService, - fileCache FileCache, - file *files.FileInfo, - previewSize PreviewSize, - enableThumbnails, resizePreview bool, -) (int, error) { - if (previewSize == PreviewSizeBig && !resizePreview) || - (previewSize == PreviewSizeThumb && !enableThumbnails) { return rawFileHandler(w, r, file) } - format, err := imgSvc.FormatFromExtension(file.Extension) + + format, err := imgSvc.FormatFromExtension(filepath.Ext(file.Name)) // Unsupported extensions directly return the raw data if err == img.ErrUnsupportedFormat || format == img.FormatGif { + if !d.user.Perm.Download { + return http.StatusAccepted, nil + } return rawFileHandler(w, r, file) } if err != nil { return errToStatus(err), err } - cacheKey := previewCacheKey(file, previewSize) resizedImage, ok, err := fileCache.Load(r.Context(), cacheKey) if err != nil { return errToStatus(err), err } + if !ok { resizedImage, err = createPreview(imgSvc, fileCache, file, previewSize) if err != nil { return errToStatus(err), err } } - w.Header().Set("Cache-Control", "private") - http.ServeContent(w, r, file.Name, file.ModTime, bytes.NewReader(resizedImage)) + http.ServeContent(w, r, 
file.Path, file.ModTime, bytes.NewReader(resizedImage)) return 0, nil } -func createPreview(imgSvc ImgService, fileCache FileCache, - file *files.FileInfo, previewSize PreviewSize) ([]byte, error) { +func createPreview(imgSvc ImgService, fileCache FileCache, file *files.FileInfo, previewSize string) ([]byte, error) { fd, err := os.Open(file.Path) if err != nil { return nil, err @@ -123,11 +123,11 @@ func createPreview(imgSvc ImgService, fileCache FileCache, ) switch { - case previewSize == PreviewSizeBig: + case previewSize == "large": width = 1080 height = 1080 options = append(options, img.WithMode(img.ResizeModeFit), img.WithQuality(img.QualityMedium)) - case previewSize == PreviewSizeThumb: + case previewSize == "small": width = 256 height = 256 options = append(options, img.WithMode(img.ResizeModeFill), img.WithQuality(img.QualityLow), img.WithFormat(img.FormatJpeg)) @@ -150,6 +150,7 @@ func createPreview(imgSvc ImgService, fileCache FileCache, return buf.Bytes(), nil } -func previewCacheKey(f *files.FileInfo, previewSize PreviewSize) string { +// Generates a cache key for the preview image +func previewCacheKey(f *files.FileInfo, previewSize string) string { return fmt.Sprintf("%x%x%x", f.RealPath(), f.ModTime.Unix(), previewSize) } diff --git a/backend/http/preview_enum.go b/backend/http/preview_enum.go deleted file mode 100644 index 50e3372c..00000000 --- a/backend/http/preview_enum.go +++ /dev/null @@ -1,100 +0,0 @@ -// Code generated by go-enum -// DO NOT EDIT! - -package http - -import ( - "database/sql/driver" - "fmt" - "strings" -) - -const ( - // PreviewSizeThumb is a PreviewSize of type Thumb - PreviewSizeThumb PreviewSize = iota - // PreviewSizeBig is a PreviewSize of type Big - PreviewSizeBig -) - -const _PreviewSizeName = "thumbbig" - -var _PreviewSizeNames = []string{ - _PreviewSizeName[0:5], - _PreviewSizeName[5:8], -} - -// PreviewSizeNames returns a list of possible string values of PreviewSize. -func PreviewSizeNames() []string { - tmp := make([]string, len(_PreviewSizeNames)) - copy(tmp, _PreviewSizeNames) - return tmp -} - -var _PreviewSizeMap = map[PreviewSize]string{ - 0: _PreviewSizeName[0:5], - 1: _PreviewSizeName[5:8], -} - -// String implements the Stringer interface. -func (x PreviewSize) String() string { - if str, ok := _PreviewSizeMap[x]; ok { - return str - } - return fmt.Sprintf("PreviewSize(%d)", x) -} - -var _PreviewSizeValue = map[string]PreviewSize{ - _PreviewSizeName[0:5]: 0, - _PreviewSizeName[5:8]: 1, -} - -// ParsePreviewSize attempts to convert a string to a PreviewSize -func ParsePreviewSize(name string) (PreviewSize, error) { - if x, ok := _PreviewSizeValue[name]; ok { - return x, nil - } - return PreviewSize(0), fmt.Errorf("%s is not a valid PreviewSize, try [%s]", name, strings.Join(_PreviewSizeNames, ", ")) -} - -// MarshalText implements the text marshaller method -func (x PreviewSize) MarshalText() ([]byte, error) { - return []byte(x.String()), nil -} - -// UnmarshalText implements the text unmarshaller method -func (x *PreviewSize) UnmarshalText(text []byte) error { - name := string(text) - tmp, err := ParsePreviewSize(name) - if err != nil { - return err - } - *x = tmp - return nil -} - -// Scan implements the Scanner interface. 
-func (x *PreviewSize) Scan(value interface{}) error { - var name string - - switch v := value.(type) { - case string: - name = v - case []byte: - name = string(v) - case nil: - *x = PreviewSize(0) - return nil - } - - tmp, err := ParsePreviewSize(name) - if err != nil { - return err - } - *x = tmp - return nil -} - -// Value implements the driver Valuer interface. -func (x PreviewSize) Value() (driver.Value, error) { - return x.String(), nil -} diff --git a/backend/http/public.go b/backend/http/public.go index 9a0fd1e8..42f98c9e 100644 --- a/backend/http/public.go +++ b/backend/http/public.go @@ -1,11 +1,11 @@ package http import ( + "encoding/json" "errors" "fmt" "net/http" "net/url" - "path" "strings" "golang.org/x/crypto/bcrypt" @@ -14,97 +14,58 @@ import ( "github.com/gtsteffaniak/filebrowser/settings" "github.com/gtsteffaniak/filebrowser/share" "github.com/gtsteffaniak/filebrowser/users" + + _ "github.com/gtsteffaniak/filebrowser/swagger/docs" ) -var withHashFile = func(fn handleFunc) handleFunc { - return func(w http.ResponseWriter, r *http.Request, d *data) (int, error) { - id, path := ifPathWithName(r) - link, err := d.store.Share.GetByHash(id) - if err != nil { - return errToStatus(err), err - } - if link.Hash != "" { - var status int - status, err = authenticateShareRequest(r, link) - if err != nil || status != 0 { - return status, err - } - } - d.user = &users.PublicUser - realPath, isDir, err := files.GetRealPath(d.user.Scope, link.Path, path) - if err != nil { - return http.StatusNotFound, err - } - file, err := files.FileInfoFaster(files.FileOptions{ - Path: realPath, - IsDir: isDir, - Modify: d.user.Perm.Modify, - Expand: true, - ReadHeader: d.server.TypeDetectionByHeader, - Checker: d, - Token: link.Token, - }) - if err != nil { - return errToStatus(err), err - } - d.raw = file - return fn(w, r, d) - } -} - -func ifPathWithName(r *http.Request) (id, filePath string) { - pathElements := strings.Split(r.URL.Path, "/") - id = pathElements[0] - allButFirst := path.Join(pathElements[1:]...) - return id, allButFirst -} - -var publicShareHandler = withHashFile(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) { +func publicShareHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) { file, ok := d.raw.(*files.FileInfo) if !ok { return http.StatusInternalServerError, fmt.Errorf("failed to assert type *files.FileInfo") } - file.Path = strings.TrimPrefix(file.Path, settings.Config.Server.Root) - if file.IsDir { - return renderJSON(w, r, file) - } - return renderJSON(w, r, file) -}) +} -var publicUserGetHandler = func(w http.ResponseWriter, r *http.Request, d *data) (int, error) { +func publicUserGetHandler(w http.ResponseWriter, r *http.Request) { // Call the actual handler logic here (e.g., renderJSON, etc.) // You may need to replace `fn` with the actual handler logic. 
- return renderJSON(w, r, users.PublicUser) + status, err := renderJSON(w, r, users.PublicUser) + if err != nil { + http.Error(w, http.StatusText(status), status) + } + } -var publicDlHandler = withHashFile(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) { - file, ok := d.raw.(*files.FileInfo) - if !ok { - return http.StatusInternalServerError, fmt.Errorf("failed to assert type *files.FileInfo") +func publicDlHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) { + file, _ := d.raw.(*files.FileInfo) + if file == nil { + return http.StatusInternalServerError, fmt.Errorf("failed to assert type files.FileInfo") + } + if d.user == nil { + return http.StatusUnauthorized, fmt.Errorf("failed to get user") } - if !file.IsDir { - return rawFileHandler(w, r, file) + if file.Type == "directory" { + return rawDirHandler(w, r, d, file) } - return rawDirHandler(w, r, d, file) -}) + return rawFileHandler(w, r, file) +} func authenticateShareRequest(r *http.Request, l *share.Link) (int, error) { if l.PasswordHash == "" { - return 0, nil + return 200, nil } if r.URL.Query().Get("token") == l.Token { - return 0, nil + return 200, nil } password := r.Header.Get("X-SHARE-PASSWORD") password, err := url.QueryUnescape(password) if err != nil { - return 0, err + return http.StatusUnauthorized, err } if password == "" { return http.StatusUnauthorized, nil @@ -113,12 +74,25 @@ func authenticateShareRequest(r *http.Request, l *share.Link) (int, error) { if errors.Is(err, bcrypt.ErrMismatchedHashAndPassword) { return http.StatusUnauthorized, nil } - return 0, err + return 401, err } - return 0, nil + return 200, nil } -func healthHandler(w http.ResponseWriter, _ *http.Request) { - w.WriteHeader(http.StatusOK) - _, _ = w.Write([]byte(`{"status":"OK"}`)) +// health godoc +// @Summary Health Check +// @Schemes +// @Description Returns the health status of the API. 
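From a client's point of view, the share authentication above accepts either the share token as a token query parameter or a password supplied in the X-SHARE-PASSWORD header (which the handler URL-unescapes before the bcrypt comparison). A rough sketch of fetching a password-protected share through the public route registered later in router.go, assuming the default base URL; the hash, password, and host values are placeholders.

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
)

func main() {
	// Adjust host, port, and base URL for your deployment.
	base := "http://localhost/api/public/share"

	q := url.Values{}
	q.Set("hash", "exampleShareHash") // the share's hash (illustrative value)
	q.Set("path", "/")                // path inside the shared directory
	// Alternatively, authenticate with the share token: q.Set("token", ...)

	req, err := http.NewRequest(http.MethodGet, base+"?"+q.Encode(), nil)
	if err != nil {
		panic(err)
	}
	// The handler unescapes this header before comparing it with bcrypt.
	req.Header.Set("X-SHARE-PASSWORD", url.QueryEscape("my-share-password"))

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(body))
}
```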
+// @Tags Health +// @Accept json +// @Produce json +// @Success 200 {object} HttpResponse "successful health check response" +// @Router /health [get] +func healthHandler(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + response := HttpResponse{Message: "ok"} // Create response with status "ok" + err := json.NewEncoder(w).Encode(response) // Encode the response into JSON + if err != nil { + http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError) + } } diff --git a/backend/http/public_test.go b/backend/http/public_test.go deleted file mode 100644 index 3648c6ab..00000000 --- a/backend/http/public_test.go +++ /dev/null @@ -1,137 +0,0 @@ -package http - -import ( - "fmt" - "net/http" - "net/http/httptest" - "path/filepath" - "testing" - - "github.com/asdine/storm/v3" - - "github.com/gtsteffaniak/filebrowser/settings" - "github.com/gtsteffaniak/filebrowser/share" - "github.com/gtsteffaniak/filebrowser/storage" - "github.com/gtsteffaniak/filebrowser/storage/bolt" - "github.com/gtsteffaniak/filebrowser/users" -) - -func TestPublicShareHandlerAuthentication(t *testing.T) { - t.Parallel() - - const passwordBcrypt = "$2y$10$TFAmdCbyd/mEZDe5fUeZJu.MaJQXRTwdqb/IQV.eTn6dWrF58gCSe" //nolint:gosec - testCases := map[string]struct { - share *share.Link - req *http.Request - expectedStatusCode int - }{ - "Public share, no auth required": { - share: &share.Link{Hash: "h"}, - req: newHTTPRequest(t), - expectedStatusCode: 200, - }, - "Private share, no auth provided, 401": { - share: &share.Link{Hash: "h", UserID: 1, PasswordHash: passwordBcrypt, Token: "123"}, - req: newHTTPRequest(t), - expectedStatusCode: 401, - }, - "Private share, authentication via token": { - share: &share.Link{Hash: "h", UserID: 1, PasswordHash: passwordBcrypt, Token: "123"}, - req: newHTTPRequest(t, func(r *http.Request) { r.URL.RawQuery = "token=123" }), - expectedStatusCode: 200, - }, - "Private share, authentication via invalid token, 401": { - share: &share.Link{Hash: "h", UserID: 1, PasswordHash: passwordBcrypt, Token: "123"}, - req: newHTTPRequest(t, func(r *http.Request) { r.URL.RawQuery = "token=1234" }), - expectedStatusCode: 401, - }, - "Private share, authentication via password": { - share: &share.Link{Hash: "h", UserID: 1, PasswordHash: passwordBcrypt, Token: "123"}, - req: newHTTPRequest(t, func(r *http.Request) { r.Header.Set("X-SHARE-PASSWORD", "password") }), - expectedStatusCode: 200, - }, - "Private share, authentication via invalid password, 401": { - share: &share.Link{Hash: "h", UserID: 1, PasswordHash: passwordBcrypt, Token: "123"}, - req: newHTTPRequest(t, func(r *http.Request) { r.Header.Set("X-SHARE-PASSWORD", "wrong-password") }), - expectedStatusCode: 401, - }, - } - - for name, tc := range testCases { - for handlerName, handler := range map[string]handleFunc{"public share handler": publicShareHandler, "public dl handler": publicDlHandler} { - name, tc, handlerName, handler := name, tc, handlerName, handler - t.Run(fmt.Sprintf("%s: %s", handlerName, name), func(t *testing.T) { - t.Parallel() - - dbPath := filepath.Join(t.TempDir(), "db") - db, err := storm.Open(dbPath) - if err != nil { - t.Fatalf("failed to open db: %v", err) - } - - t.Cleanup(func() { - if err := db.Close(); err != nil { //nolint:govet - t.Errorf("failed to close db: %v", err) - } - }) - authStore, userStore, shareStore, settingsStore, err := bolt.NewStorage(db) - storage := &storage.Storage{ - Auth: authStore, - Users: userStore, - Share: 
shareStore, - Settings: settingsStore, - } - if err != nil { - t.Fatalf("failed to get storage: %v", err) - } - if err := storage.Share.Save(tc.share); err != nil { - t.Fatalf("failed to save share: %v", err) - } - if err := storage.Settings.Save(&settings.Settings{ - Auth: settings.Auth{ - Key: []byte("key"), - }, - }); err != nil { - t.Fatalf("failed to save settings: %v", err) - } - - storage.Users = &customFSUser{ - Store: storage.Users, - } - - recorder := httptest.NewRecorder() - handler := handle(handler, "", storage, &settings.Server{}) - handler.ServeHTTP(recorder, tc.req) - result := recorder.Result() - defer result.Body.Close() - if result.StatusCode != tc.expectedStatusCode { - t.Errorf("expected status code %d, got status code %d", tc.expectedStatusCode, result.StatusCode) - } - }) - } - } -} - -func newHTTPRequest(t *testing.T, requestModifiers ...func(*http.Request)) *http.Request { - t.Helper() - r, err := http.NewRequest(http.MethodGet, "h", http.NoBody) - if err != nil { - t.Fatalf("failed to construct request: %v", err) - } - for _, modify := range requestModifiers { - modify(r) - } - return r -} - -type customFSUser struct { - users.Store -} - -func (cu *customFSUser) Get(baseScope string, id interface{}) (*users.User, error) { - user, err := cu.Store.Get(baseScope, id) - if err != nil { - return nil, err - } - return user, nil -} diff --git a/backend/http/raw.go b/backend/http/raw.go index 18f84aa1..7bac12d1 100644 --- a/backend/http/raw.go +++ b/backend/http/raw.go @@ -77,21 +77,34 @@ func setContentDisposition(w http.ResponseWriter, r *http.Request, file *files.F } } -var rawHandler = withUser(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) { +// rawHandler serves the raw content of a file, multiple files, or directory in various formats. +// @Summary Get raw content of a file, multiple files, or directory +// @Description Returns the raw content of a file, multiple files, or a directory. Supports downloading files as archives in various formats. +// @Tags Resources +// @Accept json +// @Produce json +// @Param path query string true "Path to the file or directory" +// @Param files query string false "Comma-separated list of specific files within the directory (optional)" +// @Param inline query bool false "If true, sets 'Content-Disposition' to 'inline'. Otherwise, defaults to 'attachment'." +// @Param algo query string false "Compression algorithm for archiving multiple files or directories. Options: 'zip', 'tar', 'targz', 'tarbz2', 'tarxz', 'tarlz4', 'tarsz'. Default is 'zip'." 
+// @Success 200 {file} file "Raw file or directory content, or archive for multiple files" +// @Failure 202 {object} map[string]string "Download permissions required" +// @Failure 400 {object} map[string]string "Invalid request path" +// @Failure 404 {object} map[string]string "File or directory not found" +// @Failure 415 {object} map[string]string "Unsupported file type for preview" +// @Failure 500 {object} map[string]string "Internal server error" +// @Router /api/raw [get] +func rawHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) { if !d.user.Perm.Download { return http.StatusAccepted, nil } - realPath, isDir, err := files.GetRealPath(d.user.Scope, r.URL.Path) - if err != nil { - return http.StatusInternalServerError, err - } + path := r.URL.Query().Get("path") file, err := files.FileInfoFaster(files.FileOptions{ - Path: realPath, - IsDir: isDir, + Path: filepath.Join(d.user.Scope, path), Modify: d.user.Perm.Modify, Expand: false, - ReadHeader: d.server.TypeDetectionByHeader, - Checker: d, + ReadHeader: config.Server.TypeDetectionByHeader, + Checker: d.user, }) if err != nil { return errToStatus(err), err @@ -100,16 +113,15 @@ var rawHandler = withUser(func(w http.ResponseWriter, r *http.Request, d *data) setContentDisposition(w, r, file) return 0, nil } - - if !file.IsDir { - return rawFileHandler(w, r, file) + if file.Type == "directory" { + return rawDirHandler(w, r, d, file) } - return rawDirHandler(w, r, d, file) -}) + return rawFileHandler(w, r, file) +} -func addFile(ar archiver.Writer, d *data, path, commonPath string) error { - if !d.Check(path) { +func addFile(ar archiver.Writer, d *requestContext, path, commonPath string) error { + if !d.user.Check(path) { return nil } info, err := os.Stat(path) @@ -160,12 +172,11 @@ func addFile(ar archiver.Writer, d *data, path, commonPath string) error { return nil } -func rawDirHandler(w http.ResponseWriter, r *http.Request, d *data, file *files.FileInfo) (int, error) { +func rawDirHandler(w http.ResponseWriter, r *http.Request, d *requestContext, file *files.FileInfo) (int, error) { filenames, err := parseQueryFiles(r, file, d.user) if err != nil { return http.StatusInternalServerError, err } - extension, ar, err := parseQueryAlgorithm(r) if err != nil { return http.StatusInternalServerError, err @@ -202,7 +213,8 @@ func rawDirHandler(w http.ResponseWriter, r *http.Request, d *data, file *files. } func rawFileHandler(w http.ResponseWriter, r *http.Request, file *files.FileInfo) (int, error) { - fd, err := os.Open(file.Path) + realPath, _, _ := files.GetRealPath(file.Path) + fd, err := os.Open(realPath) if err != nil { return http.StatusInternalServerError, err } diff --git a/backend/http/resource.go b/backend/http/resource.go index 589e624e..ba5b5adb 100644 --- a/backend/http/resource.go +++ b/backend/http/resource.go @@ -14,132 +14,184 @@ import ( "github.com/gtsteffaniak/filebrowser/errors" "github.com/gtsteffaniak/filebrowser/files" - "github.com/gtsteffaniak/filebrowser/fileutils" ) -var resourceGetHandler = withUser(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) { - realPath, isDir, err := files.GetRealPath(d.user.Scope, r.URL.Path) - if err != nil { - return http.StatusNotFound, err - } +// resourceGetHandler retrieves information about a resource. +// @Summary Get resource information +// @Description Returns metadata and optionally file contents for a specified resource path. 
+// @Tags Resources +// @Accept json +// @Produce json +// @Param path query string true "Path to the resource" +// @Param source query string false "Name for the desired source, default is used if not provided" +// @Param content query string false "Include file content if true" +// @Param checksum query string false "Optional checksum validation" +// @Success 200 {object} files.FileInfo "Resource metadata" +// @Failure 404 {object} map[string]string "Resource not found" +// @Failure 500 {object} map[string]string "Internal server error" +// @Router /api/resources [get] +func resourceGetHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) { + // TODO source := r.URL.Query().Get("source") + path := r.URL.Query().Get("path") file, err := files.FileInfoFaster(files.FileOptions{ - Path: realPath, - IsDir: isDir, + Path: filepath.Join(d.user.Scope, path), Modify: d.user.Perm.Modify, Expand: true, - ReadHeader: d.server.TypeDetectionByHeader, - Checker: d, + ReadHeader: config.Server.TypeDetectionByHeader, + Checker: d.user, Content: r.URL.Query().Get("content") == "true", }) if err != nil { return errToStatus(err), err } - if !file.IsDir { - if checksum := r.URL.Query().Get("checksum"); checksum != "" { - err := file.Checksum(checksum) - if err == errors.ErrInvalidOption { - return http.StatusBadRequest, nil - } else if err != nil { - return http.StatusInternalServerError, err - } + if file.Type == "directory" { + return renderJSON(w, r, file) + } + if checksum := r.URL.Query().Get("checksum"); checksum != "" { + err := file.Checksum(checksum) + if err == errors.ErrInvalidOption { + return http.StatusBadRequest, nil + } else if err != nil { + return http.StatusInternalServerError, err } } return renderJSON(w, r, file) -}) -func resourceDeleteHandler(fileCache FileCache) handleFunc { - return withUser(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) { - if r.URL.Path == "/" || !d.user.Perm.Delete { - return http.StatusForbidden, nil - } - realPath, isDir, err := files.GetRealPath(d.user.Scope, r.URL.Path) - if err != nil { - return http.StatusNotFound, err - } - fileOpts := files.FileOptions{ - Path: realPath, - IsDir: isDir, - Modify: d.user.Perm.Modify, - Expand: false, - ReadHeader: d.server.TypeDetectionByHeader, - Checker: d, - } - file, err := files.FileInfoFaster(fileOpts) - if err != nil { - return errToStatus(err), err - } +} - // delete thumbnails - err = delThumbs(r.Context(), fileCache, file) - if err != nil { - return errToStatus(err), err - } +// resourceDeleteHandler deletes a resource at a specified path. +// @Summary Delete a resource +// @Description Deletes a resource located at the specified path. 
+// @Tags Resources +// @Accept json +// @Produce json +// @Param path query string true "Path to the resource" +// @Param source query string false "Name for the desired source, default is used if not provided" +// @Success 200 "Resource deleted successfully" +// @Failure 403 {object} map[string]string "Forbidden" +// @Failure 404 {object} map[string]string "Resource not found" +// @Failure 500 {object} map[string]string "Internal server error" +// @Router /api/resources [delete] +func resourceDeleteHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) { + // TODO source := r.URL.Query().Get("source") + path := r.URL.Query().Get("path") + if path == "/" || !d.user.Perm.Delete { + return http.StatusForbidden, nil + } + realPath, isDir, err := files.GetRealPath(d.user.Scope, path) + if err != nil { + return http.StatusNotFound, err + } + fileOpts := files.FileOptions{ + Path: filepath.Join(d.user.Scope, path), + IsDir: isDir, + Modify: d.user.Perm.Modify, + Expand: false, + ReadHeader: config.Server.TypeDetectionByHeader, + Checker: d.user, + } + file, err := files.FileInfoFaster(fileOpts) + if err != nil { + return errToStatus(err), err + } - err = files.DeleteFiles(realPath, fileOpts) + // delete thumbnails + err = delThumbs(r.Context(), fileCache, file) + if err != nil { + return errToStatus(err), err + } + + err = files.DeleteFiles(realPath, fileOpts) + if err != nil { + return errToStatus(err), err + } + return http.StatusOK, nil + +} + +// resourcePostHandler creates or uploads a new resource. +// @Summary Create or upload a resource +// @Description Creates a new resource or uploads a file at the specified path. Supports file uploads and directory creation. +// @Tags Resources +// @Accept json +// @Produce json +// @Param path query string true "Path to the resource" +// @Param source query string false "Name for the desired source, default is used if not provided" +// @Param override query bool false "Override existing file if true" +// @Success 200 "Resource created successfully" +// @Failure 403 {object} map[string]string "Forbidden" +// @Failure 404 {object} map[string]string "Resource not found" +// @Failure 409 {object} map[string]string "Conflict - Resource already exists" +// @Failure 500 {object} map[string]string "Internal server error" +// @Router /api/resources [post] +func resourcePostHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) { + // TODO source := r.URL.Query().Get("source") + path := r.URL.Query().Get("path") + if !d.user.Perm.Create || !d.user.Check(path) { + return http.StatusForbidden, nil + } + fileOpts := files.FileOptions{ + Path: filepath.Join(d.user.Scope, path), + Modify: d.user.Perm.Modify, + Expand: false, + ReadHeader: config.Server.TypeDetectionByHeader, + Checker: d.user, + } + // Directories creation on POST. 
+ if strings.HasSuffix(path, "/") { + err := files.WriteDirectory(fileOpts) if err != nil { return errToStatus(err), err } return http.StatusOK, nil - }) -} + } + file, err := files.FileInfoFaster(fileOpts) + if err == nil { + if r.URL.Query().Get("override") != "true" { + return http.StatusConflict, nil + } -func resourcePostHandler(fileCache FileCache) handleFunc { - return withUser(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) { - if !d.user.Perm.Create || !d.Check(r.URL.Path) { + // Permission for overwriting the file + if !d.user.Perm.Modify { return http.StatusForbidden, nil } - realPath, isDir, err := files.GetRealPath(d.user.Scope, r.URL.Path) + + err = delThumbs(r.Context(), fileCache, file) if err != nil { - return http.StatusNotFound, err + return errToStatus(err), err } - fileOpts := files.FileOptions{ - Path: realPath, - IsDir: isDir, - Modify: d.user.Perm.Modify, - Expand: false, - ReadHeader: d.server.TypeDetectionByHeader, - Checker: d, - } - // Directories creation on POST. - if strings.HasSuffix(r.URL.Path, "/") { - err = files.WriteDirectory(fileOpts) // Assign to the existing `err` variable - if err != nil { - return errToStatus(err), err - } - return http.StatusOK, nil - } - file, err := files.FileInfoFaster(fileOpts) - if err == nil { - if r.URL.Query().Get("override") != "true" { - return http.StatusConflict, nil - } - - // Permission for overwriting the file - if !d.user.Perm.Modify { - return http.StatusForbidden, nil - } - - err = delThumbs(r.Context(), fileCache, file) - if err != nil { - return errToStatus(err), err - } - } - err = files.WriteFile(fileOpts, r.Body) - return errToStatus(err), err - }) + } + err = files.WriteFile(fileOpts, r.Body) + return errToStatus(err), err } -var resourcePutHandler = withUser(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) { - if !d.user.Perm.Modify || !d.Check(r.URL.Path) { +// resourcePutHandler updates an existing file resource. +// @Summary Update a file resource +// @Description Updates an existing file at the specified path. +// @Tags Resources +// @Accept json +// @Produce json +// @Param path query string true "Path to the resource" +// @Param source query string false "Name for the desired source, default is used if not provided" +// @Success 200 "Resource updated successfully" +// @Failure 403 {object} map[string]string "Forbidden" +// @Failure 404 {object} map[string]string "Resource not found" +// @Failure 405 {object} map[string]string "Method not allowed" +// @Failure 500 {object} map[string]string "Internal server error" +// @Router /api/resources [put] +func resourcePutHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) { + // TODO source := r.URL.Query().Get("source") + path := r.URL.Query().Get("path") + if !d.user.Perm.Modify || !d.user.Check(path) { return http.StatusForbidden, nil } // Only allow PUT for files. 
- if strings.HasSuffix(r.URL.Path, "/") { + if strings.HasSuffix(path, "/") { return http.StatusMethodNotAllowed, nil } - realPath, isDir, err := files.GetRealPath(d.user.Scope, r.URL.Path) + realPath, isDir, err := files.GetRealPath(d.user.Scope, path) if err != nil { return http.StatusNotFound, err } @@ -148,49 +200,70 @@ var resourcePutHandler = withUser(func(w http.ResponseWriter, r *http.Request, d IsDir: isDir, Modify: d.user.Perm.Modify, Expand: false, - ReadHeader: d.server.TypeDetectionByHeader, - Checker: d, + ReadHeader: config.Server.TypeDetectionByHeader, + Checker: d.user, } err = files.WriteFile(fileOpts, r.Body) return errToStatus(err), err -}) - -// TODO fix and verify this function still works in tests -func resourcePatchHandler(fileCache FileCache) handleFunc { - return withUser(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) { - src := r.URL.Path - dst := r.URL.Query().Get("destination") - action := r.URL.Query().Get("action") - dst, err := url.QueryUnescape(dst) - if !d.Check(src) || !d.Check(dst) { - return http.StatusForbidden, nil - } - if err != nil { - return errToStatus(err), err - } - if dst == "/" || src == "/" { - return http.StatusForbidden, nil - } - override := r.URL.Query().Get("override") == "true" - rename := r.URL.Query().Get("rename") == "true" - if !override && !rename { - if _, err = os.Stat(dst); err == nil { - return http.StatusConflict, nil - } - } - if rename { - dst = addVersionSuffix(dst) - } - // Permission for overwriting the file - if override && !d.user.Perm.Modify { - return http.StatusForbidden, nil - } - err = d.RunHook(func() error { - return patchAction(r.Context(), action, src, dst, d, fileCache) - }, action, src, dst, d.user) +} +// resourcePatchHandler performs a patch operation (e.g., move, rename) on a resource. +// @Summary Patch resource (move/rename) +// @Description Moves or renames a resource to a new destination. 
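As a usage illustration for the resource endpoints above, which now take the target as a path query parameter rather than as part of the URL: the sketch below updates a file over PUT, authenticating the way the middleware tests do, with the JWT in an auth cookie. The token, path, and host are placeholders, and header-based API-token extraction is not shown in this diff, so cookie authentication is assumed here.

```go
package main

import (
	"fmt"
	"net/http"
	"net/url"
	"strings"
)

func main() {
	// Assumptions: server reachable on localhost with the default base URL,
	// and a valid JWT already obtained (e.g. from the login endpoint).
	token := "signed-jwt-placeholder"
	base := "http://localhost/api/resources"

	q := url.Values{}
	q.Set("path", "/notes/todo.txt") // illustrative path inside the user's scope

	body := strings.NewReader("updated file contents\n")
	req, err := http.NewRequest(http.MethodPut, base+"?"+q.Encode(), body)
	if err != nil {
		panic(err)
	}
	// The middleware tests authenticate by sending the JWT in an "auth" cookie.
	req.AddCookie(&http.Cookie{Name: "auth", Value: token})

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("PUT", q.Get("path"), "->", resp.Status)
}
```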
+// @Tags Resources +// @Accept json +// @Produce json +// @Param from query string true "Path from resource" +// @Param destination query string true "Destination path for the resource" +// @Param action query string true "Action to perform (copy, rename)" +// @Param overwrite query bool false "Overwrite if destination exists" +// @Param rename query bool false "Rename if destination exists" +// @Success 200 "Resource moved/renamed successfully" +// @Failure 403 {object} map[string]string "Forbidden" +// @Failure 404 {object} map[string]string "Resource not found" +// @Failure 409 {object} map[string]string "Conflict - Destination exists" +// @Failure 500 {object} map[string]string "Internal server error" +// @Router /api/resources [patch] +func resourcePatchHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) { + // TODO source := r.URL.Query().Get("source") + src := r.URL.Query().Get("from") + dst := r.URL.Query().Get("destination") + action := r.URL.Query().Get("action") + dst, err := url.QueryUnescape(dst) + if !d.user.Check(src) || !d.user.Check(dst) { + return http.StatusForbidden, nil + } + if err != nil { return errToStatus(err), err - }) + } + if dst == "/" || src == "/" { + return http.StatusForbidden, nil + } + + // check target dir exists + parentDir, _, err := files.GetRealPath(d.user.Scope, filepath.Dir(dst)) + if err != nil { + return http.StatusNotFound, err + } + realDest := parentDir + "/" + filepath.Base(dst) + realSrc, isSrcDir, err := files.GetRealPath(d.user.Scope, src) + if err != nil { + return http.StatusNotFound, err + } + overwrite := r.URL.Query().Get("overwrite") == "true" + rename := r.URL.Query().Get("rename") == "true" + if rename { + realDest = addVersionSuffix(realDest) + } + // Permission for overwriting the file + if overwrite && !d.user.Perm.Modify { + return http.StatusForbidden, nil + } + err = d.RunHook(func() error { + return patchAction(r.Context(), action, realSrc, realDest, d, fileCache, isSrcDir) + }, action, realSrc, realDest, d.user) + + return errToStatus(err), err } func addVersionSuffix(source string) string { @@ -210,46 +283,31 @@ func addVersionSuffix(source string) string { } func delThumbs(ctx context.Context, fileCache FileCache, file *files.FileInfo) error { - for _, previewSizeName := range PreviewSizeNames() { - size, _ := ParsePreviewSize(previewSizeName) - if err := fileCache.Delete(ctx, previewCacheKey(file, size)); err != nil { - return err - } + if err := fileCache.Delete(ctx, previewCacheKey(file, "small")); err != nil { + return err } - return nil } -func patchAction(ctx context.Context, action, src, dst string, d *data, fileCache FileCache) error { +func patchAction(ctx context.Context, action, src, dst string, d *requestContext, fileCache FileCache, isSrcDir bool) error { switch action { // TODO: use enum case "copy": if !d.user.Perm.Create { return errors.ErrPermissionDenied } - - return fileutils.Copy(src, dst) - case "rename": + return files.CopyResource(src, dst, isSrcDir) + case "rename", "move": if !d.user.Perm.Rename { return errors.ErrPermissionDenied } - src = path.Clean("/" + src) - dst = path.Clean("/" + dst) - realDest, _, err := files.GetRealPath(d.user.Scope, dst) - if err != nil { - return err - } - realSrc, isDir, err := files.GetRealPath(d.user.Scope, src) - if err != nil { - return err - } file, err := files.FileInfoFaster(files.FileOptions{ - Path: realSrc, - IsDir: isDir, + Path: src, + IsDir: isSrcDir, Modify: d.user.Perm.Modify, Expand: false, ReadHeader: false, - Checker: 
d, + Checker: d.user, }) if err != nil { return err @@ -260,8 +318,7 @@ func patchAction(ctx context.Context, action, src, dst string, d *data, fileCach if err != nil { return err } - - return fileutils.MoveFile(realSrc, realDest) + return files.MoveResource(src, dst, isSrcDir) default: return fmt.Errorf("unsupported action %s: %w", action, errors.ErrInvalidRequestParams) } @@ -272,28 +329,32 @@ type DiskUsageResponse struct { Used uint64 `json:"used"` } -var diskUsage = withUser(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) { - realPath, isDir, err := files.GetRealPath(d.user.Scope, r.URL.Path) - if err != nil { - return http.StatusNotFound, err +// diskUsage returns the disk usage information for a given directory. +// @Summary Get disk usage +// @Description Returns the total and used disk space for a specified directory. +// @Tags Resources +// @Accept json +// @Produce json +// @Param source query string false "Name for the desired source, default is used if not provided" +// @Success 200 {object} DiskUsageResponse "Disk usage details" +// @Failure 404 {object} map[string]string "Directory not found" +// @Failure 500 {object} map[string]string "Internal server error" +// @Router /api/usage [get] +func diskUsage(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) { + source := r.URL.Query().Get("source") + if source == "" { + source = "/" } file, err := files.FileInfoFaster(files.FileOptions{ - Path: realPath, - IsDir: isDir, - Modify: d.user.Perm.Modify, - Expand: false, - ReadHeader: false, - Checker: d, + Path: source, + Checker: d.user, }) if err != nil { return errToStatus(err), err } fPath := file.RealPath() - if !file.IsDir { - return renderJSON(w, r, &DiskUsageResponse{ - Total: 0, - Used: 0, - }) + if file.Type != "directory" { + return http.StatusBadRequest, fmt.Errorf("path is not a directory") } usage, err := disk.UsageWithContext(r.Context(), fPath) if err != nil { @@ -303,4 +364,12 @@ var diskUsage = withUser(func(w http.ResponseWriter, r *http.Request, d *data) ( Total: usage.Total, Used: usage.Used, }) -}) +} + +func inspectIndex(w http.ResponseWriter, r *http.Request) { + path := r.URL.Query().Get("path") + isDir := r.URL.Query().Get("isDir") == "true" + index := files.GetIndex(config.Server.Root) + info, _ := index.GetReducedMetadata(path, isDir) + renderJSON(w, r, info) // nolint:errcheck +} diff --git a/backend/http/router.go b/backend/http/router.go new file mode 100644 index 00000000..d7c9492f --- /dev/null +++ b/backend/http/router.go @@ -0,0 +1,186 @@ +package http + +import ( + "crypto/tls" + "embed" + "fmt" + "io/fs" + "log" + "net/http" + "os" + "text/template" + + "github.com/gtsteffaniak/filebrowser/settings" + "github.com/gtsteffaniak/filebrowser/storage" + "github.com/gtsteffaniak/filebrowser/version" + + httpSwagger "github.com/swaggo/http-swagger" // http-swagger middleware +) + +// Embed the files in the frontend/dist directory +// +//go:embed embed/* +var assets embed.FS + +// Boolean flag to determine whether to use the embedded FS or not +var embeddedFS = os.Getenv("FILEBROWSER_NO_EMBEDED") != "true" + +// Custom dirFS to handle both embedded and non-embedded file systems +type dirFS struct { + http.Dir +} + +// Implement the Open method for dirFS, which wraps http.Dir +func (d dirFS) Open(name string) (fs.File, error) { + return d.Dir.Open(name) +} + +var ( + store *storage.Storage + config *settings.Settings + fileCache FileCache + imgSvc ImgService + assetFs fs.FS +) + +func StartHttp(Service ImgService, 
storage *storage.Storage, cache FileCache) { + + store = storage + fileCache = cache + imgSvc = Service + config = &settings.Config + + var err error + + if embeddedFS { + // Embedded mode: Serve files from the embedded assets + assetFs, err = fs.Sub(assets, "embed") + if err != nil { + log.Fatal("Could not embed frontend. Does dist exist?") + } + } else { + assetFs = dirFS{Dir: http.Dir("http/dist")} + } + + templateRenderer = &TemplateRenderer{ + templates: template.Must(template.ParseFS(assetFs, "public/index.html")), + } + + router := http.NewServeMux() + // API group routing + api := http.NewServeMux() + + // User routes + api.HandleFunc("GET /users", withUser(userGetHandler)) + api.HandleFunc("POST /users", withSelfOrAdmin(usersPostHandler)) + api.HandleFunc("PUT /users", withUser(userPutHandler)) + api.HandleFunc("DELETE /users", withSelfOrAdmin(userDeleteHandler)) + + // Auth routes + api.HandleFunc("POST /auth/login", loginHandler) + api.HandleFunc("GET /auth/signup", signupHandler) + api.HandleFunc("POST /auth/renew", withUser(renewHandler)) + api.HandleFunc("PUT /auth/token", withUser(createApiKeyHandler)) + api.HandleFunc("GET /auth/token", withUser(createApiKeyHandler)) + api.HandleFunc("DELETE /auth/token", withUser(deleteApiKeyHandler)) + api.HandleFunc("GET /auth/tokens", withUser(listApiKeysHandler)) + + // Resources routes + api.HandleFunc("GET /resources", withUser(resourceGetHandler)) + api.HandleFunc("DELETE /resources", withUser(resourceDeleteHandler)) + api.HandleFunc("POST /resources", withUser(resourcePostHandler)) + api.HandleFunc("PUT /resources", withUser(resourcePutHandler)) + api.HandleFunc("PATCH /resources", withUser(resourcePatchHandler)) + api.HandleFunc("GET /usage", withUser(diskUsage)) + api.HandleFunc("GET /raw", withUser(rawHandler)) + api.HandleFunc("GET /preview", withUser(previewHandler)) + if version.Version == "testing" || version.Version == "untracked" { + api.HandleFunc("GET /inspectIndex", inspectIndex) + } + + // Share routes + api.HandleFunc("GET /shares", withPermShare(shareListHandler)) + api.HandleFunc("GET /share", withPermShare(shareGetsHandler)) + api.HandleFunc("POST /share", withPermShare(sharePostHandler)) + api.HandleFunc("DELETE /share", withPermShare(shareDeleteHandler)) + + // Public routes + api.HandleFunc("GET /public/publicUser", publicUserGetHandler) + api.HandleFunc("GET /public/dl", withHashFile(publicDlHandler)) + api.HandleFunc("GET /public/share", withHashFile(publicShareHandler)) + + // Settings routes + api.HandleFunc("GET /settings", withAdmin(settingsGetHandler)) + api.HandleFunc("PUT /settings", withAdmin(settingsPutHandler)) + + api.HandleFunc("GET /search", withUser(searchHandler)) + apiPath := config.Server.BaseURL + "api" + router.Handle(apiPath+"/", http.StripPrefix(apiPath, api)) + + // Static and index file handlers + router.HandleFunc(fmt.Sprintf("GET %vstatic/", config.Server.BaseURL), staticFilesHandler) + router.HandleFunc(config.Server.BaseURL, indexHandler) + + // health + router.HandleFunc(fmt.Sprintf("GET %vhealth/", config.Server.BaseURL), healthHandler) + + // Swagger + router.Handle(fmt.Sprintf("%vswagger/", config.Server.BaseURL), + httpSwagger.Handler( + httpSwagger.URL(config.Server.BaseURL+"swagger/doc.json"), //The url pointing to API definition + httpSwagger.DeepLinking(true), + httpSwagger.DocExpansion("none"), + httpSwagger.DomID("swagger-ui"), + ), + ) + + var scheme string + port := "" + + // Determine whether to use HTTPS (TLS) or HTTP + if config.Server.TLSCert != "" && 
config.Server.TLSKey != "" { + // Load the TLS certificate and key + cer, err := tls.LoadX509KeyPair(config.Server.TLSCert, config.Server.TLSKey) + if err != nil { + log.Fatalf("could not load certificate: %v", err) + } + + // Create a custom TLS listener + tlsConfig := &tls.Config{ + MinVersion: tls.VersionTLS12, + Certificates: []tls.Certificate{cer}, + } + + // Set HTTPS scheme and default port for TLS + scheme = "https" + + // Listen on TCP and wrap with TLS + listener, err := tls.Listen("tcp", fmt.Sprintf(":%v", config.Server.Port), tlsConfig) + if err != nil { + log.Fatalf("could not start TLS server: %v", err) + } + if config.Server.Port != 443 { + port = fmt.Sprintf(":%d", config.Server.Port) + } + // Build the full URL with host and port + fullURL := fmt.Sprintf("%s://localhost%s%s", scheme, port, config.Server.BaseURL) + log.Printf("Running at : %s", fullURL) + err = http.Serve(listener, muxWithMiddleware(router)) + if err != nil { + log.Fatalf("could not start server: %v", err) + } + } else { + // Set HTTP scheme and the default port for HTTP + scheme = "http" + if config.Server.Port != 443 { + port = fmt.Sprintf(":%d", config.Server.Port) + } + // Build the full URL with host and port + fullURL := fmt.Sprintf("%s://localhost%s%s", scheme, port, config.Server.BaseURL) + log.Printf("Running at : %s", fullURL) + err := http.ListenAndServe(fmt.Sprintf(":%v", config.Server.Port), muxWithMiddleware(router)) + if err != nil { + log.Fatalf("could not start server: %v", err) + } + } +} diff --git a/backend/http/search.go b/backend/http/search.go index a82e86aa..35487338 100644 --- a/backend/http/search.go +++ b/backend/http/search.go @@ -8,29 +8,63 @@ import ( "github.com/gtsteffaniak/filebrowser/settings" ) -var searchHandler = withUser(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) { - response := []map[string]interface{}{} +// searchHandler handles search requests for files based on the provided query. +// +// This endpoint processes a search query, retrieves relevant file paths, and +// returns a JSON response with the search results. The search is performed +// against the file index, which is built from the root directory specified in +// the server's configuration. The results are filtered based on the user's scope. +// +// The handler expects the following headers in the request: +// - SessionId: A unique identifier for the user's session. +// - UserScope: The scope of the user, which influences the search context. +// +// The request URL should include a query parameter named `query` that specifies +// the search terms to use. The response will include an array of searchResponse objects +// containing the path, type, and dir status. +// +// Example request: +// +// GET api/search?query=myfile +// +// Example response: +// [ +// +// { +// "path": "/path/to/myfile.txt", +// "type": "text" +// }, +// { +// "path": "/path/to/mydir/", +// "type": "directory" +// } +// +// ] +// +// @Summary Search Files +// @Description Searches for files matching the provided query. Returns file paths and metadata based on the user's session and scope. 
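To make the documented request shape concrete, here is a rough client-side sketch of the search call. Only the `query`, `scope`, and `SessionId` names come from the annotations here; the host, port, and authentication handling are assumptions.

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
)

func main() {
	// Build GET /api/search?query=...&scope=... as documented above.
	q := url.Values{}
	q.Set("query", "myfile")
	q.Set("scope", "/first/second")

	req, err := http.NewRequest("GET", "http://localhost:8080/api/search?"+q.Encode(), nil)
	if err != nil {
		panic(err)
	}
	// Optional session identifier; a unique value avoids collisions between searches.
	req.Header.Set("SessionId", "example-session-1")
	// Cookie or API-token auth is handled by middleware not shown in this hunk.

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(body)) // JSON array of search results
}
```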
+// @Tags Search +// @Accept json +// @Produce json +// @Param query query string true "Search query" +// @Param scope query string false "path within user scope to search, for example '/first/second' to search within the second directory only" +// @Param SessionId header string false "User session ID, add unique value to prevent collisions" +// @Success 200 {array} files.searchResult "List of search results" +// @Failure 400 {object} map[string]string "Bad Request" +// @Router /api/search [get] +func searchHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) { query := r.URL.Query().Get("query") + searchScope := strings.TrimPrefix(r.URL.Query().Get("scope"), ".") + searchScope = strings.TrimPrefix(searchScope, "/") // Retrieve the User-Agent and X-Auth headers from the request sessionId := r.Header.Get("SessionId") - userScope := r.Header.Get("UserScope") index := files.GetIndex(settings.Config.Server.Root) - combinedScope := strings.TrimPrefix(userScope+r.URL.Path, ".") - results, fileTypes := index.Search(query, combinedScope, sessionId) - for _, path := range results { - responseObj := map[string]interface{}{ - "path": path, - "dir": true, - } - if _, ok := fileTypes[path]; ok { - responseObj["dir"] = false - for filterType, value := range fileTypes[path] { - if value { - responseObj[filterType] = value - } - } - } - response = append(response, responseObj) - } + userScope := strings.TrimPrefix(d.user.Scope, ".") + combinedScope := strings.TrimPrefix(userScope+"/"+searchScope, "/") + + // Perform the search using the provided query and user scope + response := index.Search(query, combinedScope, sessionId) + // Set the Content-Type header to application/json + w.Header().Set("Content-Type", "application/json") return renderJSON(w, r, response) -}) +} diff --git a/backend/http/settings.go b/backend/http/settings.go index 121fac46..b2f7efc7 100644 --- a/backend/http/settings.go +++ b/backend/http/settings.go @@ -4,8 +4,8 @@ import ( "encoding/json" "net/http" - "github.com/gtsteffaniak/filebrowser/rules" "github.com/gtsteffaniak/filebrowser/settings" + "github.com/gtsteffaniak/filebrowser/users" ) type settingsData struct { @@ -13,37 +13,56 @@ type settingsData struct { CreateUserDir bool `json:"createUserDir"` UserHomeBasePath string `json:"userHomeBasePath"` Defaults settings.UserDefaults `json:"defaults"` - Rules []rules.Rule `json:"rules"` + Rules []users.Rule `json:"rules"` Frontend settings.Frontend `json:"frontend"` Commands map[string][]string `json:"commands"` } -var settingsGetHandler = withAdmin(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) { +// settingsGetHandler retrieves the current system settings. +// @Summary Get system settings +// @Description Returns the current configuration settings for signup, user directories, rules, frontend, and commands. 
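The scope handling in searchHandler above boils down to joining the user's scope with the optional `scope` query parameter and stripping leading "." and "/" so the result matches the index's relative paths. A small self-contained sketch of that combination; the sample values are hypothetical:

```go
package main

import (
	"fmt"
	"strings"
)

// combineScope mirrors the trimming done in searchHandler above.
func combineScope(userScope, searchScope string) string {
	searchScope = strings.TrimPrefix(searchScope, ".")
	searchScope = strings.TrimPrefix(searchScope, "/")
	userScope = strings.TrimPrefix(userScope, ".")
	return strings.TrimPrefix(userScope+"/"+searchScope, "/")
}

func main() {
	fmt.Println(combineScope("./users/steve", "/first/second")) // "users/steve/first/second"
	fmt.Println(combineScope(".", ""))                          // "" (search the whole scope)
}
```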
+// @Tags Settings +// @Accept json +// @Produce json +// @Success 200 {object} settingsData "System settings data" +// @Router /api/settings [get] +func settingsGetHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) { data := &settingsData{ - Signup: d.settings.Auth.Signup, - CreateUserDir: d.settings.Server.CreateUserDir, - UserHomeBasePath: d.settings.Server.UserHomeBasePath, - Defaults: d.settings.UserDefaults, - Rules: d.settings.Rules, - Frontend: d.settings.Frontend, + Signup: config.Auth.Signup, + CreateUserDir: config.Server.CreateUserDir, + UserHomeBasePath: config.Server.UserHomeBasePath, + Defaults: config.UserDefaults, + Rules: config.Rules, + Frontend: config.Frontend, } return renderJSON(w, r, data) -}) +} -var settingsPutHandler = withAdmin(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) { +// settingsPutHandler updates the system settings. +// @Summary Update system settings +// @Description Updates the system configuration settings for signup, user directories, rules, frontend, and commands. +// @Tags Settings +// @Accept json +// @Produce json +// @Param body body settingsData true "Settings data to update" +// @Success 200 "Settings updated successfully" +// @Failure 400 {object} map[string]string "Bad request - failed to decode body" +// @Failure 500 {object} map[string]string "Internal server error" +// @Router /api/settings [put] +func settingsPutHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) { req := &settingsData{} err := json.NewDecoder(r.Body).Decode(req) if err != nil { return http.StatusBadRequest, err } - d.settings.Server.CreateUserDir = req.CreateUserDir - d.settings.Server.UserHomeBasePath = req.UserHomeBasePath - d.settings.UserDefaults = req.Defaults - d.settings.Rules = req.Rules - d.settings.Frontend = req.Frontend - d.settings.Auth.Signup = req.Signup - err = d.store.Settings.Save(d.settings) + config.Server.CreateUserDir = req.CreateUserDir + config.Server.UserHomeBasePath = req.UserHomeBasePath + config.UserDefaults = req.Defaults + config.Rules = req.Rules + config.Frontend = req.Frontend + config.Auth.Signup = req.Signup + err = store.Settings.Save(config) return errToStatus(err), err -}) +} diff --git a/backend/http/share.go b/backend/http/share.go index 9bbfcb65..96ee4e89 100644 --- a/backend/http/share.go +++ b/backend/http/share.go @@ -8,7 +8,6 @@ import ( "net/http" "sort" "strconv" - "strings" "time" "golang.org/x/crypto/bcrypt" @@ -17,24 +16,24 @@ import ( "github.com/gtsteffaniak/filebrowser/share" ) -func withPermShare(fn handleFunc) handleFunc { - return withUser(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) { - if !d.user.Perm.Share { - return http.StatusForbidden, nil - } - return fn(w, r, d) - }) -} - -var shareListHandler = withPermShare(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) { +// shareListHandler returns a list of all share links. +// @Summary List share links +// @Description Returns a list of share links for the current user, or all links if the user is an admin. 
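Since the settings handlers above now read and write the package-level config directly, a client only needs the JSON shape of settingsData. A hedged sketch of fetching a couple of those fields; the host is a placeholder, admin auth is omitted, and only fields whose JSON tags appear in this diff are decoded:

```go
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

// Subset of the settingsData payload; tags follow the struct in this diff.
type settingsSubset struct {
	CreateUserDir    bool   `json:"createUserDir"`
	UserHomeBasePath string `json:"userHomeBasePath"`
}

func main() {
	// GET /api/settings requires an admin user; auth is omitted in this sketch.
	resp, err := http.Get("http://localhost:8080/api/settings")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var s settingsSubset
	if err := json.NewDecoder(resp.Body).Decode(&s); err != nil {
		panic(err)
	}
	fmt.Printf("createUserDir=%v home=%q\n", s.CreateUserDir, s.UserHomeBasePath)
}
```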
+// @Tags Shares +// @Accept json +// @Produce json +// @Success 200 {array} share.Link "List of share links" +// @Failure 500 {object} map[string]string "Internal server error" +// @Router /api/shares [get] +func shareListHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) { var ( s []*share.Link err error ) if d.user.Perm.Admin { - s, err = d.store.Share.All() + s, err = store.Share.All() } else { - s, err = d.store.Share.FindByUserID(d.user.ID) + s, err = store.Share.FindByUserID(d.user.ID) } if err == errors.ErrNotExist { return renderJSON(w, r, []*share.Link{}) @@ -51,39 +50,68 @@ var shareListHandler = withPermShare(func(w http.ResponseWriter, r *http.Request return s[i].Expire < s[j].Expire }) return renderJSON(w, r, s) -}) +} -var shareGetsHandler = withPermShare(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) { - s, err := d.store.Share.Gets(r.URL.Path, d.user.ID) +// shareGetsHandler retrieves share links for a specific resource path. +// @Summary Get share links by path +// @Description Retrieves all share links associated with a specific resource path for the current user. +// @Tags Shares +// @Accept json +// @Produce json +// @Param path query string true "Resource path for which to retrieve share links" +// @Success 200 {array} share.Link "List of share links for the specified path" +// @Failure 500 {object} map[string]string "Internal server error" +// @Router /api/share [get] +func shareGetsHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) { + path := r.URL.Query().Get("path") + s, err := store.Share.Gets(path, d.user.ID) if err == errors.ErrNotExist { return renderJSON(w, r, []*share.Link{}) } - if err != nil { return http.StatusInternalServerError, err } - return renderJSON(w, r, s) -}) +} -var shareDeleteHandler = withPermShare(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) { - hash := strings.TrimSuffix(r.URL.Path, "/") - hash = strings.TrimPrefix(hash, "/") +// shareDeleteHandler deletes a specific share link by its hash. +// @Summary Delete a share link +// @Description Deletes a share link specified by its hash. +// @Tags Shares +// @Accept json +// @Produce json +// @Param hash path string true "Hash of the share link to delete" +// @Success 200 "Share link deleted successfully" +// @Failure 400 {object} map[string]string "Bad request - missing or invalid hash" +// @Failure 500 {object} map[string]string "Internal server error" +// @Router /api/shares/{hash} [delete] +func shareDeleteHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) { + hash := r.URL.Query().Get("hash") if hash == "" { return http.StatusBadRequest, nil } - err := d.store.Share.Delete(hash) + err := store.Share.Delete(hash) if err != nil { return errToStatus(err), err } return errToStatus(err), err -}) - -var sharePostHandler = withPermShare(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) { +} +// sharePostHandler creates a new share link. +// @Summary Create a share link +// @Description Creates a new share link with an optional expiration time and password protection. 
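Putting the share endpoints together, here is a rough sketch of creating a password-protected link. The handler reads the target from the `path` query parameter and the body fields come from share.CreateBody; the host, the expiration values, and the password are placeholders, and auth handling is omitted:

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"net/url"
)

func main() {
	// share.CreateBody fields as defined in this diff; the unit value is
	// hypothetical since the accepted units are not shown in this hunk.
	body, _ := json.Marshal(map[string]string{
		"expires":  "7",
		"unit":     "days",
		"password": "hunter2",
	})

	// The router registers "POST /share" under the api group, so the full
	// path is {BaseURL}api/share.
	q := url.Values{}
	q.Set("path", "/myfolder/subfolder")

	resp, err := http.Post(
		"http://localhost:8080/api/share?"+q.Encode(),
		"application/json",
		bytes.NewReader(body),
	)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	out, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(out)) // share.Link JSON, including the generated hash
}
```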
+// @Tags Shares +// @Accept json +// @Produce json +// @Param body body share.CreateBody true "Share link creation parameters" +// @Success 200 {object} share.Link "Created share link" +// @Failure 400 {object} map[string]string "Bad request - failed to decode body" +// @Failure 500 {object} map[string]string "Internal server error" +// @Router /api/shares [post] +func sharePostHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) { var s *share.Link var body share.CreateBody if r.Body != nil { @@ -93,14 +121,11 @@ var sharePostHandler = withPermShare(func(w http.ResponseWriter, r *http.Request defer r.Body.Close() } - bytes := make([]byte, 6) //nolint:gomnd - _, err := rand.Read(bytes) + secure_hash, err := generateShortUUID() if err != nil { return http.StatusInternalServerError, err } - str := base64.URLEncoding.EncodeToString(bytes) - var expire int64 = 0 if body.Expires != "" { @@ -139,24 +164,24 @@ var sharePostHandler = withPermShare(func(w http.ResponseWriter, r *http.Request token = base64.URLEncoding.EncodeToString(tokenBuffer) stringHash = string(hash) } + path := r.URL.Query().Get("path") s = &share.Link{ - Path: strings.TrimSuffix(r.URL.Path, "/"), - Hash: str, + Path: path, + Hash: secure_hash, Expire: expire, UserID: d.user.ID, PasswordHash: stringHash, Token: token, } - if err := d.store.Share.Save(s); err != nil { + if err := store.Share.Save(s); err != nil { return http.StatusInternalServerError, err } return renderJSON(w, r, s) -}) +} func getSharePasswordHash(body share.CreateBody) (data []byte, statuscode int, err error) { - if body.Password == "" { return nil, 0, nil } @@ -168,3 +193,18 @@ func getSharePasswordHash(body share.CreateBody) (data []byte, statuscode int, e return hash, 0, nil } + +func generateShortUUID() (string, error) { + // Generate 16 random bytes (128 bits of entropy) + bytes := make([]byte, 16) + _, err := rand.Read(bytes) + if err != nil { + return "", err + } + + // Encode the bytes to a URL-safe base64 string + uuid := base64.RawURLEncoding.EncodeToString(bytes) + + // Trim the length to 22 characters for a shorter ID + return uuid[:22], nil +} diff --git a/backend/http/static.go b/backend/http/static.go index 9b7eee13..c0a055ef 100644 --- a/backend/http/static.go +++ b/backend/http/static.go @@ -14,41 +14,58 @@ import ( "github.com/gtsteffaniak/filebrowser/auth" "github.com/gtsteffaniak/filebrowser/settings" - "github.com/gtsteffaniak/filebrowser/storage" "github.com/gtsteffaniak/filebrowser/version" ) -func handleWithStaticData(w http.ResponseWriter, _ *http.Request, d *data, fSys fs.FS, file, contentType string) (int, error) { +var templateRenderer *TemplateRenderer + +type TemplateRenderer struct { + templates *template.Template +} + +// Render renders a template document with headers and data +func (t *TemplateRenderer) Render(w http.ResponseWriter, name string, data interface{}) error { + // Set headers + w.Header().Set("Cache-Control", "no-cache, private, max-age=0") + w.Header().Set("Pragma", "no-cache") + w.Header().Set("X-Accel-Expires", "0") + w.Header().Set("Transfer-Encoding", "identity") + // Execute the template with the provided data + return t.templates.ExecuteTemplate(w, name, data) +} + +func handleWithStaticData(w http.ResponseWriter, r *http.Request, file, contentType string) { w.Header().Set("Content-Type", contentType) - auther, err := d.store.Auth.Get(d.settings.Auth.Method) + auther, err := store.Auth.Get(config.Auth.Method) if err != nil { - return http.StatusInternalServerError, err + 
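A quick note on generateShortUUID defined earlier in share.go: 16 random bytes encode to exactly 22 characters under raw (unpadded) URL-safe base64, so the `uuid[:22]` slice keeps the full 128 bits of entropy rather than truncating it. A tiny sketch demonstrating this:

```go
package main

import (
	"crypto/rand"
	"encoding/base64"
	"fmt"
)

func main() {
	// 16 bytes = 128 bits of entropy, as in generateShortUUID.
	b := make([]byte, 16)
	if _, err := rand.Read(b); err != nil {
		panic(err)
	}
	id := base64.RawURLEncoding.EncodeToString(b)

	// RawURLEncoding of 16 bytes always yields 22 characters,
	// so slicing to 22 is effectively a no-op.
	fmt.Println(len(id), base64.RawURLEncoding.EncodedLen(16)) // 22 22
	fmt.Println(id[:22] == id)                                 // true
}
```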
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError) + return } data := map[string]interface{}{ - "Name": d.settings.Frontend.Name, - "DisableExternal": d.settings.Frontend.DisableExternal, - "DisableUsedPercentage": d.settings.Frontend.DisableUsedPercentage, + "Name": config.Frontend.Name, + "DisableExternal": config.Frontend.DisableExternal, + "DisableUsedPercentage": config.Frontend.DisableUsedPercentage, "darkMode": settings.Config.UserDefaults.DarkMode, - "Color": d.settings.Frontend.Color, - "BaseURL": d.server.BaseURL, + "Color": config.Frontend.Color, + "BaseURL": config.Server.BaseURL, "Version": version.Version, "CommitSHA": version.CommitSHA, - "StaticURL": path.Join(d.server.BaseURL, "/static"), + "StaticURL": path.Join(config.Server.BaseURL, "static"), "Signup": settings.Config.Auth.Signup, - "NoAuth": d.settings.Auth.Method == "noauth", - "AuthMethod": d.settings.Auth.Method, + "NoAuth": config.Auth.Method == "noauth", + "AuthMethod": config.Auth.Method, "LoginPage": auther.LoginPage(), "CSS": false, "ReCaptcha": false, - "EnableThumbs": d.server.EnableThumbnails, - "ResizePreview": d.server.ResizePreview, - "EnableExec": d.server.EnableExec, + "EnableThumbs": config.Server.EnableThumbnails, + "ResizePreview": config.Server.ResizePreview, + "EnableExec": config.Server.EnableExec, } - if d.settings.Frontend.Files != "" { - fPath := filepath.Join(d.settings.Frontend.Files, "custom.css") + if config.Frontend.Files != "" { + fPath := filepath.Join(config.Frontend.Files, "custom.css") _, err := os.Stat(fPath) //nolint:govet if err != nil && !os.IsNotExist(err) { @@ -60,15 +77,17 @@ func handleWithStaticData(w http.ResponseWriter, _ *http.Request, d *data, fSys } } - if d.settings.Auth.Method == "password" { - raw, err := d.store.Auth.Get(d.settings.Auth.Method) //nolint:govet + if config.Auth.Method == "password" { + raw, err := store.Auth.Get(config.Auth.Method) //nolint:govet if err != nil { - return http.StatusInternalServerError, err + http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError) + return } auther, ok := raw.(*auth.JSONAuth) if !ok { - return http.StatusInternalServerError, fmt.Errorf("failed to assert type *auth.JSONAuth") + http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError) + return } if auther.ReCaptcha != nil { @@ -80,77 +99,47 @@ func handleWithStaticData(w http.ResponseWriter, _ *http.Request, d *data, fSys b, err := json.Marshal(data) if err != nil { - return http.StatusInternalServerError, err + http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError) + return } - data["Json"] = strings.ReplaceAll(string(b), `'`, `\'`) + data["globalVars"] = strings.ReplaceAll(string(b), `'`, `\'`) - fileContents, err := fs.ReadFile(fSys, file) - if err != nil { - if err == os.ErrNotExist { - return http.StatusNotFound, err - } - return http.StatusInternalServerError, err + // Render the template with global variables + if err := templateRenderer.Render(w, file, data); err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) } - index := template.Must(template.New("index").Delims("[{[", "]}]").Parse(string(fileContents))) - err = index.Execute(w, data) - if err != nil { - return http.StatusInternalServerError, err - } - - return 0, nil } -func getStaticHandlers(store *storage.Storage, server *settings.Server, assetsFs fs.FS) (index, static http.Handler) { - index = handle(func(w 
http.ResponseWriter, r *http.Request, d *data) (int, error) { - if r.Method != http.MethodGet { - return http.StatusNotFound, nil - } - - w.Header().Set("x-xss-protection", "1; mode=block") - return handleWithStaticData(w, r, d, assetsFs, "public/index.html", "text/html; charset=utf-8") - }, "", store, server) - - static = handle(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) { - if r.Method != http.MethodGet { - return http.StatusNotFound, nil - } - - const maxAge = 86400 // 1 day - w.Header().Set("Cache-Control", fmt.Sprintf("public, max-age=%v", maxAge)) - - if d.settings.Frontend.Files != "" { - if strings.HasPrefix(r.URL.Path, "img/") { - fPath := filepath.Join(d.settings.Frontend.Files, r.URL.Path) - if _, err := os.Stat(fPath); err == nil { - http.ServeFile(w, r, fPath) - return 0, nil - } - } else if r.URL.Path == "custom.css" && d.settings.Frontend.Files != "" { - http.ServeFile(w, r, filepath.Join(d.settings.Frontend.Files, "custom.css")) - return 0, nil - } - } - - if !strings.HasSuffix(r.URL.Path, ".js") { - http.FileServer(http.FS(assetsFs)).ServeHTTP(w, r) - return 0, nil - } - - fileContents, err := fs.ReadFile(assetsFs, r.URL.Path+".gz") - if err != nil { - return http.StatusNotFound, err - } - - w.Header().Set("Content-Encoding", "gzip") +func staticFilesHandler(w http.ResponseWriter, r *http.Request) { + const maxAge = 86400 // 1 day + w.Header().Set("Cache-Control", fmt.Sprintf("public, max-age=%v", maxAge)) + w.Header().Set("Content-Security-Policy", `default-src 'self'; style-src 'unsafe-inline';`) + // Remove "/static/" from the request path + adjustedPath := strings.TrimPrefix(r.URL.Path, fmt.Sprintf("%vstatic/", config.Server.BaseURL)) + adjustedCompressed := adjustedPath + ".gz" + if strings.HasSuffix(adjustedPath, ".js") { w.Header().Set("Content-Type", "application/javascript; charset=utf-8") // Set the correct MIME type for JavaScript files - - if _, err := w.Write(fileContents); err != nil { - return http.StatusInternalServerError, err + } + // Check if the gzipped version of the file exists + fileContents, err := fs.ReadFile(assetFs, adjustedCompressed) + if err == nil { + w.Header().Set("Content-Encoding", "gzip") // Let the browser know the file is compressed + status, err := w.Write(fileContents) // Write the gzipped file content to the response + if err != nil { + http.Error(w, http.StatusText(status), status) } - - return 0, nil - }, "/static/", store, server) - - return index, static + } else { + // Otherwise, serve the regular file + http.StripPrefix(fmt.Sprintf("%vstatic/", config.Server.BaseURL), http.FileServer(http.FS(assetFs))).ServeHTTP(w, r) + } +} + +func indexHandler(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodGet { + http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound) + return + } + handleWithStaticData(w, r, "index.html", "text/html") + } diff --git a/backend/http/users.go b/backend/http/users.go index 7bfdd50a..60b94262 100644 --- a/backend/http/users.go +++ b/backend/http/users.go @@ -2,12 +2,12 @@ package http import ( "encoding/json" + "io" "net/http" "reflect" "sort" "strconv" - "github.com/gorilla/mux" "golang.org/x/text/cases" "golang.org/x/text/language" @@ -26,102 +26,145 @@ type Sorting struct { By string `json:"by"` Asc bool `json:"asc"` } -type modifyUserRequest struct { - modifyRequest - Data *users.User `json:"data"` +type UserRequest struct { + What string `json:"what"` + Which []string `json:"which"` + Data *users.User `json:"data"` } -func getUserID(r 
*http.Request) (uint, error) { - vars := mux.Vars(r) - i, err := strconv.ParseUint(vars["id"], 10, 0) - if err != nil { - return 0, err - } - return uint(i), err -} +// userGetHandler retrieves a user by ID. +// @Summary Retrieve a user by ID +// @Description Returns a user's details based on their ID. +// @Tags Users +// @Accept json +// @Produce json +// @Param id path int true "User ID" or "self" +// @Success 200 {object} users.User "User details" +// @Failure 403 {object} map[string]string "Forbidden" +// @Failure 404 {object} map[string]string "Not Found" +// @Failure 500 {object} map[string]string "Internal Server Error" +// @Router /api/users/{id} [get] +func userGetHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) { + givenUserIdString := r.URL.Query().Get("id") -func getUser(_ http.ResponseWriter, r *http.Request) (*modifyUserRequest, error) { - if r.Body == nil { - return nil, errors.ErrEmptyRequest - } + // since api self is used to validate a logged in user + w.Header().Add("X-Renew-Token", "false") - req := &modifyUserRequest{} - err := json.NewDecoder(r.Body).Decode(req) - if err != nil { - return nil, err - } - - if req.What != "user" { - return nil, errors.ErrInvalidDataType - } - - return req, nil -} - -func withSelfOrAdmin(fn handleFunc) handleFunc { - return withUser(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) { - id, err := getUserID(r) + var givenUserId uint + if givenUserIdString == "self" { + givenUserId = d.user.ID + } else if givenUserIdString == "" { + if !d.user.Perm.Admin { + return http.StatusForbidden, nil + } + users, err := store.Users.Gets(config.Server.Root) if err != nil { return http.StatusInternalServerError, err } - if d.user.ID != id && !d.user.Perm.Admin { - return http.StatusForbidden, nil + + for _, u := range users { + u.Password = "" + } + for _, u := range users { + u.ApiKeys = nil } - d.raw = id - return fn(w, r, d) - }) -} + sort.Slice(users, func(i, j int) bool { + return users[i].ID < users[j].ID + }) -var usersGetHandler = withAdmin(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) { - users, err := d.store.Users.Gets(d.server.Root) - if err != nil { - return http.StatusInternalServerError, err + return renderJSON(w, r, users) + } else { + num, _ := strconv.ParseUint(givenUserIdString, 10, 32) + givenUserId = uint(num) } - for _, u := range users { - u.Password = "" + if givenUserId != d.user.ID && !d.user.Perm.Admin { + return http.StatusForbidden, nil } - sort.Slice(users, func(i, j int) bool { - return users[i].ID < users[j].ID - }) - - return renderJSON(w, r, users) -}) - -var userGetHandler = withSelfOrAdmin(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) { - u, err := d.store.Users.Get(d.server.Root, d.raw.(uint)) + // Fetch the user details + u, err := store.Users.Get(config.Server.Root, givenUserId) if err == errors.ErrNotExist { return http.StatusNotFound, err } - if err != nil { return http.StatusInternalServerError, err } + // Remove the password from the response if the user is not an admin u.Password = "" + u.ApiKeys = nil if !d.user.Perm.Admin { u.Scope = "" } - return renderJSON(w, r, u) -}) -var userDeleteHandler = withSelfOrAdmin(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) { - err := d.store.Users.Delete(d.raw.(uint)) + return renderJSON(w, r, u) +} + +// userDeleteHandler deletes a user by ID. +// @Summary Delete a user by ID +// @Description Deletes a user identified by their ID. 
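As userGetHandler above shows, lookups are driven by an `id` query parameter, with the special value `self` resolving to the logged-in user and an empty id returning the full list for admins. A rough client sketch; the host is a placeholder and auth is omitted:

```go
package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// "self" returns the calling user; a numeric id or an empty id
	// (admin-only listing) are the other cases handled above.
	req, err := http.NewRequest("GET", "http://localhost:8080/api/users?id=self", nil)
	if err != nil {
		panic(err)
	}
	// Cookie or API-token auth is handled by middleware not shown here.

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(body)) // users.User JSON with password and api keys stripped
}
```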
+// @Tags Users +// @Accept json +// @Produce json +// @Param id path int true "User ID" +// @Success 200 "User deleted successfully" +// @Failure 403 {object} map[string]string "Forbidden" +// @Failure 500 {object} map[string]string "Internal Server Error" +// @Router /api/users/{id} [delete] +func userDeleteHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) { + givenUserIdString := r.URL.Query().Get("id") + num, _ := strconv.ParseUint(givenUserIdString, 10, 32) + givenUserId := uint(num) + + if givenUserId == d.user.ID || !d.user.Perm.Admin { + return http.StatusForbidden, nil + } + + // Delete the user + err := store.Users.Delete(givenUserId) if err != nil { return errToStatus(err), err } - return http.StatusOK, nil -}) +} -var userPostHandler = withAdmin(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) { - req, err := getUser(w, r) +// usersPostHandler creates a new user. +// @Summary Create a new user +// @Description Adds a new user to the system. +// @Tags Users +// @Accept json +// @Produce json +// @Param data body users.User true "User data to create a new user" +// @Success 201 {object} users.User "Created user" +// @Failure 400 {object} map[string]string "Bad Request" +// @Failure 500 {object} map[string]string "Internal Server Error" +// @Router /api/users [post] +func usersPostHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) { + if !d.user.Perm.Admin { + return http.StatusForbidden, nil + } + + // Validate the user's scope + _, _, err := files.GetRealPath(config.Server.Root, d.user.Scope) if err != nil { return http.StatusBadRequest, err } + // Read the JSON body + body, err := io.ReadAll(r.Body) + if err != nil { + return http.StatusInternalServerError, err + } + defer r.Body.Close() + + // Parse the JSON into the UserRequest struct + var req UserRequest + if err = json.Unmarshal(body, &req); err != nil { + return http.StatusBadRequest, err + } + if len(req.Which) != 0 { return http.StatusBadRequest, nil } @@ -137,22 +180,50 @@ var userPostHandler = withAdmin(func(w http.ResponseWriter, r *http.Request, d * w.Header().Set("Location", "/settings/users/"+strconv.FormatUint(uint64(req.Data.ID), 10)) return http.StatusCreated, nil -}) +} -var userPutHandler = withSelfOrAdmin(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) { - req, err := getUser(w, r) +// userPutHandler updates an existing user's details. +// @Summary Update a user's details +// @Description Updates the details of a user identified by ID. 
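The create flow in usersPostHandler above decodes the body into UserRequest, so a create request is essentially a `data` object with `which` left empty. A hedged sketch of building such a body; the username, password, and host are placeholders and admin auth is omitted:

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Shape of UserRequest as defined above; "which" must be empty for creation.
	payload := map[string]interface{}{
		"what":  "user",
		"which": []string{},
		"data": map[string]interface{}{
			"username": "newuser",  // placeholder
			"password": "changeme", // placeholder
		},
	}
	body, _ := json.Marshal(payload)

	resp, err := http.Post("http://localhost:8080/api/users", "application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	out, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(out)) // 201 Created with a Location header on success
}
```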
+// @Tags Users +// @Accept json +// @Produce json +// @Param id path int true "User ID" +// @Param data body users.User true "User data to update" +// @Success 200 {object} users.User "Updated user details" +// @Failure 400 {object} map[string]string "Bad Request" +// @Failure 403 {object} map[string]string "Forbidden" +// @Failure 500 {object} map[string]string "Internal Server Error" +// @Router /api/users/{id} [put] +func userPutHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) { + givenUserIdString := r.URL.Query().Get("id") + num, _ := strconv.ParseUint(givenUserIdString, 10, 32) + givenUserId := uint(num) + + if givenUserId != d.user.ID && !d.user.Perm.Admin { + return http.StatusForbidden, nil + } + + // Validate the user's scope + _, _, err := files.GetRealPath(config.Server.Root, d.user.Scope) if err != nil { return http.StatusBadRequest, err } - if req.Data.ID != d.raw.(uint) { - return http.StatusBadRequest, nil - } - _, _, err = files.GetRealPath(d.server.Root, req.Data.Scope) + // Read the JSON body + body, err := io.ReadAll(r.Body) if err != nil { - return http.StatusBadRequest, nil + return http.StatusInternalServerError, err + } + defer r.Body.Close() + + // Parse the JSON into the UserRequest struct + var req UserRequest + if err = json.Unmarshal(body, &req); err != nil { + return http.StatusBadRequest, err } + // If `Which` is not specified, default to updating all fields if len(req.Which) == 0 || req.Which[0] == "all" { req.Which = []string{} v := reflect.ValueOf(req.Data) @@ -160,6 +231,8 @@ var userPutHandler = withSelfOrAdmin(func(w http.ResponseWriter, r *http.Request v = v.Elem() } t := v.Type() + + // Dynamically populate fields to update for i := 0; i < t.NumField(); i++ { field := t.Field(i) if field.Name == "Password" && req.Data.Password != "" { @@ -170,10 +243,13 @@ var userPutHandler = withSelfOrAdmin(func(w http.ResponseWriter, r *http.Request } } - for k, v := range req.Which { - v = cases.Title(language.English, cases.NoLower).String(v) - req.Which[k] = v - if v == "Password" { + // Process the fields to update + for _, field := range req.Which { + // Title case field names + field = cases.Title(language.English, cases.NoLower).String(field) + + // Handle password update + if field == "Password" { if !d.user.Perm.Admin && d.user.LockPassword { return http.StatusForbidden, nil } @@ -183,16 +259,20 @@ var userPutHandler = withSelfOrAdmin(func(w http.ResponseWriter, r *http.Request } } - for _, f := range NonModifiableFieldsForNonAdmin { - if !d.user.Perm.Admin && v == f { + // Prevent non-admins from modifying certain fields + for _, restrictedField := range NonModifiableFieldsForNonAdmin { + if !d.user.Perm.Admin && field == restrictedField { return http.StatusForbidden, nil } } } - err = d.store.Users.Update(req.Data, req.Which...) + // Perform the user update + err = store.Users.Update(req.Data, req.Which...) 
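For partial updates, the handler above title-cases each entry in `which` and passes the list straight to store.Users.Update, so a client only names the fields it wants changed. A rough sketch of such a request; field values, user id, and host are placeholders, and auth is omitted:

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Update only the locale and dark mode of user 2; fields of "data" that
	// are not listed in "which" are ignored by the handler above.
	payload := map[string]interface{}{
		"what":  "user",
		"which": []string{"locale", "darkMode"},
		"data": map[string]interface{}{
			"id":       2,
			"locale":   "en",
			"darkMode": true,
		},
	}
	body, _ := json.Marshal(payload)

	req, err := http.NewRequest(http.MethodPut, "http://localhost:8080/api/users?id=2", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	// Admin or matching-user auth is handled by middleware not shown here.

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status) // the updated user JSON (password cleared) is returned on success
}
```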
if err != nil { return http.StatusInternalServerError, err } - return http.StatusOK, nil -}) + // Return the updated user (with the password hidden) as JSON response + req.Data.Password = "" + return renderJSON(w, r, req.Data) +} diff --git a/backend/http/utils.go b/backend/http/utils.go index 1f216635..0aec1c9f 100644 --- a/backend/http/utils.go +++ b/backend/http/utils.go @@ -1,30 +1,13 @@ package http import ( - "encoding/json" "errors" "net/http" - "net/url" "os" - "strings" libErrors "github.com/gtsteffaniak/filebrowser/errors" ) -func renderJSON(w http.ResponseWriter, _ *http.Request, data interface{}) (int, error) { - marsh, err := json.Marshal(data) - if err != nil { - return http.StatusInternalServerError, err - } - - w.Header().Set("Content-Type", "application/json; charset=utf-8") - if _, err := w.Write(marsh); err != nil { - return http.StatusInternalServerError, err - } - - return 0, nil -} - func errToStatus(err error) int { switch { case err == nil: @@ -45,23 +28,3 @@ func errToStatus(err error) int { return http.StatusInternalServerError } } - -// This is an addaptation if http.StripPrefix in which we don't -// return 404 if the page doesn't have the needed prefix. -func stripPrefix(prefix string, h http.Handler) http.Handler { - if prefix == "" || prefix == "/" { - return h - } - - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - p := strings.TrimPrefix(r.URL.Path, prefix) - rp := strings.TrimPrefix(r.URL.RawPath, prefix) - r2 := new(http.Request) - *r2 = *r - r2.URL = new(url.URL) - *r2.URL = *r.URL - r2.URL.Path = p - r2.URL.RawPath = rp - h.ServeHTTP(w, r2) - }) -} diff --git a/backend/myfolder/subfolder/Screenshot 2024-11-18 at 2.16.29 PM.png b/backend/myfolder/subfolder/Screenshot 2024-11-18 at 2.16.29 PM.png new file mode 100755 index 00000000..4deb950c Binary files /dev/null and b/backend/myfolder/subfolder/Screenshot 2024-11-18 at 2.16.29 PM.png differ diff --git a/backend/rules/rules_test.go b/backend/rules/rules_test.go deleted file mode 100644 index 570f921f..00000000 --- a/backend/rules/rules_test.go +++ /dev/null @@ -1,23 +0,0 @@ -package rules - -import "testing" - -func TestMatchHidden(t *testing.T) { - cases := map[string]bool{ - "/": false, - "/src": false, - "/src/": false, - "/.circleci": true, - "/a/b/c/.docker.json": true, - ".docker.json": true, - "Dockerfile": false, - "/Dockerfile": false, - } - - for path, want := range cases { - got := MatchHidden(path) - if got != want { - t.Errorf("MatchHidden(%s)=%v; want %v", path, got, want) - } - } -} diff --git a/backend/settings/config.go b/backend/settings/config.go index 479a1c40..7dbd9ad2 100644 --- a/backend/settings/config.go +++ b/backend/settings/config.go @@ -4,8 +4,10 @@ import ( "log" "os" "path/filepath" + "strings" "github.com/goccy/go-yaml" + "github.com/gtsteffaniak/filebrowser/users" ) var Config Settings @@ -28,6 +30,12 @@ func Initialize(configFile string) { log.Fatalf("ERROR: Configured Root Path does not exist! 
%v", err) } Config.Server.Root = realRoot + baseurl := strings.Trim(Config.Server.BaseURL, "/") + if baseurl == "" { + Config.Server.BaseURL = "/" + } else { + Config.Server.BaseURL = "/" + baseurl + "/" + } } func loadConfigFile(configFile string) []byte { @@ -86,7 +94,7 @@ func setDefaults() Settings { DisableSettings: false, ViewMode: "normal", Locale: "en", - Permissions: Permissions{ + Permissions: users.Permissions{ Create: false, Rename: false, Modify: false, @@ -94,6 +102,7 @@ func setDefaults() Settings { Share: false, Download: false, Admin: false, + Api: false, }, }, } diff --git a/backend/settings/dir_test.go b/backend/settings/dir_test.go index fd28e5b1..f55e0074 100644 --- a/backend/settings/dir_test.go +++ b/backend/settings/dir_test.go @@ -3,7 +3,7 @@ package settings import ( "testing" - "github.com/gtsteffaniak/filebrowser/rules" + "github.com/gtsteffaniak/filebrowser/users" ) func TestSettings_MakeUserDir(t *testing.T) { @@ -15,7 +15,7 @@ func TestSettings_MakeUserDir(t *testing.T) { Shell []string AdminUsername string AdminPassword string - Rules []rules.Rule + Rules []users.Rule Server Server Auth Auth Frontend Frontend diff --git a/backend/settings/settings.go b/backend/settings/settings.go index ae04b3d3..b5bedecf 100644 --- a/backend/settings/settings.go +++ b/backend/settings/settings.go @@ -2,9 +2,8 @@ package settings import ( "crypto/rand" - "strings" - "github.com/gtsteffaniak/filebrowser/rules" + "github.com/gtsteffaniak/filebrowser/users" ) const DefaultUsersHomeBasePath = "/users" @@ -12,18 +11,6 @@ const DefaultUsersHomeBasePath = "/users" // AuthMethod describes an authentication method. type AuthMethod string -// Settings contain the main settings of the application. -// GetRules implements rules.Provider. -func (s *Settings) GetRules() []rules.Rule { - return s.Rules -} - -// Server specific settings -// Clean cleans any variables that might need cleaning. -func (s *Server) Clean() { - s.BaseURL = strings.TrimSuffix(s.BaseURL, "/") -} - // GenerateKey generates a key of 512 bits. func GenerateKey() ([]byte, error) { b := make([]byte, 64) //nolint:gomnd @@ -40,8 +27,8 @@ func GetSettingsConfig(nameType string, Value string) string { return nameType + Value } -func AdminPerms() Permissions { - return Permissions{ +func AdminPerms() users.Permissions { + return users.Permissions{ Create: true, Rename: true, Modify: true, @@ -49,5 +36,23 @@ func AdminPerms() Permissions { Share: true, Download: true, Admin: true, + Api: true, } } + +// Apply applies the default options to a user. 
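The BaseURL normalization in settings.Initialize above reduces every configured value to either "/" or "/segment/.../": leading and trailing slashes are trimmed and then re-added consistently. A small sketch of the resulting behaviour; the sample inputs are hypothetical:

```go
package main

import (
	"fmt"
	"strings"
)

// normalizeBaseURL mirrors the trimming done in settings.Initialize above.
func normalizeBaseURL(raw string) string {
	baseurl := strings.Trim(raw, "/")
	if baseurl == "" {
		return "/"
	}
	return "/" + baseurl + "/"
}

func main() {
	for _, in := range []string{"", "/", "files", "/files", "files/", "/a/b/"} {
		fmt.Printf("%q -> %q\n", in, normalizeBaseURL(in))
	}
	// "" -> "/", "/" -> "/", "files" -> "/files/", "/files" -> "/files/",
	// "files/" -> "/files/", "/a/b/" -> "/a/b/"
}
```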
+func ApplyUserDefaults(u users.User) users.User { + u.StickySidebar = Config.UserDefaults.StickySidebar + u.DisableSettings = Config.UserDefaults.DisableSettings + u.DarkMode = Config.UserDefaults.DarkMode + u.Scope = Config.UserDefaults.Scope + u.Locale = Config.UserDefaults.Locale + u.ViewMode = Config.UserDefaults.ViewMode + u.SingleClick = Config.UserDefaults.SingleClick + u.Perm = Config.UserDefaults.Perm + u.Sorting = Config.UserDefaults.Sorting + u.Commands = Config.UserDefaults.Commands + u.HideDotfiles = Config.UserDefaults.HideDotfiles + u.DateFormat = Config.UserDefaults.DateFormat + return u +} diff --git a/backend/settings/storage.go b/backend/settings/storage.go index 40a6eb35..8d0f6f37 100644 --- a/backend/settings/storage.go +++ b/backend/settings/storage.go @@ -2,7 +2,7 @@ package settings import ( "github.com/gtsteffaniak/filebrowser/errors" - "github.com/gtsteffaniak/filebrowser/rules" + "github.com/gtsteffaniak/filebrowser/users" ) // StorageBackend is a settings storage backend. @@ -62,7 +62,7 @@ func (s *Storage) Save(set *Settings) error { } if set.Rules == nil { - set.Rules = []rules.Rule{} + set.Rules = []users.Rule{} } if set.Commands == nil { @@ -94,6 +94,5 @@ func (s *Storage) GetServer() (*Server, error) { // SaveServer wraps StorageBackend.SaveServer and adds some verification. func (s *Storage) SaveServer(ser *Server) error { - ser.Clean() return s.back.SaveServer(ser) } diff --git a/backend/settings/structs.go b/backend/settings/structs.go index f9a003a3..78d3304a 100644 --- a/backend/settings/structs.go +++ b/backend/settings/structs.go @@ -1,13 +1,13 @@ package settings import ( - "github.com/gtsteffaniak/filebrowser/rules" + "github.com/gtsteffaniak/filebrowser/users" ) type Settings struct { Commands map[string][]string `json:"commands"` Shell []string `json:"shell"` - Rules []rules.Rule `json:"rules"` + Rules []users.Rule `json:"rules"` Server Server `json:"server"` Auth Auth `json:"auth"` Frontend Frontend `json:"frontend"` @@ -76,25 +76,14 @@ type UserDefaults struct { ViewMode string `json:"viewMode"` GallerySize int `json:"gallerySize"` SingleClick bool `json:"singleClick"` - Rules []rules.Rule `json:"rules"` + Rules []users.Rule `json:"rules"` Sorting struct { By string `json:"by"` Asc bool `json:"asc"` } `json:"sorting"` - Perm Permissions `json:"perm"` - Permissions Permissions `json:"permissions"` - Commands []string `json:"commands,omitempty"` - HideDotfiles bool `json:"hideDotfiles"` - DateFormat bool `json:"dateFormat"` -} - -type Permissions struct { - Admin bool `json:"admin"` - Execute bool `json:"execute"` - Create bool `json:"create"` - Rename bool `json:"rename"` - Modify bool `json:"modify"` - Delete bool `json:"delete"` - Share bool `json:"share"` - Download bool `json:"download"` + Perm users.Permissions `json:"perm"` + Permissions users.Permissions `json:"permissions"` + Commands []string `json:"commands,omitempty"` + HideDotfiles bool `json:"hideDotfiles"` + DateFormat bool `json:"dateFormat"` } diff --git a/backend/storage/bolt/users.go b/backend/storage/bolt/users.go index 64e9490d..4ec2ea76 100644 --- a/backend/storage/bolt/users.go +++ b/backend/storage/bolt/users.go @@ -8,6 +8,7 @@ import ( "github.com/gtsteffaniak/filebrowser/errors" "github.com/gtsteffaniak/filebrowser/users" + "github.com/gtsteffaniak/filebrowser/utils" ) type usersBackend struct { @@ -55,14 +56,24 @@ func (st usersBackend) Update(user *users.User, fields ...string) error { if len(fields) == 0 { return st.Save(user) } + + val := 
reflect.ValueOf(user).Elem() + for _, field := range fields { - userField := reflect.ValueOf(user).Elem().FieldByName(field) + // Capitalize the first letter (you can adjust this based on your field naming convention) + correctedField := utils.CapitalizeFirst(field) + + userField := val.FieldByName(correctedField) if !userField.IsValid() { return fmt.Errorf("invalid field: %s", field) } + if !userField.CanSet() { + return fmt.Errorf("cannot update unexported field: %s", field) + } + val := userField.Interface() - if err := st.db.UpdateField(user, field, val); err != nil { - return fmt.Errorf("Error updating user field: %s, error: %v", field, err.Error()) + if err := st.db.UpdateField(user, correctedField, val); err != nil { + return fmt.Errorf("Error updating user field: %s, error: %v", correctedField, err.Error()) } } return nil diff --git a/backend/storage/storage.go b/backend/storage/storage.go index 46431318..5a83a6d3 100644 --- a/backend/storage/storage.go +++ b/backend/storage/storage.go @@ -20,7 +20,7 @@ import ( // Storage is a storage powered by a Backend which makes the necessary // verifications when fetching and saving data to ensure consistency. type Storage struct { - Users users.Store + Users *users.Storage Share *share.Storage Auth *auth.Storage Settings *settings.Storage @@ -92,7 +92,7 @@ func quickSetup(store *Storage) { utils.CheckErr("store.Settings.Save", err) err = store.Settings.SaveServer(&settings.Config.Server) utils.CheckErr("store.Settings.SaveServer", err) - user := users.ApplyDefaults(users.User{}) + user := settings.ApplyUserDefaults(users.User{}) user.Username = settings.Config.Auth.AdminUsername user.Password = settings.Config.Auth.AdminPassword user.Perm.Admin = true @@ -111,7 +111,7 @@ func CreateUser(userInfo users.User, asAdmin bool) error { if userInfo.Username == "" || userInfo.Password == "" { return errors.ErrInvalidRequestParams } - newUser := users.ApplyDefaults(userInfo) + newUser := settings.ApplyUserDefaults(userInfo) if asAdmin { newUser.Perm = settings.AdminPerms() } diff --git a/backend/swagger/docs/docs.go b/backend/swagger/docs/docs.go new file mode 100644 index 00000000..b010ba4b --- /dev/null +++ b/backend/swagger/docs/docs.go @@ -0,0 +1,1602 @@ +// Package docs Code generated by swaggo/swag. DO NOT EDIT +package docs + +import "github.com/swaggo/swag" + +const docTemplate = `{ + "schemes": {{ marshal .Schemes }}, + "swagger": "2.0", + "info": { + "description": "{{escape .Description}}", + "title": "{{.Title}}", + "contact": {}, + "version": "{{.Version}}" + }, + "host": "{{.Host}}", + "basePath": "{{.BasePath}}", + "paths": { + "/api/preview": { + "get": { + "description": "Returns a preview image based on the requested path and size.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Resources" + ], + "summary": "Get image preview", + "parameters": [ + { + "type": "string", + "description": "File path of the image to preview", + "name": "path", + "in": "query", + "required": true + }, + { + "type": "string", + "description": "Preview size ('small' or 'large'). 
Default is based on server config.", + "name": "size", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Preview image content", + "schema": { + "type": "file" + } + }, + "202": { + "description": "Download permissions required", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "400": { + "description": "Invalid request path", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "404": { + "description": "File not found", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "415": { + "description": "Unsupported file type for preview", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "500": { + "description": "Internal server error", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/raw": { + "get": { + "description": "Returns the raw content of a file, multiple files, or a directory. Supports downloading files as archives in various formats.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Resources" + ], + "summary": "Get raw content of a file, multiple files, or directory", + "parameters": [ + { + "type": "string", + "description": "Path to the file or directory", + "name": "path", + "in": "query", + "required": true + }, + { + "type": "string", + "description": "Comma-separated list of specific files within the directory (optional)", + "name": "files", + "in": "query" + }, + { + "type": "boolean", + "description": "If true, sets 'Content-Disposition' to 'inline'. Otherwise, defaults to 'attachment'.", + "name": "inline", + "in": "query" + }, + { + "type": "string", + "description": "Compression algorithm for archiving multiple files or directories. Options: 'zip', 'tar', 'targz', 'tarbz2', 'tarxz', 'tarlz4', 'tarsz'. 
Default is 'zip'.", + "name": "algo", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Raw file or directory content, or archive for multiple files", + "schema": { + "type": "file" + } + }, + "202": { + "description": "Download permissions required", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "400": { + "description": "Invalid request path", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "404": { + "description": "File or directory not found", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "415": { + "description": "Unsupported file type for preview", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "500": { + "description": "Internal server error", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/resources": { + "get": { + "description": "Returns metadata and optionally file contents for a specified resource path.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Resources" + ], + "summary": "Get resource information", + "parameters": [ + { + "type": "string", + "description": "Path to the resource", + "name": "path", + "in": "query", + "required": true + }, + { + "type": "string", + "description": "Name for the desired source, default is used if not provided", + "name": "source", + "in": "query" + }, + { + "type": "string", + "description": "Include file content if true", + "name": "content", + "in": "query" + }, + { + "type": "string", + "description": "Optional checksum validation", + "name": "checksum", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Resource metadata", + "schema": { + "$ref": "#/definitions/files.FileInfo" + } + }, + "404": { + "description": "Resource not found", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "500": { + "description": "Internal server error", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + }, + "put": { + "description": "Updates an existing file at the specified path.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Resources" + ], + "summary": "Update a file resource", + "parameters": [ + { + "type": "string", + "description": "Path to the resource", + "name": "path", + "in": "query", + "required": true + }, + { + "type": "string", + "description": "Name for the desired source, default is used if not provided", + "name": "source", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Resource updated successfully" + }, + "403": { + "description": "Forbidden", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "404": { + "description": "Resource not found", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "405": { + "description": "Method not allowed", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "500": { + "description": "Internal server error", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + }, + "post": { + "description": "Creates a new resource or uploads a file at the specified path. 
Supports file uploads and directory creation.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Resources" + ], + "summary": "Create or upload a resource", + "parameters": [ + { + "type": "string", + "description": "Path to the resource", + "name": "path", + "in": "query", + "required": true + }, + { + "type": "string", + "description": "Name for the desired source, default is used if not provided", + "name": "source", + "in": "query" + }, + { + "type": "boolean", + "description": "Override existing file if true", + "name": "override", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Resource created successfully" + }, + "403": { + "description": "Forbidden", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "404": { + "description": "Resource not found", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "409": { + "description": "Conflict - Resource already exists", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "500": { + "description": "Internal server error", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + }, + "delete": { + "description": "Deletes a resource located at the specified path.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Resources" + ], + "summary": "Delete a resource", + "parameters": [ + { + "type": "string", + "description": "Path to the resource", + "name": "path", + "in": "query", + "required": true + }, + { + "type": "string", + "description": "Name for the desired source, default is used if not provided", + "name": "source", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Resource deleted successfully" + }, + "403": { + "description": "Forbidden", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "404": { + "description": "Resource not found", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "500": { + "description": "Internal server error", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + }, + "patch": { + "description": "Moves or renames a resource to a new destination.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Resources" + ], + "summary": "Patch resource (move/rename)", + "parameters": [ + { + "type": "string", + "description": "Path from resource", + "name": "from", + "in": "query", + "required": true + }, + { + "type": "string", + "description": "Destination path for the resource", + "name": "destination", + "in": "query", + "required": true + }, + { + "type": "string", + "description": "Action to perform (copy, rename)", + "name": "action", + "in": "query", + "required": true + }, + { + "type": "boolean", + "description": "Overwrite if destination exists", + "name": "overwrite", + "in": "query" + }, + { + "type": "boolean", + "description": "Rename if destination exists", + "name": "rename", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Resource moved/renamed successfully" + }, + "403": { + "description": "Forbidden", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "404": { + "description": "Resource not found", + "schema": { + "type": "object", + 
"additionalProperties": { + "type": "string" + } + } + }, + "409": { + "description": "Conflict - Destination exists", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "500": { + "description": "Internal server error", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/search": { + "get": { + "description": "Searches for files matching the provided query. Returns file paths and metadata based on the user's session and scope.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Search" + ], + "summary": "Search Files", + "parameters": [ + { + "type": "string", + "description": "Search query", + "name": "query", + "in": "query", + "required": true + }, + { + "type": "string", + "description": "path within user scope to search, for example '/first/second' to search within the second directory only", + "name": "scope", + "in": "query" + }, + { + "type": "string", + "description": "User session ID, add unique value to prevent collisions", + "name": "SessionId", + "in": "header" + } + ], + "responses": { + "200": { + "description": "List of search results", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/files.searchResult" + } + } + }, + "400": { + "description": "Bad Request", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/settings": { + "get": { + "description": "Returns the current configuration settings for signup, user directories, rules, frontend, and commands.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Settings" + ], + "summary": "Get system settings", + "responses": { + "200": { + "description": "System settings data", + "schema": { + "$ref": "#/definitions/http.settingsData" + } + } + } + }, + "put": { + "description": "Updates the system configuration settings for signup, user directories, rules, frontend, and commands.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Settings" + ], + "summary": "Update system settings", + "parameters": [ + { + "description": "Settings data to update", + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/http.settingsData" + } + } + ], + "responses": { + "200": { + "description": "Settings updated successfully" + }, + "400": { + "description": "Bad request - failed to decode body", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "500": { + "description": "Internal server error", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/share": { + "get": { + "description": "Retrieves all share links associated with a specific resource path for the current user.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Shares" + ], + "summary": "Get share links by path", + "parameters": [ + { + "type": "string", + "description": "Resource path for which to retrieve share links", + "name": "path", + "in": "query", + "required": true + } + ], + "responses": { + "200": { + "description": "List of share links for the specified path", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/share.Link" + } + } + }, + "500": { + "description": "Internal server error", + "schema": { + "type": "object", + 
"additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/shares": { + "get": { + "description": "Returns a list of share links for the current user, or all links if the user is an admin.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Shares" + ], + "summary": "List share links", + "responses": { + "200": { + "description": "List of share links", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/share.Link" + } + } + }, + "500": { + "description": "Internal server error", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + }, + "post": { + "description": "Creates a new share link with an optional expiration time and password protection.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Shares" + ], + "summary": "Create a share link", + "parameters": [ + { + "description": "Share link creation parameters", + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/share.CreateBody" + } + } + ], + "responses": { + "200": { + "description": "Created share link", + "schema": { + "$ref": "#/definitions/share.Link" + } + }, + "400": { + "description": "Bad request - failed to decode body", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "500": { + "description": "Internal server error", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/shares/{hash}": { + "delete": { + "description": "Deletes a share link specified by its hash.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Shares" + ], + "summary": "Delete a share link", + "parameters": [ + { + "type": "string", + "description": "Hash of the share link to delete", + "name": "hash", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "Share link deleted successfully" + }, + "400": { + "description": "Bad request - missing or invalid hash", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "500": { + "description": "Internal server error", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/usage": { + "get": { + "description": "Returns the total and used disk space for a specified directory.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Resources" + ], + "summary": "Get disk usage", + "parameters": [ + { + "type": "string", + "description": "Name for the desired source, default is used if not provided", + "name": "source", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Disk usage details", + "schema": { + "$ref": "#/definitions/http.DiskUsageResponse" + } + }, + "404": { + "description": "Directory not found", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "500": { + "description": "Internal server error", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/users": { + "post": { + "description": "Adds a new user to the system.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Users" + ], + "summary": "Create a new user", + "parameters": [ + { + "description": "User data to create a 
new user", + "name": "data", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/users.User" + } + } + ], + "responses": { + "201": { + "description": "Created user", + "schema": { + "$ref": "#/definitions/users.User" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/users/{id}": { + "get": { + "description": "Returns a user's details based on their ID.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Users" + ], + "summary": "Retrieve a user by ID", + "parameters": [ + { + "type": "integer", + "description": "User ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "User details", + "schema": { + "$ref": "#/definitions/users.User" + } + }, + "403": { + "description": "Forbidden", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "404": { + "description": "Not Found", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + }, + "put": { + "description": "Updates the details of a user identified by ID.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Users" + ], + "summary": "Update a user's details", + "parameters": [ + { + "type": "integer", + "description": "User ID", + "name": "id", + "in": "path", + "required": true + }, + { + "description": "User data to update", + "name": "data", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/users.User" + } + } + ], + "responses": { + "200": { + "description": "Updated user details", + "schema": { + "$ref": "#/definitions/users.User" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "403": { + "description": "Forbidden", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + }, + "delete": { + "description": "Deletes a user identified by their ID.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Users" + ], + "summary": "Delete a user by ID", + "parameters": [ + { + "type": "integer", + "description": "User ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "User deleted successfully" + }, + "403": { + "description": "Forbidden", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/health": { + "get": { + "description": "Returns the health status of the API.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Health" + ], + "summary": "Health Check", + "responses": { + "200": { + "description": "successful 
health check response", + "schema": { + "$ref": "#/definitions/http.HttpResponse" + } + } + } + } + } + }, + "definitions": { + "files.FileInfo": { + "type": "object", + "properties": { + "checksums": { + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "content": { + "type": "string" + }, + "isSymlink": { + "type": "boolean" + }, + "items": { + "type": "array", + "items": { + "$ref": "#/definitions/files.ReducedItem" + } + }, + "modified": { + "type": "string" + }, + "name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "size": { + "type": "integer" + }, + "subtitles": { + "type": "array", + "items": { + "type": "string" + } + }, + "token": { + "type": "string" + }, + "type": { + "type": "string" + } + } + }, + "files.ReducedItem": { + "type": "object", + "properties": { + "content": { + "type": "string" + }, + "modified": { + "type": "string" + }, + "name": { + "type": "string" + }, + "size": { + "type": "integer" + }, + "type": { + "type": "string" + } + } + }, + "files.searchResult": { + "type": "object", + "properties": { + "path": { + "type": "string" + }, + "size": { + "type": "integer" + }, + "type": { + "type": "string" + } + } + }, + "http.DiskUsageResponse": { + "type": "object", + "properties": { + "total": { + "type": "integer" + }, + "used": { + "type": "integer" + } + } + }, + "http.HttpResponse": { + "type": "object", + "properties": { + "message": { + "type": "string" + }, + "status": { + "type": "integer" + }, + "token": { + "type": "string" + } + } + }, + "http.settingsData": { + "type": "object", + "properties": { + "commands": { + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "createUserDir": { + "type": "boolean" + }, + "defaults": { + "$ref": "#/definitions/settings.UserDefaults" + }, + "frontend": { + "$ref": "#/definitions/settings.Frontend" + }, + "rules": { + "type": "array", + "items": { + "$ref": "#/definitions/users.Rule" + } + }, + "signup": { + "type": "boolean" + }, + "userHomeBasePath": { + "type": "string" + } + } + }, + "settings.Frontend": { + "type": "object", + "properties": { + "color": { + "type": "string" + }, + "disableExternal": { + "type": "boolean" + }, + "disableUsedPercentage": { + "type": "boolean" + }, + "files": { + "type": "string" + }, + "name": { + "type": "string" + } + } + }, + "settings.UserDefaults": { + "type": "object", + "properties": { + "commands": { + "type": "array", + "items": { + "type": "string" + } + }, + "darkMode": { + "type": "boolean" + }, + "dateFormat": { + "type": "boolean" + }, + "disableSettings": { + "type": "boolean" + }, + "gallerySize": { + "type": "integer" + }, + "hideDotfiles": { + "type": "boolean" + }, + "locale": { + "type": "string" + }, + "lockPassword": { + "type": "boolean" + }, + "perm": { + "$ref": "#/definitions/users.Permissions" + }, + "permissions": { + "$ref": "#/definitions/users.Permissions" + }, + "rules": { + "type": "array", + "items": { + "$ref": "#/definitions/users.Rule" + } + }, + "scope": { + "type": "string" + }, + "singleClick": { + "type": "boolean" + }, + "sorting": { + "type": "object", + "properties": { + "asc": { + "type": "boolean" + }, + "by": { + "type": "string" + } + } + }, + "stickySidebar": { + "type": "boolean" + }, + "viewMode": { + "type": "string" + } + } + }, + "share.CreateBody": { + "type": "object", + "properties": { + "expires": { + "type": "string" + }, + "password": { + "type": "string" + }, + "unit": { + "type": "string" + } + } + }, + 
"share.Link": { + "type": "object", + "properties": { + "expire": { + "type": "integer" + }, + "hash": { + "type": "string" + }, + "password_hash": { + "type": "string" + }, + "path": { + "type": "string" + }, + "token": { + "description": "Token is a random value that will only be set when PasswordHash is set. It is\nURL-Safe and is used to download links in password-protected shares via a\nquery arg.", + "type": "string" + }, + "userID": { + "type": "integer" + } + } + }, + "users.AuthToken": { + "type": "object", + "properties": { + "Permissions": { + "$ref": "#/definitions/users.Permissions" + }, + "belongsTo": { + "type": "integer" + }, + "createdAt": { + "type": "integer" + }, + "expiresAt": { + "type": "integer" + }, + "key": { + "type": "string" + }, + "name": { + "type": "string" + } + } + }, + "users.Permissions": { + "type": "object", + "properties": { + "admin": { + "type": "boolean" + }, + "api": { + "type": "boolean" + }, + "create": { + "type": "boolean" + }, + "delete": { + "type": "boolean" + }, + "download": { + "type": "boolean" + }, + "execute": { + "type": "boolean" + }, + "modify": { + "type": "boolean" + }, + "rename": { + "type": "boolean" + }, + "share": { + "type": "boolean" + } + } + }, + "users.Regexp": { + "type": "object", + "properties": { + "raw": { + "type": "string" + } + } + }, + "users.Rule": { + "type": "object", + "properties": { + "allow": { + "type": "boolean" + }, + "id": { + "type": "string" + }, + "path": { + "type": "string" + }, + "regex": { + "type": "boolean" + }, + "regexp": { + "$ref": "#/definitions/users.Regexp" + } + } + }, + "users.Sorting": { + "type": "object", + "properties": { + "asc": { + "type": "boolean" + }, + "by": { + "type": "string" + } + } + }, + "users.User": { + "type": "object", + "properties": { + "apiKeys": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/users.AuthToken" + } + }, + "commands": { + "type": "array", + "items": { + "type": "string" + } + }, + "darkMode": { + "type": "boolean" + }, + "dateFormat": { + "type": "boolean" + }, + "disableSettings": { + "type": "boolean" + }, + "gallerySize": { + "type": "integer" + }, + "hideDotfiles": { + "type": "boolean" + }, + "id": { + "type": "integer" + }, + "locale": { + "type": "string" + }, + "lockPassword": { + "type": "boolean" + }, + "password": { + "type": "string" + }, + "perm": { + "$ref": "#/definitions/users.Permissions" + }, + "rules": { + "type": "array", + "items": { + "$ref": "#/definitions/users.Rule" + } + }, + "scope": { + "type": "string" + }, + "singleClick": { + "type": "boolean" + }, + "sorting": { + "$ref": "#/definitions/users.Sorting" + }, + "stickySidebar": { + "type": "boolean" + }, + "username": { + "type": "string" + }, + "viewMode": { + "type": "string" + } + } + } + } +}` + +// SwaggerInfo holds exported Swagger Info so clients can modify it +var SwaggerInfo = &swag.Spec{ + Version: "", + Host: "", + BasePath: "", + Schemes: []string{}, + Title: "", + Description: "", + InfoInstanceName: "swagger", + SwaggerTemplate: docTemplate, + LeftDelim: "{{", + RightDelim: "}}", +} + diff --git a/backend/swagger/docs/swagger.json b/backend/swagger/docs/swagger.json new file mode 100644 index 00000000..6df9e4c5 --- /dev/null +++ b/backend/swagger/docs/swagger.json @@ -0,0 +1,1576 @@ +{ + "swagger": "2.0", + "info": { + "contact": {} + }, + "paths": { + "/api/preview": { + "get": { + "description": "Returns a preview image based on the requested path and size.", + "consumes": [ + "application/json" + ], + "produces": [ + 
"application/json" + ], + "tags": [ + "Resources" + ], + "summary": "Get image preview", + "parameters": [ + { + "type": "string", + "description": "File path of the image to preview", + "name": "path", + "in": "query", + "required": true + }, + { + "type": "string", + "description": "Preview size ('small' or 'large'). Default is based on server config.", + "name": "size", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Preview image content", + "schema": { + "type": "file" + } + }, + "202": { + "description": "Download permissions required", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "400": { + "description": "Invalid request path", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "404": { + "description": "File not found", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "415": { + "description": "Unsupported file type for preview", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "500": { + "description": "Internal server error", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/raw": { + "get": { + "description": "Returns the raw content of a file, multiple files, or a directory. Supports downloading files as archives in various formats.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Resources" + ], + "summary": "Get raw content of a file, multiple files, or directory", + "parameters": [ + { + "type": "string", + "description": "Path to the file or directory", + "name": "path", + "in": "query", + "required": true + }, + { + "type": "string", + "description": "Comma-separated list of specific files within the directory (optional)", + "name": "files", + "in": "query" + }, + { + "type": "boolean", + "description": "If true, sets 'Content-Disposition' to 'inline'. Otherwise, defaults to 'attachment'.", + "name": "inline", + "in": "query" + }, + { + "type": "string", + "description": "Compression algorithm for archiving multiple files or directories. Options: 'zip', 'tar', 'targz', 'tarbz2', 'tarxz', 'tarlz4', 'tarsz'. 
Default is 'zip'.", + "name": "algo", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Raw file or directory content, or archive for multiple files", + "schema": { + "type": "file" + } + }, + "202": { + "description": "Download permissions required", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "400": { + "description": "Invalid request path", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "404": { + "description": "File or directory not found", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "415": { + "description": "Unsupported file type for preview", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "500": { + "description": "Internal server error", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/resources": { + "get": { + "description": "Returns metadata and optionally file contents for a specified resource path.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Resources" + ], + "summary": "Get resource information", + "parameters": [ + { + "type": "string", + "description": "Path to the resource", + "name": "path", + "in": "query", + "required": true + }, + { + "type": "string", + "description": "Name for the desired source, default is used if not provided", + "name": "source", + "in": "query" + }, + { + "type": "string", + "description": "Include file content if true", + "name": "content", + "in": "query" + }, + { + "type": "string", + "description": "Optional checksum validation", + "name": "checksum", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Resource metadata", + "schema": { + "$ref": "#/definitions/files.FileInfo" + } + }, + "404": { + "description": "Resource not found", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "500": { + "description": "Internal server error", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + }, + "put": { + "description": "Updates an existing file at the specified path.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Resources" + ], + "summary": "Update a file resource", + "parameters": [ + { + "type": "string", + "description": "Path to the resource", + "name": "path", + "in": "query", + "required": true + }, + { + "type": "string", + "description": "Name for the desired source, default is used if not provided", + "name": "source", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Resource updated successfully" + }, + "403": { + "description": "Forbidden", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "404": { + "description": "Resource not found", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "405": { + "description": "Method not allowed", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "500": { + "description": "Internal server error", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + }, + "post": { + "description": "Creates a new resource or uploads a file at the specified path. 
Supports file uploads and directory creation.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Resources" + ], + "summary": "Create or upload a resource", + "parameters": [ + { + "type": "string", + "description": "Path to the resource", + "name": "path", + "in": "query", + "required": true + }, + { + "type": "string", + "description": "Name for the desired source, default is used if not provided", + "name": "source", + "in": "query" + }, + { + "type": "boolean", + "description": "Override existing file if true", + "name": "override", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Resource created successfully" + }, + "403": { + "description": "Forbidden", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "404": { + "description": "Resource not found", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "409": { + "description": "Conflict - Resource already exists", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "500": { + "description": "Internal server error", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + }, + "delete": { + "description": "Deletes a resource located at the specified path.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Resources" + ], + "summary": "Delete a resource", + "parameters": [ + { + "type": "string", + "description": "Path to the resource", + "name": "path", + "in": "query", + "required": true + }, + { + "type": "string", + "description": "Name for the desired source, default is used if not provided", + "name": "source", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Resource deleted successfully" + }, + "403": { + "description": "Forbidden", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "404": { + "description": "Resource not found", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "500": { + "description": "Internal server error", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + }, + "patch": { + "description": "Moves or renames a resource to a new destination.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Resources" + ], + "summary": "Patch resource (move/rename)", + "parameters": [ + { + "type": "string", + "description": "Path from resource", + "name": "from", + "in": "query", + "required": true + }, + { + "type": "string", + "description": "Destination path for the resource", + "name": "destination", + "in": "query", + "required": true + }, + { + "type": "string", + "description": "Action to perform (copy, rename)", + "name": "action", + "in": "query", + "required": true + }, + { + "type": "boolean", + "description": "Overwrite if destination exists", + "name": "overwrite", + "in": "query" + }, + { + "type": "boolean", + "description": "Rename if destination exists", + "name": "rename", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Resource moved/renamed successfully" + }, + "403": { + "description": "Forbidden", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "404": { + "description": "Resource not found", + "schema": { + "type": "object", + 
"additionalProperties": { + "type": "string" + } + } + }, + "409": { + "description": "Conflict - Destination exists", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "500": { + "description": "Internal server error", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/search": { + "get": { + "description": "Searches for files matching the provided query. Returns file paths and metadata based on the user's session and scope.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Search" + ], + "summary": "Search Files", + "parameters": [ + { + "type": "string", + "description": "Search query", + "name": "query", + "in": "query", + "required": true + }, + { + "type": "string", + "description": "path within user scope to search, for example '/first/second' to search within the second directory only", + "name": "scope", + "in": "query" + }, + { + "type": "string", + "description": "User session ID, add unique value to prevent collisions", + "name": "SessionId", + "in": "header" + } + ], + "responses": { + "200": { + "description": "List of search results", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/files.searchResult" + } + } + }, + "400": { + "description": "Bad Request", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/settings": { + "get": { + "description": "Returns the current configuration settings for signup, user directories, rules, frontend, and commands.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Settings" + ], + "summary": "Get system settings", + "responses": { + "200": { + "description": "System settings data", + "schema": { + "$ref": "#/definitions/http.settingsData" + } + } + } + }, + "put": { + "description": "Updates the system configuration settings for signup, user directories, rules, frontend, and commands.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Settings" + ], + "summary": "Update system settings", + "parameters": [ + { + "description": "Settings data to update", + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/http.settingsData" + } + } + ], + "responses": { + "200": { + "description": "Settings updated successfully" + }, + "400": { + "description": "Bad request - failed to decode body", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "500": { + "description": "Internal server error", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/share": { + "get": { + "description": "Retrieves all share links associated with a specific resource path for the current user.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Shares" + ], + "summary": "Get share links by path", + "parameters": [ + { + "type": "string", + "description": "Resource path for which to retrieve share links", + "name": "path", + "in": "query", + "required": true + } + ], + "responses": { + "200": { + "description": "List of share links for the specified path", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/share.Link" + } + } + }, + "500": { + "description": "Internal server error", + "schema": { + "type": "object", + 
"additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/shares": { + "get": { + "description": "Returns a list of share links for the current user, or all links if the user is an admin.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Shares" + ], + "summary": "List share links", + "responses": { + "200": { + "description": "List of share links", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/share.Link" + } + } + }, + "500": { + "description": "Internal server error", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + }, + "post": { + "description": "Creates a new share link with an optional expiration time and password protection.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Shares" + ], + "summary": "Create a share link", + "parameters": [ + { + "description": "Share link creation parameters", + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/share.CreateBody" + } + } + ], + "responses": { + "200": { + "description": "Created share link", + "schema": { + "$ref": "#/definitions/share.Link" + } + }, + "400": { + "description": "Bad request - failed to decode body", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "500": { + "description": "Internal server error", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/shares/{hash}": { + "delete": { + "description": "Deletes a share link specified by its hash.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Shares" + ], + "summary": "Delete a share link", + "parameters": [ + { + "type": "string", + "description": "Hash of the share link to delete", + "name": "hash", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "Share link deleted successfully" + }, + "400": { + "description": "Bad request - missing or invalid hash", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "500": { + "description": "Internal server error", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/usage": { + "get": { + "description": "Returns the total and used disk space for a specified directory.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Resources" + ], + "summary": "Get disk usage", + "parameters": [ + { + "type": "string", + "description": "Name for the desired source, default is used if not provided", + "name": "source", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Disk usage details", + "schema": { + "$ref": "#/definitions/http.DiskUsageResponse" + } + }, + "404": { + "description": "Directory not found", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "500": { + "description": "Internal server error", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/users": { + "post": { + "description": "Adds a new user to the system.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Users" + ], + "summary": "Create a new user", + "parameters": [ + { + "description": "User data to create a 
new user", + "name": "data", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/users.User" + } + } + ], + "responses": { + "201": { + "description": "Created user", + "schema": { + "$ref": "#/definitions/users.User" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/api/users/{id}": { + "get": { + "description": "Returns a user's details based on their ID.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Users" + ], + "summary": "Retrieve a user by ID", + "parameters": [ + { + "type": "integer", + "description": "User ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "User details", + "schema": { + "$ref": "#/definitions/users.User" + } + }, + "403": { + "description": "Forbidden", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "404": { + "description": "Not Found", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + }, + "put": { + "description": "Updates the details of a user identified by ID.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Users" + ], + "summary": "Update a user's details", + "parameters": [ + { + "type": "integer", + "description": "User ID", + "name": "id", + "in": "path", + "required": true + }, + { + "description": "User data to update", + "name": "data", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/users.User" + } + } + ], + "responses": { + "200": { + "description": "Updated user details", + "schema": { + "$ref": "#/definitions/users.User" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "403": { + "description": "Forbidden", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + }, + "delete": { + "description": "Deletes a user identified by their ID.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Users" + ], + "summary": "Delete a user by ID", + "parameters": [ + { + "type": "integer", + "description": "User ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "User deleted successfully" + }, + "403": { + "description": "Forbidden", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + } + } + }, + "/health": { + "get": { + "description": "Returns the health status of the API.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Health" + ], + "summary": "Health Check", + "responses": { + "200": { + "description": "successful 
health check response", + "schema": { + "$ref": "#/definitions/http.HttpResponse" + } + } + } + } + } + }, + "definitions": { + "files.FileInfo": { + "type": "object", + "properties": { + "checksums": { + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "content": { + "type": "string" + }, + "isSymlink": { + "type": "boolean" + }, + "items": { + "type": "array", + "items": { + "$ref": "#/definitions/files.ReducedItem" + } + }, + "modified": { + "type": "string" + }, + "name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "size": { + "type": "integer" + }, + "subtitles": { + "type": "array", + "items": { + "type": "string" + } + }, + "token": { + "type": "string" + }, + "type": { + "type": "string" + } + } + }, + "files.ReducedItem": { + "type": "object", + "properties": { + "content": { + "type": "string" + }, + "modified": { + "type": "string" + }, + "name": { + "type": "string" + }, + "size": { + "type": "integer" + }, + "type": { + "type": "string" + } + } + }, + "files.searchResult": { + "type": "object", + "properties": { + "path": { + "type": "string" + }, + "size": { + "type": "integer" + }, + "type": { + "type": "string" + } + } + }, + "http.DiskUsageResponse": { + "type": "object", + "properties": { + "total": { + "type": "integer" + }, + "used": { + "type": "integer" + } + } + }, + "http.HttpResponse": { + "type": "object", + "properties": { + "message": { + "type": "string" + }, + "status": { + "type": "integer" + }, + "token": { + "type": "string" + } + } + }, + "http.settingsData": { + "type": "object", + "properties": { + "commands": { + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "createUserDir": { + "type": "boolean" + }, + "defaults": { + "$ref": "#/definitions/settings.UserDefaults" + }, + "frontend": { + "$ref": "#/definitions/settings.Frontend" + }, + "rules": { + "type": "array", + "items": { + "$ref": "#/definitions/users.Rule" + } + }, + "signup": { + "type": "boolean" + }, + "userHomeBasePath": { + "type": "string" + } + } + }, + "settings.Frontend": { + "type": "object", + "properties": { + "color": { + "type": "string" + }, + "disableExternal": { + "type": "boolean" + }, + "disableUsedPercentage": { + "type": "boolean" + }, + "files": { + "type": "string" + }, + "name": { + "type": "string" + } + } + }, + "settings.UserDefaults": { + "type": "object", + "properties": { + "commands": { + "type": "array", + "items": { + "type": "string" + } + }, + "darkMode": { + "type": "boolean" + }, + "dateFormat": { + "type": "boolean" + }, + "disableSettings": { + "type": "boolean" + }, + "gallerySize": { + "type": "integer" + }, + "hideDotfiles": { + "type": "boolean" + }, + "locale": { + "type": "string" + }, + "lockPassword": { + "type": "boolean" + }, + "perm": { + "$ref": "#/definitions/users.Permissions" + }, + "permissions": { + "$ref": "#/definitions/users.Permissions" + }, + "rules": { + "type": "array", + "items": { + "$ref": "#/definitions/users.Rule" + } + }, + "scope": { + "type": "string" + }, + "singleClick": { + "type": "boolean" + }, + "sorting": { + "type": "object", + "properties": { + "asc": { + "type": "boolean" + }, + "by": { + "type": "string" + } + } + }, + "stickySidebar": { + "type": "boolean" + }, + "viewMode": { + "type": "string" + } + } + }, + "share.CreateBody": { + "type": "object", + "properties": { + "expires": { + "type": "string" + }, + "password": { + "type": "string" + }, + "unit": { + "type": "string" + } + } + }, + 
"share.Link": { + "type": "object", + "properties": { + "expire": { + "type": "integer" + }, + "hash": { + "type": "string" + }, + "password_hash": { + "type": "string" + }, + "path": { + "type": "string" + }, + "token": { + "description": "Token is a random value that will only be set when PasswordHash is set. It is\nURL-Safe and is used to download links in password-protected shares via a\nquery arg.", + "type": "string" + }, + "userID": { + "type": "integer" + } + } + }, + "users.AuthToken": { + "type": "object", + "properties": { + "Permissions": { + "$ref": "#/definitions/users.Permissions" + }, + "belongsTo": { + "type": "integer" + }, + "createdAt": { + "type": "integer" + }, + "expiresAt": { + "type": "integer" + }, + "key": { + "type": "string" + }, + "name": { + "type": "string" + } + } + }, + "users.Permissions": { + "type": "object", + "properties": { + "admin": { + "type": "boolean" + }, + "api": { + "type": "boolean" + }, + "create": { + "type": "boolean" + }, + "delete": { + "type": "boolean" + }, + "download": { + "type": "boolean" + }, + "execute": { + "type": "boolean" + }, + "modify": { + "type": "boolean" + }, + "rename": { + "type": "boolean" + }, + "share": { + "type": "boolean" + } + } + }, + "users.Regexp": { + "type": "object", + "properties": { + "raw": { + "type": "string" + } + } + }, + "users.Rule": { + "type": "object", + "properties": { + "allow": { + "type": "boolean" + }, + "id": { + "type": "string" + }, + "path": { + "type": "string" + }, + "regex": { + "type": "boolean" + }, + "regexp": { + "$ref": "#/definitions/users.Regexp" + } + } + }, + "users.Sorting": { + "type": "object", + "properties": { + "asc": { + "type": "boolean" + }, + "by": { + "type": "string" + } + } + }, + "users.User": { + "type": "object", + "properties": { + "apiKeys": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/users.AuthToken" + } + }, + "commands": { + "type": "array", + "items": { + "type": "string" + } + }, + "darkMode": { + "type": "boolean" + }, + "dateFormat": { + "type": "boolean" + }, + "disableSettings": { + "type": "boolean" + }, + "gallerySize": { + "type": "integer" + }, + "hideDotfiles": { + "type": "boolean" + }, + "id": { + "type": "integer" + }, + "locale": { + "type": "string" + }, + "lockPassword": { + "type": "boolean" + }, + "password": { + "type": "string" + }, + "perm": { + "$ref": "#/definitions/users.Permissions" + }, + "rules": { + "type": "array", + "items": { + "$ref": "#/definitions/users.Rule" + } + }, + "scope": { + "type": "string" + }, + "singleClick": { + "type": "boolean" + }, + "sorting": { + "$ref": "#/definitions/users.Sorting" + }, + "stickySidebar": { + "type": "boolean" + }, + "username": { + "type": "string" + }, + "viewMode": { + "type": "string" + } + } + } + } +} \ No newline at end of file diff --git a/backend/swagger/docs/swagger.yaml b/backend/swagger/docs/swagger.yaml new file mode 100644 index 00000000..bb26c052 --- /dev/null +++ b/backend/swagger/docs/swagger.yaml @@ -0,0 +1,1056 @@ +definitions: + files.FileInfo: + properties: + checksums: + additionalProperties: + type: string + type: object + content: + type: string + isSymlink: + type: boolean + items: + items: + $ref: '#/definitions/files.ReducedItem' + type: array + modified: + type: string + name: + type: string + path: + type: string + size: + type: integer + subtitles: + items: + type: string + type: array + token: + type: string + type: + type: string + type: object + files.ReducedItem: + properties: + content: + type: string + modified: + 
type: string + name: + type: string + size: + type: integer + type: + type: string + type: object + files.searchResult: + properties: + path: + type: string + size: + type: integer + type: + type: string + type: object + http.DiskUsageResponse: + properties: + total: + type: integer + used: + type: integer + type: object + http.HttpResponse: + properties: + message: + type: string + status: + type: integer + token: + type: string + type: object + http.settingsData: + properties: + commands: + additionalProperties: + items: + type: string + type: array + type: object + createUserDir: + type: boolean + defaults: + $ref: '#/definitions/settings.UserDefaults' + frontend: + $ref: '#/definitions/settings.Frontend' + rules: + items: + $ref: '#/definitions/users.Rule' + type: array + signup: + type: boolean + userHomeBasePath: + type: string + type: object + settings.Frontend: + properties: + color: + type: string + disableExternal: + type: boolean + disableUsedPercentage: + type: boolean + files: + type: string + name: + type: string + type: object + settings.UserDefaults: + properties: + commands: + items: + type: string + type: array + darkMode: + type: boolean + dateFormat: + type: boolean + disableSettings: + type: boolean + gallerySize: + type: integer + hideDotfiles: + type: boolean + locale: + type: string + lockPassword: + type: boolean + perm: + $ref: '#/definitions/users.Permissions' + permissions: + $ref: '#/definitions/users.Permissions' + rules: + items: + $ref: '#/definitions/users.Rule' + type: array + scope: + type: string + singleClick: + type: boolean + sorting: + properties: + asc: + type: boolean + by: + type: string + type: object + stickySidebar: + type: boolean + viewMode: + type: string + type: object + share.CreateBody: + properties: + expires: + type: string + password: + type: string + unit: + type: string + type: object + share.Link: + properties: + expire: + type: integer + hash: + type: string + password_hash: + type: string + path: + type: string + token: + description: |- + Token is a random value that will only be set when PasswordHash is set. It is + URL-Safe and is used to download links in password-protected shares via a + query arg. 
+ type: string + userID: + type: integer + type: object + users.AuthToken: + properties: + Permissions: + $ref: '#/definitions/users.Permissions' + belongsTo: + type: integer + createdAt: + type: integer + expiresAt: + type: integer + key: + type: string + name: + type: string + type: object + users.Permissions: + properties: + admin: + type: boolean + api: + type: boolean + create: + type: boolean + delete: + type: boolean + download: + type: boolean + execute: + type: boolean + modify: + type: boolean + rename: + type: boolean + share: + type: boolean + type: object + users.Regexp: + properties: + raw: + type: string + type: object + users.Rule: + properties: + allow: + type: boolean + id: + type: string + path: + type: string + regex: + type: boolean + regexp: + $ref: '#/definitions/users.Regexp' + type: object + users.Sorting: + properties: + asc: + type: boolean + by: + type: string + type: object + users.User: + properties: + apiKeys: + additionalProperties: + $ref: '#/definitions/users.AuthToken' + type: object + commands: + items: + type: string + type: array + darkMode: + type: boolean + dateFormat: + type: boolean + disableSettings: + type: boolean + gallerySize: + type: integer + hideDotfiles: + type: boolean + id: + type: integer + locale: + type: string + lockPassword: + type: boolean + password: + type: string + perm: + $ref: '#/definitions/users.Permissions' + rules: + items: + $ref: '#/definitions/users.Rule' + type: array + scope: + type: string + singleClick: + type: boolean + sorting: + $ref: '#/definitions/users.Sorting' + stickySidebar: + type: boolean + username: + type: string + viewMode: + type: string + type: object +info: + contact: {} +paths: + /api/preview: + get: + consumes: + - application/json + description: Returns a preview image based on the requested path and size. + parameters: + - description: File path of the image to preview + in: query + name: path + required: true + type: string + - description: Preview size ('small' or 'large'). Default is based on server + config. + in: query + name: size + type: string + produces: + - application/json + responses: + "200": + description: Preview image content + schema: + type: file + "202": + description: Download permissions required + schema: + additionalProperties: + type: string + type: object + "400": + description: Invalid request path + schema: + additionalProperties: + type: string + type: object + "404": + description: File not found + schema: + additionalProperties: + type: string + type: object + "415": + description: Unsupported file type for preview + schema: + additionalProperties: + type: string + type: object + "500": + description: Internal server error + schema: + additionalProperties: + type: string + type: object + summary: Get image preview + tags: + - Resources + /api/raw: + get: + consumes: + - application/json + description: Returns the raw content of a file, multiple files, or a directory. + Supports downloading files as archives in various formats. + parameters: + - description: Path to the file or directory + in: query + name: path + required: true + type: string + - description: Comma-separated list of specific files within the directory (optional) + in: query + name: files + type: string + - description: If true, sets 'Content-Disposition' to 'inline'. Otherwise, defaults + to 'attachment'. + in: query + name: inline + type: boolean + - description: 'Compression algorithm for archiving multiple files or directories. 
+ Options: ''zip'', ''tar'', ''targz'', ''tarbz2'', ''tarxz'', ''tarlz4'', + ''tarsz''. Default is ''zip''.' + in: query + name: algo + type: string + produces: + - application/json + responses: + "200": + description: Raw file or directory content, or archive for multiple files + schema: + type: file + "202": + description: Download permissions required + schema: + additionalProperties: + type: string + type: object + "400": + description: Invalid request path + schema: + additionalProperties: + type: string + type: object + "404": + description: File or directory not found + schema: + additionalProperties: + type: string + type: object + "415": + description: Unsupported file type for preview + schema: + additionalProperties: + type: string + type: object + "500": + description: Internal server error + schema: + additionalProperties: + type: string + type: object + summary: Get raw content of a file, multiple files, or directory + tags: + - Resources + /api/resources: + delete: + consumes: + - application/json + description: Deletes a resource located at the specified path. + parameters: + - description: Path to the resource + in: query + name: path + required: true + type: string + - description: Name for the desired source, default is used if not provided + in: query + name: source + type: string + produces: + - application/json + responses: + "200": + description: Resource deleted successfully + "403": + description: Forbidden + schema: + additionalProperties: + type: string + type: object + "404": + description: Resource not found + schema: + additionalProperties: + type: string + type: object + "500": + description: Internal server error + schema: + additionalProperties: + type: string + type: object + summary: Delete a resource + tags: + - Resources + get: + consumes: + - application/json + description: Returns metadata and optionally file contents for a specified resource + path. + parameters: + - description: Path to the resource + in: query + name: path + required: true + type: string + - description: Name for the desired source, default is used if not provided + in: query + name: source + type: string + - description: Include file content if true + in: query + name: content + type: string + - description: Optional checksum validation + in: query + name: checksum + type: string + produces: + - application/json + responses: + "200": + description: Resource metadata + schema: + $ref: '#/definitions/files.FileInfo' + "404": + description: Resource not found + schema: + additionalProperties: + type: string + type: object + "500": + description: Internal server error + schema: + additionalProperties: + type: string + type: object + summary: Get resource information + tags: + - Resources + patch: + consumes: + - application/json + description: Moves or renames a resource to a new destination. 
+ parameters: + - description: Path from resource + in: query + name: from + required: true + type: string + - description: Destination path for the resource + in: query + name: destination + required: true + type: string + - description: Action to perform (copy, rename) + in: query + name: action + required: true + type: string + - description: Overwrite if destination exists + in: query + name: overwrite + type: boolean + - description: Rename if destination exists + in: query + name: rename + type: boolean + produces: + - application/json + responses: + "200": + description: Resource moved/renamed successfully + "403": + description: Forbidden + schema: + additionalProperties: + type: string + type: object + "404": + description: Resource not found + schema: + additionalProperties: + type: string + type: object + "409": + description: Conflict - Destination exists + schema: + additionalProperties: + type: string + type: object + "500": + description: Internal server error + schema: + additionalProperties: + type: string + type: object + summary: Patch resource (move/rename) + tags: + - Resources + post: + consumes: + - application/json + description: Creates a new resource or uploads a file at the specified path. + Supports file uploads and directory creation. + parameters: + - description: Path to the resource + in: query + name: path + required: true + type: string + - description: Name for the desired source, default is used if not provided + in: query + name: source + type: string + - description: Override existing file if true + in: query + name: override + type: boolean + produces: + - application/json + responses: + "200": + description: Resource created successfully + "403": + description: Forbidden + schema: + additionalProperties: + type: string + type: object + "404": + description: Resource not found + schema: + additionalProperties: + type: string + type: object + "409": + description: Conflict - Resource already exists + schema: + additionalProperties: + type: string + type: object + "500": + description: Internal server error + schema: + additionalProperties: + type: string + type: object + summary: Create or upload a resource + tags: + - Resources + put: + consumes: + - application/json + description: Updates an existing file at the specified path. + parameters: + - description: Path to the resource + in: query + name: path + required: true + type: string + - description: Name for the desired source, default is used if not provided + in: query + name: source + type: string + produces: + - application/json + responses: + "200": + description: Resource updated successfully + "403": + description: Forbidden + schema: + additionalProperties: + type: string + type: object + "404": + description: Resource not found + schema: + additionalProperties: + type: string + type: object + "405": + description: Method not allowed + schema: + additionalProperties: + type: string + type: object + "500": + description: Internal server error + schema: + additionalProperties: + type: string + type: object + summary: Update a file resource + tags: + - Resources + /api/search: + get: + consumes: + - application/json + description: Searches for files matching the provided query. Returns file paths + and metadata based on the user's session and scope. 
+ parameters: + - description: Search query + in: query + name: query + required: true + type: string + - description: path within user scope to search, for example '/first/second' + to search within the second directory only + in: query + name: scope + type: string + - description: User session ID, add unique value to prevent collisions + in: header + name: SessionId + type: string + produces: + - application/json + responses: + "200": + description: List of search results + schema: + items: + $ref: '#/definitions/files.searchResult' + type: array + "400": + description: Bad Request + schema: + additionalProperties: + type: string + type: object + summary: Search Files + tags: + - Search + /api/settings: + get: + consumes: + - application/json + description: Returns the current configuration settings for signup, user directories, + rules, frontend, and commands. + produces: + - application/json + responses: + "200": + description: System settings data + schema: + $ref: '#/definitions/http.settingsData' + summary: Get system settings + tags: + - Settings + put: + consumes: + - application/json + description: Updates the system configuration settings for signup, user directories, + rules, frontend, and commands. + parameters: + - description: Settings data to update + in: body + name: body + required: true + schema: + $ref: '#/definitions/http.settingsData' + produces: + - application/json + responses: + "200": + description: Settings updated successfully + "400": + description: Bad request - failed to decode body + schema: + additionalProperties: + type: string + type: object + "500": + description: Internal server error + schema: + additionalProperties: + type: string + type: object + summary: Update system settings + tags: + - Settings + /api/share: + get: + consumes: + - application/json + description: Retrieves all share links associated with a specific resource path + for the current user. + parameters: + - description: Resource path for which to retrieve share links + in: query + name: path + required: true + type: string + produces: + - application/json + responses: + "200": + description: List of share links for the specified path + schema: + items: + $ref: '#/definitions/share.Link' + type: array + "500": + description: Internal server error + schema: + additionalProperties: + type: string + type: object + summary: Get share links by path + tags: + - Shares + /api/shares: + get: + consumes: + - application/json + description: Returns a list of share links for the current user, or all links + if the user is an admin. + produces: + - application/json + responses: + "200": + description: List of share links + schema: + items: + $ref: '#/definitions/share.Link' + type: array + "500": + description: Internal server error + schema: + additionalProperties: + type: string + type: object + summary: List share links + tags: + - Shares + post: + consumes: + - application/json + description: Creates a new share link with an optional expiration time and password + protection. 
+ parameters: + - description: Share link creation parameters + in: body + name: body + required: true + schema: + $ref: '#/definitions/share.CreateBody' + produces: + - application/json + responses: + "200": + description: Created share link + schema: + $ref: '#/definitions/share.Link' + "400": + description: Bad request - failed to decode body + schema: + additionalProperties: + type: string + type: object + "500": + description: Internal server error + schema: + additionalProperties: + type: string + type: object + summary: Create a share link + tags: + - Shares + /api/shares/{hash}: + delete: + consumes: + - application/json + description: Deletes a share link specified by its hash. + parameters: + - description: Hash of the share link to delete + in: path + name: hash + required: true + type: string + produces: + - application/json + responses: + "200": + description: Share link deleted successfully + "400": + description: Bad request - missing or invalid hash + schema: + additionalProperties: + type: string + type: object + "500": + description: Internal server error + schema: + additionalProperties: + type: string + type: object + summary: Delete a share link + tags: + - Shares + /api/usage: + get: + consumes: + - application/json + description: Returns the total and used disk space for a specified directory. + parameters: + - description: Name for the desired source, default is used if not provided + in: query + name: source + type: string + produces: + - application/json + responses: + "200": + description: Disk usage details + schema: + $ref: '#/definitions/http.DiskUsageResponse' + "404": + description: Directory not found + schema: + additionalProperties: + type: string + type: object + "500": + description: Internal server error + schema: + additionalProperties: + type: string + type: object + summary: Get disk usage + tags: + - Resources + /api/users: + post: + consumes: + - application/json + description: Adds a new user to the system. + parameters: + - description: User data to create a new user + in: body + name: data + required: true + schema: + $ref: '#/definitions/users.User' + produces: + - application/json + responses: + "201": + description: Created user + schema: + $ref: '#/definitions/users.User' + "400": + description: Bad Request + schema: + additionalProperties: + type: string + type: object + "500": + description: Internal Server Error + schema: + additionalProperties: + type: string + type: object + summary: Create a new user + tags: + - Users + /api/users/{id}: + delete: + consumes: + - application/json + description: Deletes a user identified by their ID. + parameters: + - description: User ID + in: path + name: id + required: true + type: integer + produces: + - application/json + responses: + "200": + description: User deleted successfully + "403": + description: Forbidden + schema: + additionalProperties: + type: string + type: object + "500": + description: Internal Server Error + schema: + additionalProperties: + type: string + type: object + summary: Delete a user by ID + tags: + - Users + get: + consumes: + - application/json + description: Returns a user's details based on their ID. 
+ parameters: + - description: User ID + in: path + name: id + required: true + type: integer + produces: + - application/json + responses: + "200": + description: User details + schema: + $ref: '#/definitions/users.User' + "403": + description: Forbidden + schema: + additionalProperties: + type: string + type: object + "404": + description: Not Found + schema: + additionalProperties: + type: string + type: object + "500": + description: Internal Server Error + schema: + additionalProperties: + type: string + type: object + summary: Retrieve a user by ID + tags: + - Users + put: + consumes: + - application/json + description: Updates the details of a user identified by ID. + parameters: + - description: User ID + in: path + name: id + required: true + type: integer + - description: User data to update + in: body + name: data + required: true + schema: + $ref: '#/definitions/users.User' + produces: + - application/json + responses: + "200": + description: Updated user details + schema: + $ref: '#/definitions/users.User' + "400": + description: Bad Request + schema: + additionalProperties: + type: string + type: object + "403": + description: Forbidden + schema: + additionalProperties: + type: string + type: object + "500": + description: Internal Server Error + schema: + additionalProperties: + type: string + type: object + summary: Update a user's details + tags: + - Users + /health: + get: + consumes: + - application/json + description: Returns the health status of the API. + produces: + - application/json + responses: + "200": + description: successful health check response + schema: + $ref: '#/definitions/http.HttpResponse' + summary: Health Check + tags: + - Health +swagger: "2.0" diff --git a/backend/rules/rules.go b/backend/users/rules.go similarity index 83% rename from backend/rules/rules.go rename to backend/users/rules.go index 12d1fc15..7b7616e4 100644 --- a/backend/rules/rules.go +++ b/backend/users/rules.go @@ -1,4 +1,4 @@ -package rules +package users import ( "path/filepath" @@ -11,6 +11,18 @@ type Checker interface { Check(path string) bool } +// Check implements rules.Checker. +func (user *User) Check(path string) bool { + allow := true + for _, rule := range user.Rules { + if rule.Matches(path) { + allow = rule.Allow + } + } + + return allow +} + // Rule is a allow/disallow rule. type Rule struct { Regex bool `json:"regex"` diff --git a/backend/users/storage.go b/backend/users/storage.go index 01439089..9b9029bb 100644 --- a/backend/users/storage.go +++ b/backend/users/storage.go @@ -5,7 +5,6 @@ import ( "time" "github.com/gtsteffaniak/filebrowser/errors" - "github.com/gtsteffaniak/filebrowser/rules" ) // StorageBackend is the interface to implement for a users storage. @@ -26,7 +25,9 @@ type Store interface { Save(user *User) error Delete(id interface{}) error LastUpdate(id uint) int64 - AddRule(username string, rule rules.Rule) error + AddApiKey(username uint, name string, key AuthToken) error + DeleteApiKey(username uint, name string) error + AddRule(username string, rule Rule) error DeleteRule(username string, ruleID string) error } @@ -79,7 +80,7 @@ func (s *Storage) Update(user *User, fields ...string) error { } // AddRule adds a rule to the user's rules list and updates the user in the database. 
-func (s *Storage) AddRule(userID string, rule rules.Rule) error { +func (s *Storage) AddRule(userID string, rule Rule) error { user, err := s.Get("", userID) if err != nil { return err @@ -95,6 +96,42 @@ func (s *Storage) AddRule(userID string, rule rules.Rule) error { return nil } +func (s *Storage) AddApiKey(userID uint, name string, key AuthToken) error { + user, err := s.Get("", userID) + if err != nil { + return err + } + // Initialize the ApiKeys map if it is nil + if user.ApiKeys == nil { + user.ApiKeys = make(map[string]AuthToken) + } + user.ApiKeys[name] = key + err = s.Update(user, "ApiKeys") + if err != nil { + return err + } + + return nil +} + +func (s *Storage) DeleteApiKey(userID uint, name string) error { + user, err := s.Get("", userID) + if err != nil { + return err + } + // Initialize the ApiKeys map if it is nil + if user.ApiKeys == nil { + user.ApiKeys = make(map[string]AuthToken) + } + delete(user.ApiKeys, name) + err = s.Update(user, "ApiKeys") + if err != nil { + return err + } + + return nil +} + // DeleteRule deletes a rule specified by ID from the user's rules list and updates the user in the database. func (s *Storage) DeleteRule(userID string, ruleID string) error { user, err := s.Get("", userID) @@ -103,7 +140,7 @@ func (s *Storage) DeleteRule(userID string, ruleID string) error { } // Find and remove the rule with the specified ID - var updatedRules []rules.Rule + var updatedRules []Rule for _, r := range user.Rules { if r.Id != ruleID { updatedRules = append(updatedRules, r) diff --git a/backend/users/users.go b/backend/users/users.go index 2bcd378e..49fb335c 100644 --- a/backend/users/users.go +++ b/backend/users/users.go @@ -3,10 +3,31 @@ package users import ( "regexp" - "github.com/gtsteffaniak/filebrowser/rules" - "github.com/gtsteffaniak/filebrowser/settings" + "github.com/golang-jwt/jwt/v4" ) +type AuthToken struct { + Key string `json:"key"` + Name string `json:"name"` + Created int64 `json:"createdAt"` + Expires int64 `json:"expiresAt"` + BelongsTo uint `json:"belongsTo"` + Permissions Permissions `json:"Permissions"` + jwt.RegisteredClaims `json:"-"` +} + +type Permissions struct { + Api bool `json:"api"` + Admin bool `json:"admin"` + Execute bool `json:"execute"` + Create bool `json:"create"` + Rename bool `json:"rename"` + Modify bool `json:"modify"` + Delete bool `json:"delete"` + Share bool `json:"share"` + Download bool `json:"download"` +} + // SortingSettings represents the sorting settings. 
type Sorting struct { By string `json:"by"` @@ -20,16 +41,17 @@ type User struct { DisableSettings bool `json:"disableSettings"` ID uint `storm:"id,increment" json:"id"` Username string `storm:"unique" json:"username"` - Password string `json:"password"` + Password string `json:"password,omitempty"` Scope string `json:"scope"` Locale string `json:"locale"` LockPassword bool `json:"lockPassword"` ViewMode string `json:"viewMode"` SingleClick bool `json:"singleClick"` - Perm settings.Permissions `json:"perm"` - Commands []string `json:"commands"` Sorting Sorting `json:"sorting"` - Rules []rules.Rule `json:"rules"` + Perm Permissions `json:"perm"` + Commands []string `json:"commands"` + Rules []Rule `json:"rules"` + ApiKeys map[string]AuthToken `json:"apiKeys,omitempty"` HideDotfiles bool `json:"hideDotfiles"` DateFormat bool `json:"dateFormat"` GallerySize int `json:"gallerySize"` @@ -41,19 +63,20 @@ var PublicUser = User{ Scope: "./", ViewMode: "normal", LockPassword: true, - Perm: settings.Permissions{ + Perm: Permissions{ Create: false, Rename: false, Modify: false, Delete: false, - Share: true, + Share: false, Download: true, Admin: false, + Api: false, }, } // GetRules implements rules.Provider. -func (u *User) GetRules() []rules.Rule { +func (u *User) GetRules() []Rule { return u.Rules } @@ -71,20 +94,3 @@ func (u *User) CanExecute(command string) bool { return false } - -// Apply applies the default options to a user. -func ApplyDefaults(u User) User { - u.StickySidebar = settings.Config.UserDefaults.StickySidebar - u.DisableSettings = settings.Config.UserDefaults.DisableSettings - u.DarkMode = settings.Config.UserDefaults.DarkMode - u.Scope = settings.Config.UserDefaults.Scope - u.Locale = settings.Config.UserDefaults.Locale - u.ViewMode = settings.Config.UserDefaults.ViewMode - u.SingleClick = settings.Config.UserDefaults.SingleClick - u.Perm = settings.Config.UserDefaults.Perm - u.Sorting = settings.Config.UserDefaults.Sorting - u.Commands = settings.Config.UserDefaults.Commands - u.HideDotfiles = settings.Config.UserDefaults.HideDotfiles - u.DateFormat = settings.Config.UserDefaults.DateFormat - return u -} diff --git a/backend/utils/main.go b/backend/utils/main.go index 3c44aff4..600b17ac 100644 --- a/backend/utils/main.go +++ b/backend/utils/main.go @@ -1,9 +1,13 @@ package utils import ( + "crypto/rand" + "fmt" "log" - - "github.com/gtsteffaniak/filebrowser/settings" + math "math/rand" + "reflect" + "strings" + "time" ) func CheckErr(source string, err error) { @@ -13,7 +17,55 @@ func CheckErr(source string, err error) { } func GenerateKey() []byte { - k, err := settings.GenerateKey() - CheckErr("generateKey", err) - return k + b := make([]byte, 64) + _, err := rand.Read(b) + if err != nil { + return nil + } + return b +} + +// CapitalizeFirst returns the input string with the first letter capitalized. 
+func CapitalizeFirst(s string) string { + if len(s) == 0 { + return s // Return the empty string as is + } + return strings.ToUpper(string(s[0])) + s[1:] +} + +func GenerateRandomHash(length int) string { + const charset = "abcdefghijklmnopqrstuvwxyz0123456789" + rng := math.New(math.NewSource(time.Now().UnixNano())) + result := make([]byte, length) + for i := range result { + result[i] = charset[rng.Intn(len(charset))] + } + return string(result) +} + +func PrintStructFields(v interface{}) { + val := reflect.ValueOf(v) + typ := reflect.TypeOf(v) + + // Ensure the input is a struct + if val.Kind() != reflect.Struct { + fmt.Println("Provided value is not a struct") + return + } + + // Iterate over the fields of the struct + for i := 0; i < val.NumField(); i++ { + field := val.Field(i) + fieldType := typ.Field(i) + + // Convert field value to string, if possible + fieldValue := fmt.Sprintf("%v", field.Interface()) + + // Limit to 100 characters + if len(fieldValue) > 100 { + fieldValue = fieldValue[:100] + "..." + } + + fmt.Printf("Field: %s, %s\n", fieldType.Name, fieldValue) + } } diff --git a/docs/configuration.md b/docs/configuration.md index 6060dd84..33560b32 100644 --- a/docs/configuration.md +++ b/docs/configuration.md @@ -220,6 +220,7 @@ userDefaults: - `download`: This boolean value determines whether download permissions are granted. + - `api`: This boolean value determines whether the user can create and manage API keys. - `hideDotfiles`: This boolean value determines whether dotfiles are hidden. (`true` or `false`) diff --git a/docs/getting_started.md b/docs/getting_started.md deleted file mode 100644 index eb6de3c9..00000000 --- a/docs/getting_started.md +++ /dev/null @@ -1,2 +0,0 @@ -# Getting Started using FileBrowser Quantum - diff --git a/docs/migration.md b/docs/migration.md index 45a5ffa1..1da2a228 100644 --- a/docs/migration.md +++ b/docs/migration.md @@ -19,4 +19,7 @@ Note: share links will not work and will need to be re-created after migration. The filebrowser Quantum application should run with the same user and rules that you have from the original. But keep in mind the differences that may not work -the same way, but all user configuration should be available. \ No newline at end of file +the same way, but all user configuration should be available. + +The Windows binary is particularly untested; I would advise using Docker if testing on Windows. + diff --git a/docs/roadmap.md b/docs/roadmap.md index bb25f5fd..baf81a51 100644 --- a/docs/roadmap.md +++ b/docs/roadmap.md @@ -1,22 +1,21 @@ # Planned Roadmap -upcoming 0.2.x releases: +upcoming 0.3.x releases: -- Replace http routes for gorilla/mux with stdlib - Theme configuration from settings -- File syncronization improvements +- File synchronization improvements - more filetype previews - -next major 0.3.0 release : - -- multiple sources https://github.com/filebrowser/filebrowser/issues/2514 - introduce jobs as replacement to runners. -- Add Job status to the sidebar + - Add Job status to the sidebar - index status. - Job status from users - upload status +- OpenTelemetry metrics +- simple search/filter for current listings. +- Enable mobile search with the same features as desktop Unplanned Future releases: + - multiple sources https://github.com/filebrowser/filebrowser/issues/2514 - Add tools to sidebar - duplicate file detector.
- bulk rename https://github.com/filebrowser/filebrowser/issues/2473 diff --git a/frontend/package.json b/frontend/package.json index c8731b0c..220c6ac3 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -9,7 +9,7 @@ }, "scripts": { "dev": "vite dev", - "build": "vite build && cp -R dist/ ../backend/cmd/", + "build": "vite build && cp -r dist/* ../backend/http/embed", "build-docker": "vite build", "watch": "vite build --watch", "typecheck": "vue-tsc -p ./tsconfig.json --noEmit", diff --git a/frontend/public/index.html b/frontend/public/index.html index e973325a..7ce7b913 100644 --- a/frontend/public/index.html +++ b/frontend/public/index.html @@ -4,32 +4,30 @@ + {{ if .ReCaptcha }} + + {{ end }} - [{[ if .ReCaptcha -]}] - - [{[ end ]}] + {{ if .Name }}{{ .Name }}{{ else }}FileBrowser Quantum{{ end }} - [{[ if .Name -]}][{[ .Name ]}][{[ else ]}]FileBrowser Quantum[{[ end ]}] - - + - + - - + - - + + - [{[ if .CSS -]}] - - [{[ end ]}] + {{ if .CSS }} + + {{ end }} diff --git a/frontend/public/manifest.json b/frontend/public/manifest.json index 1686a426..5ea16b9a 100644 --- a/frontend/public/manifest.json +++ b/frontend/public/manifest.json @@ -13,7 +13,7 @@ "type": "image/png" } ], - "start_url": "/", + "start_url": "./", "display": "standalone", "background_color": "#ffffff", "theme_color": "#455a64" diff --git a/frontend/src/api/commands.js b/frontend/src/api/commands.js index fc5d5006..72fc44fd 100644 --- a/frontend/src/api/commands.js +++ b/frontend/src/api/commands.js @@ -1,14 +1,10 @@ -import { removePrefix } from "./utils"; import { baseURL } from "@/utils/constants"; -import { state } from "@/store"; const ssl = window.location.protocol === "https:"; const protocol = ssl ? "wss:" : "ws:"; export default function command(url, command, onmessage, onclose) { - url = removePrefix(url); - url = `${protocol}//${window.location.host}${baseURL}/api/command${url}?auth=${state.jwt}`; - + url = `${protocol}//${window.location.host}${baseURL}api/command${url}`; let conn = new window.WebSocket(url); conn.onopen = () => conn.send(command); conn.onmessage = onmessage; diff --git a/frontend/src/api/files.js b/frontend/src/api/files.js index cd7ac10a..87de7ed4 100644 --- a/frontend/src/api/files.js +++ b/frontend/src/api/files.js @@ -1,32 +1,17 @@ -import { createURL, fetchURL, removePrefix } from "./utils"; +import { createURL, fetchURL, adjustedData} from "./utils"; import { baseURL } from "@/utils/constants"; +import { removePrefix,getApiPath } from "@/utils/url.js"; import { state } from "@/store"; import { notify } from "@/notify"; // Notify if errors occur -export async function fetch(url, content = false) { +export async function fetchFiles(url, content = false) { try { - url = removePrefix(url); - - const res = await fetchURL(`/api/resources${url}?content=${content}`, {}); + url = removePrefix(url,"files"); + const apiPath = getApiPath("api/resources",{path: url, content: content}); + const res = await fetchURL(apiPath); const data = await res.json(); - data.url = `/files${url}`; - - if (data.isDir) { - if (!data.url.endsWith("/")) data.url += "/"; - data.items = data.items.map((item, index) => { - item.index = index; - item.url = `${data.url}${encodeURIComponent(item.name)}`; - - if (item.isDir) { - item.url += "/"; - } - - return item; - }); - } - - return data; + return adjustedData(data,url); } catch (err) { notify.showError(err.message || "Error fetching data"); throw err; @@ -35,15 +20,12 @@ export async function fetch(url, content = false) { async function 
resourceAction(url, method, content) { try { - url = removePrefix(url); - let opts = { method }; - if (content) { opts.body = content; } - - const res = await fetchURL(`/api/resources${url}`, opts); + const apiPath = getApiPath("api/resources", { path: url }); + const res = await fetchURL(apiPath, opts); return res; } catch (err) { notify.showError(err.message || "Error performing resource action"); @@ -72,27 +54,22 @@ export async function put(url, content = "") { export function download(format, ...files) { try { let url = `${baseURL}/api/raw`; - if (files.length === 1) { - url += removePrefix(files[0]) + "?"; + url += "?path="+removePrefix(files[0], "files"); } else { let arg = ""; for (let file of files) { - arg += removePrefix(file) + ","; + arg += removePrefix(file,"files") + ","; } arg = arg.substring(0, arg.length - 1); arg = encodeURIComponent(arg); - url += `/?files=${arg}&`; + url += `?files=${arg}`; } if (format) { - url += `algo=${format}&`; - } - - if (state.jwt) { - url += `auth=${state.jwt}&`; + url += `&algo=${format}`; } window.open(url); @@ -103,7 +80,7 @@ export function download(format, ...files) { export async function post(url, content = "", overwrite = false, onupload) { try { - url = removePrefix(url); + url = removePrefix(url,"files"); let bufferContent; if ( @@ -113,11 +90,12 @@ export async function post(url, content = "", overwrite = false, onupload) { bufferContent = await new Response(content).arrayBuffer(); } + const apiPath = getApiPath("api/resources", { path: url, override: overwrite }); return new Promise((resolve, reject) => { let request = new XMLHttpRequest(); request.open( "POST", - `${baseURL}/api/resources${url}?override=${overwrite}`, + apiPath, true ); request.setRequestHeader("X-Auth", state.jwt); @@ -148,30 +126,27 @@ export async function post(url, content = "", overwrite = false, onupload) { } } -function moveCopy(items, copy = false, overwrite = false, rename = false) { +export async function moveCopy(items, action = "copy", overwrite = false, rename = false) { let promises = []; - - for (let item of items) { - const from = item.from; - const to = encodeURIComponent(removePrefix(item.to)); - const url = `${from}?action=${ - copy ? 
"copy" : "rename" - }&destination=${to}&override=${overwrite}&rename=${rename}`; - promises.push(resourceAction(url, "PATCH")); + let params = { + overwrite: overwrite, + action: action, + rename: rename, } + try { + for (let item of items) { + let localParams = { ...params }; + localParams.destination = item.to; + localParams.from = item.from; + const apiPath = getApiPath("api/resources", localParams); + promises.push(fetch(apiPath, { method: "PATCH" })); + } + return promises; - return Promise.all(promises).catch((err) => { + } catch (err) { notify.showError(err.message || "Error moving/copying resources"); throw err; - }); -} - -export function move(items, overwrite = false, rename = false) { - return moveCopy(items, false, overwrite, rename); -} - -export function copy(items, overwrite = false, rename = false) { - return moveCopy(items, true, overwrite, rename); + } } export async function checksum(url, algo) { @@ -184,27 +159,29 @@ export async function checksum(url, algo) { } } -export function getDownloadURL(file, inline) { +export function getDownloadURL(path, inline) { try { const params = { + path: path, ...(inline && { inline: "true" }), }; - - return createURL("api/raw" + file.path, params); + return createURL("api/raw", params); } catch (err) { notify.showError(err.message || "Error getting download URL"); throw err; } } -export function getPreviewURL(file, size) { +export function getPreviewURL(path, size, modified) { try { const params = { + path: path, + size: size, + key: Date.parse(modified), inline: "true", - key: Date.parse(file.modified), }; - return createURL("api/preview/" + size + file.path, params); + return createURL("api/preview", params); } catch (err) { notify.showError(err.message || "Error getting preview URL"); throw err; @@ -229,11 +206,10 @@ export function getSubtitlesURL(file) { } } -export async function usage(url) { +export async function usage(source) { try { - url = removePrefix(url); - - const res = await fetchURL(`/api/usage${url}`, {}); + const apiPath = getApiPath("api/usage", { source: source }); + const res = await fetchURL(apiPath); return await res.json(); } catch (err) { notify.showError(err.message || "Error fetching usage data"); diff --git a/frontend/src/api/index.js b/frontend/src/api/index.js index abc189dc..22c214ae 100644 --- a/frontend/src/api/index.js +++ b/frontend/src/api/index.js @@ -1,9 +1,9 @@ -import * as files from "./files"; -import * as share from "./share"; -import * as users from "./users"; -import * as settings from "./settings"; -import * as pub from "./pub"; +import * as filesApi from "./files"; +import * as shareApi from "./share"; +import * as usersApi from "./users"; +import * as settingsApi from "./settings"; +import * as publicApi from "./public"; import search from "./search"; import commands from "./commands"; -export { files, share, users, settings, pub, commands, search }; +export { filesApi, shareApi, usersApi, settingsApi, publicApi, commands, search }; diff --git a/frontend/src/api/pub.js b/frontend/src/api/pub.js deleted file mode 100644 index d7eaca3f..00000000 --- a/frontend/src/api/pub.js +++ /dev/null @@ -1,89 +0,0 @@ -import { removePrefix, createURL } from "./utils"; -import { baseURL } from "@/utils/constants"; - -export async function fetchPub(url, password = "") { - url = removePrefix(url); - const res = await fetch( - `/api/public/share${url}`, - { - headers: { - "X-SHARE-PASSWORD": encodeURIComponent(password), - }, - } - ); - if (res.status != 200) { - const error = new Error("000 No 
connection"); - error.status = res.status; - throw error; - } - - let data = await res.json(); - data.url = `/share${url}`; - if (data.isDir) { - if (!data.url.endsWith("/")) data.url += "/"; - data.items = data.items.map((item, index) => { - item.index = index; - item.url = `${data.url}${encodeURIComponent(item.name)}`; - - if (item.isDir) { - item.url += "/"; - } - - return item; - }); - } - - return data; -} - -export function download(format, hash, token, ...files) { - let url = `${baseURL}/api/public/dl/${hash}`; - if (files.length === 1) { - url += encodeURIComponent(files[0]) + "?"; - } else { - let arg = ""; - for (let file of files) { - arg += encodeURIComponent(file) + ","; - } - - arg = arg.substring(0, arg.length - 1); - arg = encodeURIComponent(arg); - url += `/?files=${arg}&`; - } - - if (format) { - url += `algo=${format}&`; - } - - if (token) { - url += `token=${token}&`; - } - - window.open(url); -} - -export function getPublicUser() { - return fetch("/api/public/publicUser") - .then(response => { - if (!response.ok) { - throw new Error(`HTTP error! Status: ${response.status}`); - } - return response.json(); - }) - .catch(error => { - console.error("Error fetching public user:", error); - throw error; - }); -} - -export function getDownloadURL(share, inline = false) { - const params = { - ...(inline && { inline: "true" }), - ...(share.token && { token: share.token }), - }; - if (share.path == undefined) { - share.path = "" - } - const path = share.path.replace("/share/"+share.hash +"/","") - return createURL("api/public/dl/" + share.hash + "/"+path, params, false); -} diff --git a/frontend/src/api/public.js b/frontend/src/api/public.js new file mode 100644 index 00000000..388fde8d --- /dev/null +++ b/frontend/src/api/public.js @@ -0,0 +1,74 @@ +import { createURL, adjustedData } from "./utils"; +import { getApiPath } from "@/utils/url.js"; +import { notify } from "@/notify"; + +// Fetch public share data +export async function fetchPub(path, hash, password = "") { + try { + const params = { path, hash } + const apiPath = getApiPath("api/public/share", params); + const response = await fetch(apiPath, { + headers: { + "X-SHARE-PASSWORD": password ? 
encodeURIComponent(password) : "", + }, + }); + + if (!response.ok) { + const error = new Error("Failed to connect to the server."); + error.status = response.status; + throw error; + } + let data = await response.json() + return adjustedData(data, `${hash}${path}`); + } catch (err) { + notify.showError(err.message || "Error fetching public share data"); + throw err; + } +} + +// Download files with given parameters +export function download(path, hash, token, format, ...files) { + try { + let fileInfo = files[0] + if (files.length > 1) { + fileInfo = files.map(encodeURIComponent).join(","); + } + const params = { + path, + hash, + ...(format && { format}), + ...(token && { token }), + fileInfo + }; + const url = createURL(`api/public/dl`, params, false); + window.open(url); + } catch (err) { + notify.showError(err.message || "Error downloading files"); + throw err; + } + + +} + +// Get the public user data +export async function getPublicUser() { + try { + const apiPath = getApiPath("api/public/publicUser"); + const response = await fetch(apiPath); + return response.json(); + } catch (err) { + notify.showError(err.message || "Error fetching public user"); + throw err; + } +} + +// Generate a download URL +export function getDownloadURL(share) { + const params = { + "path": share.path, + "hash": share.hash, + "token": share.token, + ...(share.inline && { inline: "true" }), + }; + return createURL(`api/public/dl`, params, false); +} diff --git a/frontend/src/api/search.js b/frontend/src/api/search.js index 1e13fde7..33c89c58 100644 --- a/frontend/src/api/search.js +++ b/frontend/src/api/search.js @@ -1,26 +1,21 @@ -import { fetchURL, removePrefix } from "./utils"; -import url from "../utils/url"; +import { fetchURL } from "./utils"; import { notify } from "@/notify"; // Import notify for error handling +import { removePrefix, getApiPath } from "@/utils/url.js"; export default async function search(base, query) { try { - base = removePrefix(base); + base = removePrefix(base,"files"); query = encodeURIComponent(query); if (!base.endsWith("/")) { base += "/"; } - const res = await fetchURL(`/api/search${base}?query=${query}`, {}); - + const apiPath = getApiPath("api/search", { scope: base, query: query }); + const res = await fetchURL(apiPath); let data = await res.json(); - data = data.map((item) => { - item.url = `/files${base}` + url.encodePath(item.path); - return item; - }); - - return data; + return data } catch (err) { notify.showError(err.message || "Error occurred during search"); throw err; diff --git a/frontend/src/api/settings.js b/frontend/src/api/settings.js index e03b0db1..0e53378f 100644 --- a/frontend/src/api/settings.js +++ b/frontend/src/api/settings.js @@ -1,11 +1,15 @@ import { fetchURL, fetchJSON } from "./utils"; +import { getApiPath } from "@/utils/url.js"; + +const apiPath = getApiPath("api/settings"); export function get() { - return fetchJSON(`/api/settings`, {}); + return fetchJSON(apiPath); } export async function update(settings) { - await fetchURL(`/api/settings`, { + + await fetchURL(apiPath, { method: "PUT", body: JSON.stringify(settings), }); diff --git a/frontend/src/api/share.js b/frontend/src/api/share.js index efa7fa12..44080495 100644 --- a/frontend/src/api/share.js +++ b/frontend/src/api/share.js @@ -1,27 +1,33 @@ -import { fetchURL, fetchJSON, removePrefix, createURL } from "./utils"; +import { fetchURL, fetchJSON, createURL, adjustedData } from "./utils"; +import { notify } from "@/notify"; export async function list() { - return 
fetchJSON("/api/shares"); + return fetchJSON("api/shares"); } -export async function get(url) { - url = removePrefix(url); - return fetchJSON(`/api/share${url}`); +export async function get(path, hash) { + try { + const params = { path, hash }; + const url = createURL(`api/share`, params, false); + let data = fetchJSON(url); + return adjustedData(data, `api/share${path}`); + } catch (err) { + notify.showError(err.message || "Error fetching data"); + throw err; + } } export async function remove(hash) { - await fetchURL(`/api/share/${hash}`, { + const params = { hash }; + const url = createURL(`api/share`, params, false); + await fetchURL(url, { method: "DELETE", }); } -export async function create(url, password = "", expires = "", unit = "hours") { - url = removePrefix(url); - url = `/api/share${url}`; - expires = String(expires); - if (expires !== "") { - url += `?expires=${expires}&unit=${unit}`; - } +export async function create(path, password = "", expires = "", unit = "hours") { + const params = { path }; + const url = createURL(`api/share`, params, false); let body = "{}"; if (password != "" || expires !== "" || unit !== "hours") { body = JSON.stringify({ password: password, expires: expires, unit: unit }); diff --git a/frontend/src/api/users.js b/frontend/src/api/users.js index 0c93d621..110d4c38 100644 --- a/frontend/src/api/users.js +++ b/frontend/src/api/users.js @@ -1,27 +1,66 @@ import { fetchURL, fetchJSON } from "@/api/utils"; +import { getApiPath } from "@/utils/url.js"; import { notify } from "@/notify"; // Import notify for error handling export async function getAllUsers() { try { - return await fetchJSON(`/api/users`, {}); + const apiPath = getApiPath("api/users"); + return await fetchJSON(apiPath); } catch (err) { notify.showError(err.message || "Failed to fetch users"); throw err; // Re-throw to handle further if needed } } + export async function get(id) { try { - return await fetchJSON(`/api/users/${id}`, {}); + const apiPath = getApiPath("api/users", { id: id }); + return await fetchJSON(apiPath); } catch (err) { notify.showError(err.message || `Failed to fetch user with ID: ${id}`); throw err; } } +export async function getApiKeys() { + try { + const apiPath = getApiPath("api/auth/tokens"); + return await fetchJSON(apiPath); + } catch (err) { + notify.showError(err.message || `Failed to get api keys`); + throw err; + } +} + + +export async function createApiKey(params) { + try { + const apiPath = getApiPath("api/auth/token", params); + await fetchURL(apiPath, { + method: "PUT", + }); + } catch (err) { + notify.showError(err.message || `Failed to create API key`); + throw err; + } +} + +export function deleteApiKey(params) { + try { + const apiPath = getApiPath("api/auth/token", params); + fetchURL(apiPath, { + method: "DELETE", + }); + } catch (err) { + notify.showError(err.message || `Failed to delete API key`); + throw err; + } +} + export async function create(user) { try { - const res = await fetchURL(`/api/users`, { + const res = await fetchURL(`api/users`, { method: "POST", body: JSON.stringify({ what: "user", @@ -50,7 +89,8 @@ export async function update(user, which = ["all"]) { if (user.username === "publicUser") { return; } - await fetchURL(`/api/users/${user.id}`, { + const apiPath = getApiPath("api/users", { id: user.id }); + await fetchURL(apiPath, { method: "PUT", body: JSON.stringify({ what: "user", @@ -66,7 +106,8 @@ export async function update(user, which = ["all"]) { export async function remove(id) { try { - await fetchURL(`/api/users/${id}`, { 
+ const apiPath = getApiPath("api/users", { id: id }); + await fetchURL(apiPath, { method: "DELETE", }); } catch (err) { diff --git a/frontend/src/api/utils.js b/frontend/src/api/utils.js index 1900acb1..67e474c2 100644 --- a/frontend/src/api/utils.js +++ b/frontend/src/api/utils.js @@ -1,7 +1,6 @@ import { state } from "@/store"; import { renew, logout } from "@/utils/auth"; import { baseURL } from "@/utils/constants"; -import { encodePath } from "@/utils/url"; import { notify } from "@/notify"; export async function fetchURL(url, opts, auth = true) { @@ -12,11 +11,14 @@ export async function fetchURL(url, opts, auth = true) { let res; try { - res = await fetch(`${baseURL}${url}`, { + let userScope = ""; + if (state.user) { + userScope = state.user.scope; + } + res = await fetch(url, { headers: { - "X-Auth": state.jwt, "sessionId": state.sessionId, - "userScope": state.user.scope, + "userScope": userScope, ...headers, }, ...rest, @@ -48,30 +50,22 @@ export async function fetchURL(url, opts, auth = true) { export async function fetchJSON(url, opts) { const res = await fetchURL(url, opts); - if (res.status === 200) { + if (res.status < 300) { return res.json(); } else { - notify.showError("unable to fetch : " + url + "status" + res.status); + notify.showError("received status: "+res.status+" on url " + url); throw new Error(res.status); } } -export function removePrefix(url) { - url = url.split("/").splice(2).join("/"); - if (url === "") url = "/"; - if (url[0] !== "/") url = "/" + url; - return url; -} - -export function createURL(endpoint, params = {}, auth = true) { +export function createURL(endpoint, params = {}) { let prefix = baseURL; if (!prefix.endsWith("/")) { prefix = prefix + "/"; } - const url = new URL(prefix + encodePath(endpoint), origin); + const url = new URL(prefix + endpoint, origin); const searchParams = { - ...(auth && { auth: state.jwt }), ...params, }; @@ -81,3 +75,19 @@ export function createURL(endpoint, params = {}, auth = true) { return url.toString(); } + +export function adjustedData(data, url) { + data.url = url; + if (data.type == "directory") { + if (!data.url.endsWith("/")) data.url += "/"; + data.items = data.items.map((item, index) => { + item.index = index; + item.url = `${data.url}${item.name}`; + if (item.type == "directory") { + item.url += "/"; + } + return item; + }); + } + return data +} \ No newline at end of file diff --git a/frontend/src/components/Breadcrumbs.vue b/frontend/src/components/Breadcrumbs.vue index b8a05072..d27804e8 100644 --- a/frontend/src/components/Breadcrumbs.vue +++ b/frontend/src/components/Breadcrumbs.vue @@ -33,6 +33,7 @@ + + diff --git a/frontend/src/components/prompts/Copy.vue b/frontend/src/components/prompts/Copy.vue index d305e699..6767444f 100644 --- a/frontend/src/components/prompts/Copy.vue +++ b/frontend/src/components/prompts/Copy.vue @@ -49,10 +49,10 @@ diff --git a/frontend/src/components/prompts/Delete.vue b/frontend/src/components/prompts/Delete.vue index 749e2b7d..de838584 100644 --- a/frontend/src/components/prompts/Delete.vue +++ b/frontend/src/components/prompts/Delete.vue @@ -30,7 +30,7 @@ diff --git a/frontend/src/views/bars/Default.vue b/frontend/src/views/bars/Default.vue index dda05e5e..e8ce2767 100644 --- a/frontend/src/views/bars/Default.vue +++ b/frontend/src/views/bars/Default.vue @@ -15,10 +15,10 @@ + diff --git a/frontend/src/views/settings/Global.vue b/frontend/src/views/settings/Global.vue index e5e21f29..ff51be74 100644 --- a/frontend/src/views/settings/Global.vue +++ 
b/frontend/src/views/settings/Global.vue @@ -88,7 +88,7 @@