v0.3.0 release

This commit is contained in:
parent 822dc2f5fd
commit a5548bb776

@@ -13,6 +13,7 @@ rice-box.go
/backend/*.cov
/backend/test_config.yaml
/backend/srv
/backend/http/dist

.DS_Store
node_modules

CHANGELOG.md (35 changed lines)
@@ -2,6 +2,33 @@

All notable changes to this project will be documented in this file. For commit guidelines, please refer to [Standard Version](https://github.com/conventional-changelog/standard-version).

## v0.3.0

This release focuses on the API, making it more accessible for developers to use its functions without the UI.

**New Features**:
- You can now create long-lived API tokens from the user settings page to interact with the API.
- These tokens have the same permissions as your user.
- Helpful swagger page for API usage.
- Some APIs were refactored for friendlier usage: certain attributes moved to parameters, and requests first look for an API token, then fall back to the stored cookie if none is found. This allows all API requests from the swagger page to work without a token.
- Added file size to the search preview! Should have been in the last release... sorry!

**Notes**:
- Replaced the backend HTTP framework with the Go standard library (a minimal sketch of the pattern follows this list).
- The right-click context menu can target the item that was right-clicked, to fully address https://github.com/gtsteffaniak/filebrowser/issues/214
- Adjusted the settings menu for mobile; it always shows all available cards rather than grayed-out cards that need to be clicked.
- Longer and more cryptographically secure share links, based on UUIDs rather than base64.
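The note about dropping the HTTP framework refers to serving routes directly with Go's standard `net/http` package. Below is a minimal, hypothetical sketch of that pattern only; the route path and handler are illustrative and are not the project's actual routes.

```go
package main

import (
	"log"
	"net/http"
)

func main() {
	mux := http.NewServeMux()

	// Hypothetical route: the real project registers its own handlers.
	mux.HandleFunc("/api/health", func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusOK)
		_, _ = w.Write([]byte(`{"status":"ok"}`))
	})

	// A plain http.Server from the standard library replaces the third-party router.
	srv := &http.Server{Addr: ":8080", Handler: mux}
	log.Println("listening on :8080")
	log.Fatal(srv.ListenAndServe())
}
```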
**Bugfixes**:
- Fixed a UI bug with password-protected shares.
- Fixed baseurl-related bugs https://github.com/gtsteffaniak/filebrowser/pull/228 — thanks @SimLV
- Fixed an empty directory load issue.
- Fixed image preview cutoff on mobile.
- Fixed an issue introduced in v0.2.10 where new files and folders were not showing up in the UI.
- Fixed a preview issue where the preview would not load after viewing video files.
- Fixed a sorting issue where files were not sorted by name by default.
- Fixed a copy file prompt issue.

## v0.2.10

**New Features**:

@@ -15,9 +42,9 @@ All notable changes to this project will be documented in this file. For commit
- Fixed issue searching "smaller than" actually returned files "larger than"

**Notes**:
- Memory usage from index is reduced by ~40%
- Indexing time has increased 2x due to the extra processing time required to calculate directory sizes.
- File size calcuations use 1024 base vs previous 1000 base (matching windows explorer)
- Memory usage from index is reduced by ~40%
- Indexing time has increased 2x due to the extra processing time required to calculate directory sizes.
- File size calculations use 1024 base vs previous 1000 base (matching windows explorer)

## v0.2.9

@@ -40,7 +67,7 @@ All notable changes to this project will be documented in this file. For commit

## v0.2.8

- **Feature**: New gallary view scaling options (closes [#141](https://github.com/gtsteffaniak/filebrowser/issues/141))
- **Feature**: New gallery view scaling options (closes [#141](https://github.com/gtsteffaniak/filebrowser/issues/141))
- **Change**: Refactored backend files functions
- **Change**: Improved UI response to filesystem changes
- **Change**: Added frontend tests for deployment integrity
@@ -1,8 +1,10 @@
FROM golang:1.22-alpine AS base
FROM golang:1.23-alpine AS base
ARG VERSION
ARG REVISION
WORKDIR /app
COPY ./backend ./
#RUN swag init --output swagger/docs
RUN ln -s swagger /usr/local/go/src/
RUN go build -ldflags="-w -s \
    -X 'github.com/gtsteffaniak/filebrowser/version.Version=${VERSION}' \
    -X 'github.com/gtsteffaniak/filebrowser/version.CommitSHA=${REVISION}'" \

@@ -19,5 +21,7 @@ FROM alpine:latest
ENV FILEBROWSER_NO_EMBEDED="true"
RUN apk --no-cache add ca-certificates mailcap
COPY --from=base /app/filebrowser* ./
COPY --from=nbuild /app/dist/ ./frontend/dist/
# exposing default port for auto discovery.
EXPOSE 80
COPY --from=nbuild /app/dist/ ./http/dist/
ENTRYPOINT [ "./filebrowser" ]

@@ -1,4 +1,4 @@
FROM golang:1.22-alpine AS base
FROM golang:1.23-alpine AS base
WORKDIR /app
COPY ./backend ./
RUN go build -ldflags="-w -s" -o filebrowser .

README.md (63 changed lines)
@@ -10,41 +10,43 @@
</p>

> [!WARNING]
> Starting with v0.2.0, *ALL* configuration is done via the `filebrowser.yaml`
> configuration file.
> Starting with v0.2.4, *ALL* share links need to be re-created (due to a
> security fix).
> Starting with `v0.3.0`, API routes have been slightly altered for friendlier usage outside of the UI.
> If on Windows, please use docker. The Windows binary is unstable and may not work.

FileBrowser Quantum is a fork of the filebrowser opensource project with the
following changes:
FileBrowser Quantum is a fork of the file browser opensource project with the following changes:

1. [x] Efficiently indexed files
   - Real-time search results as you type
   - Search works with more type filters
   - Enhanced interactive results page.
2. [x] Revamped and simplified GUI navbar and sidebar menu.
   - file/folder sizes are shown in the response
1. [x] Revamped and simplified GUI navbar and sidebar menu.
   - Additional compact view mode as well as refreshed view mode
     styles.
3. [x] Revamped and simplified configuration via `filebrowser.yml` config file.
4. [x] Faster listing browsing
1. [x] Revamped and simplified configuration via `filebrowser.yml` config file.
1. [x] Better listing browsing
   - Switching view modes is instant
   - Folder sizes are shown as well
   - Changing sort order is instant
   - The entire directory is loaded in 1/3 the time
1. Developer API support
   - Can create long-lived API tokens.
   - Helpful Swagger page available at the `/swagger` endpoint.

## About

FileBrowser Quantum provides a file managing interface within a specified directory
FileBrowser Quantum provides a file-managing interface within a specified directory
and can be used to upload, delete, preview, rename, and edit your files.
It allows the creation of multiple users, and each user can have its own
directory.

This repository is a fork of the original [filebrowser](https://github.com/filebrowser/filebrowser)
with a collection of changes that make this program work better in terms of
aesthetics and performance. Improved search, simplified ui
aesthetics and performance. Improved search, a simplified UI
(without removing features), and a more secure and up-to-date
build are just a few examples.

FileBrowser Quantum differs significantly to the original.
FileBrowser Quantum differs significantly from the original.
There are hundreds of thousands of lines changed, and they are generally
no longer compatible with each other. This has been intentional -- the
focus of this fork is on a few key principles:
@@ -68,10 +70,9 @@ action panel. If the action is available based on context, it will show up as
a popup menu.

<p align="center">
<img width="800" src="https://github.com/gtsteffaniak/filebrowser/assets/42989099/899152cf-3e69-4179-aa82-752af2df3fc6" title="Main Screenshot">
<img width="800" src="https://github.com/user-attachments/assets/18c02d03-5c60-4e15-9c32-3cfe058a0c49" title="Main Screenshot">
<img width="800" src="https://github.com/user-attachments/assets/75226dc4-9802-46f0-9e3c-e4403d3275da" title="Main Screenshot">
<img width="800" src="https://github.com/user-attachments/assets/2be7a6c5-0f95-4d9f-bc05-484ee71246d8" title="Search GIF">
<img width="800" src="https://github.com/user-attachments/assets/f55a6f1f-b930-4399-98b5-94da6e90527a" title="Navigation GIF">
<img width="800" src="https://github.com/user-attachments/assets/75226dc4-9802-46f0-9e3c-e4403d3275da" title="Main Screenshot">
</p>

## Install

@@ -89,7 +90,6 @@ docker run -it -v /path/to/folder:/srv -p 80:80 gtstef/filebrowser
- with local storage

```
version: '3.7'
services:
  filebrowser:
    volumes:

@@ -105,7 +105,6 @@ services:
- with network share

```
version: '3.7'
services:
  filebrowser:
    volumes:

@@ -121,7 +120,7 @@ volumes:
    driver_opts:
      type: cifs
      o: "username=admin,password=password,rw" # enter valid info here
      device: "//192.168.1.100/share/" # enter valid hinfo here
      device: "//192.168.1.100/share/" # enter valid info here

```
@@ -135,10 +134,24 @@ Not using docker (not recommended), download your binary from releases and run w

There are very few commands available. There are 3 actions done via the command line:

1. Running the program, as shown on install step. Only argument used is the config file, if you choose to override default "filebrowser.yaml"
1. Running the program, as shown in the install step. The only argument used is the config file, if you choose to override the default "filebrowser.yaml"
2. Checking the version info via `./filebrowser version`
3. Updating the DB, which currently only supports adding users via `./filebrowser set -u username,password [-a] [-s "example/scope"]`

## API Usage

FileBrowser Quantum comes with a swagger page that can be accessed from the API section of the settings or by going to `/swagger` to see the full list:



You use the token as a bearer token, for example in Postman:

Successful request:
<img width="500" alt="image" src="https://github.com/user-attachments/assets/4f18fa8a-8d87-4f40-9dc7-3d4407769b59">
Failed request:
<img width="500" alt="image" src="https://github.com/user-attachments/assets/4da0deae-f93d-4d94-83b1-68806afb343a">
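For readers not using Postman, the same bearer-token request can be made from any HTTP client. A minimal sketch in Go follows; the server address, route, and token value are placeholders, so substitute a real route from the `/swagger` page and a token generated in user settings.

```go
package main

import (
	"fmt"
	"io"
	"log"
	"net/http"
)

func main() {
	// Placeholder values: replace with your server address, an API route
	// listed on the /swagger page, and a long-lived token from user settings.
	url := "http://localhost:80/api/example"
	token := "YOUR_LONG_LIVED_API_TOKEN"

	req, err := http.NewRequest(http.MethodGet, url, nil)
	if err != nil {
		log.Fatal(err)
	}
	// The token is sent as a standard bearer token.
	req.Header.Set("Authorization", "Bearer "+token)

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status)
	fmt.Println(string(body))
}
```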
## Configuration

All configuration is now done via a single configuration file:

@@ -149,11 +162,12 @@ View the [Configuration Help Page](./docs/configuration.md) for available
configuration options and other help.

## Migration from filebrowser/filebrowser
## Migration from the original filebrowser

If you currently use the original filebrowser but want to try using this,
I recommend you start fresh without reusing the database. If you want to
migrate your existing database to FileBrowser Quantum, visit the [migration
I would recommend that you start fresh without reusing the database. However,
if you want to migrate your existing database to FileBrowser Quantum,
visit the [migration
readme](./docs/migration.md)

## Comparison Chart

@@ -185,7 +199,8 @@ Multiple users | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ |
Single sign-on support | ❌ | ❌ | ❌ | ✅ | ✅ | ✅ |
LDAP sign-on support | ❌ | ❌ | ❌ | ✅ | ✅ | ✅ |
2FA sign-on support | ❌ | ❌ | ❌ | ✅ | ✅ | ✅ |
Long-live API key support | ❌ | ❌ | ✅ | ✅ | ✅ | ✅ |
Long-live API key support | ✅ | ❌ | ✅ | ✅ | ✅ | ✅ |
API documentation page | ✅ | ❌ | ✅ | ✅ | ❌ | ✅ |
Mobile App | ❌ | ❌ | ❌ | ❌ | ✅ | ❌ |
open source? | ✅ | ✅ | ✅ | ✅ | ❌ | ✅ |
tags support | ❌ | ❌ | ❌ | ✅ | ❌ | ✅ |
@@ -9,7 +9,7 @@ import (
// Auther is the authentication interface.
type Auther interface {
    // Auth is called to authenticate a request.
    Auth(r *http.Request, usr users.Store) (*users.User, error)
    Auth(r *http.Request, userStore *users.Storage) (*users.User, error)
    // LoginPage indicates if this auther needs a login page.
    LoginPage() bool
}
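The signature change above means every auth backend now receives the concrete `*users.Storage` instead of the old `users.Store` interface. A minimal, hypothetical sketch of a custom auther written against the new signature is shown below; the `HeaderAuth` type, its header name, and its error handling are illustrative only and are not part of the project.

```go
package auth

import (
	"net/http"
	"os"

	"github.com/gtsteffaniak/filebrowser/settings"
	"github.com/gtsteffaniak/filebrowser/users"
)

// HeaderAuth is a hypothetical auther that trusts a username header,
// written only to illustrate the new Auther signature.
type HeaderAuth struct {
	Header string `json:"header"`
}

// Auth looks up the user named in the configured header.
func (a HeaderAuth) Auth(r *http.Request, userStore *users.Storage) (*users.User, error) {
	username := r.Header.Get(a.Header)
	if username == "" {
		return nil, os.ErrPermission
	}
	// Same lookup pattern the built-in authers use in this diff.
	return userStore.Get(settings.Config.Server.Root, username)
}

// LoginPage reports that this auther does not need a login page.
func (a HeaderAuth) LoginPage() bool {
	return false
}
```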
@@ -30,7 +30,7 @@ type HookAuth struct {
}

// Auth authenticates the user via a json in content body.
func (a *HookAuth) Auth(r *http.Request, usr users.Store) (*users.User, error) {
func (a *HookAuth) Auth(r *http.Request, usr *users.Storage) (*users.User, error) {
    var cred hookCred

    if r.Body == nil {

@@ -51,7 +51,6 @@ func (a *HookAuth) Auth(r *http.Request, usr users.Store) (*users.User, error) {
    if err != nil {
        return nil, err
    }

    switch action {
    case "auth":
        u, err := a.SaveUser()

@@ -187,7 +186,7 @@ func (a *HookAuth) SaveUser() (*users.User, error) {
func (a *HookAuth) GetUser(d *users.User) *users.User {
    // adds all permissions when user is admin
    isAdmin := d.Perm.Admin
    perms := settings.Permissions{
    perms := users.Permissions{
        Admin:   isAdmin,
        Execute: isAdmin || d.Perm.Execute,
        Create:  isAdmin || d.Perm.Create,

@@ -23,7 +23,7 @@ type JSONAuth struct {
}

// Auth authenticates the user via a json in content body.
func (a JSONAuth) Auth(r *http.Request, usr users.Store) (*users.User, error) {
func (a JSONAuth) Auth(r *http.Request, userStore *users.Storage) (*users.User, error) {
    config := &settings.Config
    var cred jsonCred

@@ -47,8 +47,7 @@ func (a JSONAuth) Auth(r *http.Request, usr users.Store) (*users.User, error) {
            return nil, os.ErrPermission
        }
    }

    u, err := usr.Get(config.Server.Root, cred.Username)
    u, err := userStore.Get(config.Server.Root, cred.Username)
    if err != nil || !users.CheckPwd(cred.Password, u.Password) {
        return nil, os.ErrPermission
    }

@@ -14,7 +14,7 @@ const MethodNoAuth = "noauth"
type NoAuth struct{}

// Auth authenticates as user 1.
func (a NoAuth) Auth(r *http.Request, usr users.Store) (*users.User, error) {
func (a NoAuth) Auth(r *http.Request, usr *users.Storage) (*users.User, error) {
    return usr.Get(settings.Config.Server.Root, uint(1))
}

@@ -19,7 +19,7 @@ type ProxyAuth struct {
}

// Auth authenticates the user via an HTTP header.
func (a ProxyAuth) Auth(r *http.Request, usr users.Store) (*users.User, error) {
func (a ProxyAuth) Auth(r *http.Request, usr *users.Storage) (*users.User, error) {
    username := r.Header.Get(a.Header)
    user, err := usr.Get(settings.Config.Server.Root, username)
    if err == errors.ErrNotExist {

@@ -1,4 +0,0 @@
# Ignore everything in this directory
*
# Except this file
!.gitignore
@ -1,20 +1,11 @@
|
|||
package cmd
|
||||
|
||||
import (
|
||||
"crypto/tls"
|
||||
"flag"
|
||||
"fmt"
|
||||
"io/fs"
|
||||
"log"
|
||||
"net"
|
||||
"net/http"
|
||||
"os"
|
||||
"os/signal"
|
||||
"strconv"
|
||||
"strings"
|
||||
"syscall"
|
||||
|
||||
"embed"
|
||||
|
||||
"github.com/gtsteffaniak/filebrowser/diskcache"
|
||||
"github.com/gtsteffaniak/filebrowser/files"
|
||||
|
@ -22,29 +13,15 @@ import (
|
|||
"github.com/gtsteffaniak/filebrowser/img"
|
||||
"github.com/gtsteffaniak/filebrowser/settings"
|
||||
"github.com/gtsteffaniak/filebrowser/storage"
|
||||
"github.com/gtsteffaniak/filebrowser/swagger/docs"
|
||||
"github.com/swaggo/swag"
|
||||
|
||||
"github.com/gtsteffaniak/filebrowser/users"
|
||||
"github.com/gtsteffaniak/filebrowser/utils"
|
||||
"github.com/gtsteffaniak/filebrowser/version"
|
||||
)
|
||||
|
||||
//go:embed dist/*
|
||||
var assets embed.FS
|
||||
|
||||
var (
|
||||
nonEmbededFS = os.Getenv("FILEBROWSER_NO_EMBEDED") == "true"
|
||||
)
|
||||
|
||||
type dirFS struct {
|
||||
http.Dir
|
||||
}
|
||||
|
||||
func (d dirFS) Open(name string) (fs.File, error) {
|
||||
return d.Dir.Open(name)
|
||||
}
|
||||
|
||||
func getStore(config string) (*storage.Storage, bool) {
|
||||
// Use the config file (global flag)
|
||||
log.Printf("Using Config file : %v", config)
|
||||
settings.Initialize(config)
|
||||
store, hasDB, err := storage.InitializeDb(settings.Config.Server.Database)
|
||||
if err != nil {
|
||||
|
@ -146,12 +123,16 @@ func StartFilebrowser() {
|
|||
database = fmt.Sprintf("Creating new database : %v", settings.Config.Server.Database)
|
||||
}
|
||||
log.Printf("Initializing FileBrowser Quantum (%v)\n", version.Version)
|
||||
log.Println("Embeded frontend :", !nonEmbededFS)
|
||||
log.Printf("Using Config file : %v", configPath)
|
||||
log.Println("Embeded frontend :", os.Getenv("FILEBROWSER_NO_EMBEDED") != "true")
|
||||
log.Println(database)
|
||||
log.Println("Sources :", settings.Config.Server.Root)
|
||||
log.Print("Indexing interval : ", indexingInterval)
|
||||
log.Println("Indexing interval :", indexingInterval)
|
||||
|
||||
serverConfig := settings.Config.Server
|
||||
swagInfo := docs.SwaggerInfo
|
||||
swagInfo.BasePath = serverConfig.BaseURL
|
||||
swag.Register(docs.SwaggerInfo.InstanceName(), swagInfo)
|
||||
// initialize indexing and schedule indexing every n minutes (default 5)
|
||||
go files.InitializeIndex(serverConfig.IndexingInterval, serverConfig.Indexing)
|
||||
if err := rootCMD(store, &serverConfig); err != nil {
|
||||
|
@ -159,13 +140,6 @@ func StartFilebrowser() {
|
|||
}
|
||||
}
|
||||
|
||||
func cleanupHandler(listener net.Listener, c chan os.Signal) { //nolint:interfacer
|
||||
sig := <-c
|
||||
log.Printf("Caught signal %s: shutting down.", sig)
|
||||
listener.Close()
|
||||
os.Exit(0)
|
||||
}
|
||||
|
||||
func rootCMD(store *storage.Storage, serverConfig *settings.Server) error {
|
||||
if serverConfig.NumImageProcessors < 1 {
|
||||
log.Fatal("Image resize workers count could not be < 1")
|
||||
|
@ -186,57 +160,7 @@ func rootCMD(store *storage.Storage, serverConfig *settings.Server) error {
|
|||
// No-op cache if no cacheDir is specified
|
||||
fileCache = diskcache.NewNoOp()
|
||||
}
|
||||
fbhttp.StartHttp(imgSvc, store, fileCache)
|
||||
|
||||
fbhttp.SetupEnv(store, serverConfig, fileCache)
|
||||
|
||||
_, err := os.Stat(serverConfig.Root)
|
||||
utils.CheckErr(fmt.Sprint("cmd os.Stat ", serverConfig.Root), err)
|
||||
var listener net.Listener
|
||||
address := serverConfig.Address + ":" + strconv.Itoa(serverConfig.Port)
|
||||
switch {
|
||||
case serverConfig.Socket != "":
|
||||
listener, err = net.Listen("unix", serverConfig.Socket)
|
||||
utils.CheckErr("net.Listen", err)
|
||||
err = os.Chmod(serverConfig.Socket, os.FileMode(0666)) // socket-perm
|
||||
utils.CheckErr("os.Chmod", err)
|
||||
case serverConfig.TLSKey != "" && serverConfig.TLSCert != "":
|
||||
cer, err := tls.LoadX509KeyPair(serverConfig.TLSCert, serverConfig.TLSKey) //nolint:govet
|
||||
utils.CheckErr("tls.LoadX509KeyPair", err)
|
||||
listener, err = tls.Listen("tcp", address, &tls.Config{
|
||||
MinVersion: tls.VersionTLS12,
|
||||
Certificates: []tls.Certificate{cer}},
|
||||
)
|
||||
utils.CheckErr("tls.Listen", err)
|
||||
default:
|
||||
listener, err = net.Listen("tcp", address)
|
||||
utils.CheckErr("net.Listen", err)
|
||||
}
|
||||
sigc := make(chan os.Signal, 1)
|
||||
signal.Notify(sigc, os.Interrupt, syscall.SIGTERM)
|
||||
go cleanupHandler(listener, sigc)
|
||||
if !nonEmbededFS {
|
||||
assetsFs, err := fs.Sub(assets, "dist")
|
||||
if err != nil {
|
||||
log.Fatal("Could not embed frontend. Does backend/cmd/dist exist? Must be built and exist first")
|
||||
}
|
||||
handler, err := fbhttp.NewHandler(imgSvc, assetsFs)
|
||||
utils.CheckErr("fbhttp.NewHandler", err)
|
||||
defer listener.Close()
|
||||
log.Println("Listening on", listener.Addr().String())
|
||||
//nolint: gosec
|
||||
if err := http.Serve(listener, handler); err != nil {
|
||||
log.Fatalf("Could not start server on port %d: %v", serverConfig.Port, err)
|
||||
}
|
||||
} else {
|
||||
assetsFs := dirFS{Dir: http.Dir("frontend/dist")}
|
||||
handler, err := fbhttp.NewHandler(imgSvc, assetsFs)
|
||||
utils.CheckErr("fbhttp.NewHandler", err)
|
||||
defer listener.Close()
|
||||
log.Println("Listening on", listener.Addr().String())
|
||||
//nolint: gosec
|
||||
if err := http.Serve(listener, handler); err != nil {
|
||||
log.Fatalf("Could not start server on port %d: %v", serverConfig.Port, err)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
|
|
@ -6,7 +6,6 @@ import (
|
|||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/pflag"
|
||||
|
||||
"github.com/gtsteffaniak/filebrowser/rules"
|
||||
"github.com/gtsteffaniak/filebrowser/settings"
|
||||
"github.com/gtsteffaniak/filebrowser/storage"
|
||||
"github.com/gtsteffaniak/filebrowser/users"
|
||||
|
@ -66,7 +65,7 @@ func getUserIdentifier(flags *pflag.FlagSet) interface{} {
|
|||
return nil
|
||||
}
|
||||
|
||||
func printRules(rulez []rules.Rule, id interface{}) {
|
||||
func printRules(rulez []users.Rule, id interface{}) {
|
||||
|
||||
for id, rule := range rulez {
|
||||
fmt.Printf("(%d) ", id)
|
||||
|
|
|
@ -5,7 +5,6 @@ import (
|
|||
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"github.com/gtsteffaniak/filebrowser/rules"
|
||||
"github.com/gtsteffaniak/filebrowser/settings"
|
||||
"github.com/gtsteffaniak/filebrowser/storage"
|
||||
"github.com/gtsteffaniak/filebrowser/users"
|
||||
|
@ -32,13 +31,13 @@ var rulesAddCmd = &cobra.Command{
|
|||
regexp.MustCompile(exp)
|
||||
}
|
||||
|
||||
rule := rules.Rule{
|
||||
rule := users.Rule{
|
||||
Allow: allow,
|
||||
Regex: regex,
|
||||
}
|
||||
|
||||
if regex {
|
||||
rule.Regexp = &rules.Regexp{Raw: exp}
|
||||
rule.Regexp = &users.Regexp{Raw: exp}
|
||||
} else {
|
||||
rule.Path = exp
|
||||
}
|
||||
|
|
Binary file not shown.
|
@ -19,8 +19,9 @@ import (
|
|||
"unicode/utf8"
|
||||
|
||||
"github.com/gtsteffaniak/filebrowser/errors"
|
||||
"github.com/gtsteffaniak/filebrowser/rules"
|
||||
"github.com/gtsteffaniak/filebrowser/fileutils"
|
||||
"github.com/gtsteffaniak/filebrowser/settings"
|
||||
"github.com/gtsteffaniak/filebrowser/users"
|
||||
)
|
||||
|
||||
var (
|
||||
|
@ -29,34 +30,33 @@ var (
|
|||
)
|
||||
|
||||
type ReducedItem struct {
|
||||
Name string `json:"name"`
|
||||
Size int64 `json:"size"`
|
||||
ModTime time.Time `json:"modified"`
|
||||
IsDir bool `json:"isDir,omitempty"`
|
||||
Type string `json:"type"`
|
||||
Name string `json:"name"`
|
||||
Size int64 `json:"size"`
|
||||
ModTime time.Time `json:"modified"`
|
||||
Type string `json:"type"`
|
||||
Mode os.FileMode `json:"-"`
|
||||
Content string `json:"content,omitempty"`
|
||||
}
|
||||
|
||||
// FileInfo describes a file.
|
||||
// reduced item is non-recursive reduced "Items", used to pass flat items array
|
||||
type FileInfo struct {
|
||||
Items []*FileInfo `json:"-"`
|
||||
ReducedItems []ReducedItem `json:"items,omitempty"`
|
||||
Path string `json:"path,omitempty"`
|
||||
Name string `json:"name"`
|
||||
Size int64 `json:"size"`
|
||||
Extension string `json:"-"`
|
||||
ModTime time.Time `json:"modified"`
|
||||
CacheTime time.Time `json:"-"`
|
||||
Mode os.FileMode `json:"-"`
|
||||
IsDir bool `json:"isDir,omitempty"`
|
||||
IsSymlink bool `json:"isSymlink,omitempty"`
|
||||
Type string `json:"type"`
|
||||
Subtitles []string `json:"subtitles,omitempty"`
|
||||
Content string `json:"content,omitempty"`
|
||||
Checksums map[string]string `json:"checksums,omitempty"`
|
||||
Token string `json:"token,omitempty"`
|
||||
NumDirs int `json:"numDirs"`
|
||||
NumFiles int `json:"numFiles"`
|
||||
Files []ReducedItem `json:"-"`
|
||||
Dirs map[string]*FileInfo `json:"-"`
|
||||
Path string `json:"path"`
|
||||
Name string `json:"name"`
|
||||
Items []ReducedItem `json:"items"`
|
||||
Size int64 `json:"size"`
|
||||
Extension string `json:"-"`
|
||||
ModTime time.Time `json:"modified"`
|
||||
CacheTime time.Time `json:"-"`
|
||||
Mode os.FileMode `json:"-"`
|
||||
IsSymlink bool `json:"isSymlink,omitempty"`
|
||||
Type string `json:"type"`
|
||||
Subtitles []string `json:"subtitles,omitempty"`
|
||||
Content string `json:"content,omitempty"`
|
||||
Checksums map[string]string `json:"checksums,omitempty"`
|
||||
Token string `json:"token,omitempty"`
|
||||
}
|
||||
|
||||
// FileOptions are the options when getting a file info.
|
||||
|
@ -67,58 +67,18 @@ type FileOptions struct {
|
|||
Expand bool
|
||||
ReadHeader bool
|
||||
Token string
|
||||
Checker rules.Checker
|
||||
Checker users.Checker
|
||||
Content bool
|
||||
}
|
||||
|
||||
// Legacy file info method, only called on non-indexed directories.
|
||||
// Once indexing completes for the first time, NewFileInfo is never called.
|
||||
func NewFileInfo(opts FileOptions) (*FileInfo, error) {
|
||||
|
||||
index := GetIndex(rootPath)
|
||||
if !opts.Checker.Check(opts.Path) {
|
||||
return nil, os.ErrPermission
|
||||
}
|
||||
file, err := stat(opts)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if opts.Expand {
|
||||
if file.IsDir {
|
||||
if err = file.readListing(opts.Path, opts.Checker, opts.ReadHeader); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
cleanedItems := []ReducedItem{}
|
||||
for _, item := range file.Items {
|
||||
// This is particularly useful for root of index, while indexing hasn't finished.
|
||||
// adds the directory sizes for directories that have been indexed already.
|
||||
if item.IsDir {
|
||||
adjustedPath := index.makeIndexPath(opts.Path+"/"+item.Name, true)
|
||||
info, _ := index.GetMetadataInfo(adjustedPath)
|
||||
item.Size = info.Size
|
||||
}
|
||||
cleanedItems = append(cleanedItems, ReducedItem{
|
||||
Name: item.Name,
|
||||
Size: item.Size,
|
||||
IsDir: item.IsDir,
|
||||
ModTime: item.ModTime,
|
||||
Type: item.Type,
|
||||
})
|
||||
}
|
||||
|
||||
file.Items = nil
|
||||
file.ReducedItems = cleanedItems
|
||||
return file, nil
|
||||
}
|
||||
err = file.detectType(opts.Path, opts.Modify, opts.Content, true)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return file, err
|
||||
func (f FileOptions) Components() (string, string) {
|
||||
return filepath.Dir(f.Path), filepath.Base(f.Path)
|
||||
}
|
||||
|
||||
func FileInfoFaster(opts FileOptions) (*FileInfo, error) {
|
||||
index := GetIndex(rootPath)
|
||||
opts.Path = index.makeIndexPath(opts.Path)
|
||||
|
||||
// Lock access for the specific path
|
||||
pathMutex := getMutex(opts.Path)
|
||||
pathMutex.Lock()
|
||||
|
@ -126,68 +86,94 @@ func FileInfoFaster(opts FileOptions) (*FileInfo, error) {
|
|||
if !opts.Checker.Check(opts.Path) {
|
||||
return nil, os.ErrPermission
|
||||
}
|
||||
index := GetIndex(rootPath)
|
||||
adjustedPath := index.makeIndexPath(opts.Path, opts.IsDir)
|
||||
if opts.IsDir {
|
||||
info, exists := index.GetMetadataInfo(adjustedPath)
|
||||
if exists && !opts.Content {
|
||||
// Let's not refresh if less than a second has passed
|
||||
if time.Since(info.CacheTime) > time.Second {
|
||||
go RefreshFileInfo(opts) //nolint:errcheck
|
||||
}
|
||||
// refresh cache after
|
||||
return &info, nil
|
||||
}
|
||||
}
|
||||
// don't bother caching content
|
||||
if opts.Content {
|
||||
file, err := NewFileInfo(opts)
|
||||
return file, err
|
||||
}
|
||||
err := RefreshFileInfo(opts)
|
||||
_, isDir, err := GetRealPath(opts.Path)
|
||||
if err != nil {
|
||||
file, err := NewFileInfo(opts)
|
||||
return file, err
|
||||
return nil, err
|
||||
}
|
||||
info, exists := index.GetMetadataInfo(adjustedPath + "/" + filepath.Base(opts.Path))
|
||||
if !exists || info.Name == "" {
|
||||
return NewFileInfo(opts)
|
||||
opts.IsDir = isDir
|
||||
// check if the file exists in the index
|
||||
info, exists := index.GetReducedMetadata(opts.Path, opts.IsDir)
|
||||
if exists {
|
||||
// Let's not refresh if less than a second has passed
|
||||
if time.Since(info.CacheTime) > time.Second {
|
||||
RefreshFileInfo(opts) //nolint:errcheck
|
||||
}
|
||||
if opts.Content {
|
||||
content := ""
|
||||
content, err = getContent(opts.Path)
|
||||
if err != nil {
|
||||
return info, err
|
||||
}
|
||||
info.Content = content
|
||||
}
|
||||
return info, nil
|
||||
}
|
||||
return &info, nil
|
||||
err = RefreshFileInfo(opts)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
info, exists = index.GetReducedMetadata(opts.Path, opts.IsDir)
|
||||
if !exists {
|
||||
return nil, err
|
||||
}
|
||||
if opts.Content {
|
||||
content, err := getContent(opts.Path)
|
||||
if err != nil {
|
||||
return info, err
|
||||
}
|
||||
info.Content = content
|
||||
}
|
||||
return info, nil
|
||||
}
|
||||
|
||||
func RefreshFileInfo(opts FileOptions) error {
|
||||
if !opts.Checker.Check(opts.Path) {
|
||||
return fmt.Errorf("permission denied: %s", opts.Path)
|
||||
refreshOptions := FileOptions{
|
||||
Path: opts.Path,
|
||||
IsDir: opts.IsDir,
|
||||
Token: opts.Token,
|
||||
}
|
||||
index := GetIndex(rootPath)
|
||||
adjustedPath := index.makeIndexPath(opts.Path, opts.IsDir)
|
||||
file, err := stat(opts)
|
||||
|
||||
if !refreshOptions.IsDir {
|
||||
refreshOptions.Path = index.makeIndexPath(filepath.Dir(refreshOptions.Path))
|
||||
refreshOptions.IsDir = true
|
||||
} else {
|
||||
refreshOptions.Path = index.makeIndexPath(refreshOptions.Path)
|
||||
}
|
||||
|
||||
current, exists := index.GetMetadataInfo(refreshOptions.Path, true)
|
||||
|
||||
file, err := stat(refreshOptions)
|
||||
if err != nil {
|
||||
return fmt.Errorf("File/folder does not exist to refresh data: %s", opts.Path)
|
||||
return fmt.Errorf("file/folder does not exist to refresh data: %s", refreshOptions.Path)
|
||||
}
|
||||
_ = file.detectType(opts.Path, true, opts.Content, opts.ReadHeader)
|
||||
if file.IsDir {
|
||||
err := file.readListing(opts.Path, opts.Checker, opts.ReadHeader)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Dir info could not be read: %s", opts.Path)
|
||||
}
|
||||
}
|
||||
result := index.UpdateFileMetadata(adjustedPath, *file)
|
||||
|
||||
//utils.PrintStructFields(*file)
|
||||
result := index.UpdateMetadata(file)
|
||||
if !result {
|
||||
return fmt.Errorf("File/folder does not exist in metadata: %s", adjustedPath)
|
||||
return fmt.Errorf("file/folder does not exist in metadata: %s", refreshOptions.Path)
|
||||
}
|
||||
if !exists {
|
||||
return nil
|
||||
}
|
||||
if current.Size != file.Size {
|
||||
index.recursiveUpdateDirSizes(filepath.Dir(refreshOptions.Path), file, current.Size)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func stat(opts FileOptions) (*FileInfo, error) {
|
||||
info, err := os.Lstat(opts.Path)
|
||||
realPath, _, err := GetRealPath(rootPath, opts.Path)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
info, err := os.Lstat(realPath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
file := &FileInfo{
|
||||
Path: opts.Path,
|
||||
Name: info.Name(),
|
||||
Name: filepath.Base(opts.Path),
|
||||
ModTime: info.ModTime(),
|
||||
Mode: info.Mode(),
|
||||
Size: info.Size(),
|
||||
|
@ -195,32 +181,98 @@ func stat(opts FileOptions) (*FileInfo, error) {
|
|||
Token: opts.Token,
|
||||
}
|
||||
if info.IsDir() {
|
||||
file.IsDir = true
|
||||
}
|
||||
if info.Mode()&os.ModeSymlink != 0 {
|
||||
file.IsSymlink = true
|
||||
targetInfo, err := os.Stat(opts.Path)
|
||||
if err == nil {
|
||||
file.Size = targetInfo.Size()
|
||||
file.IsDir = targetInfo.IsDir()
|
||||
// Open and read directory contents
|
||||
dir, err := os.Open(realPath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
defer dir.Close()
|
||||
|
||||
dirInfo, err := dir.Stat()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
index := GetIndex(rootPath)
|
||||
// Check cached metadata to decide if refresh is needed
|
||||
cachedParentDir, exists := index.GetMetadataInfo(opts.Path, true)
|
||||
if exists && dirInfo.ModTime().Before(cachedParentDir.CacheTime) {
|
||||
return cachedParentDir, nil
|
||||
}
|
||||
|
||||
// Read directory contents and process
|
||||
files, err := dir.Readdir(-1)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
file.Files = []ReducedItem{}
|
||||
file.Dirs = map[string]*FileInfo{}
|
||||
|
||||
var totalSize int64
|
||||
for _, item := range files {
|
||||
itemPath := filepath.Join(realPath, item.Name())
|
||||
|
||||
if item.IsDir() {
|
||||
itemInfo := &FileInfo{
|
||||
Name: item.Name(),
|
||||
ModTime: item.ModTime(),
|
||||
Mode: item.Mode(),
|
||||
}
|
||||
|
||||
if exists {
|
||||
// if directory size was already cached use that.
|
||||
cachedDir, ok := cachedParentDir.Dirs[item.Name()]
|
||||
if ok {
|
||||
itemInfo.Size = cachedDir.Size
|
||||
}
|
||||
}
|
||||
file.Dirs[item.Name()] = itemInfo
|
||||
totalSize += itemInfo.Size
|
||||
} else {
|
||||
itemInfo := ReducedItem{
|
||||
Name: item.Name(),
|
||||
Size: item.Size(),
|
||||
ModTime: item.ModTime(),
|
||||
Mode: item.Mode(),
|
||||
}
|
||||
if IsSymlink(item.Mode()) {
|
||||
itemInfo.Type = "symlink"
|
||||
info, err := os.Stat(itemPath)
|
||||
if err == nil {
|
||||
itemInfo.Name = info.Name()
|
||||
itemInfo.ModTime = info.ModTime()
|
||||
itemInfo.Size = info.Size()
|
||||
itemInfo.Mode = info.Mode()
|
||||
} else {
|
||||
file.Type = "invalid_link"
|
||||
}
|
||||
}
|
||||
if file.Type != "invalid_link" {
|
||||
err := itemInfo.detectType(itemPath, true, opts.Content, opts.ReadHeader)
|
||||
if err != nil {
|
||||
fmt.Printf("failed to detect type for %v: %v \n", itemPath, err)
|
||||
}
|
||||
file.Files = append(file.Files, itemInfo)
|
||||
}
|
||||
totalSize += itemInfo.Size
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
file.Size = totalSize
|
||||
}
|
||||
return file, nil
|
||||
}
|
||||
|
||||
// Checksum checksums a given File for a given User, using a specific
|
||||
// algorithm. The checksums data is saved on File object.
|
||||
func (i *FileInfo) Checksum(algo string) error {
|
||||
if i.IsDir {
|
||||
return errors.ErrIsDirectory
|
||||
}
|
||||
|
||||
if i.Checksums == nil {
|
||||
i.Checksums = map[string]string{}
|
||||
}
|
||||
|
||||
reader, err := os.Open(i.Path)
|
||||
fullpath := filepath.Join(i.Path, i.Name)
|
||||
reader, err := os.Open(fullpath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
@ -265,10 +317,7 @@ func GetRealPath(relativePath ...string) (string, bool, error) {
|
|||
// Convert relative path to absolute path
|
||||
absolutePath, err := filepath.Abs(joinedPath)
|
||||
if err != nil {
|
||||
return "", false, err
|
||||
}
|
||||
if !Exists(absolutePath) {
|
||||
return absolutePath, false, nil // return without error
|
||||
return absolutePath, false, fmt.Errorf("could not get real path: %v, %s", combined, err)
|
||||
}
|
||||
// Resolve symlinks and get the real path
|
||||
return resolveSymlinks(absolutePath)
|
||||
|
@ -279,8 +328,48 @@ func DeleteFiles(absPath string, opts FileOptions) error {
|
|||
if err != nil {
|
||||
return err
|
||||
}
|
||||
opts.Path = filepath.Dir(absPath)
|
||||
err = RefreshFileInfo(opts)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func MoveResource(realsrc, realdst string, isSrcDir bool) error {
|
||||
err := fileutils.MoveFile(realsrc, realdst)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// refresh info for source and dest
|
||||
err = RefreshFileInfo(FileOptions{
|
||||
Path: realsrc,
|
||||
IsDir: isSrcDir,
|
||||
})
|
||||
if err != nil {
|
||||
return errors.ErrEmptyKey
|
||||
}
|
||||
refreshConfig := FileOptions{Path: realdst, IsDir: true}
|
||||
if !isSrcDir {
|
||||
refreshConfig.Path = filepath.Dir(realdst)
|
||||
}
|
||||
err = RefreshFileInfo(refreshConfig)
|
||||
if err != nil {
|
||||
return errors.ErrEmptyKey
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func CopyResource(realsrc, realdst string, isSrcDir bool) error {
|
||||
err := fileutils.CopyFile(realsrc, realdst)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
refreshConfig := FileOptions{Path: realdst, IsDir: true}
|
||||
if !isSrcDir {
|
||||
refreshConfig.Path = filepath.Dir(realdst)
|
||||
}
|
||||
err = RefreshFileInfo(refreshConfig)
|
||||
if err != nil {
|
||||
return errors.ErrEmptyKey
|
||||
}
|
||||
|
@ -288,12 +377,12 @@ func DeleteFiles(absPath string, opts FileOptions) error {
|
|||
}
|
||||
|
||||
func WriteDirectory(opts FileOptions) error {
|
||||
realPath, _, _ := GetRealPath(rootPath, opts.Path)
|
||||
// Ensure the parent directories exist
|
||||
err := os.MkdirAll(opts.Path, 0775)
|
||||
err := os.MkdirAll(realPath, 0775)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
opts.Path = filepath.Dir(opts.Path)
|
||||
err = RefreshFileInfo(opts)
|
||||
if err != nil {
|
||||
return errors.ErrEmptyKey
|
||||
|
@ -339,7 +428,7 @@ func resolveSymlinks(path string) (string, bool, error) {
|
|||
// Get the file info
|
||||
info, err := os.Lstat(path)
|
||||
if err != nil {
|
||||
return "", false, err
|
||||
return path, false, fmt.Errorf("could not stat path: %v, %s", path, err)
|
||||
}
|
||||
|
||||
// Check if it's a symlink
|
||||
|
@ -347,7 +436,7 @@ func resolveSymlinks(path string) (string, bool, error) {
|
|||
// Read the symlink target
|
||||
target, err := os.Readlink(path)
|
||||
if err != nil {
|
||||
return "", false, err
|
||||
return path, false, err
|
||||
}
|
||||
|
||||
// Resolve the target relative to the symlink's directory
|
||||
|
@ -360,78 +449,83 @@ func resolveSymlinks(path string) (string, bool, error) {
|
|||
}
|
||||
|
||||
// addContent reads and sets content based on the file type.
|
||||
func (i *FileInfo) addContent(path string) error {
|
||||
if !i.IsDir {
|
||||
content, err := os.ReadFile(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
stringContent := string(content)
|
||||
if !utf8.ValidString(stringContent) {
|
||||
return nil
|
||||
}
|
||||
if stringContent == "" {
|
||||
i.Content = "empty-file-x6OlSil"
|
||||
return nil
|
||||
}
|
||||
i.Content = stringContent
|
||||
func getContent(path string) (string, error) {
|
||||
realPath, _, err := GetRealPath(rootPath, path)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return nil
|
||||
|
||||
content, err := os.ReadFile(realPath)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
stringContent := string(content)
|
||||
if !utf8.ValidString(stringContent) {
|
||||
return "", fmt.Errorf("file is not utf8 encoded")
|
||||
}
|
||||
if stringContent == "" {
|
||||
return "empty-file-x6OlSil", nil
|
||||
}
|
||||
return stringContent, nil
|
||||
}
|
||||
|
||||
// detectType detects the file type.
|
||||
func (i *FileInfo) detectType(path string, modify, saveContent, readHeader bool) error {
|
||||
if i.IsDir {
|
||||
return nil
|
||||
}
|
||||
if IsNamedPipe(i.Mode) {
|
||||
i.Type = "blob"
|
||||
if saveContent {
|
||||
return i.addContent(path)
|
||||
func (i *ReducedItem) detectType(path string, modify, saveContent, readHeader bool) error {
|
||||
name := i.Name
|
||||
var contentErr error
|
||||
var contentString string
|
||||
if saveContent {
|
||||
contentString, contentErr = getContent(path)
|
||||
if contentErr == nil {
|
||||
i.Content = contentString
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
if IsNamedPipe(i.Mode) {
|
||||
i.Type = "blob"
|
||||
return contentErr
|
||||
}
|
||||
|
||||
ext := filepath.Ext(name)
|
||||
var buffer []byte
|
||||
if readHeader {
|
||||
buffer = i.readFirstBytes()
|
||||
mimetype := mime.TypeByExtension(i.Extension)
|
||||
buffer = i.readFirstBytes(path)
|
||||
mimetype := mime.TypeByExtension(ext)
|
||||
if mimetype == "" {
|
||||
http.DetectContentType(buffer)
|
||||
}
|
||||
}
|
||||
|
||||
ext := filepath.Ext(i.Name)
|
||||
for _, fileType := range AllFiletypeOptions {
|
||||
if IsMatchingType(ext, fileType) {
|
||||
i.Type = fileType
|
||||
}
|
||||
|
||||
switch i.Type {
|
||||
case "text":
|
||||
if !modify {
|
||||
i.Type = "textImmutable"
|
||||
}
|
||||
if saveContent {
|
||||
return i.addContent(path)
|
||||
return contentErr
|
||||
}
|
||||
case "video":
|
||||
parentDir := strings.TrimRight(path, i.Name)
|
||||
i.detectSubtitles(parentDir)
|
||||
// TODO add back somewhere else, not during metadata fetch
|
||||
//parentDir := strings.TrimRight(path, name)
|
||||
//i.detectSubtitles(parentDir)
|
||||
case "doc":
|
||||
if ext == ".pdf" {
|
||||
i.Type = "pdf"
|
||||
return nil
|
||||
}
|
||||
if saveContent {
|
||||
return i.addContent(path)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
}
|
||||
if i.Type == "" {
|
||||
i.Type = "blob"
|
||||
if saveContent {
|
||||
return i.addContent(path)
|
||||
return contentErr
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -439,8 +533,8 @@ func (i *FileInfo) detectType(path string, modify, saveContent, readHeader bool)
|
|||
}
|
||||
|
||||
// readFirstBytes reads the first bytes of the file.
|
||||
func (i *FileInfo) readFirstBytes() []byte {
|
||||
file, err := os.Open(i.Path)
|
||||
func (i *ReducedItem) readFirstBytes(path string) []byte {
|
||||
file, err := os.Open(path)
|
||||
if err != nil {
|
||||
i.Type = "blob"
|
||||
return nil
|
||||
|
@ -458,113 +552,42 @@ func (i *FileInfo) readFirstBytes() []byte {
|
|||
}
|
||||
|
||||
// detectSubtitles detects subtitles for video files.
|
||||
func (i *FileInfo) detectSubtitles(parentDir string) {
|
||||
if i.Type != "video" {
|
||||
return
|
||||
}
|
||||
i.Subtitles = []string{}
|
||||
ext := filepath.Ext(i.Name)
|
||||
dir, err := os.Open(parentDir)
|
||||
if err != nil {
|
||||
// Directory must have been deleted, remove it from the index
|
||||
return
|
||||
}
|
||||
defer dir.Close() // Ensure directory handle is closed
|
||||
|
||||
files, err := dir.Readdir(-1)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
base := strings.TrimSuffix(i.Name, ext)
|
||||
subtitleExts := []string{".vtt", ".txt", ".srt", ".lrc"}
|
||||
|
||||
for _, f := range files {
|
||||
if f.IsDir() || !strings.HasPrefix(f.Name(), base) {
|
||||
continue
|
||||
}
|
||||
|
||||
for _, subtitleExt := range subtitleExts {
|
||||
if strings.HasSuffix(f.Name(), subtitleExt) {
|
||||
i.Subtitles = append(i.Subtitles, filepath.Join(parentDir, f.Name()))
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// readListing reads the contents of a directory and fills the listing.
|
||||
func (i *FileInfo) readListing(path string, checker rules.Checker, readHeader bool) error {
|
||||
dir, err := os.Open(i.Path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer dir.Close()
|
||||
|
||||
files, err := dir.Readdir(-1)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
listing := &FileInfo{
|
||||
Items: []*FileInfo{},
|
||||
NumDirs: 0,
|
||||
NumFiles: 0,
|
||||
}
|
||||
|
||||
for _, f := range files {
|
||||
name := f.Name()
|
||||
fPath := filepath.Join(i.Path, name)
|
||||
|
||||
if !checker.Check(fPath) {
|
||||
continue
|
||||
}
|
||||
|
||||
isSymlink, isInvalidLink := false, false
|
||||
if IsSymlink(f.Mode()) {
|
||||
isSymlink = true
|
||||
info, err := os.Stat(fPath)
|
||||
if err == nil {
|
||||
f = info
|
||||
} else {
|
||||
isInvalidLink = true
|
||||
}
|
||||
}
|
||||
|
||||
file := &FileInfo{
|
||||
Name: name,
|
||||
Size: f.Size(),
|
||||
ModTime: f.ModTime(),
|
||||
Mode: f.Mode(),
|
||||
}
|
||||
if f.IsDir() {
|
||||
file.IsDir = true
|
||||
}
|
||||
if isSymlink {
|
||||
file.IsSymlink = true
|
||||
}
|
||||
|
||||
if file.IsDir {
|
||||
listing.NumDirs++
|
||||
} else {
|
||||
listing.NumFiles++
|
||||
|
||||
if isInvalidLink {
|
||||
file.Type = "invalid_link"
|
||||
} else {
|
||||
err := file.detectType(path, true, false, readHeader)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
listing.Items = append(listing.Items, file)
|
||||
}
|
||||
|
||||
i.Items = listing.Items
|
||||
return nil
|
||||
}
|
||||
//func (i *FileInfo) detectSubtitles(path string) {
|
||||
// if i.Type != "video" {
|
||||
// return
|
||||
// }
|
||||
// parentDir := filepath.Dir(path)
|
||||
// fileName := filepath.Base(path)
|
||||
// i.Subtitles = []string{}
|
||||
// ext := filepath.Ext(fileName)
|
||||
// dir, err := os.Open(parentDir)
|
||||
// if err != nil {
|
||||
// // Directory must have been deleted, remove it from the index
|
||||
// return
|
||||
// }
|
||||
// defer dir.Close() // Ensure directory handle is closed
|
||||
//
|
||||
// files, err := dir.Readdir(-1)
|
||||
// if err != nil {
|
||||
// return
|
||||
// }
|
||||
//
|
||||
// base := strings.TrimSuffix(fileName, ext)
|
||||
// subtitleExts := []string{".vtt", ".txt", ".srt", ".lrc"}
|
||||
//
|
||||
// for _, f := range files {
|
||||
// if f.IsDir() || !strings.HasPrefix(f.Name(), base) {
|
||||
// continue
|
||||
// }
|
||||
//
|
||||
// for _, subtitleExt := range subtitleExts {
|
||||
// if strings.HasSuffix(f.Name(), subtitleExt) {
|
||||
// i.Subtitles = append(i.Subtitles, filepath.Join(parentDir, f.Name()))
|
||||
// break
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
//}
|
||||
|
||||
func IsNamedPipe(mode os.FileMode) bool {
|
||||
return mode&os.ModeNamedPipe != 0
|
||||
|
|
|
@ -63,14 +63,11 @@ func Test_GetRealPath(t *testing.T) {
|
|||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
realPath, isDir, err := GetRealPath(tt.paths...)
|
||||
realPath, isDir, _ := GetRealPath(tt.paths...)
|
||||
adjustedRealPath := strings.TrimPrefix(realPath, trimPrefix)
|
||||
if tt.want.path != adjustedRealPath || tt.want.isDir != isDir {
|
||||
t.Errorf("expected %v:%v but got: %v:%v", tt.want.path, tt.want.isDir, adjustedRealPath, isDir)
|
||||
}
|
||||
if err != nil {
|
||||
t.Error("got error", err)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
@ -13,7 +13,7 @@ import (
|
|||
|
||||
type Index struct {
|
||||
Root string
|
||||
Directories map[string]FileInfo
|
||||
Directories map[string]*FileInfo
|
||||
NumDirs int
|
||||
NumFiles int
|
||||
inProgress bool
|
||||
|
@ -43,7 +43,7 @@ func indexingScheduler(intervalMinutes uint32) {
|
|||
// Set the indexing flag to indicate that indexing is in progress
|
||||
si.resetCount()
|
||||
// Perform the indexing operation
|
||||
err := si.indexFiles(si.Root)
|
||||
err := si.indexFiles("/")
|
||||
// Reset the indexing flag to indicate that indexing has finished
|
||||
si.inProgress = false
|
||||
// Update the LastIndexed time
|
||||
|
@ -64,15 +64,13 @@ func indexingScheduler(intervalMinutes uint32) {
|
|||
}
|
||||
|
||||
// indexFiles recursively indexes files and directories
|
||||
func (si *Index) indexFiles(path string) error {
|
||||
// Ensure path is cleaned and normalized
|
||||
adjustedPath := si.makeIndexPath(path, true)
|
||||
func (si *Index) indexFiles(adjustedPath string) error {
|
||||
realPath := strings.TrimRight(si.Root, "/") + adjustedPath
|
||||
|
||||
// Open the directory
|
||||
dir, err := os.Open(path)
|
||||
dir, err := os.Open(realPath)
|
||||
if err != nil {
|
||||
// If the directory can't be opened (e.g., deleted), remove it from the index
|
||||
si.RemoveDirectory(adjustedPath)
|
||||
si.RemoveDirectory(adjustedPath) // Remove if it can't be opened
|
||||
return err
|
||||
}
|
||||
defer dir.Close()
|
||||
|
@ -82,7 +80,7 @@ func (si *Index) indexFiles(path string) error {
|
|||
return err
|
||||
}
|
||||
|
||||
// Check if the directory is already up-to-date
|
||||
// Skip directories that haven't been modified since the last index
|
||||
if dirInfo.ModTime().Before(si.LastIndexed) {
|
||||
return nil
|
||||
}
|
||||
|
@ -93,90 +91,73 @@ func (si *Index) indexFiles(path string) error {
|
|||
return err
|
||||
}
|
||||
|
||||
// Recursively process files and directories
|
||||
fileInfos := []*FileInfo{}
|
||||
var totalSize int64
|
||||
var numDirs, numFiles int
|
||||
fileInfos := []ReducedItem{}
|
||||
dirInfos := map[string]*FileInfo{}
|
||||
combinedPath := adjustedPath + "/"
|
||||
if adjustedPath == "/" {
|
||||
combinedPath = "/"
|
||||
}
|
||||
|
||||
// Process each file and directory in the current directory
|
||||
for _, file := range files {
|
||||
parentInfo := &FileInfo{
|
||||
Name: file.Name(),
|
||||
Size: file.Size(),
|
||||
itemInfo := &FileInfo{
|
||||
ModTime: file.ModTime(),
|
||||
IsDir: file.IsDir(),
|
||||
}
|
||||
childInfo, err := si.InsertInfo(path, parentInfo)
|
||||
if err != nil {
|
||||
// Log error, but continue processing other files
|
||||
continue
|
||||
}
|
||||
|
||||
// Accumulate directory size and items
|
||||
totalSize += childInfo.Size
|
||||
if childInfo.IsDir {
|
||||
if file.IsDir() {
|
||||
itemInfo.Name = file.Name()
|
||||
itemInfo.Path = combinedPath + file.Name()
|
||||
// Recursively index the subdirectory
|
||||
err := si.indexFiles(itemInfo.Path)
|
||||
if err != nil {
|
||||
log.Printf("Failed to index directory %s: %v", itemInfo.Path, err)
|
||||
continue
|
||||
}
|
||||
// Fetch the metadata for the subdirectory after indexing
|
||||
subDirInfo, exists := si.GetMetadataInfo(itemInfo.Path, true)
|
||||
if exists {
|
||||
itemInfo.Size = subDirInfo.Size
|
||||
totalSize += subDirInfo.Size // Add subdirectory size to the total
|
||||
}
|
||||
dirInfos[itemInfo.Name] = itemInfo
|
||||
numDirs++
|
||||
} else {
|
||||
itemInfo := &ReducedItem{
|
||||
Name: file.Name(),
|
||||
ModTime: file.ModTime(),
|
||||
Size: file.Size(),
|
||||
Mode: file.Mode(),
|
||||
}
|
||||
_ = itemInfo.detectType(combinedPath+file.Name(), true, false, false)
|
||||
fileInfos = append(fileInfos, *itemInfo)
|
||||
totalSize += itemInfo.Size
|
||||
numFiles++
|
||||
}
|
||||
_ = childInfo.detectType(path, true, false, false)
|
||||
fileInfos = append(fileInfos, childInfo)
|
||||
}
|
||||
|
||||
// Create FileInfo for the current directory
|
||||
dirFileInfo := &FileInfo{
|
||||
Items: fileInfos,
|
||||
Name: filepath.Base(path),
|
||||
Size: totalSize,
|
||||
ModTime: dirInfo.ModTime(),
|
||||
CacheTime: time.Now(),
|
||||
IsDir: true,
|
||||
NumDirs: numDirs,
|
||||
NumFiles: numFiles,
|
||||
Path: adjustedPath,
|
||||
Files: fileInfos,
|
||||
Dirs: dirInfos,
|
||||
Size: totalSize,
|
||||
ModTime: dirInfo.ModTime(),
|
||||
}
|
||||
|
||||
// Add directory to index
|
||||
si.mu.Lock()
|
||||
si.Directories[adjustedPath] = *dirFileInfo
|
||||
// Update the current directory metadata in the index
|
||||
si.UpdateMetadata(dirFileInfo)
|
||||
si.NumDirs += numDirs
|
||||
si.NumFiles += numFiles
|
||||
si.mu.Unlock()
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// InsertInfo function to handle adding a file or directory into the index
|
||||
func (si *Index) InsertInfo(parentPath string, file *FileInfo) (*FileInfo, error) {
|
||||
filePath := filepath.Join(parentPath, file.Name)
|
||||
|
||||
// Check if it's a directory and recursively index it
|
||||
if file.IsDir {
|
||||
// Recursively index directory
|
||||
err := si.indexFiles(filePath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Return directory info from the index
|
||||
adjustedPath := si.makeIndexPath(filePath, true)
|
||||
si.mu.RLock()
|
||||
dirInfo := si.Directories[adjustedPath]
|
||||
si.mu.RUnlock()
|
||||
return &dirInfo, nil
|
||||
func (si *Index) makeIndexPath(subPath string) string {
|
||||
if strings.HasPrefix(subPath, "./") {
|
||||
subPath = strings.TrimPrefix(subPath, ".")
|
||||
}
|
||||
|
||||
// Create FileInfo for regular files
|
||||
fileInfo := &FileInfo{
|
||||
Path: filePath,
|
||||
Name: file.Name,
|
||||
Size: file.Size,
|
||||
ModTime: file.ModTime,
|
||||
IsDir: false,
|
||||
}
|
||||
|
||||
return fileInfo, nil
|
||||
}
|
||||
|
||||
func (si *Index) makeIndexPath(subPath string, isDir bool) string {
|
||||
if si.Root == subPath {
|
||||
if strings.HasPrefix(subPath, ".") || si.Root == subPath {
|
||||
return "/"
|
||||
}
|
||||
// clean path
|
||||
|
@ -185,14 +166,39 @@ func (si *Index) makeIndexPath(subPath string, isDir bool) string {
|
|||
adjustedPath := strings.TrimPrefix(subPath, si.Root)
|
||||
// remove trailing slash
|
||||
adjustedPath = strings.TrimSuffix(adjustedPath, "/")
|
||||
// add leading slash for root of index
|
||||
if adjustedPath == "" {
|
||||
adjustedPath = "/"
|
||||
} else if !isDir {
|
||||
adjustedPath = filepath.Dir(adjustedPath)
|
||||
}
|
||||
if !strings.HasPrefix(adjustedPath, "/") {
|
||||
adjustedPath = "/" + adjustedPath
|
||||
}
|
||||
return adjustedPath
|
||||
}
|
||||
|
||||
//func getParentPath(path string) string {
|
||||
// // Trim trailing slash for consistency
|
||||
// path = strings.TrimSuffix(path, "/")
|
||||
// if path == "" || path == "/" {
|
||||
// return "" // Root has no parent
|
||||
// }
|
||||
//
|
||||
// lastSlash := strings.LastIndex(path, "/")
|
||||
// if lastSlash == -1 {
|
||||
// return "/" // Parent of a top-level directory
|
||||
// }
|
||||
// return path[:lastSlash]
|
||||
//}
|
||||
|
||||
func (si *Index) recursiveUpdateDirSizes(parentDir string, childInfo *FileInfo, previousSize int64) {
|
||||
childDirName := filepath.Base(childInfo.Path)
|
||||
if parentDir == childDirName {
|
||||
return
|
||||
}
|
||||
dir, exists := si.GetMetadataInfo(parentDir, true)
|
||||
if !exists {
|
||||
return
|
||||
}
|
||||
dir.Dirs[childDirName] = childInfo
|
||||
newSize := dir.Size - previousSize + childInfo.Size
|
||||
dir.Size += newSize
|
||||
si.UpdateMetadata(dir)
|
||||
dir, _ = si.GetMetadataInfo(parentDir, true)
|
||||
si.recursiveUpdateDirSizes(filepath.Dir(parentDir), dir, newSize)
|
||||
}
|
||||
|
|
|
@ -2,8 +2,8 @@ package files
|
|||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math/rand"
|
||||
"path/filepath"
|
||||
"reflect"
|
||||
"testing"
|
||||
"time"
|
||||
|
@ -23,27 +23,26 @@ func BenchmarkFillIndex(b *testing.B) {
|
|||
|
||||
func (si *Index) createMockData(numDirs, numFilesPerDir int) {
|
||||
for i := 0; i < numDirs; i++ {
|
||||
dirName := generateRandomPath(rand.Intn(3) + 1)
|
||||
files := []*FileInfo{} // Slice of FileInfo
|
||||
dirPath := generateRandomPath(rand.Intn(3) + 1)
|
||||
files := []ReducedItem{} // Slice of FileInfo
|
||||
|
||||
// Simulating files and directories with FileInfo
|
||||
for j := 0; j < numFilesPerDir; j++ {
|
||||
newFile := &FileInfo{
|
||||
newFile := ReducedItem{
|
||||
Name: "file-" + getRandomTerm() + getRandomExtension(),
|
||||
IsDir: false,
|
||||
Size: rand.Int63n(1000), // Random size
|
||||
ModTime: time.Now().Add(-time.Duration(rand.Intn(100)) * time.Hour), // Random mod time
|
||||
Type: "blob",
|
||||
}
|
||||
files = append(files, newFile)
|
||||
}
|
||||
|
||||
// Simulate inserting files into index
|
||||
for _, file := range files {
|
||||
_, err := si.InsertInfo(dirName, file)
|
||||
if err != nil {
|
||||
fmt.Println("Error inserting file:", err)
|
||||
}
|
||||
dirInfo := &FileInfo{
|
||||
Name: filepath.Base(dirPath),
|
||||
Path: dirPath,
|
||||
Files: files,
|
||||
}
|
||||
|
||||
si.UpdateMetadata(dirInfo)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,12 +1,12 @@
|
|||
package files
|
||||
|
||||
import (
|
||||
"math/rand"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/gtsteffaniak/filebrowser/utils"
|
||||
)
|
||||
|
||||
var (
|
||||
|
@ -14,123 +14,116 @@ var (
|
|||
maxSearchResults = 100
|
||||
)
|
||||
|
||||
func (si *Index) Search(search string, scope string, sourceSession string) ([]string, map[string]map[string]bool) {
|
||||
type searchResult struct {
|
||||
Path string `json:"path"`
|
||||
Type string `json:"type"`
|
||||
Size int64 `json:"size"`
|
||||
}
|
||||
|
||||
func (si *Index) Search(search string, scope string, sourceSession string) []searchResult {
|
||||
// Remove slashes
|
||||
scope = strings.TrimLeft(scope, "/")
|
||||
scope = strings.TrimRight(scope, "/")
|
||||
runningHash := generateRandomHash(4)
|
||||
scope = si.makeIndexPath(scope)
|
||||
runningHash := utils.GenerateRandomHash(4)
|
||||
sessionInProgress.Store(sourceSession, runningHash) // Store the value in the sync.Map
|
||||
searchOptions := ParseSearch(search)
|
||||
fileListTypes := make(map[string]map[string]bool)
|
||||
matching := []string{}
|
||||
results := make(map[string]searchResult, 0)
|
||||
count := 0
|
||||
directories := si.getDirsInScope(scope)
|
||||
for _, searchTerm := range searchOptions.Terms {
|
||||
if searchTerm == "" {
|
||||
continue
|
||||
}
|
||||
if count > maxSearchResults {
|
||||
break
|
||||
}
|
||||
si.mu.Lock()
|
||||
for dirName, dir := range si.Directories {
|
||||
isDir := true
|
||||
files := []string{}
|
||||
for _, item := range dir.Items {
|
||||
if !item.IsDir {
|
||||
files = append(files, item.Name)
|
||||
}
|
||||
}
|
||||
value, found := sessionInProgress.Load(sourceSession)
|
||||
if !found || value != runningHash {
|
||||
si.mu.Unlock()
|
||||
return []string{}, map[string]map[string]bool{}
|
||||
for _, dirName := range directories {
|
||||
si.mu.Unlock()
|
||||
dir, found := si.GetReducedMetadata(dirName, true)
|
||||
si.mu.Lock()
|
||||
if !found {
|
||||
continue
|
||||
}
|
||||
if count > maxSearchResults {
|
||||
break
|
||||
}
|
||||
pathName := scopedPathNameFilter(dirName, scope, isDir)
|
||||
if pathName == "" {
|
||||
continue // path not matched
|
||||
reducedDir := ReducedItem{
|
||||
Name: filepath.Base(dirName),
|
||||
Type: "directory",
|
||||
Size: dir.Size,
|
||||
}
|
||||
fileTypes := map[string]bool{}
|
||||
si.mu.Unlock()
|
||||
matches, fileType := si.containsSearchTerm(dirName, searchTerm, *searchOptions, isDir, fileTypes)
|
||||
si.mu.Lock()
|
||||
|
||||
matches := reducedDir.containsSearchTerm(searchTerm, searchOptions)
|
||||
if matches {
|
||||
fileListTypes[pathName] = fileType
|
||||
matching = append(matching, pathName)
|
||||
scopedPath := strings.TrimPrefix(strings.TrimPrefix(dirName, scope), "/") + "/"
|
||||
results[scopedPath] = searchResult{Path: scopedPath, Type: "directory", Size: dir.Size}
|
||||
count++
|
||||
}
|
||||
isDir = false
|
||||
for _, file := range files {
|
||||
if file == "" {
|
||||
continue
|
||||
|
||||
// search files first
|
||||
for _, item := range dir.Items {
|
||||
|
||||
fullPath := dirName + "/" + item.Name
|
||||
if item.Type == "directory" {
|
||||
fullPath += "/"
|
||||
}
|
||||
value, found := sessionInProgress.Load(sourceSession)
|
||||
if !found || value != runningHash {
|
||||
return []string{}, map[string]map[string]bool{}
|
||||
si.mu.Unlock()
|
||||
return []searchResult{}
|
||||
}
|
||||
|
||||
if count > maxSearchResults {
|
||||
break
|
||||
}
|
||||
fullName := strings.TrimLeft(pathName+file, "/")
|
||||
fileTypes := map[string]bool{}
|
||||
si.mu.Unlock()
|
||||
matches, fileType := si.containsSearchTerm(fullName, searchTerm, *searchOptions, isDir, fileTypes)
|
||||
si.mu.Lock()
|
||||
if !matches {
|
||||
continue
|
||||
matches := item.containsSearchTerm(searchTerm, searchOptions)
|
||||
if matches {
|
||||
scopedPath := strings.TrimPrefix(strings.TrimPrefix(fullPath, scope), "/")
|
||||
results[scopedPath] = searchResult{Path: scopedPath, Type: item.Type, Size: item.Size}
|
||||
count++
|
||||
}
|
||||
fileListTypes[fullName] = fileType
|
||||
matching = append(matching, fullName)
|
||||
count++
|
||||
}
|
||||
}
|
||||
si.mu.Unlock()
|
||||
}
|
||||
|
||||
// Sort keys based on the number of elements in the path after splitting by "/"
|
||||
sortedKeys := make([]searchResult, 0, len(results))
|
||||
for _, v := range results {
|
||||
sortedKeys = append(sortedKeys, v)
|
||||
}
|
||||
// Sort the strings based on the number of elements after splitting by "/"
|
||||
sort.Slice(matching, func(i, j int) bool {
|
||||
parts1 := strings.Split(matching[i], "/")
|
||||
parts2 := strings.Split(matching[j], "/")
|
||||
sort.Slice(sortedKeys, func(i, j int) bool {
|
||||
parts1 := strings.Split(sortedKeys[i].Path, "/")
|
||||
parts2 := strings.Split(sortedKeys[j].Path, "/")
|
||||
return len(parts1) < len(parts2)
|
||||
})
|
||||
return matching, fileListTypes
|
||||
return sortedKeys
|
||||
}
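A hedged sketch of calling the refactored Search, which now returns a depth-sorted []searchResult (path, type, size) instead of a path slice plus a type map. Because searchResult is unexported, the sketch assumes it lives in the same package, with an initialized *Index and fmt imported:

```go
func printMatches(si *Index) {
	// The term and scope mirror the test cases in this commit; the session id
	// only lets a newer search cancel an older one, so "" is fine here.
	for _, r := range si.Search("audio", "/new", "") {
		fmt.Printf("%s\t%s\t%d bytes\n", r.Path, r.Type, r.Size)
	}
}
```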
|
||||
|
||||
func scopedPathNameFilter(pathName string, scope string, isDir bool) string {
|
||||
pathName = strings.TrimLeft(pathName, "/")
|
||||
pathName = strings.TrimRight(pathName, "/")
|
||||
if strings.HasPrefix(pathName, scope) || scope == "" {
|
||||
pathName = strings.TrimPrefix(pathName, scope)
|
||||
pathName = strings.TrimLeft(pathName, "/")
|
||||
if isDir {
|
||||
pathName = pathName + "/"
|
||||
}
|
||||
} else {
|
||||
pathName = "" // return not matched
|
||||
}
|
||||
return pathName
|
||||
}
|
||||
// returns true if the file name contains the search term
|
||||
// returns file type if the file name contains the search term
|
||||
// returns size of file/dir if the file name contains the search term
|
||||
func (fi ReducedItem) containsSearchTerm(searchTerm string, options *SearchOptions) bool {
|
||||
|
||||
func (si *Index) containsSearchTerm(pathName string, searchTerm string, options SearchOptions, isDir bool, fileTypes map[string]bool) (bool, map[string]bool) {
|
||||
fileTypes := map[string]bool{}
|
||||
largerThan := int64(options.LargerThan) * 1024 * 1024
|
||||
smallerThan := int64(options.SmallerThan) * 1024 * 1024
|
||||
conditions := options.Conditions
|
||||
fileName := filepath.Base(pathName)
|
||||
adjustedPath := si.makeIndexPath(pathName, isDir)
|
||||
lowerFileName := strings.ToLower(fi.Name)
|
||||
|
||||
// Convert to lowercase if not exact match
|
||||
if !conditions["exact"] {
|
||||
fileName = strings.ToLower(fileName)
|
||||
searchTerm = strings.ToLower(searchTerm)
|
||||
}
|
||||
|
||||
// Check if the file name contains the search term
|
||||
if !strings.Contains(fileName, searchTerm) {
|
||||
return false, map[string]bool{}
|
||||
if !strings.Contains(lowerFileName, searchTerm) {
|
||||
return false
|
||||
}
|
||||
|
||||
// Initialize file size and fileTypes map
|
||||
var fileSize int64
|
||||
extension := filepath.Ext(fileName)
|
||||
extension := filepath.Ext(lowerFileName)
|
||||
|
||||
// Collect file types
|
||||
for _, k := range AllFiletypeOptions {
|
||||
|
@ -138,31 +131,9 @@ func (si *Index) containsSearchTerm(pathName string, searchTerm string, options
|
|||
fileTypes[k] = true
|
||||
}
|
||||
}
|
||||
isDir := fi.Type == "directory"
|
||||
fileTypes["dir"] = isDir
|
||||
// Get file info if needed for size-related conditions
|
||||
if largerThan > 0 || smallerThan > 0 {
|
||||
fileInfo, exists := si.GetMetadataInfo(adjustedPath)
|
||||
if !exists {
|
||||
return false, fileTypes
|
||||
} else if !isDir {
|
||||
// Look for specific file in ReducedItems
|
||||
for _, item := range fileInfo.ReducedItems {
|
||||
lower := strings.ToLower(item.Name)
|
||||
if strings.Contains(lower, searchTerm) {
|
||||
if item.Size == 0 {
|
||||
return false, fileTypes
|
||||
}
|
||||
fileSize = item.Size
|
||||
break
|
||||
}
|
||||
}
|
||||
} else {
|
||||
fileSize = fileInfo.Size
|
||||
}
|
||||
if fileSize == 0 {
|
||||
return false, fileTypes
|
||||
}
|
||||
}
|
||||
fileSize = fi.Size
|
||||
|
||||
// Evaluate all conditions
|
||||
for t, v := range conditions {
|
||||
|
@ -173,33 +144,35 @@ func (si *Index) containsSearchTerm(pathName string, searchTerm string, options
|
|||
case "larger":
|
||||
if largerThan > 0 {
|
||||
if fileSize <= largerThan {
|
||||
return false, fileTypes
|
||||
return false
|
||||
}
|
||||
}
|
||||
case "smaller":
|
||||
if smallerThan > 0 {
|
||||
if fileSize >= smallerThan {
|
||||
return false, fileTypes
|
||||
return false
|
||||
}
|
||||
}
|
||||
default:
|
||||
// Handle other file type conditions
|
||||
notMatchType := v != fileTypes[t]
|
||||
if notMatchType {
|
||||
return false, fileTypes
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true, fileTypes
|
||||
return true
|
||||
}
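containsSearchTerm is now a method on ReducedItem: the name check is case-insensitive unless the "exact" condition is set, and size/type conditions are evaluated against the item itself. A minimal in-package sketch with an empty condition set (field names come from this file; the values are illustrative):

```go
func exampleMatch() bool {
	opts := &SearchOptions{Terms: []string{"clip"}, Conditions: map[string]bool{}}
	item := ReducedItem{Name: "Clip.mp4", Type: "video", Size: 5 << 20}
	// With no conditions set, only the case-insensitive name check applies.
	return item.containsSearchTerm("clip", opts)
}
```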
|
||||
|
||||
func generateRandomHash(length int) string {
|
||||
const charset = "abcdefghijklmnopqrstuvwxyz0123456789"
|
||||
rand.New(rand.NewSource(time.Now().UnixNano()))
|
||||
result := make([]byte, length)
|
||||
for i := range result {
|
||||
result[i] = charset[rand.Intn(len(charset))]
|
||||
func (si *Index) getDirsInScope(scope string) []string {
|
||||
newList := []string{}
|
||||
si.mu.Lock()
|
||||
defer si.mu.Unlock()
|
||||
for k := range si.Directories {
|
||||
if strings.HasPrefix(k, scope) || scope == "" {
|
||||
newList = append(newList, k)
|
||||
}
|
||||
}
|
||||
return string(result)
|
||||
return newList
|
||||
}
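getDirsInScope is a plain prefix filter over the index keys, taken under the index lock. A hedged in-package sketch:

```go
func dirsUnder(si *Index) []string {
	// e.g. with "/new", "/new/test" and "/test" indexed, only the first two
	// are returned; ordering follows map iteration and is not guaranteed.
	return si.getDirsInScope("/new")
}
```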
|
||||
|
|
|
@ -88,22 +88,26 @@ func TestSearchWhileIndexing(t *testing.T) {
|
|||
|
||||
func TestSearchIndexes(t *testing.T) {
|
||||
index := Index{
|
||||
Directories: map[string]FileInfo{
|
||||
"test": {Items: []*FileInfo{{Name: "audio1.wav"}}},
|
||||
"test/path": {Items: []*FileInfo{{Name: "file.txt"}}},
|
||||
"new/test": {Items: []*FileInfo{
|
||||
{Name: "audio.wav"},
|
||||
{Name: "video.mp4"},
|
||||
{Name: "video.MP4"},
|
||||
}},
|
||||
"new/test/path": {Items: []*FileInfo{{Name: "archive.zip"}}},
|
||||
"/firstDir": {Items: []*FileInfo{
|
||||
{Name: "archive.zip", Size: 100},
|
||||
{Name: "thisIsDir", IsDir: true, Size: 2 * 1024 * 1024},
|
||||
Directories: map[string]*FileInfo{
|
||||
"/test": {Files: []ReducedItem{{Name: "audio1.wav", Type: "audio"}}},
|
||||
"/test/path": {Files: []ReducedItem{{Name: "file.txt", Type: "text"}}},
|
||||
"/new/test": {Files: []ReducedItem{
|
||||
{Name: "audio.wav", Type: "audio"},
|
||||
{Name: "video.mp4", Type: "video"},
|
||||
{Name: "video.MP4", Type: "video"},
|
||||
}},
|
||||
"/new/test/path": {Files: []ReducedItem{{Name: "archive.zip", Type: "archive"}}},
|
||||
"/firstDir": {
|
||||
Files: []ReducedItem{
|
||||
{Name: "archive.zip", Size: 100, Type: "archive"},
|
||||
},
|
||||
Dirs: map[string]*FileInfo{
|
||||
"thisIsDir": {Name: "thisIsDir", Size: 2 * 1024 * 1024},
|
||||
},
|
||||
},
|
||||
"/firstDir/thisIsDir": {
|
||||
Items: []*FileInfo{
|
||||
{Name: "hi.txt"},
|
||||
Files: []ReducedItem{
|
||||
{Name: "hi.txt", Type: "text"},
|
||||
},
|
||||
Size: 2 * 1024 * 1024,
|
||||
},
|
||||
|
@ -113,112 +117,106 @@ func TestSearchIndexes(t *testing.T) {
|
|||
tests := []struct {
|
||||
search string
|
||||
scope string
|
||||
expectedResult []string
|
||||
expectedTypes map[string]map[string]bool
|
||||
expectedResult []searchResult
|
||||
}{
|
||||
{
|
||||
search: "audio",
|
||||
scope: "/new/",
|
||||
expectedResult: []string{"test/audio.wav"},
|
||||
expectedTypes: map[string]map[string]bool{
|
||||
"test/audio.wav": {"audio": true, "dir": false},
|
||||
search: "audio",
|
||||
scope: "/new/",
|
||||
expectedResult: []searchResult{
|
||||
{
|
||||
Path: "test/audio.wav",
|
||||
Type: "audio",
|
||||
Size: 0,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
search: "test",
|
||||
scope: "/",
|
||||
expectedResult: []string{"test/", "new/test/"},
|
||||
expectedTypes: map[string]map[string]bool{
|
||||
"test/": {"dir": true},
|
||||
"new/test/": {"dir": true},
|
||||
search: "test",
|
||||
scope: "/",
|
||||
expectedResult: []searchResult{
|
||||
{
|
||||
Path: "test/",
|
||||
Type: "directory",
|
||||
Size: 0,
|
||||
},
|
||||
{
|
||||
Path: "new/test/",
|
||||
Type: "directory",
|
||||
Size: 0,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
search: "archive",
|
||||
scope: "/",
|
||||
expectedResult: []string{"firstDir/archive.zip", "new/test/path/archive.zip"},
|
||||
expectedTypes: map[string]map[string]bool{
|
||||
"new/test/path/archive.zip": {"archive": true, "dir": false},
|
||||
"firstDir/archive.zip": {"archive": true, "dir": false},
|
||||
search: "archive",
|
||||
scope: "/",
|
||||
expectedResult: []searchResult{
|
||||
{
|
||||
Path: "firstDir/archive.zip",
|
||||
Type: "archive",
|
||||
Size: 100,
|
||||
},
|
||||
{
|
||||
Path: "new/test/path/archive.zip",
|
||||
Type: "archive",
|
||||
Size: 0,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
search: "arch",
|
||||
scope: "/firstDir",
|
||||
expectedResult: []string{"archive.zip"},
|
||||
expectedTypes: map[string]map[string]bool{
|
||||
"archive.zip": {"archive": true, "dir": false},
|
||||
search: "arch",
|
||||
scope: "/firstDir",
|
||||
expectedResult: []searchResult{
|
||||
{
|
||||
Path: "archive.zip",
|
||||
Type: "archive",
|
||||
Size: 100,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
search: "isdir",
|
||||
scope: "/",
|
||||
expectedResult: []string{"firstDir/thisIsDir/"},
|
||||
expectedTypes: map[string]map[string]bool{
|
||||
"firstDir/thisIsDir/": {"dir": true},
|
||||
search: "isdir",
|
||||
scope: "/",
|
||||
expectedResult: []searchResult{
|
||||
{
|
||||
Path: "firstDir/thisIsDir/",
|
||||
Type: "directory",
|
||||
Size: 2097152,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
search: "dir type:largerThan=1",
|
||||
scope: "/",
|
||||
expectedResult: []string{"firstDir/thisIsDir/"},
|
||||
expectedTypes: map[string]map[string]bool{
|
||||
"firstDir/thisIsDir/": {"dir": true},
|
||||
search: "IsDir type:largerThan=1",
|
||||
scope: "/",
|
||||
expectedResult: []searchResult{
|
||||
{
|
||||
Path: "firstDir/thisIsDir/",
|
||||
Type: "directory",
|
||||
Size: 2097152,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
search: "video",
|
||||
scope: "/",
|
||||
expectedResult: []string{
|
||||
"new/test/video.mp4",
|
||||
"new/test/video.MP4",
|
||||
},
|
||||
expectedTypes: map[string]map[string]bool{
|
||||
"new/test/video.MP4": {"video": true, "dir": false},
|
||||
"new/test/video.mp4": {"video": true, "dir": false},
|
||||
expectedResult: []searchResult{
|
||||
{
|
||||
Path: "new/test/video.MP4",
|
||||
Type: "video",
|
||||
Size: 0,
|
||||
},
|
||||
{
|
||||
Path: "new/test/video.mp4",
|
||||
Type: "video",
|
||||
Size: 0,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.search, func(t *testing.T) {
|
||||
actualResult, actualTypes := index.Search(tt.search, tt.scope, "")
|
||||
assert.Equal(t, tt.expectedResult, actualResult)
|
||||
assert.Equal(t, tt.expectedTypes, actualTypes)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func Test_scopedPathNameFilter(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
args struct {
|
||||
pathName string
|
||||
scope string
|
||||
isDir bool // Assuming isDir should be included in args
|
||||
}
|
||||
want string
|
||||
}{
|
||||
{
|
||||
name: "scope test",
|
||||
args: struct {
|
||||
pathName string
|
||||
scope string
|
||||
isDir bool
|
||||
}{
|
||||
pathName: "/",
|
||||
scope: "/",
|
||||
isDir: false,
|
||||
},
|
||||
want: "", // Update this with the expected result
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
if got := scopedPathNameFilter(tt.args.pathName, tt.args.scope, tt.args.isDir); got != tt.want {
|
||||
t.Errorf("scopedPathNameFilter() = %v, want %v", got, tt.want)
|
||||
}
|
||||
result := index.Search(tt.search, tt.scope, "")
|
||||
assert.Equal(t, tt.expectedResult, result)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,72 +2,89 @@ package files
|
|||
|
||||
import (
|
||||
"log"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"time"
|
||||
|
||||
"github.com/gtsteffaniak/filebrowser/settings"
|
||||
)
|
||||
|
||||
// UpdateFileMetadata updates the FileInfo for the specified directory in the index.
|
||||
func (si *Index) UpdateFileMetadata(adjustedPath string, info FileInfo) bool {
|
||||
func (si *Index) UpdateMetadata(info *FileInfo) bool {
|
||||
si.mu.Lock()
|
||||
defer si.mu.Unlock()
|
||||
dir, exists := si.Directories[adjustedPath]
|
||||
if !exists {
|
||||
si.Directories[adjustedPath] = FileInfo{}
|
||||
}
|
||||
return si.SetFileMetadata(adjustedPath, dir)
|
||||
}
|
||||
|
||||
// SetFileMetadata sets the FileInfo for the specified directory in the index.
|
||||
// internal use only
|
||||
func (si *Index) SetFileMetadata(adjustedPath string, info FileInfo) bool {
|
||||
_, exists := si.Directories[adjustedPath]
|
||||
if !exists {
|
||||
return false
|
||||
}
|
||||
info.CacheTime = time.Now()
|
||||
si.Directories[adjustedPath] = info
|
||||
si.Directories[info.Path] = info
|
||||
return true
|
||||
}
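UpdateMetadata now takes the *FileInfo itself and keys the cache entry off info.Path. A minimal in-package sketch (the path and file names are placeholders):

```go
func cacheDir(si *Index) bool {
	return si.UpdateMetadata(&FileInfo{
		Path:  "/photos",
		Name:  "photos",
		Type:  "directory",
		Files: []ReducedItem{{Name: "cat.jpg", Size: 1024}},
	})
}
```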
|
||||
|
||||
// GetMetadataInfo retrieves the FileInfo from the specified directory in the index.
|
||||
func (si *Index) GetMetadataInfo(adjustedPath string) (FileInfo, bool) {
|
||||
func (si *Index) GetReducedMetadata(target string, isDir bool) (*FileInfo, bool) {
|
||||
si.mu.RLock()
|
||||
dir, exists := si.Directories[adjustedPath]
|
||||
si.mu.RUnlock()
|
||||
if !exists {
|
||||
return dir, exists
|
||||
defer si.mu.RUnlock()
|
||||
checkDir := si.makeIndexPath(target)
|
||||
if !isDir {
|
||||
checkDir = si.makeIndexPath(filepath.Dir(target))
|
||||
}
|
||||
dir, exists := si.Directories[checkDir]
|
||||
if !exists {
|
||||
return nil, false
|
||||
}
|
||||
if !isDir {
|
||||
if checkDir == "/" {
|
||||
checkDir = ""
|
||||
}
|
||||
|
||||
baseName := filepath.Base(target)
|
||||
for _, item := range dir.Files {
|
||||
if item.Name == baseName {
|
||||
return &FileInfo{
|
||||
Name: item.Name,
|
||||
Size: item.Size,
|
||||
ModTime: item.ModTime,
|
||||
Type: item.Type,
|
||||
Path: checkDir + "/" + item.Name,
|
||||
}, true
|
||||
}
|
||||
}
|
||||
return nil, false
|
||||
}
|
||||
// remove recursive items, we only want this directories direct files
|
||||
cleanedItems := []ReducedItem{}
|
||||
for _, item := range dir.Items {
|
||||
for name, item := range dir.Dirs {
|
||||
cleanedItems = append(cleanedItems, ReducedItem{
|
||||
Name: item.Name,
|
||||
Name: name,
|
||||
Size: item.Size,
|
||||
IsDir: item.IsDir,
|
||||
ModTime: item.ModTime,
|
||||
Type: item.Type,
|
||||
Type: "directory",
|
||||
})
|
||||
}
|
||||
dir.Items = nil
|
||||
dir.ReducedItems = cleanedItems
|
||||
realPath, _, _ := GetRealPath(adjustedPath)
|
||||
dir.Path = realPath
|
||||
return dir, exists
|
||||
cleanedItems = append(cleanedItems, dir.Files...)
|
||||
sort.Slice(cleanedItems, func(i, j int) bool {
|
||||
return cleanedItems[i].Name < cleanedItems[j].Name
|
||||
})
|
||||
dirname := filepath.Base(dir.Path)
|
||||
if dirname == "." {
|
||||
dirname = "/"
|
||||
}
|
||||
// construct file info
|
||||
return &FileInfo{
|
||||
Name: dirname,
|
||||
Type: "directory",
|
||||
Items: cleanedItems,
|
||||
ModTime: dir.ModTime,
|
||||
Size: dir.Size,
|
||||
}, true
|
||||
}
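A hedged sketch of the two lookup modes of GetReducedMetadata; the paths are placeholders and si is an initialized *Index from this package:

```go
func lookups(si *Index) {
	dir, ok := si.GetReducedMetadata("/photos", true)            // directory: Items merges child dirs and files
	file, ok2 := si.GetReducedMetadata("/photos/cat.jpg", false) // file: resolved from the parent directory's Files
	_, _, _, _ = dir, ok, file, ok2
}
```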
|
||||
|
||||
// SetDirectoryInfo sets the directory information in the index.
|
||||
func (si *Index) SetDirectoryInfo(adjustedPath string, dir FileInfo) {
|
||||
si.mu.Lock()
|
||||
si.Directories[adjustedPath] = dir
|
||||
si.mu.Unlock()
|
||||
}
|
||||
|
||||
// SetDirectoryInfo sets the directory information in the index.
|
||||
func (si *Index) GetDirectoryInfo(adjustedPath string) (FileInfo, bool) {
|
||||
// GetMetadataInfo retrieves the FileInfo from the specified directory in the index.
|
||||
func (si *Index) GetMetadataInfo(target string, isDir bool) (*FileInfo, bool) {
|
||||
si.mu.RLock()
|
||||
dir, exists := si.Directories[adjustedPath]
|
||||
si.mu.RUnlock()
|
||||
defer si.mu.RUnlock()
|
||||
checkDir := si.makeIndexPath(target)
|
||||
if !isDir {
|
||||
checkDir = si.makeIndexPath(filepath.Dir(target))
|
||||
}
|
||||
dir, exists := si.Directories[checkDir]
|
||||
return dir, exists
|
||||
}
|
||||
|
||||
|
@ -108,11 +125,12 @@ func GetIndex(root string) *Index {
|
|||
}
|
||||
newIndex := &Index{
|
||||
Root: rootPath,
|
||||
Directories: map[string]FileInfo{},
|
||||
Directories: map[string]*FileInfo{},
|
||||
NumDirs: 0,
|
||||
NumFiles: 0,
|
||||
inProgress: false,
|
||||
}
|
||||
newIndex.Directories["/"] = &FileInfo{}
|
||||
indexesMutex.Lock()
|
||||
indexes = append(indexes, newIndex)
|
||||
indexesMutex.Unlock()
|
||||
|
|
|
@ -32,9 +32,9 @@ func TestGetFileMetadataSize(t *testing.T) {
|
|||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
fileInfo, _ := testIndex.GetMetadataInfo(tt.adjustedPath)
|
||||
fileInfo, _ := testIndex.GetReducedMetadata(tt.adjustedPath, true)
|
||||
// Iterate over fileInfo.Items to look for expectedName
|
||||
for _, item := range fileInfo.ReducedItems {
|
||||
for _, item := range fileInfo.Items {
|
||||
// Assert the existence and the name
|
||||
if item.Name == tt.expectedName {
|
||||
assert.Equal(t, tt.expectedSize, item.Size)
|
||||
|
@ -53,28 +53,29 @@ func TestGetFileMetadata(t *testing.T) {
|
|||
adjustedPath string
|
||||
expectedName string
|
||||
expectedExists bool
|
||||
isDir bool
|
||||
}{
|
||||
{
|
||||
name: "testpath exists",
|
||||
adjustedPath: "/testpath",
|
||||
adjustedPath: "/testpath/testfile.txt",
|
||||
expectedName: "testfile.txt",
|
||||
expectedExists: true,
|
||||
},
|
||||
{
|
||||
name: "testpath not exists",
|
||||
adjustedPath: "/testpath",
|
||||
adjustedPath: "/testpath/nonexistent.txt",
|
||||
expectedName: "nonexistent.txt",
|
||||
expectedExists: false,
|
||||
},
|
||||
{
|
||||
name: "File exists in /anotherpath",
|
||||
adjustedPath: "/anotherpath",
|
||||
adjustedPath: "/anotherpath/afile.txt",
|
||||
expectedName: "afile.txt",
|
||||
expectedExists: true,
|
||||
},
|
||||
{
|
||||
name: "File does not exist in /anotherpath",
|
||||
adjustedPath: "/anotherpath",
|
||||
adjustedPath: "/anotherpath/nonexistentfile.txt",
|
||||
expectedName: "nonexistentfile.txt",
|
||||
expectedExists: false,
|
||||
},
|
||||
|
@ -83,20 +84,33 @@ func TestGetFileMetadata(t *testing.T) {
|
|||
adjustedPath: "/nonexistentpath",
|
||||
expectedName: "",
|
||||
expectedExists: false,
|
||||
isDir: true,
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
fileInfo, _ := testIndex.GetMetadataInfo(tt.adjustedPath)
|
||||
fileInfo, _ := testIndex.GetReducedMetadata(tt.adjustedPath, tt.isDir)
|
||||
if fileInfo == nil {
|
||||
found := false
|
||||
assert.Equal(t, tt.expectedExists, found)
|
||||
return
|
||||
}
|
||||
found := false
|
||||
// Iterate over fileInfo.Items to look for expectedName
|
||||
for _, item := range fileInfo.ReducedItems {
|
||||
// Assert the existence and the name
|
||||
if item.Name == tt.expectedName {
|
||||
if tt.isDir {
|
||||
// Iterate over fileInfo.Items to look for expectedName
|
||||
for _, item := range fileInfo.Items {
|
||||
// Assert the existence and the name
|
||||
if item.Name == tt.expectedName {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if fileInfo.Name == tt.expectedName {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
assert.Equal(t, tt.expectedExists, found)
|
||||
})
|
||||
}
|
||||
|
@ -104,42 +118,42 @@ func TestGetFileMetadata(t *testing.T) {
|
|||
|
||||
// Test for UpdateFileMetadata
|
||||
func TestUpdateFileMetadata(t *testing.T) {
|
||||
index := &Index{
|
||||
Directories: map[string]FileInfo{
|
||||
"/testpath": {
|
||||
Path: "/testpath",
|
||||
Name: "testpath",
|
||||
IsDir: true,
|
||||
ReducedItems: []ReducedItem{
|
||||
{Name: "testfile.txt"},
|
||||
{Name: "anotherfile.txt"},
|
||||
},
|
||||
},
|
||||
info := &FileInfo{
|
||||
Path: "/testpath",
|
||||
Name: "testpath",
|
||||
Type: "directory",
|
||||
Files: []ReducedItem{
|
||||
{Name: "testfile.txt"},
|
||||
{Name: "anotherfile.txt"},
|
||||
},
|
||||
}
|
||||
|
||||
info := FileInfo{Name: "testfile.txt"}
|
||||
index := &Index{
|
||||
Directories: map[string]*FileInfo{
|
||||
"/testpath": info,
|
||||
},
|
||||
}
|
||||
|
||||
success := index.UpdateFileMetadata("/testpath", info)
|
||||
success := index.UpdateMetadata(info)
|
||||
if !success {
|
||||
t.Fatalf("expected UpdateFileMetadata to succeed")
|
||||
}
|
||||
|
||||
dir, exists := index.Directories["/testpath"]
|
||||
if !exists || dir.ReducedItems[0].Name != "testfile.txt" {
|
||||
t.Fatalf("expected testfile.txt to be updated in the directory metadata")
|
||||
fileInfo, exists := index.GetReducedMetadata("/testpath/testfile.txt", false)
|
||||
if !exists || fileInfo.Name != "testfile.txt" {
|
||||
t.Fatalf("expected testfile.txt to be updated in the directory metadata:%v %v", exists, info.Name)
|
||||
}
|
||||
}
|
||||
|
||||
// Test for GetDirMetadata
|
||||
func TestGetDirMetadata(t *testing.T) {
|
||||
t.Parallel()
|
||||
_, exists := testIndex.GetMetadataInfo("/testpath")
|
||||
_, exists := testIndex.GetReducedMetadata("/testpath", true)
|
||||
if !exists {
|
||||
t.Fatalf("expected GetDirMetadata to return initialized metadata map")
|
||||
}
|
||||
|
||||
_, exists = testIndex.GetMetadataInfo("/nonexistent")
|
||||
_, exists = testIndex.GetReducedMetadata("/nonexistent", true)
|
||||
if exists {
|
||||
t.Fatalf("expected GetDirMetadata to return false for nonexistent directory")
|
||||
}
|
||||
|
@ -148,51 +162,37 @@ func TestGetDirMetadata(t *testing.T) {
|
|||
// Test for SetDirectoryInfo
|
||||
func TestSetDirectoryInfo(t *testing.T) {
|
||||
index := &Index{
|
||||
Directories: map[string]FileInfo{
|
||||
Directories: map[string]*FileInfo{
|
||||
"/testpath": {
|
||||
Path: "/testpath",
|
||||
Name: "testpath",
|
||||
IsDir: true,
|
||||
Items: []*FileInfo{
|
||||
Path: "/testpath",
|
||||
Name: "testpath",
|
||||
Type: "directory",
|
||||
Items: []ReducedItem{
|
||||
{Name: "testfile.txt"},
|
||||
{Name: "anotherfile.txt"},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
dir := FileInfo{
|
||||
Path: "/newPath",
|
||||
Name: "newPath",
|
||||
IsDir: true,
|
||||
Items: []*FileInfo{
|
||||
dir := &FileInfo{
|
||||
Path: "/newPath",
|
||||
Name: "newPath",
|
||||
Type: "directory",
|
||||
Items: []ReducedItem{
|
||||
{Name: "testfile.txt"},
|
||||
},
|
||||
}
|
||||
index.SetDirectoryInfo("/newPath", dir)
|
||||
index.UpdateMetadata(dir)
|
||||
storedDir, exists := index.Directories["/newPath"]
|
||||
if !exists || storedDir.Items[0].Name != "testfile.txt" {
|
||||
t.Fatalf("expected SetDirectoryInfo to store directory info correctly")
|
||||
}
|
||||
}
|
||||
|
||||
// Test for GetDirectoryInfo
|
||||
func TestGetDirectoryInfo(t *testing.T) {
|
||||
t.Parallel()
|
||||
dir, exists := testIndex.GetDirectoryInfo("/testpath")
|
||||
if !exists || dir.Items[0].Name != "testfile.txt" {
|
||||
t.Fatalf("expected GetDirectoryInfo to return correct directory info")
|
||||
}
|
||||
|
||||
_, exists = testIndex.GetDirectoryInfo("/nonexistent")
|
||||
if exists {
|
||||
t.Fatalf("expected GetDirectoryInfo to return false for nonexistent directory")
|
||||
}
|
||||
}
|
||||
|
||||
// Test for RemoveDirectory
|
||||
func TestRemoveDirectory(t *testing.T) {
|
||||
index := &Index{
|
||||
Directories: map[string]FileInfo{
|
||||
Directories: map[string]*FileInfo{
|
||||
"/testpath": {},
|
||||
},
|
||||
}
|
||||
|
@ -234,28 +234,26 @@ func init() {
|
|||
NumFiles: 10,
|
||||
NumDirs: 5,
|
||||
inProgress: false,
|
||||
Directories: map[string]FileInfo{
|
||||
Directories: map[string]*FileInfo{
|
||||
"/testpath": {
|
||||
Path: "/testpath",
|
||||
Name: "testpath",
|
||||
IsDir: true,
|
||||
NumDirs: 1,
|
||||
NumFiles: 2,
|
||||
Items: []*FileInfo{
|
||||
Path: "/testpath",
|
||||
Name: "testpath",
|
||||
Type: "directory",
|
||||
Files: []ReducedItem{
|
||||
{Name: "testfile.txt", Size: 100},
|
||||
{Name: "anotherfile.txt", Size: 100},
|
||||
},
|
||||
},
|
||||
"/anotherpath": {
|
||||
Path: "/anotherpath",
|
||||
Name: "anotherpath",
|
||||
IsDir: true,
|
||||
NumDirs: 1,
|
||||
NumFiles: 1,
|
||||
Items: []*FileInfo{
|
||||
{Name: "directory", IsDir: true, Size: 100},
|
||||
Path: "/anotherpath",
|
||||
Name: "anotherpath",
|
||||
Type: "directory",
|
||||
Files: []ReducedItem{
|
||||
{Name: "afile.txt", Size: 100},
|
||||
},
|
||||
Dirs: map[string]*FileInfo{
|
||||
"directory": {Name: "directory", Type: "directory", Size: 100},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
|
|
@@ -6,7 +6,7 @@ import (
)

// Copy copies a file or folder from one place to another.
func Copy(src, dst string) error {
func CopyHelper(src, dst string) error {
src = filepath.Clean(src)
if src == "" {
return os.ErrNotExist

@@ -1 +0,0 @@
../../frontend/dist

@@ -1,6 +0,0 @@
{
"name": "frontend",
"lockfileVersion": 3,
"requires": true,
"packages": {}
}
@ -7,10 +7,9 @@ require (
|
|||
github.com/disintegration/imaging v1.6.2
|
||||
github.com/dsoprea/go-exif/v3 v3.0.1
|
||||
github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568
|
||||
github.com/goccy/go-yaml v1.12.0
|
||||
github.com/golang-jwt/jwt/v4 v4.5.0
|
||||
github.com/goccy/go-yaml v1.14.3
|
||||
github.com/golang-jwt/jwt/v4 v4.5.1
|
||||
github.com/google/go-cmp v0.6.0
|
||||
github.com/gorilla/mux v1.8.1
|
||||
github.com/marusama/semaphore/v2 v2.5.0
|
||||
github.com/mholt/archiver/v3 v3.5.1
|
||||
github.com/shirou/gopsutil/v3 v3.24.5
|
||||
|
@ -18,39 +17,45 @@ require (
|
|||
github.com/spf13/cobra v1.8.1
|
||||
github.com/spf13/pflag v1.0.5
|
||||
github.com/stretchr/testify v1.9.0
|
||||
github.com/tomasen/realip v0.0.0-20180522021738-f0c99a92ddce
|
||||
golang.org/x/crypto v0.26.0
|
||||
golang.org/x/image v0.19.0
|
||||
golang.org/x/text v0.17.0
|
||||
github.com/swaggo/http-swagger v1.3.4
|
||||
github.com/swaggo/swag v1.16.4
|
||||
golang.org/x/crypto v0.29.0
|
||||
golang.org/x/image v0.22.0
|
||||
golang.org/x/text v0.20.0
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/andybalholm/brotli v1.1.0 // indirect
|
||||
github.com/KyleBanks/depth v1.2.1 // indirect
|
||||
github.com/andybalholm/brotli v1.1.1 // indirect
|
||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||
github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5 // indirect
|
||||
github.com/dsoprea/go-logging v0.0.0-20200710184922-b02d349568dd // indirect
|
||||
github.com/dsoprea/go-utility/v2 v2.0.0-20221003172846-a3e1774ef349 // indirect
|
||||
github.com/fatih/color v1.17.0 // indirect
|
||||
github.com/go-errors/errors v1.5.1 // indirect
|
||||
github.com/go-ole/go-ole v1.3.0 // indirect
|
||||
github.com/go-openapi/jsonpointer v0.21.0 // indirect
|
||||
github.com/go-openapi/jsonreference v0.21.0 // indirect
|
||||
github.com/go-openapi/spec v0.21.0 // indirect
|
||||
github.com/go-openapi/swag v0.23.0 // indirect
|
||||
github.com/golang/geo v0.0.0-20230421003525-6adc56603217 // indirect
|
||||
github.com/golang/snappy v0.0.4 // indirect
|
||||
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
||||
github.com/klauspost/compress v1.17.9 // indirect
|
||||
github.com/josharian/intern v1.0.0 // indirect
|
||||
github.com/klauspost/compress v1.17.11 // indirect
|
||||
github.com/klauspost/pgzip v1.2.6 // indirect
|
||||
github.com/mattn/go-colorable v0.1.13 // indirect
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
github.com/mailru/easyjson v0.7.7 // indirect
|
||||
github.com/nwaples/rardecode v1.1.3 // indirect
|
||||
github.com/pierrec/lz4/v4 v4.1.21 // indirect
|
||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||
github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 // indirect
|
||||
github.com/swaggo/files v1.0.1 // indirect
|
||||
github.com/ulikunitz/xz v0.5.12 // indirect
|
||||
github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 // indirect
|
||||
github.com/yusufpapurcu/wmi v1.2.4 // indirect
|
||||
go.etcd.io/bbolt v1.3.11 // indirect
|
||||
golang.org/x/net v0.28.0 // indirect
|
||||
golang.org/x/sys v0.24.0 // indirect
|
||||
golang.org/x/xerrors v0.0.0-20240716161551-93cc26a95ae9 // indirect
|
||||
golang.org/x/net v0.31.0 // indirect
|
||||
golang.org/x/sys v0.27.0 // indirect
|
||||
golang.org/x/tools v0.27.0 // indirect
|
||||
gopkg.in/yaml.v2 v2.4.0 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||
)
|
||||
|
|
backend/go.sum
|
@ -1,10 +1,12 @@
|
|||
github.com/DataDog/zstd v1.4.1 h1:3oxKN3wbHibqx897utPC2LTQU4J+IHWWJO+glkAkpFM=
|
||||
github.com/DataDog/zstd v1.4.1/go.mod h1:1jcaCB/ufaK+sKp1NBhlGmpz41jOoPQ35bpF36t7BBo=
|
||||
github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc=
|
||||
github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE=
|
||||
github.com/Sereal/Sereal v0.0.0-20190618215532-0b8ac451a863 h1:BRrxwOZBolJN4gIwvZMJY1tzqBvQgpaZiQRuIDD40jM=
|
||||
github.com/Sereal/Sereal v0.0.0-20190618215532-0b8ac451a863/go.mod h1:D0JMgToj/WdxCgd30Kc1UcA9E+WdZoJqeVOuYW7iTBM=
|
||||
github.com/andybalholm/brotli v1.0.1/go.mod h1:loMXtMfwqflxFJPmdbJO0a3KNoPuLBgiu3qAvBg8x/Y=
|
||||
github.com/andybalholm/brotli v1.1.0 h1:eLKJA0d02Lf0mVpIDgYnqXcUn0GqVmEFny3VuID1U3M=
|
||||
github.com/andybalholm/brotli v1.1.0/go.mod h1:sms7XGricyQI9K10gOSf56VKKWS4oLer58Q+mhRPtnY=
|
||||
github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA=
|
||||
github.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA=
|
||||
github.com/asdine/storm/v3 v3.2.1 h1:I5AqhkPK6nBZ/qJXySdI7ot5BlXSZ7qvDY1zAn5ZJac=
|
||||
github.com/asdine/storm/v3 v3.2.1/go.mod h1:LEpXwGt4pIqrE/XcTvCnZHT5MgZCV6Ub9q7yQzOFWr0=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
|
||||
|
@ -32,8 +34,6 @@ github.com/dsoprea/go-utility/v2 v2.0.0-20221003142440-7a1927d49d9d/go.mod h1:LV
|
|||
github.com/dsoprea/go-utility/v2 v2.0.0-20221003160719-7bc88537c05e/go.mod h1:VZ7cB0pTjm1ADBWhJUOHESu4ZYy9JN+ZPqjfiW09EPU=
|
||||
github.com/dsoprea/go-utility/v2 v2.0.0-20221003172846-a3e1774ef349 h1:DilThiXje0z+3UQ5YjYiSRRzVdtamFpvBQXKwMglWqw=
|
||||
github.com/dsoprea/go-utility/v2 v2.0.0-20221003172846-a3e1774ef349/go.mod h1:4GC5sXji84i/p+irqghpPFZBF8tRN/Q7+700G0/DLe8=
|
||||
github.com/fatih/color v1.17.0 h1:GlRw1BRJxkpqUCBKzKOw098ed57fEsKeNjpTe3cSjK4=
|
||||
github.com/fatih/color v1.17.0/go.mod h1:YZ7TlrGPkiz6ku9fK3TLD/pl3CpsiFyu8N92HLgmosI=
|
||||
github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568 h1:BHsljHzVlRcyQhjrss6TZTdY2VfCqZPbv5k3iBFa2ZQ=
|
||||
github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc=
|
||||
github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q=
|
||||
|
@ -45,16 +45,18 @@ github.com/go-errors/errors v1.5.1/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3Bop
|
|||
github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0=
|
||||
github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE=
|
||||
github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78=
|
||||
github.com/go-playground/locales v0.13.0 h1:HyWk6mgj5qFqCT5fjGBuRArbVDfE4hi8+e8ceBS/t7Q=
|
||||
github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8=
|
||||
github.com/go-playground/universal-translator v0.17.0 h1:icxd5fm+REJzpZx7ZfpaD876Lmtgy7VtROAbHHXk8no=
|
||||
github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA=
|
||||
github.com/go-playground/validator/v10 v10.4.1 h1:pH2c5ADXtd66mxoE0Zm9SUhxE20r7aM3F26W0hOn+GE=
|
||||
github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4=
|
||||
github.com/goccy/go-yaml v1.12.0 h1:/1WHjnMsI1dlIBQutrvSMGZRQufVO3asrHfTwfACoPM=
|
||||
github.com/goccy/go-yaml v1.12.0/go.mod h1:wKnAMd44+9JAAnGQpWVEgBzGt3YuTaQ4uXoHvE4m7WU=
|
||||
github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg=
|
||||
github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=
|
||||
github.com/go-openapi/jsonpointer v0.21.0 h1:YgdVicSA9vH5RiHs9TZW5oyafXZFc6+2Vc1rr/O9oNQ=
|
||||
github.com/go-openapi/jsonpointer v0.21.0/go.mod h1:IUyH9l/+uyhIYQ/PXVA41Rexl+kOkAPDdXEYns6fzUY=
|
||||
github.com/go-openapi/jsonreference v0.21.0 h1:Rs+Y7hSXT83Jacb7kFyjn4ijOuVGSvOdF2+tg1TRrwQ=
|
||||
github.com/go-openapi/jsonreference v0.21.0/go.mod h1:LmZmgsrTkVg9LG4EaHeY8cBDslNPMo06cago5JNLkm4=
|
||||
github.com/go-openapi/spec v0.21.0 h1:LTVzPc3p/RzRnkQqLRndbAzjY0d0BCL72A6j3CdL9ZY=
|
||||
github.com/go-openapi/spec v0.21.0/go.mod h1:78u6VdPw81XU44qEWGhtr982gJ5BWg2c0I5XwVMotYk=
|
||||
github.com/go-openapi/swag v0.23.0 h1:vsEVJDUo2hPJ2tu0/Xc+4noaxyEffXNIs3cOULZ+GrE=
|
||||
github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ=
|
||||
github.com/goccy/go-yaml v1.14.3 h1:8tVD+aqqPLWisSEhM+6wWoiURWXCx6BwaTKS6ZeITgM=
|
||||
github.com/goccy/go-yaml v1.14.3/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA=
|
||||
github.com/golang-jwt/jwt/v4 v4.5.1 h1:JdqV9zKUdtaa9gdPlywC3aeoEsR681PlKC+4F5gQgeo=
|
||||
github.com/golang-jwt/jwt/v4 v4.5.1/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=
|
||||
github.com/golang/geo v0.0.0-20190916061304-5b978397cfec/go.mod h1:QZ0nwyI2jOfgRAoBvP+ab5aRr7c9x7lhGEJrKvBwjWI=
|
||||
github.com/golang/geo v0.0.0-20200319012246-673a6f80352d/go.mod h1:QZ0nwyI2jOfgRAoBvP+ab5aRr7c9x7lhGEJrKvBwjWI=
|
||||
github.com/golang/geo v0.0.0-20210211234256-740aa86cb551/go.mod h1:QZ0nwyI2jOfgRAoBvP+ab5aRr7c9x7lhGEJrKvBwjWI=
|
||||
|
@ -71,34 +73,31 @@ github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEW
|
|||
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
|
||||
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
|
||||
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
|
||||
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
|
||||
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
|
||||
github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
|
||||
github.com/jessevdk/go-flags v1.5.0/go.mod h1:Fw0T6WPc1dYxT4mKEZRfG5kJhaTDP9pj1c2EWnYs/m4=
|
||||
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
|
||||
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
|
||||
github.com/klauspost/compress v1.4.1/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
|
||||
github.com/klauspost/compress v1.11.4/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs=
|
||||
github.com/klauspost/compress v1.17.9 h1:6KIumPrER1LHsvBVuDa0r5xaG0Es51mhhB9BQB2qeMA=
|
||||
github.com/klauspost/compress v1.17.9/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw=
|
||||
github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc=
|
||||
github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0=
|
||||
github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
|
||||
github.com/klauspost/pgzip v1.2.5/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs=
|
||||
github.com/klauspost/pgzip v1.2.6 h1:8RXeL5crjEUFnR2/Sn6GJNWtSQ3Dk8pq4CL3jvdDyjU=
|
||||
github.com/klauspost/pgzip v1.2.6/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs=
|
||||
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
|
||||
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
||||
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
|
||||
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
|
||||
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
|
||||
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||
github.com/leodido/go-urn v1.2.0 h1:hpXL4XnriNwQ/ABnpepYM/1vCLWNDfUNts8dX3xTG6Y=
|
||||
github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII=
|
||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
||||
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
|
||||
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
|
||||
github.com/marusama/semaphore/v2 v2.5.0 h1:o/1QJD9DBYOWRnDhPwDVAXQn6mQYD0gZaS1Tpx6DJGM=
|
||||
github.com/marusama/semaphore/v2 v2.5.0/go.mod h1:z9nMiNUekt/LTpTUQdpp+4sJeYqUGpwMHfW0Z8V8fnQ=
|
||||
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
|
||||
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
|
||||
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
|
||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/mholt/archiver/v3 v3.5.1 h1:rDjOBX9JSF5BvoJGvjqK479aL70qh9DIpZCl+k7Clwo=
|
||||
github.com/mholt/archiver/v3 v3.5.1/go.mod h1:e3dqJ7H78uzsRSEACH1joayhuSyhnonssnDhppzS1L4=
|
||||
github.com/nwaples/rardecode v1.1.0/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0=
|
||||
|
@ -111,6 +110,8 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb
|
|||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 h1:o4JXh1EVt9k/+g42oCprj/FisM4qX9L3sZB3upGN2ZU=
|
||||
github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE=
|
||||
github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M=
|
||||
github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA=
|
||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/shirou/gopsutil/v3 v3.24.5 h1:i0t8kL+kQTvpAYToeuiVk3TgDeKOFioZO3Ztz/iZ9pI=
|
||||
github.com/shirou/gopsutil/v3 v3.24.5/go.mod h1:bsoOS1aStSs9ErQ1WWfxllSeS1K5D+U30r2NfcubMVk=
|
||||
|
@ -123,8 +124,12 @@ github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An
|
|||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
|
||||
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||
github.com/tomasen/realip v0.0.0-20180522021738-f0c99a92ddce h1:fb190+cK2Xz/dvi9Hv8eCYJYvIGUTN2/KLq1pT6CjEc=
|
||||
github.com/tomasen/realip v0.0.0-20180522021738-f0c99a92ddce/go.mod h1:o8v6yHRoik09Xen7gje4m9ERNah1d1PPsVq1VEx9vE4=
|
||||
github.com/swaggo/files v1.0.1 h1:J1bVJ4XHZNq0I46UU90611i9/YzdrF7x92oX1ig5IdE=
|
||||
github.com/swaggo/files v1.0.1/go.mod h1:0qXmMNH6sXNf+73t65aKeB+ApmgxdnkQzVTAj2uaMUg=
|
||||
github.com/swaggo/http-swagger v1.3.4 h1:q7t/XLx0n15H1Q9/tk3Y9L4n210XzJF5WtnDX64a5ww=
|
||||
github.com/swaggo/http-swagger v1.3.4/go.mod h1:9dAh0unqMBAlbp1uE2Uc2mQTxNMU/ha4UbucIg1MFkQ=
|
||||
github.com/swaggo/swag v1.16.4 h1:clWJtd9LStiG3VeijiCfOVODP6VpHtKdQy9ELFG3s1A=
|
||||
github.com/swaggo/swag v1.16.4/go.mod h1:VBsHJRsDvfYvqoiMKnsdwhNV9LEMHgEDZcyVYX0sxPg=
|
||||
github.com/ulikunitz/xz v0.5.8/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
|
||||
github.com/ulikunitz/xz v0.5.9/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
|
||||
github.com/ulikunitz/xz v0.5.12 h1:37Nm15o69RwBkXM0J6A5OlE67RZTfzUxTj8fB3dfcsc=
|
||||
|
@ -133,60 +138,83 @@ github.com/vmihailenco/msgpack v4.0.4+incompatible h1:dSLoQfGFAo3F6OoNhwUmLwVgaU
|
|||
github.com/vmihailenco/msgpack v4.0.4+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk=
|
||||
github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 h1:nIPpBwaJSVYIxUFsDv3M8ofmx9yWTog9BfvIu0q41lo=
|
||||
github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8/go.mod h1:HUYIGzjTL3rfEspMxjDjgmT5uz5wzYJKVo23qUhYTos=
|
||||
github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU=
|
||||
github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E=
|
||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||
github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0=
|
||||
github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0=
|
||||
go.etcd.io/bbolt v1.3.4/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ=
|
||||
go.etcd.io/bbolt v1.3.11 h1:yGEzV1wPz2yVCLsD8ZAiGHhHVlczyC9d1rP43/VCRJ0=
|
||||
go.etcd.io/bbolt v1.3.11/go.mod h1:dksAq7YMXoljX0xu6VF5DMZGbhYYoLUalEiSySYAS4I=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.26.0 h1:RrRspgV4mU+YwB4FYnuBoKsUapNIL5cohGAmSH3azsw=
|
||||
golang.org/x/crypto v0.26.0/go.mod h1:GY7jblb9wI+FOo5y8/S2oY4zWP07AkOJ4+jxCqdqn54=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.29.0 h1:L5SG1JTTXupVV3n6sUqMTeWbjAyfPwoda2DLX8J8FrQ=
|
||||
golang.org/x/crypto v0.29.0/go.mod h1:+F4F4N5hv6v38hfeYwTdx20oUvLLc+QfrE9Ax9HtgRg=
|
||||
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
|
||||
golang.org/x/image v0.19.0 h1:D9FX4QWkLfkeqaC62SonffIIuYdOk/UE2XKUBgRIBIQ=
|
||||
golang.org/x/image v0.19.0/go.mod h1:y0zrRqlQRWQ5PXaYCOMLTW2fpsxZ8Qh9I/ohnInJEys=
|
||||
golang.org/x/image v0.22.0 h1:UtK5yLUzilVrkjMAZAZ34DXGpASN8i8pj8g+O+yd10g=
|
||||
golang.org/x/image v0.22.0/go.mod h1:9hPFhljd4zZ1GNSIZJ49sqbp45GKK9t6w+iXvGqZUz4=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4=
|
||||
golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY=
|
||||
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20191105084925-a882066a44e0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200320220750-118fecf932d8/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
|
||||
golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
|
||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||
golang.org/x/net v0.0.0-20221002022538-bcab6841153b/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk=
|
||||
golang.org/x/net v0.28.0 h1:a9JDOJc5GMUJ0+UDqmLT86WiEy7iWyIhz8gz8E4e5hE=
|
||||
golang.org/x/net v0.28.0/go.mod h1:yqtgsTWOOnlGLG9GFRrK3++bGOUEkNBoHZc8MEDWPNg=
|
||||
golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ=
|
||||
golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||
golang.org/x/net v0.31.0 h1:68CPQngjLL0r2AlUKiSxtQFKvzRVbnzLwMUn5SzcLHo=
|
||||
golang.org/x/net v0.31.0/go.mod h1:P4fl1q7dY2hnZFxEk4pPSkDHF+QqjitcnDjUQyMM+pM=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ=
|
||||
golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220928140112-f11e5e49a4ec/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.24.0 h1:Twjiwq9dn6R1fQcyiK+wQyHWfaz/BJB+YIpzU/Cv3Xg=
|
||||
golang.org/x/sys v0.24.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s=
|
||||
golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||
golang.org/x/text v0.17.0 h1:XtiM5bkSOt+ewxlOE/aE/AKEHibwj/6gvWMl9Rsh0Qc=
|
||||
golang.org/x/text v0.17.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
|
||||
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.20.0 h1:gK/Kv2otX8gz+wn7Rmb3vT96ZwuoxnQlY+HlJVj7Qug=
|
||||
golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||
golang.org/x/tools v0.27.0 h1:qEKojBykQkQ4EynWy4S8Weg69NumxKdn40Fce3uc/8o=
|
||||
golang.org/x/tools v0.27.0/go.mod h1:sUi0ZgbwW9ZPAq26Ekut+weQPR5eIM6GQLQ1Yjm1H0Q=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20240716161551-93cc26a95ae9 h1:LLhsEBxRTBLuKlQxFBYUOU8xyFgXv6cOTp2HASDlsDk=
|
||||
golang.org/x/xerrors v0.0.0-20240716161551-93cc26a95ae9/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90=
|
||||
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
|
||||
google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c=
|
||||
google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
|
||||
google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8=
|
||||
google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
|
||||
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
|
||||
gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
|
||||
|
|
|
@ -0,0 +1,121 @@
|
|||
package http
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/gtsteffaniak/filebrowser/users"
|
||||
)
|
||||
|
||||
func createApiKeyHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) {
|
||||
name := r.URL.Query().Get("name")
|
||||
durationStr := r.URL.Query().Get("days")
|
||||
permissionsStr := r.URL.Query().Get("permissions")
|
||||
|
||||
if name == "" {
|
||||
return http.StatusInternalServerError, fmt.Errorf("api name must be valid")
|
||||
}
|
||||
if durationStr == "" {
|
||||
return http.StatusInternalServerError, fmt.Errorf("api duration must be valid")
|
||||
}
|
||||
if permissionsStr == "" {
|
||||
return http.StatusInternalServerError, fmt.Errorf("api permissions must be valid")
|
||||
}
|
||||
// Parse permissions from the query parameter
|
||||
permissions := users.Permissions{
|
||||
Api: strings.Contains(permissionsStr, "api") && d.user.Perm.Api,
|
||||
Admin: strings.Contains(permissionsStr, "admin") && d.user.Perm.Admin,
|
||||
Execute: strings.Contains(permissionsStr, "execute") && d.user.Perm.Execute,
|
||||
Create: strings.Contains(permissionsStr, "create") && d.user.Perm.Create,
|
||||
Rename: strings.Contains(permissionsStr, "rename") && d.user.Perm.Rename,
|
||||
Modify: strings.Contains(permissionsStr, "modify") && d.user.Perm.Modify,
|
||||
Delete: strings.Contains(permissionsStr, "delete") && d.user.Perm.Delete,
|
||||
Share: strings.Contains(permissionsStr, "share") && d.user.Perm.Share,
|
||||
Download: strings.Contains(permissionsStr, "download") && d.user.Perm.Download,
|
||||
}
|
||||
|
||||
// Convert the duration string to an int64
|
||||
durationInt, err := strconv.ParseInt(durationStr, 10, 64) // Base 10 and bit size of 64
|
||||
if err != nil {
|
||||
return http.StatusBadRequest, fmt.Errorf("invalid duration value: %w", err)
|
||||
}
|
||||
|
||||
// Here we assume the duration is in seconds; convert to time.Duration
|
||||
duration := time.Duration(durationInt) * time.Hour * 24
|
||||
|
||||
// get request body like:
|
||||
token, err := makeSignedTokenAPI(d.user, name, duration, permissions)
|
||||
if err != nil {
|
||||
if strings.Contains(err.Error(), "key already exists with same name") {
|
||||
return http.StatusConflict, err
|
||||
}
|
||||
return http.StatusInternalServerError, err
|
||||
}
|
||||
response := HttpResponse{
|
||||
Message: "here is your token!",
|
||||
Token: token.Key,
|
||||
}
|
||||
return renderJSON(w, r, response)
|
||||
}
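A hedged client-side sketch of hitting this handler. The query parameters (name, days, permissions) come straight from the code above; the HTTP method, route, and port are assumptions, since the route registration is not part of this hunk:

```go
func createToken(existing string) (*http.Response, error) {
	// Assumed route and method; adjust to the registered API path.
	url := "http://localhost:8080/api/auth/token?name=ci&days=30&permissions=api,download"
	req, err := http.NewRequest(http.MethodPut, url, nil)
	if err != nil {
		return nil, err
	}
	req.Header.Set("Authorization", "Bearer "+existing) // an existing user token
	return http.DefaultClient.Do(req)
}
```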
|
||||
|
||||
func deleteApiKeyHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) {
|
||||
name := r.URL.Query().Get("name")
|
||||
|
||||
keyInfo, ok := d.user.ApiKeys[name]
|
||||
if !ok {
|
||||
return http.StatusNotFound, fmt.Errorf("api key not found")
|
||||
}
|
||||
// Perform the user update
|
||||
err := store.Users.DeleteApiKey(d.user.ID, name)
|
||||
if err != nil {
|
||||
return http.StatusNotFound, err
|
||||
}
|
||||
|
||||
revokeAPIKey(keyInfo.Key) // add to blacklist
|
||||
response := HttpResponse{
|
||||
Message: "successfully deleted api key from user",
|
||||
}
|
||||
return renderJSON(w, r, response)
|
||||
}
|
||||
|
||||
type AuthTokenMin struct {
|
||||
Key string `json:"key"`
|
||||
Name string `json:"name"`
|
||||
Created int64 `json:"created"`
|
||||
Expires int64 `json:"expires"`
|
||||
Permissions users.Permissions `json:"Permissions"`
|
||||
}
|
||||
|
||||
func listApiKeysHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) {
|
||||
key := r.URL.Query().Get("key")
|
||||
|
||||
if key != "" {
|
||||
keyInfo, ok := d.user.ApiKeys[key]
|
||||
if !ok {
|
||||
return http.StatusNotFound, fmt.Errorf("api key not found")
|
||||
}
|
||||
modifiedKey := AuthTokenMin{
|
||||
Key: keyInfo.Key,
|
||||
Name: key,
|
||||
Created: keyInfo.Created,
|
||||
Expires: keyInfo.Expires,
|
||||
Permissions: keyInfo.Permissions,
|
||||
}
|
||||
return renderJSON(w, r, modifiedKey)
|
||||
}
|
||||
|
||||
modifiedList := map[string]AuthTokenMin{}
|
||||
for key, value := range d.user.ApiKeys {
|
||||
modifiedList[key] = AuthTokenMin{
|
||||
Key: value.Key,
|
||||
Created: value.Created,
|
||||
Expires: value.Expires,
|
||||
Permissions: value.Permissions,
|
||||
}
|
||||
}
|
||||
|
||||
return renderJSON(w, r, modifiedList)
|
||||
}
|
|
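To make the listing response concrete, here is a small sketch of the JSON shape a client receives from the full listing, marshaled the same way renderJSON does. A map stands in for users.Permissions because that type's JSON tags are not part of this change; note the capitalized "Permissions" key, which clients must match.

package main

import (
	"encoding/json"
	"fmt"
)

// Local stand-in for AuthTokenMin, for illustration only.
type tokenInfo struct {
	Key         string          `json:"key"`
	Name        string          `json:"name"`
	Created     int64           `json:"created"`
	Expires     int64           `json:"expires"`
	Permissions map[string]bool `json:"Permissions"` // note the capitalized tag
}

func main() {
	list := map[string]tokenInfo{
		"ci-token": {
			Key:         "eyJhbGciOi...",
			Created:     1700000000,
			Expires:     1702592000,
			Permissions: map[string]bool{"api": true, "download": true},
		},
	}
	out, _ := json.MarshalIndent(list, "", "  ")
	fmt.Println(string(out)) // same shape the listing endpoint renders
}
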
@ -2,10 +2,12 @@ package http
|
|||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/golang-jwt/jwt/v4"
|
||||
|
@ -14,91 +16,77 @@ import (
|
|||
"github.com/gtsteffaniak/filebrowser/errors"
|
||||
"github.com/gtsteffaniak/filebrowser/settings"
|
||||
"github.com/gtsteffaniak/filebrowser/users"
|
||||
"github.com/gtsteffaniak/filebrowser/utils"
|
||||
)
|
||||
|
||||
type authToken struct {
|
||||
User users.User `json:"user"`
|
||||
jwt.RegisteredClaims
|
||||
}
|
||||
var (
	revokedApiKeyList map[string]bool
	revokeMu          sync.Mutex
)
|
||||
|
||||
type extractor []string
|
||||
// extractToken looks for a token in the "auth" cookie first,
// then in the Authorization header as a Bearer token,
// and finally in the "auth" query parameter.
|
||||
func extractToken(r *http.Request) (string, error) {
|
||||
hasToken := false
|
||||
tokenObj, err := r.Cookie("auth")
|
||||
if err == nil {
|
||||
hasToken = true
|
||||
token := tokenObj.Value
|
||||
// Checks if the token isn't empty and if it contains two dots.
|
||||
// The former prevents incompatibility with URLs that previously
|
||||
// used basic auth.
|
||||
if token != "" && strings.Count(token, ".") == 2 {
|
||||
return token, nil
|
||||
}
|
||||
}
|
||||
|
||||
func (e extractor) ExtractToken(r *http.Request) (string, error) {
|
||||
token, _ := request.HeaderExtractor{"X-Auth"}.ExtractToken(r)
|
||||
|
||||
// Checks if the token isn't empty and if it contains two dots.
|
||||
// The former prevents incompatibility with URLs that previously
|
||||
// used basic auth.
|
||||
if token != "" && strings.Count(token, ".") == 2 {
|
||||
return token, nil
|
||||
// Check for Authorization header
|
||||
authHeader := r.Header.Get("Authorization")
|
||||
if authHeader != "" {
|
||||
hasToken = true
|
||||
// Split the header to get "Bearer {token}"
|
||||
parts := strings.Split(authHeader, " ")
|
||||
if len(parts) == 2 && parts[0] == "Bearer" {
|
||||
token := parts[1]
|
||||
return token, nil
|
||||
}
|
||||
}
|
||||
|
||||
auth := r.URL.Query().Get("auth")
|
||||
if auth != "" && strings.Count(auth, ".") == 2 {
|
||||
return auth, nil
|
||||
if auth != "" {
|
||||
hasToken = true
|
||||
if strings.Count(auth, ".") == 2 {
|
||||
return auth, nil
|
||||
}
|
||||
}
|
||||
|
||||
if r.Method == http.MethodGet {
|
||||
cookie, _ := r.Cookie("auth")
|
||||
if cookie != nil && strings.Count(cookie.Value, ".") == 2 {
|
||||
return cookie.Value, nil
|
||||
}
|
||||
if hasToken {
|
||||
return "", fmt.Errorf("invalid token provided")
|
||||
}
|
||||
|
||||
return "", request.ErrNoTokenInRequest
|
||||
}
|
||||
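As a client-side reference, here is a small sketch of the three ways a request can carry a token, mirroring the lookup order above (cookie, Authorization header, query parameter). The target path is illustrative only.

package main

import (
	"net/http"
	"net/url"
)

func buildAuthedRequests(base, token string) []*http.Request {
	// 1. Cookie named "auth" (what the web UI uses).
	withCookie, _ := http.NewRequest(http.MethodGet, base+"/api/resources?path=/", nil)
	withCookie.AddCookie(&http.Cookie{Name: "auth", Value: token})

	// 2. Authorization header as a Bearer token (typical for API clients).
	withHeader, _ := http.NewRequest(http.MethodGet, base+"/api/resources?path=/", nil)
	withHeader.Header.Set("Authorization", "Bearer "+token)

	// 3. "auth" query parameter (useful for direct links).
	withQuery, _ := http.NewRequest(http.MethodGet,
		base+"/api/resources?path=/&auth="+url.QueryEscape(token), nil)

	return []*http.Request{withCookie, withHeader, withQuery}
}
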
|
||||
func withUser(fn handleFunc) handleFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
|
||||
keyFunc := func(token *jwt.Token) (interface{}, error) {
|
||||
return d.settings.Auth.Key, nil
|
||||
}
|
||||
|
||||
var tk authToken
|
||||
token, err := request.ParseFromRequest(r, &extractor{}, keyFunc, request.WithClaims(&tk))
|
||||
|
||||
if err != nil || !token.Valid {
|
||||
return http.StatusUnauthorized, nil
|
||||
}
|
||||
|
||||
expired := !tk.VerifyExpiresAt(time.Now().Add(time.Hour), true)
|
||||
updated := tk.IssuedAt != nil && tk.IssuedAt.Unix() < d.store.Users.LastUpdate(tk.User.ID)
|
||||
|
||||
if expired || updated {
|
||||
w.Header().Add("X-Renew-Token", "true")
|
||||
}
|
||||
|
||||
d.user, err = d.store.Users.Get(d.server.Root, tk.User.ID)
|
||||
if err != nil {
|
||||
return http.StatusInternalServerError, err
|
||||
}
|
||||
return fn(w, r, d)
|
||||
}
|
||||
}
|
||||
|
||||
func withAdmin(fn handleFunc) handleFunc {
|
||||
return withUser(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
|
||||
if !d.user.Perm.Admin {
|
||||
return http.StatusForbidden, nil
|
||||
}
|
||||
|
||||
return fn(w, r, d)
|
||||
})
|
||||
}
|
||||
|
||||
var loginHandler = func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
|
||||
auther, err := d.store.Auth.Get(d.settings.Auth.Method)
|
||||
func loginHandler(w http.ResponseWriter, r *http.Request) {
|
||||
// Get the authentication method from the settings
|
||||
auther, err := store.Auth.Get(config.Auth.Method)
|
||||
if err != nil {
|
||||
return http.StatusInternalServerError, err
|
||||
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
user, err := auther.Auth(r, d.store.Users)
|
||||
// Authenticate the user based on the request
|
||||
user, err := auther.Auth(r, store.Users)
|
||||
if err == os.ErrPermission {
|
||||
return http.StatusForbidden, nil
|
||||
http.Error(w, http.StatusText(http.StatusForbidden), http.StatusForbidden)
|
||||
return
|
||||
} else if err != nil {
|
||||
return http.StatusInternalServerError, err
|
||||
} else {
|
||||
return printToken(w, r, d, user)
|
||||
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
status, err := printToken(w, r, user) // Pass the data object
|
||||
if err != nil {
|
||||
http.Error(w, http.StatusText(status), status)
|
||||
}
|
||||
}
|
||||
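For reference, a hedged sketch of the login round trip from a client's point of view. The /api/login path comes from the existing router; the JSON body shown is an assumption for the password-based auth method, and the response body is the signed token written as plain text by printToken.

package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func login(base, username, password string) (string, error) {
	// Body format depends on the configured auth method; a username/password
	// JSON body is assumed here.
	body := bytes.NewBufferString(fmt.Sprintf(`{"username":%q,"password":%q}`, username, password))
	resp, err := http.Post(base+"/api/login", "application/json", body)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return "", fmt.Errorf("login failed: %s", resp.Status)
	}
	token, err := io.ReadAll(resp.Body) // printToken writes the signed JWT as text/plain
	return string(token), err
}
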
|
||||
|
@ -107,73 +95,115 @@ type signupBody struct {
|
|||
Password string `json:"password"`
|
||||
}
|
||||
|
||||
var signupHandler = func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
|
||||
func signupHandler(w http.ResponseWriter, r *http.Request) {
|
||||
if !settings.Config.Auth.Signup {
|
||||
return http.StatusMethodNotAllowed, nil
|
||||
http.Error(w, http.StatusText(http.StatusMethodNotAllowed), http.StatusMethodNotAllowed)
|
||||
return
|
||||
}
|
||||
|
||||
if r.Body == nil {
|
||||
return http.StatusBadRequest, nil
|
||||
http.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
info := &signupBody{}
|
||||
err := json.NewDecoder(r.Body).Decode(info)
|
||||
if err != nil {
|
||||
return http.StatusBadRequest, err
|
||||
http.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
if info.Password == "" || info.Username == "" {
|
||||
return http.StatusBadRequest, nil
|
||||
http.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
user := users.ApplyDefaults(users.User{})
|
||||
user := settings.ApplyUserDefaults(users.User{})
|
||||
user.Username = info.Username
|
||||
user.Password = info.Password
|
||||
|
||||
userHome, err := d.settings.MakeUserDir(user.Username, user.Scope, d.server.Root)
|
||||
userHome, err := config.MakeUserDir(user.Username, user.Scope, config.Server.Root)
|
||||
if err != nil {
|
||||
log.Printf("create user: failed to mkdir user home dir: [%s]", userHome)
|
||||
return http.StatusInternalServerError, err
|
||||
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
user.Scope = userHome
|
||||
log.Printf("new user: %s, home dir: [%s].", user.Username, userHome)
|
||||
err = d.store.Users.Save(&user)
|
||||
err = store.Users.Save(&user)
|
||||
if err == errors.ErrExist {
|
||||
return http.StatusConflict, err
|
||||
http.Error(w, http.StatusText(http.StatusConflict), http.StatusConflict)
|
||||
return
|
||||
} else if err != nil {
|
||||
return http.StatusInternalServerError, err
|
||||
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
return http.StatusOK, nil
|
||||
}
|
||||
|
||||
var renewHandler = withUser(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
|
||||
return printToken(w, r, d, d.user)
|
||||
})
|
||||
func renewHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) {
|
||||
// renewHandler issues a fresh short-lived web token for the already-authenticated user.
|
||||
return printToken(w, r, d.user)
|
||||
}
|
||||
|
||||
func printToken(w http.ResponseWriter, _ *http.Request, d *data, user *users.User) (int, error) {
|
||||
duration, err := time.ParseDuration(settings.Config.Auth.TokenExpirationTime)
|
||||
if err != nil {
|
||||
duration = time.Hour * 2
|
||||
}
|
||||
claims := &authToken{
|
||||
User: *user,
|
||||
RegisteredClaims: jwt.RegisteredClaims{
|
||||
IssuedAt: jwt.NewNumericDate(time.Now()),
|
||||
ExpiresAt: jwt.NewNumericDate(time.Now().Add(duration)),
|
||||
Issuer: "File Browser",
|
||||
},
|
||||
}
|
||||
|
||||
token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)
|
||||
signed, err := token.SignedString(d.settings.Auth.Key)
|
||||
func printToken(w http.ResponseWriter, _ *http.Request, user *users.User) (int, error) {
|
||||
signed, err := makeSignedTokenAPI(user, "WEB_TOKEN_"+utils.GenerateRandomHash(4), time.Hour*2, user.Perm)
|
||||
if err != nil {
|
||||
if strings.Contains(err.Error(), "key already exists with same name") {
|
||||
return http.StatusConflict, err
|
||||
}
|
||||
return http.StatusInternalServerError, err
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Type", "text/plain")
|
||||
if _, err := w.Write([]byte(signed)); err != nil {
|
||||
if _, err := w.Write([]byte(signed.Key)); err != nil {
|
||||
return http.StatusInternalServerError, err
|
||||
}
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
func isRevokedApiKey(key string) bool {
|
||||
_, exists := revokedApiKeyList[key]
|
||||
return exists
|
||||
}
|
||||
|
||||
func revokeAPIKey(key string) {
	revokeMu.Lock()
	if revokedApiKeyList == nil {
		revokedApiKeyList = map[string]bool{}
	}
	// add the key to the blacklist so isRevokedApiKey rejects it on future requests
	revokedApiKeyList[key] = true
	revokeMu.Unlock()
}
|
||||
|
||||
func makeSignedTokenAPI(user *users.User, name string, duration time.Duration, perms users.Permissions) (users.AuthToken, error) {
|
||||
_, ok := user.ApiKeys[name]
|
||||
if ok {
|
||||
return users.AuthToken{}, fmt.Errorf("key already exists with same name %v ", name)
|
||||
}
|
||||
now := time.Now()
|
||||
expires := now.Add(duration)
|
||||
claim := users.AuthToken{
|
||||
Permissions: perms,
|
||||
Created: now.Unix(),
|
||||
Expires: expires.Unix(),
|
||||
Name: name,
|
||||
BelongsTo: user.ID,
|
||||
RegisteredClaims: jwt.RegisteredClaims{
|
||||
IssuedAt: jwt.NewNumericDate(now),
|
||||
ExpiresAt: jwt.NewNumericDate(expires),
|
||||
Issuer: "FileBrowser Quantum",
|
||||
},
|
||||
}
|
||||
token := jwt.NewWithClaims(jwt.SigningMethodHS256, claim)
|
||||
tokenString, err := token.SignedString(config.Auth.Key)
|
||||
if err != nil {
|
||||
return claim, err
|
||||
}
|
||||
claim.Key = tokenString
|
||||
if strings.HasPrefix(name, "WEB_TOKEN") {
|
||||
// don't add to api tokens; it's a short-lived web token
|
||||
return claim, err
|
||||
}
|
||||
// Persist the new API key on the user record
|
||||
err = store.Users.AddApiKey(user.ID, name, claim)
|
||||
if err != nil {
|
||||
return claim, err
|
||||
}
|
||||
return claim, err
|
||||
}
@ -1,75 +0,0 @@
|
|||
package http
|
||||
|
||||
import (
|
||||
"log"
|
||||
"net/http"
|
||||
"strconv"
|
||||
|
||||
"github.com/tomasen/realip"
|
||||
|
||||
"github.com/gtsteffaniak/filebrowser/runner"
|
||||
"github.com/gtsteffaniak/filebrowser/settings"
|
||||
"github.com/gtsteffaniak/filebrowser/storage"
|
||||
"github.com/gtsteffaniak/filebrowser/users"
|
||||
)
|
||||
|
||||
type handleFunc func(w http.ResponseWriter, r *http.Request, d *data) (int, error)
|
||||
|
||||
type data struct {
|
||||
*runner.Runner
|
||||
settings *settings.Settings
|
||||
server *settings.Server
|
||||
store *storage.Storage
|
||||
user *users.User
|
||||
raw interface{}
|
||||
}
|
||||
|
||||
// Check implements rules.Checker.
|
||||
func (d *data) Check(path string) bool {
|
||||
|
||||
allow := true
|
||||
for _, rule := range d.settings.Rules {
|
||||
if rule.Matches(path) {
|
||||
allow = rule.Allow
|
||||
}
|
||||
}
|
||||
for _, rule := range d.user.Rules {
|
||||
if rule.Matches(path) {
|
||||
allow = rule.Allow
|
||||
}
|
||||
}
|
||||
|
||||
return allow
|
||||
}
|
||||
|
||||
func handle(fn handleFunc, prefix string, store *storage.Storage, server *settings.Server) http.Handler {
|
||||
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Cache-Control", "no-cache, no-store, must-revalidate")
|
||||
|
||||
settings, err := store.Settings.Get()
|
||||
if err != nil {
|
||||
log.Fatalf("ERROR: couldn't get settings: %v\n", err)
|
||||
return
|
||||
}
|
||||
|
||||
status, err := fn(w, r, &data{
|
||||
Runner: &runner.Runner{Enabled: server.EnableExec, Settings: settings},
|
||||
store: store,
|
||||
settings: settings,
|
||||
server: server,
|
||||
})
|
||||
|
||||
if status >= 400 || err != nil {
|
||||
clientIP := realip.FromRequest(r)
|
||||
log.Printf("%s: %v %s %v", r.URL.Path, status, clientIP, err)
|
||||
}
|
||||
|
||||
if status != 0 {
|
||||
txt := http.StatusText(status)
|
||||
http.Error(w, strconv.Itoa(status)+" "+txt, status)
|
||||
return
|
||||
}
|
||||
})
|
||||
|
||||
return stripPrefix(prefix, handler)
|
||||
}
|
|
@ -0,0 +1 @@
|
|||
*
|
|
@ -1,85 +0,0 @@
|
|||
package http
|
||||
|
||||
import (
|
||||
"io/fs"
|
||||
"net/http"
|
||||
|
||||
"github.com/gorilla/mux"
|
||||
|
||||
"github.com/gtsteffaniak/filebrowser/settings"
|
||||
"github.com/gtsteffaniak/filebrowser/storage"
|
||||
)
|
||||
|
||||
type modifyRequest struct {
|
||||
What string `json:"what"` // Answer to: what data type?
|
||||
Which []string `json:"which"` // Answer to: which fields?
|
||||
}
|
||||
|
||||
var (
|
||||
store *storage.Storage
|
||||
server *settings.Server
|
||||
fileCache FileCache
|
||||
)
|
||||
|
||||
func SetupEnv(storage *storage.Storage, s *settings.Server, cache FileCache) {
|
||||
store = storage
|
||||
server = s
|
||||
fileCache = cache
|
||||
}
|
||||
|
||||
func NewHandler(
|
||||
imgSvc ImgService,
|
||||
assetsFs fs.FS,
|
||||
) (http.Handler, error) {
|
||||
server.Clean()
|
||||
|
||||
r := mux.NewRouter()
|
||||
r.Use(func(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Security-Policy", `default-src 'self'; style-src 'unsafe-inline';`)
|
||||
next.ServeHTTP(w, r)
|
||||
})
|
||||
})
|
||||
index, static := getStaticHandlers(store, server, assetsFs)
|
||||
// NOTE: This fixes the issue where it would redirect if people did not put a
|
||||
// trailing slash in the end. I hate this decision since this allows some awful
|
||||
// URLs https://www.gorillatoolkit.org/pkg/mux#Router.SkipClean
|
||||
r = r.SkipClean(true)
|
||||
monkey := func(fn handleFunc, prefix string) http.Handler {
|
||||
return handle(fn, prefix, store, server)
|
||||
}
|
||||
r.HandleFunc("/health", healthHandler)
|
||||
r.PathPrefix("/static").Handler(static)
|
||||
r.NotFoundHandler = index
|
||||
api := r.PathPrefix("/api").Subrouter()
|
||||
api.Handle("/login", monkey(loginHandler, ""))
|
||||
api.Handle("/signup", monkey(signupHandler, ""))
|
||||
api.Handle("/renew", monkey(renewHandler, ""))
|
||||
users := api.PathPrefix("/users").Subrouter()
|
||||
users.Handle("", monkey(usersGetHandler, "")).Methods("GET")
|
||||
users.Handle("", monkey(userPostHandler, "")).Methods("POST")
|
||||
users.Handle("/{id:[0-9]+}", monkey(userPutHandler, "")).Methods("PUT")
|
||||
users.Handle("/{id:[0-9]+}", monkey(userGetHandler, "")).Methods("GET")
|
||||
users.Handle("/{id:[0-9]+}", monkey(userDeleteHandler, "")).Methods("DELETE")
|
||||
api.PathPrefix("/resources").Handler(monkey(resourceGetHandler, "/api/resources")).Methods("GET")
|
||||
api.PathPrefix("/resources").Handler(monkey(resourceDeleteHandler(fileCache), "/api/resources")).Methods("DELETE")
|
||||
api.PathPrefix("/resources").Handler(monkey(resourcePostHandler(fileCache), "/api/resources")).Methods("POST")
|
||||
api.PathPrefix("/resources").Handler(monkey(resourcePutHandler, "/api/resources")).Methods("PUT")
|
||||
api.PathPrefix("/resources").Handler(monkey(resourcePatchHandler(fileCache), "/api/resources")).Methods("PATCH")
|
||||
api.PathPrefix("/usage").Handler(monkey(diskUsage, "/api/usage")).Methods("GET")
|
||||
api.Path("/shares").Handler(monkey(shareListHandler, "/api/shares")).Methods("GET")
|
||||
api.PathPrefix("/share").Handler(monkey(shareGetsHandler, "/api/share")).Methods("GET")
|
||||
api.PathPrefix("/share").Handler(monkey(sharePostHandler, "/api/share")).Methods("POST")
|
||||
api.PathPrefix("/share").Handler(monkey(shareDeleteHandler, "/api/share")).Methods("DELETE")
|
||||
api.Handle("/settings", monkey(settingsGetHandler, "")).Methods("GET")
|
||||
api.Handle("/settings", monkey(settingsPutHandler, "")).Methods("PUT")
|
||||
api.PathPrefix("/raw").Handler(monkey(rawHandler, "/api/raw")).Methods("GET")
|
||||
api.PathPrefix("/preview/{size}/{path:.*}").
|
||||
Handler(monkey(previewHandler(imgSvc, fileCache, server.EnableThumbnails, server.ResizePreview), "/api/preview")).Methods("GET")
|
||||
api.PathPrefix("/search").Handler(monkey(searchHandler, "/api/search")).Methods("GET")
|
||||
public := api.PathPrefix("/public").Subrouter()
|
||||
public.Handle("/publicUser", monkey(publicUserGetHandler, "")).Methods("GET")
|
||||
public.PathPrefix("/dl").Handler(monkey(publicDlHandler, "/api/public/dl/")).Methods("GET")
|
||||
public.PathPrefix("/share").Handler(monkey(publicShareHandler, "/api/public/share/")).Methods("GET")
|
||||
return stripPrefix(server.BaseURL, r), nil
|
||||
}
|
|
@ -0,0 +1,291 @@
|
|||
package http
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
"net/http"
|
||||
"path/filepath"
|
||||
"time"
|
||||
|
||||
"github.com/golang-jwt/jwt/v4"
|
||||
"github.com/gtsteffaniak/filebrowser/files"
|
||||
"github.com/gtsteffaniak/filebrowser/runner"
|
||||
"github.com/gtsteffaniak/filebrowser/users"
|
||||
)
|
||||
|
||||
type requestContext struct {
|
||||
user *users.User
|
||||
*runner.Runner
|
||||
raw interface{}
|
||||
}
|
||||
|
||||
type HttpResponse struct {
|
||||
Status int `json:"status,omitempty"`
|
||||
Message string `json:"message,omitempty"`
|
||||
Token string `json:"token,omitempty"`
|
||||
}
|
||||
|
||||
// Updated handleFunc to match the new signature
|
||||
type handleFunc func(w http.ResponseWriter, r *http.Request, data *requestContext) (int, error)
|
||||
|
||||
// Middleware to handle file requests by hash and pass it to the handler
|
||||
func withHashFileHelper(fn handleFunc) handleFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request, data *requestContext) (int, error) {
|
||||
path := r.URL.Query().Get("path")
|
||||
hash := r.URL.Query().Get("hash")
|
||||
data.user = &users.PublicUser
|
||||
|
||||
// Get the file link by hash
|
||||
link, err := store.Share.GetByHash(hash)
|
||||
if err != nil {
|
||||
return http.StatusNotFound, err
|
||||
}
|
||||
// Authenticate the share request if needed
|
||||
var status int
|
||||
if link.Hash != "" {
|
||||
status, err = authenticateShareRequest(r, link)
|
||||
if err != nil || status != http.StatusOK {
|
||||
return status, err
|
||||
}
|
||||
}
|
||||
// Retrieve the user (using the public user by default)
|
||||
user := &users.PublicUser
|
||||
|
||||
// Get file information with options
|
||||
file, err := files.FileInfoFaster(files.FileOptions{
|
||||
Path: filepath.Join(user.Scope, link.Path+"/"+path),
|
||||
Modify: user.Perm.Modify,
|
||||
Expand: true,
|
||||
ReadHeader: config.Server.TypeDetectionByHeader,
|
||||
Checker: user, // the public user acts as the rules checker for share access
|
||||
Token: link.Token,
|
||||
})
|
||||
if err != nil {
|
||||
return errToStatus(err), err
|
||||
}
|
||||
|
||||
// Set the file info in the `data` object
|
||||
data.raw = file
|
||||
|
||||
// Call the next handler with the data
|
||||
return fn(w, r, data)
|
||||
}
|
||||
}
|
||||
|
||||
// Middleware to ensure the user is an admin
|
||||
func withAdminHelper(fn handleFunc) handleFunc {
|
||||
return withUserHelper(func(w http.ResponseWriter, r *http.Request, data *requestContext) (int, error) {
|
||||
// Ensure the user has admin permissions
|
||||
if !data.user.Perm.Admin {
|
||||
return http.StatusForbidden, nil
|
||||
}
|
||||
|
||||
// Proceed to the actual handler if the user is admin
|
||||
return fn(w, r, data)
|
||||
})
|
||||
}
|
||||
|
||||
// Middleware to retrieve and authenticate user
|
||||
func withUserHelper(fn handleFunc) handleFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request, data *requestContext) (int, error) {
|
||||
keyFunc := func(token *jwt.Token) (interface{}, error) {
|
||||
return config.Auth.Key, nil
|
||||
}
|
||||
tokenString, err := extractToken(r)
|
||||
if err != nil {
|
||||
return http.StatusUnauthorized, err
|
||||
}
|
||||
|
||||
var tk users.AuthToken
|
||||
token, err := jwt.ParseWithClaims(tokenString, &tk, keyFunc)
|
||||
if err != nil {
|
||||
return http.StatusUnauthorized, fmt.Errorf("error processing token, %v", err)
|
||||
}
|
||||
if !token.Valid {
|
||||
return http.StatusUnauthorized, fmt.Errorf("invalid token")
|
||||
}
|
||||
if isRevokedApiKey(tk.Key) || tk.Expires < time.Now().Unix() {
|
||||
return http.StatusUnauthorized, fmt.Errorf("token expired or revoked")
|
||||
}
|
||||
// Check if the token is about to expire and send a header to renew it
|
||||
if tk.Expires < time.Now().Add(time.Hour).Unix() {
|
||||
w.Header().Add("X-Renew-Token", "true")
|
||||
}
|
||||
// Retrieve the user from the store and store it in the context
|
||||
data.user, err = store.Users.Get(config.Server.Root, tk.BelongsTo)
|
||||
if err != nil {
|
||||
return http.StatusInternalServerError, err
|
||||
}
|
||||
// Call the handler function, passing in the context
|
||||
return fn(w, r, data)
|
||||
}
|
||||
}
|
||||
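A client-oriented sketch of reacting to the X-Renew-Token hint set above. The renew endpoint path is an assumption (the previous router exposed it at /api/renew); the header name and semantics come directly from this middleware, and the plain-text response matches what the renew handler writes.

package main

import (
	"io"
	"net/http"
)

// maybeRenew checks the X-Renew-Token response header and, when the server
// signals that the current token is close to expiry, fetches a fresh one.
func maybeRenew(client *http.Client, base, token string, resp *http.Response) (string, error) {
	if resp.Header.Get("X-Renew-Token") != "true" {
		return token, nil // current token is still fine
	}
	req, err := http.NewRequest(http.MethodGet, base+"/api/renew", nil) // assumed path
	if err != nil {
		return token, err
	}
	req.Header.Set("Authorization", "Bearer "+token)
	renewResp, err := client.Do(req)
	if err != nil {
		return token, err
	}
	defer renewResp.Body.Close()
	fresh, err := io.ReadAll(renewResp.Body) // the new token comes back as plain text
	if err != nil {
		return token, err
	}
	return string(fresh), nil
}
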
|
||||
// Middleware to ensure the user is either the requested user or an admin
|
||||
func withSelfOrAdminHelper(fn handleFunc) handleFunc {
|
||||
return withUserHelper(func(w http.ResponseWriter, r *http.Request, data *requestContext) (int, error) {
|
||||
// Check if the current user is the same as the requested user or if they are an admin
|
||||
if !data.user.Perm.Admin {
|
||||
return http.StatusForbidden, nil
|
||||
}
|
||||
// Call the actual handler function with the updated context
|
||||
return fn(w, r, data)
|
||||
})
|
||||
}
|
||||
|
||||
func wrapHandler(fn handleFunc) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
data := &requestContext{
|
||||
Runner: &runner.Runner{Enabled: config.Server.EnableExec, Settings: config},
|
||||
}
|
||||
|
||||
// Call the actual handler function and get status code and error
|
||||
status, err := fn(w, r, data)
|
||||
|
||||
// Handle the error case if there is one
|
||||
if err != nil {
|
||||
// Create an error response in JSON format
|
||||
response := &HttpResponse{
|
||||
Status: status, // Use the status code from the middleware
|
||||
Message: err.Error(),
|
||||
}
|
||||
|
||||
// Set the content type to JSON and status code
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
w.WriteHeader(status)
|
||||
|
||||
// Marshal the error response to JSON
|
||||
errorBytes, marshalErr := json.Marshal(response)
|
||||
if marshalErr != nil {
|
||||
log.Printf("Error marshalling error response: %v", marshalErr)
|
||||
http.Error(w, "Internal Server Error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
// Write the JSON error response
|
||||
if _, writeErr := w.Write(errorBytes); writeErr != nil {
|
||||
log.Printf("Error writing error response: %v", writeErr)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// No error, proceed to write status if non-zero
|
||||
if status != 0 {
|
||||
w.WriteHeader(status)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func withPermShareHelper(fn handleFunc) handleFunc {
|
||||
return withUserHelper(func(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) {
|
||||
if !d.user.Perm.Share {
|
||||
return http.StatusForbidden, nil
|
||||
}
|
||||
return fn(w, r, d)
|
||||
})
|
||||
}
|
||||
|
||||
func withPermShare(fn handleFunc) http.HandlerFunc {
|
||||
return wrapHandler(withPermShareHelper(fn))
|
||||
}
|
||||
|
||||
// Example of wrapping specific middleware functions for use with http.HandleFunc
|
||||
func withHashFile(fn handleFunc) http.HandlerFunc {
|
||||
return wrapHandler(withHashFileHelper(fn))
|
||||
}
|
||||
|
||||
func withAdmin(fn handleFunc) http.HandlerFunc {
|
||||
return wrapHandler(withAdminHelper(fn))
|
||||
}
|
||||
|
||||
func withUser(fn handleFunc) http.HandlerFunc {
|
||||
return wrapHandler(withUserHelper(fn))
|
||||
}
|
||||
|
||||
func withSelfOrAdmin(fn handleFunc) http.HandlerFunc {
|
||||
return wrapHandler(withSelfOrAdminHelper(fn))
|
||||
}
|
||||
|
||||
func muxWithMiddleware(mux *http.ServeMux) *http.ServeMux {
|
||||
wrappedMux := http.NewServeMux()
|
||||
wrappedMux.Handle("/", LoggingMiddleware(mux))
|
||||
return wrappedMux
|
||||
}
|
||||
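To show how these wrappers compose with the standard library router, here is a hedged wiring sketch using handlers defined in this change. The paths are illustrative except for /health, /api/raw, and /api/preview, which match the swagger annotations elsewhere in this commit.

// Sketch only: how the wrapped handlers might be mounted on a net/http mux.
func newRouter() *http.ServeMux {
	mux := http.NewServeMux()
	mux.HandleFunc("/health", healthHandler)
	mux.HandleFunc("/api/raw", withUser(rawHandler))
	mux.HandleFunc("/api/preview", withUser(previewHandler))
	mux.HandleFunc("/public/share/", withHashFile(publicShareHandler))
	mux.HandleFunc("/public/dl/", withHashFile(publicDlHandler))
	return muxWithMiddleware(mux) // wraps everything in LoggingMiddleware
}

func serve() error {
	return http.ListenAndServe(":8080", newRouter()) // address is illustrative
}
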
|
||||
// ResponseWriterWrapper wraps the standard http.ResponseWriter to capture the status code
|
||||
type ResponseWriterWrapper struct {
|
||||
http.ResponseWriter
|
||||
StatusCode int
|
||||
wroteHeader bool
|
||||
}
|
||||
|
||||
// WriteHeader captures the status code and ensures it's only written once
|
||||
func (w *ResponseWriterWrapper) WriteHeader(statusCode int) {
|
||||
if !w.wroteHeader { // Prevent WriteHeader from being called multiple times
|
||||
if statusCode == 0 {
|
||||
statusCode = http.StatusInternalServerError
|
||||
}
|
||||
w.StatusCode = statusCode
|
||||
w.ResponseWriter.WriteHeader(statusCode)
|
||||
w.wroteHeader = true
|
||||
}
|
||||
}
|
||||
|
||||
// Write is the method to write the response body and ensure WriteHeader is called
|
||||
func (w *ResponseWriterWrapper) Write(b []byte) (int, error) {
|
||||
if !w.wroteHeader { // Default to 200 if WriteHeader wasn't called explicitly
|
||||
w.WriteHeader(http.StatusOK)
|
||||
}
|
||||
return w.ResponseWriter.Write(b)
|
||||
}
|
||||
|
||||
// LoggingMiddleware logs each request and its status code
|
||||
func LoggingMiddleware(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
start := time.Now()
|
||||
|
||||
// Wrap the ResponseWriter to capture the status code
|
||||
wrappedWriter := &ResponseWriterWrapper{ResponseWriter: w, StatusCode: http.StatusOK}
|
||||
|
||||
// Call the next handler
|
||||
next.ServeHTTP(wrappedWriter, r)
|
||||
|
||||
// Determine the color based on the status code
|
||||
color := "\033[32m" // Default green color
|
||||
if wrappedWriter.StatusCode >= 300 && wrappedWriter.StatusCode < 500 {
|
||||
color = "\033[33m" // Yellow for client errors (4xx)
|
||||
} else if wrappedWriter.StatusCode >= 500 {
|
||||
color = "\033[31m" // Red for server errors (5xx)
|
||||
}
|
||||
// Capture the full URL path including the query parameters
|
||||
fullURL := r.URL.Path
|
||||
if r.URL.RawQuery != "" {
|
||||
fullURL += "?" + r.URL.RawQuery
|
||||
}
|
||||
|
||||
// Log the request and its status code
|
||||
log.Printf("%s%-7s | %3d | %-15s | %-12s | \"%s\"%s",
|
||||
color,
|
||||
r.Method,
|
||||
wrappedWriter.StatusCode, // Now capturing the correct status
|
||||
r.RemoteAddr,
|
||||
time.Since(start).String(),
|
||||
fullURL,
|
||||
"\033[0m", // Reset color
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
func renderJSON(w http.ResponseWriter, _ *http.Request, data interface{}) (int, error) {
|
||||
marsh, err := json.Marshal(data)
|
||||
if err != nil {
|
||||
return http.StatusInternalServerError, err
|
||||
}
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
if _, err := w.Write(marsh); err != nil {
|
||||
return http.StatusInternalServerError, err
|
||||
}
|
||||
|
||||
return 0, nil
|
||||
}
|
|
@ -0,0 +1,252 @@
|
|||
package http
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/asdine/storm/v3"
|
||||
"github.com/gtsteffaniak/filebrowser/diskcache"
|
||||
"github.com/gtsteffaniak/filebrowser/img"
|
||||
"github.com/gtsteffaniak/filebrowser/settings"
|
||||
"github.com/gtsteffaniak/filebrowser/share"
|
||||
"github.com/gtsteffaniak/filebrowser/storage"
|
||||
"github.com/gtsteffaniak/filebrowser/storage/bolt"
|
||||
"github.com/gtsteffaniak/filebrowser/users"
|
||||
"github.com/gtsteffaniak/filebrowser/utils"
|
||||
)
|
||||
|
||||
func setupTestEnv(t *testing.T) {
|
||||
dbPath := filepath.Join(t.TempDir(), "db")
|
||||
db, err := storm.Open(dbPath)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
authStore, userStore, shareStore, settingsStore, err := bolt.NewStorage(db)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
store = &storage.Storage{
|
||||
Auth: authStore,
|
||||
Users: userStore,
|
||||
Share: shareStore,
|
||||
Settings: settingsStore,
|
||||
}
|
||||
fileCache = diskcache.NewNoOp() // mocked
|
||||
imgSvc = img.New(1) // mocked
|
||||
config = &settings.Config // mocked
|
||||
}
|
||||
|
||||
func TestWithAdminHelper(t *testing.T) {
|
||||
setupTestEnv(t)
|
||||
// Mock a user who has admin permissions
|
||||
adminUser := &users.User{
|
||||
ID: 1,
|
||||
Username: "admin",
|
||||
Perm: users.Permissions{Admin: true}, // Ensure the user is an admin
|
||||
}
|
||||
nonAdminUser := &users.User{
|
||||
ID: 2,
|
||||
Username: "non-admin",
|
||||
Perm: users.Permissions{Admin: false}, // Non-admin user
|
||||
}
|
||||
// Save the users to the mock database
|
||||
if err := store.Users.Save(adminUser); err != nil {
|
||||
t.Fatal("failed to save admin user:", err)
|
||||
}
|
||||
if err := store.Users.Save(nonAdminUser); err != nil {
|
||||
t.Fatal("failed to save non-admin user:", err)
|
||||
}
|
||||
// Test cases for different scenarios
|
||||
testCases := []struct {
|
||||
name string
|
||||
expectedStatusCode int
|
||||
user *users.User
|
||||
}{
|
||||
{
|
||||
name: "Admin access allowed",
|
||||
expectedStatusCode: http.StatusOK, // Admin should be able to access
|
||||
user: adminUser,
|
||||
},
|
||||
{
|
||||
name: "Non-admin access forbidden",
|
||||
expectedStatusCode: http.StatusForbidden, // Non-admin should be forbidden
|
||||
user: nonAdminUser,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
// Mock the context with the current user
|
||||
data := &requestContext{
|
||||
user: tc.user,
|
||||
}
|
||||
token, err := makeSignedTokenAPI(tc.user, "WEB_TOKEN_"+utils.GenerateRandomHash(4), time.Hour*2, tc.user.Perm)
|
||||
if err != nil {
|
||||
t.Fatalf("Error making token for request: %v", err)
|
||||
}
|
||||
|
||||
// Wrap the usersGetHandler with the middleware
|
||||
handler := withAdminHelper(mockHandler)
|
||||
|
||||
// Create a response recorder to capture the handler's output
|
||||
recorder := httptest.NewRecorder()
|
||||
// Create the request and apply the token as a cookie
|
||||
req, err := http.NewRequest(http.MethodGet, "/users", http.NoBody)
|
||||
if err != nil {
|
||||
t.Fatalf("Error creating request: %v", err)
|
||||
}
|
||||
req.AddCookie(&http.Cookie{
|
||||
Name: "auth",
|
||||
Value: token.Key,
|
||||
})
|
||||
|
||||
// Call the handler with the test request and mock context
|
||||
status, err := handler(recorder, req, data)
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
|
||||
// Verify the status code
|
||||
if status != tc.expectedStatusCode {
|
||||
t.Errorf("\"%v\" expected status code %d, got %d", tc.name, tc.expectedStatusCode, status)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestPublicShareHandlerAuthentication(t *testing.T) {
|
||||
setupTestEnv(t)
|
||||
|
||||
const passwordBcrypt = "$2y$10$TFAmdCbyd/mEZDe5fUeZJu.MaJQXRTwdqb/IQV.eTn6dWrF58gCSe" // bcrypt hashed password
|
||||
|
||||
testCases := []struct {
|
||||
name string
|
||||
share *share.Link
|
||||
token string
|
||||
password string
|
||||
extraHeaders map[string]string
|
||||
expectedStatusCode int
|
||||
}{
|
||||
{
|
||||
name: "Public share, no auth required",
|
||||
share: &share.Link{
|
||||
Hash: "public_hash",
|
||||
},
|
||||
expectedStatusCode: 0, // zero means 200 on helpers
|
||||
},
|
||||
{
|
||||
name: "Private share, no auth provided",
|
||||
share: &share.Link{
|
||||
Hash: "private_hash",
|
||||
UserID: 1,
|
||||
PasswordHash: passwordBcrypt,
|
||||
Token: "123",
|
||||
},
|
||||
expectedStatusCode: http.StatusUnauthorized,
|
||||
},
|
||||
{
|
||||
name: "Private share, valid token",
|
||||
share: &share.Link{
|
||||
Hash: "token_hash",
|
||||
UserID: 1,
|
||||
PasswordHash: passwordBcrypt,
|
||||
Token: "123",
|
||||
},
|
||||
token: "123",
|
||||
expectedStatusCode: 0, // zero means 200 on helpers
|
||||
},
|
||||
{
|
||||
name: "Private share, invalid password",
|
||||
share: &share.Link{
|
||||
Hash: "pw_hash",
|
||||
UserID: 1,
|
||||
PasswordHash: passwordBcrypt,
|
||||
Token: "123",
|
||||
},
|
||||
extraHeaders: map[string]string{
|
||||
"X-SHARE-PASSWORD": "wrong-password",
|
||||
},
|
||||
expectedStatusCode: http.StatusUnauthorized,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
// Save the share in the mock store
|
||||
if err := store.Share.Save(tc.share); err != nil {
|
||||
t.Fatal("failed to save share:", err)
|
||||
}
|
||||
|
||||
// Create a response recorder to capture handler output
|
||||
recorder := httptest.NewRecorder()
|
||||
|
||||
// Wrap the handler with authentication middleware
|
||||
handler := withHashFileHelper(publicShareHandler)
|
||||
if err := store.Settings.Save(&settings.Settings{
|
||||
Auth: settings.Auth{
|
||||
Key: []byte("key"),
|
||||
},
|
||||
}); err != nil {
|
||||
t.Fatalf("failed to save settings: %v", err)
|
||||
}
|
||||
|
||||
// Prepare the request with query parameters and optional headers
|
||||
req := newTestRequest(t, tc.share.Hash, tc.token, tc.password, tc.extraHeaders)
|
||||
|
||||
// Serve the request
|
||||
status, err := handler(recorder, req, &requestContext{})
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
|
||||
// Check if the response matches the expected status code
|
||||
if status != tc.expectedStatusCode {
|
||||
t.Errorf("expected status code %d, got %d", tc.expectedStatusCode, status)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Helper function to create a new HTTP request with optional parameters
|
||||
func newTestRequest(t *testing.T, hash, token, password string, headers map[string]string) *http.Request {
|
||||
req := newHTTPRequest(t, hash, func(r *http.Request) {
|
||||
// Set query parameters based on provided values
|
||||
q := r.URL.Query()
|
||||
q.Set("path", "/")
|
||||
q.Set("hash", hash)
|
||||
if token != "" {
|
||||
q.Set("token", token)
|
||||
}
|
||||
if password != "" {
|
||||
q.Set("password", password)
|
||||
}
|
||||
r.URL.RawQuery = q.Encode()
|
||||
|
||||
// Set any extra headers if provided
|
||||
for key, value := range headers {
|
||||
r.Header.Set(key, value)
|
||||
}
|
||||
})
|
||||
return req
|
||||
}
|
||||
|
||||
func mockHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) {
|
||||
return http.StatusOK, nil // mock response
|
||||
}
|
||||
|
||||
// newHTTPRequest builds a GET request for the given share hash and applies any request modifiers.
|
||||
func newHTTPRequest(t *testing.T, hash string, requestModifiers ...func(*http.Request)) *http.Request {
|
||||
t.Helper()
|
||||
url := "/public/share/" + hash + "/" // Dynamically include the hash in the URL path
|
||||
r, err := http.NewRequest(http.MethodGet, url, http.NoBody)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to create request: %v", err)
|
||||
}
|
||||
for _, modify := range requestModifiers {
|
||||
modify(r)
|
||||
}
|
||||
return r
|
||||
}
|
|
@ -1,4 +1,3 @@
|
|||
//go:generate go-enum --sql --marshal --names --file $GOFILE
|
||||
package http
|
||||
|
||||
import (
|
||||
|
@ -8,21 +7,12 @@ import (
|
|||
"io"
|
||||
"net/http"
|
||||
"os"
|
||||
|
||||
"github.com/gorilla/mux"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/gtsteffaniak/filebrowser/files"
|
||||
"github.com/gtsteffaniak/filebrowser/img"
|
||||
)
|
||||
|
||||
/*
|
||||
ENUM(
|
||||
thumb
|
||||
big
|
||||
)
|
||||
*/
|
||||
type PreviewSize int
|
||||
|
||||
type ImgService interface {
|
||||
FormatFromExtension(ext string) (img.Format, error)
|
||||
Resize(ctx context.Context, in io.Reader, width, height int, out io.Writer, options ...img.Option) error
|
||||
|
@ -34,82 +24,92 @@ type FileCache interface {
|
|||
Delete(ctx context.Context, key string) error
|
||||
}
|
||||
|
||||
func previewHandler(imgSvc ImgService, fileCache FileCache, enableThumbnails, resizePreview bool) handleFunc {
|
||||
return withUser(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
|
||||
// previewHandler handles the preview request for images.
|
||||
// @Summary Get image preview
|
||||
// @Description Returns a preview image based on the requested path and size.
|
||||
// @Tags Resources
|
||||
// @Accept json
|
||||
// @Produce json
|
||||
// @Param path query string true "File path of the image to preview"
|
||||
// @Param size query string false "Preview size ('small' or 'large'). Default is based on server config."
|
||||
// @Success 200 {file} file "Preview image content"
|
||||
// @Failure 202 {object} map[string]string "Download permissions required"
|
||||
// @Failure 400 {object} map[string]string "Invalid request path"
|
||||
// @Failure 404 {object} map[string]string "File not found"
|
||||
// @Failure 415 {object} map[string]string "Unsupported file type for preview"
|
||||
// @Failure 500 {object} map[string]string "Internal server error"
|
||||
// @Router /api/preview [get]
|
||||
func previewHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) {
|
||||
path := r.URL.Query().Get("path")
|
||||
previewSize := r.URL.Query().Get("size")
|
||||
if previewSize != "small" {
|
||||
previewSize = "large"
|
||||
}
|
||||
|
||||
if path == "" {
|
||||
return http.StatusBadRequest, fmt.Errorf("invalid request path")
|
||||
}
|
||||
file, err := files.FileInfoFaster(files.FileOptions{
|
||||
Path: filepath.Join(d.user.Scope, path),
|
||||
Modify: d.user.Perm.Modify,
|
||||
Expand: true,
|
||||
ReadHeader: config.Server.TypeDetectionByHeader,
|
||||
Checker: d.user,
|
||||
})
|
||||
if err != nil {
|
||||
return errToStatus(err), err
|
||||
}
|
||||
realPath, _, err := files.GetRealPath(file.Path)
|
||||
if err != nil {
|
||||
return http.StatusInternalServerError, err
|
||||
}
|
||||
file.Path = realPath
|
||||
if file.Type == "directory" {
|
||||
return http.StatusBadRequest, fmt.Errorf("can't create preview for directory")
|
||||
}
|
||||
setContentDisposition(w, r, file)
|
||||
if file.Type != "image" {
|
||||
return http.StatusNotImplemented, fmt.Errorf("can't create preview for %s type", file.Type)
|
||||
}
|
||||
|
||||
if (previewSize == "large" && !config.Server.ResizePreview) ||
|
||||
(previewSize == "small" && !config.Server.EnableThumbnails) {
|
||||
if !d.user.Perm.Download {
|
||||
return http.StatusAccepted, nil
|
||||
}
|
||||
vars := mux.Vars(r)
|
||||
|
||||
previewSize, err := ParsePreviewSize(vars["size"])
|
||||
if err != nil {
|
||||
return http.StatusBadRequest, err
|
||||
}
|
||||
|
||||
file, err := files.FileInfoFaster(files.FileOptions{
|
||||
Path: "/" + vars["path"],
|
||||
Modify: d.user.Perm.Modify,
|
||||
Expand: true,
|
||||
ReadHeader: d.server.TypeDetectionByHeader,
|
||||
Checker: d,
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return errToStatus(err), err
|
||||
}
|
||||
setContentDisposition(w, r, file)
|
||||
|
||||
switch file.Type {
|
||||
case "image":
|
||||
return handleImagePreview(w, r, imgSvc, fileCache, file, previewSize, enableThumbnails, resizePreview)
|
||||
default:
|
||||
return http.StatusNotImplemented, fmt.Errorf("can't create preview for %s type", file.Type)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func handleImagePreview(
|
||||
w http.ResponseWriter,
|
||||
r *http.Request,
|
||||
imgSvc ImgService,
|
||||
fileCache FileCache,
|
||||
file *files.FileInfo,
|
||||
previewSize PreviewSize,
|
||||
enableThumbnails, resizePreview bool,
|
||||
) (int, error) {
|
||||
if (previewSize == PreviewSizeBig && !resizePreview) ||
|
||||
(previewSize == PreviewSizeThumb && !enableThumbnails) {
|
||||
return rawFileHandler(w, r, file)
|
||||
}
|
||||
format, err := imgSvc.FormatFromExtension(file.Extension)
|
||||
|
||||
format, err := imgSvc.FormatFromExtension(filepath.Ext(file.Name))
|
||||
// Unsupported extensions directly return the raw data
|
||||
if err == img.ErrUnsupportedFormat || format == img.FormatGif {
|
||||
if !d.user.Perm.Download {
|
||||
return http.StatusAccepted, nil
|
||||
}
|
||||
return rawFileHandler(w, r, file)
|
||||
}
|
||||
if err != nil {
|
||||
return errToStatus(err), err
|
||||
}
|
||||
|
||||
cacheKey := previewCacheKey(file, previewSize)
|
||||
resizedImage, ok, err := fileCache.Load(r.Context(), cacheKey)
|
||||
if err != nil {
|
||||
return errToStatus(err), err
|
||||
}
|
||||
|
||||
if !ok {
|
||||
resizedImage, err = createPreview(imgSvc, fileCache, file, previewSize)
|
||||
if err != nil {
|
||||
return errToStatus(err), err
|
||||
}
|
||||
}
|
||||
|
||||
w.Header().Set("Cache-Control", "private")
|
||||
http.ServeContent(w, r, file.Name, file.ModTime, bytes.NewReader(resizedImage))
|
||||
http.ServeContent(w, r, file.Path, file.ModTime, bytes.NewReader(resizedImage))
|
||||
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
func createPreview(imgSvc ImgService, fileCache FileCache,
|
||||
file *files.FileInfo, previewSize PreviewSize) ([]byte, error) {
|
||||
func createPreview(imgSvc ImgService, fileCache FileCache, file *files.FileInfo, previewSize string) ([]byte, error) {
|
||||
fd, err := os.Open(file.Path)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
@ -123,11 +123,11 @@ func createPreview(imgSvc ImgService, fileCache FileCache,
|
|||
)
|
||||
|
||||
switch {
|
||||
case previewSize == PreviewSizeBig:
|
||||
case previewSize == "large":
|
||||
width = 1080
|
||||
height = 1080
|
||||
options = append(options, img.WithMode(img.ResizeModeFit), img.WithQuality(img.QualityMedium))
|
||||
case previewSize == PreviewSizeThumb:
|
||||
case previewSize == "small":
|
||||
width = 256
|
||||
height = 256
|
||||
options = append(options, img.WithMode(img.ResizeModeFill), img.WithQuality(img.QualityLow), img.WithFormat(img.FormatJpeg))
|
||||
|
@ -150,6 +150,7 @@ func createPreview(imgSvc ImgService, fileCache FileCache,
|
|||
return buf.Bytes(), nil
|
||||
}
|
||||
|
||||
func previewCacheKey(f *files.FileInfo, previewSize PreviewSize) string {
|
||||
// Generates a cache key for the preview image
|
||||
func previewCacheKey(f *files.FileInfo, previewSize string) string {
|
||||
return fmt.Sprintf("%x%x%x", f.RealPath(), f.ModTime.Unix(), previewSize)
|
||||
}
|
||||
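Based on the swagger annotations for /api/preview above, here is a short hedged example of requesting a thumbnail; the host and authentication token are placeholders.

package main

import (
	"io"
	"net/http"
	"net/url"
	"os"
)

func main() {
	// Request a small (256x256) preview of an image via the documented query parameters.
	q := url.Values{}
	q.Set("path", "/photos/cat.jpg")
	q.Set("size", "small")
	req, _ := http.NewRequest(http.MethodGet, "http://localhost:8080/api/preview?"+q.Encode(), nil)
	req.Header.Set("Authorization", "Bearer <api-token>")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	out, _ := os.Create("cat-thumb.jpg")
	defer out.Close()
	_, _ = io.Copy(out, resp.Body) // JPEG thumbnail when thumbnails are enabled
}
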
|
|
|
@ -1,100 +0,0 @@
|
|||
// Code generated by go-enum
|
||||
// DO NOT EDIT!
|
||||
|
||||
package http
|
||||
|
||||
import (
|
||||
"database/sql/driver"
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
const (
|
||||
// PreviewSizeThumb is a PreviewSize of type Thumb
|
||||
PreviewSizeThumb PreviewSize = iota
|
||||
// PreviewSizeBig is a PreviewSize of type Big
|
||||
PreviewSizeBig
|
||||
)
|
||||
|
||||
const _PreviewSizeName = "thumbbig"
|
||||
|
||||
var _PreviewSizeNames = []string{
|
||||
_PreviewSizeName[0:5],
|
||||
_PreviewSizeName[5:8],
|
||||
}
|
||||
|
||||
// PreviewSizeNames returns a list of possible string values of PreviewSize.
|
||||
func PreviewSizeNames() []string {
|
||||
tmp := make([]string, len(_PreviewSizeNames))
|
||||
copy(tmp, _PreviewSizeNames)
|
||||
return tmp
|
||||
}
|
||||
|
||||
var _PreviewSizeMap = map[PreviewSize]string{
|
||||
0: _PreviewSizeName[0:5],
|
||||
1: _PreviewSizeName[5:8],
|
||||
}
|
||||
|
||||
// String implements the Stringer interface.
|
||||
func (x PreviewSize) String() string {
|
||||
if str, ok := _PreviewSizeMap[x]; ok {
|
||||
return str
|
||||
}
|
||||
return fmt.Sprintf("PreviewSize(%d)", x)
|
||||
}
|
||||
|
||||
var _PreviewSizeValue = map[string]PreviewSize{
|
||||
_PreviewSizeName[0:5]: 0,
|
||||
_PreviewSizeName[5:8]: 1,
|
||||
}
|
||||
|
||||
// ParsePreviewSize attempts to convert a string to a PreviewSize
|
||||
func ParsePreviewSize(name string) (PreviewSize, error) {
|
||||
if x, ok := _PreviewSizeValue[name]; ok {
|
||||
return x, nil
|
||||
}
|
||||
return PreviewSize(0), fmt.Errorf("%s is not a valid PreviewSize, try [%s]", name, strings.Join(_PreviewSizeNames, ", "))
|
||||
}
|
||||
|
||||
// MarshalText implements the text marshaller method
|
||||
func (x PreviewSize) MarshalText() ([]byte, error) {
|
||||
return []byte(x.String()), nil
|
||||
}
|
||||
|
||||
// UnmarshalText implements the text unmarshaller method
|
||||
func (x *PreviewSize) UnmarshalText(text []byte) error {
|
||||
name := string(text)
|
||||
tmp, err := ParsePreviewSize(name)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
*x = tmp
|
||||
return nil
|
||||
}
|
||||
|
||||
// Scan implements the Scanner interface.
|
||||
func (x *PreviewSize) Scan(value interface{}) error {
|
||||
var name string
|
||||
|
||||
switch v := value.(type) {
|
||||
case string:
|
||||
name = v
|
||||
case []byte:
|
||||
name = string(v)
|
||||
case nil:
|
||||
*x = PreviewSize(0)
|
||||
return nil
|
||||
}
|
||||
|
||||
tmp, err := ParsePreviewSize(name)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
*x = tmp
|
||||
return nil
|
||||
}
|
||||
|
||||
// Value implements the driver Valuer interface.
|
||||
func (x PreviewSize) Value() (driver.Value, error) {
|
||||
return x.String(), nil
|
||||
}
|
|
@ -1,11 +1,11 @@
|
|||
package http
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"path"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/crypto/bcrypt"
|
||||
|
@ -14,97 +14,58 @@ import (
|
|||
"github.com/gtsteffaniak/filebrowser/settings"
|
||||
"github.com/gtsteffaniak/filebrowser/share"
|
||||
"github.com/gtsteffaniak/filebrowser/users"
|
||||
|
||||
_ "github.com/gtsteffaniak/filebrowser/swagger/docs"
|
||||
)
|
||||
|
||||
var withHashFile = func(fn handleFunc) handleFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
|
||||
id, path := ifPathWithName(r)
|
||||
link, err := d.store.Share.GetByHash(id)
|
||||
if err != nil {
|
||||
return errToStatus(err), err
|
||||
}
|
||||
if link.Hash != "" {
|
||||
var status int
|
||||
status, err = authenticateShareRequest(r, link)
|
||||
if err != nil || status != 0 {
|
||||
return status, err
|
||||
}
|
||||
}
|
||||
d.user = &users.PublicUser
|
||||
realPath, isDir, err := files.GetRealPath(d.user.Scope, link.Path, path)
|
||||
if err != nil {
|
||||
return http.StatusNotFound, err
|
||||
}
|
||||
file, err := files.FileInfoFaster(files.FileOptions{
|
||||
Path: realPath,
|
||||
IsDir: isDir,
|
||||
Modify: d.user.Perm.Modify,
|
||||
Expand: true,
|
||||
ReadHeader: d.server.TypeDetectionByHeader,
|
||||
Checker: d,
|
||||
Token: link.Token,
|
||||
})
|
||||
if err != nil {
|
||||
return errToStatus(err), err
|
||||
}
|
||||
d.raw = file
|
||||
return fn(w, r, d)
|
||||
}
|
||||
}
|
||||
|
||||
func ifPathWithName(r *http.Request) (id, filePath string) {
|
||||
pathElements := strings.Split(r.URL.Path, "/")
|
||||
id = pathElements[0]
|
||||
allButFirst := path.Join(pathElements[1:]...)
|
||||
return id, allButFirst
|
||||
}
|
||||
|
||||
var publicShareHandler = withHashFile(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
|
||||
func publicShareHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) {
|
||||
file, ok := d.raw.(*files.FileInfo)
|
||||
if !ok {
|
||||
return http.StatusInternalServerError, fmt.Errorf("failed to assert type *files.FileInfo")
|
||||
}
|
||||
|
||||
file.Path = strings.TrimPrefix(file.Path, settings.Config.Server.Root)
|
||||
if file.IsDir {
|
||||
return renderJSON(w, r, file)
|
||||
}
|
||||
|
||||
return renderJSON(w, r, file)
|
||||
})
|
||||
}
|
||||
|
||||
var publicUserGetHandler = func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
|
||||
func publicUserGetHandler(w http.ResponseWriter, r *http.Request) {
|
||||
	// Render the public user object as JSON.
|
||||
return renderJSON(w, r, users.PublicUser)
|
||||
status, err := renderJSON(w, r, users.PublicUser)
|
||||
if err != nil {
|
||||
http.Error(w, http.StatusText(status), status)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
var publicDlHandler = withHashFile(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
|
||||
file, ok := d.raw.(*files.FileInfo)
|
||||
if !ok {
|
||||
return http.StatusInternalServerError, fmt.Errorf("failed to assert type *files.FileInfo")
|
||||
func publicDlHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) {
|
||||
file, _ := d.raw.(*files.FileInfo)
|
||||
if file == nil {
|
||||
return http.StatusInternalServerError, fmt.Errorf("failed to assert type files.FileInfo")
|
||||
}
|
||||
if d.user == nil {
|
||||
return http.StatusUnauthorized, fmt.Errorf("failed to get user")
|
||||
}
|
||||
|
||||
if !file.IsDir {
|
||||
return rawFileHandler(w, r, file)
|
||||
if file.Type == "directory" {
|
||||
return rawDirHandler(w, r, d, file)
|
||||
}
|
||||
|
||||
return rawDirHandler(w, r, d, file)
|
||||
})
|
||||
return rawFileHandler(w, r, file)
|
||||
}
|
||||
|
||||
func authenticateShareRequest(r *http.Request, l *share.Link) (int, error) {
|
||||
if l.PasswordHash == "" {
|
||||
return 0, nil
|
||||
return 200, nil
|
||||
}
|
||||
|
||||
if r.URL.Query().Get("token") == l.Token {
|
||||
return 0, nil
|
||||
return 200, nil
|
||||
}
|
||||
|
||||
password := r.Header.Get("X-SHARE-PASSWORD")
|
||||
password, err := url.QueryUnescape(password)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
return http.StatusUnauthorized, err
|
||||
}
|
||||
if password == "" {
|
||||
return http.StatusUnauthorized, nil
|
||||
|
@ -113,12 +74,25 @@ func authenticateShareRequest(r *http.Request, l *share.Link) (int, error) {
|
|||
if errors.Is(err, bcrypt.ErrMismatchedHashAndPassword) {
|
||||
return http.StatusUnauthorized, nil
|
||||
}
|
||||
return 0, err
|
||||
return 401, err
|
||||
}
|
||||
return 0, nil
|
||||
return 200, nil
|
||||
}
|
||||
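A hedged client sketch of the two ways a password-protected share can be unlocked, matching the checks above: the share token as a query parameter, or the plain password in the X-SHARE-PASSWORD header. The public share route shown follows the pattern used in the tests included in this change.

package main

import "net/http"

func shareRequests(base, hash, token, password string) []*http.Request {
	// Unlock with the share token issued when the link was created.
	byToken, _ := http.NewRequest(http.MethodGet,
		base+"/public/share/"+hash+"/?path=/&hash="+hash+"&token="+token, nil)

	// Or unlock by sending the share password in a header; the server compares
	// it against the stored bcrypt hash.
	byPassword, _ := http.NewRequest(http.MethodGet,
		base+"/public/share/"+hash+"/?path=/&hash="+hash, nil)
	byPassword.Header.Set("X-SHARE-PASSWORD", password)

	return []*http.Request{byToken, byPassword}
}
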
|
||||
func healthHandler(w http.ResponseWriter, _ *http.Request) {
|
||||
w.WriteHeader(http.StatusOK)
|
||||
_, _ = w.Write([]byte(`{"status":"OK"}`))
|
||||
// health godoc
|
||||
// @Summary Health Check
|
||||
// @Schemes
|
||||
// @Description Returns the health status of the API.
|
||||
// @Tags Health
|
||||
// @Accept json
|
||||
// @Produce json
|
||||
// @Success 200 {object} HttpResponse "successful health check response"
|
||||
// @Router /health [get]
|
||||
func healthHandler(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
response := HttpResponse{Message: "ok"} // Create response with status "ok"
|
||||
err := json.NewEncoder(w).Encode(response) // Encode the response into JSON
|
||||
if err != nil {
|
||||
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
|
||||
}
|
||||
}
|
||||
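A quick example of calling the endpoint documented above; only the host and port are assumptions.

package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	resp, err := http.Get("http://localhost:8080/health")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.StatusCode, string(body)) // expected: 200 {"message":"ok"}
}
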
|
|
|
@ -1,137 +0,0 @@
|
|||
package http
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/asdine/storm/v3"
|
||||
|
||||
"github.com/gtsteffaniak/filebrowser/settings"
|
||||
"github.com/gtsteffaniak/filebrowser/share"
|
||||
"github.com/gtsteffaniak/filebrowser/storage"
|
||||
"github.com/gtsteffaniak/filebrowser/storage/bolt"
|
||||
"github.com/gtsteffaniak/filebrowser/users"
|
||||
)
|
||||
|
||||
func TestPublicShareHandlerAuthentication(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
const passwordBcrypt = "$2y$10$TFAmdCbyd/mEZDe5fUeZJu.MaJQXRTwdqb/IQV.eTn6dWrF58gCSe" //nolint:gosec
|
||||
testCases := map[string]struct {
|
||||
share *share.Link
|
||||
req *http.Request
|
||||
expectedStatusCode int
|
||||
}{
|
||||
"Public share, no auth required": {
|
||||
share: &share.Link{Hash: "h"},
|
||||
req: newHTTPRequest(t),
|
||||
expectedStatusCode: 200,
|
||||
},
|
||||
"Private share, no auth provided, 401": {
|
||||
share: &share.Link{Hash: "h", UserID: 1, PasswordHash: passwordBcrypt, Token: "123"},
|
||||
req: newHTTPRequest(t),
|
||||
expectedStatusCode: 401,
|
||||
},
|
||||
"Private share, authentication via token": {
|
||||
share: &share.Link{Hash: "h", UserID: 1, PasswordHash: passwordBcrypt, Token: "123"},
|
||||
req: newHTTPRequest(t, func(r *http.Request) { r.URL.RawQuery = "token=123" }),
|
||||
expectedStatusCode: 200,
|
||||
},
|
||||
"Private share, authentication via invalid token, 401": {
|
||||
share: &share.Link{Hash: "h", UserID: 1, PasswordHash: passwordBcrypt, Token: "123"},
|
||||
req: newHTTPRequest(t, func(r *http.Request) { r.URL.RawQuery = "token=1234" }),
|
||||
expectedStatusCode: 401,
|
||||
},
|
||||
"Private share, authentication via password": {
|
||||
share: &share.Link{Hash: "h", UserID: 1, PasswordHash: passwordBcrypt, Token: "123"},
|
||||
req: newHTTPRequest(t, func(r *http.Request) { r.Header.Set("X-SHARE-PASSWORD", "password") }),
|
||||
expectedStatusCode: 200,
|
||||
},
|
||||
"Private share, authentication via invalid password, 401": {
|
||||
share: &share.Link{Hash: "h", UserID: 1, PasswordHash: passwordBcrypt, Token: "123"},
|
||||
req: newHTTPRequest(t, func(r *http.Request) { r.Header.Set("X-SHARE-PASSWORD", "wrong-password") }),
|
||||
expectedStatusCode: 401,
|
||||
},
|
||||
}
|
||||
|
||||
for name, tc := range testCases {
|
||||
for handlerName, handler := range map[string]handleFunc{"public share handler": publicShareHandler, "public dl handler": publicDlHandler} {
|
||||
name, tc, handlerName, handler := name, tc, handlerName, handler
|
||||
t.Run(fmt.Sprintf("%s: %s", handlerName, name), func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
dbPath := filepath.Join(t.TempDir(), "db")
|
||||
db, err := storm.Open(dbPath)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to open db: %v", err)
|
||||
}
|
||||
|
||||
t.Cleanup(func() {
|
||||
if err := db.Close(); err != nil { //nolint:govet
|
||||
t.Errorf("failed to close db: %v", err)
|
||||
}
|
||||
})
|
||||
authStore, userStore, shareStore, settingsStore, err := bolt.NewStorage(db)
|
||||
storage := &storage.Storage{
|
||||
Auth: authStore,
|
||||
Users: userStore,
|
||||
Share: shareStore,
|
||||
Settings: settingsStore,
|
||||
}
|
||||
if err != nil {
|
||||
t.Fatalf("failed to get storage: %v", err)
|
||||
}
|
||||
if err := storage.Share.Save(tc.share); err != nil {
|
||||
t.Fatalf("failed to save share: %v", err)
|
||||
}
|
||||
if err := storage.Settings.Save(&settings.Settings{
|
||||
Auth: settings.Auth{
|
||||
Key: []byte("key"),
|
||||
},
|
||||
}); err != nil {
|
||||
t.Fatalf("failed to save settings: %v", err)
|
||||
}
|
||||
|
||||
storage.Users = &customFSUser{
|
||||
Store: storage.Users,
|
||||
}
|
||||
|
||||
recorder := httptest.NewRecorder()
|
||||
handler := handle(handler, "", storage, &settings.Server{})
|
||||
handler.ServeHTTP(recorder, tc.req)
|
||||
result := recorder.Result()
|
||||
defer result.Body.Close()
|
||||
if result.StatusCode != tc.expectedStatusCode {
|
||||
t.Errorf("expected status code %d, got status code %d", tc.expectedStatusCode, result.StatusCode)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func newHTTPRequest(t *testing.T, requestModifiers ...func(*http.Request)) *http.Request {
|
||||
t.Helper()
|
||||
r, err := http.NewRequest(http.MethodGet, "h", http.NoBody)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to construct request: %v", err)
|
||||
}
|
||||
for _, modify := range requestModifiers {
|
||||
modify(r)
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
type customFSUser struct {
|
||||
users.Store
|
||||
}
|
||||
|
||||
func (cu *customFSUser) Get(baseScope string, id interface{}) (*users.User, error) {
|
||||
user, err := cu.Store.Get(baseScope, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return user, nil
|
||||
}
|
|
@ -77,21 +77,34 @@ func setContentDisposition(w http.ResponseWriter, r *http.Request, file *files.F
|
|||
}
|
||||
}
|
||||
|
||||
var rawHandler = withUser(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
|
||||
// rawHandler serves the raw content of a file, multiple files, or directory in various formats.
|
||||
// @Summary Get raw content of a file, multiple files, or directory
|
||||
// @Description Returns the raw content of a file, multiple files, or a directory. Supports downloading files as archives in various formats.
|
||||
// @Tags Resources
|
||||
// @Accept json
|
||||
// @Produce json
|
||||
// @Param path query string true "Path to the file or directory"
|
||||
// @Param files query string false "Comma-separated list of specific files within the directory (optional)"
|
||||
// @Param inline query bool false "If true, sets 'Content-Disposition' to 'inline'. Otherwise, defaults to 'attachment'."
|
||||
// @Param algo query string false "Compression algorithm for archiving multiple files or directories. Options: 'zip', 'tar', 'targz', 'tarbz2', 'tarxz', 'tarlz4', 'tarsz'. Default is 'zip'."
|
||||
// @Success 200 {file} file "Raw file or directory content, or archive for multiple files"
|
||||
// @Failure 202 {object} map[string]string "Download permissions required"
|
||||
// @Failure 400 {object} map[string]string "Invalid request path"
|
||||
// @Failure 404 {object} map[string]string "File or directory not found"
|
||||
// @Failure 415 {object} map[string]string "Unsupported file type for preview"
|
||||
// @Failure 500 {object} map[string]string "Internal server error"
|
||||
// @Router /api/raw [get]
|
||||
func rawHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) {
|
||||
if !d.user.Perm.Download {
|
||||
return http.StatusAccepted, nil
|
||||
}
|
||||
realPath, isDir, err := files.GetRealPath(d.user.Scope, r.URL.Path)
|
||||
if err != nil {
|
||||
return http.StatusInternalServerError, err
|
||||
}
|
||||
path := r.URL.Query().Get("path")
|
||||
file, err := files.FileInfoFaster(files.FileOptions{
|
||||
Path: realPath,
|
||||
IsDir: isDir,
|
||||
Path: filepath.Join(d.user.Scope, path),
|
||||
Modify: d.user.Perm.Modify,
|
||||
Expand: false,
|
||||
ReadHeader: d.server.TypeDetectionByHeader,
|
||||
Checker: d,
|
||||
ReadHeader: config.Server.TypeDetectionByHeader,
|
||||
Checker: d.user,
|
||||
})
|
||||
if err != nil {
|
||||
return errToStatus(err), err
|
||||
|
@@ -100,16 +113,15 @@ var rawHandler = withUser(func(w http.ResponseWriter, r *http.Request, d *data)
|
|||
setContentDisposition(w, r, file)
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
if !file.IsDir {
|
||||
return rawFileHandler(w, r, file)
|
||||
if file.Type == "directory" {
|
||||
return rawDirHandler(w, r, d, file)
|
||||
}
|
||||
|
||||
return rawDirHandler(w, r, d, file)
|
||||
})
|
||||
return rawFileHandler(w, r, file)
|
||||
}
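
A rough client-side sketch of the query-parameter style this handler now expects; the host, port, and the X-Auth token header name are illustrative assumptions, not taken from this diff.

package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
	"os"
)

func main() {
	// Assumed local address; adjust for your deployment.
	endpoint := "http://localhost:8080/api/raw"
	q := url.Values{}
	q.Set("path", "/docs/report.pdf") // the target path is now a query parameter
	q.Set("inline", "true")           // serve with Content-Disposition: inline

	req, err := http.NewRequest(http.MethodGet, endpoint+"?"+q.Encode(), nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("X-Auth", os.Getenv("FILEBROWSER_TOKEN")) // assumed auth header name

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.StatusCode)
	_, _ = io.Copy(os.Stdout, resp.Body) // stream the raw file bytes to stdout
}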
|
||||
|
||||
func addFile(ar archiver.Writer, d *data, path, commonPath string) error {
|
||||
if !d.Check(path) {
|
||||
func addFile(ar archiver.Writer, d *requestContext, path, commonPath string) error {
|
||||
if !d.user.Check(path) {
|
||||
return nil
|
||||
}
|
||||
info, err := os.Stat(path)
|
||||
|
@@ -160,12 +172,11 @@ func addFile(ar archiver.Writer, d *data, path, commonPath string) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
func rawDirHandler(w http.ResponseWriter, r *http.Request, d *data, file *files.FileInfo) (int, error) {
|
||||
func rawDirHandler(w http.ResponseWriter, r *http.Request, d *requestContext, file *files.FileInfo) (int, error) {
|
||||
filenames, err := parseQueryFiles(r, file, d.user)
|
||||
if err != nil {
|
||||
return http.StatusInternalServerError, err
|
||||
}
|
||||
|
||||
extension, ar, err := parseQueryAlgorithm(r)
|
||||
if err != nil {
|
||||
return http.StatusInternalServerError, err
|
||||
|
@@ -202,7 +213,8 @@ func rawDirHandler(w http.ResponseWriter, r *http.Request, d *data, file *files.
|
|||
}
|
||||
|
||||
func rawFileHandler(w http.ResponseWriter, r *http.Request, file *files.FileInfo) (int, error) {
|
||||
fd, err := os.Open(file.Path)
|
||||
realPath, _, _ := files.GetRealPath(file.Path)
|
||||
fd, err := os.Open(realPath)
|
||||
if err != nil {
|
||||
return http.StatusInternalServerError, err
|
||||
}
|
||||
|
|
|
@@ -14,132 +14,184 @@ import (
|
|||
|
||||
"github.com/gtsteffaniak/filebrowser/errors"
|
||||
"github.com/gtsteffaniak/filebrowser/files"
|
||||
"github.com/gtsteffaniak/filebrowser/fileutils"
|
||||
)
|
||||
|
||||
var resourceGetHandler = withUser(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
|
||||
realPath, isDir, err := files.GetRealPath(d.user.Scope, r.URL.Path)
|
||||
if err != nil {
|
||||
return http.StatusNotFound, err
|
||||
}
|
||||
// resourceGetHandler retrieves information about a resource.
|
||||
// @Summary Get resource information
|
||||
// @Description Returns metadata and optionally file contents for a specified resource path.
|
||||
// @Tags Resources
|
||||
// @Accept json
|
||||
// @Produce json
|
||||
// @Param path query string true "Path to the resource"
|
||||
// @Param source query string false "Name for the desired source, default is used if not provided"
|
||||
// @Param content query string false "Include file content if true"
|
||||
// @Param checksum query string false "Optional checksum validation"
|
||||
// @Success 200 {object} files.FileInfo "Resource metadata"
|
||||
// @Failure 404 {object} map[string]string "Resource not found"
|
||||
// @Failure 500 {object} map[string]string "Internal server error"
|
||||
// @Router /api/resources [get]
|
||||
func resourceGetHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) {
|
||||
// TODO source := r.URL.Query().Get("source")
|
||||
path := r.URL.Query().Get("path")
|
||||
file, err := files.FileInfoFaster(files.FileOptions{
|
||||
Path: realPath,
|
||||
IsDir: isDir,
|
||||
Path: filepath.Join(d.user.Scope, path),
|
||||
Modify: d.user.Perm.Modify,
|
||||
Expand: true,
|
||||
ReadHeader: d.server.TypeDetectionByHeader,
|
||||
Checker: d,
|
||||
ReadHeader: config.Server.TypeDetectionByHeader,
|
||||
Checker: d.user,
|
||||
Content: r.URL.Query().Get("content") == "true",
|
||||
})
|
||||
if err != nil {
|
||||
return errToStatus(err), err
|
||||
}
|
||||
if !file.IsDir {
|
||||
if checksum := r.URL.Query().Get("checksum"); checksum != "" {
|
||||
err := file.Checksum(checksum)
|
||||
if err == errors.ErrInvalidOption {
|
||||
return http.StatusBadRequest, nil
|
||||
} else if err != nil {
|
||||
return http.StatusInternalServerError, err
|
||||
}
|
||||
if file.Type == "directory" {
|
||||
return renderJSON(w, r, file)
|
||||
}
|
||||
if checksum := r.URL.Query().Get("checksum"); checksum != "" {
|
||||
err := file.Checksum(checksum)
|
||||
if err == errors.ErrInvalidOption {
|
||||
return http.StatusBadRequest, nil
|
||||
} else if err != nil {
|
||||
return http.StatusInternalServerError, err
|
||||
}
|
||||
}
|
||||
return renderJSON(w, r, file)
|
||||
})
|
||||
|
||||
func resourceDeleteHandler(fileCache FileCache) handleFunc {
|
||||
return withUser(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
|
||||
if r.URL.Path == "/" || !d.user.Perm.Delete {
|
||||
return http.StatusForbidden, nil
|
||||
}
|
||||
realPath, isDir, err := files.GetRealPath(d.user.Scope, r.URL.Path)
|
||||
if err != nil {
|
||||
return http.StatusNotFound, err
|
||||
}
|
||||
fileOpts := files.FileOptions{
|
||||
Path: realPath,
|
||||
IsDir: isDir,
|
||||
Modify: d.user.Perm.Modify,
|
||||
Expand: false,
|
||||
ReadHeader: d.server.TypeDetectionByHeader,
|
||||
Checker: d,
|
||||
}
|
||||
file, err := files.FileInfoFaster(fileOpts)
|
||||
if err != nil {
|
||||
return errToStatus(err), err
|
||||
}
|
||||
}
|
||||
|
||||
// delete thumbnails
|
||||
err = delThumbs(r.Context(), fileCache, file)
|
||||
if err != nil {
|
||||
return errToStatus(err), err
|
||||
}
|
||||
// resourceDeleteHandler deletes a resource at a specified path.
|
||||
// @Summary Delete a resource
|
||||
// @Description Deletes a resource located at the specified path.
|
||||
// @Tags Resources
|
||||
// @Accept json
|
||||
// @Produce json
|
||||
// @Param path query string true "Path to the resource"
|
||||
// @Param source query string false "Name for the desired source, default is used if not provided"
|
||||
// @Success 200 "Resource deleted successfully"
|
||||
// @Failure 403 {object} map[string]string "Forbidden"
|
||||
// @Failure 404 {object} map[string]string "Resource not found"
|
||||
// @Failure 500 {object} map[string]string "Internal server error"
|
||||
// @Router /api/resources [delete]
|
||||
func resourceDeleteHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) {
|
||||
// TODO source := r.URL.Query().Get("source")
|
||||
path := r.URL.Query().Get("path")
|
||||
if path == "/" || !d.user.Perm.Delete {
|
||||
return http.StatusForbidden, nil
|
||||
}
|
||||
realPath, isDir, err := files.GetRealPath(d.user.Scope, path)
|
||||
if err != nil {
|
||||
return http.StatusNotFound, err
|
||||
}
|
||||
fileOpts := files.FileOptions{
|
||||
Path: filepath.Join(d.user.Scope, path),
|
||||
IsDir: isDir,
|
||||
Modify: d.user.Perm.Modify,
|
||||
Expand: false,
|
||||
ReadHeader: config.Server.TypeDetectionByHeader,
|
||||
Checker: d.user,
|
||||
}
|
||||
file, err := files.FileInfoFaster(fileOpts)
|
||||
if err != nil {
|
||||
return errToStatus(err), err
|
||||
}
|
||||
|
||||
err = files.DeleteFiles(realPath, fileOpts)
|
||||
// delete thumbnails
|
||||
err = delThumbs(r.Context(), fileCache, file)
|
||||
if err != nil {
|
||||
return errToStatus(err), err
|
||||
}
|
||||
|
||||
err = files.DeleteFiles(realPath, fileOpts)
|
||||
if err != nil {
|
||||
return errToStatus(err), err
|
||||
}
|
||||
return http.StatusOK, nil
|
||||
|
||||
}
|
||||
|
||||
// resourcePostHandler creates or uploads a new resource.
|
||||
// @Summary Create or upload a resource
|
||||
// @Description Creates a new resource or uploads a file at the specified path. Supports file uploads and directory creation.
|
||||
// @Tags Resources
|
||||
// @Accept json
|
||||
// @Produce json
|
||||
// @Param path query string true "Path to the resource"
|
||||
// @Param source query string false "Name for the desired source, default is used if not provided"
|
||||
// @Param override query bool false "Override existing file if true"
|
||||
// @Success 200 "Resource created successfully"
|
||||
// @Failure 403 {object} map[string]string "Forbidden"
|
||||
// @Failure 404 {object} map[string]string "Resource not found"
|
||||
// @Failure 409 {object} map[string]string "Conflict - Resource already exists"
|
||||
// @Failure 500 {object} map[string]string "Internal server error"
|
||||
// @Router /api/resources [post]
|
||||
func resourcePostHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) {
|
||||
// TODO source := r.URL.Query().Get("source")
|
||||
path := r.URL.Query().Get("path")
|
||||
if !d.user.Perm.Create || !d.user.Check(path) {
|
||||
return http.StatusForbidden, nil
|
||||
}
|
||||
fileOpts := files.FileOptions{
|
||||
Path: filepath.Join(d.user.Scope, path),
|
||||
Modify: d.user.Perm.Modify,
|
||||
Expand: false,
|
||||
ReadHeader: config.Server.TypeDetectionByHeader,
|
||||
Checker: d.user,
|
||||
}
|
||||
// Directories creation on POST.
|
||||
if strings.HasSuffix(path, "/") {
|
||||
err := files.WriteDirectory(fileOpts)
|
||||
if err != nil {
|
||||
return errToStatus(err), err
|
||||
}
|
||||
return http.StatusOK, nil
|
||||
})
|
||||
}
|
||||
}
|
||||
file, err := files.FileInfoFaster(fileOpts)
|
||||
if err == nil {
|
||||
if r.URL.Query().Get("override") != "true" {
|
||||
return http.StatusConflict, nil
|
||||
}
|
||||
|
||||
func resourcePostHandler(fileCache FileCache) handleFunc {
|
||||
return withUser(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
|
||||
if !d.user.Perm.Create || !d.Check(r.URL.Path) {
|
||||
// Permission for overwriting the file
|
||||
if !d.user.Perm.Modify {
|
||||
return http.StatusForbidden, nil
|
||||
}
|
||||
realPath, isDir, err := files.GetRealPath(d.user.Scope, r.URL.Path)
|
||||
|
||||
err = delThumbs(r.Context(), fileCache, file)
|
||||
if err != nil {
|
||||
return http.StatusNotFound, err
|
||||
return errToStatus(err), err
|
||||
}
|
||||
fileOpts := files.FileOptions{
|
||||
Path: realPath,
|
||||
IsDir: isDir,
|
||||
Modify: d.user.Perm.Modify,
|
||||
Expand: false,
|
||||
ReadHeader: d.server.TypeDetectionByHeader,
|
||||
Checker: d,
|
||||
}
|
||||
// Directories creation on POST.
|
||||
if strings.HasSuffix(r.URL.Path, "/") {
|
||||
err = files.WriteDirectory(fileOpts) // Assign to the existing `err` variable
|
||||
if err != nil {
|
||||
return errToStatus(err), err
|
||||
}
|
||||
return http.StatusOK, nil
|
||||
}
|
||||
file, err := files.FileInfoFaster(fileOpts)
|
||||
if err == nil {
|
||||
if r.URL.Query().Get("override") != "true" {
|
||||
return http.StatusConflict, nil
|
||||
}
|
||||
|
||||
// Permission for overwriting the file
|
||||
if !d.user.Perm.Modify {
|
||||
return http.StatusForbidden, nil
|
||||
}
|
||||
|
||||
err = delThumbs(r.Context(), fileCache, file)
|
||||
if err != nil {
|
||||
return errToStatus(err), err
|
||||
}
|
||||
}
|
||||
err = files.WriteFile(fileOpts, r.Body)
|
||||
return errToStatus(err), err
|
||||
})
|
||||
}
|
||||
err = files.WriteFile(fileOpts, r.Body)
|
||||
return errToStatus(err), err
|
||||
}
|
||||
|
||||
var resourcePutHandler = withUser(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
|
||||
if !d.user.Perm.Modify || !d.Check(r.URL.Path) {
|
||||
// resourcePutHandler updates an existing file resource.
|
||||
// @Summary Update a file resource
|
||||
// @Description Updates an existing file at the specified path.
|
||||
// @Tags Resources
|
||||
// @Accept json
|
||||
// @Produce json
|
||||
// @Param path query string true "Path to the resource"
|
||||
// @Param source query string false "Name for the desired source, default is used if not provided"
|
||||
// @Success 200 "Resource updated successfully"
|
||||
// @Failure 403 {object} map[string]string "Forbidden"
|
||||
// @Failure 404 {object} map[string]string "Resource not found"
|
||||
// @Failure 405 {object} map[string]string "Method not allowed"
|
||||
// @Failure 500 {object} map[string]string "Internal server error"
|
||||
// @Router /api/resources [put]
|
||||
func resourcePutHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) {
|
||||
// TODO source := r.URL.Query().Get("source")
|
||||
path := r.URL.Query().Get("path")
|
||||
if !d.user.Perm.Modify || !d.user.Check(path) {
|
||||
return http.StatusForbidden, nil
|
||||
}
|
||||
|
||||
// Only allow PUT for files.
|
||||
if strings.HasSuffix(r.URL.Path, "/") {
|
||||
if strings.HasSuffix(path, "/") {
|
||||
return http.StatusMethodNotAllowed, nil
|
||||
}
|
||||
|
||||
realPath, isDir, err := files.GetRealPath(d.user.Scope, r.URL.Path)
|
||||
realPath, isDir, err := files.GetRealPath(d.user.Scope, path)
|
||||
if err != nil {
|
||||
return http.StatusNotFound, err
|
||||
}
|
||||
|
@@ -148,49 +200,70 @@ var resourcePutHandler = withUser(func(w http.ResponseWriter, r *http.Request, d
|
|||
IsDir: isDir,
|
||||
Modify: d.user.Perm.Modify,
|
||||
Expand: false,
|
||||
ReadHeader: d.server.TypeDetectionByHeader,
|
||||
Checker: d,
|
||||
ReadHeader: config.Server.TypeDetectionByHeader,
|
||||
Checker: d.user,
|
||||
}
|
||||
err = files.WriteFile(fileOpts, r.Body)
|
||||
return errToStatus(err), err
|
||||
})
|
||||
|
||||
// TODO fix and verify this function still works in tests
|
||||
func resourcePatchHandler(fileCache FileCache) handleFunc {
|
||||
return withUser(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
|
||||
src := r.URL.Path
|
||||
dst := r.URL.Query().Get("destination")
|
||||
action := r.URL.Query().Get("action")
|
||||
dst, err := url.QueryUnescape(dst)
|
||||
if !d.Check(src) || !d.Check(dst) {
|
||||
return http.StatusForbidden, nil
|
||||
}
|
||||
if err != nil {
|
||||
return errToStatus(err), err
|
||||
}
|
||||
if dst == "/" || src == "/" {
|
||||
return http.StatusForbidden, nil
|
||||
}
|
||||
override := r.URL.Query().Get("override") == "true"
|
||||
rename := r.URL.Query().Get("rename") == "true"
|
||||
if !override && !rename {
|
||||
if _, err = os.Stat(dst); err == nil {
|
||||
return http.StatusConflict, nil
|
||||
}
|
||||
}
|
||||
if rename {
|
||||
dst = addVersionSuffix(dst)
|
||||
}
|
||||
// Permission for overwriting the file
|
||||
if override && !d.user.Perm.Modify {
|
||||
return http.StatusForbidden, nil
|
||||
}
|
||||
err = d.RunHook(func() error {
|
||||
return patchAction(r.Context(), action, src, dst, d, fileCache)
|
||||
}, action, src, dst, d.user)
|
||||
}
|
||||
|
||||
// resourcePatchHandler performs a patch operation (e.g., move, rename) on a resource.
|
||||
// @Summary Patch resource (move/rename)
|
||||
// @Description Moves or renames a resource to a new destination.
|
||||
// @Tags Resources
|
||||
// @Accept json
|
||||
// @Produce json
|
||||
// @Param from query string true "Path from resource"
|
||||
// @Param destination query string true "Destination path for the resource"
|
||||
// @Param action query string true "Action to perform (copy, rename)"
|
||||
// @Param overwrite query bool false "Overwrite if destination exists"
|
||||
// @Param rename query bool false "Rename if destination exists"
|
||||
// @Success 200 "Resource moved/renamed successfully"
|
||||
// @Failure 403 {object} map[string]string "Forbidden"
|
||||
// @Failure 404 {object} map[string]string "Resource not found"
|
||||
// @Failure 409 {object} map[string]string "Conflict - Destination exists"
|
||||
// @Failure 500 {object} map[string]string "Internal server error"
|
||||
// @Router /api/resources [patch]
|
||||
func resourcePatchHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) {
|
||||
// TODO source := r.URL.Query().Get("source")
|
||||
src := r.URL.Query().Get("from")
|
||||
dst := r.URL.Query().Get("destination")
|
||||
action := r.URL.Query().Get("action")
|
||||
dst, err := url.QueryUnescape(dst)
|
||||
if !d.user.Check(src) || !d.user.Check(dst) {
|
||||
return http.StatusForbidden, nil
|
||||
}
|
||||
if err != nil {
|
||||
return errToStatus(err), err
|
||||
})
|
||||
}
|
||||
if dst == "/" || src == "/" {
|
||||
return http.StatusForbidden, nil
|
||||
}
|
||||
|
||||
// check target dir exists
|
||||
parentDir, _, err := files.GetRealPath(d.user.Scope, filepath.Dir(dst))
|
||||
if err != nil {
|
||||
return http.StatusNotFound, err
|
||||
}
|
||||
realDest := parentDir + "/" + filepath.Base(dst)
|
||||
realSrc, isSrcDir, err := files.GetRealPath(d.user.Scope, src)
|
||||
if err != nil {
|
||||
return http.StatusNotFound, err
|
||||
}
|
||||
overwrite := r.URL.Query().Get("overwrite") == "true"
|
||||
rename := r.URL.Query().Get("rename") == "true"
|
||||
if rename {
|
||||
realDest = addVersionSuffix(realDest)
|
||||
}
|
||||
// Permission for overwriting the file
|
||||
if overwrite && !d.user.Perm.Modify {
|
||||
return http.StatusForbidden, nil
|
||||
}
|
||||
err = d.RunHook(func() error {
|
||||
return patchAction(r.Context(), action, realSrc, realDest, d, fileCache, isSrcDir)
|
||||
}, action, realSrc, realDest, d.user)
|
||||
|
||||
return errToStatus(err), err
|
||||
}
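
To make the new from/destination/action parameters concrete, here is a minimal sketch of a copy request against this endpoint; the address is an assumption and authentication is omitted for brevity.

package main

import (
	"fmt"
	"net/http"
	"net/url"
)

func main() {
	q := url.Values{}
	q.Set("from", "/reports/2023.csv")
	q.Set("destination", "/archive/2023.csv")
	q.Set("action", "copy")
	q.Set("rename", "true") // let the server add a version suffix instead of overwriting

	// Assumed local address; authentication is left out of this sketch.
	req, err := http.NewRequest(http.MethodPatch, "http://localhost:8080/api/resources?"+q.Encode(), nil)
	if err != nil {
		panic(err)
	}
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.StatusCode) // 200 on success, 403 if the action is not permitted
}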
|
||||
|
||||
func addVersionSuffix(source string) string {
|
||||
|
@@ -210,46 +283,31 @@ func addVersionSuffix(source string) string {
|
|||
}
|
||||
|
||||
func delThumbs(ctx context.Context, fileCache FileCache, file *files.FileInfo) error {
|
||||
for _, previewSizeName := range PreviewSizeNames() {
|
||||
size, _ := ParsePreviewSize(previewSizeName)
|
||||
if err := fileCache.Delete(ctx, previewCacheKey(file, size)); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := fileCache.Delete(ctx, previewCacheKey(file, "small")); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func patchAction(ctx context.Context, action, src, dst string, d *data, fileCache FileCache) error {
|
||||
func patchAction(ctx context.Context, action, src, dst string, d *requestContext, fileCache FileCache, isSrcDir bool) error {
|
||||
switch action {
|
||||
// TODO: use enum
|
||||
case "copy":
|
||||
if !d.user.Perm.Create {
|
||||
return errors.ErrPermissionDenied
|
||||
}
|
||||
|
||||
return fileutils.Copy(src, dst)
|
||||
case "rename":
|
||||
return files.CopyResource(src, dst, isSrcDir)
|
||||
case "rename", "move":
|
||||
if !d.user.Perm.Rename {
|
||||
return errors.ErrPermissionDenied
|
||||
}
|
||||
src = path.Clean("/" + src)
|
||||
dst = path.Clean("/" + dst)
|
||||
realDest, _, err := files.GetRealPath(d.user.Scope, dst)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
realSrc, isDir, err := files.GetRealPath(d.user.Scope, src)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
file, err := files.FileInfoFaster(files.FileOptions{
|
||||
Path: realSrc,
|
||||
IsDir: isDir,
|
||||
Path: src,
|
||||
IsDir: isSrcDir,
|
||||
Modify: d.user.Perm.Modify,
|
||||
Expand: false,
|
||||
ReadHeader: false,
|
||||
Checker: d,
|
||||
Checker: d.user,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
|
@@ -260,8 +318,7 @@ func patchAction(ctx context.Context, action, src, dst string, d *data, fileCach
|
|||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return fileutils.MoveFile(realSrc, realDest)
|
||||
return files.MoveResource(src, dst, isSrcDir)
|
||||
default:
|
||||
return fmt.Errorf("unsupported action %s: %w", action, errors.ErrInvalidRequestParams)
|
||||
}
|
||||
|
@@ -272,28 +329,32 @@ type DiskUsageResponse struct {
|
|||
Used uint64 `json:"used"`
|
||||
}
|
||||
|
||||
var diskUsage = withUser(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
|
||||
realPath, isDir, err := files.GetRealPath(d.user.Scope, r.URL.Path)
|
||||
if err != nil {
|
||||
return http.StatusNotFound, err
|
||||
// diskUsage returns the disk usage information for a given directory.
|
||||
// @Summary Get disk usage
|
||||
// @Description Returns the total and used disk space for a specified directory.
|
||||
// @Tags Resources
|
||||
// @Accept json
|
||||
// @Produce json
|
||||
// @Param source query string false "Name for the desired source, default is used if not provided"
|
||||
// @Success 200 {object} DiskUsageResponse "Disk usage details"
|
||||
// @Failure 404 {object} map[string]string "Directory not found"
|
||||
// @Failure 500 {object} map[string]string "Internal server error"
|
||||
// @Router /api/usage [get]
|
||||
func diskUsage(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) {
|
||||
source := r.URL.Query().Get("source")
|
||||
if source == "" {
|
||||
source = "/"
|
||||
}
|
||||
file, err := files.FileInfoFaster(files.FileOptions{
|
||||
Path: realPath,
|
||||
IsDir: isDir,
|
||||
Modify: d.user.Perm.Modify,
|
||||
Expand: false,
|
||||
ReadHeader: false,
|
||||
Checker: d,
|
||||
Path: source,
|
||||
Checker: d.user,
|
||||
})
|
||||
if err != nil {
|
||||
return errToStatus(err), err
|
||||
}
|
||||
fPath := file.RealPath()
|
||||
if !file.IsDir {
|
||||
return renderJSON(w, r, &DiskUsageResponse{
|
||||
Total: 0,
|
||||
Used: 0,
|
||||
})
|
||||
if file.Type != "directory" {
|
||||
return http.StatusBadRequest, fmt.Errorf("path is not a directory")
|
||||
}
|
||||
usage, err := disk.UsageWithContext(r.Context(), fPath)
|
||||
if err != nil {
|
||||
|
@@ -303,4 +364,12 @@ var diskUsage = withUser(func(w http.ResponseWriter, r *http.Request, d *data) (
|
|||
Total: usage.Total,
|
||||
Used: usage.Used,
|
||||
})
|
||||
})
|
||||
}
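
A short sketch of querying this endpoint; the address is assumed, and the response body is printed as-is since it is the JSON form of DiskUsageResponse above.

package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Assumed local address; "source" must resolve to a directory or the handler returns 400.
	resp, err := http.Get("http://localhost:8080/api/usage?source=/")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		panic(err)
	}
	fmt.Println(resp.StatusCode, string(body)) // total and used bytes for the directory
}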
|
||||
|
||||
func inspectIndex(w http.ResponseWriter, r *http.Request) {
|
||||
path := r.URL.Query().Get("path")
|
||||
isDir := r.URL.Query().Get("isDir") == "true"
|
||||
index := files.GetIndex(config.Server.Root)
|
||||
info, _ := index.GetReducedMetadata(path, isDir)
|
||||
renderJSON(w, r, info) // nolint:errcheck
|
||||
}
|
||||
|
|
|
@@ -0,0 +1,186 @@
|
|||
package http
|
||||
|
||||
import (
|
||||
"crypto/tls"
|
||||
"embed"
|
||||
"fmt"
|
||||
"io/fs"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"text/template"
|
||||
|
||||
"github.com/gtsteffaniak/filebrowser/settings"
|
||||
"github.com/gtsteffaniak/filebrowser/storage"
|
||||
"github.com/gtsteffaniak/filebrowser/version"
|
||||
|
||||
httpSwagger "github.com/swaggo/http-swagger" // http-swagger middleware
|
||||
)
|
||||
|
||||
// Embed the files in the frontend/dist directory
|
||||
//
|
||||
//go:embed embed/*
|
||||
var assets embed.FS
|
||||
|
||||
// Boolean flag to determine whether to use the embedded FS or not
|
||||
var embeddedFS = os.Getenv("FILEBROWSER_NO_EMBEDED") != "true"
|
||||
|
||||
// Custom dirFS to handle both embedded and non-embedded file systems
|
||||
type dirFS struct {
|
||||
http.Dir
|
||||
}
|
||||
|
||||
// Implement the Open method for dirFS, which wraps http.Dir
|
||||
func (d dirFS) Open(name string) (fs.File, error) {
|
||||
return d.Dir.Open(name)
|
||||
}
|
||||
|
||||
var (
|
||||
store *storage.Storage
|
||||
config *settings.Settings
|
||||
fileCache FileCache
|
||||
imgSvc ImgService
|
||||
assetFs fs.FS
|
||||
)
|
||||
|
||||
func StartHttp(Service ImgService, storage *storage.Storage, cache FileCache) {
|
||||
|
||||
store = storage
|
||||
fileCache = cache
|
||||
imgSvc = Service
|
||||
config = &settings.Config
|
||||
|
||||
var err error
|
||||
|
||||
if embeddedFS {
|
||||
// Embedded mode: Serve files from the embedded assets
|
||||
assetFs, err = fs.Sub(assets, "embed")
|
||||
if err != nil {
|
||||
log.Fatal("Could not embed frontend. Does dist exist?")
|
||||
}
|
||||
} else {
|
||||
assetFs = dirFS{Dir: http.Dir("http/dist")}
|
||||
}
|
||||
|
||||
templateRenderer = &TemplateRenderer{
|
||||
templates: template.Must(template.ParseFS(assetFs, "public/index.html")),
|
||||
}
|
||||
|
||||
router := http.NewServeMux()
|
||||
// API group routing
|
||||
api := http.NewServeMux()
|
||||
|
||||
// User routes
|
||||
api.HandleFunc("GET /users", withUser(userGetHandler))
|
||||
api.HandleFunc("POST /users", withSelfOrAdmin(usersPostHandler))
|
||||
api.HandleFunc("PUT /users", withUser(userPutHandler))
|
||||
api.HandleFunc("DELETE /users", withSelfOrAdmin(userDeleteHandler))
|
||||
|
||||
// Auth routes
|
||||
api.HandleFunc("POST /auth/login", loginHandler)
|
||||
api.HandleFunc("GET /auth/signup", signupHandler)
|
||||
api.HandleFunc("POST /auth/renew", withUser(renewHandler))
|
||||
api.HandleFunc("PUT /auth/token", withUser(createApiKeyHandler))
|
||||
api.HandleFunc("GET /auth/token", withUser(createApiKeyHandler))
|
||||
api.HandleFunc("DELETE /auth/token", withUser(deleteApiKeyHandler))
|
||||
api.HandleFunc("GET /auth/tokens", withUser(listApiKeysHandler))
|
||||
|
||||
// Resources routes
|
||||
api.HandleFunc("GET /resources", withUser(resourceGetHandler))
|
||||
api.HandleFunc("DELETE /resources", withUser(resourceDeleteHandler))
|
||||
api.HandleFunc("POST /resources", withUser(resourcePostHandler))
|
||||
api.HandleFunc("PUT /resources", withUser(resourcePutHandler))
|
||||
api.HandleFunc("PATCH /resources", withUser(resourcePatchHandler))
|
||||
api.HandleFunc("GET /usage", withUser(diskUsage))
|
||||
api.HandleFunc("GET /raw", withUser(rawHandler))
|
||||
api.HandleFunc("GET /preview", withUser(previewHandler))
|
||||
if version.Version == "testing" || version.Version == "untracked" {
|
||||
api.HandleFunc("GET /inspectIndex", inspectIndex)
|
||||
}
|
||||
|
||||
// Share routes
|
||||
api.HandleFunc("GET /shares", withPermShare(shareListHandler))
|
||||
api.HandleFunc("GET /share", withPermShare(shareGetsHandler))
|
||||
api.HandleFunc("POST /share", withPermShare(sharePostHandler))
|
||||
api.HandleFunc("DELETE /share", withPermShare(shareDeleteHandler))
|
||||
|
||||
// Public routes
|
||||
api.HandleFunc("GET /public/publicUser", publicUserGetHandler)
|
||||
api.HandleFunc("GET /public/dl", withHashFile(publicDlHandler))
|
||||
api.HandleFunc("GET /public/share", withHashFile(publicShareHandler))
|
||||
|
||||
// Settings routes
|
||||
api.HandleFunc("GET /settings", withAdmin(settingsGetHandler))
|
||||
api.HandleFunc("PUT /settings", withAdmin(settingsPutHandler))
|
||||
|
||||
api.HandleFunc("GET /search", withUser(searchHandler))
|
||||
apiPath := config.Server.BaseURL + "api"
|
||||
router.Handle(apiPath+"/", http.StripPrefix(apiPath, api))
|
||||
|
||||
// Static and index file handlers
|
||||
router.HandleFunc(fmt.Sprintf("GET %vstatic/", config.Server.BaseURL), staticFilesHandler)
|
||||
router.HandleFunc(config.Server.BaseURL, indexHandler)
|
||||
|
||||
// health
|
||||
router.HandleFunc(fmt.Sprintf("GET %vhealth/", config.Server.BaseURL), healthHandler)
|
||||
|
||||
// Swagger
|
||||
router.Handle(fmt.Sprintf("%vswagger/", config.Server.BaseURL),
|
||||
httpSwagger.Handler(
|
||||
httpSwagger.URL(config.Server.BaseURL+"swagger/doc.json"), //The url pointing to API definition
|
||||
httpSwagger.DeepLinking(true),
|
||||
httpSwagger.DocExpansion("none"),
|
||||
httpSwagger.DomID("swagger-ui"),
|
||||
),
|
||||
)
|
||||
|
||||
var scheme string
|
||||
port := ""
|
||||
|
||||
// Determine whether to use HTTPS (TLS) or HTTP
|
||||
if config.Server.TLSCert != "" && config.Server.TLSKey != "" {
|
||||
// Load the TLS certificate and key
|
||||
cer, err := tls.LoadX509KeyPair(config.Server.TLSCert, config.Server.TLSKey)
|
||||
if err != nil {
|
||||
log.Fatalf("could not load certificate: %v", err)
|
||||
}
|
||||
|
||||
// Create a custom TLS listener
|
||||
tlsConfig := &tls.Config{
|
||||
MinVersion: tls.VersionTLS12,
|
||||
Certificates: []tls.Certificate{cer},
|
||||
}
|
||||
|
||||
// Set HTTPS scheme and default port for TLS
|
||||
scheme = "https"
|
||||
|
||||
// Listen on TCP and wrap with TLS
|
||||
listener, err := tls.Listen("tcp", fmt.Sprintf(":%v", config.Server.Port), tlsConfig)
|
||||
if err != nil {
|
||||
log.Fatalf("could not start TLS server: %v", err)
|
||||
}
|
||||
if config.Server.Port != 443 {
|
||||
port = fmt.Sprintf(":%d", config.Server.Port)
|
||||
}
|
||||
// Build the full URL with host and port
|
||||
fullURL := fmt.Sprintf("%s://localhost%s%s", scheme, port, config.Server.BaseURL)
|
||||
log.Printf("Running at : %s", fullURL)
|
||||
err = http.Serve(listener, muxWithMiddleware(router))
|
||||
if err != nil {
|
||||
log.Fatalf("could not start server: %v", err)
|
||||
}
|
||||
} else {
|
||||
// Set HTTP scheme and the default port for HTTP
|
||||
scheme = "http"
|
||||
if config.Server.Port != 443 {
|
||||
port = fmt.Sprintf(":%d", config.Server.Port)
|
||||
}
|
||||
// Build the full URL with host and port
|
||||
fullURL := fmt.Sprintf("%s://localhost%s%s", scheme, port, config.Server.BaseURL)
|
||||
log.Printf("Running at : %s", fullURL)
|
||||
err := http.ListenAndServe(fmt.Sprintf(":%v", config.Server.Port), muxWithMiddleware(router))
|
||||
if err != nil {
|
||||
log.Fatalf("could not start server: %v", err)
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -8,29 +8,63 @@ import (
|
|||
"github.com/gtsteffaniak/filebrowser/settings"
|
||||
)
|
||||
|
||||
var searchHandler = withUser(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
|
||||
response := []map[string]interface{}{}
|
||||
// searchHandler handles search requests for files based on the provided query.
|
||||
//
|
||||
// This endpoint processes a search query, retrieves relevant file paths, and
|
||||
// returns a JSON response with the search results. The search is performed
|
||||
// against the file index, which is built from the root directory specified in
|
||||
// the server's configuration. The results are filtered based on the user's scope.
|
||||
//
|
||||
// The handler expects the following headers in the request:
|
||||
// - SessionId: A unique identifier for the user's session.
|
||||
// - UserScope: The scope of the user, which influences the search context.
|
||||
//
|
||||
// The request URL should include a query parameter named `query` that specifies
|
||||
// the search terms to use. The response will include an array of searchResponse objects
|
||||
// containing the path, type, and dir status.
|
||||
//
|
||||
// Example request:
|
||||
//
|
||||
// GET api/search?query=myfile
|
||||
//
|
||||
// Example response:
|
||||
// [
|
||||
//
|
||||
// {
|
||||
// "path": "/path/to/myfile.txt",
|
||||
// "type": "text"
|
||||
// },
|
||||
// {
|
||||
// "path": "/path/to/mydir/",
|
||||
// "type": "directory"
|
||||
// }
|
||||
//
|
||||
// ]
|
||||
//
|
||||
// @Summary Search Files
|
||||
// @Description Searches for files matching the provided query. Returns file paths and metadata based on the user's session and scope.
|
||||
// @Tags Search
|
||||
// @Accept json
|
||||
// @Produce json
|
||||
// @Param query query string true "Search query"
|
||||
// @Param scope query string false "path within user scope to search, for example '/first/second' to search within the second directory only"
|
||||
// @Param SessionId header string false "User session ID, add unique value to prevent collisions"
|
||||
// @Success 200 {array} files.searchResult "List of search results"
|
||||
// @Failure 400 {object} map[string]string "Bad Request"
|
||||
// @Router /api/search [get]
|
||||
func searchHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) {
|
||||
query := r.URL.Query().Get("query")
|
||||
searchScope := strings.TrimPrefix(r.URL.Query().Get("scope"), ".")
|
||||
searchScope = strings.TrimPrefix(searchScope, "/")
|
||||
// Retrieve the User-Agent and X-Auth headers from the request
|
||||
sessionId := r.Header.Get("SessionId")
|
||||
userScope := r.Header.Get("UserScope")
|
||||
index := files.GetIndex(settings.Config.Server.Root)
|
||||
combinedScope := strings.TrimPrefix(userScope+r.URL.Path, ".")
|
||||
results, fileTypes := index.Search(query, combinedScope, sessionId)
|
||||
for _, path := range results {
|
||||
responseObj := map[string]interface{}{
|
||||
"path": path,
|
||||
"dir": true,
|
||||
}
|
||||
if _, ok := fileTypes[path]; ok {
|
||||
responseObj["dir"] = false
|
||||
for filterType, value := range fileTypes[path] {
|
||||
if value {
|
||||
responseObj[filterType] = value
|
||||
}
|
||||
}
|
||||
}
|
||||
response = append(response, responseObj)
|
||||
}
|
||||
userScope := strings.TrimPrefix(d.user.Scope, ".")
|
||||
combinedScope := strings.TrimPrefix(userScope+"/"+searchScope, "/")
|
||||
|
||||
// Perform the search using the provided query and user scope
|
||||
response := index.Search(query, combinedScope, sessionId)
|
||||
// Set the Content-Type header to application/json
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
return renderJSON(w, r, response)
|
||||
})
|
||||
}
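
A minimal client sketch for the new query/scope parameters and the SessionId header documented above; the address is an assumption and authentication is omitted.

package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
)

func main() {
	q := url.Values{}
	q.Set("query", "myfile")        // search terms, as in the example request above
	q.Set("scope", "/first/second") // optional sub-path within the user scope

	req, err := http.NewRequest(http.MethodGet, "http://localhost:8080/api/search?"+q.Encode(), nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("SessionId", "example-session-1") // unique value to avoid collisions

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.StatusCode, string(body)) // JSON array of search results
}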
|
||||
|
|
|
@@ -4,8 +4,8 @@ import (
|
|||
"encoding/json"
|
||||
"net/http"
|
||||
|
||||
"github.com/gtsteffaniak/filebrowser/rules"
|
||||
"github.com/gtsteffaniak/filebrowser/settings"
|
||||
"github.com/gtsteffaniak/filebrowser/users"
|
||||
)
|
||||
|
||||
type settingsData struct {
|
||||
|
@@ -13,37 +13,56 @@ type settingsData struct {
|
|||
CreateUserDir bool `json:"createUserDir"`
|
||||
UserHomeBasePath string `json:"userHomeBasePath"`
|
||||
Defaults settings.UserDefaults `json:"defaults"`
|
||||
Rules []rules.Rule `json:"rules"`
|
||||
Rules []users.Rule `json:"rules"`
|
||||
Frontend settings.Frontend `json:"frontend"`
|
||||
Commands map[string][]string `json:"commands"`
|
||||
}
|
||||
|
||||
var settingsGetHandler = withAdmin(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
|
||||
// settingsGetHandler retrieves the current system settings.
|
||||
// @Summary Get system settings
|
||||
// @Description Returns the current configuration settings for signup, user directories, rules, frontend, and commands.
|
||||
// @Tags Settings
|
||||
// @Accept json
|
||||
// @Produce json
|
||||
// @Success 200 {object} settingsData "System settings data"
|
||||
// @Router /api/settings [get]
|
||||
func settingsGetHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) {
|
||||
data := &settingsData{
|
||||
Signup: d.settings.Auth.Signup,
|
||||
CreateUserDir: d.settings.Server.CreateUserDir,
|
||||
UserHomeBasePath: d.settings.Server.UserHomeBasePath,
|
||||
Defaults: d.settings.UserDefaults,
|
||||
Rules: d.settings.Rules,
|
||||
Frontend: d.settings.Frontend,
|
||||
Signup: config.Auth.Signup,
|
||||
CreateUserDir: config.Server.CreateUserDir,
|
||||
UserHomeBasePath: config.Server.UserHomeBasePath,
|
||||
Defaults: config.UserDefaults,
|
||||
Rules: config.Rules,
|
||||
Frontend: config.Frontend,
|
||||
}
|
||||
|
||||
return renderJSON(w, r, data)
|
||||
})
|
||||
}
|
||||
|
||||
var settingsPutHandler = withAdmin(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
|
||||
// settingsPutHandler updates the system settings.
|
||||
// @Summary Update system settings
|
||||
// @Description Updates the system configuration settings for signup, user directories, rules, frontend, and commands.
|
||||
// @Tags Settings
|
||||
// @Accept json
|
||||
// @Produce json
|
||||
// @Param body body settingsData true "Settings data to update"
|
||||
// @Success 200 "Settings updated successfully"
|
||||
// @Failure 400 {object} map[string]string "Bad request - failed to decode body"
|
||||
// @Failure 500 {object} map[string]string "Internal server error"
|
||||
// @Router /api/settings [put]
|
||||
func settingsPutHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) {
|
||||
req := &settingsData{}
|
||||
err := json.NewDecoder(r.Body).Decode(req)
|
||||
if err != nil {
|
||||
return http.StatusBadRequest, err
|
||||
}
|
||||
|
||||
d.settings.Server.CreateUserDir = req.CreateUserDir
|
||||
d.settings.Server.UserHomeBasePath = req.UserHomeBasePath
|
||||
d.settings.UserDefaults = req.Defaults
|
||||
d.settings.Rules = req.Rules
|
||||
d.settings.Frontend = req.Frontend
|
||||
d.settings.Auth.Signup = req.Signup
|
||||
err = d.store.Settings.Save(d.settings)
|
||||
config.Server.CreateUserDir = req.CreateUserDir
|
||||
config.Server.UserHomeBasePath = req.UserHomeBasePath
|
||||
config.UserDefaults = req.Defaults
|
||||
config.Rules = req.Rules
|
||||
config.Frontend = req.Frontend
|
||||
config.Auth.Signup = req.Signup
|
||||
err = store.Settings.Save(config)
|
||||
return errToStatus(err), err
|
||||
})
|
||||
}
|
||||
|
|
|
@@ -8,7 +8,6 @@ import (
|
|||
"net/http"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"golang.org/x/crypto/bcrypt"
|
||||
|
@@ -17,24 +16,24 @@ import (
|
|||
"github.com/gtsteffaniak/filebrowser/share"
|
||||
)
|
||||
|
||||
func withPermShare(fn handleFunc) handleFunc {
|
||||
return withUser(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
|
||||
if !d.user.Perm.Share {
|
||||
return http.StatusForbidden, nil
|
||||
}
|
||||
return fn(w, r, d)
|
||||
})
|
||||
}
|
||||
|
||||
var shareListHandler = withPermShare(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
|
||||
// shareListHandler returns a list of all share links.
|
||||
// @Summary List share links
|
||||
// @Description Returns a list of share links for the current user, or all links if the user is an admin.
|
||||
// @Tags Shares
|
||||
// @Accept json
|
||||
// @Produce json
|
||||
// @Success 200 {array} share.Link "List of share links"
|
||||
// @Failure 500 {object} map[string]string "Internal server error"
|
||||
// @Router /api/shares [get]
|
||||
func shareListHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) {
|
||||
var (
|
||||
s []*share.Link
|
||||
err error
|
||||
)
|
||||
if d.user.Perm.Admin {
|
||||
s, err = d.store.Share.All()
|
||||
s, err = store.Share.All()
|
||||
} else {
|
||||
s, err = d.store.Share.FindByUserID(d.user.ID)
|
||||
s, err = store.Share.FindByUserID(d.user.ID)
|
||||
}
|
||||
if err == errors.ErrNotExist {
|
||||
return renderJSON(w, r, []*share.Link{})
|
||||
|
@@ -51,39 +50,68 @@ var shareListHandler = withPermShare(func(w http.ResponseWriter, r *http.Request
|
|||
return s[i].Expire < s[j].Expire
|
||||
})
|
||||
return renderJSON(w, r, s)
|
||||
})
|
||||
}
|
||||
|
||||
var shareGetsHandler = withPermShare(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
|
||||
s, err := d.store.Share.Gets(r.URL.Path, d.user.ID)
|
||||
// shareGetsHandler retrieves share links for a specific resource path.
|
||||
// @Summary Get share links by path
|
||||
// @Description Retrieves all share links associated with a specific resource path for the current user.
|
||||
// @Tags Shares
|
||||
// @Accept json
|
||||
// @Produce json
|
||||
// @Param path query string true "Resource path for which to retrieve share links"
|
||||
// @Success 200 {array} share.Link "List of share links for the specified path"
|
||||
// @Failure 500 {object} map[string]string "Internal server error"
|
||||
// @Router /api/share [get]
|
||||
func shareGetsHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) {
|
||||
path := r.URL.Query().Get("path")
|
||||
s, err := store.Share.Gets(path, d.user.ID)
|
||||
if err == errors.ErrNotExist {
|
||||
return renderJSON(w, r, []*share.Link{})
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return http.StatusInternalServerError, err
|
||||
}
|
||||
|
||||
return renderJSON(w, r, s)
|
||||
})
|
||||
}
|
||||
|
||||
var shareDeleteHandler = withPermShare(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
|
||||
hash := strings.TrimSuffix(r.URL.Path, "/")
|
||||
hash = strings.TrimPrefix(hash, "/")
|
||||
// shareDeleteHandler deletes a specific share link by its hash.
|
||||
// @Summary Delete a share link
|
||||
// @Description Deletes a share link specified by its hash.
|
||||
// @Tags Shares
|
||||
// @Accept json
|
||||
// @Produce json
|
||||
// @Param hash path string true "Hash of the share link to delete"
|
||||
// @Success 200 "Share link deleted successfully"
|
||||
// @Failure 400 {object} map[string]string "Bad request - missing or invalid hash"
|
||||
// @Failure 500 {object} map[string]string "Internal server error"
|
||||
// @Router /api/shares/{hash} [delete]
|
||||
func shareDeleteHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) {
|
||||
hash := r.URL.Query().Get("hash")
|
||||
|
||||
if hash == "" {
|
||||
return http.StatusBadRequest, nil
|
||||
}
|
||||
|
||||
err := d.store.Share.Delete(hash)
|
||||
err := store.Share.Delete(hash)
|
||||
if err != nil {
|
||||
return errToStatus(err), err
|
||||
}
|
||||
|
||||
return errToStatus(err), err
|
||||
})
|
||||
|
||||
var sharePostHandler = withPermShare(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
|
||||
}
|
||||
|
||||
// sharePostHandler creates a new share link.
|
||||
// @Summary Create a share link
|
||||
// @Description Creates a new share link with an optional expiration time and password protection.
|
||||
// @Tags Shares
|
||||
// @Accept json
|
||||
// @Produce json
|
||||
// @Param body body share.CreateBody true "Share link creation parameters"
|
||||
// @Success 200 {object} share.Link "Created share link"
|
||||
// @Failure 400 {object} map[string]string "Bad request - failed to decode body"
|
||||
// @Failure 500 {object} map[string]string "Internal server error"
|
||||
// @Router /api/shares [post]
|
||||
func sharePostHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) {
|
||||
var s *share.Link
|
||||
var body share.CreateBody
|
||||
if r.Body != nil {
|
||||
|
@@ -93,14 +121,11 @@ var sharePostHandler = withPermShare(func(w http.ResponseWriter, r *http.Request
|
|||
defer r.Body.Close()
|
||||
}
|
||||
|
||||
bytes := make([]byte, 6) //nolint:gomnd
|
||||
_, err := rand.Read(bytes)
|
||||
secure_hash, err := generateShortUUID()
|
||||
if err != nil {
|
||||
return http.StatusInternalServerError, err
|
||||
}
|
||||
|
||||
str := base64.URLEncoding.EncodeToString(bytes)
|
||||
|
||||
var expire int64 = 0
|
||||
|
||||
if body.Expires != "" {
|
||||
|
@ -139,24 +164,24 @@ var sharePostHandler = withPermShare(func(w http.ResponseWriter, r *http.Request
|
|||
token = base64.URLEncoding.EncodeToString(tokenBuffer)
|
||||
stringHash = string(hash)
|
||||
}
|
||||
path := r.URL.Query().Get("path")
|
||||
s = &share.Link{
|
||||
Path: strings.TrimSuffix(r.URL.Path, "/"),
|
||||
Hash: str,
|
||||
Path: path,
|
||||
Hash: secure_hash,
|
||||
Expire: expire,
|
||||
UserID: d.user.ID,
|
||||
PasswordHash: stringHash,
|
||||
Token: token,
|
||||
}
|
||||
|
||||
if err := d.store.Share.Save(s); err != nil {
|
||||
if err := store.Share.Save(s); err != nil {
|
||||
return http.StatusInternalServerError, err
|
||||
}
|
||||
|
||||
return renderJSON(w, r, s)
|
||||
})
|
||||
}
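
To illustrate the new path query parameter, here is a minimal sketch that creates a share link with an empty JSON body (no password, no expiry); the address is an assumption and authentication is omitted.

package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
	"strings"
)

func main() {
	q := url.Values{}
	q.Set("path", "/photos/holiday") // resource to share

	// Assumed local address; an empty JSON object leaves password and expiry unset.
	req, err := http.NewRequest(http.MethodPost, "http://localhost:8080/api/share?"+q.Encode(), strings.NewReader("{}"))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.StatusCode, string(body)) // the created link, including its generated hash
}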
|
||||
|
||||
func getSharePasswordHash(body share.CreateBody) (data []byte, statuscode int, err error) {
|
||||
|
||||
if body.Password == "" {
|
||||
return nil, 0, nil
|
||||
}
|
||||
|
@@ -168,3 +193,18 @@ func getSharePasswordHash(body share.CreateBody) (data []byte, statuscode int, e
|
|||
|
||||
return hash, 0, nil
|
||||
}
|
||||
|
||||
func generateShortUUID() (string, error) {
|
||||
// Generate 16 random bytes (128 bits of entropy)
|
||||
bytes := make([]byte, 16)
|
||||
_, err := rand.Read(bytes)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
// Encode the bytes to a URL-safe base64 string
|
||||
uuid := base64.RawURLEncoding.EncodeToString(bytes)
|
||||
|
||||
// Trim the length to 22 characters for a shorter ID
|
||||
return uuid[:22], nil
|
||||
}
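
A standalone sketch of the same ID-generation pattern, useful for seeing what the share hashes look like: 16 random bytes (128 bits of entropy) encoded with URL-safe base64 come out at exactly 22 characters.

package main

import (
	"crypto/rand"
	"encoding/base64"
	"fmt"
)

func main() {
	// 16 random bytes = 128 bits of entropy; RawURLEncoding of 16 bytes is exactly 22 characters.
	b := make([]byte, 16)
	if _, err := rand.Read(b); err != nil {
		panic(err)
	}
	id := base64.RawURLEncoding.EncodeToString(b)
	fmt.Printf("%s (%d chars)\n", id, len(id))
}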
|
||||
|
|
|
@@ -14,41 +14,58 @@ import (
|
|||
|
||||
"github.com/gtsteffaniak/filebrowser/auth"
|
||||
"github.com/gtsteffaniak/filebrowser/settings"
|
||||
"github.com/gtsteffaniak/filebrowser/storage"
|
||||
"github.com/gtsteffaniak/filebrowser/version"
|
||||
)
|
||||
|
||||
func handleWithStaticData(w http.ResponseWriter, _ *http.Request, d *data, fSys fs.FS, file, contentType string) (int, error) {
|
||||
var templateRenderer *TemplateRenderer
|
||||
|
||||
type TemplateRenderer struct {
|
||||
templates *template.Template
|
||||
}
|
||||
|
||||
// Render renders a template document with headers and data
|
||||
func (t *TemplateRenderer) Render(w http.ResponseWriter, name string, data interface{}) error {
|
||||
// Set headers
|
||||
w.Header().Set("Cache-Control", "no-cache, private, max-age=0")
|
||||
w.Header().Set("Pragma", "no-cache")
|
||||
w.Header().Set("X-Accel-Expires", "0")
|
||||
w.Header().Set("Transfer-Encoding", "identity")
|
||||
// Execute the template with the provided data
|
||||
return t.templates.ExecuteTemplate(w, name, data)
|
||||
}
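
For reference, a self-contained sketch of exercising a renderer shaped like the one above using only the standard library; the template name and data here are placeholders, not the real index.html.

package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
	"text/template"
)

// Renderer mirrors the shape of TemplateRenderer above: a parsed template set plus
// a Render method that writes cache-busting headers before executing a template.
type Renderer struct {
	templates *template.Template
}

func (t *Renderer) Render(w http.ResponseWriter, name string, data interface{}) error {
	w.Header().Set("Cache-Control", "no-cache, private, max-age=0")
	return t.templates.ExecuteTemplate(w, name, data)
}

func main() {
	r := &Renderer{
		templates: template.Must(template.New("index.html").Parse(`<title>{{.Name}}</title>`)),
	}
	rec := httptest.NewRecorder()
	if err := r.Render(rec, "index.html", map[string]string{"Name": "FileBrowser"}); err != nil {
		panic(err)
	}
	fmt.Println(rec.Header().Get("Cache-Control"))
	fmt.Println(rec.Body.String()) // <title>FileBrowser</title>
}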
|
||||
|
||||
func handleWithStaticData(w http.ResponseWriter, r *http.Request, file, contentType string) {
|
||||
w.Header().Set("Content-Type", contentType)
|
||||
|
||||
auther, err := d.store.Auth.Get(d.settings.Auth.Method)
|
||||
auther, err := store.Auth.Get(config.Auth.Method)
|
||||
if err != nil {
|
||||
return http.StatusInternalServerError, err
|
||||
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
data := map[string]interface{}{
|
||||
"Name": d.settings.Frontend.Name,
|
||||
"DisableExternal": d.settings.Frontend.DisableExternal,
|
||||
"DisableUsedPercentage": d.settings.Frontend.DisableUsedPercentage,
|
||||
"Name": config.Frontend.Name,
|
||||
"DisableExternal": config.Frontend.DisableExternal,
|
||||
"DisableUsedPercentage": config.Frontend.DisableUsedPercentage,
|
||||
"darkMode": settings.Config.UserDefaults.DarkMode,
|
||||
"Color": d.settings.Frontend.Color,
|
||||
"BaseURL": d.server.BaseURL,
|
||||
"Color": config.Frontend.Color,
|
||||
"BaseURL": config.Server.BaseURL,
|
||||
"Version": version.Version,
|
||||
"CommitSHA": version.CommitSHA,
|
||||
"StaticURL": path.Join(d.server.BaseURL, "/static"),
|
||||
"StaticURL": path.Join(config.Server.BaseURL, "static"),
|
||||
"Signup": settings.Config.Auth.Signup,
|
||||
"NoAuth": d.settings.Auth.Method == "noauth",
|
||||
"AuthMethod": d.settings.Auth.Method,
|
||||
"NoAuth": config.Auth.Method == "noauth",
|
||||
"AuthMethod": config.Auth.Method,
|
||||
"LoginPage": auther.LoginPage(),
|
||||
"CSS": false,
|
||||
"ReCaptcha": false,
|
||||
"EnableThumbs": d.server.EnableThumbnails,
|
||||
"ResizePreview": d.server.ResizePreview,
|
||||
"EnableExec": d.server.EnableExec,
|
||||
"EnableThumbs": config.Server.EnableThumbnails,
|
||||
"ResizePreview": config.Server.ResizePreview,
|
||||
"EnableExec": config.Server.EnableExec,
|
||||
}
|
||||
|
||||
if d.settings.Frontend.Files != "" {
|
||||
fPath := filepath.Join(d.settings.Frontend.Files, "custom.css")
|
||||
if config.Frontend.Files != "" {
|
||||
fPath := filepath.Join(config.Frontend.Files, "custom.css")
|
||||
_, err := os.Stat(fPath) //nolint:govet
|
||||
|
||||
if err != nil && !os.IsNotExist(err) {
|
||||
|
@@ -60,15 +77,17 @@ func handleWithStaticData(w http.ResponseWriter, _ *http.Request, d *data, fSys
|
|||
}
|
||||
}
|
||||
|
||||
if d.settings.Auth.Method == "password" {
|
||||
raw, err := d.store.Auth.Get(d.settings.Auth.Method) //nolint:govet
|
||||
if config.Auth.Method == "password" {
|
||||
raw, err := store.Auth.Get(config.Auth.Method) //nolint:govet
|
||||
if err != nil {
|
||||
return http.StatusInternalServerError, err
|
||||
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
auther, ok := raw.(*auth.JSONAuth)
|
||||
if !ok {
|
||||
return http.StatusInternalServerError, fmt.Errorf("failed to assert type *auth.JSONAuth")
|
||||
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
if auther.ReCaptcha != nil {
|
||||
|
@@ -80,77 +99,47 @@ func handleWithStaticData(w http.ResponseWriter, _ *http.Request, d *data, fSys
|
|||
|
||||
b, err := json.Marshal(data)
|
||||
if err != nil {
|
||||
return http.StatusInternalServerError, err
|
||||
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
data["Json"] = strings.ReplaceAll(string(b), `'`, `\'`)
|
||||
data["globalVars"] = strings.ReplaceAll(string(b), `'`, `\'`)
|
||||
|
||||
fileContents, err := fs.ReadFile(fSys, file)
|
||||
if err != nil {
|
||||
if err == os.ErrNotExist {
|
||||
return http.StatusNotFound, err
|
||||
}
|
||||
return http.StatusInternalServerError, err
|
||||
// Render the template with global variables
|
||||
if err := templateRenderer.Render(w, file, data); err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
}
|
||||
index := template.Must(template.New("index").Delims("[{[", "]}]").Parse(string(fileContents)))
|
||||
err = index.Execute(w, data)
|
||||
if err != nil {
|
||||
return http.StatusInternalServerError, err
|
||||
}
|
||||
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
func getStaticHandlers(store *storage.Storage, server *settings.Server, assetsFs fs.FS) (index, static http.Handler) {
|
||||
index = handle(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
|
||||
if r.Method != http.MethodGet {
|
||||
return http.StatusNotFound, nil
|
||||
}
|
||||
|
||||
w.Header().Set("x-xss-protection", "1; mode=block")
|
||||
return handleWithStaticData(w, r, d, assetsFs, "public/index.html", "text/html; charset=utf-8")
|
||||
}, "", store, server)
|
||||
|
||||
static = handle(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
|
||||
if r.Method != http.MethodGet {
|
||||
return http.StatusNotFound, nil
|
||||
}
|
||||
|
||||
const maxAge = 86400 // 1 day
|
||||
w.Header().Set("Cache-Control", fmt.Sprintf("public, max-age=%v", maxAge))
|
||||
|
||||
if d.settings.Frontend.Files != "" {
|
||||
if strings.HasPrefix(r.URL.Path, "img/") {
|
||||
fPath := filepath.Join(d.settings.Frontend.Files, r.URL.Path)
|
||||
if _, err := os.Stat(fPath); err == nil {
|
||||
http.ServeFile(w, r, fPath)
|
||||
return 0, nil
|
||||
}
|
||||
} else if r.URL.Path == "custom.css" && d.settings.Frontend.Files != "" {
|
||||
http.ServeFile(w, r, filepath.Join(d.settings.Frontend.Files, "custom.css"))
|
||||
return 0, nil
|
||||
}
|
||||
}
|
||||
|
||||
if !strings.HasSuffix(r.URL.Path, ".js") {
|
||||
http.FileServer(http.FS(assetsFs)).ServeHTTP(w, r)
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
fileContents, err := fs.ReadFile(assetsFs, r.URL.Path+".gz")
|
||||
if err != nil {
|
||||
return http.StatusNotFound, err
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Encoding", "gzip")
|
||||
func staticFilesHandler(w http.ResponseWriter, r *http.Request) {
|
||||
const maxAge = 86400 // 1 day
|
||||
w.Header().Set("Cache-Control", fmt.Sprintf("public, max-age=%v", maxAge))
|
||||
w.Header().Set("Content-Security-Policy", `default-src 'self'; style-src 'unsafe-inline';`)
|
||||
// Remove "/static/" from the request path
|
||||
adjustedPath := strings.TrimPrefix(r.URL.Path, fmt.Sprintf("%vstatic/", config.Server.BaseURL))
|
||||
adjustedCompressed := adjustedPath + ".gz"
|
||||
if strings.HasSuffix(adjustedPath, ".js") {
|
||||
w.Header().Set("Content-Type", "application/javascript; charset=utf-8") // Set the correct MIME type for JavaScript files
|
||||
|
||||
if _, err := w.Write(fileContents); err != nil {
|
||||
return http.StatusInternalServerError, err
|
||||
}
|
||||
// Check if the gzipped version of the file exists
|
||||
fileContents, err := fs.ReadFile(assetFs, adjustedCompressed)
|
||||
if err == nil {
|
||||
w.Header().Set("Content-Encoding", "gzip") // Let the browser know the file is compressed
|
||||
status, err := w.Write(fileContents) // Write the gzipped file content to the response
|
||||
if err != nil {
|
||||
http.Error(w, http.StatusText(status), status)
|
||||
}
|
||||
|
||||
return 0, nil
|
||||
}, "/static/", store, server)
|
||||
|
||||
return index, static
|
||||
} else {
|
||||
// Otherwise, serve the regular file
|
||||
http.StripPrefix(fmt.Sprintf("%vstatic/", config.Server.BaseURL), http.FileServer(http.FS(assetFs))).ServeHTTP(w, r)
|
||||
}
|
||||
}
|
||||
|
||||
func indexHandler(w http.ResponseWriter, r *http.Request) {
|
||||
if r.Method != http.MethodGet {
|
||||
http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
handleWithStaticData(w, r, "index.html", "text/html")
|
||||
|
||||
}
|
||||
|
|
|
@@ -2,12 +2,12 @@ package http
|
|||
|
||||
import (
|
||||
"encoding/json"
|
||||
"io"
|
||||
"net/http"
|
||||
"reflect"
|
||||
"sort"
|
||||
"strconv"
|
||||
|
||||
"github.com/gorilla/mux"
|
||||
"golang.org/x/text/cases"
|
||||
"golang.org/x/text/language"
|
||||
|
||||
|
@@ -26,102 +26,145 @@ type Sorting struct {
|
|||
By string `json:"by"`
|
||||
Asc bool `json:"asc"`
|
||||
}
|
||||
type modifyUserRequest struct {
|
||||
modifyRequest
|
||||
Data *users.User `json:"data"`
|
||||
type UserRequest struct {
|
||||
What string `json:"what"`
|
||||
Which []string `json:"which"`
|
||||
Data *users.User `json:"data"`
|
||||
}
|
||||
|
||||
func getUserID(r *http.Request) (uint, error) {
|
||||
vars := mux.Vars(r)
|
||||
i, err := strconv.ParseUint(vars["id"], 10, 0)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return uint(i), err
|
||||
}
|
||||
// userGetHandler retrieves a user by ID.
|
||||
// @Summary Retrieve a user by ID
|
||||
// @Description Returns a user's details based on their ID.
|
||||
// @Tags Users
|
||||
// @Accept json
|
||||
// @Produce json
|
||||
// @Param id path int true "User ID" or "self"
|
||||
// @Success 200 {object} users.User "User details"
|
||||
// @Failure 403 {object} map[string]string "Forbidden"
|
||||
// @Failure 404 {object} map[string]string "Not Found"
|
||||
// @Failure 500 {object} map[string]string "Internal Server Error"
|
||||
// @Router /api/users/{id} [get]
|
||||
func userGetHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) {
|
||||
givenUserIdString := r.URL.Query().Get("id")
|
||||
|
||||
func getUser(_ http.ResponseWriter, r *http.Request) (*modifyUserRequest, error) {
|
||||
if r.Body == nil {
|
||||
return nil, errors.ErrEmptyRequest
|
||||
}
|
||||
// since api self is used to validate a logged in user
|
||||
w.Header().Add("X-Renew-Token", "false")
|
||||
|
||||
req := &modifyUserRequest{}
|
||||
err := json.NewDecoder(r.Body).Decode(req)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if req.What != "user" {
|
||||
return nil, errors.ErrInvalidDataType
|
||||
}
|
||||
|
||||
return req, nil
|
||||
}
|
||||
|
||||
func withSelfOrAdmin(fn handleFunc) handleFunc {
|
||||
return withUser(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
|
||||
id, err := getUserID(r)
|
||||
var givenUserId uint
|
||||
if givenUserIdString == "self" {
|
||||
givenUserId = d.user.ID
|
||||
} else if givenUserIdString == "" {
|
||||
if !d.user.Perm.Admin {
|
||||
return http.StatusForbidden, nil
|
||||
}
|
||||
users, err := store.Users.Gets(config.Server.Root)
|
||||
if err != nil {
|
||||
return http.StatusInternalServerError, err
|
||||
}
|
||||
if d.user.ID != id && !d.user.Perm.Admin {
|
||||
return http.StatusForbidden, nil
|
||||
|
||||
for _, u := range users {
|
||||
u.Password = ""
|
||||
}
|
||||
for _, u := range users {
|
||||
u.ApiKeys = nil
|
||||
}
|
||||
|
||||
d.raw = id
|
||||
return fn(w, r, d)
|
||||
})
|
||||
}
|
||||
sort.Slice(users, func(i, j int) bool {
|
||||
return users[i].ID < users[j].ID
|
||||
})
|
||||
|
||||
var usersGetHandler = withAdmin(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
|
||||
users, err := d.store.Users.Gets(d.server.Root)
|
||||
if err != nil {
|
||||
return http.StatusInternalServerError, err
|
||||
return renderJSON(w, r, users)
|
||||
} else {
|
||||
num, _ := strconv.ParseUint(givenUserIdString, 10, 32)
|
||||
givenUserId = uint(num)
|
||||
}
|
||||
|
||||
for _, u := range users {
|
||||
u.Password = ""
|
||||
if givenUserId != d.user.ID && !d.user.Perm.Admin {
|
||||
return http.StatusForbidden, nil
|
||||
}
|
||||
|
||||
sort.Slice(users, func(i, j int) bool {
|
||||
return users[i].ID < users[j].ID
|
||||
})
|
||||
|
||||
return renderJSON(w, r, users)
|
||||
})
|
||||
|
||||
var userGetHandler = withSelfOrAdmin(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
|
||||
u, err := d.store.Users.Get(d.server.Root, d.raw.(uint))
|
||||
// Fetch the user details
|
||||
u, err := store.Users.Get(config.Server.Root, givenUserId)
|
||||
if err == errors.ErrNotExist {
|
||||
return http.StatusNotFound, err
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return http.StatusInternalServerError, err
|
||||
}
|
||||
|
||||
// Remove the password from the response if the user is not an admin
|
||||
u.Password = ""
|
||||
u.ApiKeys = nil
|
||||
if !d.user.Perm.Admin {
|
||||
u.Scope = ""
|
||||
}
|
||||
return renderJSON(w, r, u)
|
||||
})
|
||||
|
||||
var userDeleteHandler = withSelfOrAdmin(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
|
||||
err := d.store.Users.Delete(d.raw.(uint))
|
||||
return renderJSON(w, r, u)
|
||||
}
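As a hedged illustration of the endpoint documented above: the handler reads the user ID from the id query parameter (accepting "self"), and the frontend in this changeset authenticates with an X-Auth header. The host, port, and token value below are placeholders, and a configured baseURL would prefix the path.

package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Fetch the current user's details with a long-lived API token.
	req, err := http.NewRequest(http.MethodGet, "http://localhost:8080/api/users?id=self", nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("X-Auth", "YOUR_API_TOKEN") // token created from the user settings page (placeholder)
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(body))
}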
|
||||
|
||||
// userDeleteHandler deletes a user by ID.
|
||||
// @Summary Delete a user by ID
|
||||
// @Description Deletes a user identified by their ID.
|
||||
// @Tags Users
|
||||
// @Accept json
|
||||
// @Produce json
|
||||
// @Param id path int true "User ID"
|
||||
// @Success 200 "User deleted successfully"
|
||||
// @Failure 403 {object} map[string]string "Forbidden"
|
||||
// @Failure 500 {object} map[string]string "Internal Server Error"
|
||||
// @Router /api/users/{id} [delete]
|
||||
func userDeleteHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) {
|
||||
givenUserIdString := r.URL.Query().Get("id")
|
||||
num, _ := strconv.ParseUint(givenUserIdString, 10, 32)
|
||||
givenUserId := uint(num)
|
||||
|
||||
if givenUserId == d.user.ID || !d.user.Perm.Admin {
|
||||
return http.StatusForbidden, nil
|
||||
}
|
||||
|
||||
// Delete the user
|
||||
err := store.Users.Delete(givenUserId)
|
||||
if err != nil {
|
||||
return errToStatus(err), err
|
||||
}
|
||||
|
||||
return http.StatusOK, nil
|
||||
})
|
||||
}
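A matching sketch for the delete endpoint above. Note the check in the handler: the caller must be an admin and cannot delete their own account. The ID, host, and token below are placeholders.

package main

import (
	"fmt"
	"net/http"
)

func main() {
	// Delete user 42 as an admin.
	req, err := http.NewRequest(http.MethodDelete, "http://localhost:8080/api/users?id=42", nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("X-Auth", "ADMIN_API_TOKEN") // admin token (placeholder)
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status)
}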
|
||||
|
||||
var userPostHandler = withAdmin(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
|
||||
req, err := getUser(w, r)
|
||||
// usersPostHandler creates a new user.
|
||||
// @Summary Create a new user
|
||||
// @Description Adds a new user to the system.
|
||||
// @Tags Users
|
||||
// @Accept json
|
||||
// @Produce json
|
||||
// @Param data body users.User true "User data to create a new user"
|
||||
// @Success 201 {object} users.User "Created user"
|
||||
// @Failure 400 {object} map[string]string "Bad Request"
|
||||
// @Failure 500 {object} map[string]string "Internal Server Error"
|
||||
// @Router /api/users [post]
|
||||
func usersPostHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) {
|
||||
if !d.user.Perm.Admin {
|
||||
return http.StatusForbidden, nil
|
||||
}
|
||||
|
||||
// Validate the user's scope
|
||||
_, _, err := files.GetRealPath(config.Server.Root, d.user.Scope)
|
||||
if err != nil {
|
||||
return http.StatusBadRequest, err
|
||||
}
|
||||
|
||||
// Read the JSON body
|
||||
body, err := io.ReadAll(r.Body)
|
||||
if err != nil {
|
||||
return http.StatusInternalServerError, err
|
||||
}
|
||||
defer r.Body.Close()
|
||||
|
||||
// Parse the JSON into the UserRequest struct
|
||||
var req UserRequest
|
||||
if err = json.Unmarshal(body, &req); err != nil {
|
||||
return http.StatusBadRequest, err
|
||||
}
|
||||
|
||||
if len(req.Which) != 0 {
|
||||
return http.StatusBadRequest, nil
|
||||
}
|
||||
|
@ -137,22 +180,50 @@ var userPostHandler = withAdmin(func(w http.ResponseWriter, r *http.Request, d *
|
|||
|
||||
w.Header().Set("Location", "/settings/users/"+strconv.FormatUint(uint64(req.Data.ID), 10))
|
||||
return http.StatusCreated, nil
|
||||
})
|
||||
}
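For reference, a hedged sketch of creating a user through the endpoint above. The request body mirrors the UserRequest struct ("what", "which", "data") and the frontend create call; the username, password, host, and token are placeholders.

package main

import (
	"bytes"
	"fmt"
	"net/http"
)

func main() {
	// Create a new user; only "what" and "data" are sent, "which" stays empty.
	payload := []byte(`{"what":"user","data":{"username":"newuser","password":"changeme"}}`)
	req, err := http.NewRequest(http.MethodPost, "http://localhost:8080/api/users", bytes.NewReader(payload))
	if err != nil {
		panic(err)
	}
	req.Header.Set("X-Auth", "ADMIN_API_TOKEN") // admin token (placeholder)
	req.Header.Set("Content-Type", "application/json")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status, "Location:", resp.Header.Get("Location"))
}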
|
||||
|
||||
var userPutHandler = withSelfOrAdmin(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
|
||||
req, err := getUser(w, r)
|
||||
// userPutHandler updates an existing user's details.
|
||||
// @Summary Update a user's details
|
||||
// @Description Updates the details of a user identified by ID.
|
||||
// @Tags Users
|
||||
// @Accept json
|
||||
// @Produce json
|
||||
// @Param id path int true "User ID"
|
||||
// @Param data body users.User true "User data to update"
|
||||
// @Success 200 {object} users.User "Updated user details"
|
||||
// @Failure 400 {object} map[string]string "Bad Request"
|
||||
// @Failure 403 {object} map[string]string "Forbidden"
|
||||
// @Failure 500 {object} map[string]string "Internal Server Error"
|
||||
// @Router /api/users/{id} [put]
|
||||
func userPutHandler(w http.ResponseWriter, r *http.Request, d *requestContext) (int, error) {
|
||||
givenUserIdString := r.URL.Query().Get("id")
|
||||
num, _ := strconv.ParseUint(givenUserIdString, 10, 32)
|
||||
givenUserId := uint(num)
|
||||
|
||||
if givenUserId != d.user.ID && !d.user.Perm.Admin {
|
||||
return http.StatusForbidden, nil
|
||||
}
|
||||
|
||||
// Validate the user's scope
|
||||
_, _, err := files.GetRealPath(config.Server.Root, d.user.Scope)
|
||||
if err != nil {
|
||||
return http.StatusBadRequest, err
|
||||
}
|
||||
|
||||
if req.Data.ID != d.raw.(uint) {
|
||||
return http.StatusBadRequest, nil
|
||||
}
|
||||
_, _, err = files.GetRealPath(d.server.Root, req.Data.Scope)
|
||||
// Read the JSON body
|
||||
body, err := io.ReadAll(r.Body)
|
||||
if err != nil {
|
||||
return http.StatusBadRequest, nil
|
||||
return http.StatusInternalServerError, err
|
||||
}
|
||||
defer r.Body.Close()
|
||||
|
||||
// Parse the JSON into the UserRequest struct
|
||||
var req UserRequest
|
||||
if err = json.Unmarshal(body, &req); err != nil {
|
||||
return http.StatusBadRequest, err
|
||||
}
|
||||
|
||||
// If `Which` is not specified, default to updating all fields
|
||||
if len(req.Which) == 0 || req.Which[0] == "all" {
|
||||
req.Which = []string{}
|
||||
v := reflect.ValueOf(req.Data)
|
||||
|
@ -160,6 +231,8 @@ var userPutHandler = withSelfOrAdmin(func(w http.ResponseWriter, r *http.Request
|
|||
v = v.Elem()
|
||||
}
|
||||
t := v.Type()
|
||||
|
||||
// Dynamically populate fields to update
|
||||
for i := 0; i < t.NumField(); i++ {
|
||||
field := t.Field(i)
|
||||
if field.Name == "Password" && req.Data.Password != "" {
|
||||
|
@ -170,10 +243,13 @@ var userPutHandler = withSelfOrAdmin(func(w http.ResponseWriter, r *http.Request
|
|||
}
|
||||
}
|
||||
|
||||
for k, v := range req.Which {
|
||||
v = cases.Title(language.English, cases.NoLower).String(v)
|
||||
req.Which[k] = v
|
||||
if v == "Password" {
|
||||
// Process the fields to update
|
||||
for _, field := range req.Which {
|
||||
// Title case field names
|
||||
field = cases.Title(language.English, cases.NoLower).String(field)
|
||||
|
||||
// Handle password update
|
||||
if field == "Password" {
|
||||
if !d.user.Perm.Admin && d.user.LockPassword {
|
||||
return http.StatusForbidden, nil
|
||||
}
|
||||
|
@ -183,16 +259,20 @@ var userPutHandler = withSelfOrAdmin(func(w http.ResponseWriter, r *http.Request
|
|||
}
|
||||
}
|
||||
|
||||
for _, f := range NonModifiableFieldsForNonAdmin {
|
||||
if !d.user.Perm.Admin && v == f {
|
||||
// Prevent non-admins from modifying certain fields
|
||||
for _, restrictedField := range NonModifiableFieldsForNonAdmin {
|
||||
if !d.user.Perm.Admin && field == restrictedField {
|
||||
return http.StatusForbidden, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
err = d.store.Users.Update(req.Data, req.Which...)
|
||||
// Perform the user update
|
||||
err = store.Users.Update(req.Data, req.Which...)
|
||||
if err != nil {
|
||||
return http.StatusInternalServerError, err
|
||||
}
|
||||
|
||||
return http.StatusOK, nil
|
||||
})
|
||||
// Return the updated user (with the password hidden) as JSON response
|
||||
req.Data.Password = ""
|
||||
return renderJSON(w, r, req.Data)
|
||||
}
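And a sketch for the update endpoint above: "which" selects the fields to change (they are title-cased server-side, so lowercase names are fine), and an empty or "all" value updates every field. The ID, locale value, host, and token are placeholders.

package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Change only the locale of user 42.
	payload := []byte(`{"what":"user","which":["locale"],"data":{"id":42,"locale":"en"}}`)
	req, err := http.NewRequest(http.MethodPut, "http://localhost:8080/api/users?id=42", bytes.NewReader(payload))
	if err != nil {
		panic(err)
	}
	req.Header.Set("X-Auth", "YOUR_API_TOKEN") // token allowed to edit this user (placeholder)
	req.Header.Set("Content-Type", "application/json")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(body))
}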
|
||||
|
|
|
@ -1,30 +1,13 @@
|
|||
package http
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
libErrors "github.com/gtsteffaniak/filebrowser/errors"
|
||||
)
|
||||
|
||||
func renderJSON(w http.ResponseWriter, _ *http.Request, data interface{}) (int, error) {
|
||||
marsh, err := json.Marshal(data)
|
||||
if err != nil {
|
||||
return http.StatusInternalServerError, err
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
if _, err := w.Write(marsh); err != nil {
|
||||
return http.StatusInternalServerError, err
|
||||
}
|
||||
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
func errToStatus(err error) int {
|
||||
switch {
|
||||
case err == nil:
|
||||
|
@ -45,23 +28,3 @@ func errToStatus(err error) int {
|
|||
return http.StatusInternalServerError
|
||||
}
|
||||
}
|
||||
|
||||
// This is an adaptation of http.StripPrefix in which we don't
|
||||
// return 404 if the page doesn't have the needed prefix.
|
||||
func stripPrefix(prefix string, h http.Handler) http.Handler {
|
||||
if prefix == "" || prefix == "/" {
|
||||
return h
|
||||
}
|
||||
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
p := strings.TrimPrefix(r.URL.Path, prefix)
|
||||
rp := strings.TrimPrefix(r.URL.RawPath, prefix)
|
||||
r2 := new(http.Request)
|
||||
*r2 = *r
|
||||
r2.URL = new(url.URL)
|
||||
*r2.URL = *r.URL
|
||||
r2.URL.Path = p
|
||||
r2.URL.RawPath = rp
|
||||
h.ServeHTTP(w, r2)
|
||||
})
|
||||
}
|
||||
|
|
Binary file not shown (new image, 72 KiB).
|
@ -1,23 +0,0 @@
|
|||
package rules
|
||||
|
||||
import "testing"
|
||||
|
||||
func TestMatchHidden(t *testing.T) {
|
||||
cases := map[string]bool{
|
||||
"/": false,
|
||||
"/src": false,
|
||||
"/src/": false,
|
||||
"/.circleci": true,
|
||||
"/a/b/c/.docker.json": true,
|
||||
".docker.json": true,
|
||||
"Dockerfile": false,
|
||||
"/Dockerfile": false,
|
||||
}
|
||||
|
||||
for path, want := range cases {
|
||||
got := MatchHidden(path)
|
||||
if got != want {
|
||||
t.Errorf("MatchHidden(%s)=%v; want %v", path, got, want)
|
||||
}
|
||||
}
|
||||
}
|
|
@ -4,8 +4,10 @@ import (
|
|||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/goccy/go-yaml"
|
||||
"github.com/gtsteffaniak/filebrowser/users"
|
||||
)
|
||||
|
||||
var Config Settings
|
||||
|
@ -28,6 +30,12 @@ func Initialize(configFile string) {
|
|||
log.Fatalf("ERROR: Configured Root Path does not exist! %v", err)
|
||||
}
|
||||
Config.Server.Root = realRoot
|
||||
baseurl := strings.Trim(Config.Server.BaseURL, "/")
|
||||
if baseurl == "" {
|
||||
Config.Server.BaseURL = "/"
|
||||
} else {
|
||||
Config.Server.BaseURL = "/" + baseurl + "/"
|
||||
}
|
||||
}
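The base URL handling above normalizes whatever is configured into either "/" or "/<path>/". A tiny standalone restatement of that rule, with a helper name that is illustrative rather than taken from the changeset:

package settings

import "strings"

// normalizeBaseURL mirrors the trimming in Initialize above: "" and "/" both
// become "/", and anything else becomes "/<path>/" with exactly one leading
// and one trailing slash.
func normalizeBaseURL(raw string) string {
	trimmed := strings.Trim(raw, "/")
	if trimmed == "" {
		return "/"
	}
	return "/" + trimmed + "/"
}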
|
||||
|
||||
func loadConfigFile(configFile string) []byte {
|
||||
|
@ -86,7 +94,7 @@ func setDefaults() Settings {
|
|||
DisableSettings: false,
|
||||
ViewMode: "normal",
|
||||
Locale: "en",
|
||||
Permissions: Permissions{
|
||||
Permissions: users.Permissions{
|
||||
Create: false,
|
||||
Rename: false,
|
||||
Modify: false,
|
||||
|
@ -94,6 +102,7 @@ func setDefaults() Settings {
|
|||
Share: false,
|
||||
Download: false,
|
||||
Admin: false,
|
||||
Api: false,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
|
|
@ -3,7 +3,7 @@ package settings
|
|||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/gtsteffaniak/filebrowser/rules"
|
||||
"github.com/gtsteffaniak/filebrowser/users"
|
||||
)
|
||||
|
||||
func TestSettings_MakeUserDir(t *testing.T) {
|
||||
|
@ -15,7 +15,7 @@ func TestSettings_MakeUserDir(t *testing.T) {
|
|||
Shell []string
|
||||
AdminUsername string
|
||||
AdminPassword string
|
||||
Rules []rules.Rule
|
||||
Rules []users.Rule
|
||||
Server Server
|
||||
Auth Auth
|
||||
Frontend Frontend
|
||||
|
|
|
@ -2,9 +2,8 @@ package settings
|
|||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"strings"
|
||||
|
||||
"github.com/gtsteffaniak/filebrowser/rules"
|
||||
"github.com/gtsteffaniak/filebrowser/users"
|
||||
)
|
||||
|
||||
const DefaultUsersHomeBasePath = "/users"
|
||||
|
@ -12,18 +11,6 @@ const DefaultUsersHomeBasePath = "/users"
|
|||
// AuthMethod describes an authentication method.
|
||||
type AuthMethod string
|
||||
|
||||
// Settings contain the main settings of the application.
|
||||
// GetRules implements rules.Provider.
|
||||
func (s *Settings) GetRules() []rules.Rule {
|
||||
return s.Rules
|
||||
}
|
||||
|
||||
// Server specific settings
|
||||
// Clean cleans any variables that might need cleaning.
|
||||
func (s *Server) Clean() {
|
||||
s.BaseURL = strings.TrimSuffix(s.BaseURL, "/")
|
||||
}
|
||||
|
||||
// GenerateKey generates a key of 512 bits.
|
||||
func GenerateKey() ([]byte, error) {
|
||||
b := make([]byte, 64) //nolint:gomnd
|
||||
|
@ -40,8 +27,8 @@ func GetSettingsConfig(nameType string, Value string) string {
|
|||
return nameType + Value
|
||||
}
|
||||
|
||||
func AdminPerms() Permissions {
|
||||
return Permissions{
|
||||
func AdminPerms() users.Permissions {
|
||||
return users.Permissions{
|
||||
Create: true,
|
||||
Rename: true,
|
||||
Modify: true,
|
||||
|
@ -49,5 +36,23 @@ func AdminPerms() Permissions {
|
|||
Share: true,
|
||||
Download: true,
|
||||
Admin: true,
|
||||
Api: true,
|
||||
}
|
||||
}
|
||||
|
||||
// Apply applies the default options to a user.
|
||||
func ApplyUserDefaults(u users.User) users.User {
|
||||
u.StickySidebar = Config.UserDefaults.StickySidebar
|
||||
u.DisableSettings = Config.UserDefaults.DisableSettings
|
||||
u.DarkMode = Config.UserDefaults.DarkMode
|
||||
u.Scope = Config.UserDefaults.Scope
|
||||
u.Locale = Config.UserDefaults.Locale
|
||||
u.ViewMode = Config.UserDefaults.ViewMode
|
||||
u.SingleClick = Config.UserDefaults.SingleClick
|
||||
u.Perm = Config.UserDefaults.Perm
|
||||
u.Sorting = Config.UserDefaults.Sorting
|
||||
u.Commands = Config.UserDefaults.Commands
|
||||
u.HideDotfiles = Config.UserDefaults.HideDotfiles
|
||||
u.DateFormat = Config.UserDefaults.DateFormat
|
||||
return u
|
||||
}
|
||||
|
|
|
@ -2,7 +2,7 @@ package settings
|
|||
|
||||
import (
|
||||
"github.com/gtsteffaniak/filebrowser/errors"
|
||||
"github.com/gtsteffaniak/filebrowser/rules"
|
||||
"github.com/gtsteffaniak/filebrowser/users"
|
||||
)
|
||||
|
||||
// StorageBackend is a settings storage backend.
|
||||
|
@ -62,7 +62,7 @@ func (s *Storage) Save(set *Settings) error {
|
|||
}
|
||||
|
||||
if set.Rules == nil {
|
||||
set.Rules = []rules.Rule{}
|
||||
set.Rules = []users.Rule{}
|
||||
}
|
||||
|
||||
if set.Commands == nil {
|
||||
|
@ -94,6 +94,5 @@ func (s *Storage) GetServer() (*Server, error) {
|
|||
|
||||
// SaveServer wraps StorageBackend.SaveServer and adds some verification.
|
||||
func (s *Storage) SaveServer(ser *Server) error {
|
||||
ser.Clean()
|
||||
return s.back.SaveServer(ser)
|
||||
}
|
||||
|
|
|
@ -1,13 +1,13 @@
|
|||
package settings
|
||||
|
||||
import (
|
||||
"github.com/gtsteffaniak/filebrowser/rules"
|
||||
"github.com/gtsteffaniak/filebrowser/users"
|
||||
)
|
||||
|
||||
type Settings struct {
|
||||
Commands map[string][]string `json:"commands"`
|
||||
Shell []string `json:"shell"`
|
||||
Rules []rules.Rule `json:"rules"`
|
||||
Rules []users.Rule `json:"rules"`
|
||||
Server Server `json:"server"`
|
||||
Auth Auth `json:"auth"`
|
||||
Frontend Frontend `json:"frontend"`
|
||||
|
@ -76,25 +76,14 @@ type UserDefaults struct {
|
|||
ViewMode string `json:"viewMode"`
|
||||
GallerySize int `json:"gallerySize"`
|
||||
SingleClick bool `json:"singleClick"`
|
||||
Rules []rules.Rule `json:"rules"`
|
||||
Rules []users.Rule `json:"rules"`
|
||||
Sorting struct {
|
||||
By string `json:"by"`
|
||||
Asc bool `json:"asc"`
|
||||
} `json:"sorting"`
|
||||
Perm Permissions `json:"perm"`
|
||||
Permissions Permissions `json:"permissions"`
|
||||
Commands []string `json:"commands,omitempty"`
|
||||
HideDotfiles bool `json:"hideDotfiles"`
|
||||
DateFormat bool `json:"dateFormat"`
|
||||
}
|
||||
|
||||
type Permissions struct {
|
||||
Admin bool `json:"admin"`
|
||||
Execute bool `json:"execute"`
|
||||
Create bool `json:"create"`
|
||||
Rename bool `json:"rename"`
|
||||
Modify bool `json:"modify"`
|
||||
Delete bool `json:"delete"`
|
||||
Share bool `json:"share"`
|
||||
Download bool `json:"download"`
|
||||
Perm users.Permissions `json:"perm"`
|
||||
Permissions users.Permissions `json:"permissions"`
|
||||
Commands []string `json:"commands,omitempty"`
|
||||
HideDotfiles bool `json:"hideDotfiles"`
|
||||
DateFormat bool `json:"dateFormat"`
|
||||
}
|
||||
|
|
|
@ -8,6 +8,7 @@ import (
|
|||
|
||||
"github.com/gtsteffaniak/filebrowser/errors"
|
||||
"github.com/gtsteffaniak/filebrowser/users"
|
||||
"github.com/gtsteffaniak/filebrowser/utils"
|
||||
)
|
||||
|
||||
type usersBackend struct {
|
||||
|
@ -55,14 +56,24 @@ func (st usersBackend) Update(user *users.User, fields ...string) error {
|
|||
if len(fields) == 0 {
|
||||
return st.Save(user)
|
||||
}
|
||||
|
||||
val := reflect.ValueOf(user).Elem()
|
||||
|
||||
for _, field := range fields {
|
||||
userField := reflect.ValueOf(user).Elem().FieldByName(field)
|
||||
// Capitalize the first letter (you can adjust this based on your field naming convention)
|
||||
correctedField := utils.CapitalizeFirst(field)
|
||||
|
||||
userField := val.FieldByName(correctedField)
|
||||
if !userField.IsValid() {
|
||||
return fmt.Errorf("invalid field: %s", field)
|
||||
}
|
||||
if !userField.CanSet() {
|
||||
return fmt.Errorf("cannot update unexported field: %s", field)
|
||||
}
|
||||
|
||||
val := userField.Interface()
|
||||
if err := st.db.UpdateField(user, field, val); err != nil {
|
||||
return fmt.Errorf("Error updating user field: %s, error: %v", field, err.Error())
|
||||
if err := st.db.UpdateField(user, correctedField, val); err != nil {
|
||||
return fmt.Errorf("Error updating user field: %s, error: %v", correctedField, err.Error())
|
||||
}
|
||||
}
|
||||
return nil
|
||||
|
|
|
@ -20,7 +20,7 @@ import (
|
|||
// Storage is a storage powered by a Backend which makes the necessary
|
||||
// verifications when fetching and saving data to ensure consistency.
|
||||
type Storage struct {
|
||||
Users users.Store
|
||||
Users *users.Storage
|
||||
Share *share.Storage
|
||||
Auth *auth.Storage
|
||||
Settings *settings.Storage
|
||||
|
@ -92,7 +92,7 @@ func quickSetup(store *Storage) {
|
|||
utils.CheckErr("store.Settings.Save", err)
|
||||
err = store.Settings.SaveServer(&settings.Config.Server)
|
||||
utils.CheckErr("store.Settings.SaveServer", err)
|
||||
user := users.ApplyDefaults(users.User{})
|
||||
user := settings.ApplyUserDefaults(users.User{})
|
||||
user.Username = settings.Config.Auth.AdminUsername
|
||||
user.Password = settings.Config.Auth.AdminPassword
|
||||
user.Perm.Admin = true
|
||||
|
@ -111,7 +111,7 @@ func CreateUser(userInfo users.User, asAdmin bool) error {
|
|||
if userInfo.Username == "" || userInfo.Password == "" {
|
||||
return errors.ErrInvalidRequestParams
|
||||
}
|
||||
newUser := users.ApplyDefaults(userInfo)
|
||||
newUser := settings.ApplyUserDefaults(userInfo)
|
||||
if asAdmin {
|
||||
newUser.Perm = settings.AdminPerms()
|
||||
}
|
||||
|
|
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
|
@ -1,4 +1,4 @@
|
|||
package rules
|
||||
package users
|
||||
|
||||
import (
|
||||
"path/filepath"
|
||||
|
@ -11,6 +11,18 @@ type Checker interface {
|
|||
Check(path string) bool
|
||||
}
|
||||
|
||||
// Check implements rules.Checker.
|
||||
func (user *User) Check(path string) bool {
|
||||
allow := true
|
||||
for _, rule := range user.Rules {
|
||||
if rule.Matches(path) {
|
||||
allow = rule.Allow
|
||||
}
|
||||
}
|
||||
|
||||
return allow
|
||||
}
|
||||
|
||||
// Rule is a allow/disallow rule.
|
||||
type Rule struct {
|
||||
Regex bool `json:"regex"`
|
|
@ -5,7 +5,6 @@ import (
|
|||
"time"
|
||||
|
||||
"github.com/gtsteffaniak/filebrowser/errors"
|
||||
"github.com/gtsteffaniak/filebrowser/rules"
|
||||
)
|
||||
|
||||
// StorageBackend is the interface to implement for a users storage.
|
||||
|
@ -26,7 +25,9 @@ type Store interface {
|
|||
Save(user *User) error
|
||||
Delete(id interface{}) error
|
||||
LastUpdate(id uint) int64
|
||||
AddRule(username string, rule rules.Rule) error
|
||||
AddApiKey(username uint, name string, key AuthToken) error
|
||||
DeleteApiKey(username uint, name string) error
|
||||
AddRule(username string, rule Rule) error
|
||||
DeleteRule(username string, ruleID string) error
|
||||
}
|
||||
|
||||
|
@ -79,7 +80,7 @@ func (s *Storage) Update(user *User, fields ...string) error {
|
|||
}
|
||||
|
||||
// AddRule adds a rule to the user's rules list and updates the user in the database.
|
||||
func (s *Storage) AddRule(userID string, rule rules.Rule) error {
|
||||
func (s *Storage) AddRule(userID string, rule Rule) error {
|
||||
user, err := s.Get("", userID)
|
||||
if err != nil {
|
||||
return err
|
||||
|
@ -95,6 +96,42 @@ func (s *Storage) AddRule(userID string, rule rules.Rule) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
func (s *Storage) AddApiKey(userID uint, name string, key AuthToken) error {
|
||||
user, err := s.Get("", userID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// Initialize the ApiKeys map if it is nil
|
||||
if user.ApiKeys == nil {
|
||||
user.ApiKeys = make(map[string]AuthToken)
|
||||
}
|
||||
user.ApiKeys[name] = key
|
||||
err = s.Update(user, "ApiKeys")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *Storage) DeleteApiKey(userID uint, name string) error {
|
||||
user, err := s.Get("", userID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// Initialize the ApiKeys map if it is nil
|
||||
if user.ApiKeys == nil {
|
||||
user.ApiKeys = make(map[string]AuthToken)
|
||||
}
|
||||
delete(user.ApiKeys, name)
|
||||
err = s.Update(user, "ApiKeys")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
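As a hedged sketch of how the new token storage above might be used: build an AuthToken (the struct appears later in this diff) and hand it to AddApiKey. The key string, token name, and lifetime below are placeholders; real keys are presumably generated and signed elsewhere.

package example

import (
	"time"

	"github.com/gtsteffaniak/filebrowser/users"
)

// addToken stores a long-lived, API-only token for the given user.
func addToken(store *users.Storage, userID uint) error {
	token := users.AuthToken{
		Key:       "generated-token-string", // placeholder for the signed token value
		Name:      "ci-token",
		Created:   time.Now().Unix(),
		Expires:   time.Now().Add(90 * 24 * time.Hour).Unix(),
		BelongsTo: userID,
		Permissions: users.Permissions{
			Api: true,
		},
	}
	return store.AddApiKey(userID, token.Name, token)
}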
|
||||
|
||||
// DeleteRule deletes a rule specified by ID from the user's rules list and updates the user in the database.
|
||||
func (s *Storage) DeleteRule(userID string, ruleID string) error {
|
||||
user, err := s.Get("", userID)
|
||||
|
@ -103,7 +140,7 @@ func (s *Storage) DeleteRule(userID string, ruleID string) error {
|
|||
}
|
||||
|
||||
// Find and remove the rule with the specified ID
|
||||
var updatedRules []rules.Rule
|
||||
var updatedRules []Rule
|
||||
for _, r := range user.Rules {
|
||||
if r.Id != ruleID {
|
||||
updatedRules = append(updatedRules, r)
|
||||
|
|
|
@ -3,10 +3,31 @@ package users
|
|||
import (
|
||||
"regexp"
|
||||
|
||||
"github.com/gtsteffaniak/filebrowser/rules"
|
||||
"github.com/gtsteffaniak/filebrowser/settings"
|
||||
"github.com/golang-jwt/jwt/v4"
|
||||
)
|
||||
|
||||
type AuthToken struct {
|
||||
Key string `json:"key"`
|
||||
Name string `json:"name"`
|
||||
Created int64 `json:"createdAt"`
|
||||
Expires int64 `json:"expiresAt"`
|
||||
BelongsTo uint `json:"belongsTo"`
|
||||
Permissions Permissions `json:"Permissions"`
|
||||
jwt.RegisteredClaims `json:"-"`
|
||||
}
|
||||
|
||||
type Permissions struct {
|
||||
Api bool `json:"api"`
|
||||
Admin bool `json:"admin"`
|
||||
Execute bool `json:"execute"`
|
||||
Create bool `json:"create"`
|
||||
Rename bool `json:"rename"`
|
||||
Modify bool `json:"modify"`
|
||||
Delete bool `json:"delete"`
|
||||
Share bool `json:"share"`
|
||||
Download bool `json:"download"`
|
||||
}
|
||||
|
||||
// SortingSettings represents the sorting settings.
|
||||
type Sorting struct {
|
||||
By string `json:"by"`
|
||||
|
@ -20,16 +41,17 @@ type User struct {
|
|||
DisableSettings bool `json:"disableSettings"`
|
||||
ID uint `storm:"id,increment" json:"id"`
|
||||
Username string `storm:"unique" json:"username"`
|
||||
Password string `json:"password"`
|
||||
Password string `json:"password,omitempty"`
|
||||
Scope string `json:"scope"`
|
||||
Locale string `json:"locale"`
|
||||
LockPassword bool `json:"lockPassword"`
|
||||
ViewMode string `json:"viewMode"`
|
||||
SingleClick bool `json:"singleClick"`
|
||||
Perm settings.Permissions `json:"perm"`
|
||||
Commands []string `json:"commands"`
|
||||
Sorting Sorting `json:"sorting"`
|
||||
Rules []rules.Rule `json:"rules"`
|
||||
Perm Permissions `json:"perm"`
|
||||
Commands []string `json:"commands"`
|
||||
Rules []Rule `json:"rules"`
|
||||
ApiKeys map[string]AuthToken `json:"apiKeys,omitempty"`
|
||||
HideDotfiles bool `json:"hideDotfiles"`
|
||||
DateFormat bool `json:"dateFormat"`
|
||||
GallerySize int `json:"gallerySize"`
|
||||
|
@ -41,19 +63,20 @@ var PublicUser = User{
|
|||
Scope: "./",
|
||||
ViewMode: "normal",
|
||||
LockPassword: true,
|
||||
Perm: settings.Permissions{
|
||||
Perm: Permissions{
|
||||
Create: false,
|
||||
Rename: false,
|
||||
Modify: false,
|
||||
Delete: false,
|
||||
Share: true,
|
||||
Share: false,
|
||||
Download: true,
|
||||
Admin: false,
|
||||
Api: false,
|
||||
},
|
||||
}
|
||||
|
||||
// GetRules implements rules.Provider.
|
||||
func (u *User) GetRules() []rules.Rule {
|
||||
func (u *User) GetRules() []Rule {
|
||||
return u.Rules
|
||||
}
|
||||
|
||||
|
@ -71,20 +94,3 @@ func (u *User) CanExecute(command string) bool {
|
|||
|
||||
return false
|
||||
}
|
||||
|
||||
// Apply applies the default options to a user.
|
||||
func ApplyDefaults(u User) User {
|
||||
u.StickySidebar = settings.Config.UserDefaults.StickySidebar
|
||||
u.DisableSettings = settings.Config.UserDefaults.DisableSettings
|
||||
u.DarkMode = settings.Config.UserDefaults.DarkMode
|
||||
u.Scope = settings.Config.UserDefaults.Scope
|
||||
u.Locale = settings.Config.UserDefaults.Locale
|
||||
u.ViewMode = settings.Config.UserDefaults.ViewMode
|
||||
u.SingleClick = settings.Config.UserDefaults.SingleClick
|
||||
u.Perm = settings.Config.UserDefaults.Perm
|
||||
u.Sorting = settings.Config.UserDefaults.Sorting
|
||||
u.Commands = settings.Config.UserDefaults.Commands
|
||||
u.HideDotfiles = settings.Config.UserDefaults.HideDotfiles
|
||||
u.DateFormat = settings.Config.UserDefaults.DateFormat
|
||||
return u
|
||||
}
|
||||
|
|
|
@ -1,9 +1,13 @@
|
|||
package utils
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"fmt"
|
||||
"log"
|
||||
|
||||
"github.com/gtsteffaniak/filebrowser/settings"
|
||||
math "math/rand"
|
||||
"reflect"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
func CheckErr(source string, err error) {
|
||||
|
@ -13,7 +17,55 @@ func CheckErr(source string, err error) {
|
|||
}
|
||||
|
||||
func GenerateKey() []byte {
|
||||
k, err := settings.GenerateKey()
|
||||
CheckErr("generateKey", err)
|
||||
return k
|
||||
b := make([]byte, 64)
|
||||
_, err := rand.Read(b)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
return b
|
||||
}
|
||||
|
||||
// CapitalizeFirst returns the input string with the first letter capitalized.
|
||||
func CapitalizeFirst(s string) string {
|
||||
if len(s) == 0 {
|
||||
return s // Return the empty string as is
|
||||
}
|
||||
return strings.ToUpper(string(s[0])) + s[1:]
|
||||
}
|
||||
|
||||
func GenerateRandomHash(length int) string {
|
||||
const charset = "abcdefghijklmnopqrstuvwxyz0123456789"
|
||||
math.New(math.NewSource(time.Now().UnixNano()))
|
||||
result := make([]byte, length)
|
||||
for i := range result {
|
||||
result[i] = charset[math.Intn(len(charset))]
|
||||
}
|
||||
return string(result)
|
||||
}
|
||||
|
||||
func PrintStructFields(v interface{}) {
|
||||
val := reflect.ValueOf(v)
|
||||
typ := reflect.TypeOf(v)
|
||||
|
||||
// Ensure the input is a struct
|
||||
if val.Kind() != reflect.Struct {
|
||||
fmt.Println("Provided value is not a struct")
|
||||
return
|
||||
}
|
||||
|
||||
// Iterate over the fields of the struct
|
||||
for i := 0; i < val.NumField(); i++ {
|
||||
field := val.Field(i)
|
||||
fieldType := typ.Field(i)
|
||||
|
||||
// Convert field value to string, if possible
|
||||
fieldValue := fmt.Sprintf("%v", field.Interface())
|
||||
|
||||
// Limit to 100 characters
|
||||
if len(fieldValue) > 100 {
|
||||
fieldValue = fieldValue[:100] + "..."
|
||||
}
|
||||
|
||||
fmt.Printf("Field: %s, %s\n", fieldType.Name, fieldValue)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -220,6 +220,7 @@ userDefaults:
|
|||
|
||||
- `download`: This boolean value determines whether download permissions are granted.
|
||||
|
||||
- `api`: Ability to create and manage API keys.
|
||||
|
||||
- `hideDotfiles`: This boolean value determines whether dotfiles are hidden. (`true` or `false`)
|
||||
|
||||
|
|
|
@ -1,2 +0,0 @@
|
|||
# Getting Started using FileBrowser Quantum
|
||||
|
|
@ -19,4 +19,7 @@ Note: share links will not work and will need to be re-created after migration.
|
|||
|
||||
The filebrowser Quantum application should run with the same user and rules that
|
||||
you have from the original. But keep in mind the differences that may not work
|
||||
the same way, but all user configuration should be available.
|
||||
the same way, but all user configuration should be available.
|
||||
|
||||
The Windows binary is particularly untested; I would advise using Docker if testing on Windows.
|
||||
|
||||
|
|
|
@ -1,22 +1,21 @@
|
|||
# Planned Roadmap
|
||||
|
||||
upcoming 0.2.x releases:
|
||||
upcoming 0.3.x releases:
|
||||
|
||||
- Replace http routes for gorilla/mux with stdlib
|
||||
- Theme configuration from settings
|
||||
- File syncronization improvements
|
||||
- File synchronization improvements
|
||||
- more filetype previews
|
||||
|
||||
next major 0.3.0 release :
|
||||
|
||||
- multiple sources https://github.com/filebrowser/filebrowser/issues/2514
|
||||
- introduce jobs as a replacement for runners.
|
||||
- Add Job status to the sidebar
|
||||
- Add Job status to the sidebar
|
||||
- index status.
|
||||
- Job status from users
|
||||
- upload status
|
||||
- opentelemetry metrics
|
||||
- simple search/filter for current listings.
|
||||
- Enable mobile search with same features as desktop
|
||||
|
||||
Unplanned Future releases:
|
||||
- multiple sources https://github.com/filebrowser/filebrowser/issues/2514
|
||||
- Add tools to sidebar
|
||||
- duplicate file detector.
|
||||
- bulk rename https://github.com/filebrowser/filebrowser/issues/2473
|
||||
|
|
|
@ -9,7 +9,7 @@
|
|||
},
|
||||
"scripts": {
|
||||
"dev": "vite dev",
|
||||
"build": "vite build && cp -R dist/ ../backend/cmd/",
|
||||
"build": "vite build && cp -r dist/* ../backend/http/embed",
|
||||
"build-docker": "vite build",
|
||||
"watch": "vite build --watch",
|
||||
"typecheck": "vue-tsc -p ./tsconfig.json --noEmit",
|
||||
|
|
|
@ -4,32 +4,30 @@
|
|||
<meta charset="utf-8">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1, user-scalable=no">
|
||||
{{ if .ReCaptcha }}
|
||||
<script src="{{ .ReCaptchaHost }}/recaptcha/api.js?render=explicit" data-vite-ignore></script>
|
||||
{{ end }}
|
||||
|
||||
[{[ if .ReCaptcha -]}]
|
||||
<script src="[{[ .ReCaptchaHost ]}]/recaptcha/api.js?render=explicit" data-vite-ignore></script>
|
||||
[{[ end ]}]
|
||||
<title>{{ if .Name }}{{ .Name }}{{ else }}FileBrowser Quantum{{ end }}</title>
|
||||
|
||||
<title>[{[ if .Name -]}][{[ .Name ]}][{[ else ]}]FileBrowser Quantum[{[ end ]}]</title>
|
||||
|
||||
<link rel="icon" type="image/png" sizes="256x256" href="[{[ .StaticURL ]}]/img/icons/favicon-256x256.png">
|
||||
<link rel="icon" type="image/png" sizes="256x256" href="{{ .StaticURL }}/img/icons/favicon-256x256.png">
|
||||
|
||||
<!-- Add to home screen for Android and modern mobile browsers -->
|
||||
<link rel="manifest" id="manifestPlaceholder" crossorigin="use-credentials">
|
||||
<meta name="theme-color" content="[{[ if .Color -]}][{[ .Color ]}][{[ else ]}]#2979ff[{[ end ]}]">
|
||||
<meta name="theme-color" content="{{ if .Color }}{{ .Color }}{{ else }}#2979ff{{ end }}">
|
||||
|
||||
<!-- Add to home screen for Safari on iOS/iPadOS -->
|
||||
<meta name="apple-mobile-web-app-capable" content="yes">
|
||||
<meta name="apple-mobile-web-app-status-bar-style" content="black">
|
||||
<meta name="apple-mobile-web-app-title" content="assets">
|
||||
<link rel="apple-touch-icon" href="[{[ .StaticURL ]}]/img/icons/apple-touch-icon.png">
|
||||
<link rel="apple-touch-icon" href="{{ .StaticURL }}/img/icons/apple-touch-icon.png">
|
||||
|
||||
<!-- Add to home screen for Windows -->
|
||||
<meta name="msapplication-TileImage" content="[{[ .StaticURL ]}]/img/icons/mstile-144x144.png">
|
||||
<meta name="msapplication-TileColor" content="[{[ if .Color -]}][{[ .Color ]}][{[ else ]}]#2979ff[{[ end ]}]">
|
||||
<meta name="msapplication-TileImage" content="{{ .StaticURL }}/img/icons/mstile-144x144.png">
|
||||
<meta name="msapplication-TileColor" content="{{ if .Color }}{{ .Color }}{{ else }}#2979ff{{ end }}">
|
||||
|
||||
<!-- Inject Some Variables and generate the manifest json -->
|
||||
<script>
|
||||
window.FileBrowser = JSON.parse('[{[ .Json ]}]');
|
||||
window.FileBrowser = JSON.parse('{{ .globalVars }}');
|
||||
|
||||
var fullStaticURL = window.location.origin + window.FileBrowser.StaticURL;
|
||||
var dynamicManifest = {
|
||||
|
@ -37,12 +35,12 @@
|
|||
"short_name": window.FileBrowser.Name || 'FileBrowser',
|
||||
"icons": [
|
||||
{
|
||||
"src": fullStaticURL + "/img/icons/android-chrome-256x256.png",
|
||||
"src": fullStaticURL + "/img/icons/android-chrome-256x256.png",
|
||||
"sizes": "512x512",
|
||||
"type": "image/png"
|
||||
}
|
||||
],
|
||||
"start_url": window.location.origin + window.FileBrowser.BaseURL,
|
||||
"start_url": fullStaticURL,
|
||||
"display": "standalone",
|
||||
"background_color": "#ffffff",
|
||||
"theme_color": window.FileBrowser.Color || "#455a64"
|
||||
|
@ -120,7 +118,7 @@
|
|||
<body>
|
||||
<div id="app"></div>
|
||||
|
||||
[{[ if .darkMode -]}]
|
||||
{{ if .darkMode -}}
|
||||
<div id="loading" class="dark-mode">
|
||||
<div class="spinner">
|
||||
<div class="bounce1"></div>
|
||||
|
@ -128,18 +126,19 @@
|
|||
<div class="bounce3"></div>
|
||||
</div>
|
||||
</div>
|
||||
[{[ else ]}]
|
||||
{{ else }}
|
||||
<div id="loading">
|
||||
<div class="spinner">
|
||||
<div class="bounce1"></div>
|
||||
<div class="bounce2"></div>
|
||||
<div class="bounce3"></div>
|
||||
</div>
|
||||
</div> [{[ end ]}]
|
||||
</div>
|
||||
{{end}}
|
||||
<script type="module" src="/src/main.ts"></script>
|
||||
|
||||
[{[ if .CSS -]}]
|
||||
<link rel="stylesheet" href="[{[ .StaticURL ]}]/custom.css" />
|
||||
[{[ end ]}]
|
||||
{{ if .CSS }}
|
||||
<link rel="stylesheet" href="{{ .StaticURL }}/custom.css" >
|
||||
{{ end }}
|
||||
</body>
|
||||
</html>
|
||||
|
|
|
@ -13,7 +13,7 @@
|
|||
"type": "image/png"
|
||||
}
|
||||
],
|
||||
"start_url": "/",
|
||||
"start_url": "./",
|
||||
"display": "standalone",
|
||||
"background_color": "#ffffff",
|
||||
"theme_color": "#455a64"
|
||||
|
|
|
@ -1,14 +1,10 @@
|
|||
import { removePrefix } from "./utils";
|
||||
import { baseURL } from "@/utils/constants";
|
||||
import { state } from "@/store";
|
||||
|
||||
const ssl = window.location.protocol === "https:";
|
||||
const protocol = ssl ? "wss:" : "ws:";
|
||||
|
||||
export default function command(url, command, onmessage, onclose) {
|
||||
url = removePrefix(url);
|
||||
url = `${protocol}//${window.location.host}${baseURL}/api/command${url}?auth=${state.jwt}`;
|
||||
|
||||
url = `${protocol}//${window.location.host}${baseURL}api/command${url}`;
|
||||
let conn = new window.WebSocket(url);
|
||||
conn.onopen = () => conn.send(command);
|
||||
conn.onmessage = onmessage;
|
||||
|
|
|
@ -1,32 +1,17 @@
|
|||
import { createURL, fetchURL, removePrefix } from "./utils";
|
||||
import { createURL, fetchURL, adjustedData} from "./utils";
|
||||
import { baseURL } from "@/utils/constants";
|
||||
import { removePrefix,getApiPath } from "@/utils/url.js";
|
||||
import { state } from "@/store";
|
||||
import { notify } from "@/notify";
|
||||
|
||||
// Notify if errors occur
|
||||
export async function fetch(url, content = false) {
|
||||
export async function fetchFiles(url, content = false) {
|
||||
try {
|
||||
url = removePrefix(url);
|
||||
|
||||
const res = await fetchURL(`/api/resources${url}?content=${content}`, {});
|
||||
url = removePrefix(url,"files");
|
||||
const apiPath = getApiPath("api/resources",{path: url, content: content});
|
||||
const res = await fetchURL(apiPath);
|
||||
const data = await res.json();
|
||||
data.url = `/files${url}`;
|
||||
|
||||
if (data.isDir) {
|
||||
if (!data.url.endsWith("/")) data.url += "/";
|
||||
data.items = data.items.map((item, index) => {
|
||||
item.index = index;
|
||||
item.url = `${data.url}${encodeURIComponent(item.name)}`;
|
||||
|
||||
if (item.isDir) {
|
||||
item.url += "/";
|
||||
}
|
||||
|
||||
return item;
|
||||
});
|
||||
}
|
||||
|
||||
return data;
|
||||
return adjustedData(data,url);
|
||||
} catch (err) {
|
||||
notify.showError(err.message || "Error fetching data");
|
||||
throw err;
|
||||
|
@ -35,15 +20,12 @@ export async function fetch(url, content = false) {
|
|||
|
||||
async function resourceAction(url, method, content) {
|
||||
try {
|
||||
url = removePrefix(url);
|
||||
|
||||
let opts = { method };
|
||||
|
||||
if (content) {
|
||||
opts.body = content;
|
||||
}
|
||||
|
||||
const res = await fetchURL(`/api/resources${url}`, opts);
|
||||
const apiPath = getApiPath("api/resources", { path: url });
|
||||
const res = await fetchURL(apiPath, opts);
|
||||
return res;
|
||||
} catch (err) {
|
||||
notify.showError(err.message || "Error performing resource action");
|
||||
|
@ -72,27 +54,22 @@ export async function put(url, content = "") {
|
|||
export function download(format, ...files) {
|
||||
try {
|
||||
let url = `${baseURL}/api/raw`;
|
||||
|
||||
if (files.length === 1) {
|
||||
url += removePrefix(files[0]) + "?";
|
||||
url += "?path="+removePrefix(files[0], "files");
|
||||
} else {
|
||||
let arg = "";
|
||||
|
||||
for (let file of files) {
|
||||
arg += removePrefix(file) + ",";
|
||||
arg += removePrefix(file,"files") + ",";
|
||||
}
|
||||
|
||||
arg = arg.substring(0, arg.length - 1);
|
||||
arg = encodeURIComponent(arg);
|
||||
url += `/?files=${arg}&`;
|
||||
url += `?files=${arg}`;
|
||||
}
|
||||
|
||||
if (format) {
|
||||
url += `algo=${format}&`;
|
||||
}
|
||||
|
||||
if (state.jwt) {
|
||||
url += `auth=${state.jwt}&`;
|
||||
url += `&algo=${format}`;
|
||||
}
|
||||
|
||||
window.open(url);
|
||||
|
@ -103,7 +80,7 @@ export function download(format, ...files) {
|
|||
|
||||
export async function post(url, content = "", overwrite = false, onupload) {
|
||||
try {
|
||||
url = removePrefix(url);
|
||||
url = removePrefix(url,"files");
|
||||
|
||||
let bufferContent;
|
||||
if (
|
||||
|
@ -113,11 +90,12 @@ export async function post(url, content = "", overwrite = false, onupload) {
|
|||
bufferContent = await new Response(content).arrayBuffer();
|
||||
}
|
||||
|
||||
const apiPath = getApiPath("api/resources", { path: url, override: overwrite });
|
||||
return new Promise((resolve, reject) => {
|
||||
let request = new XMLHttpRequest();
|
||||
request.open(
|
||||
"POST",
|
||||
`${baseURL}/api/resources${url}?override=${overwrite}`,
|
||||
apiPath,
|
||||
true
|
||||
);
|
||||
request.setRequestHeader("X-Auth", state.jwt);
|
||||
|
@ -148,30 +126,27 @@ export async function post(url, content = "", overwrite = false, onupload) {
|
|||
}
|
||||
}
|
||||
|
||||
function moveCopy(items, copy = false, overwrite = false, rename = false) {
|
||||
export async function moveCopy(items, action = "copy", overwrite = false, rename = false) {
|
||||
let promises = [];
|
||||
|
||||
for (let item of items) {
|
||||
const from = item.from;
|
||||
const to = encodeURIComponent(removePrefix(item.to));
|
||||
const url = `${from}?action=${
|
||||
copy ? "copy" : "rename"
|
||||
}&destination=${to}&override=${overwrite}&rename=${rename}`;
|
||||
promises.push(resourceAction(url, "PATCH"));
|
||||
let params = {
|
||||
overwrite: overwrite,
|
||||
action: action,
|
||||
rename: rename,
|
||||
}
|
||||
try {
|
||||
for (let item of items) {
|
||||
let localParams = { ...params };
|
||||
localParams.destination = item.to;
|
||||
localParams.from = item.from;
|
||||
const apiPath = getApiPath("api/resources", localParams);
|
||||
promises.push(fetch(apiPath, { method: "PATCH" }));
|
||||
}
|
||||
return promises;
|
||||
|
||||
return Promise.all(promises).catch((err) => {
|
||||
} catch (err) {
|
||||
notify.showError(err.message || "Error moving/copying resources");
|
||||
throw err;
|
||||
});
|
||||
}
|
||||
|
||||
export function move(items, overwrite = false, rename = false) {
|
||||
return moveCopy(items, false, overwrite, rename);
|
||||
}
|
||||
|
||||
export function copy(items, overwrite = false, rename = false) {
|
||||
return moveCopy(items, true, overwrite, rename);
|
||||
}
|
||||
}
|
||||
|
||||
export async function checksum(url, algo) {
|
||||
|
@ -184,27 +159,29 @@ export async function checksum(url, algo) {
|
|||
}
|
||||
}
|
||||
|
||||
export function getDownloadURL(file, inline) {
|
||||
export function getDownloadURL(path, inline) {
|
||||
try {
|
||||
const params = {
|
||||
path: path,
|
||||
...(inline && { inline: "true" }),
|
||||
};
|
||||
|
||||
return createURL("api/raw" + file.path, params);
|
||||
return createURL("api/raw", params);
|
||||
} catch (err) {
|
||||
notify.showError(err.message || "Error getting download URL");
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
export function getPreviewURL(file, size) {
|
||||
export function getPreviewURL(path, size, modified) {
|
||||
try {
|
||||
const params = {
|
||||
path: path,
|
||||
size: size,
|
||||
key: Date.parse(modified),
|
||||
inline: "true",
|
||||
key: Date.parse(file.modified),
|
||||
};
|
||||
|
||||
return createURL("api/preview/" + size + file.path, params);
|
||||
return createURL("api/preview", params);
|
||||
} catch (err) {
|
||||
notify.showError(err.message || "Error getting preview URL");
|
||||
throw err;
|
||||
|
@ -229,11 +206,10 @@ export function getSubtitlesURL(file) {
|
|||
}
|
||||
}
|
||||
|
||||
export async function usage(url) {
|
||||
export async function usage(source) {
|
||||
try {
|
||||
url = removePrefix(url);
|
||||
|
||||
const res = await fetchURL(`/api/usage${url}`, {});
|
||||
const apiPath = getApiPath("api/usage", { source: source });
|
||||
const res = await fetchURL(apiPath);
|
||||
return await res.json();
|
||||
} catch (err) {
|
||||
notify.showError(err.message || "Error fetching usage data");
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
import * as files from "./files";
|
||||
import * as share from "./share";
|
||||
import * as users from "./users";
|
||||
import * as settings from "./settings";
|
||||
import * as pub from "./pub";
|
||||
import * as filesApi from "./files";
|
||||
import * as shareApi from "./share";
|
||||
import * as usersApi from "./users";
|
||||
import * as settingsApi from "./settings";
|
||||
import * as publicApi from "./public";
|
||||
import search from "./search";
|
||||
import commands from "./commands";
|
||||
|
||||
export { files, share, users, settings, pub, commands, search };
|
||||
export { filesApi, shareApi, usersApi, settingsApi, publicApi, commands, search };
|
||||
|
|
|
@ -1,89 +0,0 @@
|
|||
import { removePrefix, createURL } from "./utils";
|
||||
import { baseURL } from "@/utils/constants";
|
||||
|
||||
export async function fetchPub(url, password = "") {
|
||||
url = removePrefix(url);
|
||||
const res = await fetch(
|
||||
`/api/public/share${url}`,
|
||||
{
|
||||
headers: {
|
||||
"X-SHARE-PASSWORD": encodeURIComponent(password),
|
||||
},
|
||||
}
|
||||
);
|
||||
if (res.status != 200) {
|
||||
const error = new Error("000 No connection");
|
||||
error.status = res.status;
|
||||
throw error;
|
||||
}
|
||||
|
||||
let data = await res.json();
|
||||
data.url = `/share${url}`;
|
||||
if (data.isDir) {
|
||||
if (!data.url.endsWith("/")) data.url += "/";
|
||||
data.items = data.items.map((item, index) => {
|
||||
item.index = index;
|
||||
item.url = `${data.url}${encodeURIComponent(item.name)}`;
|
||||
|
||||
if (item.isDir) {
|
||||
item.url += "/";
|
||||
}
|
||||
|
||||
return item;
|
||||
});
|
||||
}
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
export function download(format, hash, token, ...files) {
|
||||
let url = `${baseURL}/api/public/dl/${hash}`;
|
||||
if (files.length === 1) {
|
||||
url += encodeURIComponent(files[0]) + "?";
|
||||
} else {
|
||||
let arg = "";
|
||||
for (let file of files) {
|
||||
arg += encodeURIComponent(file) + ",";
|
||||
}
|
||||
|
||||
arg = arg.substring(0, arg.length - 1);
|
||||
arg = encodeURIComponent(arg);
|
||||
url += `/?files=${arg}&`;
|
||||
}
|
||||
|
||||
if (format) {
|
||||
url += `algo=${format}&`;
|
||||
}
|
||||
|
||||
if (token) {
|
||||
url += `token=${token}&`;
|
||||
}
|
||||
|
||||
window.open(url);
|
||||
}
|
||||
|
||||
export function getPublicUser() {
|
||||
return fetch("/api/public/publicUser")
|
||||
.then(response => {
|
||||
if (!response.ok) {
|
||||
throw new Error(`HTTP error! Status: ${response.status}`);
|
||||
}
|
||||
return response.json();
|
||||
})
|
||||
.catch(error => {
|
||||
console.error("Error fetching public user:", error);
|
||||
throw error;
|
||||
});
|
||||
}
|
||||
|
||||
export function getDownloadURL(share, inline = false) {
|
||||
const params = {
|
||||
...(inline && { inline: "true" }),
|
||||
...(share.token && { token: share.token }),
|
||||
};
|
||||
if (share.path == undefined) {
|
||||
share.path = ""
|
||||
}
|
||||
const path = share.path.replace("/share/"+share.hash +"/","")
|
||||
return createURL("api/public/dl/" + share.hash + "/"+path, params, false);
|
||||
}
|
|
@ -0,0 +1,74 @@
|
|||
import { createURL, adjustedData } from "./utils";
|
||||
import { getApiPath } from "@/utils/url.js";
|
||||
import { notify } from "@/notify";
|
||||
|
||||
// Fetch public share data
|
||||
export async function fetchPub(path, hash, password = "") {
|
||||
try {
|
||||
const params = { path, hash }
|
||||
const apiPath = getApiPath("api/public/share", params);
|
||||
const response = await fetch(apiPath, {
|
||||
headers: {
|
||||
"X-SHARE-PASSWORD": password ? encodeURIComponent(password) : "",
|
||||
},
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
const error = new Error("Failed to connect to the server.");
|
||||
error.status = response.status;
|
||||
throw error;
|
||||
}
|
||||
let data = await response.json()
|
||||
return adjustedData(data, `${hash}${path}`);
|
||||
} catch (err) {
|
||||
notify.showError(err.message || "Error fetching public share data");
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
// Download files with given parameters
|
||||
export function download(path, hash, token, format, ...files) {
|
||||
try {
|
||||
let fileInfo = files[0]
|
||||
if (files.length > 1) {
|
||||
fileInfo = files.map(encodeURIComponent).join(",");
|
||||
}
|
||||
const params = {
|
||||
path,
|
||||
hash,
|
||||
...(format && { format}),
|
||||
...(token && { token }),
|
||||
fileInfo
|
||||
};
|
||||
const url = createURL(`api/public/dl`, params, false);
|
||||
window.open(url);
|
||||
} catch (err) {
|
||||
notify.showError(err.message || "Error downloading files");
|
||||
throw err;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
// Get the public user data
|
||||
export async function getPublicUser() {
|
||||
try {
|
||||
const apiPath = getApiPath("api/public/publicUser");
|
||||
const response = await fetch(apiPath);
|
||||
return response.json();
|
||||
} catch (err) {
|
||||
notify.showError(err.message || "Error fetching public user");
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
// Generate a download URL
|
||||
export function getDownloadURL(share) {
|
||||
const params = {
|
||||
"path": share.path,
|
||||
"hash": share.hash,
|
||||
"token": share.token,
|
||||
...(share.inline && { inline: "true" }),
|
||||
};
|
||||
return createURL(`api/public/dl`, params, false);
|
||||
}
|
|
@ -1,26 +1,21 @@
|
|||
import { fetchURL, removePrefix } from "./utils";
|
||||
import url from "../utils/url";
|
||||
import { fetchURL } from "./utils";
|
||||
import { notify } from "@/notify"; // Import notify for error handling
|
||||
import { removePrefix, getApiPath } from "@/utils/url.js";
|
||||
|
||||
export default async function search(base, query) {
|
||||
try {
|
||||
base = removePrefix(base);
|
||||
base = removePrefix(base,"files");
|
||||
query = encodeURIComponent(query);
|
||||
|
||||
if (!base.endsWith("/")) {
|
||||
base += "/";
|
||||
}
|
||||
|
||||
const res = await fetchURL(`/api/search${base}?query=${query}`, {});
|
||||
|
||||
const apiPath = getApiPath("api/search", { scope: base, query: query });
|
||||
const res = await fetchURL(apiPath);
|
||||
let data = await res.json();
|
||||
|
||||
data = data.map((item) => {
|
||||
item.url = `/files${base}` + url.encodePath(item.path);
|
||||
return item;
|
||||
});
|
||||
|
||||
return data;
|
||||
return data
|
||||
} catch (err) {
|
||||
notify.showError(err.message || "Error occurred during search");
|
||||
throw err;
|
||||
|
|
|
@ -1,11 +1,15 @@
|
|||
import { fetchURL, fetchJSON } from "./utils";
|
||||
import { getApiPath } from "@/utils/url.js";
|
||||
|
||||
const apiPath = getApiPath("api/settings");
|
||||
|
||||
export function get() {
|
||||
return fetchJSON(`/api/settings`, {});
|
||||
return fetchJSON(apiPath);
|
||||
}
|
||||
|
||||
export async function update(settings) {
|
||||
await fetchURL(`/api/settings`, {
|
||||
|
||||
await fetchURL(apiPath, {
|
||||
method: "PUT",
|
||||
body: JSON.stringify(settings),
|
||||
});
|
||||
|
|
|
@ -1,27 +1,33 @@
|
|||
import { fetchURL, fetchJSON, removePrefix, createURL } from "./utils";
|
||||
import { fetchURL, fetchJSON, createURL, adjustedData } from "./utils";
|
||||
import { notify } from "@/notify";
|
||||
|
||||
export async function list() {
|
||||
return fetchJSON("/api/shares");
|
||||
return fetchJSON("api/shares");
|
||||
}
|
||||
|
||||
export async function get(url) {
|
||||
url = removePrefix(url);
|
||||
return fetchJSON(`/api/share${url}`);
|
||||
export async function get(path, hash) {
|
||||
try {
|
||||
const params = { path, hash };
|
||||
const url = createURL(`api/share`, params, false);
|
||||
let data = fetchJSON(url);
|
||||
return adjustedData(data, `api/share${path}`);
|
||||
} catch (err) {
|
||||
notify.showError(err.message || "Error fetching data");
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
export async function remove(hash) {
|
||||
await fetchURL(`/api/share/${hash}`, {
|
||||
const params = { hash };
|
||||
const url = createURL(`api/share`, params, false);
|
||||
await fetchURL(url, {
|
||||
method: "DELETE",
|
||||
});
|
||||
}
|
||||
|
||||
export async function create(url, password = "", expires = "", unit = "hours") {
|
||||
url = removePrefix(url);
|
||||
url = `/api/share${url}`;
|
||||
expires = String(expires);
|
||||
if (expires !== "") {
|
||||
url += `?expires=${expires}&unit=${unit}`;
|
||||
}
|
||||
export async function create(path, password = "", expires = "", unit = "hours") {
|
||||
const params = { path };
|
||||
const url = createURL(`api/share`, params, false);
|
||||
let body = "{}";
|
||||
if (password != "" || expires !== "" || unit !== "hours") {
|
||||
body = JSON.stringify({ password: password, expires: expires, unit: unit });
|
||||
|
|
|
@ -1,27 +1,66 @@
|
|||
import { fetchURL, fetchJSON } from "@/api/utils";
|
||||
import { getApiPath } from "@/utils/url.js";
|
||||
import { notify } from "@/notify"; // Import notify for error handling
|
||||
|
||||
export async function getAllUsers() {
|
||||
try {
|
||||
return await fetchJSON(`/api/users`, {});
|
||||
const apiPath = getApiPath("api/users");
|
||||
return await fetchJSON(apiPath);
|
||||
} catch (err) {
|
||||
notify.showError(err.message || "Failed to fetch users");
|
||||
throw err; // Re-throw to handle further if needed
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export async function get(id) {
|
||||
try {
|
||||
return await fetchJSON(`/api/users/${id}`, {});
|
||||
const apiPath = getApiPath("api/users", { id: id });
|
||||
return await fetchJSON(apiPath);
|
||||
} catch (err) {
|
||||
notify.showError(err.message || `Failed to fetch user with ID: ${id}`);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
export async function getApiKeys() {
|
||||
try {
|
||||
const apiPath = getApiPath("api/auth/tokens");
|
||||
return await fetchJSON(apiPath);
|
||||
} catch (err) {
|
||||
notify.showError(err.message || `Failed to get api keys`);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export async function createApiKey(params) {
|
||||
try {
|
||||
const apiPath = getApiPath("api/auth/token", params);
|
||||
await fetchURL(apiPath, {
|
||||
method: "PUT",
|
||||
});
|
||||
} catch (err) {
|
||||
notify.showError(err.message || `Failed to create API key`);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
export function deleteApiKey(params) {
|
||||
try {
|
||||
const apiPath = getApiPath("api/auth/token", params);
|
||||
fetchURL(apiPath, {
|
||||
method: "DELETE",
|
||||
});
|
||||
} catch (err) {
|
||||
notify.showError(err.message || `Failed to delete API key`);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
export async function create(user) {
|
||||
try {
|
||||
const res = await fetchURL(`/api/users`, {
|
||||
const res = await fetchURL(`api/users`, {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
what: "user",
|
||||
|
@ -50,7 +89,8 @@ export async function update(user, which = ["all"]) {
|
|||
if (user.username === "publicUser") {
|
||||
return;
|
||||
}
|
||||
await fetchURL(`/api/users/${user.id}`, {
|
||||
const apiPath = getApiPath("api/users", { id: user.id });
|
||||
await fetchURL(apiPath, {
|
||||
method: "PUT",
|
||||
body: JSON.stringify({
|
||||
what: "user",
|
||||
|
@ -66,7 +106,8 @@ export async function update(user, which = ["all"]) {
|
|||
|
||||
export async function remove(id) {
|
||||
try {
|
||||
await fetchURL(`/api/users/${id}`, {
|
||||
const apiPath = getApiPath("api/users", { id: id });
|
||||
await fetchURL(apiPath, {
|
||||
method: "DELETE",
|
||||
});
|
||||
} catch (err) {
|
||||
|
|
|
@@ -1,7 +1,6 @@
import { state } from "@/store";
import { renew, logout } from "@/utils/auth";
import { baseURL } from "@/utils/constants";
import { encodePath } from "@/utils/url";
import { notify } from "@/notify";

export async function fetchURL(url, opts, auth = true) {

@@ -12,11 +11,14 @@ export async function fetchURL(url, opts, auth = true) {
  let res;
  try {
    res = await fetch(`${baseURL}${url}`, {
    let userScope = "";
    if (state.user) {
      userScope = state.user.scope;
    }
    res = await fetch(url, {
      headers: {
        "X-Auth": state.jwt,
        "sessionId": state.sessionId,
        "userScope": state.user.scope,
        "userScope": userScope,
        ...headers,
      },
      ...rest,

@@ -48,30 +50,22 @@ export async function fetchURL(url, opts, auth = true) {
export async function fetchJSON(url, opts) {
  const res = await fetchURL(url, opts);
  if (res.status === 200) {
  if (res.status < 300) {
    return res.json();
  } else {
    notify.showError("unable to fetch : " + url + "status" + res.status);
    notify.showError("received status: " + res.status + " on url " + url);
    throw new Error(res.status);
  }
}

export function removePrefix(url) {
  url = url.split("/").splice(2).join("/");
  if (url === "") url = "/";
  if (url[0] !== "/") url = "/" + url;
  return url;
}

export function createURL(endpoint, params = {}, auth = true) {
export function createURL(endpoint, params = {}) {
  let prefix = baseURL;
  if (!prefix.endsWith("/")) {
    prefix = prefix + "/";
  }
  const url = new URL(prefix + encodePath(endpoint), origin);
  const url = new URL(prefix + endpoint, origin);

  const searchParams = {
    ...(auth && { auth: state.jwt }),
    ...params,
  };

@@ -81,3 +75,19 @@ export function createURL(endpoint, params = {}, auth = true) {
  return url.toString();
}

export function adjustedData(data, url) {
  data.url = url;
  if (data.type == "directory") {
    if (!data.url.endsWith("/")) data.url += "/";
    data.items = data.items.map((item, index) => {
      item.index = index;
      item.url = `${data.url}${item.name}`;
      if (item.type == "directory") {
        item.url += "/";
      }
      return item;
    });
  }
  return data
}
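To make the two new helpers above concrete, here is a small worked example of `removePrefix` and `adjustedData` on made-up inputs (the listing object is illustrative; real responses come from the files API, which is outside this hunk):

```js
// removePrefix drops the first path segment (e.g. "files" or "share"):
removePrefix("/files/projects/docs"); // -> "/projects/docs"
removePrefix("/files");               // -> "/"

// adjustedData normalizes a directory response in place:
const raw = {
  type: "directory",
  items: [
    { name: "docs", type: "directory" },
    { name: "notes.txt", type: "text" }, // "text" is an illustrative type
  ],
};

const listing = adjustedData(raw, "/files/projects");
// listing.url           -> "/files/projects/"
// listing.items[0].url  -> "/files/projects/docs/"      (index 0)
// listing.items[1].url  -> "/files/projects/notes.txt"  (index 1)
```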
@@ -33,6 +33,7 @@

<script>
import { state, mutations, getters } from "@/store";
import { removePrefix } from "@/utils/url.js";
import Action from "@/components/Action.vue";

export default {

@@ -51,7 +52,11 @@ export default {
      return getters.isCardView();
    },
    items() {
      const relativePath = state.route.path.replace(this.base, "");
      let relativePath = removePrefix(state.route.path, "files");
      if (getters.currentView() == "share") {
        // Split the path, filter out any empty elements, then join again with slashes
        relativePath = removePrefix(state.route.path, "share");
      }
      let parts = relativePath.split("/");

      if (parts[0] === "") {

@@ -96,7 +101,9 @@ export default {
      return "router-link";
    },
    showShare() {
      return state.user?.perm && state.user?.perm.share;
      return (
        state.user?.perm && state.user?.perm.share && state.user.username != "publicUser"
      );
    },
  },
  methods: {
@@ -35,17 +35,28 @@
      <div class="button" style="width: 100%">Search Context: {{ getContext }}</div>
      <!-- List of search results -->
      <ul v-show="results.length > 0">
        <li v-for="(s, k) in results" :key="k" style="cursor: pointer">
          <router-link :to="s.url">
            <i v-if="s.dir" class="material-icons folder-icons"> folder </i>
            <i v-else-if="s.audio" class="material-icons audio-icons"> volume_up </i>
            <i v-else-if="s.image" class="material-icons image-icons"> photo </i>
            <i v-else-if="s.video" class="material-icons video-icons"> movie </i>
            <i v-else-if="s.archive" class="material-icons archive-icons"> archive </i>
        <li v-for="(s, k) in results" :key="k" class="search-entry">
          <router-link :to="s.path">
            <i v-if="s.type == 'directory'" class="material-icons folder-icons">
              folder
            </i>
            <i v-else-if="s.type == 'audio'" class="material-icons audio-icons">
              volume_up
            </i>
            <i v-else-if="s.type == 'image'" class="material-icons image-icons">
              photo
            </i>
            <i v-else-if="s.type == 'video'" class="material-icons video-icons">
              movie
            </i>
            <i v-else-if="s.type == 'archive'" class="material-icons archive-icons">
              archive
            </i>
            <i v-else class="material-icons file-icons"> insert_drive_file </i>
            <span class="text-container">
              {{ basePath(s.path, s.dir) }}<b>{{ baseName(s.path) }}</b>
              {{ basePath(s.path, s.type == "directory") }}<b>{{ baseName(s.path) }}</b>
            </span>
            <div class="filesize">{{ humanSize(s.size) }}</div>
          </router-link>
        </li>
      </ul>

@@ -97,7 +108,7 @@
      <div class="searchContext">Search Context: {{ getContext }}</div>
      <div id="result-list">
      <div>
        <div>
          <div v-if="!isMobile && active">
            <!-- Button groups for filtering search results -->
            <ButtonGroup
              :buttons="folderSelect"

@@ -113,7 +124,7 @@
              :isDisabled="isTypeSelectDisabled"
            />
            <!-- Inputs for filtering by file size -->
            <div v-if="!foldersOnly" class="sizeConstraints">
            <div class="sizeConstraints">
              <div class="sizeInputWrapper">
                <p>Smaller Than:</p>
                <input

@@ -169,17 +180,28 @@
      </div>
      <!-- List of search results -->
      <ul v-show="results.length > 0">
        <li v-for="(s, k) in results" :key="k" style="cursor: pointer">
          <router-link :to="s.url">
            <i v-if="s.dir" class="material-icons folder-icons"> folder </i>
            <i v-else-if="s.audio" class="material-icons audio-icons"> volume_up </i>
            <i v-else-if="s.image" class="material-icons image-icons"> photo </i>
            <i v-else-if="s.video" class="material-icons video-icons"> movie </i>
            <i v-else-if="s.archive" class="material-icons archive-icons"> archive </i>
        <li v-for="(s, k) in results" :key="k" class="search-entry">
          <router-link :to="s.path">
            <i v-if="s.type == 'directory'" class="material-icons folder-icons">
              folder
            </i>
            <i v-else-if="s.type == 'audio'" class="material-icons audio-icons">
              volume_up
            </i>
            <i v-else-if="s.type == 'image'" class="material-icons image-icons">
              photo
            </i>
            <i v-else-if="s.type == 'video'" class="material-icons video-icons">
              movie
            </i>
            <i v-else-if="s.type == 'archive'" class="material-icons archive-icons">
              archive
            </i>
            <i v-else class="material-icons file-icons"> insert_drive_file </i>
            <span class="text-container">
              {{ basePath(s.path, s.dir) }}<b>{{ baseName(s.path) }}</b>
              {{ basePath(s.path, s.type == "directory") }}<b>{{ baseName(s.path) }}</b>
            </span>
            <div class="filesize">{{ humanSize(s.size) }}</div>
          </router-link>
        </li>
      </ul>

@@ -191,6 +213,7 @@
import ButtonGroup from "./ButtonGroup.vue";
import { search } from "@/api";
import { getters, mutations, state } from "@/store";
import { getHumanReadableFilesize } from "@/utils/filesizes";

var boxes = {
  folder: { label: "folders", icon: "folder" },

@@ -314,6 +337,9 @@ export default {
    },
  },
  methods: {
    humanSize(size) {
      return getHumanReadableFilesize(size);
    },
    basePath(str, isDir) {
      let parts = str.replace(/(\/$|^\/)/, "").split("/");
      if (parts.length <= 1) {

@@ -336,11 +362,13 @@ export default {
    },
    open() {
      if (!this.active) {
        this.resetSearchFilters();
        mutations.showHover("search");
      }
    },
    close(event) {
      this.value = "";

      event.stopPropagation();
      mutations.closeHovers();
    },

@@ -390,10 +418,10 @@ export default {
        return;
      }
      let searchTypesFull = this.searchTypes;
      if (this.largerThan != "" && !this.isTypeSelectDisabled) {
      if (this.largerThan != "") {
        searchTypesFull = searchTypesFull + "type:largerThan=" + this.largerThan + " ";
      }
      if (this.smallerThan != "" && !this.isTypeSelectDisabled) {
      if (this.smallerThan != "") {
        searchTypesFull = searchTypesFull + "type:smallerThan=" + this.smallerThan + " ";
      }
      let path = state.route.path;

@@ -536,6 +564,15 @@ export default {
  /* IE and Edge */
}

.search-entry {
  cursor: pointer;
  border-radius: 0.25em;
}

.search-entry:hover {
  background-color: var(--surfacePrimary);
}

.text-container {
  white-space: nowrap;
  overflow: hidden;

@@ -611,10 +648,6 @@ body.rtl #search #result ul > * {
  border-bottom-left-radius: 0;
}

input.sizeInput:disabled {
  cursor: not-allowed;
}

/* Search Input Placeholder */
#search::-webkit-input-placeholder {
  color: rgba(255, 255, 255, 0.5);

@@ -698,31 +731,6 @@ body.rtl #search .boxes h3 {
  justify-content: center;
}

.sizeInput {
  height: 100%;
  text-align: center;
  width: 5em;
  border-radius: 1em;
  padding: 1em;
  backdrop-filter: invert(0.1);
  border: none !important;
}

.sizeInputWrapper {
  border-radius: 1em;
  margin-left: 0.5em;
  margin-right: 0.5em;
  display: -ms-flexbox;
  display: flex;
  background-color: rgb(245, 245, 245);
  padding: 0.25em;
  height: 3em;
  -webkit-box-align: center;
  -ms-flex-align: center;
  align-items: center;
  border: 1px solid #ccc;
}

.helpButton {
  position: absolute;
  right: 10px;

@@ -740,4 +748,13 @@ body.rtl #search .boxes h3 {
  justify-content: center;
  align-items: center;
}

.filesize {
  background-color: var(--surfaceSecondary);
  border-radius: 1em;
  padding: 0.25em;
  padding-left: 0.5em;
  padding-right: 0.5em;
  min-width: fit-content;
}
</style>
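Two things worth noting from the search changes above: results now carry `path`, `type`, and `size` (hence the type-based icons and the new filesize badge), and the size filters are folded into the query as `type:` directives. A rough trace of the filter string the component builds from illustrative inputs (how it is ultimately combined with the typed terms and sent to the `search` API is outside this diff):

```js
// Illustrative component state.
const searchTypes = "type:image ";  // set by the type ButtonGroup
const largerThan = "100";           // "Larger Than" input
const smallerThan = "";             // empty, so it is skipped

let searchTypesFull = searchTypes;
if (largerThan != "") {
  searchTypesFull = searchTypesFull + "type:largerThan=" + largerThan + " ";
}
if (smallerThan != "") {
  searchTypesFull = searchTypesFull + "type:smallerThan=" + smallerThan + " ";
}

// searchTypesFull -> "type:image type:largerThan=100 "
// (assumption) the typed search text is appended to this prefix before querying
```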
@@ -1,7 +1,7 @@
<template>
  <component
    :is="quickNav ? 'a' : 'div'"
    :href="quickNav ? url : undefined"
    :href="quickNav ? getUrl() : undefined"
    :class="{
      item: true,
      activebutton: isMaximized && isSelected,

@@ -16,6 +16,7 @@
    :data-type="type"
    :aria-label="name"
    :aria-selected="isSelected"
    @contextmenu="onRightClick"
    @click="quickNav ? toggleClick() : itemClick($event)"
  >
    <div @click="toggleClick" :class="{ activetitle: isMaximized && isSelected }">

@@ -46,17 +47,21 @@
.activebutton {
  height: 10em;
}

.activecontent {
  height: 5em !important;
  display: grid !important;
}

.activeimg {
  width: 8em !important;
  height: 8em !important;
}

.iconActive {
  font-size: 6em !important;
}

.activetitle {
  width: 9em !important;
  margin-right: 1em !important;

@@ -67,9 +72,10 @@
import { enableThumbs } from "@/utils/constants";
import { getHumanReadableFilesize } from "@/utils/filesizes";
import { fromNow } from "@/utils/moment";
import { files as api } from "@/api";
import { filesApi } from "@/api";
import * as upload from "@/utils/upload";
import { state, getters, mutations } from "@/store"; // Import your custom store
import { baseURL } from "@/utils/constants";

export default {
  name: "item",

@@ -129,12 +135,7 @@ export default {
      if (state.req.path == "/") {
        path = "";
      }
      const file = {
        path: path + "/" + this.name,
        modified: this.modified,
      };

      return api.getPreviewURL(file, "thumb");
      return filesApi.getPreviewURL(path + "/" + this.name, "small", state.req.modified);
    },
    isThumbsEnabled() {
      return enableThumbs;

@@ -157,6 +158,24 @@
    }
  },
  methods: {
    getUrl() {
      return baseURL.slice(0, -1) + this.url;
    },
    onRightClick(event) {
      event.preventDefault(); // Prevent default context menu

      // If no items are selected, select the right-clicked item
      if (getters.selectedCount() === 0) {
        mutations.addSelected(this.index);
      }
      mutations.showHover({
        name: "ContextMenu",
        props: {
          posX: event.clientX,
          posY: event.clientY,
        },
      });
    },
    handleIntersect(entries, observer) {
      entries.forEach((entry) => {
        if (entry.isIntersecting) {

@@ -227,21 +246,16 @@ export default {
          name: state.req.items[i].name,
        });
      }
      let response = await filesApi.fetchFiles(el.__vue__.url);

      // Get url from ListingItem instance
      let path = el.__vue__.url;
      let baseItems = (await api.fetch(path)).items;

      let action = (overwrite, rename) => {
        api
          .move(items, overwrite, rename)
          .then(() => {
            mutations.setReload(true);
          })
          .catch(showError);
      let action = async (overwrite, rename) => {
        await filesApi.moveCopy(items, "move", overwrite, rename);
        setTimeout(() => {
          mutations.setReload(true);
        }, 50);
      };

      let conflict = upload.checkConflict(items, baseItems);
      let conflict = upload.checkConflict(items, response.items);

      let overwrite = false;
      let rename = false;

@@ -258,14 +272,12 @@ export default {
            action(overwrite, rename);
          },
        });

        return;
      }

      action(overwrite, rename);
    },
    itemClick(event) {
      console.log("should say something");
      if (this.singleClick && !state.multiple) this.open();
      else this.click(event);
    },
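Moving items onto a folder now funnels through the shared `filesApi.moveCopy` helper seen above. A condensed sketch of the call shape, using illustrative paths and assuming the same `filesApi`/`upload` imports as the component (the helper's implementation is not part of this diff):

```js
async function moveIntoFolder() {
  // Items are plain { from, to, name } records, as built from the selection above.
  const items = [
    {
      from: "/files/photos/cat.jpg",
      to: "/files/archive/" + encodeURIComponent("cat.jpg"),
      name: "cat.jpg",
    },
  ];

  // Check the destination listing for name clashes first.
  const destination = await filesApi.fetchFiles("/files/archive/");
  const conflict = upload.checkConflict(items, destination.items);

  if (!conflict) {
    // "move" or "copy", plus the overwrite/rename flags chosen in the prompt.
    await filesApi.moveCopy(items, "move", false, false);
  }
}
```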
@@ -0,0 +1,111 @@
<template>
  <div class="card floating">
    <div class="card-title">
      <h2>API Key Details</h2>
    </div>

    <div class="card-content">
      <button
        class="action copy-clipboard"
        :data-clipboard-text="info.key"
        :aria-label="$t('buttons.copyToClipboard')"
        :title="$t('buttons.copyToClipboard')"
      >
        API Key Name : {{ name }}
        <i class="material-icons">content_paste</i>
      </button>

      <h3>Created At :</h3>
      {{ formatTime(info.created) }}
      <h3>Expires At :</h3>
      {{ formatTime(info.expires) }}
      <h3>Permissions:</h3>
      <table>
        <tbody>
          <tr v-for="(isEnabled, perm) in info.Permissions" :key="perm">
            <td>{{ perm }}</td>
            <td>{{ isEnabled ? "✓" : "✗" }}</td>
          </tr>
        </tbody>
      </table>
    </div>

    <div class="card-action">
      <button
        class="button button--flat button--grey"
        @click="closeHovers"
        :aria-label="$t('buttons.close')"
        :title="$t('buttons.close')"
      >
        {{ $t("buttons.close") }}
      </button>
      <button
        class="button button--flat button--red"
        @click="deleteApi"
        :title="$t('buttons.delete')"
      >
        {{ $t("buttons.delete") }}
      </button>
    </div>
  </div>
</template>

<script>
import { mutations } from "@/store";
import { notify } from "@/notify";
import { usersApi } from "@/api";

export default {
  name: "ActionApi",
  props: {
    name: {
      type: String,
      required: true,
    },
    info: {
      type: Object,
      required: true,
    },
  },
  methods: {
    formatTime(timestamp) {
      return new Date(timestamp * 1000).toLocaleDateString("en-US", {
        year: "numeric",
        month: "long",
        day: "numeric",
      });
    },
    closeHovers() {
      mutations.closeHovers();
    },
    deleteApi() {
      // Delete the key by name, then reload so the settings list refreshes
      try {
        usersApi.deleteApiKey({ name: this.name });
        notify.showSuccess("API key deleted!");
        window.location.reload();
      } catch (error) {
        notify.showError(error);
      }
    },
  },
};
</script>

<style>
/* Basic styling for the prompt */
.card.floating {
  padding: 20px;
  box-shadow: 0 4px 8px rgba(0, 0, 0, 0.1);
}
.card-content {
  margin-bottom: 16px;
}
.card-action {
  display: flex;
  justify-content: flex-end;
}
.card-action .button {
  margin-left: 8px;
}
</style>
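The prompt above expects an `info` object shaped like the token records in the settings page, presumably what `getApiKeys` returns (field names come from the template; the values below are illustrative), with Unix-second timestamps that `formatTime` turns into a readable date:

```js
// Illustrative token record, matching the fields the template reads.
const info = {
  key: "xxxxxxxx",             // the secret shown by the copy-to-clipboard button
  created: 1718000000,         // Unix seconds
  expires: 1720592000,         // Unix seconds
  Permissions: { api: true, share: false },
};

// formatTime multiplies by 1000 because Date expects milliseconds:
new Date(info.created * 1000).toLocaleDateString("en-US", {
  year: "numeric",
  month: "long",
  day: "numeric",
}); // -> roughly "June 10, 2024"
```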
@@ -49,10 +49,10 @@
<script>
import { mutations, state } from "@/store";
import FileList from "./FileList.vue";
import { files as api } from "@/api";
import { filesApi } from "@/api";
import buttons from "@/utils/buttons";
import * as upload from "@/utils/upload";
import { notify } from "@/notify";
//import { notify } from "@/notify";

export default {
  name: "copy",

@@ -77,33 +77,19 @@ export default {
      let items = [];

      // Create a new promise for each file.
      for (let item of this.selected) {
      for (let item of state.selected) {
        items.push({
          from: store.req.items[item].url,
          to: this.dest + encodeURIComponent(store.req.items[item].name),
          name: store.req.items[item].name,
          from: state.req.items[item].url,
          to: this.dest + encodeURIComponent(state.req.items[item].name),
          name: state.req.items[item].name,
        });
      }

      let action = async (overwrite, rename) => {
        buttons.loading("copy");

        await api
          .copy(items, overwrite, rename)
          .then(() => {
            buttons.success("copy");

            if (state.route.path === this.dest) {
              mutations.setReload(true);
              return;
            }

            this.$router.push({ path: this.dest });
          })
          .catch((e) => {
            buttons.done("copy");
            notify.showError(e);
          });
        await filesApi.moveCopy(items, "copy", overwrite, rename);
        this.$router.push({ path: this.dest });
        mutations.setReload(true);
      };

      if (state.route.path === this.dest) {

@@ -113,7 +99,7 @@ export default {
        return;
      }

      let dstItems = (await api.fetch(this.dest)).items;
      let dstItems = (await filesApi.fetchFiles(this.dest)).items;
      let conflict = upload.checkConflict(items, dstItems);

      let overwrite = false;
@@ -0,0 +1,122 @@
<template>
  <div class="card floating create-api__prompt__card" id="create-api">
    <div class="card-title">
      <h2>Create API Key</h2>
    </div>

    <div class="card-content">
      <!-- API Key Name Input -->
      <p>API Key Name</p>
      <input
        class="input input--block"
        type="text"
        v-model.trim="apiName"
        placeholder="enter a unique api key name"
      />

      <!-- Duration Input -->
      <p>Token Duration</p>
      <div class="inputWrapper">
        <input
          class="sizeInput roundedInputLeft input"
          v-model.number="duration"
          type="number"
          min="1"
          placeholder="number"
        />
        <select v-model="unit" class="roundedInputRight input">
          <option value="days">days</option>
          <option value="months">months</option>
        </select>
      </div>

      <!-- Permissions Input -->
      <p>
        Choose at least one permission for the key. Your user must also have the
        permission.
      </p>
      <div>
        <p v-for="(isEnabled, perm) in availablePermissions" :key="perm">
          <input type="checkbox" v-model="permissions[perm]" />
          {{ perm }}
        </p>
      </div>
    </div>

    <div class="card-action">
      <button
        @click="closeHovers"
        class="button button--flat button--grey"
        :aria-label="$t('buttons.cancel')"
        :title="$t('buttons.cancel')"
      >
        {{ $t("buttons.cancel") }}
      </button>
      <button
        class="button button--flat button--blue"
        @click="createAPIKey"
        :title="$t('buttons.create')"
      >
        {{ $t("buttons.create") }}
      </button>
    </div>
  </div>
</template>

<script>
import { mutations, state } from "@/store";
import { notify } from "@/notify";
import { usersApi } from "@/api";

export default {
  name: "CreateAPI",
  data() {
    return {
      apiName: "",
      duration: 1,
      unit: "days",
      permissions: {},
    };
  },
  computed: {
    availablePermissions() {
      return state.user.perm;
    },
    durationInDays() {
      // Calculate duration based on unit
      return this.unit === "days" ? this.duration : this.duration * 30; // assuming 30 days per month
    },
  },
  created() {
    // Initialize permissions with the same structure as availablePermissions
    this.permissions = Object.fromEntries(
      Object.keys(this.availablePermissions).map((perm) => [perm, false])
    );
  },
  methods: {
    closeHovers() {
      mutations.closeHovers();
    },
    async createAPIKey() {
      try {
        // Filter to get keys of permissions set to true and join them as a comma-separated string
        const permissionsString = Object.keys(this.permissions)
          .filter((key) => this.permissions[key])
          .join(",");

        const params = {
          name: this.apiName,
          days: this.durationInDays,
          permissions: permissionsString,
        };

        await usersApi.createApiKey(params);
        notify.showSuccess("successfully created!");
        window.location.reload();
      } catch (error) {
        notify.showError(this.$t("errors.createKeyFailed"));
      }
    },
  },
};
</script>
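To make the mapping above concrete: for a key named `backups`, a duration of 2 months, and the `api` and `share` boxes ticked, the prompt ends up sending roughly the following (permission names come from the user's own `perm` object, so the exact set varies):

```js
// permissions state after ticking two boxes (keys mirror state.user.perm)
const permissions = { admin: false, api: true, share: true, modify: false };

const permissionsString = Object.keys(permissions)
  .filter((key) => permissions[key])
  .join(","); // -> "api,share"

const params = {
  name: "backups",
  days: 60, // 2 months * 30 days, per durationInDays above
  permissions: permissionsString,
};

// Handed to usersApi.createApiKey(params), which PUTs api/auth/token with these values.
```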
@@ -30,7 +30,7 @@
</template>

<script>
import { files as api } from "@/api";
import { filesApi } from "@/api";
import buttons from "@/utils/buttons";
import { state, getters, mutations } from "@/store";
import { notify } from "@/notify";

@@ -57,7 +57,7 @@ export default {

      try {
        if (!this.isListing) {
          await api.remove(state.route.path);
          await filesApi.remove(state.route.path);
          buttons.success("delete");
          showSuccess("Deleted item successfully");

@@ -74,7 +74,7 @@ export default {

        let promises = [];
        for (let index of state.selected) {
          promises.push(api.remove(state.req.items[index].url));
          promises.push(filesApi.remove(state.req.items[index].url));
        }

        await Promise.all(promises);
@@ -20,7 +20,7 @@
  </div>
</template>
<script>
import { users as api } from "@/api";
import { usersApi } from "@/api";
import { notify } from "@/notify";
import buttons from "@/utils/buttons";
import { state, mutations, getters } from "@/store";

@@ -39,7 +39,7 @@ export default {
    async deleteUser(event) {
      event.preventDefault();
      try {
        await api.remove(this.user.id);
        await usersApi.remove(this.user.id);
        this.$router.push({ path: "/settings", hash: "#users-main" });
        notify.showSuccess(this.$t("settings.userDeleted"));
      } catch (e) {

@@ -56,7 +56,7 @@ export default {

      try {
        if (!this.isListing) {
          await api.remove(this.$route.path);
          await usersApi.remove(this.$route.path);
          buttons.success("delete");

          this.currentPrompt?.confirm();

@@ -72,7 +72,7 @@ export default {

        let promises = [];
        for (let index of this.selected) {
          promises.push(api.remove(state.req.items[index].url));
          promises.push(usersApi.remove(state.req.items[index].url));
        }

        await Promise.all(promises);
@@ -22,8 +22,8 @@

<script>
import { state, mutations } from "@/store";
import url from "@/utils/url";
import { files } from "@/api";
import url from "@/utils/url.js";
import { filesApi } from "@/api";

export default {
  name: "file-list",

@@ -71,8 +71,7 @@ export default {
      // Otherwise we add every directory to the
      // move options.
      for (let item of req.items) {
        if (!item.isDir) continue;

        if (item.type != "directory") continue;
        this.items.push({
          name: item.name,
          url: item.url,

@@ -84,8 +83,7 @@ export default {
      // just clicked in and fill the options with its
      // content.
      let uri = event.currentTarget.dataset.url;

      files.fetch(uri).then(this.fillOptions);
      filesApi.fetchFiles(uri).then(this.fillOptions);
    },
    touchstart(event) {
      let url = event.currentTarget.dataset.url;
@@ -73,7 +73,7 @@
<script>
import { getHumanReadableFilesize } from "@/utils/filesizes";
import { formatTimestamp } from "@/utils/moment";
import { files as api } from "@/api";
import { filesApi } from "@/api";
import { state, getters, mutations } from "@/store"; // Import your custom store

export default {

@@ -128,8 +128,8 @@ export default {
      return (
        getters.selectedCount() > 1 ||
        (getters.selectedCount() === 0
          ? state.req.isDir
          : state.req.items[this.selected[0]].isDir)
          ? state.req.type == "directory"
          : state.req.items[this.selected[0]].type == "directory")
      );
    },
  },

@@ -145,7 +145,7 @@ export default {
        link = state.route.path;
      }

      const hash = await api.checksum(link, algo);
      const hash = await filesApi.checksum(link, algo);
      event.target.innerHTML = hash;
    },
  },
@@ -49,7 +49,7 @@
<script>
import { mutations, state } from "@/store";
import FileList from "./FileList.vue";
import { files as api } from "@/api";
import { filesApi } from "@/api";
import buttons from "@/utils/buttons";
import * as upload from "@/utils/upload";
import { notify } from "@/notify";

@@ -79,48 +79,42 @@ export default {
      for (let item of state.selected) {
        items.push({
          from: state.req.items[item].url,
          to: this.dest + encodeURIComponent(state.req.items[item].name),
          to: this.dest + state.req.items[item].name,
          name: state.req.items[item].name,
        });
      }

      let action = async (overwrite, rename) => {
        buttons.loading("move");
        await api
          .move(items, overwrite, rename)
          .then(() => {
            buttons.success("move");
            this.$router.push({ path: this.dest });
            mutations.setReload(true);
          })
          .catch((e) => {
            buttons.done("move");
            notify.showError(e);
          });
        await filesApi.moveCopy(items, "move", overwrite, rename);
        buttons.success("move");
        this.$router.push({ path: this.dest });
        mutations.closeHovers();
      };

      let dstItems = (await api.fetch(this.dest)).items;
      let dstItems = (await filesApi.fetchFiles(this.dest)).items;
      let conflict = upload.checkConflict(items, dstItems);

      let overwrite = false;
      let rename = false;

      if (conflict) {
        mutations.showHover({
          name: "replace-rename",
          confirm: (event, option) => {
            overwrite = option == "overwrite";
            rename = option == "rename";
            event.preventDefault();
            mutations.closeHovers();
            action(overwrite, rename);
            mutations.setReload(true);
          },
        });
        return;
      try {
        if (conflict) {
          mutations.showHover({
            name: "replace-rename",
            confirm: (event, option) => {
              overwrite = option == "overwrite";
              rename = option == "rename";
              event.preventDefault();
              action(overwrite, rename);
            },
          });
          return;
        }
        action(overwrite, rename);
      } catch (e) {
        notify.error(e);
      }

      action(overwrite, rename);
      return;
    },
  },
};
@@ -36,8 +36,8 @@
  </div>
</template>
<script>
import { files as api } from "@/api";
import url from "@/utils/url";
import { filesApi } from "@/api";
import url from "@/utils/url.js";
import { getters, mutations, state } from "@/store"; // Import your custom store

export default {

@@ -86,11 +86,11 @@ export default {
      uri += encodeURIComponent(this.name) + "/";
      uri = uri.replace("//", "/");

      await api.post(uri);
      await filesApi.post(uri);
      if (this.redirect) {
        this.$router.push({ path: uri });
      } else if (!this.base) {
        const res = await api.fetch(url.removeLastDir(uri) + "/");
        const res = await filesApi.fetchFiles(url.removeLastDir(uri) + "/");
        mutations.updateRequest(res);
      }
@@ -37,8 +37,8 @@
</template>
<script>
import { state } from "@/store";
import { files as api } from "@/api";
import url from "@/utils/url";
import { filesApi } from "@/api";
import url from "@/utils/url.js";
import { getters, mutations } from "@/store"; // Import your custom store

export default {

@@ -73,7 +73,7 @@ export default {
      uri += encodeURIComponent(this.name);
      uri = uri.replace("//", "/");

      await api.post(uri);
      await filesApi.post(uri);
      this.$router.push({ path: uri });

      mutations.closeHovers();
@ -25,6 +25,8 @@ import Share from "./Share.vue";
|
|||
import Upload from "./Upload.vue";
|
||||
import ShareDelete from "./ShareDelete.vue";
|
||||
import DeleteUser from "./DeleteUser.vue";
|
||||
import CreateApi from "./CreateApi.vue";
|
||||
import ActionApi from "./ActionApi.vue";
|
||||
import Sidebar from "../sidebar/Sidebar.vue";
|
||||
import buttons from "@/utils/buttons";
|
||||
import { state, getters, mutations } from "@/store"; // Import your custom store
|
||||
|
@ -48,6 +50,8 @@ export default {
|
|||
ShareDelete,
|
||||
Sidebar,
|
||||
DeleteUser,
|
||||
CreateApi,
|
||||
ActionApi,
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
|
|
|
@@ -40,8 +40,8 @@
  </div>
</template>
<script>
import url from "@/utils/url";
import { files as api } from "@/api";
import url from "@/utils/url.js";
import { filesApi } from "@/api";
import { state, getters, mutations } from "@/store";

export default {

@@ -98,13 +98,15 @@ export default {

      newLink = url.removeLastDir(oldLink) + "/" + encodeURIComponent(this.name);

      await api.move([{ from: oldLink, to: newLink }]);
      await filesApi.moveCopy([{ from: oldLink, to: newLink }], "move");
      if (!this.isListing) {
        this.$router.push({ path: newLink });
        return;
      }

      mutations.setReload(true);
      setTimeout(() => {
        mutations.setReload(true);
      }, 50);

      mutations.closeHovers();
    },
@@ -122,7 +122,7 @@
<script>
import { notify } from "@/notify";
import { state, getters, mutations } from "@/store";
import { share as api, pub as pub_api } from "@/api";
import { shareApi, publicApi } from "@/api";
import { fromNow } from "@/utils/moment";
import Clipboard from "clipboard";

@@ -134,6 +134,7 @@ export default {
      unit: "hours",
      links: [],
      clip: null,
      subpath: "",
      password: "",
      listing: true,
    };

@@ -165,16 +166,32 @@ export default {
      return state.req.items[this.selected[0]].url;
    },
    getContext() {
      let path = state.route.path.replace("/files/", "./");
      const prefix = `/files/`;
      let path = state.route.path.replace(prefix, "./");
      if (getters.selectedCount() === 1) {
        path = path + state.req.items[this.selected[0]].name;
      }
      return path;
      return decodeURIComponent(path);
    },
  },
  async beforeMount() {
    const links = await api.get(this.url);
    this.links = links;
    try {
      const prefix = `/files`;
      let path = state.route.path.startsWith(prefix)
        ? state.route.path.slice(prefix.length)
        : state.route.path;
      path = decodeURIComponent(path);
      if (path == "") {
        path = "/";
      }
      this.subpath = path;
      // get last element of the path
      const links = await shareApi.get(this.subpath);
      this.links = links;
    } catch (err) {
      notify.showError(err);
      return;
    }
    this.sort();

    if (this.links.length === 0) {

@@ -197,9 +214,9 @@ export default {
      let res = null;

      if (isPermanent) {
        res = await api.create(this.url, this.password);
        res = await shareApi.create(this.subpath, this.password);
      } else {
        res = await api.create(this.url, this.password, this.time, this.unit);
        res = await shareApi.create(this.subpath, this.password, this.time, this.unit);
      }

      this.links.push(res);

@@ -213,9 +230,8 @@ export default {
    },
    async deleteLink(event, link) {
      event.preventDefault();
      await api.remove(link.hash);
      await shareApi.remove(link.hash);
      this.links = this.links.filter((item) => item.hash !== link.hash);

      if (this.links.length === 0) {
        this.listing = false;
      }

@@ -224,13 +240,13 @@ export default {
      return fromNow(time, state.user.locale);
    },
    buildLink(share) {
      return api.getShareURL(share);
      return shareApi.getShareURL(share);
    },
    hasDownloadLink() {
      return this.selected.length === 1 && !state.req.items[this.selected[0]].isDir;
    },
    buildDownloadLink(share) {
      return pub_api.getDownloadURL(share);
      return publicApi.getDownloadURL(share);
    },
    sort() {
      this.links = this.links.sort((a, b) => {
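For completeness, the share flow above reduces to a handful of calls. A small sketch, assuming a path of `/docs/report.pdf` and that the `shareApi` signatures match the call sites in this diff (its implementation is not shown here):

```js
// Wrapped in an async helper so the awaits are valid.
async function demoShare() {
  // Share that expires 3 hours from now, protected by a password.
  const timed = await shareApi.create("/docs/report.pdf", "s3cret", 3, "hours");

  // Permanent share: no time/unit arguments.
  const permanent = await shareApi.create("/docs/report.pdf", "");

  // User-facing link; v0.3.0 share hashes are UUID-based rather than base64.
  const link = shareApi.getShareURL(timed);

  // Revoke a share by its hash.
  await shareApi.remove(permanent.hash);
  return link;
}
```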
@@ -88,7 +88,7 @@ export default {
      });
    }

    const path = getters.getRoutePath();
    const path = getters.routePath();
    const conflict = upload.checkConflict(uploadFiles, state.req.items);

    if (conflict) {
@@ -36,6 +36,10 @@
      <input type="checkbox" :disabled="admin" v-model="perm.share" />
      {{ $t("settings.perm.share") }}
    </p>
    <p>
      <input type="checkbox" v-model="perm.api" />
      {{ $t("settings.perm.api") }}
    </p>
  </div>
</template>
@@ -101,7 +101,6 @@ export default {
  watch: {
    user: {
      handler(newUser) {
        console.log("UserForm: user changed", newUser);
        this.localUser = { ...newUser }; // Watch for changes in the parent and update the local copy
      },
      immediate: true,
Some files were not shown because too many files have changed in this diff.