updated to better index files
parent 8d6c468a0c
commit 126561a464
Dockerfile (12 changed lines)

@@ -7,24 +7,16 @@ RUN npm run build
 FROM golang:alpine as base
 WORKDIR /app
 COPY ./src/backend ./
-RUN go build -ldflags='-w -s' -o filebrowser .
+RUN go build -ldflags="-w -s" -o filebrowser .
 
 FROM alpine:latest
 RUN apk --no-cache add \
   ca-certificates \
-  mailcap \
-  curl
-
-HEALTHCHECK --start-period=2s --interval=5s --timeout=3s \
-  CMD curl -f http://localhost/health || exit 1
-
+  mailcap
 VOLUME /srv
 EXPOSE 80
 
 WORKDIR /app
 
 COPY --from=base /app/docker_config.json ./.filebrowser.json
 COPY --from=base /app/filebrowser ./filebrowser
 COPY --from=nbuild /app/dist/ ./frontend/dist/
 
 ENTRYPOINT [ "./filebrowser" ]
README.md (12 changed lines)

@@ -5,18 +5,18 @@
 This fork makes the following significant changes to filebrowser for origin:
 
 1. [x] Improves search to use index instead of filesystem.
-    - [x] lightning fast
-    - [ ] realtime results as you type
+    - [x] Lightning fast
+    - [x] Realtime results as you type
+    - [ ] Works with file type filter
 1. [ ] Preview enhancements
     - preview default view is constrained to files subwindow,
       which can be toggled to fullscreen.
-1. [ ] Updated node version and dependencies
+1. [ ] Updated version and dependencies
     - [ ] uses latest npm and node version
     - [ ] removes deprecated npm packages
-1. [ ] Improved routing
-    - fixed bugs in original version
+    - [x] Updates golang dependencies
 1. [ ] Added authentication type
-    - Using bearer token with remote authentication server
+    - [ ] Using bearer token with remote authentication server
 
 ## About
 
@@ -13,6 +13,7 @@ import (
     "path/filepath"
     "strings"
     "syscall"
+    "strconv"
 
     homedir "github.com/mitchellh/go-homedir"
     "github.com/spf13/afero"
@@ -49,13 +50,9 @@ func init() {
     rootCmd.SetVersionTemplate("File Browser version {{printf \"%s\" .Version}}\n")
 
     flags := rootCmd.Flags()
-    // initialize indexing and schedule indexing ever n minutes
-    indexInterval, err := flags.GetUint32("indexing-interval") //nolint:govet
-    if err != nil {
-        log.Println(err)
-        indexInterval = 60
-    }
-    go search.InitializeIndex(indexInterval)
+    // initialize indexing and schedule indexing ever n minutes (default 5)
+    indexingInterval := getEnvVariableAsUint32("INDEXING_INTERVAL")
+    go search.InitializeIndex(indexingInterval)
 
     persistent := rootCmd.PersistentFlags()
 
@@ -68,6 +65,15 @@ func init() {
     addServerFlags(flags)
 }
 
+func getEnvVariableAsUint32(key string) uint32 {
+    valueStr := os.Getenv(key)
+    value, err := strconv.ParseUint(valueStr, 10, 32)
+    if err != nil {
+        return 5 // default value every 5 minutes
+    }
+    return uint32(value)
+}
+
 func addServerFlags(flags *pflag.FlagSet) {
     flags.StringP("address", "a", "127.0.0.1", "address to listen on")
     flags.StringP("log", "l", "stdout", "log output")
@@ -77,7 +83,6 @@ func addServerFlags(flags *pflag.FlagSet) {
     flags.StringP("root", "r", ".", "root to prepend to relative paths")
     flags.String("socket", "", "socket to listen to (cannot be used with address, port, cert nor key flags)")
    flags.Uint32("socket-perm", 0666, "unix socket file permissions") //nolint:gomnd
-    flags.Uint32("indexing-interval", 60, "how frequently to index files, in minutes") //nolint:gomnd
     flags.StringP("baseurl", "b", "", "base url")
     flags.String("cache-dir", "", "file cache directory (disabled if empty)")
     flags.Int("img-processors", 4, "image processors count") //nolint:gomnd
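With this change the indexing interval is read from the INDEXING_INTERVAL environment variable rather than the removed --indexing-interval flag, and any unset or non-numeric value falls back to 5 minutes. A minimal, self-contained sketch of that same parse-with-default behaviour (the standalone program below is illustrative only; it simply mirrors getEnvVariableAsUint32 from the diff above):

```go
package main

import (
	"fmt"
	"os"
	"strconv"
)

// envAsUint32 mirrors getEnvVariableAsUint32 above: an unset or
// malformed value falls back to the 5-minute default.
func envAsUint32(key string) uint32 {
	value, err := strconv.ParseUint(os.Getenv(key), 10, 32)
	if err != nil {
		return 5
	}
	return uint32(value)
}

func main() {
	fmt.Println(envAsUint32("INDEXING_INTERVAL")) // 5 when unset
	os.Setenv("INDEXING_INTERVAL", "15")
	fmt.Println(envAsUint32("INDEXING_INTERVAL")) // 15
	os.Setenv("INDEXING_INTERVAL", "soon")
	fmt.Println(envAsUint32("INDEXING_INTERVAL")) // 5 again: parse error
}
```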
Binary file not shown.
@@ -8,8 +8,9 @@ import (
 var searchHandler = withUser(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
     response := []map[string]interface{}{}
     query := r.URL.Query().Get("query")
-    files, dirs := search.IndexedSearch(query,r.URL.Path)
+    var files []string
+    var dirs []string
+    files, dirs = search.IndexedSearch(query,r.URL.Path,&files,&dirs)
     for _,v := range(files){
         response = append(response, map[string]interface{}{
             "dir": false,
@@ -22,6 +23,8 @@ var searchHandler = withUser(func(w http.ResponseWriter, r *http.Request, d *dat
             "path": v,
         })
     }
+    files = files[:0]
+    dirs = dirs[:0]
     // err := search.Search(d.user.Fs, r.URL.Path, query, d, func(path string, f os.FileInfo) error {
     //     response = append(response, map[string]interface{}{
     //         "dir": f.IsDir(),
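The handler above returns a flat JSON array in which each hit carries a dir flag and a path. A small, self-contained sketch of that response shape (the sample paths are invented; in the real handler the values come from IndexedSearch):

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Same shape the handler builds: one object per search hit.
	response := []map[string]interface{}{
		{"dir": false, "path": "docs/report-2023.txt"}, // hypothetical file hit
		{"dir": true, "path": "docs/reports"},          // hypothetical directory hit
	}
	out, _ := json.MarshalIndent(response, "", "  ")
	fmt.Println(string(out))
}
```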
@@ -5,43 +5,55 @@ import (
     "os"
     "path/filepath"
     "strings"
+    "sync"
+    "sort"
     "time"
 )
 
-type PathInfo struct {
-    DirPathNames  []string
-    FilePathNames []string
-    LastIndexed   time.Time
-}
-var rootPath = "/srv/"
-var indexes map[string]PathInfo
+var (
+    rootPath    = "/srv" // DO NOT include trailing slash
+    indexes     map[string][]string
+    mutex       sync.RWMutex
+    lastIndexed time.Time
+)
 
 func InitializeIndex(intervalMinutes uint32) {
     // Initialize the indexes map
-    indexes = make(map[string]PathInfo)
+    indexes = make(map[string][]string)
+    var numFiles, numDirs int
     log.Println("Indexing files...")
+    lastIndexedStart := time.Now()
     // Call the function to index files and directories
-    err := indexFiles(rootPath, 1)
+    totalNumFiles, totalNumDirs, err := indexFiles(rootPath,&numFiles,&numDirs)
     if err != nil {
         log.Fatal(err)
     }
+    lastIndexed = lastIndexedStart
     go indexingScheduler(intervalMinutes)
     log.Println("Successfully indexed files.")
+    log.Println("Files found :",totalNumFiles)
+    log.Println("Directories found :",totalNumDirs)
 }
 
 func indexingScheduler(intervalMinutes uint32) {
+    log.Printf("Indexing scheduler will run every %v minutes",intervalMinutes)
     for {
         time.Sleep(time.Duration(intervalMinutes) * time.Minute)
-        err := indexFiles(rootPath, 1)
+        var numFiles, numDirs int
+        lastIndexedStart := time.Now()
+        totalNumFiles, totalNumDirs, err := indexFiles(rootPath,&numFiles,&numDirs)
         if err != nil {
             log.Fatal(err)
         }
+        lastIndexed = lastIndexedStart
+        if totalNumFiles+totalNumDirs > 0 {
+            log.Println("re-indexing found changes and updated the index.")
+        }
     }
 }
 
 
 // Define a function to recursively index files and directories
-func indexFiles(path string, depth int) error {
+func indexFiles(path string, numFiles *int, numDirs *int) (int,int,error) {
     // Check if the current directory has been modified since last indexing
     dir, err := os.Open(path)
     if err != nil {
@@ -51,89 +63,99 @@ func indexFiles(path string, depth int) error {
     defer dir.Close()
     dirInfo, err := dir.Stat()
     if err != nil {
-        return err
+        return *numFiles,*numDirs,err
     }
     // Compare the last modified time of the directory with the last indexed time
-    if dirInfo.ModTime().Before(indexes[path].LastIndexed) {
-        return nil
-    }
-    // Check if the directory path is more than 3 levels deep
-    if depth > 3 {
-        // Index the directory and its subdirectories
-        err = indexEverythingFlattened(path)
-        if err != nil {
-            return err
-        }
-        return err
+    if dirInfo.ModTime().Before(lastIndexed) {
+        return *numFiles,*numDirs,nil
     }
     // Read the directory contents
     files, err := dir.Readdir(-1)
     if err != nil {
-        return err
+        return *numFiles,*numDirs,err
     }
     // Iterate over the files and directories
     for _, file := range files {
-        filePath := filepath.Join(path, file.Name())
         if file.IsDir() {
-            // Recursively index subdirectories
-            err = indexFiles(filePath, depth+1)
-        } else {
-            addToIndex(path, filePath, file.ModTime(),file.IsDir())
+            *numDirs++
+            indexFiles(path+"/"+file.Name(),numFiles,numDirs)
         }
+        *numFiles++
+        addToIndex(path, file.Name())
     }
-    return nil
+    return *numFiles,*numDirs,nil
 }
 
-func indexEverythingFlattened(path string) error {
-    // Index the directory and its subdirectories
-    err := filepath.Walk(path, func(filePath string, info os.FileInfo, err error) error {
-        if err != nil {
-            return err
-        }
-        addToIndex(path, filePath, info.ModTime(),info.IsDir())
-        return nil
-    })
-    return err
-}
-
-func addToIndex(path string, filePath string, lastModified time.Time, isDir bool) {
-    filePath = strings.TrimPrefix(filePath, rootPath)
-    currentTime := time.Now()
+func addToIndex(path string, fileName string) {
+    mutex.Lock()
+    defer mutex.Unlock()
+    path = strings.TrimPrefix(path,rootPath+"/")
+    path = strings.TrimSuffix(path,"/")
+    if path == rootPath {
+        path = "/"
+    }
     info, exists := indexes[path]
     if !exists {
-        info = PathInfo{}
+        info = []string{}
     }
-    if isDir {
-        info.DirPathNames = append(info.DirPathNames, filePath)
-    }else{
-        info.FilePathNames = append(info.FilePathNames, filePath)
-    }
-    info.LastIndexed = currentTime
+    info = append(info, fileName)
     indexes[path] = info
 }
 
-func searchAllIndexes(searchTerm string,isDir bool,scope string) []string {
-    var matchingResults []string
+func SearchAllIndexes(searchTerm string, scope string, files []string, dirs []string) ([]string, []string) {
+    mutex.RLock()
+    defer mutex.RUnlock()
+
+    var matchingFiles []string
+    var matchingDirs []string
+
     // Iterate over the indexes
-    for _, subFiles := range indexes {
-        searchItems := subFiles.FilePathNames
-        if isDir {
-            searchItems = subFiles.DirPathNames
-        }
+    for dirName, v := range indexes {
+        searchItems := v
         // Iterate over the path names
         for _, pathName := range searchItems {
+            if dirName != "/" {
+                pathName = dirName+"/"+pathName
+            }
             // Check if the path name contains the search term
             if !containsSearchTerm(pathName, searchTerm) {
                 continue
             }
-            pathName = scopedPathNameFilter(pathName,scope)
+            pathName = scopedPathNameFilter(pathName, scope)
             if pathName == "" {
                 continue
             }
-            matchingResults = append(matchingResults, pathName)
+            matchingFiles = append(matchingFiles, pathName)
         }
+        // Check if the path name contains the search term
+        if !containsSearchTerm(dirName, searchTerm) {
+            continue
+        }
+        pathName := scopedPathNameFilter(dirName, scope)
+        if pathName == "" {
+            continue
+        }
+        matchingDirs = append(matchingDirs, pathName)
     }
-    return matchingResults
+    // Sort the strings based on the number of elements after splitting by "/"
+    sort.Slice(matchingFiles, func(i, j int) bool {
+        parts1 := strings.Split(matchingFiles[i], "/")
+        parts2 := strings.Split(matchingFiles[j], "/")
+        return len(parts1) < len(parts2)
+    })
+    // Sort the strings based on the number of elements after splitting by "/"
+    sort.Slice(matchingDirs, func(i, j int) bool {
+        parts1 := strings.Split(matchingDirs[i], "/")
+        parts2 := strings.Split(matchingDirs[j], "/")
+        return len(parts1) < len(parts2)
+    })
+
+    // Copy the matching files and dirs to the final slices
+    files = append([]string{}, matchingFiles...)
+    dirs = append([]string{}, matchingDirs...)
+
+    return files, dirs
 }
 
 func scopedPathNameFilter(pathName string, scope string) string {
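The index itself is now a package-level map[string][]string guarded by a sync.RWMutex: each key is a directory path relative to /srv (with "/" for the root) and the value lists the names recorded under it. addToIndex takes the write lock; SearchAllIndexes takes the read lock and rejoins dirName + "/" + name into full paths before matching. A minimal, self-contained sketch of querying a map shaped like this under the read lock (the sample entries and the lookup helper are invented for illustration; matching in the real code goes through containsSearchTerm and scopedPathNameFilter):

```go
package main

import (
	"fmt"
	"strings"
	"sync"
)

// Same shape as the diff above: directory path -> names indexed under it,
// shared between the background indexer (writer) and search requests (readers).
var (
	indexes = map[string][]string{ // sample data, invented for illustration
		"/":           {"notes.txt", "music"},
		"music":       {"album", "mix-2023.mp3"},
		"music/album": {"track01.flac"},
	}
	mutex sync.RWMutex
)

// lookup walks the map under the read lock, the same locking pattern
// SearchAllIndexes uses, and returns every full path containing term.
func lookup(term string) []string {
	mutex.RLock()
	defer mutex.RUnlock()
	var hits []string
	for dir, names := range indexes {
		for _, name := range names {
			full := name
			if dir != "/" {
				full = dir + "/" + name
			}
			if strings.Contains(full, term) {
				hits = append(hits, full)
			}
		}
	}
	return hits
}

func main() {
	fmt.Println(lookup("album")) // e.g. [music/album music/album/track01.flac]
}
```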
@@ -16,10 +16,9 @@ type searchOptions struct {
     Terms []string
 }
 
-func IndexedSearch(query string,scope string) (files []string, dirs []string) {
-    fileList := searchAllIndexes(query,false,scope)
-    dirList := searchAllIndexes(query,true,scope)
-    return fileList, dirList
+func IndexedSearch(query string, scope string, files *[]string, dirs *[]string) ([]string, []string) {
+    *files, *dirs = SearchAllIndexes(query, scope, *files, *dirs)
+    return *files, *dirs
 }
 
 // Search searches for a query in a fs.
@@ -2,7 +2,7 @@ package version
 
 var (
     // Version is the current File Browser version.
-    Version = "(untracked)"
+    Version = "(0.1.0)"
     // CommitSHA is the commmit sha.
     CommitSHA = "(unknown)"
 )
@@ -14,7 +14,7 @@
       <input
         type="text"
         @keyup.exact="keyup"
-        @keyup.enter="submit"
+        @input="submit"
         ref="input"
         :autofocus="active"
         v-model.trim="value"
@@ -47,7 +47,7 @@
         </div>
       </template>
     </template>
-    <ul v-show="results.length > 0">
+    <ul v-show="filteredResults.length > 0">
       <li v-for="(s, k) in filteredResults" :key="k">
         <router-link @click.native="close" :to="s.url">
           <i v-if="s.dir" class="material-icons">folder</i>
@@ -163,7 +163,7 @@ export default {
         return;
       }
 
-      this.results.length = 0;
+      this.results.length === 0;
     },
     init(string) {
      this.value = `${string} `;
@@ -177,7 +177,7 @@ export default {
     async submit(event) {
       event.preventDefault();
 
-      if (this.value === "") {
+      if (this.value === "" || this.value.length < 3) {
         return;
       }
 
@@ -99,7 +99,7 @@
         v-else
         rel="noopener noreferrer"
         target="_blank"
-        href="https://github.com/filebrowser/filebrowser"
+        href="https://github.com/gtsteffaniak/filebrowser"
         >File Browser</a
       >
       <span> {{ version }}</span>
@@ -156,8 +156,8 @@ export default {
       try {
         let usage = await api.usage(path);
         usageStats = {
-          used: prettyBytes(usage.used, { binary: true }),
-          total: prettyBytes(usage.total, { binary: true }),
+          used: prettyBytes(usage.used/1024, { binary: true }),
+          total: prettyBytes(usage.total/1024, { binary: true }),
           usedPercentage: Math.round((usage.used / usage.total) * 100),
         };
       } catch (error) {
@@ -152,7 +152,7 @@
     "images": "الصور",
     "music": "الموسيقى",
     "pdf": "PDF",
-    "pressToSearch": "Press enter to search...",
+    "pressToSearch": "No results found in indexed search.",
     "search": "البحث...",
     "typeToSearch": "Type to search...",
     "types": "الأنواع",
@@ -162,7 +162,7 @@
     "images": "Images",
     "music": "Music",
     "pdf": "PDF",
-    "pressToSearch": "Press enter to search...",
+    "pressToSearch": "No results found in indexed search.",
     "search": "Search...",
     "typeToSearch": "Type to search...",
     "types": "Types",
@@ -152,7 +152,7 @@
     "images": "画像",
     "music": "音楽",
     "pdf": "PDF",
-    "pressToSearch": "Press enter to search...",
+    "pressToSearch": "No results found in indexed search.",
     "search": "検索...",
     "typeToSearch": "Type to search...",
     "types": "種類",