diff --git a/README.md b/README.md
index 32dab4d..a6527b9 100644
--- a/README.md
+++ b/README.md
@@ -122,6 +122,7 @@ Important values:
- `CORS_ALLOWED_ORIGIN`
- `APP_DOMAIN`
- `MAX_BODY_MB`
+- `OCR_LANGS` (default: `eng+osd`, example: `eng+osd+rus`)
- `RATE_LIMIT_PER_MIN`
- `AUTH_RATE_LIMIT_PER_MIN`
- `GOOGLE_AUTH_ENABLED`
diff --git a/backend/api_ftp_test.go b/backend/api_ftp_test.go
index f7a7ae4..e8896e2 100644
--- a/backend/api_ftp_test.go
+++ b/backend/api_ftp_test.go
@@ -61,7 +61,14 @@ func makeTestServer(t *testing.T, mutate func(*Config)) *Server {
t.Fatalf("newORMRepo failed: %v", err)
}
- return &Server{db: db, orm: orm, config: cfg, storage: storage, limiter: newRateLimiter()}
+ return &Server{
+ db: db,
+ orm: orm,
+ config: cfg,
+ storage: storage,
+ limiter: newRateLimiter(),
+ searchContent: newSearchContentCache(256),
+ }
}
func decodeJSONBody[T any](t *testing.T, res *http.Response, out *T) {
diff --git a/backend/config.go b/backend/config.go
index 5e35a89..adfc8bc 100644
--- a/backend/config.go
+++ b/backend/config.go
@@ -20,6 +20,7 @@ type Config struct {
CORSOrigin string
CookieSecure bool
MaxBodyBytes int64
+ OCRLangs string
GoogleAuthEnabled bool
GoogleClientID string
@@ -85,6 +86,7 @@ func loadConfig() Config {
CORSOrigin: getEnv("CORS_ALLOWED_ORIGIN", ""),
CookieSecure: getEnv("COOKIE_SECURE", "false") == "true",
MaxBodyBytes: int64(getEnvInt("MAX_BODY_MB", 8)) * 1024 * 1024,
+ OCRLangs: normalizeOCRLangs(getEnv("OCR_LANGS", "eng+osd")),
GoogleAuthEnabled: getEnv("GOOGLE_AUTH_ENABLED", "false") == "true",
GoogleClientID: getEnv("GOOGLE_CLIENT_ID", ""),
GoogleClientSecret: getEnv("GOOGLE_CLIENT_SECRET", ""),
@@ -177,6 +179,43 @@ func loadConfig() Config {
return cfg
}
+func normalizeOCRLangs(v string) string {
+ fields := strings.FieldsFunc(strings.TrimSpace(strings.ToLower(v)), func(r rune) bool {
+ return r == '+' || r == ',' || r == ';' || r == '|' || r == ' ' || r == '\t' || r == '\n' || r == '\r'
+ })
+ if len(fields) == 0 {
+ return "eng+osd"
+ }
+
+ seen := make(map[string]struct{}, len(fields))
+ out := make([]string, 0, len(fields))
+ for _, field := range fields {
+ field = strings.TrimSpace(field)
+ if field == "" {
+ continue
+ }
+ valid := true
+ for _, ch := range field {
+ if (ch < 'a' || ch > 'z') && (ch < '0' || ch > '9') && ch != '_' {
+ valid = false
+ break
+ }
+ }
+ if !valid {
+ continue
+ }
+ if _, ok := seen[field]; ok {
+ continue
+ }
+ seen[field] = struct{}{}
+ out = append(out, field)
+ }
+ if len(out) == 0 {
+ return "eng+osd"
+ }
+ return strings.Join(out, "+")
+}
+
func applyFTPSLetsEncryptDefaults(cfg *Config) {
if cfg == nil {
return
diff --git a/backend/config_test.go b/backend/config_test.go
index 35f9ed4..c24af48 100644
--- a/backend/config_test.go
+++ b/backend/config_test.go
@@ -31,3 +31,29 @@ func TestApplyFTPSLetsEncryptDefaultsCustomDirAndPreserveManual(t *testing.T) {
t.Fatalf("unexpected key path: %q", cfg.FTPSKeyFile)
}
}
+
+func TestNormalizeOCRLangs(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ in string
+ want string
+ }{
+ {name: "default on empty", in: "", want: "eng+osd"},
+ {name: "comma separated", in: "eng, rus, deu", want: "eng+rus+deu"},
+ {name: "plus separated", in: "eng+osd+rus", want: "eng+osd+rus"},
+ {name: "dedupe and lowercase", in: "ENG + rus + eng", want: "eng+rus"},
+ {name: "drops invalid tokens", in: "eng+ru-RU+osd", want: "eng+osd"},
+ }
+
+ for _, tc := range tests {
+ tc := tc
+ t.Run(tc.name, func(t *testing.T) {
+ t.Parallel()
+ if got := normalizeOCRLangs(tc.in); got != tc.want {
+ t.Fatalf("normalizeOCRLangs(%q) = %q, want %q", tc.in, got, tc.want)
+ }
+ })
+ }
+}
diff --git a/backend/main.go b/backend/main.go
index 745a2e1..6ef5872 100644
--- a/backend/main.go
+++ b/backend/main.go
@@ -3,6 +3,7 @@ package main
import (
"archive/tar"
"archive/zip"
+ "bytes"
"compress/gzip"
"context"
"crypto/rand"
@@ -11,10 +12,13 @@ import (
"database/sql"
"embed"
"encoding/base64"
+ "encoding/binary"
"encoding/hex"
"encoding/json"
+ "encoding/xml"
"errors"
"fmt"
+ "html"
"io"
"io/fs"
"log"
@@ -26,10 +30,14 @@ import (
"os/exec"
"path"
"path/filepath"
+ "sort"
"strconv"
"strings"
"sync"
"time"
+ "unicode"
+ "unicode/utf16"
+ "unicode/utf8"
"github.com/golang-jwt/jwt/v5"
"github.com/gorilla/mux"
@@ -41,11 +49,12 @@ import (
var embeddedWeb embed.FS
type Server struct {
- db *sql.DB
- orm *ormRepo
- config Config
- storage Storage
- limiter *rateLimiter
+ db *sql.DB
+ orm *ormRepo
+ config Config
+ storage Storage
+ limiter *rateLimiter
+ searchContent *searchContentCache
}
type rateLimiter struct {
@@ -58,10 +67,69 @@ type rateEntry struct {
WindowEnds time.Time
}
+type searchContentCache struct {
+ mu sync.Mutex
+ maxEntries int
+ clock uint64
+ items map[string]searchContentCacheEntry
+}
+
+type searchContentCacheEntry struct {
+ text string
+ used uint64
+}
+
func newRateLimiter() *rateLimiter {
return &rateLimiter{entries: make(map[string]*rateEntry)}
}
+func newSearchContentCache(maxEntries int) *searchContentCache {
+ if maxEntries < 1 {
+ maxEntries = 256
+ }
+ return &searchContentCache{
+ maxEntries: maxEntries,
+ items: make(map[string]searchContentCacheEntry, maxEntries),
+ }
+}
+
+func (c *searchContentCache) get(key string) (string, bool) {
+ c.mu.Lock()
+ defer c.mu.Unlock()
+
+ entry, ok := c.items[key]
+ if !ok {
+ return "", false
+ }
+ c.clock++
+ entry.used = c.clock
+ c.items[key] = entry
+ return entry.text, true
+}
+
+func (c *searchContentCache) put(key, text string) {
+ c.mu.Lock()
+ defer c.mu.Unlock()
+
+ c.clock++
+ c.items[key] = searchContentCacheEntry{text: text, used: c.clock}
+ if len(c.items) <= c.maxEntries {
+ return
+ }
+
+ oldestKey := ""
+ oldestUsed := c.clock
+ for candidate, entry := range c.items {
+ if oldestKey == "" || entry.used < oldestUsed {
+ oldestKey = candidate
+ oldestUsed = entry.used
+ }
+ }
+ if oldestKey != "" {
+ delete(c.items, oldestKey)
+ }
+}
+
func (rl *rateLimiter) allow(key string, limit int, now time.Time) bool {
rl.mu.Lock()
defer rl.mu.Unlock()
@@ -175,7 +243,14 @@ func main() {
log.Fatalf("protocol init failed: %v", err)
}
- s := &Server{db: db, orm: orm, config: cfg, storage: storage, limiter: newRateLimiter()}
+ s := &Server{
+ db: db,
+ orm: orm,
+ config: cfg,
+ storage: storage,
+ limiter: newRateLimiter(),
+ searchContent: newSearchContentCache(256),
+ }
r := mux.NewRouter()
r.Use(s.recoverMiddleware)
r.Use(s.securityHeadersMiddleware)
@@ -197,6 +272,8 @@ func main() {
r.HandleFunc("/api/admin/login", s.handleAdminLogin).Methods(http.MethodPost)
r.HandleFunc("/api/admin/logout", s.handleAdminLogout).Methods(http.MethodPost)
+ r.HandleFunc("/share/{token}", s.handleSharedPage).Methods(http.MethodGet, http.MethodHead)
+ r.HandleFunc("/api/share/{token}/preview", s.handleSharedPreview).Methods(http.MethodGet, http.MethodHead)
r.HandleFunc("/api/share/{token}", s.handleSharedDownload).Methods(http.MethodGet, http.MethodHead)
protected := r.PathPrefix("/api").Subrouter()
@@ -206,11 +283,14 @@ func main() {
protected.HandleFunc("/user/google/link/start", s.handleGoogleLinkStart).Methods(http.MethodGet)
protected.HandleFunc("/user/protocols", s.handleUserProtocols).Methods(http.MethodGet)
protected.HandleFunc("/files", s.handleListFiles).Methods(http.MethodGet)
+ protected.HandleFunc("/files/search", s.handleSearchFiles).Methods(http.MethodGet)
protected.HandleFunc("/files/upload", s.handleUpload).Methods(http.MethodPost)
protected.HandleFunc("/files/download", s.handleDownload).Methods(http.MethodGet, http.MethodHead)
protected.HandleFunc("/files/download-batch", s.handleBatchDownload).Methods(http.MethodPost)
protected.HandleFunc("/files/move-batch", s.handleBatchMove).Methods(http.MethodPost)
protected.HandleFunc("/files/preview", s.handlePreview).Methods(http.MethodGet, http.MethodHead)
+ protected.HandleFunc("/files/thumbnail", s.handleThumbnail).Methods(http.MethodGet, http.MethodHead)
+ protected.HandleFunc("/files/content-preview", s.handleContentPreview).Methods(http.MethodGet)
protected.HandleFunc("/files/text", s.handleReadTextFile).Methods(http.MethodGet)
protected.HandleFunc("/files/text", s.handleWriteTextFile).Methods(http.MethodPut)
protected.HandleFunc("/files/rename", s.handleRename).Methods(http.MethodPost)
@@ -300,6 +380,29 @@ func migrate(db *sql.DB) error {
);`,
`CREATE INDEX IF NOT EXISTS idx_file_tags_user_path ON file_tags(user_id, rel_path);`,
`CREATE INDEX IF NOT EXISTS idx_file_tags_user_tag ON file_tags(user_id, tag);`,
+ `CREATE TABLE IF NOT EXISTS search_content_cache (
+ user_id INTEGER NOT NULL,
+ rel_path TEXT NOT NULL,
+ extractor TEXT NOT NULL,
+ file_size INTEGER NOT NULL,
+ mod_time_ns INTEGER NOT NULL,
+ content TEXT NOT NULL,
+ updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ PRIMARY KEY(user_id, rel_path, extractor),
+ FOREIGN KEY(user_id) REFERENCES users(id)
+ );`,
+ `CREATE TABLE IF NOT EXISTS preview_thumbnail_cache (
+ user_id INTEGER NOT NULL,
+ rel_path TEXT NOT NULL,
+ renderer TEXT NOT NULL,
+ file_size INTEGER NOT NULL,
+ mod_time_ns INTEGER NOT NULL,
+ content_type TEXT NOT NULL,
+ image BLOB NOT NULL,
+ updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ PRIMARY KEY(user_id, rel_path, renderer),
+ FOREIGN KEY(user_id) REFERENCES users(id)
+ );`,
}
for _, stmt := range stmts {
@@ -568,6 +671,1110 @@ func (s *Server) handleListFiles(w http.ResponseWriter, r *http.Request) {
writeJSON(w, http.StatusOK, map[string]any{"path": normalizePath(rel), "entries": entries})
}
+func (s *Server) handleSearchFiles(w http.ResponseWriter, r *http.Request) {
+ uid := userIDFromContext(r.Context())
+ query := strings.TrimSpace(r.URL.Query().Get("q"))
+ limit := 200
+ if raw := strings.TrimSpace(r.URL.Query().Get("limit")); raw != "" {
+ if parsed, err := strconv.Atoi(raw); err == nil && parsed > 0 {
+ limit = parsed
+ }
+ }
+ if limit > 400 {
+ limit = 400
+ }
+ log.Printf("file.search user_id=%d query=%q limit=%d", uid, query, limit)
+
+ entries, err := s.searchFiles(uid, query, limit)
+ if err != nil {
+ writeErr(w, http.StatusBadRequest, err.Error())
+ return
+ }
+ if len(entries) > 0 {
+ paths := make([]string, 0, len(entries))
+ for _, entry := range entries {
+ paths = append(paths, normalizePath(entry.Path))
+ }
+ tagsByPath, tagErr := s.fileTagsForPaths(uid, paths)
+ if tagErr == nil {
+ for i := range entries {
+ entries[i].Tags = tagsByPath[normalizePath(entries[i].Path)]
+ }
+ }
+ }
+ writeJSON(w, http.StatusOK, map[string]any{"query": query, "entries": entries})
+}
+
+type scoredFileEntry struct {
+ Entry FileEntry
+ Score int
+}
+
+type limitedBuffer struct {
+ limit int
+ buf bytes.Buffer
+}
+
+func (b *limitedBuffer) Write(p []byte) (int, error) {
+ if remaining := b.limit - b.buf.Len(); remaining > 0 {
+ if len(p) > remaining {
+ p = p[:remaining]
+ }
+ _, _ = b.buf.Write(p)
+ }
+ return len(p), nil
+}
+
+func (b *limitedBuffer) String() string {
+ return b.buf.String()
+}
+
+const (
+ searchContentCacheEntries = 256
+ maxSearchExtractBytes = 2 << 20
+ maxSearchPlainTextBytes = 2 << 20
+ maxSearchDocBytes = 64 << 20
+ maxSearchOCRBytes = 20 << 20
+ searchExtractTimeout = 12 * time.Second
+ previewRenderTimeout = 20 * time.Second
+ previewThumbnailScale = 960
+ maxPreviewThumbnailBytes = 2 << 20
+)
+
+var searchableTextExtensions = map[string]struct{}{
+ ".c": {}, ".cc": {}, ".cfg": {}, ".conf": {}, ".cpp": {}, ".cs": {}, ".css": {}, ".csv": {}, ".env": {},
+ ".go": {}, ".h": {}, ".hpp": {}, ".html": {}, ".ini": {}, ".java": {}, ".js": {}, ".json": {}, ".jsx": {},
+ ".kt": {}, ".less": {}, ".log": {}, ".lua": {}, ".markdown": {}, ".md": {}, ".php": {}, ".pl": {}, ".properties": {},
+ ".py": {}, ".rb": {}, ".rs": {}, ".scss": {}, ".sh": {}, ".sql": {}, ".svg": {}, ".svelte": {}, ".tex": {},
+ ".toml": {}, ".ts": {}, ".tsx": {}, ".txt": {}, ".vtt": {}, ".xml": {}, ".yaml": {}, ".yml": {},
+}
+
+var searchableZipDocumentExtensions = map[string]struct{}{
+ ".docm": {}, ".docx": {}, ".dotm": {}, ".dotx": {}, ".odp": {}, ".ods": {}, ".odt": {},
+ ".potm": {}, ".potx": {}, ".ppsm": {}, ".ppsx": {}, ".pptm": {}, ".pptx": {}, ".xlsm": {}, ".xlsx": {}, ".xltm": {}, ".xltx": {},
+}
+
+var searchableLibreOfficeExtensions = map[string]struct{}{
+ ".doc": {}, ".docm": {}, ".docx": {}, ".dotm": {}, ".dotx": {}, ".epub": {}, ".fodp": {}, ".fods": {}, ".fodt": {},
+ ".odg": {}, ".odp": {}, ".ods": {}, ".odt": {}, ".pages": {}, ".potm": {}, ".potx": {}, ".pps": {}, ".ppsm": {},
+ ".ppsx": {}, ".ppt": {}, ".pptm": {}, ".pptx": {}, ".rtf": {}, ".sxw": {}, ".sxc": {}, ".sxi": {}, ".wpd": {},
+ ".xls": {}, ".xlsm": {}, ".xlsx": {}, ".xltm": {}, ".xltx": {},
+}
+
+var searchableImageExtensions = map[string]struct{}{
+ ".bmp": {}, ".gif": {}, ".jpeg": {}, ".jpg": {}, ".png": {}, ".tif": {}, ".tiff": {}, ".webp": {},
+}
+
+func (s *Server) searchFiles(uid int64, query string, limit int) ([]FileEntry, error) {
+ q := strings.ToLower(strings.TrimSpace(query))
+ contentQuery := normalizeSearchText(query)
+ if q == "" {
+ return []FileEntry{}, nil
+ }
+ if limit < 1 {
+ limit = 1
+ }
+
+ stack := []string{"/"}
+ matchesByPath := make(map[string]scoredFileEntry)
+
+ for len(stack) > 0 {
+ dir := stack[len(stack)-1]
+ stack = stack[:len(stack)-1]
+
+ entries, err := s.storage.List(uid, dir)
+ if err != nil {
+ return nil, err
+ }
+ for _, entry := range entries {
+ if entry.IsDir {
+ stack = append(stack, entry.Path)
+ }
+ score, ok := fuzzyEntryScore(q, entry.Name, entry.Path)
+ if !entry.IsDir {
+ if contentScore, contentOK := s.fileContentSearchScore(uid, entry, contentQuery); contentOK && (!ok || contentScore > score) {
+ score = contentScore
+ ok = true
+ }
+ }
+ if !ok {
+ continue
+ }
+ key := normalizePath(entry.Path)
+ if existing, found := matchesByPath[key]; !found || score > existing.Score {
+ matchesByPath[key] = scoredFileEntry{Entry: entry, Score: score}
+ }
+ }
+ }
+
+ matches := make([]scoredFileEntry, 0, len(matchesByPath))
+ for _, match := range matchesByPath {
+ matches = append(matches, match)
+ }
+
+ sort.Slice(matches, func(i, j int) bool {
+ if matches[i].Score != matches[j].Score {
+ return matches[i].Score > matches[j].Score
+ }
+ if matches[i].Entry.IsDir != matches[j].Entry.IsDir {
+ return matches[i].Entry.IsDir
+ }
+ if matches[i].Entry.Name != matches[j].Entry.Name {
+ return matches[i].Entry.Name < matches[j].Entry.Name
+ }
+ return matches[i].Entry.Path < matches[j].Entry.Path
+ })
+
+ if len(matches) > limit {
+ matches = matches[:limit]
+ }
+ out := make([]FileEntry, 0, len(matches))
+ for _, match := range matches {
+ out = append(out, match.Entry)
+ }
+ return out, nil
+}
+
+func fuzzyEntryScore(query, name, fullPath string) (int, bool) {
+ best := -1
+ if score, ok := fuzzyCandidateScore(query, strings.ToLower(strings.TrimSpace(name))); ok {
+ best = score + 240
+ }
+ if score, ok := fuzzyCandidateScore(query, strings.ToLower(strings.TrimSpace(fullPath))); ok {
+ score += 80
+ if score > best {
+ best = score
+ }
+ }
+ return best, best >= 0
+}
+
+func fuzzyCandidateScore(query, candidate string) (int, bool) {
+ if query == "" || candidate == "" {
+ return 0, false
+ }
+ if idx := strings.Index(candidate, query); idx >= 0 {
+ return 1600 - idx*10 - (len(candidate) - len(query)), true
+ }
+
+ queryRunes := []rune(query)
+ candidateRunes := []rune(candidate)
+ qi := 0
+ score := 0
+ firstIdx := -1
+ prevMatch := -2
+ gaps := 0
+
+ for i, ch := range candidateRunes {
+ if qi >= len(queryRunes) {
+ break
+ }
+ if ch != queryRunes[qi] {
+ if firstIdx >= 0 {
+ gaps++
+ }
+ continue
+ }
+ if firstIdx < 0 {
+ firstIdx = i
+ score += 120
+ }
+ score += 85
+ if i == 0 || candidateRunes[i-1] == '/' || candidateRunes[i-1] == '-' || candidateRunes[i-1] == '_' || candidateRunes[i-1] == '.' || candidateRunes[i-1] == ' ' {
+ score += 45
+ }
+ if prevMatch == i-1 {
+ score += 60
+ }
+ prevMatch = i
+ qi++
+ }
+ if qi != len(queryRunes) {
+ return 0, false
+ }
+
+ score += 700 - gaps*12
+ if firstIdx > 0 {
+ score -= firstIdx * 6
+ }
+ overhang := len(candidateRunes) - len(queryRunes)
+ if overhang > 0 {
+ score -= overhang
+ }
+ return score, true
+}
+
+func (s *Server) fileContentSearchScore(uid int64, entry FileEntry, query string) (int, bool) {
+ if query == "" || entry.IsDir {
+ return 0, false
+ }
+ text, ok := s.searchableFileContent(uid, entry)
+ if !ok {
+ return 0, false
+ }
+ return contentMatchScore(query, text)
+}
+
+func (s *Server) searchableFileContent(uid int64, entry FileEntry) (string, bool) {
+ if entry.IsDir {
+ return "", false
+ }
+ if s.searchContent == nil {
+ s.searchContent = newSearchContentCache(searchContentCacheEntries)
+ }
+
+ extractor := cacheableSearchExtractor(entry)
+ cacheKey := fmt.Sprintf("%d|%s|%s|%d|%d", uid, normalizePath(entry.Path), extractor, entry.Size, entry.ModTime.UTC().UnixNano())
+ if text, ok := s.searchContent.get(cacheKey); ok {
+ return text, true
+ }
+ if extractor != "" {
+ if text, ok := s.loadPersistedSearchContent(uid, entry.Path, extractor, entry.Size, entry.ModTime); ok {
+ s.searchContent.put(cacheKey, text)
+ return text, true
+ }
+ }
+
+ text, err := s.extractSearchableText(uid, entry)
+ if err != nil {
+ log.Printf("file.search.extract path=%q error=%v", normalizePath(entry.Path), err)
+ text = ""
+ }
+ text = normalizeSearchText(text)
+ s.searchContent.put(cacheKey, text)
+ if extractor != "" && err == nil {
+ s.storePersistedSearchContent(uid, entry.Path, extractor, entry.Size, entry.ModTime, text)
+ }
+ return text, true
+}
+
+func (s *Server) extractSearchableText(uid int64, entry FileEntry) (string, error) {
+ if entry.IsDir || entry.Size == 0 {
+ return "", nil
+ }
+
+ fullPath, err := s.localStoragePath(uid, entry.Path)
+ if err != nil {
+ return "", err
+ }
+
+ ext := strings.ToLower(filepath.Ext(entry.Name))
+ switch {
+ case isSearchableZipDocumentExtension(ext):
+ text, err := extractZipDocumentText(fullPath, ext)
+ if strings.TrimSpace(text) != "" || err == nil {
+ return text, err
+ }
+ if !isSearchableLibreOfficeExtension(ext) {
+ return "", err
+ }
+ return extractLibreOfficeText(fullPath)
+ case ext == ".pdf":
+ if entry.Size > maxSearchDocBytes {
+ return "", nil
+ }
+ return extractPDFText(fullPath)
+ case isSearchableImageExtension(ext):
+ if entry.Size > maxSearchOCRBytes {
+ return "", nil
+ }
+ return extractImageOCRText(fullPath, s.ocrLangs())
+ case isSearchableLibreOfficeExtension(ext):
+ if entry.Size > maxSearchDocBytes {
+ return "", nil
+ }
+ return extractLibreOfficeText(fullPath)
+ default:
+ forceText := isSearchableTextExtension(ext)
+ if !forceText && entry.Size > maxSearchPlainTextBytes {
+ return "", nil
+ }
+ text, ok, err := extractPlainTextFile(fullPath, forceText)
+ if err != nil || ok {
+ return text, err
+ }
+ return "", nil
+ }
+}
+
+func (s *Server) extractPreviewText(uid int64, entry FileEntry) (string, error) {
+ if entry.IsDir || entry.Size == 0 {
+ return "", nil
+ }
+
+ fullPath, err := s.localStoragePath(uid, entry.Path)
+ if err != nil {
+ return "", err
+ }
+
+ ext := strings.ToLower(filepath.Ext(entry.Name))
+ switch {
+ case ext == ".pdf":
+ if entry.Size > maxSearchDocBytes {
+ return "", nil
+ }
+ return extractPDFText(fullPath)
+ case isSearchableZipDocumentExtension(ext), isSearchableLibreOfficeExtension(ext):
+ if entry.Size > maxSearchDocBytes {
+ return "", nil
+ }
+ text, err := extractLibreOfficeText(fullPath)
+ if strings.TrimSpace(text) != "" || err == nil {
+ return text, err
+ }
+ if isSearchableZipDocumentExtension(ext) {
+ return extractZipDocumentText(fullPath, ext)
+ }
+ return "", err
+ default:
+ forceText := isSearchableTextExtension(ext)
+ if !forceText && entry.Size > maxSearchPlainTextBytes {
+ return "", nil
+ }
+ text, ok, err := extractPlainTextFile(fullPath, forceText)
+ if err != nil || ok {
+ return text, err
+ }
+ return "", nil
+ }
+}
+
+func (s *Server) extractPreviewThumbnail(uid int64, entry FileEntry) ([]byte, error) {
+ if entry.IsDir || entry.Size == 0 {
+ return nil, nil
+ }
+
+ renderer := cacheableThumbnailRenderer(entry)
+ if data, ok := s.loadPersistedPreviewThumbnail(uid, entry.Path, renderer, entry.Size, entry.ModTime); ok {
+ return data, nil
+ }
+
+ fullPath, err := s.localStoragePath(uid, entry.Path)
+ if err != nil {
+ return nil, err
+ }
+
+ ext := strings.ToLower(filepath.Ext(entry.Name))
+ var data []byte
+ switch {
+ case ext == ".pdf":
+ if entry.Size > maxSearchDocBytes {
+ return nil, nil
+ }
+ if rendered, err := renderPDFThumbnail(fullPath); err == nil && len(rendered) > 0 {
+ data = rendered
+ break
+ }
+ text, err := extractPDFText(fullPath)
+ if err != nil {
+ return nil, err
+ }
+ data, err = renderTextThumbnail(text)
+ if err != nil {
+ return nil, err
+ }
+ case isSearchableZipDocumentExtension(ext), isSearchableLibreOfficeExtension(ext):
+ if entry.Size > maxSearchDocBytes {
+ return nil, nil
+ }
+ if rendered, err := renderLibreOfficeThumbnail(fullPath); err == nil && len(rendered) > 0 {
+ data = rendered
+ break
+ }
+ text, err := s.extractPreviewText(uid, entry)
+ if err != nil {
+ return nil, err
+ }
+ data, err = renderTextThumbnail(text)
+ if err != nil {
+ return nil, err
+ }
+ default:
+ return nil, nil
+ }
+
+ if len(data) > 0 {
+ s.storePersistedPreviewThumbnail(uid, entry.Path, renderer, entry.Size, entry.ModTime, "image/png", data)
+ }
+ return data, nil
+}
+
+func (s *Server) localStoragePath(uid int64, rel string) (string, error) {
+ local, ok := s.storage.(*LocalStorage)
+ if !ok {
+ return "", fmt.Errorf("content search is only available for local storage")
+ }
+ return local.fullPath(uid, rel)
+}
+
+func cacheableSearchExtractor(entry FileEntry) string {
+ if entry.IsDir {
+ return ""
+ }
+ ext := strings.ToLower(filepath.Ext(entry.Name))
+ switch {
+ case isSearchableImageExtension(ext):
+ return "ocr"
+ case ext == ".pdf":
+ return "pdf"
+ case isSearchableZipDocumentExtension(ext), isSearchableLibreOfficeExtension(ext):
+ return "document"
+ default:
+ return ""
+ }
+}
+
+func cacheableThumbnailRenderer(entry FileEntry) string {
+ if entry.IsDir {
+ return ""
+ }
+ ext := strings.ToLower(filepath.Ext(entry.Name))
+ switch {
+ case ext == ".pdf":
+ return "pdf:v1"
+ case isSearchableZipDocumentExtension(ext), isSearchableLibreOfficeExtension(ext):
+ return "document:v1"
+ default:
+ return ""
+ }
+}
+
+func (s *Server) loadPersistedSearchContent(uid int64, relPath, extractor string, size int64, modTime time.Time) (string, bool) {
+ if extractor == "" {
+ return "", false
+ }
+
+ norm := normalizePath(relPath)
+ var cachedSize int64
+ var cachedModTime int64
+ var content string
+ err := s.db.QueryRow(
+ `SELECT file_size, mod_time_ns, content FROM search_content_cache WHERE user_id = ? AND rel_path = ? AND extractor = ?`,
+ uid, norm, extractor,
+ ).Scan(&cachedSize, &cachedModTime, &content)
+ if err != nil {
+ return "", false
+ }
+ if cachedSize != size || cachedModTime != modTime.UTC().UnixNano() {
+ _, _ = s.db.Exec(`DELETE FROM search_content_cache WHERE user_id = ? AND rel_path = ? AND extractor = ?`, uid, norm, extractor)
+ return "", false
+ }
+ return content, true
+}
+
+func (s *Server) storePersistedSearchContent(uid int64, relPath, extractor string, size int64, modTime time.Time, content string) {
+ if extractor == "" {
+ return
+ }
+ _, err := s.db.Exec(
+ `INSERT INTO search_content_cache(user_id, rel_path, extractor, file_size, mod_time_ns, content, updated_at)
+ VALUES (?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
+ ON CONFLICT(user_id, rel_path, extractor) DO UPDATE SET
+ file_size = excluded.file_size,
+ mod_time_ns = excluded.mod_time_ns,
+ content = excluded.content,
+ updated_at = CURRENT_TIMESTAMP`,
+ uid,
+ normalizePath(relPath),
+ extractor,
+ size,
+ modTime.UTC().UnixNano(),
+ content,
+ )
+ if err != nil {
+ log.Printf("file.search.cache.store path=%q extractor=%s error=%v", normalizePath(relPath), extractor, err)
+ }
+}
+
+func (s *Server) purgePersistedSearchContent(uid int64, relPath string) {
+ norm := normalizePath(relPath)
+	_, _ = s.db.Exec(`DELETE FROM search_content_cache WHERE user_id = ? AND (rel_path = ? OR rel_path LIKE ? ESCAPE '\')`, uid, norm, strings.NewReplacer(`\`, `\\`, `%`, `\%`, `_`, `\_`).Replace(strings.TrimSuffix(norm, "/"))+"/%")
+}
+
+func (s *Server) purgePersistedSearchContentForUser(uid int64) {
+ _, _ = s.db.Exec(`DELETE FROM search_content_cache WHERE user_id = ?`, uid)
+}
+
+func (s *Server) loadPersistedPreviewThumbnail(uid int64, relPath, renderer string, size int64, modTime time.Time) ([]byte, bool) {
+ if renderer == "" {
+ return nil, false
+ }
+
+ norm := normalizePath(relPath)
+ var cachedSize int64
+ var cachedModTime int64
+ var image []byte
+ err := s.db.QueryRow(
+ `SELECT file_size, mod_time_ns, image FROM preview_thumbnail_cache WHERE user_id = ? AND rel_path = ? AND renderer = ?`,
+ uid, norm, renderer,
+ ).Scan(&cachedSize, &cachedModTime, &image)
+ if err != nil {
+ return nil, false
+ }
+ if cachedSize != size || cachedModTime != modTime.UTC().UnixNano() {
+ _, _ = s.db.Exec(`DELETE FROM preview_thumbnail_cache WHERE user_id = ? AND rel_path = ? AND renderer = ?`, uid, norm, renderer)
+ return nil, false
+ }
+ return image, true
+}
+
+func (s *Server) storePersistedPreviewThumbnail(uid int64, relPath, renderer string, size int64, modTime time.Time, contentType string, image []byte) {
+ if renderer == "" || len(image) == 0 || len(image) > maxPreviewThumbnailBytes {
+ return
+ }
+ _, err := s.db.Exec(
+ `INSERT INTO preview_thumbnail_cache(user_id, rel_path, renderer, file_size, mod_time_ns, content_type, image, updated_at)
+ VALUES (?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
+ ON CONFLICT(user_id, rel_path, renderer) DO UPDATE SET
+ file_size = excluded.file_size,
+ mod_time_ns = excluded.mod_time_ns,
+ content_type = excluded.content_type,
+ image = excluded.image,
+ updated_at = CURRENT_TIMESTAMP`,
+ uid,
+ normalizePath(relPath),
+ renderer,
+ size,
+ modTime.UTC().UnixNano(),
+ contentType,
+ image,
+ )
+ if err != nil {
+ log.Printf("file.thumbnail.cache.store path=%q renderer=%s error=%v", normalizePath(relPath), renderer, err)
+ }
+}
+
+func (s *Server) purgePersistedPreviewThumbnail(uid int64, relPath string) {
+ norm := normalizePath(relPath)
+	_, _ = s.db.Exec(`DELETE FROM preview_thumbnail_cache WHERE user_id = ? AND (rel_path = ? OR rel_path LIKE ? ESCAPE '\')`, uid, norm, strings.NewReplacer(`\`, `\\`, `%`, `\%`, `_`, `\_`).Replace(strings.TrimSuffix(norm, "/"))+"/%")
+}
+
+func (s *Server) purgePersistedPreviewThumbnailForUser(uid int64) {
+ _, _ = s.db.Exec(`DELETE FROM preview_thumbnail_cache WHERE user_id = ?`, uid)
+}
+
+func isSearchableTextExtension(ext string) bool {
+ _, ok := searchableTextExtensions[ext]
+ return ok
+}
+
+func isSearchableZipDocumentExtension(ext string) bool {
+ _, ok := searchableZipDocumentExtensions[ext]
+ return ok
+}
+
+func isSearchableLibreOfficeExtension(ext string) bool {
+ _, ok := searchableLibreOfficeExtensions[ext]
+ return ok
+}
+
+func isSearchableImageExtension(ext string) bool {
+ _, ok := searchableImageExtensions[ext]
+ return ok
+}
+
+func extractPlainTextFile(fullPath string, force bool) (string, bool, error) {
+ f, err := os.Open(fullPath)
+ if err != nil {
+ return "", false, err
+ }
+ defer f.Close()
+
+ data, err := io.ReadAll(io.LimitReader(f, maxSearchPlainTextBytes+1))
+ if err != nil {
+ return "", false, err
+ }
+ if len(data) > maxSearchPlainTextBytes {
+ data = data[:maxSearchPlainTextBytes]
+ }
+ if !force && !looksLikeText(data) {
+ return "", false, nil
+ }
+ return decodeSearchTextBytes(data), true, nil
+}
+
+func looksLikeText(data []byte) bool {
+ if len(data) == 0 {
+ return true
+ }
+ if hasUTF16BOM(data) {
+ return true
+ }
+ sample := data
+ if len(sample) > 8192 {
+ sample = sample[:8192]
+ }
+ if bytes.IndexByte(sample, 0) >= 0 {
+ return false
+ }
+ if utf8.Valid(sample) {
+ return true
+ }
+ printable := 0
+ for _, b := range sample {
+ switch {
+ case b == '\n' || b == '\r' || b == '\t':
+ printable++
+ case b >= 0x20 && b < 0x7f:
+ printable++
+ }
+ }
+ return printable*100 >= len(sample)*90
+}
+
+func hasUTF16BOM(data []byte) bool {
+ return len(data) >= 2 && ((data[0] == 0xff && data[1] == 0xfe) || (data[0] == 0xfe && data[1] == 0xff))
+}
+
+func decodeSearchTextBytes(data []byte) string {
+ switch {
+ case len(data) >= 3 && data[0] == 0xef && data[1] == 0xbb && data[2] == 0xbf:
+ data = data[3:]
+ case len(data) >= 2 && data[0] == 0xff && data[1] == 0xfe:
+ return decodeUTF16(data[2:], binary.LittleEndian)
+ case len(data) >= 2 && data[0] == 0xfe && data[1] == 0xff:
+ return decodeUTF16(data[2:], binary.BigEndian)
+ }
+ return string(bytes.ToValidUTF8(data, []byte(" ")))
+}
+
+func decodeUTF16(data []byte, order binary.ByteOrder) string {
+ if len(data) < 2 {
+ return ""
+ }
+ u16 := make([]uint16, 0, len(data)/2)
+ for i := 0; i+1 < len(data); i += 2 {
+ u16 = append(u16, order.Uint16(data[i:i+2]))
+ }
+ return string(utf16.Decode(u16))
+}
+
+func extractZipDocumentText(fullPath, ext string) (string, error) {
+ reader, err := zip.OpenReader(fullPath)
+ if err != nil {
+ return "", err
+ }
+ defer reader.Close()
+
+ var out strings.Builder
+ for _, file := range reader.File {
+ if out.Len() >= maxSearchExtractBytes {
+ break
+ }
+ if !searchableZipEntry(ext, file.Name) || file.FileInfo().IsDir() {
+ continue
+ }
+ rc, err := file.Open()
+ if err != nil {
+ continue
+ }
+ if err := appendXMLText(&out, io.LimitReader(rc, maxSearchExtractBytes)); err != nil {
+ rc.Close()
+ continue
+ }
+ rc.Close()
+ }
+ return out.String(), nil
+}
+
+func searchableZipEntry(ext, name string) bool {
+ name = strings.ToLower(name)
+ switch ext {
+ case ".docm", ".docx", ".dotm", ".dotx":
+ return strings.HasPrefix(name, "word/") && strings.HasSuffix(name, ".xml")
+ case ".xlsm", ".xlsx", ".xltm", ".xltx":
+ return strings.HasPrefix(name, "xl/") && strings.HasSuffix(name, ".xml")
+ case ".potm", ".potx", ".ppsm", ".ppsx", ".pptm", ".pptx":
+ return strings.HasPrefix(name, "ppt/") && strings.HasSuffix(name, ".xml")
+ case ".odp", ".ods", ".odt":
+ return name == "content.xml" || name == "styles.xml" || name == "meta.xml"
+ default:
+ return false
+ }
+}
+
+func appendXMLText(dst *strings.Builder, r io.Reader) error {
+ dec := xml.NewDecoder(r)
+ for dst.Len() < maxSearchExtractBytes {
+ tok, err := dec.Token()
+ if err == io.EOF {
+ return nil
+ }
+ if err != nil {
+ return err
+ }
+
+ data, ok := tok.(xml.CharData)
+ if !ok {
+ continue
+ }
+ text := strings.TrimSpace(string(data))
+ if text == "" {
+ continue
+ }
+ if dst.Len() > 0 {
+ dst.WriteByte(' ')
+ }
+ remaining := maxSearchExtractBytes - dst.Len()
+ if remaining <= 0 {
+ return nil
+ }
+ if len(text) > remaining {
+			text = strings.ToValidUTF8(text[:remaining], "")
+ }
+ dst.WriteString(text)
+ }
+ return nil
+}
+
+func extractPDFText(fullPath string) (string, error) {
+ return runSearchCommand("pdftotext", []string{"-q", "-enc", "UTF-8", fullPath, "-"})
+}
+
+func (s *Server) ocrLangs() string {
+ return normalizeOCRLangs(s.config.OCRLangs)
+}
+
+func extractImageOCRText(fullPath, langs string) (string, error) {
+ return runSearchCommand("tesseract", []string{fullPath, "stdout", "--psm", "6", "-l", normalizeOCRLangs(langs)})
+}
+
+func renderPDFThumbnail(fullPath string) ([]byte, error) {
+ tmpDir, err := os.MkdirTemp("", "filez-preview-pdf-*")
+ if err != nil {
+ return nil, err
+ }
+ defer os.RemoveAll(tmpDir)
+
+ outPrefix := filepath.Join(tmpDir, "thumb")
+ if err := runPreviewCommand("pdftoppm", []string{
+ "-png",
+ "-f", "1",
+ "-singlefile",
+ "-scale-to", strconv.Itoa(previewThumbnailScale),
+ fullPath,
+ outPrefix,
+ }); err != nil {
+ if fallbackErr := runPreviewCommand("pdftocairo", []string{
+ "-png",
+ "-f", "1",
+ "-singlefile",
+ "-scale-to", strconv.Itoa(previewThumbnailScale),
+ fullPath,
+ outPrefix,
+ }); fallbackErr != nil {
+ return nil, err
+ }
+ }
+
+ data, err := os.ReadFile(outPrefix + ".png")
+ if err != nil {
+ if errors.Is(err, os.ErrNotExist) {
+ return nil, fmt.Errorf("thumbnail renderer did not produce an image")
+ }
+ return nil, err
+ }
+ return data, nil
+}
+
+func renderLibreOfficeThumbnail(fullPath string) ([]byte, error) {
+ tmpDir, err := os.MkdirTemp("", "filez-preview-doc-*")
+ if err != nil {
+ return nil, err
+ }
+ defer os.RemoveAll(tmpDir)
+
+ pdfPath, err := convertLibreOfficeDocument(fullPath, tmpDir, "pdf")
+ if err != nil {
+ return nil, err
+ }
+ return renderPDFThumbnail(pdfPath)
+}
+
+func renderTextThumbnail(text string) ([]byte, error) {
+ text = compactPreviewThumbnailText(text)
+ if text == "" {
+ return nil, nil
+ }
+
+ ctx, cancel := context.WithTimeout(context.Background(), previewRenderTimeout)
+ defer cancel()
+
+ var stdout bytes.Buffer
+ stderr := &limitedBuffer{limit: 16 << 10}
+ cmd := exec.CommandContext(ctx,
+ "convert",
+ "-background", "#f3f0e8",
+ "-fill", "#111111",
+ "-font", "DejaVu-Sans-Bold",
+ "-size", "960x720",
+ "caption:"+text,
+ "png:-",
+ )
+ cmd.Stdout = &stdout
+ cmd.Stderr = stderr
+ if err := cmd.Run(); err != nil {
+ if ctx.Err() != nil {
+ return nil, ctx.Err()
+ }
+ msg := strings.TrimSpace(stderr.String())
+ if msg != "" {
+ return nil, errors.New(msg)
+ }
+ return nil, err
+ }
+ return stdout.Bytes(), nil
+}
+
// extractLibreOfficeText converts an office document to plain text with a
// headless LibreOffice run inside a throwaway temp dir, returning the first
// .txt it produced (capped at maxSearchExtractBytes).
//
// A fresh UserInstallation profile is created per call so concurrent soffice
// processes do not contend on the default profile lock.
func extractLibreOfficeText(fullPath string) (string, error) {
	tmpDir, err := os.MkdirTemp("", "filez-search-doc-*")
	if err != nil {
		return "", err
	}
	defer os.RemoveAll(tmpDir)

	profileDir := filepath.Join(tmpDir, "profile")
	if err := os.MkdirAll(profileDir, 0o755); err != nil {
		return "", err
	}

	ctx, cancel := context.WithTimeout(context.Background(), searchExtractTimeout)
	defer cancel()

	// Output is capped: soffice's own stdout is only diagnostics here.
	stdout := &limitedBuffer{limit: 8 << 10}
	stderr := &limitedBuffer{limit: 16 << 10}
	cmd := exec.CommandContext(ctx,
		"soffice",
		"-env:UserInstallation=file://"+filepath.ToSlash(profileDir),
		"--headless",
		"--nologo",
		"--nodefault",
		"--nolockcheck",
		"--nofirststartwizard",
		"--convert-to", "txt:Text",
		"--outdir", tmpDir,
		fullPath,
	)
	cmd.Stdout = stdout
	cmd.Stderr = stderr
	if err := cmd.Run(); err != nil {
		// Error preference: timeout, then whatever soffice printed (stderr
		// before stdout), then the raw exec error.
		if ctx.Err() != nil {
			return "", ctx.Err()
		}
		msg := strings.TrimSpace(stderr.String())
		if msg == "" {
			msg = strings.TrimSpace(stdout.String())
		}
		if msg != "" {
			return "", errors.New(msg)
		}
		return "", err
	}

	// soffice names the output after the input; scan the temp dir for the
	// first .txt. The "profile" subdir is skipped by the IsDir check.
	entries, err := os.ReadDir(tmpDir)
	if err != nil {
		return "", err
	}
	for _, entry := range entries {
		if entry.IsDir() || !strings.HasSuffix(strings.ToLower(entry.Name()), ".txt") {
			continue
		}
		data, err := os.ReadFile(filepath.Join(tmpDir, entry.Name()))
		if err != nil {
			return "", err
		}
		if len(data) > maxSearchExtractBytes {
			data = data[:maxSearchExtractBytes]
		}
		return decodeSearchTextBytes(data), nil
	}
	// soffice exited 0 but produced no .txt: treat as "no extractable text".
	return "", nil
}
+
+func convertLibreOfficeDocument(fullPath, tmpDir, format string) (string, error) {
+ profileDir := filepath.Join(tmpDir, "profile")
+ if err := os.MkdirAll(profileDir, 0o755); err != nil {
+ return "", err
+ }
+
+ ctx, cancel := context.WithTimeout(context.Background(), previewRenderTimeout)
+ defer cancel()
+
+ stdout := &limitedBuffer{limit: 8 << 10}
+ stderr := &limitedBuffer{limit: 16 << 10}
+ cmd := exec.CommandContext(ctx,
+ "soffice",
+ "-env:UserInstallation=file://"+filepath.ToSlash(profileDir),
+ "--headless",
+ "--nologo",
+ "--nodefault",
+ "--nolockcheck",
+ "--nofirststartwizard",
+ "--convert-to", format,
+ "--outdir", tmpDir,
+ fullPath,
+ )
+ cmd.Stdout = stdout
+ cmd.Stderr = stderr
+ if err := cmd.Run(); err != nil {
+ if ctx.Err() != nil {
+ return "", ctx.Err()
+ }
+ msg := strings.TrimSpace(stderr.String())
+ if msg == "" {
+ msg = strings.TrimSpace(stdout.String())
+ }
+ if msg != "" {
+ return "", errors.New(msg)
+ }
+ return "", err
+ }
+
+ wantExt := "." + strings.ToLower(strings.TrimPrefix(format, "."))
+ entries, err := os.ReadDir(tmpDir)
+ if err != nil {
+ return "", err
+ }
+ for _, entry := range entries {
+ if entry.IsDir() || !strings.HasSuffix(strings.ToLower(entry.Name()), wantExt) {
+ continue
+ }
+ return filepath.Join(tmpDir, entry.Name()), nil
+ }
+ return "", fmt.Errorf("libreoffice did not produce a %s file", wantExt)
+}
+
+func compactPreviewThumbnailText(text string) string {
+ text = strings.TrimSpace(text)
+ if text == "" {
+ return ""
+ }
+
+ words := strings.Fields(text)
+ if len(words) == 0 {
+ return ""
+ }
+
+ joined := strings.Join(words, " ")
+ runes := []rune(joined)
+ if len(runes) > 420 {
+ joined = string(runes[:420]) + "..."
+ }
+ return joined
+}
+
+func runSearchCommand(name string, args []string) (string, error) {
+ ctx, cancel := context.WithTimeout(context.Background(), searchExtractTimeout)
+ defer cancel()
+
+ stdout := &limitedBuffer{limit: maxSearchExtractBytes}
+ stderr := &limitedBuffer{limit: 16 << 10}
+ cmd := exec.CommandContext(ctx, name, args...)
+ cmd.Stdout = stdout
+ cmd.Stderr = stderr
+ if err := cmd.Run(); err != nil {
+ if ctx.Err() != nil {
+ return "", ctx.Err()
+ }
+ msg := strings.TrimSpace(stderr.String())
+ if msg != "" {
+ return "", errors.New(msg)
+ }
+ return "", err
+ }
+ return stdout.String(), nil
+}
+
+func runPreviewCommand(name string, args []string) error {
+ ctx, cancel := context.WithTimeout(context.Background(), previewRenderTimeout)
+ defer cancel()
+
+ stdout := &limitedBuffer{limit: 8 << 10}
+ stderr := &limitedBuffer{limit: 16 << 10}
+ cmd := exec.CommandContext(ctx, name, args...)
+ cmd.Stdout = stdout
+ cmd.Stderr = stderr
+ if err := cmd.Run(); err != nil {
+ if ctx.Err() != nil {
+ return ctx.Err()
+ }
+ msg := strings.TrimSpace(stderr.String())
+ if msg == "" {
+ msg = strings.TrimSpace(stdout.String())
+ }
+ if msg != "" {
+ return errors.New(msg)
+ }
+ return err
+ }
+ return nil
+}
+
// normalizeSearchText lowercases v, drops control characters (including NUL),
// and collapses every run of whitespace into a single space, trimming both
// ends.
func normalizeSearchText(v string) string {
	var b strings.Builder
	pendingSpace := false
	for _, r := range strings.ToLower(v) {
		switch {
		case unicode.IsSpace(r):
			pendingSpace = true
		case r == 0 || unicode.IsControl(r):
			// Drop non-whitespace control characters outright.
		default:
			// Materialize at most one separator, and never before the first
			// visible rune — this also trims leading/trailing whitespace.
			if pendingSpace && b.Len() > 0 {
				b.WriteByte(' ')
			}
			pendingSpace = false
			b.WriteRune(r)
		}
	}
	return b.String()
}
+
+func contentMatchScore(query, content string) (int, bool) {
+ if query == "" || content == "" {
+ return 0, false
+ }
+ if idx := strings.Index(content, query); idx >= 0 {
+ score := 1180 - minInt(idx, 2400)/4
+ if score < 420 {
+ score = 420
+ }
+ return score, true
+ }
+
+ terms := strings.Fields(query)
+ if len(terms) < 2 {
+ return 0, false
+ }
+ positions := make([]int, 0, len(terms))
+ score := 0
+ for _, term := range terms {
+ idx := strings.Index(content, term)
+ if idx < 0 {
+ return 0, false
+ }
+ positions = append(positions, idx)
+ score += 180
+ }
+ sort.Ints(positions)
+ span := positions[len(positions)-1] - positions[0]
+ score += 620 - minInt(span, 2400)/6
+ if score < 360 {
+ score = 360
+ }
+ return score, true
+}
+
// minInt returns the smaller of a and b.
func minInt(a, b int) int {
	if b < a {
		return b
	}
	return a
}
+
func (s *Server) handleUpload(w http.ResponseWriter, r *http.Request) {
uid := userIDFromContext(r.Context())
relDir := r.URL.Query().Get("path")
@@ -721,6 +1928,10 @@ func (s *Server) handleBatchMove(w http.ResponseWriter, r *http.Request) {
return
}
s.moveTags(uid, src, dst)
+ s.purgePersistedSearchContent(uid, src)
+ s.purgePersistedSearchContent(uid, dst)
+ s.purgePersistedPreviewThumbnail(uid, src)
+ s.purgePersistedPreviewThumbnail(uid, dst)
moved++
}
@@ -736,6 +1947,77 @@ func (s *Server) handlePreview(w http.ResponseWriter, r *http.Request) {
}
}
+func (s *Server) handleThumbnail(w http.ResponseWriter, r *http.Request) {
+ uid := userIDFromContext(r.Context())
+ rel := normalizePath(r.URL.Query().Get("path"))
+ if rel == "/" {
+ writeErr(w, http.StatusBadRequest, "path is required")
+ return
+ }
+
+ meta, err := s.storage.Stat(uid, rel)
+ if err != nil || meta.IsDir {
+ writeErr(w, http.StatusBadRequest, "file not found")
+ return
+ }
+
+ entry := FileEntry{
+ Name: meta.Name,
+ Path: rel,
+ IsDir: meta.IsDir,
+ Size: meta.Size,
+ ModTime: meta.ModTime,
+ }
+ data, err := s.extractPreviewThumbnail(uid, entry)
+ if err != nil {
+ writeErr(w, http.StatusBadRequest, err.Error())
+ return
+ }
+ if len(data) == 0 {
+ writeErr(w, http.StatusBadRequest, "thumbnail unavailable for this file type")
+ return
+ }
+
+ w.Header().Set("Cache-Control", "private, max-age=300")
+ w.Header().Set("Content-Type", "image/png")
+ http.ServeContent(w, r, entry.Name+".png", entry.ModTime, bytes.NewReader(data))
+}
+
+func (s *Server) handleContentPreview(w http.ResponseWriter, r *http.Request) {
+ uid := userIDFromContext(r.Context())
+ rel := normalizePath(r.URL.Query().Get("path"))
+ if rel == "/" {
+ writeErr(w, http.StatusBadRequest, "path is required")
+ return
+ }
+
+ meta, err := s.storage.Stat(uid, rel)
+ if err != nil || meta.IsDir {
+ writeErr(w, http.StatusBadRequest, "file not found")
+ return
+ }
+
+ entry := FileEntry{
+ Name: meta.Name,
+ Path: rel,
+ IsDir: meta.IsDir,
+ Size: meta.Size,
+ ModTime: meta.ModTime,
+ }
+ text, err := s.extractPreviewText(uid, entry)
+ if err != nil {
+ writeErr(w, http.StatusBadRequest, err.Error())
+ return
+ }
+ text = strings.TrimSpace(text)
+ if text == "" {
+ writeErr(w, http.StatusBadRequest, "preview unavailable for this file type")
+ return
+ }
+
+ writeJSON(w, http.StatusOK, map[string]any{"path": rel, "content": text})
+}
+
func (s *Server) handleReadTextFile(w http.ResponseWriter, r *http.Request) {
uid := userIDFromContext(r.Context())
rel := normalizePath(r.URL.Query().Get("path"))
@@ -1618,6 +2900,8 @@ func (s *Server) handleDelete(w http.ResponseWriter, r *http.Request) {
return
}
_, _ = s.db.Exec(`DELETE FROM file_tags WHERE user_id = ? AND (rel_path = ? OR rel_path LIKE ?)`, uid, norm, strings.TrimSuffix(norm, "/")+"/%")
+ s.purgePersistedSearchContent(uid, norm)
+ s.purgePersistedPreviewThumbnail(uid, norm)
writeJSON(w, http.StatusOK, map[string]string{"status": "deleted"})
}
@@ -1704,15 +2988,16 @@ func (s *Server) handleRename(w http.ResponseWriter, r *http.Request) {
return
}
s.moveTags(uid, src, dst)
+ s.purgePersistedSearchContent(uid, src)
+ s.purgePersistedSearchContent(uid, dst)
+ s.purgePersistedPreviewThumbnail(uid, src)
+ s.purgePersistedPreviewThumbnail(uid, dst)
writeJSON(w, http.StatusOK, map[string]any{"status": "renamed", "path": dst})
}
type shareInput struct {
- Path string `json:"path"`
- ExpiresMinutes int `json:"expiresMinutes"`
- MaxDownloads *int `json:"maxDownloads"`
- AllowPreview bool `json:"allowPreview"`
- PreferredInline bool `json:"preferredInline"`
+ Path string `json:"path"`
+ ExpiresMinutes int `json:"expiresMinutes"`
}
func (s *Server) handleCreateShareLink(w http.ResponseWriter, r *http.Request) {
@@ -1752,25 +3037,20 @@ func (s *Server) handleCreateShareLink(w http.ResponseWriter, r *http.Request) {
return
}
- var maxDownloads any
- if in.MaxDownloads != nil && *in.MaxDownloads > 0 {
- maxDownloads = *in.MaxDownloads
- }
-
expiresAt := time.Now().Add(ttl)
_, err = s.db.Exec(`INSERT INTO share_links(user_id, rel_path, token_hash, expires_at, max_downloads) VALUES (?, ?, ?, ?, ?)`,
uid,
rel,
hashToken(token),
expiresAt,
- maxDownloads,
+ nil,
)
if err != nil {
writeErr(w, http.StatusInternalServerError, "failed to create share")
return
}
- shareURL := fmt.Sprintf("%s://%s/api/share/%s", schemeOf(r), r.Host, token)
+ shareURL := fmt.Sprintf("%s://%s/share/%s", schemeOf(r), r.Host, token)
writeJSON(w, http.StatusCreated, map[string]any{
"url": shareURL,
"token": token,
@@ -1780,46 +3060,382 @@ func (s *Server) handleCreateShareLink(w http.ResponseWriter, r *http.Request) {
}
func (s *Server) handleSharedDownload(w http.ResponseWriter, r *http.Request) {
- token := mux.Vars(r)["token"]
- if strings.TrimSpace(token) == "" {
- writeErr(w, http.StatusBadRequest, "missing token")
- return
- }
-
- var uid int64
- var rel string
- var expiresAt time.Time
- var revokedAt sql.NullTime
- var maxDownloads sql.NullInt64
- var downloadCount int64
- err := s.db.QueryRow(`SELECT user_id, rel_path, expires_at, revoked_at, max_downloads, download_count FROM share_links WHERE token_hash = ?`, hashToken(token)).
- Scan(&uid, &rel, &expiresAt, &revokedAt, &maxDownloads, &downloadCount)
+ record, token, err := s.lookupActiveShare(r)
if err != nil {
- writeErr(w, http.StatusNotFound, "share link not found")
+ writeErr(w, shareHTTPStatus(err), err.Error())
return
}
- if revokedAt.Valid || expiresAt.Before(time.Now()) {
- writeErr(w, http.StatusGone, "share link expired")
- return
- }
- if maxDownloads.Valid && downloadCount >= maxDownloads.Int64 {
- writeErr(w, http.StatusGone, "share link download limit reached")
- return
+ if r.Method != http.MethodHead {
+ if _, err := s.db.Exec(`UPDATE share_links SET download_count = download_count + 1 WHERE token_hash = ?`, hashToken(token)); err != nil {
+ writeErr(w, http.StatusInternalServerError, "failed to track download")
+ return
+ }
}
- if _, err := s.db.Exec(`UPDATE share_links SET download_count = download_count + 1 WHERE token_hash = ?`, hashToken(token)); err != nil {
- writeErr(w, http.StatusInternalServerError, "failed to track download")
- return
- }
+ log.Printf("file.share.download user_id=%d path=%q ip=%q", record.UserID, normalizePath(record.RelPath), clientIP(r))
- log.Printf("file.share.download user_id=%d path=%q ip=%q", uid, normalizePath(rel), clientIP(r))
-
- if err := s.serveFile(w, r, uid, rel, true, ""); err != nil {
+ if err := s.serveFile(w, r, record.UserID, record.RelPath, true, ""); err != nil {
writeErr(w, http.StatusBadRequest, err.Error())
}
}
+func (s *Server) handleSharedPreview(w http.ResponseWriter, r *http.Request) {
+ record, _, err := s.lookupActiveShare(r)
+ if err != nil {
+ writeErr(w, shareHTTPStatus(err), err.Error())
+ return
+ }
+
+ if err := s.serveFile(w, r, record.UserID, record.RelPath, true, ""); err != nil {
+ writeErr(w, http.StatusBadRequest, err.Error())
+ }
+}
+
// handleSharedPage renders the public HTML landing page for a share link,
// including Open Graph / Twitter-card metadata so chat clients can unfurl a
// rich preview. Serves GET and HEAD.
//
// NOTE(review): the raw-string HTML templates below appear stripped of
// markup in this view — confirm their contents against the original file.
func (s *Server) handleSharedPage(w http.ResponseWriter, r *http.Request) {
	record, token, err := s.lookupActiveShare(r)
	if err != nil {
		writeSharedPageStatus(w, shareHTTPStatus(err), "Share unavailable", err.Error())
		return
	}

	meta, err := s.storage.Stat(record.UserID, record.RelPath)
	if err != nil {
		writeSharedPageStatus(w, http.StatusNotFound, "Share unavailable", "file not found")
		return
	}

	// Derive a display name from the path, falling back to storage metadata,
	// then to a generic label.
	name := path.Base(normalizePath(record.RelPath))
	if name == "." || name == "/" || name == "" {
		name = meta.Name
	}
	if name == "" {
		name = "Shared file"
	}

	ctype := mime.TypeByExtension(strings.ToLower(filepath.Ext(name)))
	pageURL := fmt.Sprintf("%s://%s/share/%s", schemeOf(r), r.Host, token)
	downloadURL := fmt.Sprintf("%s://%s/api/share/%s", schemeOf(r), r.Host, token)
	previewURL := fmt.Sprintf("%s://%s/api/share/%s/preview", schemeOf(r), r.Host, token)
	description := sharedFileDescription(meta, record.ExpiresAt)
	cardType := "summary"
	extraMeta := ""
	extraBody := ""

	// Images upgrade to a large-image card with inline preview markup;
	// videos add og:video metadata plus an inline player. Everything else
	// keeps the plain summary card.
	switch {
	case strings.HasPrefix(ctype, "image/"):
		cardType = "summary_large_image"
		extraMeta = fmt.Sprintf(`

	`, html.EscapeString(previewURL), html.EscapeString(previewURL))
		extraBody = fmt.Sprintf(`

`, html.EscapeString(previewURL), html.EscapeString(name))
	case strings.HasPrefix(ctype, "video/"):
		extraMeta = fmt.Sprintf(`

	`, html.EscapeString(previewURL), html.EscapeString(ctype))
		extraBody = fmt.Sprintf(``, html.EscapeString(previewURL))
	}

	page := fmt.Sprintf(`



	%s



	
	
	%s
	



	
FileZ Share
	
	
	%s
	%s
	
	%s
	
	

`,
		html.EscapeString(name),
		html.EscapeString(description),
		html.EscapeString(name),
		html.EscapeString(description),
		html.EscapeString(pageURL),
		html.EscapeString(cardType),
		extraMeta,
		html.EscapeString(name),
		html.EscapeString(description),
		html.EscapeString(sharedKindLabel(meta, ctype)),
		html.EscapeString(sharedExpiryLabel(record.ExpiresAt)),
		extraBody,
		html.EscapeString(downloadURL),
		html.EscapeString(downloadURL),
	)

	w.Header().Set("Content-Type", "text/html; charset=utf-8")
	w.WriteHeader(http.StatusOK)
	// HEAD gets the headers only; scrapers commonly probe with HEAD first.
	if r.Method == http.MethodHead {
		return
	}
	if _, err := io.WriteString(w, page); err != nil {
		return
	}
}
+
// shareRecord is the subset of a share_links row needed to authorize and
// serve a shared download.
type shareRecord struct {
	UserID    int64        // owner of the shared file
	RelPath   string       // shared path, relative to the owner's root
	ExpiresAt time.Time    // hard expiry; the link is dead after this instant
	RevokedAt sql.NullTime // set when the link was revoked before expiry
}
+
+func (s *Server) lookupActiveShare(r *http.Request) (shareRecord, string, error) {
+ token := strings.TrimSpace(mux.Vars(r)["token"])
+ if token == "" {
+ return shareRecord{}, "", fmt.Errorf("missing token")
+ }
+
+ var record shareRecord
+ err := s.db.QueryRow(`SELECT user_id, rel_path, expires_at, revoked_at FROM share_links WHERE token_hash = ?`, hashToken(token)).
+ Scan(&record.UserID, &record.RelPath, &record.ExpiresAt, &record.RevokedAt)
+ if err != nil {
+ if errors.Is(err, sql.ErrNoRows) {
+ return shareRecord{}, "", fmt.Errorf("share link not found")
+ }
+ return shareRecord{}, "", err
+ }
+ if record.RevokedAt.Valid || record.ExpiresAt.Before(time.Now()) {
+ return shareRecord{}, "", fmt.Errorf("share link expired")
+ }
+ return record, token, nil
+}
+
+func shareHTTPStatus(err error) int {
+ if err == nil {
+ return http.StatusOK
+ }
+ switch strings.TrimSpace(strings.ToLower(err.Error())) {
+ case "missing token":
+ return http.StatusBadRequest
+ case "share link expired", "share link download limit reached":
+ return http.StatusGone
+ case "share link not found":
+ return http.StatusNotFound
+ default:
+ return http.StatusInternalServerError
+ }
+}
+
// writeSharedPageStatus renders a minimal error page for share-link failures
// with the given HTTP status, substituting defaults for blank title or
// description.
//
// NOTE(review): the raw-string HTML template below appears stripped of
// markup in this view — confirm its contents against the original file.
func writeSharedPageStatus(w http.ResponseWriter, code int, title, description string) {
	title = strings.TrimSpace(title)
	if title == "" {
		title = "Share unavailable"
	}
	description = strings.TrimSpace(description)
	if description == "" {
		description = "This shared file could not be loaded."
	}

	page := fmt.Sprintf(`



	%s



	
	%s
	%s
	

`,
		html.EscapeString(title),
		html.EscapeString(title),
		html.EscapeString(description),
	)

	w.Header().Set("Content-Type", "text/html; charset=utf-8")
	w.WriteHeader(code)
	// Best-effort write; nothing useful to do if the client is gone.
	_, _ = io.WriteString(w, page)
}
+
+func sharedFileDescription(meta FileMeta, expiresAt time.Time) string {
+ parts := []string{sharedKindLabel(meta, mime.TypeByExtension(strings.ToLower(filepath.Ext(meta.Name))))}
+ if !meta.IsDir && meta.Size > 0 {
+ parts = append(parts, humanSize(meta.Size))
+ }
+ parts = append(parts, "Expires "+expiresAt.UTC().Format("02 Jan 2006 15:04 UTC"))
+ return strings.Join(parts, " • ")
+}
+
+func sharedKindLabel(meta FileMeta, ctype string) string {
+ if meta.IsDir {
+ return "Folder"
+ }
+ switch {
+ case strings.HasPrefix(ctype, "image/"):
+ return "Image"
+ case strings.HasPrefix(ctype, "video/"):
+ return "Video"
+ case strings.HasPrefix(ctype, "audio/"):
+ return "Audio"
+ default:
+ return "File"
+ }
+}
+
+func sharedExpiryLabel(expiresAt time.Time) string {
+ return "Expires " + expiresAt.UTC().Format("02 Jan 2006")
+}
+
// humanSize renders a byte count as a short human-readable string
// ("512 B", "1.5 MB", "12 GB"). KB values and anything >= 10 units drop the
// fractional digit.
func humanSize(size int64) string {
	if size < 1024 {
		return fmt.Sprintf("%d B", size)
	}

	scaled := float64(size)
	suffix := "B"
	for _, next := range [...]string{"KB", "MB", "GB", "TB"} {
		scaled /= 1024
		suffix = next
		if scaled < 1024 {
			break
		}
	}
	if scaled >= 10 || suffix == "KB" {
		return fmt.Sprintf("%.0f %s", scaled, suffix)
	}
	return fmt.Sprintf("%.1f %s", scaled, suffix)
}
+
// handleAdminMe reports the configured admin login name as JSON.
func (s *Server) handleAdminMe(w http.ResponseWriter, _ *http.Request) {
	writeJSON(w, http.StatusOK, map[string]string{"login": s.config.AdminLogin})
}
@@ -1879,6 +3495,8 @@ func (s *Server) handleAdminUserDelete(w http.ResponseWriter, r *http.Request) {
_, _ = s.db.Exec(`DELETE FROM refresh_tokens WHERE user_id = ?`, id)
_, _ = s.db.Exec(`UPDATE share_links SET revoked_at = CURRENT_TIMESTAMP WHERE user_id = ?`, id)
_ = s.storage.Delete(id, "/")
+ s.purgePersistedSearchContentForUser(id)
+ s.purgePersistedPreviewThumbnailForUser(id)
writeJSON(w, http.StatusOK, map[string]string{"status": "deleted"})
log.Printf("admin.user.delete user_id=%d", id)
diff --git a/backend/search_share_test.go b/backend/search_share_test.go
new file mode 100644
index 0000000..350db63
--- /dev/null
+++ b/backend/search_share_test.go
@@ -0,0 +1,487 @@
+package main
+
+import (
+ "archive/zip"
+ "bytes"
+ "encoding/json"
+ "net/http"
+ "net/http/httptest"
+ "net/url"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "github.com/gorilla/mux"
+)
+
+func TestAPISearchFilesFuzzyAcrossAllFiles(t *testing.T) {
+ t.Parallel()
+
+ s := makeTestServer(t, nil)
+ user, err := s.createUser("alice", "password123", "dracula", "auto")
+ if err != nil {
+ t.Fatalf("createUser failed: %v", err)
+ }
+ if err := s.storage.Mkdir(user.ID, "/docs"); err != nil {
+ t.Fatalf("mkdir failed: %v", err)
+ }
+ if err := s.storage.SaveBytes(user.ID, "/docs/project-plan.md", []byte("# project plan")); err != nil {
+ t.Fatalf("save bytes failed: %v", err)
+ }
+ if err := s.storage.SaveBytes(user.ID, "/notes.txt", []byte("notes")); err != nil {
+ t.Fatalf("save bytes failed: %v", err)
+ }
+
+ loginReq := httptest.NewRequest(http.MethodPost, "/api/auth/login", strings.NewReader(`{"username":"alice","password":"password123"}`))
+ loginReq.Header.Set("Content-Type", "application/json")
+ loginRec := httptest.NewRecorder()
+ s.handleLogin(loginRec, loginReq)
+ if loginRec.Code != http.StatusOK {
+ t.Fatalf("login status = %d", loginRec.Code)
+ }
+ access := cookieByName(loginRec.Result().Cookies(), "access_token")
+ if access == nil {
+ t.Fatal("missing access token")
+ }
+
+ req := httptest.NewRequest(http.MethodGet, "/api/files/search?q=prjpln&limit=10", nil)
+ req.AddCookie(access)
+ rec := httptest.NewRecorder()
+ s.authMiddleware(http.HandlerFunc(s.handleSearchFiles)).ServeHTTP(rec, req)
+
+ if rec.Code != http.StatusOK {
+ t.Fatalf("search status = %d, want %d", rec.Code, http.StatusOK)
+ }
+ var out struct {
+ Entries []FileEntry `json:"entries"`
+ }
+ if err := json.NewDecoder(rec.Body).Decode(&out); err != nil {
+ t.Fatalf("decode search response failed: %v", err)
+ }
+ if len(out.Entries) == 0 {
+ t.Fatal("expected at least one search result")
+ }
+ if out.Entries[0].Path != "/docs/project-plan.md" {
+ t.Fatalf("top search result = %q, want %q", out.Entries[0].Path, "/docs/project-plan.md")
+ }
+}
+
+func TestSharePageReturnsPublicURLAndDiscordMeta(t *testing.T) {
+ t.Parallel()
+
+ s := makeTestServer(t, nil)
+ user, err := s.createUser("alice", "password123", "dracula", "auto")
+ if err != nil {
+ t.Fatalf("createUser failed: %v", err)
+ }
+ if err := s.storage.SaveBytes(user.ID, "/cover.png", []byte("png-data")); err != nil {
+ t.Fatalf("save bytes failed: %v", err)
+ }
+
+ loginReq := httptest.NewRequest(http.MethodPost, "/api/auth/login", strings.NewReader(`{"username":"alice","password":"password123"}`))
+ loginReq.Header.Set("Content-Type", "application/json")
+ loginRec := httptest.NewRecorder()
+ s.handleLogin(loginRec, loginReq)
+ if loginRec.Code != http.StatusOK {
+ t.Fatalf("login status = %d", loginRec.Code)
+ }
+ access := cookieByName(loginRec.Result().Cookies(), "access_token")
+ if access == nil {
+ t.Fatal("missing access token")
+ }
+
+ createReq := httptest.NewRequest(http.MethodPost, "/api/files/share", bytes.NewBufferString(`{"path":"/cover.png","expiresMinutes":60}`))
+ createReq.Header.Set("Content-Type", "application/json")
+ createReq.AddCookie(access)
+ createRec := httptest.NewRecorder()
+ s.authMiddleware(http.HandlerFunc(s.handleCreateShareLink)).ServeHTTP(createRec, createReq)
+ if createRec.Code != http.StatusCreated {
+ t.Fatalf("create share status = %d, want %d", createRec.Code, http.StatusCreated)
+ }
+
+ var shareResp struct {
+ URL string `json:"url"`
+ Token string `json:"token"`
+ }
+ if err := json.NewDecoder(createRec.Body).Decode(&shareResp); err != nil {
+ t.Fatalf("decode share response failed: %v", err)
+ }
+ if !strings.Contains(shareResp.URL, "/share/") {
+ t.Fatalf("share url = %q, want public /share/ URL", shareResp.URL)
+ }
+
+ router := mux.NewRouter()
+ router.HandleFunc("/share/{token}", s.handleSharedPage).Methods(http.MethodGet)
+ pageReq := httptest.NewRequest(http.MethodGet, "/share/"+shareResp.Token, nil)
+ pageRec := httptest.NewRecorder()
+ router.ServeHTTP(pageRec, pageReq)
+
+ if pageRec.Code != http.StatusOK {
+ t.Fatalf("share page status = %d, want %d", pageRec.Code, http.StatusOK)
+ }
+ body := pageRec.Body.String()
+ if !strings.Contains(body, `property="og:title"`) {
+ t.Fatal("share page is missing og:title metadata")
+ }
+ if !strings.Contains(body, "/api/share/"+shareResp.Token+"/preview") {
+ t.Fatal("share page is missing preview metadata URL")
+ }
+}
+
// TestAPISearchFilesFindsPlainTextContent checks that search matches words
// that occur only inside a plain-text file's contents, not in its name.
func TestAPISearchFilesFindsPlainTextContent(t *testing.T) {
	t.Parallel()

	s := makeTestServer(t, nil)
	user, err := s.createUser("alice", "password123", "dracula", "auto")
	if err != nil {
		t.Fatalf("createUser failed: %v", err)
	}
	// Query terms appear only in the file body, never in the path.
	if err := s.storage.SaveBytes(user.ID, "/docs/meeting-notes.txt", []byte("Budget approval is scheduled for Monday morning.")); err != nil {
		t.Fatalf("save bytes failed: %v", err)
	}

	access := loginAccessToken(t, s, "alice", "password123")
	entries := runSearchRequest(t, s, access, "budget approval")
	if len(entries) == 0 {
		t.Fatal("expected at least one search result")
	}
	if entries[0].Path != "/docs/meeting-notes.txt" {
		t.Fatalf("top search result = %q, want %q", entries[0].Path, "/docs/meeting-notes.txt")
	}
}
+
// TestAPISearchFilesFindsDocxContent builds a minimal .docx fixture and
// checks that content search reaches into its zipped XML body.
func TestAPISearchFilesFindsDocxContent(t *testing.T) {
	t.Parallel()

	s := makeTestServer(t, nil)
	user, err := s.createUser("alice", "password123", "dracula", "auto")
	if err != nil {
		t.Fatalf("createUser failed: %v", err)
	}
	docPath := filepath.Join(t.TempDir(), "proposal.docx")
	writeDocxFixture(t, docPath, "Quarterly roadmap milestone")
	data, err := os.ReadFile(docPath)
	if err != nil {
		t.Fatalf("read docx failed: %v", err)
	}
	if err := s.storage.SaveBytes(user.ID, "/docs/proposal.docx", data); err != nil {
		t.Fatalf("save bytes failed: %v", err)
	}

	access := loginAccessToken(t, s, "alice", "password123")
	entries := runSearchRequest(t, s, access, "roadmap milestone")
	if len(entries) == 0 {
		t.Fatal("expected at least one search result")
	}
	if entries[0].Path != "/docs/proposal.docx" {
		t.Fatalf("top search result = %q, want %q", entries[0].Path, "/docs/proposal.docx")
	}
}
+
// TestAPISearchFilesFindsImageOCRContent renders the word "SEARCHABLE" into
// a PNG with ImageMagick and checks that OCR-backed search can find it.
// Skips when tesseract or convert is not installed on the host.
func TestAPISearchFilesFindsImageOCRContent(t *testing.T) {
	t.Parallel()

	if _, err := exec.LookPath("tesseract"); err != nil {
		t.Skip("tesseract not installed")
	}
	convertBinary, err := exec.LookPath("convert")
	if err != nil {
		t.Skip("ImageMagick convert not installed")
	}

	s := makeTestServer(t, nil)
	user, err := s.createUser("alice", "password123", "dracula", "auto")
	if err != nil {
		t.Fatalf("createUser failed: %v", err)
	}
	// Large bold text on a white background keeps OCR reliable.
	imagePath := filepath.Join(t.TempDir(), "searchable.png")
	cmd := exec.Command(convertBinary,
		"-background", "white",
		"-fill", "black",
		"-font", "DejaVu-Sans-Bold",
		"-pointsize", "96",
		"-size", "1400x280",
		"-gravity", "center",
		"label:SEARCHABLE",
		imagePath,
	)
	if out, err := cmd.CombinedOutput(); err != nil {
		t.Fatalf("convert failed: %v (%s)", err, strings.TrimSpace(string(out)))
	}
	data, err := os.ReadFile(imagePath)
	if err != nil {
		t.Fatalf("read image failed: %v", err)
	}
	if err := s.storage.SaveBytes(user.ID, "/images/searchable.png", data); err != nil {
		t.Fatalf("save bytes failed: %v", err)
	}

	access := loginAccessToken(t, s, "alice", "password123")
	entries := runSearchRequest(t, s, access, "searchable")
	if len(entries) == 0 {
		t.Fatal("expected at least one search result")
	}
	// Ranking relative to other hits is not asserted — only presence.
	found := false
	for _, entry := range entries {
		if entry.Path == "/images/searchable.png" {
			found = true
			break
		}
	}
	if !found {
		t.Fatalf("expected OCR result for %q in search results", "/images/searchable.png")
	}
}
+
// TestAPISearchFilesUsesPersistedOCRCache seeds a search_content_cache row
// directly and checks search serves it from the database. The stored file is
// deliberately not a real image: a hit proves the persisted cache was used
// rather than OCR being re-run.
func TestAPISearchFilesUsesPersistedOCRCache(t *testing.T) {
	t.Parallel()

	s := makeTestServer(t, nil)
	user, err := s.createUser("alice", "password123", "dracula", "auto")
	if err != nil {
		t.Fatalf("createUser failed: %v", err)
	}
	if err := s.storage.SaveBytes(user.ID, "/images/cached.png", []byte("not-a-real-image")); err != nil {
		t.Fatalf("save bytes failed: %v", err)
	}
	// The cache row must match the file's current size and mtime to be
	// considered fresh.
	meta, err := s.storage.Stat(user.ID, "/images/cached.png")
	if err != nil {
		t.Fatalf("stat failed: %v", err)
	}
	if _, err := s.db.Exec(
		`INSERT INTO search_content_cache(user_id, rel_path, extractor, file_size, mod_time_ns, content) VALUES (?, ?, ?, ?, ?, ?)`,
		user.ID,
		"/images/cached.png",
		"ocr",
		meta.Size,
		meta.ModTime.UTC().UnixNano(),
		"vault phrase",
	); err != nil {
		t.Fatalf("insert search cache failed: %v", err)
	}
	// Reset the in-memory cache so the lookup must go through the DB row.
	s.searchContent = newSearchContentCache(256)

	access := loginAccessToken(t, s, "alice", "password123")
	entries := runSearchRequest(t, s, access, "vault phrase")
	if len(entries) == 0 {
		t.Fatal("expected persisted OCR cache search result")
	}
	if entries[0].Path != "/images/cached.png" {
		t.Fatalf("top search result = %q, want %q", entries[0].Path, "/images/cached.png")
	}
}
+
// TestAPIContentPreviewReturnsDocxText checks that the content-preview
// endpoint extracts and returns the text inside a minimal .docx fixture.
func TestAPIContentPreviewReturnsDocxText(t *testing.T) {
	t.Parallel()

	s := makeTestServer(t, nil)
	user, err := s.createUser("alice", "password123", "dracula", "auto")
	if err != nil {
		t.Fatalf("createUser failed: %v", err)
	}
	docPath := filepath.Join(t.TempDir(), "preview.docx")
	writeDocxFixture(t, docPath, "Quarterly preview memo")
	data, err := os.ReadFile(docPath)
	if err != nil {
		t.Fatalf("read docx failed: %v", err)
	}
	if err := s.storage.SaveBytes(user.ID, "/docs/preview.docx", data); err != nil {
		t.Fatalf("save bytes failed: %v", err)
	}

	access := loginAccessToken(t, s, "alice", "password123")
	req := httptest.NewRequest(http.MethodGet, "/api/files/content-preview?path="+url.QueryEscape("/docs/preview.docx"), nil)
	req.AddCookie(access)
	rec := httptest.NewRecorder()
	s.authMiddleware(http.HandlerFunc(s.handleContentPreview)).ServeHTTP(rec, req)
	if rec.Code != http.StatusOK {
		t.Fatalf("preview status = %d, want %d", rec.Code, http.StatusOK)
	}

	var out struct {
		Content string `json:"content"`
	}
	if err := json.NewDecoder(rec.Body).Decode(&out); err != nil {
		t.Fatalf("decode preview response failed: %v", err)
	}
	// Contains (not equals): extraction may add surrounding whitespace.
	if !strings.Contains(out.Content, "Quarterly preview memo") {
		t.Fatalf("preview content = %q, want extracted doc text", out.Content)
	}
}
+
// TestAPIThumbnailReturnsDocxPreviewImage exercises the full soffice -> pdf
// -> png thumbnail pipeline against a .docx fixture. Skips unless soffice,
// pdftoppm, and convert are all installed.
//
// NOTE(review): unlike its siblings this test has no t.Parallel() —
// presumably to avoid concurrent LibreOffice runs; confirm before adding it.
func TestAPIThumbnailReturnsDocxPreviewImage(t *testing.T) {
	if _, err := exec.LookPath("soffice"); err != nil {
		t.Skip("soffice not installed")
	}
	if _, err := exec.LookPath("pdftoppm"); err != nil {
		t.Skip("pdftoppm not installed")
	}
	if _, err := exec.LookPath("convert"); err != nil {
		t.Skip("ImageMagick convert not installed")
	}

	s := makeTestServer(t, nil)
	user, err := s.createUser("alice", "password123", "dracula", "auto")
	if err != nil {
		t.Fatalf("createUser failed: %v", err)
	}
	docPath := filepath.Join(t.TempDir(), "thumbnail.docx")
	writeDocxFixture(t, docPath, "Quarterly thumbnail memo")
	data, err := os.ReadFile(docPath)
	if err != nil {
		t.Fatalf("read docx failed: %v", err)
	}
	if err := s.storage.SaveBytes(user.ID, "/docs/thumbnail.docx", data); err != nil {
		t.Fatalf("save bytes failed: %v", err)
	}

	access := loginAccessToken(t, s, "alice", "password123")
	req := httptest.NewRequest(http.MethodGet, "/api/files/thumbnail?path="+url.QueryEscape("/docs/thumbnail.docx"), nil)
	req.AddCookie(access)
	rec := httptest.NewRecorder()
	s.authMiddleware(http.HandlerFunc(s.handleThumbnail)).ServeHTTP(rec, req)
	if rec.Code != http.StatusOK {
		t.Fatalf("thumbnail status = %d, want %d (%s)", rec.Code, http.StatusOK, strings.TrimSpace(rec.Body.String()))
	}
	// Asserts PNG delivery, not image contents.
	if ctype := rec.Header().Get("Content-Type"); !strings.HasPrefix(ctype, "image/png") {
		t.Fatalf("thumbnail content-type = %q, want image/png", ctype)
	}
	if rec.Body.Len() == 0 {
		t.Fatal("expected non-empty thumbnail image")
	}
}
+
// TestAPIThumbnailUsesPersistedCache seeds a preview_thumbnail_cache row and
// checks the thumbnail endpoint serves those exact bytes. The stored file is
// not a real docx, so a successful response proves the cache was used rather
// than a renderer being invoked.
func TestAPIThumbnailUsesPersistedCache(t *testing.T) {
	t.Parallel()

	s := makeTestServer(t, nil)
	user, err := s.createUser("alice", "password123", "dracula", "auto")
	if err != nil {
		t.Fatalf("createUser failed: %v", err)
	}
	if err := s.storage.SaveBytes(user.ID, "/docs/cached.docx", []byte("not-a-real-docx")); err != nil {
		t.Fatalf("save bytes failed: %v", err)
	}
	// Cache freshness is keyed on the file's current size and mtime.
	meta, err := s.storage.Stat(user.ID, "/docs/cached.docx")
	if err != nil {
		t.Fatalf("stat failed: %v", err)
	}

	// A minimal but valid PNG (IHDR declares a 1x1 image).
	cachedImage := []byte{
		0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a,
		0x00, 0x00, 0x00, 0x0d, 0x49, 0x48, 0x44, 0x52,
		0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01,
		0x08, 0x06, 0x00, 0x00, 0x00, 0x1f, 0x15, 0xc4,
		0x89, 0x00, 0x00, 0x00, 0x0d, 0x49, 0x44, 0x41,
		0x54, 0x78, 0x9c, 0x63, 0xf8, 0xcf, 0xc0, 0x00,
		0x00, 0x03, 0x01, 0x01, 0x00, 0xc9, 0xfe, 0x92,
		0xef, 0x00, 0x00, 0x00, 0x00, 0x49, 0x45, 0x4e,
		0x44, 0xae, 0x42, 0x60, 0x82,
	}
	if _, err := s.db.Exec(
		`INSERT INTO preview_thumbnail_cache(user_id, rel_path, renderer, file_size, mod_time_ns, content_type, image) VALUES (?, ?, ?, ?, ?, ?, ?)`,
		user.ID,
		"/docs/cached.docx",
		cacheableThumbnailRenderer(FileEntry{Name: "cached.docx"}),
		meta.Size,
		meta.ModTime.UTC().UnixNano(),
		"image/png",
		cachedImage,
	); err != nil {
		t.Fatalf("insert thumbnail cache failed: %v", err)
	}

	access := loginAccessToken(t, s, "alice", "password123")
	req := httptest.NewRequest(http.MethodGet, "/api/files/thumbnail?path="+url.QueryEscape("/docs/cached.docx"), nil)
	req.AddCookie(access)
	rec := httptest.NewRecorder()
	s.authMiddleware(http.HandlerFunc(s.handleThumbnail)).ServeHTTP(rec, req)
	if rec.Code != http.StatusOK {
		t.Fatalf("thumbnail status = %d, want %d (%s)", rec.Code, http.StatusOK, strings.TrimSpace(rec.Body.String()))
	}
	if !bytes.Equal(rec.Body.Bytes(), cachedImage) {
		t.Fatal("thumbnail response did not use persisted cache bytes")
	}
}
+
+func loginAccessToken(t *testing.T, s *Server, username, password string) *http.Cookie {
+ t.Helper()
+
+ loginReq := httptest.NewRequest(http.MethodPost, "/api/auth/login", strings.NewReader(`{"username":"`+username+`","password":"`+password+`"}`))
+ loginReq.Header.Set("Content-Type", "application/json")
+ loginRec := httptest.NewRecorder()
+ s.handleLogin(loginRec, loginReq)
+ if loginRec.Code != http.StatusOK {
+ t.Fatalf("login status = %d", loginRec.Code)
+ }
+ access := cookieByName(loginRec.Result().Cookies(), "access_token")
+ if access == nil {
+ t.Fatal("missing access token")
+ }
+ return access
+}
+
+func runSearchRequest(t *testing.T, s *Server, access *http.Cookie, query string) []FileEntry {
+ t.Helper()
+
+ req := httptest.NewRequest(http.MethodGet, "/api/files/search?q="+url.QueryEscape(query)+"&limit=20", nil)
+ req.AddCookie(access)
+ rec := httptest.NewRecorder()
+ s.authMiddleware(http.HandlerFunc(s.handleSearchFiles)).ServeHTTP(rec, req)
+ if rec.Code != http.StatusOK {
+ t.Fatalf("search status = %d, want %d", rec.Code, http.StatusOK)
+ }
+
+ var out struct {
+ Entries []FileEntry `json:"entries"`
+ }
+ if err := json.NewDecoder(rec.Body).Decode(&out); err != nil {
+ t.Fatalf("decode search response failed: %v", err)
+ }
+ return out.Entries
+}
+
// writeDocxFixture writes a minimal OOXML .docx — a zip containing
// [Content_Types].xml, the package rels, and a single-paragraph
// word/document.xml embedding text — for use as a search/preview fixture.
//
// NOTE(review): the raw-string XML payloads below appear stripped of markup
// in this view — confirm their contents against the original file.
func writeDocxFixture(t *testing.T, filePath, text string) {
	t.Helper()

	f, err := os.Create(filePath)
	if err != nil {
		t.Fatalf("create docx failed: %v", err)
	}
	defer f.Close()

	zw := zip.NewWriter(f)
	writeZipFixtureFile(t, zw, "[Content_Types].xml", `




`)
	writeZipFixtureFile(t, zw, "_rels/.rels", `


`)
	// The fixture text is spliced directly into the document body.
	writeZipFixtureFile(t, zw, "word/document.xml", `


	`+text+`

`)
	if err := zw.Close(); err != nil {
		t.Fatalf("close docx failed: %v", err)
	}
}
+
+func writeZipFixtureFile(t *testing.T, zw *zip.Writer, name, content string) {
+ t.Helper()
+
+ w, err := zw.Create(name)
+ if err != nil {
+ t.Fatalf("create zip entry %q failed: %v", name, err)
+ }
+ if _, err := w.Write([]byte(content)); err != nil {
+ t.Fatalf("write zip entry %q failed: %v", name, err)
+ }
+}
diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx
index bf868dd..fdf0d11 100644
--- a/frontend/src/App.tsx
+++ b/frontend/src/App.tsx
@@ -1,6 +1,5 @@
import {
Archive,
- Copy,
File as FileIcon,
FileCode2,
FileSpreadsheet,
@@ -8,8 +7,12 @@ import {
Folder,
HardDriveUpload,
Image as ImageIcon,
+ LayoutGrid,
+ List,
LogOut,
Music2,
+ PanelLeftClose,
+ PanelLeftOpen,
Search,
Settings2,
Shield,
@@ -17,7 +20,7 @@ import {
Video,
} from 'lucide-react'
import { Suspense, lazy, useCallback, useEffect, useMemo, useRef, useState } from 'react'
-import type { ComponentType, FormEvent } from 'react'
+import type { ComponentType, DragEvent as ReactDragEvent, FormEvent, MouseEvent as ReactMouseEvent } from 'react'
import { Badge } from './components/ui/badge'
import { Button } from './components/ui/button'
@@ -28,32 +31,25 @@ import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from '.
import { Tabs, TabsList, TabsTrigger } from './components/ui/tabs'
import { Textarea } from './components/ui/textarea'
import { ContextMenu, ContextMenuContent, ContextMenuItem, ContextMenuSeparator, ContextMenuTrigger } from './components/ui/context-menu'
+import { cn } from './lib/utils'
type Theme = 'dracula' | 'nord' | 'monokai' | 'solarized' | 'github' | 'bureau'
type ColorMode = 'auto' | 'light' | 'dark'
type Lang = 'en' | 'ru' | 'de'
type Route = 'landing' | 'drive' | 'admin'
type DriveView = 'all' | 'folders' | 'documents' | 'media' | 'archives' | 'tagged' | 'recent'
+type DriveLayout = 'list' | 'grid'
type SortKey = 'name' | 'size' | 'modTime' | 'kind'
type ArchiveFormat = 'zip' | 'rar' | 'tar.gz' | 'lz4'
type User = { id: number; username: string; theme: Theme; colorMode: ColorMode; archiveFormat: ArchiveFormat }
type FileEntry = { name: string; path: string; isDir: boolean; size: number; modTime: string; tags?: string[] }
-type ProtocolProfile = {
- host: string
- port: number
- username: string
- publicIP?: string
- passivePorts?: string
- explicitTLS?: boolean
- forceTLS?: boolean
-}
-type ProtocolInfo = { ftp?: ProtocolProfile; ftps?: ProtocolProfile }
type MarkdownRendererProps = { children?: string }
type CachedUIPrefs = { username: string; theme: Theme; colorMode: ColorMode; archiveFormat: ArchiveFormat }
const AdminPanel = lazy(() => import('./lazy/AdminPanel'))
const TransferSection = lazy(() => import('./lazy/TransferSection'))
+const driveDesktopMinWidth = 1400
function resolveModuleValue(mod: unknown): T | null {
if (!mod || typeof mod !== 'object') return null
@@ -118,6 +114,8 @@ const dict: Record> = {
accountSubtitle: 'Workspace',
settings: 'Settings',
language: 'Language',
+ showSidebar: 'Show bar',
+ hideSidebar: 'Hide bar',
theme: 'Color scheme',
mode: 'Mode',
archiveFormat: 'Folder download format',
@@ -131,9 +129,12 @@ const dict: Record> = {
solarized: 'Solarized',
github: 'GitHub',
bureau: 'Bureau',
- search: 'Search in this folder',
+ search: 'Search names and file contents',
+ listView: 'List view',
+ gridView: 'Grid view',
items: 'items',
empty: 'No files in this folder',
+ emptySearch: 'No matching files',
allFiles: 'All files',
folders: 'Folders',
documents: 'Documents',
@@ -147,6 +148,7 @@ const dict: Record> = {
addTag: 'Add tag',
openInBrowser: 'Open in browser',
preview: 'Preview',
+ previewUnavailable: 'Preview unavailable for this file.',
editMarkdown: 'Edit markdown',
writeMarkdown: 'Write markdown',
save: 'Save',
@@ -187,8 +189,9 @@ const dict: Record> = {
controlNode: 'Control node',
authFeatureDesc: 'Hardened authentication and session handling.',
storageFeatureDesc: 'Private storage lanes for each account.',
- shareFeatureDesc: 'Expiring links with resumable downloads.',
- remoteAccessDesc: 'Direct transfer access for desktop clients.',
+ shareFeatureDesc: 'Expiring links with Discord-friendly previews.',
+ fuzzyFinder: 'Fuzzy finder',
+ fuzzyFinderDesc: 'Global search across names, document text, and OCR-ready images.',
controlSurface: 'FileZ control surface',
},
de: {
@@ -230,6 +233,8 @@ const dict: Record> = {
accountSubtitle: 'Arbeitsbereich',
settings: 'Einstellungen',
language: 'Sprache',
+ showSidebar: 'Leiste zeigen',
+ hideSidebar: 'Leiste ausblenden',
theme: 'Farbschema',
mode: 'Modus',
archiveFormat: 'Format fur Ordner-Download',
@@ -243,9 +248,12 @@ const dict: Record> = {
solarized: 'Solarized',
github: 'GitHub',
bureau: 'Bureau',
- search: 'In diesem Ordner suchen',
+ search: 'Namen und Dateiinhalte durchsuchen',
+ listView: 'Listenansicht',
+ gridView: 'Rasteransicht',
items: 'Elemente',
empty: 'Keine Dateien in diesem Ordner',
+ emptySearch: 'Keine passenden Dateien',
allFiles: 'Alle Dateien',
folders: 'Ordner',
documents: 'Dokumente',
@@ -259,6 +267,7 @@ const dict: Record> = {
addTag: 'Tag hinzufugen',
openInBrowser: 'Im Browser offnen',
preview: 'Vorschau',
+ previewUnavailable: 'Vorschau fur diese Datei nicht verfugbar.',
editMarkdown: 'Markdown bearbeiten',
writeMarkdown: 'Markdown schreiben',
save: 'Speichern',
@@ -299,8 +308,9 @@ const dict: Record> = {
controlNode: 'Kontrollknoten',
authFeatureDesc: 'Geharte Authentifizierung und Sitzungsverwaltung.',
storageFeatureDesc: 'Private Speicherbereiche fur jedes Konto.',
- shareFeatureDesc: 'Ablaufende Links mit fortsetzbaren Downloads.',
- remoteAccessDesc: 'Direkter Transferzugang fur Desktop-Clients.',
+ shareFeatureDesc: 'Ablaufende Links mit Discord-Vorschau.',
+ fuzzyFinder: 'Fuzzy Finder',
+ fuzzyFinderDesc: 'Globale Suche uber Namen, Dokumenttexte und OCR-Bilder.',
controlSurface: 'FileZ Kontrolloberflache',
},
ru: {
@@ -342,6 +352,8 @@ const dict: Record> = {
accountSubtitle: 'Рабочее пространство',
settings: 'Настройки',
language: 'Язык',
+ showSidebar: 'Показать панель',
+ hideSidebar: 'Скрыть панель',
theme: 'Цветовая схема',
mode: 'Режим',
archiveFormat: 'Формат скачивания папок',
@@ -355,9 +367,12 @@ const dict: Record> = {
solarized: 'Solarized',
github: 'GitHub',
bureau: 'Bureau',
- search: 'Поиск в текущей папке',
+ search: 'Поиск по именам и содержимому',
+ listView: 'Список',
+ gridView: 'Сетка',
items: 'объектов',
empty: 'В этой папке нет файлов',
+ emptySearch: 'Ничего не найдено',
allFiles: 'Все файлы',
folders: 'Папки',
documents: 'Документы',
@@ -371,6 +386,7 @@ const dict: Record> = {
addTag: 'Добавить тег',
openInBrowser: 'Открыть в браузере',
preview: 'Предпросмотр',
+ previewUnavailable: 'Предпросмотр для этого файла недоступен.',
editMarkdown: 'Редактировать Markdown',
writeMarkdown: 'Писать Markdown',
save: 'Сохранить',
@@ -411,8 +427,9 @@ const dict: Record> = {
controlNode: 'Контрольный узел',
authFeatureDesc: 'Защищенная аутентификация и управление сессиями.',
storageFeatureDesc: 'Приватные зоны хранения для каждого аккаунта.',
- shareFeatureDesc: 'Ссылки с истечением срока и докачкой.',
- remoteAccessDesc: 'Прямой доступ для настольных клиентов.',
+ shareFeatureDesc: 'Ссылки с превью для Discord.',
+ fuzzyFinder: 'Fuzzy Finder',
+ fuzzyFinderDesc: 'Глобальный поиск по именам, тексту документов и OCR по изображениям.',
controlSurface: 'Контрольная панель FileZ',
},
}
@@ -490,6 +507,7 @@ const mediaExt = new Set(['mp4', 'mov', 'mkv', 'avi'])
const audioExt = new Set(['mp3', 'wav', 'flac', 'ogg'])
const archiveExt = new Set(['zip', 'rar', '7z', 'tar', 'gz', 'lz4'])
const markdownExt = new Set(['md', 'markdown'])
+const officePreviewExt = new Set(['doc', 'docm', 'docx', 'dotm', 'dotx', 'odt', 'ods', 'odp', 'pdf', 'pps', 'ppsm', 'ppsx', 'ppt', 'pptm', 'pptx', 'rtf', 'xls', 'xlsm', 'xlsx', 'xltm', 'xltx'])
function fileExt(name: string): string {
const i = name.lastIndexOf('.')
@@ -509,6 +527,14 @@ function driveUrl(p: string): string {
return `/drive?p=${encodeURIComponent(p)}`
}
+function filePreviewUrl(rel: string): string {
+ return `/api/files/preview?path=${encodeURIComponent(rel)}`
+}
+
+function fileThumbnailUrl(rel: string): string {
+ return `/api/files/thumbnail?path=${encodeURIComponent(rel)}`
+}
+
function fileKind(file: FileEntry): string {
if (file.isDir) return 'folder'
const ext = fileExt(file.name)
@@ -522,17 +548,17 @@ function fileKind(file: FileEntry): string {
return 'file'
}
-function KindIcon({ file }: { file: FileEntry }) {
+function KindIcon({ file, className = 'h-4 w-4' }: { file: FileEntry; className?: string }) {
const kind = fileKind(file)
- if (kind === 'folder') return
- if (kind === 'image') return
- if (kind === 'code') return
- if (kind === 'doc') return
- if (kind === 'sheet') return
- if (kind === 'video') return
- if (kind === 'audio') return
- if (kind === 'archive') return
- return
+ if (kind === 'folder') return
+ if (kind === 'image') return
+ if (kind === 'code') return
+ if (kind === 'doc') return
+ if (kind === 'sheet') return
+ if (kind === 'video') return
+ if (kind === 'audio') return
+ if (kind === 'archive') return
+ return
}
function GoogleIcon({ className }: { className?: string }) {
@@ -589,6 +615,13 @@ const formatSize = (n: number) => {
return `${(n / (1024 * 1024 * 1024)).toFixed(1)} GB`
}
+const parentPath = (rel: string) => {
+ const normalized = rel.trim() || '/'
+ const idx = normalized.lastIndexOf('/')
+ if (idx <= 0) return '/'
+ return normalized.slice(0, idx)
+}
+
function isMarkdownFile(name: string): boolean {
return markdownExt.has(fileExt(name))
}
@@ -601,6 +634,87 @@ function isVideoFile(name: string): boolean {
return mediaExt.has(fileExt(name))
}
+function isPdfFile(name: string): boolean {
+ return fileExt(name) === 'pdf'
+}
+
+function supportsExtractedPreview(name: string): boolean {
+ const ext = fileExt(name)
+ return codeExt.has(ext) || docExt.has(ext) || sheetExt.has(ext) || officePreviewExt.has(ext)
+}
+
+function supportsRenderedGridThumbnail(name: string): boolean {
+ return officePreviewExt.has(fileExt(name))
+}
+
+function GridFileFallbackVisual({ file, extLabel, kind }: { file: FileEntry; extLabel: string; kind: string }) {
+ return (
+
+ )
+}
+
+function GridFileVisual({ file }: { file: FileEntry }) {
+ const thumbKey = `${file.path}:${file.modTime}`
+ const [failedThumbKey, setFailedThumbKey] = useState('')
+ const kind = fileKind(file)
+ const extLabel = file.isDir ? 'DIR' : (fileExt(file.name) || kind).toUpperCase()
+ const thumbFailed = failedThumbKey === thumbKey
+
+ if (file.isDir) {
+ return (
+
+ )
+ }
+
+ if (isImageFile(file.name)) {
+ return (
+
+
})
+
+ {extLabel}
+
+
+ )
+ }
+
+ if (!thumbFailed && supportsRenderedGridThumbnail(file.name)) {
+ return (
+
+
})
setFailedThumbKey(thumbKey)}
+ />
+
+ {extLabel}
+
+
+
+
+
+ )
+ }
+
+ return
+}
+
export default function App() {
const [route, setRoute] = useState(routeFromPath(window.location.pathname))
@@ -615,6 +729,7 @@ export default function App() {
const [path, setPath] = useState('/')
const [query, setQuery] = useState('')
const [view, setView] = useState('all')
+ const [layout, setLayout] = useState('list')
const [activeTag, setActiveTag] = useState('')
const [sortKey, setSortKey] = useState('name')
const [sortDir, setSortDir] = useState<'asc' | 'desc'>('asc')
@@ -638,17 +753,21 @@ export default function App() {
const [renamePath, setRenamePath] = useState('')
const [renameName, setRenameName] = useState('')
const [shareTTL, setShareTTL] = useState('1440')
- const [shareLimit, setShareLimit] = useState('')
const [settingsOpen, setSettingsOpen] = useState(false)
const [previewPath, setPreviewPath] = useState('')
const [previewOpen, setPreviewOpen] = useState(false)
+ const [previewText, setPreviewText] = useState('')
+ const [previewLoading, setPreviewLoading] = useState(false)
+ const [previewError, setPreviewError] = useState('')
const [editorOpen, setEditorOpen] = useState(false)
const [editorPath, setEditorPath] = useState('')
const [editorContent, setEditorContent] = useState('')
const [editorSaving, setEditorSaving] = useState(false)
const [MarkdownRenderer, setMarkdownRenderer] = useState | null>(null)
- const [protocolInfo, setProtocolInfo] = useState(null)
+ const [searchResults, setSearchResults] = useState([])
+ const [searchLoading, setSearchLoading] = useState(false)
const [dragActive, setDragActive] = useState(false)
+ const [sidebarOpen, setSidebarOpen] = useState(() => window.innerWidth >= driveDesktopMinWidth)
const [selectedPaths, setSelectedPaths] = useState>({})
const [paintSelect, setPaintSelect] = useState<{ active: boolean; value: boolean }>({ active: false, value: true })
const [selectionAnchorPath, setSelectionAnchorPath] = useState('')
@@ -659,6 +778,8 @@ export default function App() {
const fileInputRef = useRef(null)
const folderInputRef = useRef(null)
const searchRef = useRef(null)
+ const searchRequestRef = useRef(0)
+ const previewRequestRef = useRef(0)
const driveHistoryRef = useRef(['/'])
const driveHistoryIndexRef = useRef(0)
@@ -717,27 +838,6 @@ export default function App() {
document.documentElement.classList.toggle('dark', effectiveMode === 'dark')
}, [effectiveTheme, effectiveMode])
- useEffect(() => {
- const onKey = (e: KeyboardEvent) => {
- const tag = (e.target as HTMLElement | null)?.tagName
- if (tag === 'INPUT' || tag === 'TEXTAREA') return
- if (route !== 'drive') return
- if (e.key === '/') {
- e.preventDefault()
- searchRef.current?.focus()
- } else if (e.key.toLowerCase() === 'u') {
- fileInputRef.current?.click()
- } else if (e.key.toLowerCase() === 'n') {
- setFolderDialog(true)
- } else if (e.key.toLowerCase() === 'r') {
- void loadFiles(path)
- }
- }
- window.addEventListener('keydown', onKey)
- return () => window.removeEventListener('keydown', onKey)
- // eslint-disable-next-line react-hooks/exhaustive-deps
- }, [route, path])
-
useEffect(() => {
void bootstrap()
// eslint-disable-next-line react-hooks/exhaustive-deps
@@ -775,7 +875,7 @@ export default function App() {
}, [draggingPaths.length, dragClientY])
useEffect(() => {
- const current = new Set(files.map((f) => f.path))
+ const current = new Set((query.trim() ? searchResults : files).map((f) => f.path))
setSelectedPaths((prev) => {
const next: Record = {}
for (const k of Object.keys(prev)) {
@@ -783,10 +883,19 @@ export default function App() {
}
return next
})
- }, [files])
+ }, [files, query, searchResults])
useEffect(() => {
- if (!editorOpen || MarkdownRenderer) return
+ const onResize = () => {
+ if (window.innerWidth >= driveDesktopMinWidth) setSidebarOpen(true)
+ }
+ window.addEventListener('resize', onResize)
+ return () => window.removeEventListener('resize', onResize)
+ }, [])
+
+ useEffect(() => {
+ if (MarkdownRenderer) return
+ if (!editorOpen && !(previewOpen && isMarkdownFile(previewPath))) return
let cancelled = false
void import('react-markdown')
@@ -806,7 +915,7 @@ export default function App() {
return () => {
cancelled = true
}
- }, [editorOpen, MarkdownRenderer])
+ }, [editorOpen, MarkdownRenderer, previewOpen, previewPath])
const loadFiles = useCallback(async (nextPath: string, opts?: { syncUrl?: boolean; replace?: boolean; trackHistory?: boolean }) => {
const data = await api<{ path: string; entries: FileEntry[] }>(`/api/files?path=${encodeURIComponent(nextPath)}`)
@@ -842,6 +951,78 @@ export default function App() {
setUsers(data.users)
}, [])
+ const runSearch = useCallback(async (rawQuery: string) => {
+ const term = rawQuery.trim()
+ if (!term) {
+ searchRequestRef.current += 1
+ setSearchResults([])
+ setSearchLoading(false)
+ return [] as FileEntry[]
+ }
+
+ const requestId = searchRequestRef.current + 1
+ searchRequestRef.current = requestId
+ setSearchLoading(true)
+
+ try {
+ const data = await api<{ entries: FileEntry[] }>(`/api/files/search?q=${encodeURIComponent(term)}&limit=120`)
+ if (requestId !== searchRequestRef.current) return [] as FileEntry[]
+ setSearchResults(data.entries)
+ setErr('')
+ return data.entries
+ } finally {
+ if (requestId === searchRequestRef.current) {
+ setSearchLoading(false)
+ }
+ }
+ }, [])
+
+ const clearSearch = useCallback(() => {
+ searchRequestRef.current += 1
+ setQuery('')
+ setSearchResults([])
+ setSearchLoading(false)
+ }, [])
+
+ const openFolder = useCallback(async (nextPath: string) => {
+ clearSearch()
+ await loadFiles(nextPath)
+ }, [clearSearch, loadFiles])
+
+ const refreshVisibleEntries = useCallback(async () => {
+ const term = query.trim()
+ if (term) {
+ await runSearch(term)
+ return
+ }
+ await loadFiles(path)
+ }, [loadFiles, path, query, runSearch])
+
+ useEffect(() => {
+ if (route !== 'drive' || !user) {
+ searchRequestRef.current += 1
+ setSearchResults([])
+ setSearchLoading(false)
+ return
+ }
+
+ const term = query.trim()
+ if (!term) {
+ searchRequestRef.current += 1
+ setSearchResults([])
+ setSearchLoading(false)
+ return
+ }
+
+ const timer = window.setTimeout(() => {
+ void runSearch(term).catch((x) => {
+ setErr(x instanceof Error ? x.message : 'search failed')
+ })
+ }, 140)
+
+ return () => window.clearTimeout(timer)
+ }, [query, route, runSearch, user])
+
const bootstrap = useCallback(async () => {
setBootstrapping(true)
try {
@@ -855,15 +1036,12 @@ export default function App() {
}
setCachedUIPrefs(nextPrefs)
writeCachedUIPrefs(nextPrefs)
- const protocols = await api('/api/user/protocols')
- setProtocolInfo(protocols)
const initial = routeFromPath(window.location.pathname) === 'drive' ? drivePathFromLocation() : '/'
driveHistoryRef.current = [initial]
driveHistoryIndexRef.current = 0
await loadFiles(initial, { syncUrl: routeFromPath(window.location.pathname) !== 'drive' })
} catch {
setUser(null)
- setProtocolInfo(null)
}
try {
const data = await api<{ login: string }>('/api/admin/me')
@@ -897,8 +1075,6 @@ export default function App() {
}
setCachedUIPrefs(nextPrefs)
writeCachedUIPrefs(nextPrefs)
- const protocols = await api('/api/user/protocols')
- setProtocolInfo(protocols)
setUsername('')
setPassword('')
await loadFiles('/')
@@ -959,7 +1135,7 @@ export default function App() {
return
}
await api('/api/files/upload?path=' + encodeURIComponent(path), { method: 'POST', body })
- await loadFiles(path)
+ await refreshVisibleEntries()
}
async function createFolder() {
@@ -967,7 +1143,7 @@ export default function App() {
await api('/api/files/folder', { method: 'POST', body: JSON.stringify({ path, name: folderName.trim() }) })
setFolderName('')
setFolderDialog(false)
- await loadFiles(path)
+ await refreshVisibleEntries()
}
async function renameEntry() {
@@ -979,15 +1155,14 @@ export default function App() {
setRenameDialog(false)
setRenamePath('')
setRenameName('')
- await loadFiles(path)
+ await refreshVisibleEntries()
}
async function createShare() {
const expiresMinutes = Number(shareTTL || '1440')
- const maxDownloads = shareLimit.trim() ? Number(shareLimit.trim()) : null
const data = await api<{ url: string }>('/api/files/share', {
method: 'POST',
- body: JSON.stringify({ path: sharePath, expiresMinutes, maxDownloads }),
+ body: JSON.stringify({ path: sharePath, expiresMinutes }),
})
await navigator.clipboard.writeText(data.url)
setShareDialog(false)
@@ -1020,26 +1195,80 @@ export default function App() {
await loadUsers()
}
- function protocolURI(scheme: 'ftp' | 'ftps', profile: ProtocolProfile): string {
- return `${scheme}://${encodeURIComponent(profile.username)}@${profile.host}:${profile.port}`
- }
-
async function removeTag(pathValue: string, tag: string) {
await api(`/api/files/tags?path=${encodeURIComponent(pathValue)}&tag=${encodeURIComponent(tag)}`, { method: 'DELETE' })
- await loadFiles(path)
+ await refreshVisibleEntries()
+ }
+
+ function openInlinePreview(file: FileEntry) {
+ previewRequestRef.current += 1
+ setPreviewText('')
+ setPreviewError('')
+ setPreviewLoading(false)
+ setPreviewPath(file.path)
+ setPreviewOpen(true)
+ }
+
+ async function openExtractedPreview(file: FileEntry) {
+ const requestId = previewRequestRef.current + 1
+ previewRequestRef.current = requestId
+ setPreviewPath(file.path)
+ setPreviewText('')
+ setPreviewError('')
+ setPreviewLoading(true)
+ setPreviewOpen(true)
+
+ try {
+ const data = await api<{ path: string; content: string }>(`/api/files/content-preview?path=${encodeURIComponent(file.path)}`)
+ if (requestId !== previewRequestRef.current) return
+ setPreviewText(data.content)
+ } catch (error) {
+ if (requestId !== previewRequestRef.current) return
+ setPreviewError(error instanceof Error ? error.message : t('previewUnavailable'))
+ } finally {
+ if (requestId === previewRequestRef.current) {
+ setPreviewLoading(false)
+ }
+ }
+ }
+
+ async function openMarkdownPreview(file: FileEntry) {
+ const requestId = previewRequestRef.current + 1
+ previewRequestRef.current = requestId
+ setPreviewPath(file.path)
+ setPreviewText('')
+ setPreviewError('')
+ setPreviewLoading(true)
+ setPreviewOpen(true)
+
+ try {
+ const data = await api<{ path: string; content: string }>('/api/files/text?path=' + encodeURIComponent(file.path))
+ if (requestId !== previewRequestRef.current) return
+ setPreviewText(data.content)
+ } catch (error) {
+ if (requestId !== previewRequestRef.current) return
+ setPreviewError(error instanceof Error ? error.message : t('previewUnavailable'))
+ } finally {
+ if (requestId === previewRequestRef.current) {
+ setPreviewLoading(false)
+ }
+ }
}
async function openFile(file: FileEntry) {
if (isMarkdownFile(file.name)) {
- await openMarkdownEditor(file)
+ await openMarkdownPreview(file)
return
}
- if (isImageFile(file.name) || isVideoFile(file.name)) {
- setPreviewPath(file.path)
- setPreviewOpen(true)
+ if (isImageFile(file.name) || isVideoFile(file.name) || isPdfFile(file.name)) {
+ openInlinePreview(file)
return
}
- window.open(`/api/files/preview?path=${encodeURIComponent(file.path)}`, '_blank', 'noopener,noreferrer')
+ if (supportsExtractedPreview(file.name)) {
+ await openExtractedPreview(file)
+ return
+ }
+ window.open(filePreviewUrl(file.path), '_blank', 'noopener,noreferrer')
}
async function openMarkdownEditor(file: FileEntry) {
@@ -1054,12 +1283,79 @@ export default function App() {
setEditorSaving(true)
try {
await api('/api/files/text', { method: 'PUT', body: JSON.stringify({ path: editorPath, content: editorContent }) })
- await loadFiles(path)
+ await refreshVisibleEntries()
} finally {
setEditorSaving(false)
}
}
+ function handleEntryMouseDown(file: FileEntry, e: ReactMouseEvent) {
+ if (e.button !== 0) return
+ if (isSelectionIgnoredTarget(e.target)) return
+ if (e.shiftKey) {
+ setPaintSelect({ active: true, value: true })
+ selectVisibleRange(file.path, true)
+ return
+ }
+ if (selectedPaths[file.path]) {
+ setPaintSelect({ active: true, value: true })
+ setSelectionAnchorPath(file.path)
+ return
+ }
+ setPaintSelect({ active: true, value: true })
+ setSelectionAnchorPath(file.path)
+ setSelectedPaths((prev) => ({ ...prev, [file.path]: true }))
+ }
+
+ function handleEntryDoubleClick(file: FileEntry, e: ReactMouseEvent) {
+ if (isSelectionIgnoredTarget(e.target)) return
+ if (file.isDir) {
+ void openFolder(file.path)
+ return
+ }
+ void openFile(file)
+ }
+
+ function handleEntryDragStart(file: FileEntry, e: ReactDragEvent) {
+ const paths = selectedPaths[file.path] ? selectedList : [file.path]
+ setDraggingPaths(paths)
+ setDragClientY(e.clientY)
+ e.dataTransfer.setData('text/plain', paths.join('\n'))
+ e.dataTransfer.effectAllowed = 'move'
+ const ghost = makeDragImage(paths)
+ e.dataTransfer.setDragImage(ghost, 16, 16)
+ window.setTimeout(() => ghost.remove(), 0)
+ }
+
+ function handleEntryDragOver(file: FileEntry, e: ReactDragEvent) {
+ if (!file.isDir || draggingPaths.length === 0) return
+ if (draggingPaths.includes(file.path)) return
+ e.preventDefault()
+ setDragClientY(e.clientY)
+ e.dataTransfer.dropEffect = 'move'
+ if (dropFolderPath !== file.path) setDropFolderPath(file.path)
+ }
+
+ function handleEntryDrop(file: FileEntry, e: ReactDragEvent) {
+ if (!file.isDir || draggingPaths.length === 0) return
+ e.preventDefault()
+ e.stopPropagation()
+ const paths = draggingPaths
+ void movePathsTo(file.path, paths)
+ setDragClientY(null)
+ }
+
+ function handleEntryMouseEnter(file: FileEntry, e: ReactMouseEvent) {
+ if (!paintSelect.active) return
+ if ((e.buttons & 1) !== 1) return
+ if (isSelectionIgnoredTarget(e.target)) return
+ if (e.shiftKey && selectionAnchorPath) {
+ selectVisibleRange(file.path, true)
+ return
+ }
+ setSelectedPaths((prev) => ({ ...prev, [file.path]: paintSelect.value }))
+ }
+
async function createMarkdownFile() {
let i = 1
let candidate = path === '/' ? '/new-note.md' : `${path}/new-note.md`
@@ -1069,7 +1365,7 @@ export default function App() {
candidate = path === '/' ? `/new-note-${i}.md` : `${path}/new-note-${i}.md`
}
await api('/api/files/text', { method: 'PUT', body: JSON.stringify({ path: candidate, content: '# New note\n' }) })
- await loadFiles(path)
+ await refreshVisibleEntries()
setEditorPath(candidate)
setEditorContent('# New note\n')
setEditorOpen(true)
@@ -1114,7 +1410,7 @@ export default function App() {
}
setSelectedPaths({})
setSelectionAnchorPath('')
- await loadFiles(path)
+ await refreshVisibleEntries()
}
async function movePathsTo(destination: string, paths: string[]) {
@@ -1128,7 +1424,7 @@ export default function App() {
setDragClientY(null)
setSelectedPaths({})
setSelectionAnchorPath('')
- await loadFiles(path)
+ await refreshVisibleEntries()
}
function makeDragImage(paths: string[]): HTMLDivElement {
@@ -1173,15 +1469,15 @@ export default function App() {
const filesByPath = useMemo(() => {
const out: Record = {}
for (const f of files) out[f.path] = f
+ for (const f of searchResults) out[f.path] = f
return out
- }, [files])
- const previewEntry = useMemo(() => files.find((f) => f.path === previewPath) ?? null, [files, previewPath])
+ }, [files, searchResults])
+ const previewEntry = useMemo(() => filesByPath[previewPath] ?? null, [filesByPath, previewPath])
const selectedCount = useMemo(() => Object.values(selectedPaths).filter(Boolean).length, [selectedPaths])
const selectedList = useMemo(() => Object.keys(selectedPaths).filter((p) => selectedPaths[p]), [selectedPaths])
+ const searchActive = query.trim().length > 0
const visibleFiles = useMemo(() => {
- let list = [...files]
- const q = query.trim().toLowerCase()
- if (q) list = list.filter((f) => f.name.toLowerCase().includes(q))
+ let list = [...(searchActive ? searchResults : files)]
if (view === 'folders') list = list.filter((f) => f.isDir)
if (view === 'documents') list = list.filter((f) => !f.isDir && ['doc', 'sheet', 'code'].includes(fileKind(f)))
@@ -1196,16 +1492,18 @@ export default function App() {
}
if (activeTag) list = list.filter((f) => (f.tags ?? []).includes(activeTag))
- const factor = sortDir === 'asc' ? 1 : -1
- list.sort((a, b) => {
- if (a.isDir !== b.isDir) return a.isDir ? -1 : 1
- if (sortKey === 'name') return a.name.localeCompare(b.name) * factor
- if (sortKey === 'size') return (a.size - b.size) * factor
- if (sortKey === 'modTime') return (new Date(a.modTime).getTime() - new Date(b.modTime).getTime()) * factor
- return fileKind(a).localeCompare(fileKind(b)) * factor
- })
+ if (!searchActive) {
+ const factor = sortDir === 'asc' ? 1 : -1
+ list.sort((a, b) => {
+ if (a.isDir !== b.isDir) return a.isDir ? -1 : 1
+ if (sortKey === 'name') return a.name.localeCompare(b.name) * factor
+ if (sortKey === 'size') return (a.size - b.size) * factor
+ if (sortKey === 'modTime') return (new Date(a.modTime).getTime() - new Date(b.modTime).getTime()) * factor
+ return fileKind(a).localeCompare(fileKind(b)) * factor
+ })
+ }
return list
- }, [files, query, sortKey, sortDir, view, activeTag])
+ }, [activeTag, files, searchActive, searchResults, sortDir, sortKey, view])
const visiblePathIndex = useMemo(() => {
const out: Record = {}
@@ -1215,8 +1513,6 @@ export default function App() {
return out
}, [visibleFiles])
- const routeLabel = route === 'admin' ? '/admin' : route === 'drive' ? '/drive' : '/'
-
function selectVisibleRange(toPath: string, keepExisting = true) {
const anchor = selectionAnchorPath || toPath
const fromIdx = visiblePathIndex[anchor]
@@ -1256,16 +1552,13 @@ export default function App() {
-
-
-
+
+
+
-
-
{t('brand')}
-
- {t('shellTagline')}
-
+
@@ -1275,6 +1568,18 @@ export default function App() {
{t('admin')}
+ {route === 'drive' && user ? (
+
+ ) : null}
- {protocolInfo?.ftp || protocolInfo?.ftps ? (
-
-
{t('remoteAccess')}
- {protocolInfo.ftp ? (
-
-
-
{t('ftpAccess')}
-
-
-
{t('protocolHost')}: {protocolInfo.ftp.host}
-
{t('protocolPort')}: {protocolInfo.ftp.port}
-
{t('username')}: {protocolInfo.ftp.username}
-
{t('security')}: {t('plainFtp')}
-
{t('useAccountPassword')}
-
- ) : null}
- {protocolInfo.ftps ? (
-
-
-
{t('ftpsAccess')}
-
-
-
{t('protocolHost')}: {protocolInfo.ftps.host}
-
{t('protocolPort')}: {protocolInfo.ftps.port}
-
{t('username')}: {protocolInfo.ftps.username}
-
{t('security')}: {t('explicitTls')} ({protocolInfo.ftps.forceTLS ? t('required') : t('optional')})
-
{t('useAccountPassword')}
-
- ) : null}
-
- ) : null}
-
{t('remoteAccess')}
-
FTP
-
{t('remoteAccessDesc')}
+
{t('fuzzyFinder')}
+
{t('allFiles')}
+
{t('fuzzyFinderDesc')}
@@ -1480,125 +1749,173 @@ export default function App() {
) : route === 'drive' ? (
user ? (
-
-
- {t('loading')}...}>
- fileInputRef.current?.click()}
- onUploadFolder={() => folderInputRef.current?.click()}
- folderDialog={folderDialog}
- setFolderDialog={setFolderDialog}
- path={path}
- folderName={folderName}
- setFolderName={setFolderName}
- onCreateFolder={() => void createFolder()}
- view={view}
- setView={setView}
- filesCount={files.length}
- activeTag={activeTag}
- setActiveTag={setActiveTag}
- tagCounts={tagCounts}
- selectedCount={selectedCount}
- onDownloadSelected={() => void downloadPaths(selectedList)}
- />
-
-
+
+
+
+ {t('loading')}...}>
+ fileInputRef.current?.click()}
+ onUploadFolder={() => folderInputRef.current?.click()}
+ folderDialog={folderDialog}
+ setFolderDialog={setFolderDialog}
+ path={path}
+ folderName={folderName}
+ setFolderName={setFolderName}
+ onCreateFolder={() => void createFolder()}
+ view={view}
+ setView={setView}
+ filesCount={files.length}
+ activeTag={activeTag}
+ setActiveTag={setActiveTag}
+ tagCounts={tagCounts}
+ selectedCount={selectedCount}
+ onDownloadSelected={() => void downloadPaths(selectedList)}
+ />
+
+
+
-
-
-
-
-
-
- {routeLabel}
- {selectedCount > 0 ? {selectedCount} {t('selected')} : null}
-
+
+
+
+
-
-
-
-
setQuery(e.target.value)}
- placeholder={t('search')}
- />
+
+
+
+
+ setQuery(e.target.value)}
+ placeholder={t('search')}
+ />
+
+
+
+
+
+
+
+ {searchActive ? (
+ <>
+
{t('allFiles')}
+
/
+
{searchLoading ? `${t('loading')}...` : `${visibleFiles.length} ${t('items')}`}
+ >
+ ) : (
+ <>
+
+ {crumbs.map((crumb, idx) => {
+ const p = '/' + crumbs.slice(0, idx + 1).join('/')
+ return (
+
+ /
+
+
+ )
+ })}
+ >
+ )}
+
-
-
- {crumbs.map((crumb, idx) => {
- const p = '/' + crumbs.slice(0, idx + 1).join('/')
- return (
-
- /
-
-
- )
- })}
-
-
{t('shortcuts')}
-
-
+
- {
- if (e.button !== 0) return
- if (isSelectionIgnoredTarget(e.target)) return
- const target = e.target as HTMLElement
- if (!target.closest('[data-file-row="true"]')) {
- setSelectedPaths({})
- setSelectionAnchorPath('')
- }
- }}
- onDragOver={(e) => {
- e.preventDefault()
- setDragActive(true)
- if (draggingPaths.length > 0) setDragClientY(e.clientY)
- }}
- onDragLeave={(e) => {
- e.preventDefault()
- setDragActive(false)
- }}
- onDrop={(e) => {
- e.preventDefault()
- setDragActive(false)
- if (draggingPaths.length > 0) {
- setDraggingPaths([])
- setDropFolderPath('')
- setDragClientY(null)
- return
- }
- void upload(e.dataTransfer.files)
- }}
- >
+
+
+ {
+ if (e.button !== 0) return
+ if (isSelectionIgnoredTarget(e.target)) return
+ const target = e.target as HTMLElement
+ if (!target.closest('[data-file-row="true"]')) {
+ setSelectedPaths({})
+ setSelectionAnchorPath('')
+ }
+ }}
+ onDragOver={(e) => {
+ e.preventDefault()
+ setDragActive(true)
+ if (draggingPaths.length > 0) setDragClientY(e.clientY)
+ }}
+ onDragLeave={(e) => {
+ e.preventDefault()
+ setDragActive(false)
+ }}
+ onDrop={(e) => {
+ e.preventDefault()
+ setDragActive(false)
+ if (draggingPaths.length > 0) {
+ setDraggingPaths([])
+ setDropFolderPath('')
+ setDragClientY(null)
+ return
+ }
+ void upload(e.dataTransfer.files)
+ }}
+ >
-
-
-
+
-
setShareLimit(e.target.value)} placeholder={t('limit')} />
@@ -1658,15 +1972,26 @@ export default function App() {
-
-
-
-
-
-
+ {layout === 'list' ? (
+
+
+
+
+
+ ) : null}
{visibleFiles.length === 0 ? (
-
{t('empty')}
+
+ {searchActive && searchLoading ? `${t('loading')}...` : t(searchActive ? 'emptySearch' : 'empty')}
+
+ ) : layout === 'grid' ? (
+
+ {visibleFiles.map((f) => (
+
+
+ handleEntryMouseDown(f, e)}
+ onDoubleClick={(e) => handleEntryDoubleClick(f, e)}
+ onDragStart={(e) => handleEntryDragStart(f, e)}
+ onDragEnd={() => {
+ setDraggingPaths([])
+ setDropFolderPath('')
+ setDragClientY(null)
+ }}
+ onDragOver={(e) => handleEntryDragOver(f, e)}
+ onDragLeave={() => {
+ if (dropFolderPath === f.path) setDropFolderPath('')
+ }}
+ onDrop={(e) => handleEntryDrop(f, e)}
+ onMouseEnter={(e) => handleEntryMouseEnter(f, e)}
+ >
+
+
+
+
+
+
+ {searchActive ? (
+
{parentPath(f.path)}
+ ) : null}
+
+ {(f.tags?.length ?? 0) > 0 ? (
+
+ {(f.tags ?? []).slice(0, 4).map((tag) => (
+
+ #{tag}
+
+
+ ))}
+
+ ) : (
+
+ )}
+
+ {f.isDir ? '—' : formatSize(f.size)}
+ {new Date(f.modTime).toLocaleDateString()}
+
+
+
+
+
+ void downloadPaths(selectedPaths[f.path] ? selectedList : [f.path])}>{t('download')}
+ {!f.isDir ? { setSharePath(f.path); setShareDialog(true) }}>{t('share')} : null}
+ {
+ if (f.isDir) {
+ void openFolder(f.path)
+ } else {
+ void openFile(f)
+ }
+ }}
+ >
+ {f.isDir ? t('open') : t('preview')}
+
+ {!f.isDir ? window.open(filePreviewUrl(f.path), '_blank', 'noopener,noreferrer')}>{t('openInBrowser')} : null}
+ {isMarkdownFile(f.name) ? void openMarkdownEditor(f)}>{t('editMarkdown')} : null}
+
+ {
+ setRenamePath(f.path)
+ setRenameName(f.name)
+ setRenameDialog(true)
+ }}
+ >{t('rename')}
+
+ void deletePaths([f.path])}>{t('delete')}
+
+
+ ))}
+
) : (
visibleFiles.map((f) => (
@@ -1727,81 +2172,29 @@ export default function App() {
data-file-row="true"
draggable
className={`group grid items-center gap-4 border-[3px] border-border bg-card p-5 shadow-[4px_4px_0_hsl(var(--shadow-strong))] transition-[transform,box-shadow,background-color,border-color,opacity] duration-150 hover:-translate-x-1 hover:-translate-y-1 hover:bg-muted hover:shadow-[8px_8px_0_hsl(var(--shadow-strong))] lg:grid-cols-[minmax(0,1fr)_140px_190px] ${selectedPaths[f.path] ? 'border-primary bg-primary/10 shadow-[8px_8px_0_hsl(var(--shadow-strong))]' : ''} ${dropFolderPath === f.path ? 'border-primary bg-accent/35' : ''} ${draggingPaths.includes(f.path) ? 'opacity-70' : ''} ${paintSelect.active ? 'select-none' : ''}`}
- onMouseDown={(e) => {
- if (e.button !== 0) return
- if (isSelectionIgnoredTarget(e.target)) return
- if (e.shiftKey) {
- setPaintSelect({ active: true, value: true })
- selectVisibleRange(f.path, true)
- return
- }
- if (selectedPaths[f.path]) {
- setPaintSelect({ active: true, value: true })
- setSelectionAnchorPath(f.path)
- return
- }
- setPaintSelect({ active: true, value: true })
- setSelectionAnchorPath(f.path)
- setSelectedPaths((prev) => ({ ...prev, [f.path]: true }))
- }}
- onDoubleClick={(e) => {
- if (isSelectionIgnoredTarget(e.target)) return
- if (f.isDir) {
- void loadFiles(f.path)
- } else {
- void openFile(f)
- }
- }}
- onDragStart={(e) => {
- const paths = selectedPaths[f.path] ? selectedList : [f.path]
- setDraggingPaths(paths)
- setDragClientY(e.clientY)
- e.dataTransfer.setData('text/plain', paths.join('\n'))
- e.dataTransfer.effectAllowed = 'move'
- const ghost = makeDragImage(paths)
- e.dataTransfer.setDragImage(ghost, 16, 16)
- window.setTimeout(() => ghost.remove(), 0)
- }}
+ onMouseDown={(e) => handleEntryMouseDown(f, e)}
+ onDoubleClick={(e) => handleEntryDoubleClick(f, e)}
+ onDragStart={(e) => handleEntryDragStart(f, e)}
onDragEnd={() => {
setDraggingPaths([])
setDropFolderPath('')
setDragClientY(null)
}}
- onDragOver={(e) => {
- if (!f.isDir || draggingPaths.length === 0) return
- if (draggingPaths.includes(f.path)) return
- e.preventDefault()
- setDragClientY(e.clientY)
- e.dataTransfer.dropEffect = 'move'
- if (dropFolderPath !== f.path) setDropFolderPath(f.path)
- }}
+ onDragOver={(e) => handleEntryDragOver(f, e)}
onDragLeave={() => {
if (dropFolderPath === f.path) setDropFolderPath('')
}}
- onDrop={(e) => {
- if (!f.isDir || draggingPaths.length === 0) return
- e.preventDefault()
- e.stopPropagation()
- const paths = draggingPaths
- void movePathsTo(f.path, paths)
- setDragClientY(null)
- }}
- onMouseEnter={(e) => {
- if (!paintSelect.active) return
- if ((e.buttons & 1) !== 1) return
- if (isSelectionIgnoredTarget(e.target)) return
- if (e.shiftKey && selectionAnchorPath) {
- selectVisibleRange(f.path, true)
- return
- }
- setSelectedPaths((prev) => ({ ...prev, [f.path]: paintSelect.value }))
- }}
+ onDrop={(e) => handleEntryDrop(f, e)}
+ onMouseEnter={(e) => handleEntryMouseEnter(f, e)}
>
+ {searchActive ? (
+
{parentPath(f.path)}
+ ) : null}
{(f.tags?.length ?? 0) > 0 ? (
{(f.tags ?? []).map((tag) => (
@@ -1838,6 +2231,7 @@ export default function App() {
)}
+
{selectedList.length > 0 ? (
@@ -1852,8 +2246,8 @@ export default function App() {
folderInputRef.current?.click()}>{t('uploadFolder')}
-
-
+
+
) : bootstrapping ? (
authLoadingCard
diff --git a/frontend/src/lazy/TransferSection.tsx b/frontend/src/lazy/TransferSection.tsx
index 0706d8a..02912f4 100644
--- a/frontend/src/lazy/TransferSection.tsx
+++ b/frontend/src/lazy/TransferSection.tsx
@@ -49,7 +49,6 @@ export default function TransferSection(props: Props) {
onDownloadSelected,
} = props
- const pathLabel = path === '/' ? t('root') : path
const viewButton = (active: boolean) =>
cn(
'border-[3px] px-4 py-3 text-left text-xs font-black uppercase tracking-[0.14em] transition-[transform,box-shadow,background-color] duration-150',
@@ -60,15 +59,9 @@ export default function TransferSection(props: Props) {
return (
-
-
-
-
{t('accountSubtitle')}
-
{username}
-
-
-
-
{pathLabel}
+
+
+
{username}
{selectedCount > 0 ?
{selectedCount} {t('selected')} : null}
diff --git a/frontend/tailwind.config.ts b/frontend/tailwind.config.ts
index 684862c..7ee7940 100644
--- a/frontend/tailwind.config.ts
+++ b/frontend/tailwind.config.ts
@@ -13,7 +13,7 @@ export default {
},
extend: {
screens: {
- lg: '600px',
+ wide: '1400px',
},
colors: {
background: 'hsl(var(--background))',