package filestorage

import (
	"context"
	"crypto/md5"
	"encoding/hex"
	"reflect"

	// The gosec warning can be ignored: we don't need a cryptographically secure hash function here.
	// SHA-1 has a low chance of collisions and performs better than SHA-256.
	//nolint:gosec
	"crypto/sha1"
	"fmt"
	"strings"
	"time"

	"github.com/grafana/grafana/pkg/infra/db"
	"github.com/grafana/grafana/pkg/infra/log"
	"github.com/grafana/grafana/pkg/services/sqlstore/migrator"
)
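
// file is the xorm model for a row in the `file` table. Folder entries are
// stored as marker rows whose path ends with the path delimiter.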
type file struct {
	Path                 string    `xorm:"path"`
	PathHash             string    `xorm:"path_hash"`
	ParentFolderPathHash string    `xorm:"parent_folder_path_hash"`
	Contents             []byte    `xorm:"contents"`
	ETag                 string    `xorm:"etag"`
	CacheControl         string    `xorm:"cache_control"`
	ContentDisposition   string    `xorm:"content_disposition"`
	Updated              time.Time `xorm:"updated"`
	Created              time.Time `xorm:"created"`
	Size                 int64     `xorm:"size"`
	MimeType             string    `xorm:"mime_type"`
}
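
// Column sets used when reading from the `file` table; fileColsNoContents
// leaves out the contents column so listings and metadata reads don't load
// file bodies unnecessarily.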
var (
	fileColsNoContents = []string{"path", "path_hash", "parent_folder_path_hash", "etag", "cache_control", "content_disposition", "updated", "created", "size", "mime_type"}
	allFileCols        = append([]string{"contents"}, fileColsNoContents...)
)
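
// fileMeta is the xorm model for a row in the `file_meta` table; each row
// holds a single key/value property for the file identified by PathHash.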
type fileMeta struct {
	PathHash string `xorm:"path_hash"`
	Key      string `xorm:"key"`
	Value    string `xorm:"value"`
}
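
// dbFileStorage is a FileStorage implementation backed by the Grafana SQL database.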
type dbFileStorage struct {
	db  db.DB
	log log.Logger
}
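
// createPathHash returns the hex-encoded SHA-1 hash of the lowercased path,
// so path lookups are effectively case-insensitive.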
func createPathHash(path string) (string, error) {
	hasher := sha1.New()
	if _, err := hasher.Write([]byte(strings.ToLower(path))); err != nil {
		return "", err
	}

	return fmt.Sprintf("%x", hasher.Sum(nil)), nil
}
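
// createContentsHash returns the hex-encoded MD5 hash of contents; it is used
// as the stored ETag.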
func createContentsHash(contents []byte) string {
	hash := md5.Sum(contents)
	return hex.EncodeToString(hash[:])
}
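
// NewDbStorage returns a database-backed FileStorage wrapped by newWrapper,
// which applies the given PathFilter and root folder.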
func NewDbStorage(log log.Logger, db db.DB, filter PathFilter, rootFolder string) FileStorage {
	return newWrapper(log, &dbFileStorage{
		log: log,
		db:  db,
	}, filter, rootFolder)
}
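
// getProperties loads all file_meta rows matching the given path hashes and
// groups them into a map of path hash -> property key -> value.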
func (s dbFileStorage) getProperties(sess *db.Session, pathHashes []string) (map[string]map[string]string, error) {
	attributesByPath := make(map[string]map[string]string)

	entities := make([]*fileMeta, 0)
	if err := sess.Table("file_meta").In("path_hash", pathHashes).Find(&entities); err != nil {
		return nil, err
	}

	for _, entity := range entities {
		if _, ok := attributesByPath[entity.PathHash]; !ok {
			attributesByPath[entity.PathHash] = make(map[string]string)
		}
		attributesByPath[entity.PathHash][entity.Key] = entity.Value
	}

	return attributesByPath, nil
}
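
// Get returns the file stored under path together with its metadata
// properties. The boolean return value reports whether the file was found;
// contents are only loaded when options.WithContents is set.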
func (s dbFileStorage) Get(ctx context.Context, path string, options *GetFileOptions) (*File, bool, error) {
	var result *File

	pathHash, err := createPathHash(path)
	if err != nil {
		return nil, false, err
	}
	err = s.db.WithDbSession(ctx, func(sess *db.Session) error {
		table := &file{}

		sess.Table("file")
		if options.WithContents {
			sess.Cols(allFileCols...)
		} else {
			sess.Cols(fileColsNoContents...)
		}

		exists, err := sess.Where("path_hash = ?", pathHash).Get(table)
		if err != nil {
			return err
		}
		if !exists {
			return nil
		}

		var meta = make([]*fileMeta, 0)
		if err := sess.Table("file_meta").Where("path_hash = ?", pathHash).Find(&meta); err != nil {
			return err
		}

		var metaProperties = make(map[string]string, len(meta))

		for i := range meta {
			metaProperties[meta[i].Key] = meta[i].Value
		}

		contents := table.Contents
		if contents == nil {
			contents = make([]byte, 0)
		}

		result = &File{
			Contents: contents,
			FileMetadata: FileMetadata{
				Name:       getName(table.Path),
				FullPath:   table.Path,
				Created:    table.Created,
				Properties: metaProperties,
				Modified:   table.Updated,
				Size:       table.Size,
				MimeType:   table.MimeType,
			},
		}
		return err
	})

	return result, result != nil, err
}
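
// Delete removes the file stored under filePath and its metadata within a
// single transaction.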
func (s dbFileStorage) Delete(ctx context.Context, filePath string) error {
	pathHash, err := createPathHash(filePath)
	if err != nil {
		return err
	}
	err = s.db.WithTransactionalDbSession(ctx, func(sess *db.Session) error {
		deletedFilesCount, err := sess.Table("file").Where("path_hash = ?", pathHash).Delete(&file{})
		if err != nil {
			return err
		}

		deletedMetaCount, err := sess.Table("file_meta").Where("path_hash = ?", pathHash).Delete(&fileMeta{})
		if err != nil {
			if rollErr := sess.Rollback(); rollErr != nil {
				return fmt.Errorf("failed to roll back transaction due to error: %s: %w", rollErr, err)
			}

			return err
		}

		s.log.Info("Deleted file", "path", filePath, "deletedMetaCount", deletedMetaCount, "deletedFilesCount", deletedFilesCount)
		return err
	})

	return err
}
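
// Upsert creates or updates the file under cmd.Path. Contents, size and ETag
// are only touched when cmd.Contents is non-nil, and when cmd.Properties is
// non-empty it replaces the properties currently stored for the file.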
func (s dbFileStorage) Upsert(ctx context.Context, cmd *UpsertFileCommand) error {
	now := time.Now()
	pathHash, err := createPathHash(cmd.Path)
	if err != nil {
		return err
	}

	err = s.db.WithTransactionalDbSession(ctx, func(sess *db.Session) error {
		existing := &file{}
		exists, err := sess.Table("file").Where("path_hash = ?", pathHash).Get(existing)
		if err != nil {
			return err
		}

		if exists {
			existing.Updated = now
			if cmd.Contents != nil {
				contents := cmd.Contents
				existing.Contents = contents
				existing.MimeType = cmd.MimeType
				existing.ETag = createContentsHash(contents)
				existing.ContentDisposition = cmd.ContentDisposition
				existing.CacheControl = cmd.CacheControl
				existing.Size = int64(len(contents))
			}

			_, err = sess.Where("path_hash = ?", pathHash).Update(existing)
			if err != nil {
				return err
			}
		} else {
			contentsToInsert := make([]byte, 0)
			if cmd.Contents != nil {
				contentsToInsert = cmd.Contents
			}

			parentFolderPath := getParentFolderPath(cmd.Path)
			parentFolderPathHash, err := createPathHash(parentFolderPath)
			if err != nil {
				return err
			}

			file := &file{
				Path:                 cmd.Path,
				PathHash:             pathHash,
				ParentFolderPathHash: parentFolderPathHash,
				Contents:             contentsToInsert,
				ContentDisposition:   cmd.ContentDisposition,
				CacheControl:         cmd.CacheControl,
				ETag:                 createContentsHash(contentsToInsert),
				MimeType:             cmd.MimeType,
				Size:                 int64(len(contentsToInsert)),
				Updated:              now,
				Created:              now,
			}
			if _, err = sess.Insert(file); err != nil {
				return err
			}
		}

		if len(cmd.Properties) != 0 {
			if err = upsertProperties(s.db.GetDialect(), sess, now, cmd, pathHash); err != nil {
				if rollbackErr := sess.Rollback(); rollbackErr != nil {
					s.log.Error("Failed while rolling back upsert", "path", cmd.Path, "error", rollbackErr)
				}
				return err
			}
		}

		return err
	})

	return err
}
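
// upsertProperties replaces all properties stored for the given path hash with
// the entries from cmd.Properties.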
func upsertProperties(dialect migrator.Dialect, sess *db.Session, now time.Time, cmd *UpsertFileCommand, pathHash string) error {
	fileMeta := &fileMeta{}
	_, err := sess.Table("file_meta").Where("path_hash = ?", pathHash).Delete(fileMeta)
	if err != nil {
		return err
	}

	for key, val := range cmd.Properties {
		if err := upsertProperty(dialect, sess, now, pathHash, key, val); err != nil {
			return err
		}
	}
	return nil
}
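
// upsertProperty inserts or updates a single key/value property for the given
// path hash. The key column is quoted through the dialect because `key` is a
// reserved word in some databases (e.g. MySQL).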
func upsertProperty(dialect migrator.Dialect, sess *db.Session, now time.Time, pathHash string, key string, val string) error {
	existing := &fileMeta{}

	keyEqualsCondition := fmt.Sprintf("%s = ?", dialect.Quote("key"))
	exists, err := sess.Table("file_meta").Where("path_hash = ?", pathHash).Where(keyEqualsCondition, key).Get(existing)
	if err != nil {
		return err
	}

	if exists {
		existing.Value = val
		_, err = sess.Where("path_hash = ?", pathHash).Where(keyEqualsCondition, key).Update(existing)
	} else {
		_, err = sess.Insert(&fileMeta{
			PathHash: pathHash,
			Key:      key,
			Value:    val,
		})
	}
	return err
}
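
// List returns the files and/or folders stored under folderPath, ordered by
// path and limited to a single page. paging.After acts as an exclusive cursor,
// and HasMore reports whether a further page exists.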
//nolint:gocyclo
func (s dbFileStorage) List(ctx context.Context, folderPath string, paging *Paging, options *ListOptions) (*ListResponse, error) {
	var resp *ListResponse

	err := s.db.WithDbSession(ctx, func(sess *db.Session) error {
		cursor := ""
		if paging != nil && paging.After != "" {
			pagingFolderPathHash, err := createPathHash(paging.After + Delimiter)
			if err != nil {
				return err
			}

			exists, err := sess.Table("file").Where("path_hash = ?", pagingFolderPathHash).Exist()
			if err != nil {
				return err
			}
			if exists {
				cursor = paging.After + Delimiter
			} else {
				cursor = paging.After
			}
		}

		var foundFiles = make([]*file, 0)
		sess.Table("file")
		lowerFolderPrefix := ""
		lowerFolderPath := strings.ToLower(folderPath)
		if lowerFolderPath == "" || lowerFolderPath == Delimiter {
			lowerFolderPrefix = Delimiter
			lowerFolderPath = Delimiter
		} else {
			lowerFolderPath = strings.TrimSuffix(lowerFolderPath, Delimiter)
			lowerFolderPrefix = lowerFolderPath + Delimiter
		}

		prefixHash, _ := createPathHash(lowerFolderPrefix)

		sess.Where("path_hash != ?", prefixHash)
		parentHash, err := createPathHash(lowerFolderPath)
		if err != nil {
			return err
		}

		if !options.Recursive {
			sess.Where("parent_folder_path_hash = ?", parentHash)
		} else {
			sess.Where("(parent_folder_path_hash = ?) OR (lower(path) LIKE ?)", parentHash, lowerFolderPrefix+"%")
		}

		if !options.WithFolders && options.WithFiles {
			sess.Where("path NOT LIKE ?", "%/")
		}

		if options.WithFolders && !options.WithFiles {
			sess.Where("path LIKE ?", "%/")
		}

		sqlFilter := options.Filter.asSQLFilter()
		sess.Where(sqlFilter.Where, sqlFilter.Args...)

		sess.OrderBy("path")
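		// Note: paging is assumed to be non-nil at this point; the wrapper
		// created in NewDbStorage is expected to supply default paging when
		// the caller passes nil.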
		pageSize := paging.Limit
		sess.Limit(pageSize + 1)

		if cursor != "" {
			sess.Where("path > ?", cursor)
		}

		if options.WithContents {
			sess.Cols(allFileCols...)
		} else {
			sess.Cols(fileColsNoContents...)
		}

		if err := sess.Find(&foundFiles); err != nil {
			return err
		}

		foundLength := len(foundFiles)
		if foundLength > pageSize {
			foundLength = pageSize
		}

		pathToHash := make(map[string]string)
		hashes := make([]string, 0)
		for i := 0; i < foundLength; i++ {
			isFolder := strings.HasSuffix(foundFiles[i].Path, Delimiter)
			if !isFolder {
				hash, err := createPathHash(foundFiles[i].Path)
				if err != nil {
					return err
				}
				hashes = append(hashes, hash)
				pathToHash[foundFiles[i].Path] = hash
			}
		}

		propertiesByPathHash, err := s.getProperties(sess, hashes)
		if err != nil {
			return err
		}

		files := make([]*File, 0)
		for i := 0; i < foundLength; i++ {
			var props map[string]string
			path := strings.TrimSuffix(foundFiles[i].Path, Delimiter)

			if hash, ok := pathToHash[path]; ok {
				if foundProps, ok := propertiesByPathHash[hash]; ok {
					props = foundProps
				} else {
					props = make(map[string]string)
				}
			} else {
				props = make(map[string]string)
			}

			var contents []byte
			if options.WithContents {
				contents = foundFiles[i].Contents
			} else {
				contents = []byte{}
			}
			files = append(files, &File{Contents: contents, FileMetadata: FileMetadata{
				Name:       getName(path),
				FullPath:   path,
				Created:    foundFiles[i].Created,
				Properties: props,
				Modified:   foundFiles[i].Updated,
				Size:       foundFiles[i].Size,
				MimeType:   foundFiles[i].MimeType,
			}})
		}

		lastPath := ""
		if len(files) > 0 {
			lastPath = files[len(files)-1].FullPath
		}

		resp = &ListResponse{
			Files:    files,
			LastPath: lastPath,
			HasMore:  len(foundFiles) == pageSize+1,
		}
		return nil
	})

	return resp, err
}
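
// CreateFolder creates the folder under path together with any missing
// preceding folders, storing each one as an empty marker row whose path ends
// with the delimiter and whose mime type is DirectoryMimeType.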
func (s dbFileStorage) CreateFolder(ctx context.Context, path string) error {
	now := time.Now()
	precedingFolders := precedingFolders(path)

	err := s.db.WithTransactionalDbSession(ctx, func(sess *db.Session) error {
		var insertErr error
		sess.MustLogSQL(true)
		previousFolder := Delimiter
		for i := 0; i < len(precedingFolders); i++ {
			existing := &file{}
			currentFolderParentPath := previousFolder
			previousFolder = Join(previousFolder, getName(precedingFolders[i]))
			currentFolderPath := previousFolder
			if !strings.HasSuffix(currentFolderPath, Delimiter) {
				currentFolderPath = currentFolderPath + Delimiter
			}

			currentFolderPathHash, err := createPathHash(currentFolderPath)
			if err != nil {
				return err
			}

			exists, err := sess.Table("file").Where("path_hash = ?", currentFolderPathHash).Get(existing)
			if err != nil {
				insertErr = err
				break
			}

			if exists {
				previousFolder = strings.TrimSuffix(existing.Path, Delimiter)
				continue
			}

			currentFolderParentPathHash, err := createPathHash(currentFolderParentPath)
			if err != nil {
				return err
			}

			contents := make([]byte, 0)
			file := &file{
				Path:                 currentFolderPath,
				PathHash:             currentFolderPathHash,
				ParentFolderPathHash: currentFolderParentPathHash,
				Contents:             contents,
				ETag:                 createContentsHash(contents),
				Updated:              now,
				MimeType:             DirectoryMimeType,
				Created:              now,
			}
			_, err = sess.Insert(file)
			if err != nil {
				insertErr = err
				break
			}
			s.log.Info("Created folder", "markerPath", file.Path, "parent", currentFolderParentPath)
		}

		if insertErr != nil {
			if rollErr := sess.Rollback(); rollErr != nil {
				return fmt.Errorf("rolling back transaction due to error failed: %s: %w", rollErr, insertErr)
			}
			return insertErr
		}

		return sess.Commit()
	})

	return err
}
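
// DeleteFolder removes the folder under folderPath. Without options.Force only
// the folder marker itself is deleted; with Force every file and property
// below the folder is removed, after checking that options.AccessFilter grants
// access to all of them.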
func (s dbFileStorage) DeleteFolder(ctx context.Context, folderPath string, options *DeleteFolderOptions) error {
	lowerFolderPath := strings.ToLower(folderPath)
	if lowerFolderPath == "" || lowerFolderPath == Delimiter {
		lowerFolderPath = Delimiter
	} else if !strings.HasSuffix(lowerFolderPath, Delimiter) {
		lowerFolderPath = lowerFolderPath + Delimiter
	}

	if !options.Force {
		return s.Delete(ctx, lowerFolderPath)
	}

	err := s.db.WithTransactionalDbSession(ctx, func(sess *db.Session) error {
		var rawHashes []any

		// xorm does not support `.Delete()` with `.Join()`, so we first have to retrieve all path_hashes
		// and then use them to filter the `file_meta` table
		err := sess.Table("file").
			Cols("path_hash").
			Where("LOWER(path) LIKE ?", lowerFolderPath+"%").
			Find(&rawHashes)
		if err != nil {
			return err
		}

		if len(rawHashes) == 0 {
			s.log.Info("Force deleted folder", "path", lowerFolderPath, "deletedFilesCount", 0, "deletedMetaCount", 0)
			return nil
		}

		accessFilter := options.AccessFilter.asSQLFilter()
		accessibleFilesCount, err := sess.Table("file").
			Cols("path_hash").
			Where("LOWER(path) LIKE ?", lowerFolderPath+"%").
			Where(accessFilter.Where, accessFilter.Args...).
			Count(&file{})
		if err != nil {
			return err
		}

		if int64(len(rawHashes)) != accessibleFilesCount {
			s.log.Error("Force folder delete: unauthorized access", "path", lowerFolderPath, "expectedAccessibleFilesCount", int64(len(rawHashes)), "actualAccessibleFilesCount", accessibleFilesCount)
			return fmt.Errorf("force folder delete: unauthorized access for path %s", lowerFolderPath)
		}

		var hashes []any
		for _, hash := range rawHashes {
			if hashString, ok := hash.(string); ok {
				hashes = append(hashes, hashString)

				// MySQL returns the `path_hash` field as []uint8
			} else if hashUint, ok := hash.([]uint8); ok {
				hashes = append(hashes, string(hashUint))
			} else {
				return fmt.Errorf("invalid hash type: %s", reflect.TypeOf(hash))
			}
		}

		deletedFilesCount, err := sess.
			Table("file").
			In("path_hash", hashes...).
			Delete(&file{})

		if err != nil {
			return err
		}

		deletedMetaCount, err := sess.
			Table("file_meta").
			In("path_hash", hashes...).
			Delete(&fileMeta{})

		if err != nil {
			if rollErr := sess.Rollback(); rollErr != nil {
				return fmt.Errorf("failed to roll back transaction due to error: %s: %w", rollErr, err)
			}

			return err
		}

		s.log.Info("Force deleted folder", "path", folderPath, "deletedFilesCount", deletedFilesCount, "deletedMetaCount", deletedMetaCount)
		return nil
	})

	return err
}
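
// close is a no-op; the injected db.DB is managed by its owner, not by this storage.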
func (s dbFileStorage) close() error {
	return nil
}