Storage: list filtering and root folder support (#46453)

* git the things: FS api internal changes

* remove filestorage/service.go

* remove filestore flag

* remove dummy fs

* readd fileblob import
This commit is contained in:
Artur Wierzbicki 2022-03-11 22:08:19 +04:00 committed by GitHub
parent a29159f362
commit ed924b3d0c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
14 changed files with 774 additions and 471 deletions

View File

@ -46,5 +46,4 @@ export interface FeatureToggles {
dashboardComments?: boolean;
annotationComments?: boolean;
migrationLocking?: boolean;
fileStoreApi?: boolean;
}

View File

@ -3,16 +3,11 @@ package filestorage
import (
"context"
"errors"
"regexp"
"strings"
"time"
)
type StorageName string
const (
StorageNamePublic StorageName = "public"
)
var (
ErrRelativePath = errors.New("path cant be relative")
ErrNonCanonicalPath = errors.New("path must be canonical")
@ -20,14 +15,14 @@ var (
ErrPathInvalid = errors.New("path is invalid")
ErrPathEndsWithDelimiter = errors.New("path can not end with delimiter")
Delimiter = "/"
multipleDelimiters = regexp.MustCompile(`/+`)
)
func Join(parts ...string) string {
return Delimiter + strings.Join(parts, Delimiter)
}
joinedPath := Delimiter + strings.Join(parts, Delimiter)
func belongsToStorage(path string, storageName StorageName) bool {
return strings.HasPrefix(path, Delimiter+string(storageName))
// makes the API more forgiving for clients without compromising safety
return multipleDelimiters.ReplaceAllString(joinedPath, Delimiter)
}
type File struct {
@ -64,26 +59,83 @@ type UpsertFileCommand struct {
}
// PathFilters holds allow/deny rules used to restrict which storage paths
// are visible. Entries are expected to be lower-cased up front (see
// NewPathFilters). A nil slice means "rule unset"; a non-nil but empty
// allow-list denies everything (see isDenyAll).
type PathFilters struct {
	allowedPrefixes    []string
	disallowedPrefixes []string
	allowedPaths       []string
	disallowedPaths    []string
}
func (f *PathFilters) isAllowed(path string) bool {
if f == nil || f.allowedPrefixes == nil {
// toLower returns a copy of list with every element lower-cased.
// A nil input yields nil so the "unset" vs "empty" filter-list
// distinction used by PathFilters is preserved.
func toLower(list []string) []string {
	if list == nil {
		return nil
	}

	// pre-size to avoid repeated growth copies
	lower := make([]string, 0, len(list))
	for _, el := range list {
		lower = append(lower, strings.ToLower(el))
	}
	return lower
}
// allowAllPathFilters returns a PathFilters with every rule list unset
// (nil), which IsAllowed treats as "every path is permitted".
func allowAllPathFilters() *PathFilters {
	return NewPathFilters(nil, nil, nil, nil)
}
// denyAllPathFilters returns a PathFilters whose allow-lists are set but
// empty, so no path can ever match them and IsAllowed rejects everything.
//nolint:deadcode,unused
func denyAllPathFilters() *PathFilters {
	return NewPathFilters([]string{}, []string{}, nil, nil)
}
// NewPathFilters builds a PathFilters value. All entries are lower-cased
// once up front so IsAllowed can compare case-insensitively without
// re-lowering on every call. Nil slices are preserved as nil, meaning
// "rule unset".
func NewPathFilters(allowedPrefixes []string, allowedPaths []string, disallowedPrefixes []string, disallowedPaths []string) *PathFilters {
	return &PathFilters{
		allowedPrefixes:    toLower(allowedPrefixes),
		allowedPaths:       toLower(allowedPaths),
		disallowedPaths:    toLower(disallowedPaths),
		disallowedPrefixes: toLower(disallowedPrefixes),
	}
}
// isDenyAll reports whether this filter can never match any path: both
// allow-lists are set (non-nil) yet together contain no entries.
func (f *PathFilters) isDenyAll() bool {
	return f.allowedPaths != nil && f.allowedPrefixes != nil && (len(f.allowedPaths)+len(f.allowedPrefixes) == 0)
}
func (f *PathFilters) IsAllowed(path string) bool {
if f == nil {
return true
}
for i := range f.allowedPrefixes {
if strings.HasPrefix(path, strings.ToLower(f.allowedPrefixes[i])) {
return true
path = strings.ToLower(path)
for i := range f.disallowedPaths {
if f.disallowedPaths[i] == path {
return false
}
}
for i := range f.disallowedPrefixes {
if strings.HasPrefix(path, f.disallowedPrefixes[i]) {
return false
}
}
if f.allowedPrefixes == nil && f.allowedPaths == nil {
return true
}
for i := range f.allowedPaths {
if f.allowedPaths[i] == path {
return true
}
}
for i := range f.allowedPrefixes {
if strings.HasPrefix(path, f.allowedPrefixes[i]) {
return true
}
}
return false
}
type ListOptions struct {
Recursive bool
PathFilters
*PathFilters
}
type FileStorage interface {

View File

@ -34,42 +34,3 @@ func TestFilestorageApi_Join(t *testing.T) {
})
}
}
// TestFilestorageApi_belongToStorage exercises belongsToStorage with
// table-driven cases: a path belongs to a storage only when it starts with
// the delimiter immediately followed by the storage name.
func TestFilestorageApi_belongToStorage(t *testing.T) {
	var tests = []struct {
		name     string
		path     string
		storage  StorageName
		expected bool
	}{
		{
			name:     "should return true if path is prefixed with delimiter and the storage name",
			path:     "/public/abc/d",
			storage:  StorageNamePublic,
			expected: true,
		},
		{
			name:     "should return true if path consists just of the delimiter and the storage name",
			path:     "/public",
			storage:  StorageNamePublic,
			expected: true,
		},
		{
			name:     "should return false if path is not prefixed with delimiter",
			path:     "public/abc/d",
			storage:  StorageNamePublic,
			expected: false,
		},
		{
			name:     "should return false if storage name does not match",
			path:     "/notpublic/abc/d",
			storage:  StorageNamePublic,
			expected: false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			require.Equal(t, tt.expected, belongsToStorage(tt.path, tt.storage))
		})
	}
}

View File

@ -5,11 +5,15 @@ import (
"errors"
"fmt"
"io"
"sort"
"strings"
"github.com/grafana/grafana/pkg/infra/log"
"gocloud.dev/blob"
"gocloud.dev/gcerrors"
_ "gocloud.dev/blob/fileblob"
_ "gocloud.dev/blob/memblob"
)
const (
@ -17,21 +21,15 @@ const (
)
type cdkBlobStorage struct {
log log.Logger
bucket *blob.Bucket
rootFolder string
log log.Logger
bucket *blob.Bucket
}
func NewCdkBlobStorage(log log.Logger, bucket *blob.Bucket, rootFolder string, pathFilters *PathFilters) FileStorage {
return &wrapper{
log: log,
wrapped: &cdkBlobStorage{
log: log,
bucket: bucket,
rootFolder: rootFolder,
},
pathFilters: pathFilters,
}
return newWrapper(log, &cdkBlobStorage{
log: log,
bucket: bucket,
}, pathFilters, rootFolder)
}
func (c cdkBlobStorage) Get(ctx context.Context, filePath string) (*File, error) {
@ -177,7 +175,7 @@ func (c cdkBlobStorage) listFiles(ctx context.Context, folderPath string, paging
path := obj.Key
allowed := options.isAllowed(obj.Key)
allowed := options.IsAllowed(obj.Key)
if obj.IsDir && recursive {
newPaging := &Paging{
First: pageSize - len(files),
@ -216,8 +214,26 @@ func (c cdkBlobStorage) listFiles(ctx context.Context, folderPath string, paging
attributes, err := c.bucket.Attributes(ctx, strings.ToLower(path))
if err != nil {
c.log.Error("Failed while retrieving attributes", "path", path, "err", err)
return nil, err
if gcerrors.Code(err) == gcerrors.NotFound {
attributes, err = c.bucket.Attributes(ctx, path)
if err != nil {
c.log.Error("Failed while retrieving attributes", "path", path, "err", err)
return nil, err
}
} else {
c.log.Error("Failed while retrieving attributes", "path", path, "err", err)
return nil, err
}
}
if attributes.ContentType == "application/x-directory; charset=UTF-8" {
// S3 directory representation
continue
}
if attributes.ContentType == "text/plain" && obj.Key == folderPath && attributes.Size == 0 {
// GCS directory representation
continue
}
var originalPath string
@ -230,7 +246,7 @@ func (c cdkBlobStorage) listFiles(ctx context.Context, folderPath string, paging
}
} else {
props = make(map[string]string)
originalPath = fixPath(path)
originalPath = strings.TrimSuffix(path, Delimiter)
}
files = append(files, FileMetadata{
@ -257,52 +273,17 @@ func (c cdkBlobStorage) listFiles(ctx context.Context, folderPath string, paging
}, nil
}
func (c cdkBlobStorage) fixInputPrefix(path string) string {
if path == Delimiter || path == "" {
return c.rootFolder
func (c cdkBlobStorage) convertFolderPathToPrefix(path string) string {
if path != "" && !strings.HasSuffix(path, Delimiter) {
return path + Delimiter
}
if strings.HasPrefix(path, Delimiter) {
path = fmt.Sprintf("%s%s", c.rootFolder, strings.TrimPrefix(path, Delimiter))
}
return path
}
func (c cdkBlobStorage) convertFolderPathToPrefix(path string) string {
if path == Delimiter || path == "" {
return c.rootFolder
}
if strings.HasPrefix(path, Delimiter) {
path = fmt.Sprintf("%s%s", c.rootFolder, strings.TrimPrefix(path, Delimiter))
}
return fmt.Sprintf("%s%s", path, Delimiter)
}
// fixPath normalizes a storage path: any trailing delimiter is stripped and
// exactly one leading delimiter is guaranteed.
func fixPath(path string) string {
	trimmed := strings.TrimSuffix(path, Delimiter)
	if strings.HasPrefix(trimmed, Delimiter) {
		return trimmed
	}
	return Delimiter + trimmed
}
// convertListOptions rewrites the allowed-prefix filters so they are
// expressed relative to the storage's root folder. Options with no prefix
// filters are returned untouched.
// NOTE(review): this mutates options.PathFilters in place — presumably the
// caller owns the options value; confirm it is not shared across requests.
func (c cdkBlobStorage) convertListOptions(options *ListOptions) *ListOptions {
	if options == nil || options.allowedPrefixes == nil || len(options.allowedPrefixes) == 0 {
		return options
	}

	newPrefixes := make([]string, len(options.allowedPrefixes))
	for i, prefix := range options.allowedPrefixes {
		newPrefixes[i] = c.fixInputPrefix(prefix)
	}

	options.PathFilters.allowedPrefixes = newPrefixes
	return options
}
func (c cdkBlobStorage) ListFiles(ctx context.Context, folderPath string, paging *Paging, options *ListOptions) (*ListFilesResponse, error) {
paging.After = c.fixInputPrefix(paging.After)
return c.listFiles(ctx, c.convertFolderPathToPrefix(folderPath), paging, c.convertListOptions(options))
prefix := c.convertFolderPathToPrefix(folderPath)
files, err := c.listFiles(ctx, prefix, paging, options)
return files, err
}
func (c cdkBlobStorage) listFolderPaths(ctx context.Context, parentFolderPath string, options *ListOptions) ([]string, error) {
@ -313,7 +294,8 @@ func (c cdkBlobStorage) listFolderPaths(ctx context.Context, parentFolderPath st
recursive := options.Recursive
currentDirPath := ""
dirPath := ""
dirMarkerPath := ""
foundPaths := make([]string, 0)
for {
obj, err := iterator.Next(ctx)
@ -326,16 +308,26 @@ func (c cdkBlobStorage) listFolderPaths(ctx context.Context, parentFolderPath st
return nil, err
}
if currentDirPath == "" && !obj.IsDir && options.isAllowed(obj.Key) {
attributes, err := c.bucket.Attributes(ctx, obj.Key)
if err != nil {
c.log.Error("Failed while retrieving attributes", "path", obj.Key, "err", err)
return nil, err
if options.IsAllowed(obj.Key) {
if obj.IsDir && !recursive && options.IsAllowed(obj.Key) {
foundPaths = append(foundPaths, strings.TrimSuffix(obj.Key, Delimiter))
}
if attributes.Metadata != nil {
if path, ok := attributes.Metadata[originalPathAttributeKey]; ok {
currentDirPath = getParentFolderPath(path)
if dirPath == "" && !obj.IsDir {
dirPath = getParentFolderPath(obj.Key)
}
if dirMarkerPath == "" && !obj.IsDir {
attributes, err := c.bucket.Attributes(ctx, obj.Key)
if err != nil {
c.log.Error("Failed while retrieving attributes", "path", obj.Key, "err", err)
return nil, err
}
if attributes.Metadata != nil {
if path, ok := attributes.Metadata[originalPathAttributeKey]; ok {
dirMarkerPath = getParentFolderPath(path)
}
}
}
}
@ -354,41 +346,35 @@ func (c cdkBlobStorage) listFolderPaths(ctx context.Context, parentFolderPath st
}
}
if currentDirPath != "" {
foundPaths = append(foundPaths, fixPath(currentDirPath))
var foundPath string
if dirMarkerPath != "" {
foundPath = dirMarkerPath
} else if dirPath != "" {
foundPath = dirPath
}
if foundPath != "" && options.IsAllowed(foundPath+Delimiter) {
foundPaths = append(foundPaths, foundPath)
}
return foundPaths, nil
}
func (c cdkBlobStorage) ListFolders(ctx context.Context, prefix string, options *ListOptions) ([]FileMetadata, error) {
foundPaths, err := c.listFolderPaths(ctx, c.convertFolderPathToPrefix(prefix), c.convertListOptions(options))
fixedPrefix := c.convertFolderPathToPrefix(prefix)
foundPaths, err := c.listFolderPaths(ctx, fixedPrefix, options)
if err != nil {
return nil, err
}
sort.Strings(foundPaths)
folders := make([]FileMetadata, 0)
mem := make(map[string]bool)
for i := 0; i < len(foundPaths); i++ {
path := foundPaths[i]
parts := strings.Split(path, Delimiter)
acc := parts[0]
j := 1
for {
acc = fmt.Sprintf("%s%s%s", acc, Delimiter, parts[j])
comparison := strings.Compare(acc, prefix)
if !mem[acc] && comparison > 0 {
folders = append(folders, FileMetadata{
Name: getName(acc),
FullPath: acc,
})
}
mem[acc] = true
j += 1
if j >= len(parts) {
break
}
for _, path := range foundPaths {
if strings.Compare(path, fixedPrefix) > 0 {
folders = append(folders, FileMetadata{
Name: getName(path),
FullPath: path,
})
}
}
@ -453,11 +439,11 @@ func (c cdkBlobStorage) CreateFolder(ctx context.Context, path string) error {
previousFolderOriginalCasing := ""
if i > 0 {
previousFolderOriginalCasing = folderToOriginalCasing[precedingFolders[i-1]]
previousFolderOriginalCasing = folderToOriginalCasing[precedingFolders[i-1]] + Delimiter
}
metadata := make(map[string]string)
currentFolderWithOriginalCasing := previousFolderOriginalCasing + Delimiter + getName(currentFolder)
currentFolderWithOriginalCasing := previousFolderOriginalCasing + getName(currentFolder)
metadata[originalPathAttributeKey] = currentFolderWithOriginalCasing + Delimiter + directoryMarker
if err := c.bucket.WriteAll(ctx, strings.ToLower(metadata[originalPathAttributeKey]), make([]byte, 0), &blob.WriterOptions{
Metadata: metadata,

View File

@ -32,15 +32,11 @@ type dbFileStorage struct {
log log.Logger
}
func NewDbStorage(log log.Logger, db *sqlstore.SQLStore, pathFilters *PathFilters) FileStorage {
return &wrapper{
func NewDbStorage(log log.Logger, db *sqlstore.SQLStore, pathFilters *PathFilters, rootFolder string) FileStorage {
return newWrapper(log, &dbFileStorage{
log: log,
wrapped: &dbFileStorage{
log: log,
db: db,
},
pathFilters: pathFilters,
}
db: db,
}, pathFilters, rootFolder)
}
func (s dbFileStorage) getProperties(sess *sqlstore.DBSession, lowerCasePaths []string) (map[string]map[string]string, error) {
@ -248,8 +244,12 @@ func (s dbFileStorage) ListFiles(ctx context.Context, folderPath string, paging
}
sess.Where("LOWER(path) NOT LIKE ?", fmt.Sprintf("%s%s%s", "%", Delimiter, directoryMarker))
for _, prefix := range options.PathFilters.allowedPrefixes {
sess.Where("LOWER(path) LIKE ?", fmt.Sprintf("%s%s", strings.ToLower(prefix), "%"))
if options.PathFilters.isDenyAll() {
sess.Where("1 == 2")
} else {
for _, prefix := range options.PathFilters.allowedPrefixes {
sess.Where("LOWER(path) LIKE ?", fmt.Sprintf("%s%s", strings.ToLower(prefix), "%"))
}
}
sess.OrderBy("path")
@ -330,8 +330,12 @@ func (s dbFileStorage) ListFolders(ctx context.Context, parentFolderPath string,
sess.Where("LOWER(parent_folder_path) = ?", strings.ToLower(parentFolderPath))
}
for _, prefix := range options.PathFilters.allowedPrefixes {
sess.Where("LOWER(parent_folder_path) LIKE ?", fmt.Sprintf("%s%s", strings.ToLower(prefix), "%"))
if options.PathFilters.isDenyAll() {
sess.Where("1 == 2")
} else {
for _, prefix := range options.PathFilters.allowedPrefixes {
sess.Where("LOWER(parent_folder_path) LIKE ?", fmt.Sprintf("%s%s", strings.ToLower(prefix), "%"))
}
}
sess.OrderBy("parent_folder_path")

View File

@ -1,51 +0,0 @@
package filestorage
import (
"context"
_ "gocloud.dev/blob/fileblob"
_ "gocloud.dev/blob/memblob"
)
var (
_ FileStorage = (*dummyFileStorage)(nil) // dummyFileStorage implements FileStorage
)
// dummyFileStorage is a no-op FileStorage implementation used when the real
// backend is disabled: every operation succeeds and returns zero values.
type dummyFileStorage struct {
}

// Get always reports that no file exists, without error.
func (d dummyFileStorage) Get(ctx context.Context, path string) (*File, error) {
	return nil, nil
}

// Delete is a no-op.
func (d dummyFileStorage) Delete(ctx context.Context, path string) error {
	return nil
}

// Upsert is a no-op.
func (d dummyFileStorage) Upsert(ctx context.Context, file *UpsertFileCommand) error {
	return nil
}

// ListFiles always returns a nil (empty) response.
func (d dummyFileStorage) ListFiles(ctx context.Context, path string, cursor *Paging, options *ListOptions) (*ListFilesResponse, error) {
	return nil, nil
}

// ListFolders always returns a nil (empty) listing.
func (d dummyFileStorage) ListFolders(ctx context.Context, path string, options *ListOptions) ([]FileMetadata, error) {
	return nil, nil
}

// CreateFolder is a no-op.
func (d dummyFileStorage) CreateFolder(ctx context.Context, path string) error {
	return nil
}

// DeleteFolder is a no-op.
func (d dummyFileStorage) DeleteFolder(ctx context.Context, path string) error {
	return nil
}

// IsFolderEmpty always reports true.
func (d dummyFileStorage) IsFolderEmpty(ctx context.Context, path string) (bool, error) {
	return true, nil
}

// close is a no-op.
func (d dummyFileStorage) close() error {
	return nil
}

View File

@ -1,160 +0,0 @@
package filestorage
import (
"context"
"errors"
"fmt"
"os"
"strings"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/services/featuremgmt"
"github.com/grafana/grafana/pkg/setting"
"gocloud.dev/blob"
_ "gocloud.dev/blob/fileblob"
_ "gocloud.dev/blob/memblob"
)
const (
ServiceName = "FileStorage"
)
// ProvideService wires up the FileStorage service for dependency injection.
// It opens a local-file blob bucket rooted at the static assets directory
// and, when the fileStoreApi feature flag is enabled, exposes it through a
// prefix-filtered wrapper; otherwise a no-op dummy backend is installed.
func ProvideService(features featuremgmt.FeatureToggles, cfg *setting.Cfg) (FileStorage, error) {
	grafanaDsStorageLogger := log.New("grafanaDsStorage")

	path := fmt.Sprintf("file://%s", cfg.StaticRootPath)
	grafanaDsStorageLogger.Info("Initializing grafana ds storage", "path", path)

	bucket, err := blob.OpenBucket(context.Background(), path)
	if err != nil {
		currentDir, _ := os.Getwd()
		grafanaDsStorageLogger.Error("Failed to initialize grafana ds storage", "path", path, "error", err, "cwd", currentDir)
		return nil, err
	}

	// only these asset folders are reachable through the public storage
	prefixes := []string{
		"testdata/",
		"img/icons/",
		"img/bg/",
		"gazetteer/",
		"maps/",
		"upload/",
	}

	var grafanaDsStorage FileStorage
	if features.IsEnabled(featuremgmt.FlagFileStoreApi) {
		grafanaDsStorage = &wrapper{
			log: grafanaDsStorageLogger,
			wrapped: cdkBlobStorage{
				log:        grafanaDsStorageLogger,
				bucket:     bucket,
				rootFolder: "",
			},
			pathFilters: &PathFilters{allowedPrefixes: prefixes},
		}
	} else {
		grafanaDsStorage = &dummyFileStorage{}
	}

	return &service{
		grafanaDsStorage: grafanaDsStorage,
		log:              log.New("fileStorageService"),
	}, nil
}
// service routes FileStorage calls to the backend that owns the path's
// storage prefix; currently only the public (static assets) storage is
// wired up.
type service struct {
	log              log.Logger
	grafanaDsStorage FileStorage
}
// Get retrieves a file from the storage that owns the path. Only the public
// storage is routed; any other prefix is rejected with an explicit error.
// Previously, a path outside the public storage left the local filestorage
// variable nil and the final call panicked with a nil dereference; the else
// branch mirrors the handling in ListFiles/ListFolders.
func (b service) Get(ctx context.Context, path string) (*File, error) {
	var filestorage FileStorage
	if belongsToStorage(path, StorageNamePublic) {
		filestorage = b.grafanaDsStorage
		path = removeStoragePrefix(path)
	} else {
		return nil, errors.New("not implemented")
	}

	if err := validatePath(path); err != nil {
		return nil, err
	}

	return filestorage.Get(ctx, path)
}
// removeStoragePrefix strips the leading storage-name segment from path and
// returns the remainder as an absolute path. Paths consisting only of the
// storage name — with or without surrounding delimiters — collapse to the
// root delimiter.
func removeStoragePrefix(path string) string {
	trimmed := strings.TrimPrefix(path, Delimiter)

	// nothing beyond the storage name itself
	if trimmed == "" || trimmed == Delimiter || !strings.Contains(trimmed, Delimiter) {
		return Delimiter
	}

	// split off the storage segment; everything after it is the real path
	parts := strings.SplitN(trimmed, Delimiter, 2)
	remainder := parts[1]
	if remainder == "" {
		// "storage/" - root of the storage
		return Delimiter
	}

	return Delimiter + remainder
}
// Delete is not yet supported by the service facade.
func (b service) Delete(ctx context.Context, path string) error {
	return errors.New("not implemented")
}

// Upsert is not yet supported by the service facade.
func (b service) Upsert(ctx context.Context, file *UpsertFileCommand) error {
	return errors.New("not implemented")
}
// ListFiles lists files under path for the storage that owns it. Only the
// public storage is routed; any other prefix yields "not implemented".
func (b service) ListFiles(ctx context.Context, path string, cursor *Paging, options *ListOptions) (*ListFilesResponse, error) {
	var filestorage FileStorage
	if belongsToStorage(path, StorageNamePublic) {
		filestorage = b.grafanaDsStorage
		// strip the storage segment so the backend sees a root-relative path
		path = removeStoragePrefix(path)
	} else {
		return nil, errors.New("not implemented")
	}

	if err := validatePath(path); err != nil {
		return nil, err
	}

	return filestorage.ListFiles(ctx, path, cursor, options)
}
// ListFolders lists folders under path for the storage that owns it. Only
// the public storage is routed; any other prefix yields "not implemented".
func (b service) ListFolders(ctx context.Context, path string, options *ListOptions) ([]FileMetadata, error) {
	var filestorage FileStorage
	if belongsToStorage(path, StorageNamePublic) {
		filestorage = b.grafanaDsStorage
		// strip the storage segment so the backend sees a root-relative path
		path = removeStoragePrefix(path)
	} else {
		return nil, errors.New("not implemented")
	}

	if err := validatePath(path); err != nil {
		return nil, err
	}

	return filestorage.ListFolders(ctx, path, options)
}
// CreateFolder is not yet supported by the service facade.
func (b service) CreateFolder(ctx context.Context, path string) error {
	return errors.New("not implemented")
}

// DeleteFolder is not yet supported by the service facade.
func (b service) DeleteFolder(ctx context.Context, path string) error {
	return errors.New("not implemented")
}

// IsFolderEmpty is not yet supported; it reports true alongside the error.
func (b service) IsFolderEmpty(ctx context.Context, path string) (bool, error) {
	return true, errors.New("not implemented")
}

// close releases the underlying public storage backend.
func (b service) close() error {
	return b.grafanaDsStorage.close()
}

View File

@ -1,46 +0,0 @@
package filestorage
import (
"fmt"
"testing"
"github.com/stretchr/testify/require"
)
// TestFilestorage_removeStoragePrefix verifies that the storage-name prefix
// is stripped and that bare or empty storage paths collapse to the root
// delimiter; every case is run both with and without a leading delimiter.
func TestFilestorage_removeStoragePrefix(t *testing.T) {
	var tests = []struct {
		name     string
		path     string
		expected string
	}{
		{
			name:     "should return root if path is empty",
			path:     "",
			expected: Delimiter,
		},
		{
			name:     "should remove prefix folder from path with multiple parts",
			path:     "public/abc/d",
			expected: "/abc/d",
		},
		{
			name:     "should return root path if path is just the storage name",
			path:     "public",
			expected: Delimiter,
		},
		{
			name:     "should return root path if path is the prefix of storage",
			path:     "public/",
			expected: Delimiter,
		},
	}

	for _, tt := range tests {
		t.Run(fmt.Sprintf("%s%s", "absolute: ", tt.name), func(t *testing.T) {
			require.Equal(t, tt.expected, removeStoragePrefix(Delimiter+tt.path))
		})

		t.Run(fmt.Sprintf("%s%s", "relative: ", tt.name), func(t *testing.T) {
			require.Equal(t, tt.expected, removeStoragePrefix(tt.path))
		})
	}
}

View File

@ -9,6 +9,7 @@ import (
"fmt"
"io/ioutil"
"os"
"path"
"testing"
"github.com/grafana/grafana/pkg/infra/log"
@ -61,13 +62,13 @@ func runTests(createCases func() []fsTestCase, t *testing.T) {
setupInMemFS := func() {
commonSetup()
bucket, _ := blob.OpenBucket(context.Background(), "mem://")
filestorage = NewCdkBlobStorage(testLogger, bucket, Delimiter, nil)
filestorage = NewCdkBlobStorage(testLogger, bucket, "", nil)
}
//setupSqlFS := func() {
// commonSetup()
// sqlStore = sqlstore.InitTestDB(t)
// filestorage = NewDbStorage(testLogger, sqlStore, nil)
// filestorage = NewDbStorage(testLogger, sqlStore, nil, "/")
//}
setupLocalFs := func() {
@ -85,6 +86,27 @@ func runTests(createCases func() []fsTestCase, t *testing.T) {
filestorage = NewCdkBlobStorage(testLogger, bucket, "", nil)
}
setupLocalFsNestedPath := func() {
commonSetup()
tmpDir, err := ioutil.TempDir("", "")
tempDir = tmpDir
if err != nil {
t.Fatal(err)
}
nestedPath := path.Join("a", "b")
err = os.MkdirAll(path.Join(tmpDir, nestedPath), os.ModePerm)
if err != nil {
t.Fatal(err)
}
bucket, err := blob.OpenBucket(context.Background(), fmt.Sprintf("file://%s", tmpDir))
if err != nil {
t.Fatal(err)
}
filestorage = NewCdkBlobStorage(testLogger, bucket, nestedPath+Delimiter, nil)
}
backends := []struct {
setup func()
name string
@ -93,6 +115,10 @@ func runTests(createCases func() []fsTestCase, t *testing.T) {
setup: setupLocalFs,
name: "Local FS",
},
{
setup: setupLocalFsNestedPath,
name: "Local FS with nested path",
},
{
setup: setupInMemFS,
name: "In-mem FS",
@ -218,6 +244,47 @@ func TestFsStorage(t *testing.T) {
},
},
},
{
name: "listing files with path to a file",
steps: []interface{}{
cmdUpsert{
cmd: UpsertFileCommand{
Path: "/folder1/folder2/file.jpg",
Contents: &[]byte{},
Properties: map[string]string{"prop1": "val1", "prop2": "val"},
},
},
cmdUpsert{
cmd: UpsertFileCommand{
Path: "/folder1/file-inner.jpg",
Contents: &[]byte{},
Properties: map[string]string{"prop1": "val1"},
},
},
queryListFiles{
input: queryListFilesInput{path: "/folder1/file-inner.jp", options: &ListOptions{Recursive: true}},
list: checks(listSize(0), listHasMore(false), listLastPath("")),
},
queryListFiles{
input: queryListFilesInput{path: "/folder1/file-inner", options: &ListOptions{Recursive: true}},
list: checks(listSize(0), listHasMore(false), listLastPath("")),
},
queryListFiles{
input: queryListFilesInput{path: "/folder1/folder2/file.jpg", options: &ListOptions{Recursive: true}},
list: checks(listSize(1), listHasMore(false), listLastPath("/folder1/folder2/file.jpg")),
files: [][]interface{}{
checks(fPath("/folder1/folder2/file.jpg"), fName("file.jpg"), fProperties(map[string]string{"prop1": "val1", "prop2": "val"})),
},
},
queryListFiles{
input: queryListFilesInput{path: "/folder1/file-inner.jpg", options: &ListOptions{Recursive: true}},
list: checks(listSize(1), listHasMore(false), listLastPath("/folder1/file-inner.jpg")),
files: [][]interface{}{
checks(fPath("/folder1/file-inner.jpg"), fName("file-inner.jpg"), fProperties(map[string]string{"prop1": "val1"})),
},
},
},
},
{
name: "listing files with prefix filter",
steps: []interface{}{
@ -234,11 +301,11 @@ func TestFsStorage(t *testing.T) {
},
},
queryListFiles{
input: queryListFilesInput{path: "/folder1", options: &ListOptions{Recursive: true, PathFilters: PathFilters{allowedPrefixes: []string{"/folder2"}}}},
input: queryListFilesInput{path: "/folder1", options: &ListOptions{Recursive: true, PathFilters: &PathFilters{allowedPrefixes: []string{"/folder2"}}}},
list: checks(listSize(0), listHasMore(false), listLastPath("")),
},
queryListFiles{
input: queryListFilesInput{path: "/folder1", options: &ListOptions{Recursive: true, PathFilters: PathFilters{allowedPrefixes: []string{"/folder1/folder"}}}},
input: queryListFilesInput{path: "/folder1", options: &ListOptions{Recursive: true, PathFilters: &PathFilters{allowedPrefixes: []string{"/folder1/folder"}}}},
list: checks(listSize(1), listHasMore(false)),
files: [][]interface{}{
checks(fPath("/folder1/folder2/file.jpg")),
@ -850,8 +917,156 @@ func TestFsStorage(t *testing.T) {
}
}
createPathFiltersCases := func() []fsTestCase {
pathFilters := NewPathFilters(
[]string{"/gitB/", "/s3/folder/", "/gitC/"},
[]string{"/gitA/dashboard2.json"},
[]string{"/s3/folder/nested/"},
[]string{"/gitC/nestedC/"},
)
return []fsTestCase{
{
name: "catch-all test - TODO: split into multiple",
steps: []interface{}{
cmdUpsert{
cmd: UpsertFileCommand{
Path: "/s3/folder/dashboard.json",
Contents: &[]byte{},
},
},
cmdUpsert{
cmd: UpsertFileCommand{
Path: "/s3/folder/nested/dashboard.json",
Contents: &[]byte{},
},
},
cmdUpsert{
cmd: UpsertFileCommand{
Path: "/gitA/dashboard1.json",
Contents: &[]byte{},
},
},
cmdUpsert{
cmd: UpsertFileCommand{
Path: "/gitA/dashboard2.json",
Contents: &[]byte{},
},
},
cmdUpsert{
cmd: UpsertFileCommand{
Path: "/gitB/nested/dashboard.json",
Contents: &[]byte{},
},
},
cmdUpsert{
cmd: UpsertFileCommand{
Path: "/gitB/nested2/dashboard2.json",
Contents: &[]byte{},
},
},
cmdUpsert{
cmd: UpsertFileCommand{
Path: "/gitC/nestedC/dashboardC.json",
Contents: &[]byte{},
},
},
queryListFiles{
input: queryListFilesInput{path: "/", options: &ListOptions{
Recursive: true,
PathFilters: allowAllPathFilters(),
}},
list: checks(listSize(7)),
},
queryListFiles{
input: queryListFilesInput{path: "/", options: &ListOptions{
Recursive: true,
PathFilters: denyAllPathFilters(),
}},
list: checks(listSize(0)),
},
queryListFiles{
input: queryListFilesInput{path: "/", options: &ListOptions{
Recursive: true,
PathFilters: pathFilters,
}},
list: checks(listSize(5), listHasMore(false), listLastPath("/s3/folder/dashboard.json")),
files: [][]interface{}{
// /gitA/dashboard.json is not explicitly allowed
checks(fPath("/gitA/dashboard2.json")), // explicitly allowed by allowedPath
checks(fPath("/gitB/nested/dashboard.json")), // allowed by '/gitB/' prefix
checks(fPath("/gitB/nested2/dashboard2.json")), // allowed by '/gitB/' prefix
checks(fPath("/gitC/nestedC/dashboardC.json")), // allowed by '/gitC/' prefix
checks(fPath("/s3/folder/dashboard.json")), // allowed by '/s3/folder/' prefix
// /s3/folder/nested/dashboard.json is denied with '/s3/folder/nested/' prefix
},
},
queryListFolders{
input: queryListFoldersInput{path: "/", options: &ListOptions{
Recursive: true,
PathFilters: pathFilters,
}},
checks: [][]interface{}{
// /gitA is missing due to the lack of explicit allow
checks(fPath("/gitB")), // allowed by '/gitB/' prefix
checks(fPath("/gitB/nested")), // allowed by '/gitB/' prefix
checks(fPath("/gitB/nested2")), // allowed by '/gitB/' prefix
checks(fPath("/gitC")), // allowed by '/gitC/' prefix
// /gitC/nestedC is explicitly denied
// /s3 is not explicitly allowed
checks(fPath("/s3/folder")),
// /s3/folder/nested is denied with '/s3/folder/nested/' prefix
},
},
queryListFiles{
input: queryListFilesInput{path: "/gitA", options: &ListOptions{
Recursive: false,
PathFilters: pathFilters,
}},
list: checks(listSize(1), listHasMore(false), listLastPath("/gitA/dashboard2.json")),
files: [][]interface{}{
checks(fPath("/gitA/dashboard2.json")),
},
},
queryListFolders{
input: queryListFoldersInput{path: "/gitA", options: &ListOptions{
Recursive: false,
PathFilters: pathFilters,
}},
checks: [][]interface{}{},
},
queryListFiles{
input: queryListFilesInput{path: "/gitC", options: &ListOptions{
Recursive: false,
PathFilters: pathFilters,
}},
list: checks(listSize(0), listHasMore(false), listLastPath("")),
files: [][]interface{}{},
},
queryListFiles{
input: queryListFilesInput{path: "/gitC/nestedC", options: &ListOptions{
Recursive: false,
PathFilters: pathFilters,
}},
list: checks(listSize(1), listHasMore(false), listLastPath("/gitC/nestedC/dashboardC.json")),
files: [][]interface{}{
checks(fPath("/gitC/nestedC/dashboardC.json")),
},
},
queryListFolders{
input: queryListFoldersInput{path: "/gitC", options: &ListOptions{
Recursive: false,
PathFilters: pathFilters,
}},
checks: [][]interface{}{},
},
},
},
}
}
runTests(createListFoldersTests, t)
runTests(createListFilesTests, t)
runTests(createFileCRUDTests, t)
runTests(createFolderCrudCases, t)
runTests(createPathFiltersCases, t)
}

View File

@ -9,19 +9,111 @@ import (
"strings"
"github.com/grafana/grafana/pkg/infra/log"
_ "gocloud.dev/blob/fileblob"
_ "gocloud.dev/blob/memblob"
)
var (
directoryMarker = ".___gf_dir_marker___"
pathRegex = regexp.MustCompile(`(^/$)|(^(/[A-Za-z0-9!\-_.*'()]+)+$)`)
pathRegex = regexp.MustCompile(`(^/$)|(^(/[A-Za-z0-9!\-_.*'() ]+)+$)`)
)
// wrapper decorates a concrete FileStorage backend with path validation,
// path-filter enforcement, and root-folder prefixing, so backends operate
// on rooted paths while callers see absolute, root-relative ones.
type wrapper struct {
	log         log.Logger
	wrapped     FileStorage
	pathFilters *PathFilters
	rootFolder  string
}
// addRootFolderToFilters rebases every filter entry from the caller-visible
// namespace onto the backend namespace by replacing the leading delimiter
// with rootFolder. The filters are modified IN PLACE and returned for
// chaining; callers that need to keep the original pass a copy first
// (see copyPathFilters).
func addRootFolderToFilters(pathFilters *PathFilters, rootFolder string) *PathFilters {
	if pathFilters == nil {
		return pathFilters
	}

	for i := range pathFilters.disallowedPaths {
		pathFilters.disallowedPaths[i] = rootFolder + strings.TrimPrefix(pathFilters.disallowedPaths[i], Delimiter)
	}

	for i := range pathFilters.disallowedPrefixes {
		pathFilters.disallowedPrefixes[i] = rootFolder + strings.TrimPrefix(pathFilters.disallowedPrefixes[i], Delimiter)
	}

	for i := range pathFilters.allowedPaths {
		pathFilters.allowedPaths[i] = rootFolder + strings.TrimPrefix(pathFilters.allowedPaths[i], Delimiter)
	}

	for i := range pathFilters.allowedPrefixes {
		pathFilters.allowedPrefixes[i] = rootFolder + strings.TrimPrefix(pathFilters.allowedPrefixes[i], Delimiter)
	}

	return pathFilters
}
// copyPathFilters returns a deep copy of p (fresh slices are produced via
// NewPathFilters), or nil when p is nil. Entries are already lower-cased,
// so the re-lowering inside NewPathFilters is a harmless no-op.
func copyPathFilters(p *PathFilters) *PathFilters {
	if p == nil {
		return nil
	}

	return NewPathFilters(p.allowedPrefixes, p.allowedPaths, p.disallowedPrefixes, p.disallowedPaths)
}
// appendFilterList merges extra into base, preserving the nil/non-nil
// distinction used by PathFilters: a nil extra leaves base untouched, and a
// nil base receives a fresh copy so the caller's slice is never aliased.
func appendFilterList(base []string, extra []string) []string {
	if extra == nil {
		return base
	}

	if base != nil {
		return append(base, extra...)
	}

	copied := make([]string, len(extra))
	copy(copied, extra)
	return copied
}

// addPathFilters merges the rules from other into base in place and returns
// base. A nil other is a no-op. The original repeated the same merge logic
// four times and named its parameter "new", shadowing the builtin; both are
// fixed by the helper above with no behavior change.
func addPathFilters(base *PathFilters, other *PathFilters) *PathFilters {
	if other == nil {
		return base
	}

	base.allowedPrefixes = appendFilterList(base.allowedPrefixes, other.allowedPrefixes)
	base.allowedPaths = appendFilterList(base.allowedPaths, other.allowedPaths)
	base.disallowedPrefixes = appendFilterList(base.disallowedPrefixes, other.disallowedPrefixes)
	base.disallowedPaths = appendFilterList(base.disallowedPaths, other.disallowedPaths)

	return base
}
// newWrapper builds the decorated FileStorage: supplied filters are copied
// and rebased onto rootFolder; when none are given, an allow-all filter is
// installed so downstream code never needs a nil check.
func newWrapper(log log.Logger, wrapped FileStorage, pathFilters *PathFilters, rootFolder string) FileStorage {
	var rootedPathFilters *PathFilters
	if pathFilters != nil {
		// copy first: addRootFolderToFilters mutates its argument in place
		rootedPathFilters = addRootFolderToFilters(copyPathFilters(pathFilters), rootFolder)
	} else {
		rootedPathFilters = allowAllPathFilters()
	}

	return &wrapper{
		log:         log,
		wrapped:     wrapped,
		pathFilters: rootedPathFilters,
		rootFolder:  rootFolder,
	}
}
var (
@ -30,13 +122,14 @@ var (
func getParentFolderPath(path string) string {
if path == Delimiter || path == "" {
return Delimiter
return path
}
if !strings.Contains(path, Delimiter) {
return Delimiter
return ""
}
path = strings.TrimSuffix(path, Delimiter)
split := strings.Split(path, Delimiter)
splitWithoutLastPart := split[:len(split)-1]
if len(splitWithoutLastPart) == 1 && split[0] == "" {
@ -91,27 +184,39 @@ func (b wrapper) validatePath(path string) error {
return nil
}
// addRoot maps a caller-visible absolute path to a backend path by
// replacing the leading delimiter with the storage's root folder.
func (b wrapper) addRoot(path string) string {
	return b.rootFolder + strings.TrimPrefix(path, Delimiter)
}

// removeRoot is the inverse of addRoot: it strips the root folder from a
// backend path and normalizes the remainder back into an absolute
// caller-visible path via Join.
func (b wrapper) removeRoot(path string) string {
	return Join(Delimiter, strings.TrimPrefix(path, b.rootFolder))
}
func (b wrapper) Get(ctx context.Context, path string) (*File, error) {
if err := b.validatePath(path); err != nil {
return nil, err
}
if !b.pathFilters.isAllowed(path) {
if !b.pathFilters.IsAllowed(path) {
return nil, nil
}
return b.wrapped.Get(ctx, path)
file, err := b.wrapped.Get(ctx, b.addRoot(path))
if file != nil {
file.FullPath = b.removeRoot(file.FullPath)
}
return file, err
}
func (b wrapper) Delete(ctx context.Context, path string) error {
if err := b.validatePath(path); err != nil {
return err
}
if !b.pathFilters.isAllowed(path) {
if !b.pathFilters.IsAllowed(path) {
return nil
}
return b.wrapped.Delete(ctx, path)
return b.wrapped.Delete(ctx, b.addRoot(path))
}
func detectContentType(path string, originalGuess string) string {
@ -130,7 +235,7 @@ func (b wrapper) Upsert(ctx context.Context, file *UpsertFileCommand) error {
return err
}
if !b.pathFilters.isAllowed(file.Path) {
if !b.pathFilters.IsAllowed(file.Path) {
return nil
}
@ -144,31 +249,54 @@ func (b wrapper) Upsert(ctx context.Context, file *UpsertFileCommand) error {
file.MimeType = detectContentType(file.Path, "")
}
return b.wrapped.Upsert(ctx, file)
return b.wrapped.Upsert(ctx, &UpsertFileCommand{
Path: b.addRoot(file.Path),
MimeType: file.MimeType,
Contents: file.Contents,
Properties: file.Properties,
})
}
func (b wrapper) withDefaults(options *ListOptions, folderQuery bool) *ListOptions {
func (b wrapper) pagingOptionsWithDefaults(paging *Paging) *Paging {
if paging == nil {
return &Paging{
First: 100,
}
}
if paging.First <= 0 {
paging.First = 100
}
if paging.After != "" {
paging.After = b.addRoot(paging.After)
}
return paging
}
func (b wrapper) listOptionsWithDefaults(options *ListOptions, folderQuery bool) *ListOptions {
if options == nil {
options = &ListOptions{}
options.Recursive = folderQuery
if b.pathFilters != nil && b.pathFilters.allowedPrefixes != nil {
options.PathFilters = *b.pathFilters
}
options.PathFilters = b.pathFilters
return options
}
if b.pathFilters != nil && b.pathFilters.allowedPrefixes != nil {
if options.allowedPrefixes != nil {
options.allowedPrefixes = append(options.allowedPrefixes, b.pathFilters.allowedPrefixes...)
} else {
copiedPrefixes := make([]string, len(b.pathFilters.allowedPrefixes))
copy(copiedPrefixes, b.pathFilters.allowedPrefixes)
options.allowedPrefixes = copiedPrefixes
return &ListOptions{
Recursive: folderQuery,
PathFilters: b.pathFilters,
}
}
return options
if options.PathFilters == nil {
return &ListOptions{
Recursive: options.Recursive,
PathFilters: b.pathFilters,
}
}
rootedFilters := addRootFolderToFilters(copyPathFilters(options.PathFilters), b.rootFolder)
return &ListOptions{
Recursive: options.Recursive,
PathFilters: addPathFilters(rootedFilters, b.pathFilters),
}
}
func (b wrapper) ListFiles(ctx context.Context, path string, paging *Paging, options *ListOptions) (*ListFilesResponse, error) {
@ -176,15 +304,43 @@ func (b wrapper) ListFiles(ctx context.Context, path string, paging *Paging, opt
return nil, err
}
if paging == nil {
paging = &Paging{
First: 100,
pathWithRoot := b.addRoot(path)
resp, err := b.wrapped.ListFiles(ctx, pathWithRoot, b.pagingOptionsWithDefaults(paging), b.listOptionsWithDefaults(options, false))
if resp != nil && resp.Files != nil {
if resp.LastPath != "" {
resp.LastPath = b.removeRoot(resp.LastPath)
}
for i := 0; i < len(resp.Files); i++ {
resp.Files[i].FullPath = b.removeRoot(resp.Files[i].FullPath)
}
} else if paging.First <= 0 {
paging.First = 100
}
return b.wrapped.ListFiles(ctx, path, paging, b.withDefaults(options, false))
if err != nil {
return resp, err
}
if len(resp.Files) != 0 {
return resp, err
}
// TODO: optimize, don't fetch the contents in this case
file, err := b.Get(ctx, path)
if err != nil {
return resp, err
}
if file != nil {
file.FileMetadata.FullPath = b.removeRoot(file.FileMetadata.FullPath)
return &ListFilesResponse{
Files: []FileMetadata{file.FileMetadata},
HasMore: false,
LastPath: file.FileMetadata.FullPath,
}, nil
}
return resp, err
}
func (b wrapper) ListFolders(ctx context.Context, path string, options *ListOptions) ([]FileMetadata, error) {
@ -192,7 +348,13 @@ func (b wrapper) ListFolders(ctx context.Context, path string, options *ListOpti
return nil, err
}
return b.wrapped.ListFolders(ctx, path, b.withDefaults(options, true))
folders, err := b.wrapped.ListFolders(ctx, b.addRoot(path), b.listOptionsWithDefaults(options, true))
if folders != nil {
for i := 0; i < len(folders); i++ {
folders[i].FullPath = b.removeRoot(folders[i].FullPath)
}
}
return folders, err
}
func (b wrapper) CreateFolder(ctx context.Context, path string) error {
@ -200,11 +362,11 @@ func (b wrapper) CreateFolder(ctx context.Context, path string) error {
return err
}
if !b.pathFilters.isAllowed(path) {
if !b.pathFilters.IsAllowed(path) {
return nil
}
return b.wrapped.CreateFolder(ctx, path)
return b.wrapped.CreateFolder(ctx, b.addRoot(path))
}
func (b wrapper) DeleteFolder(ctx context.Context, path string) error {
@ -212,7 +374,7 @@ func (b wrapper) DeleteFolder(ctx context.Context, path string) error {
return err
}
if !b.pathFilters.isAllowed(path) {
if !b.pathFilters.IsAllowed(path) {
return nil
}
@ -225,7 +387,7 @@ func (b wrapper) DeleteFolder(ctx context.Context, path string) error {
return fmt.Errorf("folder %s is not empty - cant remove it", path)
}
return b.wrapped.DeleteFolder(ctx, path)
return b.wrapped.DeleteFolder(ctx, b.addRoot(path))
}
func (b wrapper) isFolderEmpty(ctx context.Context, path string) (bool, error) {

View File

@ -0,0 +1,193 @@
package filestorage
import (
"fmt"
"testing"
"github.com/stretchr/testify/require"
)
// TestWrapper_addRootFolderToFilters verifies nil handling, preservation of
// empty (non-nil) slices, and the in-place mutation semantics of
// addRootFolderToFilters.
func TestWrapper_addRootFolderToFilters(t *testing.T) {
	t.Run("should return null if passed filters are null", func(t *testing.T) {
		require.Nil(t, addRootFolderToFilters(nil, "root"))
	})

	t.Run("should not allocate empty arrays in place of nil arrays", func(t *testing.T) {
		result := addRootFolderToFilters(NewPathFilters(nil, nil, nil, nil), "root")

		require.NotNil(t, result)
		require.Nil(t, result.disallowedPrefixes)
		require.Nil(t, result.disallowedPaths)
		require.Nil(t, result.allowedPrefixes)
		require.Nil(t, result.allowedPaths)
	})

	t.Run("should preserve empty arrays", func(t *testing.T) {
		result := addRootFolderToFilters(NewPathFilters([]string{}, []string{}, nil, nil), "root")

		require.NotNil(t, result)
		require.Nil(t, result.disallowedPrefixes)
		require.Nil(t, result.disallowedPaths)
		require.NotNil(t, result.allowedPrefixes)
		require.Equal(t, []string{}, result.allowedPrefixes)
		require.NotNil(t, result.allowedPaths)
		require.Equal(t, []string{}, result.allowedPaths)
	})

	t.Run("should mutate arrays rather than reallocate", func(t *testing.T) {
		filters := NewPathFilters([]string{"/abc", "/abc2"}, nil, nil, []string{"/abc/", "/abc2/"})
		originalAllowedPrefixes := filters.allowedPrefixes
		originalDisallowedPaths := filters.disallowedPaths

		result := addRootFolderToFilters(filters, "root/")
		require.NotNil(t, result)
		require.Nil(t, result.allowedPaths)
		require.Nil(t, result.disallowedPrefixes)

		expectedAllowedPrefixes := []string{"root/abc", "root/abc2"}
		expectedDisallowedPaths := []string{"root/abc/", "root/abc2/"}
		require.Equal(t, expectedAllowedPrefixes, result.allowedPrefixes)
		require.Equal(t, expectedDisallowedPaths, result.disallowedPaths)

		// The original backing arrays must have been rewritten in place.
		require.Equal(t, expectedAllowedPrefixes, originalAllowedPrefixes)
		require.Equal(t, expectedDisallowedPaths, originalDisallowedPaths)
	})
}
// TestWrapper_copyPathFilters verifies that copyPathFilters performs a deep
// copy: nil stays nil, empty slices stay empty (not nil), and mutating the
// copy never affects the source filters.
func TestWrapper_copyPathFilters(t *testing.T) {
	t.Run("should return null if passed pathFilters are null", func(t *testing.T) {
		require.Nil(t, copyPathFilters(nil))
	})

	t.Run("should not allocate empty arrays in place of nil arrays", func(t *testing.T) {
		copiedFilters := copyPathFilters(NewPathFilters(nil, nil, nil, nil))

		require.NotNil(t, copiedFilters)
		require.Nil(t, copiedFilters.disallowedPrefixes)
		require.Nil(t, copiedFilters.disallowedPaths)
		require.Nil(t, copiedFilters.allowedPrefixes)
		require.Nil(t, copiedFilters.allowedPaths)
	})

	t.Run("should preserve empty arrays", func(t *testing.T) {
		copiedFilters := copyPathFilters(NewPathFilters([]string{}, []string{}, nil, nil))

		require.NotNil(t, copiedFilters)
		require.Nil(t, copiedFilters.disallowedPrefixes)
		require.Nil(t, copiedFilters.disallowedPaths)
		require.NotNil(t, copiedFilters.allowedPrefixes)
		require.Equal(t, []string{}, copiedFilters.allowedPrefixes)
		require.NotNil(t, copiedFilters.allowedPaths)
		require.Equal(t, []string{}, copiedFilters.allowedPaths)
	})

	// Subtest name fixed from the garbled "should new pointer with new slices".
	t.Run("should return a new pointer with new slices", func(t *testing.T) {
		filters := NewPathFilters([]string{"/abc", "/abc2"}, nil, nil, []string{"/abc/", "/abc2/"})
		copiedFilters := copyPathFilters(filters)

		require.NotSame(t, filters, copiedFilters)
		require.Equal(t, filters.allowedPrefixes, copiedFilters.allowedPrefixes)
		require.Equal(t, filters.allowedPaths, copiedFilters.allowedPaths)
		require.Equal(t, filters.disallowedPrefixes, copiedFilters.disallowedPrefixes)
		require.Equal(t, filters.disallowedPaths, copiedFilters.disallowedPaths)

		// Mutating the copy must not leak into the original slices.
		copiedFilters.disallowedPaths[0] = "changed"
		require.Equal(t, []string{"/abc/", "/abc2/"}, filters.disallowedPaths)
		require.Equal(t, []string{"changed", "/abc2/"}, copiedFilters.disallowedPaths)
		require.NotEqual(t, filters.disallowedPaths, copiedFilters.disallowedPaths)
	})
}
// TestWrapper_addPathFilters verifies that addPathFilters mutates and returns
// its first argument, concatenates each filter list, and copies (rather than
// aliases) the second argument's backing arrays when the base list is nil.
func TestWrapper_addPathFilters(t *testing.T) {
	t.Run("should return pointer to the first argument", func(t *testing.T) {
		base := NewPathFilters(nil, nil, nil, nil)
		toAdd := NewPathFilters([]string{"abc"}, []string{"abc2"}, []string{"abc3"}, []string{"abc4"})
		require.Same(t, base, addPathFilters(base, toAdd))
	})

	testcases := []struct {
		base     *PathFilters
		toAdd    *PathFilters
		expected *PathFilters
	}{
		{
			base:     NewPathFilters(nil, nil, nil, nil),
			toAdd:    NewPathFilters([]string{"abc"}, []string{"abc2"}, []string{"abc3"}, []string{"abc4"}),
			expected: NewPathFilters([]string{"abc"}, []string{"abc2"}, []string{"abc3"}, []string{"abc4"}),
		},
		{
			base:     NewPathFilters([]string{"abc"}, []string{"abc2"}, []string{"abc3"}, []string{"abc4"}),
			toAdd:    NewPathFilters(nil, nil, nil, nil),
			expected: NewPathFilters([]string{"abc"}, []string{"abc2"}, []string{"abc3"}, []string{"abc4"}),
		},
		{
			base:     NewPathFilters([]string{"abc"}, []string{"abc2"}, []string{"abc3"}, []string{"abc4"}),
			toAdd:    NewPathFilters([]string{"abc"}, []string{"abc2"}, []string{"abc3"}, []string{"abc4"}),
			expected: NewPathFilters([]string{"abc", "abc"}, []string{"abc2", "abc2"}, []string{"abc3", "abc3"}, []string{"abc4", "abc4"}),
		},
		{
			base:     NewPathFilters([]string{"abc"}, []string{}, nil, []string{"abc4"}),
			toAdd:    NewPathFilters([]string{"abc"}, []string{"abc2", "abc22", "abc222"}, []string{"abc3"}, []string{"abc4"}),
			expected: NewPathFilters([]string{"abc", "abc"}, []string{"abc2", "abc22", "abc222"}, []string{"abc3"}, []string{"abc4", "abc4"}),
		},
	}

	for i, tt := range testcases {
		// Equalf rather than Equal so a failure identifies the offending case.
		require.Equalf(t, tt.expected, addPathFilters(tt.base, tt.toAdd), "testcase %d", i)
	}

	t.Run("should not reuse arrays allocations from the second arg", func(t *testing.T) {
		base := NewPathFilters(nil, []string{}, nil, nil)
		toAdd := NewPathFilters([]string{"abc"}, []string{"abc2"}, []string{"abc3"}, []string{"abc4"})
		_ = addPathFilters(base, toAdd)

		require.Equal(t, toAdd.allowedPaths, base.allowedPaths)
		base.allowedPaths[0] = "mutated"
		require.Equal(t, []string{"mutated"}, base.allowedPaths)
		require.Equal(t, []string{"abc2"}, toAdd.allowedPaths)
		require.NotEqual(t, toAdd.allowedPaths, base.allowedPaths)

		require.Equal(t, toAdd.allowedPrefixes, base.allowedPrefixes)
		base.allowedPrefixes[0] = "mutated2"
		require.Equal(t, []string{"mutated2"}, base.allowedPrefixes)
		require.Equal(t, []string{"abc"}, toAdd.allowedPrefixes)
		require.NotEqual(t, toAdd.allowedPrefixes, base.allowedPrefixes)
	})
}
// TestFilestorage_getParentFolderPath pins the parent-folder resolution for
// relative and absolute single-segment paths, with and without a trailing
// delimiter.
func TestFilestorage_getParentFolderPath(t *testing.T) {
	var tests = []struct {
		name     string
		path     string
		expected string
	}{
		{
			name:     "should return empty path if path has a single part - relative, suffix",
			path:     "ab/",
			expected: "",
		},
		{
			name:     "should return empty path if path has a single part - relative, no suffix",
			path:     "ab",
			expected: "",
		},
		{
			name:     "should return root if path has a single part - abs, no suffix",
			path:     "/public",
			expected: Delimiter,
		},
		{
			name:     "should return root if path has a single part - abs, suffix",
			path:     "/public/",
			expected: Delimiter,
		},
	}

	for _, tt := range tests {
		// fmt.Sprint, not fmt.Sprintf: the name is not a format string, and
		// passing it as one trips go vet and would garble names containing '%'.
		t.Run(fmt.Sprint(tt.name), func(t *testing.T) {
			require.Equal(t, tt.expected, getParentFolderPath(tt.path))
		})
	}
}

View File

@ -10,7 +10,6 @@ import (
"github.com/grafana/grafana/pkg/api/routing"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/expr"
"github.com/grafana/grafana/pkg/infra/filestorage"
"github.com/grafana/grafana/pkg/infra/httpclient"
"github.com/grafana/grafana/pkg/infra/httpclient/httpclientprovider"
"github.com/grafana/grafana/pkg/infra/kvstore"
@ -150,7 +149,6 @@ var wireBasicSet = wire.NewSet(
wire.Bind(new(queryhistory.Service), new(*queryhistory.QueryHistoryService)),
quota.ProvideService,
remotecache.ProvideService,
filestorage.ProvideService,
loginservice.ProvideService,
wire.Bind(new(login.Service), new(*loginservice.Implementation)),
authinfoservice.ProvideAuthInfoService,

View File

@ -162,11 +162,5 @@ var (
Description: "Lock database during migrations",
State: FeatureStateBeta,
},
{
Name: "fileStoreApi",
Description: "Simple API for managing files",
State: FeatureStateAlpha,
RequiresDevMode: true,
},
}
)

View File

@ -122,8 +122,4 @@ const (
// FlagMigrationLocking
// Lock database during migrations
FlagMigrationLocking = "migrationLocking"
// FlagFileStoreApi
// Simple API for managing files
FlagFileStoreApi = "fileStoreApi"
)