Image Store: Add support for using signed URLs when uploading images to GCS (#26840)

Enables creating signed URLs when uploading images to Google Cloud Storage.
With signed URLs, the expiration of the public URL is configurable, and the images in the bucket no longer need to be publicly accessible.

Fixes #26773

Co-authored-by: Diana Payton <52059945+oddlittlebird@users.noreply.github.com>
Marcos Mendez 2020-09-07 13:10:14 -04:00 committed by GitHub
parent 8253b9ddb3
commit 4e94c0959a
5 changed files with 100 additions and 17 deletions


@@ -707,6 +707,8 @@ public_url =
key_file =
bucket =
path =
enable_signed_urls = false
signed_url_expiration =
[external_image_storage.azure_blob]
account_name =


@@ -1215,7 +1215,7 @@ Optional URL to send to users in notifications. If the string contains the seque
Optional path to JSON key file associated with a Google service account to authenticate and authorize. If no value is provided it tries to use the [application default credentials](https://cloud.google.com/docs/authentication/production#finding_credentials_automatically).
Service Account keys can be created and downloaded from https://console.developers.google.com/permissions/serviceaccounts.
Service Account should have "Storage Object Writer" role. The access control model of the bucket needs to be "Set object-level and bucket-level permissions". Grafana itself will make the images public readable.
Service Account should have "Storage Object Writer" role. The access control model of the bucket needs to be "Set object-level and bucket-level permissions". Grafana itself will make the images publicly readable when signed URLs are not enabled.
### bucket
@@ -1225,6 +1225,15 @@ Bucket Name on Google Cloud Storage.
Optional extra path inside bucket.
### enable_signed_urls
If set to true, Grafana creates a [signed URL](https://cloud.google.com/storage/docs/access-control/signed-urls) for the image uploaded to Google Cloud Storage.
### signed_url_expiration
Sets the signed URL expiration, which defaults to seven days.
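As an illustrative snippet (the key_file, bucket, and path values are placeholders, not defaults), the two new options sit alongside the existing GCS settings. Note that the expiration string is parsed with Go's `time.ParseDuration`, which has no day unit, so seven days is written as `168h`:

```ini
# Hypothetical custom.ini excerpt; key_file, bucket, and path are placeholders.
[external_image_storage.gcs]
key_file = /etc/grafana/gcs-service-account.json
bucket = my-grafana-images
path = dashboards
enable_signed_urls = true
# time.ParseDuration format: no "d" unit, so 14 days is written as 336h.
signed_url_expiration = 336h
```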
## [external_image_storage.azure_blob]
### account_name

go.mod

@@ -12,6 +12,7 @@ replace k8s.io/client-go => k8s.io/client-go v0.18.8
require (
cloud.google.com/go v0.60.0 // indirect
cloud.google.com/go/storage v1.8.0
github.com/BurntSushi/toml v0.3.1
github.com/VividCortex/mysqlerr v0.0.0-20170204212430-6c6b55f8796f
github.com/aws/aws-sdk-go v1.33.12


@@ -3,35 +3,57 @@ package imguploader
import (
"context"
"fmt"
"io"
"io/ioutil"
"net/http"
"os"
"path"
"time"
"golang.org/x/oauth2/jwt"
"cloud.google.com/go/storage"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/util"
"golang.org/x/oauth2/google"
)
const (
tokenUrl string = "https://www.googleapis.com/auth/devstorage.read_write" // #nosec
uploadUrl string = "https://www.googleapis.com/upload/storage/v1/b/%s/o?uploadType=media&name=%s&predefinedAcl=publicRead"
tokenUrl string = "https://www.googleapis.com/auth/devstorage.read_write" // #nosec
uploadUrl string = "https://www.googleapis.com/upload/storage/v1/b/%s/o?uploadType=media&name=%s"
publicReadOption string = "&predefinedAcl=publicRead"
bodySizeLimit = 1 << 20
)
type GCSUploader struct {
keyFile string
bucket string
path string
log log.Logger
keyFile string
bucket string
path string
log log.Logger
enableSignedUrls bool
signedUrlExpiration time.Duration
}
func NewGCSUploader(keyFile, bucket, path string) *GCSUploader {
return &GCSUploader{
keyFile: keyFile,
bucket: bucket,
path: path,
log: log.New("gcsuploader"),
func NewGCSUploader(keyFile, bucket, path string, enableSignedUrls bool, signedUrlExpiration string) (*GCSUploader, error) {
expiration, err := time.ParseDuration(signedUrlExpiration)
if err != nil {
return nil, err
}
if expiration <= 0 {
return nil, fmt.Errorf("invalid signed url expiration: %q", expiration)
}
uploader := &GCSUploader{
keyFile: keyFile,
bucket: bucket,
path: path,
log: log.New("gcsuploader"),
enableSignedUrls: enableSignedUrls,
signedUrlExpiration: expiration,
}
uploader.log.Debug(fmt.Sprintf("Created GCSUploader key=%q bucket=%q path=%q, enable_signed_urls=%v signed_url_expiration=%q", keyFile, bucket, path, enableSignedUrls, expiration.String()))
return uploader, nil
}
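A minimal sketch of calling the new constructor directly, assuming the package keeps its usual `github.com/grafana/grafana/pkg/components/imguploader` import path; the key file, bucket, and path are placeholders, and the expiration must be a `time.ParseDuration`-compatible string (`168h` for seven days):

```go
package main

import (
	"fmt"
	"log"

	"github.com/grafana/grafana/pkg/components/imguploader"
)

func main() {
	// Placeholder values for illustration only; enable signed URLs with a 7-day expiration.
	uploader, err := imguploader.NewGCSUploader(
		"/etc/grafana/gcs-service-account.json", // keyFile
		"my-grafana-images",                     // bucket
		"dashboards",                            // path
		true,                                    // enableSignedUrls
		"168h",                                  // signedUrlExpiration (7 * 24h)
	)
	if err != nil {
		// An unparseable or non-positive expiration is rejected up front.
		log.Fatalf("could not create GCS uploader: %v", err)
	}
	fmt.Println("uploader ready:", uploader != nil)
}
```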
func (u *GCSUploader) Upload(ctx context.Context, imageDiskPath string) (string, error) {
@@ -73,7 +95,43 @@ func (u *GCSUploader) Upload(ctx context.Context, imageDiskPath string) (string,
return "", err
}
return fmt.Sprintf("https://storage.googleapis.com/%s/%s", u.bucket, key), nil
if !u.enableSignedUrls {
return fmt.Sprintf("https://storage.googleapis.com/%s/%s", u.bucket, key), nil
}
u.log.Debug("Signing GCS URL")
var conf *jwt.Config
if u.keyFile != "" {
jsonKey, err := ioutil.ReadFile(u.keyFile)
if err != nil {
return "", fmt.Errorf("ioutil.ReadFile: %v", err)
}
conf, err = google.JWTConfigFromJSON(jsonKey)
if err != nil {
return "", fmt.Errorf("google.JWTConfigFromJSON: %v", err)
}
} else {
creds, err := google.FindDefaultCredentials(ctx, storage.ScopeReadWrite)
if err != nil {
return "", fmt.Errorf("google.FindDefaultCredentials: %v", err)
}
conf, err = google.JWTConfigFromJSON(creds.JSON)
if err != nil {
return "", fmt.Errorf("google.JWTConfigFromJSON: %v", err)
}
}
opts := &storage.SignedURLOptions{
Scheme: storage.SigningSchemeV4,
Method: "GET",
GoogleAccessID: conf.Email,
PrivateKey: conf.PrivateKey,
Expires: time.Now().Add(u.signedUrlExpiration),
}
signedUrl, err := storage.SignedURL(u.bucket, key, opts)
if err != nil {
return "", fmt.Errorf("storage.SignedURL: %v", err)
}
return signedUrl, nil
}
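To illustrate what the change buys: the string returned by `Upload` in signed-URL mode can be fetched with a plain HTTP GET until it expires, with no GCS credentials and without the object ever being made public. A hypothetical consumer sketch (the URL below is a placeholder):

```go
package main

import (
	"fmt"
	"io"
	"io/ioutil"
	"net/http"
)

func main() {
	// Placeholder: in practice this value comes from GCSUploader.Upload.
	signedUrl := "https://storage.googleapis.com/my-grafana-images/dashboards/abc123.png?X-Goog-Algorithm=GOOG4-RSA-SHA256&X-Goog-Signature=..."

	resp, err := http.Get(signedUrl) // no Authorization header required
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()

	// Read at most 1 MiB, mirroring the bodySizeLimit idea used by the uploader.
	body, _ := ioutil.ReadAll(io.LimitReader(resp.Body, 1<<20))
	fmt.Println("status:", resp.StatusCode, "bytes:", len(body))
}
```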
func (u *GCSUploader) uploadFile(client *http.Client, imageDiskPath, key string) error {
@@ -86,6 +144,9 @@ func (u *GCSUploader) uploadFile(client *http.Client, imageDiskPath, key string)
defer fileReader.Close()
reqUrl := fmt.Sprintf(uploadUrl, u.bucket, key)
if !u.enableSignedUrls {
reqUrl += publicReadOption
}
u.log.Debug("Request URL: ", reqUrl)
req, err := http.NewRequest("POST", reqUrl, fileReader)
@@ -100,9 +161,13 @@ func (u *GCSUploader) uploadFile(client *http.Client, imageDiskPath, key string)
if err != nil {
return err
}
resp.Body.Close()
defer resp.Body.Close()
if resp.StatusCode != 200 {
respBody, err := ioutil.ReadAll(io.LimitReader(resp.Body, bodySizeLimit))
if err == nil && len(respBody) > 0 {
u.log.Error(fmt.Sprintf("GCS response: url=%q status=%d, body=%q", reqUrl, resp.StatusCode, string(respBody)))
}
return fmt.Errorf("GCS response status code %d", resp.StatusCode)
}
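For clarity, a small sketch of the two request URL shapes produced by `uploadFile`, using the constants from this file; the bucket and object key are placeholders:

```go
package main

import "fmt"

const (
	uploadUrl        = "https://www.googleapis.com/upload/storage/v1/b/%s/o?uploadType=media&name=%s"
	publicReadOption = "&predefinedAcl=publicRead"
)

func main() {
	bucket, key := "my-grafana-images", "dashboards/abc123.png"

	plain := fmt.Sprintf(uploadUrl, bucket, key)
	fmt.Println(plain)                    // signed-URL mode: no predefined ACL, object stays private
	fmt.Println(plain + publicReadOption) // public mode: object made publicly readable at upload time
}
```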


@@ -4,12 +4,16 @@ import (
"context"
"fmt"
"regexp"
"time"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/setting"
)
const pngExt = ".png"
const (
pngExt = ".png"
defaultSGcsSignedUrlExpiration = 7 * 24 * time.Hour //7 days
)
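A quick sanity check on how the seven-day default travels: `NewImageUploader` passes `defaultSGcsSignedUrlExpiration.String()` down as the fallback string, and that hour-based form round-trips cleanly through `time.ParseDuration`:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	d := 7 * 24 * time.Hour
	fmt.Println(d.String()) // 168h0m0s

	parsed, err := time.ParseDuration(d.String())
	fmt.Println(parsed == d, err) // true <nil>
}
```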
type ImageUploader interface {
Upload(ctx context.Context, path string) (string, error)
@@ -82,8 +86,10 @@ func NewImageUploader() (ImageUploader, error) {
keyFile := gcssec.Key("key_file").MustString("")
bucketName := gcssec.Key("bucket").MustString("")
path := gcssec.Key("path").MustString("")
enableSignedUrls := gcssec.Key("enable_signed_urls").MustBool(false)
signedUrlExpiration := gcssec.Key("signed_url_expiration").MustString(defaultSGcsSignedUrlExpiration.String())
return NewGCSUploader(keyFile, bucketName, path), nil
return NewGCSUploader(keyFile, bucketName, path, enableSignedUrls, signedUrlExpiration)
case "azure_blob":
azureBlobSec, err := setting.Raw.GetSection("external_image_storage.azure_blob")
if err != nil {