grafana/pkg/components/imguploader/imguploader.go
Joe Blubaugh 687e79538b
Alerting: Add a general screenshot service and alerting-specific image service. (#49293)
This commit adds a pkg/services/screenshot package for taking and uploading screenshots of Grafana dashboards. It supports taking screenshots of both dashboards and individual panels within a dashboard, using the rendering service.

The screenshot package has the following services, most of which can be composed:

BrowserScreenshotService (Takes screenshots with headless Chrome)
CachableScreenshotService (Caches screenshots taken with another service such as BrowserScreenshotService)
NoopScreenshotService (A no-op screenshot service for tests)
SingleFlightScreenshotService (Prevents duplicate screenshots when taking screenshots of the same dashboard or panel in parallel)
ScreenshotUnavailableService (A screenshot service that returns ErrScreenshotsUnavailable)
UploadingScreenshotService (A screenshot service that uploads taken screenshots)

The screenshot package does not support wire dependency injection yet. ngalert constructs its own version of the service. See https://github.com/grafana/grafana/issues/49296

This PR also adds an ImageScreenshotService to ngalert. It is used to take screenshots with a screenshot service and then store a reference to their location for use by alert instances and notifiers.
2022-05-22 22:33:49 +08:00

package imguploader

import (
	"context"
	"fmt"
	"regexp"
	"time"

	"github.com/grafana/grafana/pkg/components/imguploader/gcs"
	"github.com/grafana/grafana/pkg/infra/log"
	"github.com/grafana/grafana/pkg/setting"
)

const (
	pngExt                        = ".png"
	defaultGCSSignedURLExpiration = 7 * 24 * time.Hour // 7 days
)

//go:generate mockgen -destination=mock.go -package=imguploader github.com/grafana/grafana/pkg/components/imguploader ImageUploader
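
// ImageUploader uploads the image stored at the given local path to external
// storage and returns the URL at which it can be accessed.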
type ImageUploader interface {
	Upload(ctx context.Context, path string) (string, error)
}
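
// NopImageUploader is used when no external image storage provider is
// configured: uploads are discarded and an empty URL is returned.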
type NopImageUploader struct {
}
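
// Upload implements ImageUploader as a no-op.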
func (NopImageUploader) Upload(ctx context.Context, path string) (string, error) {
return "", nil
}
var (
	logger = log.New("imguploader")
)
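
// NewImageUploader returns the ImageUploader for the provider configured under
// [external_image_storage]. When no provider is configured a NopImageUploader
// is returned; an unrecognized provider is logged as an error and also falls
// back to the NopImageUploader.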
func NewImageUploader() (ImageUploader, error) {
	switch setting.ImageUploadProvider {
	case "s3":
		s3sec, err := setting.Raw.GetSection("external_image_storage.s3")
		if err != nil {
			return nil, err
		}
		endpoint := s3sec.Key("endpoint").MustString("")
		pathStyleAccess := s3sec.Key("path_style_access").MustBool(false)
		bucket := s3sec.Key("bucket").MustString("")
		region := s3sec.Key("region").MustString("")
		path := s3sec.Key("path").MustString("")
		bucketUrl := s3sec.Key("bucket_url").MustString("")
		accessKey := s3sec.Key("access_key").MustString("")
		secretKey := s3sec.Key("secret_key").MustString("")
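		// Ensure a non-empty path prefix ends with a slash so uploaded objects
		// are keyed under it.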
		if path != "" && path[len(path)-1:] != "/" {
			path += "/"
		}
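		// When bucket or region is not set explicitly, derive both from bucket_url.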
		if bucket == "" || region == "" {
			info, err := getRegionAndBucketFromUrl(bucketUrl)
			if err != nil {
				return nil, err
			}
			bucket = info.bucket
			region = info.region
		}
		return NewS3Uploader(endpoint, region, bucket, path, "public-read", accessKey, secretKey, pathStyleAccess), nil
	case "webdav":
		webdavSec, err := setting.Raw.GetSection("external_image_storage.webdav")
		if err != nil {
			return nil, err
		}
		url := webdavSec.Key("url").String()
		if url == "" {
			return nil, fmt.Errorf("could not find URL key for image.uploader.webdav")
		}
		public_url := webdavSec.Key("public_url").String()
		username := webdavSec.Key("username").String()
		password := webdavSec.Key("password").String()
		return NewWebdavImageUploader(url, username, password, public_url)
	case "gcs":
		gcssec, err := setting.Raw.GetSection("external_image_storage.gcs")
		if err != nil {
			return nil, err
		}
		keyFile := gcssec.Key("key_file").MustString("")
		bucketName := gcssec.Key("bucket").MustString("")
		path := gcssec.Key("path").MustString("")
		enableSignedURLs := gcssec.Key("enable_signed_urls").MustBool(false)
		exp := gcssec.Key("signed_url_expiration").MustString("")
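		// signed_url_expiration is parsed as a Go duration string; when unset,
		// signed URLs expire after defaultGCSSignedURLExpiration (7 days).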
		var suExp time.Duration
		if exp != "" {
			suExp, err = time.ParseDuration(exp)
			if err != nil {
				return nil, err
			}
		} else {
			suExp = defaultGCSSignedURLExpiration
		}
		return gcs.NewUploader(keyFile, bucketName, path, enableSignedURLs, suExp)
	case "azure_blob":
		azureBlobSec, err := setting.Raw.GetSection("external_image_storage.azure_blob")
		if err != nil {
			return nil, err
		}
		account_name := azureBlobSec.Key("account_name").MustString("")
		account_key := azureBlobSec.Key("account_key").MustString("")
		container_name := azureBlobSec.Key("container_name").MustString("")
		return NewAzureBlobUploader(account_name, account_key, container_name), nil
	case "local":
		return NewLocalImageUploader()
	}
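	// A provider was configured but did not match any of the supported backends.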
	if setting.ImageUploadProvider != "" {
		logger.Error("The external image storage configuration is invalid", "unsupported provider", setting.ImageUploadProvider)
	}
	return NopImageUploader{}, nil
}
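
// s3Info holds the bucket and region derived from an S3 bucket URL.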
type s3Info struct {
	region string
	bucket string
}
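
// getRegionAndBucketFromUrl extracts the bucket name and region from a
// virtual-hosted-style or path-style S3 bucket URL.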
func getRegionAndBucketFromUrl(url string) (*s3Info, error) {
	info := &s3Info{}
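	// Virtual-hosted-style URL, e.g. https://bucket.s3-region.amazonaws.com/;
	// the region defaults to us-east-1 when it is not part of the host.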
	urlRegex := regexp.MustCompile(`https?:\/\/(.*)\.s3(-([^.]+))?\.amazonaws\.com\/?`)
	matches := urlRegex.FindStringSubmatch(url)
	if len(matches) > 0 {
		info.bucket = matches[1]
		if matches[3] != "" {
			info.region = matches[3]
		} else {
			info.region = "us-east-1"
		}
		return info, nil
	}
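	// Path-style URL, e.g. https://s3-region.amazonaws.com/bucket; again the
	// region defaults to us-east-1 when it is not part of the host.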
	urlRegex2 := regexp.MustCompile(`https?:\/\/s3(-([^.]+))?\.amazonaws\.com\/(.*)?`)
	matches2 := urlRegex2.FindStringSubmatch(url)
	if len(matches2) > 0 {
		info.bucket = matches2[3]
		if matches2[2] != "" {
			info.region = matches2[2]
		} else {
			info.region = "us-east-1"
		}
		return info, nil
	}
	return nil, fmt.Errorf("could not find bucket setting for image.uploader.s3")
}