mirror of
https://github.com/grafana/grafana.git
synced 2025-02-25 18:55:37 -06:00
Merge branch 'main' into drclau/unistor/replace-authenticators-3
This commit is contained in:
@@ -3513,8 +3513,7 @@ exports[`better eslint`] = {
|
||||
[0, 0, 0, "Do not use export all (\`export * from ...\`)", "2"],
|
||||
[0, 0, 0, "Do not use export all (\`export * from ...\`)", "3"],
|
||||
[0, 0, 0, "Do not use export all (\`export * from ...\`)", "4"],
|
||||
[0, 0, 0, "Do not use export all (\`export * from ...\`)", "5"],
|
||||
[0, 0, 0, "Do not use export all (\`export * from ...\`)", "6"]
|
||||
[0, 0, 0, "Do not use export all (\`export * from ...\`)", "5"]
|
||||
],
|
||||
"public/app/features/datasources/state/navModel.ts:5381": [
|
||||
[0, 0, 0, "Do not use any type assertions.", "0"],
|
||||
@@ -4770,8 +4769,7 @@ exports[`better eslint`] = {
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "35"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "36"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "37"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "38"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "39"]
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "38"]
|
||||
],
|
||||
"public/app/features/query/state/DashboardQueryRunner/AnnotationsQueryRunner.ts:5381": [
|
||||
[0, 0, 0, "Do not use any type assertions.", "0"]
|
||||
@@ -7310,24 +7308,6 @@ exports[`no gf-form usage`] = {
|
||||
"public/app/features/query/components/QueryEditorRow.tsx:5381": [
|
||||
[0, 0, 0, "gf-form usage has been deprecated. Use a component from @grafana/ui or custom CSS instead.", "5381"]
|
||||
],
|
||||
"public/app/features/query/components/QueryGroupOptions.tsx:5381": [
|
||||
[0, 0, 0, "gf-form usage has been deprecated. Use a component from @grafana/ui or custom CSS instead.", "5381"],
|
||||
[0, 0, 0, "gf-form usage has been deprecated. Use a component from @grafana/ui or custom CSS instead.", "5381"],
|
||||
[0, 0, 0, "gf-form usage has been deprecated. Use a component from @grafana/ui or custom CSS instead.", "5381"],
|
||||
[0, 0, 0, "gf-form usage has been deprecated. Use a component from @grafana/ui or custom CSS instead.", "5381"],
|
||||
[0, 0, 0, "gf-form usage has been deprecated. Use a component from @grafana/ui or custom CSS instead.", "5381"],
|
||||
[0, 0, 0, "gf-form usage has been deprecated. Use a component from @grafana/ui or custom CSS instead.", "5381"],
|
||||
[0, 0, 0, "gf-form usage has been deprecated. Use a component from @grafana/ui or custom CSS instead.", "5381"],
|
||||
[0, 0, 0, "gf-form usage has been deprecated. Use a component from @grafana/ui or custom CSS instead.", "5381"],
|
||||
[0, 0, 0, "gf-form usage has been deprecated. Use a component from @grafana/ui or custom CSS instead.", "5381"],
|
||||
[0, 0, 0, "gf-form usage has been deprecated. Use a component from @grafana/ui or custom CSS instead.", "5381"],
|
||||
[0, 0, 0, "gf-form usage has been deprecated. Use a component from @grafana/ui or custom CSS instead.", "5381"],
|
||||
[0, 0, 0, "gf-form usage has been deprecated. Use a component from @grafana/ui or custom CSS instead.", "5381"],
|
||||
[0, 0, 0, "gf-form usage has been deprecated. Use a component from @grafana/ui or custom CSS instead.", "5381"],
|
||||
[0, 0, 0, "gf-form usage has been deprecated. Use a component from @grafana/ui or custom CSS instead.", "5381"],
|
||||
[0, 0, 0, "gf-form usage has been deprecated. Use a component from @grafana/ui or custom CSS instead.", "5381"],
|
||||
[0, 0, 0, "gf-form usage has been deprecated. Use a component from @grafana/ui or custom CSS instead.", "5381"]
|
||||
],
|
||||
"public/app/features/variables/adhoc/picker/AdHocFilter.tsx:5381": [
|
||||
[0, 0, 0, "gf-form usage has been deprecated. Use a component from @grafana/ui or custom CSS instead.", "5381"]
|
||||
],
|
||||
|
||||
@@ -16,7 +16,7 @@ watch_exts = [".go", ".ini", ".toml", ".template.html"]
|
||||
ignore_files = [".*_gen.go"]
|
||||
build_delay = 1500
|
||||
cmds = [
|
||||
["GO_BUILD_DEV=1", "make", "build-go"],
|
||||
["GO_BUILD_DEV=1", "make", "build-go-fast"],
|
||||
["make", "gen-jsonnet"],
|
||||
["./bin/grafana", "server", "-profile", "-profile-addr=127.0.0.1", "-profile-port=6000", "-profile-block-rate=1", "-profile-mutex-rate=5", "-packaging=dev", "cfg:app_mode=development"]
|
||||
]
|
||||
|
||||
86
.drone.yml
86
.drone.yml
@@ -71,18 +71,10 @@ steps:
|
||||
- echo $DRONE_RUNNER_NAME
|
||||
image: alpine:3.20.3
|
||||
name: identify-runner
|
||||
- commands:
|
||||
- go build -o ./bin/build -ldflags '-extldflags -static' ./pkg/build/cmd
|
||||
depends_on: []
|
||||
environment:
|
||||
CGO_ENABLED: 0
|
||||
image: golang:1.23.1-alpine
|
||||
name: compile-build-cmd
|
||||
- commands:
|
||||
- go install github.com/bazelbuild/buildtools/buildifier@latest
|
||||
- buildifier --lint=warn -mode=check -r .
|
||||
depends_on:
|
||||
- compile-build-cmd
|
||||
depends_on: []
|
||||
image: golang:1.23.1-alpine
|
||||
name: lint-starlark
|
||||
trigger:
|
||||
@@ -1278,13 +1270,6 @@ platform:
|
||||
os: linux
|
||||
services: []
|
||||
steps:
|
||||
- commands:
|
||||
- go build -o ./bin/build -ldflags '-extldflags -static' ./pkg/build/cmd
|
||||
depends_on: []
|
||||
environment:
|
||||
CGO_ENABLED: 0
|
||||
image: golang:1.23.1-alpine
|
||||
name: compile-build-cmd
|
||||
- commands:
|
||||
- apt-get update -yq && apt-get install shellcheck
|
||||
- shellcheck -e SC1071 -e SC2162 scripts/**/*.sh
|
||||
@@ -3434,31 +3419,32 @@ steps:
|
||||
- |2-
|
||||
|
||||
bash -c '
|
||||
IMAGE_TAG=$(echo "$${TAG}" | sed -e "s/+/-/g")
|
||||
debug=
|
||||
if [[ -n $${DRY_RUN} ]]; then debug=echo; fi
|
||||
docker login -u $${DOCKER_USER} -p $${DOCKER_PASSWORD}
|
||||
|
||||
# Push the grafana-image-tags images
|
||||
$$debug docker push grafana/grafana-image-tags:$${TAG}-amd64
|
||||
$$debug docker push grafana/grafana-image-tags:$${TAG}-arm64
|
||||
$$debug docker push grafana/grafana-image-tags:$${TAG}-armv7
|
||||
$$debug docker push grafana/grafana-image-tags:$${TAG}-ubuntu-amd64
|
||||
$$debug docker push grafana/grafana-image-tags:$${TAG}-ubuntu-arm64
|
||||
$$debug docker push grafana/grafana-image-tags:$${TAG}-ubuntu-armv7
|
||||
$$debug docker push grafana/grafana-image-tags:$${IMAGE_TAG}-amd64
|
||||
$$debug docker push grafana/grafana-image-tags:$${IMAGE_TAG}-arm64
|
||||
$$debug docker push grafana/grafana-image-tags:$${IMAGE_TAG}-armv7
|
||||
$$debug docker push grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-amd64
|
||||
$$debug docker push grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-arm64
|
||||
$$debug docker push grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-armv7
|
||||
|
||||
# Create the grafana manifests
|
||||
$$debug docker manifest create grafana/grafana:${TAG} grafana/grafana-image-tags:$${TAG}-amd64 grafana/grafana-image-tags:$${TAG}-arm64 grafana/grafana-image-tags:$${TAG}-armv7
|
||||
$$debug docker manifest create grafana/grafana:${TAG} grafana/grafana-image-tags:$${IMAGE_TAG}-amd64 grafana/grafana-image-tags:$${IMAGE_TAG}-arm64 grafana/grafana-image-tags:$${IMAGE_TAG}-armv7
|
||||
|
||||
$$debug docker manifest create grafana/grafana:${TAG}-ubuntu grafana/grafana-image-tags:$${TAG}-ubuntu-amd64 grafana/grafana-image-tags:$${TAG}-ubuntu-arm64 grafana/grafana-image-tags:$${TAG}-ubuntu-armv7
|
||||
$$debug docker manifest create grafana/grafana:${TAG}-ubuntu grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-amd64 grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-arm64 grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-armv7
|
||||
|
||||
# Push the grafana manifests
|
||||
$$debug docker manifest push grafana/grafana:$${TAG}
|
||||
$$debug docker manifest push grafana/grafana:$${TAG}-ubuntu
|
||||
$$debug docker manifest push grafana/grafana:$${IMAGE_TAG}
|
||||
$$debug docker manifest push grafana/grafana:$${IMAGE_TAG}-ubuntu
|
||||
|
||||
# if LATEST is set, then also create & push latest
|
||||
if [[ -n $${LATEST} ]]; then
|
||||
$$debug docker manifest create grafana/grafana:latest grafana/grafana-image-tags:$${TAG}-amd64 grafana/grafana-image-tags:$${TAG}-arm64 grafana/grafana-image-tags:$${TAG}-armv7
|
||||
$$debug docker manifest create grafana/grafana:latest-ubuntu grafana/grafana-image-tags:$${TAG}-ubuntu-amd64 grafana/grafana-image-tags:$${TAG}-ubuntu-arm64 grafana/grafana-image-tags:$${TAG}-ubuntu-armv7
|
||||
$$debug docker manifest create grafana/grafana:latest grafana/grafana-image-tags:$${IMAGE_TAG}-amd64 grafana/grafana-image-tags:$${IMAGE_TAG}-arm64 grafana/grafana-image-tags:$${IMAGE_TAG}-armv7
|
||||
$$debug docker manifest create grafana/grafana:latest-ubuntu grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-amd64 grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-arm64 grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-armv7
|
||||
|
||||
$$debug docker manifest push grafana/grafana:latest
|
||||
$$debug docker manifest push grafana/grafana:latest-ubuntu
|
||||
@@ -3565,31 +3551,32 @@ steps:
|
||||
- |2-
|
||||
|
||||
bash -c '
|
||||
IMAGE_TAG=$(echo "$${TAG}" | sed -e "s/+/-/g")
|
||||
debug=
|
||||
if [[ -n $${DRY_RUN} ]]; then debug=echo; fi
|
||||
docker login -u $${DOCKER_USER} -p $${DOCKER_PASSWORD}
|
||||
|
||||
# Push the grafana-image-tags images
|
||||
$$debug docker push grafana/grafana-image-tags:$${TAG}-amd64
|
||||
$$debug docker push grafana/grafana-image-tags:$${TAG}-arm64
|
||||
$$debug docker push grafana/grafana-image-tags:$${TAG}-armv7
|
||||
$$debug docker push grafana/grafana-image-tags:$${TAG}-ubuntu-amd64
|
||||
$$debug docker push grafana/grafana-image-tags:$${TAG}-ubuntu-arm64
|
||||
$$debug docker push grafana/grafana-image-tags:$${TAG}-ubuntu-armv7
|
||||
$$debug docker push grafana/grafana-image-tags:$${IMAGE_TAG}-amd64
|
||||
$$debug docker push grafana/grafana-image-tags:$${IMAGE_TAG}-arm64
|
||||
$$debug docker push grafana/grafana-image-tags:$${IMAGE_TAG}-armv7
|
||||
$$debug docker push grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-amd64
|
||||
$$debug docker push grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-arm64
|
||||
$$debug docker push grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-armv7
|
||||
|
||||
# Create the grafana manifests
|
||||
$$debug docker manifest create grafana/grafana:${TAG} grafana/grafana-image-tags:$${TAG}-amd64 grafana/grafana-image-tags:$${TAG}-arm64 grafana/grafana-image-tags:$${TAG}-armv7
|
||||
$$debug docker manifest create grafana/grafana:${TAG} grafana/grafana-image-tags:$${IMAGE_TAG}-amd64 grafana/grafana-image-tags:$${IMAGE_TAG}-arm64 grafana/grafana-image-tags:$${IMAGE_TAG}-armv7
|
||||
|
||||
$$debug docker manifest create grafana/grafana:${TAG}-ubuntu grafana/grafana-image-tags:$${TAG}-ubuntu-amd64 grafana/grafana-image-tags:$${TAG}-ubuntu-arm64 grafana/grafana-image-tags:$${TAG}-ubuntu-armv7
|
||||
$$debug docker manifest create grafana/grafana:${TAG}-ubuntu grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-amd64 grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-arm64 grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-armv7
|
||||
|
||||
# Push the grafana manifests
|
||||
$$debug docker manifest push grafana/grafana:$${TAG}
|
||||
$$debug docker manifest push grafana/grafana:$${TAG}-ubuntu
|
||||
$$debug docker manifest push grafana/grafana:$${IMAGE_TAG}
|
||||
$$debug docker manifest push grafana/grafana:$${IMAGE_TAG}-ubuntu
|
||||
|
||||
# if LATEST is set, then also create & push latest
|
||||
if [[ -n $${LATEST} ]]; then
|
||||
$$debug docker manifest create grafana/grafana:latest grafana/grafana-image-tags:$${TAG}-amd64 grafana/grafana-image-tags:$${TAG}-arm64 grafana/grafana-image-tags:$${TAG}-armv7
|
||||
$$debug docker manifest create grafana/grafana:latest-ubuntu grafana/grafana-image-tags:$${TAG}-ubuntu-amd64 grafana/grafana-image-tags:$${TAG}-ubuntu-arm64 grafana/grafana-image-tags:$${TAG}-ubuntu-armv7
|
||||
$$debug docker manifest create grafana/grafana:latest grafana/grafana-image-tags:$${IMAGE_TAG}-amd64 grafana/grafana-image-tags:$${IMAGE_TAG}-arm64 grafana/grafana-image-tags:$${IMAGE_TAG}-armv7
|
||||
$$debug docker manifest create grafana/grafana:latest-ubuntu grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-amd64 grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-arm64 grafana/grafana-image-tags:$${IMAGE_TAG}-ubuntu-armv7
|
||||
|
||||
$$debug docker manifest push grafana/grafana:latest
|
||||
$$debug docker manifest push grafana/grafana:latest-ubuntu
|
||||
@@ -3681,7 +3668,8 @@ steps:
|
||||
image: golang:1.23.1-alpine
|
||||
name: compile-build-cmd
|
||||
- commands:
|
||||
- ./bin/build artifacts packages --tag $${DRONE_TAG} --src-bucket $${PRERELEASE_BUCKET}
|
||||
- ./bin/build artifacts packages --artifacts-editions=oss --tag $${DRONE_TAG} --src-bucket
|
||||
$${PRERELEASE_BUCKET}
|
||||
depends_on:
|
||||
- compile-build-cmd
|
||||
environment:
|
||||
@@ -3691,19 +3679,6 @@ steps:
|
||||
from_secret: prerelease_bucket
|
||||
image: grafana/grafana-ci-deploy:1.3.3
|
||||
name: publish-artifacts
|
||||
- commands:
|
||||
- ./bin/build artifacts static-assets --tag ${DRONE_TAG} --static-asset-editions=grafana-oss
|
||||
depends_on:
|
||||
- compile-build-cmd
|
||||
environment:
|
||||
GCP_KEY:
|
||||
from_secret: gcp_grafanauploads_base64
|
||||
PRERELEASE_BUCKET:
|
||||
from_secret: prerelease_bucket
|
||||
STATIC_ASSET_EDITIONS:
|
||||
from_secret: static_asset_editions
|
||||
image: grafana/grafana-ci-deploy:1.3.3
|
||||
name: publish-static-assets
|
||||
- commands:
|
||||
- ./bin/build artifacts storybook --tag ${DRONE_TAG}
|
||||
depends_on:
|
||||
@@ -3723,7 +3698,6 @@ steps:
|
||||
-f latest=$${LATEST} --repo=grafana/grafana release-pr.yml
|
||||
depends_on:
|
||||
- publish-artifacts
|
||||
- publish-static-assets
|
||||
environment:
|
||||
GH_CLI_URL: https://github.com/cli/cli/releases/download/v2.50.0/gh_2.50.0_linux_amd64.tar.gz
|
||||
GITHUB_TOKEN:
|
||||
@@ -6013,6 +5987,6 @@ kind: secret
|
||||
name: gcr_credentials
|
||||
---
|
||||
kind: signature
|
||||
hmac: e618274ea7a8bfbf3d5e151d459348aa9382fe63fe7fef76c997db3cba74779f
|
||||
hmac: dc30a3a00ee542fb289da36ef6db4274684db4533c472f7f903468919d1046ac
|
||||
|
||||
...
|
||||
|
||||
1
.github/CODEOWNERS
vendored
1
.github/CODEOWNERS
vendored
@@ -241,7 +241,6 @@
|
||||
/devenv/docker/rpmtest/ @grafana/grafana-backend-services-squad
|
||||
/devenv/jsonnet/ @grafana/dataviz-squad
|
||||
/devenv/local-npm/ @grafana/frontend-ops
|
||||
/devenv/vscode/ @grafana/frontend-ops
|
||||
/devenv/setup.sh @grafana/grafana-backend-services-squad
|
||||
/devenv/plugins.yaml @grafana/plugins-platform-frontend
|
||||
|
||||
|
||||
7
.vscode/launch.json
vendored
7
.vscode/launch.json
vendored
@@ -9,7 +9,12 @@
|
||||
"program": "${workspaceFolder}/pkg/cmd/grafana/",
|
||||
"env": {},
|
||||
"cwd": "${workspaceFolder}",
|
||||
"args": ["server", "--homepath", "${workspaceFolder}", "--packaging", "dev", "cfg:app_mode=development"]
|
||||
"args": [
|
||||
"server",
|
||||
"--homepath", "${workspaceFolder}",
|
||||
"--packaging", "dev",
|
||||
"cfg:app_mode=development",
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "Run API Server (testdata)",
|
||||
|
||||
6
Makefile
6
Makefile
@@ -174,12 +174,16 @@ gen-jsonnet:
|
||||
go generate ./devenv/jsonnet
|
||||
|
||||
.PHONY: update-workspace
|
||||
update-workspace:
|
||||
update-workspace: gen-go
|
||||
@echo "updating workspace"
|
||||
bash scripts/go-workspace/update-workspace.sh
|
||||
|
||||
.PHONY: build-go
|
||||
build-go: gen-go update-workspace ## Build all Go binaries.
|
||||
@echo "build go files with updated workspace"
|
||||
$(GO) run build.go $(GO_BUILD_FLAGS) build
|
||||
|
||||
build-go-fast: gen-go ## Build all Go binaries.
|
||||
@echo "build go files"
|
||||
$(GO) run build.go $(GO_BUILD_FLAGS) build
|
||||
|
||||
|
||||
@@ -38,7 +38,7 @@ If you're interested in contributing to the Grafana project:
|
||||
|
||||
## Get involved
|
||||
|
||||
- Follow [@grafana on Twitter](https://twitter.com/grafana/).
|
||||
- Follow [@grafana on X (formerly Twitter)](https://x.com/grafana/).
|
||||
- Read and subscribe to the [Grafana blog](https://grafana.com/blog/).
|
||||
- If you have a specific question, check out our [discussion forums](https://community.grafana.com/).
|
||||
- For general discussions, join us on the [official Slack](https://slack.grafana.com) team.
|
||||
|
||||
@@ -553,7 +553,7 @@ token_expiration_day_limit =
|
||||
# Login cookie name
|
||||
login_cookie_name = grafana_session
|
||||
|
||||
# Disable usage of Grafana build-in login solution.
|
||||
# Disable usage of Grafana's built-in login solution.
|
||||
disable_login = false
|
||||
|
||||
# The maximum lifetime (duration) an authenticated user can be inactive before being required to login at next visit. Default is 7 days (7d). This setting should be expressed as a duration, e.g. 5m (minutes), 6h (hours), 10d (days), 2w (weeks), 1M (month). The lifetime resets at each successful token rotation (token_rotation_interval_minutes).
|
||||
|
||||
@@ -81,7 +81,7 @@ host = "localhost:1025"
|
||||
You can access the web UI at http://localhost:12080/#/
|
||||
|
||||
## Debugging setup in VS Code
|
||||
An example of launch.json is provided in `devenv/vscode/launch.json`. It basically does what Makefile and .bra.toml do. The 'program' field is set to the folder name so VS Code loads all *.go files in it instead of just main.go.
|
||||
An example of launch.json is provided in `.vscode/launch.json`. It basically does what Makefile and .bra.toml do. The 'program' field is set to the folder name so VS Code loads all *.go files in it instead of just main.go.
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
|
||||
@@ -1,21 +0,0 @@
|
||||
{
|
||||
// Use IntelliSense to learn about possible attributes.
|
||||
// Hover to view descriptions of existing attributes.
|
||||
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": "grafana-server",
|
||||
"type": "go",
|
||||
"request": "launch",
|
||||
"mode": "auto",
|
||||
"program": "${workspaceFolder}/pkg/cmd/grafana-server",
|
||||
"env": {},
|
||||
"args": [
|
||||
"--homepath=${workspaceFolder}",
|
||||
"--packaging=dev",
|
||||
"cfg:app_mode=development",
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -199,6 +199,26 @@ The following list contains role-based access control actions used by Grafana Ad
|
||||
| `grafana‑adaptive‑metrics‑app.exemptions:read` | None | Read recommendation exemptions. |
|
||||
| `grafana‑adaptive‑metrics‑app.exemptions:write` | None | Create, update, and delete recommendation exemptions. |
|
||||
|
||||
### Grafana Alerting Notification action definitions
|
||||
|
||||
To enable these permissions, enable the `alertingApiServer` feature toggle.
|
||||
|
||||
| Action | Applicable scopes | Description |
|
||||
| -------------------------------------------- | ---------------------------------- | --------------------------------------------------------------------------------------------------------------- |
|
||||
| `alert.notifications.receivers:read` | `receivers:*`<br>`receivers:uid:*` | Read contact points. None |
|
||||
| `alert.notifications.receivers.secrets:read` | `receivers:*`<br>`receivers:uid:*` | Export contact points with decrypted secrets.None |
|
||||
| `alert.notifications.receivers:create` | None | Create a new contact points. The creator is automatically granted full access to the created contact point.None |
|
||||
| `alert.notifications.receivers:write` | `receivers:*`<br>`receivers:uid:*` | Update existing contact points.None |
|
||||
| `alert.notifications.receivers:delete` | `receivers:*`<br>`receivers:uid:*` | Update and delete existing contact points.None |
|
||||
| `receivers.permissions:read` | `receivers:*`<br>`receivers:uid:*` | Read permissions for contact points.None |
|
||||
| `receivers.permissions:write` | `receivers:*`<br>`receivers:uid:*` | Manage permissions for contact points.None |
|
||||
| `alert.notifications.time-intervals:read` | None | Read mute time intervals.None |
|
||||
| `alert.notifications.time-intervals:write` | None | Create new or update existing mute time intervals.None |
|
||||
| `alert.notifications.time-intervals:delete` | None | Delete existing time intervals.None |
|
||||
| `alert.notifications.templates:read` | None | Read templates. |
|
||||
| `alert.notifications.templates:write` | None | Create new or update existing templates.None |
|
||||
| `alert.notifications.templates:delete` | None | Delete existing templates.None |
|
||||
|
||||
## Scope definitions
|
||||
|
||||
The following list contains role-based access control scopes.
|
||||
|
||||
@@ -149,8 +149,8 @@ Each alert instance in the `alerts` array has the following fields.
|
||||
| generatorURL | string | URL of the alert rule in the Grafana UI |
|
||||
| fingerprint | string | The labels fingerprint, alarms with the same labels will have the same fingerprint |
|
||||
| silenceURL | string | URL to silence the alert rule in the Grafana UI |
|
||||
| dashboardURL | string | **Deprecated. It will be removed in a future release.** |
|
||||
| panelURL | string | **Deprecated. It will be removed in a future release.** |
|
||||
| dashboardURL | string | A link to the Grafana Dashboard if the alert has a Dashboard UID annotation |
|
||||
| panelURL | string | A link to the panel if the alert has a Panel ID annotation |
|
||||
| imageURL | string | URL of a screenshot of a panel assigned to the rule that created this notification |
|
||||
|
||||
{{< admonition type="note" >}}
|
||||
|
||||
@@ -134,8 +134,8 @@ The webhook notification is a simple way to send information about a state chang
|
||||
| generatorURL | string | URL of the alert rule in the Grafana UI |
|
||||
| fingerprint | string | The labels fingerprint, alarms with the same labels will have the same fingerprint |
|
||||
| silenceURL | string | URL to silence the alert rule in the Grafana UI |
|
||||
| dashboardURL | string | **Will be deprecated soon** |
|
||||
| panelURL | string | **Will be deprecated soon** |
|
||||
| dashboardURL | string | A link to the Grafana Dashboard if the alert has a Dashboard UID annotation |
|
||||
| panelURL | string | A link to the panel if the alert has a Panel ID annotation |
|
||||
| imageURL | string | URL of a screenshot of a panel assigned to the rule that created this notification |
|
||||
|
||||
{{< admonition type="note" >}}
|
||||
|
||||
@@ -23,20 +23,20 @@ weight: 400
|
||||
|
||||
### Alert
|
||||
|
||||
| Name | Kind | Description | Example |
|
||||
| ------------ | -------- | ------------------------------------------------------------------------------------ | --------------------- |
|
||||
| Status | `string` | Firing or resolved | `{{ .Status }}` |
|
||||
| Labels | `KV` | The labels for this alert | `{{ .Labels }}` |
|
||||
| Annotations | `KV` | The annotations for this alert | `{{ .Annotations }}` |
|
||||
| Values | `KV` | The values of all expressions, including Classic Conditions | `{{ .Values }}` |
|
||||
| StartsAt | `Time` | The time the alert fired | `{{ .StartsAt }}` |
|
||||
| EndsAt | `Time` | | `{{ .EndsAt }}` |
|
||||
| GeneratorURL | `string` | A link to Grafana, or the Alertmanager if using an external Alertmanager | `{{ .GeneratorURL }}` |
|
||||
| SilenceURL | `string` | A link to silence the alert | `{{ .SilenceURL }}` |
|
||||
| DashboardURL | `string` | A link to the Grafana Dashboard if the alert has a Dashboard UID annotation | `{{ .DashboardURL }}` |
|
||||
| PanelURL | `string` | A link to the panel if the alert has a Panel ID annotation | `{{ .PanelURL }}` |
|
||||
| Fingerprint | `string` | A unique string that identifies the alert | `{{ .Fingerprint }}` |
|
||||
| ValueString | `string` | A string that contains the labels and value of each reduced expression in the alert. | `{{ .ValueString }}` |
|
||||
| Name | Kind | Description | Example |
|
||||
| ------------ | -------- | ----------------------------------------------------------------------------------- | --------------------- |
|
||||
| Status | `string` | Firing or resolved | `{{ .Status }}` |
|
||||
| Labels | `KV` | The labels for this alert | `{{ .Labels }}` |
|
||||
| Annotations | `KV` | The annotations for this alert | `{{ .Annotations }}` |
|
||||
| Values | `KV` | The values of all expressions, including Classic Conditions | `{{ .Values }}` |
|
||||
| StartsAt | `Time` | The time the alert fired | `{{ .StartsAt }}` |
|
||||
| EndsAt | `Time` | | `{{ .EndsAt }}` |
|
||||
| GeneratorURL | `string` | A link to Grafana, or the source of the alert if using an external alert generator | `{{ .GeneratorURL }}` |
|
||||
| SilenceURL | `string` | A link to silence the alert | `{{ .SilenceURL }}` |
|
||||
| DashboardURL | `string` | A link to the Grafana Dashboard if the alert has a Dashboard UID annotation | `{{ .DashboardURL }}` |
|
||||
| PanelURL | `string` | A link to the panel if the alert has a Panel ID annotation | `{{ .PanelURL }}` |
|
||||
| Fingerprint | `string` | A unique string that identifies the alert | `{{ .Fingerprint }}` |
|
||||
| ValueString | `string` | A string that contains the labels and value of each reduced expression in the alert | `{{ .ValueString }}` |
|
||||
|
||||
### ExtendedData
|
||||
|
||||
|
||||
@@ -52,4 +52,32 @@ Grafana Alerting has the following permissions.
|
||||
| `alert.provisioning:write` | n/a | Update all Grafana alert rules, notification policies, etc via provisioning API. Permissions to folders and data source are not required. |
|
||||
| `alert.provisioning.provenance:write` | n/a | Set provisioning status for alerting resources. Cannot be used alone. Requires user to have permissions to access resources |
|
||||
|
||||
Contact point permissions. To enable these permissions, enable the `alertingApiServer` feature toggle.
|
||||
|
||||
| Action | Applicable scope | Description |
|
||||
| -------------------------------------------- | ---------------------------------- | ----------------------------------------------------------------------------------------------------------- |
|
||||
| `alert.notifications.receivers:read` | `receivers:*`<br>`receivers:uid:*` | Read contact points. |
|
||||
| `alert.notifications.receivers.secrets:read` | `receivers:*`<br>`receivers:uid:*` | Export contact points with decrypted secrets. |
|
||||
| `alert.notifications.receivers:create` | n/a | Create a new contact points. The creator is automatically granted full access to the created contact point. |
|
||||
| `alert.notifications.receivers:write` | `receivers:*`<br>`receivers:uid:*` | Update existing contact points. |
|
||||
| `alert.notifications.receivers:delete` | `receivers:*`<br>`receivers:uid:*` | Update and delete existing contact points. |
|
||||
| `receivers.permissions:read` | `receivers:*`<br>`receivers:uid:*` | Read permissions for contact points. |
|
||||
| `receivers.permissions:write` | `receivers:*`<br>`receivers:uid:*` | Manage permissions for contact points. |
|
||||
|
||||
Mute time interval permissions. To enable these permissions, enable the `alertingApiServer` feature toggle.
|
||||
|
||||
| Action | Applicable scope | Description |
|
||||
| ------------------------------------------- | ---------------- | -------------------------------------------------- |
|
||||
| `alert.notifications.time-intervals:read` | n/a | Read mute time intervals. |
|
||||
| `alert.notifications.time-intervals:write` | n/a | Create new or update existing mute time intervals. |
|
||||
| `alert.notifications.time-intervals:delete` | n/a | Delete existing time intervals. |
|
||||
|
||||
Notification template permissions. To enable these permissions, enable the `alertingApiServer` feature toggle.
|
||||
|
||||
| Action | Applicable scope | Description |
|
||||
| -------------------------------------- | ---------------- | ---------------------------------------- |
|
||||
| `alert.notifications.templates:read` | n/a | Read templates. |
|
||||
| `alert.notifications.templates:write` | n/a | Create new or update existing templates. |
|
||||
| `alert.notifications.templates:delete` | n/a | Delete existing templates. |
|
||||
|
||||
To help plan your RBAC rollout strategy, refer to [Plan your RBAC rollout strategy](https://grafana.com/docs/grafana/next/administration/roles-and-permissions/access-control/plan-rbac-rollout-strategy/).
|
||||
|
||||
@@ -57,12 +57,28 @@ Details of the fixed roles and the access they provide for Grafana Alerting are
|
||||
| Access to alert rules provisioning API: `fixed:alerting.provisioning:writer` | `alert.provisioning:read` and `alert.provisioning:write` | Manage all alert rules, notification policies, contact points, templates, in the organization using the provisioning API. |
|
||||
| Set provisioning status: `fixed:alerting.provisioning.status:writer` | `alert.provisioning.provenance:write` | Set provisioning rules for Alerting resources. Should be used together with other regular roles (Notifications Writer and/or Rules Writer.) |
|
||||
|
||||
If you have enabled the `alertingApiServer` feature toggle, an additional set of fixed roles is available.
|
||||
|
||||
| Display name in UI / Fixed role | Permissions | Description |
|
||||
| ------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------- |
|
||||
| Contact Point Reader: `fixed:alerting.receivers:reader` | `alert.notifications.receivers:read` for scope `receivers:*` | Read all contact points. |
|
||||
| Contact Point Creator: `fixed:alerting.receivers:creator` | `alert.notifications.receivers:create` | Create a new contact point. The user is automatically granted full access to the created contact point. |
|
||||
| Contact Point Writer: `fixed:alerting.receivers:writer` | `alert.notifications.receivers:read`, `alert.notifications.receivers:write`, `alert.notifications.receivers:delete` for scope `receivers:*` and <br> `alert.notifications.receivers:create` | Create a new contact point and manage all existing contact points. |
|
||||
| Templates Reader: `fixed:alerting.templates:reader` | `alert.notifications.templates:read` | Read all notification templates. |
|
||||
| Templates Writer: `fixed:alerting.templates:writer` | `alert.notifications.templates:read`, `alert.notifications.templates:write`, `alert.notifications.templates:delete` | Create new and manage existing notification templates. |
|
||||
| Time Intervals Reader: `fixed:alerting.time-intervals:reader` | `alert.notifications.time-intervals:read` | Read all time intervals. |
|
||||
| Time Intervals Writer: `fixed:alerting.time-intervals:writer` | `alert.notifications.time-intervals:read`, `alert.notifications.time-intervals:write`, `alert.notifications.time-intervals:delete` | Create new and manage existing time intervals. |
|
||||
|
||||
## Create custom roles
|
||||
|
||||
Create custom roles of your own to manage permissions. Custom roles contain unique combinations of permissions, actions and scopes. Create a custom role when basic roles and fixed roles do not meet your permissions requirements.
|
||||
|
||||
For more information on creating custom roles, refer to [Create custom roles](https://grafana.com/docs/grafana/latest/administration/roles-and-permissions/access-control/manage-rbac-roles/#create-custom-roles).
|
||||
|
||||
{{< admonition type="note" >}}
|
||||
It is not recommended to create custom roles that include `alerting.notifications.receiver` actions with a scope other than `receivers:*`. The UID used in the scope is not stable and changes whenever a contact point is renamed.
|
||||
{{< /admonition >}}
|
||||
|
||||
### Examples
|
||||
|
||||
The following examples give you an idea of how you can combine permissions for Grafana Alerting.
|
||||
|
||||
@@ -64,3 +64,30 @@ To manage folder permissions, complete the following steps.
|
||||
1. Hover your mouse cursor over a folder and click **Go to folder**.
|
||||
1. Click **Manage permissions** from the Folder actions menu.
|
||||
1. Update or add permissions as required.
|
||||
|
||||
## Manage access using contact point permissions
|
||||
|
||||
### Before you begin
|
||||
|
||||
- Enable the `alertingApiServer` feature toggle.
|
||||
|
||||
Extend or limit the access provided by a role to contact points by assigning permissions to individual contact points.
|
||||
|
||||
This allows different users, teams, or service accounts to have customized access to read or modify specific contact points.
|
||||
|
||||
Refer to the following table for details on the additional access provided by contact point permissions.
|
||||
|
||||
| Folder permission | Additional Access |
|
||||
| ----------------- | --------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| View | View and export the contact point, as well as select it on the Alert rule edit page |
|
||||
| Edit | Update or delete the contact point |
|
||||
| Admin | Same additional access as Edit, plus the ability to manage permissions for the contact point. The user must also have permissions to read users and teams. |
|
||||
|
||||
### Steps
|
||||
|
||||
To manage contact point permissions, complete the following steps.
|
||||
|
||||
1. In the left-side menu, click **Contact points**.
|
||||
1. Hover your mouse cursor over a contact point and click **More**.
|
||||
1. Click **Manage permissions** from the actions menu.
|
||||
1. Update or add permissions as required.
|
||||
|
||||
@@ -155,7 +155,7 @@ We also bundle a dashboard within Grafana so you can start viewing your metrics
|
||||
1. Navigate to the data source's [configuration page](ref:configure-prometheus-data-source).
|
||||
1. Select the **Dashboards** tab.
|
||||
|
||||
This displays dashboards for Grafana and Prometheus.
|
||||
This displays dashboards for Grafana and Prometheus.
|
||||
|
||||
1. Select **Import** for the dashboard to import.
|
||||
|
||||
|
||||
@@ -165,6 +165,7 @@ Experimental features might be changed or removed without prior notice.
|
||||
| `disableClassicHTTPHistogram` | Disables classic HTTP Histogram (use with enableNativeHTTPHistogram) |
|
||||
| `kubernetesSnapshots` | Routes snapshot requests from /api to the /apis endpoint |
|
||||
| `kubernetesDashboards` | Use the kubernetes API in the frontend for dashboards |
|
||||
| `kubernetesDashboardsAPI` | Use the kubernetes API in the backend for dashboards |
|
||||
| `kubernetesFolders` | Use the kubernetes API in the frontend for folders, and route /api/folders requests to k8s |
|
||||
| `grafanaAPIServerTestingWithExperimentalAPIs` | Facilitate integration testing of experimental APIs |
|
||||
| `datasourceQueryTypes` | Show query type endpoints in datasource API servers (currently hardcoded for testdata, expressions, and prometheus) |
|
||||
|
||||
@@ -105,8 +105,8 @@ To add a tag, follow these steps:
|
||||
### Optional: Use Aggregate by
|
||||
|
||||
{{% admonition type="warning" %}}
|
||||
**Aggregate by** is an [experimental feature](/docs/release-life-cycle/). Engineering and on-call support is not available. Documentation is either limited or not provided outside of code comments. No SLA is provided.
|
||||
[Enable the `metricsSummary` feature toggle](/docs/grafana/latest/setup-grafana/configure-grafana/feature-toggles/) in Grafana to use this feature. Your Grafana Tempo data source must also point to a Tempo database with the [Metrics Summary API](https://grafana.com/docs/tempo/latest/api_docs/metrics-summary/) enabled. Contact Grafana Support to enable this feature in Grafana Cloud.
|
||||
Metrics summary API and the **Aggregate by** feature are deprecated in Grafana Cloud and Grafana 11.3 and later.
|
||||
It will be removed in a future release.
|
||||
{{% /admonition %}}
|
||||
|
||||
Using **Aggregate by**, you can calculate RED metrics (total span count, percent erroring spans, and latency information) for spans of `kind=server` that match your filter criteria, grouped by one or more attributes.
|
||||
@@ -118,7 +118,16 @@ For additional information, refer to [Traces to metrics: Ad-hoc RED metrics in G
|
||||
|
||||
{{< youtube id="xOolCpm2F8c" >}}
|
||||
|
||||
When you use **Aggregate by**, the selections you make determine how the information is reported in the Table. Every combination that matches selections in your data is listed in the table.
|
||||
**Aggregate by** is an [experimental feature](/docs/release-life-cycle/) that is disabled by default.
|
||||
[Enable the `metricsSummary` feature toggle](/docs/grafana/latest/setup-grafana/configure-grafana/feature-toggles/) in Grafana to use this feature.
|
||||
|
||||
Your Grafana Tempo data source must also point to a Tempo database with the [Metrics Summary API](https://grafana.com/docs/tempo/latest/api_docs/metrics-summary/) enabled.
|
||||
Contact Grafana Support to enable this feature in Grafana Cloud.
|
||||
|
||||
#### Use Aggregate by
|
||||
|
||||
When you use **Aggregate by**, the selections you make determine how the information is reported in the Table.
|
||||
Every combination that matches selections in your data is listed in the table.
|
||||
Each aggregate value, for example `intrinsic`:`name`, has a corresponding column in the results table.
|
||||
|
||||
For example, **names** matching `GET /:endpoint` with a **span.http.user_agent** of `k6/0.46` appeared in 31,466 spans. Instead of being listed by traces and associated spans, the query results are grouped by the selections in **Aggregate by**.
|
||||
|
||||
6
go.mod
6
go.mod
@@ -73,8 +73,8 @@ require (
|
||||
github.com/gorilla/mux v1.8.1 // @grafana/grafana-backend-group
|
||||
github.com/gorilla/websocket v1.5.0 // @grafana/grafana-app-platform-squad
|
||||
github.com/grafana/alerting v0.0.0-20241010165806-807ddf183724 // @grafana/alerting-backend
|
||||
github.com/grafana/authlib v0.0.0-20240919120951-58259833c564 // @grafana/identity-access-team
|
||||
github.com/grafana/authlib/claims v0.0.0-20240827210201-19d5347dd8dd // @grafana/identity-access-team
|
||||
github.com/grafana/authlib v0.0.0-20241014135010-3e1f37f75699 // @grafana/identity-access-team
|
||||
github.com/grafana/authlib/claims v0.0.0-20240926100702-4aee62663da0 // @grafana/identity-access-team
|
||||
github.com/grafana/codejen v0.0.3 // @grafana/dataviz-squad
|
||||
github.com/grafana/cuetsy v0.1.11 // @grafana/grafana-as-code
|
||||
github.com/grafana/dataplane/examples v0.0.1 // @grafana/observability-metrics
|
||||
@@ -87,7 +87,7 @@ require (
|
||||
github.com/grafana/grafana-cloud-migration-snapshot v1.3.0 // @grafana/grafana-operator-experience-squad
|
||||
github.com/grafana/grafana-google-sdk-go v0.1.0 // @grafana/partner-datasources
|
||||
github.com/grafana/grafana-openapi-client-go v0.0.0-20231213163343-bd475d63fb79 // @grafana/grafana-backend-group
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.251.0 // @grafana/plugins-platform-backend
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.253.0 // @grafana/plugins-platform-backend
|
||||
github.com/grafana/grafana/pkg/aggregator v0.0.0-20240813192817-1b0e6b5c09b2 // @grafana/grafana-app-platform-squad
|
||||
github.com/grafana/grafana/pkg/apimachinery v0.0.0-20240821155123-6891eb1d35da // @grafana/grafana-app-platform-squad
|
||||
github.com/grafana/grafana/pkg/apiserver v0.0.0-20240821155123-6891eb1d35da // @grafana/grafana-app-platform-squad
|
||||
|
||||
12
go.sum
12
go.sum
@@ -2249,10 +2249,10 @@ github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWm
|
||||
github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
|
||||
github.com/grafana/alerting v0.0.0-20241010165806-807ddf183724 h1:u+ZM5TLkdeEoSWXgYWxc4XRfPHhXpR63MyHXJxbBLrc=
|
||||
github.com/grafana/alerting v0.0.0-20241010165806-807ddf183724/go.mod h1:QsnoKX/iYZxA4Cv+H+wC7uxutBD8qi8ZW5UJvD2TYmU=
|
||||
github.com/grafana/authlib v0.0.0-20240919120951-58259833c564 h1:zYF/RBulpvMqPYR3gbzJZ8t/j/Eymn5FNidSYkueNCA=
|
||||
github.com/grafana/authlib v0.0.0-20240919120951-58259833c564/go.mod h1:PFzXbCrn0GIpN4KwT6NP1l5Z1CPLfmKHnYx8rZzQcyY=
|
||||
github.com/grafana/authlib/claims v0.0.0-20240827210201-19d5347dd8dd h1:sIlR7n38/MnZvX2qxDEszywXdI5soCwQ78aTDSARvus=
|
||||
github.com/grafana/authlib/claims v0.0.0-20240827210201-19d5347dd8dd/go.mod h1:r+F8H6awwjNQt/KPZ2GNwjk8TvsJ7/gxzkXN26GlL/A=
|
||||
github.com/grafana/authlib v0.0.0-20241014135010-3e1f37f75699 h1:+xSpRpQPhMXAE9z68u0zMzzIa78jy1UqFb4tMJczFNc=
|
||||
github.com/grafana/authlib v0.0.0-20241014135010-3e1f37f75699/go.mod h1:fhuI+ulquEIVcLsbwPml9JapWQzg8EYBp29HteO62DM=
|
||||
github.com/grafana/authlib/claims v0.0.0-20240926100702-4aee62663da0 h1:XT/WvQCWVVOvXRJy0SCQHkhxXFHNRJ3+jzhW5PutEk8=
|
||||
github.com/grafana/authlib/claims v0.0.0-20240926100702-4aee62663da0/go.mod h1:r+F8H6awwjNQt/KPZ2GNwjk8TvsJ7/gxzkXN26GlL/A=
|
||||
github.com/grafana/codejen v0.0.3 h1:tAWxoTUuhgmEqxJPOLtJoxlPBbMULFwKFOcRsPRPXDw=
|
||||
github.com/grafana/codejen v0.0.3/go.mod h1:zmwwM/DRyQB7pfuBjTWII3CWtxcXh8LTwAYGfDfpR6s=
|
||||
github.com/grafana/cue v0.0.0-20230926092038-971951014e3f h1:TmYAMnqg3d5KYEAaT6PtTguL2GjLfvr6wnAX8Azw6tQ=
|
||||
@@ -2284,8 +2284,8 @@ github.com/grafana/grafana-google-sdk-go v0.1.0/go.mod h1:Vo2TKWfDVmNTELBUM+3lkr
|
||||
github.com/grafana/grafana-openapi-client-go v0.0.0-20231213163343-bd475d63fb79 h1:r+mU5bGMzcXCRVAuOrTn54S80qbfVkvTdUJZfSfTNbs=
|
||||
github.com/grafana/grafana-openapi-client-go v0.0.0-20231213163343-bd475d63fb79/go.mod h1:wc6Hbh3K2TgCUSfBC/BOzabItujtHMESZeFk5ZhdxhQ=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.114.0/go.mod h1:D7x3ah+1d4phNXpbnOaxa/osSaZlwh9/ZUnGGzegRbk=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.251.0 h1:gnOtxrC/1rqFvpSbQYyoZqkr47oWDlz4Q2L6Ozmsi3w=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.251.0/go.mod h1:gCGN9kHY3KeX4qyni3+Kead38Q+85pYOrsDcxZp6AIk=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.253.0 h1:KaCrqqsDgVIoT8hwvwuUMKV7QbHVlvRoFN5+U2rOXR8=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.253.0/go.mod h1:gCGN9kHY3KeX4qyni3+Kead38Q+85pYOrsDcxZp6AIk=
|
||||
github.com/grafana/grafana/apps/playlist v0.0.0-20240917082838-e2bce38a7990 h1:uQMZE/z+Y+o/U0z/g8ckAHss7U7LswedilByA2535DU=
|
||||
github.com/grafana/grafana/apps/playlist v0.0.0-20240917082838-e2bce38a7990/go.mod h1:3Vi0xv/4OBkBw4R9GAERkSrBnx06qrjpmNBRisucuSM=
|
||||
github.com/grafana/grafana/pkg/aggregator v0.0.0-20240813192817-1b0e6b5c09b2 h1:2H9x4q53pkfUGtSNYD1qSBpNnxrFgylof/TYADb5xMI=
|
||||
|
||||
15
go.work.sum
15
go.work.sum
@@ -1,7 +1,6 @@
|
||||
buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.31.0-20230802163732-1c33ebd9ecfa.1 h1:tdpHgTbmbvEIARu+bixzmleMi14+3imnpoFXz+Qzjp4=
|
||||
buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.31.0-20230802163732-1c33ebd9ecfa.1/go.mod h1:xafc+XIsTxTy76GJQ1TKgvJWsSugFBqMaN27WhUblew=
|
||||
cel.dev/expr v0.15.0 h1:O1jzfJCQBfL5BFoYktaxwIhuttaQPsVWerH9/EEKx0w=
|
||||
cel.dev/expr v0.15.0/go.mod h1:TRSuuV7DlVCE/uwv5QbAiW/v8l5O8C4eEPHeu7gf7Sg=
|
||||
cel.dev/expr v0.16.0 h1:yloc84fytn4zmJX2GU3TkXGsaieaV7dQ057Qs4sIG2Y=
|
||||
cel.dev/expr v0.16.0/go.mod h1:TRSuuV7DlVCE/uwv5QbAiW/v8l5O8C4eEPHeu7gf7Sg=
|
||||
cloud.google.com/go/accessapproval v1.7.11 h1:MgtE8CI+YJWPGGHnxQ9z1VQqV87h+vSGy2MeM/m0ggQ=
|
||||
@@ -516,6 +515,8 @@ github.com/elastic/go-sysinfo v1.11.2/go.mod h1:GKqR8bbMK/1ITnez9NIsIfXQr25aLhRJ
|
||||
github.com/elastic/go-windows v1.0.1 h1:AlYZOldA+UJ0/2nBuqWdo90GFCgG9xuyw9SYzGUtJm0=
|
||||
github.com/elastic/go-windows v1.0.1/go.mod h1:FoVvqWSun28vaDQPbj2Elfc0JahhPB7WQEGa3c814Ss=
|
||||
github.com/elazarl/goproxy v0.0.0-20230731152917-f99041a5c027/go.mod h1:Ro8st/ElPeALwNFlcTpWmkr6IoMFfkjXAvTHpevnDsM=
|
||||
github.com/elazarl/goproxy/ext v0.0.0-20190711103511-473e67f1d7d2/go.mod h1:gNh8nYJoAm43RfaxurUnxr+N1PwuFV3ZMl/efxlIlY8=
|
||||
github.com/ettle/strcase v0.1.1 h1:htFueZyVeE1XNnMEfbqp5r67qAN/4r6ya1ysq8Q+Zcw=
|
||||
github.com/expr-lang/expr v1.16.2 h1:JvMnzUs3LeVHBvGFcXYmXo+Q6DPDmzrlcSBO6Wy3w4s=
|
||||
github.com/expr-lang/expr v1.16.2/go.mod h1:uCkhfG+x7fcZ5A5sXHKuQ07jGZRl6J0FCAaf2k4PtVQ=
|
||||
github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE=
|
||||
@@ -598,7 +599,6 @@ github.com/gogo/status v1.1.0/go.mod h1:BFv9nrluPLmrS0EmGVvLaPNmRosr9KapBYd5/hpY
|
||||
github.com/golang-jwt/jwt v3.2.1+incompatible h1:73Z+4BJcrTC+KczS6WvTPvRGOp1WmfEP4Q1lOd9Z/+c=
|
||||
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 h1:DACJavvAHhabrF08vX0COfcOBJRhZ8lUbR+ZWIs0Y5g=
|
||||
github.com/golang/glog v1.2.1 h1:OptwRhECazUx5ix5TTWC3EZhsZEHWcYWY4FQHTIubm4=
|
||||
github.com/golang/glog v1.2.1/go.mod h1:6AhwSGph0fcJtXVM/PEHPqZlFeoLxhs7/t5UDAwmO+w=
|
||||
github.com/golang/glog v1.2.2 h1:1+mZ9upx1Dh6FmUTFR1naJ77miKiXgALjWOZ3NVFPmY=
|
||||
github.com/golang/glog v1.2.2/go.mod h1:6AhwSGph0fcJtXVM/PEHPqZlFeoLxhs7/t5UDAwmO+w=
|
||||
github.com/gomarkdown/markdown v0.0.0-20230922112808-5421fefb8386 h1:EcQR3gusLHN46TAD+G+EbaaqJArt5vHhNpXAa12PQf4=
|
||||
@@ -906,7 +906,6 @@ github.com/spaolacci/murmur3 v1.1.0 h1:7c1g84S4BPRrfL5Xrdp6fOJ206sU9y293DDHaoy0b
|
||||
github.com/spaolacci/murmur3 v1.1.0/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
|
||||
github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk=
|
||||
github.com/spkg/bom v0.0.0-20160624110644-59b7046e48ad h1:fiWzISvDn0Csy5H0iwgAuJGQTUpVfEMJJd4nRFXogbc=
|
||||
github.com/stoewer/go-strcase v1.2.0/go.mod h1:IBiWB2sKIp3wVVQ3Y035++gc+knqhUQag1KpM8ahLw8=
|
||||
github.com/stoewer/parquet-cli v0.0.7 h1:rhdZODIbyMS3twr4OM3am8BPPT5pbfMcHLH93whDM5o=
|
||||
github.com/stoewer/parquet-cli v0.0.7/go.mod h1:bskxHdj8q3H1EmfuCqjViFoeO3NEvs5lzZAQvI8Nfjk=
|
||||
github.com/streadway/amqp v1.0.0 h1:kuuDrUJFZL1QYL9hUNuCxNObNzB0bV/ZG5jV3RWAQgo=
|
||||
@@ -1063,6 +1062,7 @@ go.opentelemetry.io/collector/service v0.95.0 h1:t6RUHV7ByFjkjPKGz5n6n4wIoXZLC8H
|
||||
go.opentelemetry.io/collector/service v0.95.0/go.mod h1:4yappQmDE5UZmLE9wwtj6IPM4W5KGLIYfObEAaejtQc=
|
||||
go.opentelemetry.io/contrib/config v0.4.0 h1:Xb+ncYOqseLroMuBesGNRgVQolXcXOhMj7EhGwJCdHs=
|
||||
go.opentelemetry.io/contrib/config v0.4.0/go.mod h1:drNk2xRqLWW4/amk6Uh1S+sDAJTc7bcEEN1GfJzj418=
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.53.0/go.mod h1:ImRBLMJv177/pwiLZ7tU7HDGNdBv7rS0HQ99eN/zBl8=
|
||||
go.opentelemetry.io/contrib/propagators/b3 v1.23.0 h1:aaIGWc5JdfRGpCafLRxMJbD65MfTa206AwSKkvGS0Hg=
|
||||
go.opentelemetry.io/contrib/propagators/b3 v1.23.0/go.mod h1:Gyz7V7XghvwTq+mIhLFlTgcc03UDroOg8vezs4NLhwU=
|
||||
go.opentelemetry.io/otel v1.28.0/go.mod h1:q68ijF8Fc8CnMHKyzqL6akLO46ePnjkgfIMIjUIX9z4=
|
||||
@@ -1074,7 +1074,6 @@ go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.23.1 h1:ZqR
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.23.1/go.mod h1:D7ynngPWlGJrqyGSDOdscuv7uqttfCE3jcBvffDv9y4=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.23.1 h1:q/Nj5/2TZRIt6PderQ9oU0M00fzoe8UZuINGw6ETGTw=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.23.1/go.mod h1:DTE9yAu6r08jU3xa68GiSeI7oRcSEQ2RpKbbQGO+dWM=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.28.0/go.mod h1:s75jGIWA9OfCMzF0xr+ZgfrB5FEbbV7UuYo32ahUiFI=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.27.0/go.mod h1:MOiCmryaYtc+V0Ei+Tx9o5S1ZjA7kzLucuVuyzBZloQ=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.28.0/go.mod h1:QWFXnDavXWwMx2EEcZsf3yxgEKAqsxQ+Syjp+seyInw=
|
||||
go.opentelemetry.io/otel/exporters/prometheus v0.46.0 h1:I8WIFXR351FoLJYuloU4EgXbtNX2URfU/85pUPheIEQ=
|
||||
@@ -1085,7 +1084,6 @@ go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.24.0 h1:s0PHtIkN+3xrbDO
|
||||
go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.24.0/go.mod h1:hZlFbDbRt++MMPCCfSJfmhkGIWnX1h3XjkfxZUjLrIA=
|
||||
go.opentelemetry.io/otel/metric v1.28.0/go.mod h1:Fb1eVBFZmLVTMb6PPohq3TO9IIhUisDsbJoL/+uQW4s=
|
||||
go.opentelemetry.io/otel/sdk v1.24.0/go.mod h1:KVrIYw6tEubO9E96HQpcmpTKDVn9gdv35HoYiQWGDFg=
|
||||
go.opentelemetry.io/otel/sdk v1.28.0/go.mod h1:oYj7ClPUA7Iw3m+r7GeEjz0qckQRJK2B8zjcZEfu7Pg=
|
||||
go.opentelemetry.io/otel/sdk/metric v1.26.0 h1:cWSks5tfriHPdWFnl+qpX3P681aAYqlZHcAyHw5aU9Y=
|
||||
go.opentelemetry.io/otel/sdk/metric v1.26.0/go.mod h1:ClMFFknnThJCksebJwz7KIyEDHO+nTB6gK8obLy8RyE=
|
||||
go.opentelemetry.io/otel/trace v1.28.0/go.mod h1:jPyXzNPg6da9+38HEwElrQiHlVMTnVfM3/yv2OlIHaI=
|
||||
@@ -1118,11 +1116,9 @@ golang.org/x/net v0.0.0-20201202161906-c7110b5ffcbb/go.mod h1:sp8m0HH+o8qH0wwXwY
|
||||
golang.org/x/net v0.0.0-20210726213435-c6fcb2dbf985/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20211123203042-d83791d6bcd9/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20211216030914-fe4d6282115f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
|
||||
golang.org/x/net v0.26.0/go.mod h1:5YKkiSynbBIh3p6iOc/vibscux0x38BZDkn8sCUPxHE=
|
||||
golang.org/x/net v0.27.0/go.mod h1:dDi0PyhWNoiUOrAS8uXv/vnScO4wnHQO4mj9fn/RytE=
|
||||
golang.org/x/net v0.28.0/go.mod h1:yqtgsTWOOnlGLG9GFRrK3++bGOUEkNBoHZc8MEDWPNg=
|
||||
golang.org/x/oauth2 v0.20.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
||||
golang.org/x/oauth2 v0.21.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
||||
golang.org/x/oauth2 v0.22.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
||||
golang.org/x/sys v0.0.0-20190204203706-41f3e6584952/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
@@ -1134,8 +1130,6 @@ golang.org/x/sys v0.23.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.24.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457 h1:zf5N6UOrA487eEFacMePxjXAJctxKmyjKUsjA11Uzuk=
|
||||
golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457/go.mod h1:pRgIJT+bRLFKnoM1ldnzKoxTIn14Yxz928LQRYYgIN0=
|
||||
golang.org/x/term v0.21.0/go.mod h1:ooXLefLobQVslOqselCNF4SxFAaoS6KujMbsGzSDmX0=
|
||||
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI=
|
||||
golang.org/x/text v0.17.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
|
||||
golang.org/x/tools v0.1.6-0.20210726203631-07bc1bf47fb2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||
@@ -1150,15 +1144,12 @@ gonum.org/v1/plot v0.14.0 h1:+LBDVFYwFe4LHhdP8coW6296MBEY4nQ+Y4vuUpJopcE=
|
||||
gonum.org/v1/plot v0.14.0/go.mod h1:MLdR9424SJed+5VqC6MsouEpig9pZX2VZ57H9ko2bXU=
|
||||
google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM=
|
||||
google.golang.org/genproto v0.0.0-20240730163845-b1a4ccb954bf/go.mod h1:mCr1K1c8kX+1iSBREvU3Juo11CB+QOEWxbRS01wWl5M=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20240528184218-531527333157/go.mod h1:99sLkeliLXfdj2J75X3Ho+rrVCaJze0uwN7zDDkjPVU=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20240604185151-ef581f913117/go.mod h1:OimBR/bc1wPO9iV4NC2bpyjy3VnAwZh5EBPQdtaE5oo=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20240725223205-93522f1f2a9f/go.mod h1:AHT0dDg3SoMOgZGnZk29b5xTbPHMoEC8qthmBLJCpys=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20240730163845-b1a4ccb954bf/go.mod h1:OFMYQFHJ4TM3JRlWDZhJbZfra2uqc3WLBZiaaqP4DtU=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20240814211410-ddb44dafa142/go.mod h1:d6be+8HhtEtucleCbxpPW9PA9XwISACu8nvpPqF0BVo=
|
||||
google.golang.org/genproto/googleapis/bytestream v0.0.0-20240730163845-b1a4ccb954bf h1:T4tsZBlZYXK3j40sQNP5MBO32I+rn6ypV1PpklsiV8k=
|
||||
google.golang.org/genproto/googleapis/bytestream v0.0.0-20240730163845-b1a4ccb954bf/go.mod h1:5/MT647Cn/GGhwTpXC7QqcaR5Cnee4v4MKCU1/nwnIQ=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20240528184218-531527333157/go.mod h1:EfXuqaE1J41VCDicxHzUDm+8rk+7ZdXzHV0IhO/I6s0=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20240604185151-ef581f913117/go.mod h1:EfXuqaE1J41VCDicxHzUDm+8rk+7ZdXzHV0IhO/I6s0=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20240701130421-f6361c86f094/go.mod h1:Ue6ibwXGpU+dqIcODieyLOcgj7z8+IcskoNIgZxtrFY=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20240722135656-d784300faade/go.mod h1:Ue6ibwXGpU+dqIcODieyLOcgj7z8+IcskoNIgZxtrFY=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20240730163845-b1a4ccb954bf/go.mod h1:Ue6ibwXGpU+dqIcODieyLOcgj7z8+IcskoNIgZxtrFY=
|
||||
|
||||
2
nx.json
2
nx.json
@@ -3,7 +3,7 @@
|
||||
"default": {
|
||||
"runner": "nx/tasks-runners/default",
|
||||
"options": {
|
||||
"cacheableOperations": ["build"]
|
||||
"cacheableOperations": ["build", "generate"]
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
14
package.json
14
package.json
@@ -31,11 +31,11 @@
|
||||
"lint:sass": "yarn stylelint '{public/sass,packages}/**/*.scss' --cache",
|
||||
"test:ci": "mkdir -p reports/junit && JEST_JUNIT_OUTPUT_DIR=reports/junit jest --ci --reporters=default --reporters=jest-junit -w ${TEST_MAX_WORKERS:-100%}",
|
||||
"lint:fix": "yarn lint:ts --fix",
|
||||
"packages:build": "nx run-many -t build --projects='@grafana/*'",
|
||||
"packages:clean": "rimraf ./npm-artifacts && lerna run clean --parallel",
|
||||
"packages:build": "nx run-many -t build --projects='tag:scope:package'",
|
||||
"packages:clean": "rimraf ./npm-artifacts && nx run-many -t clean --projects='tag:scope:package' --maxParallel=100",
|
||||
"packages:prepare": "lerna version --no-push --no-git-tag-version --force-publish --exact",
|
||||
"packages:pack": "mkdir -p ./npm-artifacts && lerna exec --no-private -- yarn pack --out \"../../npm-artifacts/%s-%v.tgz\"",
|
||||
"packages:typecheck": "nx run-many -t typecheck --projects='@grafana/*'",
|
||||
"packages:typecheck": "nx run-many -t typecheck --projects='tag:scope:package'",
|
||||
"prettier:check": "prettier --check --list-different=false --log-level=warn \"**/*.{ts,tsx,scss,md,mdx,json}\"",
|
||||
"prettier:checkDocs": "prettier --check --list-different=false --log-level=warn \"docs/**/*.md\" \"*.md\" \"packages/**/*.{ts,tsx,scss,md,mdx,json}\"",
|
||||
"prettier:write": "prettier --list-different \"**/*.{js,ts,tsx,scss,md,mdx,json}\" --write",
|
||||
@@ -58,10 +58,10 @@
|
||||
"betterer:stats": "ts-node --transpile-only --project ./scripts/cli/tsconfig.json ./scripts/cli/reportBettererStats.ts",
|
||||
"betterer:issues": "ts-node --transpile-only --project ./scripts/cli/tsconfig.json ./scripts/cli/generateBettererIssues.ts",
|
||||
"generate-icons-bundle-cache-file": "node ./scripts/generate-icon-bundle.js",
|
||||
"plugin:build": "nx run-many -t build --projects='@grafana-plugins/*'",
|
||||
"plugin:build:commit": "nx run-many -t build:commit --projects='@grafana-plugins/*'",
|
||||
"plugin:build:dev": "nx run-many -t dev --projects='@grafana-plugins/*' --maxParallel=100",
|
||||
"generate-icons": "yarn workspace @grafana/saga-icons generate",
|
||||
"plugin:build": "nx run-many -t build --projects='tag:scope:plugin'",
|
||||
"plugin:build:commit": "nx run-many -t build:commit --projects='tag:scope:plugin'",
|
||||
"plugin:build:dev": "nx run-many -t dev --projects='tag:scope:plugin' --maxParallel=100",
|
||||
"generate-icons": "nx run grafana-icons:generate",
|
||||
"generate-apis": "rtk-query-codegen-openapi ./scripts/generate-rtk-apis.ts"
|
||||
},
|
||||
"grafana": {
|
||||
|
||||
8
packages/grafana-data/project.json
Normal file
8
packages/grafana-data/project.json
Normal file
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"$schema": "../../node_modules/nx/schemas/project-schema.json",
|
||||
"projectType": "library",
|
||||
"tags": ["scope:package", "type:ui"],
|
||||
"targets": {
|
||||
"build": {}
|
||||
}
|
||||
}
|
||||
@@ -486,4 +486,52 @@ describe('fieldToTimeField', () => {
|
||||
],
|
||||
});
|
||||
});
|
||||
|
||||
it('should properly parse Unix timestamps - in seconds', () => {
|
||||
const numberTimeField: Field = {
|
||||
config: {},
|
||||
name: 'Unix second timestamps',
|
||||
type: FieldType.number,
|
||||
values: [1728397800, 1728397815, 1728397830],
|
||||
};
|
||||
|
||||
expect(fieldToTimeField(numberTimeField, 'X')).toEqual({
|
||||
config: {},
|
||||
name: 'Unix second timestamps',
|
||||
type: FieldType.time,
|
||||
values: [1728397800000, 1728397815000, 1728397830000],
|
||||
});
|
||||
});
|
||||
|
||||
it('should properly parse Unix timestamps - in millseconds (with format)', () => {
|
||||
const numberTimeField: Field = {
|
||||
config: {},
|
||||
name: 'Unix MS timestamps',
|
||||
type: FieldType.number,
|
||||
values: [1728397800000, 1728397815000, 1728397830000],
|
||||
};
|
||||
|
||||
expect(fieldToTimeField(numberTimeField, 'x')).toEqual({
|
||||
config: {},
|
||||
name: 'Unix MS timestamps',
|
||||
type: FieldType.time,
|
||||
values: [1728397800000, 1728397815000, 1728397830000],
|
||||
});
|
||||
});
|
||||
|
||||
it('should properly parse Unix timestamps - in millseconds (without format)', () => {
|
||||
const numberTimeField: Field = {
|
||||
config: {},
|
||||
name: 'Unix MS timestamps',
|
||||
type: FieldType.number,
|
||||
values: [1728397800000, 1728397815000, 1728397830000],
|
||||
};
|
||||
|
||||
expect(fieldToTimeField(numberTimeField)).toEqual({
|
||||
config: {},
|
||||
name: 'Unix MS timestamps',
|
||||
type: FieldType.time,
|
||||
values: [1728397800000, 1728397815000, 1728397830000],
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -131,13 +131,17 @@ export function fieldToTimeField(field: Field, dateFormat?: string): Field {
|
||||
const timeValues = field.values.slice();
|
||||
|
||||
let firstDefined = timeValues.find((v) => v != null);
|
||||
|
||||
let isISO8601 = typeof firstDefined === 'string' && iso8601Regex.test(firstDefined);
|
||||
const convertToMS = typeof firstDefined === 'number' && dateFormat === 'X';
|
||||
const isISO8601 = typeof firstDefined === 'string' && iso8601Regex.test(firstDefined);
|
||||
|
||||
for (let t = 0; t < timeValues.length; t++) {
|
||||
if (timeValues[t]) {
|
||||
let parsed = isISO8601 ? Date.parse(timeValues[t]) : dateTimeParse(timeValues[t], opts).valueOf();
|
||||
timeValues[t] = Number.isFinite(parsed) ? parsed : null;
|
||||
if (Number.isFinite(parsed)) {
|
||||
timeValues[t] = convertToMS ? parsed * 1000 : parsed;
|
||||
} else {
|
||||
timeValues[t] = null;
|
||||
}
|
||||
} else {
|
||||
timeValues[t] = null;
|
||||
}
|
||||
@@ -254,10 +258,13 @@ function fieldToComplexField(field: Field): Field {
|
||||
*/
|
||||
export function ensureTimeField(field: Field, dateFormat?: string): Field {
|
||||
const firstValueTypeIsNumber = typeof field.values[0] === 'number';
|
||||
// if the format is unix seconds, we don't want to skip formatting
|
||||
const isUnixSecondsFormat = dateFormat === 'X';
|
||||
|
||||
if (field.type === FieldType.time && firstValueTypeIsNumber) {
|
||||
return field; //already time
|
||||
}
|
||||
if (firstValueTypeIsNumber) {
|
||||
if (firstValueTypeIsNumber && !isUnixSecondsFormat) {
|
||||
return {
|
||||
...field,
|
||||
type: FieldType.time, //assumes it should be time
|
||||
|
||||
@@ -116,6 +116,7 @@ export interface FeatureToggles {
|
||||
kubernetesPlaylists?: boolean;
|
||||
kubernetesSnapshots?: boolean;
|
||||
kubernetesDashboards?: boolean;
|
||||
kubernetesDashboardsAPI?: boolean;
|
||||
kubernetesFolders?: boolean;
|
||||
grafanaAPIServerTestingWithExperimentalAPIs?: boolean;
|
||||
datasourceQueryTypes?: boolean;
|
||||
|
||||
@@ -223,7 +223,7 @@ type Dashboard = {
|
||||
|
||||
// deprecated types
|
||||
|
||||
/** @deprecated - use PluginAddedComponentConfig instead */
|
||||
/** @deprecated - use PluginExtensionAddedLinkConfig instead */
|
||||
export type PluginExtensionLinkConfig<Context extends object = object> = {
|
||||
type: PluginExtensionTypes.link;
|
||||
title: string;
|
||||
|
||||
8
packages/grafana-e2e-selectors/project.json
Normal file
8
packages/grafana-e2e-selectors/project.json
Normal file
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"$schema": "../../node_modules/nx/schemas/project-schema.json",
|
||||
"projectType": "library",
|
||||
"tags": ["scope:package", "type:ui"],
|
||||
"targets": {
|
||||
"build": {}
|
||||
}
|
||||
}
|
||||
6
packages/grafana-eslint-rules/project.json
Normal file
6
packages/grafana-eslint-rules/project.json
Normal file
@@ -0,0 +1,6 @@
|
||||
{
|
||||
"$schema": "../../node_modules/nx/schemas/project-schema.json",
|
||||
"projectType": "library",
|
||||
"tags": ["scope:package", "type:build"],
|
||||
"targets": {}
|
||||
}
|
||||
8
packages/grafana-flamegraph/project.json
Normal file
8
packages/grafana-flamegraph/project.json
Normal file
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"$schema": "../../node_modules/nx/schemas/project-schema.json",
|
||||
"projectType": "library",
|
||||
"tags": ["scope:package", "type:ui"],
|
||||
"targets": {
|
||||
"build": {}
|
||||
}
|
||||
}
|
||||
11
packages/grafana-icons/project.json
Normal file
11
packages/grafana-icons/project.json
Normal file
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"$schema": "../../node_modules/nx/schemas/project-schema.json",
|
||||
"projectType": "library",
|
||||
"tags": ["scope:package", "type:ui"],
|
||||
"targets": {
|
||||
"generate": {
|
||||
"outputs": ["{projectRoot}/src/icons-gen"]
|
||||
},
|
||||
"build": {}
|
||||
}
|
||||
}
|
||||
6
packages/grafana-o11y-ds-frontend/project.json
Normal file
6
packages/grafana-o11y-ds-frontend/project.json
Normal file
@@ -0,0 +1,6 @@
|
||||
{
|
||||
"$schema": "../../node_modules/nx/schemas/project-schema.json",
|
||||
"projectType": "library",
|
||||
"tags": ["scope:package", "type:ui"],
|
||||
"targets": {}
|
||||
}
|
||||
6
packages/grafana-plugin-configs/project.json
Normal file
6
packages/grafana-plugin-configs/project.json
Normal file
@@ -0,0 +1,6 @@
|
||||
{
|
||||
"$schema": "../../node_modules/nx/schemas/project-schema.json",
|
||||
"projectType": "library",
|
||||
"tags": ["scope:package", "type:build"],
|
||||
"targets": {}
|
||||
}
|
||||
8
packages/grafana-prometheus/project.json
Normal file
8
packages/grafana-prometheus/project.json
Normal file
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"$schema": "../../node_modules/nx/schemas/project-schema.json",
|
||||
"projectType": "library",
|
||||
"tags": ["scope:package", "type:ui"],
|
||||
"targets": {
|
||||
"build": {}
|
||||
}
|
||||
}
|
||||
8
packages/grafana-runtime/project.json
Normal file
8
packages/grafana-runtime/project.json
Normal file
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"$schema": "../../node_modules/nx/schemas/project-schema.json",
|
||||
"projectType": "library",
|
||||
"tags": ["scope:package", "type:ui"],
|
||||
"targets": {
|
||||
"build": {}
|
||||
}
|
||||
}
|
||||
8
packages/grafana-schema/project.json
Normal file
8
packages/grafana-schema/project.json
Normal file
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"$schema": "../../node_modules/nx/schemas/project-schema.json",
|
||||
"projectType": "library",
|
||||
"tags": ["scope:package", "type:ui"],
|
||||
"targets": {
|
||||
"build": {}
|
||||
}
|
||||
}
|
||||
6
packages/grafana-sql/project.json
Normal file
6
packages/grafana-sql/project.json
Normal file
@@ -0,0 +1,6 @@
|
||||
{
|
||||
"$schema": "../../node_modules/nx/schemas/project-schema.json",
|
||||
"projectType": "library",
|
||||
"tags": ["scope:package", "type:ui"],
|
||||
"targets": {}
|
||||
}
|
||||
8
packages/grafana-ui/project.json
Normal file
8
packages/grafana-ui/project.json
Normal file
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"$schema": "../../node_modules/nx/schemas/project-schema.json",
|
||||
"projectType": "library",
|
||||
"tags": ["scope:package", "type:ui"],
|
||||
"targets": {
|
||||
"build": {}
|
||||
}
|
||||
}
|
||||
@@ -1,4 +1,5 @@
|
||||
import { css, cx } from '@emotion/css';
|
||||
import { Property } from 'csstype';
|
||||
import { ElementType, forwardRef, PropsWithChildren } from 'react';
|
||||
import * as React from 'react';
|
||||
|
||||
@@ -66,6 +67,7 @@ interface BoxProps extends FlexProps, SizeProps, Omit<React.HTMLAttributes<HTMLE
|
||||
boxShadow?: ResponsiveProp<BoxShadow>;
|
||||
/** Sets the HTML element that will be rendered as a Box. Defaults to 'div' */
|
||||
element?: ElementType;
|
||||
position?: ResponsiveProp<Property.Position>;
|
||||
}
|
||||
|
||||
export const Box = forwardRef<HTMLElement, PropsWithChildren<BoxProps>>((props, ref) => {
|
||||
@@ -106,6 +108,7 @@ export const Box = forwardRef<HTMLElement, PropsWithChildren<BoxProps>>((props,
|
||||
height,
|
||||
minHeight,
|
||||
maxHeight,
|
||||
position,
|
||||
...rest
|
||||
} = props;
|
||||
const styles = useStyles2(
|
||||
@@ -137,7 +140,8 @@ export const Box = forwardRef<HTMLElement, PropsWithChildren<BoxProps>>((props,
|
||||
justifyContent,
|
||||
alignItems,
|
||||
boxShadow,
|
||||
gap
|
||||
gap,
|
||||
position
|
||||
);
|
||||
const sizeStyles = useStyles2(getSizeStyles, width, minWidth, maxWidth, height, minHeight, maxHeight);
|
||||
const Element = element ?? 'div';
|
||||
@@ -204,7 +208,8 @@ const getStyles = (
|
||||
justifyContent: BoxProps['justifyContent'],
|
||||
alignItems: BoxProps['alignItems'],
|
||||
boxShadow: BoxProps['boxShadow'],
|
||||
gap: BoxProps['gap']
|
||||
gap: BoxProps['gap'],
|
||||
position: BoxProps['position']
|
||||
) => {
|
||||
return {
|
||||
root: css([
|
||||
@@ -299,6 +304,9 @@ const getStyles = (
|
||||
getResponsiveStyle(theme, gap, (val) => ({
|
||||
gap: theme.spacing(val),
|
||||
})),
|
||||
getResponsiveStyle(theme, position, (val) => ({
|
||||
position: val,
|
||||
})),
|
||||
]),
|
||||
};
|
||||
};
|
||||
|
||||
@@ -44,6 +44,7 @@ export const Table = memo((props: Props) => {
|
||||
data,
|
||||
height,
|
||||
onCellFilterAdded,
|
||||
onColumnResize,
|
||||
width,
|
||||
columnMinWidth = COLUMN_MIN_WIDTH,
|
||||
noHeader,
|
||||
@@ -128,7 +129,7 @@ export const Table = memo((props: Props) => {
|
||||
|
||||
// Internal react table state reducer
|
||||
const stateReducer = useTableStateReducer({
|
||||
...props,
|
||||
onColumnResize,
|
||||
onSortByChange: (state) => {
|
||||
// Collapse all rows. This prevents a known bug that causes the size of the rows to be incorrect due to
|
||||
// using `VariableSizeList` and `useExpanded` together.
|
||||
@@ -138,6 +139,7 @@ export const Table = memo((props: Props) => {
|
||||
props.onSortByChange(state);
|
||||
}
|
||||
},
|
||||
data,
|
||||
});
|
||||
|
||||
const hasUniqueId = !!data.meta?.uniqueRowIdFields?.length;
|
||||
|
||||
@@ -2,14 +2,14 @@ import { useCallback } from 'react';
|
||||
|
||||
import { getFieldDisplayName } from '@grafana/data';
|
||||
|
||||
import { TableSortByFieldState, GrafanaTableColumn, GrafanaTableState, Props } from './types';
|
||||
import { TableSortByFieldState, GrafanaTableColumn, GrafanaTableState, TableStateReducerProps, Props } from './types';
|
||||
|
||||
export interface ActionType {
|
||||
type: string;
|
||||
id: string | undefined;
|
||||
}
|
||||
|
||||
export function useTableStateReducer({ onColumnResize, onSortByChange, data }: Props) {
|
||||
export function useTableStateReducer({ onColumnResize, onSortByChange, data }: TableStateReducerProps) {
|
||||
return useCallback(
|
||||
(newState: GrafanaTableState, action: ActionType) => {
|
||||
switch (action.type) {
|
||||
|
||||
@@ -75,6 +75,12 @@ export interface GrafanaTableState extends TableState {
|
||||
|
||||
export interface GrafanaTableRow extends Row, UseExpandedRowProps<{}> {}
|
||||
|
||||
export interface TableStateReducerProps {
|
||||
onColumnResize?: TableColumnResizeActionCallback;
|
||||
onSortByChange?: TableSortByActionCallback;
|
||||
data: DataFrame;
|
||||
}
|
||||
|
||||
export interface Props {
|
||||
ariaLabel?: string;
|
||||
data: DataFrame;
|
||||
|
||||
@@ -12,143 +12,66 @@ export const darkThemeVarsTemplate = (theme: GrafanaTheme2) =>
|
||||
|
||||
$theme-name: dark;
|
||||
|
||||
$colors-action-hover: ${theme.colors.action.hover};
|
||||
$colors-action-selected: ${theme.colors.action.selected};
|
||||
|
||||
// New Colors
|
||||
// -------------------------
|
||||
$blue-light: ${theme.colors.primary.text};
|
||||
$blue-base: ${theme.colors.primary.main};
|
||||
$blue-shade: ${theme.colors.primary.shade};
|
||||
$red-base: ${theme.colors.error.main};
|
||||
$red-shade: ${theme.colors.error.shade};
|
||||
$green-base: ${theme.colors.success.main};
|
||||
$green-shade: ${theme.colors.success.shade};
|
||||
$orange-dark: ${theme.v1.palette.orangeDark};
|
||||
|
||||
$gray98: ${theme.v1.palette.gray98};
|
||||
$gray95: ${theme.v1.palette.gray95};
|
||||
$gray85: ${theme.v1.palette.gray85};
|
||||
$gray70: ${theme.v1.palette.gray70};
|
||||
$gray60: ${theme.v1.palette.gray60};
|
||||
$gray33: ${theme.v1.palette.gray33};
|
||||
$gray25: ${theme.v1.palette.gray25};
|
||||
$gray15: ${theme.v1.palette.gray15};
|
||||
$gray10: ${theme.v1.palette.gray10};
|
||||
$gray05: ${theme.v1.palette.gray05};
|
||||
|
||||
// Grays
|
||||
// -------------------------
|
||||
$black: ${theme.v1.palette.black};
|
||||
$dark-1: ${theme.v1.palette.dark1};
|
||||
$dark-2: ${theme.v1.palette.dark2};
|
||||
$dark-3: ${theme.v1.palette.dark3};
|
||||
$dark-4: ${theme.v1.palette.dark4};
|
||||
$dark-5: ${theme.v1.palette.dark5};
|
||||
$dark-6: ${theme.v1.palette.dark6};
|
||||
$dark-7: ${theme.v1.palette.dark7};
|
||||
$dark-8: ${theme.v1.palette.dark8};
|
||||
$dark-9: ${theme.v1.palette.dark9};
|
||||
$dark-10: ${theme.v1.palette.dark10};
|
||||
$gray-1: ${theme.v1.palette.gray1};
|
||||
$gray-2: ${theme.v1.palette.gray2};
|
||||
$gray-3: ${theme.v1.palette.gray3};
|
||||
$gray-4: ${theme.v1.palette.gray4};
|
||||
$gray-5: ${theme.v1.palette.gray5};
|
||||
$gray-6: ${theme.v1.palette.gray6};
|
||||
|
||||
$white: ${theme.v1.palette.white};
|
||||
|
||||
$layer0: ${theme.colors.background.canvas};
|
||||
$layer1: ${theme.colors.background.primary};
|
||||
$layer2: ${theme.colors.background.secondary};
|
||||
|
||||
$divider: ${theme.colors.border.weak};
|
||||
|
||||
$border0: ${theme.colors.border.weak};
|
||||
$border1: ${theme.colors.border.medium};
|
||||
|
||||
// Accent colors
|
||||
// -------------------------
|
||||
$blue: ${theme.v1.palette.blue85};
|
||||
$red: $red-base;
|
||||
$yellow: ${theme.v1.palette.yellow};
|
||||
$orange: ${theme.v1.palette.orange};
|
||||
$purple: ${theme.v1.palette.purple};
|
||||
$variable: ${theme.colors.primary.text};
|
||||
|
||||
$brand-primary: ${theme.v1.palette.orange};
|
||||
$brand-success: ${theme.colors.success.main};
|
||||
$brand-warning: ${theme.colors.warning.main};
|
||||
$brand-danger: ${theme.colors.error.main};
|
||||
|
||||
$query-red: ${theme.colors.error.text};
|
||||
$query-green: ${theme.colors.success.text};
|
||||
$query-purple: #fe85fc;
|
||||
$query-orange: ${theme.v1.palette.orange};
|
||||
|
||||
// Status colors
|
||||
// -------------------------¨
|
||||
$online: ${theme.colors.success.text};
|
||||
$warn: ${theme.colors.warning.text};
|
||||
$critical: ${theme.colors.error.text};
|
||||
|
||||
// Scaffolding
|
||||
// -------------------------
|
||||
$body-bg: ${theme.colors.background.canvas};
|
||||
$page-bg: ${theme.colors.background.canvas};
|
||||
$dashboard-bg: ${theme.colors.background.canvas};
|
||||
|
||||
$text-color-strong: ${theme.colors.text.maxContrast};
|
||||
$text-color: ${theme.colors.text.primary};
|
||||
$text-color-semi-weak: ${theme.colors.text.secondary};
|
||||
$text-color-weak: ${theme.colors.text.secondary};
|
||||
$text-color-faint: ${theme.colors.text.disabled};
|
||||
$text-color-emphasis: ${theme.colors.text.maxContrast};
|
||||
$text-blue: ${theme.colors.primary.text};
|
||||
|
||||
$text-shadow-faint: 1px 1px 4px rgb(45, 45, 45);
|
||||
$textShadow: none;
|
||||
|
||||
// gradients
|
||||
$brand-gradient-horizontal: ${theme.colors.gradients.brandHorizontal};
|
||||
$brand-gradient-vertical: ${theme.colors.gradients.brandVertical};
|
||||
|
||||
// Links
|
||||
// -------------------------
|
||||
$link-color: ${theme.colors.text.primary};
|
||||
$link-color-disabled: ${theme.colors.text.disabled};
|
||||
$link-hover-color: ${theme.colors.text.maxContrast};
|
||||
$external-link-color: ${theme.colors.text.link};
|
||||
|
||||
// Typography
|
||||
// -------------------------
|
||||
$headings-color: ${theme.colors.text.primary};
|
||||
$abbr-border-color: $gray-2 !default;
|
||||
$text-muted: $text-color-weak;
|
||||
|
||||
$hr-border-color: $dark-9;
|
||||
|
||||
// Panel
|
||||
// -------------------------
|
||||
$panel-bg: ${theme.components.panel.background};
|
||||
$panel-border: 1px solid ${theme.components.panel.borderColor};
|
||||
$panel-header-hover-bg: ${theme.colors.action.hover};
|
||||
$panel-box-shadow: ${theme.components.panel.boxShadow};
|
||||
$panel-corner: $panel-bg;
|
||||
|
||||
// page header
|
||||
$page-header-bg: ${theme.colors.background.canvas};
|
||||
$page-header-shadow: inset 0px -4px 14px $dark-3;
|
||||
$page-header-border-color: ${theme.colors.background.canvas};
|
||||
|
||||
$divider-border-color: $gray-1;
|
||||
|
||||
// Graphite Target Editor
|
||||
$tight-form-func-bg: ${theme.colors.background.secondary};
|
||||
$tight-form-func-highlight-bg: ${theme.colors.emphasize(theme.colors.background.secondary, 0.03)};
|
||||
|
||||
$modal-backdrop-bg: ${theme.colors.action.hover};
|
||||
$code-tag-bg: $dark-1;
|
||||
$code-tag-border: $dark-9;
|
||||
|
||||
@@ -159,76 +82,32 @@ $card-shadow: none;
|
||||
|
||||
// Lists
|
||||
$list-item-bg: $card-background;
|
||||
$list-item-hover-bg: $card-background-hover;
|
||||
$list-item-shadow: $card-shadow;
|
||||
|
||||
$empty-list-cta-bg: ${theme.colors.background.secondary};
|
||||
|
||||
// Scrollbars
|
||||
$scrollbarBackground: #404357;
|
||||
$scrollbarBackground2: $dark-10;
|
||||
$scrollbarBorder: black;
|
||||
|
||||
// Tables
|
||||
// -------------------------
|
||||
$table-bg-accent: ${theme.colors.background.secondary};
|
||||
$table-border: ${theme.colors.border.medium};
|
||||
$table-bg-odd: ${theme.colors.emphasize(theme.colors.background.primary, 0.02)};
|
||||
$table-bg-hover: ${theme.colors.emphasize(theme.colors.background.primary, 0.05)};
|
||||
|
||||
// Buttons
|
||||
// -------------------------
|
||||
$btn-primary-bg: $blue-base;
|
||||
$btn-primary-bg-hl: $blue-shade;
|
||||
|
||||
$btn-secondary-bg: $dark-6;
|
||||
$btn-secondary-bg-hl: color.adjust($dark-6, $lightness: 4%);
|
||||
|
||||
$btn-success-bg: $green-base;
|
||||
$btn-success-bg-hl: $green-shade;
|
||||
|
||||
$btn-danger-bg: $red-base;
|
||||
$btn-danger-bg-hl: $red-shade;
|
||||
|
||||
$btn-inverse-bg: $dark-6;
|
||||
$btn-inverse-bg-hl: color.adjust($dark-6, $lightness: 4%);
|
||||
$btn-inverse-text-color: $link-color;
|
||||
$btn-inverse-text-shadow: 0px 1px 0 rgba(0, 0, 0, 0.1);
|
||||
|
||||
$btn-link-color: $gray-3;
|
||||
|
||||
$iconContainerBackground: $black;
|
||||
|
||||
$btn-divider-left: $dark-9;
|
||||
$btn-divider-right: $dark-3;
|
||||
|
||||
$btn-drag-image: '../img/grab_dark.svg';
|
||||
|
||||
$navbar-btn-gicon-brightness: brightness(0.5);
|
||||
|
||||
$btn-active-box-shadow: 0px 0px 4px rgba(255, 120, 10, 0.5);
|
||||
|
||||
// Forms
|
||||
// -------------------------
|
||||
$input-bg: ${theme.components.input.background};
|
||||
$input-bg-disabled: ${theme.colors.action.disabledBackground};
|
||||
|
||||
$input-color: ${theme.components.input.text};
|
||||
$input-border-color: ${theme.components.input.borderColor};
|
||||
$input-box-shadow: none;
|
||||
$input-border-focus: ${theme.colors.primary.border};
|
||||
$input-box-shadow-focus: ${theme.colors.primary.border} !default;
|
||||
$input-color-placeholder: ${theme.colors.text.disabled};
|
||||
$input-label-bg: ${theme.colors.background.secondary};
|
||||
$input-color-select-arrow: $white;
|
||||
|
||||
// Search
|
||||
$search-shadow: 0 0 30px 0 $black;
|
||||
|
||||
// Typeahead
|
||||
$typeahead-shadow: 0 5px 10px 0 $black;
|
||||
$typeahead-selected-bg: $dark-9;
|
||||
$typeahead-selected-color: $yellow;
|
||||
|
||||
// Dropdowns
|
||||
// -------------------------
|
||||
@@ -236,48 +115,20 @@ $dropdownBackground: ${theme.colors.background.primary};
|
||||
$dropdownBorder: ${theme.colors.border.weak};
|
||||
$dropdownDividerTop: ${theme.colors.border.weak};
|
||||
$dropdownDividerBottom: ${theme.colors.border.weak};
|
||||
$dropdownShadow: ${theme.shadows.z3};
|
||||
|
||||
$dropdownLinkColor: $link-color;
|
||||
$dropdownLinkColorHover: $white;
|
||||
$dropdownLinkColorActive: $white;
|
||||
$dropdownLinkBackgroundHover: $dark-9;
|
||||
|
||||
// Horizontal forms & lists
|
||||
// -------------------------
|
||||
$horizontalComponentOffset: 180px;
|
||||
|
||||
// Navbar
|
||||
// -------------------------
|
||||
$navbarHeight: 55px;
|
||||
$navbarBorder: 1px solid $dark-6;
|
||||
|
||||
// Sidemenu
|
||||
// -------------------------
|
||||
$side-menu-bg: $panel-bg;
|
||||
$side-menu-bg-mobile: $panel-bg;
|
||||
$side-menu-border: none;
|
||||
$side-menu-item-hover-bg: ${theme.colors.background.secondary};
|
||||
$side-menu-shadow: 0 0 30px #111;
|
||||
$side-menu-icon-color: ${theme.v1.palette.gray70};
|
||||
$side-menu-header-color: ${theme.colors.text.primary};
|
||||
|
||||
// Menu dropdowns
|
||||
// -------------------------
|
||||
$menu-dropdown-bg: ${theme.colors.background.primary};
|
||||
$menu-dropdown-hover-bg: ${theme.colors.action.hover};
|
||||
$menu-dropdown-shadow: ${theme.shadows.z3};
|
||||
|
||||
// Tabs
|
||||
// -------------------------
|
||||
$tab-border-color: $dark-9;
|
||||
|
||||
// Form states and alerts
|
||||
// -------------------------
|
||||
$warning-text-color: ${theme.colors.warning.text};
|
||||
$error-text-color: ${theme.colors.error.text};
|
||||
$success-text-color: ${theme.colors.success.text};
|
||||
|
||||
$alert-error-bg: ${theme.colors.error.main};
|
||||
$alert-success-bg: ${theme.colors.success.main};
|
||||
$alert-warning-bg: ${theme.colors.warning.main};
|
||||
@@ -285,16 +136,12 @@ $alert-info-bg: ${theme.colors.warning.main};
|
||||
|
||||
// Tooltips and popovers
|
||||
// -------------------------
|
||||
$tooltipArrowWidth: 5px;
|
||||
$tooltipLinkColor: $link-color;
|
||||
$tooltipExternalLinkColor: $external-link-color;
|
||||
$tooltipExternalLinkColor: ${theme.colors.text.link};
|
||||
$graph-tooltip-bg: $dark-1;
|
||||
|
||||
$tooltipBackground: ${theme.components.tooltip.background};
|
||||
$tooltipColor: ${theme.components.tooltip.text};
|
||||
$tooltipArrowColor: ${theme.components.tooltip.background};
|
||||
$tooltipBackgroundError: ${theme.colors.error.main};
|
||||
$tooltipShadow: ${theme.shadows.z2};
|
||||
|
||||
$popover-bg: ${theme.colors.background.primary};
|
||||
$popover-color: ${theme.colors.text.primary};
|
||||
@@ -304,10 +151,7 @@ $popover-shadow: ${theme.shadows.z3};
|
||||
|
||||
$popover-help-bg: $tooltipBackground;
|
||||
$popover-help-color: $text-color;
|
||||
$popover-error-bg: $btn-danger-bg;
|
||||
|
||||
$popover-code-bg: $popover-bg;
|
||||
$popover-code-boxshadow: $tooltipShadow;
|
||||
$popover-error-bg: $red-base;
|
||||
|
||||
// images
|
||||
$checkboxImageUrl: '../img/checkbox.png';
|
||||
@@ -315,46 +159,6 @@ $checkboxImageUrl: '../img/checkbox.png';
|
||||
// info box
|
||||
$info-box-border-color: $blue-base;
|
||||
|
||||
// footer
|
||||
$footer-link-color: $gray-2;
|
||||
$footer-link-hover: $gray-4;
|
||||
|
||||
// json-explorer
|
||||
$json-explorer-default-color: $text-color;
|
||||
$json-explorer-string-color: #23d662;
|
||||
$json-explorer-number-color: $variable;
|
||||
$json-explorer-boolean-color: $variable;
|
||||
$json-explorer-null-color: #eec97d;
|
||||
$json-explorer-undefined-color: rgb(239, 143, 190);
|
||||
$json-explorer-function-color: #fd48cb;
|
||||
$json-explorer-rotate-time: 100ms;
|
||||
$json-explorer-toggler-opacity: 0.6;
|
||||
$json-explorer-bracket-color: #9494ff;
|
||||
$json-explorer-key-color: #23a0db;
|
||||
$json-explorer-url-color: #027bff;
|
||||
|
||||
// Changelog and diff
|
||||
// -------------------------
|
||||
$diff-label-bg: ${theme.colors.action.hover};
|
||||
$diff-label-fg: $white;
|
||||
|
||||
$diff-group-bg: ${theme.colors.background.secondary};
|
||||
$diff-arrow-color: $white;
|
||||
|
||||
$diff-json-bg: ${theme.colors.background.secondary};
|
||||
$diff-json-fg: ${theme.colors.text.primary};
|
||||
|
||||
$diff-json-added: $blue-shade;
|
||||
$diff-json-deleted: $red-shade;
|
||||
|
||||
$diff-json-old: #a04338;
|
||||
$diff-json-new: #457740;
|
||||
|
||||
$diff-json-changed-fg: $gray-5;
|
||||
$diff-json-changed-num: $text-color;
|
||||
|
||||
$diff-json-icon: $gray-5;
|
||||
|
||||
//Switch Slider
|
||||
// -------------------------
|
||||
$switch-bg: $input-bg;
|
||||
@@ -369,36 +173,4 @@ $checkbox-bg: $dark-1;
|
||||
$checkbox-border: 1px solid $gray-1;
|
||||
$checkbox-checked-bg: linear-gradient(0deg, #eb7b18, #d44a3a);
|
||||
$checkbox-color: $dark-1;
|
||||
|
||||
//Panel Edit
|
||||
// -------------------------
|
||||
$panel-editor-shadow: 0 0 20px black;
|
||||
$panel-editor-side-menu-shadow: drop-shadow(0 0 10px $black);
|
||||
$panel-editor-viz-item-shadow: 0 0 8px $dark-10;
|
||||
$panel-editor-viz-item-border: 1px solid $dark-10;
|
||||
$panel-editor-viz-item-shadow-hover: 0 0 4px $blue-light;
|
||||
$panel-editor-viz-item-border-hover: 1px solid $blue-light;
|
||||
$panel-editor-viz-item-bg: $input-bg;
|
||||
$panel-editor-tabs-line-color: #e3e3e3;
|
||||
|
||||
$panel-editor-viz-item-bg-hover: color.adjust($blue-base, $lightness: -46%);
|
||||
|
||||
$panel-grid-placeholder-bg: color.adjust(${theme.v1.palette.blue77}, $lightness: -30%);
|
||||
$panel-grid-placeholder-shadow: 0 0 4px ${theme.v1.palette.blue80};
|
||||
|
||||
// logs
|
||||
$logs-color-unknown: $gray-2;
|
||||
|
||||
// toggle-group
|
||||
$button-toggle-group-btn-active-bg: linear-gradient(90deg, #eb7b18, #d44a3a);
|
||||
$button-toggle-group-btn-active-shadow: inset 0 0 4px $black;
|
||||
$button-toggle-group-btn-separator-border: 1px solid $dark-2;
|
||||
|
||||
$vertical-resize-handle-bg: $dark-10;
|
||||
$vertical-resize-handle-dots: $gray-1;
|
||||
$vertical-resize-handle-dots-hover: $gray-2;
|
||||
|
||||
// Calendar
|
||||
$calendar-bg-days: $input-bg;
|
||||
$calendar-bg-now: $dark-10;
|
||||
`;
|
||||
|
||||
@@ -4,8 +4,6 @@ import { GrafanaTheme2 } from '@grafana/data';
|
||||
|
||||
import { renderGeneratedFileBanner } from '../utils/generatedFileBanner';
|
||||
|
||||
import { styleMixins } from '.';
|
||||
|
||||
export const lightThemeVarsTemplate = (theme: GrafanaTheme2) =>
|
||||
`${renderGeneratedFileBanner('grafana-ui/src/themes/light.ts', 'grafana-ui/src/themes/_variable.light.scss.tmpl.ts')}
|
||||
@use 'sass:color';
|
||||
@@ -14,42 +12,20 @@ export const lightThemeVarsTemplate = (theme: GrafanaTheme2) =>
|
||||
|
||||
$theme-name: light;
|
||||
|
||||
$colors-action-hover: ${theme.colors.action.hover};
|
||||
$colors-action-selected: ${theme.colors.action.selected};
|
||||
|
||||
// New Colors
|
||||
// -------------------------
|
||||
$blue-light: ${theme.colors.primary.text};
|
||||
$blue-base: ${theme.colors.primary.main};
|
||||
$blue-shade: ${theme.colors.primary.shade};
|
||||
$red-base: ${theme.colors.error.main};
|
||||
$red-shade: ${theme.colors.error.shade};
|
||||
$green-base: ${theme.colors.success.main};
|
||||
$green-shade: ${theme.colors.success.shade};
|
||||
$orange-dark: ${theme.v1.palette.orangeDark};
|
||||
|
||||
$gray98: ${theme.v1.palette.gray98};
|
||||
$gray95: ${theme.v1.palette.gray95};
|
||||
$gray85: ${theme.v1.palette.gray85};
|
||||
$gray70: ${theme.v1.palette.gray70};
|
||||
$gray60: ${theme.v1.palette.gray60};
|
||||
$gray33: ${theme.v1.palette.gray33};
|
||||
$gray25: ${theme.v1.palette.gray25};
|
||||
$gray15: ${theme.v1.palette.gray15};
|
||||
$gray10: ${theme.v1.palette.gray10};
|
||||
$gray05: ${theme.v1.palette.gray05};
|
||||
|
||||
// Grays
|
||||
// -------------------------
|
||||
$black: ${theme.v1.palette.black};
|
||||
|
||||
$dark-1: ${theme.v1.palette.dark1};
|
||||
$dark-2: ${theme.v1.palette.dark2};
|
||||
$dark-4: ${theme.v1.palette.dark4};
|
||||
$dark-10: ${theme.v1.palette.dark10};
|
||||
$gray-1: ${theme.v1.palette.gray1};
|
||||
$gray-2: ${theme.v1.palette.gray2};
|
||||
$gray-3: ${theme.v1.palette.gray3};
|
||||
$gray-4: ${theme.v1.palette.gray4};
|
||||
$gray-5: ${theme.v1.palette.gray5};
|
||||
$gray-6: ${theme.v1.palette.gray6};
|
||||
@@ -57,95 +33,45 @@ $gray-7: ${theme.v1.palette.gray7};
|
||||
|
||||
$white: ${theme.v1.palette.white};
|
||||
|
||||
$layer0: ${theme.colors.background.canvas};
|
||||
$layer1: ${theme.colors.background.primary};
|
||||
$layer2: ${theme.colors.background.secondary};
|
||||
|
||||
$divider: ${theme.colors.border.weak};
|
||||
$border0: ${theme.colors.border.weak};
|
||||
$border1: ${theme.colors.border.medium};
|
||||
|
||||
// Accent colors
|
||||
// -------------------------
|
||||
$blue: ${theme.colors.primary.text};
|
||||
$red: $red-base;
|
||||
$yellow: ${theme.v1.palette.yellow};
|
||||
$orange: ${theme.v1.palette.orange};
|
||||
$purple: ${theme.v1.palette.purple};
|
||||
$variable: ${theme.colors.primary.text};
|
||||
|
||||
$brand-primary: ${theme.v1.palette.orange};
|
||||
$brand-success: ${theme.colors.success.main};
|
||||
$brand-warning: ${theme.colors.warning.main};
|
||||
$brand-danger: ${theme.colors.error.main};
|
||||
|
||||
$query-red: ${theme.colors.error.text};
|
||||
$query-green: ${theme.colors.success.text};
|
||||
$query-purple: #fe85fc;
|
||||
$query-orange: ${theme.v1.palette.orange};
|
||||
|
||||
// Status colors
|
||||
// -------------------------
|
||||
$online: ${theme.colors.success.text};
|
||||
$warn: ${theme.colors.warning.text};
|
||||
$critical: ${theme.colors.error.text};
|
||||
|
||||
|
||||
// Scaffolding
|
||||
// -------------------------
|
||||
$body-bg: ${theme.colors.background.canvas};
|
||||
$page-bg: ${theme.colors.background.canvas};
|
||||
$dashboard-bg: ${theme.colors.background.canvas};
|
||||
|
||||
$text-color: ${theme.colors.text.primary};
|
||||
$text-color-strong: ${theme.colors.text.maxContrast};
|
||||
$text-color-semi-weak: ${theme.colors.text.secondary};
|
||||
$text-color-weak: ${theme.colors.text.secondary};
|
||||
$text-color-faint: ${theme.colors.text.disabled};
|
||||
$text-color-emphasis: ${theme.colors.text.maxContrast};
|
||||
$text-blue: ${theme.colors.primary.text};
|
||||
|
||||
$text-shadow-faint: none;
|
||||
|
||||
// gradients
|
||||
$brand-gradient-horizontal: ${theme.colors.gradients.brandHorizontal};
|
||||
$brand-gradient-vertical: ${theme.colors.gradients.brandVertical};
|
||||
|
||||
// Links
|
||||
// -------------------------
|
||||
$link-color: ${theme.colors.text.primary};
|
||||
$link-color-disabled: ${theme.colors.text.disabled};
|
||||
$link-hover-color: ${theme.colors.text.maxContrast};
|
||||
$external-link-color: ${theme.colors.text.link};
|
||||
|
||||
// Typography
|
||||
// -------------------------
|
||||
$headings-color: ${theme.colors.text.primary};
|
||||
$abbr-border-color: $gray-2 !default;
|
||||
$text-muted: $text-color-weak;
|
||||
|
||||
$hr-border-color: $gray-4 !default;
|
||||
|
||||
// Panel
|
||||
// -------------------------
|
||||
$panel-bg: ${theme.components.panel.background};
|
||||
$panel-border: 1px solid ${theme.components.panel.borderColor};
|
||||
$panel-header-hover-bg: ${theme.colors.action.hover};
|
||||
$panel-box-shadow: ${theme.components.panel.boxShadow};
|
||||
$panel-corner: $panel-bg;
|
||||
|
||||
// Page header
|
||||
$page-header-bg: ${theme.colors.background.canvas};
|
||||
$page-header-shadow: inset 0px -3px 10px $gray-6;
|
||||
$page-header-border-color: ${theme.colors.background.canvas};
|
||||
|
||||
$divider-border-color: $gray-2;
|
||||
|
||||
// Graphite Target Editor
|
||||
$tight-form-func-bg: ${theme.colors.background.secondary};
|
||||
$tight-form-func-highlight-bg: ${styleMixins.hoverColor(theme.colors.background.secondary, theme)};
|
||||
|
||||
$modal-backdrop-bg: ${theme.colors.background.primary};
|
||||
$code-tag-bg: $gray-6;
|
||||
$code-tag-border: $gray-4;
|
||||
|
||||
@@ -156,76 +82,32 @@ $card-shadow: none;
|
||||
|
||||
// Lists
|
||||
$list-item-bg: $gray-7;
|
||||
$list-item-hover-bg: $gray-6;
|
||||
$list-item-shadow: $card-shadow;
|
||||
|
||||
$empty-list-cta-bg: $gray-6;
|
||||
|
||||
// Scrollbars
|
||||
$scrollbarBackground: $gray-4;
|
||||
$scrollbarBackground2: $gray-4;
|
||||
$scrollbarBorder: $gray-7;
|
||||
|
||||
// Tables
|
||||
// -------------------------
|
||||
$table-bg-accent: ${theme.colors.background.secondary};
|
||||
$table-border: ${theme.colors.border.medium};
|
||||
$table-bg-odd: ${theme.colors.emphasize(theme.colors.background.primary, 0.02)};
|
||||
$table-bg-hover: ${theme.colors.emphasize(theme.colors.background.primary, 0.05)};
|
||||
|
||||
// Buttons
|
||||
// -------------------------
|
||||
$btn-secondary-bg: $gray-5;
|
||||
$btn-secondary-bg-hl: $gray-4;
|
||||
|
||||
$btn-primary-bg: $blue-base;
|
||||
$btn-primary-bg-hl: $blue-shade;
|
||||
|
||||
$btn-success-bg: $green-base;
|
||||
$btn-success-bg-hl: $green-shade;
|
||||
|
||||
$btn-danger-bg: $red-base;
|
||||
$btn-danger-bg-hl: $red-shade;
|
||||
|
||||
$btn-inverse-bg: $gray-5;
|
||||
$btn-inverse-bg-hl: $gray-4;
|
||||
$btn-inverse-text-color: $gray-1;
|
||||
$btn-inverse-text-shadow: 0 1px 0 rgba(255, 255, 255, 0.4);
|
||||
|
||||
$btn-link-color: $gray-1;
|
||||
|
||||
$iconContainerBackground: $white;
|
||||
|
||||
$btn-divider-left: $gray-4;
|
||||
$btn-divider-right: $gray-7;
|
||||
|
||||
$btn-drag-image: '../img/grab_light.svg';
|
||||
|
||||
$navbar-btn-gicon-brightness: brightness(1.5);
|
||||
|
||||
$btn-active-box-shadow: 0px 0px 4px rgba(234, 161, 51, 0.6);
|
||||
|
||||
// Forms
|
||||
// -------------------------
|
||||
$input-bg: ${theme.components.input.background};
|
||||
$input-bg-disabled: ${theme.colors.action.disabledBackground};
|
||||
|
||||
$input-color: ${theme.components.input.text};
|
||||
$input-border-color: ${theme.components.input.borderColor};
|
||||
$input-box-shadow: none;
|
||||
$input-border-focus: ${theme.v1.palette.blue95};
|
||||
$input-box-shadow-focus: ${theme.v1.palette.blue95};
|
||||
$input-color-placeholder: ${theme.colors.text.disabled};
|
||||
$input-label-bg: ${theme.colors.background.secondary};
|
||||
$input-color-select-arrow: ${theme.v1.palette.gray60};
|
||||
|
||||
// search
|
||||
$search-shadow: 0 1px 5px 0 $gray-5;
|
||||
|
||||
// Typeahead
|
||||
$typeahead-shadow: 0 5px 10px 0 $gray-5;
|
||||
$typeahead-selected-bg: $gray-6;
|
||||
$typeahead-selected-color: $yellow;
|
||||
|
||||
// Dropdowns
|
||||
// -------------------------
|
||||
@@ -233,7 +115,6 @@ $dropdownBackground: ${theme.colors.background.primary};
|
||||
$dropdownBorder: ${theme.colors.border.weak};
|
||||
$dropdownDividerTop: ${theme.colors.border.weak};
|
||||
$dropdownDividerBottom: ${theme.colors.border.weak};
|
||||
$dropdownShadow: ${theme.shadows.z3};
|
||||
|
||||
$dropdownLinkColor: $dark-2;
|
||||
$dropdownLinkColorHover: $link-color;
|
||||
@@ -241,42 +122,14 @@ $dropdownLinkColorActive: $link-color;
|
||||
|
||||
$dropdownLinkBackgroundHover: $gray-6;
|
||||
|
||||
// Horizontal forms & lists
|
||||
// -------------------------
|
||||
$horizontalComponentOffset: 180px;
|
||||
|
||||
// Navbar
|
||||
// -------------------------
|
||||
$navbarHeight: 52px;
|
||||
$navbarBorder: 1px solid $gray-5;
|
||||
|
||||
// Sidemenu
|
||||
// -------------------------
|
||||
$side-menu-bg: ${theme.v1.palette.gray15};
|
||||
$side-menu-border: 1px solid ${theme.v1.palette.gray25};
|
||||
$side-menu-bg-mobile: rgba(0, 0, 0, 0); //$gray-6;
|
||||
$side-menu-item-hover-bg: ${theme.v1.palette.gray25};
|
||||
$side-menu-shadow: 5px 0px 10px -5px $gray-1;
|
||||
$side-menu-link-color: $gray-4;
|
||||
$side-menu-icon-color: ${theme.v1.palette.gray70};
|
||||
$side-menu-header-color: ${theme.v1.palette.gray95};
|
||||
|
||||
// Menu dropdowns
|
||||
// -------------------------
|
||||
$menu-dropdown-bg: ${theme.colors.background.primary};
|
||||
$menu-dropdown-hover-bg: ${theme.colors.action.hover};
|
||||
$menu-dropdown-shadow: ${theme.shadows.z3};
|
||||
|
||||
// Tabs
|
||||
// -------------------------
|
||||
$tab-border-color: $gray-5;
|
||||
|
||||
// Form states and alerts
|
||||
// -------------------------
|
||||
$warning-text-color: ${theme.colors.warning.text};
|
||||
$error-text-color: ${theme.colors.error.text};
|
||||
$success-text-color: ${theme.colors.success.text};
|
||||
|
||||
$alert-error-bg: ${theme.colors.error.main};
|
||||
$alert-success-bg: ${theme.colors.success.main};
|
||||
$alert-warning-bg: ${theme.colors.warning.main};
|
||||
@@ -285,9 +138,6 @@ $alert-info-bg: ${theme.colors.warning.main};
|
||||
// Tooltips and popovers
|
||||
$tooltipBackground: ${theme.components.tooltip.background};
|
||||
$tooltipColor: ${theme.components.tooltip.text};
|
||||
$tooltipArrowColor: ${theme.components.tooltip.background};
|
||||
$tooltipBackgroundError: ${theme.colors.error.main};
|
||||
$tooltipShadow: ${theme.shadows.z2};
|
||||
|
||||
$popover-bg: ${theme.colors.background.primary};
|
||||
$popover-color: ${theme.colors.text.primary};
|
||||
@@ -297,63 +147,19 @@ $popover-shadow: ${theme.shadows.z3};
|
||||
|
||||
$graph-tooltip-bg: $gray-5;
|
||||
|
||||
$tooltipArrowWidth: 5px;
|
||||
$tooltipLinkColor: color.adjust($tooltipColor, $lightness: 5%);
|
||||
$tooltipExternalLinkColor: #6E9FFF;
|
||||
|
||||
$popover-error-bg: $btn-danger-bg;
|
||||
$popover-error-bg: $red-base;
|
||||
$popover-help-bg: $tooltipBackground;
|
||||
$popover-help-color: $tooltipColor;
|
||||
|
||||
$popover-code-bg: ${theme.colors.background.primary};
|
||||
$popover-code-boxshadow: 0 0 5px $gray60;
|
||||
|
||||
// images
|
||||
$checkboxImageUrl: '../img/checkbox_white.png';
|
||||
|
||||
// info box
|
||||
$info-box-border-color: $blue-base;
|
||||
|
||||
// footer
|
||||
$footer-link-color: $gray-1;
|
||||
$footer-link-hover: $dark-2;
|
||||
|
||||
// json explorer
|
||||
$json-explorer-default-color: black;
|
||||
$json-explorer-string-color: green;
|
||||
$json-explorer-number-color: $blue-base;
|
||||
$json-explorer-boolean-color: $red-base;
|
||||
$json-explorer-null-color: #855a00;
|
||||
$json-explorer-undefined-color: rgb(202, 11, 105);
|
||||
$json-explorer-function-color: #ff20ed;
|
||||
$json-explorer-rotate-time: 100ms;
|
||||
$json-explorer-toggler-opacity: 0.6;
|
||||
$json-explorer-bracket-color: $blue-base;
|
||||
$json-explorer-key-color: #00008b;
|
||||
$json-explorer-url-color: $blue-base;
|
||||
|
||||
// Changelog and diff
|
||||
// -------------------------
|
||||
$diff-label-bg: ${theme.colors.action.hover};
|
||||
$diff-label-fg: $gray-2;
|
||||
|
||||
$diff-arrow-color: $dark-2;
|
||||
$diff-group-bg: ${theme.colors.background.secondary};
|
||||
|
||||
$diff-json-bg: ${theme.colors.background.secondary};
|
||||
$diff-json-fg: ${theme.colors.text.primary};
|
||||
|
||||
$diff-json-added: $blue-shade;
|
||||
$diff-json-deleted: $red-shade;
|
||||
|
||||
$diff-json-old: #5a372a;
|
||||
$diff-json-new: #664e33;
|
||||
|
||||
$diff-json-changed-fg: $gray-7;
|
||||
$diff-json-changed-num: $gray-4;
|
||||
|
||||
$diff-json-icon: $gray-4;
|
||||
|
||||
//Switch Slider
|
||||
// -------------------------
|
||||
$switch-bg: $white;
|
||||
@@ -365,40 +171,7 @@ $switch-slider-shadow: 0 0 3px $dark-2;
|
||||
//Checkbox
|
||||
// -------------------------
|
||||
$checkbox-bg: $gray-6;
|
||||
$checkbox-border: 1px solid $gray-3;
|
||||
$checkbox-border: 1px solid ${theme.v1.palette.gray3};
|
||||
$checkbox-checked-bg: linear-gradient(0deg, #ff9830, #e55400);
|
||||
$checkbox-color: $gray-7;
|
||||
|
||||
//Panel Edit
|
||||
// -------------------------
|
||||
$panel-editor-shadow: 0px 0px 8px $gray-3;
|
||||
$panel-editor-side-menu-shadow: drop-shadow(0 0 2px $gray-3);
|
||||
$panel-editor-viz-item-shadow: 0 0 4px $gray-3;
|
||||
$panel-editor-viz-item-border: 1px solid $gray-3;
|
||||
$panel-editor-viz-item-shadow-hover: 0 0 4px $blue-light;
|
||||
$panel-editor-viz-item-border-hover: 1px solid $blue-light;
|
||||
$panel-editor-viz-item-bg: $card-background;
|
||||
$panel-editor-tabs-line-color: $dark-2;
|
||||
|
||||
$panel-editor-viz-item-bg-hover: color.adjust($blue-base, $lightness: 45%);
|
||||
|
||||
$panel-grid-placeholder-bg: color.adjust(${theme.v1.palette.blue95}, $lightness: 30%);
|
||||
$panel-grid-placeholder-shadow: 0 0 4px ${theme.v1.palette.blue95};
|
||||
|
||||
// logs
|
||||
$logs-color-unknown: $gray-5;
|
||||
|
||||
// toggle-group
|
||||
$button-toggle-group-btn-active-bg: $brand-primary;
|
||||
$button-toggle-group-btn-active-shadow: inset 0 0 4px $white;
|
||||
$button-toggle-group-btn-separator-border: 1px solid $gray-6;
|
||||
|
||||
$vertical-resize-handle-bg: $gray-4;
|
||||
$vertical-resize-handle-dots: $gray-3;
|
||||
$vertical-resize-handle-dots-hover: $gray-2;
|
||||
|
||||
// Calendar
|
||||
$calendar-bg-days: $white;
|
||||
$calendar-bg-now: $gray-6;
|
||||
|
||||
`;
|
||||
|
||||
@@ -4,7 +4,7 @@ go 1.23.1
|
||||
|
||||
require (
|
||||
github.com/emicklei/go-restful/v3 v3.11.0
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.251.0
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.253.0
|
||||
github.com/grafana/grafana/pkg/apimachinery v0.0.0-20240808213237-f4d2e064f435
|
||||
github.com/grafana/grafana/pkg/semconv v0.0.0-20240808213237-f4d2e064f435
|
||||
github.com/mattbaird/jsonpatch v0.0.0-20240118010651-0ba75a80ca38
|
||||
|
||||
@@ -130,8 +130,8 @@ github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
|
||||
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
|
||||
github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc=
|
||||
github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.251.0 h1:gnOtxrC/1rqFvpSbQYyoZqkr47oWDlz4Q2L6Ozmsi3w=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.251.0/go.mod h1:gCGN9kHY3KeX4qyni3+Kead38Q+85pYOrsDcxZp6AIk=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.253.0 h1:KaCrqqsDgVIoT8hwvwuUMKV7QbHVlvRoFN5+U2rOXR8=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.253.0/go.mod h1:gCGN9kHY3KeX4qyni3+Kead38Q+85pYOrsDcxZp6AIk=
|
||||
github.com/grafana/grafana/pkg/apimachinery v0.0.0-20240808213237-f4d2e064f435 h1:lmw60EW7JWlAEvgggktOyVkH4hF1m/+LSF/Ap0NCyi8=
|
||||
github.com/grafana/grafana/pkg/apimachinery v0.0.0-20240808213237-f4d2e064f435/go.mod h1:ORVFiW/KNRY52lNjkGwnFWCxNVfE97bJG2jr2fetq0I=
|
||||
github.com/grafana/grafana/pkg/semconv v0.0.0-20240808213237-f4d2e064f435 h1:SNEeqY22DrGr5E9kGF1mKSqlOom14W9+b1u4XEGJowA=
|
||||
|
||||
@@ -3,8 +3,8 @@ module github.com/grafana/grafana/pkg/apimachinery
|
||||
go 1.23.1
|
||||
|
||||
require (
|
||||
github.com/grafana/authlib v0.0.0-20240919120951-58259833c564 // @grafana/identity-access-team
|
||||
github.com/grafana/authlib/claims v0.0.0-20240903121118-16441568af1e // @grafana/identity-access-team
|
||||
github.com/grafana/authlib v0.0.0-20241014135010-3e1f37f75699 // @grafana/identity-access-team
|
||||
github.com/grafana/authlib/claims v0.0.0-20240926100702-4aee62663da0 // @grafana/identity-access-team
|
||||
github.com/stretchr/testify v1.9.0
|
||||
k8s.io/apimachinery v0.31.1
|
||||
k8s.io/apiserver v0.31.1
|
||||
|
||||
@@ -28,10 +28,10 @@ github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeN
|
||||
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0=
|
||||
github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
github.com/grafana/authlib v0.0.0-20240919120951-58259833c564 h1:zYF/RBulpvMqPYR3gbzJZ8t/j/Eymn5FNidSYkueNCA=
|
||||
github.com/grafana/authlib v0.0.0-20240919120951-58259833c564/go.mod h1:PFzXbCrn0GIpN4KwT6NP1l5Z1CPLfmKHnYx8rZzQcyY=
|
||||
github.com/grafana/authlib/claims v0.0.0-20240903121118-16441568af1e h1:ng5SopWamGS0MHaCj2e5huWYxAfMeCrj1l/dbJnfiow=
|
||||
github.com/grafana/authlib/claims v0.0.0-20240903121118-16441568af1e/go.mod h1:r+F8H6awwjNQt/KPZ2GNwjk8TvsJ7/gxzkXN26GlL/A=
|
||||
github.com/grafana/authlib v0.0.0-20241014135010-3e1f37f75699 h1:+xSpRpQPhMXAE9z68u0zMzzIa78jy1UqFb4tMJczFNc=
|
||||
github.com/grafana/authlib v0.0.0-20241014135010-3e1f37f75699/go.mod h1:fhuI+ulquEIVcLsbwPml9JapWQzg8EYBp29HteO62DM=
|
||||
github.com/grafana/authlib/claims v0.0.0-20240926100702-4aee62663da0 h1:XT/WvQCWVVOvXRJy0SCQHkhxXFHNRJ3+jzhW5PutEk8=
|
||||
github.com/grafana/authlib/claims v0.0.0-20240926100702-4aee62663da0/go.mod h1:r+F8H6awwjNQt/KPZ2GNwjk8TvsJ7/gxzkXN26GlL/A=
|
||||
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
|
||||
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
|
||||
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
|
||||
|
||||
@@ -4,7 +4,7 @@ go 1.23.1
|
||||
|
||||
require (
|
||||
github.com/google/go-cmp v0.6.0
|
||||
github.com/grafana/authlib/claims v0.0.0-20240903121118-16441568af1e
|
||||
github.com/grafana/authlib/claims v0.0.0-20240926100702-4aee62663da0
|
||||
github.com/grafana/grafana/pkg/apimachinery v0.0.0-20240701135906-559738ce6ae1
|
||||
github.com/prometheus/client_golang v1.20.4
|
||||
github.com/stretchr/testify v1.9.0
|
||||
|
||||
@@ -78,8 +78,8 @@ github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
||||
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc=
|
||||
github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
|
||||
github.com/grafana/authlib/claims v0.0.0-20240903121118-16441568af1e h1:ng5SopWamGS0MHaCj2e5huWYxAfMeCrj1l/dbJnfiow=
|
||||
github.com/grafana/authlib/claims v0.0.0-20240903121118-16441568af1e/go.mod h1:r+F8H6awwjNQt/KPZ2GNwjk8TvsJ7/gxzkXN26GlL/A=
|
||||
github.com/grafana/authlib/claims v0.0.0-20240926100702-4aee62663da0 h1:XT/WvQCWVVOvXRJy0SCQHkhxXFHNRJ3+jzhW5PutEk8=
|
||||
github.com/grafana/authlib/claims v0.0.0-20240926100702-4aee62663da0/go.mod h1:r+F8H6awwjNQt/KPZ2GNwjk8TvsJ7/gxzkXN26GlL/A=
|
||||
github.com/grafana/grafana/pkg/apimachinery v0.0.0-20240701135906-559738ce6ae1 h1:ItDcDxUjVLPKja+hogpqgW/kj8LxUL2qscelXIsN1Bs=
|
||||
github.com/grafana/grafana/pkg/apimachinery v0.0.0-20240701135906-559738ce6ae1/go.mod h1:DkxMin+qOh1Fgkxfbt+CUfBqqsCQJMG9op8Os/irBPA=
|
||||
github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 h1:UH//fgunKIs4JdUbpDl1VZCDaL56wXCB/5+wF6uHfaI=
|
||||
|
||||
@@ -6,7 +6,6 @@ import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"math/rand"
|
||||
"time"
|
||||
|
||||
"github.com/prometheus/client_golang/prometheus"
|
||||
@@ -306,54 +305,3 @@ func getName(o runtime.Object) string {
|
||||
}
|
||||
return accessor.GetName()
|
||||
}
|
||||
|
||||
const dataSyncerInterval = 60 * time.Minute
|
||||
|
||||
// StartPeriodicDataSyncer starts a background job that will execute the DataSyncer every 60 minutes
|
||||
func StartPeriodicDataSyncer(ctx context.Context, mode DualWriterMode, legacy LegacyStorage, storage Storage,
|
||||
kind string, reg prometheus.Registerer, serverLockService ServerLockService, requestInfo *request.RequestInfo) {
|
||||
klog.Info("Starting periodic data syncer for mode mode: ", mode)
|
||||
|
||||
// run in background
|
||||
go func() {
|
||||
r := rand.New(rand.NewSource(time.Now().UnixNano()))
|
||||
timeWindow := 600 // 600 seconds (10 minutes)
|
||||
jitterSeconds := r.Int63n(int64(timeWindow))
|
||||
klog.Info("data syncer is going to start at: ", time.Now().Add(time.Second*time.Duration(jitterSeconds)))
|
||||
time.Sleep(time.Second * time.Duration(jitterSeconds))
|
||||
|
||||
// run it immediately
|
||||
syncOK, err := runDataSyncer(ctx, mode, legacy, storage, kind, reg, serverLockService, requestInfo)
|
||||
klog.Info("data syncer finished, syncOK: ", syncOK, ", error: ", err)
|
||||
|
||||
ticker := time.NewTicker(dataSyncerInterval)
|
||||
for {
|
||||
select {
|
||||
case <-ticker.C:
|
||||
syncOK, err = runDataSyncer(ctx, mode, legacy, storage, kind, reg, serverLockService, requestInfo)
|
||||
klog.Info("data syncer finished, syncOK: ", syncOK, ", error: ", err)
|
||||
case <-ctx.Done():
|
||||
return
|
||||
}
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
// runDataSyncer will ensure that data between legacy storage and unified storage are in sync.
|
||||
// The sync implementation depends on the DualWriter mode
|
||||
func runDataSyncer(ctx context.Context, mode DualWriterMode, legacy LegacyStorage, storage Storage,
|
||||
kind string, reg prometheus.Registerer, serverLockService ServerLockService, requestInfo *request.RequestInfo) (bool, error) {
|
||||
// ensure that execution takes no longer than necessary
|
||||
const timeout = dataSyncerInterval - time.Minute
|
||||
ctx, cancelFn := context.WithTimeout(ctx, timeout)
|
||||
defer cancelFn()
|
||||
|
||||
// implementation depends on the current DualWriter mode
|
||||
switch mode {
|
||||
case Mode2:
|
||||
return mode2DataSyncer(ctx, legacy, storage, kind, reg, serverLockService, requestInfo)
|
||||
default:
|
||||
klog.Info("data syncer not implemented for mode mode:", mode)
|
||||
return false, nil
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,21 +2,16 @@ package rest
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/prometheus/client_golang/prometheus"
|
||||
apierrors "k8s.io/apimachinery/pkg/api/errors"
|
||||
"k8s.io/apimachinery/pkg/api/meta"
|
||||
metainternalversion "k8s.io/apimachinery/pkg/apis/meta/internalversion"
|
||||
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
"k8s.io/apimachinery/pkg/runtime"
|
||||
"k8s.io/apiserver/pkg/endpoints/request"
|
||||
"k8s.io/apiserver/pkg/registry/rest"
|
||||
"k8s.io/klog/v2"
|
||||
|
||||
"github.com/grafana/authlib/claims"
|
||||
"github.com/grafana/grafana/pkg/apimachinery/identity"
|
||||
"github.com/grafana/grafana/pkg/apimachinery/utils"
|
||||
)
|
||||
|
||||
@@ -392,213 +387,3 @@ func enrichLegacyObject(originalObj, returnedObj runtime.Object) error {
|
||||
accessorReturned.SetUID(accessorOriginal.GetUID())
|
||||
return nil
|
||||
}
|
||||
|
||||
func getSyncRequester(orgId int64) *identity.StaticRequester {
|
||||
return &identity.StaticRequester{
|
||||
Type: claims.TypeServiceAccount, // system:apiserver
|
||||
UserID: 1,
|
||||
OrgID: orgId,
|
||||
Name: "admin",
|
||||
Login: "admin",
|
||||
OrgRole: identity.RoleAdmin,
|
||||
IsGrafanaAdmin: true,
|
||||
Permissions: map[int64]map[string][]string{
|
||||
orgId: {
|
||||
"*": {"*"}, // all resources, all scopes
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
type syncItem struct {
|
||||
name string
|
||||
objStorage runtime.Object
|
||||
objLegacy runtime.Object
|
||||
}
|
||||
|
||||
func getList(ctx context.Context, obj rest.Lister, listOptions *metainternalversion.ListOptions) ([]runtime.Object, error) {
|
||||
ll, err := obj.List(ctx, listOptions)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return meta.ExtractList(ll)
|
||||
}
|
||||
|
||||
func mode2DataSyncer(ctx context.Context, legacy LegacyStorage, storage Storage, resource string, reg prometheus.Registerer, serverLockService ServerLockService, requestInfo *request.RequestInfo) (bool, error) {
|
||||
metrics := &dualWriterMetrics{}
|
||||
metrics.init(reg)
|
||||
|
||||
log := klog.NewKlogr().WithName("DualWriterMode2Syncer")
|
||||
|
||||
everythingSynced := false
|
||||
outOfSync := 0
|
||||
syncSuccess := 0
|
||||
syncErr := 0
|
||||
|
||||
maxInterval := dataSyncerInterval + 5*time.Minute
|
||||
|
||||
var errSync error
|
||||
const maxRecordsSync = 1000
|
||||
|
||||
// LockExecuteAndRelease ensures that just a single Grafana server acquires a lock at a time
|
||||
// The parameter 'maxInterval' is a timeout safeguard, if the LastExecution in the
|
||||
// database is older than maxInterval, we will assume the lock as timeouted. The 'maxInterval' parameter should be so long
|
||||
// that is impossible for 2 processes to run at the same time.
|
||||
err := serverLockService.LockExecuteAndRelease(ctx, "dualwriter mode 2 sync", maxInterval, func(context.Context) {
|
||||
log.Info("starting dualwriter mode 2 sync")
|
||||
startSync := time.Now()
|
||||
|
||||
orgId := int64(1)
|
||||
|
||||
ctx = klog.NewContext(ctx, log)
|
||||
ctx = identity.WithRequester(ctx, getSyncRequester(orgId))
|
||||
ctx = request.WithNamespace(ctx, requestInfo.Namespace)
|
||||
ctx = request.WithRequestInfo(ctx, requestInfo)
|
||||
|
||||
storageList, err := getList(ctx, storage, &metainternalversion.ListOptions{
|
||||
Limit: maxRecordsSync,
|
||||
})
|
||||
if err != nil {
|
||||
log.Error(err, "unable to extract list from storage")
|
||||
return
|
||||
}
|
||||
|
||||
if len(storageList) >= maxRecordsSync {
|
||||
errSync = fmt.Errorf("unified storage has more than %d records. Aborting sync", maxRecordsSync)
|
||||
log.Error(errSync, "Unified storage has more records to be synced than allowed")
|
||||
return
|
||||
}
|
||||
|
||||
log.Info("got items from unified storage", "items", len(storageList))
|
||||
|
||||
legacyList, err := getList(ctx, legacy, &metainternalversion.ListOptions{})
|
||||
if err != nil {
|
||||
log.Error(err, "unable to extract list from legacy storage")
|
||||
return
|
||||
}
|
||||
log.Info("got items from legacy storage", "items", len(legacyList))
|
||||
|
||||
itemsByName := map[string]syncItem{}
|
||||
for _, obj := range legacyList {
|
||||
accessor, err := utils.MetaAccessor(obj)
|
||||
if err != nil {
|
||||
log.Error(err, "error retrieving accessor data for object from legacy storage")
|
||||
continue
|
||||
}
|
||||
name := accessor.GetName()
|
||||
|
||||
item, ok := itemsByName[name]
|
||||
if !ok {
|
||||
item = syncItem{}
|
||||
}
|
||||
item.name = name
|
||||
item.objLegacy = obj
|
||||
itemsByName[name] = item
|
||||
}
|
||||
|
||||
for _, obj := range storageList {
|
||||
accessor, err := utils.MetaAccessor(obj)
|
||||
if err != nil {
|
||||
log.Error(err, "error retrieving accessor data for object from storage")
|
||||
continue
|
||||
}
|
||||
name := accessor.GetName()
|
||||
|
||||
item, ok := itemsByName[name]
|
||||
if !ok {
|
||||
item = syncItem{}
|
||||
}
|
||||
item.name = name
|
||||
item.objStorage = obj
|
||||
itemsByName[name] = item
|
||||
}
|
||||
log.Info("got list of items to be synced", "items", len(itemsByName))
|
||||
|
||||
for name, item := range itemsByName {
|
||||
// upsert if:
|
||||
// - existing in both legacy and storage, but objects are different, or
|
||||
// - if it's missing from storage
|
||||
if item.objLegacy != nil &&
|
||||
((item.objStorage != nil && !Compare(item.objLegacy, item.objStorage)) || (item.objStorage == nil)) {
|
||||
outOfSync++
|
||||
|
||||
accessor, err := utils.MetaAccessor(item.objLegacy)
|
||||
if err != nil {
|
||||
log.Error(err, "error retrieving accessor data for object from storage")
|
||||
continue
|
||||
}
|
||||
|
||||
if item.objStorage != nil {
|
||||
accessorStorage, err := utils.MetaAccessor(item.objStorage)
|
||||
if err != nil {
|
||||
log.Error(err, "error retrieving accessor data for object from storage")
|
||||
continue
|
||||
}
|
||||
accessor.SetResourceVersion(accessorStorage.GetResourceVersion())
|
||||
accessor.SetUID(accessorStorage.GetUID())
|
||||
|
||||
log.Info("updating item on unified storage", "name", name)
|
||||
} else {
|
||||
accessor.SetResourceVersion("")
|
||||
accessor.SetUID("")
|
||||
|
||||
log.Info("inserting item on unified storage", "name", name)
|
||||
}
|
||||
|
||||
objInfo := rest.DefaultUpdatedObjectInfo(item.objLegacy, []rest.TransformFunc{}...)
|
||||
res, _, err := storage.Update(ctx,
|
||||
name,
|
||||
objInfo,
|
||||
func(ctx context.Context, obj runtime.Object) error { return nil },
|
||||
func(ctx context.Context, obj, old runtime.Object) error { return nil },
|
||||
true, // force creation
|
||||
&metav1.UpdateOptions{},
|
||||
)
|
||||
if err != nil {
|
||||
log.WithValues("object", res).Error(err, "could not update in storage")
|
||||
syncErr++
|
||||
} else {
|
||||
syncSuccess++
|
||||
}
|
||||
}
|
||||
|
||||
// delete if object does not exists on legacy but exists on storage
|
||||
if item.objLegacy == nil && item.objStorage != nil {
|
||||
outOfSync++
|
||||
|
||||
ctx = request.WithRequestInfo(ctx, &request.RequestInfo{
|
||||
APIGroup: requestInfo.APIGroup,
|
||||
Resource: requestInfo.Resource,
|
||||
Name: name,
|
||||
Namespace: requestInfo.Namespace,
|
||||
})
|
||||
|
||||
log.Info("deleting item from unified storage", "name", name)
|
||||
|
||||
deletedS, _, err := storage.Delete(ctx, name, func(ctx context.Context, obj runtime.Object) error { return nil }, &metav1.DeleteOptions{})
|
||||
if err != nil {
|
||||
if !apierrors.IsNotFound(err) {
|
||||
log.WithValues("objectList", deletedS).Error(err, "could not delete from storage")
|
||||
}
|
||||
syncErr++
|
||||
} else {
|
||||
syncSuccess++
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
everythingSynced = outOfSync == syncSuccess
|
||||
|
||||
metrics.recordDataSyncerOutcome(mode2Str, resource, everythingSynced)
|
||||
metrics.recordDataSyncerDuration(err != nil, mode2Str, resource, startSync)
|
||||
|
||||
log.Info("finished syncing items", "items", len(itemsByName), "updated", syncSuccess, "failed", syncErr, "outcome", everythingSynced)
|
||||
})
|
||||
|
||||
if errSync != nil {
|
||||
err = errSync
|
||||
}
|
||||
|
||||
return everythingSynced, err
|
||||
}
|
||||
|
||||
@@ -4,7 +4,6 @@ import (
|
||||
"context"
|
||||
"errors"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/mock"
|
||||
@@ -16,7 +15,6 @@ import (
|
||||
"k8s.io/apimachinery/pkg/runtime/schema"
|
||||
"k8s.io/apimachinery/pkg/types"
|
||||
"k8s.io/apiserver/pkg/apis/example"
|
||||
"k8s.io/apiserver/pkg/endpoints/request"
|
||||
)
|
||||
|
||||
var createFn = func(context.Context, runtime.Object) error { return nil }
|
||||
@@ -609,197 +607,3 @@ func TestEnrichReturnedObject(t *testing.T) {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
var legacyObj1 = &example.Pod{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ObjectMeta: metav1.ObjectMeta{Name: "foo1", ResourceVersion: "1", CreationTimestamp: metav1.Time{}}, Spec: example.PodSpec{}, Status: example.PodStatus{StartTime: &metav1.Time{Time: time.Now()}}}
|
||||
var legacyObj2 = &example.Pod{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ObjectMeta: metav1.ObjectMeta{Name: "foo2", ResourceVersion: "1", CreationTimestamp: metav1.Time{}}, Spec: example.PodSpec{}, Status: example.PodStatus{StartTime: &metav1.Time{Time: time.Now()}}}
|
||||
var legacyObj3 = &example.Pod{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ObjectMeta: metav1.ObjectMeta{Name: "foo3", ResourceVersion: "1", CreationTimestamp: metav1.Time{}}, Spec: example.PodSpec{}, Status: example.PodStatus{StartTime: &metav1.Time{Time: time.Now()}}}
|
||||
var legacyObj4 = &example.Pod{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ObjectMeta: metav1.ObjectMeta{Name: "foo4", ResourceVersion: "1", CreationTimestamp: metav1.Time{}}, Spec: example.PodSpec{}, Status: example.PodStatus{StartTime: &metav1.Time{Time: time.Now()}}}
|
||||
|
||||
var legacyObj2WithHostname = &example.Pod{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ObjectMeta: metav1.ObjectMeta{Name: "foo2", ResourceVersion: "1", CreationTimestamp: metav1.Time{}}, Spec: example.PodSpec{Hostname: "hostname"}, Status: example.PodStatus{StartTime: &metav1.Time{Time: time.Now()}}}
|
||||
|
||||
var storageObj1 = &example.Pod{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ObjectMeta: metav1.ObjectMeta{Name: "foo1", ResourceVersion: "1", CreationTimestamp: metav1.Time{}}, Spec: example.PodSpec{}, Status: example.PodStatus{StartTime: &metav1.Time{Time: time.Now()}}}
|
||||
var storageObj2 = &example.Pod{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ObjectMeta: metav1.ObjectMeta{Name: "foo2", ResourceVersion: "1", CreationTimestamp: metav1.Time{}}, Spec: example.PodSpec{}, Status: example.PodStatus{StartTime: &metav1.Time{Time: time.Now()}}}
|
||||
var storageObj3 = &example.Pod{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ObjectMeta: metav1.ObjectMeta{Name: "foo3", ResourceVersion: "1", CreationTimestamp: metav1.Time{}}, Spec: example.PodSpec{}, Status: example.PodStatus{StartTime: &metav1.Time{Time: time.Now()}}}
|
||||
var storageObj4 = &example.Pod{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ObjectMeta: metav1.ObjectMeta{Name: "foo4", ResourceVersion: "1", CreationTimestamp: metav1.Time{}}, Spec: example.PodSpec{}, Status: example.PodStatus{StartTime: &metav1.Time{Time: time.Now()}}}
|
||||
|
||||
var legacyListWith3items = &example.PodList{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ListMeta: metav1.ListMeta{},
|
||||
Items: []example.Pod{
|
||||
*legacyObj1,
|
||||
*legacyObj2,
|
||||
*legacyObj3,
|
||||
}}
|
||||
|
||||
var legacyListWith4items = &example.PodList{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ListMeta: metav1.ListMeta{},
|
||||
Items: []example.Pod{
|
||||
*legacyObj1,
|
||||
*legacyObj2,
|
||||
*legacyObj3,
|
||||
*legacyObj4,
|
||||
}}
|
||||
|
||||
var legacyListWith3itemsObj2IsDifferent = &example.PodList{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ListMeta: metav1.ListMeta{},
|
||||
Items: []example.Pod{
|
||||
*legacyObj1,
|
||||
*legacyObj2WithHostname,
|
||||
*legacyObj3,
|
||||
}}
|
||||
|
||||
var storageListWith3items = &example.PodList{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ListMeta: metav1.ListMeta{},
|
||||
Items: []example.Pod{
|
||||
*storageObj1,
|
||||
*storageObj2,
|
||||
*storageObj3,
|
||||
}}
|
||||
|
||||
var storageListWith4items = &example.PodList{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ListMeta: metav1.ListMeta{},
|
||||
Items: []example.Pod{
|
||||
*storageObj1,
|
||||
*storageObj2,
|
||||
*storageObj3,
|
||||
*storageObj4,
|
||||
}}
|
||||
|
||||
var storageListWith3itemsMissingFoo2 = &example.PodList{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ListMeta: metav1.ListMeta{},
|
||||
Items: []example.Pod{
|
||||
*storageObj1,
|
||||
*storageObj3,
|
||||
*storageObj4,
|
||||
}}
|
||||
|
||||
func TestMode2_DataSyncer(t *testing.T) {
|
||||
type testCase struct {
|
||||
setupLegacyFn func(m *mock.Mock)
|
||||
setupStorageFn func(m *mock.Mock)
|
||||
name string
|
||||
expectedOutcome bool
|
||||
wantErr bool
|
||||
}
|
||||
tests :=
|
||||
[]testCase{
|
||||
{
|
||||
name: "both stores are in sync",
|
||||
setupLegacyFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(legacyListWith3items, nil)
|
||||
},
|
||||
setupStorageFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(storageListWith3items, nil)
|
||||
},
|
||||
expectedOutcome: true,
|
||||
},
|
||||
{
|
||||
name: "both stores are in sync - fail to list from legacy",
|
||||
setupLegacyFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(legacyListWith3items, errors.New("error"))
|
||||
},
|
||||
setupStorageFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(storageListWith3items, nil)
|
||||
},
|
||||
expectedOutcome: false,
|
||||
},
|
||||
{
|
||||
name: "both stores are in sync - fail to list from storage",
|
||||
setupLegacyFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(legacyListWith3items, nil)
|
||||
},
|
||||
setupStorageFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(storageListWith3items, errors.New("error"))
|
||||
},
|
||||
expectedOutcome: false,
|
||||
},
|
||||
{
|
||||
name: "storage is missing 1 entry (foo4)",
|
||||
setupLegacyFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(legacyListWith4items, nil)
|
||||
},
|
||||
setupStorageFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(storageListWith3items, nil)
|
||||
m.On("Update", mock.Anything, "foo4", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(exampleObj, false, nil)
|
||||
},
|
||||
expectedOutcome: true,
|
||||
},
|
||||
{
|
||||
name: "storage needs to be update (foo2 is different)",
|
||||
setupLegacyFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(legacyListWith3itemsObj2IsDifferent, nil)
|
||||
},
|
||||
setupStorageFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(storageListWith3items, nil)
|
||||
m.On("Update", mock.Anything, "foo2", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(exampleObj, false, nil)
|
||||
},
|
||||
expectedOutcome: true,
|
||||
},
|
||||
{
|
||||
name: "storage is missing 1 entry (foo4) - fail to upsert",
|
||||
setupLegacyFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(legacyListWith4items, nil)
|
||||
},
|
||||
setupStorageFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(storageListWith3items, nil)
|
||||
m.On("Update", mock.Anything, "foo4", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(exampleObj, false, errors.New("error"))
|
||||
},
|
||||
expectedOutcome: false,
|
||||
},
|
||||
{
|
||||
name: "storage has an extra 1 entry (foo4)",
|
||||
setupLegacyFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(legacyListWith3items, nil)
|
||||
},
|
||||
setupStorageFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(storageListWith4items, nil)
|
||||
m.On("Delete", mock.Anything, "foo4", mock.Anything, mock.Anything).Return(exampleObj, false, nil)
|
||||
},
|
||||
expectedOutcome: true,
|
||||
},
|
||||
{
|
||||
name: "storage has an extra 1 entry (foo4) - fail to delete",
|
||||
setupLegacyFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(legacyListWith3items, nil)
|
||||
},
|
||||
setupStorageFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(storageListWith4items, nil)
|
||||
m.On("Delete", mock.Anything, "foo4", mock.Anything, mock.Anything).Return(exampleObj, false, errors.New("error"))
|
||||
},
|
||||
expectedOutcome: false,
|
||||
},
|
||||
{
|
||||
name: "storage is missing 1 entry (foo3) and has an extra 1 entry (foo4)",
|
||||
setupLegacyFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(legacyListWith3items, nil)
|
||||
},
|
||||
setupStorageFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(storageListWith3itemsMissingFoo2, nil)
|
||||
m.On("Update", mock.Anything, "foo2", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(exampleObj, false, nil)
|
||||
m.On("Delete", mock.Anything, "foo4", mock.Anything, mock.Anything).Return(exampleObj, false, nil)
|
||||
},
|
||||
expectedOutcome: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
l := (LegacyStorage)(nil)
|
||||
s := (Storage)(nil)
|
||||
lm := &mock.Mock{}
|
||||
um := &mock.Mock{}
|
||||
|
||||
ls := legacyStoreMock{lm, l}
|
||||
us := storageMock{um, s}
|
||||
|
||||
if tt.setupLegacyFn != nil {
|
||||
tt.setupLegacyFn(lm)
|
||||
}
|
||||
if tt.setupStorageFn != nil {
|
||||
tt.setupStorageFn(um)
|
||||
}
|
||||
|
||||
outcome, err := mode2DataSyncer(context.Background(), ls, us, "test.kind", p, &fakeServerLock{}, &request.RequestInfo{})
|
||||
if tt.wantErr {
|
||||
assert.Error(t, err)
|
||||
return
|
||||
}
|
||||
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, tt.expectedOutcome, outcome)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
271
pkg/apiserver/rest/dualwriter_syncer.go
Normal file
271
pkg/apiserver/rest/dualwriter_syncer.go
Normal file
@@ -0,0 +1,271 @@
|
||||
package rest
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"math/rand"
|
||||
"time"
|
||||
|
||||
"github.com/prometheus/client_golang/prometheus"
|
||||
apierrors "k8s.io/apimachinery/pkg/api/errors"
|
||||
"k8s.io/apimachinery/pkg/api/meta"
|
||||
metainternalversion "k8s.io/apimachinery/pkg/apis/meta/internalversion"
|
||||
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
"k8s.io/apimachinery/pkg/runtime"
|
||||
"k8s.io/apiserver/pkg/endpoints/request"
|
||||
"k8s.io/apiserver/pkg/registry/rest"
|
||||
"k8s.io/klog/v2"
|
||||
|
||||
"github.com/grafana/authlib/claims"
|
||||
"github.com/grafana/grafana/pkg/apimachinery/identity"
|
||||
"github.com/grafana/grafana/pkg/apimachinery/utils"
|
||||
)
|
||||
|
||||
type syncItem struct {
|
||||
name string
|
||||
objStorage runtime.Object
|
||||
objLegacy runtime.Object
|
||||
accessorStorage utils.GrafanaMetaAccessor
|
||||
accessorLegacy utils.GrafanaMetaAccessor
|
||||
}
|
||||
|
||||
const dataSyncerInterval = 60 * time.Minute
|
||||
|
||||
// StartPeriodicDataSyncer starts a background job that will execute the DataSyncer every 60 minutes
|
||||
func StartPeriodicDataSyncer(ctx context.Context, mode DualWriterMode, legacy LegacyStorage, storage Storage,
|
||||
kind string, reg prometheus.Registerer, serverLockService ServerLockService, requestInfo *request.RequestInfo) {
|
||||
log := klog.NewKlogr().WithName("legacyToUnifiedStorageDataSyncer").WithValues("mode", mode, "resource", kind)
|
||||
|
||||
log.Info("Starting periodic data syncer")
|
||||
|
||||
// run in background
|
||||
go func() {
|
||||
r := rand.New(rand.NewSource(time.Now().UnixNano()))
|
||||
timeWindow := 600 // 600 seconds (10 minutes)
|
||||
jitterSeconds := r.Int63n(int64(timeWindow))
|
||||
log.Info("data syncer scheduled", "starting time", time.Now().Add(time.Second*time.Duration(jitterSeconds)))
|
||||
time.Sleep(time.Second * time.Duration(jitterSeconds))
|
||||
|
||||
// run it immediately
|
||||
syncOK, err := runDataSyncer(ctx, mode, legacy, storage, kind, reg, serverLockService, requestInfo)
|
||||
log.Info("data syncer finished", "syncOK", syncOK, "error", err)
|
||||
|
||||
ticker := time.NewTicker(dataSyncerInterval)
|
||||
for {
|
||||
select {
|
||||
case <-ticker.C:
|
||||
syncOK, err = runDataSyncer(ctx, mode, legacy, storage, kind, reg, serverLockService, requestInfo)
|
||||
log.Info("data syncer finished", "syncOK", syncOK, ", error", err)
|
||||
case <-ctx.Done():
|
||||
return
|
||||
}
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
// runDataSyncer will ensure that data between legacy storage and unified storage are in sync.
|
||||
// The sync implementation depends on the DualWriter mode
|
||||
func runDataSyncer(ctx context.Context, mode DualWriterMode, legacy LegacyStorage, storage Storage,
|
||||
kind string, reg prometheus.Registerer, serverLockService ServerLockService, requestInfo *request.RequestInfo) (bool, error) {
|
||||
// ensure that execution takes no longer than necessary
|
||||
const timeout = dataSyncerInterval - time.Minute
|
||||
ctx, cancelFn := context.WithTimeout(ctx, timeout)
|
||||
defer cancelFn()
|
||||
|
||||
// implementation depends on the current DualWriter mode
|
||||
switch mode {
|
||||
case Mode1, Mode2:
|
||||
return legacyToUnifiedStorageDataSyncer(ctx, mode, legacy, storage, kind, reg, serverLockService, requestInfo)
|
||||
default:
|
||||
klog.Info("data syncer not implemented for mode mode:", mode)
|
||||
return false, nil
|
||||
}
|
||||
}
|
||||
|
||||
// legacyToUnifiedStorageDataSyncer reconciles one resource kind from legacy storage into
// unified storage under a server-wide lock: it upserts items that are missing from or
// different in unified storage, and deletes items that exist only in unified storage.
// It returns true when every out-of-sync item was reconciled successfully, along with
// any error from the lock service or from the record-count safeguard.
func legacyToUnifiedStorageDataSyncer(ctx context.Context, mode DualWriterMode, legacy LegacyStorage, storage Storage, resource string, reg prometheus.Registerer, serverLockService ServerLockService, requestInfo *request.RequestInfo) (bool, error) {
	metrics := &dualWriterMetrics{}
	metrics.init(reg)

	log := klog.NewKlogr().WithName("legacyToUnifiedStorageDataSyncer").WithValues("mode", mode, "resource", resource)

	everythingSynced := false
	outOfSync := 0   // items that differ between the two stores
	syncSuccess := 0 // items upserted or deleted successfully
	syncErr := 0     // items whose upsert/delete failed

	maxInterval := dataSyncerInterval + 5*time.Minute

	var errSync error
	const maxRecordsSync = 1000 // safeguard: abort the sync above this many records

	// LockExecuteAndRelease ensures that just a single Grafana server acquires a lock at a time
	// The parameter 'maxInterval' is a timeout safeguard, if the LastExecution in the
	// database is older than maxInterval, we will assume the lock as timeouted. The 'maxInterval' parameter should be so long
	// that is impossible for 2 processes to run at the same time.
	err := serverLockService.LockExecuteAndRelease(ctx, fmt.Sprintf("legacyToUnifiedStorageDataSyncer-%d-%s", mode, resource), maxInterval, func(context.Context) {
		log.Info("starting legacyToUnifiedStorageDataSyncer")
		startSync := time.Now()

		// NOTE(review): org is hard-coded to 1 — presumably the sync only covers the
		// default org; confirm before relying on this for multi-org installs.
		orgId := int64(1)

		// run all storage calls as a privileged sync identity in the caller's namespace
		ctx = klog.NewContext(ctx, log)
		ctx = identity.WithRequester(ctx, getSyncRequester(orgId))
		ctx = request.WithNamespace(ctx, requestInfo.Namespace)
		ctx = request.WithRequestInfo(ctx, requestInfo)

		storageList, err := getList(ctx, storage, &metainternalversion.ListOptions{
			Limit: maxRecordsSync,
		})
		if err != nil {
			log.Error(err, "unable to extract list from storage")
			return
		}

		// the listing was capped at maxRecordsSync, so hitting the limit means
		// there may be more records than we are willing to reconcile in one pass
		if len(storageList) >= maxRecordsSync {
			errSync = fmt.Errorf("unified storage has more than %d records. Aborting sync", maxRecordsSync)
			log.Error(errSync, "Unified storage has more records to be synced than allowed")
			return
		}

		log.Info("got items from unified storage", "items", len(storageList))

		legacyList, err := getList(ctx, legacy, &metainternalversion.ListOptions{})
		if err != nil {
			log.Error(err, "unable to extract list from legacy storage")
			return
		}
		log.Info("got items from legacy storage", "items", len(legacyList))

		// index both listings by object name so each name yields one syncItem
		// carrying whichever sides (legacy/storage) it exists on
		itemsByName := map[string]syncItem{}
		for _, obj := range legacyList {
			accessor, err := utils.MetaAccessor(obj)
			if err != nil {
				log.Error(err, "error retrieving accessor data for object from legacy storage")
				continue
			}
			name := accessor.GetName()

			item := itemsByName[name]
			item.name = name
			item.objLegacy = obj
			item.accessorLegacy = accessor
			itemsByName[name] = item
		}

		for _, obj := range storageList {
			accessor, err := utils.MetaAccessor(obj)
			if err != nil {
				log.Error(err, "error retrieving accessor data for object from storage")
				continue
			}
			name := accessor.GetName()

			item := itemsByName[name]
			item.name = name
			item.objStorage = obj
			item.accessorStorage = accessor
			itemsByName[name] = item
		}
		log.Info("got list of items to be synced", "items", len(itemsByName))

		for name, item := range itemsByName {
			// upsert if:
			// - existing in both legacy and storage, but objects are different, or
			// - if it's missing from storage
			if item.objLegacy != nil &&
				(item.objStorage == nil || !Compare(item.objLegacy, item.objStorage)) {
				outOfSync++

				if item.objStorage != nil {
					// update: carry over storage's RV/UID so the write targets the existing object
					item.accessorLegacy.SetResourceVersion(item.accessorStorage.GetResourceVersion())
					item.accessorLegacy.SetUID(item.accessorStorage.GetUID())

					log.Info("updating item on unified storage", "name", name)
				} else {
					// insert: clear RV/UID so storage assigns fresh ones
					item.accessorLegacy.SetResourceVersion("")
					item.accessorLegacy.SetUID("")

					log.Info("inserting item on unified storage", "name", name)
				}

				objInfo := rest.DefaultUpdatedObjectInfo(item.objLegacy, []rest.TransformFunc{}...)
				res, _, err := storage.Update(ctx,
					name,
					objInfo,
					func(ctx context.Context, obj runtime.Object) error { return nil },
					func(ctx context.Context, obj, old runtime.Object) error { return nil },
					true, // force creation
					&metav1.UpdateOptions{},
				)
				if err != nil {
					log.WithValues("object", res).Error(err, "could not update in storage")
					syncErr++
				} else {
					syncSuccess++
				}
			}

			// delete if object does not exists on legacy but exists on storage
			if item.objLegacy == nil && item.objStorage != nil {
				outOfSync++

				// NOTE(review): ctx is reassigned inside the loop, so later iterations
				// (and upserts) inherit this per-item RequestInfo — confirm intentional.
				ctx = request.WithRequestInfo(ctx, &request.RequestInfo{
					APIGroup:  requestInfo.APIGroup,
					Resource:  requestInfo.Resource,
					Name:      name,
					Namespace: requestInfo.Namespace,
				})

				log.Info("deleting item from unified storage", "name", name)

				// a NotFound error is fine: the item is already gone from storage
				deletedS, _, err := storage.Delete(ctx, name, func(ctx context.Context, obj runtime.Object) error { return nil }, &metav1.DeleteOptions{})
				if err != nil && !apierrors.IsNotFound(err) {
					log.WithValues("objectList", deletedS).Error(err, "could not delete from storage")
					syncErr++
					continue
				}

				syncSuccess++
			}
		}

		// success means every out-of-sync item was reconciled (0 == 0 when already in sync)
		everythingSynced = outOfSync == syncSuccess

		metrics.recordDataSyncerOutcome(mode, resource, everythingSynced)
		metrics.recordDataSyncerDuration(err != nil, mode, resource, startSync)

		log.Info("finished syncing items", "items", len(itemsByName), "updated", syncSuccess, "failed", syncErr, "outcome", everythingSynced)
	})

	// the record-count safeguard error takes precedence over the lock-service error
	if errSync != nil {
		err = errSync
	}

	return everythingSynced, err
}
|
||||
|
||||
func getSyncRequester(orgId int64) *identity.StaticRequester {
|
||||
return &identity.StaticRequester{
|
||||
Type: claims.TypeServiceAccount, // system:apiserver
|
||||
UserID: 1,
|
||||
OrgID: orgId,
|
||||
Name: "admin",
|
||||
Login: "admin",
|
||||
OrgRole: identity.RoleAdmin,
|
||||
IsGrafanaAdmin: true,
|
||||
Permissions: map[int64]map[string][]string{
|
||||
orgId: {
|
||||
"*": {"*"}, // all resources, all scopes
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func getList(ctx context.Context, obj rest.Lister, listOptions *metainternalversion.ListOptions) ([]runtime.Object, error) {
|
||||
ll, err := obj.List(ctx, listOptions)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return meta.ExtractList(ll)
|
||||
}
|
||||
238
pkg/apiserver/rest/dualwriter_syncer_test.go
Normal file
238
pkg/apiserver/rest/dualwriter_syncer_test.go
Normal file
@@ -0,0 +1,238 @@
|
||||
package rest
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/mock"
|
||||
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
"k8s.io/apiserver/pkg/apis/example"
|
||||
"k8s.io/apiserver/pkg/endpoints/request"
|
||||
)
|
||||
|
||||
// Legacy-side fixtures: four pods differing only by name.
var legacyObj1 = &example.Pod{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ObjectMeta: metav1.ObjectMeta{Name: "foo1", ResourceVersion: "1", CreationTimestamp: metav1.Time{}}, Spec: example.PodSpec{}, Status: example.PodStatus{StartTime: &metav1.Time{Time: time.Now()}}}
var legacyObj2 = &example.Pod{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ObjectMeta: metav1.ObjectMeta{Name: "foo2", ResourceVersion: "1", CreationTimestamp: metav1.Time{}}, Spec: example.PodSpec{}, Status: example.PodStatus{StartTime: &metav1.Time{Time: time.Now()}}}
var legacyObj3 = &example.Pod{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ObjectMeta: metav1.ObjectMeta{Name: "foo3", ResourceVersion: "1", CreationTimestamp: metav1.Time{}}, Spec: example.PodSpec{}, Status: example.PodStatus{StartTime: &metav1.Time{Time: time.Now()}}}
var legacyObj4 = &example.Pod{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ObjectMeta: metav1.ObjectMeta{Name: "foo4", ResourceVersion: "1", CreationTimestamp: metav1.Time{}}, Spec: example.PodSpec{}, Status: example.PodStatus{StartTime: &metav1.Time{Time: time.Now()}}}

// Same name as legacyObj2 but with a different Spec, used to exercise the "objects differ" upsert path.
var legacyObj2WithHostname = &example.Pod{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ObjectMeta: metav1.ObjectMeta{Name: "foo2", ResourceVersion: "1", CreationTimestamp: metav1.Time{}}, Spec: example.PodSpec{Hostname: "hostname"}, Status: example.PodStatus{StartTime: &metav1.Time{Time: time.Now()}}}

// Unified-storage-side fixtures mirroring the legacy pods by name.
var storageObj1 = &example.Pod{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ObjectMeta: metav1.ObjectMeta{Name: "foo1", ResourceVersion: "1", CreationTimestamp: metav1.Time{}}, Spec: example.PodSpec{}, Status: example.PodStatus{StartTime: &metav1.Time{Time: time.Now()}}}
var storageObj2 = &example.Pod{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ObjectMeta: metav1.ObjectMeta{Name: "foo2", ResourceVersion: "1", CreationTimestamp: metav1.Time{}}, Spec: example.PodSpec{}, Status: example.PodStatus{StartTime: &metav1.Time{Time: time.Now()}}}
var storageObj3 = &example.Pod{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ObjectMeta: metav1.ObjectMeta{Name: "foo3", ResourceVersion: "1", CreationTimestamp: metav1.Time{}}, Spec: example.PodSpec{}, Status: example.PodStatus{StartTime: &metav1.Time{Time: time.Now()}}}
var storageObj4 = &example.Pod{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ObjectMeta: metav1.ObjectMeta{Name: "foo4", ResourceVersion: "1", CreationTimestamp: metav1.Time{}}, Spec: example.PodSpec{}, Status: example.PodStatus{StartTime: &metav1.Time{Time: time.Now()}}}

// List fixtures combining the pods above into the scenarios the tests exercise.
var legacyListWith3items = &example.PodList{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ListMeta: metav1.ListMeta{},
	Items: []example.Pod{
		*legacyObj1,
		*legacyObj2,
		*legacyObj3,
	}}

var legacyListWith4items = &example.PodList{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ListMeta: metav1.ListMeta{},
	Items: []example.Pod{
		*legacyObj1,
		*legacyObj2,
		*legacyObj3,
		*legacyObj4,
	}}

var legacyListWith3itemsObj2IsDifferent = &example.PodList{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ListMeta: metav1.ListMeta{},
	Items: []example.Pod{
		*legacyObj1,
		*legacyObj2WithHostname,
		*legacyObj3,
	}}

var storageListWith3items = &example.PodList{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ListMeta: metav1.ListMeta{},
	Items: []example.Pod{
		*storageObj1,
		*storageObj2,
		*storageObj3,
	}}

var storageListWith4items = &example.PodList{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ListMeta: metav1.ListMeta{},
	Items: []example.Pod{
		*storageObj1,
		*storageObj2,
		*storageObj3,
		*storageObj4,
	}}

// Storage list missing foo2 (contains foo1, foo3, foo4), for the combined upsert+delete case.
var storageListWith3itemsMissingFoo2 = &example.PodList{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ListMeta: metav1.ListMeta{},
	Items: []example.Pod{
		*storageObj1,
		*storageObj3,
		*storageObj4,
	}}
|
||||
|
||||
func TestLegacyToUnifiedStorage_DataSyncer(t *testing.T) {
|
||||
type testCase struct {
|
||||
setupLegacyFn func(m *mock.Mock)
|
||||
setupStorageFn func(m *mock.Mock)
|
||||
name string
|
||||
expectedOutcome bool
|
||||
wantErr bool
|
||||
}
|
||||
tests :=
|
||||
[]testCase{
|
||||
{
|
||||
name: "both stores are in sync",
|
||||
setupLegacyFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(legacyListWith3items, nil)
|
||||
},
|
||||
setupStorageFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(storageListWith3items, nil)
|
||||
},
|
||||
expectedOutcome: true,
|
||||
},
|
||||
{
|
||||
name: "both stores are in sync - fail to list from legacy",
|
||||
setupLegacyFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(legacyListWith3items, errors.New("error"))
|
||||
},
|
||||
setupStorageFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(storageListWith3items, nil)
|
||||
},
|
||||
expectedOutcome: false,
|
||||
},
|
||||
{
|
||||
name: "both stores are in sync - fail to list from storage",
|
||||
setupLegacyFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(legacyListWith3items, nil)
|
||||
},
|
||||
setupStorageFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(storageListWith3items, errors.New("error"))
|
||||
},
|
||||
expectedOutcome: false,
|
||||
},
|
||||
{
|
||||
name: "storage is missing 1 entry (foo4)",
|
||||
setupLegacyFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(legacyListWith4items, nil)
|
||||
},
|
||||
setupStorageFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(storageListWith3items, nil)
|
||||
m.On("Update", mock.Anything, "foo4", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(exampleObj, false, nil)
|
||||
},
|
||||
expectedOutcome: true,
|
||||
},
|
||||
{
|
||||
name: "storage needs to be update (foo2 is different)",
|
||||
setupLegacyFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(legacyListWith3itemsObj2IsDifferent, nil)
|
||||
},
|
||||
setupStorageFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(storageListWith3items, nil)
|
||||
m.On("Update", mock.Anything, "foo2", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(exampleObj, false, nil)
|
||||
},
|
||||
expectedOutcome: true,
|
||||
},
|
||||
{
|
||||
name: "storage is missing 1 entry (foo4) - fail to upsert",
|
||||
setupLegacyFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(legacyListWith4items, nil)
|
||||
},
|
||||
setupStorageFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(storageListWith3items, nil)
|
||||
m.On("Update", mock.Anything, "foo4", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(exampleObj, false, errors.New("error"))
|
||||
},
|
||||
expectedOutcome: false,
|
||||
},
|
||||
{
|
||||
name: "storage has an extra 1 entry (foo4)",
|
||||
setupLegacyFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(legacyListWith3items, nil)
|
||||
},
|
||||
setupStorageFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(storageListWith4items, nil)
|
||||
m.On("Delete", mock.Anything, "foo4", mock.Anything, mock.Anything).Return(exampleObj, false, nil)
|
||||
},
|
||||
expectedOutcome: true,
|
||||
},
|
||||
{
|
||||
name: "storage has an extra 1 entry (foo4) - fail to delete",
|
||||
setupLegacyFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(legacyListWith3items, nil)
|
||||
},
|
||||
setupStorageFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(storageListWith4items, nil)
|
||||
m.On("Delete", mock.Anything, "foo4", mock.Anything, mock.Anything).Return(exampleObj, false, errors.New("error"))
|
||||
},
|
||||
expectedOutcome: false,
|
||||
},
|
||||
{
|
||||
name: "storage is missing 1 entry (foo3) and has an extra 1 entry (foo4)",
|
||||
setupLegacyFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(legacyListWith3items, nil)
|
||||
},
|
||||
setupStorageFn: func(m *mock.Mock) {
|
||||
m.On("List", mock.Anything, mock.Anything).Return(storageListWith3itemsMissingFoo2, nil)
|
||||
m.On("Update", mock.Anything, "foo2", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(exampleObj, false, nil)
|
||||
m.On("Delete", mock.Anything, "foo4", mock.Anything, mock.Anything).Return(exampleObj, false, nil)
|
||||
},
|
||||
expectedOutcome: true,
|
||||
},
|
||||
}
|
||||
|
||||
// mode 1
|
||||
for _, tt := range tests {
|
||||
t.Run("Mode-1-"+tt.name, func(t *testing.T) {
|
||||
l := (LegacyStorage)(nil)
|
||||
s := (Storage)(nil)
|
||||
lm := &mock.Mock{}
|
||||
um := &mock.Mock{}
|
||||
|
||||
ls := legacyStoreMock{lm, l}
|
||||
us := storageMock{um, s}
|
||||
|
||||
if tt.setupLegacyFn != nil {
|
||||
tt.setupLegacyFn(lm)
|
||||
}
|
||||
if tt.setupStorageFn != nil {
|
||||
tt.setupStorageFn(um)
|
||||
}
|
||||
|
||||
outcome, err := legacyToUnifiedStorageDataSyncer(context.Background(), Mode1, ls, us, "test.kind", p, &fakeServerLock{}, &request.RequestInfo{})
|
||||
if tt.wantErr {
|
||||
assert.Error(t, err)
|
||||
return
|
||||
}
|
||||
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, tt.expectedOutcome, outcome)
|
||||
})
|
||||
}
|
||||
|
||||
// mode 2
|
||||
for _, tt := range tests {
|
||||
t.Run("Mode-2-"+tt.name, func(t *testing.T) {
|
||||
l := (LegacyStorage)(nil)
|
||||
s := (Storage)(nil)
|
||||
lm := &mock.Mock{}
|
||||
um := &mock.Mock{}
|
||||
|
||||
ls := legacyStoreMock{lm, l}
|
||||
us := storageMock{um, s}
|
||||
|
||||
if tt.setupLegacyFn != nil {
|
||||
tt.setupLegacyFn(lm)
|
||||
}
|
||||
if tt.setupStorageFn != nil {
|
||||
tt.setupStorageFn(um)
|
||||
}
|
||||
|
||||
outcome, err := legacyToUnifiedStorageDataSyncer(context.Background(), Mode2, ls, us, "test.kind", p, &fakeServerLock{}, &request.RequestInfo{})
|
||||
if tt.wantErr {
|
||||
assert.Error(t, err)
|
||||
return
|
||||
}
|
||||
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, tt.expectedOutcome, outcome)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,7 @@
|
||||
package rest
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
@@ -97,15 +98,15 @@ func (m *dualWriterMetrics) recordOutcome(mode string, name string, areEqual boo
|
||||
m.outcome.WithLabelValues(mode, name, method).Observe(observeValue)
|
||||
}
|
||||
|
||||
func (m *dualWriterMetrics) recordDataSyncerDuration(isError bool, mode string, resource string, startFrom time.Time) {
|
||||
func (m *dualWriterMetrics) recordDataSyncerDuration(isError bool, mode DualWriterMode, resource string, startFrom time.Time) {
|
||||
duration := time.Since(startFrom).Seconds()
|
||||
m.syncer.WithLabelValues(strconv.FormatBool(isError), mode, resource).Observe(duration)
|
||||
m.syncer.WithLabelValues(strconv.FormatBool(isError), fmt.Sprintf("%d", mode), resource).Observe(duration)
|
||||
}
|
||||
|
||||
func (m *dualWriterMetrics) recordDataSyncerOutcome(mode string, resource string, synced bool) {
|
||||
func (m *dualWriterMetrics) recordDataSyncerOutcome(mode DualWriterMode, resource string, synced bool) {
|
||||
var observeValue float64
|
||||
if !synced {
|
||||
observeValue = 1
|
||||
}
|
||||
m.syncerOutcome.WithLabelValues(mode, resource).Observe(observeValue)
|
||||
m.syncerOutcome.WithLabelValues(fmt.Sprintf("%d", mode), resource).Observe(observeValue)
|
||||
}
|
||||
|
||||
@@ -11,6 +11,8 @@ import (
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/urfave/cli/v2"
|
||||
)
|
||||
|
||||
const (
|
||||
@@ -30,6 +32,12 @@ func logError(message string, err error) int {
|
||||
return 1
|
||||
}
|
||||
|
||||
// RunCmdCLI adapts RunCmd to the urfave/cli action signature. It terminates
// the process with RunCmd's exit code via os.Exit, so the trailing return is
// never reached; it exists only to satisfy the cli.ActionFunc signature.
func RunCmdCLI(c *cli.Context) error {
	os.Exit(RunCmd())

	return nil
}
|
||||
|
||||
// RunCmd runs the build command and returns the exit code
|
||||
func RunCmd() int {
|
||||
opts := BuildOptsFromFlags()
|
||||
|
||||
@@ -2,20 +2,6 @@ package main
|
||||
|
||||
import "github.com/urfave/cli/v2"
|
||||
|
||||
// ArgCountWrapper will cause the action to fail if there were not exactly `num` args provided.
|
||||
func ArgCountWrapper(num int, action cli.ActionFunc) cli.ActionFunc {
|
||||
return func(ctx *cli.Context) error {
|
||||
if ctx.NArg() != num {
|
||||
if err := cli.ShowSubcommandHelp(ctx); err != nil {
|
||||
return cli.Exit(err.Error(), 1)
|
||||
}
|
||||
return cli.Exit("", 1)
|
||||
}
|
||||
|
||||
return action(ctx)
|
||||
}
|
||||
}
|
||||
|
||||
// MaxArgCountWrapper will cause the action to fail if there were more than `num` args provided.
|
||||
func MaxArgCountWrapper(max int, action cli.ActionFunc) cli.ActionFunc {
|
||||
return func(ctx *cli.Context) error {
|
||||
|
||||
@@ -1,68 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
|
||||
"github.com/urfave/cli/v2"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/compilers"
|
||||
"github.com/grafana/grafana/pkg/build/config"
|
||||
"github.com/grafana/grafana/pkg/build/errutil"
|
||||
"github.com/grafana/grafana/pkg/build/grafana"
|
||||
"github.com/grafana/grafana/pkg/build/syncutil"
|
||||
)
|
||||
|
||||
// BuildBackend builds the Grafana back-end binaries for every variant in the
// release build config, scheduling each variant build on a worker pool sized
// by the --jobs flag. It returns a cli exit error on the first build failure.
func BuildBackend(ctx *cli.Context) error {
	metadata, err := config.GenerateMetadata(ctx)
	if err != nil {
		return err
	}
	version := metadata.GrafanaVersion

	var (
		edition = config.Edition(ctx.String("edition"))
		cfg     = config.Config{
			NumWorkers: ctx.Int("jobs"),
		}
	)

	buildConfig, err := config.GetBuildConfig(metadata.ReleaseMode.Mode)
	if err != nil {
		return fmt.Errorf("could not get version / package info for mode '%s': %w", metadata.ReleaseMode.Mode, err)
	}

	const grafanaDir = "."

	log.Printf("Building Grafana back-end, version %q, %s edition, variants [%v]",
		version, edition, buildConfig.Variants)

	p := syncutil.NewWorkerPool(cfg.NumWorkers)
	defer p.Close()

	// cross-compilers must be present before any variant build starts
	if err := compilers.Install(); err != nil {
		return cli.Exit(err.Error(), 1)
	}

	g, _ := errutil.GroupWithContext(ctx.Context)
	for _, variant := range buildConfig.Variants {
		variant := variant // capture the loop variable for the closure (pre-Go 1.22 semantics)

		opts := grafana.BuildVariantOpts{
			Variant:    variant,
			Edition:    edition,
			Version:    version,
			GrafanaDir: grafanaDir,
		}

		// run each variant build on the pool; the group collects the first error
		p.Schedule(g.Wrap(func() error {
			return grafana.BuildVariant(ctx.Context, opts)
		}))
	}
	if err := g.Wait(); err != nil {
		return cli.Exit(err.Error(), 1)
	}

	log.Println("Successfully built back-end binaries!")
	return nil
}
|
||||
@@ -1,51 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"log"
|
||||
|
||||
"github.com/urfave/cli/v2"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/config"
|
||||
"github.com/grafana/grafana/pkg/build/docker"
|
||||
"github.com/grafana/grafana/pkg/build/gcloud"
|
||||
)
|
||||
|
||||
// BuildDocker builds the Grafana Docker images for every architecture in the
// release build config, optionally Ubuntu-based, activating the GCP service
// account first when the config says the images should be saved.
func BuildDocker(c *cli.Context) error {
	if err := docker.Init(); err != nil {
		return err
	}

	metadata, err := config.GenerateMetadata(c)
	if err != nil {
		return err
	}

	useUbuntu := c.Bool("ubuntu")
	buildConfig, err := config.GetBuildConfig(metadata.ReleaseMode.Mode)
	if err != nil {
		return err
	}

	shouldSave := buildConfig.Docker.ShouldSave
	if shouldSave {
		// saving images requires GCP credentials to be active
		if err := gcloud.ActivateServiceAccount(); err != nil {
			return err
		}
	}

	edition := config.Edition(c.String("edition"))

	version := metadata.GrafanaVersion

	log.Printf("Building Docker images, version %s, %s edition, Ubuntu based: %v...", version, edition,
		useUbuntu)

	// build sequentially, one image per configured architecture
	for _, arch := range buildConfig.Docker.Architectures {
		if _, err := docker.BuildImage(version, arch, ".", useUbuntu, shouldSave, edition, metadata.ReleaseMode.Mode); err != nil {
			return cli.Exit(err.Error(), 1)
		}
	}

	log.Println("Successfully built Docker images!")
	return nil
}
|
||||
@@ -1,39 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"log"
|
||||
|
||||
"github.com/urfave/cli/v2"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/config"
|
||||
"github.com/grafana/grafana/pkg/build/errutil"
|
||||
"github.com/grafana/grafana/pkg/build/frontend"
|
||||
"github.com/grafana/grafana/pkg/build/syncutil"
|
||||
)
|
||||
|
||||
// BuildFrontend builds the Grafana front-end assets on a worker pool whose
// size and build mode come from the CLI context and release metadata.
func BuildFrontend(c *cli.Context) error {
	metadata, err := config.GenerateMetadata(c)
	if err != nil {
		return err
	}

	cfg, mode, err := frontend.GetConfig(c, metadata)
	if err != nil {
		return err
	}

	p := syncutil.NewWorkerPool(cfg.NumWorkers)
	defer p.Close()

	g, _ := errutil.GroupWithContext(c.Context)
	if err := frontend.Build(mode, frontend.GrafanaDir, p, g); err != nil {
		return err
	}
	// wait for all scheduled build tasks to finish before declaring success
	if err := g.Wait(); err != nil {
		return err
	}

	log.Println("Successfully built Grafana front-end!")

	return nil
}
|
||||
@@ -1,38 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"log"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/config"
|
||||
"github.com/grafana/grafana/pkg/build/errutil"
|
||||
"github.com/grafana/grafana/pkg/build/frontend"
|
||||
"github.com/grafana/grafana/pkg/build/syncutil"
|
||||
"github.com/urfave/cli/v2"
|
||||
)
|
||||
|
||||
// BuildFrontendPackages builds the publishable Grafana front-end npm packages
// on a worker pool, using the package version and mode derived from release
// metadata. Failures are surfaced as cli exit errors.
func BuildFrontendPackages(c *cli.Context) error {
	metadata, err := config.GenerateMetadata(c)
	if err != nil {
		return err
	}

	cfg, mode, err := frontend.GetConfig(c, metadata)
	if err != nil {
		return err
	}

	p := syncutil.NewWorkerPool(cfg.NumWorkers)
	defer p.Close()

	g, _ := errutil.GroupWithContext(c.Context)
	if err := frontend.BuildFrontendPackages(cfg.PackageVersion, mode, frontend.GrafanaDir, p, g); err != nil {
		return cli.Exit(err.Error(), 1)
	}
	// wait for all scheduled package builds to finish
	if err := g.Wait(); err != nil {
		return cli.Exit(err.Error(), 1)
	}

	log.Println("Successfully built Grafana front-end packages!")

	return nil
}
|
||||
@@ -1,53 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"log"
|
||||
|
||||
"github.com/urfave/cli/v2"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/config"
|
||||
"github.com/grafana/grafana/pkg/build/errutil"
|
||||
"github.com/grafana/grafana/pkg/build/plugins"
|
||||
"github.com/grafana/grafana/pkg/build/syncutil"
|
||||
)
|
||||
|
||||
// BuildInternalPlugins builds the plug-ins bundled with Grafana on a worker
// pool and then downloads the remaining plug-ins into the local grafana dir.
// Failures are surfaced as cli exit errors.
func BuildInternalPlugins(c *cli.Context) error {
	cfg := config.Config{
		NumWorkers: c.Int("jobs"),
	}

	const grafanaDir = "."
	metadata, err := config.GenerateMetadata(c)
	if err != nil {
		return err
	}
	buildConfig, err := config.GetBuildConfig(metadata.ReleaseMode.Mode)
	if err != nil {
		return err
	}

	log.Println("Building internal Grafana plug-ins...")

	ctx := context.Background()

	p := syncutil.NewWorkerPool(cfg.NumWorkers)
	defer p.Close()

	var g *errutil.Group
	g, ctx = errutil.GroupWithContext(ctx)
	if err := plugins.Build(ctx, grafanaDir, p, g, buildConfig); err != nil {
		return cli.Exit(err.Error(), 1)
	}
	// wait for all scheduled plug-in builds before downloading
	if err := g.Wait(); err != nil {
		return cli.Exit(err.Error(), 1)
	}

	if err := plugins.Download(ctx, grafanaDir, p); err != nil {
		return cli.Exit(err.Error(), 1)
	}

	log.Println("Successfully built Grafana plug-ins!")

	return nil
}
|
||||
@@ -1,133 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"strconv"
|
||||
|
||||
"github.com/urfave/cli/v2"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/env"
|
||||
"github.com/grafana/grafana/pkg/build/git"
|
||||
)
|
||||
|
||||
// checkOpts are options used to create a new GitHub check for the enterprise downstream test.
|
||||
type checkOpts struct {
|
||||
SHA string
|
||||
URL string
|
||||
Branch string
|
||||
PR int
|
||||
}
|
||||
|
||||
func getCheckOpts(args []string) (*checkOpts, error) {
|
||||
branch, ok := env.Lookup("DRONE_SOURCE_BRANCH", args)
|
||||
if !ok {
|
||||
return nil, cli.Exit("Unable to retrieve build source branch", 1)
|
||||
}
|
||||
|
||||
var (
|
||||
rgx = git.PRCheckRegexp()
|
||||
matches = rgx.FindStringSubmatch(branch)
|
||||
)
|
||||
|
||||
sha, ok := env.Lookup("SOURCE_COMMIT", args)
|
||||
if !ok {
|
||||
if matches == nil || len(matches) <= 1 {
|
||||
return nil, cli.Exit("Unable to retrieve source commit", 1)
|
||||
}
|
||||
sha = matches[2]
|
||||
}
|
||||
|
||||
url, ok := env.Lookup("DRONE_BUILD_LINK", args)
|
||||
if !ok {
|
||||
return nil, cli.Exit(`missing environment variable "DRONE_BUILD_LINK"`, 1)
|
||||
}
|
||||
|
||||
prStr, ok := env.Lookup("OSS_PULL_REQUEST", args)
|
||||
if !ok {
|
||||
if matches == nil || len(matches) <= 1 {
|
||||
return nil, cli.Exit("Unable to retrieve PR number", 1)
|
||||
}
|
||||
|
||||
prStr = matches[1]
|
||||
}
|
||||
|
||||
pr, err := strconv.Atoi(prStr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &checkOpts{
|
||||
Branch: branch,
|
||||
PR: pr,
|
||||
SHA: sha,
|
||||
URL: url,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// EnterpriseCheckBegin creates the GitHub check and signals the beginning of the downstream build / test process
func EnterpriseCheckBegin(c *cli.Context) error {
	var (
		ctx    = c.Context
		client = git.NewGitHubClient(ctx, c.String("github-token"))
	)

	// resolve SHA / URL / branch / PR from the process environment
	opts, err := getCheckOpts(os.Environ())
	if err != nil {
		return err
	}

	// mark the enterprise status on the commit as pending
	if _, err = git.CreateEnterpriseStatus(ctx, client.Repositories, opts.SHA, opts.URL, "pending"); err != nil {
		return err
	}

	return nil
}
|
||||
|
||||
// EnterpriseCheckSuccess finalizes the downstream enterprise check as successful.
func EnterpriseCheckSuccess(c *cli.Context) error {
	return completeEnterpriseCheck(c, true)
}

// EnterpriseCheckFail finalizes the downstream enterprise check as failed.
func EnterpriseCheckFail(c *cli.Context) error {
	return completeEnterpriseCheck(c, false)
}
|
||||
|
||||
// completeEnterpriseCheck finalizes the enterprise check: it sets the GitHub
// commit status to success/failure, deletes the temporary enterprise branch
// when the source branch matches the PR-check pattern, and labels the OSS
// pull request with the outcome.
func completeEnterpriseCheck(c *cli.Context, success bool) error {
	var (
		ctx    = c.Context
		client = git.NewGitHubClient(ctx, c.String("github-token"))
	)

	// Update the pull request labels
	opts, err := getCheckOpts(os.Environ())
	if err != nil {
		return err
	}

	status := "failure"
	if success {
		status = "success"
	}

	// Update the GitHub check...
	if _, err := git.CreateEnterpriseStatus(ctx, client.Repositories, opts.SHA, opts.URL, status); err != nil {
		return err
	}

	// Delete branch if needed
	log.Printf("Checking branch '%s' against '%s'", git.PRCheckRegexp().String(), opts.Branch)
	if git.PRCheckRegexp().MatchString(opts.Branch) {
		log.Println("Deleting branch", opts.Branch)
		if err := git.DeleteEnterpriseBranch(ctx, client.Git, opts.Branch); err != nil {
			return fmt.Errorf("error deleting enterprise branch: %w", err)
		}
	}

	label := "enterprise-failed"
	if success {
		label = "enterprise-ok"
	}

	return git.AddLabelToPR(ctx, client.Issues, opts.PR, label)
}
|
||||
@@ -1,69 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestGetCheckOpts(t *testing.T) {
|
||||
t.Run("it should return the checkOpts if the correct environment variables are set", func(t *testing.T) {
|
||||
args := []string{
|
||||
"SOURCE_COMMIT=1234",
|
||||
"DRONE_SOURCE_BRANCH=test",
|
||||
"DRONE_BUILD_LINK=http://example.com",
|
||||
"OSS_PULL_REQUEST=1",
|
||||
}
|
||||
|
||||
opts, err := getCheckOpts(args)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, opts.SHA, "1234")
|
||||
require.Equal(t, opts.URL, "http://example.com")
|
||||
})
|
||||
t.Run("it should return an error if SOURCE_COMMIT is not set", func(t *testing.T) {
|
||||
args := []string{
|
||||
"DRONE_BUILD_LINK=http://example.com",
|
||||
"DRONE_SOURCE_BRANCH=test",
|
||||
"DRONE_BUILD_LINK=http://example.com",
|
||||
"OSS_PULL_REQUEST=1",
|
||||
}
|
||||
|
||||
opts, err := getCheckOpts(args)
|
||||
require.Nil(t, opts)
|
||||
require.Error(t, err)
|
||||
})
|
||||
t.Run("it should return an error if DRONE_BUILD_LINK is not set", func(t *testing.T) {
|
||||
args := []string{
|
||||
"SOURCE_COMMIT=1234",
|
||||
"DRONE_SOURCE_BRANCH=test",
|
||||
"OSS_PULL_REQUEST=1",
|
||||
}
|
||||
|
||||
opts, err := getCheckOpts(args)
|
||||
require.Nil(t, opts)
|
||||
require.Error(t, err)
|
||||
})
|
||||
t.Run("it should return an error if OSS_PULL_REQUEST is not set", func(t *testing.T) {
|
||||
args := []string{
|
||||
"SOURCE_COMMIT=1234",
|
||||
"DRONE_SOURCE_BRANCH=test",
|
||||
"DRONE_BUILD_LINK=http://example.com",
|
||||
}
|
||||
|
||||
opts, err := getCheckOpts(args)
|
||||
require.Nil(t, opts)
|
||||
require.Error(t, err)
|
||||
})
|
||||
t.Run("it should return an error if OSS_PULL_REQUEST is not an integer", func(t *testing.T) {
|
||||
args := []string{
|
||||
"SOURCE_COMMIT=1234",
|
||||
"DRONE_SOURCE_BRANCH=test",
|
||||
"DRONE_BUILD_LINK=http://example.com",
|
||||
"OSS_PULL_REQUEST=http://example.com",
|
||||
}
|
||||
|
||||
opts, err := getCheckOpts(args)
|
||||
require.Nil(t, opts)
|
||||
require.Error(t, err)
|
||||
})
|
||||
}
|
||||
@@ -1,32 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/urfave/cli/v2"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/config"
|
||||
)
|
||||
|
||||
func ExportVersion(c *cli.Context) error {
|
||||
metadata, err := config.GenerateMetadata(c)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
const distDir = "dist"
|
||||
if err := os.RemoveAll(distDir); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := os.Mkdir(distDir, 0750); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// nolint:gosec
|
||||
if err := os.WriteFile(filepath.Join(distDir, "grafana.version"), []byte(metadata.GrafanaVersion), 0664); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -4,7 +4,7 @@ import (
|
||||
"fmt"
|
||||
"log"
|
||||
"os/exec"
|
||||
"strings"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/urfave/cli/v2"
|
||||
|
||||
@@ -18,6 +18,25 @@ const (
|
||||
ubuntu = "ubuntu"
|
||||
)
|
||||
|
||||
// GetImageFiles returns the list of image (.img, but should be .tar because they are tar archives) files that are
|
||||
// created in the 'tag' process and stored in the prerelease bucket, waiting to be released.
|
||||
func GetImageFiles(grafana string, version string, architectures []config.Architecture) []string {
|
||||
bases := []string{alpine, ubuntu}
|
||||
images := []string{}
|
||||
for _, base := range bases {
|
||||
for _, arch := range architectures {
|
||||
image := fmt.Sprintf("%s-%s-%s.img", grafana, version, arch)
|
||||
if base == "ubuntu" {
|
||||
image = fmt.Sprintf("%s-%s-ubuntu-%s.img", grafana, version, arch)
|
||||
}
|
||||
|
||||
images = append(images, image)
|
||||
}
|
||||
}
|
||||
|
||||
return images
|
||||
}
|
||||
|
||||
func FetchImages(c *cli.Context) error {
|
||||
if c.NArg() > 0 {
|
||||
if err := cli.ShowSubcommandHelp(c); err != nil {
|
||||
@@ -44,74 +63,65 @@ func FetchImages(c *cli.Context) error {
|
||||
Tag: metadata.GrafanaVersion,
|
||||
}
|
||||
|
||||
edition := fmt.Sprintf("-%s", cfg.Edition)
|
||||
grafana := "grafana"
|
||||
if cfg.Edition == "enterprise" {
|
||||
grafana = "grafana-enterprise"
|
||||
}
|
||||
if cfg.Edition == "enterprise2" {
|
||||
grafana = "grafana-enterprise2"
|
||||
}
|
||||
if cfg.Edition == "grafana" || cfg.Edition == "oss" {
|
||||
grafana = "grafana-oss"
|
||||
}
|
||||
|
||||
err = gcloud.ActivateServiceAccount()
|
||||
if err != nil {
|
||||
baseURL := fmt.Sprintf("gs://%s/%s/", cfg.Bucket, cfg.Tag)
|
||||
images := GetImageFiles(grafana, cfg.Tag, cfg.Archs)
|
||||
|
||||
log.Printf("Fetching images [%v]", images)
|
||||
|
||||
if err := gcloud.ActivateServiceAccount(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var basesStr []string
|
||||
for _, base := range cfg.Distribution {
|
||||
switch base {
|
||||
case alpine:
|
||||
basesStr = append(basesStr, "")
|
||||
case ubuntu:
|
||||
basesStr = append(basesStr, "-ubuntu")
|
||||
default:
|
||||
return fmt.Errorf("unrecognized base %q", base)
|
||||
}
|
||||
}
|
||||
|
||||
err = downloadFromGCS(cfg, basesStr, edition)
|
||||
if err != nil {
|
||||
if err := DownloadImages(baseURL, images, "."); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = loadImages(cfg, basesStr, edition)
|
||||
if err != nil {
|
||||
if err := LoadImages(images, "."); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func loadImages(cfg docker.Config, basesStr []string, edition string) error {
|
||||
log.Println("Loading fetched image files to local docker registry...")
|
||||
log.Printf("Number of images to be loaded: %d\n", len(basesStr)*len(cfg.Archs))
|
||||
for _, base := range basesStr {
|
||||
for _, arch := range cfg.Archs {
|
||||
imageFilename := fmt.Sprintf("grafana%s-%s%s-%s.img", edition, cfg.Tag, base, arch)
|
||||
log.Printf("image file name: %s\n", imageFilename)
|
||||
//nolint:gosec
|
||||
cmd := exec.Command("docker", "load", "-i", imageFilename)
|
||||
cmd.Dir = "."
|
||||
out, err := cmd.CombinedOutput()
|
||||
if err != nil {
|
||||
log.Printf("out: %s\n", out)
|
||||
return fmt.Errorf("error loading image: %q", err)
|
||||
}
|
||||
log.Printf("Successfully loaded %s!\n %s\n", fmt.Sprintf("grafana%s-%s%s-%s", edition, cfg.Tag, base, arch), out)
|
||||
// LoadImages uses the `docker load -i` command to load the image tar file into the docker daemon so that it can be
|
||||
// tagged and pushed.
|
||||
func LoadImages(images []string, source string) error {
|
||||
p := filepath.Clean(source)
|
||||
for _, image := range images {
|
||||
image := filepath.Join(p, image)
|
||||
log.Println("Loading image", image)
|
||||
//nolint:gosec
|
||||
cmd := exec.Command("docker", "load", "-i", image)
|
||||
cmd.Dir = "."
|
||||
out, err := cmd.CombinedOutput()
|
||||
if err != nil {
|
||||
log.Printf("out: %s\n", out)
|
||||
return fmt.Errorf("error loading image: %q", err)
|
||||
}
|
||||
log.Println("Loaded image", image)
|
||||
}
|
||||
log.Println("Images successfully loaded!")
|
||||
return nil
|
||||
}
|
||||
|
||||
func downloadFromGCS(cfg docker.Config, basesStr []string, edition string) error {
|
||||
log.Printf("Downloading Docker images from GCS bucket: %s\n", cfg.Bucket)
|
||||
|
||||
for _, base := range basesStr {
|
||||
for _, arch := range cfg.Archs {
|
||||
src := fmt.Sprintf("gs://%s/%s/grafana%s-%s%s-%s.img", cfg.Bucket, cfg.Tag, edition, cfg.Tag, base, arch)
|
||||
args := strings.Split(fmt.Sprintf("-m cp -r %s .", src), " ")
|
||||
//nolint:gosec
|
||||
cmd := exec.Command("gsutil", args...)
|
||||
out, err := cmd.CombinedOutput()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to download: %w\n%s", err, out)
|
||||
}
|
||||
func DownloadImages(baseURL string, images []string, destination string) error {
|
||||
for _, image := range images {
|
||||
p := baseURL + image
|
||||
log.Println("Downloading image", p)
|
||||
//nolint:gosec
|
||||
cmd := exec.Command("gsutil", "-m", "cp", "-r", p, destination)
|
||||
out, err := cmd.CombinedOutput()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to download: %w\n%s", err, out)
|
||||
}
|
||||
}
|
||||
log.Printf("Successfully fetched image files from %s bucket!\n", cfg.Bucket)
|
||||
return nil
|
||||
}
|
||||
|
||||
48
pkg/build/cmd/fetchimages_test.go
Normal file
48
pkg/build/cmd/fetchimages_test.go
Normal file
@@ -0,0 +1,48 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/config"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestGetImageFiles(t *testing.T) {
|
||||
var (
|
||||
architectures = []config.Architecture{
|
||||
config.ArchAMD64,
|
||||
config.ArchARM64,
|
||||
config.ArchARMv7,
|
||||
}
|
||||
)
|
||||
|
||||
t.Run("1.2.3", func(t *testing.T) {
|
||||
expect := []string{
|
||||
"grafana-oss-1.2.3-amd64.img",
|
||||
"grafana-oss-1.2.3-arm64.img",
|
||||
"grafana-oss-1.2.3-armv7.img",
|
||||
"grafana-oss-1.2.3-ubuntu-amd64.img",
|
||||
"grafana-oss-1.2.3-ubuntu-arm64.img",
|
||||
"grafana-oss-1.2.3-ubuntu-armv7.img",
|
||||
}
|
||||
|
||||
res := GetImageFiles("grafana-oss", "1.2.3", architectures)
|
||||
|
||||
require.Equal(t, expect, res)
|
||||
})
|
||||
|
||||
t.Run("1.2.3+example-01", func(t *testing.T) {
|
||||
expect := []string{
|
||||
"grafana-oss-1.2.3+example-01-amd64.img",
|
||||
"grafana-oss-1.2.3+example-01-arm64.img",
|
||||
"grafana-oss-1.2.3+example-01-armv7.img",
|
||||
"grafana-oss-1.2.3+example-01-ubuntu-amd64.img",
|
||||
"grafana-oss-1.2.3+example-01-ubuntu-arm64.img",
|
||||
"grafana-oss-1.2.3+example-01-ubuntu-armv7.img",
|
||||
}
|
||||
|
||||
res := GetImageFiles("grafana-oss", "1.2.3+example-01", architectures)
|
||||
|
||||
require.Equal(t, expect, res)
|
||||
})
|
||||
}
|
||||
@@ -16,37 +16,15 @@ var (
|
||||
Usage: "The edition of Grafana to build (oss or enterprise)",
|
||||
Value: "oss",
|
||||
}
|
||||
variantsFlag = cli.StringFlag{
|
||||
Name: "variants",
|
||||
Usage: "Comma-separated list of variants to build",
|
||||
}
|
||||
triesFlag = cli.IntFlag{
|
||||
Name: "tries",
|
||||
Usage: "Specify number of tries before failing",
|
||||
Value: 1,
|
||||
}
|
||||
noInstallDepsFlag = cli.BoolFlag{
|
||||
Name: "no-install-deps",
|
||||
Usage: "Don't install dependencies",
|
||||
}
|
||||
signingAdminFlag = cli.BoolFlag{
|
||||
Name: "signing-admin",
|
||||
Usage: "Use manifest signing admin API endpoint?",
|
||||
}
|
||||
signFlag = cli.BoolFlag{
|
||||
Name: "sign",
|
||||
Usage: "Enable plug-in signing (you must set GRAFANA_API_KEY)",
|
||||
}
|
||||
dryRunFlag = cli.BoolFlag{
|
||||
Name: "dry-run",
|
||||
Usage: "Only simulate actions",
|
||||
}
|
||||
gitHubTokenFlag = cli.StringFlag{
|
||||
Name: "github-token",
|
||||
Value: "",
|
||||
EnvVars: []string{"GITHUB_TOKEN"},
|
||||
Usage: "GitHub token",
|
||||
}
|
||||
tagFlag = cli.StringFlag{
|
||||
Name: "tag",
|
||||
Usage: "Grafana version tag",
|
||||
|
||||
@@ -19,6 +19,7 @@ import (
|
||||
"github.com/grafana/grafana/pkg/build/config"
|
||||
"github.com/grafana/grafana/pkg/build/gcloud"
|
||||
"github.com/grafana/grafana/pkg/build/gcloud/storage"
|
||||
"github.com/grafana/grafana/pkg/build/gcom"
|
||||
"github.com/grafana/grafana/pkg/build/packaging"
|
||||
)
|
||||
|
||||
@@ -125,6 +126,48 @@ func getReleaseURLs() (string, string, error) {
|
||||
return pconf.Grafana.WhatsNewURL, pconf.Grafana.ReleaseNotesURL, nil
|
||||
}
|
||||
|
||||
func Builds(baseURL *url.URL, grafana, version string, packages []packaging.BuildArtifact) ([]GCOMPackage, error) {
|
||||
builds := make([]GCOMPackage, len(packages))
|
||||
for i, v := range packages {
|
||||
var (
|
||||
os = v.Distro
|
||||
arch = v.Arch
|
||||
)
|
||||
|
||||
if v.Distro == "windows" {
|
||||
os = "win"
|
||||
if v.Ext == "msi" {
|
||||
os = "win-installer"
|
||||
}
|
||||
}
|
||||
|
||||
if v.Distro == "rhel" {
|
||||
if arch == "aarch64" {
|
||||
arch = "arm64"
|
||||
}
|
||||
}
|
||||
|
||||
if v.Distro == "deb" {
|
||||
if arch == "armhf" {
|
||||
arch = "armv7"
|
||||
if v.RaspberryPi {
|
||||
log.Println(v.Distro, arch, "raspberrypi == true")
|
||||
arch = "armv6"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
u := gcom.GetURL(baseURL, version, grafana, v.Distro, v.Arch, v.Ext, v.Musl, v.RaspberryPi)
|
||||
builds[i] = GCOMPackage{
|
||||
OS: os,
|
||||
URL: u.String(),
|
||||
Arch: arch,
|
||||
}
|
||||
}
|
||||
|
||||
return builds, nil
|
||||
}
|
||||
|
||||
// publishPackages publishes packages to grafana.com.
|
||||
func publishPackages(cfg packaging.PublishConfig) error {
|
||||
log.Printf("Publishing Grafana packages, version %s, %s edition, %s mode, dryRun: %v, simulating: %v...\n",
|
||||
@@ -133,14 +176,17 @@ func publishPackages(cfg packaging.PublishConfig) error {
|
||||
versionStr := fmt.Sprintf("v%s", cfg.Version)
|
||||
log.Printf("Creating release %s at grafana.com...\n", versionStr)
|
||||
|
||||
var sfx string
|
||||
var pth string
|
||||
var (
|
||||
pth string
|
||||
grafana = "grafana"
|
||||
)
|
||||
|
||||
switch cfg.Edition {
|
||||
case config.EditionOSS:
|
||||
pth = "oss"
|
||||
case config.EditionEnterprise:
|
||||
grafana = "grafana-enterprise"
|
||||
pth = "enterprise"
|
||||
sfx = packaging.EnterpriseSfx
|
||||
default:
|
||||
return fmt.Errorf("unrecognized edition %q", cfg.Edition)
|
||||
}
|
||||
@@ -152,28 +198,19 @@ func publishPackages(cfg packaging.PublishConfig) error {
|
||||
pth = path.Join(pth, packaging.ReleaseFolder)
|
||||
}
|
||||
|
||||
product := fmt.Sprintf("grafana%s", sfx)
|
||||
pth = path.Join(pth, product)
|
||||
baseArchiveURL := fmt.Sprintf("https://dl.grafana.com/%s", pth)
|
||||
|
||||
builds := make([]buildRepr, len(packaging.ArtifactConfigs))
|
||||
for i, ba := range packaging.ArtifactConfigs {
|
||||
u := ba.GetURL(baseArchiveURL, cfg)
|
||||
|
||||
sha256, err := getSHA256(u)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
builds[i] = buildRepr{
|
||||
OS: ba.Os,
|
||||
URL: u,
|
||||
SHA256: string(sha256),
|
||||
Arch: ba.Arch,
|
||||
}
|
||||
pth = path.Join(pth)
|
||||
baseArchiveURL := &url.URL{
|
||||
Scheme: "https",
|
||||
Host: "dl.grafana.com",
|
||||
Path: pth,
|
||||
}
|
||||
|
||||
r := releaseRepr{
|
||||
builds, err := Builds(baseArchiveURL, grafana, cfg.Version, packaging.ArtifactConfigs)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
r := Release{
|
||||
Version: cfg.Version,
|
||||
ReleaseDate: time.Now().UTC(),
|
||||
Builds: builds,
|
||||
@@ -195,6 +232,15 @@ func publishPackages(cfg packaging.PublishConfig) error {
|
||||
return err
|
||||
}
|
||||
|
||||
for i, v := range r.Builds {
|
||||
sha, err := getSHA256(v.URL)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
r.Builds[i].SHA256 = string(sha)
|
||||
}
|
||||
|
||||
for _, b := range r.Builds {
|
||||
if err := postRequest(cfg, fmt.Sprintf("versions/%s/packages", cfg.Version), b,
|
||||
fmt.Sprintf("create build %s %s", b.OS, b.Arch)); err != nil {
|
||||
@@ -211,6 +257,7 @@ func publishPackages(cfg packaging.PublishConfig) error {
|
||||
|
||||
func getSHA256(u string) ([]byte, error) {
|
||||
shaURL := fmt.Sprintf("%s.sha256", u)
|
||||
|
||||
// nolint:gosec
|
||||
resp, err := http.Get(shaURL)
|
||||
if err != nil {
|
||||
@@ -232,7 +279,7 @@ func getSHA256(u string) ([]byte, error) {
|
||||
return sha256, nil
|
||||
}
|
||||
|
||||
func postRequest(cfg packaging.PublishConfig, pth string, obj any, descr string) error {
|
||||
func postRequest(cfg packaging.PublishConfig, pth string, body any, descr string) error {
|
||||
var sfx string
|
||||
switch cfg.Edition {
|
||||
case config.EditionOSS:
|
||||
@@ -243,7 +290,7 @@ func postRequest(cfg packaging.PublishConfig, pth string, obj any, descr string)
|
||||
}
|
||||
product := fmt.Sprintf("grafana%s", sfx)
|
||||
|
||||
jsonB, err := json.Marshal(obj)
|
||||
jsonB, err := json.Marshal(body)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to JSON encode release: %w", err)
|
||||
}
|
||||
@@ -303,20 +350,20 @@ func constructURL(product string, pth string) (string, error) {
|
||||
return u.String(), err
|
||||
}
|
||||
|
||||
type buildRepr struct {
|
||||
type GCOMPackage struct {
|
||||
OS string `json:"os"`
|
||||
URL string `json:"url"`
|
||||
SHA256 string `json:"sha256"`
|
||||
Arch string `json:"arch"`
|
||||
}
|
||||
|
||||
type releaseRepr struct {
|
||||
Version string `json:"version"`
|
||||
ReleaseDate time.Time `json:"releaseDate"`
|
||||
Stable bool `json:"stable"`
|
||||
Beta bool `json:"beta"`
|
||||
Nightly bool `json:"nightly"`
|
||||
WhatsNewURL string `json:"whatsNewUrl"`
|
||||
ReleaseNotesURL string `json:"releaseNotesUrl"`
|
||||
Builds []buildRepr `json:"-"`
|
||||
type Release struct {
|
||||
Version string `json:"version"`
|
||||
ReleaseDate time.Time `json:"releaseDate"`
|
||||
Stable bool `json:"stable"`
|
||||
Beta bool `json:"beta"`
|
||||
Nightly bool `json:"nightly"`
|
||||
WhatsNewURL string `json:"whatsNewUrl"`
|
||||
ReleaseNotesURL string `json:"releaseNotesUrl"`
|
||||
Builds []GCOMPackage `json:"-"`
|
||||
}
|
||||
|
||||
@@ -1,7 +1,14 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/url"
|
||||
"path"
|
||||
"testing"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/packaging"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func Test_constructURL(t *testing.T) {
|
||||
@@ -33,3 +40,221 @@ func Test_constructURL(t *testing.T) {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestBuilds(t *testing.T) {
|
||||
baseURL := &url.URL{
|
||||
Scheme: "https",
|
||||
Host: "dl.example.com",
|
||||
Path: path.Join("oss", "release"),
|
||||
}
|
||||
|
||||
version := "1.2.3"
|
||||
grafana := "grafana"
|
||||
packages := []packaging.BuildArtifact{
|
||||
{
|
||||
Distro: "deb",
|
||||
Arch: "arm64",
|
||||
Ext: "deb",
|
||||
},
|
||||
{
|
||||
Distro: "rhel",
|
||||
Arch: "aarch64",
|
||||
Ext: "rpm",
|
||||
},
|
||||
{
|
||||
Distro: "linux",
|
||||
Arch: "arm64",
|
||||
Ext: "tar.gz",
|
||||
},
|
||||
{
|
||||
Distro: "deb",
|
||||
Arch: "armhf",
|
||||
Ext: "deb",
|
||||
RaspberryPi: true,
|
||||
},
|
||||
{
|
||||
Distro: "deb",
|
||||
Arch: "armhf",
|
||||
Ext: "deb",
|
||||
},
|
||||
{
|
||||
Distro: "linux",
|
||||
Arch: "armv7",
|
||||
Ext: "tar.gz",
|
||||
},
|
||||
{
|
||||
Distro: "windows",
|
||||
Arch: "amd64",
|
||||
Ext: "zip",
|
||||
},
|
||||
{
|
||||
Distro: "windows",
|
||||
Arch: "amd64",
|
||||
Ext: "msi",
|
||||
},
|
||||
}
|
||||
|
||||
expect := []GCOMPackage{
|
||||
{
|
||||
URL: "https://dl.example.com/oss/release/grafana_1.2.3_arm64.deb",
|
||||
OS: "deb",
|
||||
Arch: "arm64",
|
||||
},
|
||||
{
|
||||
URL: "https://dl.example.com/oss/release/grafana-1.2.3-1.aarch64.rpm",
|
||||
OS: "rhel",
|
||||
Arch: "arm64",
|
||||
},
|
||||
{
|
||||
URL: "https://dl.example.com/oss/release/grafana-1.2.3.linux-arm64.tar.gz",
|
||||
OS: "linux",
|
||||
Arch: "arm64",
|
||||
},
|
||||
{
|
||||
URL: "https://dl.example.com/oss/release/grafana-rpi_1.2.3_armhf.deb",
|
||||
OS: "deb",
|
||||
Arch: "armv6",
|
||||
},
|
||||
{
|
||||
URL: "https://dl.example.com/oss/release/grafana_1.2.3_armhf.deb",
|
||||
OS: "deb",
|
||||
Arch: "armv7",
|
||||
},
|
||||
{
|
||||
URL: "https://dl.example.com/oss/release/grafana-1.2.3.linux-armv7.tar.gz",
|
||||
OS: "linux",
|
||||
Arch: "armv7",
|
||||
},
|
||||
{
|
||||
URL: "https://dl.example.com/oss/release/grafana-1.2.3.windows-amd64.zip",
|
||||
OS: "win",
|
||||
Arch: "amd64",
|
||||
},
|
||||
{
|
||||
URL: "https://dl.example.com/oss/release/grafana-1.2.3.windows-amd64.msi",
|
||||
OS: "win-installer",
|
||||
Arch: "amd64",
|
||||
},
|
||||
}
|
||||
|
||||
builds, err := Builds(baseURL, grafana, version, packages)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, len(expect), len(builds))
|
||||
|
||||
for i := range builds {
|
||||
t.Run(fmt.Sprintf("[%d/%d] %s", i+1, len(builds), expect[i].URL), func(t *testing.T) {
|
||||
assert.Equal(t, expect[i].URL, builds[i].URL)
|
||||
assert.Equal(t, expect[i].OS, builds[i].OS)
|
||||
assert.Equal(t, expect[i].Arch, builds[i].Arch)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestBuildsWithPlus(t *testing.T) {
|
||||
baseURL := &url.URL{
|
||||
Scheme: "https",
|
||||
Host: "dl.example.com",
|
||||
Path: path.Join("oss", "release"),
|
||||
}
|
||||
|
||||
version := "1.2.3+example-01"
|
||||
grafana := "grafana"
|
||||
packages := []packaging.BuildArtifact{
|
||||
{
|
||||
Distro: "deb",
|
||||
Arch: "arm64",
|
||||
Ext: "deb",
|
||||
},
|
||||
{
|
||||
Distro: "rhel",
|
||||
Arch: "aarch64",
|
||||
Ext: "rpm",
|
||||
},
|
||||
{
|
||||
Distro: "linux",
|
||||
Arch: "arm64",
|
||||
Ext: "tar.gz",
|
||||
},
|
||||
{
|
||||
Distro: "deb",
|
||||
Arch: "armhf",
|
||||
Ext: "deb",
|
||||
RaspberryPi: true,
|
||||
},
|
||||
{
|
||||
Distro: "deb",
|
||||
Arch: "armhf",
|
||||
Ext: "deb",
|
||||
},
|
||||
{
|
||||
Distro: "linux",
|
||||
Arch: "armv7",
|
||||
Ext: "tar.gz",
|
||||
},
|
||||
{
|
||||
Distro: "windows",
|
||||
Arch: "amd64",
|
||||
Ext: "zip",
|
||||
},
|
||||
{
|
||||
Distro: "windows",
|
||||
Arch: "amd64",
|
||||
Ext: "msi",
|
||||
},
|
||||
}
|
||||
|
||||
expect := []GCOMPackage{
|
||||
{
|
||||
URL: "https://dl.example.com/oss/release/grafana_1.2.3+example~01_arm64.deb",
|
||||
OS: "deb",
|
||||
Arch: "arm64",
|
||||
},
|
||||
{
|
||||
URL: "https://dl.example.com/oss/release/grafana-1.2.3+example~01-1.aarch64.rpm",
|
||||
OS: "rhel",
|
||||
Arch: "arm64",
|
||||
},
|
||||
{
|
||||
URL: "https://dl.example.com/oss/release/grafana-1.2.3+example-01.linux-arm64.tar.gz",
|
||||
OS: "linux",
|
||||
Arch: "arm64",
|
||||
},
|
||||
{
|
||||
URL: "https://dl.example.com/oss/release/grafana-rpi_1.2.3+example~01_armhf.deb",
|
||||
OS: "deb",
|
||||
Arch: "armv6",
|
||||
},
|
||||
{
|
||||
URL: "https://dl.example.com/oss/release/grafana_1.2.3+example~01_armhf.deb",
|
||||
OS: "deb",
|
||||
Arch: "armv7",
|
||||
},
|
||||
{
|
||||
URL: "https://dl.example.com/oss/release/grafana-1.2.3+example-01.linux-armv7.tar.gz",
|
||||
OS: "linux",
|
||||
Arch: "armv7",
|
||||
},
|
||||
{
|
||||
URL: "https://dl.example.com/oss/release/grafana-1.2.3+example-01.windows-amd64.zip",
|
||||
OS: "win",
|
||||
Arch: "amd64",
|
||||
},
|
||||
{
|
||||
URL: "https://dl.example.com/oss/release/grafana-1.2.3+example-01.windows-amd64.msi",
|
||||
OS: "win-installer",
|
||||
Arch: "amd64",
|
||||
},
|
||||
}
|
||||
|
||||
builds, err := Builds(baseURL, grafana, version, packages)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, len(expect), len(builds))
|
||||
|
||||
for i := range builds {
|
||||
t.Run(fmt.Sprintf("[%d/%d] %s", i+1, len(builds), expect[i].URL), func(t *testing.T) {
|
||||
assert.Equal(t, expect[i].URL, builds[i].URL)
|
||||
assert.Equal(t, expect[i].OS, builds[i].OS)
|
||||
assert.Equal(t, expect[i].Arch, builds[i].Arch)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,11 +3,9 @@ package main
|
||||
import (
|
||||
"log"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build"
|
||||
"github.com/urfave/cli/v2"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/docker"
|
||||
)
|
||||
|
||||
var additionalCommands []*cli.Command = make([]*cli.Command, 0, 5)
|
||||
@@ -21,28 +19,8 @@ func main() {
|
||||
app := cli.NewApp()
|
||||
app.Commands = cli.Commands{
|
||||
{
|
||||
Name: "build-backend",
|
||||
Usage: "Build one or more variants of back-end binaries",
|
||||
ArgsUsage: "[version]",
|
||||
Action: MaxArgCountWrapper(1, BuildBackend),
|
||||
Flags: []cli.Flag{
|
||||
&jobsFlag,
|
||||
&variantsFlag,
|
||||
&editionFlag,
|
||||
&buildIDFlag,
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "build-frontend-packages",
|
||||
Usage: "Build front-end packages",
|
||||
ArgsUsage: "[version]",
|
||||
Action: BuildFrontendPackages,
|
||||
Flags: []cli.Flag{
|
||||
&jobsFlag,
|
||||
&editionFlag,
|
||||
&buildIDFlag,
|
||||
&noInstallDepsFlag,
|
||||
},
|
||||
Name: "build",
|
||||
Action: build.RunCmdCLI,
|
||||
},
|
||||
{
|
||||
Name: "e2e-tests",
|
||||
@@ -71,44 +49,11 @@ func main() {
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "build-frontend",
|
||||
Usage: "Build front-end artifacts",
|
||||
ArgsUsage: "[version]",
|
||||
Action: MaxArgCountWrapper(1, BuildFrontend),
|
||||
Flags: []cli.Flag{
|
||||
&jobsFlag,
|
||||
&editionFlag,
|
||||
&buildIDFlag,
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "whatsnew-checker",
|
||||
Usage: "Checks whatsNewUrl in package.json for differences between the tag and the docs version",
|
||||
Action: WhatsNewChecker,
|
||||
},
|
||||
{
|
||||
Name: "build-docker",
|
||||
Usage: "Build Grafana Docker images",
|
||||
Action: MaxArgCountWrapper(1, BuildDocker),
|
||||
Flags: []cli.Flag{
|
||||
&jobsFlag,
|
||||
&editionFlag,
|
||||
&cli.BoolFlag{
|
||||
Name: "ubuntu",
|
||||
Usage: "Use Ubuntu base image",
|
||||
},
|
||||
&cli.BoolFlag{
|
||||
Name: "shouldSave",
|
||||
Usage: "Should save docker image to tarball",
|
||||
},
|
||||
&cli.StringFlag{
|
||||
Name: "archs",
|
||||
Value: strings.Join(docker.AllArchs, ","),
|
||||
Usage: "Comma separated architectures to build",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "upload-cdn",
|
||||
Usage: "Upload public/* to a cdn bucket",
|
||||
@@ -117,23 +62,6 @@ func main() {
|
||||
&editionFlag,
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "shellcheck",
|
||||
Usage: "Run shellcheck on shell scripts",
|
||||
Action: Shellcheck,
|
||||
},
|
||||
{
|
||||
Name: "build-plugins",
|
||||
Usage: "Build internal plug-ins",
|
||||
Action: MaxArgCountWrapper(1, BuildInternalPlugins),
|
||||
Flags: []cli.Flag{
|
||||
&jobsFlag,
|
||||
&editionFlag,
|
||||
&signingAdminFlag,
|
||||
&signFlag,
|
||||
&noInstallDepsFlag,
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "publish-metrics",
|
||||
Usage: "Publish a set of metrics from stdin",
|
||||
@@ -145,30 +73,6 @@ func main() {
|
||||
Usage: "Verify Drone configuration",
|
||||
Action: VerifyDrone,
|
||||
},
|
||||
{
|
||||
Name: "verify-starlark",
|
||||
Usage: "Verify Starlark configuration",
|
||||
ArgsUsage: "<workspace path>",
|
||||
Action: VerifyStarlark,
|
||||
},
|
||||
{
|
||||
Name: "export-version",
|
||||
Usage: "Exports version in dist/grafana.version",
|
||||
Action: ExportVersion,
|
||||
},
|
||||
{
|
||||
Name: "package",
|
||||
Usage: "Package one or more Grafana variants",
|
||||
ArgsUsage: "[version]",
|
||||
Action: MaxArgCountWrapper(1, Package),
|
||||
Flags: []cli.Flag{
|
||||
&jobsFlag,
|
||||
&variantsFlag,
|
||||
&editionFlag,
|
||||
&buildIDFlag,
|
||||
&signFlag,
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "store-storybook",
|
||||
Usage: "Stores storybook to GCS buckets",
|
||||
@@ -279,18 +183,6 @@ func main() {
|
||||
&editionFlag,
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "publish-enterprise2",
|
||||
Usage: "Handle Grafana Enterprise2 Docker images",
|
||||
ArgsUsage: "[version]",
|
||||
Action: Enterprise2,
|
||||
Flags: []cli.Flag{
|
||||
&cli.StringFlag{
|
||||
Name: "dockerhub-repo",
|
||||
Usage: "DockerHub repo to push images",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
@@ -399,36 +291,6 @@ func main() {
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "enterprise-check",
|
||||
Usage: "Commands for testing against Grafana Enterprise",
|
||||
Subcommands: cli.Commands{
|
||||
{
|
||||
Name: "begin",
|
||||
Usage: "Creates the GitHub check in a pull request and begins the tests",
|
||||
Action: EnterpriseCheckBegin,
|
||||
Flags: []cli.Flag{
|
||||
&gitHubTokenFlag,
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "success",
|
||||
Usage: "Updates the GitHub check in a pull request to show a successful build and updates the pull request labels",
|
||||
Action: EnterpriseCheckSuccess,
|
||||
Flags: []cli.Flag{
|
||||
&gitHubTokenFlag,
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "fail",
|
||||
Usage: "Updates the GitHub check in a pull request to show a failed build and updates the pull request labels",
|
||||
Action: EnterpriseCheckFail,
|
||||
Flags: []cli.Flag{
|
||||
&gitHubTokenFlag,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
app.Commands = append(app.Commands, additionalCommands...)
|
||||
|
||||
@@ -1,80 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"log"
|
||||
"strings"
|
||||
|
||||
"github.com/urfave/cli/v2"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/config"
|
||||
"github.com/grafana/grafana/pkg/build/gpg"
|
||||
"github.com/grafana/grafana/pkg/build/packaging"
|
||||
"github.com/grafana/grafana/pkg/build/syncutil"
|
||||
)
|
||||
|
||||
func Package(c *cli.Context) error {
|
||||
metadata, err := config.GenerateMetadata(c)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
edition := config.Edition(c.String("edition"))
|
||||
|
||||
releaseMode, err := metadata.GetReleaseMode()
|
||||
if err != nil {
|
||||
return cli.Exit(err.Error(), 1)
|
||||
}
|
||||
|
||||
releaseModeConfig, err := config.GetBuildConfig(metadata.ReleaseMode.Mode)
|
||||
if err != nil {
|
||||
return cli.Exit(err.Error(), 1)
|
||||
}
|
||||
|
||||
cfg := config.Config{
|
||||
NumWorkers: c.Int("jobs"),
|
||||
SignPackages: c.Bool("sign"),
|
||||
}
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
variants := []config.Variant{}
|
||||
variantStrs := strings.Split(c.String("variants"), ",")
|
||||
if c.String("variants") != "" {
|
||||
for _, varStr := range variantStrs {
|
||||
if varStr == "" {
|
||||
continue
|
||||
}
|
||||
variants = append(variants, config.Variant(varStr))
|
||||
}
|
||||
} else {
|
||||
variants = releaseModeConfig.Variants
|
||||
}
|
||||
|
||||
if len(variants) == 0 {
|
||||
variants = config.AllVariants
|
||||
}
|
||||
|
||||
log.Printf("Packaging Grafana version %q, version mode %s, %s edition, variants %s", metadata.GrafanaVersion, releaseMode.Mode,
|
||||
edition, strings.Join(variantStrs, ","))
|
||||
|
||||
if cfg.SignPackages {
|
||||
if err := gpg.LoadGPGKeys(&cfg); err != nil {
|
||||
return cli.Exit(err, 1)
|
||||
}
|
||||
defer gpg.RemoveGPGFiles(cfg)
|
||||
if err := gpg.Import(cfg); err != nil {
|
||||
return cli.Exit(err, 1)
|
||||
}
|
||||
}
|
||||
|
||||
p := syncutil.NewWorkerPool(cfg.NumWorkers)
|
||||
defer p.Close()
|
||||
|
||||
if err := packaging.PackageGrafana(ctx, metadata.GrafanaVersion, ".", cfg, edition, variants, releaseModeConfig.PluginSignature.Sign, p); err != nil {
|
||||
return cli.Exit(err, 1)
|
||||
}
|
||||
|
||||
log.Println("Successfully packaged Grafana!")
|
||||
return nil
|
||||
}
|
||||
@@ -1,101 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"os/exec"
|
||||
|
||||
"github.com/urfave/cli/v2"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/config"
|
||||
"github.com/grafana/grafana/pkg/build/docker"
|
||||
"github.com/grafana/grafana/pkg/build/gcloud"
|
||||
)
|
||||
|
||||
// Enterprise2 is the CLI action that publishes the enterprise2 Docker images:
// it pushes every per-architecture image to the configured repo, then creates
// and pushes a multi-arch manifest per distribution.
// It takes no positional arguments; any argument is treated as a usage error.
func Enterprise2(c *cli.Context) error {
	if c.NArg() > 0 {
		if err := cli.ShowSubcommandHelp(c); err != nil {
			return cli.Exit(err.Error(), 1)
		}
		return cli.Exit("", 1)
	}

	if err := gcloud.ActivateServiceAccount(); err != nil {
		return fmt.Errorf("couldn't activate service account, err: %w", err)
	}

	metadata, err := config.GenerateMetadata(c)
	if err != nil {
		return err
	}

	buildConfig, err := config.GetBuildConfig(metadata.ReleaseMode.Mode)
	if err != nil {
		return err
	}

	cfg := docker.Config{
		Archs:         buildConfig.Docker.Architectures,
		Distribution:  buildConfig.Docker.Distribution,
		DockerHubRepo: c.String("dockerhub-repo"),
		Tag:           metadata.GrafanaVersion,
	}

	err = dockerLoginEnterprise2()
	if err != nil {
		return err
	}

	// Map each configured distribution to the tag suffix used in image names.
	// `alpine` and `ubuntu` are constants declared elsewhere in this package;
	// alpine images carry no suffix.
	var distributionStr []string
	for _, distribution := range cfg.Distribution {
		switch distribution {
		case alpine:
			distributionStr = append(distributionStr, "")
		case ubuntu:
			distributionStr = append(distributionStr, "-ubuntu")
		default:
			return fmt.Errorf("unrecognized distribution %q", distribution)
		}
	}

	for _, distribution := range distributionStr {
		// Push one image per architecture, collecting the names so they can
		// be combined into a single multi-arch manifest below.
		var imageFileNames []string
		for _, arch := range cfg.Archs {
			imageFilename := fmt.Sprintf("%s:%s%s-%s", cfg.DockerHubRepo, cfg.Tag, distribution, arch)
			err := docker.PushImage(imageFilename)
			if err != nil {
				return err
			}
			imageFileNames = append(imageFileNames, imageFilename)
		}
		manifest := fmt.Sprintf("%s:%s%s", cfg.DockerHubRepo, cfg.Tag, distribution)
		args := []string{"manifest", "create", manifest}
		args = append(args, imageFileNames...)

		// `docker manifest` is gated behind the experimental CLI flag.
		//nolint:gosec
		cmd := exec.Command("docker", args...)
		cmd.Env = append(os.Environ(), "DOCKER_CLI_EXPERIMENTAL=enabled")
		if output, err := cmd.CombinedOutput(); err != nil {
			return fmt.Errorf("failed to create Docker manifest: %w\n%s", err, output)
		}

		err = docker.PushManifest(manifest)
		if err != nil {
			return err
		}
	}

	return nil
}
|
||||
|
||||
// dockerLoginEnterprise2 configures the local Docker client to authenticate
// against Google Cloud registries via `gcloud auth configure-docker`.
func dockerLoginEnterprise2() error {
	log.Println("Docker login...")

	loginCmd := exec.Command("gcloud", "auth", "configure-docker")
	out, err := loginCmd.CombinedOutput()
	if err != nil {
		return fmt.Errorf("error logging in to DockerHub: %s %q", out, err)
	}

	log.Println("Successful login!")
	return nil
}
|
||||
@@ -1,42 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/urfave/cli/v2"
|
||||
)
|
||||
|
||||
func Shellcheck(c *cli.Context) error {
|
||||
log.Println("Running shellcheck...")
|
||||
|
||||
fpaths := []string{}
|
||||
if err := filepath.Walk("scripts", func(path string, info os.FileInfo, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if strings.HasSuffix(path, ".sh") {
|
||||
fpaths = append(fpaths, path)
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
return fmt.Errorf("couldn't traverse scripts/: %w", err)
|
||||
}
|
||||
|
||||
log.Printf("Running shellcheck on %s", strings.Join(fpaths, ","))
|
||||
args := append([]string{"-e", "SC1071", "-e", "SC2162"}, fpaths...)
|
||||
//nolint:gosec
|
||||
cmd := exec.Command("shellcheck", args...)
|
||||
if output, err := cmd.CombinedOutput(); err != nil {
|
||||
return fmt.Errorf("shellcheck failed: %s", output)
|
||||
}
|
||||
|
||||
log.Println("Successfully ran shellcheck!")
|
||||
return nil
|
||||
}
|
||||
@@ -7,6 +7,7 @@ import (
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/urfave/cli/v2"
|
||||
@@ -14,9 +15,28 @@ import (
|
||||
"github.com/grafana/grafana/pkg/build/config"
|
||||
"github.com/grafana/grafana/pkg/build/droneutil"
|
||||
"github.com/grafana/grafana/pkg/build/gcloud"
|
||||
"github.com/grafana/grafana/pkg/build/packaging"
|
||||
)
|
||||
|
||||
// PackageRegexp returns a regexp for matching packages corresponding to a certain Grafana edition.
|
||||
func PackageRegexp(edition config.Edition) *regexp.Regexp {
|
||||
var sfx string
|
||||
switch edition {
|
||||
case config.EditionOSS:
|
||||
case config.EditionEnterprise:
|
||||
sfx = "-enterprise"
|
||||
case config.EditionEnterprise2:
|
||||
sfx = "-enterprise2"
|
||||
default:
|
||||
panic(fmt.Sprintf("unrecognized edition %q", edition))
|
||||
}
|
||||
rePkg, err := regexp.Compile(fmt.Sprintf(`^grafana%s(?:-rpi)?[-_][^-_]+.*$`, sfx))
|
||||
if err != nil {
|
||||
panic(fmt.Sprintf("Failed to compile regexp: %s", err))
|
||||
}
|
||||
|
||||
return rePkg
|
||||
}
|
||||
|
||||
const releaseFolder = "release"
|
||||
const mainFolder = "main"
|
||||
const releaseBranchFolder = "prerelease"
|
||||
@@ -181,7 +201,7 @@ func uploadPackages(cfg uploadConfig) error {
|
||||
return fmt.Errorf("failed to list packages: %w", err)
|
||||
}
|
||||
fpaths := []string{}
|
||||
rePkg := packaging.PackageRegexp(cfg.edition)
|
||||
rePkg := PackageRegexp(cfg.edition)
|
||||
for _, fpath := range matches {
|
||||
fname := filepath.Base(fpath)
|
||||
if strings.Contains(fname, "latest") || !rePkg.MatchString(fname) {
|
||||
|
||||
@@ -1,142 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io/fs"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/urfave/cli/v2"
|
||||
)
|
||||
|
||||
// mapSlice applies fn to every element of in and returns the results in a
// new slice of the same length.
func mapSlice[I any, O any](in []I, fn func(I) O) []O {
	out := make([]O, len(in))
	for i := range in {
		out[i] = fn(in[i])
	}
	return out
}
|
||||
|
||||
// VerifyStarlark is the CLI Action for verifying Starlark files in a workspace.
// It expects a single context argument which is the path to the workspace.
// The actual verification procedure can return multiple errors which are
// joined together to be one holistic error for the action.
func VerifyStarlark(c *cli.Context) error {
	// Exactly one positional argument (the workspace path) is required.
	if c.NArg() != 1 {
		var message string
		if c.NArg() == 0 {
			message = "ERROR: missing required argument <workspace path>"
		}
		if c.NArg() > 1 {
			message = "ERROR: too many arguments"
		}

		if err := cli.ShowSubcommandHelp(c); err != nil {
			return err
		}

		return cli.Exit(message, 1)
	}

	workspace := c.Args().Get(0)
	// executionErr means the verifier itself could not run (e.g. buildifier
	// misuse or context cancellation); verificationErrs are per-file findings.
	verificationErrs, executionErr := verifyStarlark(c.Context, workspace, buildifierLintCommand)
	if executionErr != nil {
		return executionErr
	}

	if len(verificationErrs) == 0 {
		return nil
	}

	// Pluralize "file" for the summary message.
	noun := "file"
	if len(verificationErrs) > 1 {
		noun += "s"
	}

	return fmt.Errorf("verification failed for %d %s:\n%s",
		len(verificationErrs),
		noun,
		strings.Join(
			mapSlice(verificationErrs, func(e error) string { return e.Error() }),
			"\n",
		))
}
|
||||
|
||||
type commandFunc = func(path string) (command string, args []string)
|
||||
|
||||
func buildifierLintCommand(path string) (string, []string) {
|
||||
return "buildifier", []string{"-lint", "warn", "-mode", "check", path}
|
||||
}
|
||||
|
||||
// verifyStarlark walks all directories starting at provided workspace path and
// verifies any Starlark files it finds.
// Starlark files are assumed to end with the .star extension.
// The verification relies on linting frovided by the 'buildifier' binary which
// must be in the PATH.
// A slice of verification errors are returned, one for each file that failed verification.
// If any execution of the `buildifier` command fails, this is returned separately.
// commandFn is executed on every Starlark file to determine the command and arguments to be executed.
// The caller is trusted and it is the callers responsibility to ensure that the resulting command is safe to execute.
func verifyStarlark(ctx context.Context, workspace string, commandFn commandFunc) ([]error, error) {
	var verificationErrs []error

	// All errors from filepath.WalkDir are filtered by the fs.WalkDirFunc.
	// Lstat or ReadDir errors are reported as verificationErrors.
	// If any execution of the `buildifier` command fails or if the context is cancelled,
	// it is reported as an error and any verification of subsequent files is skipped.
	err := filepath.WalkDir(workspace, func(path string, d fs.DirEntry, err error) error {
		// Skip verification of the file or files within the directory if there is an error
		// returned by Lstat or ReadDir.
		if err != nil {
			verificationErrs = append(verificationErrs, err)
			return nil
		}

		if d.IsDir() {
			return nil
		}

		if filepath.Ext(path) == ".star" {
			command, args := commandFn(path)
			// The caller is trusted.
			//nolint:gosec
			cmd := exec.CommandContext(ctx, command, args...)
			cmd.Dir = workspace

			_, err = cmd.Output()
			if err == nil { // No error, early return.
				return nil
			}

			// The error returned from cmd.Output() is never wrapped.
			//nolint:errorlint
			if err, ok := err.(*exec.ExitError); ok {
				// Exit codes 1 and 4 are per-file findings (recorded and the
				// walk continues); 2, 3, and anything else abort the walk.
				switch err.ExitCode() {
				// Case comments are informed by the output of `buildifier --help`
				case 1: // syntax errors in input
					verificationErrs = append(verificationErrs, errors.New(string(err.Stderr)))
					return nil
				case 2: // usage errors: invoked incorrectly
					return fmt.Errorf("command %q: %s", cmd, err.Stderr)
				case 3: // unexpected runtime errors: file I/O problems or internal bugs
					return fmt.Errorf("command %q: %s", cmd, err.Stderr)
				case 4: // check mode failed (reformat is needed)
					verificationErrs = append(verificationErrs, errors.New(string(err.Stderr)))
					return nil
				default:
					return fmt.Errorf("command %q: %s", cmd, err.Stderr)
				}
			}

			// Error was not an exit error from the command.
			return fmt.Errorf("command %q: %v", cmd, err)
		}

		return nil
	})

	return verificationErrs, err
}
|
||||
@@ -1,137 +0,0 @@
|
||||
//go:build requires_buildifier
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestVerifyStarlark(t *testing.T) {
|
||||
t.Run("execution errors", func(t *testing.T) {
|
||||
t.Run("invalid usage", func(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
workspace := t.TempDir()
|
||||
err := os.WriteFile(filepath.Join(workspace, "ignored.star"), []byte{}, os.ModePerm)
|
||||
if err != nil {
|
||||
t.Fatalf(err.Error())
|
||||
}
|
||||
|
||||
_, executionErr := verifyStarlark(ctx, workspace, func(string) (string, []string) { return "buildifier", []string{"--invalid"} })
|
||||
if executionErr == nil {
|
||||
t.Fatalf("Expected execution error but got none")
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("context cancellation", func(t *testing.T) {
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
workspace := t.TempDir()
|
||||
err := os.WriteFile(filepath.Join(workspace, "ignored.star"), []byte{}, os.ModePerm)
|
||||
if err != nil {
|
||||
t.Fatalf(err.Error())
|
||||
}
|
||||
err = os.WriteFile(filepath.Join(workspace, "other-ignored.star"), []byte{}, os.ModePerm)
|
||||
if err != nil {
|
||||
t.Fatalf(err.Error())
|
||||
}
|
||||
cancel()
|
||||
|
||||
_, executionErr := verifyStarlark(ctx, workspace, buildifierLintCommand)
|
||||
if executionErr == nil {
|
||||
t.Fatalf("Expected execution error but got none")
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
t.Run("verification errors", func(t *testing.T) {
|
||||
t.Run("a single file with lint", func(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
workspace := t.TempDir()
|
||||
|
||||
invalidContent := []byte(`load("scripts/drone/other.star", "function")
|
||||
|
||||
function()`)
|
||||
err := os.WriteFile(filepath.Join(workspace, "has-lint.star"), invalidContent, os.ModePerm)
|
||||
if err != nil {
|
||||
t.Fatalf(err.Error())
|
||||
}
|
||||
|
||||
verificationErrs, executionErr := verifyStarlark(ctx, workspace, buildifierLintCommand)
|
||||
if executionErr != nil {
|
||||
t.Fatalf("Unexpected execution error: %v", executionErr)
|
||||
}
|
||||
if len(verificationErrs) == 0 {
|
||||
t.Fatalf(`"has-lint.star" requires linting but the verifyStarlark function provided no linting error`)
|
||||
}
|
||||
if len(verificationErrs) > 1 {
|
||||
t.Fatalf(`verifyStarlark returned multiple errors for the "has-lint.star" file but only one was expected: %v`, verificationErrs)
|
||||
}
|
||||
if !strings.Contains(verificationErrs[0].Error(), "has-lint.star:1: module-docstring: The file has no module docstring.") {
|
||||
t.Fatalf(`"has-lint.star" is missing a module docstring but the verifyStarlark function linting error did not mention this, instead we got: %v`, verificationErrs[0])
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("no files with lint", func(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
workspace := t.TempDir()
|
||||
|
||||
content := []byte(`"""
|
||||
This module does nothing.
|
||||
"""
|
||||
|
||||
load("scripts/drone/other.star", "function")
|
||||
|
||||
function()
|
||||
`)
|
||||
require.NoError(t, os.WriteFile(filepath.Join(workspace, "no-lint.star"), content, os.ModePerm))
|
||||
|
||||
verificationErrs, executionErr := verifyStarlark(ctx, workspace, buildifierLintCommand)
|
||||
if executionErr != nil {
|
||||
t.Fatalf("Unexpected execution error: %v", executionErr)
|
||||
}
|
||||
if len(verificationErrs) != 0 {
|
||||
t.Log(`"no-lint.star" has no lint but the verifyStarlark function provided at least one error`)
|
||||
for _, err := range verificationErrs {
|
||||
t.Log(err)
|
||||
}
|
||||
t.FailNow()
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("multiple files with lint", func(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
workspace := t.TempDir()
|
||||
|
||||
invalidContent := []byte(`load("scripts/drone/other.star", "function")
|
||||
|
||||
function()`)
|
||||
require.NoError(t, os.WriteFile(filepath.Join(workspace, "has-lint.star"), invalidContent, os.ModePerm))
|
||||
require.NoError(t, os.WriteFile(filepath.Join(workspace, "has-lint2.star"), invalidContent, os.ModePerm))
|
||||
|
||||
verificationErrs, executionErr := verifyStarlark(ctx, workspace, buildifierLintCommand)
|
||||
if executionErr != nil {
|
||||
t.Fatalf("Unexpected execution error: %v", executionErr)
|
||||
}
|
||||
if len(verificationErrs) == 0 {
|
||||
t.Fatalf(`Two files require linting but the verifyStarlark function provided no linting error`)
|
||||
}
|
||||
if len(verificationErrs) == 1 {
|
||||
t.Fatalf(`Two files require linting but the verifyStarlark function provided only one linting error: %v`, verificationErrs[0])
|
||||
}
|
||||
if len(verificationErrs) > 2 {
|
||||
t.Fatalf(`verifyStarlark returned more errors than expected: %v`, verificationErrs)
|
||||
}
|
||||
if !strings.Contains(verificationErrs[0].Error(), "has-lint.star:1: module-docstring: The file has no module docstring.") {
|
||||
t.Errorf(`"has-lint.star" is missing a module docstring but the verifyStarlark function linting error did not mention this, instead we got: %v`, verificationErrs[0])
|
||||
}
|
||||
if !strings.Contains(verificationErrs[1].Error(), "has-lint2.star:1: module-docstring: The file has no module docstring.") {
|
||||
t.Fatalf(`"has-lint2.star" is missing a module docstring but the verifyStarlark function linting error did not mention this, instead we got: %v`, verificationErrs[0])
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
@@ -1,50 +0,0 @@
|
||||
package compilers
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
const (
|
||||
ArmV6 = "/opt/rpi-tools/arm-bcm2708/arm-linux-gnueabihf/bin/arm-linux-gnueabihf-gcc"
|
||||
Armv7 = "arm-linux-gnueabihf-gcc"
|
||||
Armv7Musl = "/tmp/arm-linux-musleabihf-cross/bin/arm-linux-musleabihf-gcc"
|
||||
Arm64 = "aarch64-linux-gnu-gcc"
|
||||
Arm64Musl = "/tmp/aarch64-linux-musl-cross/bin/aarch64-linux-musl-gcc"
|
||||
Osx64 = "/tmp/osxcross/target/bin/o64-clang"
|
||||
Win64 = "x86_64-w64-mingw32-gcc"
|
||||
LinuxX64 = "/tmp/x86_64-centos6-linux-gnu/bin/x86_64-centos6-linux-gnu-gcc"
|
||||
LinuxX64Musl = "/tmp/x86_64-linux-musl-cross/bin/x86_64-linux-musl-gcc"
|
||||
)
|
||||
|
||||
// Install unpacks the cross-compiler toolchain archives (CentOS x86_64 and
// osxcross) that are expected to already be present in the system temporary
// directory. It fails if either archive is missing or cannot be extracted.
func Install() error {
	// From the os.TempDir documentation:
	// On Unix systems, it returns $TMPDIR if non-empty,
	// else /tmp. On Windows, it uses GetTempPath,
	// returning the first non-empty value from %TMP%, %TEMP%, %USERPROFILE%,
	// or the Windows directory. On Plan 9, it returns /tmp.
	tmp := os.TempDir()

	archives := []string{
		"x86_64-centos6-linux-gnu.tar.xz",
		"osxcross.tar.xz",
	}

	for _, fname := range archives {
		path := filepath.Join(tmp, fname)
		if _, err := os.Stat(path); err != nil {
			return fmt.Errorf("stat error: %w", err)
		}

		// Ignore gosec G204 as this function is only used in the build process.
		//nolint:gosec
		cmd := exec.Command("tar", "xfJ", fname)
		cmd.Dir = tmp
		if output, err := cmd.CombinedOutput(); err != nil {
			return fmt.Errorf("failed to unpack %q: %q, %w", fname, output, err)
		}
	}

	return nil
}
|
||||
@@ -1,69 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/executil"
|
||||
)
|
||||
|
||||
type Revision struct {
|
||||
Timestamp int64
|
||||
SHA256 string
|
||||
EnterpriseCommit string
|
||||
Branch string
|
||||
}
|
||||
|
||||
func GrafanaTimestamp(ctx context.Context, dir string) (int64, error) {
|
||||
out, err := executil.OutputAt(ctx, dir, "git", "show", "-s", "--format=%ct")
|
||||
if err != nil {
|
||||
return time.Now().Unix(), nil
|
||||
}
|
||||
|
||||
stamp, err := strconv.ParseInt(out, 10, 64)
|
||||
if err != nil {
|
||||
return 0, fmt.Errorf("failed to parse output from git show: %q", out)
|
||||
}
|
||||
|
||||
return stamp, nil
|
||||
}
|
||||
|
||||
// GrafanaRevision uses git commands to get information about the checked out Grafana code located at 'grafanaDir'.
// This could maybe be a more generic "Describe" function in the "git" package.
func GrafanaRevision(ctx context.Context, grafanaDir string) (Revision, error) {
	stamp, err := GrafanaTimestamp(ctx, grafanaDir)
	if err != nil {
		return Revision{}, err
	}

	// Short commit hash of HEAD in the Grafana checkout.
	sha, err := executil.OutputAt(ctx, grafanaDir, "git", "rev-parse", "--short", "HEAD")
	if err != nil {
		return Revision{}, err
	}

	// Look for the enterprise checkout in several conventional locations,
	// in order: sibling ../grafana-enterprise, parent dir, /tmp/grafana-enterprise.
	// If all fail, the commit is left empty and the failure is only logged —
	// the enterprise commit is optional metadata, not a hard requirement.
	enterpriseCommit, err := executil.OutputAt(ctx, grafanaDir, "git", "-C", "../grafana-enterprise", "rev-parse", "--short", "HEAD")
	if err != nil {
		enterpriseCommit, err = executil.OutputAt(ctx, grafanaDir, "git", "-C", "..", "rev-parse", "--short", "HEAD")
		if err != nil {
			enterpriseCommit, err = executil.OutputAt(ctx, grafanaDir, "git", "-C", "/tmp/grafana-enterprise", "rev-parse", "--short", "HEAD")
			if err != nil {
				log.Println("Could not get enterprise commit. Error:", err)
			}
		}
	}

	// Current branch name (or "HEAD" for a detached checkout — NOTE(review):
	// presumably acceptable downstream; confirm with callers).
	branch, err := executil.OutputAt(ctx, grafanaDir, "git", "rev-parse", "--abbrev-ref", "HEAD")
	if err != nil {
		return Revision{}, err
	}

	return Revision{
		SHA256:           sha,
		EnterpriseCommit: enterpriseCommit,
		Branch:           branch,
		Timestamp:        stamp,
	}, nil
}
|
||||
@@ -1,35 +0,0 @@
|
||||
package cryptoutil
|
||||
|
||||
import (
|
||||
"crypto/md5"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
)
|
||||
|
||||
func MD5File(fpath string) error {
|
||||
// Ignore gosec G304 as this function is only used in the build process.
|
||||
//nolint:gosec
|
||||
fd, err := os.Open(fpath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func() {
|
||||
if err := fd.Close(); err != nil {
|
||||
log.Printf("error closing file at '%s': %s", fpath, err.Error())
|
||||
}
|
||||
}()
|
||||
|
||||
h := md5.New() // nolint:gosec
|
||||
if _, err = io.Copy(h, fd); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// nolint:gosec
|
||||
if err := os.WriteFile(fpath+".md5", []byte(fmt.Sprintf("%x\n", h.Sum(nil))), 0664); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -1,182 +0,0 @@
|
||||
package docker
|
||||
|
||||
import (
|
||||
"crypto/sha256"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/config"
|
||||
)
|
||||
|
||||
// verifyArchive verifies the integrity of an archive file.
|
||||
func verifyArchive(archive string) error {
|
||||
log.Printf("Verifying checksum of %q", archive)
|
||||
|
||||
//nolint:gosec
|
||||
shaB, err := os.ReadFile(archive + ".sha256")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
exp := strings.TrimSpace(string(shaB))
|
||||
|
||||
//nolint:gosec
|
||||
f, err := os.Open(archive)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
defer func() {
|
||||
if err := f.Close(); err != nil {
|
||||
log.Println("error closing file:", err)
|
||||
}
|
||||
}()
|
||||
|
||||
h := sha256.New()
|
||||
_, err = io.Copy(h, f)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
chksum := hex.EncodeToString(h.Sum(nil))
|
||||
if chksum != exp {
|
||||
return fmt.Errorf("archive checksum is different than expected: %q", archive)
|
||||
}
|
||||
|
||||
log.Printf("Archive %q has expected checksum: %s", archive, exp)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// BuildImage builds a Docker image.
// The image tag is returned.
// It verifies the edition/arch tarball checksum, runs `docker build` with
// BuildKit enabled, optionally saves and uploads the image to the
// grafana-prerelease GCS bucket, and (for OSS) tags it into an additional repo.
func BuildImage(version string, arch config.Architecture, grafanaDir string, useUbuntu, shouldSave bool, edition config.Edition, mode config.VersionMode) ([]string, error) {
	// Prefix for the base image name; amd64 uses the unprefixed official image.
	var baseArch string

	switch arch {
	case "amd64":
	case "armv7":
		baseArch = "arm32v7/"
	case "arm64":
		baseArch = "arm64v8/"
	default:
		return []string{}, fmt.Errorf("unrecognized architecture %q", arch)
	}

	// Alpine builds consume the musl tarball; Ubuntu builds the glibc one.
	libc := "-musl"
	baseImage := fmt.Sprintf("%salpine:3.18.5", baseArch)
	tagSuffix := ""
	if useUbuntu {
		libc = ""
		baseImage = fmt.Sprintf("%subuntu:22.04", baseArch)
		tagSuffix = "-ubuntu"
	}

	var editionStr string
	var dockerRepo string
	var additionalDockerRepo string
	var tags []string
	var imageFileBase string
	// The enterprise2 repo is injected via the environment rather than
	// hard-coded; empty if the variable is unset.
	var dockerEnterprise2Repo string
	if repo, ok := os.LookupEnv("DOCKER_ENTERPRISE2_REPO"); ok {
		dockerEnterprise2Repo = repo
	}

	switch edition {
	case config.EditionOSS:
		dockerRepo = "grafana/grafana-image-tags"
		additionalDockerRepo = "grafana/grafana-oss-image-tags"
		imageFileBase = "grafana-oss"
	case config.EditionEnterprise:
		dockerRepo = "grafana/grafana-enterprise-image-tags"
		imageFileBase = "grafana-enterprise"
		editionStr = "-enterprise"
	case config.EditionEnterprise2:
		dockerRepo = dockerEnterprise2Repo
		imageFileBase = "grafana-enterprise2"
		editionStr = "-enterprise2"
	default:
		return []string{}, fmt.Errorf("unrecognized edition %s", edition)
	}

	buildDir := filepath.Join(grafanaDir, "packaging/docker")
	// For example: grafana-8.5.0-52819pre.linux-amd64-musl.tar.gz
	archive := fmt.Sprintf("grafana%s-%s.linux-%s%s.tar.gz", editionStr, version, arch, libc)
	if err := verifyArchive(filepath.Join(buildDir, archive)); err != nil {
		return []string{}, err
	}

	tag := fmt.Sprintf("%s:%s%s-%s", dockerRepo, version, tagSuffix, arch)
	tags = append(tags, tag)

	args := []string{
		"build",
		"-q",
		"--build-arg", fmt.Sprintf("BASE_IMAGE=%s", baseImage),
		"--build-arg", fmt.Sprintf("GRAFANA_TGZ=%s", archive),
		"--build-arg", "GO_SRC=tgz-builder",
		"--build-arg", "JS_SRC=tgz-builder",
		"--build-arg", "RUN_SH=./run.sh",
		"--tag", tag,
		"--no-cache",
		"--file", "../../Dockerfile",
		".",
		"--label", fmt.Sprintf("mode=%s", string(mode)),
	}

	//nolint:gosec
	cmd := exec.Command("docker", args...)
	cmd.Dir = buildDir
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr
	cmd.Env = append(os.Environ(), "DOCKER_CLI_EXPERIMENTAL=enabled", "DOCKER_BUILDKIT=1")
	log.Printf("Running Docker: DOCKER_CLI_EXPERIMENTAL=enabled DOCKER_BUILDKIT=1 %s", cmd)
	if err := cmd.Run(); err != nil {
		return []string{}, fmt.Errorf("building Docker image failed: %w", err)
	}
	if shouldSave {
		// Export the image to a tar file and copy it to GCS for later reuse.
		imageFile := fmt.Sprintf("%s-%s%s-%s.img", imageFileBase, version, tagSuffix, arch)
		//nolint:gosec
		cmd = exec.Command("docker", "save", tag, "-o", imageFile)
		cmd.Dir = buildDir
		cmd.Stdout = os.Stdout
		cmd.Stderr = os.Stderr
		log.Printf("Running Docker: %s", cmd)
		if err := cmd.Run(); err != nil {
			return []string{}, fmt.Errorf("saving Docker image failed: %w", err)
		}
		gcsURL := fmt.Sprintf("gs://grafana-prerelease/artifacts/docker/%s/%s", version, imageFile)
		//nolint:gosec
		cmd = exec.Command("gsutil", "-q", "cp", imageFile, gcsURL)
		cmd.Dir = buildDir
		cmd.Stdout = os.Stdout
		cmd.Stderr = os.Stderr
		log.Printf("Running gsutil: %s", cmd)
		if err := cmd.Run(); err != nil {
			return []string{}, fmt.Errorf("storing Docker image failed: %w", err)
		}
		log.Printf("Docker image %s stored to grafana-prerelease GCS bucket", imageFile)
	}
	if additionalDockerRepo != "" {
		// OSS images are additionally tagged into the -oss repo.
		additionalTag := fmt.Sprintf("%s:%s%s-%s", additionalDockerRepo, version, tagSuffix, arch)

		//nolint:gosec
		cmd = exec.Command("docker", "tag", tag, additionalTag)
		cmd.Dir = buildDir
		cmd.Stdout = os.Stdout
		cmd.Stderr = os.Stderr
		log.Printf("Running Docker: %s", cmd)
		if err := cmd.Run(); err != nil {
			return []string{}, fmt.Errorf("tagging Docker image failed: %w", err)
		}
		tags = append(tags, additionalTag)
	}

	return tags, nil
}
|
||||
@@ -1,34 +0,0 @@
|
||||
package docker
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"os/exec"
|
||||
)
|
||||
|
||||
// AllArchs is a list of all supported Docker image architectures.
|
||||
var AllArchs = []string{"amd64", "arm64"}
|
||||
|
||||
// emulatorImage is the docker image used as the cross-platform emulator
|
||||
var emulatorImage = "tonistiigi/binfmt:qemu-v7.0.0"
|
||||
|
||||
// Init initializes the OS for Docker image building.
|
||||
func Init() error {
|
||||
// Necessary for cross-platform builds
|
||||
if err := os.Setenv("DOCKER_BUILDKIT", "1"); err != nil {
|
||||
log.Println("error setting DOCKER_BUILDKIT environment variable:", err)
|
||||
}
|
||||
|
||||
// Enable execution of Docker images for other architectures
|
||||
//nolint:gosec
|
||||
cmd := exec.Command("docker", "run", "--privileged", "--rm",
|
||||
emulatorImage, "--install", "all")
|
||||
output, err := cmd.CombinedOutput()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to enable execution of cross-platform Docker images: %w\n%s", err, output)
|
||||
}
|
||||
log.Println("emulators have been installed successfully!")
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -1,62 +0,0 @@
|
||||
package docker
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"os/exec"
|
||||
"time"
|
||||
)
|
||||
|
||||
const (
|
||||
tries = 3
|
||||
sleepTime = 30
|
||||
)
|
||||
|
||||
func PushImage(newImage string) error {
|
||||
var err error
|
||||
for i := 0; i < tries; i++ {
|
||||
log.Printf("push attempt #%d...", i+1)
|
||||
var out []byte
|
||||
cmd := exec.Command("docker", "push", newImage)
|
||||
cmd.Dir = "."
|
||||
out, err = cmd.CombinedOutput()
|
||||
if err != nil {
|
||||
log.Printf("output: %s", out)
|
||||
log.Printf("sleep for %d, before retrying...", sleepTime)
|
||||
time.Sleep(sleepTime * time.Second)
|
||||
} else {
|
||||
log.Printf("Successfully pushed %s!", newImage)
|
||||
break
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
return fmt.Errorf("error pushing images to DockerHub: %q", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func PushManifest(manifest string) error {
|
||||
log.Printf("Pushing Docker manifest %s...", manifest)
|
||||
|
||||
var err error
|
||||
for i := 0; i < tries; i++ {
|
||||
log.Printf("push attempt #%d...", i+1)
|
||||
var out []byte
|
||||
cmd := exec.Command("docker", "manifest", "push", manifest)
|
||||
cmd.Env = append(os.Environ(), "DOCKER_CLI_EXPERIMENTAL=enabled")
|
||||
out, err = cmd.CombinedOutput()
|
||||
if err != nil {
|
||||
log.Printf("output: %s", out)
|
||||
log.Printf("sleep for %d, before retrying...", sleepTime)
|
||||
time.Sleep(sleepTime * time.Second)
|
||||
} else {
|
||||
log.Printf("Successful manifest push! %s", string(out))
|
||||
break
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to push manifest, err: %w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
18
pkg/build/env/lookup.go
vendored
18
pkg/build/env/lookup.go
vendored
@@ -1,18 +0,0 @@
|
||||
package env
|
||||
|
||||
import (
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Lookup is the equivalent of os.LookupEnv, only you are able to provide the list of environment variables.
// To use this as os.LookupEnv would be used, simply call
// `env.Lookup("ENVIRONMENT_VARIABLE", os.Environ())`
func Lookup(name string, vars []string) (string, bool) {
	// Match "NAME=" rather than the bare name: the previous prefix check
	// matched any variable whose name merely started with `name` (e.g.
	// looking up "ENV_4" matched "ENV_4_TEST=...") and then returned the
	// whole untrimmed entry because the "=" TrimPrefix never applied.
	prefix := name + "="
	for _, v := range vars {
		if strings.HasPrefix(v, prefix) {
			return strings.TrimPrefix(v, prefix), true
		}
	}

	return "", false
}
|
||||
43
pkg/build/env/lookup_test.go
vendored
43
pkg/build/env/lookup_test.go
vendored
@@ -1,43 +0,0 @@
|
||||
package env_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/env"
|
||||
)
|
||||
|
||||
// TestLookup checks env.Lookup against a fixed variable list: present
// variables (including one with an empty value) must be found with their
// values, and an absent name must report ok=false.
func TestLookup(t *testing.T) {
	values := []string{"ENV_1=a", "ENV_2=b", "ENV_3=c", "ENV_4_TEST="}

	{
		v, ok := env.Lookup("ENV_1", values)
		require.Equal(t, v, "a")
		require.True(t, ok)
	}

	{
		v, ok := env.Lookup("ENV_2", values)
		require.Equal(t, v, "b")
		require.True(t, ok)
	}

	{
		v, ok := env.Lookup("ENV_3", values)
		require.Equal(t, v, "c")
		require.True(t, ok)
	}

	{
		// Set but empty: found with an empty value.
		v, ok := env.Lookup("ENV_4_TEST", values)
		require.Equal(t, v, "")
		require.True(t, ok)
	}

	{
		// Absent name: not found.
		v, ok := env.Lookup("NOT_THERE", values)
		require.Equal(t, v, "")
		require.False(t, ok)
	}
}
|
||||
@@ -1,61 +0,0 @@
|
||||
package errutil
|
||||
|
||||
import (
|
||||
"context"
|
||||
"log"
|
||||
"sync"
|
||||
)
|
||||
|
||||
type Group struct {
|
||||
cancel func()
|
||||
wg sync.WaitGroup
|
||||
errOnce sync.Once
|
||||
err error
|
||||
}
|
||||
|
||||
func GroupWithContext(ctx context.Context) (*Group, context.Context) {
|
||||
ctx, cancel := context.WithCancel(ctx)
|
||||
return &Group{cancel: cancel}, ctx
|
||||
}
|
||||
|
||||
// Wait waits for any wrapped goroutines to finish and returns any error having occurred in one of them.
|
||||
func (g *Group) Wait() error {
|
||||
log.Println("Waiting on Group")
|
||||
g.wg.Wait()
|
||||
if g.cancel != nil {
|
||||
log.Println("Group canceling its context after waiting")
|
||||
g.cancel()
|
||||
}
|
||||
return g.err
|
||||
}
|
||||
|
||||
// Cancel cancels the associated context.
|
||||
func (g *Group) Cancel() {
|
||||
log.Println("Group's Cancel method being called")
|
||||
g.cancel()
|
||||
}
|
||||
|
||||
// Wrap wraps a function to be executed in a goroutine.
|
||||
func (g *Group) Wrap(f func() error) func() {
|
||||
g.wg.Add(1)
|
||||
return func() {
|
||||
defer g.wg.Done()
|
||||
|
||||
if err := f(); err != nil {
|
||||
g.errOnce.Do(func() {
|
||||
log.Printf("An error occurred in Group: %s", err)
|
||||
g.err = err
|
||||
if g.cancel != nil {
|
||||
log.Println("Group canceling its context due to error")
|
||||
g.cancel()
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Go wraps the provided function and executes it in a goroutine.
|
||||
func (g *Group) Go(f func() error) {
|
||||
wrapped := g.Wrap(f)
|
||||
go wrapped()
|
||||
}
|
||||
@@ -1,46 +0,0 @@
|
||||
package executil
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"os/exec"
|
||||
"strings"
|
||||
)
|
||||
|
||||
func RunAt(ctx context.Context, dir, cmd string, args ...string) error {
|
||||
// Ignore gosec G204 as this function is only used in the build process.
|
||||
//nolint:gosec
|
||||
c := exec.CommandContext(ctx, cmd, args...)
|
||||
c.Dir = dir
|
||||
|
||||
b, err := c.CombinedOutput()
|
||||
|
||||
if err != nil {
|
||||
return fmt.Errorf("%w. '%s %v': %s", err, cmd, args, string(b))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func Run(ctx context.Context, cmd string, args ...string) error {
|
||||
return RunAt(ctx, ".", cmd, args...)
|
||||
}
|
||||
|
||||
func OutputAt(ctx context.Context, dir, cmd string, args ...string) (string, error) {
|
||||
// Ignore gosec G204 as this function is only used in the build process.
|
||||
//nolint:gosec
|
||||
c := exec.CommandContext(ctx, cmd, args...)
|
||||
c.Dir = dir
|
||||
|
||||
b, err := c.CombinedOutput()
|
||||
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return strings.TrimSpace(string(b)), nil
|
||||
}
|
||||
|
||||
func Output(ctx context.Context, cmd string, args ...string) (string, error) {
|
||||
return OutputAt(ctx, ".", cmd, args...)
|
||||
}
|
||||
@@ -1,56 +0,0 @@
|
||||
package frontend
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/config"
|
||||
"github.com/grafana/grafana/pkg/build/errutil"
|
||||
"github.com/grafana/grafana/pkg/build/lerna"
|
||||
"github.com/grafana/grafana/pkg/build/syncutil"
|
||||
)
|
||||
|
||||
func BuildFrontendPackages(version string, edition config.Edition, grafanaDir string, p syncutil.WorkerPool, g *errutil.Group) error {
|
||||
p.Schedule(g.Wrap(func() error {
|
||||
if err := lerna.BuildFrontendPackages(version, edition, grafanaDir); err != nil {
|
||||
return fmt.Errorf("failed to build %s frontend packages: %v", edition, err)
|
||||
}
|
||||
|
||||
log.Printf("Finished building %s frontend packages", string(edition))
|
||||
return nil
|
||||
}))
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Build builds the Grafana front-end
|
||||
func Build(edition config.Edition, grafanaDir string, p syncutil.WorkerPool, g *errutil.Group) error {
|
||||
log.Printf("Building %s frontend in %q", edition, grafanaDir)
|
||||
grafanaDir, err := filepath.Abs(grafanaDir)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, dpath := range []string{"tmp", "public_gen", "public/build"} {
|
||||
dpath = filepath.Join(grafanaDir, dpath)
|
||||
if err := os.RemoveAll(dpath); err != nil {
|
||||
return fmt.Errorf("failed to remove %q: %w", dpath, err)
|
||||
}
|
||||
}
|
||||
|
||||
p.Schedule(g.Wrap(func() error {
|
||||
cmd := exec.Command("yarn", "run", "build")
|
||||
cmd.Dir = grafanaDir
|
||||
if output, err := cmd.CombinedOutput(); err != nil {
|
||||
return fmt.Errorf("failed to build %s frontend with webpack: %s", edition, output)
|
||||
}
|
||||
|
||||
log.Printf("Finished building %s frontend", edition)
|
||||
return nil
|
||||
}))
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -1,42 +0,0 @@
|
||||
package frontend
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/blang/semver/v4"
|
||||
"github.com/grafana/grafana/pkg/build/config"
|
||||
"github.com/urfave/cli/v2"
|
||||
)
|
||||
|
||||
const GrafanaDir = "."
|
||||
|
||||
func GetConfig(c *cli.Context, metadata config.Metadata) (config.Config, config.Edition, error) {
|
||||
cfg := config.Config{
|
||||
NumWorkers: c.Int("jobs"),
|
||||
GitHubToken: c.String("github-token"),
|
||||
}
|
||||
|
||||
mode := config.Edition(c.String("edition"))
|
||||
|
||||
if metadata.ReleaseMode.Mode == config.TagMode && !metadata.ReleaseMode.IsTest {
|
||||
packageJSONVersion, err := config.GetPackageJSONVersion(GrafanaDir)
|
||||
if err != nil {
|
||||
return config.Config{}, "", err
|
||||
}
|
||||
semverGrafanaVersion, err := semver.Parse(metadata.GrafanaVersion)
|
||||
if err != nil {
|
||||
return config.Config{}, "", err
|
||||
}
|
||||
semverPackageJSONVersion, err := semver.Parse(packageJSONVersion)
|
||||
if err != nil {
|
||||
return config.Config{}, "", err
|
||||
}
|
||||
// Check if the semver digits of the tag are not equal
|
||||
if semverGrafanaVersion.FinalizeVersion() != semverPackageJSONVersion.FinalizeVersion() {
|
||||
return config.Config{}, "", cli.Exit(fmt.Errorf("package.json version and input tag version differ %s != %s.\nPlease update package.json", packageJSONVersion, metadata.GrafanaVersion), 1)
|
||||
}
|
||||
}
|
||||
|
||||
cfg.PackageVersion = metadata.GrafanaVersion
|
||||
return cfg, mode, nil
|
||||
}
|
||||
@@ -1,118 +0,0 @@
|
||||
package frontend
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"flag"
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/urfave/cli/v2"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/config"
|
||||
)
|
||||
|
||||
// Flag names used to build the test CLI contexts below.
const (
	jobs        = "jobs"
	githubToken = "github-token"
	buildID     = "build-id"
)

// packageJson models the subset of package.json these tests write and read.
type packageJson struct {
	Version string `json:"version"`
}

// flagObj is a name/value pair used to register string flags on a FlagSet.
type flagObj struct {
	name  string
	value string
}

// app is a shared cli.App used to construct cli.Contexts in the tests.
var app = cli.NewApp()
|
||||
|
||||
// TestGetConfig verifies GetConfig's package.json/tag version validation
// across release modes: matching tags pass, mismatching tags fail, and test
// tags or non-tag events skip the check entirely.
func TestGetConfig(t *testing.T) {
	tests := []struct {
		ctx                *cli.Context
		name               string
		packageJsonVersion string
		metadata           config.Metadata
		wantErr            bool
	}{
		{
			ctx:                cli.NewContext(app, setFlags(t, flag.NewFlagSet("flagSet", flag.ContinueOnError), flagObj{name: jobs, value: "2"}, flagObj{name: githubToken, value: "token"}), nil),
			name:               "package.json matches tag",
			packageJsonVersion: "10.0.0",
			metadata:           config.Metadata{GrafanaVersion: "10.0.0", ReleaseMode: config.ReleaseMode{Mode: config.TagMode}},
			wantErr:            false,
		},
		{
			// Prerelease suffixes are stripped by FinalizeVersion, so this passes.
			ctx:                cli.NewContext(app, setFlags(t, flag.NewFlagSet("flagSet", flag.ContinueOnError), flagObj{name: jobs, value: "2"}, flagObj{name: githubToken, value: "token"}), nil),
			name:               "custom tag, package.json doesn't match",
			packageJsonVersion: "10.0.0",
			metadata:           config.Metadata{GrafanaVersion: "10.0.0-abcd123pre", ReleaseMode: config.ReleaseMode{Mode: config.TagMode}},
			wantErr:            false,
		},
		{
			// Differing major.minor.patch digits must be rejected.
			ctx:                cli.NewContext(app, setFlags(t, flag.NewFlagSet("flagSet", flag.ContinueOnError), flagObj{name: jobs, value: "2"}, flagObj{name: githubToken, value: "token"}), nil),
			name:               "package.json doesn't match tag",
			packageJsonVersion: "10.1.0",
			metadata:           config.Metadata{GrafanaVersion: "10.0.0", ReleaseMode: config.ReleaseMode{Mode: config.TagMode}},
			wantErr:            true,
		},
		{
			// IsTest disables the version check entirely.
			ctx:                cli.NewContext(app, setFlags(t, flag.NewFlagSet("flagSet", flag.ContinueOnError), flagObj{name: jobs, value: "2"}, flagObj{name: githubToken, value: "token"}), nil),
			name:               "test tag event, check should be skipped",
			packageJsonVersion: "10.1.0",
			metadata:           config.Metadata{GrafanaVersion: "10.1.0-test", ReleaseMode: config.ReleaseMode{Mode: config.TagMode, IsTest: true}},
			wantErr:            false,
		},
		{
			// Non-tag events (e.g. pull requests) also skip the check.
			ctx:                cli.NewContext(app, setFlags(t, flag.NewFlagSet("flagSet", flag.ContinueOnError), flagObj{name: jobs, value: "2"}, flagObj{name: githubToken, value: "token"}, flagObj{name: buildID, value: "12345"}), nil),
			name:               "non-tag event",
			packageJsonVersion: "10.1.0-pre",
			metadata:           config.Metadata{GrafanaVersion: "10.1.0-12345pre", ReleaseMode: config.ReleaseMode{Mode: config.PullRequestMode}},
			wantErr:            false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// GetConfig reads package.json from disk, so write one per case.
			err := createTempPackageJson(t, tt.packageJsonVersion)
			require.NoError(t, err)

			got, _, err := GetConfig(tt.ctx, tt.metadata)
			if !tt.wantErr {
				// On success the package version is taken from the metadata.
				require.Equal(t, got.PackageVersion, tt.metadata.GrafanaVersion)
			}

			if tt.wantErr {
				// On failure the returned config is the zero value.
				require.Equal(t, got.PackageVersion, "")
				require.Error(t, err)
			}
		})
	}
}
|
||||
|
||||
func setFlags(t *testing.T, flagSet *flag.FlagSet, flags ...flagObj) *flag.FlagSet {
|
||||
t.Helper()
|
||||
for _, f := range flags {
|
||||
if f.name != "" {
|
||||
flagSet.StringVar(&f.name, f.name, f.value, "")
|
||||
}
|
||||
}
|
||||
return flagSet
|
||||
}
|
||||
|
||||
func createTempPackageJson(t *testing.T, version string) error {
|
||||
t.Helper()
|
||||
|
||||
data := packageJson{Version: version}
|
||||
file, _ := json.MarshalIndent(data, "", " ")
|
||||
|
||||
err := os.WriteFile("package.json", file, 0644)
|
||||
require.NoError(t, err)
|
||||
|
||||
t.Cleanup(func() {
|
||||
err := os.RemoveAll("package.json")
|
||||
require.NoError(t, err)
|
||||
})
|
||||
return nil
|
||||
}
|
||||
@@ -1,65 +0,0 @@
|
||||
package fsutil
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
// CopyRecursive copies files and directories recursively.
|
||||
func CopyRecursive(src, dst string) error {
|
||||
sfi, err := os.Stat(src)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if !sfi.IsDir() {
|
||||
return CopyFile(src, dst)
|
||||
}
|
||||
|
||||
if _, err := os.Stat(dst); os.IsNotExist(err) {
|
||||
if err := os.MkdirAll(dst, sfi.Mode()); err != nil {
|
||||
return fmt.Errorf("failed to create directory %q: %s", dst, err)
|
||||
}
|
||||
}
|
||||
|
||||
entries, err := os.ReadDir(src)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, entry := range entries {
|
||||
srcPath := filepath.Join(src, entry.Name())
|
||||
dstPath := filepath.Join(dst, entry.Name())
|
||||
|
||||
srcFi, err := os.Stat(srcPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
switch srcFi.Mode() & os.ModeType {
|
||||
case os.ModeDir:
|
||||
if err := CopyRecursive(srcPath, dstPath); err != nil {
|
||||
return err
|
||||
}
|
||||
case os.ModeSymlink:
|
||||
link, err := os.Readlink(srcPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err := os.Symlink(link, dstPath); err != nil {
|
||||
return err
|
||||
}
|
||||
default:
|
||||
if err := CopyFile(srcPath, dstPath); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if srcFi.Mode()&os.ModeSymlink != 0 {
|
||||
if err := os.Chmod(dstPath, srcFi.Mode()); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -1,43 +0,0 @@
|
||||
package fsutil
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
)
|
||||
|
||||
// CreateTempFile generates a temp filepath, based on the provided suffix.
|
||||
// A typical generated path looks like /var/folders/abcd/abcdefg/A/1137975807.
|
||||
func CreateTempFile(sfx string) (string, error) {
|
||||
var suffix string
|
||||
if sfx != "" {
|
||||
suffix = fmt.Sprintf("*-%s", sfx)
|
||||
} else {
|
||||
suffix = sfx
|
||||
}
|
||||
f, err := os.CreateTemp("", suffix)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if err := f.Close(); err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return f.Name(), nil
|
||||
}
|
||||
|
||||
// CreateTempDir generates a temp directory, based on the provided suffix.
|
||||
// A typical generated path looks like /var/folders/abcd/abcdefg/A/1137975807/.
|
||||
func CreateTempDir(sfx string) (string, error) {
|
||||
var suffix string
|
||||
if sfx != "" {
|
||||
suffix = fmt.Sprintf("*-%s", sfx)
|
||||
} else {
|
||||
suffix = sfx
|
||||
}
|
||||
dir, err := os.MkdirTemp("", suffix)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return dir, nil
|
||||
}
|
||||
@@ -1,48 +0,0 @@
|
||||
package fsutil
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestCreateTempFile(t *testing.T) {
|
||||
t.Run("empty suffix, expects pattern like: /var/folders/abcd/abcdefg/A/1137975807", func(t *testing.T) {
|
||||
filePath, err := CreateTempFile("")
|
||||
require.NoError(t, err)
|
||||
|
||||
pathParts := strings.Split(filePath, "/")
|
||||
require.Greater(t, len(pathParts), 1)
|
||||
require.Len(t, strings.Split(pathParts[len(pathParts)-1], "-"), 1)
|
||||
})
|
||||
|
||||
t.Run("non-empty suffix, expects /var/folders/abcd/abcdefg/A/1137975807-foobar", func(t *testing.T) {
|
||||
filePath, err := CreateTempFile("foobar")
|
||||
require.NoError(t, err)
|
||||
|
||||
pathParts := strings.Split(filePath, "/")
|
||||
require.Greater(t, len(pathParts), 1)
|
||||
require.Len(t, strings.Split(pathParts[len(pathParts)-1], "-"), 2)
|
||||
})
|
||||
}
|
||||
|
||||
func TestCreateTempDir(t *testing.T) {
|
||||
t.Run("empty suffix, expects pattern like: /var/folders/abcd/abcdefg/A/1137975807/", func(t *testing.T) {
|
||||
filePath, err := CreateTempFile("")
|
||||
require.NoError(t, err)
|
||||
|
||||
pathParts := strings.Split(filePath, "/")
|
||||
require.Greater(t, len(pathParts), 1)
|
||||
require.Len(t, strings.Split(pathParts[len(pathParts)-1], "-"), 1)
|
||||
})
|
||||
|
||||
t.Run("non-empty suffix, expects /var/folders/abcd/abcdefg/A/1137975807-foobar/", func(t *testing.T) {
|
||||
filePath, err := CreateTempFile("foobar")
|
||||
require.NoError(t, err)
|
||||
|
||||
pathParts := strings.Split(filePath, "/")
|
||||
require.Greater(t, len(pathParts), 1)
|
||||
require.Len(t, strings.Split(pathParts[len(pathParts)-1], "-"), 2)
|
||||
})
|
||||
}
|
||||
72
pkg/build/gcom/url.go
Normal file
72
pkg/build/gcom/url.go
Normal file
@@ -0,0 +1,72 @@
|
||||
package gcom
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/url"
|
||||
"path"
|
||||
"strings"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/versions"
|
||||
)
|
||||
|
||||
func PackageName(grafana, distro, arch, version, ext string, musl bool, raspberryPi bool) string {
|
||||
v := versions.ParseSemver(version)
|
||||
|
||||
if raspberryPi {
|
||||
grafana += "-rpi"
|
||||
}
|
||||
|
||||
versionString := strings.Join([]string{v.Major, v.Minor, v.Patch}, ".")
|
||||
fmt.Println("Version string:", versionString)
|
||||
if distro == "deb" {
|
||||
if v.BuildMetadata != "" {
|
||||
versionString += "+" + strings.ReplaceAll(v.BuildMetadata, "-", "~")
|
||||
}
|
||||
|
||||
if v.Prerelease != "" {
|
||||
versionString += "~" + v.Prerelease
|
||||
}
|
||||
|
||||
return strings.Join([]string{grafana, versionString, arch}, "_") + "." + ext
|
||||
}
|
||||
|
||||
if distro == "rhel" {
|
||||
if v.BuildMetadata != "" {
|
||||
versionString += "+" + strings.ReplaceAll(v.BuildMetadata, "-", "~")
|
||||
}
|
||||
|
||||
if v.Prerelease != "" {
|
||||
versionString += "~" + v.Prerelease
|
||||
}
|
||||
|
||||
versionString += "-1"
|
||||
|
||||
// Notable difference between our deb naming and our RPM naming: the file ends with `.arch.ext`, not
|
||||
// `_arch.ext`.
|
||||
return strings.Join([]string{grafana, versionString}, "-") + "." + arch + "." + ext
|
||||
}
|
||||
|
||||
if v.Prerelease != "" {
|
||||
versionString += "-" + v.Prerelease
|
||||
}
|
||||
|
||||
if v.BuildMetadata != "" {
|
||||
versionString += "+" + v.BuildMetadata
|
||||
}
|
||||
|
||||
if musl {
|
||||
arch += "-musl"
|
||||
}
|
||||
|
||||
// grafana-enterprise-1.2.3+example-01.linux-amd64.tar.gz
|
||||
return fmt.Sprintf("%s-%s.%s-%s.%s", grafana, versionString, distro, arch, ext)
|
||||
}
|
||||
|
||||
func GetURL(baseURL *url.URL, version, grafana, distro, arch, ext string, musl, raspberryPi bool) *url.URL {
|
||||
packageName := PackageName(grafana, distro, arch, version, ext, musl, raspberryPi)
|
||||
return &url.URL{
|
||||
Host: baseURL.Host,
|
||||
Scheme: baseURL.Scheme,
|
||||
Path: path.Join(baseURL.Path, packageName),
|
||||
}
|
||||
}
|
||||
367
pkg/build/gcom/url_test.go
Normal file
367
pkg/build/gcom/url_test.go
Normal file
@@ -0,0 +1,367 @@
|
||||
package gcom_test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/gcom"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestPackageName(t *testing.T) {
|
||||
type args struct {
|
||||
Distro string
|
||||
Arch string
|
||||
Version string
|
||||
Ext string
|
||||
Musl bool
|
||||
RaspberryPi bool
|
||||
|
||||
Expect string
|
||||
}
|
||||
|
||||
cases := []args{
|
||||
{
|
||||
RaspberryPi: true,
|
||||
Distro: "deb",
|
||||
Arch: "armhf",
|
||||
Version: "1.2.3",
|
||||
Ext: "deb",
|
||||
Expect: "grafana-rpi_1.2.3_armhf.deb",
|
||||
},
|
||||
{
|
||||
Distro: "deb",
|
||||
Arch: "arm64",
|
||||
Version: "1.2.3",
|
||||
Ext: "deb",
|
||||
Expect: "grafana_1.2.3_arm64.deb",
|
||||
},
|
||||
{
|
||||
Distro: "rhel",
|
||||
Arch: "aarch64",
|
||||
Version: "1.2.3",
|
||||
Ext: "rpm",
|
||||
Expect: "grafana-1.2.3-1.aarch64.rpm",
|
||||
},
|
||||
{
|
||||
Distro: "rhel",
|
||||
Arch: "aarch64",
|
||||
Ext: "rpm.sha256",
|
||||
Version: "1.2.3",
|
||||
Expect: "grafana-1.2.3-1.aarch64.rpm.sha256",
|
||||
},
|
||||
{
|
||||
Distro: "rhel",
|
||||
Ext: "rpm",
|
||||
Version: "1.2.3",
|
||||
Arch: "x86_64",
|
||||
Expect: "grafana-1.2.3-1.x86_64.rpm",
|
||||
},
|
||||
{
|
||||
Distro: "rhel",
|
||||
Ext: "rpm.sha256",
|
||||
Version: "1.2.3",
|
||||
Arch: "x86_64",
|
||||
Expect: "grafana-1.2.3-1.x86_64.rpm.sha256",
|
||||
},
|
||||
{
|
||||
Distro: "darwin",
|
||||
Ext: "tar.gz",
|
||||
Version: "1.2.3",
|
||||
Arch: "amd64",
|
||||
Expect: "grafana-1.2.3.darwin-amd64.tar.gz",
|
||||
},
|
||||
{
|
||||
Distro: "darwin",
|
||||
Ext: "tar.gz.sha256",
|
||||
Version: "1.2.3",
|
||||
Arch: "amd64",
|
||||
Expect: "grafana-1.2.3.darwin-amd64.tar.gz.sha256",
|
||||
},
|
||||
{
|
||||
Distro: "darwin",
|
||||
Ext: "tar.gz",
|
||||
Version: "1.2.3",
|
||||
Arch: "arm64",
|
||||
Expect: "grafana-1.2.3.darwin-arm64-musl.tar.gz",
|
||||
Musl: true,
|
||||
},
|
||||
{
|
||||
Distro: "darwin",
|
||||
Ext: "tar.gz.sha256",
|
||||
Version: "1.2.3",
|
||||
Arch: "arm64",
|
||||
Expect: "grafana-1.2.3.darwin-arm64-musl.tar.gz.sha256",
|
||||
Musl: true,
|
||||
},
|
||||
{
|
||||
Distro: "darwin",
|
||||
Ext: "tar.gz",
|
||||
Version: "1.2.3",
|
||||
Arch: "arm64",
|
||||
Expect: "grafana-1.2.3.darwin-arm64.tar.gz",
|
||||
},
|
||||
{
|
||||
Distro: "darwin",
|
||||
Ext: "tar.gz.sha256",
|
||||
Version: "1.2.3",
|
||||
Arch: "arm64",
|
||||
Expect: "grafana-1.2.3.darwin-arm64.tar.gz.sha256",
|
||||
},
|
||||
{
|
||||
Distro: "linux",
|
||||
Ext: "tar.gz",
|
||||
Version: "1.2.3",
|
||||
Arch: "amd64",
|
||||
Expect: "grafana-1.2.3.linux-amd64-musl.tar.gz",
|
||||
Musl: true,
|
||||
},
|
||||
{
|
||||
Distro: "linux",
|
||||
Ext: "tar.gz.sha256",
|
||||
Version: "1.2.3",
|
||||
Arch: "amd64",
|
||||
Expect: "grafana-1.2.3.linux-amd64-musl.tar.gz.sha256",
|
||||
Musl: true,
|
||||
},
|
||||
{
|
||||
Distro: "linux",
|
||||
Ext: "tar.gz",
|
||||
Version: "1.2.3",
|
||||
Arch: "amd64",
|
||||
Expect: "grafana-1.2.3.linux-amd64.tar.gz",
|
||||
},
|
||||
{
|
||||
Distro: "linux",
|
||||
Ext: "tar.gz.sha256",
|
||||
Version: "1.2.3",
|
||||
Arch: "amd64",
|
||||
Expect: "grafana-1.2.3.linux-amd64.tar.gz.sha256",
|
||||
},
|
||||
{
|
||||
Distro: "linux",
|
||||
Ext: "tar.gz",
|
||||
Version: "1.2.3",
|
||||
Arch: "arm64",
|
||||
Expect: "grafana-1.2.3.linux-arm64-musl.tar.gz",
|
||||
Musl: true,
|
||||
},
|
||||
{
|
||||
Distro: "linux",
|
||||
Ext: "tar.gz.sha256",
|
||||
Version: "1.2.3",
|
||||
Arch: "arm64",
|
||||
Expect: "grafana-1.2.3.linux-arm64-musl.tar.gz.sha256",
|
||||
Musl: true,
|
||||
},
|
||||
{
|
||||
Distro: "linux",
|
||||
Ext: "tar.gz",
|
||||
Version: "1.2.3",
|
||||
Arch: "arm64",
|
||||
Expect: "grafana-1.2.3.linux-arm64.tar.gz",
|
||||
},
|
||||
{
|
||||
Ext: "tar.gz.sha256",
|
||||
Version: "1.2.3",
|
||||
Distro: "linux",
|
||||
Arch: "arm64",
|
||||
Expect: "grafana-1.2.3.linux-arm64.tar.gz.sha256",
|
||||
},
|
||||
{
|
||||
Ext: "tar.gz",
|
||||
Version: "1.2.3",
|
||||
Distro: "linux",
|
||||
Arch: "armv6",
|
||||
Expect: "grafana-1.2.3.linux-armv6.tar.gz",
|
||||
},
|
||||
{
|
||||
Ext: "tar.gz.sha256",
|
||||
Version: "1.2.3",
|
||||
Distro: "linux",
|
||||
Arch: "armv6",
|
||||
Expect: "grafana-1.2.3.linux-armv6.tar.gz.sha256",
|
||||
},
|
||||
{
|
||||
Ext: "tar.gz",
|
||||
Version: "1.2.3",
|
||||
Distro: "linux",
|
||||
Arch: "armv7",
|
||||
Expect: "grafana-1.2.3.linux-armv7-musl.tar.gz",
|
||||
Musl: true,
|
||||
},
|
||||
{
|
||||
Ext: "tar.gz.sha256",
|
||||
Version: "1.2.3",
|
||||
Distro: "linux",
|
||||
Arch: "armv7",
|
||||
Expect: "grafana-1.2.3.linux-armv7-musl.tar.gz.sha256",
|
||||
Musl: true,
|
||||
},
|
||||
{
|
||||
Ext: "tar.gz",
|
||||
Version: "1.2.3",
|
||||
Distro: "linux",
|
||||
Arch: "armv7",
|
||||
Expect: "grafana-1.2.3.linux-armv7.tar.gz",
|
||||
},
|
||||
{
|
||||
Ext: "tar.gz.sha256",
|
||||
Version: "1.2.3",
|
||||
Distro: "linux",
|
||||
Arch: "armv7",
|
||||
Expect: "grafana-1.2.3.linux-armv7.tar.gz.sha256",
|
||||
},
|
||||
{
|
||||
Version: "1.2.3",
|
||||
Arch: "amd64",
|
||||
Ext: "exe",
|
||||
Distro: "windows",
|
||||
Expect: "grafana-1.2.3.windows-amd64.exe",
|
||||
},
|
||||
{
|
||||
Version: "1.2.3",
|
||||
Arch: "amd64",
|
||||
Distro: "windows",
|
||||
Ext: "exe.sha256",
|
||||
Expect: "grafana-1.2.3.windows-amd64.exe.sha256",
|
||||
},
|
||||
{
|
||||
Version: "1.2.3",
|
||||
Arch: "amd64",
|
||||
Distro: "windows",
|
||||
Ext: "msi",
|
||||
Expect: "grafana-1.2.3.windows-amd64.msi",
|
||||
},
|
||||
{
|
||||
Version: "1.2.3",
|
||||
Arch: "amd64",
|
||||
Distro: "windows",
|
||||
Ext: "msi.sha256",
|
||||
Expect: "grafana-1.2.3.windows-amd64.msi.sha256",
|
||||
},
|
||||
{
|
||||
Ext: "tar.gz",
|
||||
Version: "1.2.3",
|
||||
Distro: "windows",
|
||||
Expect: "grafana-1.2.3.windows-amd64.tar.gz",
|
||||
Arch: "amd64",
|
||||
},
|
||||
{
|
||||
Version: "1.2.3",
|
||||
Distro: "windows",
|
||||
Arch: "amd64",
|
||||
Ext: "tar.gz.sha256",
|
||||
Expect: "grafana-1.2.3.windows-amd64.tar.gz.sha256",
|
||||
},
|
||||
{
|
||||
Version: "1.2.3",
|
||||
Distro: "windows",
|
||||
Expect: "grafana-1.2.3.windows-amd64.zip",
|
||||
Ext: "zip",
|
||||
Arch: "amd64",
|
||||
},
|
||||
{
|
||||
Version: "1.2.3",
|
||||
Distro: "windows",
|
||||
Expect: "grafana-1.2.3.windows-amd64.zip.sha256",
|
||||
Ext: "zip.sha256",
|
||||
Arch: "amd64",
|
||||
},
|
||||
{
|
||||
Ext: "tar.gz",
|
||||
Version: "1.2.3",
|
||||
Distro: "windows",
|
||||
Arch: "arm64",
|
||||
Expect: "grafana-1.2.3.windows-arm64-musl.tar.gz",
|
||||
Musl: true,
|
||||
},
|
||||
{
|
||||
Version: "1.2.3",
|
||||
Ext: "tar.gz.sha256",
|
||||
Distro: "windows",
|
||||
Arch: "arm64",
|
||||
Expect: "grafana-1.2.3.windows-arm64-musl.tar.gz.sha256",
|
||||
Musl: true,
|
||||
},
|
||||
{
|
||||
Ext: "tar.gz",
|
||||
Version: "1.2.3",
|
||||
Distro: "windows",
|
||||
Arch: "arm64",
|
||||
Expect: "grafana-1.2.3.windows-arm64.tar.gz",
|
||||
},
|
||||
{
|
||||
Version: "1.2.3",
|
||||
Ext: "tar.gz.sha256",
|
||||
Distro: "windows",
|
||||
Arch: "arm64",
|
||||
Expect: "grafana-1.2.3.windows-arm64.tar.gz.sha256",
|
||||
},
|
||||
{
|
||||
RaspberryPi: true,
|
||||
Version: "1.2.3",
|
||||
Ext: "deb",
|
||||
Arch: "armhf",
|
||||
Distro: "deb",
|
||||
Expect: "grafana-rpi_1.2.3_armhf.deb",
|
||||
},
|
||||
{
|
||||
RaspberryPi: true,
|
||||
Version: "1.2.3",
|
||||
Ext: "deb.sha256",
|
||||
Distro: "deb",
|
||||
Arch: "armhf",
|
||||
Expect: "grafana-rpi_1.2.3_armhf.deb.sha256",
|
||||
},
|
||||
{
|
||||
Version: "1.2.3",
|
||||
Ext: "deb",
|
||||
Distro: "deb",
|
||||
Expect: "grafana_1.2.3_amd64.deb",
|
||||
Arch: "amd64",
|
||||
},
|
||||
{
|
||||
Version: "1.2.3",
|
||||
Ext: "deb.sha256",
|
||||
Distro: "deb",
|
||||
Expect: "grafana_1.2.3_amd64.deb.sha256",
|
||||
Arch: "amd64",
|
||||
},
|
||||
{
|
||||
Version: "1.2.3",
|
||||
Ext: "deb",
|
||||
Arch: "arm64",
|
||||
Distro: "deb",
|
||||
Expect: "grafana_1.2.3_arm64.deb",
|
||||
},
|
||||
{
|
||||
Version: "1.2.3",
|
||||
Ext: "deb.sha256",
|
||||
Arch: "arm64",
|
||||
Distro: "deb",
|
||||
Expect: "grafana_1.2.3_arm64.deb.sha256",
|
||||
},
|
||||
{
|
||||
Version: "1.2.3",
|
||||
Ext: "deb",
|
||||
Distro: "deb",
|
||||
Arch: "armhf",
|
||||
Expect: "grafana_1.2.3_armhf.deb",
|
||||
},
|
||||
{
|
||||
Version: "1.2.3",
|
||||
Ext: "deb.sha256",
|
||||
Arch: "armhf",
|
||||
Distro: "deb",
|
||||
Expect: "grafana_1.2.3_armhf.deb.sha256",
|
||||
},
|
||||
}
|
||||
|
||||
for i, v := range cases {
|
||||
t.Run(fmt.Sprintf("[%d / %d] %s", i+1, len(cases), v.Expect), func(t *testing.T) {
|
||||
n := gcom.PackageName("grafana", v.Distro, v.Arch, v.Version, v.Ext, v.Musl, v.RaspberryPi)
|
||||
require.Equal(t, v.Expect, n)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -4,13 +4,9 @@ import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"regexp"
|
||||
|
||||
"github.com/google/go-github/v45/github"
|
||||
"golang.org/x/oauth2"
|
||||
|
||||
"github.com/grafana/grafana/pkg/build/stringutil"
|
||||
)
|
||||
|
||||
const (
|
||||
@@ -47,19 +43,6 @@ type StatusesService interface {
|
||||
CreateStatus(ctx context.Context, owner, repo, ref string, status *github.RepoStatus) (*github.RepoStatus, *github.Response, error)
|
||||
}
|
||||
|
||||
// NewGitHubClient creates a new Client using the provided GitHub token if not empty.
|
||||
func NewGitHubClient(ctx context.Context, token string) *github.Client {
|
||||
var tc *http.Client
|
||||
if token != "" {
|
||||
ts := oauth2.StaticTokenSource(&oauth2.Token{
|
||||
AccessToken: token,
|
||||
})
|
||||
tc = oauth2.NewClient(ctx, ts)
|
||||
}
|
||||
|
||||
return github.NewClient(tc)
|
||||
}
|
||||
|
||||
func PRCheckRegexp() *regexp.Regexp {
|
||||
reBranch, err := regexp.Compile(`^prc-([0-9]+)-([A-Za-z0-9]+)\/(.+)$`)
|
||||
if err != nil {
|
||||
@@ -68,76 +51,3 @@ func PRCheckRegexp() *regexp.Regexp {
|
||||
|
||||
return reBranch
|
||||
}
|
||||
|
||||
func AddLabelToPR(ctx context.Context, client LabelsService, prID int, newLabel string) error {
|
||||
// Check existing labels
|
||||
labels, _, err := client.ListLabelsByIssue(ctx, RepoOwner, OSSRepo, prID, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
duplicate := false
|
||||
for _, label := range labels {
|
||||
if *label.Name == newLabel {
|
||||
duplicate = true
|
||||
continue
|
||||
}
|
||||
|
||||
// Delete existing "enterprise-xx" labels
|
||||
if stringutil.Contains(EnterpriseCheckLabels, *label.Name) {
|
||||
_, err := client.RemoveLabelForIssue(ctx, RepoOwner, OSSRepo, prID, *label.Name)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if duplicate {
|
||||
return nil
|
||||
}
|
||||
|
||||
_, _, err = client.AddLabelsToIssue(ctx, RepoOwner, OSSRepo, prID, []string{newLabel})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func DeleteEnterpriseBranch(ctx context.Context, client GitService, branchName string) error {
|
||||
ref := "heads/" + branchName
|
||||
if _, err := client.DeleteRef(ctx, RepoOwner, EnterpriseRepo, ref); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// CreateEnterpriseStatus sets the status on a commit for the enterprise build check.
|
||||
func CreateEnterpriseStatus(ctx context.Context, client StatusesService, sha, link, status string) (*github.RepoStatus, error) {
|
||||
check, _, err := client.CreateStatus(ctx, RepoOwner, OSSRepo, sha, &github.RepoStatus{
|
||||
Context: github.String(EnterpriseCheckName),
|
||||
Description: github.String(EnterpriseCheckDescription),
|
||||
TargetURL: github.String(link),
|
||||
State: github.String(status),
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return check, nil
|
||||
}
|
||||
|
||||
func CreateEnterpriseBuildFailedComment(ctx context.Context, client CommentService, link string, prID int) error {
|
||||
body := fmt.Sprintf("Drone build failed: %s", link)
|
||||
|
||||
_, _, err := client.CreateComment(ctx, RepoOwner, OSSRepo, prID, &github.IssueComment{
|
||||
Body: &body,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user