diff --git a/.betterer.results b/.betterer.results index 867fddaac20..f83e79923d8 100644 --- a/.betterer.results +++ b/.betterer.results @@ -3863,13 +3863,9 @@ exports[`better eslint`] = { [0, 0, 0, "No untranslated strings. Wrap text with ", "0"] ], "public/app/features/explore/TraceView/TraceView.tsx:5381": [ - [0, 0, 0, "Styles should be written using objects.", "0"], - [0, 0, 0, "No untranslated strings. Wrap text with ", "1"], - [0, 0, 0, "Do not use any type assertions.", "2"], - [0, 0, 0, "Do not use any type assertions.", "3"] - ], - "public/app/features/explore/TraceView/components/TracePageHeader/Actions/ActionButton.tsx:5381": [ - [0, 0, 0, "Styles should be written using objects.", "0"] + [0, 0, 0, "No untranslated strings. Wrap text with ", "0"], + [0, 0, 0, "Do not use any type assertions.", "1"], + [0, 0, 0, "Do not use any type assertions.", "2"] ], "public/app/features/explore/TraceView/components/TracePageHeader/Actions/TracePageActions.tsx:5381": [ [0, 0, 0, "No untranslated strings. Wrap text with ", "0"] @@ -3882,69 +3878,24 @@ exports[`better eslint`] = { [0, 0, 0, "No untranslated strings. Wrap text with ", "4"], [0, 0, 0, "No untranslated strings. Wrap text with ", "5"], [0, 0, 0, "No untranslated strings. Wrap text with ", "6"], - [0, 0, 0, "No untranslated strings. Wrap text with ", "7"], - [0, 0, 0, "Styles should be written using objects.", "8"], - [0, 0, 0, "Styles should be written using objects.", "9"], - [0, 0, 0, "Styles should be written using objects.", "10"], - [0, 0, 0, "Styles should be written using objects.", "11"], - [0, 0, 0, "Styles should be written using objects.", "12"], - [0, 0, 0, "Styles should be written using objects.", "13"] + [0, 0, 0, "No untranslated strings. Wrap text with ", "7"] ], "public/app/features/explore/TraceView/components/TracePageHeader/SearchBar/TracePageSearchBar.tsx:5381": [ [0, 0, 0, "No untranslated strings. Wrap text with ", "0"], [0, 0, 0, "No untranslated strings. Wrap text with ", "1"], - [0, 0, 0, "No untranslated strings. Wrap text with ", "2"], - [0, 0, 0, "Styles should be written using objects.", "3"], - [0, 0, 0, "Styles should be written using objects.", "4"], - [0, 0, 0, "Styles should be written using objects.", "5"], - [0, 0, 0, "Styles should be written using objects.", "6"], - [0, 0, 0, "Styles should be written using objects.", "7"] + [0, 0, 0, "No untranslated strings. Wrap text with ", "2"] ], "public/app/features/explore/TraceView/components/TracePageHeader/SpanFilters/SpanFilters.tsx:5381": [ [0, 0, 0, "\'HorizontalGroup\' import from \'@grafana/ui\' is restricted from being used by a pattern. Use Stack component instead.", "0"], - [0, 0, 0, "No untranslated strings. 
Wrap text with ", "1"], - [0, 0, 0, "Styles should be written using objects.", "2"], - [0, 0, 0, "Styles should be written using objects.", "3"], - [0, 0, 0, "Styles should be written using objects.", "4"], - [0, 0, 0, "Styles should be written using objects.", "5"], - [0, 0, 0, "Styles should be written using objects.", "6"], - [0, 0, 0, "Styles should be written using objects.", "7"] - ], - "public/app/features/explore/TraceView/components/TracePageHeader/SpanGraph/CanvasSpanGraph.tsx:5381": [ - [0, 0, 0, "Styles should be written using objects.", "0"] - ], - "public/app/features/explore/TraceView/components/TracePageHeader/SpanGraph/GraphTicks.tsx:5381": [ - [0, 0, 0, "Styles should be written using objects.", "0"] - ], - "public/app/features/explore/TraceView/components/TracePageHeader/SpanGraph/Scrubber.tsx:5381": [ - [0, 0, 0, "Styles should be written using objects.", "0"], - [0, 0, 0, "Styles should be written using objects.", "1"], - [0, 0, 0, "Styles should be written using objects.", "2"], - [0, 0, 0, "Styles should be written using objects.", "3"], - [0, 0, 0, "Styles should be written using objects.", "4"] - ], - "public/app/features/explore/TraceView/components/TracePageHeader/SpanGraph/TickLabels.tsx:5381": [ - [0, 0, 0, "Styles should be written using objects.", "0"], - [0, 0, 0, "Styles should be written using objects.", "1"] + [0, 0, 0, "No untranslated strings. Wrap text with ", "1"] ], "public/app/features/explore/TraceView/components/TracePageHeader/SpanGraph/ViewingLayer.tsx:5381": [ - [0, 0, 0, "Styles should be written using objects.", "0"], - [0, 0, 0, "Styles should be written using objects.", "1"], - [0, 0, 0, "Styles should be written using objects.", "2"], - [0, 0, 0, "Styles should be written using objects.", "3"], - [0, 0, 0, "Styles should be written using objects.", "4"], - [0, 0, 0, "Styles should be written using objects.", "5"], - [0, 0, 0, "Styles should be written using objects.", "6"], - [0, 0, 0, "Styles should be written using objects.", "7"], - [0, 0, 0, "No untranslated strings. Wrap text with ", "8"] + [0, 0, 0, "No untranslated strings. Wrap text with ", "0"] ], "public/app/features/explore/TraceView/components/TracePageHeader/TracePageHeader.tsx:5381": [ [0, 0, 0, "No untranslated strings. Wrap text with ", "0"], [0, 0, 0, "No untranslated strings. Wrap text with ", "1"], - [0, 0, 0, "No untranslated strings. Wrap text with ", "2"], - [0, 0, 0, "Styles should be written using objects.", "3"], - [0, 0, 0, "Styles should be written using objects.", "4"], - [0, 0, 0, "Styles should be written using objects.", "5"] + [0, 0, 0, "No untranslated strings. Wrap text with ", "2"] ], "public/app/features/explore/TraceView/components/TracePageHeader/index.tsx:5381": [ [0, 0, 0, "Do not re-export imported variable (\`./TracePageHeader\`)", "0"] @@ -3954,193 +3905,42 @@ exports[`better eslint`] = { [0, 0, 0, "No untranslated strings. Wrap text with ", "1"], [0, 0, 0, "No untranslated strings. 
Wrap text with ", "2"] ], - "public/app/features/explore/TraceView/components/TraceTimelineViewer/SpanBarRow.tsx:5381": [ - [0, 0, 0, "Styles should be written using objects.", "0"], - [0, 0, 0, "Styles should be written using objects.", "1"], - [0, 0, 0, "Styles should be written using objects.", "2"], - [0, 0, 0, "Styles should be written using objects.", "3"], - [0, 0, 0, "Styles should be written using objects.", "4"], - [0, 0, 0, "Styles should be written using objects.", "5"], - [0, 0, 0, "Styles should be written using objects.", "6"], - [0, 0, 0, "Styles should be written using objects.", "7"], - [0, 0, 0, "Styles should be written using objects.", "8"], - [0, 0, 0, "Styles should be written using objects.", "9"], - [0, 0, 0, "Styles should be written using objects.", "10"], - [0, 0, 0, "Styles should be written using objects.", "11"], - [0, 0, 0, "Styles should be written using objects.", "12"], - [0, 0, 0, "Styles should be written using objects.", "13"], - [0, 0, 0, "Styles should be written using objects.", "14"], - [0, 0, 0, "Styles should be written using objects.", "15"], - [0, 0, 0, "Styles should be written using objects.", "16"], - [0, 0, 0, "Styles should be written using objects.", "17"], - [0, 0, 0, "Styles should be written using objects.", "18"], - [0, 0, 0, "Styles should be written using objects.", "19"], - [0, 0, 0, "Styles should be written using objects.", "20"], - [0, 0, 0, "Styles should be written using objects.", "21"] - ], "public/app/features/explore/TraceView/components/TraceTimelineViewer/SpanDetail/AccordianKeyValues.tsx:5381": [ - [0, 0, 0, "Styles should be written using objects.", "0"], - [0, 0, 0, "Styles should be written using objects.", "1"], - [0, 0, 0, "Styles should be written using objects.", "2"], - [0, 0, 0, "Styles should be written using objects.", "3"], - [0, 0, 0, "Styles should be written using objects.", "4"], - [0, 0, 0, "Styles should be written using objects.", "5"], - [0, 0, 0, "Styles should be written using objects.", "6"], - [0, 0, 0, "Styles should be written using objects.", "7"], - [0, 0, 0, "No untranslated strings. Wrap text with ", "8"] + [0, 0, 0, "No untranslated strings. Wrap text with ", "0"] ], "public/app/features/explore/TraceView/components/TraceTimelineViewer/SpanDetail/AccordianLogs.tsx:5381": [ - [0, 0, 0, "Styles should be written using objects.", "0"], - [0, 0, 0, "Styles should be written using objects.", "1"], - [0, 0, 0, "Styles should be written using objects.", "2"], - [0, 0, 0, "Styles should be written using objects.", "3"], + [0, 0, 0, "No untranslated strings. Wrap text with ", "0"], + [0, 0, 0, "No untranslated strings. Wrap text with ", "1"], + [0, 0, 0, "No untranslated strings. Wrap text with ", "2"], + [0, 0, 0, "No untranslated strings. Wrap text with ", "3"], [0, 0, 0, "No untranslated strings. Wrap text with ", "4"], - [0, 0, 0, "No untranslated strings. Wrap text with ", "5"], - [0, 0, 0, "No untranslated strings. Wrap text with ", "6"], - [0, 0, 0, "No untranslated strings. Wrap text with ", "7"], - [0, 0, 0, "No untranslated strings. Wrap text with ", "8"], - [0, 0, 0, "No untranslated strings. Wrap text with ", "9"] + [0, 0, 0, "No untranslated strings. 
Wrap text with ", "5"] ], "public/app/features/explore/TraceView/components/TraceTimelineViewer/SpanDetail/AccordianReferences.tsx:5381": [ - [0, 0, 0, "Styles should be written using objects.", "0"], - [0, 0, 0, "Styles should be written using objects.", "1"], - [0, 0, 0, "Styles should be written using objects.", "2"], - [0, 0, 0, "Styles should be written using objects.", "3"], - [0, 0, 0, "Styles should be written using objects.", "4"], - [0, 0, 0, "Styles should be written using objects.", "5"], - [0, 0, 0, "Styles should be written using objects.", "6"], - [0, 0, 0, "Styles should be written using objects.", "7"], - [0, 0, 0, "Styles should be written using objects.", "8"], - [0, 0, 0, "Styles should be written using objects.", "9"], - [0, 0, 0, "Styles should be written using objects.", "10"], - [0, 0, 0, "Styles should be written using objects.", "11"], - [0, 0, 0, "Styles should be written using objects.", "12"], - [0, 0, 0, "Styles should be written using objects.", "13"], - [0, 0, 0, "No untranslated strings. Wrap text with ", "14"], - [0, 0, 0, "No untranslated strings. Wrap text with ", "15"], - [0, 0, 0, "No untranslated strings. Wrap text with ", "16"], - [0, 0, 0, "No untranslated strings. Wrap text with ", "17"] + [0, 0, 0, "No untranslated strings. Wrap text with ", "0"], + [0, 0, 0, "No untranslated strings. Wrap text with ", "1"], + [0, 0, 0, "No untranslated strings. Wrap text with ", "2"], + [0, 0, 0, "No untranslated strings. Wrap text with ", "3"] ], "public/app/features/explore/TraceView/components/TraceTimelineViewer/SpanDetail/AccordianText.tsx:5381": [ - [0, 0, 0, "Styles should be written using objects.", "0"], - [0, 0, 0, "No untranslated strings. Wrap text with ", "1"], - [0, 0, 0, "No untranslated strings. Wrap text with ", "2"] - ], - "public/app/features/explore/TraceView/components/TraceTimelineViewer/SpanDetail/KeyValuesTable.tsx:5381": [ - [0, 0, 0, "Styles should be written using objects.", "0"], - [0, 0, 0, "Styles should be written using objects.", "1"], - [0, 0, 0, "Styles should be written using objects.", "2"], - [0, 0, 0, "Styles should be written using objects.", "3"], - [0, 0, 0, "Styles should be written using objects.", "4"], - [0, 0, 0, "Styles should be written using objects.", "5"] + [0, 0, 0, "No untranslated strings. Wrap text with ", "0"], + [0, 0, 0, "No untranslated strings. Wrap text with ", "1"] ], "public/app/features/explore/TraceView/components/TraceTimelineViewer/SpanDetail/SpanFlameGraph.tsx:5381": [ [0, 0, 0, "No untranslated strings. 
Wrap text with ", "0"] ], - "public/app/features/explore/TraceView/components/TraceTimelineViewer/SpanDetail/TextList.tsx:5381": [ - [0, 0, 0, "Styles should be written using objects.", "0"], - [0, 0, 0, "Styles should be written using objects.", "1"], - [0, 0, 0, "Styles should be written using objects.", "2"] - ], "public/app/features/explore/TraceView/components/TraceTimelineViewer/SpanDetail/index.tsx:5381": [ - [0, 0, 0, "Styles should be written using objects.", "0"], - [0, 0, 0, "Styles should be written using objects.", "1"], - [0, 0, 0, "Styles should be written using objects.", "2"], - [0, 0, 0, "Styles should be written using objects.", "3"], - [0, 0, 0, "Styles should be written using objects.", "4"], - [0, 0, 0, "Styles should be written using objects.", "5"], - [0, 0, 0, "Styles should be written using objects.", "6"], - [0, 0, 0, "Styles should be written using objects.", "7"], - [0, 0, 0, "Styles should be written using objects.", "8"], - [0, 0, 0, "Styles should be written using objects.", "9"], - [0, 0, 0, "Styles should be written using objects.", "10"], - [0, 0, 0, "No untranslated strings. Wrap text with ", "11"] - ], - "public/app/features/explore/TraceView/components/TraceTimelineViewer/SpanDetailRow.tsx:5381": [ - [0, 0, 0, "Styles should be written using objects.", "0"], - [0, 0, 0, "Styles should be written using objects.", "1"] - ], - "public/app/features/explore/TraceView/components/TraceTimelineViewer/SpanLinks.tsx:5381": [ - [0, 0, 0, "Styles should be written using objects.", "0"], - [0, 0, 0, "Styles should be written using objects.", "1"], - [0, 0, 0, "Styles should be written using objects.", "2"], - [0, 0, 0, "Styles should be written using objects.", "3"] - ], - "public/app/features/explore/TraceView/components/TraceTimelineViewer/SpanTreeOffset.tsx:5381": [ - [0, 0, 0, "Styles should be written using objects.", "0"], - [0, 0, 0, "Styles should be written using objects.", "1"], - [0, 0, 0, "Styles should be written using objects.", "2"], - [0, 0, 0, "Styles should be written using objects.", "3"], - [0, 0, 0, "Styles should be written using objects.", "4"], - [0, 0, 0, "Styles should be written using objects.", "5"] - ], - "public/app/features/explore/TraceView/components/TraceTimelineViewer/Ticks.tsx:5381": [ - [0, 0, 0, "Styles should be written using objects.", "0"], - [0, 0, 0, "Styles should be written using objects.", "1"], - [0, 0, 0, "Styles should be written using objects.", "2"], - [0, 0, 0, "Styles should be written using objects.", "3"] - ], - "public/app/features/explore/TraceView/components/TraceTimelineViewer/TimelineHeaderRow/TimelineCollapser.tsx:5381": [ - [0, 0, 0, "Styles should be written using objects.", "0"] - ], - "public/app/features/explore/TraceView/components/TraceTimelineViewer/TimelineHeaderRow/TimelineColumnResizer.tsx:5381": [ - [0, 0, 0, "Styles should be written using objects.", "0"], - [0, 0, 0, "Styles should be written using objects.", "1"], - [0, 0, 0, "Styles should be written using objects.", "2"], - [0, 0, 0, "Styles should be written using objects.", "3"], - [0, 0, 0, "Styles should be written using objects.", "4"], - [0, 0, 0, "Styles should be written using objects.", "5"], - [0, 0, 0, "Styles should be written using objects.", "6"], - [0, 0, 0, "Styles should be written using objects.", "7"] + [0, 0, 0, "No untranslated strings. 
Wrap text with ", "0"] ], "public/app/features/explore/TraceView/components/TraceTimelineViewer/TimelineHeaderRow/TimelineHeaderRow.tsx:5381": [ - [0, 0, 0, "Styles should be written using objects.", "0"], - [0, 0, 0, "Styles should be written using objects.", "1"], - [0, 0, 0, "Styles should be written using objects.", "2"], - [0, 0, 0, "No untranslated strings. Wrap text with ", "3"] - ], - "public/app/features/explore/TraceView/components/TraceTimelineViewer/TimelineHeaderRow/TimelineViewingLayer.tsx:5381": [ - [0, 0, 0, "Styles should be written using objects.", "0"], - [0, 0, 0, "Styles should be written using objects.", "1"], - [0, 0, 0, "Styles should be written using objects.", "2"], - [0, 0, 0, "Styles should be written using objects.", "3"], - [0, 0, 0, "Styles should be written using objects.", "4"], - [0, 0, 0, "Styles should be written using objects.", "5"], - [0, 0, 0, "Styles should be written using objects.", "6"], - [0, 0, 0, "Styles should be written using objects.", "7"] + [0, 0, 0, "No untranslated strings. Wrap text with ", "0"] ], "public/app/features/explore/TraceView/components/TraceTimelineViewer/TimelineHeaderRow/index.tsx:5381": [ [0, 0, 0, "Do not re-export imported variable (\`./TimelineHeaderRow\`)", "0"] ], - "public/app/features/explore/TraceView/components/TraceTimelineViewer/VirtualizedTraceView.tsx:5381": [ - [0, 0, 0, "Styles should be written using objects.", "0"], - [0, 0, 0, "Styles should be written using objects.", "1"], - [0, 0, 0, "Styles should be written using objects.", "2"] - ], - "public/app/features/explore/TraceView/components/TraceTimelineViewer/index.tsx:5381": [ - [0, 0, 0, "Styles should be written using objects.", "0"] - ], "public/app/features/explore/TraceView/components/TraceTimelineViewer/utils.tsx:5381": [ [0, 0, 0, "Do not re-export imported variable (\`../utils/date\`)", "0"] ], - "public/app/features/explore/TraceView/components/common/BreakableText.tsx:5381": [ - [0, 0, 0, "Styles should be written using objects.", "0"] - ], - "public/app/features/explore/TraceView/components/common/CopyIcon.tsx:5381": [ - [0, 0, 0, "Styles should be written using objects.", "0"] - ], - "public/app/features/explore/TraceView/components/common/LabeledList.tsx:5381": [ - [0, 0, 0, "Styles should be written using objects.", "0"], - [0, 0, 0, "Styles should be written using objects.", "1"], - [0, 0, 0, "Styles should be written using objects.", "2"], - [0, 0, 0, "Styles should be written using objects.", "3"] - ], - "public/app/features/explore/TraceView/components/common/NewWindowIcon.tsx:5381": [ - [0, 0, 0, "Styles should be written using objects.", "0"] - ], - "public/app/features/explore/TraceView/components/common/TraceName.tsx:5381": [ - [0, 0, 0, "Styles should be written using objects.", "0"] - ], "public/app/features/explore/TraceView/components/demo/trace-generators.ts:5381": [ [0, 0, 0, "Do not use any type assertions.", "0"] ], @@ -4175,10 +3975,6 @@ exports[`better eslint`] = { "public/app/features/explore/TraceView/components/model/transform-trace-data.tsx:5381": [ [0, 0, 0, "Do not use any type assertions.", "0"] ], - "public/app/features/explore/TraceView/components/settings/SpanBarSettings.tsx:5381": [ - [0, 0, 0, "Styles should be written using objects.", "0"], - [0, 0, 0, "Styles should be written using objects.", "1"] - ], "public/app/features/explore/TraceView/components/types/index.tsx:5381": [ [0, 0, 0, "Do not re-export imported variable (\`./trace\`)", "0"], [0, 0, 0, "Do not re-export imported variable 
(\`../settings/SpanBarSettings\`)", "1"], @@ -4593,8 +4389,7 @@ exports[`better eslint`] = { "public/app/features/migrate-to-cloud/onprem/NameCell.tsx:5381": [ [0, 0, 0, "No untranslated strings. Wrap text with ", "0"], [0, 0, 0, "No untranslated strings. Wrap text with ", "1"], - [0, 0, 0, "No untranslated strings. Wrap text with ", "2"], - [0, 0, 0, "No untranslated strings. Wrap text with ", "3"] + [0, 0, 0, "No untranslated strings. Wrap text with ", "2"] ], "public/app/features/notifications/StoredNotifications.tsx:5381": [ [0, 0, 0, "No untranslated strings. Wrap text with ", "0"] @@ -5225,9 +5020,6 @@ exports[`better eslint`] = { "public/app/features/teams/state/reducers.ts:5381": [ [0, 0, 0, "Do not use any type assertions.", "0"] ], - "public/app/features/teams/state/selectors.ts:5381": [ - [0, 0, 0, "Unexpected any. Specify a different type.", "0"] - ], "public/app/features/templating/fieldAccessorCache.ts:5381": [ [0, 0, 0, "Unexpected any. Specify a different type.", "0"] ], diff --git a/.drone.yml b/.drone.yml index 207e0d20ecb..48d7046793b 100644 --- a/.drone.yml +++ b/.drone.yml @@ -4332,142 +4332,6 @@ volumes: clone: retries: 3 depends_on: [] -environment: - EDITION: oss -image_pull_secrets: -- gcr -- gar -kind: pipeline -name: release-test-frontend -node: - type: no-parallel -platform: - arch: amd64 - os: linux -services: [] -steps: -- commands: - - echo $DRONE_RUNNER_NAME - image: alpine:3.20.3 - name: identify-runner -- commands: - - yarn install --immutable || yarn install --immutable - depends_on: [] - image: node:20.9.0-alpine - name: yarn-install -- commands: - - apk add --update git bash - - yarn betterer ci - depends_on: - - yarn-install - image: node:20.9.0-alpine - name: betterer-frontend -- commands: - - yarn run ci:test-frontend - depends_on: - - yarn-install - environment: - TEST_MAX_WORKERS: 50% - image: node:20.9.0-alpine - name: test-frontend -trigger: - event: - exclude: - - promote - ref: - exclude: - - refs/tags/*-cloud* - include: - - refs/tags/v* -type: docker -volumes: -- host: - path: /var/run/docker.sock - name: docker ---- -clone: - retries: 3 -depends_on: [] -environment: - EDITION: oss -image_pull_secrets: -- gcr -- gar -kind: pipeline -name: release-test-backend -node: - type: no-parallel -platform: - arch: amd64 - os: linux -services: [] -steps: -- commands: - - echo $DRONE_RUNNER_NAME - image: alpine:3.20.3 - name: identify-runner -- commands: - - '# It is required that code generated from Thema/CUE be committed and in sync - with its inputs.' - - '# The following command will fail if running code generators produces any diff - in output.' - - apk add --update make - - CODEGEN_VERIFY=1 make gen-cue - depends_on: [] - image: golang:1.23.1-alpine - name: verify-gen-cue -- commands: - - '# It is required that generated jsonnet is committed and in sync with its inputs.' - - '# The following command will fail if running code generators produces any diff - in output.' - - apk add --update make - - CODEGEN_VERIFY=1 make gen-jsonnet - depends_on: [] - image: golang:1.23.1-alpine - name: verify-gen-jsonnet -- commands: - - apk add --update make - - make gen-go - depends_on: - - verify-gen-cue - image: golang:1.23.1-alpine - name: wire-install -- commands: - - apk add --update build-base shared-mime-info shared-mime-info-lang - - go list -f '{{.Dir}}/...' 
-m | xargs go test -short -covermode=atomic -timeout=5m - depends_on: - - wire-install - image: golang:1.23.1-alpine - name: test-backend -- commands: - - apk add --update build-base - - go test -count=1 -covermode=atomic -timeout=5m -run '^TestIntegration' $(find - ./pkg -type f -name '*_test.go' -exec grep -l '^func TestIntegration' '{}' '+' - | grep -o '\(.*\)/' | sort -u) - depends_on: - - wire-install - image: golang:1.23.1-alpine - name: test-backend-integration -trigger: - event: - exclude: - - promote - ref: - exclude: - - refs/tags/*-cloud* - include: - - refs/tags/v* -type: docker -volumes: -- host: - path: /var/run/docker.sock - name: docker ---- -clone: - retries: 3 -depends_on: -- release-test-backend -- release-test-frontend image_pull_secrets: - gcr - gar @@ -4648,9 +4512,7 @@ volumes: --- clone: retries: 3 -depends_on: -- release-test-backend -- release-test-frontend +depends_on: [] image_pull_secrets: - gcr - gar @@ -6151,6 +6013,6 @@ kind: secret name: gcr_credentials --- kind: signature -hmac: 766cd43d479f82bdb5bbaa3b48ed87ad13ea71d3418deb5d0c89ec7b77ae0475 +hmac: e618274ea7a8bfbf3d5e151d459348aa9382fe63fe7fef76c997db3cba74779f ... diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 9e0a8720582..aea94e92eeb 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -298,6 +298,7 @@ /pkg/modules/ @grafana/grafana-app-platform-squad /pkg/services/grpcserver/ @grafana/grafana-search-and-storage /pkg/generated @grafana/grafana-app-platform-squad +/pkg/services/unifiedSearch/ @grafana/grafana-search-and-storage # Alerting /pkg/services/ngalert/ @grafana/alerting-backend diff --git a/.github/workflows/detect-breaking-changes-levitate.yml b/.github/workflows/detect-breaking-changes-levitate.yml index b35e86724fa..8da09bdd753 100644 --- a/.github/workflows/detect-breaking-changes-levitate.yml +++ b/.github/workflows/detect-breaking-changes-levitate.yml @@ -2,6 +2,10 @@ --- name: Levitate / Detect breaking changes in PR +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + on: pull_request: paths: @@ -11,7 +15,7 @@ on: jobs: buildPR: - name: Build PR + name: Build PR packages artifacts runs-on: ubuntu-latest defaults: run: @@ -27,7 +31,7 @@ jobs: - name: Get yarn cache directory path id: yarn-cache-dir-path - run: echo "dir=$(yarn config get cacheFolder)" >> $GITHUB_OUTPUT + run: echo "dir=$(yarn config get cacheFolder)" >> "$GITHUB_OUTPUT" - name: Restore yarn cache uses: actions/cache@v4 @@ -57,7 +61,7 @@ jobs: path: './pr/pr_built_packages.zip' buildBase: - name: Build Base + name: Build Base packages artifacts runs-on: ubuntu-latest defaults: run: @@ -75,7 +79,7 @@ jobs: - name: Get yarn cache directory path id: yarn-cache-dir-path - run: echo "dir=$(yarn config get cacheFolder)" >> $GITHUB_OUTPUT + run: echo "dir=$(yarn config get cacheFolder)" >> "$GITHUB_OUTPUT" - name: Restore yarn cache uses: actions/cache@v4 @@ -105,7 +109,7 @@ jobs: path: './base/base_built_packages.zip' Detect: - name: Detect breaking changes + name: Detect breaking changes between PR and base runs-on: ubuntu-latest needs: ['buildPR', 'buildBase'] env: @@ -179,7 +183,7 @@ jobs: Report: - name: Report breaking changes in PR + name: Report breaking changes in PR comment runs-on: ubuntu-latest needs: ['Detect'] @@ -234,9 +238,9 @@ jobs: echo 'levitate_markdown<> $GITHUB_OUTPUT + } >> "$GITHUB_OUTPUT" else - echo "levitate_markdown=No breaking changes detected" >> $GITHUB_OUTPUT + echo "levitate_markdown=No breaking changes detected" >> "$GITHUB_OUTPUT" fi @@ 
-253,7 +257,6 @@ jobs: ${{ steps.levitate-markdown.outputs.levitate_markdown }} [Read our guideline](https://github.com/grafana/grafana/blob/main/contribute/breaking-changes-guide/breaking-changes-guide.md) - [Console output](${{ steps.levitate-run.outputs.job_link }}) * Your pull request merge won't be blocked. GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }} @@ -359,5 +362,6 @@ jobs: if [ "${{ steps.levitate-run.outputs.exit_code }}" -ne 0 ]; then echo "Breaking changes detected. Please check the levitate report in your pull request. This workflow won't block merging." fi + exit ${{ steps.levitate-run.outputs.exit_code }} shell: bash diff --git a/Makefile b/Makefile index 6f61e20a974..9346e4b9b10 100644 --- a/Makefile +++ b/Makefile @@ -18,6 +18,9 @@ GO_BUILD_FLAGS += $(if $(GO_BUILD_DEV),-dev) GO_BUILD_FLAGS += $(if $(GO_BUILD_TAGS),-build-tags=$(GO_BUILD_TAGS)) GO_BUILD_FLAGS += $(GO_RACE_FLAG) +# GNU xargs has flag -r, and BSD xargs (e.g. MacOS) has that behaviour by default +XARGSR = $(shell xargs --version 2>&1 | grep -q GNU && echo xargs -r || echo xargs) + targets := $(shell echo '$(sources)' | tr "," " ") GO_INTEGRATION_TESTS := $(shell find ./pkg -type f -name '*_test.go' -exec grep -l '^func TestIntegration' '{}' '+' | grep -o '\(.*\)/' | sort -u) @@ -303,6 +306,15 @@ golangci-lint: $(GOLANGCI_LINT) .PHONY: lint-go lint-go: golangci-lint ## Run all code checks for backend. You can use GO_LINT_FILES to specify exact files to check +.PHONY: lint-go-diff +lint-go-diff: $(GOLANGCI_LINT) + git diff --name-only remotes/origin/main | \ + grep '\.go$$' | \ + $(XARGSR) dirname | \ + sort -u | \ + sed 's,^,./,' | \ + $(XARGSR) $(GOLANGCI_LINT) run --config .golangci.toml + # with disabled SC1071 we are ignored some TCL,Expect `/usr/bin/env expect` scripts .PHONY: shellcheck shellcheck: $(SH_FILES) ## Run checks for shell scripts. diff --git a/conf/defaults.ini b/conf/defaults.ini index cb3965cc554..662e300834f 100644 --- a/conf/defaults.ini +++ b/conf/defaults.ini @@ -1346,6 +1346,11 @@ notification_log_retention = 5d # Duration for which a resolved alert state transition will continue to be sent to the Alertmanager. resolved_alert_retention = 15m +# Defines the limit of how many alert rule versions +# should be stored in the database for each alert rule in an organization including the current one. +# 0 value means no limit +rule_version_record_limit = 0 + [unified_alerting.screenshots] # Enable screenshots in notifications. You must have either installed the Grafana image rendering # plugin, or set up Grafana to use a remote rendering service. @@ -1560,8 +1565,8 @@ expire_time = 7 #################################### Internal Grafana Metrics ############ # Metrics available at HTTP URL /metrics and /metrics/plugins/:pluginId [metrics] -enabled = true -interval_seconds = 10 +enabled = true +interval_seconds = 10 # Disable total stats (stat_totals_*) metrics to be generated disable_total_stats = false # The interval at which the total stats collector will update the stats. Default is 1800 seconds. diff --git a/conf/sample.ini b/conf/sample.ini index 099de883121..366b04d884a 100644 --- a/conf/sample.ini +++ b/conf/sample.ini @@ -1335,6 +1335,11 @@ # Duration for which a resolved alert state transition will continue to be sent to the Alertmanager. ;resolved_alert_retention = 15m +# Defines the limit of how many alert rule versions +# should be stored in the database for each alert rule in an organization including the current one. 
+# 0 value means no limit +;rule_version_record_limit= 0 + [unified_alerting.screenshots] # Enable screenshots in notifications. You must have either installed the Grafana image rendering # plugin, or set up Grafana to use a remote rendering service. diff --git a/docs/sources/developers/http_api/access_control.md b/docs/sources/developers/http_api/access_control.md index 72c5aec8a76..ecd0ac62d52 100644 --- a/docs/sources/developers/http_api/access_control.md +++ b/docs/sources/developers/http_api/access_control.md @@ -566,6 +566,7 @@ Lists the roles that have been directly assigned to a given user. The list does Query Parameters: - `includeHidden`: Optional. Set to `true` to include roles that are `hidden`. +- `includeMapped`: Optional. Set to `true` to include roles that have been mapped through the group attribute sync feature. #### Required permissions diff --git a/docs/sources/introduction/grafana-enterprise.md b/docs/sources/introduction/grafana-enterprise.md index da08521b3ed..a8373cffba8 100644 --- a/docs/sources/introduction/grafana-enterprise.md +++ b/docs/sources/introduction/grafana-enterprise.md @@ -104,6 +104,7 @@ With a Grafana Enterprise license, you also get access to premium data sources, - [Sqlyze Datasource](/grafana/plugins/grafana-odbc-datasource) - [SumoLogic](/grafana/plugins/grafana-sumologic-datasource) - [Wavefront](/grafana/plugins/grafana-wavefront-datasource) +- [Zendesk](/grafana/plugins/grafana-zendesk-datasource) ## Try Grafana Enterprise diff --git a/docs/sources/setup-grafana/configure-grafana/feature-toggles/index.md b/docs/sources/setup-grafana/configure-grafana/feature-toggles/index.md index 65e636dfa77..ef8bc6a3396 100644 --- a/docs/sources/setup-grafana/configure-grafana/feature-toggles/index.md +++ b/docs/sources/setup-grafana/configure-grafana/feature-toggles/index.md @@ -48,7 +48,6 @@ Most [generally available](https://grafana.com/docs/release-life-cycle/#general- | `angularDeprecationUI` | Display Angular warnings in dashboards and panels | Yes | | `dashgpt` | Enable AI powered features in dashboards | Yes | | `alertingInsights` | Show the new alerting insights landing page | Yes | -| `externalServiceAccounts` | Automatic service account and token setup for plugins | Yes | | `panelMonitoring` | Enables panel monitoring through logs and measurements | Yes | | `formatString` | Enable format string transformer | Yes | | `transformationsVariableSupport` | Allows using variables in transformations | Yes | @@ -101,6 +100,7 @@ Most [generally available](https://grafana.com/docs/release-life-cycle/#general- | `enableDatagridEditing` | Enables the edit functionality in the datagrid panel | | `sqlDatasourceDatabaseSelection` | Enables previous SQL data source dataset dropdown behavior | | `reportingRetries` | Enables rendering retries for the reporting feature | +| `externalServiceAccounts` | Automatic service account and token setup for plugins | | `cloudWatchBatchQueries` | Runs CloudWatch metrics queries as separate batches | | `teamHttpHeaders` | Enables LBAC for datasources to apply LogQL filtering of logs to the client requests for users in teams | | `pdfTables` | Enables generating table data as PDF in reporting | @@ -111,6 +111,7 @@ Most [generally available](https://grafana.com/docs/release-life-cycle/#general- | `ssoSettingsSAML` | Use the new SSO Settings API to configure the SAML connector | | `accessActionSets` | Introduces action sets for resource permissions. 
Also ensures that all folder editors and admins can create subfolders without needing any additional permissions. | | `azureMonitorPrometheusExemplars` | Allows configuration of Azure Monitor as a data source that can provide Prometheus exemplars | +| `ssoSettingsLDAP` | Use the new SSO Settings API to configure LDAP | | `cloudwatchMetricInsightsCrossAccount` | Enables cross account observability for Cloudwatch Metric Insights query builder | | `useSessionStorageForRedirection` | Use session storage for handling the redirection after login | @@ -181,6 +182,7 @@ Experimental features might be changed or removed without prior notice. | `tableSharedCrosshair` | Enables shared crosshair in table panel | | `kubernetesFeatureToggles` | Use the kubernetes API for feature toggle management in the frontend | | `newFolderPicker` | Enables the nested folder picker without having nested folders enabled | +| `onPremToCloudMigrationsAlerts` | Enables the migration of alerts and its child resources to your Grafana Cloud stack. Requires `onPremToCloudMigrations` to be enabled in conjunction. | | `sqlExpressions` | Enables using SQL and DuckDB functions as Expressions. | | `nodeGraphDotLayout` | Changed the layout algorithm for the node graph | | `kubernetesAggregator` | Enable grafana's embedded kube-aggregator | @@ -206,6 +208,7 @@ Experimental features might be changed or removed without prior notice. | `appSidecar` | Enable the app sidecar feature that allows rendering 2 apps at the same time | | `alertingQueryAndExpressionsStepMode` | Enables step mode for alerting queries and expressions | | `rolePickerDrawer` | Enables the new role picker drawer design | +| `pluginsSriChecks` | Enables SRI checks for plugin assets | ## Development feature toggles diff --git a/docs/sources/setup-grafana/configure-security/configure-authentication/keycloak/index.md b/docs/sources/setup-grafana/configure-security/configure-authentication/keycloak/index.md index 6f2dbea11ab..1bd6cfde205 100644 --- a/docs/sources/setup-grafana/configure-security/configure-authentication/keycloak/index.md +++ b/docs/sources/setup-grafana/configure-security/configure-authentication/keycloak/index.md @@ -53,6 +53,12 @@ role_attribute_path = contains(roles[*], 'admin') && 'Admin' || contains(roles[* As an example, `` can be `keycloak-demo.grafana.org` and `` can be `grafana`. +To configure the `kc_idp_hint` parameter for Keycloak, you need to change the `auth_url` configuration to include the `kc_idp_hint` parameter. For example if you want to hint the Google identity provider: + +```ini +auth_url = https:///realms//protocol/openid-connect/auth?kc_idp_hint=google +``` + {{% admonition type="note" %}} api_url is not required if the id_token contains all the necessary user information and can add latency to the login process. It is useful as a fallback or if the user has more than 150 group memberships. 
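For context on the `kc_idp_hint` change documented above, here is a minimal sketch of the full `[auth.generic_oauth]` block that the hinted `auth_url` would typically sit in. The realm (`grafana`), host (`keycloak-demo.grafana.org`), and client values are the placeholder examples from the surrounding docs, not values from this PR; only `auth_url` carries the hint:

```ini
[auth.generic_oauth]
name = Keycloak
enabled = true
client_id = grafana-oauth
client_secret = <client secret>
scopes = openid email profile offline_access roles
auth_url = https://keycloak-demo.grafana.org/realms/grafana/protocol/openid-connect/auth?kc_idp_hint=google
token_url = https://keycloak-demo.grafana.org/realms/grafana/protocol/openid-connect/token
api_url = https://keycloak-demo.grafana.org/realms/grafana/protocol/openid-connect/userinfo
```

With the hint in place, Keycloak skips its own login screen and forwards the user straight to the hinted identity provider (here, Google).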
diff --git a/e2e/cloud-plugins-suite/azure-monitor.spec.ts b/e2e/cloud-plugins-suite/azure-monitor.spec.ts index 64ab9069c91..1c6db341089 100644 --- a/e2e/cloud-plugins-suite/azure-monitor.spec.ts +++ b/e2e/cloud-plugins-suite/azure-monitor.spec.ts @@ -2,6 +2,8 @@ import { Interception } from 'cypress/types/net-stubbing'; import { load } from 'js-yaml'; import { v4 as uuidv4 } from 'uuid'; +import { selectors as rawSelectors } from '@grafana/e2e-selectors'; + import { selectors } from '../../public/app/plugins/datasource/azuremonitor/e2e/selectors'; import { AzureDataSourceJsonData, @@ -75,12 +77,13 @@ const addAzureMonitorVariable = ( isFirst: boolean, options?: { subscription?: string; resourceGroup?: string; namespace?: string; resource?: string; region?: string } ) => { - e2e.components.PageToolbar.item('Dashboard settings').click(); + e2e.components.NavToolbar.editDashboard.editButton().should('be.visible').click(); + e2e.components.NavToolbar.editDashboard.settingsButton().should('be.visible').click(); e2e.components.Tab.title('Variables').click(); if (isFirst) { e2e.pages.Dashboard.Settings.Variables.List.addVariableCTAV2().click(); } else { - e2e.pages.Dashboard.Settings.Variables.List.newButton().click(); + cy.get(`[data-testid="${rawSelectors.pages.Dashboard.Settings.Variables.List.newButton}"]`).click(); } e2e.pages.Dashboard.Settings.Variables.Edit.General.generalNameInputV2().clear().type(name); e2e.components.DataSourcePicker.inputV2().type(`${dataSourceName}{enter}`); @@ -113,7 +116,8 @@ const addAzureMonitorVariable = ( break; } e2e.pages.Dashboard.Settings.Variables.Edit.General.submitButton().click(); - e2e.pages.Dashboard.Settings.Actions.close().click(); + e2e.components.NavToolbar.editDashboard.backToDashboardButton().click(); + e2e.components.NavToolbar.editDashboard.exitButton().click(); }; const storageAcctName = 'azmonteststorage'; @@ -189,7 +193,8 @@ describe('Azure monitor datasource', () => { }, timeout: 10000, }); - e2e.components.PanelEditor.applyButton().click(); + e2e.components.NavToolbar.editDashboard.backToDashboardButton().click(); + e2e.components.NavToolbar.editDashboard.exitButton().click(); e2e.flows.addPanel({ dataSourceName, visitDashboardAtStart: false, @@ -209,7 +214,8 @@ describe('Azure monitor datasource', () => { }, timeout: 10000, }); - e2e.components.PanelEditor.applyButton().click(); + e2e.components.NavToolbar.editDashboard.backToDashboardButton().click(); + e2e.components.NavToolbar.editDashboard.exitButton().click(); e2e.flows.addPanel({ dataSourceName, visitDashboardAtStart: false, @@ -228,7 +234,8 @@ describe('Azure monitor datasource', () => { }, timeout: 10000, }); - e2e.components.PanelEditor.applyButton().click(); + e2e.components.NavToolbar.editDashboard.backToDashboardButton().click(); + e2e.components.NavToolbar.editDashboard.exitButton().click(); e2e.flows.addPanel({ dataSourceName, visitDashboardAtStart: false, @@ -275,25 +282,32 @@ describe('Azure monitor datasource', () => { namespace: '$namespace', region: '$region', }); - e2e.pages.Dashboard.SubMenu.submenuItemLabels('subscription').click(); - e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('grafanalabs-datasources-dev').click(); - e2e.pages.Dashboard.SubMenu.submenuItemLabels('resourceGroups').parent().find('button').click(); + e2e.pages.Dashboard.SubMenu.submenuItemLabels('subscription') + .parent() + .within(() => { + cy.get('input').click(); + }); + e2e.components.Select.option().contains('grafanalabs-datasources-dev').click(); 
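// Note: the replacements in this spec swap the old PanelEditor.applyButton()
// and dashboard-settings close actions for the new edit-mode toolbar
// selectors. Since the backToDashboardButton/exitButton pair now recurs
// throughout the file, a hypothetical helper (not part of this PR) could
// collapse the repetition:
const exitDashboardEditMode = () => {
  e2e.components.NavToolbar.editDashboard.backToDashboardButton().click();
  e2e.components.NavToolbar.editDashboard.exitButton().click();
};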
e2e.pages.Dashboard.SubMenu.submenuItemLabels('resourceGroups') .parent() - .find('input') - .type('cloud-plugins-e2e-test-azmon{downArrow}{enter}'); - e2e.pages.Dashboard.SubMenu.submenuItemLabels('namespaces').parent().find('button').click(); + .within(() => { + cy.get('input').type('cloud-plugins-e2e-test-azmon{downArrow}{enter}'); + }); e2e.pages.Dashboard.SubMenu.submenuItemLabels('namespaces') .parent() - .find('input') - .type('microsoft.storage/storageaccounts{downArrow}{enter}'); - e2e.pages.Dashboard.SubMenu.submenuItemLabels('region').parent().find('button').click(); - e2e.pages.Dashboard.SubMenu.submenuItemLabels('region').parent().find('input').type('uk south{downArrow}{enter}'); - e2e.pages.Dashboard.SubMenu.submenuItemLabels('resource').parent().find('button').click(); + .within(() => { + cy.get('input').type('microsoft.storage/storageaccounts{downArrow}{enter}'); + }); + e2e.pages.Dashboard.SubMenu.submenuItemLabels('region') + .parent() + .within(() => { + cy.get('input').type('uk south{downArrow}{enter}'); + }); e2e.pages.Dashboard.SubMenu.submenuItemLabels('resource') .parent() - .find('input') - .type(`${storageAcctName}{downArrow}{enter}`); + .within(() => { + cy.get('input').type(`${storageAcctName}{downArrow}{enter}`); + }); e2e.flows.addPanel({ dataSourceName, visitDashboardAtStart: false, diff --git a/e2e/dashboards-suite/dashboard-templating.spec.ts b/e2e/dashboards-suite/dashboard-templating.spec.ts index 4e885df1258..7ff5f905262 100644 --- a/e2e/dashboards-suite/dashboard-templating.spec.ts +++ b/e2e/dashboards-suite/dashboard-templating.spec.ts @@ -34,7 +34,7 @@ describe('Dashboard templating', () => { `Server:sqlstring = 'A''A\\"A','BB\\\B','CCC'`, `Server:date = NaN`, `Server:text = All`, - `Server:queryparam = var-Server=A%27A%22A&var-Server=BB%5CB&var-Server=CCC`, + `Server:queryparam = var-Server=$__all`, `1 < 2`, `Example: from=now-6h&to=now`, ]; diff --git a/e2e/dashboards-suite/templating-dashboard-links-and-variables.spec.ts b/e2e/dashboards-suite/templating-dashboard-links-and-variables.spec.ts index 0aeadd56bcc..03fa10a4b2d 100644 --- a/e2e/dashboards-suite/templating-dashboard-links-and-variables.spec.ts +++ b/e2e/dashboards-suite/templating-dashboard-links-and-variables.spec.ts @@ -34,7 +34,7 @@ describe('Templating', () => { e2e.components.DashboardLinks.dropDown().should('be.visible').click().wait('@tagsTemplatingSearch'); - verifyLinks('var-custom=%24__all'); + verifyLinks('var-custom=$__all'); cy.get('body').click(); diff --git a/e2e/old-arch/utils/flows/importDashboard.ts b/e2e/old-arch/utils/flows/importDashboard.ts index 4e7b8ade4ad..7caadac21dd 100644 --- a/e2e/old-arch/utils/flows/importDashboard.ts +++ b/e2e/old-arch/utils/flows/importDashboard.ts @@ -49,7 +49,8 @@ export const importDashboard = (dashboardToImport: Dashboard, queryTimeout?: num dashboardToImport.panels.forEach((panel) => { // Look at the json data e2e.components.Panels.Panel.menu(panel.title).click({ force: true }); // force click because menu is hidden and show on hover - e2e.components.Panels.Panel.menuItems('Inspect').should('be.visible').click(); + e2e.components.Panels.Panel.menuItems('Inspect').trigger('mouseover', { force: true }); + e2e.components.Panels.Panel.menuItems('Data').click({ force: true }); e2e.components.Tab.title('JSON').should('be.visible').click(); e2e.components.PanelInspector.Json.content().should('be.visible').contains('Panel JSON').click({ force: true }); e2e.components.Select.option().should('be.visible').contains('Panel data').click(); diff 
--git a/e2e/old-arch/various-suite/inspect-drawer.spec.ts b/e2e/old-arch/various-suite/inspect-drawer.spec.ts index 2faf952ba08..48746dc7a9f 100644 --- a/e2e/old-arch/various-suite/inspect-drawer.spec.ts +++ b/e2e/old-arch/various-suite/inspect-drawer.spec.ts @@ -34,8 +34,10 @@ describe('Inspect drawer tests', () => { e2e.flows.openDashboard({ uid: 'wfTJJL5Wz' }); - // testing opening inspect drawer directly by clicking on Inspect in header menu - e2e.flows.openPanelMenuItem(e2e.flows.PanelMenuItems.Inspect, PANEL_UNDER_TEST); + e2e.components.Panels.Panel.title(PANEL_UNDER_TEST).scrollIntoView().should('be.visible'); + e2e.components.Panels.Panel.menu(PANEL_UNDER_TEST).click({ force: true }); // force click because menu is hidden and show on hover + e2e.components.Panels.Panel.menuItems('Inspect').trigger('mouseover', { force: true }); + e2e.components.Panels.Panel.menuItems('Data').click({ force: true }); expectDrawerTabsAndContent(); diff --git a/e2e/utils/flows/importDashboard.ts b/e2e/utils/flows/importDashboard.ts index d3c6b7e23b5..7b5e04fd0b1 100644 --- a/e2e/utils/flows/importDashboard.ts +++ b/e2e/utils/flows/importDashboard.ts @@ -49,7 +49,8 @@ export const importDashboard = (dashboardToImport: Dashboard, queryTimeout?: num dashboardToImport.panels.forEach((panel) => { // Look at the json data e2e.components.Panels.Panel.menu(panel.title).click({ force: true }); // force click because menu is hidden and show on hover - e2e.components.Panels.Panel.menuItems('Inspect').should('be.visible').click(); + e2e.components.Panels.Panel.menuItems('Inspect').trigger('mouseover', { force: true }); + e2e.components.Panels.Panel.menuItems('Data').click({ force: true }); e2e.components.Tab.title('JSON').should('be.visible').click(); e2e.components.PanelInspector.Json.content().should('be.visible'); e2e.components.ReactMonacoEditor.editorLazy().should('be.visible'); diff --git a/e2e/utils/flows/userPreferences.ts b/e2e/utils/flows/userPreferences.ts index 621bff04ce6..c69254ac623 100644 --- a/e2e/utils/flows/userPreferences.ts +++ b/e2e/utils/flows/userPreferences.ts @@ -5,7 +5,10 @@ import { fromBaseUrl } from '../support/url'; const defaultUserPreferences = { timezone: '', // "Default" option -} as const; // TODO: when we update typescript >4.9 change to `as const satisfies UserPreferencesDTO` + navbar: { + bookmarkUrls: [], + }, +} as const satisfies UserPreferencesDTO; // TODO: when we update typescript >4.9 change to `as const satisfies UserPreferencesDTO` // Only accept preferences we have defaults for as arguments. 
To allow a new preference to be set, add a default for it type UserPreferences = Pick; diff --git a/e2e/various-suite/bookmarks.spec.ts b/e2e/various-suite/bookmarks.spec.ts new file mode 100644 index 00000000000..6b607c414c4 --- /dev/null +++ b/e2e/various-suite/bookmarks.spec.ts @@ -0,0 +1,62 @@ +import { e2e } from '../utils'; +import { fromBaseUrl } from '../utils/support/url'; + +describe('Pin nav items', () => { + beforeEach(() => { + cy.viewport(1280, 800); + e2e.flows.login(Cypress.env('USERNAME'), Cypress.env('PASSWORD')); + cy.visit(fromBaseUrl('/')); + }); + afterEach(() => { + e2e.flows.setDefaultUserPreferences(); + }); + + it('should pin the selected menu item and add it as a Bookmarks menu item child', () => { + // Open, dock and check if the mega menu is visible + cy.get('[aria-label="Open menu"]').click(); + cy.get('[aria-label="Dock menu"]').click(); + e2e.components.NavMenu.Menu().should('be.visible'); + + // Check if the Bookmark section is visible + const bookmarkSection = cy.get('[href="/bookmarks"]'); + bookmarkSection.should('be.visible'); + + // Click on the pin icon to add Administration to the Bookmarks section + const adminItem = cy.contains('a', 'Administration'); + const bookmarkPinIcon = adminItem.siblings('button').should('have.attr', 'aria-label', 'Add to Bookmarks'); + bookmarkPinIcon.click({ force: true }); + + // Check if the Administration menu item is visible in the Bookmarks section + cy.get('[aria-label="Expand section Bookmarks"]').click(); + const bookmarks = cy.get('[href="/bookmarks"]').parentsUntil('li').siblings('ul'); + bookmarks.within(() => { + cy.get('a').should('contain.text', 'Administration'); + }); + }); + + it('should unpin the item and remove it from the Bookmarks section', () => { + // Set Administration as a pinned item and reload the page + e2e.flows.setUserPreferences({ navbar: { bookmarkUrls: ['/admin'] } }); + cy.reload(); + + // Open, dock and check if the mega menu is visible + cy.get('[aria-label="Open menu"]').click(); + cy.get('[aria-label="Dock menu"]').click(); + e2e.components.NavMenu.Menu().should('be.visible'); + + // Check if the Bookmark section is visible and open it + cy.get('[href="/bookmarks"]').should('be.visible'); + cy.get('[aria-label="Expand section Bookmarks"]').click(); + + // Check if the Administration menu item is visible in the Bookmarks section + const bookmarks = cy.get('[href="/bookmarks"]').parentsUntil('li').siblings('ul').children(); + const administrationIsPinned = bookmarks.filter('li').children().should('contain.text', 'Administration'); + + // Click on the pin icon to remove Administration from the Bookmarks section and check if it is removed + administrationIsPinned.within(() => { + cy.get('[aria-label="Remove from Bookmarks"]').click({ force: true }); + }); + cy.wait(500); + administrationIsPinned.should('not.exist'); + }); +}); diff --git a/e2e/various-suite/inspect-drawer.spec.ts b/e2e/various-suite/inspect-drawer.spec.ts index 2faf952ba08..48746dc7a9f 100644 --- a/e2e/various-suite/inspect-drawer.spec.ts +++ b/e2e/various-suite/inspect-drawer.spec.ts @@ -34,8 +34,10 @@ describe('Inspect drawer tests', () => { e2e.flows.openDashboard({ uid: 'wfTJJL5Wz' }); - // testing opening inspect drawer directly by clicking on Inspect in header menu - e2e.flows.openPanelMenuItem(e2e.flows.PanelMenuItems.Inspect, PANEL_UNDER_TEST); + e2e.components.Panels.Panel.title(PANEL_UNDER_TEST).scrollIntoView().should('be.visible'); + e2e.components.Panels.Panel.menu(PANEL_UNDER_TEST).click({ force: true }); // 
force click because menu is hidden and show on hover + e2e.components.Panels.Panel.menuItems('Inspect').trigger('mouseover', { force: true }); + e2e.components.Panels.Panel.menuItems('Data').click({ force: true }); expectDrawerTabsAndContent(); diff --git a/go.work.sum b/go.work.sum index 95e844c912a..8fbaee04958 100644 --- a/go.work.sum +++ b/go.work.sum @@ -378,7 +378,6 @@ github.com/blevesearch/go-metrics v0.0.0-20201227073835-cf1acfcdf475/go.mod h1:9 github.com/blevesearch/goleveldb v1.0.1 h1:iAtV2Cu5s0GD1lwUiekkFHe2gTMCCNVj2foPclDLIFI= github.com/blevesearch/goleveldb v1.0.1/go.mod h1:WrU8ltZbIp0wAoig/MHbrPCXSOLpe79nz5lv5nqfYrQ= github.com/blevesearch/mmap-go v1.0.3/go.mod h1:pYvKl/grLQrBxuaRYgoTssa4rVujYYeenDp++2E+yvs= -github.com/blevesearch/segment v0.9.0 h1:5lG7yBCx98or7gK2cHMKPukPZ/31Kag7nONpoBt22Ac= github.com/blevesearch/snowball v0.6.1 h1:cDYjn/NCH+wwt2UdehaLpr2e4BwLIjN4V/TdLsL+B5A= github.com/blevesearch/snowball v0.6.1/go.mod h1:ZF0IBg5vgpeoUhnMza2v0A/z8m1cWPlwhke08LpNusg= github.com/blevesearch/stempel v0.2.0 h1:CYzVPaScODMvgE9o+kf6D4RJ/VRomyi9uHF+PtB+Afc= @@ -491,8 +490,6 @@ github.com/elastic/go-sysinfo v1.11.2/go.mod h1:GKqR8bbMK/1ITnez9NIsIfXQr25aLhRJ github.com/elastic/go-windows v1.0.1 h1:AlYZOldA+UJ0/2nBuqWdo90GFCgG9xuyw9SYzGUtJm0= github.com/elastic/go-windows v1.0.1/go.mod h1:FoVvqWSun28vaDQPbj2Elfc0JahhPB7WQEGa3c814Ss= github.com/elazarl/goproxy v0.0.0-20230731152917-f99041a5c027/go.mod h1:Ro8st/ElPeALwNFlcTpWmkr6IoMFfkjXAvTHpevnDsM= -github.com/elazarl/goproxy/ext v0.0.0-20190711103511-473e67f1d7d2/go.mod h1:gNh8nYJoAm43RfaxurUnxr+N1PwuFV3ZMl/efxlIlY8= -github.com/ettle/strcase v0.1.1 h1:htFueZyVeE1XNnMEfbqp5r67qAN/4r6ya1ysq8Q+Zcw= github.com/expr-lang/expr v1.16.2 h1:JvMnzUs3LeVHBvGFcXYmXo+Q6DPDmzrlcSBO6Wy3w4s= github.com/expr-lang/expr v1.16.2/go.mod h1:uCkhfG+x7fcZ5A5sXHKuQ07jGZRl6J0FCAaf2k4PtVQ= github.com/fatih/structs v1.1.0 h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo= @@ -587,8 +584,6 @@ github.com/grafana/alerting v0.0.0-20240830172655-aa466962ea18 h1:3cQ+d+fkNL2Eqp github.com/grafana/alerting v0.0.0-20240830172655-aa466962ea18/go.mod h1:GMLi6d09Xqo96fCVUjNk//rcjP5NKEdjOzfWIffD5r4= github.com/grafana/alerting v0.0.0-20240917171353-6c25eb6eff10 h1:oDbLKM34O+JUF9EQFS+9aYhdYoeNfUpXqNjFCLIxwF4= github.com/grafana/alerting v0.0.0-20240917171353-6c25eb6eff10/go.mod h1:GMLi6d09Xqo96fCVUjNk//rcjP5NKEdjOzfWIffD5r4= -github.com/grafana/alerting v0.0.0-20240926233713-446ddd356f8d h1:HOK6RWTuVldWFtNbWHxPlTa2shZ+WsNJsxoRJhX56Zg= -github.com/grafana/alerting v0.0.0-20240926233713-446ddd356f8d/go.mod h1:GMLi6d09Xqo96fCVUjNk//rcjP5NKEdjOzfWIffD5r4= github.com/grafana/gomemcache v0.0.0-20240229205252-cd6a66d6fb56/go.mod h1:PGk3RjYHpxMM8HFPhKKo+vve3DdlPUELZLSDEFehPuU= github.com/grafana/prometheus-alertmanager v0.25.1-0.20240625192351-66ec17e3aa45 h1:AJKOtDKAOg8XNFnIZSmqqqutoTSxVlRs6vekL2p2KEY= github.com/grafana/prometheus-alertmanager v0.25.1-0.20240625192351-66ec17e3aa45/go.mod h1:01sXtHoRwI8W324IPAzuxDFOmALqYLCOhvSC2fUHWXc= @@ -852,6 +847,7 @@ github.com/stoewer/parquet-cli v0.0.7/go.mod h1:bskxHdj8q3H1EmfuCqjViFoeO3NEvs5l github.com/streadway/amqp v1.0.0 h1:kuuDrUJFZL1QYL9hUNuCxNObNzB0bV/ZG5jV3RWAQgo= github.com/streadway/handy v0.0.0-20200128134331-0f66f006fb2e h1:mOtuXaRAbVZsxAHVdPR3IjfmN8T1h2iczJLynhLybf8= github.com/substrait-io/substrait-go v0.4.2 h1:buDnjsb3qAqTaNbOR7VKmNgXf4lYQxWEcnSGUWBtmN8= +github.com/syndtr/goleveldb v1.0.0 h1:fBdIW9lB4Iz0n9khmH8w27SJ3QEJ7+IgjPEwGSZiFdE= github.com/tdewolff/minify/v2 v2.12.9 
h1:dvn5MtmuQ/DFMwqf5j8QhEVpPX6fi3WGImhv8RUB4zA= github.com/tdewolff/minify/v2 v2.12.9/go.mod h1:qOqdlDfL+7v0/fyymB+OP497nIxJYSvX4MQWA8OoiXU= github.com/tdewolff/parse/v2 v2.6.8 h1:mhNZXYCx//xG7Yq2e/kVLNZw4YfYmeHbhx+Zc0OvFMA= @@ -1037,9 +1033,11 @@ golang.org/x/net v0.0.0-20201202161906-c7110b5ffcbb/go.mod h1:sp8m0HH+o8qH0wwXwY golang.org/x/net v0.0.0-20210726213435-c6fcb2dbf985/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211123203042-d83791d6bcd9/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211216030914-fe4d6282115f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= golang.org/x/net v0.26.0/go.mod h1:5YKkiSynbBIh3p6iOc/vibscux0x38BZDkn8sCUPxHE= golang.org/x/net v0.27.0/go.mod h1:dDi0PyhWNoiUOrAS8uXv/vnScO4wnHQO4mj9fn/RytE= golang.org/x/net v0.28.0/go.mod h1:yqtgsTWOOnlGLG9GFRrK3++bGOUEkNBoHZc8MEDWPNg= +golang.org/x/oauth2 v0.20.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= golang.org/x/oauth2 v0.21.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= golang.org/x/oauth2 v0.22.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= golang.org/x/sys v0.0.0-20190204203706-41f3e6584952/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -1051,6 +1049,7 @@ golang.org/x/sys v0.23.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.24.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457 h1:zf5N6UOrA487eEFacMePxjXAJctxKmyjKUsjA11Uzuk= golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457/go.mod h1:pRgIJT+bRLFKnoM1ldnzKoxTIn14Yxz928LQRYYgIN0= +golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/term v0.21.0/go.mod h1:ooXLefLobQVslOqselCNF4SxFAaoS6KujMbsGzSDmX0= golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI= golang.org/x/text v0.17.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= @@ -1070,6 +1069,7 @@ google.golang.org/genproto/googleapis/api v0.0.0-20240730163845-b1a4ccb954bf/go. 
google.golang.org/genproto/googleapis/api v0.0.0-20240814211410-ddb44dafa142/go.mod h1:d6be+8HhtEtucleCbxpPW9PA9XwISACu8nvpPqF0BVo= google.golang.org/genproto/googleapis/bytestream v0.0.0-20240730163845-b1a4ccb954bf h1:T4tsZBlZYXK3j40sQNP5MBO32I+rn6ypV1PpklsiV8k= google.golang.org/genproto/googleapis/bytestream v0.0.0-20240730163845-b1a4ccb954bf/go.mod h1:5/MT647Cn/GGhwTpXC7QqcaR5Cnee4v4MKCU1/nwnIQ= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240528184218-531527333157/go.mod h1:EfXuqaE1J41VCDicxHzUDm+8rk+7ZdXzHV0IhO/I6s0= google.golang.org/genproto/googleapis/rpc v0.0.0-20240604185151-ef581f913117/go.mod h1:EfXuqaE1J41VCDicxHzUDm+8rk+7ZdXzHV0IhO/I6s0= google.golang.org/genproto/googleapis/rpc v0.0.0-20240701130421-f6361c86f094/go.mod h1:Ue6ibwXGpU+dqIcODieyLOcgj7z8+IcskoNIgZxtrFY= google.golang.org/genproto/googleapis/rpc v0.0.0-20240722135656-d784300faade/go.mod h1:Ue6ibwXGpU+dqIcODieyLOcgj7z8+IcskoNIgZxtrFY= diff --git a/package.json b/package.json index 2550f233202..e261fc3abcf 100644 --- a/package.json +++ b/package.json @@ -69,9 +69,9 @@ "releaseNotesUrl": "https://grafana.com/docs/grafana/next/release-notes/" }, "devDependencies": { - "@babel/core": "7.25.2", - "@babel/preset-env": "7.25.4", - "@babel/runtime": "7.25.6", + "@babel/core": "7.25.7", + "@babel/preset-env": "7.25.7", + "@babel/runtime": "7.25.7", "@betterer/betterer": "5.4.0", "@betterer/cli": "5.4.0", "@betterer/eslint": "5.4.0", @@ -82,11 +82,11 @@ "@grafana/plugin-e2e": "^1.8.3", "@grafana/tsconfig": "^2.0.0", "@manypkg/get-packages": "^2.2.0", - "@playwright/test": "1.47.2", + "@playwright/test": "1.48.0", "@pmmmwh/react-refresh-webpack-plugin": "0.5.15", "@react-types/button": "3.9.6", - "@react-types/menu": "3.9.11", - "@react-types/overlays": "3.8.9", + "@react-types/menu": "3.9.12", + "@react-types/overlays": "3.8.10", "@react-types/shared": "3.24.1", "@rtk-query/codegen-openapi": "^1.2.0", "@rtsao/plugin-proposal-class-properties": "7.0.1-patch.1", @@ -119,10 +119,10 @@ "@types/jquery": "3.5.31", "@types/js-yaml": "^4.0.5", "@types/jsurl": "^1.2.28", - "@types/lodash": "4.17.9", + "@types/lodash": "4.17.10", "@types/logfmt": "^1.2.3", "@types/lucene": "^2", - "@types/node": "20.16.9", + "@types/node": "20.16.11", "@types/node-forge": "^1", "@types/ol-ext": "npm:@siedlerchr/types-ol-ext@3.2.4", "@types/pluralize": "^0.0.33", @@ -181,7 +181,7 @@ "eslint-plugin-jsx-a11y": "6.10.0", "eslint-plugin-lodash": "7.4.0", "eslint-plugin-no-barrel-files": "^1.1.0", - "eslint-plugin-react": "7.37.0", + "eslint-plugin-react": "7.37.1", "eslint-plugin-react-hooks": "4.6.0", "eslint-plugin-testing-library": "^6.2.2", "eslint-scope": "^8.0.0", @@ -198,7 +198,7 @@ "jest-canvas-mock": "2.5.2", "jest-date-mock": "1.0.10", "jest-environment-jsdom": "29.7.0", - "jest-fail-on-console": "3.3.0", + "jest-fail-on-console": "3.3.1", "jest-junit": "16.0.0", "jest-matcher-utils": "29.7.0", "jest-watch-typeahead": "^2.2.2", @@ -221,7 +221,7 @@ "react-test-renderer": "18.2.0", "redux-mock-store": "1.5.4", "rimraf": "6.0.1", - "rudder-sdk-js": "2.48.18", + "rudder-sdk-js": "2.48.19", "sass": "1.79.3", "sass-loader": "16.0.2", "smtp-tester": "^2.1.0", @@ -246,17 +246,17 @@ "yargs": "^17.5.1" }, "dependencies": { - "@emotion/css": "11.13.0", + "@emotion/css": "11.13.4", "@emotion/react": "11.13.3", "@fingerprintjs/fingerprintjs": "^3.4.2", "@floating-ui/react": "0.26.24", "@formatjs/intl-durationformat": "^0.2.4", "@glideapps/glide-data-grid": "^6.0.0", - "@grafana/aws-sdk": "0.4.2", + "@grafana/aws-sdk": "0.5.0", "@grafana/azure-sdk": 
"0.0.3", "@grafana/data": "workspace:*", "@grafana/e2e-selectors": "workspace:*", - "@grafana/experimental": "1.8.0", + "@grafana/experimental": "2.1.1", "@grafana/faro-core": "^1.3.6", "@grafana/faro-web-sdk": "^1.3.6", "@grafana/faro-web-tracing": "^1.8.2", @@ -268,14 +268,14 @@ "@grafana/prometheus": "workspace:*", "@grafana/runtime": "workspace:*", "@grafana/saga-icons": "workspace:*", - "@grafana/scenes": "5.16.2", + "@grafana/scenes": "5.18.3", "@grafana/schema": "workspace:*", "@grafana/sql": "workspace:*", "@grafana/ui": "workspace:*", "@hello-pangea/dnd": "16.6.0", "@kusto/monaco-kusto": "^10.0.0", "@leeoniya/ufuzzy": "1.0.14", - "@lezer/common": "1.2.1", + "@lezer/common": "1.2.2", "@lezer/highlight": "1.2.1", "@lezer/lr": "1.3.3", "@locker/near-membrane-dom": "0.13.6", @@ -287,12 +287,12 @@ "@opentelemetry/exporter-collector": "0.25.0", "@opentelemetry/semantic-conventions": "1.27.0", "@popperjs/core": "2.11.8", - "@react-aria/dialog": "3.5.17", - "@react-aria/focus": "3.18.2", - "@react-aria/overlays": "3.23.2", - "@react-aria/utils": "3.25.2", + "@react-aria/dialog": "3.5.18", + "@react-aria/focus": "3.18.3", + "@react-aria/overlays": "3.23.3", + "@react-aria/utils": "3.25.3", "@react-awesome-query-builder/ui": "6.6.3", - "@reduxjs/toolkit": "2.2.7", + "@reduxjs/toolkit": "2.2.8", "@testing-library/react-hooks": "^8.0.1", "@visx/event": "3.3.0", "@visx/gradient": "3.3.0", @@ -345,7 +345,7 @@ "ml-regression-polynomial": "^3.0.0", "ml-regression-simple-linear": "^3.0.0", "moment": "2.30.1", - "moment-timezone": "0.5.45", + "moment-timezone": "0.5.46", "monaco-editor": "0.34.1", "moveable": "0.53.0", "nanoid": "^5.0.4", @@ -354,7 +354,7 @@ "ol-ext": "4.0.23", "pluralize": "^8.0.0", "prismjs": "1.29.0", - "rc-slider": "11.1.6", + "rc-slider": "11.1.7", "rc-time-picker": "3.7.3", "rc-tree": "5.9.0", "re-resizable": "6.10.0", diff --git a/packages/grafana-data/package.json b/packages/grafana-data/package.json index 7ff2bde3e22..ce45ca9e694 100644 --- a/packages/grafana-data/package.json +++ b/packages/grafana-data/package.json @@ -49,7 +49,7 @@ "marked": "12.0.2", "marked-mangle": "1.1.9", "moment": "2.30.1", - "moment-timezone": "0.5.45", + "moment-timezone": "0.5.46", "ol": "7.4.0", "papaparse": "5.4.1", "react-use": "17.5.1", @@ -65,8 +65,8 @@ "@rollup/plugin-node-resolve": "15.3.0", "@types/dompurify": "^3.0.0", "@types/history": "4.7.11", - "@types/lodash": "4.17.9", - "@types/node": "20.16.9", + "@types/lodash": "4.17.10", + "@types/node": "20.16.11", "@types/papaparse": "5.3.14", "@types/react": "18.3.3", "@types/react-dom": "18.2.25", diff --git a/packages/grafana-data/src/transformations/transformers/limit.ts b/packages/grafana-data/src/transformations/transformers/limit.ts index a37bc7b2143..3571efaf83a 100644 --- a/packages/grafana-data/src/transformations/transformers/limit.ts +++ b/packages/grafana-data/src/transformations/transformers/limit.ts @@ -34,6 +34,10 @@ export const limitTransformer: DataTransformerInfo = { limit = options.limitField; } } + // Prevent negative limit + if (limit < 0) { + limit = 0; + } return data.map((frame) => { if (frame.length > limit) { return { diff --git a/packages/grafana-data/src/types/featureToggles.gen.ts b/packages/grafana-data/src/types/featureToggles.gen.ts index f01cb7197a0..b8124c22120 100644 --- a/packages/grafana-data/src/types/featureToggles.gen.ts +++ b/packages/grafana-data/src/types/featureToggles.gen.ts @@ -158,6 +158,7 @@ export interface FeatureToggles { newFolderPicker?: boolean; jitterAlertRulesWithinGroups?: boolean; 
onPremToCloudMigrations?: boolean; + onPremToCloudMigrationsAlerts?: boolean; alertingSaveStatePeriodic?: boolean; promQLScope?: boolean; sqlExpressions?: boolean; @@ -219,4 +220,5 @@ export interface FeatureToggles { useSessionStorageForRedirection?: boolean; rolePickerDrawer?: boolean; unifiedStorageSearch?: boolean; + pluginsSriChecks?: boolean; } diff --git a/packages/grafana-data/src/types/plugin.ts b/packages/grafana-data/src/types/plugin.ts index 4233771dbe6..1f64879d317 100644 --- a/packages/grafana-data/src/types/plugin.ts +++ b/packages/grafana-data/src/types/plugin.ts @@ -99,6 +99,7 @@ export interface PluginMeta { angularDetected?: boolean; loadingStrategy?: PluginLoadingStrategy; extensions?: PluginExtensions; + moduleHash?: string; } interface PluginDependencyInfo { diff --git a/packages/grafana-e2e-selectors/package.json b/packages/grafana-e2e-selectors/package.json index 73973dd4318..ace51d6eee0 100644 --- a/packages/grafana-e2e-selectors/package.json +++ b/packages/grafana-e2e-selectors/package.json @@ -40,7 +40,7 @@ }, "devDependencies": { "@rollup/plugin-node-resolve": "15.3.0", - "@types/node": "20.16.9", + "@types/node": "20.16.11", "esbuild": "0.24.0", "rimraf": "6.0.1", "rollup": "^4.22.4", diff --git a/packages/grafana-flamegraph/package.json b/packages/grafana-flamegraph/package.json index 29f945b8d28..3ccd87c0a02 100644 --- a/packages/grafana-flamegraph/package.json +++ b/packages/grafana-flamegraph/package.json @@ -43,7 +43,7 @@ "not IE 11" ], "dependencies": { - "@emotion/css": "11.13.0", + "@emotion/css": "11.13.4", "@grafana/data": "11.3.0-pre", "@grafana/ui": "11.3.0-pre", "@leeoniya/ufuzzy": "1.0.14", @@ -56,9 +56,9 @@ "tslib": "2.7.0" }, "devDependencies": { - "@babel/core": "7.25.2", - "@babel/preset-env": "7.25.4", - "@babel/preset-react": "7.24.7", + "@babel/core": "7.25.7", + "@babel/preset-env": "7.25.7", + "@babel/preset-react": "7.25.7", "@grafana/tsconfig": "^2.0.0", "@rollup/plugin-node-resolve": "15.3.0", "@testing-library/dom": "10.0.0", @@ -67,8 +67,8 @@ "@testing-library/user-event": "14.5.2", "@types/d3": "^7", "@types/jest": "^29.5.4", - "@types/lodash": "4.17.9", - "@types/node": "20.16.9", + "@types/lodash": "4.17.10", + "@types/node": "20.16.11", "@types/react": "18.3.3", "@types/react-virtualized-auto-sizer": "1.0.4", "@types/tinycolor2": "1.4.6", diff --git a/packages/grafana-icons/package.json b/packages/grafana-icons/package.json index 98cb0c4bb0d..7313c7fe3ed 100644 --- a/packages/grafana-icons/package.json +++ b/packages/grafana-icons/package.json @@ -34,7 +34,7 @@ "build": "yarn generate && rollup -c rollup.config.ts --configPlugin esbuild" }, "devDependencies": { - "@babel/core": "7.25.2", + "@babel/core": "7.25.7", "@grafana/tsconfig": "^2.0.0", "@rollup/plugin-node-resolve": "^15.3.0", "@rollup/plugin-typescript": "^12.1.0", @@ -45,7 +45,7 @@ "@svgr/plugin-prettier": "^8.1.0", "@svgr/plugin-svgo": "^8.1.0", "@types/babel__core": "^7", - "@types/node": "20.16.9", + "@types/node": "20.16.11", "@types/react": "18.3.3", "@types/react-dom": "18.2.25", "esbuild": "0.24.0", diff --git a/packages/grafana-o11y-ds-frontend/package.json b/packages/grafana-o11y-ds-frontend/package.json index df4acb26060..a1e256f8dd1 100644 --- a/packages/grafana-o11y-ds-frontend/package.json +++ b/packages/grafana-o11y-ds-frontend/package.json @@ -17,10 +17,10 @@ "typecheck": "tsc --emitDeclarationOnly false --noEmit" }, "dependencies": { - "@emotion/css": "11.13.0", + "@emotion/css": "11.13.4", "@grafana/data": "11.3.0-pre", "@grafana/e2e-selectors": 
"11.3.0-pre", - "@grafana/experimental": "1.8.0", + "@grafana/experimental": "2.1.1", "@grafana/runtime": "11.3.0-pre", "@grafana/schema": "11.3.0-pre", "@grafana/ui": "11.3.0-pre", @@ -36,7 +36,7 @@ "@testing-library/react": "15.0.2", "@testing-library/user-event": "14.5.2", "@types/jest": "^29.5.4", - "@types/node": "20.16.9", + "@types/node": "20.16.11", "@types/react": "18.3.3", "@types/systemjs": "6.15.1", "@types/testing-library__jest-dom": "5.14.9", diff --git a/packages/grafana-prometheus/package.json b/packages/grafana-prometheus/package.json index 08c7f213e9f..5c8924babaf 100644 --- a/packages/grafana-prometheus/package.json +++ b/packages/grafana-prometheus/package.json @@ -36,21 +36,21 @@ "postpack": "mv package.json.bak package.json" }, "dependencies": { - "@emotion/css": "11.13.0", + "@emotion/css": "11.13.4", "@floating-ui/react": "0.26.24", "@grafana/data": "11.3.0-pre", - "@grafana/experimental": "1.8.0", - "@grafana/faro-web-sdk": "1.10.1", + "@grafana/experimental": "2.1.1", + "@grafana/faro-web-sdk": "1.10.2", "@grafana/runtime": "11.3.0-pre", "@grafana/schema": "11.3.0-pre", "@grafana/ui": "11.3.0-pre", "@hello-pangea/dnd": "16.6.0", "@leeoniya/ufuzzy": "1.0.14", - "@lezer/common": "1.2.1", + "@lezer/common": "1.2.2", "@lezer/highlight": "1.2.1", "@lezer/lr": "1.4.2", "@prometheus-io/lezer-promql": "0.54.1", - "@reduxjs/toolkit": "2.2.7", + "@reduxjs/toolkit": "2.2.8", "d3": "7.9.0", "date-fns": "3.6.0", "debounce-promise": "3.1.2", @@ -60,7 +60,7 @@ "marked": "12.0.2", "marked-mangle": "1.1.9", "moment": "2.30.1", - "moment-timezone": "0.5.45", + "moment-timezone": "0.5.46", "monaco-promql": "1.7.4", "pluralize": "8.0.0", "prismjs": "1.29.0", @@ -91,8 +91,8 @@ "@types/eslint": "8.56.10", "@types/jest": "29.5.13", "@types/jquery": "3.5.31", - "@types/lodash": "4.17.9", - "@types/node": "20.16.9", + "@types/lodash": "4.17.10", + "@types/node": "20.16.11", "@types/pluralize": "^0.0.33", "@types/prismjs": "1.26.4", "@types/react": "18.3.3", @@ -114,7 +114,7 @@ "eslint-plugin-jsdoc": "48.11.0", "eslint-plugin-jsx-a11y": "6.10.0", "eslint-plugin-lodash": "7.4.0", - "eslint-plugin-react": "7.37.0", + "eslint-plugin-react": "7.37.1", "eslint-plugin-react-hooks": "4.6.0", "eslint-webpack-plugin": "4.2.0", "fork-ts-checker-webpack-plugin": "9.0.2", diff --git a/packages/grafana-runtime/package.json b/packages/grafana-runtime/package.json index a690d8d5395..9aefe49a823 100644 --- a/packages/grafana-runtime/package.json +++ b/packages/grafana-runtime/package.json @@ -57,7 +57,7 @@ "@types/angular": "1.8.9", "@types/history": "4.7.11", "@types/jest": "29.5.13", - "@types/lodash": "4.17.9", + "@types/lodash": "4.17.10", "@types/react": "18.3.3", "@types/react-dom": "18.2.25", "@types/systemjs": "6.15.1", diff --git a/packages/grafana-runtime/src/config.ts b/packages/grafana-runtime/src/config.ts index 71ec381d45b..c7c00a37c3b 100644 --- a/packages/grafana-runtime/src/config.ts +++ b/packages/grafana-runtime/src/config.ts @@ -46,6 +46,7 @@ export type AppPluginConfig = { loadingStrategy: PluginLoadingStrategy; dependencies: PluginDependencies; extensions: PluginExtensions; + moduleHash?: string; }; export type PreinstalledPlugin = { diff --git a/packages/grafana-sql/package.json b/packages/grafana-sql/package.json index 854d02f8ac6..b291478a8f3 100644 --- a/packages/grafana-sql/package.json +++ b/packages/grafana-sql/package.json @@ -14,10 +14,10 @@ "typecheck": "tsc --emitDeclarationOnly false --noEmit" }, "dependencies": { - "@emotion/css": "11.13.0", + "@emotion/css": "11.13.4", 
"@grafana/data": "11.3.0-pre", "@grafana/e2e-selectors": "11.3.0-pre", - "@grafana/experimental": "1.8.0", + "@grafana/experimental": "2.1.1", "@grafana/runtime": "11.3.0-pre", "@grafana/ui": "11.3.0-pre", "@react-awesome-query-builder/ui": "6.6.3", @@ -41,8 +41,8 @@ "@testing-library/react-hooks": "^8.0.1", "@testing-library/user-event": "14.5.2", "@types/jest": "^29.5.4", - "@types/lodash": "4.17.9", - "@types/node": "20.16.9", + "@types/lodash": "4.17.10", + "@types/node": "20.16.11", "@types/react": "18.3.3", "@types/react-dom": "18.2.25", "@types/react-virtualized-auto-sizer": "1.0.4", diff --git a/packages/grafana-ui/package.json b/packages/grafana-ui/package.json index b44d5cd61a9..233095b9f89 100644 --- a/packages/grafana-ui/package.json +++ b/packages/grafana-ui/package.json @@ -47,7 +47,7 @@ "not IE 11" ], "dependencies": { - "@emotion/css": "11.13.0", + "@emotion/css": "11.13.4", "@emotion/react": "11.13.3", "@emotion/serialize": "1.3.2", "@floating-ui/react": "0.26.24", @@ -59,13 +59,13 @@ "@leeoniya/ufuzzy": "1.0.14", "@monaco-editor/react": "4.6.0", "@popperjs/core": "2.11.8", - "@react-aria/dialog": "3.5.17", - "@react-aria/focus": "3.18.2", - "@react-aria/overlays": "3.23.2", - "@react-aria/utils": "3.25.2", + "@react-aria/dialog": "3.5.18", + "@react-aria/focus": "3.18.3", + "@react-aria/overlays": "3.23.3", + "@react-aria/utils": "3.25.3", "@tanstack/react-virtual": "^3.5.1", "@types/jquery": "3.5.31", - "@types/lodash": "4.17.9", + "@types/lodash": "4.17.10", "@types/react-table": "7.7.20", "ansicolor": "1.1.100", "calculate-size": "1.1.1", @@ -87,13 +87,13 @@ "prismjs": "1.29.0", "rc-cascader": "3.28.1", "rc-drawer": "7.2.0", - "rc-slider": "11.1.6", + "rc-slider": "11.1.7", "rc-time-picker": "^3.7.3", "rc-tooltip": "6.2.1", "react-calendar": "5.0.0", "react-colorful": "5.6.1", "react-custom-scrollbars-2": "4.5.0", - "react-dropzone": "14.2.3", + "react-dropzone": "14.2.9", "react-highlight-words": "0.20.0", "react-hook-form": "^7.49.2", "react-i18next": "^14.0.0", @@ -115,7 +115,7 @@ "uuid": "9.0.1" }, "devDependencies": { - "@babel/core": "7.25.2", + "@babel/core": "7.25.7", "@faker-js/faker": "^9.0.0", "@grafana/tsconfig": "^2.0.0", "@rollup/plugin-node-resolve": "15.3.0", @@ -146,7 +146,7 @@ "@types/is-hotkey": "0.1.10", "@types/jest": "29.5.13", "@types/mock-raf": "1.0.6", - "@types/node": "20.16.9", + "@types/node": "20.16.11", "@types/prismjs": "1.26.4", "@types/react": "18.3.3", "@types/react-color": "3.0.12", diff --git a/packages/grafana-ui/src/components/Menu/MenuItem.tsx b/packages/grafana-ui/src/components/Menu/MenuItem.tsx index 11048b850c8..5bdfb00a682 100644 --- a/packages/grafana-ui/src/components/Menu/MenuItem.tsx +++ b/packages/grafana-ui/src/components/Menu/MenuItem.tsx @@ -152,7 +152,13 @@ export const MenuItem = React.memo( className={itemStyle} rel={target === '_blank' ? 
'noopener noreferrer' : undefined} href={url} - onClick={onClick} + onClick={(event) => { + if (hasSubMenu && !isSubMenuOpen) { + event.preventDefault(); + event.stopPropagation(); + } + onClick?.(event); + }} onMouseEnter={onMouseEnter} onMouseLeave={onMouseLeave} onKeyDown={handleKeys} diff --git a/packages/grafana-ui/src/components/Table/TableCell.tsx b/packages/grafana-ui/src/components/Table/TableCell.tsx index ef45bd98400..5c4b9f455dc 100644 --- a/packages/grafana-ui/src/components/Table/TableCell.tsx +++ b/packages/grafana-ui/src/components/Table/TableCell.tsx @@ -40,6 +40,7 @@ export const TableCell = ({ } if (cellProps.style) { + cellProps.style.wordBreak = 'break-word'; cellProps.style.minWidth = cellProps.style.width; const justifyContent = (cell.column as any).justifyContent; diff --git a/packages/grafana-ui/src/components/VizRepeater/VizRepeater.tsx b/packages/grafana-ui/src/components/VizRepeater/VizRepeater.tsx index ee2f0585aad..fab514bde4b 100644 --- a/packages/grafana-ui/src/components/VizRepeater/VizRepeater.tsx +++ b/packages/grafana-ui/src/components/VizRepeater/VizRepeater.tsx @@ -180,6 +180,7 @@ export class VizRepeater extends PureComponent extends PureComponent 0 { for _, key := range result { key.AccessControl = metadata[strconv.FormatInt(key.ID, 10)] diff --git a/pkg/api/datasources.go b/pkg/api/datasources.go index 2a6b3bfd4b8..4a29dfa6c19 100644 --- a/pkg/api/datasources.go +++ b/pkg/api/datasources.go @@ -136,7 +136,7 @@ func (hs *HTTPServer) GetDataSourceById(c *contextmodel.ReqContext) response.Res dto := hs.convertModelToDtos(c.Req.Context(), dataSource) // Add accesscontrol metadata - dto.AccessControl = hs.getAccessControlMetadata(c, datasources.ScopePrefix, dto.UID) + dto.AccessControl = getAccessControlMetadata(c, datasources.ScopePrefix, dto.UID) return response.JSON(http.StatusOK, &dto) } @@ -222,7 +222,7 @@ func (hs *HTTPServer) GetDataSourceByUID(c *contextmodel.ReqContext) response.Re dto := hs.convertModelToDtos(c.Req.Context(), ds) // Add accesscontrol metadata - dto.AccessControl = hs.getAccessControlMetadata(c, datasources.ScopePrefix, dto.UID) + dto.AccessControl = getAccessControlMetadata(c, datasources.ScopePrefix, dto.UID) return response.JSON(http.StatusOK, &dto) } diff --git a/pkg/api/dtos/plugins.go b/pkg/api/dtos/plugins.go index b14b915e5ff..703178e7000 100644 --- a/pkg/api/dtos/plugins.go +++ b/pkg/api/dtos/plugins.go @@ -30,6 +30,7 @@ type PluginSetting struct { SignatureOrg string `json:"signatureOrg"` AngularDetected bool `json:"angularDetected"` LoadingStrategy plugins.LoadingStrategy `json:"loadingStrategy"` + ModuleHash string `json:"moduleHash,omitempty"` } type PluginListItem struct { diff --git a/pkg/api/folder.go b/pkg/api/folder.go index ca1cfe041cf..12b2e6b2d41 100644 --- a/pkg/api/folder.go +++ b/pkg/api/folder.go @@ -1,12 +1,16 @@ package api import ( + "context" "errors" + "fmt" "net/http" "strconv" + "strings" k8sErrors "k8s.io/apimachinery/pkg/api/errors" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" "k8s.io/apimachinery/pkg/runtime/schema" "k8s.io/client-go/dynamic" @@ -28,6 +32,7 @@ import ( "github.com/grafana/grafana/pkg/services/guardian" "github.com/grafana/grafana/pkg/services/libraryelements/model" "github.com/grafana/grafana/pkg/services/search" + "github.com/grafana/grafana/pkg/services/user" "github.com/grafana/grafana/pkg/util" "github.com/grafana/grafana/pkg/util/errhttp" "github.com/grafana/grafana/pkg/web" @@ -448,7 +453,7 @@ func (hs *HTTPServer) 
getFolderACMetadata(c *contextmodel.ReqContext, f *folder. folderIDs[p.UID] = true } - allMetadata := hs.getMultiAccessControlMetadata(c, dashboards.ScopeFoldersPrefix, folderIDs) + allMetadata := getMultiAccessControlMetadata(c, dashboards.ScopeFoldersPrefix, folderIDs) metadata := map[string]bool{} // Flatten metadata - if any parent has a permission, the child folder inherits it for _, md := range allMetadata { @@ -629,6 +634,9 @@ type folderK8sHandler struct { clientConfigProvider grafanaapiserver.DirectRestConfigProvider // #TODO check if it makes more sense to move this to FolderAPIBuilder accesscontrolService accesscontrol.Service + userService user.Service + // #TODO remove after we handle the nested folder case + folderService folder.Service } //----------------------------------------------------------------------------------------- @@ -641,6 +649,8 @@ func newFolderK8sHandler(hs *HTTPServer) *folderK8sHandler { namespacer: request.GetNamespaceMapper(hs.Cfg), clientConfigProvider: hs.clientConfigProvider, accesscontrolService: hs.accesscontrolService, + userService: hs.userService, + folderService: hs.folderService, } } @@ -693,12 +703,13 @@ func (fk8s *folderK8sHandler) createFolder(c *contextmodel.ReqContext) { } fk8s.accesscontrolService.ClearUserPermissionCache(c.SignedInUser) - f, err := internalfolders.UnstructuredToLegacyFolderDTO(*out) + folderDTO, err := fk8s.newToFolderDto(c, *out, c.SignedInUser.GetOrgID()) if err != nil { fk8s.writeError(c, err) return } - c.JSON(http.StatusOK, f) + + c.JSON(http.StatusOK, folderDTO) } // func (fk8s *folderK8sHandler) getFolder(c *contextmodel.ReqContext) { @@ -713,13 +724,13 @@ func (fk8s *folderK8sHandler) createFolder(c *contextmodel.ReqContext) { // return // } -// f, err := internalfolders.UnstructuredToLegacyFolderDTO(*out) -// if err != nil { -// fk8s.writeError(c, err) -// return -// } +// folderDTO, err := fk8s.newToFolderDto(c, *out) +// if err != nil { +// fk8s.writeError(c, err) +// return +// } -// c.JSON(http.StatusOK, f) +// c.JSON(http.StatusOK, folderDTO) // } // func (fk8s *folderK8sHandler) deleteFolder(c *contextmodel.ReqContext) { @@ -755,13 +766,13 @@ func (fk8s *folderK8sHandler) createFolder(c *contextmodel.ReqContext) { // return // } -// f, err := internalfolders.UnstructuredToLegacyFolderDTO(*out) -// if err != nil { -// fk8s.writeError(c, err) -// return -// } +// folderDTO, err := fk8s.newToFolderDto(c, *out) +// if err != nil { +// fk8s.writeError(c, err) +// return +// } -// c.JSON(http.StatusOK, f) +// c.JSON(http.StatusOK, folderDTO) // } //----------------------------------------------------------------------------------------- @@ -786,3 +797,188 @@ func (fk8s *folderK8sHandler) writeError(c *contextmodel.ReqContext, err error) } errhttp.Write(c.Req.Context(), err, c.Resp) } + +func (fk8s *folderK8sHandler) newToFolderDto(c *contextmodel.ReqContext, item unstructured.Unstructured, orgID int64) (dtos.Folder, error) { + // #TODO revisit how/where we get orgID + ctx := c.Req.Context() + + f := internalfolders.UnstructuredToLegacyFolder(item, orgID) + + fDTO, err := internalfolders.UnstructuredToLegacyFolderDTO(item) + if err != nil { + return dtos.Folder{}, err + } + + toID := func(rawIdentifier string) (int64, error) { + parts := strings.Split(rawIdentifier, ":") + if len(parts) < 2 { + return 0, fmt.Errorf("invalid user identifier") + } + userID, err := strconv.ParseInt(parts[1], 10, 64) + if err != nil { + return 0, fmt.Errorf("failed to parse user identifier") + } + return userID, nil + } + + toDTO :=
func(fold *folder.Folder, checkCanView bool) (dtos.Folder, error) { + g, err := guardian.NewByFolder(c.Req.Context(), fold, c.SignedInUser.GetOrgID(), c.SignedInUser) + if err != nil { + return dtos.Folder{}, err + } + + canEdit, _ := g.CanEdit() + canSave, _ := g.CanSave() + canAdmin, _ := g.CanAdmin() + canDelete, _ := g.CanDelete() + + // Finding creator and last updater of the folder + updater, creator := anonString, anonString + // #TODO refactor the various conversions of the folder so that we either set created by in folder.Folder or + // we convert from unstructured to folder DTO without an intermediate conversion to folder.Folder + if len(fDTO.CreatedBy) > 0 { + id, err := toID(fDTO.CreatedBy) + if err != nil { + return dtos.Folder{}, err + } + creator = fk8s.getUserLogin(ctx, id) + } + if len(fDTO.UpdatedBy) > 0 { + id, err := toID(fDTO.UpdatedBy) + if err != nil { + return dtos.Folder{}, err + } + updater = fk8s.getUserLogin(ctx, id) + } + + acMetadata, _ := fk8s.getFolderACMetadata(c, fold) + + if checkCanView { + canView, _ := g.CanView() + if !canView { + return dtos.Folder{ + UID: REDACTED, + Title: REDACTED, + }, nil + } + } + metrics.MFolderIDsAPICount.WithLabelValues(metrics.NewToFolderDTO).Inc() + + fDTO.CanSave = canSave + fDTO.CanEdit = canEdit + fDTO.CanAdmin = canAdmin + fDTO.CanDelete = canDelete + fDTO.CreatedBy = creator + fDTO.UpdatedBy = updater + fDTO.AccessControl = acMetadata + fDTO.OrgID = f.OrgID + // #TODO version doesn't seem to be used--confirm or set it properly + fDTO.Version = 1 + + return *fDTO, nil + } + + // no need to check view permission for the starting folder since it's already checked by the callers + folderDTO, err := toDTO(f, false) + if err != nil { + return dtos.Folder{}, err + } + + parents := []*folder.Folder{} + if folderDTO.ParentUID != "" { + parents, err = fk8s.folderService.GetParents( + c.Req.Context(), + folder.GetParentsQuery{ + UID: folderDTO.UID, + OrgID: folderDTO.OrgID, + }) + if err != nil { + return dtos.Folder{}, err + } + } + + // #TODO refactor so that we have just one function for converting to folder DTO + toParentDTO := func(fold *folder.Folder, checkCanView bool) (dtos.Folder, error) { + g, err := guardian.NewByFolder(c.Req.Context(), fold, c.SignedInUser.GetOrgID(), c.SignedInUser) + if err != nil { + return dtos.Folder{}, err + } + + if checkCanView { + canView, _ := g.CanView() + if !canView { + return dtos.Folder{ + UID: REDACTED, + Title: REDACTED, + }, nil + } + } + metrics.MFolderIDsAPICount.WithLabelValues(metrics.NewToFolderDTO).Inc() + + return dtos.Folder{ + UID: fold.UID, + Title: fold.Title, + URL: fold.URL, + }, nil + } + + folderDTO.Parents = make([]dtos.Folder, 0, len(parents)) + for _, f := range parents { + DTO, err := toParentDTO(f, true) + if err != nil { + // #TODO add logging + // fk8s.log.Error("failed to convert folder to DTO", "folder", f.UID, "org", f.OrgID, "error", err) + continue + } + folderDTO.Parents = append(folderDTO.Parents, DTO) + } + + return folderDTO, nil +} + +func (fk8s *folderK8sHandler) getUserLogin(ctx context.Context, userID int64) string { + ctx, span := tracer.Start(ctx, "api.getUserLogin") + defer span.End() + + query := user.GetUserByIDQuery{ID: userID} + user, err := fk8s.userService.GetByID(ctx, &query) + if err != nil { + return anonString + } + return user.Login +} + +func (fk8s *folderK8sHandler) getFolderACMetadata(c *contextmodel.ReqContext, f *folder.Folder) (accesscontrol.Metadata, error) { + if !c.QueryBool("accesscontrol") { + return nil, nil + } + + var 
err error + parents := []*folder.Folder{} + if f.ParentUID != "" { + parents, err = fk8s.folderService.GetParents( + c.Req.Context(), + folder.GetParentsQuery{ + UID: f.UID, + OrgID: c.SignedInUser.GetOrgID(), + }) + if err != nil { + return nil, err + } + } + + folderIDs := map[string]bool{f.UID: true} + for _, p := range parents { + folderIDs[p.UID] = true + } + + allMetadata := getMultiAccessControlMetadata(c, dashboards.ScopeFoldersPrefix, folderIDs) + metadata := map[string]bool{} + // Flatten metadata - if any parent has a permission, the child folder inherits it + for _, md := range allMetadata { + for action := range md { + metadata[action] = true + } + } + return metadata, nil +} diff --git a/pkg/api/folder_test.go b/pkg/api/folder_test.go index 752e649a030..c5086968880 100644 --- a/pkg/api/folder_test.go +++ b/pkg/api/folder_test.go @@ -5,17 +5,20 @@ import ( "encoding/json" "fmt" "net/http" + "net/http/httptest" "strings" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" + clientrest "k8s.io/client-go/rest" "github.com/grafana/grafana/pkg/api/dtos" "github.com/grafana/grafana/pkg/services/accesscontrol" "github.com/grafana/grafana/pkg/services/accesscontrol/actest" acmock "github.com/grafana/grafana/pkg/services/accesscontrol/mock" + contextmodel "github.com/grafana/grafana/pkg/services/contexthandler/model" "github.com/grafana/grafana/pkg/services/dashboards" "github.com/grafana/grafana/pkg/services/featuremgmt" "github.com/grafana/grafana/pkg/services/folder" @@ -523,3 +526,105 @@ func TestFolderGetAPIEndpoint(t *testing.T) { }) } } + +type mockClientConfigProvider struct { + host string +} + +func (m mockClientConfigProvider) GetDirectRestConfig(c *contextmodel.ReqContext) *clientrest.Config { + return &clientrest.Config{ + Host: m.host, + } +} + +func (m mockClientConfigProvider) DirectlyServeHTTP(w http.ResponseWriter, r *http.Request) {} + +func TestHTTPServer_FolderMetadataK8s(t *testing.T) { + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + //nolint:errcheck + fmt.Fprintln(w, + `{ + "kind": "Folder", + "apiVersion": "folder.grafana.app/v0alpha1", + "metadata": { + "name": "ady4yobv315a8e", + "namespace": "default", + "uid": "28f306ee-ada1-40f4-8011-b2d1df462aad", + "creationTimestamp": "2024-09-17T04:16:35Z", + "annotations": { + "grafana.app/createdBy": "user:fdxsqt7t5ryf4a", + "grafana.app/originName": "SQL", + "grafana.app/originPath": "3" + } + }, + "spec": { + "title": "Example folder 226" + } + }`) + })) + defer ts.Close() + + mockClientConfigProvider := mockClientConfigProvider{ + host: ts.URL, + } + + setUpRBACGuardian(t) + folderService := &foldertest.FakeService{} + features := featuremgmt.WithFeatures(featuremgmt.FlagNestedFolders, featuremgmt.FlagKubernetesFolders) + server := SetupAPITestServer(t, func(hs *HTTPServer) { + hs.Cfg = setting.NewCfg() + hs.folderService = folderService + hs.QuotaService = quotatest.New(false, nil) + hs.SearchService = &mockSearchService{ + ExpectedResult: model.HitList{}, + } + hs.Features = features + hs.clientConfigProvider = mockClientConfigProvider + }) + + t.Run("Should attach access control metadata to folder response", func(t *testing.T) { + folderService.ExpectedFolder = &folder.Folder{UID: "ady4yobv315a8e"} + + req := server.NewGetRequest("/api/folders/ady4yobv315a8e?accesscontrol=true") + webtest.RequestWithSignedInUser(req, &user.SignedInUser{UserID: 1, OrgID: 1, Permissions: map[int64]map[string][]string{ 
+ 1: accesscontrol.GroupScopesByActionContext(context.Background(), []accesscontrol.Permission{ + {Action: dashboards.ActionFoldersRead, Scope: dashboards.ScopeFoldersAll}, + {Action: dashboards.ActionFoldersWrite, Scope: dashboards.ScopeFoldersProvider.GetResourceScopeUID("ady4yobv315a8e")}, + }), + }}) + + res, err := server.Send(req) + require.NoError(t, err) + assert.Equal(t, http.StatusOK, res.StatusCode) + defer func() { require.NoError(t, res.Body.Close()) }() + + body := dtos.Folder{} + require.NoError(t, json.NewDecoder(res.Body).Decode(&body)) + + assert.True(t, body.AccessControl[dashboards.ActionFoldersRead]) + assert.True(t, body.AccessControl[dashboards.ActionFoldersWrite]) + }) + + t.Run("Should not attach access control metadata to folder response", func(t *testing.T) { + folderService.ExpectedFolder = &folder.Folder{UID: "ady4yobv315a8e"} + + req := server.NewGetRequest("/api/folders/ady4yobv315a8e") + webtest.RequestWithSignedInUser(req, &user.SignedInUser{UserID: 1, OrgID: 1, Permissions: map[int64]map[string][]string{ + 1: accesscontrol.GroupScopesByActionContext(context.Background(), []accesscontrol.Permission{ + {Action: dashboards.ActionFoldersRead, Scope: dashboards.ScopeFoldersAll}, + {Action: dashboards.ActionFoldersWrite, Scope: dashboards.ScopeFoldersProvider.GetResourceScopeUID("ady4yobv315a8e")}, + }), + }}) + + res, err := server.Send(req) + require.NoError(t, err) + assert.Equal(t, http.StatusOK, res.StatusCode) + defer func() { require.NoError(t, res.Body.Close()) }() + + body := dtos.Folder{} + require.NoError(t, json.NewDecoder(res.Body).Decode(&body)) + + assert.False(t, body.AccessControl[dashboards.ActionFoldersRead]) + assert.False(t, body.AccessControl[dashboards.ActionFoldersWrite]) + }) +} diff --git a/pkg/api/frontendsettings.go b/pkg/api/frontendsettings.go index 511d673ca6b..3aea664fe2d 100644 --- a/pkg/api/frontendsettings.go +++ b/pkg/api/frontendsettings.go @@ -145,6 +145,7 @@ func (hs *HTTPServer) getFrontendSettings(c *contextmodel.ReqContext) (*dtos.Fro AliasIDs: panel.AliasIDs, Info: panel.Info, Module: panel.Module, + ModuleHash: hs.pluginAssets.ModuleHash(c.Req.Context(), panel), BaseURL: panel.BaseURL, SkipDataQuery: panel.SkipDataQuery, HideFromList: panel.HideFromList, @@ -453,6 +454,7 @@ func (hs *HTTPServer) getFSDataSources(c *contextmodel.ReqContext, availablePlug JSONData: plugin.JSONData, Signature: plugin.Signature, Module: plugin.Module, + ModuleHash: hs.pluginAssets.ModuleHash(c.Req.Context(), plugin), BaseURL: plugin.BaseURL, Angular: plugin.Angular, MultiValueFilterOperators: plugin.MultiValueFilterOperators, @@ -538,8 +540,9 @@ func (hs *HTTPServer) getFSDataSources(c *contextmodel.ReqContext, availablePlug JSONData: ds.JSONData, Signature: ds.Signature, Module: ds.Module, - BaseURL: ds.BaseURL, - Angular: ds.Angular, + // ModuleHash: hs.pluginAssets.ModuleHash(c.Req.Context(), ds), + BaseURL: ds.BaseURL, + Angular: ds.Angular, }, } if ds.Name == grafanads.DatasourceName { @@ -563,6 +566,7 @@ func (hs *HTTPServer) newAppDTO(ctx context.Context, plugin pluginstore.Plugin, LoadingStrategy: hs.pluginAssets.LoadingStrategy(ctx, plugin), Extensions: plugin.Extensions, Dependencies: plugin.Dependencies, + ModuleHash: hs.pluginAssets.ModuleHash(ctx, plugin), } if settings.Enabled { diff --git a/pkg/api/frontendsettings_test.go b/pkg/api/frontendsettings_test.go index 6d7139e28db..9edb77235f4 100644 --- a/pkg/api/frontendsettings_test.go +++ b/pkg/api/frontendsettings_test.go @@ -18,6 +18,8 @@ import ( 
"github.com/grafana/grafana/pkg/login/social/socialimpl" "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/plugins/config" + "github.com/grafana/grafana/pkg/plugins/manager/signature" + "github.com/grafana/grafana/pkg/plugins/manager/signature/statickey" "github.com/grafana/grafana/pkg/plugins/pluginscdn" accesscontrolmock "github.com/grafana/grafana/pkg/services/accesscontrol/mock" "github.com/grafana/grafana/pkg/services/apiserver/endpoints/request" @@ -51,10 +53,11 @@ func setupTestEnvironment(t *testing.T, cfg *setting.Cfg, features featuremgmt.F }) } - pluginsCDN := pluginscdn.ProvideService(&config.PluginManagementCfg{ + pluginsCfg := &config.PluginManagementCfg{ PluginsCDNURLTemplate: cfg.PluginsCDNURLTemplate, PluginSettings: cfg.PluginSettings, - }) + } + pluginsCDN := pluginscdn.ProvideService(pluginsCfg) var pluginStore = pstore if pluginStore == nil { @@ -68,7 +71,8 @@ func setupTestEnvironment(t *testing.T, cfg *setting.Cfg, features featuremgmt.F var pluginsAssets = passets if pluginsAssets == nil { - pluginsAssets = pluginassets.ProvideService(cfg, pluginsCDN) + sig := signature.ProvideService(pluginsCfg, statickey.New()) + pluginsAssets = pluginassets.ProvideService(pluginsCfg, pluginsCDN, sig, pluginStore) } hs := &HTTPServer{ @@ -240,6 +244,7 @@ func TestHTTPServer_GetFrontendSettings_apps(t *testing.T) { PluginList: []pluginstore.Plugin{ { Module: fmt.Sprintf("/%s/module.js", "test-app"), + // ModuleHash: "sha256-test", JSONData: plugins.JSONData{ ID: "test-app", Info: plugins.Info{Version: "0.5.0"}, @@ -255,9 +260,7 @@ func TestHTTPServer_GetFrontendSettings_apps(t *testing.T) { Plugins: newAppSettings("test-app", false), } }, - pluginAssets: func() *pluginassets.Service { - return pluginassets.ProvideService(setting.NewCfg(), pluginscdn.ProvideService(&config.PluginManagementCfg{})) - }, + pluginAssets: newPluginAssets(), expected: settings{ Apps: map[string]*plugins.AppDTO{ "test-app": { @@ -266,6 +269,7 @@ func TestHTTPServer_GetFrontendSettings_apps(t *testing.T) { Path: "/test-app/module.js", Version: "0.5.0", LoadingStrategy: plugins.LoadingStrategyScript, + // ModuleHash: "sha256-test", }, }, }, @@ -277,6 +281,7 @@ func TestHTTPServer_GetFrontendSettings_apps(t *testing.T) { PluginList: []pluginstore.Plugin{ { Module: fmt.Sprintf("/%s/module.js", "test-app"), + // ModuleHash: "sha256-test", JSONData: plugins.JSONData{ ID: "test-app", Info: plugins.Info{Version: "0.5.0"}, @@ -292,9 +297,7 @@ func TestHTTPServer_GetFrontendSettings_apps(t *testing.T) { Plugins: newAppSettings("test-app", true), } }, - pluginAssets: func() *pluginassets.Service { - return pluginassets.ProvideService(setting.NewCfg(), pluginscdn.ProvideService(&config.PluginManagementCfg{})) - }, + pluginAssets: newPluginAssets(), expected: settings{ Apps: map[string]*plugins.AppDTO{ "test-app": { @@ -303,6 +306,7 @@ func TestHTTPServer_GetFrontendSettings_apps(t *testing.T) { Path: "/test-app/module.js", Version: "0.5.0", LoadingStrategy: plugins.LoadingStrategyScript, + // ModuleHash: "sha256-test", }, }, }, @@ -330,9 +334,7 @@ func TestHTTPServer_GetFrontendSettings_apps(t *testing.T) { Plugins: newAppSettings("test-app", true), } }, - pluginAssets: func() *pluginassets.Service { - return pluginassets.ProvideService(setting.NewCfg(), pluginscdn.ProvideService(&config.PluginManagementCfg{})) - }, + pluginAssets: newPluginAssets(), expected: settings{ Apps: map[string]*plugins.AppDTO{ "test-app": { @@ -368,15 +370,13 @@ func TestHTTPServer_GetFrontendSettings_apps(t 
*testing.T) { Plugins: newAppSettings("test-app", true), } }, - pluginAssets: func() *pluginassets.Service { - return pluginassets.ProvideService(&setting.Cfg{ - PluginSettings: map[string]map[string]string{ - "test-app": { - pluginassets.CreatePluginVersionCfgKey: pluginassets.CreatePluginVersionScriptSupportEnabled, - }, + pluginAssets: newPluginAssetsWithConfig(&config.PluginManagementCfg{ + PluginSettings: map[string]map[string]string{ + "test-app": { + pluginassets.CreatePluginVersionCfgKey: pluginassets.CreatePluginVersionScriptSupportEnabled, }, - }, pluginscdn.ProvideService(&config.PluginManagementCfg{})) - }, + }, + }), expected: settings{ Apps: map[string]*plugins.AppDTO{ "test-app": { @@ -412,9 +412,7 @@ func TestHTTPServer_GetFrontendSettings_apps(t *testing.T) { Plugins: newAppSettings("test-app", true), } }, - pluginAssets: func() *pluginassets.Service { - return pluginassets.ProvideService(setting.NewCfg(), pluginscdn.ProvideService(&config.PluginManagementCfg{})) - }, + pluginAssets: newPluginAssets(), expected: settings{ Apps: map[string]*plugins.AppDTO{ "test-app": { @@ -456,3 +454,13 @@ func newAppSettings(id string, enabled bool) map[string]*pluginsettings.DTO { }, } } + +func newPluginAssets() func() *pluginassets.Service { + return newPluginAssetsWithConfig(&config.PluginManagementCfg{}) +} + +func newPluginAssetsWithConfig(pCfg *config.PluginManagementCfg) func() *pluginassets.Service { + return func() *pluginassets.Service { + return pluginassets.ProvideService(pCfg, pluginscdn.ProvideService(pCfg), signature.ProvideService(pCfg, statickey.New()), &pluginstore.FakePluginStore{}) + } +} diff --git a/pkg/api/http_server.go b/pkg/api/http_server.go index f9908deaadf..06ce9cc4b17 100644 --- a/pkg/api/http_server.go +++ b/pkg/api/http_server.go @@ -104,6 +104,7 @@ import ( "github.com/grafana/grafana/pkg/services/tag" "github.com/grafana/grafana/pkg/services/team" tempUser "github.com/grafana/grafana/pkg/services/temp_user" + "github.com/grafana/grafana/pkg/services/unifiedSearch" "github.com/grafana/grafana/pkg/services/updatechecker" "github.com/grafana/grafana/pkg/services/user" "github.com/grafana/grafana/pkg/services/validations" @@ -156,6 +157,7 @@ type HTTPServer struct { LivePushGateway *pushhttp.Gateway StorageService store.StorageService SearchV2HTTPService searchV2.SearchHTTPService + UnifiedSearchHTTPService unifiedSearch.SearchHTTPService ContextHandler *contexthandler.ContextHandler LoggerMiddleware loggermw.Logger SQLStore db.DB @@ -266,7 +268,7 @@ func ProvideHTTPServer(opts ServerOptions, cfg *setting.Cfg, routeRegister routi publicDashboardsApi *publicdashboardsApi.Api, userService user.Service, tempUserService tempUser.Service, loginAttemptService loginAttempt.Service, orgService org.Service, teamService team.Service, accesscontrolService accesscontrol.Service, navTreeService navtree.Service, - annotationRepo annotations.Repository, tagService tag.Service, searchv2HTTPService searchV2.SearchHTTPService, oauthTokenService oauthtoken.OAuthTokenService, + annotationRepo annotations.Repository, tagService tag.Service, searchv2HTTPService searchV2.SearchHTTPService, unifiedSearchHTTPService unifiedSearch.SearchHTTPService, oauthTokenService oauthtoken.OAuthTokenService, statsService stats.Service, authnService authn.Service, pluginsCDNService *pluginscdn.Service, promGatherer prometheus.Gatherer, starApi *starApi.API, promRegister prometheus.Registerer, clientConfigProvider grafanaapiserver.DirectRestConfigProvider, anonService anonymous.Service, 
userVerifier user.Verifier, @@ -308,6 +310,7 @@ func ProvideHTTPServer(opts ServerOptions, cfg *setting.Cfg, routeRegister routi AccessControl: accessControl, DataProxy: dataSourceProxy, SearchV2HTTPService: searchv2HTTPService, + UnifiedSearchHTTPService: unifiedSearchHTTPService, SearchService: searchService, Live: live, LivePushGateway: livePushGateway, diff --git a/pkg/api/login.go b/pkg/api/login.go index de3bacdf7dc..dfca323c83f 100644 --- a/pkg/api/login.go +++ b/pkg/api/login.go @@ -14,6 +14,7 @@ import ( "github.com/grafana/grafana/pkg/apimachinery/identity" "github.com/grafana/grafana/pkg/infra/metrics" "github.com/grafana/grafana/pkg/infra/network" + "github.com/grafana/grafana/pkg/middleware" "github.com/grafana/grafana/pkg/middleware/cookies" "github.com/grafana/grafana/pkg/services/auth" "github.com/grafana/grafana/pkg/services/authn" @@ -181,6 +182,9 @@ func (hs *HTTPServer) tryAutoLogin(c *contextmodel.ReqContext) bool { for providerName, provider := range oauthInfos { if provider.AutoLogin || hs.Cfg.OAuthAutoLogin { redirectUrl := hs.Cfg.AppSubURL + "/login/" + providerName + if hs.Features.IsEnabledGlobally(featuremgmt.FlagUseSessionStorageForRedirection) { + redirectUrl += hs.getRedirectToForAutoLogin(c) + } c.Logger.Info("OAuth auto login enabled. Redirecting to " + redirectUrl) c.Redirect(redirectUrl, 307) return true @@ -189,6 +193,9 @@ func (hs *HTTPServer) tryAutoLogin(c *contextmodel.ReqContext) bool { if samlAutoLogin { redirectUrl := hs.Cfg.AppSubURL + "/login/saml" + if hs.Features.IsEnabledGlobally(featuremgmt.FlagUseSessionStorageForRedirection) { + redirectUrl += hs.getRedirectToForAutoLogin(c) + } c.Logger.Info("SAML auto login enabled. Redirecting to " + redirectUrl) c.Redirect(redirectUrl, 307) return true @@ -197,6 +204,21 @@ func (hs *HTTPServer) tryAutoLogin(c *contextmodel.ReqContext) bool { return false } +func (hs *HTTPServer) getRedirectToForAutoLogin(c *contextmodel.ReqContext) string { + redirectTo := c.Req.FormValue("redirectTo") + if hs.Cfg.AppSubURL != "" && strings.HasPrefix(redirectTo, hs.Cfg.AppSubURL) { + redirectTo = strings.TrimPrefix(redirectTo, hs.Cfg.AppSubURL) + } + + if redirectTo == "/" { + return "" + } + + // remove any forceLogin=true params + redirectTo = middleware.RemoveForceLoginParams(redirectTo) + return "?redirectTo=" + url.QueryEscape(redirectTo) +} + func (hs *HTTPServer) LoginAPIPing(c *contextmodel.ReqContext) response.Response { if c.IsSignedIn || c.IsAnonymous { return response.JSON(http.StatusOK, util.DynMap{"message": "Logged in"}) @@ -233,7 +255,7 @@ func (hs *HTTPServer) loginUserWithUser(user *user.User, c *contextmodel.ReqCont hs.log.Debug("Got IP address from client address", "addr", addr, "ip", ip) ctx := context.WithValue(c.Req.Context(), loginservice.RequestURIKey{}, c.Req.RequestURI) - userToken, err := hs.AuthTokenService.CreateToken(ctx, user, ip, c.Req.UserAgent()) + userToken, err := hs.AuthTokenService.CreateToken(ctx, &auth.CreateTokenCommand{User: user, ClientIP: ip, UserAgent: c.Req.UserAgent()}) if err != nil { return fmt.Errorf("%v: %w", "failed to create auth token", err) } diff --git a/pkg/api/login_oauth.go b/pkg/api/login_oauth.go index f59b09045a3..51a0857f96a 100644 --- a/pkg/api/login_oauth.go +++ b/pkg/api/login_oauth.go @@ -6,6 +6,7 @@ import ( "github.com/grafana/grafana/pkg/middleware/cookies" "github.com/grafana/grafana/pkg/services/authn" contextmodel "github.com/grafana/grafana/pkg/services/contexthandler/model" + "github.com/grafana/grafana/pkg/services/featuremgmt" 
"github.com/grafana/grafana/pkg/web" ) @@ -25,6 +26,7 @@ func (hs *HTTPServer) OAuthLogin(reqCtx *contextmodel.ReqContext) { } code := reqCtx.Query("code") + redirectTo := reqCtx.Query("redirectTo") req := &authn.Request{HTTPRequest: reqCtx.Req} if code == "" { @@ -36,6 +38,9 @@ func (hs *HTTPServer) OAuthLogin(reqCtx *contextmodel.ReqContext) { cookies.WriteCookie(reqCtx.Resp, OauthStateCookieName, redirect.Extra[authn.KeyOAuthState], hs.Cfg.OAuthCookieMaxAge, hs.CookieOptionsFromCfg) + if hs.Features.IsEnabledGlobally(featuremgmt.FlagUseSessionStorageForRedirection) { + cookies.WriteCookie(reqCtx.Resp, "redirectTo", redirectTo, hs.Cfg.OAuthCookieMaxAge, hs.CookieOptionsFromCfg) + } if pkce := redirect.Extra[authn.KeyOAuthPKCE]; pkce != "" { cookies.WriteCookie(reqCtx.Resp, OauthPKCECookieName, pkce, hs.Cfg.OAuthCookieMaxAge, hs.CookieOptionsFromCfg) } diff --git a/pkg/api/plugins.go b/pkg/api/plugins.go index e690dc91709..905feddef0f 100644 --- a/pkg/api/plugins.go +++ b/pkg/api/plugins.go @@ -123,7 +123,7 @@ func (hs *HTTPServer) GetPluginList(c *contextmodel.ReqContext) response.Respons } // Compute metadata - pluginsMetadata := hs.getMultiAccessControlMetadata(c, pluginaccesscontrol.ScopeProvider.GetResourceScope(""), filteredPluginIDs) + pluginsMetadata := getMultiAccessControlMetadata(c, pluginaccesscontrol.ScopeProvider.GetResourceScope(""), filteredPluginIDs) // Prepare DTO result := make(dtos.PluginList, 0) @@ -201,6 +201,7 @@ func (hs *HTTPServer) GetPluginSettingByID(c *contextmodel.ReqContext) response. Includes: plugin.Includes, BaseUrl: plugin.BaseURL, Module: plugin.Module, + ModuleHash: hs.pluginAssets.ModuleHash(c.Req.Context(), plugin), DefaultNavUrl: path.Join(hs.Cfg.AppSubURL, plugin.DefaultNavURL), State: plugin.State, Signature: plugin.Signature, diff --git a/pkg/api/plugins_test.go b/pkg/api/plugins_test.go index b517c964f84..7cb30bd428d 100644 --- a/pkg/api/plugins_test.go +++ b/pkg/api/plugins_test.go @@ -27,6 +27,8 @@ import ( "github.com/grafana/grafana/pkg/plugins/manager/fakes" "github.com/grafana/grafana/pkg/plugins/manager/filestore" "github.com/grafana/grafana/pkg/plugins/manager/registry" + "github.com/grafana/grafana/pkg/plugins/manager/signature" + "github.com/grafana/grafana/pkg/plugins/manager/signature/statickey" "github.com/grafana/grafana/pkg/plugins/pfs" "github.com/grafana/grafana/pkg/plugins/pluginscdn" ac "github.com/grafana/grafana/pkg/services/accesscontrol" @@ -788,7 +790,6 @@ func Test_PluginsSettings(t *testing.T) { Info: plugins.Info{ Version: "1.0.0", }}, plugins.ClassExternal, plugins.NewFakeFS()) - pluginRegistry := &fakes.FakePluginRegistry{ Store: map[string]*plugins.Plugin{ p1.ID: p1, @@ -843,8 +844,10 @@ func Test_PluginsSettings(t *testing.T) { ErrorCode: tc.errCode, }) } - pluginCDN := pluginscdn.ProvideService(&config.PluginManagementCfg{}) - hs.pluginAssets = pluginassets.ProvideService(hs.Cfg, pluginCDN) + pCfg := &config.PluginManagementCfg{} + pluginCDN := pluginscdn.ProvideService(pCfg) + sig := signature.ProvideService(pCfg, statickey.New()) + hs.pluginAssets = pluginassets.ProvideService(pCfg, pluginCDN, sig, hs.pluginStore) hs.pluginErrorResolver = pluginerrs.ProvideStore(errTracker) var err error hs.pluginsUpdateChecker, err = updatechecker.ProvidePluginsService(hs.Cfg, nil, tracing.InitializeTracerForTest()) diff --git a/pkg/api/user.go b/pkg/api/user.go index eb3b2c265ca..26096c78d38 100644 --- a/pkg/api/user.go +++ b/pkg/api/user.go @@ -91,7 +91,7 @@ func (hs *HTTPServer) getUserUserProfile(c *contextmodel.ReqContext, 
userID int6 userProfile.IsGrafanaAdminExternallySynced = login.IsGrafanaAdminExternallySynced(hs.Cfg, oauthInfo, authInfo.AuthModule) } - userProfile.AccessControl = hs.getAccessControlMetadata(c, "global.users:id:", strconv.FormatInt(userID, 10)) + userProfile.AccessControl = getAccessControlMetadata(c, "global.users:id:", strconv.FormatInt(userID, 10)) userProfile.AvatarURL = dtos.GetGravatarUrl(hs.Cfg, userProfile.Email) return response.JSON(http.StatusOK, userProfile) diff --git a/pkg/apiserver/storage/testing/watcher_tests.go b/pkg/apiserver/storage/testing/watcher_tests.go index 1684f15819f..213b7553faa 100644 --- a/pkg/apiserver/storage/testing/watcher_tests.go +++ b/pkg/apiserver/storage/testing/watcher_tests.go @@ -1407,22 +1407,25 @@ func RunWatchSemantics(ctx context.Context, t *testing.T, store storage.Interfac podsAfterEstablishingWatch: []*example.Pod{makePod("4"), makePod("5")}, expectedEventsAfterEstablishingWatch: addEventsFromCreatedPods, }, - - { - name: "legacy, RV=0", - resourceVersion: "0", - initialPods: []*example.Pod{makePod("1"), makePod("2"), makePod("3")}, - expectedInitialEventsInRandomOrder: addEventsFromCreatedPods, - podsAfterEstablishingWatch: []*example.Pod{makePod("4"), makePod("5")}, - expectedEventsAfterEstablishingWatch: addEventsFromCreatedPods, - }, - { - name: "legacy, RV=unset", - initialPods: []*example.Pod{makePod("1"), makePod("2"), makePod("3")}, - expectedInitialEventsInRandomOrder: addEventsFromCreatedPods, - podsAfterEstablishingWatch: []*example.Pod{makePod("4"), makePod("5")}, - expectedEventsAfterEstablishingWatch: addEventsFromCreatedPods, - }, + // Not Supported by unistore because there is no way to differentiate between: + // - SendInitialEvents=nil && resourceVersion=0 + // - sendInitialEvents=false && resourceVersion=0 + // This is a Legacy feature in k8s.io/apiserver/pkg/storage/etcd3/watcher_test.go#196 + // { + // name: "legacy, RV=0", + // resourceVersion: "0", + // initialPods: []*example.Pod{makePod("1"), makePod("2"), makePod("3")}, + // expectedInitialEventsInRandomOrder: addEventsFromCreatedPods, + // podsAfterEstablishingWatch: []*example.Pod{makePod("4"), makePod("5")}, + // expectedEventsAfterEstablishingWatch: addEventsFromCreatedPods, + // }, + // { + // name: "legacy, RV=unset", + // initialPods: []*example.Pod{makePod("1"), makePod("2"), makePod("3")}, + // expectedInitialEventsInRandomOrder: addEventsFromCreatedPods, + // podsAfterEstablishingWatch: []*example.Pod{makePod("4"), makePod("5")}, + // expectedEventsAfterEstablishingWatch: addEventsFromCreatedPods, + // }, } for idx, scenario := range scenarios { t.Run(scenario.name, func(t *testing.T) { diff --git a/pkg/middleware/auth.go b/pkg/middleware/auth.go index 338ceb77f25..c85342dd0ce 100644 --- a/pkg/middleware/auth.go +++ b/pkg/middleware/auth.go @@ -98,7 +98,7 @@ func writeRedirectCookie(c *contextmodel.ReqContext) { } // remove any forceLogin=true params - redirectTo = removeForceLoginParams(redirectTo) + redirectTo = RemoveForceLoginParams(redirectTo) cookies.WriteCookie(c.Resp, "redirect_to", url.QueryEscape(redirectTo), 0, nil) } @@ -113,13 +113,13 @@ func getRedirectToQueryParam(c *contextmodel.ReqContext) string { } // remove any forceLogin=true params - redirectTo = removeForceLoginParams(redirectTo) + redirectTo = RemoveForceLoginParams(redirectTo) return "?redirectTo=" + url.QueryEscape(redirectTo) } var forceLoginParamsRegexp = regexp.MustCompile(`&?forceLogin=true`) -func removeForceLoginParams(str string) string { +func RemoveForceLoginParams(str 
string) string { return forceLoginParamsRegexp.ReplaceAllString(str, "") } @@ -138,7 +138,8 @@ func CanAdminPlugins(cfg *setting.Cfg, accessControl ac.AccessControl) func(c *c } func RoleAppPluginAuth(accessControl ac.AccessControl, ps pluginstore.Store, features featuremgmt.FeatureToggles, - logger log.Logger) func(c *contextmodel.ReqContext) { + logger log.Logger, +) func(c *contextmodel.ReqContext) { return func(c *contextmodel.ReqContext) { pluginID := web.Params(c.Req)[":id"] p, exists := ps.Plugin(c.Req.Context(), pluginID) diff --git a/pkg/middleware/auth_test.go b/pkg/middleware/auth_test.go index 3530c90f211..6db5d11cff2 100644 --- a/pkg/middleware/auth_test.go +++ b/pkg/middleware/auth_test.go @@ -352,7 +352,7 @@ func TestRemoveForceLoginparams(t *testing.T) { } for i, tc := range tcs { t.Run(fmt.Sprintf("testcase %d", i), func(t *testing.T) { - require.Equal(t, tc.exp, removeForceLoginParams(tc.inp)) + require.Equal(t, tc.exp, RemoveForceLoginParams(tc.inp)) }) } } diff --git a/pkg/models/usertoken/user_token.go b/pkg/models/usertoken/user_token.go index 7c9479c5b42..ea661d1d322 100644 --- a/pkg/models/usertoken/user_token.go +++ b/pkg/models/usertoken/user_token.go @@ -22,19 +22,20 @@ func (e *TokenRevokedError) Unwrap() error { return ErrInvalidSessionToken } // UserToken represents a user token type UserToken struct { - Id int64 - UserId int64 - AuthToken string - PrevAuthToken string - UserAgent string - ClientIp string - AuthTokenSeen bool - SeenAt int64 - RotatedAt int64 - CreatedAt int64 - UpdatedAt int64 - RevokedAt int64 - UnhashedToken string + Id int64 + UserId int64 + ExternalSessionId int64 + AuthToken string + PrevAuthToken string + UserAgent string + ClientIp string + AuthTokenSeen bool + SeenAt int64 + RotatedAt int64 + CreatedAt int64 + UpdatedAt int64 + RevokedAt int64 + UnhashedToken string } const UrgentRotateTime = 1 * time.Minute diff --git a/pkg/plugins/config/config.go b/pkg/plugins/config/config.go index a8081f728a7..37ba863c86f 100644 --- a/pkg/plugins/config/config.go +++ b/pkg/plugins/config/config.go @@ -32,6 +32,7 @@ type PluginManagementCfg struct { type Features struct { ExternalCorePluginsEnabled bool SkipHostEnvVarsEnabled bool + SriChecksEnabled bool } // NewPluginManagementCfg returns a new PluginManagementCfg. 
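The moduleHash plumbing in the chunks above (dtos.PluginSetting, plugins.PluginMetaDTO, PanelDTO, AppDTO, frontendsettings.go) together with the new pluginsSriChecks toggle and the SriChecksEnabled plugin-management flag point at Subresource Integrity (SRI) checks for plugin module.js assets. As a minimal sketch of what such a hash looks like on the wire — assuming the standard `sha256-<base64>` notation browsers accept in a script tag's integrity attribute; the `sriHash` helper and the file path below are illustrative, not Grafana code:

package main

import (
	"crypto/sha256"
	"encoding/base64"
	"fmt"
	"os"
)

// sriHash reads a file and returns its digest in SRI notation,
// e.g. sha256("test") yields "sha256-n4bQgYhMfWWaL+qgxVrQFaO/TxsrC4Is0V1sFbDwCgg=".
func sriHash(path string) (string, error) {
	b, err := os.ReadFile(path)
	if err != nil {
		return "", fmt.Errorf("read module file: %w", err)
	}
	sum := sha256.Sum256(b)
	return "sha256-" + base64.StdEncoding.EncodeToString(sum[:]), nil
}

func main() {
	// Hypothetical on-disk location of a plugin module.
	h, err := sriHash("public/plugins/test-app/module.js")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Println(h)
}

With a value like that exposed as moduleHash, the frontend can load the plugin via a script tag carrying integrity="sha256-…", and the browser will refuse to execute a module whose bytes no longer match the hash.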
diff --git a/pkg/plugins/manager/signature/manifest.go b/pkg/plugins/manager/signature/manifest.go index 4bb035f011e..c363f888198 100644 --- a/pkg/plugins/manager/signature/manifest.go +++ b/pkg/plugins/manager/signature/manifest.go @@ -53,7 +53,7 @@ type PluginManifest struct { RootURLs []string `json:"rootUrls"` } -func (m *PluginManifest) isV2() bool { +func (m *PluginManifest) IsV2() bool { return strings.HasPrefix(m.ManifestVersion, "2.") } @@ -107,34 +107,17 @@ func (s *Signature) readPluginManifest(ctx context.Context, body []byte) (*Plugi return &manifest, nil } -func (s *Signature) Calculate(ctx context.Context, src plugins.PluginSource, plugin plugins.FoundPlugin) (plugins.Signature, error) { - if defaultSignature, exists := src.DefaultSignature(ctx); exists { - return defaultSignature, nil - } - fsFiles, err := plugin.FS.Files() - if err != nil { - return plugins.Signature{}, fmt.Errorf("files: %w", err) - } - if len(fsFiles) == 0 { - s.log.Warn("No plugin file information in directory", "pluginId", plugin.JSONData.ID) - return plugins.Signature{ - Status: plugins.SignatureStatusInvalid, - }, nil - } +var ErrSignatureTypeUnsigned = errors.New("plugin is unsigned") - f, err := plugin.FS.Open("MANIFEST.txt") +// ReadPluginManifestFromFS reads the plugin manifest from the provided plugins.FS. +// If the manifest is not found, it will return an error wrapping ErrSignatureTypeUnsigned. +func (s *Signature) ReadPluginManifestFromFS(ctx context.Context, pfs plugins.FS) (*PluginManifest, error) { + f, err := pfs.Open("MANIFEST.txt") if err != nil { if errors.Is(err, plugins.ErrFileNotExist) { - s.log.Debug("Could not find a MANIFEST.txt", "id", plugin.JSONData.ID, "error", err) - return plugins.Signature{ - Status: plugins.SignatureStatusUnsigned, - }, nil + return nil, fmt.Errorf("%w: could not find a MANIFEST.txt", ErrSignatureTypeUnsigned) } - - s.log.Debug("Could not open MANIFEST.txt", "id", plugin.JSONData.ID, "error", err) - return plugins.Signature{ - Status: plugins.SignatureStatusInvalid, - }, nil + return nil, fmt.Errorf("could not open MANIFEST.txt: %w", err) } defer func() { if f == nil { @@ -147,21 +130,47 @@ func (s *Signature) Calculate(ctx context.Context, src plugins.PluginSource, plu byteValue, err := io.ReadAll(f) if err != nil || len(byteValue) < 10 { - s.log.Debug("MANIFEST.TXT is invalid", "id", plugin.JSONData.ID) - return plugins.Signature{ - Status: plugins.SignatureStatusUnsigned, - }, nil + return nil, fmt.Errorf("%w: MANIFEST.txt is invalid", ErrSignatureTypeUnsigned) } manifest, err := s.readPluginManifest(ctx, byteValue) if err != nil { - s.log.Warn("Plugin signature invalid", "id", plugin.JSONData.ID, "error", err) + return nil, err + } + return manifest, nil +} + +func (s *Signature) Calculate(ctx context.Context, src plugins.PluginSource, plugin plugins.FoundPlugin) (plugins.Signature, error) { + if defaultSignature, exists := src.DefaultSignature(ctx); exists { + return defaultSignature, nil + } + + manifest, err := s.ReadPluginManifestFromFS(ctx, plugin.FS) + switch { + case errors.Is(err, ErrSignatureTypeUnsigned): + s.log.Warn("Plugin is unsigned", "id", plugin.JSONData.ID, "err", err) + return plugins.Signature{ + Status: plugins.SignatureStatusUnsigned, + }, nil + case err != nil: + s.log.Warn("Plugin signature is invalid", "id", plugin.JSONData.ID, "err", err) return plugins.Signature{ Status: plugins.SignatureStatusInvalid, }, nil } - if !manifest.isV2() { + if !manifest.IsV2() { + return plugins.Signature{ + Status: 
plugins.SignatureStatusInvalid, + }, nil + } + + fsFiles, err := plugin.FS.Files() + if err != nil { + return plugins.Signature{}, fmt.Errorf("files: %w", err) + } + if len(fsFiles) == 0 { + s.log.Warn("No plugin file information in directory", "pluginId", plugin.JSONData.ID) return plugins.Signature{ Status: plugins.SignatureStatusInvalid, }, nil @@ -328,7 +337,7 @@ func (s *Signature) validateManifest(ctx context.Context, m PluginManifest, bloc if len(m.Files) == 0 { return invalidFieldErr{field: "files"} } - if m.isV2() { + if m.IsV2() { if len(m.SignedByOrg) == 0 { return invalidFieldErr{field: "signedByOrg"} } diff --git a/pkg/plugins/manager/signature/manifest_test.go b/pkg/plugins/manager/signature/manifest_test.go index cb7364ed93a..e83b527eaf0 100644 --- a/pkg/plugins/manager/signature/manifest_test.go +++ b/pkg/plugins/manager/signature/manifest_test.go @@ -19,6 +19,14 @@ import ( "github.com/grafana/grafana/pkg/plugins/manager/signature/statickey" ) +func provideDefaultTestService() *Signature { + return provideTestServiceWithConfig(&config.PluginManagementCfg{}) +} + +func provideTestServiceWithConfig(cfg *config.PluginManagementCfg) *Signature { + return ProvideService(cfg, statickey.New()) +} + func TestReadPluginManifest(t *testing.T) { txt := `-----BEGIN PGP SIGNED MESSAGE----- Hash: SHA512 @@ -52,7 +60,7 @@ NR7DnB0CCQHO+4FlSPtXFTzNepoc+CytQyDAeOLMLmf2Tqhk2YShk+G/YlVX -----END PGP SIGNATURE-----` t.Run("valid manifest", func(t *testing.T) { - s := ProvideService(&config.PluginManagementCfg{}, statickey.New()) + s := provideDefaultTestService() manifest, err := s.readPluginManifest(context.Background(), []byte(txt)) require.NoError(t, err) @@ -68,8 +76,8 @@ NR7DnB0CCQHO+4FlSPtXFTzNepoc+CytQyDAeOLMLmf2Tqhk2YShk+G/YlVX }) t.Run("invalid manifest", func(t *testing.T) { + s := provideDefaultTestService() modified := strings.ReplaceAll(txt, "README.md", "xxxxxxxxxx") - s := ProvideService(&config.PluginManagementCfg{}, statickey.New()) _, err := s.readPluginManifest(context.Background(), []byte(modified)) require.Error(t, err) }) @@ -107,7 +115,7 @@ khdr/tZ1PDgRxMqB/u+Vtbpl0xSxgblnrDOYMSI= -----END PGP SIGNATURE-----` t.Run("valid manifest", func(t *testing.T) { - s := ProvideService(&config.PluginManagementCfg{}, statickey.New()) + s := provideDefaultTestService() manifest, err := s.readPluginManifest(context.Background(), []byte(txt)) require.NoError(t, err) @@ -126,6 +134,12 @@ khdr/tZ1PDgRxMqB/u+Vtbpl0xSxgblnrDOYMSI= } func TestCalculate(t *testing.T) { + parentDir, err := filepath.Abs("../") + if err != nil { + t.Errorf("could not construct absolute path of current dir") + return + } + t.Run("Validate root URL against App URL for non-private plugin if is specified in manifest", func(t *testing.T) { tcs := []struct { appURL string @@ -147,15 +161,9 @@ func TestCalculate(t *testing.T) { }, } - parentDir, err := filepath.Abs("../") - if err != nil { - t.Errorf("could not construct absolute path of current dir") - return - } - for _, tc := range tcs { basePath := filepath.Join(parentDir, "testdata/non-pvt-with-root-url/plugin") - s := ProvideService(&config.PluginManagementCfg{GrafanaAppURL: tc.appURL}, statickey.New()) + s := provideTestServiceWithConfig(&config.PluginManagementCfg{GrafanaAppURL: tc.appURL}) sig, err := s.Calculate(context.Background(), &fakes.FakePluginSource{ PluginClassFunc: func(ctx context.Context) plugins.Class { return plugins.ClassExternal @@ -183,7 +191,7 @@ func TestCalculate(t *testing.T) { basePath := "../testdata/renderer-added-file/plugin" 
runningWindows = true - s := ProvideService(&config.PluginManagementCfg{}, statickey.New()) + s := provideDefaultTestService() sig, err := s.Calculate(context.Background(), &fakes.FakePluginSource{ PluginClassFunc: func(ctx context.Context) plugins.Class { return plugins.ClassExternal @@ -247,7 +255,7 @@ func TestCalculate(t *testing.T) { toSlash = tc.platform.toSlashFunc() fromSlash = tc.platform.fromSlashFunc() - s := ProvideService(&config.PluginManagementCfg{}, statickey.New()) + s := provideDefaultTestService() pfs, err := tc.fsFactory() require.NoError(t, err) pfs, err = newPathSeparatorOverrideFS(string(tc.platform.separator), pfs) @@ -721,7 +729,7 @@ func Test_validateManifest(t *testing.T) { } for _, tc := range tcs { t.Run(tc.name, func(t *testing.T) { - s := ProvideService(&config.PluginManagementCfg{}, statickey.New()) + s := provideDefaultTestService() err := s.validateManifest(context.Background(), *tc.manifest, nil) require.Errorf(t, err, tc.expectedErr) }) diff --git a/pkg/plugins/models.go b/pkg/plugins/models.go index c14cc44dd17..6b9b8e05ad2 100644 --- a/pkg/plugins/models.go +++ b/pkg/plugins/models.go @@ -262,6 +262,7 @@ type PluginMetaDTO struct { JSONData Signature SignatureStatus `json:"signature"` Module string `json:"module"` + ModuleHash string `json:"moduleHash,omitempty"` BaseURL string `json:"baseUrl"` Angular AngularMeta `json:"angular"` MultiValueFilterOperators bool `json:"multiValueFilterOperators"` @@ -314,6 +315,7 @@ type PanelDTO struct { Module string `json:"module"` Angular AngularMeta `json:"angular"` LoadingStrategy LoadingStrategy `json:"loadingStrategy"` + ModuleHash string `json:"moduleHash,omitempty"` } type AppDTO struct { @@ -325,6 +327,7 @@ type AppDTO struct { LoadingStrategy LoadingStrategy `json:"loadingStrategy"` Extensions Extensions `json:"extensions"` Dependencies Dependencies `json:"dependencies"` + ModuleHash string `json:"moduleHash,omitempty"` } const ( diff --git a/pkg/registry/apis/folders/conversions.go b/pkg/registry/apis/folders/conversions.go index e70a5dff7d0..0deabc23ed3 100644 --- a/pkg/registry/apis/folders/conversions.go +++ b/pkg/registry/apis/folders/conversions.go @@ -50,13 +50,47 @@ func LegacyUpdateCommandToUnstructured(cmd folder.UpdateFolderCommand) unstructu return obj } -func UnstructuredToLegacyFolder(item unstructured.Unstructured) *folder.Folder { +func UnstructuredToLegacyFolder(item unstructured.Unstructured, orgID int64) *folder.Folder { + // #TODO reduce duplication of the different conversion functions spec := item.Object["spec"].(map[string]any) - return &folder.Folder{ - UID: item.GetName(), - Title: spec["title"].(string), - // #TODO add other fields + uid := item.GetName() + title := spec["title"].(string) + + meta, err := utils.MetaAccessor(&item) + if err != nil { + return nil } + + id, err := getLegacyID(meta) + if err != nil { + return nil + } + + created, err := getCreated(meta) + if err != nil { + return nil + } + + // avoid panic + var createdTime time.Time + if created != nil { + createdTime = created.Local() + } + + f := &folder.Folder{ + UID: uid, + Title: title, + ID: id, + ParentUID: meta.GetFolder(), + // #TODO add created by field if necessary + // CreatedBy: meta.GetCreatedBy(), + // UpdatedBy: meta.GetCreatedBy(), + URL: getURL(meta, title), + Created: createdTime, + Updated: createdTime, + OrgID: orgID, + } + return f } func UnstructuredToLegacyFolderDTO(item unstructured.Unstructured) (*dtos.Folder, error) { @@ -79,6 +113,14 @@ func UnstructuredToLegacyFolderDTO(item 
unstructured.Unstructured) (*dtos.Folder return nil, err } + // avoid panic + var createdTime time.Time + if created != nil { + // #TODO Fix this time format. The legacy time format seems to be along the lines of time.Now() + // which includes a part that represents a fraction of a second. + createdTime = created.Local() + } + dto := &dtos.Folder{ UID: uid, Title: title, @@ -87,20 +129,14 @@ func UnstructuredToLegacyFolderDTO(item unstructured.Unstructured) (*dtos.Folder // #TODO add back CreatedBy, UpdatedBy once we figure out how to access userService // to translate user ID into user login. meta.GetCreatedBy() only stores user ID // Could convert meta.GetCreatedBy() return value to a struct--id and name - // CreatedBy: meta.GetCreatedBy(), - // UpdatedBy: meta.GetCreatedBy(), - URL: getURL(meta, title), + CreatedBy: meta.GetCreatedBy(), + UpdatedBy: meta.GetCreatedBy(), + URL: getURL(meta, title), // #TODO get Created in format "2024-09-12T15:37:41.09466+02:00" - Created: *created, + Created: createdTime, // #TODO figure out whether we want to set "updated" and "updated by". Could replace with // meta.GetUpdatedTimestamp() but it currently gets overwritten in prepareObjectForStorage(). - Updated: *created, - // #TODO figure out how to set these properly - CanSave: true, - CanEdit: true, - CanAdmin: true, - CanDelete: true, - HasACL: false, + Updated: createdTime, // #TODO figure out about adding version, parents, orgID fields } @@ -135,6 +171,9 @@ func convertToK8sResource(v *folder.Folder, namespacer request.NamespaceMapper) Timestamp: &v.Created, }) } + // #TODO: turns out these get overwritten by Unified Storage (see pkg/storage/unified/apistore/prepare.go) + // We're going to have to align with that. For now we do need the user ID because the folder type stores it + // as the only user identifier if v.CreatedBy > 0 { meta.SetCreatedBy(fmt.Sprintf("user:%d", v.CreatedBy)) } diff --git a/pkg/server/wire.go b/pkg/server/wire.go index 5549a7d5966..1d31c7b1d35 100644 --- a/pkg/server/wire.go +++ b/pkg/server/wire.go @@ -149,6 +149,7 @@ import ( "github.com/grafana/grafana/pkg/services/team/teamimpl" tempuser "github.com/grafana/grafana/pkg/services/temp_user" "github.com/grafana/grafana/pkg/services/temp_user/tempuserimpl" + "github.com/grafana/grafana/pkg/services/unifiedSearch" "github.com/grafana/grafana/pkg/services/updatechecker" "github.com/grafana/grafana/pkg/services/user" "github.com/grafana/grafana/pkg/services/user/userimpl" @@ -229,6 +230,8 @@ var wireBasicSet = wire.NewSet( search.ProvideService, searchV2.ProvideService, searchV2.ProvideSearchHTTPService, + unifiedSearch.ProvideService, + unifiedSearch.ProvideSearchHTTPService, store.ProvideService, store.ProvideSystemUsersService, live.ProvideService, diff --git a/pkg/services/accesscontrol/models.go b/pkg/services/accesscontrol/models.go index 9c09c77ae9f..c28a01429e1 100644 --- a/pkg/services/accesscontrol/models.go +++ b/pkg/services/accesscontrol/models.go @@ -173,10 +173,11 @@ type TeamRole struct { } type UserRole struct { - ID int64 `json:"id" xorm:"pk autoincr 'id'"` - OrgID int64 `json:"orgId" xorm:"org_id"` - RoleID int64 `json:"roleId" xorm:"role_id"` - UserID int64 `json:"userId" xorm:"user_id"` + ID int64 `json:"id" xorm:"pk autoincr 'id'"` + OrgID int64 `json:"orgId" xorm:"org_id"` + RoleID int64 `json:"roleId" xorm:"role_id"` + UserID int64 `json:"userId" xorm:"user_id"` + GroupMappingUID string `json:"groupMappingUID" xorm:"group_mapping_uid"` Created time.Time } @@ -455,6 +456,7 @@ const ( 
ActionAlertingReceiversCreate = "alert.notifications.receivers:create" ActionAlertingReceiversUpdate = "alert.notifications.receivers:write" ActionAlertingReceiversDelete = "alert.notifications.receivers:delete" + ActionAlertingReceiversTest = "alert.notifications.receivers:test" ActionAlertingReceiversPermissionsRead = "receivers.permissions:read" ActionAlertingReceiversPermissionsWrite = "receivers.permissions:write" diff --git a/pkg/services/accesscontrol/resourcepermissions/api.go b/pkg/services/accesscontrol/resourcepermissions/api.go index c979b8ade0b..4dc8aafd51f 100644 --- a/pkg/services/accesscontrol/resourcepermissions/api.go +++ b/pkg/services/accesscontrol/resourcepermissions/api.go @@ -11,6 +11,7 @@ import ( "github.com/grafana/grafana/pkg/services/accesscontrol" contextmodel "github.com/grafana/grafana/pkg/services/contexthandler/model" "github.com/grafana/grafana/pkg/services/org" + "github.com/grafana/grafana/pkg/services/team" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/web" "go.opentelemetry.io/otel" @@ -42,21 +43,27 @@ func (a *api) registerEndpoints() { licenseMW = nopMiddleware } + teamUIDResolver := team.MiddlewareTeamUIDResolver(a.service.teamService, ":teamID") + teamUIDResolverResource := func() web.Handler { return func(c *contextmodel.ReqContext) {} }() // no-op + if a.service.options.Resource == "teams" { + teamUIDResolverResource = team.MiddlewareTeamUIDResolver(a.service.teamService, ":resourceID") + } + a.router.Group(fmt.Sprintf("/api/access-control/%s", a.service.options.Resource), func(r routing.RouteRegister) { actionRead := fmt.Sprintf("%s.permissions:read", a.service.options.Resource) actionWrite := fmt.Sprintf("%s.permissions:write", a.service.options.Resource) scope := accesscontrol.Scope(a.service.options.Resource, a.service.options.ResourceAttribute, accesscontrol.Parameter(":resourceID")) r.Get("/description", auth(accesscontrol.EvalPermission(actionRead)), routing.Wrap(a.getDescription)) - r.Get("/:resourceID", auth(accesscontrol.EvalPermission(actionRead, scope)), routing.Wrap(a.getPermissions)) - r.Post("/:resourceID", licenseMW, auth(accesscontrol.EvalPermission(actionWrite, scope)), routing.Wrap(a.setPermissions)) + r.Get("/:resourceID", teamUIDResolverResource, auth(accesscontrol.EvalPermission(actionRead, scope)), routing.Wrap(a.getPermissions)) + r.Post("/:resourceID", teamUIDResolverResource, licenseMW, auth(accesscontrol.EvalPermission(actionWrite, scope)), routing.Wrap(a.setPermissions)) if a.service.options.Assignments.Users { - r.Post("/:resourceID/users/:userID", licenseMW, auth(accesscontrol.EvalPermission(actionWrite, scope)), routing.Wrap(a.setUserPermission)) + r.Post("/:resourceID/users/:userID", licenseMW, teamUIDResolverResource, auth(accesscontrol.EvalPermission(actionWrite, scope)), routing.Wrap(a.setUserPermission)) } if a.service.options.Assignments.Teams { - r.Post("/:resourceID/teams/:teamID", licenseMW, auth(accesscontrol.EvalPermission(actionWrite, scope)), routing.Wrap(a.setTeamPermission)) + r.Post("/:resourceID/teams/:teamID", licenseMW, teamUIDResolverResource, teamUIDResolver, auth(accesscontrol.EvalPermission(actionWrite, scope)), routing.Wrap(a.setTeamPermission)) } if a.service.options.Assignments.BuiltInRoles { - r.Post("/:resourceID/builtInRoles/:builtInRole", licenseMW, auth(accesscontrol.EvalPermission(actionWrite, scope)), routing.Wrap(a.setBuiltinRolePermission)) + r.Post("/:resourceID/builtInRoles/:builtInRole", teamUIDResolverResource, licenseMW, 
auth(accesscontrol.EvalPermission(actionWrite, scope)), routing.Wrap(a.setBuiltinRolePermission)) } }) } diff --git a/pkg/services/accesscontrol/resourcepermissions/api_test.go b/pkg/services/accesscontrol/resourcepermissions/api_test.go index 8719451294b..ea71d5c2b3a 100644 --- a/pkg/services/accesscontrol/resourcepermissions/api_test.go +++ b/pkg/services/accesscontrol/resourcepermissions/api_test.go @@ -257,6 +257,7 @@ type setTeamPermissionTestCase struct { expectedStatus int permission string permissions []accesscontrol.Permission + byUID bool } func TestApi_setTeamPermission(t *testing.T) { @@ -308,6 +309,20 @@ func TestApi_setTeamPermission(t *testing.T) { {Action: "dashboards.permissions:read", Scope: "dashboards:id:1"}, }, }, + { + desc: "should set View permission for team with id 1 but through UID", + teamID: 1, + resourceID: "1", + expectedStatus: 200, + permission: "View", + byUID: true, + permissions: []accesscontrol.Permission{ + {Action: "dashboards.permissions:read", Scope: "dashboards:id:1"}, + {Action: "dashboards.permissions:write", Scope: "dashboards:id:1"}, + {Action: accesscontrol.ActionTeamsRead, Scope: accesscontrol.ScopeTeamsAll}, + {Action: accesscontrol.ActionOrgUsersRead, Scope: accesscontrol.ScopeUsersAll}, + }, + }, } for _, tt := range tests { @@ -316,10 +331,16 @@ func TestApi_setTeamPermission(t *testing.T) { server := setupTestServer(t, &user.SignedInUser{OrgID: 1, Permissions: map[int64]map[string][]string{1: accesscontrol.GroupScopesByActionContext(context.Background(), tt.permissions)}}, service) // seed team - _, err := teamSvc.CreateTeam(context.Background(), "test", "test@test.com", 1) + team, err := teamSvc.CreateTeam(context.Background(), "test", "test@test.com", 1) require.NoError(t, err) - recorder := setPermission(t, server, testOptions.Resource, tt.resourceID, tt.permission, "teams", strconv.Itoa(int(tt.teamID))) + assignTo := strconv.Itoa(int(tt.teamID)) + if tt.byUID { + if team.ID == tt.teamID { + assignTo = team.UID + } + } + recorder := setPermission(t, server, testOptions.Resource, tt.resourceID, tt.permission, "teams", assignTo) assert.Equal(t, tt.expectedStatus, recorder.Code) assert.Equal(t, tt.expectedStatus, recorder.Code) diff --git a/pkg/services/anonymous/anonimpl/client.go b/pkg/services/anonymous/anonimpl/client.go index 57b4c9873fb..43c1d8107c2 100644 --- a/pkg/services/anonymous/anonimpl/client.go +++ b/pkg/services/anonymous/anonimpl/client.go @@ -17,8 +17,9 @@ import ( ) var ( - errInvalidOrg = errutil.Unauthorized("anonymous.invalid-org") - errInvalidID = errutil.Unauthorized("anonymous.invalid-id") + errInvalidOrg = errutil.Unauthorized("anonymous.invalid-org") + errInvalidID = errutil.Unauthorized("anonymous.invalid-id") + errDeviceLimit = errutil.Unauthorized("anonymous.device-limit-reached", errutil.WithPublicMessage("Anonymous device limit reached. Contact Administrator")) ) var _ authn.ContextAwareClient = new(Anonymous) @@ -51,7 +52,7 @@ func (a *Anonymous) Authenticate(ctx context.Context, r *authn.Request) (*authn. 
if err := a.anonDeviceService.TagDevice(ctx, httpReqCopy, anonymous.AnonDeviceUI); err != nil { if errors.Is(err, anonstore.ErrDeviceLimitReached) { - return nil, err + return nil, errDeviceLimit.Errorf("limit reached for anonymous devices: %w", err) } a.log.Warn("Failed to tag anonymous session", "error", err) diff --git a/pkg/services/anonymous/anonimpl/impl.go b/pkg/services/anonymous/anonimpl/impl.go index b34048be407..c0105441d62 100644 --- a/pkg/services/anonymous/anonimpl/impl.go +++ b/pkg/services/anonymous/anonimpl/impl.go @@ -2,6 +2,7 @@ package anonimpl import ( "context" + "errors" "net/http" "time" @@ -79,20 +80,29 @@ func (a *AnonDeviceService) usageStatFn(ctx context.Context) (map[string]any, er }, nil } -func (a *AnonDeviceService) tagDeviceUI(ctx context.Context, httpReq *http.Request, device *anonstore.Device) error { +func (a *AnonDeviceService) tagDeviceUI(ctx context.Context, device *anonstore.Device) error { key := device.CacheKey() - if _, ok := a.localCache.Get(key); ok { + if val, ok := a.localCache.Get(key); ok { + if boolVal, ok := val.(bool); ok && !boolVal { + return anonstore.ErrDeviceLimitReached + } return nil } - a.localCache.SetDefault(key, struct{}{}) + a.localCache.SetDefault(key, true) if a.cfg.Env == setting.Dev { a.log.Debug("Tagging device for UI", "deviceID", device.DeviceID, "device", device, "key", key) } if err := a.anonStore.CreateOrUpdateDevice(ctx, device); err != nil { + if errors.Is(err, anonstore.ErrDeviceLimitReached) { + a.localCache.SetDefault(key, false) + return err + } + // invalidate cache if there is an error + a.localCache.Delete(key) return err } @@ -142,7 +152,7 @@ func (a *AnonDeviceService) TagDevice(ctx context.Context, httpReq *http.Request UpdatedAt: time.Now(), } - err = a.tagDeviceUI(ctx, httpReq, taggedDevice) + err = a.tagDeviceUI(ctx, taggedDevice) if err != nil { a.log.Debug("Failed to tag device for UI", "error", err) return err diff --git a/pkg/services/anonymous/anonimpl/impl_test.go b/pkg/services/anonymous/anonimpl/impl_test.go index a84e913f3b1..b193d22edb6 100644 --- a/pkg/services/anonymous/anonimpl/impl_test.go +++ b/pkg/services/anonymous/anonimpl/impl_test.go @@ -26,6 +26,10 @@ func TestMain(m *testing.M) { } func TestIntegrationDeviceService_tag(t *testing.T) { + if testing.Short() { + t.Skip("skipping test in short mode") + } + type tagReq struct { httpReq *http.Request kind anonymous.DeviceKind @@ -152,6 +156,9 @@ func TestIntegrationDeviceService_tag(t *testing.T) { // Ensure that the local cache prevents request from being tagged func TestIntegrationAnonDeviceService_localCacheSafety(t *testing.T) { + if testing.Short() { + t.Skip("skipping test in short mode") + } store := db.InitTestDB(t) anonService := ProvideAnonymousDeviceService(&usagestats.UsageStatsMock{}, &authntest.FakeService{}, store, setting.NewCfg(), orgtest.NewOrgServiceFake(), nil, actest.FakeAccessControl{}, &routing.RouteRegisterImpl{}) @@ -184,6 +191,10 @@ func TestIntegrationAnonDeviceService_localCacheSafety(t *testing.T) { } func TestIntegrationDeviceService_SearchDevice(t *testing.T) { + if testing.Short() { + t.Skip("skipping test in short mode") + } + fixedTime := time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC) // Fixed timestamp for testing testCases := []struct { @@ -271,3 +282,88 @@ func TestIntegrationDeviceService_SearchDevice(t *testing.T) { }) } } + +func TestIntegrationAnonDeviceService_DeviceLimitWithCache(t *testing.T) { + if testing.Short() { + t.Skip("skipping test in short mode") + } + // Setup test environment + store 
:= db.InitTestDB(t) + cfg := setting.NewCfg() + cfg.AnonymousDeviceLimit = 1 // Set device limit to 1 for testing + anonService := ProvideAnonymousDeviceService( + &usagestats.UsageStatsMock{}, + &authntest.FakeService{}, + store, + cfg, + orgtest.NewOrgServiceFake(), + nil, + actest.FakeAccessControl{}, + &routing.RouteRegisterImpl{}, + ) + + // Define test cases + testCases := []struct { + name string + httpReq *http.Request + expectedErr error + }{ + { + name: "first request should succeed", + httpReq: &http.Request{ + Header: http.Header{ + "User-Agent": []string{"test"}, + "X-Forwarded-For": []string{"10.30.30.1"}, + http.CanonicalHeaderKey(deviceIDHeader): []string{"device1"}, + }, + }, + expectedErr: nil, + }, + { + name: "second request should fail due to device limit", + httpReq: &http.Request{ + Header: http.Header{ + "User-Agent": []string{"test"}, + "X-Forwarded-For": []string{"10.30.30.2"}, + http.CanonicalHeaderKey(deviceIDHeader): []string{"device2"}, + }, + }, + expectedErr: anonstore.ErrDeviceLimitReached, + }, + { + name: "repeat request should hit cache and succeed", + httpReq: &http.Request{ + Header: http.Header{ + "User-Agent": []string{"test"}, + "X-Forwarded-For": []string{"10.30.30.1"}, + http.CanonicalHeaderKey(deviceIDHeader): []string{"device1"}, + }, + }, + expectedErr: nil, + }, + { + name: "third request should hit cache and fail due to device limit", + httpReq: &http.Request{ + Header: http.Header{ + "User-Agent": []string{"test"}, + "X-Forwarded-For": []string{"10.30.30.2"}, + http.CanonicalHeaderKey(deviceIDHeader): []string{"device2"}, + }, + }, + expectedErr: anonstore.ErrDeviceLimitReached, + }, + } + + // Run test cases + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + err := anonService.TagDevice(context.Background(), tc.httpReq, anonymous.AnonDeviceUI) + if tc.expectedErr != nil { + require.Error(t, err) + assert.Equal(t, tc.expectedErr, err) + } else { + require.NoError(t, err) + } + }) + } +} diff --git a/pkg/services/auth/auth.go b/pkg/services/auth/auth.go index 12481ab0b77..88ff4d06157 100644 --- a/pkg/services/auth/auth.go +++ b/pkg/services/auth/auth.go @@ -20,8 +20,9 @@ const ( // Typed errors var ( - ErrUserTokenNotFound = errors.New("user token not found") - ErrInvalidSessionToken = usertoken.ErrInvalidSessionToken + ErrUserTokenNotFound = errors.New("user token not found") + ErrInvalidSessionToken = usertoken.ErrInvalidSessionToken + ErrExternalSessionNotFound = errors.New("external session not found") ) type ( @@ -65,10 +66,21 @@ type RotateCommand struct { UserAgent string } +type CreateTokenCommand struct { + User *user.User + ClientIP net.IP + UserAgent string + ExternalSession *ExternalSession +} + // UserTokenService are used for generating and validating user tokens type UserTokenService interface { - CreateToken(ctx context.Context, user *user.User, clientIP net.IP, userAgent string) (*UserToken, error) + CreateToken(ctx context.Context, cmd *CreateTokenCommand) (*UserToken, error) LookupToken(ctx context.Context, unhashedToken string) (*UserToken, error) + GetTokenByExternalSessionID(ctx context.Context, externalSessionID int64) (*UserToken, error) + GetExternalSession(ctx context.Context, extSessionID int64) (*ExternalSession, error) + FindExternalSessions(ctx context.Context, query *ListExternalSessionQuery) ([]*ExternalSession, error) + // RotateToken will always rotate a valid token RotateToken(ctx context.Context, cmd RotateCommand) (*UserToken, error) RevokeToken(ctx context.Context, token *UserToken, 
soft bool) error diff --git a/pkg/services/auth/authimpl/auth_token.go b/pkg/services/auth/authimpl/auth_token.go index 1c51e07982d..a42784d2670 100644 --- a/pkg/services/auth/authimpl/auth_token.go +++ b/pkg/services/auth/authimpl/auth_token.go @@ -14,10 +14,11 @@ import ( "github.com/grafana/grafana/pkg/infra/db" "github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/infra/serverlock" + "github.com/grafana/grafana/pkg/infra/tracing" "github.com/grafana/grafana/pkg/models/usertoken" "github.com/grafana/grafana/pkg/services/auth" "github.com/grafana/grafana/pkg/services/quota" - "github.com/grafana/grafana/pkg/services/user" + "github.com/grafana/grafana/pkg/services/secrets" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/util" ) @@ -28,10 +29,13 @@ var ( errUserIDInvalid = errors.New("invalid user ID") ) +var _ auth.UserTokenService = (*UserAuthTokenService)(nil) + func ProvideUserAuthTokenService(sqlStore db.DB, serverLockService *serverlock.ServerLockService, - quotaService quota.Service, - cfg *setting.Cfg) (*UserAuthTokenService, error) { + quotaService quota.Service, secretService secrets.Service, + cfg *setting.Cfg, tracer tracing.Tracer, +) (*UserAuthTokenService, error) { s := &UserAuthTokenService{ sqlStore: sqlStore, serverLockService: serverLockService, @@ -39,6 +43,7 @@ func ProvideUserAuthTokenService(sqlStore db.DB, log: log.New("auth"), singleflight: new(singleflight.Group), } + s.externalSessionStore = provideExternalSessionStore(sqlStore, secretService, tracer) defaultLimits, err := readQuotaConfig(cfg) if err != nil { @@ -57,31 +62,32 @@ func ProvideUserAuthTokenService(sqlStore db.DB, } type UserAuthTokenService struct { - sqlStore db.DB - serverLockService *serverlock.ServerLockService - cfg *setting.Cfg - log log.Logger - singleflight *singleflight.Group + sqlStore db.DB + serverLockService *serverlock.ServerLockService + cfg *setting.Cfg + log log.Logger + externalSessionStore auth.ExternalSessionStore + singleflight *singleflight.Group } -func (s *UserAuthTokenService) CreateToken(ctx context.Context, user *user.User, clientIP net.IP, userAgent string) (*auth.UserToken, error) { +func (s *UserAuthTokenService) CreateToken(ctx context.Context, cmd *auth.CreateTokenCommand) (*auth.UserToken, error) { token, hashedToken, err := generateAndHashToken(s.cfg.SecretKey) if err != nil { return nil, err } now := getTime().Unix() - clientIPStr := clientIP.String() - if len(clientIP) == 0 { + clientIPStr := cmd.ClientIP.String() + if len(cmd.ClientIP) == 0 { clientIPStr = "" } userAuthToken := userAuthToken{ - UserId: user.ID, + UserId: cmd.User.ID, AuthToken: hashedToken, PrevAuthToken: hashedToken, ClientIp: clientIPStr, - UserAgent: userAgent, + UserAgent: cmd.UserAgent, RotatedAt: now, CreatedAt: now, UpdatedAt: now, @@ -90,11 +96,21 @@ func (s *UserAuthTokenService) CreateToken(ctx context.Context, user *user.User, AuthTokenSeen: false, } - err = s.sqlStore.WithDbSession(ctx, func(dbSession *db.Session) error { - _, err = dbSession.Insert(&userAuthToken) - return err - }) + err = s.sqlStore.InTransaction(ctx, func(ctx context.Context) error { + if cmd.ExternalSession != nil { + inErr := s.externalSessionStore.Create(ctx, cmd.ExternalSession) + if inErr != nil { + return inErr + } + userAuthToken.ExternalSessionId = cmd.ExternalSession.ID + } + inErr := s.sqlStore.WithDbSession(ctx, func(dbSession *db.Session) error { + _, err := dbSession.Insert(&userAuthToken) + return err + }) + return inErr + }) if err != nil { return nil, 
err } @@ -164,7 +180,6 @@ func (s *UserAuthTokenService) LookupToken(ctx context.Context, unhashedToken st return err }) - if err != nil { return nil, err } @@ -190,7 +205,6 @@ func (s *UserAuthTokenService) LookupToken(ctx context.Context, unhashedToken st return err }) - if err != nil { return nil, err } @@ -210,6 +224,38 @@ func (s *UserAuthTokenService) LookupToken(ctx context.Context, unhashedToken st return &userToken, err } +func (s *UserAuthTokenService) GetTokenByExternalSessionID(ctx context.Context, externalSessionID int64) (*auth.UserToken, error) { + var token userAuthToken + err := s.sqlStore.WithDbSession(ctx, func(dbSession *db.Session) error { + exists, err := dbSession.Where("external_session_id = ?", externalSessionID).Get(&token) + if err != nil { + return err + } + + if !exists { + return auth.ErrUserTokenNotFound + } + + return nil + }) + if err != nil { + return nil, err + } + + var userToken auth.UserToken + err = token.toUserToken(&userToken) + + return &userToken, err +} + +func (s *UserAuthTokenService) GetExternalSession(ctx context.Context, extSessionID int64) (*auth.ExternalSession, error) { + return s.externalSessionStore.Get(ctx, extSessionID) +} + +func (s *UserAuthTokenService) FindExternalSessions(ctx context.Context, query *auth.ListExternalSessionQuery) ([]*auth.ExternalSession, error) { + return s.externalSessionStore.List(ctx, query) +} + func (s *UserAuthTokenService) RotateToken(ctx context.Context, cmd auth.RotateCommand) (*auth.UserToken, error) { if cmd.UnHashedToken == "" { return nil, auth.ErrInvalidSessionToken @@ -277,7 +323,6 @@ func (s *UserAuthTokenService) rotateToken(ctx context.Context, token *auth.User affected, err = res.RowsAffected() return err }) - if err != nil { return nil, err } @@ -305,6 +350,8 @@ func (s *UserAuthTokenService) RevokeToken(ctx context.Context, token *auth.User return err } + ctxLogger := s.log.FromContext(ctx) + var rowsAffected int64 if soft { @@ -324,7 +371,13 @@ func (s *UserAuthTokenService) RevokeToken(ctx context.Context, token *auth.User return err } - ctxLogger := s.log.FromContext(ctx) + if model.ExternalSessionId != 0 { + err = s.externalSessionStore.Delete(ctx, model.ExternalSessionId) + if err != nil { + // Intentionally not returning error here, as the token has been revoked -> the background job will clean up orphaned external sessions + ctxLogger.Warn("Failed to delete external session", "externalSessionID", model.ExternalSessionId, "err", err) + } + } if rowsAffected == 0 { ctxLogger.Debug("User auth token not found/revoked", "tokenID", model.Id, "userID", model.UserId, "clientIP", model.ClientIp, "userAgent", model.UserAgent) @@ -337,51 +390,75 @@ func (s *UserAuthTokenService) RevokeToken(ctx context.Context, token *auth.User } func (s *UserAuthTokenService) RevokeAllUserTokens(ctx context.Context, userId int64) error { - return s.sqlStore.WithDbSession(ctx, func(dbSession *db.Session) error { - sql := `DELETE from user_auth_token WHERE user_id = ?` - res, err := dbSession.Exec(sql, userId) + return s.sqlStore.InTransaction(ctx, func(ctx context.Context) error { + ctxLogger := s.log.FromContext(ctx) + err := s.sqlStore.WithDbSession(ctx, func(dbSession *db.Session) error { + sql := `DELETE from user_auth_token WHERE user_id = ?` + res, err := dbSession.Exec(sql, userId) + if err != nil { + return err + } + + affected, err := res.RowsAffected() + if err != nil { + return err + } + + ctxLogger.Debug("All user tokens for user revoked", "userID", userId, "count", affected) + + return nil + }) if
err != nil { return err } - affected, err := res.RowsAffected() + err = s.externalSessionStore.DeleteExternalSessionsByUserID(ctx, userId) if err != nil { - return err + // Intentionally not returning error here, as the token has been revoked -> the background job will clean up orphaned external sessions + ctxLogger.Warn("Failed to delete external sessions for user", "userID", userId, "err", err) } - - s.log.FromContext(ctx).Debug("All user tokens for user revoked", "userID", userId, "count", affected) - - return err + return nil }) } func (s *UserAuthTokenService) BatchRevokeAllUserTokens(ctx context.Context, userIds []int64) error { - return s.sqlStore.WithTransactionalDbSession(ctx, func(dbSession *db.Session) error { + return s.sqlStore.InTransaction(ctx, func(ctx context.Context) error { + ctxLogger := s.log.FromContext(ctx) if len(userIds) == 0 { return nil } - user_id_params := strings.Repeat(",?", len(userIds)-1) - sql := "DELETE from user_auth_token WHERE user_id IN (?" + user_id_params + ")" + userIdParams := strings.Repeat(",?", len(userIds)-1) + sql := "DELETE from user_auth_token WHERE user_id IN (?" + userIdParams + ")" params := []any{sql} for _, v := range userIds { params = append(params, v) } - res, err := dbSession.Exec(params...) + var affected int64 + + err := s.sqlStore.WithDbSession(ctx, func(dbSession *db.Session) error { + res, inErr := dbSession.Exec(params...) + if inErr != nil { + return inErr + } + + affected, inErr = res.RowsAffected() + return inErr + }) if err != nil { return err } - affected, err := res.RowsAffected() + err = s.externalSessionStore.BatchDeleteExternalSessionsByUserIDs(ctx, userIds) if err != nil { - return err + ctxLogger.Warn("Failed to delete external sessions for users", "users", userIds, "err", err) } - s.log.FromContext(ctx).Debug("All user tokens for given users revoked", "usersCount", len(userIds), "count", affected) + ctxLogger.Debug("All user tokens for given users revoked", "usersCount", len(userIds), "count", affected) - return err + return nil }) } diff --git a/pkg/services/auth/authimpl/auth_token_test.go b/pkg/services/auth/authimpl/auth_token_test.go index 97154f1f0c2..86d579c49c6 100644 --- a/pkg/services/auth/authimpl/auth_token_test.go +++ b/pkg/services/auth/authimpl/auth_token_test.go @@ -3,20 +3,25 @@ package authimpl import ( "context" "encoding/json" + "errors" "net" "reflect" "testing" "time" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" "golang.org/x/sync/singleflight" "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/infra/db" "github.com/grafana/grafana/pkg/infra/log" + "github.com/grafana/grafana/pkg/infra/tracing" "github.com/grafana/grafana/pkg/services/auth" + "github.com/grafana/grafana/pkg/services/auth/authtest" "github.com/grafana/grafana/pkg/services/quota" + "github.com/grafana/grafana/pkg/services/secrets/fakes" "github.com/grafana/grafana/pkg/services/user" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/tests/testsuite" @@ -36,8 +41,11 @@ func TestIntegrationUserAuthToken(t *testing.T) { t.Run("When creating token", func(t *testing.T) { createToken := func() *auth.UserToken { - userToken, err := ctx.tokenService.CreateToken(context.Background(), usr, - net.ParseIP("192.168.10.11"), "some user agent") + userToken, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{ + User: usr, + ClientIP: net.ParseIP("192.168.10.11"), + UserAgent: "some user 
agent", + }) require.Nil(t, err) require.NotNil(t, userToken) require.False(t, userToken.AuthTokenSeen) @@ -109,8 +117,11 @@ func TestIntegrationUserAuthToken(t *testing.T) { userToken = createToken() t.Run("When creating an additional token", func(t *testing.T) { - userToken2, err := ctx.tokenService.CreateToken(context.Background(), usr, - net.ParseIP("192.168.10.11"), "some user agent") + userToken2, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{ + User: usr, + ClientIP: net.ParseIP("192.168.10.11"), + UserAgent: "some user agent", + }) require.Nil(t, err) require.NotNil(t, userToken2) @@ -156,8 +167,11 @@ func TestIntegrationUserAuthToken(t *testing.T) { for i := 0; i < 3; i++ { userId := usr.ID + int64(i+1) userIds = append(userIds, userId) - _, err := ctx.tokenService.CreateToken(context.Background(), usr, - net.ParseIP("192.168.10.11"), "some user agent") + _, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{ + User: usr, + ClientIP: net.ParseIP("192.168.10.11"), + UserAgent: "some user agent", + }) require.Nil(t, err) } @@ -173,10 +187,89 @@ func TestIntegrationUserAuthToken(t *testing.T) { }) }) + t.Run("When creating token with external session", func(t *testing.T) { + createToken := func() *auth.UserToken { + userToken, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{ + User: usr, + ClientIP: net.ParseIP("192.168.10.11"), + UserAgent: "some user agent", + ExternalSession: &auth.ExternalSession{UserID: usr.ID, AuthModule: "test", UserAuthID: 1}, + }) + require.Nil(t, err) + require.NotNil(t, userToken) + require.False(t, userToken.AuthTokenSeen) + return userToken + } + + userToken := createToken() + + t.Run("soft revoking existing token should remove the associated external session", func(t *testing.T) { + err := ctx.tokenService.RevokeToken(context.Background(), userToken, true) + require.Nil(t, err) + + model, err := ctx.getAuthTokenByID(userToken.Id) + require.Nil(t, err) + require.NotNil(t, model) + require.Greater(t, model.RevokedAt, int64(0)) + + extSess, err := ctx.getExternalSessionByID(userToken.ExternalSessionId) + require.Nil(t, err) + require.Nil(t, extSess) + }) + + t.Run("revoking existing token should also remove the associated external session", func(t *testing.T) { + err := ctx.tokenService.RevokeToken(context.Background(), userToken, false) + require.Nil(t, err) + + model, err := ctx.getAuthTokenByID(userToken.Id) + require.Nil(t, err) + require.Nil(t, model) + + extSess, err := ctx.getExternalSessionByID(userToken.ExternalSessionId) + require.Nil(t, err) + require.Nil(t, extSess) + }) + + t.Run("When revoking users tokens in a batch", func(t *testing.T) { + t.Run("Can revoke all users tokens and associated external sessions", func(t *testing.T) { + userIds := []int64{} + extSessionIds := []int64{} + for i := 0; i < 3; i++ { + userId := usr.ID + int64(i+1) + userIds = append(userIds, userId) + token, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{ + User: usr, + ClientIP: net.ParseIP("192.168.10.11"), + UserAgent: "some user agent", + ExternalSession: &auth.ExternalSession{UserID: userId, AuthModule: "test", UserAuthID: 1}, + }) + require.Nil(t, err) + extSessionIds = append(extSessionIds, token.ExternalSessionId) + } + + err := ctx.tokenService.BatchRevokeAllUserTokens(context.Background(), userIds) + require.Nil(t, err) + + for i := 0; i < len(userIds); i++ { + tokens, err := 
ctx.tokenService.GetUserTokens(context.Background(), userIds[i]) + require.Nil(t, err) + require.Equal(t, 0, len(tokens)) + + extSess, err := ctx.getExternalSessionByID(extSessionIds[i]) + require.Nil(t, err) + require.Nil(t, extSess) + } + }) + }) + }) + t.Run("expires correctly", func(t *testing.T) { ctx := createTestContext(t) - userToken, err := ctx.tokenService.CreateToken(context.Background(), usr, - net.ParseIP("192.168.10.11"), "some user agent") + userToken, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{ + User: usr, + ClientIP: net.ParseIP("192.168.10.11"), + UserAgent: "some user agent", + }) require.Nil(t, err) userToken, err = ctx.tokenService.LookupToken(context.Background(), userToken.UnhashedToken) @@ -262,7 +355,11 @@ func TestIntegrationUserAuthToken(t *testing.T) { t.Run("can properly rotate tokens", func(t *testing.T) { getTime = func() time.Time { return now } ctx := createTestContext(t) - userToken, err := ctx.tokenService.CreateToken(context.Background(), usr, net.ParseIP("192.168.10.11"), "some user agent") + userToken, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{ + User: usr, + ClientIP: net.ParseIP("192.168.10.11"), + UserAgent: "some user agent", + }) require.Nil(t, err) prevToken := userToken.AuthToken @@ -335,8 +432,11 @@ func TestIntegrationUserAuthToken(t *testing.T) { t.Run("keeps prev token valid for 1 minute after it is confirmed", func(t *testing.T) { getTime = func() time.Time { return now } - userToken, err := ctx.tokenService.CreateToken(context.Background(), usr, - net.ParseIP("192.168.10.11"), "some user agent") + userToken, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{ + User: usr, + ClientIP: net.ParseIP("192.168.10.11"), + UserAgent: "some user agent", + }) require.Nil(t, err) require.NotNil(t, userToken) @@ -368,8 +468,11 @@ func TestIntegrationUserAuthToken(t *testing.T) { }) t.Run("will not mark token unseen when prev and current are the same", func(t *testing.T) { - userToken, err := ctx.tokenService.CreateToken(context.Background(), usr, - net.ParseIP("192.168.10.11"), "some user agent") + userToken, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{ + User: usr, + ClientIP: net.ParseIP("192.168.10.11"), + UserAgent: "some user agent", + }) require.Nil(t, err) require.NotNil(t, userToken) @@ -389,7 +492,11 @@ func TestIntegrationUserAuthToken(t *testing.T) { t.Run("RotateToken", func(t *testing.T) { var prev string - token, err := ctx.tokenService.CreateToken(context.Background(), usr, nil, "") + token, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{ + User: usr, + ClientIP: nil, + UserAgent: "", + }) require.NoError(t, err) t.Run("should rotate token when called with current auth token", func(t *testing.T) { prev = token.UnhashedToken @@ -412,7 +519,11 @@ func TestIntegrationUserAuthToken(t *testing.T) { }) t.Run("should return error when token is revoked", func(t *testing.T) { - revokedToken, err := ctx.tokenService.CreateToken(context.Background(), usr, nil, "") + revokedToken, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{ + User: usr, + ClientIP: nil, + UserAgent: "", + }) require.NoError(t, err) // mark token as revoked err = ctx.sqlstore.WithDbSession(context.Background(), func(sess *db.Session) error { @@ -426,7 +537,11 @@ func TestIntegrationUserAuthToken(t *testing.T) { }) t.Run("should return error when token has 
expired", func(t *testing.T) { - expiredToken, err := ctx.tokenService.CreateToken(context.Background(), usr, nil, "") + expiredToken, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{ + User: usr, + ClientIP: nil, + UserAgent: "", + }) require.NoError(t, err) // mark token as expired err = ctx.sqlstore.WithDbSession(context.Background(), func(sess *db.Session) error { @@ -441,10 +556,18 @@ func TestIntegrationUserAuthToken(t *testing.T) { t.Run("should only delete revoked tokens that are outside on specified window", func(t *testing.T) { usr := &user.User{ID: 100} - token1, err := ctx.tokenService.CreateToken(context.Background(), usr, nil, "") + token1, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{ + User: usr, + ClientIP: nil, + UserAgent: "", + }) require.NoError(t, err) - token2, err := ctx.tokenService.CreateToken(context.Background(), usr, nil, "") + token2, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{ + User: usr, + ClientIP: nil, + UserAgent: "", + }) require.NoError(t, err) getTime = func() time.Time { @@ -474,18 +597,19 @@ func TestIntegrationUserAuthToken(t *testing.T) { t.Run("When populating userAuthToken from UserToken should copy all properties", func(t *testing.T) { ut := auth.UserToken{ - Id: 1, - UserId: 2, - AuthToken: "a", - PrevAuthToken: "b", - UserAgent: "c", - ClientIp: "d", - AuthTokenSeen: true, - SeenAt: 3, - RotatedAt: 4, - CreatedAt: 5, - UpdatedAt: 6, - UnhashedToken: "e", + Id: 1, + UserId: 2, + AuthToken: "a", + PrevAuthToken: "b", + UserAgent: "c", + ClientIp: "d", + AuthTokenSeen: true, + SeenAt: 3, + RotatedAt: 4, + CreatedAt: 5, + UpdatedAt: 6, + UnhashedToken: "e", + ExternalSessionId: 7, } utBytes, err := json.Marshal(ut) require.Nil(t, err) @@ -507,18 +631,19 @@ func TestIntegrationUserAuthToken(t *testing.T) { t.Run("When populating userToken from userAuthToken should copy all properties", func(t *testing.T) { uat := userAuthToken{ - Id: 1, - UserId: 2, - AuthToken: "a", - PrevAuthToken: "b", - UserAgent: "c", - ClientIp: "d", - AuthTokenSeen: true, - SeenAt: 3, - RotatedAt: 4, - CreatedAt: 5, - UpdatedAt: 6, - UnhashedToken: "e", + Id: 1, + UserId: 2, + AuthToken: "a", + PrevAuthToken: "b", + UserAgent: "c", + ClientIp: "d", + AuthTokenSeen: true, + SeenAt: 3, + RotatedAt: 4, + CreatedAt: 5, + UpdatedAt: 6, + UnhashedToken: "e", + ExternalSessionId: 7, } uatBytes, err := json.Marshal(uat) require.Nil(t, err) @@ -551,22 +676,27 @@ func createTestContext(t *testing.T) *testContext { TokenRotationIntervalMinutes: 10, } + extSessionStore := provideExternalSessionStore(sqlstore, &fakes.FakeSecretsService{}, tracing.InitializeTracerForTest()) + tokenService := &UserAuthTokenService{ - sqlStore: sqlstore, - cfg: cfg, - log: log.New("test-logger"), - singleflight: new(singleflight.Group), + sqlStore: sqlstore, + cfg: cfg, + log: log.New("test-logger"), + singleflight: new(singleflight.Group), + externalSessionStore: extSessionStore, } return &testContext{ - sqlstore: sqlstore, - tokenService: tokenService, + sqlstore: sqlstore, + tokenService: tokenService, + extSessionStore: &extSessionStore, } } type testContext struct { - sqlstore db.DB - tokenService *UserAuthTokenService + sqlstore db.DB + tokenService *UserAuthTokenService + extSessionStore *auth.ExternalSessionStore } func (c *testContext) getAuthTokenByID(id int64) (*userAuthToken, error) { @@ -585,6 +715,22 @@ func (c *testContext) getAuthTokenByID(id int64) (*userAuthToken, error) { 
return res, err } +func (c *testContext) getExternalSessionByID(ID int64) (*auth.ExternalSession, error) { + var res *auth.ExternalSession + err := c.sqlstore.WithDbSession(context.Background(), func(sess *db.Session) error { + var t auth.ExternalSession + found, err := sess.ID(ID).Get(&t) + if err != nil || !found { + return err + } + + res = &t + return nil + }) + + return res, err +} + func (c *testContext) updateRotatedAt(id, rotatedAt int64) (bool, error) { hasRowsAffected := false err := c.sqlstore.WithDbSession(context.Background(), func(sess *db.Session) error { @@ -609,8 +755,11 @@ func TestIntegrationTokenCount(t *testing.T) { user := &user.User{ID: int64(10)} createToken := func() *auth.UserToken { - userToken, err := ctx.tokenService.CreateToken(context.Background(), user, - net.ParseIP("192.168.10.11"), "some user agent") + userToken, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{ + User: user, + ClientIP: net.ParseIP("192.168.10.11"), + UserAgent: "some user agent", + }) require.Nil(t, err) require.NotNil(t, userToken) require.False(t, userToken.AuthTokenSeen) @@ -637,3 +786,108 @@ func TestIntegrationTokenCount(t *testing.T) { require.Nil(t, err) require.Equal(t, int64(0), count) } + +func TestRevokeAllUserTokens(t *testing.T) { + t.Run("should not fail if the external sessions could not be removed", func(t *testing.T) { + ctx := createTestContext(t) + usr := &user.User{ID: int64(10)} + + // Mock the external session store to return an error + mockExternalSessionStore := &authtest.MockExternalSessionStore{} + + mockExternalSessionStore.On("Create", mock.Anything, mock.IsType(&auth.ExternalSession{})).Run(func(args mock.Arguments) { + extSession := args.Get(1).(*auth.ExternalSession) + extSession.ID = 1 + }).Return(nil) + mockExternalSessionStore.On("DeleteExternalSessionsByUserID", mock.Anything, usr.ID).Return(errors.New("some error")) + ctx.tokenService.externalSessionStore = mockExternalSessionStore + + userToken, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{ + User: usr, + ClientIP: net.ParseIP("192.168.10.11"), + UserAgent: "some user agent", + ExternalSession: &auth.ExternalSession{UserID: usr.ID, AuthModule: "test", UserAuthID: 1}, + }) + require.Nil(t, err) + require.NotNil(t, userToken) + + err = ctx.tokenService.RevokeAllUserTokens(context.Background(), usr.ID) + require.Nil(t, err) + + model, err := ctx.getAuthTokenByID(userToken.Id) + require.Nil(t, err) + require.Nil(t, model) + }) +} + +func TestRevokeToken(t *testing.T) { + t.Run("should not fail if the external sessions could not be removed", func(t *testing.T) { + ctx := createTestContext(t) + usr := &user.User{ID: int64(10)} + mockExternalSessionStore := &authtest.MockExternalSessionStore{} + + mockExternalSessionStore.On("Create", mock.Anything, mock.IsType(&auth.ExternalSession{})).Run(func(args mock.Arguments) { + extSession := args.Get(1).(*auth.ExternalSession) + extSession.ID = 2 + }).Return(nil) + mockExternalSessionStore.On("Delete", mock.Anything, int64(2)).Return(errors.New("some error")) + ctx.tokenService.externalSessionStore = mockExternalSessionStore + + userToken, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{ + User: usr, + ClientIP: net.ParseIP("192.168.10.11"), + UserAgent: "some user agent", + ExternalSession: &auth.ExternalSession{UserID: usr.ID, AuthModule: "test", UserAuthID: 1}, + }) + require.Nil(t, err) + require.NotNil(t, userToken) + + err = 
ctx.tokenService.RevokeToken(context.Background(), userToken, false) + require.Nil(t, err) + + model, err := ctx.getAuthTokenByID(userToken.Id) + require.Nil(t, err) + require.Nil(t, model) + }) +} + +func TestBatchRevokeAllUserTokens(t *testing.T) { + t.Run("should not fail if the external sessions could not be removed", func(t *testing.T) { + ctx := createTestContext(t) + userIds := []int64{1, 2, 3} + mockExternalSessionStore := &authtest.MockExternalSessionStore{} + + mockExternalSessionStore.On("BatchDeleteExternalSessionsByUserIDs", mock.Anything, userIds).Return(errors.New("some error")) + ctr := int64(0) + mockExternalSessionStore.On("Create", mock.Anything, mock.IsType(&auth.ExternalSession{})).Run(func(args mock.Arguments) { + extSession := args.Get(1).(*auth.ExternalSession) + ctr += 1 + extSession.ID = ctr + }).Return(nil) + + ctx.tokenService.externalSessionStore = mockExternalSessionStore + + for _, userID := range userIds { + usr := &user.User{ID: userID} + userToken, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{ + User: usr, + ClientIP: net.ParseIP("192.168.10.11"), + UserAgent: "some user agent", + ExternalSession: &auth.ExternalSession{UserID: usr.ID, AuthModule: "test", UserAuthID: 1}, + }) + require.Nil(t, err) + require.NotNil(t, userToken) + } + + // Batch revoke all user tokens + err := ctx.tokenService.BatchRevokeAllUserTokens(context.Background(), userIds) + require.Nil(t, err) + + // Verify that the tokens have been revoked + for _, userID := range userIds { + tokens, err := ctx.tokenService.GetUserTokens(context.Background(), userID) + require.Nil(t, err) + require.Equal(t, 0, len(tokens)) + } + }) +} diff --git a/pkg/services/auth/authimpl/external_session_store.go b/pkg/services/auth/authimpl/external_session_store.go new file mode 100644 index 00000000000..544386673b8 --- /dev/null +++ b/pkg/services/auth/authimpl/external_session_store.go @@ -0,0 +1,244 @@ +package authimpl + +import ( + "context" + "crypto/sha256" + "encoding/base64" + + "github.com/grafana/grafana/pkg/infra/db" + "github.com/grafana/grafana/pkg/infra/tracing" + "github.com/grafana/grafana/pkg/services/auth" + "github.com/grafana/grafana/pkg/services/secrets" +) + +var _ auth.ExternalSessionStore = (*store)(nil) + +type store struct { + sqlStore db.DB + secretsService secrets.Service + tracer tracing.Tracer +} + +func provideExternalSessionStore(sqlStore db.DB, secretService secrets.Service, tracer tracing.Tracer) auth.ExternalSessionStore { + return &store{ + sqlStore: sqlStore, + secretsService: secretService, + tracer: tracer, + } +} + +func (s *store) Get(ctx context.Context, extSessionID int64) (*auth.ExternalSession, error) { + ctx, span := s.tracer.Start(ctx, "externalsession.Get") + defer span.End() + + externalSession := &auth.ExternalSession{ID: extSessionID} + + err := s.sqlStore.WithDbSession(ctx, func(sess *db.Session) error { + found, err := sess.Get(externalSession) + if err != nil { + return err + } + + if !found { + return auth.ErrExternalSessionNotFound + } + return nil + }) + if err != nil { + return nil, err + } + + err = s.decryptSecrets(externalSession) + if err != nil { + return nil, err + } + + return externalSession, nil +} + +func (s *store) List(ctx context.Context, query *auth.ListExternalSessionQuery) ([]*auth.ExternalSession, error) { + ctx, span := s.tracer.Start(ctx, "externalsession.List") + defer span.End() + + externalSession := &auth.ExternalSession{} + if query.ID != 0 { + externalSession.ID = query.ID + } + + hash := 
sha256.New() + + if query.SessionID != "" { + hash.Write([]byte(query.SessionID)) + externalSession.SessionIDHash = base64.RawStdEncoding.EncodeToString(hash.Sum(nil)) + } + + if query.NameID != "" { + hash.Reset() + hash.Write([]byte(query.NameID)) + externalSession.NameIDHash = base64.RawStdEncoding.EncodeToString(hash.Sum(nil)) + } + + queryResult := make([]*auth.ExternalSession, 0) + err := s.sqlStore.WithDbSession(ctx, func(sess *db.Session) error { + return sess.Find(&queryResult, externalSession) + }) + if err != nil { + return nil, err + } + + for _, extSession := range queryResult { + err := s.decryptSecrets(extSession) + if err != nil { + return nil, err + } + } + return queryResult, nil +} + +func (s *store) Create(ctx context.Context, extSession *auth.ExternalSession) error { + ctx, span := s.tracer.Start(ctx, "externalsession.Create") + defer span.End() + + var err error + clone := extSession.Clone() + + clone.AccessToken, err = s.encryptAndEncode(extSession.AccessToken) + if err != nil { + return err + } + + clone.RefreshToken, err = s.encryptAndEncode(extSession.RefreshToken) + if err != nil { + return err + } + + clone.IDToken, err = s.encryptAndEncode(extSession.IDToken) + if err != nil { + return err + } + + if extSession.NameID != "" { + hash := sha256.New() + hash.Write([]byte(extSession.NameID)) + clone.NameIDHash = base64.RawStdEncoding.EncodeToString(hash.Sum(nil)) + } + + clone.NameID, err = s.encryptAndEncode(extSession.NameID) + if err != nil { + return err + } + + if extSession.SessionID != "" { + hash := sha256.New() + hash.Write([]byte(extSession.SessionID)) + clone.SessionIDHash = base64.RawStdEncoding.EncodeToString(hash.Sum(nil)) + } + + clone.SessionID, err = s.encryptAndEncode(extSession.SessionID) + if err != nil { + return err + } + + err = s.sqlStore.WithDbSession(ctx, func(sess *db.Session) error { + _, err := sess.Insert(clone) + return err + }) + if err != nil { + return err + } + extSession.ID = clone.ID + return nil +} + +func (s *store) Delete(ctx context.Context, ID int64) error { + ctx, span := s.tracer.Start(ctx, "externalsession.Delete") + defer span.End() + + externalSession := &auth.ExternalSession{ID: ID} + err := s.sqlStore.WithDbSession(ctx, func(sess *db.Session) error { + _, err := sess.Delete(externalSession) + return err + }) + return err +} + +func (s *store) DeleteExternalSessionsByUserID(ctx context.Context, userID int64) error { + ctx, span := s.tracer.Start(ctx, "externalsession.DeleteExternalSessionsByUserID") + defer span.End() + + externalSession := &auth.ExternalSession{UserID: userID} + err := s.sqlStore.WithDbSession(ctx, func(sess *db.Session) error { + _, err := sess.Delete(externalSession) + return err + }) + return err +} + +func (s *store) BatchDeleteExternalSessionsByUserIDs(ctx context.Context, userIDs []int64) error { + ctx, span := s.tracer.Start(ctx, "externalsession.BatchDeleteExternalSessionsByUserIDs") + defer span.End() + + externalSession := &auth.ExternalSession{} + err := s.sqlStore.WithDbSession(ctx, func(sess *db.Session) error { + _, err := sess.In("user_id", userIDs).Delete(externalSession) + return err + }) + return err +} + +func (s *store) decryptSecrets(extSession *auth.ExternalSession) error { + var err error + extSession.AccessToken, err = s.decodeAndDecrypt(extSession.AccessToken) + if err != nil { + return err + } + + extSession.RefreshToken, err = s.decodeAndDecrypt(extSession.RefreshToken) + if err != nil { + return err + } + + extSession.IDToken, err = s.decodeAndDecrypt(extSession.IDToken) + 
if err != nil { + return err + } + + extSession.NameID, err = s.decodeAndDecrypt(extSession.NameID) + if err != nil { + return err + } + + extSession.SessionID, err = s.decodeAndDecrypt(extSession.SessionID) + if err != nil { + return err + } + return nil +} + +func (s *store) encryptAndEncode(str string) (string, error) { + if str == "" { + return "", nil + } + + encrypted, err := s.secretsService.Encrypt(context.Background(), []byte(str), secrets.WithoutScope()) + if err != nil { + return "", err + } + return base64.StdEncoding.EncodeToString(encrypted), nil +} + +func (s *store) decodeAndDecrypt(str string) (string, error) { + // Bail out if empty string since it'll cause a segfault in Decrypt + if str == "" { + return "", nil + } + decoded, err := base64.StdEncoding.DecodeString(str) + if err != nil { + return "", err + } + decrypted, err := s.secretsService.Decrypt(context.Background(), decoded) + if err != nil { + return "", err + } + return string(decrypted), nil +} diff --git a/pkg/services/auth/authimpl/external_session_store_test.go b/pkg/services/auth/authimpl/external_session_store_test.go new file mode 100644 index 00000000000..499bf7e59e4 --- /dev/null +++ b/pkg/services/auth/authimpl/external_session_store_test.go @@ -0,0 +1,228 @@ +package authimpl + +import ( + "context" + "testing" + + "github.com/grafana/grafana/pkg/infra/db" + "github.com/grafana/grafana/pkg/infra/tracing" + "github.com/grafana/grafana/pkg/services/auth" + "github.com/grafana/grafana/pkg/services/secrets/fakes" + "github.com/stretchr/testify/require" +) + +func TestGetExternalSession(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test") + } + t.Run("returns existing external session", func(t *testing.T) { + store := setupTest(t) + + extSession := &auth.ExternalSession{ + AccessToken: "access-token", + } + + err := store.Create(context.Background(), extSession) + require.NoError(t, err) + + actual, err := store.Get(context.Background(), extSession.ID) + require.NoError(t, err) + require.EqualValues(t, extSession.ID, actual.ID) + require.EqualValues(t, extSession.AccessToken, actual.AccessToken) + }) + + t.Run("returns not found if the external session is missing", func(t *testing.T) { + store := setupTest(t) + + _, err := store.Get(context.Background(), 999) + require.ErrorIs(t, err, auth.ErrExternalSessionNotFound) + }) +} + +func TestFindExternalSessions(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test") + } + + t.Run("returns external sessions by ID", func(t *testing.T) { + store := setupTest(t) + + extSession := &auth.ExternalSession{ + AccessToken: "access-token", + } + + err := store.Create(context.Background(), extSession) + require.NoError(t, err) + + query := &auth.ListExternalSessionQuery{ID: extSession.ID} + actual, err := store.List(context.Background(), query) + require.NoError(t, err) + require.Len(t, actual, 1) + require.EqualValues(t, extSession.ID, actual[0].ID) + require.EqualValues(t, extSession.AccessToken, actual[0].AccessToken) + }) + + t.Run("returns external sessions by SessionID", func(t *testing.T) { + store := setupTest(t) + + extSession := &auth.ExternalSession{ + SessionID: "session-index", + } + err := store.Create(context.Background(), extSession) + require.NoError(t, err) + + query := &auth.ListExternalSessionQuery{SessionID: extSession.SessionID} + actual, err := store.List(context.Background(), query) + require.NoError(t, err) + require.Len(t, actual, 1) + require.EqualValues(t, extSession.ID, actual[0].ID) + 
require.EqualValues(t, extSession.SessionID, actual[0].SessionID) + }) + + t.Run("returns external sessions by NameID", func(t *testing.T) { + store := setupTest(t) + + extSession := &auth.ExternalSession{ + NameID: "name-id", + } + + err := store.Create(context.Background(), extSession) + require.NoError(t, err) + + query := &auth.ListExternalSessionQuery{NameID: extSession.NameID} + actual, err := store.List(context.Background(), query) + require.NoError(t, err) + require.Len(t, actual, 1) + require.EqualValues(t, extSession.ID, actual[0].ID) + require.EqualValues(t, extSession.NameID, actual[0].NameID) + }) + + t.Run("returns empty result if no external sessions match the query", func(t *testing.T) { + store := setupTest(t) + + query := &auth.ListExternalSessionQuery{ID: 999} + actual, err := store.List(context.Background(), query) + require.NoError(t, err) + require.Len(t, actual, 0) + }) +} + +func TestDeleteExternalSessionsByUserID(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test") + } + + t.Run("deletes all external sessions for a given user ID", func(t *testing.T) { + store := setupTest(t) + + userID := int64(1) + extSession1 := &auth.ExternalSession{ + UserID: userID, + AccessToken: "access-token-1", + } + extSession2 := &auth.ExternalSession{ + UserID: userID, + AccessToken: "access-token-2", + } + + err := store.Create(context.Background(), extSession1) + require.NoError(t, err) + err = store.Create(context.Background(), extSession2) + require.NoError(t, err) + + err = store.DeleteExternalSessionsByUserID(context.Background(), userID) + require.NoError(t, err) + + query := &auth.ListExternalSessionQuery{} + actual, err := store.List(context.Background(), query) + require.NoError(t, err) + require.Len(t, actual, 0) + }) + + t.Run("returns no error if no external sessions exist for the given user ID", func(t *testing.T) { + store := setupTest(t) + + userID := int64(999) + err := store.DeleteExternalSessionsByUserID(context.Background(), userID) + require.NoError(t, err) + }) +} + +func TestDeleteExternalSession(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test") + } + + t.Run("deletes an existing external session", func(t *testing.T) { + store := setupTest(t) + + extSession := &auth.ExternalSession{ + AccessToken: "access-token", + } + + err := store.Create(context.Background(), extSession) + require.NoError(t, err) + + err = store.Delete(context.Background(), extSession.ID) + require.NoError(t, err) + + _, err = store.Get(context.Background(), extSession.ID) + require.ErrorIs(t, err, auth.ErrExternalSessionNotFound) + }) + + t.Run("returns no error if the external session does not exist", func(t *testing.T) { + store := setupTest(t) + + err := store.Delete(context.Background(), 999) + require.NoError(t, err) + }) +} + +func TestBatchDeleteExternalSessionsByUserIDs(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test") + } + + t.Run("deletes all external sessions for given user IDs", func(t *testing.T) { + store := setupTest(t) + + userID1 := int64(1) + userID2 := int64(2) + extSession1 := &auth.ExternalSession{ + UserID: userID1, + AccessToken: "access-token-1", + } + extSession2 := &auth.ExternalSession{ + UserID: userID2, + AccessToken: "access-token-2", + } + + err := store.Create(context.Background(), extSession1) + require.NoError(t, err) + err = store.Create(context.Background(), extSession2) + require.NoError(t, err) + + err = store.BatchDeleteExternalSessionsByUserIDs(context.Background(), 
[]int64{userID1, userID2}) + require.NoError(t, err) + + query := &auth.ListExternalSessionQuery{} + actual, err := store.List(context.Background(), query) + require.NoError(t, err) + require.Len(t, actual, 0) + }) + + t.Run("returns no error if no external sessions exist for the given user IDs", func(t *testing.T) { + store := setupTest(t) + + err := store.BatchDeleteExternalSessionsByUserIDs(context.Background(), []int64{999, 1000}) + require.NoError(t, err) + }) +} + +func setupTest(t *testing.T) *store { + sqlStore := db.InitTestDB(t) + secretService := fakes.NewFakeSecretsService() + tracer := tracing.InitializeTracerForTest() + externalSessionStore := provideExternalSessionStore(sqlStore, secretService, tracer).(*store) + return externalSessionStore +} diff --git a/pkg/services/auth/authimpl/model.go b/pkg/services/auth/authimpl/model.go index ef2544d730b..fa0e566cf8f 100644 --- a/pkg/services/auth/authimpl/model.go +++ b/pkg/services/auth/authimpl/model.go @@ -7,19 +7,20 @@ import ( ) type userAuthToken struct { - Id int64 - UserId int64 - AuthToken string - PrevAuthToken string - UserAgent string - ClientIp string - AuthTokenSeen bool - SeenAt int64 - RotatedAt int64 - CreatedAt int64 - UpdatedAt int64 - RevokedAt int64 - UnhashedToken string `xorm:"-"` + Id int64 + UserId int64 + AuthToken string + PrevAuthToken string + UserAgent string + ClientIp string + AuthTokenSeen bool + SeenAt int64 + RotatedAt int64 + CreatedAt int64 + UpdatedAt int64 + RevokedAt int64 + UnhashedToken string `xorm:"-"` + ExternalSessionId int64 } func userAuthTokenFromUserToken(ut *auth.UserToken) (*userAuthToken, error) { @@ -46,6 +47,7 @@ func (uat *userAuthToken) fromUserToken(ut *auth.UserToken) error { uat.UpdatedAt = ut.UpdatedAt uat.RevokedAt = ut.RevokedAt uat.UnhashedToken = ut.UnhashedToken + uat.ExternalSessionId = ut.ExternalSessionId return nil } @@ -68,5 +70,6 @@ func (uat *userAuthToken) toUserToken(ut *auth.UserToken) error { ut.UpdatedAt = uat.UpdatedAt ut.RevokedAt = uat.RevokedAt ut.UnhashedToken = uat.UnhashedToken + ut.ExternalSessionId = uat.ExternalSessionId return nil } diff --git a/pkg/services/auth/authimpl/token_cleanup.go b/pkg/services/auth/authimpl/token_cleanup.go index b0fcc4cafee..8804bc11e3b 100644 --- a/pkg/services/auth/authimpl/token_cleanup.go +++ b/pkg/services/auth/authimpl/token_cleanup.go @@ -16,6 +16,9 @@ func (s *UserAuthTokenService) Run(ctx context.Context) error { if _, err := s.deleteExpiredTokens(ctx, maxInactiveLifetime, maxLifetime); err != nil { s.log.Error("An error occurred while deleting expired tokens", "err", err) } + if err := s.deleteOrphanedExternalSessions(ctx); err != nil { + s.log.Error("An error occurred while deleting orphaned external sessions", "err", err) + } }) if err != nil { s.log.Error("Failed to lock and execute cleanup of expired auth token", "error", err) @@ -28,6 +31,9 @@ func (s *UserAuthTokenService) Run(ctx context.Context) error { if _, err := s.deleteExpiredTokens(ctx, maxInactiveLifetime, maxLifetime); err != nil { s.log.Error("An error occurred while deleting expired tokens", "err", err) } + if err := s.deleteOrphanedExternalSessions(ctx); err != nil { + s.log.Error("An error occurred while deleting orphaned external sessions", "err", err) + } }) if err != nil { s.log.Error("Failed to lock and execute cleanup of expired auth token", "error", err) @@ -66,3 +72,29 @@ func (s *UserAuthTokenService) deleteExpiredTokens(ctx context.Context, maxInact return affected, err } + +func (s *UserAuthTokenService) 
deleteOrphanedExternalSessions(ctx context.Context) error { + s.log.Debug("Starting cleanup of external sessions") + + var affected int64 + err := s.sqlStore.WithDbSession(ctx, func(dbSession *db.Session) error { + sql := `DELETE FROM user_external_session WHERE NOT EXISTS (SELECT 1 FROM user_auth_token WHERE user_external_session.id = user_auth_token.external_session_id)` + + res, err := dbSession.Exec(sql) + if err != nil { + return err + } + + affected, err = res.RowsAffected() + if err != nil { + s.log.Error("Failed to cleanup orphaned external sessions", "error", err) + return nil + } + + s.log.Debug("Cleanup of orphaned external sessions done", "count", affected) + + return nil + }) + + return err +} diff --git a/pkg/services/auth/authimpl/token_cleanup_test.go b/pkg/services/auth/authimpl/token_cleanup_test.go index b6623d99036..a28423af151 100644 --- a/pkg/services/auth/authimpl/token_cleanup_test.go +++ b/pkg/services/auth/authimpl/token_cleanup_test.go @@ -9,9 +9,14 @@ import ( "github.com/stretchr/testify/require" "github.com/grafana/grafana/pkg/infra/db" + "github.com/grafana/grafana/pkg/services/auth" ) -func TestUserAuthTokenCleanup(t *testing.T) { +func TestIntegrationUserAuthTokenCleanup(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test") + } + setup := func() *testContext { ctx := createTestContext(t) maxInactiveLifetime, _ := time.ParseDuration("168h") @@ -75,3 +80,61 @@ func TestUserAuthTokenCleanup(t *testing.T) { require.Equal(t, int64(3), affected) }) } + +func TestIntegrationOrphanedExternalSessionsCleanup(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test") + } + + setup := func() *testContext { + ctx := createTestContext(t) + return ctx + } + + insertExternalSession := func(ctx *testContext, id int64) { + es := &auth.ExternalSession{ID: id, UserAuthID: 1, UserID: 1} + err := ctx.sqlstore.WithDbSession(context.Background(), func(sess *db.Session) error { + _, err := sess.Insert(es) + require.Nil(t, err) + return nil + }) + require.NoError(t, err) + } + + insertAuthToken := func(ctx *testContext, token string, externalSessionId int64) { + ut := userAuthToken{AuthToken: token, PrevAuthToken: fmt.Sprintf("old%s", token), ExternalSessionId: externalSessionId} + err := ctx.sqlstore.WithDbSession(context.Background(), func(sess *db.Session) error { + _, err := sess.Insert(&ut) + require.Nil(t, err) + return nil + }) + require.NoError(t, err) + } + + t.Run("should delete orphaned external sessions", func(t *testing.T) { + ctx := setup() + + // insert three external sessions + for i := int64(1); i <= 3; i++ { + insertExternalSession(ctx, i) + } + + // insert two auth tokens linked to external sessions + insertAuthToken(ctx, "token1", 1) + insertAuthToken(ctx, "token2", 2) + + // delete orphaned external sessions + err := ctx.tokenService.deleteOrphanedExternalSessions(context.Background()) + require.NoError(t, err) + + // verify that only the orphaned external session is deleted + var count int64 + err = ctx.sqlstore.WithDbSession(context.Background(), func(sess *db.Session) error { + count, err = sess.Count(&auth.ExternalSession{}) + require.Nil(t, err) + return nil + }) + require.NoError(t, err) + require.Equal(t, int64(2), count) + }) +} diff --git a/pkg/services/auth/authtest/external_session_store_mock.go b/pkg/services/auth/authtest/external_session_store_mock.go new file mode 100644 index 00000000000..17694b08b3d --- /dev/null +++ b/pkg/services/auth/authtest/external_session_store_mock.go @@ -0,0 +1,162 @@ +// Code 
generated by mockery v2.42.1. DO NOT EDIT. + +package authtest + +import ( + context "context" + + auth "github.com/grafana/grafana/pkg/services/auth" + + mock "github.com/stretchr/testify/mock" +) + +// MockExternalSessionStore is an autogenerated mock type for the ExternalSessionStore type +type MockExternalSessionStore struct { + mock.Mock +} + +// BatchDeleteExternalSessionsByUserIDs provides a mock function with given fields: ctx, userIDs +func (_m *MockExternalSessionStore) BatchDeleteExternalSessionsByUserIDs(ctx context.Context, userIDs []int64) error { + ret := _m.Called(ctx, userIDs) + + if len(ret) == 0 { + panic("no return value specified for BatchDeleteExternalSessionsByUserIDs") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, []int64) error); ok { + r0 = rf(ctx, userIDs) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Create provides a mock function with given fields: ctx, extSesion +func (_m *MockExternalSessionStore) Create(ctx context.Context, extSesion *auth.ExternalSession) error { + ret := _m.Called(ctx, extSesion) + + if len(ret) == 0 { + panic("no return value specified for Create") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *auth.ExternalSession) error); ok { + r0 = rf(ctx, extSesion) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Delete provides a mock function with given fields: ctx, ID +func (_m *MockExternalSessionStore) Delete(ctx context.Context, ID int64) error { + ret := _m.Called(ctx, ID) + + if len(ret) == 0 { + panic("no return value specified for Delete") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int64) error); ok { + r0 = rf(ctx, ID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// DeleteExternalSessionsByUserID provides a mock function with given fields: ctx, userID +func (_m *MockExternalSessionStore) DeleteExternalSessionsByUserID(ctx context.Context, userID int64) error { + ret := _m.Called(ctx, userID) + + if len(ret) == 0 { + panic("no return value specified for DeleteExternalSessionsByUserID") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int64) error); ok { + r0 = rf(ctx, userID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Get provides a mock function with given fields: ctx, ID +func (_m *MockExternalSessionStore) Get(ctx context.Context, ID int64) (*auth.ExternalSession, error) { + ret := _m.Called(ctx, ID) + + if len(ret) == 0 { + panic("no return value specified for Get") + } + + var r0 *auth.ExternalSession + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, int64) (*auth.ExternalSession, error)); ok { + return rf(ctx, ID) + } + if rf, ok := ret.Get(0).(func(context.Context, int64) *auth.ExternalSession); ok { + r0 = rf(ctx, ID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*auth.ExternalSession) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, int64) error); ok { + r1 = rf(ctx, ID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// List provides a mock function with given fields: ctx, query +func (_m *MockExternalSessionStore) List(ctx context.Context, query *auth.ListExternalSessionQuery) ([]*auth.ExternalSession, error) { + ret := _m.Called(ctx, query) + + if len(ret) == 0 { + panic("no return value specified for List") + } + + var r0 []*auth.ExternalSession + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *auth.ListExternalSessionQuery) ([]*auth.ExternalSession, error)); ok { + return rf(ctx, query) + } + if rf, ok := 
ret.Get(0).(func(context.Context, *auth.ListExternalSessionQuery) []*auth.ExternalSession); ok { + r0 = rf(ctx, query) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*auth.ExternalSession) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *auth.ListExternalSessionQuery) error); ok { + r1 = rf(ctx, query) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// NewMockExternalSessionStore creates a new instance of MockExternalSessionStore. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewMockExternalSessionStore(t interface { + mock.TestingT + Cleanup(func()) +}) *MockExternalSessionStore { + mock := &MockExternalSessionStore{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/pkg/services/auth/authtest/testing.go b/pkg/services/auth/authtest/testing.go index d5ffa7ae8ff..60265f4de8a 100644 --- a/pkg/services/auth/authtest/testing.go +++ b/pkg/services/auth/authtest/testing.go @@ -11,26 +11,28 @@ import ( "github.com/grafana/grafana/pkg/services/auth" "github.com/grafana/grafana/pkg/services/datasources" "github.com/grafana/grafana/pkg/services/login" - "github.com/grafana/grafana/pkg/services/user" ) type FakeUserAuthTokenService struct { - CreateTokenProvider func(ctx context.Context, user *user.User, clientIP net.IP, userAgent string) (*auth.UserToken, error) - RotateTokenProvider func(ctx context.Context, cmd auth.RotateCommand) (*auth.UserToken, error) - TryRotateTokenProvider func(ctx context.Context, token *auth.UserToken, clientIP net.IP, userAgent string) (bool, *auth.UserToken, error) - LookupTokenProvider func(ctx context.Context, unhashedToken string) (*auth.UserToken, error) - RevokeTokenProvider func(ctx context.Context, token *auth.UserToken, soft bool) error - RevokeAllUserTokensProvider func(ctx context.Context, userID int64) error - ActiveTokenCountProvider func(ctx context.Context, userID *int64) (int64, error) - GetUserTokenProvider func(ctx context.Context, userID, userTokenID int64) (*auth.UserToken, error) - GetUserTokensProvider func(ctx context.Context, userID int64) ([]*auth.UserToken, error) - GetUserRevokedTokensProvider func(ctx context.Context, userID int64) ([]*auth.UserToken, error) - BatchRevokedTokenProvider func(ctx context.Context, userIDs []int64) error + CreateTokenProvider func(ctx context.Context, cmd *auth.CreateTokenCommand) (*auth.UserToken, error) + RotateTokenProvider func(ctx context.Context, cmd auth.RotateCommand) (*auth.UserToken, error) + GetTokenByExternalSessionIDProvider func(ctx context.Context, externalSessionID int64) (*auth.UserToken, error) + GetExternalSessionProvider func(ctx context.Context, externalSessionID int64) (*auth.ExternalSession, error) + FindExternalSessionsProvider func(ctx context.Context, query *auth.ListExternalSessionQuery) ([]*auth.ExternalSession, error) + TryRotateTokenProvider func(ctx context.Context, token *auth.UserToken, clientIP net.IP, userAgent string) (bool, *auth.UserToken, error) + LookupTokenProvider func(ctx context.Context, unhashedToken string) (*auth.UserToken, error) + RevokeTokenProvider func(ctx context.Context, token *auth.UserToken, soft bool) error + RevokeAllUserTokensProvider func(ctx context.Context, userID int64) error + ActiveTokenCountProvider func(ctx context.Context, userID *int64) (int64, error) + GetUserTokenProvider func(ctx context.Context, userID, userTokenID int64) 
(*auth.UserToken, error) + GetUserTokensProvider func(ctx context.Context, userID int64) ([]*auth.UserToken, error) + GetUserRevokedTokensProvider func(ctx context.Context, userID int64) ([]*auth.UserToken, error) + BatchRevokedTokenProvider func(ctx context.Context, userIDs []int64) error } func NewFakeUserAuthTokenService() *FakeUserAuthTokenService { return &FakeUserAuthTokenService{ - CreateTokenProvider: func(ctx context.Context, user *user.User, clientIP net.IP, userAgent string) (*auth.UserToken, error) { + CreateTokenProvider: func(ctx context.Context, cmd *auth.CreateTokenCommand) (*auth.UserToken, error) { return &auth.UserToken{ UserId: 0, UnhashedToken: "", @@ -72,14 +74,26 @@ func (s *FakeUserAuthTokenService) Init() error { return nil } -func (s *FakeUserAuthTokenService) CreateToken(ctx context.Context, user *user.User, clientIP net.IP, userAgent string) (*auth.UserToken, error) { - return s.CreateTokenProvider(context.Background(), user, clientIP, userAgent) +func (s *FakeUserAuthTokenService) CreateToken(ctx context.Context, cmd *auth.CreateTokenCommand) (*auth.UserToken, error) { + return s.CreateTokenProvider(context.Background(), cmd) } func (s *FakeUserAuthTokenService) RotateToken(ctx context.Context, cmd auth.RotateCommand) (*auth.UserToken, error) { return s.RotateTokenProvider(ctx, cmd) } +func (s *FakeUserAuthTokenService) GetTokenByExternalSessionID(ctx context.Context, externalSessionID int64) (*auth.UserToken, error) { + return s.GetTokenByExternalSessionIDProvider(ctx, externalSessionID) +} + +func (s *FakeUserAuthTokenService) GetExternalSession(ctx context.Context, externalSessionID int64) (*auth.ExternalSession, error) { + return s.GetExternalSessionProvider(ctx, externalSessionID) +} + +func (s *FakeUserAuthTokenService) FindExternalSessions(ctx context.Context, query *auth.ListExternalSessionQuery) ([]*auth.ExternalSession, error) { + return s.FindExternalSessionsProvider(context.Background(), query) +} + func (s *FakeUserAuthTokenService) LookupToken(ctx context.Context, unhashedToken string) (*auth.UserToken, error) { return s.LookupTokenProvider(context.Background(), unhashedToken) } diff --git a/pkg/services/auth/external_session.go b/pkg/services/auth/external_session.go new file mode 100644 index 00000000000..d49db871a10 --- /dev/null +++ b/pkg/services/auth/external_session.go @@ -0,0 +1,66 @@ +package auth + +import ( + "context" + "time" +) + +type ExternalSession struct { + ID int64 `xorm:"pk autoincr 'id'"` + UserID int64 `xorm:"user_id"` + UserAuthID int64 `xorm:"user_auth_id"` + AuthModule string `xorm:"auth_module"` + AccessToken string `xorm:"access_token"` + IDToken string `xorm:"id_token"` + RefreshToken string `xorm:"refresh_token"` + SessionID string `xorm:"session_id"` + SessionIDHash string `xorm:"session_id_hash"` + NameID string `xorm:"name_id"` + NameIDHash string `xorm:"name_id_hash"` + ExpiresAt time.Time `xorm:"expires_at"` + CreatedAt time.Time `xorm:"created 'created_at'"` +} + +func (e *ExternalSession) TableName() string { + return "user_external_session" +} + +func (e *ExternalSession) Clone() *ExternalSession { + return &ExternalSession{ + ID: e.ID, + UserID: e.UserID, + UserAuthID: e.UserAuthID, + AuthModule: e.AuthModule, + AccessToken: e.AccessToken, + IDToken: e.IDToken, + RefreshToken: e.RefreshToken, + SessionID: e.SessionID, + SessionIDHash: e.SessionIDHash, + NameID: e.NameID, + NameIDHash: e.NameIDHash, + ExpiresAt: e.ExpiresAt, + CreatedAt: e.CreatedAt, + } +} + +type ListExternalSessionQuery struct { + ID int64 
+ NameID string + SessionID string +} + +//go:generate mockery --name ExternalSessionStore --structname MockExternalSessionStore --outpkg authtest --filename external_session_store_mock.go --output ./authtest/ +type ExternalSessionStore interface { + // Get returns the external session + Get(ctx context.Context, ID int64) (*ExternalSession, error) + // List returns all external sessions for the given query + List(ctx context.Context, query *ListExternalSessionQuery) ([]*ExternalSession, error) + // Create creates a new external session for a user + Create(ctx context.Context, extSession *ExternalSession) error + // Delete deletes an external session + Delete(ctx context.Context, ID int64) error + // DeleteExternalSessionsByUserID deletes all external sessions for the given user ID + DeleteExternalSessionsByUserID(ctx context.Context, userID int64) error + // BatchDeleteExternalSessionsByUserIDs deletes external sessions by user IDs + BatchDeleteExternalSessionsByUserIDs(ctx context.Context, userIDs []int64) error +} diff --git a/pkg/services/authn/authn.go b/pkg/services/authn/authn.go index 3c43822e9e8..a024beaf2fb 100644 --- a/pkg/services/authn/authn.go +++ b/pkg/services/authn/authn.go @@ -32,9 +32,10 @@ const ( ) const ( - MetaKeyUsername = "username" - MetaKeyAuthModule = "authModule" - MetaKeyIsLogin = "isLogin" + MetaKeyUsername = "username" + MetaKeyAuthModule = "authModule" + MetaKeyIsLogin = "isLogin" + defaultRedirectToCookieKey = "redirect_to" ) // ClientParams are hints to the auth service about how to handle the identity management @@ -74,9 +75,11 @@ type FetchPermissionsParams struct { Roles []string } -type PostAuthHookFn func(ctx context.Context, identity *Identity, r *Request) error -type PostLoginHookFn func(ctx context.Context, identity *Identity, r *Request, err error) -type PreLogoutHookFn func(ctx context.Context, requester identity.Requester, sessionToken *usertoken.UserToken) error +type ( + PostAuthHookFn func(ctx context.Context, identity *Identity, r *Request) error + PostLoginHookFn func(ctx context.Context, identity *Identity, r *Request, err error) + PreLogoutHookFn func(ctx context.Context, requester identity.Requester, sessionToken *usertoken.UserToken) error +) type Authenticator interface { // Authenticate authenticates a request @@ -233,41 +236,52 @@ type RedirectValidator func(url string) error // HandleLoginResponse is a utility function to perform common operations after a successful login and returns response.NormalResponse func HandleLoginResponse(r *http.Request, w http.ResponseWriter, cfg *setting.Cfg, identity *Identity, validator RedirectValidator, features featuremgmt.FeatureToggles) *response.NormalResponse { result := map[string]any{"message": "Logged in"} - result["redirectUrl"] = handleLogin(r, w, cfg, identity, validator, features) + result["redirectUrl"] = handleLogin(r, w, cfg, identity, validator, features, "") return response.JSON(http.StatusOK, result) } // HandleLoginRedirect is a utility function to perform common operations after a successful login and redirects func HandleLoginRedirect(r *http.Request, w http.ResponseWriter, cfg *setting.Cfg, identity *Identity, validator RedirectValidator, features featuremgmt.FeatureToggles) { - redirectURL := handleLogin(r, w, cfg, identity, validator, features) + redirectURL := handleLogin(r, w, cfg, identity, validator, features, "redirectTo") http.Redirect(w, r, redirectURL, http.StatusFound) } // HandleLoginRedirectResponse is a utility function to perform common operations after a successful login and return a
response.RedirectResponse -func HandleLoginRedirectResponse(r *http.Request, w http.ResponseWriter, cfg *setting.Cfg, identity *Identity, validator RedirectValidator, features featuremgmt.FeatureToggles) *response.RedirectResponse { - return response.Redirect(handleLogin(r, w, cfg, identity, validator, features)) +func HandleLoginRedirectResponse(r *http.Request, w http.ResponseWriter, cfg *setting.Cfg, identity *Identity, validator RedirectValidator, features featuremgmt.FeatureToggles, redirectToCookieName string) *response.RedirectResponse { + return response.Redirect(handleLogin(r, w, cfg, identity, validator, features, redirectToCookieName)) } -func handleLogin(r *http.Request, w http.ResponseWriter, cfg *setting.Cfg, identity *Identity, validator RedirectValidator, features featuremgmt.FeatureToggles) string { +func handleLogin(r *http.Request, w http.ResponseWriter, cfg *setting.Cfg, identity *Identity, validator RedirectValidator, features featuremgmt.FeatureToggles, redirectToCookieName string) string { WriteSessionCookie(w, cfg, identity.SessionToken) + redirectURL := cfg.AppSubURL + "/" if features.IsEnabledGlobally(featuremgmt.FlagUseSessionStorageForRedirection) { - return cfg.AppSubURL + "/" + if redirectToCookieName != "" { + scopedRedirectToCookie, err := r.Cookie(redirectToCookieName) + if err == nil { + redirectTo, _ := url.QueryUnescape(scopedRedirectToCookie.Value) + if redirectTo != "" && validator(redirectTo) == nil { + redirectURL = cfg.AppSubURL + redirectTo + } + cookies.DeleteCookie(w, redirectToCookieName, cookieOptions(cfg)) + } + } + return redirectURL } - redirectURL := cfg.AppSubURL + "/" + redirectURL = cfg.AppSubURL + "/" if redirectTo := getRedirectURL(r); len(redirectTo) > 0 { if validator(redirectTo) == nil { redirectURL = redirectTo } - cookies.DeleteCookie(w, "redirect_to", cookieOptions(cfg)) + cookies.DeleteCookie(w, defaultRedirectToCookieKey, cookieOptions(cfg)) } return redirectURL } func getRedirectURL(r *http.Request) string { - cookie, err := r.Cookie("redirect_to") + cookie, err := r.Cookie(defaultRedirectToCookieKey) if err != nil { return "" } diff --git a/pkg/services/authn/authnimpl/service.go b/pkg/services/authn/authnimpl/service.go index e34a0463f67..e7182dcab52 100644 --- a/pkg/services/authn/authnimpl/service.go +++ b/pkg/services/authn/authnimpl/service.go @@ -22,6 +22,7 @@ import ( "github.com/grafana/grafana/pkg/services/auth" "github.com/grafana/grafana/pkg/services/authn" "github.com/grafana/grafana/pkg/services/authn/clients" + "github.com/grafana/grafana/pkg/services/login" "github.com/grafana/grafana/pkg/services/user" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/web" @@ -52,8 +53,8 @@ func ProvideIdentitySynchronizer(s *Service) authn.IdentitySynchronizer { } func ProvideService( - cfg *setting.Cfg, tracer tracing.Tracer, - sessionService auth.UserTokenService, usageStats usagestats.Service, registerer prometheus.Registerer, + cfg *setting.Cfg, tracer tracing.Tracer, sessionService auth.UserTokenService, + usageStats usagestats.Service, registerer prometheus.Registerer, authTokenService login.AuthInfoService, ) *Service { s := &Service{ log: log.New("authn.service"), @@ -64,6 +65,7 @@ func ProvideService( tracer: tracer, metrics: newMetrics(registerer), sessionService: sessionService, + authTokenService: authTokenService, preLogoutHooks: newQueue[authn.PreLogoutHookFn](), postAuthHooks: newQueue[authn.PostAuthHookFn](), postLoginHooks: newQueue[authn.PostLoginHookFn](), @@ -85,7 +87,8 @@ type 
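The new `handleLogin` above resolves the post-login redirect from a scoped cookie when the session-storage flag is enabled: read the cookie, query-unescape its value, validate the target, and delete the cookie once consumed. A rough standalone sketch of that flow follows; `resolveRedirect`, `appSubURL`, and `sameAppValidator` are hypothetical stand-ins for `handleLogin`, `cfg.AppSubURL`, and a real `RedirectValidator`:

```go
package sketch

import (
	"errors"
	"net/http"
	"net/url"
	"strings"
)

// resolveRedirect mirrors the cookie branch of handleLogin: fall back to the
// app root unless the cookie holds a non-empty value that passes validation.
func resolveRedirect(r *http.Request, w http.ResponseWriter, appSubURL, cookieName string, validate func(string) error) string {
	redirectURL := appSubURL + "/"
	cookie, err := r.Cookie(cookieName)
	if err != nil {
		return redirectURL // no cookie set: land on the default page
	}
	if redirectTo, err := url.QueryUnescape(cookie.Value); err == nil && redirectTo != "" && validate(redirectTo) == nil {
		redirectURL = appSubURL + redirectTo
	}
	// The cookie is single-use: clear it whether or not it validated.
	http.SetCookie(w, &http.Cookie{Name: cookieName, MaxAge: -1, Path: "/"})
	return redirectURL
}

// sameAppValidator is a toy RedirectValidator: accept only app-relative paths.
func sameAppValidator(to string) error {
	if strings.HasPrefix(to, "/") && !strings.HasPrefix(to, "//") {
		return nil
	}
	return errors.New("redirect target is not app-relative")
}
```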
Service struct { tracer tracing.Tracer metrics *metrics - sessionService auth.UserTokenService + sessionService auth.UserTokenService + authTokenService login.AuthInfoService // postAuthHooks are called after a successful authentication. They can modify the identity. postAuthHooks *queue[authn.PostAuthHookFn] @@ -238,7 +241,9 @@ func (s *Service) Login(ctx context.Context, client string, r *authn.Request) (i s.log.FromContext(ctx).Debug("Failed to parse ip from address", "client", c.Name(), "id", id.ID, "addr", addr, "error", err) } - sessionToken, err := s.sessionService.CreateToken(ctx, &user.User{ID: userID}, ip, r.HTTPRequest.UserAgent()) + externalSession := s.resolveExternalSessionFromIdentity(ctx, id, userID) + + sessionToken, err := s.sessionService.CreateToken(ctx, &auth.CreateTokenCommand{User: &user.User{ID: userID}, ClientIP: ip, UserAgent: r.HTTPRequest.UserAgent(), ExternalSession: externalSession}) if err != nil { s.metrics.failedLogin.WithLabelValues(client).Inc() s.log.FromContext(ctx).Error("Failed to create session", "client", client, "id", id.ID, "err", err) @@ -403,7 +408,8 @@ func (s *Service) resolveIdenity(ctx context.Context, orgID int64, typedID strin AllowGlobalOrg: true, FetchSyncedUser: true, SyncPermissions: true, - }}, nil + }, + }, nil } if claims.IsIdentityType(t, claims.TypeServiceAccount) { @@ -415,7 +421,8 @@ func (s *Service) resolveIdenity(ctx context.Context, orgID int64, typedID strin AllowGlobalOrg: true, FetchSyncedUser: true, SyncPermissions: true, - }}, nil + }, + }, nil } resolver, ok := s.idenityResolverClients[string(t)] @@ -482,3 +489,35 @@ func orgIDFromHeader(req *http.Request) int64 { } return id } + +func (s *Service) resolveExternalSessionFromIdentity(ctx context.Context, identity *authn.Identity, userID int64) *auth.ExternalSession { + if identity.OAuthToken == nil { + return nil + } + + info, err := s.authTokenService.GetAuthInfo(ctx, &login.GetAuthInfoQuery{AuthId: identity.GetAuthID(), UserId: userID}) + if err != nil { + s.log.FromContext(ctx).Info("Failed to get auth info", "error", err, "authID", identity.GetAuthID(), "userID", userID) + return nil + } + + extSession := &auth.ExternalSession{ + AuthModule: identity.GetAuthenticatedBy(), + UserAuthID: info.Id, + UserID: userID, + } + extSession.AccessToken = identity.OAuthToken.AccessToken + extSession.RefreshToken = identity.OAuthToken.RefreshToken + extSession.ExpiresAt = identity.OAuthToken.Expiry + + if idToken, ok := identity.OAuthToken.Extra("id_token").(string); ok && idToken != "" { + extSession.IDToken = idToken + } + + // As of https://openid.net/specs/openid-connect-session-1_0.html + if sessionState, ok := identity.OAuthToken.Extra("session_state").(string); ok && sessionState != "" { + extSession.SessionID = sessionState + } + + return extSession +} diff --git a/pkg/services/authn/authnimpl/service_test.go b/pkg/services/authn/authnimpl/service_test.go index 2727a8dace0..50b03646abc 100644 --- a/pkg/services/authn/authnimpl/service_test.go +++ b/pkg/services/authn/authnimpl/service_test.go @@ -3,7 +3,6 @@ package authnimpl import ( "context" "errors" - "net" "net/http" "net/url" "slices" @@ -24,7 +23,6 @@ import ( "github.com/grafana/grafana/pkg/services/auth/authtest" "github.com/grafana/grafana/pkg/services/authn" "github.com/grafana/grafana/pkg/services/authn/authntest" - "github.com/grafana/grafana/pkg/services/user" "github.com/grafana/grafana/pkg/setting" ) @@ -399,11 +397,11 @@ func TestService_Login(t *testing.T) { ExpectedIdentity: tt.expectedClientIdentity, 
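`resolveExternalSessionFromIdentity` above copies provider tokens onto the new `ExternalSession`, pulling `id_token` and `session_state` out of the raw token response because they are not first-class fields on the token type. A sketch of just that extraction, assuming `golang.org/x/oauth2`; `externalSessionFields` and `fromOAuthToken` are illustrative names, not part of the PR:

```go
package sketch

import (
	"time"

	"golang.org/x/oauth2"
)

// externalSessionFields holds what resolveExternalSessionFromIdentity pulls
// out of the provider response.
type externalSessionFields struct {
	AccessToken  string
	RefreshToken string
	IDToken      string
	SessionID    string
	ExpiresAt    time.Time
}

// fromOAuthToken shows the Extra(...) pattern: values outside the standard
// OAuth2 fields are fetched from the raw response and type-asserted to string.
func fromOAuthToken(tok *oauth2.Token) externalSessionFields {
	f := externalSessionFields{
		AccessToken:  tok.AccessToken,
		RefreshToken: tok.RefreshToken,
		ExpiresAt:    tok.Expiry,
	}
	if idToken, ok := tok.Extra("id_token").(string); ok && idToken != "" {
		f.IDToken = idToken
	}
	// session_state comes from OpenID Connect Session Management
	// (https://openid.net/specs/openid-connect-session-1_0.html).
	if ss, ok := tok.Extra("session_state").(string); ok && ss != "" {
		f.SessionID = ss
	}
	return f
}
```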
}) svc.sessionService = &authtest.FakeUserAuthTokenService{ - CreateTokenProvider: func(ctx context.Context, user *user.User, clientIP net.IP, userAgent string) (*auth.UserToken, error) { + CreateTokenProvider: func(ctx context.Context, cmd *auth.CreateTokenCommand) (*auth.UserToken, error) { if tt.expectedSessionErr != nil { return nil, tt.expectedSessionErr } - return &auth.UserToken{UserId: user.ID}, nil + return &auth.UserToken{UserId: cmd.User.ID}, nil }, } }) diff --git a/pkg/services/authn/identity.go b/pkg/services/authn/identity.go index b99b5703712..e137809c1d2 100644 --- a/pkg/services/authn/identity.go +++ b/pkg/services/authn/identity.go @@ -44,7 +44,7 @@ type Identity struct { // IsGrafanaAdmin is true if the entity is a Grafana admin. IsGrafanaAdmin *bool // AuthenticatedBy is the name of the authentication client that was used to authenticate the current Identity. - // For example, "password", "apikey", "auth_ldap" or "auth_azuread". + // For example, "password", "apikey", "ldap" or "oauth_azuread". AuthenticatedBy string // AuthId is the unique identifier for the entity in the external system. // Empty if the identity is provided by Grafana. diff --git a/pkg/services/cloudmigration/api/api.go b/pkg/services/cloudmigration/api/api.go index 1cd851a9717..338ee41f911 100644 --- a/pkg/services/cloudmigration/api/api.go +++ b/pkg/services/cloudmigration/api/api.go @@ -13,6 +13,8 @@ import ( contextmodel "github.com/grafana/grafana/pkg/services/contexthandler/model" "github.com/grafana/grafana/pkg/util" "github.com/grafana/grafana/pkg/web" + + "go.opentelemetry.io/otel/codes" ) type CloudMigrationAPI struct { @@ -78,6 +80,9 @@ func (cma *CloudMigrationAPI) GetToken(c *contextmodel.ReqContext) response.Resp token, err := cma.cloudMigrationService.GetToken(ctx) if err != nil { + span.SetStatus(codes.Error, "fetching cloud migration access token") + span.RecordError(err) + if !errors.Is(err, cloudmigration.ErrTokenNotFound) { logger.Error("fetching cloud migration access token", "err", err.Error()) } @@ -112,7 +117,10 @@ func (cma *CloudMigrationAPI) CreateToken(c *contextmodel.ReqContext) response.R resp, err := cma.cloudMigrationService.CreateToken(ctx) if err != nil { + span.SetStatus(codes.Error, "creating gcom access token") + span.RecordError(err) logger.Error("creating gcom access token", "err", err.Error()) + return response.ErrOrFallback(http.StatusInternalServerError, "creating gcom access token", err) } @@ -137,11 +145,17 @@ func (cma *CloudMigrationAPI) DeleteToken(c *contextmodel.ReqContext) response.R uid := web.Params(c.Req)[":uid"] if err := util.ValidateUID(uid); err != nil { + span.SetStatus(codes.Error, "invalid migration uid") + span.RecordError(err) + return response.Error(http.StatusBadRequest, "invalid migration uid", err) } if err := cma.cloudMigrationService.DeleteToken(ctx, uid); err != nil { + span.SetStatus(codes.Error, "deleting cloud migration token") + span.RecordError(err) logger.Error("deleting cloud migration token", "err", err.Error()) + return response.ErrOrFallback(http.StatusInternalServerError, "deleting cloud migration token", err) } @@ -163,6 +177,9 @@ func (cma *CloudMigrationAPI) GetSessionList(c *contextmodel.ReqContext) respons sl, err := cma.cloudMigrationService.GetSessionList(ctx) if err != nil { + span.SetStatus(codes.Error, "session list error") + span.RecordError(err) + return response.ErrOrFallback(http.StatusInternalServerError, "session list error", err) } @@ -185,11 +202,17 @@ func (cma *CloudMigrationAPI) GetSession(c 
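Nearly every handler in this API file now repeats the same pair of calls before returning an error response: `span.SetStatus(codes.Error, ...)` followed by `span.RecordError(err)`. A tiny helper like the following, which is not part of the PR and purely a sketch, captures the idiom in one place:

```go
package sketch

import (
	"go.opentelemetry.io/otel/codes"
	"go.opentelemetry.io/otel/trace"
)

// spanError marks the span as failed, attaches the error event, and hands the
// error back so it can be wrapped directly into the HTTP response.
func spanError(span trace.Span, msg string, err error) error {
	span.SetStatus(codes.Error, msg)
	span.RecordError(err)
	return err
}
```

A call site could then read `return response.Error(http.StatusBadRequest, msg, spanError(span, msg, err))`, at the cost of hiding the span bookkeeping inside the error path, which may be why the PR keeps the calls explicit.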
*contextmodel.ReqContext) response.Re uid := web.Params(c.Req)[":uid"] if err := util.ValidateUID(uid); err != nil { + span.SetStatus(codes.Error, "invalid session uid") + span.RecordError(err) + return response.Error(http.StatusBadRequest, "invalid session uid", err) } s, err := cma.cloudMigrationService.GetSession(ctx, uid) if err != nil { + span.SetStatus(codes.Error, "session not found") + span.RecordError(err) + return response.ErrOrFallback(http.StatusNotFound, "session not found", err) } @@ -217,12 +240,18 @@ func (cma *CloudMigrationAPI) CreateSession(c *contextmodel.ReqContext) response cmd := CloudMigrationSessionRequestDTO{} if err := web.Bind(c.Req, &cmd); err != nil { + span.SetStatus(codes.Error, "bad request data") + span.RecordError(err) + return response.ErrOrFallback(http.StatusBadRequest, "bad request data", err) } s, err := cma.cloudMigrationService.CreateSession(ctx, cloudmigration.CloudMigrationSessionRequest{ AuthToken: cmd.AuthToken, }) if err != nil { + span.SetStatus(codes.Error, "session creation error") + span.RecordError(err) + return response.ErrOrFallback(http.StatusInternalServerError, "session creation error", err) } @@ -250,11 +279,17 @@ func (cma *CloudMigrationAPI) DeleteSession(c *contextmodel.ReqContext) response uid := web.Params(c.Req)[":uid"] if err := util.ValidateUID(uid); err != nil { + span.SetStatus(codes.Error, "invalid session uid") + span.RecordError(err) + return response.ErrOrFallback(http.StatusBadRequest, "invalid session uid", err) } _, err := cma.cloudMigrationService.DeleteSession(ctx, uid) if err != nil { + span.SetStatus(codes.Error, "session delete error") + span.RecordError(err) + return response.ErrOrFallback(http.StatusInternalServerError, "session delete error", err) } return response.Empty(http.StatusOK) @@ -278,11 +313,17 @@ func (cma *CloudMigrationAPI) CreateSnapshot(c *contextmodel.ReqContext) respons uid := web.Params(c.Req)[":uid"] if err := util.ValidateUID(uid); err != nil { + span.SetStatus(codes.Error, "invalid session uid") + span.RecordError(err) + return response.ErrOrFallback(http.StatusBadRequest, "invalid session uid", err) } ss, err := cma.cloudMigrationService.CreateSnapshot(ctx, c.SignedInUser, uid) if err != nil { + span.SetStatus(codes.Error, "error creating snapshot") + span.RecordError(err) + return response.ErrOrFallback(http.StatusInternalServerError, "error creating snapshot", err) } @@ -307,9 +348,15 @@ func (cma *CloudMigrationAPI) GetSnapshot(c *contextmodel.ReqContext) response.R sessUid, snapshotUid := web.Params(c.Req)[":uid"], web.Params(c.Req)[":snapshotUid"] if err := util.ValidateUID(sessUid); err != nil { + span.SetStatus(codes.Error, "invalid session uid") + span.RecordError(err) + return response.ErrOrFallback(http.StatusBadRequest, "invalid session uid", err) } if err := util.ValidateUID(snapshotUid); err != nil { + span.SetStatus(codes.Error, "invalid snapshot uid") + span.RecordError(err) + return response.ErrOrFallback(http.StatusBadRequest, "invalid snapshot uid", err) } @@ -327,6 +374,9 @@ func (cma *CloudMigrationAPI) GetSnapshot(c *contextmodel.ReqContext) response.R } snapshot, err := cma.cloudMigrationService.GetSnapshot(ctx, q) if err != nil { + span.SetStatus(codes.Error, "error retrieving snapshot") + span.RecordError(err) + return response.ErrOrFallback(http.StatusInternalServerError, "error retrieving snapshot", err) } @@ -335,11 +385,12 @@ func (cma *CloudMigrationAPI) GetSnapshot(c *contextmodel.ReqContext) response.R dtoResults := make([]MigrateDataResponseItemDTO, 
len(results)) for i := 0; i < len(results); i++ { dtoResults[i] = MigrateDataResponseItemDTO{ - Name: results[i].Name, - Type: MigrateDataType(results[i].Type), - RefID: results[i].RefID, - Status: ItemStatus(results[i].Status), - Message: results[i].Error, + Name: results[i].Name, + Type: MigrateDataType(results[i].Type), + RefID: results[i].RefID, + Status: ItemStatus(results[i].Status), + Message: results[i].Error, + ParentName: results[i].ParentName, } } @@ -386,6 +437,9 @@ func (cma *CloudMigrationAPI) GetSnapshotList(c *contextmodel.ReqContext) respon uid := web.Params(c.Req)[":uid"] if err := util.ValidateUID(uid); err != nil { + span.SetStatus(codes.Error, "invalid session uid") + span.RecordError(err) + return response.ErrOrFallback(http.StatusBadRequest, "invalid session uid", err) } q := cloudmigration.ListSnapshotsQuery{ @@ -403,6 +457,9 @@ func (cma *CloudMigrationAPI) GetSnapshotList(c *contextmodel.ReqContext) respon snapshotList, err := cma.cloudMigrationService.GetSnapshotList(ctx, q) if err != nil { + span.SetStatus(codes.Error, "error retrieving snapshot list") + span.RecordError(err) + return response.ErrOrFallback(http.StatusInternalServerError, "error retrieving snapshot list", err) } @@ -438,13 +495,22 @@ func (cma *CloudMigrationAPI) UploadSnapshot(c *contextmodel.ReqContext) respons sessUid, snapshotUid := web.Params(c.Req)[":uid"], web.Params(c.Req)[":snapshotUid"] if err := util.ValidateUID(sessUid); err != nil { + span.SetStatus(codes.Error, "invalid session uid") + span.RecordError(err) + return response.ErrOrFallback(http.StatusBadRequest, "invalid session uid", err) } if err := util.ValidateUID(snapshotUid); err != nil { + span.SetStatus(codes.Error, "invalid snapshot uid") + span.RecordError(err) + return response.ErrOrFallback(http.StatusBadRequest, "invalid snapshot uid", err) } if err := cma.cloudMigrationService.UploadSnapshot(ctx, sessUid, snapshotUid); err != nil { + span.SetStatus(codes.Error, "error uploading snapshot") + span.RecordError(err) + return response.ErrOrFallback(http.StatusInternalServerError, "error uploading snapshot", err) } @@ -468,13 +534,22 @@ func (cma *CloudMigrationAPI) CancelSnapshot(c *contextmodel.ReqContext) respons sessUid, snapshotUid := web.Params(c.Req)[":uid"], web.Params(c.Req)[":snapshotUid"] if err := util.ValidateUID(sessUid); err != nil { + span.SetStatus(codes.Error, "invalid session uid") + span.RecordError(err) + return response.ErrOrFallback(http.StatusBadRequest, "invalid session uid", err) } if err := util.ValidateUID(snapshotUid); err != nil { + span.SetStatus(codes.Error, "invalid snapshot uid") + span.RecordError(err) + return response.ErrOrFallback(http.StatusBadRequest, "invalid snapshot uid", err) } if err := cma.cloudMigrationService.CancelSnapshot(ctx, sessUid, snapshotUid); err != nil { + span.SetStatus(codes.Error, "error canceling snapshot") + span.RecordError(err) + return response.ErrOrFallback(http.StatusInternalServerError, "error canceling snapshot", err) } diff --git a/pkg/services/cloudmigration/api/api_test.go b/pkg/services/cloudmigration/api/api_test.go index 1b56c8a4c20..5ede5b8b46d 100644 --- a/pkg/services/cloudmigration/api/api_test.go +++ b/pkg/services/cloudmigration/api/api_test.go @@ -345,7 +345,7 @@ func TestCloudMigrationAPI_GetSnapshot(t *testing.T) { requestUrl: "/api/cloudmigration/migration/1234/snapshot/1", basicRole: org.RoleAdmin, expectedHttpResult: http.StatusOK, - expectedBody: 
`{"uid":"fake_uid","status":"CREATING","sessionUid":"1234","created":"0001-01-01T00:00:00Z","finished":"0001-01-01T00:00:00Z","results":[],"stats":{"types":{},"statuses":{},"total":0}}`, + expectedBody: `{"uid":"fake_uid","status":"CREATING","sessionUid":"1234","created":"0001-01-01T00:00:00Z","finished":"0001-01-01T00:00:00Z","results":[{"name":"dashboard name","parentName":"dashboard parent name","type":"DASHBOARD","refId":"123","status":"PENDING"},{"name":"datasource name","parentName":"dashboard parent name","type":"DATASOURCE","refId":"456","status":"OK"}],"stats":{"types":{},"statuses":{},"total":0}}`, }, { desc: "should return 403 if no used is not admin", diff --git a/pkg/services/cloudmigration/api/dtos.go b/pkg/services/cloudmigration/api/dtos.go index d16f616739e..0e49d253c57 100644 --- a/pkg/services/cloudmigration/api/dtos.go +++ b/pkg/services/cloudmigration/api/dtos.go @@ -106,7 +106,8 @@ type MigrateDataResponseDTO struct { } type MigrateDataResponseItemDTO struct { - Name string `json:"name"` + Name string `json:"name"` + ParentName string `json:"parentName"` // required:true Type MigrateDataType `json:"type"` // required:true @@ -120,10 +121,15 @@ type MigrateDataResponseItemDTO struct { type MigrateDataType string const ( - DashboardDataType MigrateDataType = "DASHBOARD" - DatasourceDataType MigrateDataType = "DATASOURCE" - FolderDataType MigrateDataType = "FOLDER" - LibraryElementDataType MigrateDataType = "LIBRARY_ELEMENT" + DashboardDataType MigrateDataType = "DASHBOARD" + DatasourceDataType MigrateDataType = "DATASOURCE" + FolderDataType MigrateDataType = "FOLDER" + LibraryElementDataType MigrateDataType = "LIBRARY_ELEMENT" + AlertRuleType MigrateDataType = "ALERT_RULE" + ContactPointType MigrateDataType = "CONTACT_POINT" + NotificationPolicyType MigrateDataType = "NOTIFICATION_POLICY" + NotificationTemplateType MigrateDataType = "NOTIFICATION_TEMPLATE" + MuteTimingType MigrateDataType = "MUTE_TIMING" ) // swagger:enum ItemStatus diff --git a/pkg/services/cloudmigration/cloudmigrationimpl/cloudmigration.go b/pkg/services/cloudmigration/cloudmigrationimpl/cloudmigration.go index 886cb8565ef..bf689200719 100644 --- a/pkg/services/cloudmigration/cloudmigrationimpl/cloudmigration.go +++ b/pkg/services/cloudmigration/cloudmigrationimpl/cloudmigration.go @@ -12,6 +12,7 @@ import ( "time" "github.com/google/uuid" + "github.com/grafana/grafana-plugin-sdk-go/backend/httpclient" "github.com/grafana/grafana/pkg/api/routing" "github.com/grafana/grafana/pkg/infra/db" "github.com/grafana/grafana/pkg/infra/kvstore" @@ -27,6 +28,7 @@ import ( "github.com/grafana/grafana/pkg/services/folder" "github.com/grafana/grafana/pkg/services/gcom" "github.com/grafana/grafana/pkg/services/libraryelements" + "github.com/grafana/grafana/pkg/services/ngalert" "github.com/grafana/grafana/pkg/services/pluginsintegration/pluginstore" "github.com/grafana/grafana/pkg/services/secrets" secretskv "github.com/grafana/grafana/pkg/services/secrets/kvstore" @@ -35,6 +37,7 @@ import ( "github.com/grafana/grafana/pkg/util" "github.com/prometheus/client_golang/prometheus" "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/codes" "go.opentelemetry.io/otel/trace" ) @@ -62,6 +65,7 @@ type Service struct { secretsService secrets.Service kvStore *kvstore.NamespacedKVStore libraryElementsService libraryelements.Service + ngAlert *ngalert.AlertNG api *api.CloudMigrationAPI tracer tracing.Tracer @@ -83,6 +87,7 @@ var _ cloudmigration.Service = (*Service)(nil) // builds the service, and api, and configures 
routes func ProvideService( cfg *setting.Cfg, + httpClientProvider *httpclient.Provider, features featuremgmt.FeatureToggles, db db.DB, dsService datasources.DataSourceService, @@ -96,6 +101,7 @@ func ProvideService( pluginStore pluginstore.Store, kvStore kvstore.KVStore, libraryElementsService libraryelements.Service, + ngAlert *ngalert.AlertNG, ) (cloudmigration.Service, error) { if !features.IsEnabledGlobally(featuremgmt.FlagOnPremToCloudMigrations) { return &NoopServiceImpl{}, nil @@ -115,18 +121,33 @@ func ProvideService( pluginStore: pluginStore, kvStore: kvstore.WithNamespace(kvStore, 0, "cloudmigration"), libraryElementsService: libraryElementsService, + ngAlert: ngAlert, } s.api = api.RegisterApi(routeRegister, s, tracer) - s.objectStorage = objectstorage.NewS3() + httpClientS3, err := httpClientProvider.New() + if err != nil { + return nil, fmt.Errorf("creating http client for S3: %w", err) + } + s.objectStorage = objectstorage.NewS3(httpClientS3, tracer) if !cfg.CloudMigration.IsDeveloperMode { - c, err := gmsclient.NewGMSClient(cfg) + httpClientGMS, err := httpClientProvider.New() + if err != nil { + return nil, fmt.Errorf("creating http client for GMS: %w", err) + } + + c, err := gmsclient.NewGMSClient(cfg, httpClientGMS) if err != nil { return nil, fmt.Errorf("initializing GMS client: %w", err) } s.gmsClient = c - s.gcomService = gcom.New(gcom.Config{ApiURL: cfg.GrafanaComAPIURL, Token: cfg.CloudMigration.GcomAPIToken}) + + httpClientGcom, err := httpClientProvider.New() + if err != nil { + return nil, fmt.Errorf("creating http client for GCOM: %w", err) + } + s.gcomService = gcom.New(gcom.Config{ApiURL: cfg.GrafanaComAPIURL, Token: cfg.CloudMigration.GcomAPIToken}, httpClientGcom) } else { s.gmsClient = gmsclient.NewInMemoryClient() s.gcomService = &gcomStub{policies: map[string]gcom.AccessPolicy{}, token: nil} @@ -169,7 +190,8 @@ func (s *Service) GetToken(ctx context.Context) (gcom.TokenView, error) { RequestID: requestID, Region: instance.RegionSlug, AccessPolicyName: accessPolicyName, - TokenName: accessTokenName}) + TokenName: accessTokenName, + }) if err != nil { return gcom.TokenView{}, fmt.Errorf("listing tokens: %w", err) } @@ -279,9 +301,6 @@ func (s *Service) CreateToken(ctx context.Context) (cloudmigration.CreateAccessT } func (s *Service) findAccessPolicyByName(ctx context.Context, regionSlug, accessPolicyName string) (*gcom.AccessPolicy, error) { - ctx, span := s.tracer.Start(ctx, "CloudMigrationService.findAccessPolicyByName") - defer span.End() - accessPolicies, err := s.gcomService.ListAccessPolicies(ctx, gcom.ListAccessPoliciesParams{ RequestID: tracing.TraceIDFromContext(ctx, false), Region: regionSlug, @@ -341,7 +360,7 @@ func (s *Service) DeleteToken(ctx context.Context, tokenID string) error { } func (s *Service) GetSession(ctx context.Context, uid string) (*cloudmigration.CloudMigrationSession, error) { - ctx, span := s.tracer.Start(ctx, "CloudMigrationService.GetMigration") + ctx, span := s.tracer.Start(ctx, "CloudMigrationService.GetSession") defer span.End() migration, err := s.store.GetMigrationSessionByUID(ctx, uid) if err != nil { @@ -352,6 +371,9 @@ func (s *Service) GetSession(ctx context.Context, uid string) (*cloudmigration.C } func (s *Service) GetSessionList(ctx context.Context) (*cloudmigration.CloudMigrationSessionListResponse, error) { + ctx, span := s.tracer.Start(ctx, "CloudMigrationService.GetSessionList") + defer span.End() + values, err := s.store.GetCloudMigrationSessionList(ctx) if err != nil { return nil, fmt.Errorf("retrieving 
session list from store: %w", err) } @@ -370,7 +392,7 @@ func (s *Service) GetSessionList(ctx context.Context) (*cloudmigration.CloudMigr } func (s *Service) CreateSession(ctx context.Context, cmd cloudmigration.CloudMigrationSessionRequest) (*cloudmigration.CloudMigrationSessionResponse, error) { - ctx, span := s.tracer.Start(ctx, "CloudMigrationService.createMigration") + ctx, span := s.tracer.Start(ctx, "CloudMigrationService.CreateSession") defer span.End() base64Token := cmd.AuthToken @@ -405,6 +427,9 @@ } func (s *Service) DeleteSession(ctx context.Context, sessionUID string) (*cloudmigration.CloudMigrationSession, error) { + ctx, span := s.tracer.Start(ctx, "CloudMigrationService.DeleteSession") + defer span.End() + session, snapshots, err := s.store.DeleteMigrationSessionByUID(ctx, sessionUID) if err != nil { s.report(ctx, session, gmsclient.EventDisconnect, 0, err) @@ -470,26 +495,36 @@ func (s *Service) CreateSnapshot(ctx context.Context, signedInUser *user.SignedI s.cancelMutex.Unlock() }() - ctx, cancelFunc := context.WithCancel(context.Background()) + // Create a context out of the span context to ensure the trace is propagated + asyncCtx := trace.ContextWithSpanContext(context.Background(), span.SpanContext()) + asyncCtx, asyncSpan := s.tracer.Start(asyncCtx, "CloudMigrationService.CreateSnapshotAsync") + defer asyncSpan.End() + + asyncCtx, cancelFunc := context.WithCancel(asyncCtx) s.cancelFunc = cancelFunc - s.report(ctx, session, gmsclient.EventStartBuildingSnapshot, 0, nil) + s.report(asyncCtx, session, gmsclient.EventStartBuildingSnapshot, 0, nil) start := time.Now() - err := s.buildSnapshot(ctx, signedInUser, initResp.MaxItemsPerPartition, initResp.Metadata, snapshot) + err := s.buildSnapshot(asyncCtx, signedInUser, initResp.MaxItemsPerPartition, initResp.Metadata, snapshot) if err != nil { + asyncSpan.SetStatus(codes.Error, "error building snapshot") + asyncSpan.RecordError(err) s.log.Error("building snapshot", "err", err.Error()) + // Update status to error with retries - if err := s.updateSnapshotWithRetries(context.Background(), cloudmigration.UpdateSnapshotCmd{ + if err := s.updateSnapshotWithRetries(asyncCtx, cloudmigration.UpdateSnapshotCmd{ UID: snapshot.UID, SessionID: sessionUid, Status: cloudmigration.SnapshotStatusError, }); err != nil { s.log.Error("critical failure during snapshot creation - please report any error logs") + asyncSpan.RecordError(err) } } - s.report(ctx, session, gmsclient.EventDoneBuildingSnapshot, time.Since(start), err) + span.SetStatus(codes.Ok, "snapshot built") + s.report(asyncCtx, session, gmsclient.EventDoneBuildingSnapshot, time.Since(start), err) }() return &snapshot, nil @@ -624,32 +659,48 @@ func (s *Service) UploadSnapshot(ctx context.Context, sessionUid string, snapsho s.cancelMutex.Unlock() }() - ctx, cancelFunc := context.WithCancel(context.Background()) - s.cancelFunc = cancelFunc + // Create a context out of the span context to ensure the trace is propagated + asyncCtx := trace.ContextWithSpanContext(context.Background(), span.SpanContext()) + asyncCtx, asyncSpan := s.tracer.Start(asyncCtx, "CloudMigrationService.UploadSnapshot") + defer asyncSpan.End() - s.report(ctx, session, gmsclient.EventStartUploadingSnapshot, 0, nil) + asyncCtx, s.cancelFunc = context.WithCancel(asyncCtx) + + s.report(asyncCtx, session, gmsclient.EventStartUploadingSnapshot, 0, nil) start := time.Now() - err := s.uploadSnapshot(ctx, session, snapshot, uploadUrl) + err :=
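Both `CreateSnapshot` and `UploadSnapshot` now detach their goroutine's context from the incoming request: `context.Background()` drops the request's cancellation (the HTTP request finishes long before the snapshot work does), while copying the `SpanContext` keeps the async span stitched into the same trace. A hedged sketch of that pattern, where `startDetachedSpan` is a made-up helper name and the PR simply inlines the equivalent calls:

```go
package sketch

import (
	"context"

	"go.opentelemetry.io/otel"
	"go.opentelemetry.io/otel/trace"
)

// startDetachedSpan begins a span for background work that must outlive the
// request: the returned context carries the caller's trace identity but not
// its deadline or cancellation.
func startDetachedSpan(reqCtx context.Context, name string) (context.Context, trace.Span) {
	sc := trace.SpanContextFromContext(reqCtx)
	asyncCtx := trace.ContextWithSpanContext(context.Background(), sc)
	return otel.Tracer("sketch").Start(asyncCtx, name)
}
```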
s.uploadSnapshot(asyncCtx, session, snapshot, uploadUrl) if err != nil { + asyncSpan.SetStatus(codes.Error, "error uploading snapshot") + asyncSpan.RecordError(err) + s.log.Error("uploading snapshot", "err", err.Error()) // Update status to error with retries - if err := s.updateSnapshotWithRetries(context.Background(), cloudmigration.UpdateSnapshotCmd{ + if err := s.updateSnapshotWithRetries(asyncCtx, cloudmigration.UpdateSnapshotCmd{ UID: snapshot.UID, SessionID: sessionUid, Status: cloudmigration.SnapshotStatusError, }); err != nil { + asyncSpan.RecordError(err) s.log.Error("critical failure during snapshot upload - please report any error logs") } } - s.report(ctx, session, gmsclient.EventDoneUploadingSnapshot, time.Since(start), err) + s.report(asyncCtx, session, gmsclient.EventDoneUploadingSnapshot, time.Since(start), err) }() return nil } func (s *Service) CancelSnapshot(ctx context.Context, sessionUid string, snapshotUid string) (err error) { + ctx, span := s.tracer.Start(ctx, "CloudMigrationService.CancelSnapshot", + trace.WithAttributes( + attribute.String("sessionUid", sessionUid), + attribute.String("snapshotUid", snapshotUid), + ), + ) + defer span.End() + // The cancel func itself is protected by a mutex in the async threads, so it may or may not be set by the time CancelSnapshot is called // Attempt to cancel and recover from the panic if the cancel function is nil defer func() { @@ -684,6 +735,9 @@ func (s *Service) report( d time.Duration, evtErr error, ) { + ctx, span := s.tracer.Start(ctx, "CloudMigrationService.report") + defer span.End() + id, err := s.getLocalEventId(ctx) if err != nil { s.log.Error("failed to report event", "type", t, "error", err.Error()) @@ -738,6 +792,9 @@ func (s *Service) getLocalEventId(ctx context.Context) (string, error) { } func (s *Service) deleteLocalFiles(snapshots []cloudmigration.CloudMigrationSnapshot) error { + _, span := s.tracer.Start(context.Background(), "CloudMigrationService.deleteLocalFiles") + defer span.End() + var err error for _, snapshot := range snapshots { err = os.RemoveAll(snapshot.LocalDir) diff --git a/pkg/services/cloudmigration/cloudmigrationimpl/cloudmigration_test.go b/pkg/services/cloudmigration/cloudmigrationimpl/cloudmigration_test.go index ff068b68a0c..74944e407fe 100644 --- a/pkg/services/cloudmigration/cloudmigrationimpl/cloudmigration_test.go +++ b/pkg/services/cloudmigration/cloudmigrationimpl/cloudmigration_test.go @@ -2,18 +2,24 @@ package cloudmigrationimpl import ( "context" + "maps" "os" "path/filepath" + "slices" "testing" "time" "github.com/google/uuid" "github.com/grafana/grafana/pkg/api/routing" + "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/infra/db" + "github.com/grafana/grafana/pkg/infra/httpclient" "github.com/grafana/grafana/pkg/infra/kvstore" "github.com/grafana/grafana/pkg/infra/tracing" "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/services/accesscontrol/actest" + "github.com/grafana/grafana/pkg/services/annotations/annotationstest" "github.com/grafana/grafana/pkg/services/cloudmigration" "github.com/grafana/grafana/pkg/services/cloudmigration/gmsclient" "github.com/grafana/grafana/pkg/services/contexthandler/ctxkey" @@ -26,7 +32,12 @@ import ( "github.com/grafana/grafana/pkg/services/folder/foldertest" libraryelementsfake "github.com/grafana/grafana/pkg/services/libraryelements/fake" libraryelements "github.com/grafana/grafana/pkg/services/libraryelements/model" + 
"github.com/grafana/grafana/pkg/services/ngalert" + "github.com/grafana/grafana/pkg/services/ngalert/metrics" + ngalertstore "github.com/grafana/grafana/pkg/services/ngalert/store" + ngalertfakes "github.com/grafana/grafana/pkg/services/ngalert/tests/fakes" "github.com/grafana/grafana/pkg/services/pluginsintegration/pluginstore" + "github.com/grafana/grafana/pkg/services/quota/quotatest" secretsfakes "github.com/grafana/grafana/pkg/services/secrets/fakes" secretskv "github.com/grafana/grafana/pkg/services/secrets/kvstore" "github.com/grafana/grafana/pkg/services/user" @@ -391,6 +402,7 @@ func Test_NonCoreDataSourcesHaveWarning(t *testing.T) { Results: []cloudmigration.CloudMigrationResource{ { Name: "1 name", + ParentName: "1 parent name", Type: cloudmigration.DatasourceDataType, RefID: "1", // this will be core Status: cloudmigration.ItemStatusOK, @@ -398,6 +410,7 @@ func Test_NonCoreDataSourcesHaveWarning(t *testing.T) { }, { Name: "2 name", + ParentName: "", Type: cloudmigration.DatasourceDataType, RefID: "2", // this will be non-core Status: cloudmigration.ItemStatusOK, @@ -405,6 +418,7 @@ func Test_NonCoreDataSourcesHaveWarning(t *testing.T) { }, { Name: "3 name", + ParentName: "3 parent name", Type: cloudmigration.DatasourceDataType, RefID: "3", // this will be non-core with an error Status: cloudmigration.ItemStatusError, @@ -413,6 +427,7 @@ func Test_NonCoreDataSourcesHaveWarning(t *testing.T) { }, { Name: "4 name", + ParentName: "4 folder name", Type: cloudmigration.DatasourceDataType, RefID: "4", // this will be deleted Status: cloudmigration.ItemStatusOK, @@ -563,6 +578,122 @@ func TestReportEvent(t *testing.T) { require.Equal(t, 1, gmsMock.reportEventCalled) }) } +func TestGetFolderNamesForFolderUIDs(t *testing.T) { + s := setUpServiceTest(t, false).(*Service) + ctx, cancel := context.WithCancel(context.Background()) + t.Cleanup(cancel) + + user := &user.SignedInUser{OrgID: 1} + + testcases := []struct { + folders []*folder.Folder + folderUIDs []string + expectedFolderNames []string + }{ + { + folders: []*folder.Folder{ + {UID: "folderUID-A", Title: "Folder A", OrgID: 1}, + {UID: "folderUID-B", Title: "Folder B", OrgID: 1}, + }, + folderUIDs: []string{"folderUID-A", "folderUID-B"}, + expectedFolderNames: []string{"Folder A", "Folder B"}, + }, + { + folders: []*folder.Folder{ + {UID: "folderUID-A", Title: "Folder A", OrgID: 1}, + }, + folderUIDs: []string{"folderUID-A"}, + expectedFolderNames: []string{"Folder A"}, + }, + { + folders: []*folder.Folder{}, + folderUIDs: []string{"folderUID-A"}, + expectedFolderNames: []string{""}, + }, + { + folders: []*folder.Folder{ + {UID: "folderUID-A", Title: "Folder A", OrgID: 1}, + }, + folderUIDs: []string{"folderUID-A", "folderUID-B"}, + expectedFolderNames: []string{"Folder A", ""}, + }, + { + folders: []*folder.Folder{}, + folderUIDs: []string{""}, + expectedFolderNames: []string{""}, + }, + { + folders: []*folder.Folder{}, + folderUIDs: []string{}, + expectedFolderNames: []string{}, + }, + } + + for _, tc := range testcases { + s.folderService = &foldertest.FakeService{ExpectedFolders: tc.folders} + + folderUIDsToFolders, err := s.getFolderNamesForFolderUIDs(ctx, user, tc.folderUIDs) + require.NoError(t, err) + + resFolderNames := slices.Collect(maps.Values(folderUIDsToFolders)) + require.Len(t, resFolderNames, len(tc.expectedFolderNames)) + + require.ElementsMatch(t, resFolderNames, tc.expectedFolderNames) + } +} + +func TestGetParentNames(t *testing.T) { + s := setUpServiceTest(t, false).(*Service) + ctx, cancel := 
context.WithCancel(context.Background()) + t.Cleanup(cancel) + + user := &user.SignedInUser{OrgID: 1} + libraryElementFolderUID := "folderUID-A" + testcases := []struct { + fakeFolders []*folder.Folder + folders []folder.CreateFolderCommand + dashboards []dashboards.Dashboard + libraryElements []libraryElement + expectedDashParentNames []string + expectedFoldParentNames []string + }{ + { + fakeFolders: []*folder.Folder{ + {UID: "folderUID-A", Title: "Folder A", OrgID: 1, ParentUID: ""}, + {UID: "folderUID-B", Title: "Folder B", OrgID: 1, ParentUID: "folderUID-A"}, + }, + folders: []folder.CreateFolderCommand{ + {UID: "folderUID-C", Title: "Folder A", OrgID: 1, ParentUID: "folderUID-A"}, + }, + dashboards: []dashboards.Dashboard{ + {UID: "dashboardUID-0", OrgID: 1, FolderUID: ""}, + {UID: "dashboardUID-1", OrgID: 1, FolderUID: "folderUID-A"}, + {UID: "dashboardUID-2", OrgID: 1, FolderUID: "folderUID-B"}, + }, + libraryElements: []libraryElement{ + {UID: "libraryElementUID-0", FolderUID: &libraryElementFolderUID}, + {UID: "libraryElementUID-1"}, + }, + expectedDashParentNames: []string{"", "Folder A", "Folder B"}, + expectedFoldParentNames: []string{"Folder A"}, + }, + } + + for _, tc := range testcases { + s.folderService = &foldertest.FakeService{ExpectedFolders: tc.fakeFolders} + + dataUIDsToParentNamesByType, err := s.getParentNames(ctx, user, tc.dashboards, tc.folders, tc.libraryElements) + require.NoError(t, err) + + resDashParentNames := slices.Collect(maps.Values(dataUIDsToParentNamesByType[cloudmigration.DashboardDataType])) + require.Len(t, resDashParentNames, len(tc.expectedDashParentNames)) + require.ElementsMatch(t, resDashParentNames, tc.expectedDashParentNames) + + resFoldParentNames := slices.Collect(maps.Values(dataUIDsToParentNamesByType[cloudmigration.FolderDataType])) + require.Len(t, resFoldParentNames, len(tc.expectedFoldParentNames)) + require.ElementsMatch(t, resFoldParentNames, tc.expectedFoldParentNames) + } +} func TestGetLibraryElementsCommands(t *testing.T) { s := setUpServiceTest(t, false).(*Service) @@ -642,8 +773,29 @@ func setUpServiceTest(t *testing.T, withDashboardMock bool) cloudmigration.Servi }, } + featureToggles := featuremgmt.WithFeatures(featuremgmt.FlagOnPremToCloudMigrations, featuremgmt.FlagDashboardRestore) + + kvStore := kvstore.ProvideService(sqlStore) + + bus := bus.ProvideBus(tracer) + fakeAccessControl := actest.FakeAccessControl{} + fakeAccessControlService := actest.FakeService{} + alertMetrics := metrics.NewNGAlert(prometheus.NewRegistry()) + + ruleStore, err := ngalertstore.ProvideDBStore(cfg, featureToggles, sqlStore, mockFolder, dashboardService, fakeAccessControl) + require.NoError(t, err) + + ng, err := ngalert.ProvideService( + cfg, featureToggles, nil, nil, rr, sqlStore, kvStore, nil, nil, quotatest.New(false, nil), + secretsService, nil, alertMetrics, mockFolder, fakeAccessControl, dashboardService, nil, bus, fakeAccessControlService, + annotationstest.NewFakeAnnotationsRepo(), &pluginstore.FakePluginStore{}, tracer, ruleStore, + httpclient.NewProvider(), ngalertfakes.NewFakeReceiverPermissionsService(), + ) + require.NoError(t, err) + s, err := ProvideService( cfg, + httpclient.NewProvider(), featuremgmt.WithFeatures( featuremgmt.FlagOnPremToCloudMigrations, featuremgmt.FlagDashboardRestore), @@ -659,6 +811,7 @@ func setUpServiceTest(t *testing.T, withDashboardMock bool) cloudmigration.Servi &pluginstore.FakePluginStore{}, kvstore.ProvideService(sqlStore), &libraryelementsfake.LibraryElementService{}, + ng, ) require.NoError(t, 
err) diff --git a/pkg/services/cloudmigration/cloudmigrationimpl/fake/cloudmigration_fake.go b/pkg/services/cloudmigration/cloudmigrationimpl/fake/cloudmigration_fake.go index 50ccfff8ef9..8b5f224f710 100644 --- a/pkg/services/cloudmigration/cloudmigrationimpl/fake/cloudmigration_fake.go +++ b/pkg/services/cloudmigration/cloudmigrationimpl/fake/cloudmigration_fake.go @@ -98,10 +98,28 @@ func (m FakeServiceImpl) GetSnapshot(ctx context.Context, query cloudmigration.G if m.ReturnError { return nil, fmt.Errorf("mock error") } + cloudMigrationResources := []cloudmigration.CloudMigrationResource{ + { + Type: cloudmigration.DashboardDataType, + RefID: "123", + Status: cloudmigration.ItemStatusPending, + Name: "dashboard name", + ParentName: "dashboard parent name", + }, + { + Type: cloudmigration.DatasourceDataType, + RefID: "456", + Status: cloudmigration.ItemStatusOK, + Name: "datasource name", + ParentName: "dashboard parent name", + }, + } + return &cloudmigration.CloudMigrationSnapshot{ UID: "fake_uid", SessionUID: "fake_uid", Status: cloudmigration.SnapshotStatusCreating, + Resources: cloudMigrationResources, }, nil } diff --git a/pkg/services/cloudmigration/cloudmigrationimpl/snapshot_mgmt.go b/pkg/services/cloudmigration/cloudmigrationimpl/snapshot_mgmt.go index ece57dc92ed..5c9df830110 100644 --- a/pkg/services/cloudmigration/cloudmigrationimpl/snapshot_mgmt.go +++ b/pkg/services/cloudmigration/cloudmigrationimpl/snapshot_mgmt.go @@ -23,9 +23,21 @@ import ( "github.com/grafana/grafana/pkg/services/user" "github.com/grafana/grafana/pkg/util/retryer" "golang.org/x/crypto/nacl/box" + + "go.opentelemetry.io/otel/codes" ) +var currentMigrationTypes = []cloudmigration.MigrateDataType{ + cloudmigration.DatasourceDataType, + cloudmigration.FolderDataType, + cloudmigration.LibraryElementDataType, + cloudmigration.DashboardDataType, +} + func (s *Service) getMigrationDataJSON(ctx context.Context, signedInUser *user.SignedInUser) (*cloudmigration.MigrateDataRequest, error) { + ctx, span := s.tracer.Start(ctx, "CloudMigrationService.getMigrationDataJSON") + defer span.End() + // Data sources dataSources, err := s.getDataSourceCommands(ctx) if err != nil { @@ -95,14 +107,24 @@ func (s *Service) getMigrationDataJSON(ctx context.Context, signedInUser *user.S }) } + // Obtain the names of parent elements for Dashboard and Folders data types + parentNamesByType, err := s.getParentNames(ctx, signedInUser, dashs, folders, libraryElements) + if err != nil { + s.log.Error("Failed to get parent folder names", "err", err) + } + migrationData := &cloudmigration.MigrateDataRequest{ - Items: migrationDataSlice, + Items: migrationDataSlice, + ItemParentNames: parentNamesByType, } return migrationData, nil } func (s *Service) getDataSourceCommands(ctx context.Context) ([]datasources.AddDataSourceCommand, error) { + ctx, span := s.tracer.Start(ctx, "CloudMigrationService.getDataSourceCommands") + defer span.End() + dataSources, err := s.dsService.GetAllDataSources(ctx, &datasources.GetAllDataSourcesQuery{}) if err != nil { s.log.Error("Failed to get all datasources", "err", err) @@ -141,6 +163,9 @@ func (s *Service) getDataSourceCommands(ctx context.Context) ([]datasources.AddD // getDashboardAndFolderCommands returns the json payloads required by the dashboard and folder creation APIs func (s *Service) getDashboardAndFolderCommands(ctx context.Context, signedInUser *user.SignedInUser) ([]dashboards.Dashboard, []folder.CreateFolderCommand, error) { + ctx, span := s.tracer.Start(ctx, 
"CloudMigrationService.getDashboardAndFolderCommands") + defer span.End() + dashs, err := s.dashboardService.GetAllDashboards(ctx) if err != nil { return nil, nil, err @@ -196,6 +221,9 @@ type libraryElement struct { // getLibraryElementsCommands returns the json payloads required by the library elements creation API func (s *Service) getLibraryElementsCommands(ctx context.Context, signedInUser *user.SignedInUser) ([]libraryElement, error) { + ctx, span := s.tracer.Start(ctx, "CloudMigrationService.getLibraryElementsCommands") + defer span.End() + const perPage = 100 cmds := make([]libraryElement, 0) @@ -242,6 +270,9 @@ func (s *Service) getLibraryElementsCommands(ctx context.Context, signedInUser * // asynchronous process for writing the snapshot to the filesystem and updating the snapshot status func (s *Service) buildSnapshot(ctx context.Context, signedInUser *user.SignedInUser, maxItemsPerPartition uint32, metadata []byte, snapshotMeta cloudmigration.CloudMigrationSnapshot) error { + ctx, span := s.tracer.Start(ctx, "CloudMigrationService.buildSnapshot") + defer span.End() + // TODO -- make sure we can only build one snapshot at a time s.buildSnapshotMutex.Lock() defer s.buildSnapshotMutex.Unlock() @@ -289,20 +320,21 @@ func (s *Service) buildSnapshot(ctx context.Context, signedInUser *user.SignedIn Data: item.Data, }) + parentName := "" + if _, exists := migrationData.ItemParentNames[item.Type]; exists { + parentName = migrationData.ItemParentNames[item.Type][item.RefID] + } + localSnapshotResource[i] = cloudmigration.CloudMigrationResource{ - Name: item.Name, - Type: item.Type, - RefID: item.RefID, - Status: cloudmigration.ItemStatusPending, + Name: item.Name, + Type: item.Type, + RefID: item.RefID, + Status: cloudmigration.ItemStatusPending, + ParentName: parentName, } } - for _, resourceType := range []cloudmigration.MigrateDataType{ - cloudmigration.DatasourceDataType, - cloudmigration.FolderDataType, - cloudmigration.LibraryElementDataType, - cloudmigration.DashboardDataType, - } { + for _, resourceType := range currentMigrationTypes { for chunk := range slices.Chunk(resourcesGroupedByType[resourceType], int(maxItemsPerPartition)) { if err := snapshotWriter.Write(string(resourceType), chunk); err != nil { return fmt.Errorf("writing resources to snapshot writer: resourceType=%s %w", resourceType, err) @@ -339,6 +371,9 @@ func (s *Service) buildSnapshot(ctx context.Context, signedInUser *user.SignedIn // asynchronous process for and updating the snapshot status func (s *Service) uploadSnapshot(ctx context.Context, session *cloudmigration.CloudMigrationSession, snapshotMeta *cloudmigration.CloudMigrationSnapshot, uploadUrl string) (err error) { + ctx, span := s.tracer.Start(ctx, "CloudMigrationService.uploadSnapshot") + defer span.End() + // TODO -- make sure we can only upload one snapshot at a time s.buildSnapshotMutex.Lock() defer s.buildSnapshotMutex.Unlock() @@ -361,37 +396,61 @@ func (s *Service) uploadSnapshot(ctx context.Context, session *cloudmigration.Cl } }() + _, readIndexSpan := s.tracer.Start(ctx, "CloudMigrationService.uploadSnapshot.readIndex") index, err := snapshot.ReadIndex(indexFile) if err != nil { + readIndexSpan.SetStatus(codes.Error, "reading index from file") + readIndexSpan.RecordError(err) + readIndexSpan.End() + return fmt.Errorf("reading index from file: %w", err) } + readIndexSpan.End() s.log.Debug(fmt.Sprintf("uploadSnapshot: read index file in %d ms", time.Since(start).Milliseconds())) + uploadCtx, uploadSpan := s.tracer.Start(ctx, 
"CloudMigrationService.uploadSnapshot.uploadDataFiles") // Upload the data files. for _, fileNames := range index.Items { for _, fileName := range fileNames { filePath := filepath.Join(snapshotMeta.LocalDir, fileName) key := fmt.Sprintf("%d/snapshots/%s/%s", session.StackID, snapshotMeta.GMSSnapshotUID, fileName) - if err := s.uploadUsingPresignedURL(ctx, uploadUrl, key, filePath); err != nil { + if err := s.uploadUsingPresignedURL(uploadCtx, uploadUrl, key, filePath); err != nil { + uploadSpan.SetStatus(codes.Error, "uploading snapshot data file using presigned url") + uploadSpan.RecordError(err) + uploadSpan.End() + return fmt.Errorf("uploading snapshot file using presigned url: %w", err) } s.log.Debug(fmt.Sprintf("uploadSnapshot: uploaded %s in %d ms", fileName, time.Since(start).Milliseconds())) } } + uploadSpan.End() s.log.Debug(fmt.Sprintf("uploadSnapshot: uploaded all data files in %d ms", time.Since(start).Milliseconds())) + uploadCtx, uploadSpan = s.tracer.Start(ctx, "CloudMigrationService.uploadSnapshot.uploadIndex") + // Upload the index file. Must be done after uploading the data files. key := fmt.Sprintf("%d/snapshots/%s/%s", session.StackID, snapshotMeta.GMSSnapshotUID, "index.json") if _, err := indexFile.Seek(0, 0); err != nil { + uploadSpan.SetStatus(codes.Error, "seeking to beginning of index file") + uploadSpan.RecordError(err) + uploadSpan.End() + return fmt.Errorf("seeking to beginning of index file: %w", err) } - if err := s.objectStorage.PresignedURLUpload(ctx, uploadUrl, key, indexFile); err != nil { + if err := s.objectStorage.PresignedURLUpload(uploadCtx, uploadUrl, key, indexFile); err != nil { + uploadSpan.SetStatus(codes.Error, "uploading index file using presigned url") + uploadSpan.RecordError(err) + uploadSpan.End() + return fmt.Errorf("uploading file using presigned url: %w", err) } + uploadSpan.End() + s.log.Debug(fmt.Sprintf("uploadSnapshot: uploaded index file in %d ms", time.Since(start).Milliseconds())) s.log.Info("successfully uploaded snapshot", "snapshotUid", snapshotMeta.UID, "cloud_snapshotUid", snapshotMeta.GMSSnapshotUID) @@ -408,6 +467,9 @@ func (s *Service) uploadSnapshot(ctx context.Context, session *cloudmigration.Cl } func (s *Service) uploadUsingPresignedURL(ctx context.Context, uploadURL, key string, filePath string) (err error) { + ctx, span := s.tracer.Start(ctx, "CloudMigrationService.uploadUsingPresignedURL") + defer span.End() + // The directory that contains the file can set in the configuration, therefore the directory can be any directory. 
// nolint:gosec file, err := os.Open(filePath) @@ -486,3 +548,74 @@ func sortFolders(input []folder.CreateFolderCommand) []folder.CreateFolderComman return input } + +// getFolderNamesForFolderUIDs queries the folders service to obtain folder names for a list of folderUIDs +func (s *Service) getFolderNamesForFolderUIDs(ctx context.Context, signedInUser *user.SignedInUser, folderUIDs []string) (map[string](string), error) { + folders, err := s.folderService.GetFolders(ctx, folder.GetFoldersQuery{ + UIDs: folderUIDs, + SignedInUser: signedInUser, + WithFullpathUIDs: true, + }) + if err != nil { + s.log.Error("Failed to obtain folders from folder UIDs", "err", err) + return nil, err + } + + folderUIDsToNames := make(map[string](string), len(folderUIDs)) + for _, folderUID := range folderUIDs { + folderUIDsToNames[folderUID] = "" + } + for _, f := range folders { + folderUIDsToNames[f.UID] = f.Title + } + return folderUIDsToNames, nil +} + +// getParentNames finds the parent names for resources and returns a map of data type: {data UID : parentName} +// for dashboards, folders and library elements - the parent is the parent folder +func (s *Service) getParentNames(ctx context.Context, signedInUser *user.SignedInUser, dashboards []dashboards.Dashboard, folders []folder.CreateFolderCommand, libraryElements []libraryElement) (map[cloudmigration.MigrateDataType]map[string](string), error) { + parentNamesByType := make(map[cloudmigration.MigrateDataType]map[string](string)) + for _, dataType := range currentMigrationTypes { + parentNamesByType[dataType] = make(map[string]string) + } + + // Obtain list of unique folderUIDs + parentFolderUIDsSet := make(map[string]struct{}, len(dashboards)+len(folders)+len(libraryElements)) + for _, dashboard := range dashboards { + parentFolderUIDsSet[dashboard.FolderUID] = struct{}{} + } + for _, f := range folders { + parentFolderUIDsSet[f.ParentUID] = struct{}{} + } + for _, libraryElement := range libraryElements { + if libraryElement.FolderUID != nil { + parentFolderUIDsSet[*libraryElement.FolderUID] = struct{}{} + } + } + parentFolderUIDsSlice := make([]string, 0, len(parentFolderUIDsSet)) + for parentFolderUID := range parentFolderUIDsSet { + parentFolderUIDsSlice = append(parentFolderUIDsSlice, parentFolderUID) + } + + // Obtain folder names given a list of folderUIDs + foldersUIDsToFolderName, err := s.getFolderNamesForFolderUIDs(ctx, signedInUser, parentFolderUIDsSlice) + if err != nil { + s.log.Error("Failed to get parent folder names from folder UIDs", "err", err) + return parentNamesByType, err + } + + // Prepare map of {data type: {data UID : parentName}} + for _, dashboard := range dashboards { + parentNamesByType[cloudmigration.DashboardDataType][dashboard.UID] = foldersUIDsToFolderName[dashboard.FolderUID] + } + for _, f := range folders { + parentNamesByType[cloudmigration.FolderDataType][f.UID] = foldersUIDsToFolderName[f.ParentUID] + } + for _, libraryElement := range libraryElements { + if libraryElement.FolderUID != nil { + parentNamesByType[cloudmigration.LibraryElementDataType][libraryElement.UID] = foldersUIDsToFolderName[*libraryElement.FolderUID] + } + } + + return parentNamesByType, err +} diff --git a/pkg/services/cloudmigration/gmsclient/gms_client.go b/pkg/services/cloudmigration/gmsclient/gms_client.go index 9cbe60d97e3..b041e4714af 100644 --- a/pkg/services/cloudmigration/gmsclient/gms_client.go +++ b/pkg/services/cloudmigration/gmsclient/gms_client.go @@ -18,19 +18,21 @@ import ( ) // NewGMSClient returns an implementation of Client 
that queries GrafanaMigrationService -func NewGMSClient(cfg *setting.Cfg) (Client, error) { +func NewGMSClient(cfg *setting.Cfg, httpClient *http.Client) (Client, error) { if cfg.CloudMigration.GMSDomain == "" { return nil, fmt.Errorf("missing GMS domain") } return &gmsClientImpl{ - cfg: cfg, - log: log.New(logPrefix), + cfg: cfg, + log: log.New(logPrefix), + httpClient: httpClient, }, nil } type gmsClientImpl struct { - cfg *setting.Cfg - log *log.ConcreteLogger + cfg *setting.Cfg + log *log.ConcreteLogger + httpClient *http.Client getStatusMux sync.Mutex getStatusLastQueried time.Time @@ -40,8 +42,11 @@ func (c *gmsClientImpl) ValidateKey(ctx context.Context, cm cloudmigration.Cloud // TODO: there is a lot of boilerplate code in these methods, we should consolidate them when we have a gardening period path := fmt.Sprintf("%s/api/v1/validate-key", c.buildBasePath(cm.ClusterSlug)) + ctx, cancel := context.WithTimeout(ctx, c.cfg.CloudMigration.GMSValidateKeyTimeout) + defer cancel() + // validation is an empty POST to GMS with the authorization header included - req, err := http.NewRequest("POST", path, bytes.NewReader(nil)) + req, err := http.NewRequestWithContext(ctx, "POST", path, bytes.NewReader(nil)) if err != nil { c.log.Error("error creating http request for token validation", "err", err.Error()) return fmt.Errorf("http request error: %w", err) @@ -49,10 +54,7 @@ func (c *gmsClientImpl) ValidateKey(ctx context.Context, cm cloudmigration.Cloud req.Header.Set("Content-Type", "application/json") req.Header.Set("Authorization", fmt.Sprintf("Bearer %d:%s", cm.StackID, cm.AuthToken)) - client := &http.Client{ - Timeout: c.cfg.CloudMigration.GMSValidateKeyTimeout, - } - resp, err := client.Do(req) + resp, err := c.httpClient.Do(req) if err != nil { c.log.Error("error sending http request for token validation", "err", err.Error()) return fmt.Errorf("http request error: %w", err) @@ -74,8 +76,11 @@ func (c *gmsClientImpl) ValidateKey(ctx context.Context, cm cloudmigration.Cloud func (c *gmsClientImpl) StartSnapshot(ctx context.Context, session cloudmigration.CloudMigrationSession) (out *cloudmigration.StartSnapshotResponse, err error) { path := fmt.Sprintf("%s/api/v1/start-snapshot", c.buildBasePath(session.ClusterSlug)) + ctx, cancel := context.WithTimeout(ctx, c.cfg.CloudMigration.GMSStartSnapshotTimeout) + defer cancel() + // Send the request to cms with the associated auth token - req, err := http.NewRequest(http.MethodPost, path, nil) + req, err := http.NewRequestWithContext(ctx, http.MethodPost, path, nil) if err != nil { c.log.Error("error creating http request to start snapshot", "err", err.Error()) return nil, fmt.Errorf("http request error: %w", err) @@ -83,10 +88,7 @@ func (c *gmsClientImpl) StartSnapshot(ctx context.Context, session cloudmigratio req.Header.Set("Content-Type", "application/json") req.Header.Set("Authorization", fmt.Sprintf("Bearer %d:%s", session.StackID, session.AuthToken)) - client := &http.Client{ - Timeout: c.cfg.CloudMigration.GMSStartSnapshotTimeout, - } - resp, err := client.Do(req) + resp, err := c.httpClient.Do(req) if err != nil { c.log.Error("error sending http request to start snapshot", "err", err.Error()) return nil, fmt.Errorf("http request error: %w", err) @@ -119,8 +121,11 @@ func (c *gmsClientImpl) GetSnapshotStatus(ctx context.Context, session cloudmigr path := fmt.Sprintf("%s/api/v1/snapshots/%s/status?offset=%d", c.buildBasePath(session.ClusterSlug), snapshot.GMSSnapshotUID, offset) + ctx, cancel := context.WithTimeout(ctx, 
c.cfg.CloudMigration.GMSGetSnapshotStatusTimeout) + defer cancel() + // Send the request to gms with the associated auth token - req, err := http.NewRequest(http.MethodGet, path, nil) + req, err := http.NewRequestWithContext(ctx, http.MethodGet, path, nil) if err != nil { c.log.Error("error creating http request to get snapshot status", "err", err.Error()) return nil, fmt.Errorf("http request error: %w", err) @@ -128,11 +133,8 @@ func (c *gmsClientImpl) GetSnapshotStatus(ctx context.Context, session cloudmigr req.Header.Set("Content-Type", "application/json") req.Header.Set("Authorization", fmt.Sprintf("Bearer %d:%s", session.StackID, session.AuthToken)) - client := &http.Client{ - Timeout: c.cfg.CloudMigration.GMSGetSnapshotStatusTimeout, - } c.getStatusLastQueried = time.Now() - resp, err := client.Do(req) + resp, err := c.httpClient.Do(req) if err != nil { c.log.Error("error sending http request to get snapshot status", "err", err.Error()) return nil, fmt.Errorf("http request error: %w", err) @@ -163,8 +165,11 @@ func (c *gmsClientImpl) GetSnapshotStatus(ctx context.Context, session cloudmigr func (c *gmsClientImpl) CreatePresignedUploadUrl(ctx context.Context, session cloudmigration.CloudMigrationSession, snapshot cloudmigration.CloudMigrationSnapshot) (string, error) { path := fmt.Sprintf("%s/api/v1/snapshots/%s/create-upload-url", c.buildBasePath(session.ClusterSlug), snapshot.GMSSnapshotUID) + ctx, cancel := context.WithTimeout(ctx, c.cfg.CloudMigration.GMSCreateUploadUrlTimeout) + defer cancel() + // Send the request to gms with the associated auth token - req, err := http.NewRequest(http.MethodPost, path, nil) + req, err := http.NewRequestWithContext(ctx, http.MethodPost, path, nil) if err != nil { c.log.Error("error creating http request to create upload url", "err", err.Error()) return "", fmt.Errorf("http request error: %w", err) @@ -172,10 +177,7 @@ func (c *gmsClientImpl) CreatePresignedUploadUrl(ctx context.Context, session cl req.Header.Set("Content-Type", "application/json") req.Header.Set("Authorization", fmt.Sprintf("Bearer %d:%s", session.StackID, session.AuthToken)) - client := &http.Client{ - Timeout: c.cfg.CloudMigration.GMSCreateUploadUrlTimeout, - } - resp, err := client.Do(req) + resp, err := c.httpClient.Do(req) if err != nil { c.log.Error("error sending http request to create an upload url", "err", err.Error()) return "", fmt.Errorf("http request error: %w", err) @@ -208,6 +210,9 @@ func (c *gmsClientImpl) ReportEvent(ctx context.Context, session cloudmigration. return } + ctx, cancel := context.WithTimeout(ctx, c.cfg.CloudMigration.GMSReportEventTimeout) + defer cancel() + path := fmt.Sprintf("%s/api/v1/events", c.buildBasePath(session.ClusterSlug)) var buf bytes.Buffer @@ -216,7 +221,7 @@ func (c *gmsClientImpl) ReportEvent(ctx context.Context, session cloudmigration. return } // Send the request to gms with the associated auth token - req, err := http.NewRequest(http.MethodPost, path, &buf) + req, err := http.NewRequestWithContext(ctx, http.MethodPost, path, &buf) if err != nil { c.log.Error("error creating http request to report event", "err", err.Error()) return @@ -224,10 +229,7 @@ func (c *gmsClientImpl) ReportEvent(ctx context.Context, session cloudmigration. 
req.Header.Set("Content-Type", "application/json") req.Header.Set("Authorization", fmt.Sprintf("Bearer %d:%s", session.StackID, session.AuthToken)) - client := &http.Client{ - Timeout: c.cfg.CloudMigration.GMSReportEventTimeout, - } - resp, err := client.Do(req) + resp, err := c.httpClient.Do(req) if err != nil { c.log.Error("error sending http request for report event", "err", err.Error()) return diff --git a/pkg/services/cloudmigration/gmsclient/gms_client_test.go b/pkg/services/cloudmigration/gmsclient/gms_client_test.go index a2eb92d76cb..c8e769ecbc5 100644 --- a/pkg/services/cloudmigration/gmsclient/gms_client_test.go +++ b/pkg/services/cloudmigration/gmsclient/gms_client_test.go @@ -1,6 +1,7 @@ package gmsclient import ( + "net/http" "testing" "github.com/grafana/grafana/pkg/setting" @@ -16,7 +17,9 @@ func Test_buildBasePath(t *testing.T) { CloudMigration: setting.CloudMigrationSettings{ GMSDomain: "", }, - }) + }, + http.DefaultClient, + ) require.Error(t, err) // Domain is required @@ -24,7 +27,9 @@ func Test_buildBasePath(t *testing.T) { CloudMigration: setting.CloudMigrationSettings{ GMSDomain: "non-empty", }, - }) + }, + http.DefaultClient, + ) require.NoError(t, err) client := c.(*gmsClientImpl) diff --git a/pkg/services/cloudmigration/model.go b/pkg/services/cloudmigration/model.go index 786df738ca5..d097c777826 100644 --- a/pkg/services/cloudmigration/model.go +++ b/pkg/services/cloudmigration/model.go @@ -75,15 +75,21 @@ type CloudMigrationResource struct { Error string `xorm:"error_string" json:"error"` SnapshotUID string `xorm:"snapshot_uid"` + ParentName string `xorm:"parent_name" json:"parentName"` } type MigrateDataType string const ( - DashboardDataType MigrateDataType = "DASHBOARD" - DatasourceDataType MigrateDataType = "DATASOURCE" - FolderDataType MigrateDataType = "FOLDER" - LibraryElementDataType MigrateDataType = "LIBRARY_ELEMENT" + DashboardDataType MigrateDataType = "DASHBOARD" + DatasourceDataType MigrateDataType = "DATASOURCE" + FolderDataType MigrateDataType = "FOLDER" + LibraryElementDataType MigrateDataType = "LIBRARY_ELEMENT" + AlertRuleType MigrateDataType = "ALERT_RULE" + ContactPointType MigrateDataType = "CONTACT_POINT" + NotificationPolicyType MigrateDataType = "NOTIFICATION_POLICY" + NotificationTemplateType MigrateDataType = "NOTIFICATION_TEMPLATE" + MuteTimingType MigrateDataType = "MUTE_TIMING" ) type ItemStatus string @@ -185,7 +191,8 @@ type Base64HGInstance struct { // GMS domain structs type MigrateDataRequest struct { - Items []MigrateDataRequestItem + Items []MigrateDataRequestItem + ItemParentNames map[MigrateDataType]map[string](string) } type MigrateDataRequestItem struct { diff --git a/pkg/services/cloudmigration/objectstorage/s3.go b/pkg/services/cloudmigration/objectstorage/s3.go index 32bfecfd4f5..2ae4a9fbfd6 100644 --- a/pkg/services/cloudmigration/objectstorage/s3.go +++ b/pkg/services/cloudmigration/objectstorage/s3.go @@ -9,15 +9,26 @@ import ( "mime/multipart" "net/http" "net/url" + + "github.com/grafana/grafana/pkg/infra/tracing" + + "go.opentelemetry.io/otel/attribute" ) -type S3 struct{} +type S3 struct { + httpClient *http.Client + tracer tracing.Tracer +} -func NewS3() *S3 { - return &S3{} +func NewS3(httpClient *http.Client, tracer tracing.Tracer) *S3 { + return &S3{httpClient: httpClient, tracer: tracer} } func (s3 *S3) PresignedURLUpload(ctx context.Context, presignedURL, key string, reader io.Reader) (err error) { + ctx, span := s3.tracer.Start(ctx, "objectstorage.S3.PresignedURLUpload") + 
span.SetAttributes(attribute.String("key", key)) + defer span.End() + url, err := url.Parse(presignedURL) if err != nil { return fmt.Errorf("parsing presigned url") @@ -68,13 +79,13 @@ func (s3 *S3) PresignedURLUpload(ctx context.Context, presignedURL, key string, endpoint := fmt.Sprintf("%s://%s%s", url.Scheme, url.Host, url.Path) - request, err := http.NewRequest(http.MethodPost, endpoint, buffer) + request, err := http.NewRequestWithContext(ctx, http.MethodPost, endpoint, buffer) if err != nil { return fmt.Errorf("creating http request: %w", err) } request.Header.Set("Content-Type", writer.FormDataContentType()) - httpClient := http.Client{} - response, err := httpClient.Do(request) + + response, err := s3.httpClient.Do(request) if err != nil { return fmt.Errorf("sending http request: %w", err) } diff --git a/pkg/services/cloudmigration/slicesext/slicesext.go b/pkg/services/cloudmigration/slicesext/slicesext.go new file mode 100644 index 00000000000..276858d5418 --- /dev/null +++ b/pkg/services/cloudmigration/slicesext/slicesext.go @@ -0,0 +1,11 @@ +package slicesext + +func Map[T any, U any](xs []T, f func(T) U) []U { + out := make([]U, 0, len(xs)) + + for _, x := range xs { + out = append(out, f(x)) + } + + return out +} diff --git a/pkg/services/cloudmigration/slicesext/slicesext_test.go b/pkg/services/cloudmigration/slicesext/slicesext_test.go new file mode 100644 index 00000000000..8e7080be9c1 --- /dev/null +++ b/pkg/services/cloudmigration/slicesext/slicesext_test.go @@ -0,0 +1,36 @@ +package slicesext_test + +import ( + "strconv" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/grafana/grafana/pkg/services/cloudmigration/slicesext" +) + +func TestMap(t *testing.T) { + t.Parallel() + + t.Run("mapping a nil slice does nothing and returns an empty slice", func(t *testing.T) { + t.Parallel() + + require.Empty(t, slicesext.Map[any, any](nil, nil)) + }) + + t.Run("mapping a non-nil slice with a nil function panics", func(t *testing.T) { + t.Parallel() + + require.Panics(t, func() { slicesext.Map[int, any]([]int{1, 2, 3}, nil) }) + }) + + t.Run("mapping a non-nil slice with a non-nil function returns the mapped slice", func(t *testing.T) { + t.Parallel() + + original := []int{1, 2, 3} + expected := []string{"1", "2", "3"} + fn := func(i int) string { return strconv.Itoa(i) } + + require.ElementsMatch(t, expected, slicesext.Map(original, fn)) + }) +} diff --git a/pkg/services/datasources/service/datasource.go b/pkg/services/datasources/service/datasource.go index ef51ebef430..495861cc941 100644 --- a/pkg/services/datasources/service/datasource.go +++ b/pkg/services/datasources/service/datasource.go @@ -255,7 +255,7 @@ func (s *Service) AddDataSource(ctx context.Context, cmd *datasources.AddDataSou } var dataSource *datasources.DataSource - return dataSource, s.db.InTransaction(ctx, func(ctx context.Context) error { + err = s.db.InTransaction(ctx, func(ctx context.Context) error { var err error cmd.EncryptedSecureJsonData = make(map[string][]byte) @@ -293,12 +293,18 @@ func (s *Service) AddDataSource(ctx context.Context, cmd *datasources.AddDataSou if cmd.UserID != 0 { permissions = append(permissions, accesscontrol.SetResourcePermissionCommand{UserID: cmd.UserID, Permission: "Admin"}) } - _, err = s.permissionsService.SetPermissions(ctx, cmd.OrgID, dataSource.UID, permissions...) 
- return err + if _, err = s.permissionsService.SetPermissions(ctx, cmd.OrgID, dataSource.UID, permissions...); err != nil { + return err + } } return nil }) + if err != nil { + return nil, err + } + + return dataSource, nil } // This will validate the instance settings and return a version that is safe to be saved diff --git a/pkg/services/datasources/service/datasource_test.go b/pkg/services/datasources/service/datasource_test.go index 3433abdd2e2..c13456d4cf0 100644 --- a/pkg/services/datasources/service/datasource_test.go +++ b/pkg/services/datasources/service/datasource_test.go @@ -3,6 +3,7 @@ package service import ( "context" "encoding/json" + "errors" "fmt" "io" "net/http" @@ -104,6 +105,27 @@ func TestService_AddDataSource(t *testing.T) { require.EqualError(t, err, "[datasource.urlInvalid] max length is 255") }) + t.Run("should fail if the datasource managed permissions fail", func(t *testing.T) { + dsService := initDSService(t) + enableRBACManagedPermissions(t, dsService.cfg) + dsService.permissionsService = &actest.FakePermissionsService{ + ExpectedErr: errors.New("failed to set datasource permissions"), + } + dsService.pluginStore = &pluginstore.FakePluginStore{ + PluginList: []pluginstore.Plugin{}, + } + + cmd := &datasources.AddDataSourceCommand{ + OrgID: 1, + Type: datasources.DS_TESTDATA, + Name: "test", + } + + ds, err := dsService.AddDataSource(context.Background(), cmd) + assert.Nil(t, ds) + assert.ErrorContains(t, err, "failed to set datasource permissions") + }) + t.Run("if a plugin has an API version defined (EXPERIMENTAL)", func(t *testing.T) { t.Run("should successfully run admission hooks", func(t *testing.T) { dsService := initDSService(t) @@ -580,11 +602,8 @@ func TestService_DeleteDataSource(t *testing.T) { permissionSvc := acmock.NewMockedPermissionsService() permissionSvc.On("SetPermissions", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return([]accesscontrol.ResourcePermission{}, nil).Once() permissionSvc.On("DeleteResourcePermissions", mock.Anything, mock.Anything, mock.Anything).Return(nil).Once() - - f := ini.Empty() - f.Section("rbac").Key("resources_with_managed_permissions_on_creation").SetValue("datasource") - cfg, err := setting.NewCfgFromINIFile(f) - require.NoError(t, err) + cfg := &setting.Cfg{} + enableRBACManagedPermissions(t, cfg) dsService, err := ProvideService(sqlStore, secretsService, secretsStore, cfg, featuremgmt.WithFeatures(), acmock.New(), permissionSvc, quotaService, &pluginstore.FakePluginStore{}, &pluginfakes.FakePluginClient{}, nil) require.NoError(t, err) @@ -1521,6 +1540,15 @@ func initDSService(t *testing.T) *Service { return dsService } +func enableRBACManagedPermissions(t testing.TB, cfg *setting.Cfg) { + t.Helper() + f := ini.Empty() + f.Section("rbac").Key("resources_with_managed_permissions_on_creation").SetValue("datasource") + tempCfg, err := setting.NewCfgFromINIFile(f) + require.NoError(t, err) + cfg.RBAC = tempCfg.RBAC +} + const caCert string = `-----BEGIN CERTIFICATE----- MIIDATCCAemgAwIBAgIJAMQ5hC3CPDTeMA0GCSqGSIb3DQEBCwUAMBcxFTATBgNV BAMMDGNhLWs4cy1zdGhsbTAeFw0xNjEwMjcwODQyMjdaFw00NDAzMTQwODQyMjda diff --git a/pkg/services/featuremgmt/registry.go b/pkg/services/featuremgmt/registry.go index 60e78e82db3..ba68086a0af 100644 --- a/pkg/services/featuremgmt/registry.go +++ b/pkg/services/featuremgmt/registry.go @@ -430,11 +430,10 @@ var ( Owner: grafanaObservabilityLogsSquad, }, { - Name: "pluginsFrontendSandbox", - Description: "Enables the plugins frontend sandbox", - Stage: FeatureStageExperimental, - 
FrontendOnly: true, - Owner: grafanaPluginsPlatformSquad, + Name: "pluginsFrontendSandbox", + Description: "Enables the plugins frontend sandbox", + Stage: FeatureStageExperimental, + Owner: grafanaPluginsPlatformSquad, }, { Name: "frontendSandboxMonitorOnly", @@ -675,9 +674,8 @@ var ( Name: "externalServiceAccounts", Description: "Automatic service account and token setup for plugins", HideFromAdminPage: true, - Stage: FeatureStageGeneralAvailability, + Stage: FeatureStagePublicPreview, Owner: identityAccessTeam, - Expression: "true", // enabled by default }, { Name: "panelMonitoring", @@ -1063,6 +1061,12 @@ var ( Owner: grafanaOperatorExperienceSquad, Expression: "false", }, + { + Name: "onPremToCloudMigrationsAlerts", + Description: "Enables the migration of alerts and its child resources to your Grafana Cloud stack. Requires `onPremToCloudMigrations` to be enabled in conjunction.", + Stage: FeatureStageExperimental, + Owner: grafanaOperatorExperienceSquad, + }, { Name: "alertingSaveStatePeriodic", Description: "Writes the state periodically to the database, asynchronous to rule evaluation", @@ -1324,12 +1328,12 @@ var ( Expression: "true", }, { - Name: "ssoSettingsLDAP", - Description: "Use the new SSO Settings API to configure LDAP", - Stage: FeatureStageExperimental, - Owner: identityAccessTeam, - HideFromDocs: true, - HideFromAdminPage: true, + Name: "ssoSettingsLDAP", + Description: "Use the new SSO Settings API to configure LDAP", + Stage: FeatureStagePublicPreview, + Owner: identityAccessTeam, + AllowSelfServe: true, + RequiresRestart: true, }, { Name: "failWrongDSUID", @@ -1514,6 +1518,12 @@ var ( HideFromDocs: true, HideFromAdminPage: true, }, + { + Name: "pluginsSriChecks", + Description: "Enables SRI checks for plugin assets", + Stage: FeatureStageExperimental, + Owner: grafanaPluginsPlatformSquad, + }, } ) diff --git a/pkg/services/featuremgmt/toggles_gen.csv b/pkg/services/featuremgmt/toggles_gen.csv index 800f41a8e6f..27dc4bcdf2a 100644 --- a/pkg/services/featuremgmt/toggles_gen.csv +++ b/pkg/services/featuremgmt/toggles_gen.csv @@ -56,7 +56,7 @@ faroDatasourceSelector,preview,@grafana/app-o11y,false,false,true enableDatagridEditing,preview,@grafana/dataviz-squad,false,false,true extraThemes,experimental,@grafana/grafana-frontend-platform,false,false,true lokiPredefinedOperations,experimental,@grafana/observability-logs,false,false,true -pluginsFrontendSandbox,experimental,@grafana/plugins-platform-backend,false,false,true +pluginsFrontendSandbox,experimental,@grafana/plugins-platform-backend,false,false,false frontendSandboxMonitorOnly,experimental,@grafana/plugins-platform-backend,false,false,true pluginsDetailsRightPanel,experimental,@grafana/plugins-platform-backend,false,false,true sqlDatasourceDatabaseSelection,preview,@grafana/dataviz-squad,false,false,true @@ -88,7 +88,7 @@ wargamesTesting,experimental,@grafana/hosted-grafana-team,false,false,false alertingInsights,GA,@grafana/alerting-squad,false,false,true externalCorePlugins,experimental,@grafana/plugins-platform-backend,false,false,false pluginsAPIMetrics,experimental,@grafana/plugins-platform-backend,false,false,true -externalServiceAccounts,GA,@grafana/identity-access-team,false,false,false +externalServiceAccounts,preview,@grafana/identity-access-team,false,false,false panelMonitoring,GA,@grafana/dataviz-squad,false,false,true enableNativeHTTPHistogram,experimental,@grafana/grafana-backend-services-squad,false,true,false 
disableClassicHTTPHistogram,experimental,@grafana/grafana-backend-services-squad,false,true,false @@ -139,6 +139,7 @@ alertingQueryOptimization,GA,@grafana/alerting-squad,false,false,false newFolderPicker,experimental,@grafana/grafana-frontend-platform,false,false,true jitterAlertRulesWithinGroups,preview,@grafana/alerting-squad,false,true,false onPremToCloudMigrations,preview,@grafana/grafana-operator-experience-squad,false,false,false +onPremToCloudMigrationsAlerts,experimental,@grafana/grafana-operator-experience-squad,false,false,false alertingSaveStatePeriodic,privatePreview,@grafana/alerting-squad,false,false,false promQLScope,GA,@grafana/observability-metrics,false,false,false sqlExpressions,experimental,@grafana/grafana-app-platform-squad,false,false,false @@ -173,7 +174,7 @@ azureMonitorPrometheusExemplars,preview,@grafana/partner-datasources,false,false pinNavItems,GA,@grafana/grafana-frontend-platform,false,false,false authZGRPCServer,experimental,@grafana/identity-access-team,false,false,false openSearchBackendFlowEnabled,GA,@grafana/aws-datasources,false,false,false -ssoSettingsLDAP,experimental,@grafana/identity-access-team,false,false,false +ssoSettingsLDAP,preview,@grafana/identity-access-team,false,true,false failWrongDSUID,experimental,@grafana/plugins-platform-backend,false,false,false zanzana,experimental,@grafana/identity-access-team,false,false,false passScopeToDashboardApi,experimental,@grafana/dashboards-squad,false,false,false @@ -200,3 +201,4 @@ improvedExternalSessionHandling,experimental,@grafana/identity-access-team,false useSessionStorageForRedirection,preview,@grafana/identity-access-team,false,false,false rolePickerDrawer,experimental,@grafana/identity-access-team,false,false,false unifiedStorageSearch,experimental,@grafana/search-and-storage,false,false,false +pluginsSriChecks,experimental,@grafana/plugins-platform-backend,false,false,false diff --git a/pkg/services/featuremgmt/toggles_gen.go b/pkg/services/featuremgmt/toggles_gen.go index d6df6fbfae8..147d0f4681e 100644 --- a/pkg/services/featuremgmt/toggles_gen.go +++ b/pkg/services/featuremgmt/toggles_gen.go @@ -567,6 +567,10 @@ const ( // Enable the Grafana Migration Assistant, which helps you easily migrate on-prem dashboards, folders, and data source configurations to your Grafana Cloud stack. FlagOnPremToCloudMigrations = "onPremToCloudMigrations" + // FlagOnPremToCloudMigrationsAlerts + // Enables the migration of alerts and its child resources to your Grafana Cloud stack. Requires `onPremToCloudMigrations` to be enabled in conjunction. 
+ FlagOnPremToCloudMigrationsAlerts = "onPremToCloudMigrationsAlerts" + // FlagAlertingSaveStatePeriodic // Writes the state periodically to the database, asynchronous to rule evaluation FlagAlertingSaveStatePeriodic = "alertingSaveStatePeriodic" @@ -810,4 +814,8 @@ const ( // FlagUnifiedStorageSearch // Enable unified storage search FlagUnifiedStorageSearch = "unifiedStorageSearch" + + // FlagPluginsSriChecks + // Enables SRI checks for plugin assets + FlagPluginsSriChecks = "pluginsSriChecks" ) diff --git a/pkg/services/featuremgmt/toggles_gen.json b/pkg/services/featuremgmt/toggles_gen.json index eff0b8b4b6f..fbd7b6e335a 100644 --- a/pkg/services/featuremgmt/toggles_gen.json +++ b/pkg/services/featuremgmt/toggles_gen.json @@ -1234,18 +1234,14 @@ { "metadata": { "name": "externalServiceAccounts", - "resourceVersion": "1726562284896", - "creationTimestamp": "2023-09-28T07:26:37Z", - "annotations": { - "grafana.app/updatedTimestamp": "2024-09-17 08:38:04.896869045 +0000 UTC" - } + "resourceVersion": "1718727528075", + "creationTimestamp": "2023-09-28T07:26:37Z" }, "spec": { "description": "Automatic service account and token setup for plugins", - "stage": "GA", + "stage": "preview", "codeowner": "@grafana/identity-access-team", - "hideFromAdminPage": true, - "expression": "true" + "hideFromAdminPage": true } }, { @@ -2167,6 +2163,18 @@ "expression": "false" } }, + { + "metadata": { + "name": "onPremToCloudMigrationsAlerts", + "resourceVersion": "1728048163201", + "creationTimestamp": "2024-10-04T13:22:43Z" + }, + "spec": { + "description": "Enables the migration of alerts and its child resources to your Grafana Cloud stack. Requires `onPremToCloudMigrations` to be enabled in conjunction.", + "stage": "experimental", + "codeowner": "@grafana/grafana-operator-experience-squad" + } + }, { "metadata": { "name": "openSearchBackendFlowEnabled", @@ -2345,14 +2353,16 @@ { "metadata": { "name": "pluginsFrontendSandbox", - "resourceVersion": "1718727528075", - "creationTimestamp": "2023-06-05T08:51:36Z" + "resourceVersion": "1728388793917", + "creationTimestamp": "2023-06-05T08:51:36Z", + "annotations": { + "grafana.app/updatedTimestamp": "2024-10-08 11:59:53.917243 +0000 UTC" + } }, "spec": { "description": "Enables the plugins frontend sandbox", "stage": "experimental", - "codeowner": "@grafana/plugins-platform-backend", - "frontend": true + "codeowner": "@grafana/plugins-platform-backend" } }, { @@ -2367,6 +2377,18 @@ "codeowner": "@grafana/plugins-platform-backend" } }, + { + "metadata": { + "name": "pluginsSriChecks", + "resourceVersion": "1727785264632", + "creationTimestamp": "2024-10-01T12:21:04Z" + }, + "spec": { + "description": "Enables SRI checks for plugin assets", + "stage": "experimental", + "codeowner": "@grafana/plugins-platform-backend" + } + }, { "metadata": { "name": "preserveDashboardStateWhenNavigating", @@ -2842,15 +2864,18 @@ { "metadata": { "name": "ssoSettingsLDAP", - "resourceVersion": "1718727528075", - "creationTimestamp": "2024-06-18T11:31:27Z" + "resourceVersion": "1728034012257", + "creationTimestamp": "2024-06-18T11:31:27Z", + "annotations": { + "grafana.app/updatedTimestamp": "2024-10-04 09:26:52.257203 +0000 UTC" + } }, "spec": { "description": "Use the new SSO Settings API to configure LDAP", - "stage": "experimental", + "stage": "preview", "codeowner": "@grafana/identity-access-team", - "hideFromAdminPage": true, - "hideFromDocs": true + "requiresRestart": true, + "allowSelfServe": true } }, { diff --git a/pkg/services/gcom/gcom.go b/pkg/services/gcom/gcom.go 
index c510db5c095..2db6dc7e1d2 100644 --- a/pkg/services/gcom/gcom.go +++ b/pkg/services/gcom/gcom.go @@ -138,11 +138,11 @@ type Config struct { Token string } -func New(cfg Config) Service { +func New(cfg Config, httpClient *http.Client) Service { return &GcomClient{ log: log.New(LogPrefix), cfg: cfg, - httpClient: &http.Client{}, + httpClient: httpClient, } } @@ -360,6 +360,7 @@ func (client *GcomClient) ListTokens(ctx context.Context, params ListTokenParams return body.Items, nil } + func (client *GcomClient) CreateToken(ctx context.Context, params CreateTokenParams, payload CreateTokenPayload) (Token, error) { endpoint, err := url.JoinPath(client.cfg.ApiURL, "/v1/tokens") if err != nil { diff --git a/pkg/services/ldap/api/service_test.go b/pkg/services/ldap/api/service_test.go index c3147819d1f..c914ded932f 100644 --- a/pkg/services/ldap/api/service_test.go +++ b/pkg/services/ldap/api/service_test.go @@ -40,8 +40,10 @@ type LDAPMock struct { UserSearchError error } -var pingResult []*multildap.ServerStatus -var pingError error +var ( + pingResult []*multildap.ServerStatus + pingError error +) func (m *LDAPMock) Ping() ([]*multildap.ServerStatus, error) { return pingResult, pingError @@ -105,7 +107,8 @@ func TestGetUserFromLDAPAPIEndpoint_UserNotFound(t *testing.T) { webtest.RequestWithSignedInUser(req, &user.SignedInUser{ OrgID: 1, Permissions: map[int64]map[string][]string{ - 1: {"ldap.user:read": {"*"}}}, + 1: {"ldap.user:read": {"*"}}, + }, }) res, err := server.Send(req) @@ -170,7 +173,8 @@ func TestGetUserFromLDAPAPIEndpoint_OrgNotfound(t *testing.T) { webtest.RequestWithSignedInUser(req, &user.SignedInUser{ OrgID: 1, Permissions: map[int64]map[string][]string{ - 1: {"ldap.user:read": {"*"}}}, + 1: {"ldap.user:read": {"*"}}, + }, }) res, err := server.Send(req) @@ -239,7 +243,8 @@ func TestGetUserFromLDAPAPIEndpoint(t *testing.T) { webtest.RequestWithSignedInUser(req, &user.SignedInUser{ OrgID: 1, Permissions: map[int64]map[string][]string{ - 1: {"ldap.user:read": {"*"}}}, + 1: {"ldap.user:read": {"*"}}, + }, }) res, err := server.Send(req) @@ -324,7 +329,8 @@ func TestGetUserFromLDAPAPIEndpoint_WithTeamHandler(t *testing.T) { webtest.RequestWithSignedInUser(req, &user.SignedInUser{ OrgID: 1, Permissions: map[int64]map[string][]string{ - 1: {"ldap.user:read": {"*"}}}, + 1: {"ldap.user:read": {"*"}}, + }, }) res, err := server.Send(req) @@ -378,7 +384,8 @@ func TestGetLDAPStatusAPIEndpoint(t *testing.T) { webtest.RequestWithSignedInUser(req, &user.SignedInUser{ OrgID: 1, Permissions: map[int64]map[string][]string{ - 1: {"ldap.status:read": {}}}, + 1: {"ldap.status:read": {}}, + }, }) res, err := server.Send(req) @@ -417,7 +424,8 @@ func TestPostSyncUserWithLDAPAPIEndpoint_Success(t *testing.T) { webtest.RequestWithSignedInUser(req, &user.SignedInUser{ OrgID: 1, Permissions: map[int64]map[string][]string{ - 1: {"ldap.user:sync": {}}}, + 1: {"ldap.user:sync": {}}, + }, }) res, err := server.Send(req) @@ -452,7 +460,8 @@ func TestPostSyncUserWithLDAPAPIEndpoint_WhenUserNotFound(t *testing.T) { webtest.RequestWithSignedInUser(req, &user.SignedInUser{ OrgID: 1, Permissions: map[int64]map[string][]string{ - 1: {"ldap.user:sync": {}}}, + 1: {"ldap.user:sync": {}}, + }, }) res, err := server.Send(req) @@ -488,7 +497,8 @@ func TestPostSyncUserWithLDAPAPIEndpoint_WhenGrafanaAdmin(t *testing.T) { webtest.RequestWithSignedInUser(req, &user.SignedInUser{ OrgID: 1, Permissions: map[int64]map[string][]string{ - 1: {"ldap.user:sync": {}}}, + 1: {"ldap.user:sync": {}}, + }, }) res, err := 
server.Send(req) @@ -521,7 +531,8 @@ func TestPostSyncUserWithLDAPAPIEndpoint_WhenUserNotInLDAP(t *testing.T) { webtest.RequestWithSignedInUser(req, &user.SignedInUser{ OrgID: 1, Permissions: map[int64]map[string][]string{ - 1: {"ldap.user:sync": {}}}, + 1: {"ldap.user:sync": {}}, + }, }) res, err := server.Send(req) diff --git a/pkg/services/navtree/navtreeimpl/navtree.go b/pkg/services/navtree/navtreeimpl/navtree.go index d89454d5ee0..e4bdfcf2bfc 100644 --- a/pkg/services/navtree/navtreeimpl/navtree.go +++ b/pkg/services/navtree/navtreeimpl/navtree.go @@ -355,11 +355,13 @@ func (s *ServiceImpl) buildDashboardNavLinks(c *contextmodel.ReqContext) []*navt dashboardChildNavs := []*navtree.NavLink{} - dashboardChildNavs = append(dashboardChildNavs, &navtree.NavLink{ - Text: "Playlists", SubTitle: "Groups of dashboards that are displayed in a sequence", Id: "dashboards/playlists", Url: s.cfg.AppSubURL + "/playlists", Icon: "presentation-play", - }) - if c.IsSignedIn { + if c.SignedInUser.HasRole(org.RoleViewer) { + dashboardChildNavs = append(dashboardChildNavs, &navtree.NavLink{ + Text: "Playlists", SubTitle: "Groups of dashboards that are displayed in a sequence", Id: "dashboards/playlists", Url: s.cfg.AppSubURL + "/playlists", Icon: "presentation-play", + }) + } + if s.cfg.SnapshotEnabled { dashboardChildNavs = append(dashboardChildNavs, &navtree.NavLink{ Text: "Snapshots", diff --git a/pkg/services/ngalert/accesscontrol.go b/pkg/services/ngalert/accesscontrol.go index 94ce4069160..1dad2138a87 100644 --- a/pkg/services/ngalert/accesscontrol.go +++ b/pkg/services/ngalert/accesscontrol.go @@ -136,6 +136,7 @@ var ( Group: AlertRolesGroup, Permissions: []accesscontrol.Permission{ {Action: accesscontrol.ActionAlertingReceiversCreate}, + {Action: accesscontrol.ActionAlertingReceiversTest}, }, }, } diff --git a/pkg/services/ngalert/accesscontrol/rules.go b/pkg/services/ngalert/accesscontrol/rules.go index 812cc5816eb..8aa64c479e9 100644 --- a/pkg/services/ngalert/accesscontrol/rules.go +++ b/pkg/services/ngalert/accesscontrol/rules.go @@ -2,6 +2,7 @@ package accesscontrol import ( "fmt" + "slices" "golang.org/x/net/context" @@ -23,11 +24,17 @@ const ( type RuleService struct { genericService + notificationSettingsAuth notificationSettingsAuth +} + +type notificationSettingsAuth interface { + AuthorizeRead(context.Context, identity.Requester, *models.NotificationSettings) error } func NewRuleService(ac accesscontrol.AccessControl) *RuleService { return &RuleService{ - genericService{ac: ac}, + genericService: genericService{ac: ac}, + notificationSettingsAuth: NewReceiverAccess[*models.NotificationSettings](ac, true), } } @@ -196,6 +203,10 @@ func (r *RuleService) AuthorizeRuleChanges(ctx context.Context, user identity.Re }); err != nil { return err } + + if err := r.authorizeNotificationSettings(ctx, user, rule); err != nil { + return err + } } if !existingGroup { // create a new group, check that user has "read" access to that new group. Otherwise, it will not be able to read it back. 
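The hunk above wires the new receiver check into rule creation, and the hunk that follows extends it to updates whose notification settings actually change. As a reading aid, here is a self-contained toy sketch of that control flow (simplified stand-in types and a hypothetical allowList authorizer; not the PR's actual accesscontrol signatures):

```go
// Toy sketch of the receiver-authorization flow added by this change.
// NotificationSettings, AlertRule, receiverAuth, and allowList are simplified
// stand-ins, not Grafana's real types.
package main

import (
	"context"
	"errors"
	"fmt"
	"slices"
)

type NotificationSettings struct{ Receiver string }

func (ns NotificationSettings) Equals(other NotificationSettings) bool {
	return ns.Receiver == other.Receiver
}

type AlertRule struct {
	UID                  string
	NotificationSettings []NotificationSettings
}

// receiverAuth mimics the notificationSettingsAuth dependency from the diff.
type receiverAuth interface {
	AuthorizeRead(ctx context.Context, user string, ns NotificationSettings) error
}

// allowList is a hypothetical authorizer: a receiver is readable iff listed.
type allowList map[string]bool

func (a allowList) AuthorizeRead(_ context.Context, _ string, ns NotificationSettings) error {
	if !a[ns.Receiver] {
		return errors.New("unauthorized receiver: " + ns.Receiver)
	}
	return nil
}

// authorizeNotificationSettings checks every receiver the rule references.
func authorizeNotificationSettings(ctx context.Context, auth receiverAuth, user string, rule *AlertRule) error {
	for _, ns := range rule.NotificationSettings {
		if err := auth.AuthorizeRead(ctx, user, ns); err != nil {
			return err
		}
	}
	return nil
}

// authorizeUpdate re-checks receivers only when the settings actually changed,
// mirroring the slices.EqualFunc comparison in the diff.
func authorizeUpdate(ctx context.Context, auth receiverAuth, user string, existing, updated *AlertRule) error {
	unchanged := slices.EqualFunc(existing.NotificationSettings, updated.NotificationSettings,
		func(a, b NotificationSettings) bool { return a.Equals(b) })
	if unchanged {
		return nil
	}
	return authorizeNotificationSettings(ctx, auth, user, updated)
}

func main() {
	auth := allowList{"team-a-receiver": true}
	old := &AlertRule{UID: "r1", NotificationSettings: []NotificationSettings{{Receiver: "team-a-receiver"}}}
	upd := &AlertRule{UID: "r1", NotificationSettings: []NotificationSettings{{Receiver: "team-b-receiver"}}}

	fmt.Println(authorizeUpdate(context.Background(), auth, "editor", old, old)) // <nil>: settings unchanged, no re-check
	fmt.Println(authorizeUpdate(context.Background(), auth, "editor", old, upd)) // unauthorized receiver: team-b-receiver
}
```

The slices.EqualFunc comparison is what lets updates that leave notification settings untouched skip the receiver permission check entirely.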
@@ -237,6 +248,24 @@ func (r *RuleService) AuthorizeRuleChanges(ctx context.Context, user identity.Re } updateAuthorized = true } + + if !slices.EqualFunc(rule.Existing.NotificationSettings, rule.New.NotificationSettings, func(settings models.NotificationSettings, settings2 models.NotificationSettings) bool { + return settings.Equals(&settings2) + }) { + if err := r.authorizeNotificationSettings(ctx, user, rule.New); err != nil { + return err + } + } + } + return nil +} + +// authorizeNotificationSettings checks if the user has access to all receivers that are used by the rule's notification settings. +func (r *RuleService) authorizeNotificationSettings(ctx context.Context, user identity.Requester, rule *models.AlertRule) error { + for _, ns := range rule.NotificationSettings { + if err := r.notificationSettingsAuth.AuthorizeRead(ctx, user, &ns); err != nil { + return err + } } return nil } diff --git a/pkg/services/ngalert/accesscontrol/rules_test.go b/pkg/services/ngalert/accesscontrol/rules_test.go index cc85799ba3c..8c823763308 100644 --- a/pkg/services/ngalert/accesscontrol/rules_test.go +++ b/pkg/services/ngalert/accesscontrol/rules_test.go @@ -13,10 +13,14 @@ import ( "github.com/grafana/grafana/pkg/apimachinery/identity" "github.com/grafana/grafana/pkg/expr" "github.com/grafana/grafana/pkg/services/accesscontrol" + "github.com/grafana/grafana/pkg/services/accesscontrol/acimpl" + "github.com/grafana/grafana/pkg/services/authz/zanzana" "github.com/grafana/grafana/pkg/services/dashboards" "github.com/grafana/grafana/pkg/services/datasources" + "github.com/grafana/grafana/pkg/services/featuremgmt" "github.com/grafana/grafana/pkg/services/folder" "github.com/grafana/grafana/pkg/services/ngalert/models" + "github.com/grafana/grafana/pkg/services/ngalert/notifier/legacy_storage" "github.com/grafana/grafana/pkg/services/ngalert/store" "github.com/grafana/grafana/pkg/services/user" "github.com/grafana/grafana/pkg/util" @@ -75,6 +79,22 @@ func getDatasourceScopesForRules(rules models.RulesGroup) []string { return result } +func getReceiverScopesForRules(rules models.RulesGroup) []string { + scopesMap := map[string]struct{}{} + var result []string + for _, rule := range rules { + for _, ns := range rule.NotificationSettings { + scope := ScopeReceiversProvider.GetResourceScopeUID(legacy_storage.NameToUid(ns.Receiver)) + if _, ok := scopesMap[scope]; ok { + continue + } + result = append(result, scope) + scopesMap[scope] = struct{}{} + } + } + return result +} + func mapUpdates(updates []store.RuleDelta, mapFunc func(store.RuleDelta) *models.AlertRule) models.RulesGroup { result := make(models.RulesGroup, 0, len(updates)) for _, update := range updates { @@ -111,12 +131,6 @@ func TestAuthorizeRuleChanges(t *testing.T) { } }, permissions: func(c *store.GroupDelta) map[string][]string { - var scopes []string - for _, rule := range c.New { - for _, query := range rule.Data { - scopes = append(scopes, datasources.ScopeProvider.GetResourceScopeUID(query.DatasourceUID)) - } - } return map[string][]string{ ruleCreate: { namespaceIdScope, @@ -127,7 +141,8 @@ func TestAuthorizeRuleChanges(t *testing.T) { dashboards.ActionFoldersRead: { namespaceIdScope, }, - datasources.ActionQuery: scopes, + datasources.ActionQuery: getDatasourceScopesForRules(c.New), + accesscontrol.ActionAlertingReceiversRead: getReceiverScopesForRules(c.New), } }, }, @@ -313,6 +328,85 @@ func TestAuthorizeRuleChanges(t *testing.T) { } }, }, + { + name: "if there are new rules that have notification settings it should check access to 
all receivers", + changes: func() *store.GroupDelta { + receiverName := "test-receiver" + genWithNotificationSettings := genWithGroupKey.With(gen.WithNotificationSettingsGen(models.NotificationSettingsGen(models.NSMuts.WithReceiver(receiverName)))) + return &store.GroupDelta{ + GroupKey: groupKey, + New: genWithNotificationSettings.GenerateManyRef(1, 5), + Update: nil, + Delete: nil, + } + }, + permissions: func(c *store.GroupDelta) map[string][]string { + return map[string][]string{ + ruleCreate: { + namespaceIdScope, + }, + ruleRead: { + namespaceIdScope, + }, + dashboards.ActionFoldersRead: { + namespaceIdScope, + }, + datasources.ActionQuery: getDatasourceScopesForRules(c.New), + accesscontrol.ActionAlertingReceiversRead: getReceiverScopesForRules(c.New), + } + }, + }, + { + name: "if there are rules that modify notification settings it should check access to all receivers", + changes: func() *store.GroupDelta { + receiverName := "test-receiver" + genWithNotificationSettings := genWithGroupKey.With(gen.WithNotificationSettingsGen(models.NotificationSettingsGen(models.NSMuts.WithReceiver(receiverName)))) + rules1 := genWithNotificationSettings.GenerateManyRef(1, 5) + rules := genWithNotificationSettings.GenerateManyRef(1, 5) + updates := make([]store.RuleDelta, 0, len(rules)) + + for _, rule := range rules { + cp := models.CopyRule(rule) + for i := range cp.NotificationSettings { + cp.NotificationSettings[i].Receiver = "new-receiver" + } + updates = append(updates, store.RuleDelta{ + Existing: rule, + New: cp, + Diff: nil, + }) + } + + return &store.GroupDelta{ + GroupKey: groupKey, + AffectedGroups: map[models.AlertRuleGroupKey]models.RulesGroup{ + groupKey: append(rules, rules1...), + }, + New: nil, + Update: updates, + Delete: nil, + } + }, + permissions: func(c *store.GroupDelta) map[string][]string { + return map[string][]string{ + ruleRead: { + namespaceIdScope, + }, + dashboards.ActionFoldersRead: { + namespaceIdScope, + }, + ruleUpdate: { + namespaceIdScope, + }, + datasources.ActionQuery: getDatasourceScopesForRules(mapUpdates(c.Update, func(update store.RuleDelta) *models.AlertRule { + return update.New + })), + accesscontrol.ActionAlertingReceiversRead: getReceiverScopesForRules(mapUpdates(c.Update, func(update store.RuleDelta) *models.AlertRule { + return update.New + })), + } + }, + }, } for _, testCase := range testCases { @@ -325,9 +419,7 @@ func TestAuthorizeRuleChanges(t *testing.T) { permissionCombinations = permissionCombinations[0 : len(permissionCombinations)-1] // exclude all permissions for _, missing := range permissionCombinations { ac := &recordingAccessControlFake{} - srv := RuleService{ - genericService{ac: ac}, - } + srv := NewRuleService(ac) err := srv.AuthorizeRuleChanges(context.Background(), createUserWithPermissions(missing), groupChanges) assert.Errorf(t, err, "expected error because less permissions than expected were provided. 
Provided: %v; Expected: %v; Diff: %v", missing, permissions, cmp.Diff(permissions, missing)) @@ -335,19 +427,10 @@ func TestAuthorizeRuleChanges(t *testing.T) { } }) - ac := &recordingAccessControlFake{ - Callback: func(user identity.Requester, evaluator accesscontrol.Evaluator) (bool, error) { - response := evaluator.Evaluate(user.GetPermissions()) - require.Truef(t, response, "provided permissions [%v] is not enough for requested permissions [%s]", permissions, evaluator.GoString()) - return response, nil - }, - } - srv := RuleService{ - genericService{ac: ac}, - } + ac := acimpl.ProvideAccessControl(featuremgmt.WithFeatures(), zanzana.NewNoopClient()) + srv := NewRuleService(ac) err := srv.AuthorizeRuleChanges(context.Background(), createUserWithPermissions(permissions), groupChanges) require.NoError(t, err) - require.NotEmptyf(t, ac.EvaluateRecordings, "evaluation function is expected to be called but it was not.") }) } } @@ -387,9 +470,7 @@ func TestCheckDatasourcePermissionsForRule(t *testing.T) { } ac := &recordingAccessControlFake{} - svc := RuleService{ - genericService{ac: ac}, - } + svc := NewRuleService(ac) eval := svc.AuthorizeDatasourceAccessForRule(context.Background(), createUserWithPermissions(permissions), rule) @@ -403,9 +484,7 @@ func TestCheckDatasourcePermissionsForRule(t *testing.T) { return false, nil }, } - svc := RuleService{ - genericService{ac: ac}, - } + svc := NewRuleService(ac) result := svc.AuthorizeDatasourceAccessForRule(context.Background(), createUserWithPermissions(nil), rule) @@ -426,9 +505,7 @@ func Test_authorizeAccessToRuleGroup(t *testing.T) { dashboards.ActionFoldersRead: namespaceScopes, } ac := &recordingAccessControlFake{} - svc := RuleService{ - genericService{ac: ac}, - } + svc := NewRuleService(ac) result := svc.AuthorizeAccessToRuleGroup(context.Background(), createUserWithPermissions(permissions), rules) @@ -443,9 +520,7 @@ func Test_authorizeAccessToRuleGroup(t *testing.T) { rules := genWithFolder.GenerateManyRef(1, 5) ac := &recordingAccessControlFake{} - svc := RuleService{ - genericService{ac: ac}, - } + svc := NewRuleService(ac) result := svc.AuthorizeAccessToRuleGroup(context.Background(), createUserWithPermissions(map[string][]string{}), rules) @@ -456,9 +531,7 @@ func Test_authorizeAccessToRuleGroup(t *testing.T) { func TestCanReadAllRules(t *testing.T) { ac := &recordingAccessControlFake{} - svc := RuleService{ - genericService{ac: ac}, - } + svc := NewRuleService(ac) testCases := []struct { permissions map[string][]string diff --git a/pkg/services/ngalert/api/api.go b/pkg/services/ngalert/api/api.go index 5057b831b08..55d29355952 100644 --- a/pkg/services/ngalert/api/api.go +++ b/pkg/services/ngalert/api/api.go @@ -110,6 +110,7 @@ func (api *API) RegisterAPIEndpoints(m *metrics.API) { api.RuleStore, ruleAuthzService, ), + receiverAuthz: accesscontrol.NewReceiverAccess[ReceiverStatus](api.AccessControl, false), }, ), m) // Register endpoints for proxying to Prometheus-compatible backends. 
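The receiverAuthz field registered above is consumed by the Alertmanager handler change that follows: receivers get wrapped in a type exposing a UID so a generic read filter can decide which ones the requesting user may see. The sketch below is a simplified stand-in (hypothetical nameToUID and filterRead helpers; the real logic lives in Grafana's accesscontrol and legacy_storage packages), not the PR's implementation:

```go
// Toy sketch of the filter-by-read-permission pattern used by the handler.
package main

import (
	"context"
	"encoding/base64"
	"fmt"
)

type Receiver struct{ Name string }

// ReceiverStatus mirrors the wrapper from the diff: it adds GetUID so the
// generic authz filter has an identifier to evaluate against.
type ReceiverStatus Receiver

// nameToUID stands in for legacy_storage.NameToUid; the exact encoding used
// here is an assumption, the point is only that the UID is derived from the name.
func nameToUID(name string) string {
	return base64.RawURLEncoding.EncodeToString([]byte(name))
}

func (rs ReceiverStatus) GetUID() string { return nameToUID(rs.Name) }

type uidResource interface{ GetUID() string }

// filterRead keeps only the resources whose UID the user may read, in the
// spirit of receiverAuthz.FilterRead (here a plain allow-set instead of RBAC).
func filterRead[T uidResource](_ context.Context, readableUIDs map[string]bool, items []T) []T {
	out := make([]T, 0, len(items))
	for _, item := range items {
		if readableUIDs[item.GetUID()] {
			out = append(out, item)
		}
	}
	return out
}

func main() {
	rcvs := []Receiver{{Name: "oncall"}, {Name: "email-ops"}}
	statuses := make([]ReceiverStatus, 0, len(rcvs))
	for _, r := range rcvs { // the same temporary wrapping the handler performs
		statuses = append(statuses, ReceiverStatus(r))
	}
	readable := map[string]bool{nameToUID("oncall"): true}
	fmt.Println(filterRead(context.Background(), readable, statuses)) // [{oncall}]
}
```

Deriving the UID from the receiver name is what lets name-keyed legacy receivers participate in UID-scoped access control without a schema change.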
diff --git a/pkg/services/ngalert/api/api_alertmanager.go b/pkg/services/ngalert/api/api_alertmanager.go
index 417beb74285..1d174a149c7 100644
--- a/pkg/services/ngalert/api/api_alertmanager.go
+++ b/pkg/services/ngalert/api/api_alertmanager.go
@@ -12,12 +12,14 @@ import (
 	alertingNotify "github.com/grafana/alerting/notify"
 	"github.com/grafana/grafana/pkg/api/response"
+	"github.com/grafana/grafana/pkg/apimachinery/identity"
 	"github.com/grafana/grafana/pkg/infra/log"
 	"github.com/grafana/grafana/pkg/services/accesscontrol"
 	contextmodel "github.com/grafana/grafana/pkg/services/contexthandler/model"
 	"github.com/grafana/grafana/pkg/services/featuremgmt"
 	apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions"
 	"github.com/grafana/grafana/pkg/services/ngalert/notifier"
+	"github.com/grafana/grafana/pkg/services/ngalert/notifier/legacy_storage"
 	"github.com/grafana/grafana/pkg/services/ngalert/store"
 	"github.com/grafana/grafana/pkg/services/org"
 	"github.com/grafana/grafana/pkg/util"
@@ -28,6 +30,10 @@ const (
 	maxTestReceiversTimeout = 30 * time.Second
 )
+type receiversAuthz interface {
+	FilterRead(ctx context.Context, user identity.Requester, receivers ...ReceiverStatus) ([]ReceiverStatus, error)
+}
+
 type AlertmanagerSrv struct {
 	log log.Logger
 	ac  accesscontrol.AccessControl
@@ -35,6 +41,7 @@ type AlertmanagerSrv struct {
 	crypto         notifier.Crypto
 	silenceSvc     SilenceService
 	featureManager featuremgmt.FeatureToggles
+	receiverAuthz  receiversAuthz
 }
 type UnknownReceiverError struct {
@@ -237,7 +244,15 @@ func (srv AlertmanagerSrv) RouteGetReceivers(c *contextmodel.ReqContext) respons
 	if err != nil {
 		return ErrResp(http.StatusInternalServerError, err, "failed to retrieve receivers")
 	}
-	return response.JSON(http.StatusOK, rcvs)
+	statuses := make([]ReceiverStatus, 0, len(rcvs))
+	for _, rcv := range rcvs { // TODO this is temporary so we can use authz filter logic.
+		statuses = append(statuses, ReceiverStatus(rcv))
+	}
+	statuses, err = srv.receiverAuthz.FilterRead(c.Req.Context(), c.SignedInUser, statuses...)
+	if err != nil {
+		return response.ErrOrFallback(http.StatusInternalServerError, "failed to apply permissions to the receivers", err)
+	}
+	return response.JSON(http.StatusOK, statuses)
 }
 func (srv AlertmanagerSrv) RoutePostTestReceivers(c *contextmodel.ReqContext, body apimodels.TestReceiversConfigBodyParams) response.Response {
@@ -368,3 +383,9 @@ func (srv AlertmanagerSrv) AlertmanagerFor(orgID int64) (notifier.Alertmanager,
 		srv.log.Error("Unable to obtain the org's Alertmanager", "error", err)
 		return nil, response.Error(http.StatusInternalServerError, "unable to obtain org's Alertmanager", err)
 	}
+
+type ReceiverStatus apimodels.Receiver
+
+func (rs ReceiverStatus) GetUID() string {
+	return legacy_storage.NameToUid(rs.Name)
+}
diff --git a/pkg/services/ngalert/api/api_provisioning.go b/pkg/services/ngalert/api/api_provisioning.go
index 082881c1fbb..f22cf445ce5 100644
--- a/pkg/services/ngalert/api/api_provisioning.go
+++ b/pkg/services/ngalert/api/api_provisioning.go
@@ -51,8 +51,8 @@ type TemplateService interface {
 }
 type NotificationPolicyService interface {
-	GetPolicyTree(ctx context.Context, orgID int64) (definitions.Route, error)
-	UpdatePolicyTree(ctx context.Context, orgID int64, tree definitions.Route, p alerting_models.Provenance) error
+	GetPolicyTree(ctx context.Context, orgID int64) (definitions.Route, string, error)
+	UpdatePolicyTree(ctx context.Context, orgID int64, tree definitions.Route, p alerting_models.Provenance, version string) error
 	ResetPolicyTree(ctx context.Context, orgID int64) (definitions.Route, error)
 }
@@ -79,7 +79,7 @@ type AlertRuleService interface {
 }
 func (srv *ProvisioningSrv) RouteGetPolicyTree(c *contextmodel.ReqContext) response.Response {
-	policies, err := srv.policies.GetPolicyTree(c.Req.Context(), c.SignedInUser.GetOrgID())
+	policies, _, err := srv.policies.GetPolicyTree(c.Req.Context(), c.SignedInUser.GetOrgID())
 	if errors.Is(err, store.ErrNoAlertmanagerConfiguration) {
 		return ErrResp(http.StatusNotFound, err, "")
 	}
@@ -91,7 +91,7 @@ func (srv *ProvisioningSrv) RouteGetPolicyTree(c *contextmodel.ReqContext) respo
 }
 func (srv *ProvisioningSrv) RouteGetPolicyTreeExport(c *contextmodel.ReqContext) response.Response {
-	policies, err := srv.policies.GetPolicyTree(c.Req.Context(), c.SignedInUser.GetOrgID())
+	policies, _, err := srv.policies.GetPolicyTree(c.Req.Context(), c.SignedInUser.GetOrgID())
 	if err != nil {
 		if errors.Is(err, store.ErrNoAlertmanagerConfiguration) {
 			return ErrResp(http.StatusNotFound, err, "")
@@ -109,7 +109,7 @@ func (srv *ProvisioningSrv) RoutePutPolicyTree(c *contextmodel.ReqContext, tree definitions.Route) response.Response {
 	provenance := determineProvenance(c)
-	err := srv.policies.UpdatePolicyTree(c.Req.Context(), c.SignedInUser.GetOrgID(), tree, alerting_models.Provenance(provenance))
+	err := srv.policies.UpdatePolicyTree(c.Req.Context(), c.SignedInUser.GetOrgID(), tree, alerting_models.Provenance(provenance), "")
 	if errors.Is(err, store.ErrNoAlertmanagerConfiguration) {
 		return ErrResp(http.StatusNotFound, err, "")
 	}
@@ -117,7 +117,7 @@ func (srv *ProvisioningSrv) RoutePutPolicyTree(c *contextmodel.ReqContext, tree
 		return ErrResp(http.StatusBadRequest, err, "")
 	}
 	if err != nil {
-		return ErrResp(http.StatusInternalServerError, err, "")
+		return response.ErrOrFallback(http.StatusInternalServerError, "", err)
 	}
 	return response.JSON(http.StatusAccepted, util.DynMap{"message": "policies updated"})
diff --git a/pkg/services/ngalert/api/api_provisioning_test.go b/pkg/services/ngalert/api/api_provisioning_test.go
index f4e0a2e7eb5..e6747915ae6 100644
--- a/pkg/services/ngalert/api/api_provisioning_test.go
+++ b/pkg/services/ngalert/api/api_provisioning_test.go
@@ -153,7 +153,6 @@ func TestProvisioningApi(t *testing.T) {
 			require.Equal(t, 500, response.Status())
 			require.NotEmpty(t, response.Body())
-			require.Contains(t, string(response.Body()), "something went wrong")
 		})
 		t.Run("DELETE returns 500", func(t *testing.T) {
@@ -1985,16 +1984,16 @@ func createFakeNotificationPolicyService() *fakeNotificationPolicyService {
 	}
 }
-func (f *fakeNotificationPolicyService) GetPolicyTree(ctx context.Context, orgID int64) (definitions.Route, error) {
+func (f *fakeNotificationPolicyService) GetPolicyTree(ctx context.Context, orgID int64) (definitions.Route, string, error) {
 	if orgID != 1 {
-		return definitions.Route{}, store.ErrNoAlertmanagerConfiguration
+		return definitions.Route{}, "", store.ErrNoAlertmanagerConfiguration
 	}
 	result := f.tree
 	result.Provenance = definitions.Provenance(f.prov)
-	return result, nil
+	return result, "", nil
 }
-func (f *fakeNotificationPolicyService) UpdatePolicyTree(ctx context.Context, orgID int64, tree definitions.Route, p models.Provenance) error {
+func (f *fakeNotificationPolicyService) UpdatePolicyTree(ctx context.Context, orgID int64, tree definitions.Route, p models.Provenance, _ string) error {
 	if orgID != 1 {
 		return store.ErrNoAlertmanagerConfiguration
 	}
@@ -2010,11 +2009,11 @@ func (f *fakeNotificationPolicyService) ResetPolicyTree(ctx context.Context, org
 type fakeFailingNotificationPolicyService struct{}
-func (f *fakeFailingNotificationPolicyService) GetPolicyTree(ctx context.Context, orgID int64) (definitions.Route, error) {
-	return definitions.Route{}, fmt.Errorf("something went wrong")
+func (f *fakeFailingNotificationPolicyService) GetPolicyTree(ctx context.Context, orgID int64) (definitions.Route, string, error) {
+	return definitions.Route{}, "", fmt.Errorf("something went wrong")
 }
-func (f *fakeFailingNotificationPolicyService) UpdatePolicyTree(ctx context.Context, orgID int64, tree definitions.Route, p models.Provenance) error {
+func (f *fakeFailingNotificationPolicyService) UpdatePolicyTree(ctx context.Context, orgID int64, tree definitions.Route, p models.Provenance, _ string) error {
 	return fmt.Errorf("something went wrong")
 }
@@ -2024,11 +2023,11 @@ func (f *fakeFailingNotificationPolicyService) ResetPolicyTree(ctx context.Conte
 type fakeRejectingNotificationPolicyService struct{}
-func (f *fakeRejectingNotificationPolicyService) GetPolicyTree(ctx context.Context, orgID int64) (definitions.Route, error) {
-	return definitions.Route{}, nil
+func (f *fakeRejectingNotificationPolicyService) GetPolicyTree(ctx context.Context, orgID int64) (definitions.Route, string, error) {
+	return definitions.Route{}, "", nil
}
-func (f *fakeRejectingNotificationPolicyService) UpdatePolicyTree(ctx context.Context, orgID int64, tree definitions.Route, p models.Provenance) error {
+func (f *fakeRejectingNotificationPolicyService) UpdatePolicyTree(ctx context.Context, orgID int64, tree definitions.Route, p models.Provenance, _ string) error {
 	return fmt.Errorf("%w: invalid policy tree", provisioning.ErrValidation)
 }
diff --git a/pkg/services/ngalert/api/authorization.go b/pkg/services/ngalert/api/authorization.go
index e0d77de1a16..083ae995637 100644
--- a/pkg/services/ngalert/api/authorization.go
+++ b/pkg/services/ngalert/api/authorization.go
@@ -210,11 +210,21 @@ func (api *API) authorize(method, path string) web.Handler {
 	case http.MethodPost + "/api/alertmanager/grafana/config/history/{id}/_activate":
 		eval = ac.EvalAny(ac.EvalPermission(ac.ActionAlertingNotificationsWrite))
 	case http.MethodGet + "/api/alertmanager/grafana/config/api/v1/receivers":
-		eval = ac.EvalPermission(ac.ActionAlertingNotificationsRead)
+		eval = ac.EvalAny(
+			ac.EvalPermission(ac.ActionAlertingNotificationsRead),
+			ac.EvalPermission(ac.ActionAlertingReceiversRead),
+			ac.EvalPermission(ac.ActionAlertingReceiversReadSecrets),
+		)
 	case http.MethodPost + "/api/alertmanager/grafana/config/api/v1/receivers/test":
-		eval = ac.EvalPermission(ac.ActionAlertingNotificationsWrite)
+		eval = ac.EvalAny(
+			ac.EvalPermission(ac.ActionAlertingNotificationsWrite),
+			ac.EvalPermission(ac.ActionAlertingReceiversTest),
+		)
 	case http.MethodPost + "/api/alertmanager/grafana/config/api/v1/templates/test":
-		eval = ac.EvalPermission(ac.ActionAlertingNotificationsWrite)
+		eval = ac.EvalAny(
+			ac.EvalPermission(ac.ActionAlertingNotificationsWrite),
+			ac.EvalPermission(ac.ActionAlertingNotificationsTemplatesRead),
+		)
 	// External Alertmanager Paths
 	case http.MethodDelete + "/api/alertmanager/{DatasourceUID}/config/api/v1/alerts":
diff --git a/pkg/services/ngalert/models/notifications.go b/pkg/services/ngalert/models/notifications.go
index 602d21acfa7..d4ed80813fc 100644
--- a/pkg/services/ngalert/models/notifications.go
+++ b/pkg/services/ngalert/models/notifications.go
@@ -35,6 +35,10 @@ type NotificationSettings struct {
 	MuteTimeIntervals []string `json:"mute_time_intervals,omitempty"`
 }
+func (s *NotificationSettings) GetUID() string {
+	return NameToUid(s.Receiver)
+}
+
 // NormalizedGroupBy returns a consistent and ordered GroupBy.
 // - If the GroupBy is empty, it returns nil so that the parent group can be inherited.
 // - If the GroupBy contains the special label '...', it returns only '...'.
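The `GetUID` helper above delegates to `NameToUid`, which (as the next hunk shows) encodes the receiver name with unpadded URL-safe base64, so names become stable, URL-safe UIDs. A self-contained sketch of the round trip, mirroring `models.NameToUid` and `legacy_storage.UidToName` from the diff:

```go
package main

import (
	"encoding/base64"
	"fmt"
)

// nameToUid mirrors models.NameToUid: receiver names become stable,
// URL-safe UIDs by base64-encoding them without padding.
func nameToUid(name string) string {
	return base64.RawURLEncoding.EncodeToString([]byte(name))
}

// uidToName is the inverse, as in legacy_storage.UidToName.
func uidToName(uid string) (string, error) {
	data, err := base64.RawURLEncoding.DecodeString(uid)
	if err != nil {
		return "", err
	}
	return string(data), nil
}

func main() {
	uid := nameToUid("grafana-default-email")
	name, err := uidToName(uid)
	fmt.Println(uid, name, err) // Z3JhZmFuYS1kZWZhdWx0LWVtYWls grafana-default-email <nil>
}
```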
diff --git a/pkg/services/ngalert/models/receivers.go b/pkg/services/ngalert/models/receivers.go
index 160c07b6b05..31ffe697e70 100644
--- a/pkg/services/ngalert/models/receivers.go
+++ b/pkg/services/ngalert/models/receivers.go
@@ -2,6 +2,7 @@ package models
 import (
 	"context"
+	"encoding/base64"
 	"encoding/json"
 	"errors"
 	"fmt"
@@ -559,6 +560,10 @@ func (r *Receiver) GetUID() string {
 	return r.UID
 }
+func NameToUid(name string) string {
+	return base64.RawURLEncoding.EncodeToString([]byte(name))
+}
+
 func (r *Receiver) Fingerprint() string {
 	sum := newFingerprint()
diff --git a/pkg/services/ngalert/notifier/legacy_storage/compat.go b/pkg/services/ngalert/notifier/legacy_storage/compat.go
index 8cd0084471a..72252c270f8 100644
--- a/pkg/services/ngalert/notifier/legacy_storage/compat.go
+++ b/pkg/services/ngalert/notifier/legacy_storage/compat.go
@@ -11,9 +11,7 @@ import (
 	"github.com/grafana/grafana/pkg/services/ngalert/models"
 )
-func NameToUid(name string) string {
-	return base64.RawURLEncoding.EncodeToString([]byte(name))
-}
+var NameToUid = models.NameToUid
 func UidToName(uid string) (string, error) {
 	data, err := base64.RawURLEncoding.DecodeString(uid)
diff --git a/pkg/services/ngalert/provisioning/notification_policies.go b/pkg/services/ngalert/provisioning/notification_policies.go
index 345bf0ae7ab..8eeeeff5211 100644
--- a/pkg/services/ngalert/provisioning/notification_policies.go
+++ b/pkg/services/ngalert/provisioning/notification_policies.go
@@ -2,7 +2,15 @@ package provisioning
 import (
 	"context"
+	"encoding/binary"
 	"fmt"
+	"hash"
+	"hash/fnv"
+	"slices"
+	"unsafe"
+
+	"github.com/prometheus/common/model"
+	"golang.org/x/exp/maps"
 	"github.com/grafana/grafana/pkg/infra/log"
 	"github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions"
@@ -30,28 +38,27 @@ func NewNotificationPolicyService(am alertmanagerConfigStore, prov ProvisioningS
 	}
 }
-func (nps *NotificationPolicyService) GetPolicyTree(ctx context.Context, orgID int64) (definitions.Route, error) {
+func (nps *NotificationPolicyService) GetPolicyTree(ctx context.Context, orgID int64) (definitions.Route, string, error) {
 	rev, err := nps.configStore.Get(ctx, orgID)
 	if err != nil {
-		return definitions.Route{}, err
+		return definitions.Route{}, "", err
 	}
 	if rev.Config.AlertmanagerConfig.Config.Route == nil {
-		return definitions.Route{}, fmt.Errorf("no route present in current alertmanager config")
+		return definitions.Route{}, "", fmt.Errorf("no route present in current alertmanager config")
 	}
 	provenance, err := nps.provenanceStore.GetProvenance(ctx, rev.Config.AlertmanagerConfig.Route, orgID)
 	if err != nil {
-		return definitions.Route{}, err
+		return definitions.Route{}, "", err
 	}
-
 	result := *rev.Config.AlertmanagerConfig.Route
 	result.Provenance = definitions.Provenance(provenance)
-
-	return result, nil
+	version := calculateRouteFingerprint(result)
+	return result, version, nil
 }
-func (nps *NotificationPolicyService) UpdatePolicyTree(ctx context.Context, orgID int64, tree definitions.Route, p models.Provenance) error {
+func (nps *NotificationPolicyService) UpdatePolicyTree(ctx context.Context, orgID int64, tree definitions.Route, p models.Provenance, version string) error {
 	err := tree.Validate()
 	if err != nil {
 		return fmt.Errorf("%w: %s", ErrValidation, err.Error())
@@ -62,6 +69,11 @@ func (nps *NotificationPolicyService) UpdatePolicyTree(ctx context.Context, orgI
 		return err
 	}
+	err = nps.checkOptimisticConcurrency(*revision.Config.AlertmanagerConfig.Route, models.Provenance(tree.Provenance), version, "update")
+	if err != nil {
+		return err
+	}
+
 	receivers, err := nps.receiversToMap(revision.Config.AlertmanagerConfig.Receivers)
 	if err != nil {
 		return err
@@ -154,3 +166,109 @@ func (nps *NotificationPolicyService) ensureDefaultReceiverExists(cfg *definitio
 	nps.log.Error("Grafana Alerting has been configured with a default configuration that is internally inconsistent! The default configuration's notification policy must have a corresponding receiver.")
 	return fmt.Errorf("inconsistent default configuration")
 }
+
+func calculateRouteFingerprint(route definitions.Route) string {
+	sum := fnv.New64a()
+	writeToHash(sum, &route)
+	return fmt.Sprintf("%016x", sum.Sum64())
+}
+
+func writeToHash(sum hash.Hash, r *definitions.Route) {
+	writeBytes := func(b []byte) {
+		_, _ = sum.Write(b)
+		// add a byte sequence that cannot happen in UTF-8 strings.
+		_, _ = sum.Write([]byte{255})
+	}
+	writeString := func(s string) {
+		if len(s) == 0 {
+			writeBytes(nil)
+			return
+		}
+		// #nosec G103
+		// avoid allocation when converting string to byte slice
+		writeBytes(unsafe.Slice(unsafe.StringData(s), len(s)))
+	}
+
+	// this temp slice is used to convert ints to bytes.
+	tmp := make([]byte, 8)
+	writeInt := func(u int64) {
+		binary.LittleEndian.PutUint64(tmp, uint64(u))
+		writeBytes(tmp)
+	}
+	writeBool := func(b bool) {
+		if b {
+			writeInt(1)
+		} else {
+			writeInt(0)
+		}
+	}
+	writeDuration := func(d *model.Duration) {
+		if d == nil {
+			_, _ = sum.Write([]byte{255})
+		} else {
+			binary.LittleEndian.PutUint64(tmp, uint64(*d))
+			_, _ = sum.Write(tmp)
+			_, _ = sum.Write([]byte{255})
+		}
+	}
+
+	writeString(r.Receiver)
+	for _, s := range r.GroupByStr {
+		writeString(s)
+	}
+	for _, labelName := range r.GroupBy {
+		writeString(string(labelName))
+	}
+	writeBool(r.GroupByAll)
+	if len(r.Match) > 0 {
+		keys := maps.Keys(r.Match)
+		slices.Sort(keys)
+		for _, key := range keys {
+			writeString(key)
+			writeString(r.Match[key])
+		}
+	}
+	if len(r.MatchRE) > 0 {
+		keys := maps.Keys(r.MatchRE)
+		slices.Sort(keys)
+		for _, key := range keys {
+			writeString(key)
+			str, err := r.MatchRE[key].MarshalJSON()
+			if err != nil {
+				writeString(fmt.Sprintf("%+v", r.MatchRE))
+			}
+			writeBytes(str)
+		}
+	}
+	for _, matcher := range r.Matchers {
+		writeString(matcher.String())
+	}
+	for _, timeInterval := range r.MuteTimeIntervals {
+		writeString(timeInterval)
+	}
+	for _, timeInterval := range r.ActiveTimeIntervals {
+		writeString(timeInterval)
+	}
+	writeBool(r.Continue)
+	writeDuration(r.GroupWait)
+	writeDuration(r.GroupInterval)
+	writeDuration(r.RepeatInterval)
+	for _, route := range r.Routes {
+		writeToHash(sum, route)
+	}
+}
+
+func (nps *NotificationPolicyService) checkOptimisticConcurrency(current definitions.Route, provenance models.Provenance, desiredVersion string, action string) error {
+	if desiredVersion == "" {
+		if provenance != models.ProvenanceFile {
+			// If the version is not specified and this is not file provisioning, log that optimistic concurrency is disabled for this request.
+			nps.log.Debug("ignoring optimistic concurrency check because version was not provided", "operation", action)
+		}
+		return nil
+	}
+	currentVersion := calculateRouteFingerprint(current)
+	if currentVersion != desiredVersion {
+		return ErrVersionConflict.Errorf("provided version %s of routing tree does not match current version %s", desiredVersion, currentVersion)
+	}
+	return nil
+}
diff --git a/pkg/services/ngalert/provisioning/notification_policies_test.go b/pkg/services/ngalert/provisioning/notification_policies_test.go
index bf9be3b2462..e67fb83be32 100644
--- a/pkg/services/ngalert/provisioning/notification_policies_test.go
+++ b/pkg/services/ngalert/provisioning/notification_policies_test.go
@@ -4,10 +4,9 @@ import (
 	"context"
 	"testing"
+	"github.com/grafana/alerting/definition"
 	"github.com/prometheus/alertmanager/config"
-	"github.com/prometheus/alertmanager/timeinterval"
-	"github.com/prometheus/common/model"
-	"github.com/stretchr/testify/mock"
+	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
 	"github.com/grafana/grafana/pkg/infra/log"
@@ -16,274 +15,304 @@ import (
 	"github.com/grafana/grafana/pkg/services/ngalert/notifier/legacy_storage"
 	"github.com/grafana/grafana/pkg/services/ngalert/tests/fakes"
 	"github.com/grafana/grafana/pkg/setting"
+	"github.com/grafana/grafana/pkg/util"
 )
-func TestNotificationPolicyService(t *testing.T) {
-	t.Run("service gets policy tree from org's AM config", func(t *testing.T) {
-		sut := createNotificationPolicyServiceSut()
+func TestGetPolicyTree(t *testing.T) {
+	orgID := int64(1)
+	rev := getDefaultConfigRevision()
+	expectedVersion := calculateRouteFingerprint(*rev.Config.AlertmanagerConfig.Route)
-		tree, err := sut.GetPolicyTree(context.Background(), 1)
-		require.NoError(t, err)
+	sut, store, prov := createNotificationPolicyServiceSut()
+	store.GetFn = func(ctx context.Context, orgID int64) (*legacy_storage.ConfigRevision, error) {
+		return &rev, nil
+	}
+	expectedProvenance := models.ProvenanceAPI
+	prov.GetProvenanceFunc = func(ctx context.Context, o models.Provisionable, org int64) (models.Provenance, error) {
+		return models.ProvenanceAPI, nil
+	}
-		require.Equal(t, "grafana-default-email", tree.Receiver)
-	})
+	tree, version, err := sut.GetPolicyTree(context.Background(), orgID)
+	require.NoError(t, err)
-	t.Run("error if referenced mute time interval is not existing", func(t *testing.T) {
-		sut := createNotificationPolicyServiceSut()
-		mockStore := &legacy_storage.MockAMConfigStore{}
-		sut.configStore = legacy_storage.NewAlertmanagerConfigStore(mockStore)
-		cfg := createTestAlertingConfig()
-		cfg.AlertmanagerConfig.MuteTimeIntervals = []config.MuteTimeInterval{
+	expectedRoute := *rev.Config.AlertmanagerConfig.Route
+	expectedRoute.Provenance = definitions.Provenance(models.ProvenanceAPI)
+	assert.Equal(t, expectedRoute, tree)
+	assert.Equal(t, expectedVersion, version)
+	assert.Equal(t, expectedProvenance, models.Provenance(tree.Provenance))
+
+	assert.Len(t, store.Calls, 1)
+	assert.Equal(t, "Get", store.Calls[0].Method)
+	assert.Equal(t, orgID, store.Calls[0].Args[1])
+
+	assert.Len(t, prov.Calls, 1)
+	assert.Equal(t, "GetProvenance", prov.Calls[0].MethodName)
+	assert.IsType(t, &definitions.Route{}, prov.Calls[0].Arguments[1])
+	assert.Equal(t, orgID, prov.Calls[0].Arguments[2])
+}
+
+func TestUpdatePolicyTree(t *testing.T) {
+	orgID := int64(1)
+	rev := getDefaultConfigRevision()
+
+	defaultVersion := calculateRouteFingerprint(*rev.Config.AlertmanagerConfig.Route)
+
+	newRoute := definitions.Route{
+		Receiver: rev.Config.AlertmanagerConfig.Receivers[0].Name,
+		Routes: []*definitions.Route{
 			{
-				Name:          "not-the-one-we-need",
-				TimeIntervals: []timeinterval.TimeInterval{},
+				Receiver: "",
+				MuteTimeIntervals: []string{
+					rev.Config.AlertmanagerConfig.TimeIntervals[0].Name,
+				},
+			},
+			{
+				Receiver: rev.Config.AlertmanagerConfig.Receivers[0].Name,
+			},
+		},
+	}
+
+	t.Run("ErrValidation if referenced mute time interval does not exist", func(t *testing.T) {
+		sut, store, _ := createNotificationPolicyServiceSut()
+		store.GetFn = func(ctx context.Context, orgID int64) (*legacy_storage.ConfigRevision, error) {
+			return &rev, nil
+		}
+		newRoute := definitions.Route{
+			Receiver: rev.Config.AlertmanagerConfig.Receivers[0].Name,
+			MuteTimeIntervals: []string{
+				"not-existing",
 			},
 		}
-		data, _ := legacy_storage.SerializeAlertmanagerConfig(*cfg)
-		mockStore.On("GetLatestAlertmanagerConfiguration", mock.Anything, mock.Anything).
-			Return(&models.AlertConfiguration{AlertmanagerConfiguration: string(data)}, nil)
-		mockStore.EXPECT().
-			UpdateAlertmanagerConfiguration(mock.Anything, mock.Anything).
-			Return(nil)
-		newRoute := createTestRoutingTree()
-		newRoute.Routes = append(newRoute.Routes, &definitions.Route{
-			Receiver:          "slack receiver",
-			MuteTimeIntervals: []string{"not-existing"},
-		})
-
-		err := sut.UpdatePolicyTree(context.Background(), 1, newRoute, models.ProvenanceNone)
-		require.Error(t, err)
-	})
-
-	t.Run("pass if referenced mute time interval is existing", func(t *testing.T) {
-		sut := createNotificationPolicyServiceSut()
-		mockStore := &legacy_storage.MockAMConfigStore{}
-		sut.configStore = legacy_storage.NewAlertmanagerConfigStore(mockStore)
-		cfg := createTestAlertingConfig()
-		cfg.AlertmanagerConfig.MuteTimeIntervals = []config.MuteTimeInterval{
-			{
-				Name:          "existing",
-				TimeIntervals: []timeinterval.TimeInterval{},
-			},
-		}
-		cfg.AlertmanagerConfig.TimeIntervals = []config.TimeInterval{
-			{
-				Name:          "existing-ti",
-				TimeIntervals: []timeinterval.TimeInterval{},
-			},
-		}
-		data, _ := legacy_storage.SerializeAlertmanagerConfig(*cfg)
-		mockStore.On("GetLatestAlertmanagerConfiguration", mock.Anything, mock.Anything).
-			Return(&models.AlertConfiguration{AlertmanagerConfiguration: string(data)}, nil)
-		mockStore.EXPECT().
-			UpdateAlertmanagerConfiguration(mock.Anything, mock.Anything).
-			Return(nil)
-		newRoute := createTestRoutingTree()
-		newRoute.Routes = append(newRoute.Routes, &definitions.Route{
-			Receiver:          "slack receiver",
-			MuteTimeIntervals: []string{"existing", "existing-ti"},
-		})
-
-		err := sut.UpdatePolicyTree(context.Background(), 1, newRoute, models.ProvenanceNone)
-		require.NoError(t, err)
-	})
-
-	t.Run("service stitches policy tree into org's AM config", func(t *testing.T) {
-		sut := createNotificationPolicyServiceSut()
-
-		newRoute := createTestRoutingTree()
-
-		err := sut.UpdatePolicyTree(context.Background(), 1, newRoute, models.ProvenanceNone)
-		require.NoError(t, err)
-
-		updated, err := sut.GetPolicyTree(context.Background(), 1)
-		require.NoError(t, err)
-		require.Equal(t, "slack receiver", updated.Receiver)
-	})
-
-	t.Run("no root receiver will error", func(t *testing.T) {
-		sut := createNotificationPolicyServiceSut()
-
-		newRoute := createTestRoutingTree()
-		newRoute.Receiver = ""
-		newRoute.Routes = append(newRoute.Routes, &definitions.Route{
-			Receiver: "",
-		})
-
-		err := sut.UpdatePolicyTree(context.Background(), 1, newRoute, models.ProvenanceNone)
-		require.EqualError(t, err, "invalid object specification: root route must specify a default receiver")
-	})
-
-	t.Run("allow receiver inheritance", func(t *testing.T) {
-		sut := createNotificationPolicyServiceSut()
-
-		newRoute := createTestRoutingTree()
-		newRoute.Routes = append(newRoute.Routes, &definitions.Route{
-			Receiver: "",
-		})
-
-		err := sut.UpdatePolicyTree(context.Background(), 1, newRoute, models.ProvenanceNone)
-		require.NoError(t, err)
-	})
-
-	t.Run("not existing receiver reference will error", func(t *testing.T) {
-		sut := createNotificationPolicyServiceSut()
-
-		newRoute := createTestRoutingTree()
-		newRoute.Routes = append(newRoute.Routes, &definitions.Route{
-			Receiver: "not-existing",
-		})
-
-		err := sut.UpdatePolicyTree(context.Background(), 1, newRoute, models.ProvenanceNone)
-		require.Error(t, err)
-	})
-
-	t.Run("existing receiver reference will pass", func(t *testing.T) {
-		sut := createNotificationPolicyServiceSut()
-		mockStore := &legacy_storage.MockAMConfigStore{}
-		sut.configStore = legacy_storage.NewAlertmanagerConfigStore(mockStore)
-		cfg := createTestAlertingConfig()
-		data, _ := legacy_storage.SerializeAlertmanagerConfig(*cfg)
-		mockStore.On("GetLatestAlertmanagerConfiguration", mock.Anything, mock.Anything).
-			Return(&models.AlertConfiguration{AlertmanagerConfiguration: string(data)}, nil)
-		mockStore.EXPECT().
-			UpdateAlertmanagerConfiguration(mock.Anything, mock.Anything).
-			Return(nil)
-		newRoute := createTestRoutingTree()
-		newRoute.Routes = append(newRoute.Routes, &definitions.Route{
-			Receiver: "existing",
-		})
-
-		err := sut.UpdatePolicyTree(context.Background(), 1, newRoute, models.ProvenanceNone)
-		require.NoError(t, err)
-	})
-
-	t.Run("default provenance of records is none", func(t *testing.T) {
-		sut := createNotificationPolicyServiceSut()
-
-		tree, err := sut.GetPolicyTree(context.Background(), 1)
-		require.NoError(t, err)
-
-		require.Equal(t, models.ProvenanceNone, models.Provenance(tree.Provenance))
-	})
-
-	t.Run("service returns upgraded provenance value", func(t *testing.T) {
-		sut := createNotificationPolicyServiceSut()
-		newRoute := createTestRoutingTree()
-
-		err := sut.UpdatePolicyTree(context.Background(), 1, newRoute, models.ProvenanceAPI)
-		require.NoError(t, err)
-
-		updated, err := sut.GetPolicyTree(context.Background(), 1)
-		require.NoError(t, err)
-		require.Equal(t, models.ProvenanceAPI, models.Provenance(updated.Provenance))
-	})
-
-	t.Run("service respects concurrency token when updating", func(t *testing.T) {
-		sut := createNotificationPolicyServiceSut()
-		fake := fakes.NewFakeAlertmanagerConfigStore(defaultAlertmanagerConfigJSON)
-		sut.configStore = legacy_storage.NewAlertmanagerConfigStore(fake)
-		newRoute := createTestRoutingTree()
-		config, err := sut.configStore.Get(context.Background(), 1)
-		require.NoError(t, err)
-		expectedConcurrencyToken := config.ConcurrencyToken
-
-		err = sut.UpdatePolicyTree(context.Background(), 1, newRoute, models.ProvenanceAPI)
-		require.NoError(t, err)
-
-		intercepted := fake.LastSaveCommand
-		require.Equal(t, expectedConcurrencyToken, intercepted.FetchedConfigurationHash)
-	})
-
-	t.Run("updating invalid route returns ValidationError", func(t *testing.T) {
-		sut := createNotificationPolicyServiceSut()
-		invalid := createTestRoutingTree()
-		repeat := model.Duration(0)
-		invalid.RepeatInterval = &repeat
-
-		err := sut.UpdatePolicyTree(context.Background(), 1, invalid, models.ProvenanceNone)
-
-		require.Error(t, err)
+		err := sut.UpdatePolicyTree(context.Background(), orgID, newRoute, models.ProvenanceNone, defaultVersion)
 		require.ErrorIs(t, err, ErrValidation)
 	})
-	t.Run("deleting route replaces with default", func(t *testing.T) {
-		sut := createNotificationPolicyServiceSut()
-
-		tree, err := sut.ResetPolicyTree(context.Background(), 1)
-
-		require.NoError(t, err)
-		require.Equal(t, "grafana-default-email", tree.Receiver)
-		require.Nil(t, tree.Routes)
-		require.Equal(t, []model.LabelName{models.FolderTitleLabel, model.AlertNameLabel}, tree.GroupBy)
+	t.Run("ErrValidation if root route has no receiver", func(t *testing.T) {
+		rev := getDefaultConfigRevision()
+		sut, store, _ := createNotificationPolicyServiceSut()
+		store.GetFn = func(ctx context.Context, orgID int64) (*legacy_storage.ConfigRevision, error) {
+			return &rev, nil
+		}
+		newRoute := definitions.Route{
+			Receiver: "",
+		}
+		err := sut.UpdatePolicyTree(context.Background(), orgID, newRoute, models.ProvenanceNone, defaultVersion)
+		require.ErrorIs(t, err, ErrValidation)
 	})
-	t.Run("deleting route with missing default receiver restores receiver", func(t *testing.T) {
-		sut := createNotificationPolicyServiceSut()
-		mockStore := &legacy_storage.MockAMConfigStore{}
-		sut.configStore = legacy_storage.NewAlertmanagerConfigStore(mockStore)
-		cfg := createTestAlertingConfig()
-		cfg.AlertmanagerConfig.Route = &definitions.Route{
-			Receiver: "slack receiver",
+	t.Run("ErrValidation if referenced receiver does not exist", func(t *testing.T) {
+		rev := getDefaultConfigRevision()
+		sut, store, _ := createNotificationPolicyServiceSut()
+		store.GetFn = func(ctx context.Context, orgID int64) (*legacy_storage.ConfigRevision, error) {
+			return &rev, nil
 		}
-		cfg.AlertmanagerConfig.Receivers = []*definitions.PostableApiReceiver{
-			{
-				Receiver: config.Receiver{
-					Name: "slack receiver",
+		newRoute := definitions.Route{
+			Receiver: "unknown",
+		}
+		err := sut.UpdatePolicyTree(context.Background(), orgID, newRoute, models.ProvenanceNone, defaultVersion)
+		require.ErrorIs(t, err, ErrValidation)
+
+		t.Run("including sub-routes", func(t *testing.T) {
+			newRoute := definitions.Route{
+				Receiver: rev.Config.AlertmanagerConfig.Receivers[0].Name,
+				Routes: []*definitions.Route{
+					{Receiver: "unknown"},
 				},
-			},
-			// No default receiver! Only our custom one.
+			}
+			err := sut.UpdatePolicyTree(context.Background(), orgID, newRoute, models.ProvenanceNone, defaultVersion)
+			require.ErrorIs(t, err, ErrValidation)
+		})
+	})
+
+	t.Run("ErrVersionConflict if provided version does not match current", func(t *testing.T) {
+		rev := getDefaultConfigRevision()
+		sut, store, _ := createNotificationPolicyServiceSut()
+		store.GetFn = func(ctx context.Context, orgID int64) (*legacy_storage.ConfigRevision, error) {
+			return &rev, nil
 		}
-		data, _ := legacy_storage.SerializeAlertmanagerConfig(*cfg)
-		mockStore.On("GetLatestAlertmanagerConfiguration", mock.Anything, mock.Anything).
-			Return(&models.AlertConfiguration{AlertmanagerConfiguration: string(data)}, nil)
-		var interceptedSave = models.SaveAlertmanagerConfigurationCmd{}
-		mockStore.EXPECT().SaveSucceedsIntercept(&interceptedSave)
+		newRoute := definitions.Route{
+			Receiver: rev.Config.AlertmanagerConfig.Receivers[0].Name,
+		}
+		err := sut.UpdatePolicyTree(context.Background(), orgID, newRoute, models.ProvenanceNone, "wrong-version")
+		require.ErrorIs(t, err, ErrVersionConflict)
+	})
-		tree, err := sut.ResetPolicyTree(context.Background(), 1)
+	t.Run("updates Route and sets provenance in transaction if route is valid and version matches", func(t *testing.T) {
+		sut, store, prov := createNotificationPolicyServiceSut()
+		store.GetFn = func(ctx context.Context, orgID int64) (*legacy_storage.ConfigRevision, error) {
+			return &rev, nil
+		}
+		expectedRev := getDefaultConfigRevision()
+		route := newRoute
+		expectedRev.ConcurrencyToken = rev.ConcurrencyToken
+		expectedRev.Config.AlertmanagerConfig.Route = &route
+		err := sut.UpdatePolicyTree(context.Background(), orgID, newRoute, models.ProvenanceAPI, defaultVersion)
 		require.NoError(t, err)
-		require.Equal(t, "grafana-default-email", tree.Receiver)
-		require.NotEmpty(t, interceptedSave.AlertmanagerConfiguration)
-		// Deserializing with no error asserts that the saved configStore is semantically valid.
-		newCfg, err := legacy_storage.DeserializeAlertmanagerConfig([]byte(interceptedSave.AlertmanagerConfiguration))
+
+		assert.Len(t, store.Calls, 2)
+		assert.Equal(t, "Save", store.Calls[1].Method)
+		assertInTransaction(t, store.Calls[1].Args[0].(context.Context))
+		assert.Equal(t, &expectedRev, store.Calls[1].Args[1])
+
+		assert.Len(t, prov.Calls, 1)
+		assert.Equal(t, "SetProvenance", prov.Calls[0].MethodName)
+		assertInTransaction(t, prov.Calls[0].Arguments[0].(context.Context))
+		assert.IsType(t, &definitions.Route{}, prov.Calls[0].Arguments[1])
+		assert.Equal(t, orgID, prov.Calls[0].Arguments[2].(int64))
+		assert.Equal(t, models.ProvenanceAPI, prov.Calls[0].Arguments[3].(models.Provenance))
+	})
+
+	t.Run("bypasses optimistic concurrency if provided version is empty", func(t *testing.T) {
+		sut, store, prov := createNotificationPolicyServiceSut()
+		store.GetFn = func(ctx context.Context, orgID int64) (*legacy_storage.ConfigRevision, error) {
+			return &rev, nil
+		}
+
+		expectedRev := getDefaultConfigRevision()
+		expectedRev.Config.AlertmanagerConfig.Route = &newRoute
+		expectedRev.ConcurrencyToken = rev.ConcurrencyToken
+
+		err := sut.UpdatePolicyTree(context.Background(), orgID, newRoute, models.ProvenanceAPI, "")
 		require.NoError(t, err)
-		require.Len(t, newCfg.AlertmanagerConfig.Receivers, 2)
+
+		assert.Len(t, store.Calls, 2)
+		assert.Equal(t, "Save", store.Calls[1].Method)
+		assertInTransaction(t, store.Calls[1].Args[0].(context.Context))
+		assert.Equal(t, &expectedRev, store.Calls[1].Args[1])
+
+		assert.Len(t, prov.Calls, 1)
+		assert.Equal(t, "SetProvenance", prov.Calls[0].MethodName)
+		assertInTransaction(t, prov.Calls[0].Arguments[0].(context.Context))
+		assert.IsType(t, &definitions.Route{}, prov.Calls[0].Arguments[1])
+		assert.Equal(t, orgID, prov.Calls[0].Arguments[2].(int64))
+		assert.Equal(t, models.ProvenanceAPI, prov.Calls[0].Arguments[3].(models.Provenance))
+	})
 }
-func createNotificationPolicyServiceSut() *NotificationPolicyService {
+func TestResetPolicyTree(t *testing.T) {
+	orgID := int64(1)
+
+	currentRevision := getDefaultConfigRevision()
+	currentRevision.Config.AlertmanagerConfig.Route = &definitions.Route{
+		Receiver: "receiver",
+	}
+	currentRevision.Config.TemplateFiles = map[string]string{
+		"test": "test",
+	}
+	currentRevision.Config.AlertmanagerConfig.TimeIntervals = []config.TimeInterval{
+		{
+			Name: "test",
+		},
+	}
+	currentRevision.Config.AlertmanagerConfig.Receivers = []*definitions.PostableApiReceiver{
+		{
+			Receiver: config.Receiver{Name: "receiver"},
+			PostableGrafanaReceivers: definitions.PostableGrafanaReceivers{
+				GrafanaManagedReceivers: []*definitions.PostableGrafanaReceiver{
+					{
+						UID: "test", Name: "test", Type: "email", Settings: []byte("{}"),
+					},
+				},
+			},
+		},
+	}
+
+	t.Run("Error if default config is invalid", func(t *testing.T) {
+		sut, _, _ := createNotificationPolicyServiceSut()
+		sut.settings = setting.UnifiedAlertingSettings{
+			DefaultConfiguration: "{",
+		}
+		_, err := sut.ResetPolicyTree(context.Background(), orgID)
+		require.ErrorContains(t, err, "failed to parse default alertmanager config")
+	})
+
+	t.Run("replaces route with one from the default config and copies receivers if they do not exist", func(t *testing.T) {
+		defaultConfig := getDefaultConfigRevision().Config
+		data, err := legacy_storage.SerializeAlertmanagerConfig(*defaultConfig)
+		require.NoError(t, err)
+
+		sut, store, prov := createNotificationPolicyServiceSut()
+		sut.settings = setting.UnifiedAlertingSettings{
+			DefaultConfiguration: string(data),
+		}
+
+		store.GetFn = func(ctx context.Context, orgID int64) (*legacy_storage.ConfigRevision, error) {
+			data, err := legacy_storage.SerializeAlertmanagerConfig(*currentRevision.Config)
+			require.NoError(t, err)
+			cfg, err := legacy_storage.DeserializeAlertmanagerConfig(data)
+			require.NoError(t, err)
+			return &legacy_storage.ConfigRevision{
+				Config:           cfg,
+				ConcurrencyToken: util.GenerateShortUID(),
+			}, nil
+		}
+
+		expectedRev := currentRevision
+		expectedRev.Config.AlertmanagerConfig.Route = getDefaultConfigRevision().Config.AlertmanagerConfig.Route
+		expectedRev.Config.AlertmanagerConfig.Receivers = append(expectedRev.Config.AlertmanagerConfig.Receivers, getDefaultConfigRevision().Config.AlertmanagerConfig.Receivers[0])
+
+		tree, err := sut.ResetPolicyTree(context.Background(), orgID)
+		require.NoError(t, err)
+		assert.Equal(t, *defaultConfig.AlertmanagerConfig.Route, tree)
+
+		assert.Len(t, store.Calls, 2)
+		assert.Equal(t, "Save", store.Calls[1].Method)
+		assertInTransaction(t, store.Calls[1].Args[0].(context.Context))
+		resetRev := store.Calls[1].Args[1].(*legacy_storage.ConfigRevision)
+		assert.Equal(t, expectedRev.Config.AlertmanagerConfig, resetRev.Config.AlertmanagerConfig)
+
+		assert.Len(t, prov.Calls, 1)
+		assert.Equal(t, "DeleteProvenance", prov.Calls[0].MethodName)
+		assertInTransaction(t, prov.Calls[0].Arguments[0].(context.Context))
+		assert.IsType(t, &definitions.Route{}, prov.Calls[0].Arguments[1])
+		assert.Equal(t, orgID, prov.Calls[0].Arguments[2])
+	})
+}
+
+func createNotificationPolicyServiceSut() (*NotificationPolicyService, *legacy_storage.AlertmanagerConfigStoreFake, *fakes.FakeProvisioningStore) {
+	prov := fakes.NewFakeProvisioningStore()
+	configStore := &legacy_storage.AlertmanagerConfigStoreFake{
+		GetFn: func(ctx context.Context, orgID int64) (*legacy_storage.ConfigRevision, error) {
+			rev := getDefaultConfigRevision()
+			return &rev, nil
+		},
+	}
 	return &NotificationPolicyService{
-		configStore:     legacy_storage.NewAlertmanagerConfigStore(fakes.NewFakeAlertmanagerConfigStore(defaultAlertmanagerConfigJSON)),
-		provenanceStore: fakes.NewFakeProvisioningStore(),
+		configStore:     configStore,
+		provenanceStore: prov,
 		xact:            newNopTransactionManager(),
 		log:             log.NewNopLogger(),
 		settings: setting.UnifiedAlertingSettings{
 			DefaultConfiguration: setting.GetAlertmanagerDefaultConfiguration(),
 		},
-	}
+	}, configStore, prov
 }
-func createTestRoutingTree() definitions.Route {
-	return definitions.Route{
-		Receiver: "slack receiver",
+func getDefaultConfigRevision() legacy_storage.ConfigRevision {
+	return legacy_storage.ConfigRevision{
+		Config: &definitions.PostableUserConfig{
+			AlertmanagerConfig: definitions.PostableApiAlertingConfig{
+				Config: definition.Config{
+					Route: &definitions.Route{
+						Receiver: "test-receiver",
+					},
+					InhibitRules: nil,
+					TimeIntervals: []config.TimeInterval{
+						{
+							Name: "test-mute-interval",
+						},
+					},
+				},
+				Receivers: []*definitions.PostableApiReceiver{
+					{
+						Receiver: config.Receiver{
+							Name: "test-receiver",
+						},
+					},
+				},
+			},
+		},
+		ConcurrencyToken: util.GenerateShortUID(),
 	}
 }
-
-func createTestAlertingConfig() *definitions.PostableUserConfig {
-	cfg, _ := legacy_storage.DeserializeAlertmanagerConfig([]byte(setting.GetAlertmanagerDefaultConfiguration()))
-	cfg.AlertmanagerConfig.Receivers = append(cfg.AlertmanagerConfig.Receivers,
-		&definitions.PostableApiReceiver{
-			Receiver: config.Receiver{
-				// default one from createTestRoutingTree()
-				Name: "slack receiver",
-			},
-		})
-	cfg.AlertmanagerConfig.Receivers = append(cfg.AlertmanagerConfig.Receivers,
-		&definitions.PostableApiReceiver{
-			Receiver: config.Receiver{
-				Name: "existing",
-			},
-		})
-	return cfg
-}
diff --git a/pkg/services/ngalert/store/alert_rule.go b/pkg/services/ngalert/store/alert_rule.go
index 93c19037b15..a09f1dcc144 100644
--- a/pkg/services/ngalert/store/alert_rule.go
+++ b/pkg/services/ngalert/store/alert_rule.go
@@ -274,9 +274,64 @@ func (st DBstore) UpdateAlertRules(ctx context.Context, rules []ngmodels.UpdateR
 		if _, err := sess.Insert(&ruleVersions); err != nil {
 			return fmt.Errorf("failed to create new rule versions: %w", err)
 		}
+
+		for _, rule := range ruleVersions {
+			// delete old versions of alert rule
+			_, err = st.deleteOldAlertRuleVersions(ctx, rule.RuleUID, rule.RuleOrgID, st.Cfg.RuleVersionRecordLimit)
+			if err != nil {
+				st.Logger.Warn("Failed to delete old alert rule versions", "org", rule.RuleOrgID, "rule", rule.RuleUID, "error", err)
+			}
+		}
+	}
+
+	return nil
+})
+}
+
+func (st DBstore) deleteOldAlertRuleVersions(ctx context.Context, ruleUID string, orgID int64, limit int) (int64, error) {
+	if limit < 0 {
+		return 0, fmt.Errorf("failed to delete old alert rule versions: limit is set to '%d' but needs to be >= 0", limit)
+	}
+
+	if limit < 1 {
+		return 0, nil
+	}
+
+	var affectedRows int64
+	err := st.SQLStore.WithDbSession(ctx, func(sess *db.Session) error {
+		highest := &alertRuleVersion{}
+		ok, err := sess.Table("alert_rule_version").Desc("id").Where("rule_org_id = ?", orgID).Where("rule_uid = ?", ruleUID).Limit(1, limit).Get(highest)
+		if err != nil {
+			return err
+		}
+		if !ok {
+			// No alert rule versions past the limit exist. Nothing to clean up.
+			affectedRows = 0
+			return nil
+		}
+
+		res, err := sess.Exec(`
+			DELETE FROM
+				alert_rule_version
+			WHERE
+				rule_org_id = ? AND rule_uid = ?
+			AND
+				id <= ?
+		`, orgID, ruleUID, highest.ID)
+		if err != nil {
+			return err
+		}
+		rows, err := res.RowsAffected()
+		if err != nil {
+			return err
+		}
+		affectedRows = rows
+		if affectedRows > 0 {
+			st.Logger.Info("Deleted old alert_rule_version(s)", "org", orgID, "limit", limit, "delete_count", affectedRows)
+		}
+		return nil
+	})
+	return affectedRows, err
+}
 // preventIntermediateUniqueConstraintViolations prevents unique constraint violations caused by an intermediate update.
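`deleteOldAlertRuleVersions` keeps the newest `limit` version rows per rule: it looks up the id of the (limit+1)-th newest row and deletes every row with an id at or below it, so a limit of 0 disables cleanup and a negative limit is rejected. The same keep-newest-N policy in a toy in-memory form (a sketch only, assuming ids grow monotonically, as the auto-increment column above does):

```go
package main

import (
	"fmt"
	"sort"
)

type version struct{ ID int64 }

// pruneOldVersions keeps the `limit` newest versions (highest IDs) and
// returns the kept slice plus how many entries were deleted.
func pruneOldVersions(versions []version, limit int) ([]version, int, error) {
	if limit < 0 {
		return versions, 0, fmt.Errorf("limit must be >= 0, got %d", limit)
	}
	if limit == 0 { // 0 disables cleanup, matching the store implementation
		return versions, 0, nil
	}
	// Newest first, like the Desc("id") ordering in the SQL version.
	sort.Slice(versions, func(i, j int) bool { return versions[i].ID > versions[j].ID })
	if len(versions) <= limit {
		return versions, 0, nil
	}
	deleted := len(versions) - limit
	return versions[:limit], deleted, nil
}

func main() {
	vs := []version{{1}, {2}, {3}}
	kept, n, _ := pruneOldVersions(vs, 1)
	fmt.Println(kept, n) // [{3}] 2
}
```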
@@ -352,7 +407,7 @@ func newTitlesOverlapExisting(rules []ngmodels.UpdateRule) bool {
 // CountInFolder is a handler for retrieving the number of alert rules of
 // specific organisation associated with a given namespace (parent folder).
-func (st DBstore) CountInFolders(ctx context.Context, orgID int64, folderUIDs []string, u identity.Requester) (int64, error) {
+func (st DBstore) CountInFolders(ctx context.Context, orgID int64, folderUIDs []string, _ identity.Requester) (int64, error) {
 	if len(folderUIDs) == 0 {
 		return 0, nil
 	}
diff --git a/pkg/services/ngalert/store/alert_rule_test.go b/pkg/services/ngalert/store/alert_rule_test.go
index 2969f24d56b..df2cb83ddbc 100644
--- a/pkg/services/ngalert/store/alert_rule_test.go
+++ b/pkg/services/ngalert/store/alert_rule_test.go
@@ -1472,3 +1472,132 @@ func setupFolderService(t *testing.T, sqlStore db.DB, cfg *setting.Cfg, features
 	return testutil.SetupFolderService(t, cfg, sqlStore, dashboardStore, folderStore, inProcBus, features, &actest.FakeAccessControl{ExpectedEvaluate: true})
 }
+
+func TestIntegration_AlertRuleVersionsCleanup(t *testing.T) {
+	if testing.Short() {
+		t.Skip("skipping integration test")
+	}
+	cfg := setting.NewCfg()
+	cfg.UnifiedAlerting = setting.UnifiedAlertingSettings{
+		BaseInterval: time.Duration(rand.Int63n(100)+1) * time.Second,
+	}
+	sqlStore := db.InitTestDB(t)
+	store := &DBstore{
+		SQLStore:      sqlStore,
+		Cfg:           cfg.UnifiedAlerting,
+		FolderService: setupFolderService(t, sqlStore, cfg, featuremgmt.WithFeatures()),
+		Logger:        &logtest.Fake{},
+	}
+	generator := models.RuleGen
+	generator = generator.With(generator.WithIntervalMatching(store.Cfg.BaseInterval), generator.WithUniqueOrgID())
+
+	t.Run("when calling the cleanup with fewer records than the limit all records should stay", func(t *testing.T) {
+		alertingCfgSnapshot := cfg.UnifiedAlerting
+		defer func() {
+			cfg.UnifiedAlerting = alertingCfgSnapshot
+		}()
+		cfg.UnifiedAlerting = setting.UnifiedAlertingSettings{BaseInterval: alertingCfgSnapshot.BaseInterval, RuleVersionRecordLimit: 10}
+		rule := createRule(t, store, generator)
+		firstNewRule := models.CopyRule(rule)
+		firstNewRule.Title = util.GenerateShortUID()
+		err := store.UpdateAlertRules(context.Background(), []models.UpdateRule{{
+			Existing: rule,
+			New:      *firstNewRule,
+		},
+		})
+		require.NoError(t, err)
+		firstNewRule.Version = firstNewRule.Version + 1
+		secondNewRule := models.CopyRule(firstNewRule)
+		secondNewRule.Title = util.GenerateShortUID()
+		err = store.UpdateAlertRules(context.Background(), []models.UpdateRule{{
+			Existing: firstNewRule,
+			New:      *secondNewRule,
+		},
+		})
+		require.NoError(t, err)
+		titleMap := map[string]bool{
+			secondNewRule.Title: false,
+			rule.Title:          false,
+		}
+
+		err = sqlStore.WithDbSession(context.Background(), func(sess *db.Session) error {
+			alertRuleVersions := make([]*alertRuleVersion, 0)
+			err := sess.Table(alertRuleVersion{}).Desc("id").Where("rule_org_id = ? and rule_uid = ?", rule.OrgID, rule.UID).Find(&alertRuleVersions)
+			if err != nil {
+				return err
+			}
+			require.NoError(t, err)
+			assert.Len(t, alertRuleVersions, 2)
+			for _, value := range alertRuleVersions {
+				assert.False(t, titleMap[value.Title])
+				titleMap[value.Title] = true
+			}
+			assert.Equal(t, true, titleMap[firstNewRule.Title])
+			assert.Equal(t, true, titleMap[secondNewRule.Title])
+			return err
+		})
+		require.NoError(t, err)
+	})
+
+	t.Run("only oldest records surpassing the limit should be deleted", func(t *testing.T) {
+		alertingCfgSnapshot := cfg.UnifiedAlerting
+		defer func() {
+			cfg.UnifiedAlerting = alertingCfgSnapshot
+		}()
+		cfg.UnifiedAlerting = setting.UnifiedAlertingSettings{BaseInterval: alertingCfgSnapshot.BaseInterval, RuleVersionRecordLimit: 1}
+		rule := createRule(t, store, generator)
+		oldRule := models.CopyRule(rule)
+		oldRule.Title = "old-record"
+		err := store.UpdateAlertRules(context.Background(), []models.UpdateRule{{
+			Existing: rule,
+			New:      *oldRule,
+		}}) // first entry in `rule_version_history` table happens here
+		require.NoError(t, err)
+
+		rule.Version = rule.Version + 1
+		middleRule := models.CopyRule(rule)
+		middleRule.Title = "middle-record"
+		err = store.UpdateAlertRules(context.Background(), []models.UpdateRule{{
+			Existing: rule,
+			New:      *middleRule,
+		}}) // second entry in `rule_version_history` table happens here
+		require.NoError(t, err)
+
+		rule.Version = rule.Version + 1
+		newerRule := models.CopyRule(rule)
+		newerRule.Title = "newer-record"
+		err = store.UpdateAlertRules(context.Background(), []models.UpdateRule{{
+			Existing: rule,
+			New:      *newerRule,
+		}}) // third entry in `rule_version_history` table happens here
+		require.NoError(t, err)
+
+		// `old-record` and `middle-record` should be deleted since the limit is 1 and there are 3 records in total
+		rowsAffected, err := store.deleteOldAlertRuleVersions(context.Background(), rule.UID, rule.OrgID, 1)
+		require.NoError(t, err)
+		require.Equal(t, int64(2), rowsAffected)
+
+		err = sqlStore.WithDbSession(context.Background(), func(sess *db.Session) error {
+			var alertRuleVersions []*alertRuleVersion
+			err := sess.Table(alertRuleVersion{}).Desc("id").Where("rule_org_id = ? and rule_uid = ?", rule.OrgID, rule.UID).Find(&alertRuleVersions)
+			if err != nil {
+				return err
+			}
+			require.NoError(t, err)
+			assert.Len(t, alertRuleVersions, 1)
+			assert.Equal(t, "newer-record", alertRuleVersions[0].Title)
+			return err
+		})
+		require.NoError(t, err)
+	})
+
+	t.Run("limit set to 0 should not fail", func(t *testing.T) {
+		count, err := store.deleteOldAlertRuleVersions(context.Background(), "", 1, 0)
+		require.NoError(t, err)
+		require.Equal(t, int64(0), count)
+	})
+	t.Run("limit set to negative should fail", func(t *testing.T) {
+		_, err := store.deleteOldAlertRuleVersions(context.Background(), "", 1, -1)
+		require.Error(t, err)
+	})
+}
diff --git a/pkg/services/ngalert/tests/fakes/provisioning.go b/pkg/services/ngalert/tests/fakes/provisioning.go
index 1274fa42ff6..43de0a6dc68 100644
--- a/pkg/services/ngalert/tests/fakes/provisioning.go
+++ b/pkg/services/ngalert/tests/fakes/provisioning.go
@@ -8,7 +8,12 @@ import (
 )
 type FakeProvisioningStore struct {
-	Records map[int64]map[string]models.Provenance
+	Calls                []Call
+	Records              map[int64]map[string]models.Provenance
+	GetProvenanceFunc    func(ctx context.Context, o models.Provisionable, org int64) (models.Provenance, error)
+	GetProvenancesFunc   func(ctx context.Context, orgID int64, resourceType string) (map[string]models.Provenance, error)
+	SetProvenanceFunc    func(ctx context.Context, o models.Provisionable, org int64, p models.Provenance) error
+	DeleteProvenanceFunc func(ctx context.Context, o models.Provisionable, org int64) error
 }
 func NewFakeProvisioningStore() *FakeProvisioningStore {
@@ -18,6 +23,10 @@ func NewFakeProvisioningStore() *FakeProvisioningStore {
 }
 func (f *FakeProvisioningStore) GetProvenance(ctx context.Context, o models.Provisionable, org int64) (models.Provenance, error) {
+	f.Calls = append(f.Calls, Call{MethodName: "GetProvenance", Arguments: []any{ctx, o, org}})
+	if f.GetProvenanceFunc != nil {
+		return f.GetProvenanceFunc(ctx, o, org)
+	}
 	if val, ok := f.Records[org]; ok {
 		if prov, ok := val[o.ResourceID()+o.ResourceType()]; ok {
 			return prov, nil
@@ -27,6 +36,10 @@ func (f *FakeProvisioningStore) GetProvenance(ctx context.Context, o models.Prov
 }
 func (f *FakeProvisioningStore) GetProvenances(ctx context.Context, orgID int64, resourceType string) (map[string]models.Provenance, error) {
+	f.Calls = append(f.Calls, Call{MethodName: "GetProvenances", Arguments: []any{ctx, orgID, resourceType}})
+	if f.GetProvenancesFunc != nil {
+		return f.GetProvenancesFunc(ctx, orgID, resourceType)
+	}
 	results := make(map[string]models.Provenance)
 	if val, ok := f.Records[orgID]; ok {
 		for k, v := range val {
@@ -39,15 +52,25 @@ func (f *FakeProvisioningStore) GetProvenances(ctx context.Context, orgID int64,
 }
 func (f *FakeProvisioningStore) SetProvenance(ctx context.Context, o models.Provisionable, org int64, p models.Provenance) error {
+	f.Calls = append(f.Calls, Call{MethodName: "SetProvenance", Arguments: []any{ctx, o, org, p}})
+	if f.SetProvenanceFunc != nil {
+		return f.SetProvenanceFunc(ctx, o, org, p)
+	}
 	if _, ok := f.Records[org]; !ok {
 		f.Records[org] = map[string]models.Provenance{}
 	}
-	_ = f.DeleteProvenance(ctx, o, org) // delete old entries first
+	if val, ok := f.Records[org]; ok {
+		delete(val, o.ResourceID()+o.ResourceType())
+	}
 	f.Records[org][o.ResourceID()+o.ResourceType()] = p
 	return nil
 }
 func (f *FakeProvisioningStore) DeleteProvenance(ctx context.Context, o models.Provisionable, org int64) error {
+	f.Calls = append(f.Calls, Call{MethodName: "DeleteProvenance", Arguments: []any{ctx, o, org}})
+	if f.DeleteProvenanceFunc != nil {
+		return f.DeleteProvenanceFunc(ctx, o, org)
+	}
 	if val, ok := f.Records[org]; ok {
 		delete(val, o.ResourceID()+o.ResourceType())
 	}
diff --git a/pkg/services/pluginsintegration/pluginassets/pluginassets.go b/pkg/services/pluginsintegration/pluginassets/pluginassets.go
index aca1e536a12..3184a4ec772 100644
--- a/pkg/services/pluginsintegration/pluginassets/pluginassets.go
+++ b/pkg/services/pluginsintegration/pluginassets/pluginassets.go
@@ -2,14 +2,21 @@ package pluginassets
 import (
 	"context"
+	"encoding/base64"
+	"encoding/hex"
+	"fmt"
+	"path"
+	"path/filepath"
+	"sync"
 	"github.com/Masterminds/semver/v3"
 	"github.com/grafana/grafana/pkg/infra/log"
 	"github.com/grafana/grafana/pkg/plugins"
+	"github.com/grafana/grafana/pkg/plugins/config"
+	"github.com/grafana/grafana/pkg/plugins/manager/signature"
 	"github.com/grafana/grafana/pkg/plugins/pluginscdn"
 	"github.com/grafana/grafana/pkg/services/pluginsintegration/pluginstore"
-	"github.com/grafana/grafana/pkg/setting"
 )
 const (
@@ -21,18 +28,24 @@ var (
 	scriptLoadingMinSupportedVersion = semver.MustParse(CreatePluginVersionScriptSupportEnabled)
 )
-func ProvideService(cfg *setting.Cfg, cdn *pluginscdn.Service) *Service {
+func ProvideService(cfg *config.PluginManagementCfg, cdn *pluginscdn.Service, sig *signature.Signature, store pluginstore.Store) *Service {
 	return &Service{
-		cfg: cfg,
-		cdn: cdn,
-		log: log.New("pluginassets"),
+		cfg:       cfg,
+		cdn:       cdn,
+		signature: sig,
+		store:     store,
+		log:       log.New("pluginassets"),
 	}
 }
 type Service struct {
-	cfg *setting.Cfg
-	cdn *pluginscdn.Service
-	log log.Logger
+	cfg       *config.PluginManagementCfg
+	cdn       *pluginscdn.Service
+	signature *signature.Signature
+	store     pluginstore.Store
+	log       log.Logger
+
+	moduleHashCache sync.Map
 }
 // LoadingStrategy calculates the loading strategy for a plugin.
@@ -69,6 +82,86 @@ func (s *Service) LoadingStrategy(_ context.Context, p pluginstore.Plugin) plugi
 	return plugins.LoadingStrategyFetch
 }
+// ModuleHash returns the module.js SHA256 hash for a plugin in the format expected by the browser for SRI checks.
+// The module hash is read from the plugin's MANIFEST.txt file.
+// The plugin can also be a nested plugin.
+// If the plugin is unsigned, an empty string is returned.
+// The results are cached to avoid repeated reads from the MANIFEST.txt file.
+func (s *Service) ModuleHash(ctx context.Context, p pluginstore.Plugin) string {
+	k := s.moduleHashCacheKey(p)
+	cachedValue, ok := s.moduleHashCache.Load(k)
+	if ok {
+		return cachedValue.(string)
+	}
+	mh, err := s.moduleHash(ctx, p, "")
+	if err != nil {
+		s.log.Error("Failed to calculate module hash", "plugin", p.ID, "error", err)
+	}
+	s.moduleHashCache.Store(k, mh)
+	return mh
+}
+
+// moduleHash is the underlying function for ModuleHash. See its documentation for more information.
+// It will read the module hash from the MANIFEST.txt in the [[plugins.FS]] of the provided plugin.
+// If childFSBase is provided, the function will try to get the hash from MANIFEST.txt for the provided child plugin's
+// module.js file, rather than for the provided plugin.
+func (s *Service) moduleHash(ctx context.Context, p pluginstore.Plugin, childFSBase string) (r string, err error) {
+	if !s.cfg.Features.SriChecksEnabled {
+		return "", nil
+	}
+
+	// Ignore unsigned plugins
+	if !p.Signature.IsValid() {
+		return "", nil
+	}
+
+	if p.Parent != nil {
+		// Nested plugin
+		parent, ok := s.store.Plugin(ctx, p.Parent.ID)
+		if !ok {
+			return "", fmt.Errorf("parent plugin %q for child plugin %q not found", p.Parent.ID, p.ID)
+		}
+
+		// The module hash is contained within the parent's MANIFEST.txt file.
+		// For example, the parent's MANIFEST.txt will contain an entry similar to this:
+		//
+		// ```
+		// "datasource/module.js": "1234567890abcdef..."
+		// ```
+		//
+		// Recursively call moduleHash with the parent plugin and with the child plugin folder path
+		// to get the correct module hash for the nested plugin.
+		if childFSBase == "" {
+			childFSBase = p.Base()
+		}
+		return s.moduleHash(ctx, parent, childFSBase)
+	}
+
+	manifest, err := s.signature.ReadPluginManifestFromFS(ctx, p.FS)
+	if err != nil {
+		return "", fmt.Errorf("read plugin manifest: %w", err)
+	}
+	if !manifest.IsV2() {
+		return "", nil
+	}
+
+	var childPath string
+	if childFSBase != "" {
+		// Calculate the relative path of the child plugin folder from the parent plugin folder.
+		childPath, err = p.FS.Rel(childFSBase)
+		if err != nil {
+			return "", fmt.Errorf("rel path: %w", err)
+		}
+		// MANIFEST.txt uses forward slashes as path separators.
+		childPath = filepath.ToSlash(childPath)
+	}
+	moduleHash, ok := manifest.Files[path.Join(childPath, "module.js")]
+	if !ok {
+		return "", nil
+	}
+	return convertHashForSRI(moduleHash)
+}
+
 func (s *Service) compatibleCreatePluginVersion(ps map[string]string) bool {
 	if cpv, ok := ps[CreatePluginVersionCfgKey]; ok {
 		createPluginVer, err := semver.NewVersion(cpv)
@@ -86,3 +179,17 @@ func (s *Service) cdnEnabled(pluginID string, class plugins.Class) bool {
 	return s.cdn.PluginSupported(pluginID) || class == plugins.ClassCDN
 }
+
+// convertHashForSRI takes a SHA256 hash string and returns it as expected by the browser for SRI checks.
+func convertHashForSRI(h string) (string, error) {
+	hb, err := hex.DecodeString(h)
+	if err != nil {
+		return "", fmt.Errorf("hex decode string: %w", err)
+	}
+	return "sha256-" + base64.StdEncoding.EncodeToString(hb), nil
+}
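`convertHashForSRI` bridges the two encodings in play: MANIFEST.txt stores module.js digests as hex SHA-256, while the browser's `integrity` attribute expects `sha256-<base64>`. A standalone round trip of the same conversion, using the digest that also appears in the tests below (the SHA-256 of "hello\n"):

```go
package main

import (
	"encoding/base64"
	"encoding/hex"
	"fmt"
)

// convertHashForSRI re-encodes a hex SHA-256 digest (as stored in
// MANIFEST.txt) into the "sha256-<base64>" form browsers expect in
// Subresource Integrity attributes.
func convertHashForSRI(h string) (string, error) {
	hb, err := hex.DecodeString(h)
	if err != nil {
		return "", fmt.Errorf("hex decode string: %w", err)
	}
	return "sha256-" + base64.StdEncoding.EncodeToString(hb), nil
}

func main() {
	sri, err := convertHashForSRI("5891b5b522d5df086d0ff0b110fbd9d21bb4fc7163af34d08286a2e846f6be03")
	fmt.Println(sri, err)
	// sha256-WJG1tSLV3whtD/CxEPvZ0hu0/HFjrzTQgoai6Eb2vgM= <nil>
}
```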
+// moduleHashCacheKey returns a unique key for the module hash cache.
+func (s *Service) moduleHashCacheKey(p pluginstore.Plugin) string {
+	return p.ID + ":" + p.Info.Version
+}
diff --git a/pkg/services/pluginsintegration/pluginassets/pluginassets_test.go b/pkg/services/pluginsintegration/pluginassets/pluginassets_test.go
index 26f7fa181a5..371a116f86c 100644
--- a/pkg/services/pluginsintegration/pluginassets/pluginassets_test.go
+++ b/pkg/services/pluginsintegration/pluginassets/pluginassets_test.go
@@ -2,13 +2,17 @@ package pluginassets
 import (
 	"context"
+	"path/filepath"
 	"testing"
 	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
 	"github.com/grafana/grafana/pkg/infra/log"
 	"github.com/grafana/grafana/pkg/plugins"
 	"github.com/grafana/grafana/pkg/plugins/config"
+	"github.com/grafana/grafana/pkg/plugins/manager/signature"
+	"github.com/grafana/grafana/pkg/plugins/manager/signature/statickey"
 	"github.com/grafana/grafana/pkg/plugins/pluginscdn"
 	"github.com/grafana/grafana/pkg/services/pluginsintegration/pluginstore"
 	"github.com/grafana/grafana/pkg/setting"
@@ -34,7 +38,7 @@ func TestService_Calculate(t *testing.T) {
 			pluginSettings: newPluginSettings(pluginID, map[string]string{
 				CreatePluginVersionCfgKey: compatVersion,
 			}),
-			plugin:   newPlugin(pluginID, false),
+			plugin:   newPlugin(pluginID, withAngular(false)),
 			expected: plugins.LoadingStrategyScript,
 		},
 		{
@@ -42,7 +46,7 @@
 			pluginSettings: newPluginSettings("parent-datasource", map[string]string{
 				CreatePluginVersionCfgKey: compatVersion,
 			}),
-			plugin: newPlugin(pluginID, false, func(p pluginstore.Plugin) pluginstore.Plugin {
+			plugin: newPlugin(pluginID, withAngular(false), func(p pluginstore.Plugin) pluginstore.Plugin {
 				p.Parent = &pluginstore.ParentPlugin{ID: "parent-datasource"}
 				return p
 			}),
@@ -53,7 +57,7 @@
 			pluginSettings: newPluginSettings(pluginID, map[string]string{
 				CreatePluginVersionCfgKey: futureVersion,
 			}),
-			plugin:   newPlugin(pluginID, false),
+			plugin:   newPlugin(pluginID, withAngular(false)),
 			expected: plugins.LoadingStrategyScript,
 		},
 		{
@@ -61,7 +65,7 @@
 			pluginSettings: newPluginSettings(pluginID, map[string]string{
 				// NOTE: cdn key is not set
 			}),
-			plugin:   newPlugin(pluginID, false),
+			plugin:   newPlugin(pluginID, withAngular(false)),
 			expected: plugins.LoadingStrategyScript,
 		},
 		{
@@ -70,7 +74,7 @@
 				CreatePluginVersionCfgKey: incompatVersion,
 				// NOTE: cdn key is not set
 			}),
-			plugin: newPlugin(pluginID, false, func(p pluginstore.Plugin) pluginstore.Plugin {
+			plugin: newPlugin(pluginID, withAngular(false), func(p pluginstore.Plugin) pluginstore.Plugin {
 				p.Class = plugins.ClassExternal
 				return p
 			}),
@@ -83,7 +87,7 @@
 					"cdn": "true",
 				},
 			},
-			plugin: newPlugin(pluginID, false, func(p pluginstore.Plugin) pluginstore.Plugin {
+			plugin: newPlugin(pluginID, withAngular(false), func(p pluginstore.Plugin) pluginstore.Plugin {
 				p.Parent = &pluginstore.ParentPlugin{ID: "parent-datasource"}
 				return p
 			}),
@@ -96,8 +100,7 @@
 					"cdn": "true",
 				},
 			},
-			plugin: newPlugin(pluginID, false, func(p pluginstore.Plugin) pluginstore.Plugin {
-				p.Angular.Detected = true
+			plugin: newPlugin(pluginID, withAngular(true), func(p pluginstore.Plugin) pluginstore.Plugin {
 				p.Parent = &pluginstore.ParentPlugin{ID: "parent-datasource"}
 				return p
 			}),
@@ -106,8 +109,7 @@
 		{
 			name:           "Expected LoadingStrategyFetch when parent create-plugin version is not set, is not configured as CDN enabled and plugin is angular",
 			pluginSettings: setting.PluginSettings{},
-			plugin: newPlugin(pluginID, false, func(p pluginstore.Plugin) pluginstore.Plugin {
-				p.Angular.Detected = true
+			plugin: newPlugin(pluginID, withAngular(true), func(p pluginstore.Plugin) pluginstore.Plugin {
 				p.Parent = &pluginstore.ParentPlugin{ID: "parent-datasource"}
 				return p
 			}),
@@ -119,7 +121,7 @@
 				"cdn":                     "true",
 				CreatePluginVersionCfgKey: incompatVersion,
 			}),
-			plugin: newPlugin(pluginID, false, func(p pluginstore.Plugin) pluginstore.Plugin {
+			plugin: newPlugin(pluginID, withAngular(false), func(p pluginstore.Plugin) pluginstore.Plugin {
 				p.Class = plugins.ClassExternal
 				return p
 			}),
@@ -130,7 +132,7 @@
 			pluginSettings: newPluginSettings(pluginID, map[string]string{
 				CreatePluginVersionCfgKey: incompatVersion,
 			}),
-			plugin:   newPlugin(pluginID, true),
+			plugin:   newPlugin(pluginID, withAngular(true)),
 			expected: plugins.LoadingStrategyFetch,
 		},
 		{
@@ -139,7 +141,7 @@
 				"cdn":                     "true",
 				CreatePluginVersionCfgKey: incompatVersion,
 			}),
-			plugin:   newPlugin(pluginID, false),
+			plugin:   newPlugin(pluginID, withAngular(false)),
 			expected: plugins.LoadingStrategyFetch,
 		},
 		{
@@ -147,7 +149,7 @@
 			pluginSettings: newPluginSettings(pluginID, map[string]string{
 				CreatePluginVersionCfgKey: incompatVersion,
 			}),
-			plugin: newPlugin(pluginID, false, func(p pluginstore.Plugin) pluginstore.Plugin {
+			plugin: newPlugin(pluginID, withAngular(false), func(p pluginstore.Plugin) pluginstore.Plugin {
 				p.Class = plugins.ClassCDN
 				return p
 			}),
@@ -158,7 +160,7 @@
 			pluginSettings: newPluginSettings(pluginID, map[string]string{
 				CreatePluginVersionCfgKey: "invalidSemver",
 			}),
-			plugin:   newPlugin(pluginID, false),
+			plugin:   newPlugin(pluginID, withAngular(false)),
 			expected: plugins.LoadingStrategyScript,
 		},
 	}
@@ -179,12 +181,305 @@
 	}
 }
-func newPlugin(pluginID string, angular bool, cbs ...func(p pluginstore.Plugin) pluginstore.Plugin) pluginstore.Plugin {
+func TestService_ModuleHash(t *testing.T) {
+	const (
+		pluginID       = "grafana-test-datasource"
+		parentPluginID = "grafana-test-app"
+	)
+	for _, tc := range []struct {
+		name          string
+		features      *config.Features
+		store         []pluginstore.Plugin
+		plugin        pluginstore.Plugin
+		cdn           bool
+		expModuleHash string
+	}{
+		{
+			name:          "unsigned should not return module hash",
+			plugin:        newPlugin(pluginID, withSignatureStatus(plugins.SignatureStatusUnsigned)),
+			cdn:           false,
+			features:      &config.Features{SriChecksEnabled: false},
+			expModuleHash: "",
+		},
+		{
+			name: "feature flag on with cdn on should return module hash",
+			plugin: newPlugin(
+				pluginID,
+				withSignatureStatus(plugins.SignatureStatusValid),
+				withFS(plugins.NewLocalFS(filepath.Join("testdata", "module-hash-valid"))),
+			),
+			cdn:           true,
+			features:      &config.Features{SriChecksEnabled: true},
+			expModuleHash: newSRIHash(t, "5891b5b522d5df086d0ff0b110fbd9d21bb4fc7163af34d08286a2e846f6be03"),
+		},
+		{
+			name: "feature flag on with cdn off should return module hash",
+			plugin: newPlugin(
+				pluginID,
+				withSignatureStatus(plugins.SignatureStatusValid),
+				withFS(plugins.NewLocalFS(filepath.Join("testdata", "module-hash-valid"))),
+			),
+			cdn:           false,
+			features:      &config.Features{SriChecksEnabled: true},
+			expModuleHash: newSRIHash(t, "5891b5b522d5df086d0ff0b110fbd9d21bb4fc7163af34d08286a2e846f6be03"),
+		},
+		{
+			name: "feature flag off with cdn on should not return module hash",
+			plugin: newPlugin(
+				pluginID,
+				withSignatureStatus(plugins.SignatureStatusValid),
+				withFS(plugins.NewLocalFS(filepath.Join("testdata", "module-hash-valid"))),
+			),
+			cdn:           true,
+			features:      &config.Features{SriChecksEnabled: false},
+			expModuleHash: "",
+		},
+		{
+			name: "feature flag off with cdn off should not return module hash",
+			plugin: newPlugin(
+				pluginID,
+				withSignatureStatus(plugins.SignatureStatusValid),
+				withFS(plugins.NewLocalFS(filepath.Join("testdata", "module-hash-valid"))),
+			),
+			cdn:           false,
+			features:      &config.Features{SriChecksEnabled: false},
+			expModuleHash: "",
+		},
+		{
+			// parentPluginID (/)
+			// └── pluginID (/datasource)
+			name: "nested plugin should return module hash from parent MANIFEST.txt",
+			store: []pluginstore.Plugin{
+				newPlugin(
+					parentPluginID,
+					withSignatureStatus(plugins.SignatureStatusValid),
+					withFS(plugins.NewLocalFS(filepath.Join("testdata", "module-hash-valid-nested"))),
+				),
+			},
+			plugin: newPlugin(
+				pluginID,
+				withSignatureStatus(plugins.SignatureStatusValid),
+				withFS(plugins.NewLocalFS(filepath.Join("testdata", "module-hash-valid-nested", "datasource"))),
+				withParent(parentPluginID),
+			),
+			cdn:           false,
+			features:      &config.Features{SriChecksEnabled: true},
+			expModuleHash: newSRIHash(t, "04d70db091d96c4775fb32ba5a8f84cc22893eb43afdb649726661d4425c6711"),
+		},
+		{
+			// parentPluginID (/)
+			// └── pluginID (/panels/one)
+			name: "nested plugin deeper than one subfolder should return module hash from parent MANIFEST.txt",
+			store: []pluginstore.Plugin{
+				newPlugin(
+					parentPluginID,
+					withSignatureStatus(plugins.SignatureStatusValid),
+					withFS(plugins.NewLocalFS(filepath.Join("testdata", "module-hash-valid-nested"))),
+				),
+			},
+			plugin: newPlugin(
+				pluginID,
+				withSignatureStatus(plugins.SignatureStatusValid),
+				withFS(plugins.NewLocalFS(filepath.Join("testdata", "module-hash-valid-nested", "panels", "one"))),
+				withParent(parentPluginID),
+			),
+			cdn:           false,
+			features:      &config.Features{SriChecksEnabled: true},
+			expModuleHash: newSRIHash(t, "cbd1ac2284645a0e1e9a8722a729f5bcdd2b831222728709c6360beecdd6143f"),
+		},
+		{
+			// grand-parent-app (/)
+			// ├── parent-datasource (/datasource)
+			// │   └── child-panel (/datasource/panels/one)
+			name: "nested plugin of a nested plugin should return module hash from parent MANIFEST.txt",
+			store: []pluginstore.Plugin{
+				newPlugin(
+					"grand-parent-app",
+					withSignatureStatus(plugins.SignatureStatusValid),
+					withFS(plugins.NewLocalFS(filepath.Join("testdata", "module-hash-valid-deeply-nested"))),
+				),
+				newPlugin(
+					"parent-datasource",
+					withSignatureStatus(plugins.SignatureStatusValid),
+					withFS(plugins.NewLocalFS(filepath.Join("testdata", "module-hash-valid-deeply-nested", "datasource"))),
+					withParent("grand-parent-app"),
+				),
+			},
+			plugin: newPlugin(
+				"child-panel",
+				withSignatureStatus(plugins.SignatureStatusValid),
+				withFS(plugins.NewLocalFS(filepath.Join("testdata", "module-hash-valid-deeply-nested", "datasource", "panels", "one"))),
+				withParent("parent-datasource"),
+			),
+			cdn:           false,
+			features:      &config.Features{SriChecksEnabled: true},
+			expModuleHash: newSRIHash(t, "cbd1ac2284645a0e1e9a8722a729f5bcdd2b831222728709c6360beecdd6143f"),
+		},
+		{
+			name:  "nested plugin should not return module hash from parent if it's not registered in the store",
+			store: []pluginstore.Plugin{},
+			plugin: newPlugin(
+				pluginID,
withSignatureStatus(plugins.SignatureStatusValid), + withFS(plugins.NewLocalFS(filepath.Join("testdata", "module-hash-valid-nested", "panels", "one"))), + withParent(parentPluginID), + ), + cdn: false, + features: &config.Features{SriChecksEnabled: true}, + expModuleHash: "", + }, + { + name: "missing module.js entry from MANIFEST.txt should not return module hash", + plugin: newPlugin( + pluginID, + withSignatureStatus(plugins.SignatureStatusValid), + withFS(plugins.NewLocalFS(filepath.Join("testdata", "module-hash-no-module-js"))), + ), + cdn: false, + features: &config.Features{SriChecksEnabled: true}, + expModuleHash: "", + }, + { + name: "signed status but missing MANIFEST.txt should not return module hash", + plugin: newPlugin( + pluginID, + withSignatureStatus(plugins.SignatureStatusValid), + withFS(plugins.NewLocalFS(filepath.Join("testdata", "module-hash-no-manifest-txt"))), + ), + cdn: false, + features: &config.Features{SriChecksEnabled: true}, + expModuleHash: "", + }, + } { + t.Run(tc.name, func(t *testing.T) { + var pluginSettings setting.PluginSettings + if tc.cdn { + pluginSettings = newPluginSettings(pluginID, map[string]string{ + "cdn": "true", + }) + } + features := tc.features + if features == nil { + features = &config.Features{} + } + pCfg := &config.PluginManagementCfg{ + PluginsCDNURLTemplate: "http://cdn.example.com", + PluginSettings: pluginSettings, + Features: *features, + } + svc := ProvideService( + pCfg, + pluginscdn.ProvideService(pCfg), + signature.ProvideService(pCfg, statickey.New()), + pluginstore.NewFakePluginStore(tc.store...), + ) + mh := svc.ModuleHash(context.Background(), tc.plugin) + require.Equal(t, tc.expModuleHash, mh) + }) + } +} + +func TestService_ModuleHash_Cache(t *testing.T) { + pCfg := &config.PluginManagementCfg{ + PluginSettings: setting.PluginSettings{}, + Features: config.Features{SriChecksEnabled: true}, + } + svc := ProvideService( + pCfg, + pluginscdn.ProvideService(pCfg), + signature.ProvideService(pCfg, statickey.New()), + pluginstore.NewFakePluginStore(), + ) + const pluginID = "grafana-test-datasource" + + t.Run("cache key", func(t *testing.T) { + t.Run("with version", func(t *testing.T) { + const pluginVersion = "1.0.0" + p := newPlugin(pluginID, withInfo(plugins.Info{Version: pluginVersion})) + k := svc.moduleHashCacheKey(p) + require.Equal(t, pluginID+":"+pluginVersion, k, "cache key should be correct") + }) + + t.Run("without version", func(t *testing.T) { + p := newPlugin(pluginID) + k := svc.moduleHashCacheKey(p) + require.Equal(t, pluginID+":", k, "cache key should be correct") + }) + }) + + t.Run("ModuleHash usage", func(t *testing.T) { + pV1 := newPlugin( + pluginID, + withInfo(plugins.Info{Version: "1.0.0"}), + withSignatureStatus(plugins.SignatureStatusValid), + withFS(plugins.NewLocalFS(filepath.Join("testdata", "module-hash-valid"))), + ) + k := svc.moduleHashCacheKey(pV1) + + _, ok := svc.moduleHashCache.Load(k) + require.False(t, ok, "cache should initially be empty") + + mhV1 := svc.ModuleHash(context.Background(), pV1) + pV1Exp := newSRIHash(t, "5891b5b522d5df086d0ff0b110fbd9d21bb4fc7163af34d08286a2e846f6be03") + require.Equal(t, pV1Exp, mhV1, "returned value should be correct") + + cachedMh, ok := svc.moduleHashCache.Load(k) + require.True(t, ok) + require.Equal(t, pV1Exp, cachedMh, "cache should contain the returned value") + + t.Run("different version uses different cache key", func(t *testing.T) { + pV2 := newPlugin( + pluginID, + withInfo(plugins.Info{Version: "2.0.0"}), + 
withSignatureStatus(plugins.SignatureStatusValid), + // different fs for different hash + withFS(plugins.NewLocalFS(filepath.Join("testdata", "module-hash-valid-nested"))), + ) + mhV2 := svc.ModuleHash(context.Background(), pV2) + require.NotEqual(t, mhV2, mhV1, "different version should have different hash") + require.Equal(t, newSRIHash(t, "266c19bc148b22ddef2a288fc5f8f40855bda22ccf60be53340b4931e469ae2a"), mhV2) + }) + + t.Run("cache should be used", func(t *testing.T) { + // edit cache directly + svc.moduleHashCache.Store(k, "hax") + require.Equal(t, "hax", svc.ModuleHash(context.Background(), pV1)) + }) + }) +} + +func TestConvertHashForSRI(t *testing.T) { + for _, tc := range []struct { + hash string + expHash string + expErr bool + }{ + { + hash: "ddfcb449445064e6c39f0c20b15be3cb6a55837cf4781df23d02de005f436811", + expHash: "sha256-3fy0SURQZObDnwwgsVvjy2pVg3z0eB3yPQLeAF9DaBE=", + }, + { + hash: "not-a-valid-hash", + expErr: true, + }, + } { + t.Run(tc.hash, func(t *testing.T) { + r, err := convertHashForSRI(tc.hash) + if tc.expErr { + require.Error(t, err) + } else { + require.NoError(t, err) + require.Equal(t, tc.expHash, r) + } + }) + } +} + +func newPlugin(pluginID string, cbs ...func(p pluginstore.Plugin) pluginstore.Plugin) pluginstore.Plugin { p := pluginstore.Plugin{ JSONData: plugins.JSONData{ ID: pluginID, }, - Angular: plugins.AngularMeta{Detected: angular}, } for _, cb := range cbs { p = cb(p) @@ -192,8 +487,43 @@ func newPlugin(pluginID string, angular bool, cbs ...func(p pluginstore.Plugin) return p } -func newCfg(ps setting.PluginSettings) *setting.Cfg { - return &setting.Cfg{ +func withInfo(info plugins.Info) func(p pluginstore.Plugin) pluginstore.Plugin { + return func(p pluginstore.Plugin) pluginstore.Plugin { + p.Info = info + return p + } +} + +func withFS(fs plugins.FS) func(p pluginstore.Plugin) pluginstore.Plugin { + return func(p pluginstore.Plugin) pluginstore.Plugin { + p.FS = fs + return p + } +} + +func withSignatureStatus(status plugins.SignatureStatus) func(p pluginstore.Plugin) pluginstore.Plugin { + return func(p pluginstore.Plugin) pluginstore.Plugin { + p.Signature = status + return p + } +} + +func withAngular(angular bool) func(p pluginstore.Plugin) pluginstore.Plugin { + return func(p pluginstore.Plugin) pluginstore.Plugin { + p.Angular = plugins.AngularMeta{Detected: angular} + return p + } +} + +func withParent(parentID string) func(p pluginstore.Plugin) pluginstore.Plugin { + return func(p pluginstore.Plugin) pluginstore.Plugin { + p.Parent = &pluginstore.ParentPlugin{ID: parentID} + return p + } +} + +func newCfg(ps setting.PluginSettings) *config.PluginManagementCfg { + return &config.PluginManagementCfg{ PluginSettings: ps, } } @@ -203,3 +533,9 @@ func newPluginSettings(pluginID string, kv map[string]string) setting.PluginSett pluginID: kv, } } + +func newSRIHash(t *testing.T, s string) string { + r, err := convertHashForSRI(s) + require.NoError(t, err) + return r +} diff --git a/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-no-manifest-txt/module.js b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-no-manifest-txt/module.js new file mode 100644 index 00000000000..fcb71c21418 --- /dev/null +++ b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-no-manifest-txt/module.js @@ -0,0 +1 @@ +hello parent diff --git a/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-no-manifest-txt/plugin.json 
b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-no-manifest-txt/plugin.json new file mode 100644 index 00000000000..122b5358b89 --- /dev/null +++ b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-no-manifest-txt/plugin.json @@ -0,0 +1,15 @@ +{ + "type": "app", + "name": "Test", + "id": "test-app", + "backend": true, + "executable": "test", + "state": "alpha", + "info": { + "version": "1.0.0", + "description": "Test", + "author": { + "name": "Giuseppe Guerra" + } + } +} diff --git a/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-no-module-js/MANIFEST.txt b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-no-module-js/MANIFEST.txt new file mode 100644 index 00000000000..d216cf1c023 --- /dev/null +++ b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-no-module-js/MANIFEST.txt @@ -0,0 +1,29 @@ + +-----BEGIN PGP SIGNED MESSAGE----- +Hash: SHA512 + +{ + "manifestVersion": "2.0.0", + "signatureType": "grafana", + "signedByOrg": "grafana", + "signedByOrgName": "Grafana Labs", + "plugin": "test-app", + "version": "1.0.0", + "time": 1726230812215, + "keyId": "7e4d0c6a708866e7", + "files": { + "plugin.json": "31f04aceb2a9b14c2e501f38a4de5ab1c7a3e7306f58353fa5c1a86b716c971c", + "something.js": "266c19bc148b22ddef2a288fc5f8f40855bda22ccf60be53340b4931e469ae2a" + } +} +-----BEGIN PGP SIGNATURE----- +Version: OpenPGP.js v4.10.11 +Comment: https://openpgpjs.org + +wrkEARMKAAYFAmbkMRwAIQkQfk0ManCIZucWIQTzOyW2kQdOhGNlcPN+TQxq +cIhm53UWAgkBE2oxqyzBji86eCOzLmCT7IgQaoSMMF48tu+XdgwFS5/NU5su +deKad3taDnSU9a7GkCaisRVQOWy/UtFS1FNQTtkCCQBc1cZ6JsPWh2Pd60h0 +9U5aviYde6g1DCKO1riaUzHzrruBiHmHWjzr2aYwACb89vs2XcZqvue1Byb+ +y2inBDhHvQ== +=qMej +-----END PGP SIGNATURE----- diff --git a/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-no-module-js/plugin.json b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-no-module-js/plugin.json new file mode 100644 index 00000000000..122b5358b89 --- /dev/null +++ b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-no-module-js/plugin.json @@ -0,0 +1,15 @@ +{ + "type": "app", + "name": "Test", + "id": "test-app", + "backend": true, + "executable": "test", + "state": "alpha", + "info": { + "version": "1.0.0", + "description": "Test", + "author": { + "name": "Giuseppe Guerra" + } + } +} diff --git a/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-no-module-js/something.js b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-no-module-js/something.js new file mode 100644 index 00000000000..fcb71c21418 --- /dev/null +++ b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-no-module-js/something.js @@ -0,0 +1 @@ +hello parent diff --git a/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-deeply-nested/MANIFEST.txt b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-deeply-nested/MANIFEST.txt new file mode 100644 index 00000000000..ae9ff16a609 --- /dev/null +++ b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-deeply-nested/MANIFEST.txt @@ -0,0 +1,33 @@ + +-----BEGIN PGP SIGNED MESSAGE----- +Hash: SHA512 + +{ + "manifestVersion": "2.0.0", + "signatureType": "grafana", + "signedByOrg": "grafana", + "signedByOrgName": "Grafana Labs", + "plugin": "test-app", + "version": "1.0.0", + "time": 1726234125061, + "keyId": "7e4d0c6a708866e7", + "files": { + "datasource/module.js": "04d70db091d96c4775fb32ba5a8f84cc22893eb43afdb649726661d4425c6711", + 
"datasource/plugin.json": "3fd712717a21617cc76f9043efcd43d4ebf5564dd155a28e4e3c736739f6931e", + "datasource/panels/one/module.js": "cbd1ac2284645a0e1e9a8722a729f5bcdd2b831222728709c6360beecdd6143f", + "datasource/panels/one/plugin.json": "b9b4556a7220ea77650ffd228da6d441e68df3405d50dab5773c10f4afae5ad3", + "module.js": "266c19bc148b22ddef2a288fc5f8f40855bda22ccf60be53340b4931e469ae2a", + "plugin.json": "31f04aceb2a9b14c2e501f38a4de5ab1c7a3e7306f58353fa5c1a86b716c971c" + } +} +-----BEGIN PGP SIGNATURE----- +Version: OpenPGP.js v4.10.11 +Comment: https://openpgpjs.org + +wrkEARMKAAYFAmbkPg0AIQkQfk0ManCIZucWIQTzOyW2kQdOhGNlcPN+TQxq +cIhm5xTlAgkB3mG37KEdlP34nC69NbmriMpDH6PyyJ0IUwXB/SMTr4Gc2SvG +cVHvih/0WqVjYKxxQI0QHoYpBQW2jPx0YJLFof8CCQBHpdEEXNTYOOZWG6Cg +M3wB3AdCO+ChjXkKosbWqiMDfVqHFoLoLurwWxwOjvk/xTvX5GFbOxSfISyU +8iW03F5/Sw== +=wobV +-----END PGP SIGNATURE----- diff --git a/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-deeply-nested/datasource/module.js b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-deeply-nested/datasource/module.js new file mode 100644 index 00000000000..c04165fc4e6 --- /dev/null +++ b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-deeply-nested/datasource/module.js @@ -0,0 +1 @@ +hello datasource diff --git a/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-deeply-nested/datasource/panels/one/module.js b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-deeply-nested/datasource/panels/one/module.js new file mode 100644 index 00000000000..5bc5bf404df --- /dev/null +++ b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-deeply-nested/datasource/panels/one/module.js @@ -0,0 +1 @@ +hello panel diff --git a/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-deeply-nested/datasource/panels/one/plugin.json b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-deeply-nested/datasource/panels/one/plugin.json new file mode 100644 index 00000000000..137ce642626 --- /dev/null +++ b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-deeply-nested/datasource/panels/one/plugin.json @@ -0,0 +1,13 @@ +{ + "type": "panel", + "name": "Test Panel", + "id": "test-panel", + "state": "alpha", + "info": { + "version": "1.0.0", + "description": "Test", + "author": { + "name": "Giuseppe Guerra" + } + } +} diff --git a/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-deeply-nested/datasource/plugin.json b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-deeply-nested/datasource/plugin.json new file mode 100644 index 00000000000..ee61a2361ea --- /dev/null +++ b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-deeply-nested/datasource/plugin.json @@ -0,0 +1,15 @@ +{ + "type": "datasource", + "name": "Test Datasource", + "id": "test-datasource", + "backend": true, + "executable": "test", + "state": "alpha", + "info": { + "version": "1.0.0", + "description": "Test", + "author": { + "name": "Giuseppe Guerra" + } + } +} diff --git a/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-deeply-nested/module.js b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-deeply-nested/module.js new file mode 100644 index 00000000000..fcb71c21418 --- /dev/null +++ b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-deeply-nested/module.js @@ -0,0 +1 @@ +hello parent diff --git 
a/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-deeply-nested/plugin.json b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-deeply-nested/plugin.json new file mode 100644 index 00000000000..122b5358b89 --- /dev/null +++ b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-deeply-nested/plugin.json @@ -0,0 +1,15 @@ +{ + "type": "app", + "name": "Test", + "id": "test-app", + "backend": true, + "executable": "test", + "state": "alpha", + "info": { + "version": "1.0.0", + "description": "Test", + "author": { + "name": "Giuseppe Guerra" + } + } +} diff --git a/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-nested/MANIFEST.txt b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-nested/MANIFEST.txt new file mode 100644 index 00000000000..ceb0a5e762b --- /dev/null +++ b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-nested/MANIFEST.txt @@ -0,0 +1,33 @@ + +-----BEGIN PGP SIGNED MESSAGE----- +Hash: SHA512 + +{ + "manifestVersion": "2.0.0", + "signatureType": "grafana", + "signedByOrg": "grafana", + "signedByOrgName": "Grafana Labs", + "plugin": "test-app", + "version": "1.0.0", + "time": 1726230803822, + "keyId": "7e4d0c6a708866e7", + "files": { + "module.js": "266c19bc148b22ddef2a288fc5f8f40855bda22ccf60be53340b4931e469ae2a", + "plugin.json": "31f04aceb2a9b14c2e501f38a4de5ab1c7a3e7306f58353fa5c1a86b716c971c", + "datasource/module.js": "04d70db091d96c4775fb32ba5a8f84cc22893eb43afdb649726661d4425c6711", + "datasource/plugin.json": "3fd712717a21617cc76f9043efcd43d4ebf5564dd155a28e4e3c736739f6931e", + "panels/one/module.js": "cbd1ac2284645a0e1e9a8722a729f5bcdd2b831222728709c6360beecdd6143f", + "panels/one/plugin.json": "b9b4556a7220ea77650ffd228da6d441e68df3405d50dab5773c10f4afae5ad3" + } +} +-----BEGIN PGP SIGNATURE----- +Version: OpenPGP.js v4.10.11 +Comment: https://openpgpjs.org + +wrkEARMKAAYFAmbkMRQAIQkQfk0ManCIZucWIQTzOyW2kQdOhGNlcPN+TQxq +cIhm50C8AgkAmzQpeYPnCgYimLGp5UGnCTrkbUEEqW+qXESrhi5T5ZuM+SzT +BcRlC5pP6+wuyXAIdfppzWQ/umkkoaTIuub0TXQCCQHVcpWKy4acRL9TlORQ +1VzVEV9PW0+x606HsDDHkterKQZgr5X6I/sTbSpBDMWPCMxqAk9fZn3G4iuq +MyS+hwUZDQ== +=7/Rd +-----END PGP SIGNATURE----- diff --git a/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-nested/datasource/module.js b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-nested/datasource/module.js new file mode 100644 index 00000000000..c04165fc4e6 --- /dev/null +++ b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-nested/datasource/module.js @@ -0,0 +1 @@ +hello datasource diff --git a/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-nested/datasource/plugin.json b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-nested/datasource/plugin.json new file mode 100644 index 00000000000..ee61a2361ea --- /dev/null +++ b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-nested/datasource/plugin.json @@ -0,0 +1,15 @@ +{ + "type": "datasource", + "name": "Test Datasource", + "id": "test-datasource", + "backend": true, + "executable": "test", + "state": "alpha", + "info": { + "version": "1.0.0", + "description": "Test", + "author": { + "name": "Giuseppe Guerra" + } + } +} diff --git a/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-nested/module.js b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-nested/module.js new file mode 100644 index 
00000000000..fcb71c21418 --- /dev/null +++ b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-nested/module.js @@ -0,0 +1 @@ +hello parent diff --git a/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-nested/panels/one/module.js b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-nested/panels/one/module.js new file mode 100644 index 00000000000..5bc5bf404df --- /dev/null +++ b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-nested/panels/one/module.js @@ -0,0 +1 @@ +hello panel diff --git a/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-nested/panels/one/plugin.json b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-nested/panels/one/plugin.json new file mode 100644 index 00000000000..137ce642626 --- /dev/null +++ b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-nested/panels/one/plugin.json @@ -0,0 +1,13 @@ +{ + "type": "panel", + "name": "Test Panel", + "id": "test-panel", + "state": "alpha", + "info": { + "version": "1.0.0", + "description": "Test", + "author": { + "name": "Giuseppe Guerra" + } + } +} diff --git a/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-nested/plugin.json b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-nested/plugin.json new file mode 100644 index 00000000000..122b5358b89 --- /dev/null +++ b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid-nested/plugin.json @@ -0,0 +1,15 @@ +{ + "type": "app", + "name": "Test", + "id": "test-app", + "backend": true, + "executable": "test", + "state": "alpha", + "info": { + "version": "1.0.0", + "description": "Test", + "author": { + "name": "Giuseppe Guerra" + } + } +} diff --git a/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid/MANIFEST.txt b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid/MANIFEST.txt new file mode 100644 index 00000000000..d34df338e30 --- /dev/null +++ b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid/MANIFEST.txt @@ -0,0 +1,32 @@ + +-----BEGIN PGP SIGNED MESSAGE----- +Hash: SHA512 + +{ + "manifestVersion": "2.0.0", + "signatureType": "private", + "signedByOrg": "giuseppeguerra", + "signedByOrgName": "giuseppeguerra", + "rootUrls": [ + "http://127.0.0.1:3000/" + ], + "plugin": "test-datasource", + "version": "1.0.0", + "time": 1725959570435, + "keyId": "7e4d0c6a708866e7", + "files": { + "module.js": "5891b5b522d5df086d0ff0b110fbd9d21bb4fc7163af34d08286a2e846f6be03", + "plugin.json": "129fab4e0584d18c778ebdfa5fe1a68edf2e5c5aeb8290b2c68182c857cb59f8" + } +} +-----BEGIN PGP SIGNATURE----- +Version: OpenPGP.js v4.10.11 +Comment: https://openpgpjs.org + +wrkEARMKAAYFAmbgDZIAIQkQfk0ManCIZucWIQTzOyW2kQdOhGNlcPN+TQxq +cIhm5wbfAgkAXmKJcM8uAKb3TepYW/oyGhRLR8L6eM9mCoYwKkatITKJ6bRe +Wnz37AMcPx0DahgfCzCXRLo4CspPJylr2JV8DagCCQCfCjHgLFhKGpBP71Y1 +mgcQ1/CJefb6B2H45G25MwUFTlSTGLDqW4QMi2kQvXnnUMjXquv2+iVd6qyz +0Rqvpou/QQ== +=QNmr +-----END PGP SIGNATURE----- diff --git a/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid/module.js b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid/module.js new file mode 100644 index 00000000000..ce013625030 --- /dev/null +++ b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid/module.js @@ -0,0 +1 @@ +hello diff --git a/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid/plugin.json 
b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid/plugin.json new file mode 100644 index 00000000000..328c11e6e22 --- /dev/null +++ b/pkg/services/pluginsintegration/pluginassets/testdata/module-hash-valid/plugin.json @@ -0,0 +1,15 @@ +{ + "type": "datasource", + "name": "Test", + "id": "test-datasource", + "backend": true, + "executable": "test", + "state": "alpha", + "info": { + "version": "1.0.0", + "description": "Test", + "author": { + "name": "Giuseppe Guerra" + } + } +} diff --git a/pkg/services/pluginsintegration/pluginconfig/config.go b/pkg/services/pluginsintegration/pluginconfig/config.go index 83ec526c33f..56b20aa6e1a 100644 --- a/pkg/services/pluginsintegration/pluginconfig/config.go +++ b/pkg/services/pluginsintegration/pluginconfig/config.go @@ -32,6 +32,7 @@ func ProvidePluginManagementConfig(cfg *setting.Cfg, settingProvider setting.Pro config.Features{ ExternalCorePluginsEnabled: features.IsEnabledGlobally(featuremgmt.FlagExternalCorePlugins), SkipHostEnvVarsEnabled: features.IsEnabledGlobally(featuremgmt.FlagPluginsSkipHostEnvVars), + SriChecksEnabled: features.IsEnabledGlobally(featuremgmt.FlagPluginsSriChecks), }, cfg.AngularSupportEnabled, cfg.GrafanaComAPIURL, diff --git a/pkg/services/pluginsintegration/plugins_integration_test.go b/pkg/services/pluginsintegration/plugins_integration_test.go index 66d6dda25d5..428fa1b8364 100644 --- a/pkg/services/pluginsintegration/plugins_integration_test.go +++ b/pkg/services/pluginsintegration/plugins_integration_test.go @@ -91,7 +91,7 @@ func TestIntegrationPluginManager(t *testing.T) { ms := mssql.ProvideService(cfg) db := db.InitTestDB(t, sqlstore.InitTestDBOpt{Cfg: cfg}) sv2 := searchV2.ProvideService(cfg, db, nil, nil, tracer, features, nil, nil, nil) - graf := grafanads.ProvideService(sv2, nil) + graf := grafanads.ProvideService(sv2, nil, nil, features) pyroscope := pyroscope.ProvideService(hcp) parca := parca.ProvideService(hcp) coreRegistry := coreplugin.ProvideCoreRegistry(tracing.InitializeTracerForTest(), am, cw, cm, es, grap, idb, lk, otsdb, pr, tmpo, td, pg, my, ms, graf, pyroscope, parca) diff --git a/pkg/services/pluginsintegration/pluginstore/plugins.go b/pkg/services/pluginsintegration/pluginstore/plugins.go index f041c77c5e5..30321e69286 100644 --- a/pkg/services/pluginsintegration/pluginstore/plugins.go +++ b/pkg/services/pluginsintegration/pluginstore/plugins.go @@ -10,7 +10,7 @@ import ( type Plugin struct { plugins.JSONData - fs plugins.FS + FS plugins.FS supportsStreaming bool Class plugins.Class @@ -42,7 +42,7 @@ func (p Plugin) SupportsStreaming() bool { } func (p Plugin) Base() string { - return p.fs.Base() + return p.FS.Base() } func (p Plugin) IsApp() bool { @@ -61,7 +61,7 @@ func ToGrafanaDTO(p *plugins.Plugin) Plugin { } dto := Plugin{ - fs: p.FS, + FS: p.FS, supportsStreaming: supportsStreaming, Class: p.Class, JSONData: p.JSONData, diff --git a/pkg/services/provisioning/alerting/notification_policy_provisioner.go b/pkg/services/provisioning/alerting/notification_policy_provisioner.go index 33e536de04d..55f3dd2ae9d 100644 --- a/pkg/services/provisioning/alerting/notification_policy_provisioner.go +++ b/pkg/services/provisioning/alerting/notification_policy_provisioner.go @@ -32,7 +32,7 @@ func (c *defaultNotificationPolicyProvisioner) Provision(ctx context.Context, for _, file := range files { for _, np := range file.Policies { err := c.notificationPolicyService.UpdatePolicyTree(ctx, np.OrgID, - np.Policy, models.ProvenanceFile) + np.Policy, models.ProvenanceFile, "") if err 
!= nil { return fmt.Errorf("%s: %w", file.Filename, err) } diff --git a/pkg/services/quota/quotaimpl/quota.go b/pkg/services/quota/quotaimpl/quota.go index 34ba998317f..42d3e7b971a 100644 --- a/pkg/services/quota/quotaimpl/quota.go +++ b/pkg/services/quota/quotaimpl/quota.go @@ -18,8 +18,7 @@ import ( // initialized tracer from the opentelemetry package. var tracer = otel.Tracer("github.com/grafana/grafana/pkg/services/quota/quotaimpl/service") -type serviceDisabled struct { -} +type serviceDisabled struct{} func (s *serviceDisabled) QuotaReached(c *contextmodel.ReqContext, targetSrv quota.TargetSrv) (bool, error) { return false, nil diff --git a/pkg/services/quota/quotaimpl/quota_test.go b/pkg/services/quota/quotaimpl/quota_test.go index 66763397395..4422ed67ebe 100644 --- a/pkg/services/quota/quotaimpl/quota_test.go +++ b/pkg/services/quota/quotaimpl/quota_test.go @@ -483,7 +483,7 @@ func setupEnv(t *testing.T, sqlStore db.DB, cfg *setting.Cfg, b bus.Bus, quotaSe tracer := tracing.InitializeTracerForTest() _, err := apikeyimpl.ProvideService(sqlStore, cfg, quotaService) require.NoError(t, err) - _, err = authimpl.ProvideUserAuthTokenService(sqlStore, nil, quotaService, cfg) + _, err = authimpl.ProvideUserAuthTokenService(sqlStore, nil, quotaService, fakes.NewFakeSecretsService(), cfg, tracing.InitializeTracerForTest()) require.NoError(t, err) _, err = dashboardStore.ProvideDashboardStore(sqlStore, cfg, featuremgmt.WithFeatures(), tagimpl.ProvideService(sqlStore), quotaService) require.NoError(t, err) diff --git a/pkg/services/secrets/migrator/migrator.go b/pkg/services/secrets/migrator/migrator.go index 02872935b9f..3581c2afd99 100644 --- a/pkg/services/secrets/migrator/migrator.go +++ b/pkg/services/secrets/migrator/migrator.go @@ -46,6 +46,11 @@ func ProvideSecretsMigrator( b64Secret{simpleSecret: simpleSecret{tableName: "signing_key", columnName: "private_key"}, encoding: base64.StdEncoding}, alertingSecret{}, ssoSettingsSecret{}, + b64Secret{simpleSecret: simpleSecret{tableName: "user_external_session", columnName: "access_token"}, encoding: base64.StdEncoding}, + b64Secret{simpleSecret: simpleSecret{tableName: "user_external_session", columnName: "id_token"}, encoding: base64.StdEncoding}, + b64Secret{simpleSecret: simpleSecret{tableName: "user_external_session", columnName: "refresh_token"}, encoding: base64.StdEncoding}, + b64Secret{simpleSecret: simpleSecret{tableName: "user_external_session", columnName: "session_id"}, encoding: base64.StdEncoding}, + b64Secret{simpleSecret: simpleSecret{tableName: "user_external_session", columnName: "name_id"}, encoding: base64.StdEncoding}, } return &SecretsMigrator{ diff --git a/pkg/services/secrets/migrator/reencrypt.go b/pkg/services/secrets/migrator/reencrypt.go index 6c1271277a4..25aec74bbb8 100644 --- a/pkg/services/secrets/migrator/reencrypt.go +++ b/pkg/services/secrets/migrator/reencrypt.go @@ -7,6 +7,7 @@ import ( "fmt" "github.com/grafana/grafana/pkg/infra/db" + "github.com/grafana/grafana/pkg/services/encryption" "github.com/grafana/grafana/pkg/services/ngalert/notifier" "github.com/grafana/grafana/pkg/services/secrets" "github.com/grafana/grafana/pkg/services/secrets/manager" @@ -308,26 +309,10 @@ func (s ssoSettingsSecret) ReEncrypt(ctx context.Context, secretsSrv *manager.Se for _, result := range results { err := sqlStore.InTransaction(ctx, func(ctx context.Context) error { - for field, value := range result.Settings { - if ssosettingsimpl.IsSecretField(field) { - decrypted, err := s.decryptValue(ctx, value, secretsSrv) - if 
err != nil { - logger.Warn("Could not decrypt SSO settings secret", "id", result.ID, "field", field, "error", err) - return err - } - - if decrypted == nil { - continue - } - - reencrypted, err := secretsSrv.Encrypt(ctx, decrypted, secrets.WithoutScope()) - if err != nil { - logger.Warn("Could not re-encrypt SSO settings secret", "id", result.ID, "field", field, "error", err) - return err - } - - result.Settings[field] = base64.RawStdEncoding.EncodeToString(reencrypted) - } + result.Settings, err = s.reEncryptSecretsInMap(ctx, result.Settings, secretsSrv, nil, "") + if err != nil { + logger.Warn("failed re-encrypting SSO settings secret", "id", result.ID, "error", err) + return err } err = sqlStore.WithDbSession(ctx, func(sess *db.Session) error { @@ -378,3 +363,77 @@ func (s ssoSettingsSecret) decryptValue(ctx context.Context, value any, secretsS return decrypted, nil } + +// reEncryptSecretsInMap re-encrypts every secret field in the given settings map, recursing into nested maps and slices. +// If encryptionSrv is nil, values are re-encrypted with the secrets service; otherwise (the rollback path) they are re-encrypted with the legacy encryption service and secretKey. +func (s ssoSettingsSecret) reEncryptSecretsInMap(ctx context.Context, m map[string]any, secretsSrv *manager.SecretsService, encryptionSrv encryption.Internal, secretKey string) (map[string]any, error) { + var err error + + result := make(map[string]any) + for k, v := range m { + switch v := v.(type) { + case string: + result[k] = v + if ssosettingsimpl.IsSecretField(k) { + decrypted, err := s.decryptValue(ctx, v, secretsSrv) + if err != nil { + logger.Warn("Could not decrypt SSO settings secret", "field", k, "error", err) + return nil, err + } + + if decrypted == nil { + continue + } + + var reencrypted []byte + if encryptionSrv == nil { + reencrypted, err = secretsSrv.Encrypt(ctx, decrypted, secrets.WithoutScope()) + } else { + reencrypted, err = encryptionSrv.Encrypt(ctx, decrypted, secretKey) + } + if err != nil { + logger.Warn("Could not re-encrypt SSO settings secret", "field", k, "error", err) + return nil, err + } + + result[k] = base64.RawStdEncoding.EncodeToString(reencrypted) + } + case []any: + result[k], err = s.reEncryptSecretsInSlice(ctx, v, secretsSrv, encryptionSrv, secretKey) + if err != nil { + return nil, err + } + case map[string]any: + result[k], err = s.reEncryptSecretsInMap(ctx, v, secretsSrv, encryptionSrv, secretKey) + if err != nil { + return nil, err + } + default: + result[k] = v + } + } + + return result, nil +} + +// reEncryptSecretsInSlice applies the same re-encryption to every element of the given slice, recursing into nested slices and maps; scalar elements are copied unchanged. +func (s ssoSettingsSecret) reEncryptSecretsInSlice(ctx context.Context, a []any, secretsSrv *manager.SecretsService, encryptionSrv encryption.Internal, secretKey string) ([]any, error) { + result := make([]any, 0) + for _, v := range a { + switch v := v.(type) { + case []any: + inner, err := s.reEncryptSecretsInSlice(ctx, v, secretsSrv, encryptionSrv, secretKey) + if err != nil { + return nil, err + } + result = append(result, inner) + case map[string]any: + inner, err := s.reEncryptSecretsInMap(ctx, v, secretsSrv, encryptionSrv, secretKey) + if err != nil { + return nil, err + } + result = append(result, inner) + default: + result = append(result, v) + } + } + + return result, nil +} diff --git a/pkg/services/secrets/migrator/rollback.go b/pkg/services/secrets/migrator/rollback.go index 8a8838c0f96..97baabbca3e 100644 --- a/pkg/services/secrets/migrator/rollback.go +++ b/pkg/services/secrets/migrator/rollback.go @@ -11,7 +11,6 @@ import ( "github.com/grafana/grafana/pkg/services/ngalert/notifier" "github.com/grafana/grafana/pkg/services/secrets/manager" "github.com/grafana/grafana/pkg/services/ssosettings/models" - "github.com/grafana/grafana/pkg/services/ssosettings/ssosettingsimpl" ) func (s simpleSecret) Rollback( @@ -317,26 +316,10 @@ func (s ssoSettingsSecret) 
Rollback( for _, result := range results { err := sqlStore.WithTransactionalDbSession(ctx, func(sess *db.Session) error { - for field, value := range result.Settings { - if ssosettingsimpl.IsSecretField(field) { - decrypted, err := s.decryptValue(ctx, value, secretsSrv) - if err != nil { - logger.Warn("Could not decrypt SSO settings secret", "id", result.ID, "field", field, "error", err) - return err - } - - if decrypted == nil { - continue - } - - reencrypted, err := encryptionSrv.Encrypt(ctx, decrypted, secretKey) - if err != nil { - logger.Warn("Could not re-encrypt SSO settings secret", "id", result.ID, "field", field, "error", err) - return err - } - - result.Settings[field] = base64.RawStdEncoding.EncodeToString(reencrypted) - } + result.Settings, err = s.reEncryptSecretsInMap(ctx, result.Settings, secretsSrv, encryptionSrv, secretKey) + if err != nil { + logger.Warn("failed rolling back SSO settings secret", "id", result.ID, "error", err) + return err } err = sqlStore.WithDbSession(ctx, func(sess *db.Session) error { diff --git a/pkg/services/sqlstore/migrations/accesscontrol/action_set_migration.go b/pkg/services/sqlstore/migrations/accesscontrol/action_set_migration.go new file mode 100644 index 00000000000..f04810ab912 --- /dev/null +++ b/pkg/services/sqlstore/migrations/accesscontrol/action_set_migration.go @@ -0,0 +1,141 @@ +package accesscontrol + +import ( + "fmt" + "strings" + "time" + + "xorm.io/xorm" + + "github.com/grafana/grafana/pkg/services/accesscontrol" + "github.com/grafana/grafana/pkg/services/sqlstore/migrator" +) + +const AddActionSetMigrationID = "adding action set permissions" + +func AddActionSetPermissionsMigrator(mg *migrator.Migrator) { + mg.AddMigration(AddActionSetMigrationID, &actionSetMigrator{}) +} + +type actionSetMigrator struct { + sess *xorm.Session + migrator *migrator.Migrator + migrator.MigrationBase +} + +var _ migrator.CodeMigration = new(actionSetMigrator) + +func (m *actionSetMigrator) SQL(migrator.Dialect) string { + return "code migration" +} + +func (m *actionSetMigrator) Exec(sess *xorm.Session, migrator *migrator.Migrator) error { + m.sess = sess + m.migrator = migrator + return m.addActionSetActions() +} + +func (m *actionSetMigrator) addActionSetActions() error { + var results []accesscontrol.Permission + + // Find action sets and dashboard permissions for managed roles + // We don't need all dashboard permissions, just enough to help us determine what action set permissions to add + sql := ` + SELECT permission.role_id, permission.action, permission.scope FROM permission + LEFT JOIN role ON permission.role_id = role.id + WHERE permission.action IN ('dashboards:read', 'dashboards:write', 'dashboards.permissions:read', 'dashboards:view', 'dashboards:edit', 'dashboards:admin', 'folders:view', 'folders:edit', 'folders:admin') + AND role.name LIKE 'managed:%' +` + if err := m.sess.SQL(sql).Find(&results); err != nil { + return fmt.Errorf("failed to query permissions: %w", err) + } + + // group permissions by map[roleID]map[scope]actionSet + groupedPermissions := make(map[int64]map[string]string) + hasActionSet := make(map[int64]map[string]bool) + for _, result := range results { + // keep track of which dash/folder permission grants already have an action set permission + if isActionSetAction(result.Action) { + if _, ok := hasActionSet[result.RoleID]; !ok { + hasActionSet[result.RoleID] = make(map[string]bool) + } + hasActionSet[result.RoleID][result.Scope] = true + delete(groupedPermissions[result.RoleID], result.Scope) + continue + } + 
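+		// Illustration with made-up data: for a managed role holding
+		// "dashboards:read" and "dashboards:write" on scope "dashboards:uid:1",
+		// the switch below resolves the action set to "edit", while a
+		// "dashboards.permissions:read" grant on the same scope yields "admin".
+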
+ // don't add action set permissions where they already exist + if _, has := hasActionSet[result.RoleID]; has && hasActionSet[result.RoleID][result.Scope] { + continue + } + + if _, ok := groupedPermissions[result.RoleID]; !ok { + groupedPermissions[result.RoleID] = make(map[string]string) + } + + // store the most permissive action set permission + currentActionSet := groupedPermissions[result.RoleID][result.Scope] + switch result.Action { + case "dashboards:read": + if currentActionSet == "" { + groupedPermissions[result.RoleID][result.Scope] = "view" + } + case "dashboards:write": + if currentActionSet != "admin" { + groupedPermissions[result.RoleID][result.Scope] = "edit" + } + case "dashboards.permissions:read": + groupedPermissions[result.RoleID][result.Scope] = "admin" + } + } + + toAdd := make([]accesscontrol.Permission, 0, len(groupedPermissions)) + + now := time.Now() + for roleID, permissions := range groupedPermissions { + for scope, action := range permissions { + // should never be the case, but keeping this check for extra safety + if _, ok := hasActionSet[roleID][scope]; ok { + continue + } + + if strings.HasPrefix(scope, "folders:") { + action = fmt.Sprintf("folders:%s", action) + } else { + action = fmt.Sprintf("dashboards:%s", action) + } + + kind, attr, identifier := accesscontrol.SplitScope(scope) + toAdd = append(toAdd, accesscontrol.Permission{ + RoleID: roleID, + Scope: scope, + Action: action, + Kind: kind, + Attribute: attr, + Identifier: identifier, + Created: now, + Updated: now, + }) + } + } + + if len(toAdd) > 0 { + err := batch(len(toAdd), batchSize, func(start, end int) error { + m.migrator.Logger.Debug(fmt.Sprintf("inserting permissions %v", toAdd[start:end])) + if _, err := m.sess.InsertMulti(toAdd[start:end]); err != nil { + return fmt.Errorf("failed to add action sets: %w", err) + } + return nil + }) + if err != nil { + return err + } + m.migrator.Logger.Debug("updated managed roles with dash and folder action set permissions") + } + + return nil +} + +func isActionSetAction(action string) bool { + return action == "dashboards:view" || action == "dashboards:edit" || action == "dashboards:admin" || action == "folders:view" || action == "folders:edit" || action == "folders:admin" +} diff --git a/pkg/services/sqlstore/migrations/accesscontrol/migrations.go b/pkg/services/sqlstore/migrations/accesscontrol/migrations.go index da3c021dee0..31e9530c959 100644 --- a/pkg/services/sqlstore/migrations/accesscontrol/migrations.go +++ b/pkg/services/sqlstore/migrations/accesscontrol/migrations.go @@ -196,4 +196,18 @@ func AddMigration(mg *migrator.Migrator) { Type: migrator.UniqueIndex, Cols: []string{"role_id", "action", "scope"}, })) + + mg.AddMigration("add group mapping UID column to user_role table", migrator.NewAddColumnMigration(userRoleV1, &migrator.Column{ + Name: "group_mapping_uid", Type: migrator.DB_NVarchar, Length: 40, Default: "''", Nullable: true, + })) + + mg.AddMigration("add user_role org ID, user ID, role ID, group mapping UID index", migrator.NewAddIndexMigration(userRoleV1, &migrator.Index{ + Type: migrator.UniqueIndex, + Cols: []string{"org_id", "user_id", "role_id", "group_mapping_uid"}, + })) + + mg.AddMigration("remove user_role org ID, user ID, role ID index", migrator.NewDropIndexMigration(userRoleV1, &migrator.Index{ + Type: migrator.UniqueIndex, + Cols: []string{"org_id", "user_id", "role_id"}, + })) } diff --git a/pkg/services/sqlstore/migrations/accesscontrol/orphaned.go b/pkg/services/sqlstore/migrations/accesscontrol/orphaned.go 
index 05ade14da83..e96f966c862 100644 --- a/pkg/services/sqlstore/migrations/accesscontrol/orphaned.go +++ b/pkg/services/sqlstore/migrations/accesscontrol/orphaned.go @@ -49,7 +49,13 @@ func (m *orphanedServiceAccountPermissions) Exec(sess *xorm.Session, mg *migrato return nil } - // Then find all existing service accounts + return batch(len(ids), batchSize, func(start, end int) error { + return m.exec(sess, mg, ids[start:end]) + }) +} + +func (m *orphanedServiceAccountPermissions) exec(sess *xorm.Session, mg *migrator.Migrator, ids []int64) error { + // get all service accounts from batch raw := "SELECT u.id FROM " + mg.Dialect.Quote("user") + " AS u WHERE u.is_service_account AND u.id IN(?" + strings.Repeat(",?", len(ids)-1) + ")" args := make([]any, 0, len(ids)) for _, id := range ids { @@ -57,7 +63,7 @@ func (m *orphanedServiceAccountPermissions) Exec(sess *xorm.Session, mg *migrato } var existingIDs []int64 - err = sess.SQL(raw, args...).Find(&existingIDs) + err := sess.SQL(raw, args...).Find(&existingIDs) if err != nil { return fmt.Errorf("failed to fetch existing service accounts: %w", err) } diff --git a/pkg/services/sqlstore/migrations/accesscontrol/test/action_set_migration_test.go b/pkg/services/sqlstore/migrations/accesscontrol/test/action_set_migration_test.go new file mode 100644 index 00000000000..949b257aaf2 --- /dev/null +++ b/pkg/services/sqlstore/migrations/accesscontrol/test/action_set_migration_test.go @@ -0,0 +1,282 @@ +package test + +import ( + "slices" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/grafana/grafana/pkg/infra/log" + "github.com/grafana/grafana/pkg/services/accesscontrol" + "github.com/grafana/grafana/pkg/services/accesscontrol/ossaccesscontrol" + acmig "github.com/grafana/grafana/pkg/services/sqlstore/migrations/accesscontrol" + "github.com/grafana/grafana/pkg/services/sqlstore/migrator" + "github.com/grafana/grafana/pkg/setting" +) + +func TestActionSetMigration(t *testing.T) { + // Run initial migration to have a working DB + x := setupTestDB(t) + + type migrationTestCase struct { + desc string + existingRolePerms map[string]map[string][]string + expectedActionSets map[string]map[string]string + } + testCases := []migrationTestCase{ + { + desc: "empty perms", + existingRolePerms: map[string]map[string][]string{}, + }, + { + desc: "dashboard permissions that are not managed don't get an action set", + existingRolePerms: map[string]map[string][]string{ + "my_custom_role": { + "dashboards:uid:1": ossaccesscontrol.DashboardViewActions, + }, + }, + }, + { + desc: "managed permissions that are not dashboard permissions don't get an action set", + existingRolePerms: map[string]map[string][]string{ + "managed:builtins:viewer:permissions": { + "datasources:uid:1": {"datasources:query", "datasources:read"}, + }, + }, + }, + { + desc: "managed dash viewer gets a viewer action set", + existingRolePerms: map[string]map[string][]string{ + "managed:builtins:viewer:permissions": { + "dashboards:uid:1": ossaccesscontrol.DashboardViewActions, + }, + }, + expectedActionSets: map[string]map[string]string{ + "managed:builtins:viewer:permissions": { + "dashboards:uid:1": "dashboards:view", + }, + }, + }, + { + desc: "managed dash editor gets an editor action set", + existingRolePerms: map[string]map[string][]string{ + "managed:builtins:viewer:permissions": { + "dashboards:uid:1": ossaccesscontrol.DashboardEditActions, + }, + }, + expectedActionSets: map[string]map[string]string{ + "managed:builtins:viewer:permissions": { + "dashboards:uid:1": 
"dashboards:edit", + }, + }, + }, + { + desc: "managed dash admin gets an admin action set", + existingRolePerms: map[string]map[string][]string{ + "managed:builtins:viewer:permissions": { + "dashboards:uid:1": ossaccesscontrol.DashboardAdminActions, + }, + }, + expectedActionSets: map[string]map[string]string{ + "managed:builtins:viewer:permissions": { + "dashboards:uid:1": "dashboards:admin", + }, + }, + }, + { + desc: "managed folder viewer gets a viewer action set", + existingRolePerms: map[string]map[string][]string{ + "managed:builtins:viewer:permissions": { + "folders:uid:1": append(ossaccesscontrol.FolderViewActions, ossaccesscontrol.DashboardViewActions...), + }, + }, + expectedActionSets: map[string]map[string]string{ + "managed:builtins:viewer:permissions": { + "folders:uid:1": "folders:view", + }, + }, + }, + { + desc: "managed folder editor gets an editor action set", + existingRolePerms: map[string]map[string][]string{ + "managed:builtins:viewer:permissions": { + "folders:uid:1": append(ossaccesscontrol.FolderEditActions, ossaccesscontrol.DashboardEditActions...), + }, + }, + expectedActionSets: map[string]map[string]string{ + "managed:builtins:viewer:permissions": { + "folders:uid:1": "folders:edit", + }, + }, + }, + { + desc: "managed folder admin gets an admin action set", + existingRolePerms: map[string]map[string][]string{ + "managed:builtins:viewer:permissions": { + "folders:uid:1": append(ossaccesscontrol.FolderAdminActions, ossaccesscontrol.DashboardAdminActions...), + }, + }, + expectedActionSets: map[string]map[string]string{ + "managed:builtins:viewer:permissions": { + "folders:uid:1": "folders:admin", + }, + }, + }, + { + desc: "can add action sets for multiple folders and dashboards under the same managed permission", + existingRolePerms: map[string]map[string][]string{ + "managed:builtins:viewer:permissions": { + "folders:uid:1": append(ossaccesscontrol.FolderAdminActions, ossaccesscontrol.DashboardAdminActions...), + "dashboards:uid:1": ossaccesscontrol.DashboardEditActions, + "datasources:uid:1": {"datasources:query", "datasources:read"}, + "folders:uid:2": append(ossaccesscontrol.FolderViewActions, ossaccesscontrol.DashboardViewActions...), + }, + }, + expectedActionSets: map[string]map[string]string{ + "managed:builtins:viewer:permissions": { + "folders:uid:1": "folders:admin", + "folders:uid:2": "folders:view", + "dashboards:uid:1": "dashboards:edit", + }, + }, + }, + { + desc: "can add action sets for multiple managed roles", + existingRolePerms: map[string]map[string][]string{ + "managed:builtins:viewer:permissions": { + "folders:uid:1": append(ossaccesscontrol.FolderAdminActions, ossaccesscontrol.DashboardAdminActions...), + "folders:uid:2": append(ossaccesscontrol.FolderViewActions, ossaccesscontrol.DashboardViewActions...), + }, + "managed:users:1:permissions": { + "folders:uid:1": append(ossaccesscontrol.FolderEditActions, ossaccesscontrol.DashboardEditActions...), + "dashboards:uid:1": ossaccesscontrol.DashboardEditActions, + }, + "managed:teams:1:permissions": { + "folders:uid:1": append(ossaccesscontrol.FolderEditActions, ossaccesscontrol.DashboardEditActions...), + "folders:uid:2": append(ossaccesscontrol.FolderAdminActions, ossaccesscontrol.DashboardAdminActions...), + }, + }, + expectedActionSets: map[string]map[string]string{ + "managed:builtins:viewer:permissions": { + "folders:uid:1": "folders:admin", + "folders:uid:2": "folders:view", + }, + "managed:users:1:permissions": { + "folders:uid:1": "folders:edit", + "dashboards:uid:1": 
"dashboards:edit", + }, + "managed:teams:1:permissions": { + "folders:uid:1": "folders:edit", + "folders:uid:2": "folders:admin", + }, + }, + }, + { + desc: "can handle existing action sets", + existingRolePerms: map[string]map[string][]string{ + "managed:builtins:viewer:permissions": { + "dashboards:uid:1": append(ossaccesscontrol.DashboardAdminActions, "dashboards:admin"), + "dashboards:uid:2": ossaccesscontrol.DashboardViewActions, + "dashboards:uid:4": append(ossaccesscontrol.DashboardViewActions, "dashboards:view"), + }, + "managed:users:1:permissions": { + "dashboards:uid:1": append(ossaccesscontrol.DashboardEditActions, "dashboards:edit"), + "dashboards:uid:2": append(ossaccesscontrol.DashboardViewActions, "dashboards:view"), + "dashboards:uid:3": ossaccesscontrol.DashboardEditActions, + "dashboards:uid:4": ossaccesscontrol.DashboardAdminActions, + }, + }, + expectedActionSets: map[string]map[string]string{ + "managed:builtins:viewer:permissions": { + "dashboards:uid:1": "dashboards:admin", + "dashboards:uid:2": "dashboards:view", + "dashboards:uid:4": "dashboards:view", + }, + "managed:users:1:permissions": { + "dashboards:uid:1": "dashboards:edit", + "dashboards:uid:2": "dashboards:view", + "dashboards:uid:3": "dashboards:edit", + "dashboards:uid:4": "dashboards:admin", + }, + }, + }, + } + + for _, tc := range testCases { + t.Run(tc.desc, func(t *testing.T) { + // Remove migration, roles and permissions + _, errDeleteMig := x.Exec(`DELETE FROM migration_log WHERE migration_id = ?`, acmig.AddActionSetMigrationID) + require.NoError(t, errDeleteMig) + _, errDeleteRole := x.Exec(`DELETE FROM role`) + require.NoError(t, errDeleteRole) + _, errDeletePerms := x.Exec(`DELETE FROM permission`) + require.NoError(t, errDeletePerms) + + orgID := 1 + rolePerms := map[string][]rawPermission{} + for roleName, permissions := range tc.existingRolePerms { + rawPerms := []rawPermission{} + for scope, actions := range permissions { + for _, action := range actions { + rawPerms = append(rawPerms, rawPermission{Scope: scope, Action: action}) + } + } + rolePerms[roleName] = rawPerms + } + perms := map[int64]map[string][]rawPermission{int64(orgID): rolePerms} + + // seed DB with permissions + putTestPermissions(t, x, perms) + + // Run action set migration + acmigrator := migrator.NewMigrator(x, &setting.Cfg{Logger: log.New("acmigration.test")}) + acmig.AddActionSetPermissionsMigrator(acmigrator) + + errRunningMig := acmigrator.Start(false, 0) + require.NoError(t, errRunningMig) + + // verify got == want + for roleName, existingPerms := range tc.existingRolePerms { + // Check the role exists + role := accesscontrol.Role{} + hasRole, err := x.Table("role").Where("org_id = ? 
AND name = ?", orgID, roleName).Get(&role) + require.NoError(t, err) + require.True(t, hasRole, "expected role to exist", "role", roleName) + + // Check permissions associated with each role + perms := []accesscontrol.Permission{} + _, err = x.Table("permission").Where("role_id = ?", role.ID).FindAndCount(&perms) + require.NoError(t, err) + + gotRawPerms := convertToScopeActionMap(perms) + expectedPerms := getExpectedPerms(existingPerms, tc.expectedActionSets[roleName]) + require.Equal(t, len(gotRawPerms), len(expectedPerms), "expected role to contain the same amount of scopes", "role", roleName) + for scope, actions := range expectedPerms { + require.ElementsMatch(t, gotRawPerms[scope], actions, "expected role to have the same permissions", "role", roleName) + } + } + }) + } +} + +func convertToScopeActionMap(perms []accesscontrol.Permission) map[string][]string { + result := map[string][]string{} + for _, perm := range perms { + if _, ok := result[perm.Scope]; !ok { + result[perm.Scope] = []string{} + } + result[perm.Scope] = append(result[perm.Scope], perm.Action) + } + return result +} + +func getExpectedPerms(existingPerms map[string][]string, actionSets map[string]string) map[string][]string { + for scope := range existingPerms { + if actionSet, ok := actionSets[scope]; ok { + if !slices.Contains(existingPerms[scope], actionSet) { + existingPerms[scope] = append(existingPerms[scope], actionSets[scope]) + } + } + } + return existingPerms +} diff --git a/pkg/services/sqlstore/migrations/cloud_migrations.go b/pkg/services/sqlstore/migrations/cloud_migrations.go index 8b9e3a72d60..7dd4b167595 100644 --- a/pkg/services/sqlstore/migrations/cloud_migrations.go +++ b/pkg/services/sqlstore/migrations/cloud_migrations.go @@ -164,4 +164,10 @@ func addCloudMigrationsMigrations(mg *Migrator) { Type: DB_Text, Nullable: true, })) + + mg.AddMigration("add cloud_migration_resource.parent_name column", NewAddColumnMigration(migrationResourceTable, &Column{ + Name: "parent_name", + Type: DB_Text, + Nullable: true, + })) } diff --git a/pkg/services/sqlstore/migrations/externalsession/migrations.go b/pkg/services/sqlstore/migrations/externalsession/migrations.go new file mode 100644 index 00000000000..e3350334a0a --- /dev/null +++ b/pkg/services/sqlstore/migrations/externalsession/migrations.go @@ -0,0 +1,31 @@ +package externalsession + +import "github.com/grafana/grafana/pkg/services/sqlstore/migrator" + +func AddMigration(mg *migrator.Migrator) { + externalSessionV1 := migrator.Table{ + Name: "user_external_session", + Columns: []*migrator.Column{ + {Name: "id", Type: migrator.DB_BigInt, IsPrimaryKey: true, IsAutoIncrement: true}, + {Name: "user_auth_id", Type: migrator.DB_BigInt, Nullable: false}, + {Name: "user_id", Type: migrator.DB_BigInt, Nullable: false}, + {Name: "auth_module", Type: migrator.DB_NVarchar, Length: 190, Nullable: false}, + {Name: "access_token", Type: migrator.DB_Text, Nullable: true}, + {Name: "id_token", Type: migrator.DB_Text, Nullable: true}, + {Name: "refresh_token", Type: migrator.DB_Text, Nullable: true}, + {Name: "session_id", Type: migrator.DB_NVarchar, Length: 255, Nullable: true}, + {Name: "session_id_hash", Type: migrator.DB_Char, Length: 44, Nullable: true}, + {Name: "name_id", Type: migrator.DB_NVarchar, Length: 255, Nullable: true}, + {Name: "name_id_hash", Type: migrator.DB_Char, Length: 44, Nullable: true}, + {Name: "expires_at", Type: migrator.DB_DateTime, Nullable: true}, + {Name: "created_at", Type: migrator.DB_DateTime, Nullable: false}, + }, + Indices: 
[]*migrator.Index{ + {Cols: []string{"user_id"}}, + {Cols: []string{"session_id_hash"}}, + {Cols: []string{"name_id_hash"}}, + }, + } + + mg.AddMigration("create user_external_session table", migrator.NewAddTableMigration(externalSessionV1)) +} diff --git a/pkg/services/sqlstore/migrations/migrations.go b/pkg/services/sqlstore/migrations/migrations.go index f8de7ec326e..aba1ff6e43e 100644 --- a/pkg/services/sqlstore/migrations/migrations.go +++ b/pkg/services/sqlstore/migrations/migrations.go @@ -5,6 +5,7 @@ import ( "github.com/grafana/grafana/pkg/services/featuremgmt" "github.com/grafana/grafana/pkg/services/sqlstore/migrations/accesscontrol" "github.com/grafana/grafana/pkg/services/sqlstore/migrations/anonservice" + "github.com/grafana/grafana/pkg/services/sqlstore/migrations/externalsession" "github.com/grafana/grafana/pkg/services/sqlstore/migrations/signingkeys" "github.com/grafana/grafana/pkg/services/sqlstore/migrations/ssosettings" "github.com/grafana/grafana/pkg/services/sqlstore/migrations/ualert" @@ -133,6 +134,10 @@ func (oss *OSSMigrations) AddMigration(mg *Migrator) { ualert.AddRuleMetadata(mg) accesscontrol.AddOrphanedMigrations(mg) + + accesscontrol.AddActionSetPermissionsMigrator(mg) + + externalsession.AddMigration(mg) } func addStarMigrations(mg *Migrator) { diff --git a/pkg/services/sqlstore/migrations/user_auth_token_mig.go b/pkg/services/sqlstore/migrations/user_auth_token_mig.go index 2b24e56897e..2be726947ca 100644 --- a/pkg/services/sqlstore/migrations/user_auth_token_mig.go +++ b/pkg/services/sqlstore/migrations/user_auth_token_mig.go @@ -48,4 +48,8 @@ func addUserAuthTokenMigrations(mg *Migrator) { mg.AddMigration("add index user_auth_token.revoked_at", NewAddIndexMigration(userAuthTokenV1, &Index{ Cols: []string{"revoked_at"}, })) + + mg.AddMigration("add external_session_id to user_auth_token", NewAddColumnMigration(userAuthTokenV1, &Column{ + Name: "external_session_id", Type: DB_BigInt, Nullable: true, + })) } diff --git a/pkg/services/sqlstore/migrations/usermig/service_account_multiple_org_login_migrator.go b/pkg/services/sqlstore/migrations/usermig/service_account_multiple_org_login_migrator.go index 1d60a4f7f28..0737091bda4 100644 --- a/pkg/services/sqlstore/migrations/usermig/service_account_multiple_org_login_migrator.go +++ b/pkg/services/sqlstore/migrations/usermig/service_account_multiple_org_login_migrator.go @@ -9,12 +9,16 @@ import ( const ( AllowSameLoginCrossOrgs = "update login field with orgid to allow for multiple service accounts with same name across orgs" + DedupOrgInLogin = "update service accounts login field orgid to appear only once" ) // Service accounts login were not unique per org. this migration is part of making it unique per org // to be able to create service accounts that are unique per org func AddServiceAccountsAllowSameLoginCrossOrgs(mg *migrator.Migrator) { mg.AddMigration(AllowSameLoginCrossOrgs, &ServiceAccountsSameLoginCrossOrgs{}) + // Before it was fixed, the previous migration introduced the org_id again in logins that already had it. + // This migration removes the duplicate org_id from the login. 
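+ // For example, a login stored as "sa-1-1-dedup" in org 1 becomes "sa-1-dedup" (the dedup test cases in service_account_test.go below exercise exactly this).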
+ mg.AddMigration(DedupOrgInLogin, &ServiceAccountsDeduplicateOrgInLogin{}) } var _ migrator.CodeMigration = new(ServiceAccountsSameLoginCrossOrgs) @@ -35,34 +39,89 @@ func (p *ServiceAccountsSameLoginCrossOrgs) Exec(sess *xorm.Session, mg *migrato var err error switch p.dialect.DriverName() { case migrator.Postgres: - _, err = p.sess.Exec(`UPDATE "user" - SET login = 'sa-' || org_id::text || '-' || - CASE - WHEN login LIKE 'sa-%' THEN SUBSTRING(login FROM 4) - ELSE login - END - WHERE login IS NOT NULL AND is_service_account = true;`, - ) + _, err = p.sess.Exec(` + UPDATE "user" + SET login = 'sa-' || org_id::text || '-' || + CASE + WHEN login LIKE 'sa-%' THEN SUBSTRING(login FROM 4) + ELSE login + END + WHERE login IS NOT NULL + AND is_service_account = true + AND login NOT LIKE 'sa-' || org_id::text || '-%'; + `) case migrator.MySQL: - _, err = p.sess.Exec(`UPDATE user - SET login = CONCAT('sa-', CAST(org_id AS CHAR), '-', - CASE - WHEN login LIKE 'sa-%' THEN SUBSTRING(login, 4) - ELSE login - END) - WHERE login IS NOT NULL AND is_service_account = 1;`, - ) + _, err = p.sess.Exec(` + UPDATE user + SET login = CONCAT('sa-', CAST(org_id AS CHAR), '-', + CASE + WHEN login LIKE 'sa-%' THEN SUBSTRING(login, 4) + ELSE login + END + ) + WHERE login IS NOT NULL + AND is_service_account = 1 + AND login NOT LIKE CONCAT('sa-', org_id, '-%'); + `) case migrator.SQLite: - _, err = p.sess.Exec(`Update ` + p.dialect.Quote("user") + ` - SET login = 'sa-' || CAST(org_id AS TEXT) || '-' || - CASE - WHEN SUBSTR(login, 1, 3) = 'sa-' THEN SUBSTR(login, 4) - ELSE login - END - WHERE login IS NOT NULL AND is_service_account = 1;`, - ) + _, err = p.sess.Exec(` + UPDATE ` + p.dialect.Quote("user") + ` + SET login = 'sa-' || CAST(org_id AS TEXT) || '-' || + CASE + WHEN SUBSTR(login, 1, 3) = 'sa-' THEN SUBSTR(login, 4) + ELSE login + END + WHERE login IS NOT NULL + AND is_service_account = 1 + AND login NOT LIKE 'sa-' || CAST(org_id AS TEXT) || '-%'; + `) default: return fmt.Errorf("dialect not supported: %s", p.dialect) } return err } + +type ServiceAccountsDeduplicateOrgInLogin struct { + migrator.MigrationBase +} + +func (p *ServiceAccountsDeduplicateOrgInLogin) SQL(dialect migrator.Dialect) string { + return "code migration" +} + +func (p *ServiceAccountsDeduplicateOrgInLogin) Exec(sess *xorm.Session, mg *migrator.Migrator) error { + dialect := mg.Dialect + var err error + + // var logins []Login + switch dialect.DriverName() { + case migrator.Postgres: + _, err = sess.Exec(` + UPDATE "user" + SET login = 'sa-' || org_id::text || SUBSTRING(login FROM LENGTH('sa-' || org_id::text || '-' || org_id::text)+1) + WHERE login IS NOT NULL + AND is_service_account = true + AND login LIKE 'sa-' || org_id::text || '-' || org_id::text || '-%'; + `) + case migrator.MySQL: + _, err = sess.Exec(` + UPDATE user + SET login = CONCAT('sa-', org_id, SUBSTRING(login, LENGTH(CONCAT('sa-', org_id, '-', org_id))+1)) + WHERE login IS NOT NULL + AND is_service_account = 1 + AND login LIKE CONCAT('sa-', org_id, '-', org_id, '-%'); + `) + case migrator.SQLite: + _, err = sess.Exec(` + UPDATE ` + dialect.Quote("user") + ` + SET login = 'sa-' || CAST(org_id AS TEXT) || SUBSTRING(login, LENGTH('sa-'||CAST(org_id AS TEXT)||'-'||CAST(org_id AS TEXT))+1) + WHERE login IS NOT NULL + AND is_service_account = 1 + AND login LIKE 'sa-'||CAST(org_id AS TEXT)||'-'||CAST(org_id AS TEXT)||'-%'; + `) + default: + return fmt.Errorf("dialect not supported: %s", dialect) + } + + return err +} diff --git 
a/pkg/services/sqlstore/migrations/usermig/test/service_account_test.go b/pkg/services/sqlstore/migrations/usermig/test/service_account_test.go index 043c9852245..7ef8b475855 100644 --- a/pkg/services/sqlstore/migrations/usermig/test/service_account_test.go +++ b/pkg/services/sqlstore/migrations/usermig/test/service_account_test.go @@ -15,6 +15,9 @@ import ( ) func TestIntegrationServiceAccountMigration(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test in short mode") + } // Run initial migration to have a working DB x := setupTestDB(t) @@ -211,6 +214,43 @@ func TestIntegrationServiceAccountMigration(t *testing.T) { }, }, }, + { + desc: "avoid reapply of migration", + serviceAccounts: []*user.User{ + { + ID: 11, + UID: "u11", + Name: "sa-1-extsvc-bug", + Login: "sa-1-extsvc-bug", + Email: "sa-1-extsvc-bug@org.com", + OrgID: 1, + Created: now, + Updated: now, + IsServiceAccount: true, + }, + { + ID: 12, + UID: "u12", + Name: "sa-2-extsvc-bug2", + Login: "sa-2-extsvc-bug2", + Email: "sa-2-extsvc-bug2@org.com", + OrgID: 2, + Created: now, + Updated: now, + IsServiceAccount: true, + }, + }, + wantServiceAccounts: []*user.User{ + { + ID: 11, + Login: "sa-1-extsvc-bug", + }, + { + ID: 12, + Login: "sa-2-extsvc-bug2", + }, + }, + }, } for _, tc := range testCases { @@ -245,3 +285,125 @@ func TestIntegrationServiceAccountMigration(t *testing.T) { }) } } + +func TestIntegrationServiceAccountDedupOrgMigration(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test in short mode") + } + // Run initial migration to have a working DB + x := setupTestDB(t) + + type migrationTestCase struct { + desc string + serviceAccounts []*user.User + wantServiceAccounts []*user.User + } + testCases := []migrationTestCase{ + { + desc: "no change", + serviceAccounts: []*user.User{ + { + ID: 1, + UID: "u1", + Name: "sa-1-nochange", + Login: "sa-1-nochange", + Email: "sa-1-nochange@example.org", + OrgID: 1, + Created: now, + Updated: now, + IsServiceAccount: true, + }, + { + ID: 2, + UID: "u2", + Name: "sa-2-nochange", + Login: "sa-2-nochange", + Email: "sa-2-nochange@example.org", + OrgID: 2, + Created: now, + Updated: now, + IsServiceAccount: true, + }, + }, + wantServiceAccounts: []*user.User{ + { + ID: 1, + Login: "sa-1-nochange", + }, + { + ID: 2, + Login: "sa-2-nochange", + }, + }, + }, + { + desc: "dedup org in login", + serviceAccounts: []*user.User{ + { + ID: 3, + UID: "u3", + Name: "sa-1-dedup", + Login: "sa-1-1-dedup", + Email: "sa-1-dedup@example.org", + OrgID: 1, + Created: now, + Updated: now, + IsServiceAccount: true, + }, + { + ID: 4, + UID: "u4", + Name: "sa-6480-dedup", + Login: "sa-6480-6480-dedup", + Email: "sa-6480-dedup@example.org", + OrgID: 6480, + Created: now, + Updated: now, + IsServiceAccount: true, + }, + }, + wantServiceAccounts: []*user.User{ + { + ID: 3, + Login: "sa-1-dedup", + }, + { + ID: 4, + Login: "sa-6480-dedup", + }, + }, + }, + } + + for _, tc := range testCases { + t.Run(tc.desc, func(t *testing.T) { + // Remove the migration log entry so the migration can run again + _, errDeleteMig := x.Exec(`DELETE FROM migration_log WHERE migration_id = ?`, usermig.DedupOrgInLogin) + require.NoError(t, errDeleteMig) + + // insert service accounts + serviceAccountsCount, err := x.Insert(tc.serviceAccounts) + require.NoError(t, err) + require.Equal(t, int64(len(tc.serviceAccounts)), serviceAccountsCount) + + // run the migration + usermigrator := migrator.NewMigrator(x, &setting.Cfg{Logger: log.New("usermigration.test")}) + usermigrator.AddMigration(usermig.DedupOrgInLogin, 
&usermig.ServiceAccountsDeduplicateOrgInLogin{}) + errRunningMig := usermigrator.Start(false, 0) + require.NoError(t, errRunningMig) + + // Check service accounts + resultingServiceAccounts := []user.User{} + err = x.Table("user").Find(&resultingServiceAccounts) + require.NoError(t, err) + + for i := range tc.wantServiceAccounts { + for _, sa := range resultingServiceAccounts { + if sa.ID == tc.wantServiceAccounts[i].ID { + assert.Equal(t, tc.wantServiceAccounts[i].Login, sa.Login) + } + } + } + }) + } +} diff --git a/pkg/services/team/model.go b/pkg/services/team/model.go index debb2cd32d9..1fcc79f941f 100644 --- a/pkg/services/team/model.go +++ b/pkg/services/team/model.go @@ -54,8 +54,10 @@ type DeleteTeamCommand struct { } type GetTeamByIDQuery struct { - OrgID int64 + OrgID int64 + // Get team by ID or UID. If ID is set, UID is ignored. ID int64 + UID string SignedInUser identity.Requester HiddenUsers map[string]struct{} } diff --git a/pkg/services/team/team.go b/pkg/services/team/team.go index 3a44a8883fa..3df15bcddc1 100644 --- a/pkg/services/team/team.go +++ b/pkg/services/team/team.go @@ -2,6 +2,10 @@ package team import ( "context" + "strconv" + + contextmodel "github.com/grafana/grafana/pkg/services/contexthandler/model" + "github.com/grafana/grafana/pkg/web" ) type Service interface { @@ -18,3 +22,24 @@ type Service interface { GetTeamMembers(ctx context.Context, query *GetTeamMembersQuery) ([]*TeamMemberDTO, error) RegisterDelete(query string) } + +func MiddlewareTeamUIDResolver(teamService Service, paramName string) web.Handler { + return func(c *contextmodel.ReqContext) { + // Resolve the team from the request params: if a UID was passed, replace it with the numeric team ID + teamID := web.Params(c.Req)[paramName] + // if teamID is empty or already a numeric ID, there is nothing to resolve + _, err := strconv.ParseInt(teamID, 10, 64) + if teamID == "" || err == nil { + return + } + + team, err := teamService.GetTeamByID(c.Req.Context(), &GetTeamByIDQuery{UID: teamID, OrgID: c.OrgID}) + if err == nil { + gotParams := web.Params(c.Req) + gotParams[paramName] = strconv.FormatInt(team.ID, 10) + web.SetURLParams(c.Req, gotParams) + } else { + c.JsonApiErr(404, "Not found", nil) + } + } +} diff --git a/pkg/services/team/teamapi/api.go b/pkg/services/team/teamapi/api.go index 955c6f119d2..f18072436ed 100644 --- a/pkg/services/team/teamapi/api.go +++ b/pkg/services/team/teamapi/api.go @@ -55,34 +55,35 @@ func (tapi *TeamAPI) registerRoutes(router routing.RouteRegister, ac accesscontrol.AccessControl) { authorize := accesscontrol.Middleware(ac) + teamResolver := team.MiddlewareTeamUIDResolver(tapi.teamService, ":teamId") router.Group("/api", func(apiRoute routing.RouteRegister) { // team (admin permission required) apiRoute.Group("/teams", func(teamsRoute routing.RouteRegister) { teamsRoute.Post("/", authorize(accesscontrol.EvalPermission(accesscontrol.ActionTeamsCreate)), routing.Wrap(tapi.createTeam)) - teamsRoute.Put("/:teamId", authorize(accesscontrol.EvalPermission(accesscontrol.ActionTeamsWrite, + teamsRoute.Put("/:teamId", teamResolver, authorize(accesscontrol.EvalPermission(accesscontrol.ActionTeamsWrite, accesscontrol.ScopeTeamsID)), routing.Wrap(tapi.updateTeam)) - teamsRoute.Delete("/:teamId", authorize(accesscontrol.EvalPermission(accesscontrol.ActionTeamsDelete, + teamsRoute.Delete("/:teamId", teamResolver, authorize(accesscontrol.EvalPermission(accesscontrol.ActionTeamsDelete, accesscontrol.ScopeTeamsID)), routing.Wrap(tapi.deleteTeamByID)) - 
teamsRoute.Get("/:teamId/members", authorize(accesscontrol.EvalPermission(accesscontrol.ActionTeamsPermissionsRead, + teamsRoute.Get("/:teamId/members", teamResolver, authorize(accesscontrol.EvalPermission(accesscontrol.ActionTeamsPermissionsRead, accesscontrol.ScopeTeamsID)), routing.Wrap(tapi.getTeamMembers)) - teamsRoute.Post("/:teamId/members", authorize(accesscontrol.EvalPermission(accesscontrol.ActionTeamsPermissionsWrite, + teamsRoute.Post("/:teamId/members", teamResolver, authorize(accesscontrol.EvalPermission(accesscontrol.ActionTeamsPermissionsWrite, accesscontrol.ScopeTeamsID)), routing.Wrap(tapi.addTeamMember)) - teamsRoute.Put("/:teamId/members/:userId", authorize(accesscontrol.EvalPermission(accesscontrol.ActionTeamsPermissionsWrite, + teamsRoute.Put("/:teamId/members/:userId", teamResolver, authorize(accesscontrol.EvalPermission(accesscontrol.ActionTeamsPermissionsWrite, accesscontrol.ScopeTeamsID)), routing.Wrap(tapi.updateTeamMember)) - teamsRoute.Put("/:teamId/members", authorize(accesscontrol.EvalPermission(accesscontrol.ActionTeamsPermissionsWrite, + teamsRoute.Put("/:teamId/members", teamResolver, authorize(accesscontrol.EvalPermission(accesscontrol.ActionTeamsPermissionsWrite, accesscontrol.ScopeTeamsID)), routing.Wrap(tapi.setTeamMemberships)) - teamsRoute.Delete("/:teamId/members/:userId", authorize(accesscontrol.EvalPermission(accesscontrol.ActionTeamsPermissionsWrite, + teamsRoute.Delete("/:teamId/members/:userId", teamResolver, authorize(accesscontrol.EvalPermission(accesscontrol.ActionTeamsPermissionsWrite, accesscontrol.ScopeTeamsID)), routing.Wrap(tapi.removeTeamMember)) - teamsRoute.Get("/:teamId/preferences", authorize(accesscontrol.EvalPermission(accesscontrol.ActionTeamsRead, + teamsRoute.Get("/:teamId/preferences", teamResolver, authorize(accesscontrol.EvalPermission(accesscontrol.ActionTeamsRead, accesscontrol.ScopeTeamsID)), routing.Wrap(tapi.getTeamPreferences)) - teamsRoute.Put("/:teamId/preferences", authorize(accesscontrol.EvalPermission(accesscontrol.ActionTeamsWrite, + teamsRoute.Put("/:teamId/preferences", teamResolver, authorize(accesscontrol.EvalPermission(accesscontrol.ActionTeamsWrite, accesscontrol.ScopeTeamsID)), routing.Wrap(tapi.updateTeamPreferences)) }, requestmeta.SetOwner(requestmeta.TeamAuth)) // team without requirement of user to be org admin apiRoute.Group("/teams", func(teamsRoute routing.RouteRegister) { - teamsRoute.Get("/:teamId", authorize(accesscontrol.EvalPermission(accesscontrol.ActionTeamsRead, + teamsRoute.Get("/:teamId", teamResolver, authorize(accesscontrol.EvalPermission(accesscontrol.ActionTeamsRead, accesscontrol.ScopeTeamsID)), routing.Wrap(tapi.getTeamByID)) teamsRoute.Get("/search", authorize(accesscontrol.EvalPermission(accesscontrol.ActionTeamsRead)), routing.Wrap(tapi.searchTeams)) diff --git a/pkg/services/team/teamapi/team.go b/pkg/services/team/teamapi/team.go index 71d90504c38..a76bd28c3e9 100644 --- a/pkg/services/team/teamapi/team.go +++ b/pkg/services/team/teamapi/team.go @@ -59,6 +59,7 @@ func (tapi *TeamAPI) createTeam(c *contextmodel.ReqContext) response.Response { return response.JSON(http.StatusOK, &util.DynMap{ "teamId": t.ID, + "uid": t.UID, "message": "Team created", }) } @@ -230,7 +231,7 @@ func (tapi *TeamAPI) getTeamByID(c *contextmodel.ReqContext) response.Response { } // Add accesscontrol metadata - queryResult.AccessControl = tapi.getAccessControlMetadata(c, c.SignedInUser.GetOrgID(), "teams:id:", strconv.FormatInt(queryResult.ID, 10)) + queryResult.AccessControl = 
tapi.getAccessControlMetadata(c, "teams:id:", strconv.FormatInt(queryResult.ID, 10)) queryResult.AvatarURL = dtos.GetGravatarUrlWithDefault(tapi.cfg, queryResult.Email, queryResult.Name) return response.JSON(http.StatusOK, &queryResult) @@ -362,6 +363,7 @@ type CreateTeamResponse struct { // in: body Body struct { TeamId int64 `json:"teamId"` + Uid string `json:"uid"` Message string `json:"message"` } `json:"body"` } @@ -384,7 +386,7 @@ func (tapi *TeamAPI) getMultiAccessControlMetadata(c *contextmodel.ReqContext, // Metadata helpers // getAccessControlMetadata returns the accesscontrol metadata associated with a given resource func (tapi *TeamAPI) getAccessControlMetadata(c *contextmodel.ReqContext, - orgID int64, prefix string, resourceID string) accesscontrol.Metadata { + prefix string, resourceID string) accesscontrol.Metadata { ids := map[string]bool{resourceID: true} return tapi.getMultiAccessControlMetadata(c, prefix, ids)[resourceID] } diff --git a/pkg/services/team/teamapi/team_members_test.go b/pkg/services/team/teamapi/team_members_test.go index fd616925b31..43727d6e5cf 100644 --- a/pkg/services/team/teamapi/team_members_test.go +++ b/pkg/services/team/teamapi/team_members_test.go @@ -28,14 +28,18 @@ import ( "github.com/grafana/grafana/pkg/web/webtest" ) -func SetupAPITestServer(t *testing.T, opts ...func(a *TeamAPI)) *webtest.Server { +func SetupAPITestServer(t *testing.T, teamService team.Service, opts ...func(a *TeamAPI)) *webtest.Server { t.Helper() router := routing.NewRouteRegister() cfg := setting.NewCfg() cfg.LDAPAuthEnabled = true + if teamService == nil { + teamService = teamtest.NewFakeService() + } + a := ProvideTeamAPI(router, - teamtest.NewFakeService(), + teamService, actest.FakeService{}, acimpl.ProvideAccessControl(featuremgmt.WithFeatures(), zanzana.NewNoopClient()), &actest.FakePermissionsService{}, @@ -55,7 +59,7 @@ func SetupAPITestServer(t *testing.T, opts ...func(a *TeamAPI)) *webtest.Server } func TestAddTeamMembersAPIEndpoint(t *testing.T) { - server := SetupAPITestServer(t) + server := SetupAPITestServer(t, &teamtest.FakeService{ExpectedTeamDTO: &team.TeamDTO{ID: 1, UID: "a00001"}}) t.Run("should be able to add team member with correct permission", func(t *testing.T) { req := webtest.RequestWithSignedInUser( @@ -68,6 +72,17 @@ func TestAddTeamMembersAPIEndpoint(t *testing.T) { require.NoError(t, res.Body.Close()) }) + t.Run("should be able to add team member with correct permission by UID", func(t *testing.T) { + req := webtest.RequestWithSignedInUser( + server.NewRequest(http.MethodPost, "/api/teams/a00001/members", strings.NewReader("{\"userId\": 1}")), + authedUserWithPermissions(1, 1, []accesscontrol.Permission{{Action: accesscontrol.ActionTeamsPermissionsWrite, Scope: "teams:id:1"}}), + ) + res, err := server.SendJSON(req) + require.NoError(t, err) + assert.Equal(t, http.StatusOK, res.StatusCode) + require.NoError(t, res.Body.Close()) + }) + t.Run("should not be able to add team member without correct permission", func(t *testing.T) { req := webtest.RequestWithSignedInUser( server.NewRequest(http.MethodPost, "/api/teams/1/members", strings.NewReader("{\"userId\": 1}")), @@ -81,7 +96,7 @@ func TestAddTeamMembersAPIEndpoint(t *testing.T) { } func TestGetTeamMembersAPIEndpoint(t *testing.T) { - server := SetupAPITestServer(t) + server := SetupAPITestServer(t, &teamtest.FakeService{ExpectedIsMember: true, ExpectedTeamDTO: &team.TeamDTO{ID: 1, UID: "a00001"}}) t.Run("should be able to get team members with correct permission", func(t *testing.T) { req 
:= webtest.RequestWithSignedInUser( @@ -93,6 +108,18 @@ func TestGetTeamMembersAPIEndpoint(t *testing.T) { assert.Equal(t, http.StatusOK, res.StatusCode) require.NoError(t, res.Body.Close()) }) + + t.Run("should be able to get team members with correct permission by UID", func(t *testing.T) { + req := webtest.RequestWithSignedInUser( + server.NewGetRequest("/api/teams/a00001/members"), + authedUserWithPermissions(1, 1, []accesscontrol.Permission{{Action: accesscontrol.ActionTeamsPermissionsRead, Scope: "teams:id:1"}}), + ) + res, err := server.SendJSON(req) + require.NoError(t, err) + assert.Equal(t, http.StatusOK, res.StatusCode) + require.NoError(t, res.Body.Close()) + }) + t.Run("should not be able to get team members without correct permission", func(t *testing.T) { req := webtest.RequestWithSignedInUser( server.NewGetRequest("/api/teams/1/members"), @@ -106,9 +133,7 @@ func TestGetTeamMembersAPIEndpoint(t *testing.T) { } func TestUpdateTeamMembersAPIEndpoint(t *testing.T) { - server := SetupAPITestServer(t, func(hs *TeamAPI) { - hs.teamService = &teamtest.FakeService{ExpectedIsMember: true} - }) + server := SetupAPITestServer(t, &teamtest.FakeService{ExpectedIsMember: true, ExpectedTeamDTO: &team.TeamDTO{ID: 1, UID: "a00001"}}) t.Run("should be able to update team member with correct permission", func(t *testing.T) { req := webtest.RequestWithSignedInUser( @@ -120,6 +145,18 @@ func TestUpdateTeamMembersAPIEndpoint(t *testing.T) { assert.Equal(t, http.StatusOK, res.StatusCode) require.NoError(t, res.Body.Close()) }) + + t.Run("should be able to update team member with correct permission by team UID", func(t *testing.T) { + req := webtest.RequestWithSignedInUser( + server.NewRequest(http.MethodPut, "/api/teams/a00001/members/1", strings.NewReader("{\"permission\": 1}")), + authedUserWithPermissions(1, 1, []accesscontrol.Permission{{Action: accesscontrol.ActionTeamsPermissionsWrite, Scope: "teams:id:1"}}), + ) + res, err := server.SendJSON(req) + require.NoError(t, err) + assert.Equal(t, http.StatusOK, res.StatusCode) + require.NoError(t, res.Body.Close()) + }) + t.Run("should not be able to update team member without correct permission", func(t *testing.T) { req := webtest.RequestWithSignedInUser( server.NewRequest(http.MethodPut, "/api/teams/1/members/1", strings.NewReader("{\"permission\": 1}")), @@ -133,7 +170,7 @@ func TestUpdateTeamMembersAPIEndpoint(t *testing.T) { } func TestDeleteTeamMembersAPIEndpoint(t *testing.T) { - server := SetupAPITestServer(t, func(hs *TeamAPI) { + server := SetupAPITestServer(t, nil, func(hs *TeamAPI) { hs.teamService = &teamtest.FakeService{ExpectedIsMember: true} hs.teamPermissionsService = &actest.FakePermissionsService{} }) diff --git a/pkg/services/team/teamapi/team_test.go b/pkg/services/team/teamapi/team_test.go index 4cd3f541046..ba95cf53192 100644 --- a/pkg/services/team/teamapi/team_test.go +++ b/pkg/services/team/teamapi/team_test.go @@ -21,16 +21,14 @@ import ( const ( searchTeamsURL = "/api/teams/search" createTeamURL = "/api/teams/" - detailTeamURL = "/api/teams/%d" - detailTeamPreferenceURL = "/api/teams/%d/preferences" + detailTeamURL = "/api/teams/%v" + detailTeamPreferenceURL = "/api/teams/%v/preferences" teamCmd = `{"name": "MyTestTeam%d"}` teamPreferenceCmd = `{"theme": "dark"}` ) func TestTeamAPIEndpoint_CreateTeam(t *testing.T) { - server := SetupAPITestServer(t, func(hs *TeamAPI) { - hs.teamService = teamtest.NewFakeService() - }) + server := SetupAPITestServer(t, nil) input := strings.NewReader(fmt.Sprintf(teamCmd, 1)) 
t.Run("Access control allows creating teams with the correct permissions", func(t *testing.T) { @@ -54,9 +52,7 @@ func TestTeamAPIEndpoint_CreateTeam(t *testing.T) { } func TestTeamAPIEndpoint_SearchTeams(t *testing.T) { - server := SetupAPITestServer(t, func(hs *TeamAPI) { - hs.teamService = teamtest.NewFakeService() - }) + server := SetupAPITestServer(t, nil) t.Run("Access control prevents searching for teams with the incorrect permissions", func(t *testing.T) { req := server.NewGetRequest(searchTeamsURL) @@ -80,9 +76,7 @@ func TestTeamAPIEndpoint_SearchTeams(t *testing.T) { } func TestTeamAPIEndpoint_GetTeamByID(t *testing.T) { - server := SetupAPITestServer(t, func(hs *TeamAPI) { - hs.teamService = &teamtest.FakeService{ExpectedTeamDTO: &team.TeamDTO{}} - }) + server := SetupAPITestServer(t, &teamtest.FakeService{ExpectedTeamDTO: &team.TeamDTO{ID: 1, UID: "a00001"}}) url := fmt.Sprintf(detailTeamURL, 1) @@ -106,6 +100,30 @@ func TestTeamAPIEndpoint_GetTeamByID(t *testing.T) { require.NoError(t, res.Body.Close()) }) + t.Run("Access control prevents getting a team when missing permissions by UID", func(t *testing.T) { + url := fmt.Sprintf(detailTeamURL, "a00001") + req := server.NewGetRequest(url) + req = webtest.RequestWithSignedInUser(req, authedUserWithPermissions(1, 1, []accesscontrol.Permission{ + {Action: accesscontrol.ActionTeamsRead, Scope: "teams:id:2"}, + })) + res, err := server.Send(req) + require.NoError(t, err) + assert.Equal(t, http.StatusForbidden, res.StatusCode) + require.NoError(t, res.Body.Close()) + }) + + t.Run("Access control allows getting a team by UID with the correct permissions", func(t *testing.T) { + url := fmt.Sprintf(detailTeamURL, "a00001") + req := server.NewGetRequest(url) + req = webtest.RequestWithSignedInUser(req, authedUserWithPermissions(1, 1, []accesscontrol.Permission{ + {Action: accesscontrol.ActionTeamsRead, Scope: "teams:id:1"}, + })) + res, err := server.Send(req) + require.NoError(t, err) + assert.Equal(t, http.StatusOK, res.StatusCode) + require.NoError(t, res.Body.Close()) + }) + t.Run("Access control allows getting a team with wildcard scope", func(t *testing.T) { req := server.NewGetRequest(url) req = webtest.RequestWithSignedInUser(req, authedUserWithPermissions(1, 1, []accesscontrol.Permission{ @@ -122,11 +140,9 @@ func TestTeamAPIEndpoint_GetTeamByID(t *testing.T) { // Then the endpoint should return 200 if the user has accesscontrol.ActionTeamsWrite with teams:id:1 scope // else return 403 func TestTeamAPIEndpoint_UpdateTeam(t *testing.T) { - server := SetupAPITestServer(t, func(hs *TeamAPI) { - hs.teamService = &teamtest.FakeService{ExpectedTeamDTO: &team.TeamDTO{}} - }) + server := SetupAPITestServer(t, &teamtest.FakeService{ExpectedTeamDTO: &team.TeamDTO{ID: 1, UID: "a00001"}}) - request := func(teamID int64, user *user.SignedInUser) (*http.Response, error) { + request := func(teamID any, user *user.SignedInUser) (*http.Response, error) { req := server.NewRequest(http.MethodPut, fmt.Sprintf(detailTeamURL, teamID), strings.NewReader(teamCmd)) req = webtest.RequestWithSignedInUser(req, user) return server.SendJSON(req) @@ -141,6 +157,15 @@ func TestTeamAPIEndpoint_UpdateTeam(t *testing.T) { require.NoError(t, res.Body.Close()) }) + t.Run("Access control allows updating team by UID with the correct permissions", func(t *testing.T) { + res, err := request("a00001", authedUserWithPermissions(1, 1, []accesscontrol.Permission{ + {Action: accesscontrol.ActionTeamsWrite, Scope: "teams:id:1"}, + })) + require.NoError(t, err) + 
assert.Equal(t, http.StatusOK, res.StatusCode) + require.NoError(t, res.Body.Close()) + }) + t.Run("Access control allows updating teams with the wildcard scope", func(t *testing.T) { res, err := request(1, authedUserWithPermissions(1, 1, []accesscontrol.Permission{ {Action: accesscontrol.ActionTeamsWrite, Scope: "teams:*"}, @@ -164,11 +189,9 @@ func TestTeamAPIEndpoint_UpdateTeam(t *testing.T) { // Then the endpoint should return 200 if the user has accesscontrol.ActionTeamsDelete with teams:id:1 scope // else return 403 func TestTeamAPIEndpoint_DeleteTeam(t *testing.T) { - server := SetupAPITestServer(t, func(hs *TeamAPI) { - hs.teamService = &teamtest.FakeService{ExpectedTeamDTO: &team.TeamDTO{}} - }) + server := SetupAPITestServer(t, &teamtest.FakeService{ExpectedTeamDTO: &team.TeamDTO{ID: 1, UID: "a00001"}}) - request := func(teamID int64, user *user.SignedInUser) (*http.Response, error) { + request := func(teamID any, user *user.SignedInUser) (*http.Response, error) { req := server.NewRequest(http.MethodDelete, fmt.Sprintf(detailTeamURL, teamID), http.NoBody) req = webtest.RequestWithSignedInUser(req, user) return server.Send(req) @@ -191,17 +214,26 @@ func TestTeamAPIEndpoint_DeleteTeam(t *testing.T) { assert.Equal(t, http.StatusOK, res.StatusCode) require.NoError(t, res.Body.Close()) }) + + t.Run("Access control allows deleting teams with the correct permissions by UID", func(t *testing.T) { + res, err := request("a00001", authedUserWithPermissions(1, 1, []accesscontrol.Permission{ + {Action: accesscontrol.ActionTeamsDelete, Scope: "teams:id:1"}, + })) + require.NoError(t, err) + assert.Equal(t, http.StatusOK, res.StatusCode) + require.NoError(t, res.Body.Close()) + }) } // Given a team with a user, when the user is granted X permission, // Then the endpoint should return 200 if the user has accesscontrol.ActionTeamsRead with teams:id:1 scope // else return 403 func TestTeamAPIEndpoint_GetTeamPreferences(t *testing.T) { - server := SetupAPITestServer(t, func(hs *TeamAPI) { + server := SetupAPITestServer(t, &teamtest.FakeService{ExpectedTeamDTO: &team.TeamDTO{ID: 1, UID: "a00001"}}, func(hs *TeamAPI) { hs.preferenceService = &preftest.FakePreferenceService{ExpectedPreference: &pref.Preference{}} }) - request := func(teamID int64, user *user.SignedInUser) (*http.Response, error) { + request := func(teamID any, user *user.SignedInUser) (*http.Response, error) { req := server.NewGetRequest(fmt.Sprintf(detailTeamPreferenceURL, teamID)) req = webtest.RequestWithSignedInUser(req, user) return server.Send(req) @@ -216,6 +248,15 @@ func TestTeamAPIEndpoint_GetTeamPreferences(t *testing.T) { require.NoError(t, res.Body.Close()) }) + t.Run("Access control allows getting team preferences with the correct permissions by UID", func(t *testing.T) { + res, err := request("a00001", authedUserWithPermissions(1, 1, []accesscontrol.Permission{ + {Action: accesscontrol.ActionTeamsRead, Scope: "teams:id:1"}, + })) + require.NoError(t, err) + assert.Equal(t, http.StatusOK, res.StatusCode) + require.NoError(t, res.Body.Close()) + }) + t.Run("Access control prevents getting team preferences with the incorrect permissions", func(t *testing.T) { res, err := request(1, authedUserWithPermissions(1, 1, []accesscontrol.Permission{ {Action: accesscontrol.ActionTeamsRead, Scope: "teams:id:2"}, @@ -230,7 +271,7 @@ func TestTeamAPIEndpoint_GetTeamPreferences(t *testing.T) { // Then the endpoint should return 200 if the user has accesscontrol.ActionTeamsWrite with teams:id:1 scope // else return 403 func 
TestTeamAPIEndpoint_UpdateTeamPreferences(t *testing.T) { - server := SetupAPITestServer(t, func(hs *TeamAPI) { + server := SetupAPITestServer(t, nil, func(hs *TeamAPI) { hs.preferenceService = &preftest.FakePreferenceService{ExpectedPreference: &pref.Preference{}} }) diff --git a/pkg/services/team/teamimpl/store.go b/pkg/services/team/teamimpl/store.go index e1069cd9f4e..23824060677 100644 --- a/pkg/services/team/teamimpl/store.go +++ b/pkg/services/team/teamimpl/store.go @@ -3,6 +3,7 @@ package teamimpl import ( "bytes" "context" + "errors" "fmt" "strings" "time" @@ -268,6 +269,12 @@ func (ss *xormStore) Search(ctx context.Context, query *team.SearchTeamsQuery) ( func (ss *xormStore) GetByID(ctx context.Context, query *team.GetTeamByIDQuery) (*team.TeamDTO, error) { var queryResult *team.TeamDTO + + // Check if both ID and UID are unset + if query.ID == 0 && query.UID == "" { + return nil, errors.New("either ID or UID must be set") + } + err := ss.db.WithDbSession(ctx, func(sess *db.Session) error { var sql bytes.Buffer params := make([]any, 0) @@ -278,8 +285,14 @@ func (ss *xormStore) GetByID(ctx context.Context, query *team.GetTeamByIDQuery) params = append(params, user) } - sql.WriteString(` WHERE team.org_id = ? and team.id = ?`) - params = append(params, query.OrgID, query.ID) + // Prioritize ID over UID + if query.ID != 0 { + sql.WriteString(` WHERE team.org_id = ? and team.id = ?`) + params = append(params, query.OrgID, query.ID) + } else { + sql.WriteString(` WHERE team.org_id = ? and team.uid = ?`) + params = append(params, query.OrgID, query.UID) + } var t team.TeamDTO exists, err := sess.SQL(sql.String(), params...).Get(&t) diff --git a/pkg/services/team/teamimpl/team.go b/pkg/services/team/teamimpl/team.go index e7c9537b751..e31e6c75db6 100644 --- a/pkg/services/team/teamimpl/team.go +++ b/pkg/services/team/teamimpl/team.go @@ -64,6 +64,7 @@ func (s *Service) GetTeamByID(ctx context.Context, query *team.GetTeamByIDQuery) ctx, span := s.tracer.Start(ctx, "team.GetTeamByID", trace.WithAttributes( attribute.Int64("orgID", query.OrgID), attribute.Int64("teamID", query.ID), + attribute.String("teamUID", query.UID), )) defer span.End() return s.store.GetByID(ctx, query) diff --git a/pkg/services/unifiedSearch/http.go b/pkg/services/unifiedSearch/http.go new file mode 100644 index 00000000000..ed885b1c40f --- /dev/null +++ b/pkg/services/unifiedSearch/http.go @@ -0,0 +1,73 @@ +package unifiedSearch + +import ( + "encoding/json" + "errors" + "io" + "net/http" + + "github.com/grafana/grafana-plugin-sdk-go/backend" + "github.com/grafana/grafana-plugin-sdk-go/data" + + "github.com/grafana/grafana/pkg/api/response" + "github.com/grafana/grafana/pkg/api/routing" + "github.com/grafana/grafana/pkg/middleware" + contextmodel "github.com/grafana/grafana/pkg/services/contexthandler/model" +) + +type SearchHTTPService interface { + RegisterHTTPRoutes(storageRoute routing.RouteRegister) +} + +type searchHTTPService struct { + search SearchService +} + +func ProvideSearchHTTPService(search SearchService) SearchHTTPService { + return &searchHTTPService{search: search} +} + +func (s *searchHTTPService) RegisterHTTPRoutes(storageRoute routing.RouteRegister) { + storageRoute.Post("/", middleware.ReqSignedIn, routing.Wrap(s.doQuery)) +} + +func (s *searchHTTPService) doQuery(c *contextmodel.ReqContext) response.Response { + searchReadinessCheckResp := s.search.IsReady(c.Req.Context(), c.SignedInUser.GetOrgID()) + if !searchReadinessCheckResp.IsReady { + return response.JSON(http.StatusOK, 
&backend.DataResponse{ + Frames: []*data.Frame{{ + Name: "Loading", + }}, + Error: nil, + }) + } + + body, err := io.ReadAll(c.Req.Body) + if err != nil { + return response.Error(http.StatusInternalServerError, "error reading bytes", err) + } + + query := &Query{} + err = json.Unmarshal(body, query) + if err != nil { + return response.Error(http.StatusBadRequest, "error parsing body", err) + } + + resp := s.search.doQuery(c.Req.Context(), c.SignedInUser, c.SignedInUser.GetOrgID(), *query) + + if resp.Error != nil { + return response.Error(http.StatusInternalServerError, "error handling search request", resp.Error) + } + + if len(resp.Frames) == 0 { + msg := "invalid search response" + return response.Error(http.StatusInternalServerError, msg, errors.New(msg)) + } + + bytes, err := resp.MarshalJSON() + if err != nil { + return response.Error(http.StatusInternalServerError, "error marshalling response", err) + } + + return response.JSON(http.StatusOK, bytes) +} diff --git a/pkg/services/unifiedSearch/service.go b/pkg/services/unifiedSearch/service.go new file mode 100644 index 00000000000..eb83ac8bbaa --- /dev/null +++ b/pkg/services/unifiedSearch/service.go @@ -0,0 +1,176 @@ +package unifiedSearch + +import ( + "context" + "errors" + + "github.com/grafana/grafana-plugin-sdk-go/backend" + "github.com/grafana/grafana-plugin-sdk-go/data" + "github.com/grafana/grafana/pkg/infra/db" + "github.com/grafana/grafana/pkg/infra/log" + "github.com/grafana/grafana/pkg/infra/tracing" + "github.com/grafana/grafana/pkg/registry" + "github.com/grafana/grafana/pkg/services/accesscontrol" + "github.com/grafana/grafana/pkg/services/featuremgmt" + "github.com/grafana/grafana/pkg/services/folder" + "github.com/grafana/grafana/pkg/services/org" + "github.com/grafana/grafana/pkg/services/store" + "github.com/grafana/grafana/pkg/services/user" + "github.com/grafana/grafana/pkg/setting" + "github.com/grafana/grafana/pkg/storage/unified/resource" +) + +type StandardSearchService struct { + registry.BackgroundService + cfg *setting.Cfg + sql db.DB + ac accesscontrol.Service + orgService org.Service + userService user.Service + logger log.Logger + reIndexCh chan struct{} + features featuremgmt.FeatureToggles + resourceClient resource.ResourceClient +} + +func (s *StandardSearchService) IsReady(ctx context.Context, orgId int64) IsSearchReadyResponse { + return IsSearchReadyResponse{IsReady: true} +} + +func ProvideService(cfg *setting.Cfg, sql db.DB, entityEventStore store.EntityEventsService, + ac accesscontrol.Service, tracer tracing.Tracer, features featuremgmt.FeatureToggles, orgService org.Service, + userService user.Service, folderStore folder.Store, resourceClient resource.ResourceClient) SearchService { + logger := log.New("searchV3") + s := &StandardSearchService{ + cfg: cfg, + sql: sql, + ac: ac, + logger: logger, + reIndexCh: make(chan struct{}, 1), + orgService: orgService, + userService: userService, + features: features, + resourceClient: resourceClient, + } + return s +} + +func (s *StandardSearchService) IsDisabled() bool { + return !s.features.IsEnabledGlobally(featuremgmt.FlagPanelTitleSearch) +} + +func (s *StandardSearchService) Run(ctx context.Context) error { + // TODO: implement this? 
( copied from pkg/services/searchV2/service.go ) + // orgQuery := &org.SearchOrgsQuery{} + // result, err := s.orgService.Search(ctx, orgQuery) + // if err != nil { + // return fmt.Errorf("can't get org list: %w", err) + // } + // orgIDs := make([]int64, 0, len(result)) + // for _, org := range result { + // orgIDs = append(orgIDs, org.ID) + // } + // TODO: do we need to initialize the bleve index again ( should be initialized on startup )? + // return s.dashboardIndex.run(ctx, orgIDs, s.reIndexCh) + return nil +} + +func (s *StandardSearchService) TriggerReIndex() { + select { + case s.reIndexCh <- struct{}{}: + default: + // channel is full => re-index will happen soon anyway. + } +} + +func (s *StandardSearchService) getUser(ctx context.Context, backendUser *backend.User, orgId int64) (*user.SignedInUser, error) { + // TODO: get user & user's permissions from the request context + var usr *user.SignedInUser + if s.cfg.AnonymousEnabled && backendUser.Email == "" && backendUser.Login == "" { + getOrg := org.GetOrgByNameQuery{Name: s.cfg.AnonymousOrgName} + orga, err := s.orgService.GetByName(ctx, &getOrg) + if err != nil { + s.logger.Error("Anonymous access organization error.", "org_name", s.cfg.AnonymousOrgName, "error", err) + return nil, err + } + + usr = &user.SignedInUser{ + OrgID: orga.ID, + OrgName: orga.Name, + OrgRole: org.RoleType(s.cfg.AnonymousOrgRole), + IsAnonymous: true, + } + } else { + getSignedInUserQuery := &user.GetSignedInUserQuery{ + Login: backendUser.Login, + Email: backendUser.Email, + OrgID: orgId, + } + var err error + usr, err = s.userService.GetSignedInUser(ctx, getSignedInUserQuery) + if err != nil { + s.logger.Error("Error while retrieving user", "error", err, "email", backendUser.Email, "login", getSignedInUserQuery.Login) + return nil, errors.New("auth error") + } + + if usr == nil { + s.logger.Error("No user found", "email", backendUser.Email) + return nil, errors.New("auth error") + } + } + + if usr.Permissions == nil { + usr.Permissions = make(map[int64]map[string][]string) + } + + if _, ok := usr.Permissions[orgId]; ok { + // permissions as part of the `s.sql.GetSignedInUser` query - return early + return usr, nil + } + + // TODO: ensure this is cached + permissions, err := s.ac.GetUserPermissions(ctx, usr, + accesscontrol.Options{ReloadCache: false}) + if err != nil { + s.logger.Error("Failed to retrieve user permissions", "error", err, "email", backendUser.Email) + return nil, errors.New("auth error") + } + + usr.Permissions[orgId] = accesscontrol.GroupScopesByActionContext(ctx, permissions) + return usr, nil +} + +func (s *StandardSearchService) DoQuery(ctx context.Context, user *backend.User, orgID int64, q Query) *backend.DataResponse { + signedInUser, err := s.getUser(ctx, user, orgID) + if err != nil { + return &backend.DataResponse{Error: err} + } + + query := s.doQuery(ctx, signedInUser, orgID, q) + return query +} + +func (s *StandardSearchService) doQuery(ctx context.Context, signedInUser *user.SignedInUser, orgID int64, q Query) *backend.DataResponse { + response := s.doSearchQuery(ctx, q, s.cfg.AppSubURL) + return response +} + +func (s *StandardSearchService) doSearchQuery(ctx context.Context, qry Query, _ string) *backend.DataResponse { + response := &backend.DataResponse{} + + req := &resource.SearchRequest{Tenant: s.cfg.StackID, Query: qry.Query} + res, err := s.resourceClient.Search(ctx, req) + if err != nil { + response.Error = err + return response + } + + // TODO: implement this correctly + frame := data.NewFrame("results", 
data.NewField("value", nil, []string{})) + frame.Meta = &data.FrameMeta{Notices: []data.Notice{{Text: "TODO"}}} + for _, r := range res.Items { + frame.AppendRow(string(r.Value)) + } + response.Frames = append(response.Frames, frame) + return response +} diff --git a/pkg/services/unifiedSearch/types.go b/pkg/services/unifiedSearch/types.go new file mode 100644 index 00000000000..8faa56f93f0 --- /dev/null +++ b/pkg/services/unifiedSearch/types.go @@ -0,0 +1,49 @@ +package unifiedSearch + +import ( + "context" + + "github.com/grafana/grafana-plugin-sdk-go/backend" + + "github.com/grafana/grafana/pkg/registry" + "github.com/grafana/grafana/pkg/services/user" +) + +type FacetField struct { + Field string `json:"field"` + Limit int `json:"limit,omitempty"` // explicit page size +} + +type Query struct { + Query string `json:"query"` + Location string `json:"location,omitempty"` // parent folder ID + Sort string `json:"sort,omitempty"` // field ASC/DESC + Datasource string `json:"ds_uid,omitempty"` // "datasource" collides with the JSON value at the same level :() + DatasourceType string `json:"ds_type,omitempty"` + Tags []string `json:"tags,omitempty"` + Kind []string `json:"kind,omitempty"` + PanelType string `json:"panel_type,omitempty"` + UIDs []string `json:"uid,omitempty"` + Explain bool `json:"explain,omitempty"` // adds details on why document matched + WithAllowedActions bool `json:"withAllowedActions,omitempty"` // adds allowed actions per entity + Facet []FacetField `json:"facet,omitempty"` + SkipLocation bool `json:"skipLocation,omitempty"` + HasPreview string `json:"hasPreview,omitempty"` // the light|dark theme + Limit int `json:"limit,omitempty"` // explicit page size + From int `json:"from,omitempty"` // for paging +} + +type IsSearchReadyResponse struct { + IsReady bool + Reason string // initial-indexing-ongoing, org-indexing-ongoing +} + +type SearchService interface { + registry.CanBeDisabled + registry.BackgroundService + DoQuery(ctx context.Context, user *backend.User, orgId int64, query Query) *backend.DataResponse + doQuery(ctx context.Context, user *user.SignedInUser, orgId int64, query Query) *backend.DataResponse + IsReady(ctx context.Context, orgId int64) IsSearchReadyResponse + // RegisterDashboardIndexExtender(ext DashboardIndexExtender) + TriggerReIndex() +} diff --git a/pkg/setting/setting_unified_alerting.go b/pkg/setting/setting_unified_alerting.go index 3de83db8ce5..8fe863247b7 100644 --- a/pkg/setting/setting_unified_alerting.go +++ b/pkg/setting/setting_unified_alerting.go @@ -121,6 +121,11 @@ type UnifiedAlertingSettings struct { // Duration for which a resolved alert state transition will continue to be sent to the Alertmanager. ResolvedAlertRetention time.Duration + + // RuleVersionRecordLimit defines the limit of how many alert rule versions + // should be stored in the database for each alert_rule in an organization including the current one. 
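+ // Example (grafana.ini): setting [unified_alerting] rule_version_record_limit = 5 keeps at most five versions per rule, including the current one.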
+ // 0 value means no limit + RuleVersionRecordLimit int } type RecordingRuleSettings struct { @@ -455,6 +460,11 @@ func (cfg *Cfg) ReadUnifiedAlertingSettings(iniFile *ini.File) error { return err } + uaCfg.RuleVersionRecordLimit = ua.Key("rule_version_record_limit").MustInt(0) + if uaCfg.RuleVersionRecordLimit < 0 { + return fmt.Errorf("setting 'rule_version_record_limit' is invalid, only 0 or a positive integer are allowed") + } + cfg.UnifiedAlerting = uaCfg return nil } diff --git a/pkg/storage/unified/apistore/store.go b/pkg/storage/unified/apistore/store.go index 4d2947ff54b..d4911da167c 100644 --- a/pkg/storage/unified/apistore/store.go +++ b/pkg/storage/unified/apistore/store.go @@ -26,7 +26,6 @@ import ( "k8s.io/apiserver/pkg/storage/storagebackend" "k8s.io/apiserver/pkg/storage/storagebackend/factory" "k8s.io/client-go/tools/cache" - "k8s.io/klog/v2" "github.com/grafana/grafana/pkg/apimachinery/utils" grafanaregistry "github.com/grafana/grafana/pkg/apiserver/registry/generic" @@ -51,7 +50,6 @@ type Storage struct { store resource.ResourceClient getKey func(string) (*resource.ResourceKey, error) - watchSet *WatchSet versioner storage.Versioner } @@ -84,8 +82,7 @@ func NewStorage( trigger: trigger, indexers: indexers, - watchSet: NewWatchSet(), - getKey: keyParser, + getKey: keyParser, versioner: &storage.APIObjectVersioner{}, } @@ -112,9 +109,7 @@ func NewStorage( } } - return s, func() { - s.watchSet.cleanupWatchers() - }, nil + return s, func() {}, nil } func (s *Storage) Versioner() storage.Versioner { @@ -165,11 +160,6 @@ func (s *Storage) Create(ctx context.Context, key string, obj runtime.Object, ou }) } - s.watchSet.notifyWatchers(watch.Event{ - Object: out.DeepCopyObject(), - Type: watch.Added, - }, nil) - return nil } @@ -226,16 +216,11 @@ func (s *Storage) Delete( if err := s.versioner.UpdateObject(out, uint64(rsp.ResourceVersion)); err != nil { return err } - - s.watchSet.notifyWatchers(watch.Event{ - Object: out.DeepCopyObject(), - Type: watch.Deleted, - }, nil) return nil } // This version is not yet passing the watch tests -func (s *Storage) WatchNEXT(ctx context.Context, key string, opts storage.ListOptions) (watch.Interface, error) { +func (s *Storage) Watch(ctx context.Context, key string, opts storage.ListOptions) (watch.Interface, error) { k, err := s.getKey(key) if err != nil { return watch.NewEmptyWatch(), nil @@ -255,10 +240,11 @@ func (s *Storage) WatchNEXT(ctx context.Context, key string, opts storage.ListOp if opts.SendInitialEvents != nil { cmd.SendInitialEvents = *opts.SendInitialEvents } - + ctx, cancelWatch := context.WithCancel(ctx) client, err := s.store.Watch(ctx, cmd) if err != nil { // if the context was canceled, just return a new empty watch + cancelWatch() if errors.Is(err, context.Canceled) || errors.Is(err, context.DeadlineExceeded) || errors.Is(err, io.EOF) { return watch.NewEmptyWatch(), nil } @@ -266,138 +252,11 @@ func (s *Storage) WatchNEXT(ctx context.Context, key string, opts storage.ListOp } reporter := apierrors.NewClientErrorReporter(500, "WATCH", "") - decoder := &streamDecoder{ - client: client, - newFunc: s.newFunc, - predicate: predicate, - codec: s.codec, - } + decoder := newStreamDecoder(client, s.newFunc, predicate, s.codec, cancelWatch) return watch.NewStreamWatcher(decoder, reporter), nil } -// Watch begins watching the specified key. Events are decoded into API objects, -// and any items selected by the predicate are sent down to returned watch.Interface. 
-// resourceVersion may be used to specify what version to begin watching, -// which should be the current resourceVersion, and no longer rv+1 -// (e.g. reconnecting without missing any updates). -// If resource version is "0", this interface will get current object at given key -// and send it in an "ADDED" event, before watch starts. -func (s *Storage) Watch(ctx context.Context, key string, opts storage.ListOptions) (watch.Interface, error) { - k, err := s.getKey(key) - if err != nil { - return watch.NewEmptyWatch(), nil - } - - req, predicate, err := toListRequest(k, opts) - if err != nil { - return watch.NewEmptyWatch(), nil - } - - listObj := s.newListFunc() - - var namespace *string - if k.Namespace != "" { - namespace = &k.Namespace - } - - if ctx.Err() != nil { - return watch.NewEmptyWatch(), nil - } - - if (opts.SendInitialEvents == nil && req.ResourceVersion == 0) || (opts.SendInitialEvents != nil && *opts.SendInitialEvents) { - if err := s.GetList(ctx, key, opts, listObj); err != nil { - return nil, err - } - - listAccessor, err := meta.ListAccessor(listObj) - if err != nil { - klog.Errorf("could not determine new list accessor in watch") - return nil, err - } - // Updated if requesting RV was either "0" or "" - maybeUpdatedRV, err := s.versioner.ParseResourceVersion(listAccessor.GetResourceVersion()) - if err != nil { - klog.Errorf("could not determine new list RV in watch") - return nil, err - } - - jw := s.watchSet.newWatch(ctx, maybeUpdatedRV, predicate, s.versioner, namespace) - - initEvents := make([]watch.Event, 0) - listPtr, err := meta.GetItemsPtr(listObj) - if err != nil { - return nil, err - } - v, err := conversion.EnforcePtr(listPtr) - if err != nil || v.Kind() != reflect.Slice { - return nil, fmt.Errorf("need pointer to slice: %v", err) - } - - for i := 0; i < v.Len(); i++ { - obj, ok := v.Index(i).Addr().Interface().(runtime.Object) - if !ok { - return nil, fmt.Errorf("need item to be a runtime.Object: %v", err) - } - - initEvents = append(initEvents, watch.Event{ - Type: watch.Added, - Object: obj.DeepCopyObject(), - }) - } - - if predicate.AllowWatchBookmarks && len(initEvents) > 0 { - listRV, err := s.versioner.ParseResourceVersion(listAccessor.GetResourceVersion()) - if err != nil { - return nil, fmt.Errorf("could not get last init event's revision for bookmark: %v", err) - } - - bookmarkEvent := watch.Event{ - Type: watch.Bookmark, - Object: s.newFunc(), - } - - if err := s.versioner.UpdateObject(bookmarkEvent.Object, listRV); err != nil { - return nil, err - } - - bookmarkObject, err := meta.Accessor(bookmarkEvent.Object) - if err != nil { - return nil, fmt.Errorf("could not get bookmark object's acccesor: %v", err) - } - bookmarkObject.SetAnnotations(map[string]string{"k8s.io/initial-events-end": "true"}) - initEvents = append(initEvents, bookmarkEvent) - } - - jw.Start(initEvents...) 
- return jw, nil - } - - maybeUpdatedRV := uint64(req.ResourceVersion) - if maybeUpdatedRV == 0 { - rsp, err := s.store.List(ctx, &resource.ListRequest{ - Options: &resource.ListOptions{ - Key: k, - }, - Limit: 1, // we ignore the results, just look at the RV - }) - if err != nil { - return nil, err - } - if rsp.Error != nil { - return nil, resource.GetError(rsp.Error) - } - maybeUpdatedRV = uint64(rsp.ResourceVersion) - if maybeUpdatedRV < 1 { - return nil, fmt.Errorf("expecting a non-zero resource version") - } - } - jw := s.watchSet.newWatch(ctx, maybeUpdatedRV, predicate, s.versioner, namespace) - - jw.Start() - return jw, nil -} - // Get unmarshals object found at key into objPtr. On a not found error, will either // return a zero object of the requested type, or an error, depending on 'opts.ignoreNotFound'. // Treats empty responses and nil response nodes exactly like a not found error. @@ -668,17 +527,6 @@ func (s *Storage) GuaranteedUpdate( return err } - if created { - s.watchSet.notifyWatchers(watch.Event{ - Object: destination.DeepCopyObject(), - Type: watch.Added, - }, nil) - } else { - s.watchSet.notifyWatchers(watch.Event{ - Object: destination.DeepCopyObject(), - Type: watch.Modified, - }, existingObj.DeepCopyObject()) - } return nil } diff --git a/pkg/storage/unified/apistore/store_test.go b/pkg/storage/unified/apistore/store_test.go index 8977693c966..287aeea5c41 100644 --- a/pkg/storage/unified/apistore/store_test.go +++ b/pkg/storage/unified/apistore/store_test.go @@ -92,12 +92,13 @@ func TestCreate(t *testing.T) { storagetesting.RunTestCreate(ctx, t, store, checkStorageInvariants(store)) } -func TestCreateWithTTL(t *testing.T) { - ctx, store, destroyFunc, err := testSetup(t) - defer destroyFunc() - assert.NoError(t, err) - storagetesting.RunTestCreateWithTTL(ctx, t, store) -} +// No TTL support in unified storage +// func TestCreateWithTTL(t *testing.T) { +// ctx, store, destroyFunc, err := testSetup(t) +// defer destroyFunc() +// assert.NoError(t, err) +// storagetesting.RunTestCreateWithTTL(ctx, t, store) +// } func TestCreateWithKeyExist(t *testing.T) { ctx, store, destroyFunc, err := testSetup(t) diff --git a/pkg/storage/unified/apistore/stream.go b/pkg/storage/unified/apistore/stream.go index a425279185a..9546e3e8b64 100644 --- a/pkg/storage/unified/apistore/stream.go +++ b/pkg/storage/unified/apistore/stream.go @@ -1,9 +1,11 @@ package apistore import ( + "context" "errors" "fmt" "io" + "sync" grpcCodes "google.golang.org/grpc/codes" grpcStatus "google.golang.org/grpc/status" @@ -17,12 +19,23 @@ import ( ) type streamDecoder struct { - client resource.ResourceStore_WatchClient - newFunc func() runtime.Object - predicate storage.SelectionPredicate - codec runtime.Codec + client resource.ResourceStore_WatchClient + newFunc func() runtime.Object + predicate storage.SelectionPredicate + codec runtime.Codec + cancelWatch context.CancelFunc + done sync.WaitGroup } +func newStreamDecoder(client resource.ResourceStore_WatchClient, newFunc func() runtime.Object, predicate storage.SelectionPredicate, codec runtime.Codec, cancelWatch context.CancelFunc) *streamDecoder { + return &streamDecoder{ + client: client, + newFunc: newFunc, + predicate: predicate, + codec: codec, + cancelWatch: cancelWatch, + } +} func (d *streamDecoder) toObject(w *resource.WatchEvent_Resource) (runtime.Object, error) { obj, _, err := d.codec.Decode(w.Value, nil, d.newFunc()) if err == nil { @@ -35,25 +48,30 @@ func (d *streamDecoder) toObject(w *resource.WatchEvent_Resourc return 
obj, err } +// nolint: gocyclo // we may be able to simplify this in the future, but this is a complex function by nature func (d *streamDecoder) Decode() (action watch.EventType, object runtime.Object, err error) { + d.done.Add(1) + defer d.done.Done() decode: for { - err := d.client.Context().Err() - if err != nil { - klog.Errorf("client: context error: %s\n", err) - return watch.Error, nil, err + var evt *resource.WatchEvent + var err error + select { + case <-d.client.Context().Done(): + default: + evt, err = d.client.Recv() } - evt, err := d.client.Recv() - if errors.Is(err, io.EOF) { + switch { + case errors.Is(d.client.Context().Err(), context.Canceled): + return watch.Error, nil, io.EOF + case d.client.Context().Err() != nil: + return watch.Error, nil, d.client.Context().Err() + case errors.Is(err, io.EOF): + return watch.Error, nil, io.EOF + case grpcStatus.Code(err) == grpcCodes.Canceled: return watch.Error, nil, err - } - - if grpcStatus.Code(err) == grpcCodes.Canceled { - return watch.Error, nil, err - } - - if err != nil { + case err != nil: klog.Errorf("client: error receiving result: %s", err) return watch.Error, nil, err } @@ -194,10 +212,15 @@ decode: } func (d *streamDecoder) Close() { + // Close the send stream err := d.client.CloseSend() if err != nil { klog.Errorf("error closing watch stream: %s", err) } + // Cancel the send context + d.cancelWatch() + // Wait for all decode operations to finish + d.done.Wait() } var _ watch.Decoder = (*streamDecoder)(nil) diff --git a/pkg/storage/unified/apistore/watcher_test.go b/pkg/storage/unified/apistore/watcher_test.go index fb4deb11811..600e8f11af8 100644 --- a/pkg/storage/unified/apistore/watcher_test.go +++ b/pkg/storage/unified/apistore/watcher_test.go @@ -7,9 +7,9 @@ package apistore import ( "context" - "fmt" "os" "testing" + "time" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -29,7 +29,19 @@ import ( "k8s.io/apiserver/pkg/storage/storagebackend/factory" storagetesting "github.com/grafana/grafana/pkg/apiserver/storage/testing" + infraDB "github.com/grafana/grafana/pkg/infra/db" + "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/storage/unified/resource" + "github.com/grafana/grafana/pkg/storage/unified/sql" + "github.com/grafana/grafana/pkg/storage/unified/sql/db/dbimpl" + "github.com/grafana/grafana/pkg/tests/testsuite" +) + +type StorageType string + +const ( + StorageTypeFile StorageType = "file" + StorageTypeUnified StorageType = "unified" ) var scheme = runtime.NewScheme() @@ -48,6 +60,7 @@ type setupOptions struct { prefix string resourcePrefix string groupResource schema.GroupResource + storageType StorageType } type setupOption func(*setupOptions, testing.TB) @@ -59,10 +72,20 @@ func withDefaults(options *setupOptions, t testing.TB) { options.prefix = t.TempDir() options.resourcePrefix = storagetesting.KeyFunc("", "") options.groupResource = schema.GroupResource{Resource: "pods"} + options.storageType = StorageTypeFile +} +func withStorageType(storageType StorageType) setupOption { + return func(options *setupOptions, t testing.TB) { + options.storageType = storageType + } } var _ setupOption = withDefaults +func TestMain(m *testing.M) { + testsuite.Run(m) +} + func testSetup(t testing.TB, opts ...setupOption) (context.Context, storage.Interface, factory.DestroyFunc, error) { setupOpts := setupOptions{} opts = append([]setupOption{withDefaults}, opts...) 
 func testSetup(t testing.TB, opts ...setupOption) (context.Context, storage.Interface, factory.DestroyFunc, error) {
 	setupOpts := setupOptions{}
 	opts = append([]setupOption{withDefaults}, opts...)
@@ -85,18 +108,55 @@ func testSetup(t testing.TB, opts ...setupOption) (context.Context, storage.Inte
 		Metadata: fileblob.MetadataDontWrite, // skip
 	})
 	require.NoError(t, err)
-	fmt.Printf("ROOT: %s\n\n", tmp)
 }
 
 ctx := storagetesting.NewContext()
-backend, err := resource.NewCDKBackend(ctx, resource.CDKBackendOptions{
-	Bucket: bucket,
-})
-require.NoError(t, err)
-server, err := resource.NewResourceServer(resource.ResourceServerOptions{
-	Backend: backend,
-})
-require.NoError(t, err)
+var server resource.ResourceServer
+switch setupOpts.storageType {
+case StorageTypeFile:
+	backend, err := resource.NewCDKBackend(ctx, resource.CDKBackendOptions{
+		Bucket: bucket,
+	})
+	require.NoError(t, err)
+
+	server, err = resource.NewResourceServer(resource.ResourceServerOptions{
+		Backend: backend,
+	})
+	require.NoError(t, err)
+
+	// Issue a health check to ensure the server is initialized
+	_, err = server.IsHealthy(ctx, &resource.HealthCheckRequest{})
+	require.NoError(t, err)
+case StorageTypeUnified:
+	if testing.Short() {
+		t.Skip("skipping integration test")
+	}
+	dbstore := infraDB.InitTestDB(t)
+	cfg := setting.NewCfg()
+
+	eDB, err := dbimpl.ProvideResourceDB(dbstore, cfg, nil)
+	require.NoError(t, err)
+	require.NotNil(t, eDB)
+
+	ret, err := sql.NewBackend(sql.BackendOptions{
+		DBProvider:      eDB,
+		PollingInterval: time.Millisecond, // Keep this fast
+	})
+	require.NoError(t, err)
+	require.NotNil(t, ret)
+	ctx := storagetesting.NewContext()
+	err = ret.Init(ctx)
+	require.NoError(t, err)
+
+	server, err = resource.NewResourceServer(resource.ResourceServerOptions{
+		Backend:     ret,
+		Diagnostics: ret,
+		Lifecycle:   ret,
+	})
+	require.NoError(t, err)
+default:
+	t.Fatalf("unsupported storage type: %s", setupOpts.storageType)
+}
 client := resource.NewLocalResourceClient(server)
 
 config := storagebackend.NewDefaultConfig(setupOpts.prefix, setupOpts.codec)
@@ -124,55 +184,82 @@ func testSetup(t testing.TB, opts ...setupOption) (context.Context, storage.Inte
 }
 
 func TestWatch(t *testing.T) {
-	ctx, store, destroyFunc, err := testSetup(t)
-	defer destroyFunc()
-	assert.NoError(t, err)
-	storagetesting.RunTestWatch(ctx, t, store)
+	for _, s := range []StorageType{StorageTypeFile, StorageTypeUnified} {
+		t.Run(string(s), func(t *testing.T) {
+			ctx, store, destroyFunc, err := testSetup(t, withStorageType(s))
+			defer destroyFunc()
+			assert.NoError(t, err)
+			storagetesting.RunTestWatch(ctx, t, store)
+		})
+	}
 }
 
 func TestClusterScopedWatch(t *testing.T) {
-	ctx, store, destroyFunc, err := testSetup(t)
-	defer destroyFunc()
-	assert.NoError(t, err)
-	storagetesting.RunTestClusterScopedWatch(ctx, t, store)
+	for _, s := range []StorageType{StorageTypeFile, StorageTypeUnified} {
+		t.Run(string(s), func(t *testing.T) {
+			ctx, store, destroyFunc, err := testSetup(t, withStorageType(s))
+			defer destroyFunc()
+			assert.NoError(t, err)
+			storagetesting.RunTestClusterScopedWatch(ctx, t, store)
+		})
+	}
 }
 
 func TestNamespaceScopedWatch(t *testing.T) {
-	ctx, store, destroyFunc, err := testSetup(t)
-	defer destroyFunc()
-	assert.NoError(t, err)
-	storagetesting.RunTestNamespaceScopedWatch(ctx, t, store)
+	for _, s := range []StorageType{StorageTypeFile, StorageTypeUnified} {
+		t.Run(string(s), func(t *testing.T) {
+			ctx, store, destroyFunc, err := testSetup(t, withStorageType(s))
+			defer destroyFunc()
+			assert.NoError(t, err)
+			storagetesting.RunTestNamespaceScopedWatch(ctx, t, store)
+		})
+	}
 }
 
 func TestDeleteTriggerWatch(t *testing.T) {
-	ctx, store, destroyFunc, err := testSetup(t)
-	defer destroyFunc()
-	assert.NoError(t, err)
-	storagetesting.RunTestDeleteTriggerWatch(ctx, t, store)
+	for _, s := range []StorageType{StorageTypeFile, StorageTypeUnified} {
+		t.Run(string(s), func(t *testing.T) {
+			ctx, store, destroyFunc, err := testSetup(t, withStorageType(s))
+			defer destroyFunc()
+			assert.NoError(t, err)
+			storagetesting.RunTestDeleteTriggerWatch(ctx, t, store)
+		})
+	}
 }
 
-func TestWatchFromZero(t *testing.T) {
-	ctx, store, destroyFunc, err := testSetup(t)
-	defer destroyFunc()
-	assert.NoError(t, err)
-	storagetesting.RunTestWatchFromZero(ctx, t, store, nil)
-}
+// Not supported by unistore because there is no way to differentiate between:
+// - SendInitialEvents=nil && resourceVersion=0
+// - sendInitialEvents=false && resourceVersion=0
+// This is a legacy feature in k8s.io/apiserver/pkg/storage/etcd3/watcher_test.go#196
+// func TestWatchFromZero(t *testing.T) {
+// 	ctx, store, destroyFunc, err := testSetup(t)
+// 	defer destroyFunc()
+// 	assert.NoError(t, err)
+// 	storagetesting.RunTestWatchFromZero(ctx, t, store, nil)
+// }
 
 // TestWatchFromNonZero tests that
 // - watch from non-0 should just watch changes after given version
 func TestWatchFromNonZero(t *testing.T) {
-	ctx, store, destroyFunc, err := testSetup(t)
-	defer destroyFunc()
-	assert.NoError(t, err)
-	storagetesting.RunTestWatchFromNonZero(ctx, t, store)
+	for _, s := range []StorageType{StorageTypeFile, StorageTypeUnified} {
+		t.Run(string(s), func(t *testing.T) {
+			ctx, store, destroyFunc, err := testSetup(t, withStorageType(s))
+			defer destroyFunc()
+			assert.NoError(t, err)
+			storagetesting.RunTestWatchFromNonZero(ctx, t, store)
+		})
+	}
 }
 
+/*
+Only valid when using cached storage
 func TestDelayedWatchDelivery(t *testing.T) {
 	ctx, store, destroyFunc, err := testSetup(t)
 	defer destroyFunc()
 	assert.NoError(t, err)
 	storagetesting.RunTestDelayedWatchDelivery(ctx, t, store)
 }
+*/
 
 /*
 func TestWatchError(t *testing.T) {
@@ -182,24 +269,36 @@ func TestWatchError(t *testing.T) {
 */
 
 func TestWatchContextCancel(t *testing.T) {
-	ctx, store, destroyFunc, err := testSetup(t)
-	defer destroyFunc()
-	assert.NoError(t, err)
-	storagetesting.RunTestWatchContextCancel(ctx, t, store)
+	for _, s := range []StorageType{StorageTypeFile, StorageTypeUnified} {
+		t.Run(string(s), func(t *testing.T) {
+			ctx, store, destroyFunc, err := testSetup(t, withStorageType(s))
+			defer destroyFunc()
+			assert.NoError(t, err)
+			storagetesting.RunTestWatchContextCancel(ctx, t, store)
+		})
+	}
 }
 
 func TestWatcherTimeout(t *testing.T) {
-	ctx, store, destroyFunc, err := testSetup(t)
-	defer destroyFunc()
-	assert.NoError(t, err)
-	storagetesting.RunTestWatcherTimeout(ctx, t, store)
+	for _, s := range []StorageType{StorageTypeFile, StorageTypeUnified} {
+		t.Run(string(s), func(t *testing.T) {
+			ctx, store, destroyFunc, err := testSetup(t, withStorageType(s))
+			defer destroyFunc()
+			assert.NoError(t, err)
+			storagetesting.RunTestWatcherTimeout(ctx, t, store)
+		})
+	}
 }
 
 func TestWatchDeleteEventObjectHaveLatestRV(t *testing.T) {
-	ctx, store, destroyFunc, err := testSetup(t)
-	defer destroyFunc()
-	assert.NoError(t, err)
-	storagetesting.RunTestWatchDeleteEventObjectHaveLatestRV(ctx, t, store)
+	for _, s := range []StorageType{StorageTypeFile, StorageTypeUnified} {
+		t.Run(string(s), func(t *testing.T) {
+			ctx, store, destroyFunc, err := testSetup(t, withStorageType(s))
+			defer destroyFunc()
+			assert.NoError(t, err)
+			storagetesting.RunTestWatchDeleteEventObjectHaveLatestRV(ctx, t, store)
+		})
+	}
 }
 
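+// The per-storage-type loop above is repeated in every watch test; a helper
+// could collapse the boilerplate. A minimal sketch (runForEachStorageType is
+// hypothetical, not part of this change):
+//
+//	func runForEachStorageType(t *testing.T, run func(ctx context.Context, t *testing.T, store storage.Interface)) {
+//		for _, s := range []StorageType{StorageTypeFile, StorageTypeUnified} {
+//			t.Run(string(s), func(t *testing.T) {
+//				ctx, store, destroyFunc, err := testSetup(t, withStorageType(s))
+//				defer destroyFunc()
+//				assert.NoError(t, err)
+//				run(ctx, t, store)
+//			})
+//		}
+//	}
+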
 // TODO: enable when we support flow control and priority fairness
@@ -221,31 +320,47 @@ func TestWatchDeleteEventObjectHaveLatestRV(t *testing.T) {
 // setting allowWatchBookmarks query param against
 // etcd implementation doesn't have any effect.
 func TestWatchDispatchBookmarkEvents(t *testing.T) {
-	ctx, store, destroyFunc, err := testSetup(t)
-	defer destroyFunc()
-	assert.NoError(t, err)
-	storagetesting.RunTestWatchDispatchBookmarkEvents(ctx, t, store, false)
+	for _, s := range []StorageType{StorageTypeFile, StorageTypeUnified} {
+		t.Run(string(s), func(t *testing.T) {
+			ctx, store, destroyFunc, err := testSetup(t, withStorageType(s))
+			defer destroyFunc()
+			assert.NoError(t, err)
+			storagetesting.RunTestWatchDispatchBookmarkEvents(ctx, t, store, false)
+		})
+	}
 }
 
 func TestSendInitialEventsBackwardCompatibility(t *testing.T) {
-	ctx, store, destroyFunc, err := testSetup(t)
-	defer destroyFunc()
-	assert.NoError(t, err)
-	storagetesting.RunSendInitialEventsBackwardCompatibility(ctx, t, store)
+	for _, s := range []StorageType{StorageTypeFile, StorageTypeUnified} {
+		t.Run(string(s), func(t *testing.T) {
+			ctx, store, destroyFunc, err := testSetup(t, withStorageType(s))
+			defer destroyFunc()
+			assert.NoError(t, err)
+			storagetesting.RunSendInitialEventsBackwardCompatibility(ctx, t, store)
+		})
+	}
 }
 
 func TestEtcdWatchSemantics(t *testing.T) {
-	ctx, store, destroyFunc, err := testSetup(t)
-	defer destroyFunc()
-	assert.NoError(t, err)
-	storagetesting.RunWatchSemantics(ctx, t, store)
+	for _, s := range []StorageType{StorageTypeFile, StorageTypeUnified} {
+		t.Run(string(s), func(t *testing.T) {
+			ctx, store, destroyFunc, err := testSetup(t, withStorageType(s))
+			defer destroyFunc()
+			assert.NoError(t, err)
+			storagetesting.RunWatchSemantics(ctx, t, store)
+		})
+	}
 }
 
 func TestEtcdWatchSemanticInitialEventsExtended(t *testing.T) {
-	ctx, store, destroyFunc, err := testSetup(t)
-	defer destroyFunc()
-	assert.NoError(t, err)
-	storagetesting.RunWatchSemanticInitialEventsExtended(ctx, t, store)
+	for _, s := range []StorageType{StorageTypeFile, StorageTypeUnified} {
+		t.Run(string(s), func(t *testing.T) {
+			ctx, store, destroyFunc, err := testSetup(t, withStorageType(s))
+			defer destroyFunc()
+			assert.NoError(t, err)
+			storagetesting.RunWatchSemanticInitialEventsExtended(ctx, t, store)
+		})
+	}
 }
 
 func newPod() runtime.Object {
diff --git a/pkg/storage/unified/apistore/watchset.go b/pkg/storage/unified/apistore/watchset.go
deleted file mode 100644
index 9c9d214b4b6..00000000000
--- a/pkg/storage/unified/apistore/watchset.go
+++ /dev/null
@@ -1,376 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-// Provenance-includes-location: https://github.com/tilt-dev/tilt-apiserver/blob/main/pkg/storage/filepath/watchset.go
-// Provenance-includes-license: Apache-2.0
-// Provenance-includes-copyright: The Kubernetes Authors.
-
-package apistore
-
-import (
-	"context"
-	"fmt"
-	"sync"
-	"sync/atomic"
-
-	"k8s.io/apimachinery/pkg/api/meta"
-	"k8s.io/apimachinery/pkg/runtime"
-	"k8s.io/apimachinery/pkg/watch"
-	"k8s.io/apiserver/pkg/storage"
-	"k8s.io/klog/v2"
-)
-
-const (
-	UpdateChannelSize         = 25
-	InitialWatchNodesSize     = 20
-	InitialBufferedEventsSize = 25
-)
-
-type eventWrapper struct {
-	ev watch.Event
-	// optional: oldObject is only set for modifications for determining their type as necessary (when using predicate filtering)
-	oldObject runtime.Object
-}
-
-type watchNode struct {
-	ctx         context.Context
-	s           *WatchSet
-	id          uint64
-	updateCh    chan eventWrapper
-	outCh       chan watch.Event
-	requestedRV uint64
-	// the watch may or may not be namespaced for a namespaced resource. 
This is always nil for cluster-scoped kinds - watchNamespace *string - predicate storage.SelectionPredicate - versioner storage.Versioner -} - -// Keeps track of which watches need to be notified -type WatchSet struct { - mu sync.RWMutex - // mu protects both nodes and counter - nodes map[uint64]*watchNode - counter atomic.Uint64 - buffered []eventWrapper - bufferedMutex sync.RWMutex -} - -func NewWatchSet() *WatchSet { - return &WatchSet{ - buffered: make([]eventWrapper, 0, InitialBufferedEventsSize), - nodes: make(map[uint64]*watchNode, InitialWatchNodesSize), - } -} - -// Creates a new watch with a unique id, but -// does not start sending events to it until start() is called. -func (s *WatchSet) newWatch(ctx context.Context, requestedRV uint64, p storage.SelectionPredicate, versioner storage.Versioner, namespace *string) *watchNode { - s.counter.Add(1) - - node := &watchNode{ - ctx: ctx, - requestedRV: requestedRV, - id: s.counter.Load(), - s: s, - // updateCh size needs to be > 1 to allow slower clients to not block passing new events - updateCh: make(chan eventWrapper, UpdateChannelSize), - // outCh size needs to be > 1 for single process use-cases such as tests where watch and event seeding from CUD - // events is happening on the same thread - outCh: make(chan watch.Event, UpdateChannelSize), - predicate: p, - watchNamespace: namespace, - versioner: versioner, - } - - return node -} - -func (s *WatchSet) cleanupWatchers() { - s.mu.Lock() - defer s.mu.Unlock() - for _, w := range s.nodes { - w.stop() - } -} - -// oldObject is only passed in the event of a modification -// in case a predicate filtered watch is impacted as a result of modification -// NOTE: this function gives one the misperception that a newly added node will never -// get a double event, one from buffered and one from the update channel -// That perception is not true. Even though this function maintains the lock throughout the function body -// it is not true of the Start function. So basically, the Start function running after this function -// fully stands the chance of another future notifyWatchers double sending it the event through the two means mentioned -func (s *WatchSet) notifyWatchers(ev watch.Event, oldObject runtime.Object) { - s.mu.RLock() - defer s.mu.RUnlock() - - updateEv := eventWrapper{ - ev: ev, - } - if oldObject != nil { - updateEv.oldObject = oldObject - } - - // Events are always buffered. - // this is because of an inadvertent delay which is built into the watch process - // Watch() from storage returns Watch.Interface with a async start func. 
- // The only way to guarantee that we can interpret the passed RV correctly is to play it against missed events - // (notice the loop below over s.nodes isn't exactly going to work on a new node - // unless start is called on it) - s.bufferedMutex.Lock() - s.buffered = append(s.buffered, updateEv) - s.bufferedMutex.Unlock() - - for _, w := range s.nodes { - w.updateCh <- updateEv - } -} - -// isValid is not necessary to be called on oldObject in UpdateEvents - assuming the Watch pushes correctly setup eventWrapper our way -// first bool is whether the event is valid for current watcher -// second bool is whether checking the old value against the predicate may be valuable to the caller -// second bool may be a helpful aid to establish context around MODIFIED events -// (note that this second bool is only marked true if we pass other checks first, namely RV and namespace) -func (w *watchNode) isValid(e eventWrapper) (bool, bool, error) { - obj, err := meta.Accessor(e.ev.Object) - if err != nil { - klog.Error("Could not get accessor to object in event") - return false, false, nil - } - - eventRV, err := w.getResourceVersionAsInt(e.ev.Object) - if err != nil { - return false, false, err - } - - if eventRV < w.requestedRV { - return false, false, nil - } - - if w.watchNamespace != nil && *w.watchNamespace != obj.GetNamespace() { - return false, false, err - } - - valid, err := w.predicate.Matches(e.ev.Object) - if err != nil { - return false, false, err - } - - return valid, e.ev.Type == watch.Modified, nil -} - -// Only call this method if current object matches the predicate -func (w *watchNode) handleAddedForFilteredList(e eventWrapper) (*watch.Event, error) { - if e.oldObject == nil { - return nil, fmt.Errorf("oldObject should be set for modified events") - } - - ok, err := w.predicate.Matches(e.oldObject) - if err != nil { - return nil, err - } - - if !ok { - e.ev.Type = watch.Added - return &e.ev, nil - } - - return nil, nil -} - -func (w *watchNode) handleDeletedForFilteredList(e eventWrapper) (*watch.Event, error) { - if e.oldObject == nil { - return nil, fmt.Errorf("oldObject should be set for modified events") - } - - ok, err := w.predicate.Matches(e.oldObject) - if err != nil { - return nil, err - } - - if !ok { - return nil, nil - } - - // isn't a match but used to be - e.ev.Type = watch.Deleted - - oldObjectAccessor, err := meta.Accessor(e.oldObject) - if err != nil { - klog.Errorf("Could not get accessor to correct the old RV of filtered out object") - return nil, err - } - - currentRV, err := getResourceVersion(e.ev.Object) - if err != nil { - klog.Errorf("Could not get accessor to object in event") - return nil, err - } - - oldObjectAccessor.SetResourceVersion(currentRV) - e.ev.Object = e.oldObject - - return &e.ev, nil -} - -func (w *watchNode) processEvent(e eventWrapper, isInitEvent bool) error { - if isInitEvent { - // Init events have already been vetted against the predicate and other RV behavior - // Let them pass through - w.outCh <- e.ev - return nil - } - - valid, runDeleteFromFilteredListHandler, err := w.isValid(e) - if err != nil { - klog.Errorf("Could not determine validity of the event: %v", err) - return err - } - if valid { - if e.ev.Type == watch.Modified { - ev, err := w.handleAddedForFilteredList(e) - if err != nil { - return err - } - if ev != nil { - w.outCh <- *ev - } else { - // forward the original event if add handling didn't signal any impact - w.outCh <- e.ev - } - } else { - w.outCh <- e.ev - } - return nil - } - - if 
runDeleteFromFilteredListHandler { - if e.ev.Type == watch.Modified { - ev, err := w.handleDeletedForFilteredList(e) - if err != nil { - return err - } - if ev != nil { - w.outCh <- *ev - } - } // explicitly doesn't have an event forward for the else case here - return nil - } - - return nil -} - -// Start sending events to this watch. -func (w *watchNode) Start(initEvents ...watch.Event) { - w.s.mu.Lock() - w.s.nodes[w.id] = w - w.s.mu.Unlock() - - go func() { - maxRV := uint64(0) - for _, ev := range initEvents { - currentRV, err := w.getResourceVersionAsInt(ev.Object) - if err != nil { - klog.Errorf("Could not determine init event RV for deduplication of buffered events: %v", err) - continue - } - - if maxRV < currentRV { - maxRV = currentRV - } - - if err := w.processEvent(eventWrapper{ev: ev}, true); err != nil { - klog.Errorf("Could not process event: %v", err) - } - } - - // If we had no init events, simply rely on the passed RV - if maxRV == 0 { - maxRV = w.requestedRV - } - - w.s.bufferedMutex.RLock() - for _, e := range w.s.buffered { - eventRV, err := w.getResourceVersionAsInt(e.ev.Object) - if err != nil { - klog.Errorf("Could not determine RV for deduplication of buffered events: %v", err) - continue - } - - if maxRV >= eventRV { - continue - } else { - maxRV = eventRV - } - - if err := w.processEvent(e, false); err != nil { - klog.Errorf("Could not process event: %v", err) - } - } - w.s.bufferedMutex.RUnlock() - - for { - select { - case e, ok := <-w.updateCh: - if !ok { - close(w.outCh) - return - } - - eventRV, err := w.getResourceVersionAsInt(e.ev.Object) - if err != nil { - klog.Errorf("Could not determine RV for deduplication of channel events: %v", err) - continue - } - - if maxRV >= eventRV { - continue - } else { - maxRV = eventRV - } - - if err := w.processEvent(e, false); err != nil { - klog.Errorf("Could not process event: %v", err) - } - case <-w.ctx.Done(): - close(w.outCh) - return - } - } - }() -} - -func (w *watchNode) Stop() { - w.s.mu.Lock() - defer w.s.mu.Unlock() - w.stop() -} - -// Unprotected func: ensure mutex on the parent watch set is locked before calling -func (w *watchNode) stop() { - if _, ok := w.s.nodes[w.id]; ok { - delete(w.s.nodes, w.id) - close(w.updateCh) - } -} - -func (w *watchNode) ResultChan() <-chan watch.Event { - return w.outCh -} - -func getResourceVersion(obj runtime.Object) (string, error) { - accessor, err := meta.Accessor(obj) - if err != nil { - klog.Error("Could not get accessor to object in event") - return "", err - } - return accessor.GetResourceVersion(), nil -} - -func (w *watchNode) getResourceVersionAsInt(obj runtime.Object) (uint64, error) { - accessor, err := meta.Accessor(obj) - if err != nil { - klog.Error("Could not get accessor to object in event") - return 0, err - } - - return w.versioner.ParseResourceVersion(accessor.GetResourceVersion()) -} diff --git a/pkg/storage/unified/client.go b/pkg/storage/unified/client.go index 67ed6eebc06..07709113562 100644 --- a/pkg/storage/unified/client.go +++ b/pkg/storage/unified/client.go @@ -34,6 +34,7 @@ func ProvideUnifiedStorageClient( DataPath: apiserverCfg.Key("storage_path").MustString(filepath.Join(cfg.DataPath, "grafana-apiserver")), Address: apiserverCfg.Key("address").MustString(""), } + ctx := context.Background() switch opts.StorageType { case options.StorageTypeFile: @@ -47,7 +48,7 @@ func ProvideUnifiedStorageClient( if err != nil { return nil, err } - backend, err := resource.NewCDKBackend(context.Background(), resource.CDKBackendOptions{ + backend, err := 
resource.NewCDKBackend(ctx, resource.CDKBackendOptions{ Bucket: bucket, }) if err != nil { @@ -84,7 +85,7 @@ func ProvideUnifiedStorageClient( // Use the local SQL default: - server, err := sql.NewResourceServer(db, cfg, features, tracer) + server, err := sql.NewResourceServer(ctx, db, cfg, features, tracer) if err != nil { return nil, err } diff --git a/pkg/storage/unified/resource/go.mod b/pkg/storage/unified/resource/go.mod index ac411a5fcd9..3be85811afb 100644 --- a/pkg/storage/unified/resource/go.mod +++ b/pkg/storage/unified/resource/go.mod @@ -17,8 +17,35 @@ require ( k8s.io/apimachinery v0.31.1 ) +require ( + github.com/RoaringBitmap/roaring v1.9.3 // indirect + github.com/bits-and-blooms/bitset v1.12.0 // indirect + github.com/blevesearch/bleve_index_api v1.1.10 // indirect + github.com/blevesearch/geo v0.1.20 // indirect + github.com/blevesearch/go-faiss v1.0.20 // indirect + github.com/blevesearch/go-porterstemmer v1.0.3 // indirect + github.com/blevesearch/gtreap v0.1.1 // indirect + github.com/blevesearch/mmap-go v1.0.4 // indirect + github.com/blevesearch/scorch_segment_api/v2 v2.2.15 // indirect + github.com/blevesearch/segment v0.9.1 // indirect + github.com/blevesearch/snowballstem v0.9.0 // indirect + github.com/blevesearch/upsidedown_store_api v1.0.2 // indirect + github.com/blevesearch/vellum v1.0.10 // indirect + github.com/blevesearch/zapx/v11 v11.3.10 // indirect + github.com/blevesearch/zapx/v12 v12.3.10 // indirect + github.com/blevesearch/zapx/v13 v13.3.10 // indirect + github.com/blevesearch/zapx/v14 v14.3.10 // indirect + github.com/blevesearch/zapx/v15 v15.3.13 // indirect + github.com/blevesearch/zapx/v16 v16.1.5 // indirect + github.com/golang/geo v0.0.0-20210211234256-740aa86cb551 // indirect + github.com/golang/snappy v0.0.4 // indirect + github.com/mschoch/smat v0.2.0 // indirect + go.etcd.io/bbolt v1.3.9 // indirect +) + require ( github.com/beorn7/perks v1.0.1 // indirect + github.com/blevesearch/bleve/v2 v2.4.2 github.com/bufbuild/protocompile v0.4.0 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect @@ -29,6 +56,7 @@ require ( github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/protobuf v1.5.4 // indirect github.com/google/gofuzz v1.2.0 // indirect + github.com/google/uuid v1.6.0 github.com/googleapis/gax-go/v2 v2.13.0 // indirect github.com/jhump/protoreflect v1.15.1 // indirect github.com/json-iterator/go v1.1.12 // indirect diff --git a/pkg/storage/unified/resource/go.sum b/pkg/storage/unified/resource/go.sum index e9b7986db13..391c6c08d49 100644 --- a/pkg/storage/unified/resource/go.sum +++ b/pkg/storage/unified/resource/go.sum @@ -13,6 +13,8 @@ cloud.google.com/go/iam v1.1.13/go.mod h1:K8mY0uSXwEXS30KrnVb+j54LB/ntfZu1dr+4zF cloud.google.com/go/storage v1.43.0 h1:CcxnSohZwizt4LCzQHWvBf1/kvtHUn7gk9QERXPyXFs= cloud.google.com/go/storage v1.43.0/go.mod h1:ajvxEa7WmZS1PxvKRq4bq0tFT3vMd502JwstCcYv0Q0= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/RoaringBitmap/roaring v1.9.3 h1:t4EbC5qQwnisr5PrP9nt0IRhRTb9gMUgQF4t4S2OByM= +github.com/RoaringBitmap/roaring v1.9.3/go.mod h1:6AXUsoIEzDTFFQCe1RbGA6uFONMhvejWj5rqITANK90= github.com/aws/aws-sdk-go v1.55.5 h1:KKUZBfBoyqy5d3swXyiC7Q76ic40rYcbqH7qjh59kzU= github.com/aws/aws-sdk-go v1.55.5/go.mod h1:eRwEWoyTWFMVYVQzKMNHWP5/RV4xIUGMQfXQHfHkpNU= github.com/aws/aws-sdk-go-v2 v1.30.3 h1:jUeBtG0Ih+ZIFH0F4UkmL9w3cSpaMv9tYYDbzILP8dY= @@ 
-55,6 +57,44 @@ github.com/aws/smithy-go v1.20.3 h1:ryHwveWzPV5BIof6fyDvor6V3iUL7nTfiTKXHiW05nE= github.com/aws/smithy-go v1.20.3/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/bits-and-blooms/bitset v1.12.0 h1:U/q1fAF7xXRhFCrhROzIfffYnu+dlS38vCZtmFVPHmA= +github.com/bits-and-blooms/bitset v1.12.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8= +github.com/blevesearch/bleve/v2 v2.4.2 h1:NooYP1mb3c0StkiY9/xviiq2LGSaE8BQBCc/pirMx0U= +github.com/blevesearch/bleve/v2 v2.4.2/go.mod h1:ATNKj7Yl2oJv/lGuF4kx39bST2dveX6w0th2FFYLkc8= +github.com/blevesearch/bleve_index_api v1.1.10 h1:PDLFhVjrjQWr6jCuU7TwlmByQVCSEURADHdCqVS9+g0= +github.com/blevesearch/bleve_index_api v1.1.10/go.mod h1:PbcwjIcRmjhGbkS/lJCpfgVSMROV6TRubGGAODaK1W8= +github.com/blevesearch/geo v0.1.20 h1:paaSpu2Ewh/tn5DKn/FB5SzvH0EWupxHEIwbCk/QPqM= +github.com/blevesearch/geo v0.1.20/go.mod h1:DVG2QjwHNMFmjo+ZgzrIq2sfCh6rIHzy9d9d0B59I6w= +github.com/blevesearch/go-faiss v1.0.20 h1:AIkdTQFWuZ5LQmKQSebgMR4RynGNw8ZseJXaan5kvtI= +github.com/blevesearch/go-faiss v1.0.20/go.mod h1:jrxHrbl42X/RnDPI+wBoZU8joxxuRwedrxqswQ3xfU8= +github.com/blevesearch/go-porterstemmer v1.0.3 h1:GtmsqID0aZdCSNiY8SkuPJ12pD4jI+DdXTAn4YRcHCo= +github.com/blevesearch/go-porterstemmer v1.0.3/go.mod h1:angGc5Ht+k2xhJdZi511LtmxuEf0OVpvUUNrwmM1P7M= +github.com/blevesearch/gtreap v0.1.1 h1:2JWigFrzDMR+42WGIN/V2p0cUvn4UP3C4Q5nmaZGW8Y= +github.com/blevesearch/gtreap v0.1.1/go.mod h1:QaQyDRAT51sotthUWAH4Sj08awFSSWzgYICSZ3w0tYk= +github.com/blevesearch/mmap-go v1.0.4 h1:OVhDhT5B/M1HNPpYPBKIEJaD0F3Si+CrEKULGCDPWmc= +github.com/blevesearch/mmap-go v1.0.4/go.mod h1:EWmEAOmdAS9z/pi/+Toxu99DnsbhG1TIxUoRmJw/pSs= +github.com/blevesearch/scorch_segment_api/v2 v2.2.15 h1:prV17iU/o+A8FiZi9MXmqbagd8I0bCqM7OKUYPbnb5Y= +github.com/blevesearch/scorch_segment_api/v2 v2.2.15/go.mod h1:db0cmP03bPNadXrCDuVkKLV6ywFSiRgPFT1YVrestBc= +github.com/blevesearch/segment v0.9.1 h1:+dThDy+Lvgj5JMxhmOVlgFfkUtZV2kw49xax4+jTfSU= +github.com/blevesearch/segment v0.9.1/go.mod h1:zN21iLm7+GnBHWTao9I+Au/7MBiL8pPFtJBJTsk6kQw= +github.com/blevesearch/snowballstem v0.9.0 h1:lMQ189YspGP6sXvZQ4WZ+MLawfV8wOmPoD/iWeNXm8s= +github.com/blevesearch/snowballstem v0.9.0/go.mod h1:PivSj3JMc8WuaFkTSRDW2SlrulNWPl4ABg1tC/hlgLs= +github.com/blevesearch/upsidedown_store_api v1.0.2 h1:U53Q6YoWEARVLd1OYNc9kvhBMGZzVrdmaozG2MfoB+A= +github.com/blevesearch/upsidedown_store_api v1.0.2/go.mod h1:M01mh3Gpfy56Ps/UXHjEO/knbqyQ1Oamg8If49gRwrQ= +github.com/blevesearch/vellum v1.0.10 h1:HGPJDT2bTva12hrHepVT3rOyIKFFF4t7Gf6yMxyMIPI= +github.com/blevesearch/vellum v1.0.10/go.mod h1:ul1oT0FhSMDIExNjIxHqJoGpVrBpKCdgDQNxfqgJt7k= +github.com/blevesearch/zapx/v11 v11.3.10 h1:hvjgj9tZ9DeIqBCxKhi70TtSZYMdcFn7gDb71Xo/fvk= +github.com/blevesearch/zapx/v11 v11.3.10/go.mod h1:0+gW+FaE48fNxoVtMY5ugtNHHof/PxCqh7CnhYdnMzQ= +github.com/blevesearch/zapx/v12 v12.3.10 h1:yHfj3vXLSYmmsBleJFROXuO08mS3L1qDCdDK81jDl8s= +github.com/blevesearch/zapx/v12 v12.3.10/go.mod h1:0yeZg6JhaGxITlsS5co73aqPtM04+ycnI6D1v0mhbCs= +github.com/blevesearch/zapx/v13 v13.3.10 h1:0KY9tuxg06rXxOZHg3DwPJBjniSlqEgVpxIqMGahDE8= +github.com/blevesearch/zapx/v13 v13.3.10/go.mod h1:w2wjSDQ/WBVeEIvP0fvMJZAzDwqwIEzVPnCPrz93yAk= +github.com/blevesearch/zapx/v14 v14.3.10 h1:SG6xlsL+W6YjhX5N3aEiL/2tcWh3DO75Bnz77pSwwKU= +github.com/blevesearch/zapx/v14 v14.3.10/go.mod 
h1:qqyuR0u230jN1yMmE4FIAuCxmahRQEOehF78m6oTgns= +github.com/blevesearch/zapx/v15 v15.3.13 h1:6EkfaZiPlAxqXz0neniq35my6S48QI94W/wyhnpDHHQ= +github.com/blevesearch/zapx/v15 v15.3.13/go.mod h1:Turk/TNRKj9es7ZpKK95PS7f6D44Y7fAFy8F4LXQtGg= +github.com/blevesearch/zapx/v16 v16.1.5 h1:b0sMcarqNFxuXvjoXsF8WtwVahnxyhEvBSRJi/AUHjU= +github.com/blevesearch/zapx/v16 v16.1.5/go.mod h1:J4mSF39w1QELc11EWRSBFkPeZuO7r/NPKkHzDCoiaI8= github.com/bufbuild/protocompile v0.4.0 h1:LbFKd2XowZvQ/kajzguUp2DC9UEIQhIq77fZZlaQsNA= github.com/bufbuild/protocompile v0.4.0/go.mod h1:3v93+mbWn/v3xzN+31nwkJfrEpAUwp+BagBSZWx+TP8= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= @@ -86,6 +126,8 @@ github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang/geo v0.0.0-20210211234256-740aa86cb551 h1:gtexQ/VGyN+VVFRXSFiguSNcXmS6rkKT+X7FdIrTtfo= +github.com/golang/geo v0.0.0-20210211234256-740aa86cb551/go.mod h1:QZ0nwyI2jOfgRAoBvP+ab5aRr7c9x7lhGEJrKvBwjWI= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= @@ -104,6 +146,8 @@ github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= +github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= +github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= @@ -162,6 +206,8 @@ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/mschoch/smat v0.2.0 h1:8imxQsjDm8yFEAVBe7azKmKSgzSkZXDuKkSq9374khM= +github.com/mschoch/smat v0.2.0/go.mod h1:kc9mz7DoBKqDyiRL7VZN8KvXQMWeTaVnttLRXOlotKw= github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= github.com/patrickmn/go-cache v2.1.0+incompatible h1:HRMgzkcYKYpi3C8ajMPV8OFXaaRUnok+kx1WdO15EQc= @@ -198,6 +244,8 @@ github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcY github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod 
h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +go.etcd.io/bbolt v1.3.9 h1:8x7aARPEXiXbHmtUwAIv7eV2fQFHrLLavdiJ3uzJXoI= +go.etcd.io/bbolt v1.3.9/go.mod h1:zaO32+Ti0PK1ivdPtgMESzuzL2VPoIG1PCQNvOdo/dE= go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.53.0 h1:9G6E0TXzGFVfTnawRzrPl83iHOAV7L8NJiR8RSGYV1g= diff --git a/pkg/storage/unified/resource/index.go b/pkg/storage/unified/resource/index.go new file mode 100644 index 00000000000..cdd8b6f68c5 --- /dev/null +++ b/pkg/storage/unified/resource/index.go @@ -0,0 +1,250 @@ +package resource + +import ( + "context" + "encoding/json" + "fmt" + "log" + "os" + + "github.com/blevesearch/bleve/v2" + "github.com/blevesearch/bleve/v2/analysis/lang/en" + "github.com/blevesearch/bleve/v2/mapping" + "github.com/google/uuid" +) + +type Shard struct { + index bleve.Index + path string + batch *bleve.Batch +} + +type Index struct { + shards map[string]Shard + opts Opts + s *server +} + +func NewIndex(s *server, opts Opts) *Index { + idx := &Index{ + s: s, + opts: opts, + shards: make(map[string]Shard), + } + return idx +} + +func (i *Index) Init(ctx context.Context) error { + resourceTypes := fetchResourceTypes() + for _, rt := range resourceTypes { + r := &ListRequest{Options: rt} + list, err := i.s.List(ctx, r) + if err != nil { + return err + } + + for _, obj := range list.Items { + res, err := getResource(obj.Value) + if err != nil { + return err + } + + shard, err := i.getShard(tenant(res)) + if err != nil { + return err + } + + var jsonDoc interface{} + err = json.Unmarshal(obj.Value, &jsonDoc) + if err != nil { + return err + } + err = shard.batch.Index(res.Metadata.Uid, jsonDoc) + if err != nil { + return err + } + } + + for _, shard := range i.shards { + err := shard.index.Batch(shard.batch) + if err != nil { + return err + } + shard.batch.Reset() + } + } + + return nil +} + +func (i *Index) Index(ctx context.Context, data *Data) error { + res, err := getResource(data.Value.Value) + if err != nil { + return err + } + tenant := tenant(res) + shard, err := i.getShard(tenant) + if err != nil { + return err + } + var jsonDoc interface{} + err = json.Unmarshal(data.Value.Value, &jsonDoc) + if err != nil { + return err + } + err = shard.index.Index(res.Metadata.Uid, jsonDoc) + if err != nil { + return err + } + return nil +} + +func (i *Index) Delete(ctx context.Context, uid string, key *ResourceKey) error { + shard, err := i.getShard(key.Namespace) + if err != nil { + return err + } + err = shard.index.Delete(uid) + if err != nil { + return err + } + return nil +} + +func (i *Index) Search(ctx context.Context, tenant string, query string) ([]string, error) { + if tenant == "" { + tenant = "default" + } + shard, err := i.getShard(tenant) + if err != nil { + return nil, err + } + req := bleve.NewSearchRequest(bleve.NewQueryStringQuery(query)) + req.Fields = []string{"kind", "spec.title"} + + res, err := shard.index.Search(req) + if err != nil { + return nil, err + } + + hits := res.Hits + results := []string{} + for _, hit := range hits { + val := fmt.Sprintf("%s:%s", hit.Fields["kind"], hit.Fields["spec.title"]) + results = append(results, val) + } + return results, nil +} + +func tenant(res *Resource) string { + return res.Metadata.Namespace +} + +type Metadata struct { + Name string + 
Namespace         string
+	Uid               string
+	CreationTimestamp string
+	Labels            map[string]string
+	Annotations       map[string]string
+}
+
+type Resource struct {
+	Kind       string
+	ApiVersion string
+	Metadata   Metadata
+}
+
+type Opts struct {
+	Workers    int // This controls how many goroutines are used to index objects
+	BatchSize  int // This is the batch size for how many objects to add to the index at once
+	Concurrent bool
+}
+
+func createFileIndex() (bleve.Index, string, error) {
+	indexPath := fmt.Sprintf("%s%c%s.bleve", os.TempDir(), os.PathSeparator, uuid.New().String())
+	index, err := bleve.New(indexPath, createIndexMappings())
+	if err != nil {
+		log.Fatalf("Failed to create index: %v", err)
+	}
+	return index, indexPath, err
+}
+
+// TODO: clean this up. It was copied from Owen's performance test.
+func createIndexMappings() *mapping.IndexMappingImpl {
+	// Create mapping for the name and creationTimestamp fields in the metadata
+	nameFieldMapping := bleve.NewTextFieldMapping()
+	creationTimestampFieldMapping := bleve.NewDateTimeFieldMapping()
+	metaMapping := bleve.NewDocumentMapping()
+	metaMapping.AddFieldMappingsAt("name", nameFieldMapping)
+	metaMapping.AddFieldMappingsAt("creationTimestamp", creationTimestampFieldMapping)
+	metaMapping.Dynamic = false
+	metaMapping.Enabled = true
+
+	specMapping := bleve.NewDocumentMapping()
+	specMapping.AddFieldMappingsAt("title", nameFieldMapping)
+	specMapping.Dynamic = false
+	specMapping.Enabled = true
+
+	// Create a sub-document mapping for the metadata field
+	objectMapping := bleve.NewDocumentMapping()
+	objectMapping.AddSubDocumentMapping("metadata", metaMapping)
+	objectMapping.AddSubDocumentMapping("spec", specMapping)
+	objectMapping.Dynamic = false
+	objectMapping.Enabled = true
+
+	// A generic reusable mapping for English text
+	englishTextFieldMapping := bleve.NewTextFieldMapping()
+	englishTextFieldMapping.Analyzer = en.AnalyzerName
+
+	// Map top level fields - just kind for now
+	objectMapping.AddFieldMappingsAt("kind", englishTextFieldMapping)
+	objectMapping.Dynamic = false
+
+	// Create the index mapping
+	indexMapping := bleve.NewIndexMapping()
+	indexMapping.DefaultMapping = objectMapping
+
+	return indexMapping
+}
+
+func getResource(data []byte) (*Resource, error) {
+	res := &Resource{}
+	err := json.Unmarshal(data, res)
+	if err != nil {
+		return nil, err
+	}
+	return res, nil
+}
+
+func (i *Index) getShard(tenant string) (Shard, error) {
+	shard, ok := i.shards[tenant]
+	if ok {
+		return shard, nil
+	}
+	index, path, err := createFileIndex()
+	if err != nil {
+		return Shard{}, err
+	}
+
+	shard = Shard{
+		index: index,
+		path:  path,
+		batch: index.NewBatch(),
+	}
+	// TODO: do we need to lock this? 
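+	// The watch goroutines started in index_server.go can call Index() (and
+	// therefore getShard) concurrently, so this map write is racy. A sketch of
+	// the locking this needs, assuming an `mu sync.RWMutex` field were added
+	// to Index (not part of this change):
+	//
+	//	i.mu.Lock()
+	//	defer i.mu.Unlock()
+	//	if shard, ok := i.shards[tenant]; ok {
+	//		return shard, nil
+	//	}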
+ i.shards[tenant] = shard + return shard, nil +} + +// TODO - fetch from api +func fetchResourceTypes() []*ListOptions { + items := []*ListOptions{} + items = append(items, &ListOptions{ + Key: &ResourceKey{ + Group: "playlist.grafana.app", + Resource: "playlists", + }, + }) + return items +} diff --git a/pkg/storage/unified/resource/index_server.go b/pkg/storage/unified/resource/index_server.go new file mode 100644 index 00000000000..47937e7f7f3 --- /dev/null +++ b/pkg/storage/unified/resource/index_server.go @@ -0,0 +1,221 @@ +package resource + +import ( + "context" + "errors" + "log" + "strings" + + "google.golang.org/grpc" +) + +type IndexServer struct { + ResourceServer + s *server + index *Index + ws *indexWatchServer +} + +func (is IndexServer) Search(ctx context.Context, req *SearchRequest) (*SearchResponse, error) { + results, err := is.index.Search(ctx, req.Tenant, req.Query) + if err != nil { + return nil, err + } + res := &SearchResponse{} + for _, r := range results { + res.Items = append(res.Items, &ResourceWrapper{Value: []byte(r)}) + } + return res, nil +} + +func (is IndexServer) History(ctx context.Context, req *HistoryRequest) (*HistoryResponse, error) { + return nil, nil +} + +func (is IndexServer) Origin(ctx context.Context, req *OriginRequest) (*OriginResponse, error) { + return nil, nil +} + +// Load the index +func (is *IndexServer) Load(ctx context.Context) error { + is.index = NewIndex(is.s, Opts{}) + err := is.index.Init(ctx) + if err != nil { + return err + } + return nil +} + +// Watch resources for changes and update the index +func (is *IndexServer) Watch(ctx context.Context) error { + rtList := fetchResourceTypes() + for _, rt := range rtList { + wr := &WatchRequest{ + Options: rt, + } + + go func() { + // TODO: handle error + err := is.s.Watch(wr, is.ws) + if err != nil { + log.Printf("Error watching resource %v", err) + } + }() + } + return nil +} + +// Init sets the resource server on the index server +// so we can call the resource server from the index server +// TODO: a chicken and egg problem - index server needs the resource server but the resource server is created with the index server +func (is *IndexServer) Init(ctx context.Context, rs *server) error { + is.s = rs + is.ws = &indexWatchServer{ + is: is, + context: ctx, + } + return nil +} + +func NewResourceIndexServer() ResourceIndexServer { + return &IndexServer{} +} + +type ResourceIndexer interface { + Index(ctx context.Context) (*Index, error) +} + +type indexWatchServer struct { + grpc.ServerStream + context context.Context + is *IndexServer +} + +func (f *indexWatchServer) Send(we *WatchEvent) error { + if we.Type == WatchEvent_ADDED { + return f.Add(we) + } + + if we.Type == WatchEvent_DELETED { + return f.Delete(we) + } + + if we.Type == WatchEvent_MODIFIED { + return f.Update(we) + } + + return nil +} + +func (f *indexWatchServer) RecvMsg(m interface{}) error { + return nil +} + +func (f *indexWatchServer) SendMsg(m interface{}) error { + return errors.New("not implemented") +} + +func (f *indexWatchServer) Context() context.Context { + if f.context == nil { + f.context = context.Background() + } + return f.context +} + +func (f *indexWatchServer) Index() *Index { + return f.is.index +} + +func (f *indexWatchServer) Add(we *WatchEvent) error { + data, err := getData(we.Resource) + if err != nil { + return err + } + err = f.Index().Index(f.context, data) + if err != nil { + return err + } + return nil +} + +func (f *indexWatchServer) Delete(we *WatchEvent) error { + rs, err := 
resource(we) + if err != nil { + return err + } + data, err := getData(rs) + if err != nil { + return err + } + err = f.Index().Delete(f.context, data.Uid, data.Key) + if err != nil { + return err + } + return nil +} + +func (f *indexWatchServer) Update(we *WatchEvent) error { + rs, err := resource(we) + if err != nil { + return err + } + data, err := getData(rs) + if err != nil { + return err + } + err = f.Index().Delete(f.context, data.Uid, data.Key) + if err != nil { + return err + } + err = f.Index().Index(f.context, data) + if err != nil { + return err + } + return nil +} + +type Data struct { + Key *ResourceKey + Value *ResourceWrapper + Uid string +} + +func getGroup(r *Resource) string { + v := strings.Split(r.ApiVersion, "/") + if len(v) > 0 { + return v[0] + } + return "" +} + +func getData(wr *WatchEvent_Resource) (*Data, error) { + r, err := getResource(wr.Value) + if err != nil { + return nil, err + } + + key := &ResourceKey{ + Group: getGroup(r), + Resource: r.Kind, + Namespace: r.Metadata.Namespace, + Name: r.Metadata.Name, + } + + value := &ResourceWrapper{ + ResourceVersion: wr.Version, + Value: wr.Value, + } + return &Data{Key: key, Value: value, Uid: r.Metadata.Uid}, nil +} + +func resource(we *WatchEvent) (*WatchEvent_Resource, error) { + rs := we.Resource + if rs == nil || len(rs.Value) == 0 { + // for updates/deletes + rs = we.Previous + } + if rs == nil || len(rs.Value) == 0 { + return nil, errors.New("resource not found") + } + return rs, nil +} diff --git a/pkg/storage/unified/resource/resource.pb.go b/pkg/storage/unified/resource/resource.pb.go index 44e5be68f8c..7728dd880be 100644 --- a/pkg/storage/unified/resource/resource.pb.go +++ b/pkg/storage/unified/resource/resource.pb.go @@ -1619,7 +1619,8 @@ type SearchRequest struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - Query string `protobuf:"bytes,1,opt,name=query,proto3" json:"query,omitempty"` + Query string `protobuf:"bytes,1,opt,name=query,proto3" json:"query,omitempty"` + Tenant string `protobuf:"bytes,2,opt,name=tenant,proto3" json:"tenant,omitempty"` } func (x *SearchRequest) Reset() { @@ -1661,6 +1662,13 @@ func (x *SearchRequest) GetQuery() string { return "" } +func (x *SearchRequest) GetTenant() string { + if x != nil { + return x.Tenant + } + return "" +} + type SearchResponse struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -2441,135 +2449,136 @@ var file_resource_proto_rawDesc = []byte{ 0x09, 0x0a, 0x05, 0x41, 0x44, 0x44, 0x45, 0x44, 0x10, 0x01, 0x12, 0x0c, 0x0a, 0x08, 0x4d, 0x4f, 0x44, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x02, 0x12, 0x0b, 0x0a, 0x07, 0x44, 0x45, 0x4c, 0x45, 0x54, 0x45, 0x44, 0x10, 0x03, 0x12, 0x0c, 0x0a, 0x08, 0x42, 0x4f, 0x4f, 0x4b, 0x4d, 0x41, 0x52, - 0x4b, 0x10, 0x04, 0x12, 0x09, 0x0a, 0x05, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x05, 0x22, 0x25, + 0x4b, 0x10, 0x04, 0x12, 0x09, 0x0a, 0x05, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x05, 0x22, 0x3d, 0x0a, 0x0d, 0x53, 0x65, 0x61, 0x72, 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x71, 0x75, 0x65, 0x72, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, - 0x71, 0x75, 0x65, 0x72, 0x79, 0x22, 0x41, 0x0a, 0x0e, 0x53, 0x65, 0x61, 0x72, 0x63, 0x68, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2f, 0x0a, 0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, - 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x57, 0x72, 0x61, 0x70, 0x70, 0x65, - 0x72, 0x52, 
0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x22, 0x9a, 0x01, 0x0a, 0x0e, 0x48, 0x69, 0x73, - 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x26, 0x0a, 0x0f, 0x6e, - 0x65, 0x78, 0x74, 0x5f, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x6e, 0x65, 0x78, 0x74, 0x50, 0x61, 0x67, 0x65, 0x54, 0x6f, - 0x6b, 0x65, 0x6e, 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x03, 0x52, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x12, 0x27, 0x0a, 0x03, 0x6b, 0x65, 0x79, - 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4b, 0x65, 0x79, 0x52, 0x03, 0x6b, - 0x65, 0x79, 0x12, 0x21, 0x0a, 0x0c, 0x73, 0x68, 0x6f, 0x77, 0x5f, 0x64, 0x65, 0x6c, 0x65, 0x74, - 0x65, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0b, 0x73, 0x68, 0x6f, 0x77, 0x44, 0x65, - 0x6c, 0x65, 0x74, 0x65, 0x64, 0x22, 0xbf, 0x01, 0x0a, 0x0f, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, - 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2c, 0x0a, 0x05, 0x69, 0x74, 0x65, - 0x6d, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x65, 0x74, 0x61, - 0x52, 0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x12, 0x26, 0x0a, 0x0f, 0x6e, 0x65, 0x78, 0x74, 0x5f, - 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x0d, 0x6e, 0x65, 0x78, 0x74, 0x50, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, - 0x29, 0x0a, 0x10, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, - 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0f, 0x72, 0x65, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x2b, 0x0a, 0x05, 0x65, 0x72, - 0x72, 0x6f, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x72, 0x65, 0x73, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x2e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, - 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x8e, 0x01, 0x0a, 0x0d, 0x4f, 0x72, 0x69, 0x67, - 0x69, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x26, 0x0a, 0x0f, 0x6e, 0x65, 0x78, - 0x74, 0x5f, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x0d, 0x6e, 0x65, 0x78, 0x74, 0x50, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, - 0x6e, 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, - 0x52, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x12, 0x27, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x03, + 0x71, 0x75, 0x65, 0x72, 0x79, 0x12, 0x16, 0x0a, 0x06, 0x74, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x74, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x22, 0x41, 0x0a, + 0x0e, 0x53, 0x65, 0x61, 0x72, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, + 0x2f, 0x0a, 0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, + 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, + 0x63, 0x65, 0x57, 0x72, 0x61, 0x70, 0x70, 0x65, 0x72, 0x52, 0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, + 0x22, 0x9a, 0x01, 0x0a, 0x0e, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x12, 0x26, 0x0a, 0x0f, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x70, 0x61, 0x67, 0x65, + 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 
0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x6e, 0x65, + 0x78, 0x74, 0x50, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x14, 0x0a, 0x05, 0x6c, + 0x69, 0x6d, 0x69, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x6c, 0x69, 0x6d, 0x69, + 0x74, 0x12, 0x27, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, + 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, + 0x63, 0x65, 0x4b, 0x65, 0x79, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x21, 0x0a, 0x0c, 0x73, 0x68, + 0x6f, 0x77, 0x5f, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, + 0x52, 0x0b, 0x73, 0x68, 0x6f, 0x77, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x64, 0x22, 0xbf, 0x01, + 0x0a, 0x0f, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x12, 0x2c, 0x0a, 0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x16, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x6f, + 0x75, 0x72, 0x63, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x52, 0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x12, + 0x26, 0x0a, 0x0f, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, + 0x65, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x6e, 0x65, 0x78, 0x74, 0x50, 0x61, + 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x29, 0x0a, 0x10, 0x72, 0x65, 0x73, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, + 0x03, 0x52, 0x0f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, + 0x6f, 0x6e, 0x12, 0x2b, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x15, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x45, 0x72, 0x72, + 0x6f, 0x72, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, + 0x8e, 0x01, 0x0a, 0x0d, 0x4f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x12, 0x26, 0x0a, 0x0f, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, + 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x6e, 0x65, 0x78, 0x74, + 0x50, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x69, 0x6d, + 0x69, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x12, + 0x27, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x72, + 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x4b, 0x65, 0x79, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x16, 0x0a, 0x06, 0x6f, 0x72, 0x69, 0x67, + 0x69, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6f, 0x72, 0x69, 0x67, 0x69, 0x6e, + 0x22, 0xe5, 0x01, 0x0a, 0x12, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4f, 0x72, 0x69, + 0x67, 0x69, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x27, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4b, 0x65, 0x79, 0x52, 0x03, 0x6b, 0x65, 0x79, - 0x12, 0x16, 0x0a, 0x06, 0x6f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x06, 0x6f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x22, 0xe5, 0x01, 0x0a, 0x12, 0x52, 0x65, 0x73, - 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x12, - 0x27, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 
0x0b, 0x32, 0x15, 0x2e, 0x72, - 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, - 0x4b, 0x65, 0x79, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x23, 0x0a, 0x0d, 0x72, 0x65, 0x73, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, - 0x0c, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x23, 0x0a, - 0x0d, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x03, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x48, 0x61, - 0x73, 0x68, 0x12, 0x16, 0x0a, 0x06, 0x6f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x18, 0x04, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x06, 0x6f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, - 0x74, 0x68, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x12, - 0x0a, 0x04, 0x68, 0x61, 0x73, 0x68, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x68, 0x61, - 0x73, 0x68, 0x12, 0x1c, 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, - 0x07, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, - 0x22, 0xc4, 0x01, 0x0a, 0x0e, 0x4f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x12, 0x32, 0x0a, 0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x18, 0x01, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x52, 0x65, - 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x49, 0x6e, 0x66, 0x6f, - 0x52, 0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x12, 0x26, 0x0a, 0x0f, 0x6e, 0x65, 0x78, 0x74, 0x5f, - 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x0d, 0x6e, 0x65, 0x78, 0x74, 0x50, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, - 0x29, 0x0a, 0x10, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, - 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0f, 0x72, 0x65, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x2b, 0x0a, 0x05, 0x65, 0x72, - 0x72, 0x6f, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x72, 0x65, 0x73, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x2e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, - 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x2e, 0x0a, 0x12, 0x48, 0x65, 0x61, 0x6c, 0x74, - 0x68, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x18, 0x0a, - 0x07, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, - 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x22, 0xab, 0x01, 0x0a, 0x13, 0x48, 0x65, 0x61, 0x6c, - 0x74, 0x68, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, - 0x43, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, - 0x2b, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x48, 0x65, 0x61, 0x6c, 0x74, - 0x68, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x53, - 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, 0x73, 0x74, - 0x61, 0x74, 0x75, 0x73, 0x22, 0x4f, 0x0a, 0x0d, 0x53, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x53, - 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, - 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x53, 0x45, 0x52, 0x56, 0x49, 0x4e, 0x47, 0x10, 0x01, 
0x12, - 0x0f, 0x0a, 0x0b, 0x4e, 0x4f, 0x54, 0x5f, 0x53, 0x45, 0x52, 0x56, 0x49, 0x4e, 0x47, 0x10, 0x02, - 0x12, 0x13, 0x0a, 0x0f, 0x53, 0x45, 0x52, 0x56, 0x49, 0x43, 0x45, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, - 0x4f, 0x57, 0x4e, 0x10, 0x03, 0x2a, 0x33, 0x0a, 0x14, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x4d, 0x61, 0x74, 0x63, 0x68, 0x12, 0x10, 0x0a, - 0x0c, 0x4e, 0x6f, 0x74, 0x4f, 0x6c, 0x64, 0x65, 0x72, 0x54, 0x68, 0x61, 0x6e, 0x10, 0x00, 0x12, - 0x09, 0x0a, 0x05, 0x45, 0x78, 0x61, 0x63, 0x74, 0x10, 0x01, 0x32, 0xed, 0x02, 0x0a, 0x0d, 0x52, - 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x12, 0x35, 0x0a, 0x04, - 0x52, 0x65, 0x61, 0x64, 0x12, 0x15, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, - 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x72, 0x65, - 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x12, 0x3b, 0x0a, 0x06, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x12, 0x17, 0x2e, - 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x12, 0x3b, 0x0a, 0x06, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x12, 0x17, 0x2e, 0x72, 0x65, 0x73, - 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x55, - 0x70, 0x64, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3b, 0x0a, - 0x06, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x17, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, - 0x63, 0x65, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x1a, 0x18, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x44, 0x65, 0x6c, 0x65, - 0x74, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x35, 0x0a, 0x04, 0x4c, 0x69, - 0x73, 0x74, 0x12, 0x15, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x4c, 0x69, - 0x73, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x72, 0x65, 0x73, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, - 0x65, 0x12, 0x37, 0x0a, 0x05, 0x57, 0x61, 0x74, 0x63, 0x68, 0x12, 0x16, 0x2e, 0x72, 0x65, 0x73, - 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x57, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x1a, 0x14, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x57, 0x61, - 0x74, 0x63, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x30, 0x01, 0x32, 0xc9, 0x01, 0x0a, 0x0d, 0x52, - 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x3b, 0x0a, 0x06, - 0x53, 0x65, 0x61, 0x72, 0x63, 0x68, 0x12, 0x17, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x2e, 0x53, 0x65, 0x61, 0x72, 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x18, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x53, 0x65, 0x61, 0x72, 0x63, - 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3e, 0x0a, 0x07, 0x48, 0x69, 0x73, - 0x74, 0x6f, 0x72, 0x79, 0x12, 0x18, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, - 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x19, - 0x2e, 0x72, 0x65, 
0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, - 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3b, 0x0a, 0x06, 0x4f, 0x72, 0x69, - 0x67, 0x69, 0x6e, 0x12, 0x17, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x4f, - 0x72, 0x69, 0x67, 0x69, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x72, - 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x4f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x52, 0x65, - 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x32, 0x57, 0x0a, 0x0b, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, - 0x73, 0x74, 0x69, 0x63, 0x73, 0x12, 0x48, 0x0a, 0x09, 0x49, 0x73, 0x48, 0x65, 0x61, 0x6c, 0x74, - 0x68, 0x79, 0x12, 0x1c, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x48, 0x65, - 0x61, 0x6c, 0x74, 0x68, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x1a, 0x1d, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x48, 0x65, 0x61, 0x6c, - 0x74, 0x68, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, - 0x39, 0x5a, 0x37, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x72, - 0x61, 0x66, 0x61, 0x6e, 0x61, 0x2f, 0x67, 0x72, 0x61, 0x66, 0x61, 0x6e, 0x61, 0x2f, 0x70, 0x6b, - 0x67, 0x2f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x2f, 0x75, 0x6e, 0x69, 0x66, 0x69, 0x65, - 0x64, 0x2f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x33, + 0x12, 0x23, 0x0a, 0x0d, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x73, 0x69, 0x7a, + 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0c, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x72, 0x65, + 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x48, 0x61, 0x73, 0x68, 0x12, 0x16, 0x0a, 0x06, 0x6f, 0x72, + 0x69, 0x67, 0x69, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6f, 0x72, 0x69, 0x67, + 0x69, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x61, 0x73, 0x68, 0x18, 0x06, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x68, 0x61, 0x73, 0x68, 0x12, 0x1c, 0x0a, 0x09, 0x74, 0x69, + 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x07, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x74, + 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x22, 0xc4, 0x01, 0x0a, 0x0e, 0x4f, 0x72, 0x69, + 0x67, 0x69, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x32, 0x0a, 0x05, 0x69, + 0x74, 0x65, 0x6d, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x72, 0x65, 0x73, + 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4f, 0x72, + 0x69, 0x67, 0x69, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x12, + 0x26, 0x0a, 0x0f, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, + 0x65, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x6e, 0x65, 0x78, 0x74, 0x50, 0x61, + 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x29, 0x0a, 0x10, 0x72, 0x65, 0x73, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, + 0x03, 0x52, 0x0f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, + 0x6f, 0x6e, 0x12, 0x2b, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x15, 0x2e, 0x72, 
0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x45, 0x72, 0x72, + 0x6f, 0x72, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, + 0x2e, 0x0a, 0x12, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x22, + 0xab, 0x01, 0x0a, 0x13, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x43, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, + 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x2b, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, + 0x63, 0x65, 0x2e, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x53, 0x74, + 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x4f, 0x0a, 0x0d, + 0x53, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x0b, 0x0a, + 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x53, 0x45, + 0x52, 0x56, 0x49, 0x4e, 0x47, 0x10, 0x01, 0x12, 0x0f, 0x0a, 0x0b, 0x4e, 0x4f, 0x54, 0x5f, 0x53, + 0x45, 0x52, 0x56, 0x49, 0x4e, 0x47, 0x10, 0x02, 0x12, 0x13, 0x0a, 0x0f, 0x53, 0x45, 0x52, 0x56, + 0x49, 0x43, 0x45, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x03, 0x2a, 0x33, 0x0a, + 0x14, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, + 0x4d, 0x61, 0x74, 0x63, 0x68, 0x12, 0x10, 0x0a, 0x0c, 0x4e, 0x6f, 0x74, 0x4f, 0x6c, 0x64, 0x65, + 0x72, 0x54, 0x68, 0x61, 0x6e, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x45, 0x78, 0x61, 0x63, 0x74, + 0x10, 0x01, 0x32, 0xed, 0x02, 0x0a, 0x0d, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, + 0x74, 0x6f, 0x72, 0x65, 0x12, 0x35, 0x0a, 0x04, 0x52, 0x65, 0x61, 0x64, 0x12, 0x15, 0x2e, 0x72, + 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x52, + 0x65, 0x61, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3b, 0x0a, 0x06, 0x43, + 0x72, 0x65, 0x61, 0x74, 0x65, 0x12, 0x17, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, + 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3b, 0x0a, 0x06, 0x55, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x12, 0x17, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x55, 0x70, + 0x64, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x72, 0x65, + 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3b, 0x0a, 0x06, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x12, + 0x17, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, + 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x12, 0x35, 0x0a, 0x04, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x15, 0x2e, 0x72, 0x65, 0x73, + 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x4c, 0x69, 0x73, 
0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x16, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x4c, 0x69, 0x73, + 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x37, 0x0a, 0x05, 0x57, 0x61, 0x74, + 0x63, 0x68, 0x12, 0x16, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x57, 0x61, + 0x74, 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x14, 0x2e, 0x72, 0x65, 0x73, + 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x57, 0x61, 0x74, 0x63, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, + 0x30, 0x01, 0x32, 0xc9, 0x01, 0x0a, 0x0d, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x49, + 0x6e, 0x64, 0x65, 0x78, 0x12, 0x3b, 0x0a, 0x06, 0x53, 0x65, 0x61, 0x72, 0x63, 0x68, 0x12, 0x17, + 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x53, 0x65, 0x61, 0x72, 0x63, 0x68, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, + 0x63, 0x65, 0x2e, 0x53, 0x65, 0x61, 0x72, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x12, 0x3e, 0x0a, 0x07, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x12, 0x18, 0x2e, 0x72, + 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x19, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x2e, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x12, 0x3b, 0x0a, 0x06, 0x4f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x12, 0x17, 0x2e, 0x72, 0x65, + 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x4f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, + 0x4f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x32, 0x57, + 0x0a, 0x0b, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x12, 0x48, 0x0a, + 0x09, 0x49, 0x73, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x79, 0x12, 0x1c, 0x2e, 0x72, 0x65, 0x73, + 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x43, 0x68, 0x65, 0x63, + 0x6b, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x2e, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x39, 0x5a, 0x37, 0x67, 0x69, 0x74, 0x68, 0x75, + 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x72, 0x61, 0x66, 0x61, 0x6e, 0x61, 0x2f, 0x67, 0x72, + 0x61, 0x66, 0x61, 0x6e, 0x61, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, + 0x65, 0x2f, 0x75, 0x6e, 0x69, 0x66, 0x69, 0x65, 0x64, 0x2f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, + 0x63, 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/pkg/storage/unified/resource/resource.proto b/pkg/storage/unified/resource/resource.proto index 9859ea51994..dcaee25b909 100644 --- a/pkg/storage/unified/resource/resource.proto +++ b/pkg/storage/unified/resource/resource.proto @@ -326,6 +326,7 @@ message WatchEvent { message SearchRequest { string query = 1; + string tenant = 2; } message SearchResponse { diff --git a/pkg/storage/unified/resource/server.go b/pkg/storage/unified/resource/server.go index 8484653049a..0a8da09d322 100644 --- a/pkg/storage/unified/resource/server.go +++ b/pkg/storage/unified/resource/server.go @@ -7,6 +7,7 @@ import ( "log/slog" "net/http" "sync" + "sync/atomic" "time" "go.opentelemetry.io/otel/trace" @@ -99,25 +100,6 @@ type ResourceServerOptions struct { Now func() int64 } -type indexServer 
struct{} - -func (s indexServer) Search(ctx context.Context, req *SearchRequest) (*SearchResponse, error) { - res := &SearchResponse{} - return res, nil - } - -func (s indexServer) History(ctx context.Context, req *HistoryRequest) (*HistoryResponse, error) { - return nil, nil - } - -func (s indexServer) Origin(ctx context.Context, req *OriginRequest) (*OriginResponse, error) { - return nil, nil - } - -func NewResourceIndexServer() ResourceIndexServer { - return indexServer{} - } - func NewResourceServer(opts ResourceServerOptions) (ResourceServer, error) { if opts.Tracer == nil { opts.Tracer = noop.NewTracerProvider().Tracer("resource-server") @@ -161,14 +143,15 @@ func NewResourceServer(opts ResourceServerOptions) (ResourceServer, error) { var _ ResourceServer = &server{} type server struct { - tracer trace.Tracer - log *slog.Logger - backend StorageBackend - index ResourceIndexServer - diagnostics DiagnosticsServer - access WriteAccessHooks - lifecycle LifecycleHooks - now func() int64 + tracer trace.Tracer + log *slog.Logger + backend StorageBackend + index ResourceIndexServer + diagnostics DiagnosticsServer + access WriteAccessHooks + lifecycle LifecycleHooks + now func() int64 + mostRecentRV atomic.Int64 // The most recent resource version seen by the server // Background watch task -- this has permissions for everything ctx context.Context @@ -343,12 +326,12 @@ func (s *server) Create(ctx context.Context, req *CreateRequest) (*CreateRespons rsp.Error = e return rsp, nil } - var err error rsp.ResourceVersion, err = s.backend.WriteEvent(ctx, *event) if err != nil { rsp.Error = AsErrorResult(err) } + s.log.Debug("server.WriteEvent", "type", event.Type, "rv", rsp.ResourceVersion, "previousRV", event.PreviousRV, "group", event.Key.Group, "namespace", event.Key.Namespace, "name", event.Key.Name, "resource", event.Key.Resource) return rsp, nil } @@ -554,6 +537,8 @@ func (s *server) initWatcher() error { for { // pipe all events v := <-events + s.log.Debug("Server. Streaming Event", "type", v.Type, "previousRV", v.PreviousRV, "group", v.Key.Group, "namespace", v.Key.Namespace, "resource", v.Key.Resource, "name", v.Key.Name) + s.mostRecentRV.Store(v.ResourceVersion) out <- v } }() @@ -569,23 +554,67 @@ func (s *server) Watch(req *WatchRequest, srv ResourceStore_WatchServer) error { return err } - // Start listening -- this will buffer any changes that happen while we backfill + // Start listening -- this will buffer any changes that happen while we backfill. + // If events are generated faster than we can process them, then some events will be dropped. + // TODO: Think of a way to allow the client to catch up. stream, err := s.broadcaster.Subscribe(ctx) if err != nil { return err } defer s.broadcaster.Unsubscribe(stream) - since := req.Since - if req.SendInitialEvents { - fmt.Printf("TODO... query\n") - // All initial events are CREATE + if !req.SendInitialEvents && req.Since == 0 { + // This is a temporary hack only relevant for tests to ensure that the first events are sent. + // This is required because the SQL backend polls the database every 100ms. + // TODO: Implement a getLatestResourceVersion method in the backend. + time.Sleep(10 * time.Millisecond) + } - if req.AllowWatchBookmarks { - fmt.Printf("TODO... send bookmark\n") + mostRecentRV := s.mostRecentRV.Load() // get the latest resource version + var initialEventsRV int64 // resource version coming from the initial events + if req.SendInitialEvents { + // Backfill the stream by adding every existing entity. 
+ initialEventsRV, err = s.backend.ListIterator(ctx, &ListRequest{Options: req.Options}, func(iter ListIterator) error { + for iter.Next() { + if err := iter.Error(); err != nil { + return err + } + if err := srv.Send(&WatchEvent{ + Type: WatchEvent_ADDED, + Resource: &WatchEvent_Resource{ + Value: iter.Value(), + Version: iter.ResourceVersion(), + }, + }); err != nil { + return err + } + } + return nil + }) + if err != nil { + return err + } + } + if req.SendInitialEvents && req.AllowWatchBookmarks { + if err := srv.Send(&WatchEvent{ + Type: WatchEvent_BOOKMARK, + Resource: &WatchEvent_Resource{ + Version: initialEventsRV, + }, + }); err != nil { + return err } } + var since int64 // resource version to start watching from + switch { + case req.SendInitialEvents: + since = initialEventsRV + case req.Since == 0: + since = mostRecentRV + default: + since = req.Since + } for { select { case <-ctx.Done(): @@ -596,23 +625,39 @@ func (s *server) Watch(req *WatchRequest, srv ResourceStore_WatchServer) error { s.log.Debug("watch events closed") return nil } - + s.log.Debug("Server Broadcasting", "type", event.Type, "rv", event.ResourceVersion, "previousRV", event.PreviousRV, "group", event.Key.Group, "namespace", event.Key.Namespace, "resource", event.Key.Resource, "name", event.Key.Name) if event.ResourceVersion > since && matchesQueryKey(req.Options.Key, event.Key) { - // Currently sending *every* event - // if req.Options.Labels != nil { - // // match *either* the old or new object - // } - // TODO: return values that match either the old or the new - - if err := srv.Send(&WatchEvent{ + value := event.Value + // remove the delete marker stored in the value for deleted objects + if event.Type == WatchEvent_DELETED { + value = []byte{} + } + resp := &WatchEvent{ Timestamp: event.Timestamp, Type: event.Type, Resource: &WatchEvent_Resource{ - Value: event.Value, + Value: value, Version: event.ResourceVersion, }, - // TODO... previous??? - }); err != nil { + } + if event.PreviousRV > 0 { + prevObj, err := s.Read(ctx, &ReadRequest{Key: event.Key, ResourceVersion: event.PreviousRV}) + if err != nil { + // This scenario should never happen, but if it does, we should log it and continue + // sending the event without the previous object. The client will decide what to do. + s.log.Error("error reading previous object", "key", event.Key, "resource_version", event.PreviousRV, "error", err) + } else { + if prevObj.ResourceVersion != event.PreviousRV { + s.log.Error("resource version mismatch", "key", event.Key, "resource_version", event.PreviousRV, "actual", prevObj.ResourceVersion) + return fmt.Errorf("resource version mismatch") + } + resp.Previous = &WatchEvent_Resource{ + Value: prevObj.Value, + Version: prevObj.ResourceVersion, + } + } + } + if err := srv.Send(resp); err != nil { return err } } @@ -643,6 +688,28 @@ func (s *server) Origin(ctx context.Context, req *OriginRequest) (*OriginRespons return s.index.Origin(ctx, req) } +// Index returns the search index. If the index is not initialized, it will be initialized. +func (s *server) Index(ctx context.Context) (*Index, error) { + index := s.index.(*IndexServer) + if index.index == nil { + err := index.Init(ctx, s) + if err != nil { + return nil, err + } + + err = index.Load(ctx) + if err != nil { + return nil, err + } + + err = index.Watch(ctx) + if err != nil { + return nil, err + } + } + return index.index, nil +} + 
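The reworked Watch above proceeds in three phases: an optional backfill that replays current state as ADDED events via ListIterator, an optional BOOKMARK carrying the resource version the backfill ended at, and then live events filtered by the computed `since`. A minimal sketch of how a client might consume such a stream, assuming the generated `ResourceStoreClient` and message types from resource.proto; the function name and arguments are hypothetical:

```go
// watchSketch is a hedged illustration of the stream semantics, not shipped
// code; it assumes the Go types generated from resource.proto.
func watchSketch(ctx context.Context, client ResourceStoreClient, key *ResourceKey) error {
	stream, err := client.Watch(ctx, &WatchRequest{
		Options:             &ListOptions{Key: key},
		SendInitialEvents:   true, // replay current state as ADDED events first
		AllowWatchBookmarks: true, // ask for a BOOKMARK once the backfill ends
	})
	if err != nil {
		return err
	}
	for {
		ev, err := stream.Recv()
		if err != nil {
			return err // stream closed or broken
		}
		switch ev.Type {
		case WatchEvent_BOOKMARK:
			// Backfill done; live events resume after ev.Resource.Version.
		case WatchEvent_DELETED:
			// The server empties ev.Resource.Value; the prior state, when
			// resolvable via previous_resource_version, arrives in ev.Previous.
		default:
			// Other types carry the new state in ev.Resource; ev.Previous
			// (when set) holds the previous version of the object.
		}
	}
}
```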
// IsHealthy implements ResourceServer. func (s *server) IsHealthy(ctx context.Context, req *HealthCheckRequest) (*HealthCheckResponse, error) { if err := s.Init(ctx); err != nil { diff --git a/pkg/storage/unified/sql/backend.go b/pkg/storage/unified/sql/backend.go index ae70e141219..1a6e79e3257 100644 --- a/pkg/storage/unified/sql/backend.go +++ b/pkg/storage/unified/sql/backend.go @@ -22,6 +22,7 @@ import ( ) const trace_prefix = "sql.resource." +const defaultPollingInterval = 100 * time.Millisecond type Backend interface { resource.StorageBackend @@ -30,8 +31,9 @@ type Backend interface { } type BackendOptions struct { - DBProvider db.DBProvider - Tracer trace.Tracer + DBProvider db.DBProvider + Tracer trace.Tracer + PollingInterval time.Duration } func NewBackend(opts BackendOptions) (Backend, error) { @@ -43,12 +45,17 @@ func NewBackend(opts BackendOptions) (Backend, error) { } ctx, cancel := context.WithCancel(context.Background()) + pollingInterval := opts.PollingInterval + if pollingInterval == 0 { + pollingInterval = defaultPollingInterval + } return &backend{ - done: ctx.Done(), - cancel: cancel, - log: log.New("sql-resource-server"), - tracer: opts.Tracer, - dbProvider: opts.DBProvider, + done: ctx.Done(), + cancel: cancel, + log: log.New("sql-resource-server"), + tracer: opts.Tracer, + dbProvider: opts.DBProvider, + pollingInterval: pollingInterval, }, nil } @@ -70,6 +77,7 @@ type backend struct { // watch streaming //stream chan *resource.WatchEvent + pollingInterval time.Duration } func (b *backend) Init(ctx context.Context) error { @@ -180,7 +188,6 @@ func (b *backend) create(ctx context.Context, event resource.WriteEvent) (int64, return nil }) - return newVersion, err } @@ -512,8 +519,7 @@ func (b *backend) WatchWriteEvents(ctx context.Context) (<-chan *resource.Writte } func (b *backend) poller(ctx context.Context, since groupResourceRV, stream chan<- *resource.WrittenEvent) { - interval := 100 * time.Millisecond // TODO make this configurable - t := time.NewTicker(interval) + t := time.NewTicker(b.pollingInterval) defer close(stream) defer t.Stop() @@ -526,7 +532,7 @@ func (b *backend) poller(ctx context.Context, since groupResourceRV, stream chan grv, err := b.listLatestRVs(ctx) if err != nil { b.log.Error("get the latest resource version", "err", err) - t.Reset(interval) + t.Reset(b.pollingInterval) continue } for group, items := range grv { @@ -543,7 +549,7 @@ func (b *backend) poller(ctx context.Context, since groupResourceRV, stream chan next, err := b.poll(ctx, group, resource, since[group][resource], stream) if err != nil { b.log.Error("polling for resource", "err", err) - t.Reset(interval) + t.Reset(b.pollingInterval) continue } if next > since[group][resource] { @@ -552,7 +558,7 @@ func (b *backend) poller(ctx context.Context, since groupResourceRV, stream chan } } - t.Reset(interval) + t.Reset(b.pollingInterval) } } } @@ -627,6 +633,10 @@ func (b *backend) poll(ctx context.Context, grp string, res string, since int64, return nextRV, fmt.Errorf("missing key in response") } nextRV = rec.ResourceVersion + prevRV := rec.PreviousRV + if prevRV == nil { + prevRV = new(int64) // default to 0 without dereferencing a nil pointer + } stream <- &resource.WrittenEvent{ WriteEvent: resource.WriteEvent{ Value: rec.Value, @@ -636,7 +646,8 @@ func (b *backend) poll(ctx context.Context, grp string, res string, since int64, Resource: rec.Key.Resource, Name: rec.Key.Name, }, - Type: resource.WatchEvent_Type(rec.Action), + Type: resource.WatchEvent_Type(rec.Action), + PreviousRV: *prevRV, }, ResourceVersion: rec.ResourceVersion, // Timestamp: , // TODO: add timestamp @@ 
-663,15 +674,16 @@ func resourceVersionAtomicInc(ctx context.Context, x db.ContextExecer, d sqltemp if errors.Is(err, sql.ErrNoRows) { // if there wasn't a row associated with the given resource, we create one with - // version 1 + // version 2 to match the etcd behavior. if _, err = dbutil.Exec(ctx, x, sqlResourceVersionInsert, sqlResourceVersionRequest{ - SQLTemplate: sqltemplate.New(d), - Group: key.Group, - Resource: key.Resource, + SQLTemplate: sqltemplate.New(d), + Group: key.Group, + Resource: key.Resource, + resourceVersion: &resourceVersion{1}, }); err != nil { return 0, fmt.Errorf("insert into resource_version: %w", err) } - return 1, nil + return 2, nil } if err != nil { diff --git a/pkg/storage/unified/sql/backend_test.go b/pkg/storage/unified/sql/backend_test.go index b24024aef90..33b7bab7d6a 100644 --- a/pkg/storage/unified/sql/backend_test.go +++ b/pkg/storage/unified/sql/backend_test.go @@ -227,7 +227,7 @@ func TestResourceVersionAtomicInc(t *testing.T) { v, err := resourceVersionAtomicInc(ctx, b.DB, dialect, resKey) require.NoError(t, err) - require.Equal(t, int64(1), v) + require.Equal(t, int64(2), v) }) t.Run("happy path - update existing row", func(t *testing.T) { @@ -304,7 +304,7 @@ func TestBackend_create(t *testing.T) { v, err := b.create(ctx, event) require.NoError(t, err) - require.Equal(t, int64(1), v) + require.Equal(t, int64(2), v) }) t.Run("error inserting into resource", func(t *testing.T) { @@ -409,7 +409,7 @@ func TestBackend_update(t *testing.T) { v, err := b.update(ctx, event) require.NoError(t, err) - require.Equal(t, int64(1), v) + require.Equal(t, int64(2), v) }) t.Run("error in first update to resource", func(t *testing.T) { @@ -513,7 +513,7 @@ func TestBackend_delete(t *testing.T) { v, err := b.delete(ctx, event) require.NoError(t, err) - require.Equal(t, int64(1), v) + require.Equal(t, int64(2), v) }) t.Run("error deleting resource", func(t *testing.T) { diff --git a/pkg/storage/unified/sql/data/resource_history_insert.sql b/pkg/storage/unified/sql/data/resource_history_insert.sql index 018b65739d8..2669ef82447 100644 --- a/pkg/storage/unified/sql/data/resource_history_insert.sql +++ b/pkg/storage/unified/sql/data/resource_history_insert.sql @@ -6,6 +6,7 @@ INSERT INTO {{ .Ident "resource_history" }} {{ .Ident "namespace" }}, {{ .Ident "name" }}, + {{ .Ident "previous_resource_version"}}, {{ .Ident "value" }}, {{ .Ident "action" }} ) @@ -17,6 +18,7 @@ INSERT INTO {{ .Ident "resource_history" }} {{ .Arg .WriteEvent.Key.Namespace }}, {{ .Arg .WriteEvent.Key.Name }}, + {{ .Arg .WriteEvent.PreviousRV }}, {{ .Arg .WriteEvent.Value }}, {{ .Arg .WriteEvent.Type }} ) diff --git a/pkg/storage/unified/sql/data/resource_history_poll.sql b/pkg/storage/unified/sql/data/resource_history_poll.sql index bebfab9286d..8e4a7374fdb 100644 --- a/pkg/storage/unified/sql/data/resource_history_poll.sql +++ b/pkg/storage/unified/sql/data/resource_history_poll.sql @@ -5,7 +5,8 @@ SELECT {{ .Ident "resource" | .Into .Response.Key.Resource }}, {{ .Ident "name" | .Into .Response.Key.Name }}, {{ .Ident "value" | .Into .Response.Value }}, - {{ .Ident "action" | .Into .Response.Action }} + {{ .Ident "action" | .Into .Response.Action }}, + {{ .Ident "previous_resource_version" | .Into .Response.PreviousRV }} FROM {{ .Ident "resource_history" }} WHERE 1 = 1 diff --git a/pkg/storage/unified/sql/data/resource_insert.sql b/pkg/storage/unified/sql/data/resource_insert.sql index e127901ae50..ccaca2f12f7 100644 --- a/pkg/storage/unified/sql/data/resource_insert.sql +++ 
b/pkg/storage/unified/sql/data/resource_insert.sql @@ -7,6 +7,7 @@ INSERT INTO {{ .Ident "resource" }} {{ .Ident "namespace" }}, {{ .Ident "name" }}, + {{ .Ident "previous_resource_version" }}, {{ .Ident "value" }}, {{ .Ident "action" }} ) @@ -17,6 +18,7 @@ INSERT INTO {{ .Ident "resource" }} {{ .Arg .WriteEvent.Key.Namespace }}, {{ .Arg .WriteEvent.Key.Name }}, + {{ .Arg .WriteEvent.PreviousRV }}, {{ .Arg .WriteEvent.Value }}, {{ .Arg .WriteEvent.Type }} ) diff --git a/pkg/storage/unified/sql/data/resource_version_insert.sql b/pkg/storage/unified/sql/data/resource_version_insert.sql index 6c2342905da..6c3aab0dcd4 100644 --- a/pkg/storage/unified/sql/data/resource_version_insert.sql +++ b/pkg/storage/unified/sql/data/resource_version_insert.sql @@ -8,6 +8,6 @@ INSERT INTO {{ .Ident "resource_version" }} VALUES ( {{ .Arg .Group }}, {{ .Arg .Resource }}, - 1 + 2 ) ; diff --git a/pkg/storage/unified/sql/db/dbimpl/dbEngine.go b/pkg/storage/unified/sql/db/dbimpl/dbEngine.go index d068a658481..3968cfc7839 100644 --- a/pkg/storage/unified/sql/db/dbimpl/dbEngine.go +++ b/pkg/storage/unified/sql/db/dbimpl/dbEngine.go @@ -7,23 +7,26 @@ import ( "time" "github.com/go-sql-driver/mysql" + "xorm.io/xorm" + "github.com/grafana/grafana/pkg/infra/tracing" "github.com/grafana/grafana/pkg/storage/unified/sql/db" - "xorm.io/xorm" ) -func getEngineMySQL(getter *sectionGetter, tracer tracing.Tracer) (*xorm.Engine, error) { +func getEngineMySQL(getter confGetter, tracer tracing.Tracer) (*xorm.Engine, error) { config := mysql.NewConfig() - config.User = getter.String("db_user") - config.Passwd = getter.String("db_pass") + config.User = getter.String("user") + // accept the core Grafana jargon of `password` as well, originally Unified + // Storage used `pass` + config.Passwd = cmp.Or(getter.String("pass"), getter.String("password")) config.Net = "tcp" - config.Addr = getter.String("db_host") - config.DBName = getter.String("db_name") + config.Addr = getter.String("host") + config.DBName = getter.String("name") config.Params = map[string]string{ // See: https://dev.mysql.com/doc/refman/en/sql-mode.html "@@SESSION.sql_mode": "ANSI", } - tls := getter.String("db_tls") + tls := getter.String("tls") if tls != "" { config.Params["tls"] = tls } @@ -39,8 +42,8 @@ func getEngineMySQL(getter *sectionGetter, tracer tracing.Tracer) (*xorm.Engine, //config.MultiStatements = true // TODO: do we want to support these? 
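The `db_` prefix handling above is what lets the same engine constructors serve both the `[resource_api]` section (keys like `db_user`) and the core `[database]` section (bare keys like `user`), as shown in the sketch below. This is a hedged illustration assuming the `newConfGetter` helper introduced in dbimpl/util.go further down; the function name and return values are hypothetical:

```go
package dbimpl

import (
	"cmp"

	"github.com/grafana/grafana/pkg/setting"
)

// configSketch is an illustration only, not shipped code: it shows how the
// prefixed getter resolves keys for the two supported INI sections.
func configSketch(cfg *setting.Cfg) (user, passwd, fallbackType string) {
	// [resource_api] keys keep the historical "db_" prefix...
	getter := newConfGetter(cfg.SectionWithEnvOverrides("resource_api"), "db_")
	// ...while the core [database] section uses bare keys.
	fallback := newConfGetter(cfg.SectionWithEnvOverrides("database"), "")

	user = getter.String("user") // reads resource_api.db_user
	// cmp.Or returns the first non-empty value, so the historical db_pass
	// wins over db_password when both are set.
	passwd = cmp.Or(getter.String("pass"), getter.String("password"))
	fallbackType = fallback.String("type") // reads database.type
	return user, passwd, fallbackType
}
```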
- // config.ServerPubKey = getter.String("db_server_pub_key") - // config.TLSConfig = getter.String("db_tls_config_name") + // config.ServerPubKey = getter.String("server_pub_key") + // config.TLSConfig = getter.String("tls_config_name") if err := getter.Err(); err != nil { return nil, fmt.Errorf("config error: %w", err) @@ -65,12 +68,14 @@ func getEngineMySQL(getter *sectionGetter, tracer tracing.Tracer) (*xorm.Engine, return engine, nil } -func getEnginePostgres(getter *sectionGetter, tracer tracing.Tracer) (*xorm.Engine, error) { +func getEnginePostgres(getter confGetter, tracer tracing.Tracer) (*xorm.Engine, error) { dsnKV := map[string]string{ - "user": getter.String("db_user"), - "password": getter.String("db_pass"), - "dbname": getter.String("db_name"), - "sslmode": cmp.Or(getter.String("db_sslmode"), "disable"), + "user": getter.String("user"), + // accept the core Grafana jargon of `password` as well, originally + // Unified Storage used `pass` + "password": cmp.Or(getter.String("pass"), getter.String("password")), + "dbname": getter.String("name"), + "sslmode": cmp.Or(getter.String("sslmode"), "disable"), } // TODO: probably interesting: @@ -88,7 +93,7 @@ func getEnginePostgres(getter *sectionGetter, tracer tracing.Tracer) (*xorm.Engi // More on Postgres connection string parameters: // https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-CONNSTRING - hostport := getter.String("db_host") + hostport := getter.String("host") if err := getter.Err(); err != nil { return nil, fmt.Errorf("config error: %w", err) @@ -96,7 +101,7 @@ func getEnginePostgres(getter *sectionGetter, tracer tracing.Tracer) (*xorm.Engi host, port, err := splitHostPortDefault(hostport, "127.0.0.1", "5432") if err != nil { - return nil, fmt.Errorf("invalid db_host: %w", err) + return nil, fmt.Errorf("invalid host: %w", err) } dsnKV["host"] = host dsnKV["port"] = port diff --git a/pkg/storage/unified/sql/db/dbimpl/dbEngine_test.go b/pkg/storage/unified/sql/db/dbimpl/dbEngine_test.go index ab761b98744..659d3bd1bb6 100644 --- a/pkg/storage/unified/sql/db/dbimpl/dbEngine_test.go +++ b/pkg/storage/unified/sql/db/dbimpl/dbEngine_test.go @@ -6,22 +6,33 @@ import ( "github.com/stretchr/testify/assert" ) -func newValidMySQLGetter() *sectionGetter { - return newTestSectionGetter(map[string]string{ - "db_type": dbTypeMySQL, - "db_host": "/var/run/mysql.socket", - "db_name": "grafana", - "db_user": "user", - "db_password": "password", - }) +func newValidMySQLGetter(withKeyPrefix bool) confGetter { + var prefix string + if withKeyPrefix { + prefix = "db_" + } + return newTestConfGetter(map[string]string{ + prefix + "type": dbTypeMySQL, + prefix + "host": "/var/run/mysql.socket", + prefix + "name": "grafana", + prefix + "user": "user", + prefix + "password": "password", + }, prefix) } func TestGetEngineMySQLFromConfig(t *testing.T) { t.Parallel() - t.Run("happy path", func(t *testing.T) { + t.Run("happy path - with key prefix", func(t *testing.T) { t.Parallel() - engine, err := getEngineMySQL(newValidMySQLGetter(), nil) + engine, err := getEngineMySQL(newValidMySQLGetter(true), nil) + assert.NotNil(t, engine) + assert.NoError(t, err) + }) + + t.Run("happy path - without key prefix", func(t *testing.T) { + t.Parallel() + engine, err := getEngineMySQL(newValidMySQLGetter(false), nil) assert.NotNil(t, engine) assert.NoError(t, err) }) @@ -29,13 +40,13 @@ func TestGetEngineMySQLFromConfig(t *testing.T) { t.Run("invalid string", func(t *testing.T) { t.Parallel() - getter := newTestSectionGetter(map[string]string{ + 
getter := newTestConfGetter(map[string]string{ "db_type": dbTypeMySQL, "db_host": "/var/run/mysql.socket", "db_name": string(invalidUTF8ByteSequence), "db_user": "user", "db_password": "password", - }) + }, "db_") engine, err := getEngineMySQL(getter, nil) assert.Nil(t, engine) assert.Error(t, err) @@ -43,35 +54,46 @@ func TestGetEngineMySQLFromConfig(t *testing.T) { }) } -func newValidPostgresGetter() *sectionGetter { - return newTestSectionGetter(map[string]string{ - "db_type": dbTypePostgres, - "db_host": "localhost", - "db_name": "grafana", - "db_user": "user", - "db_password": "password", - }) +func newValidPostgresGetter(withKeyPrefix bool) confGetter { + var prefix string + if withKeyPrefix { + prefix = "db_" + } + return newTestConfGetter(map[string]string{ + prefix + "type": dbTypePostgres, + prefix + "host": "localhost", + prefix + "name": "grafana", + prefix + "user": "user", + prefix + "password": "password", + }, prefix) } func TestGetEnginePostgresFromConfig(t *testing.T) { t.Parallel() - t.Run("happy path", func(t *testing.T) { + t.Run("happy path - with key prefix", func(t *testing.T) { t.Parallel() - engine, err := getEnginePostgres(newValidPostgresGetter(), nil) + engine, err := getEnginePostgres(newValidPostgresGetter(true), nil) + assert.NotNil(t, engine) + assert.NoError(t, err) + }) + + t.Run("happy path - without key prefix", func(t *testing.T) { + t.Parallel() + engine, err := getEnginePostgres(newValidPostgresGetter(false), nil) assert.NotNil(t, engine) assert.NoError(t, err) }) t.Run("invalid string", func(t *testing.T) { t.Parallel() - getter := newTestSectionGetter(map[string]string{ + getter := newTestConfGetter(map[string]string{ "db_type": dbTypePostgres, "db_host": string(invalidUTF8ByteSequence), "db_name": "grafana", "db_user": "user", "db_password": "password", - }) + }, "db_") engine, err := getEnginePostgres(getter, nil) assert.Nil(t, engine) @@ -81,13 +103,13 @@ func TestGetEnginePostgresFromConfig(t *testing.T) { t.Run("invalid hostport", func(t *testing.T) { t.Parallel() - getter := newTestSectionGetter(map[string]string{ + getter := newTestConfGetter(map[string]string{ "db_type": dbTypePostgres, "db_host": "1:1:1", "db_name": "grafana", "db_user": "user", "db_password": "password", - }) + }, "db_") engine, err := getEnginePostgres(getter, nil) assert.Nil(t, engine) diff --git a/pkg/storage/unified/sql/db/dbimpl/dbimpl.go b/pkg/storage/unified/sql/db/dbimpl/dbimpl.go index b9498b87cd5..4756a1f4d56 100644 --- a/pkg/storage/unified/sql/db/dbimpl/dbimpl.go +++ b/pkg/storage/unified/sql/db/dbimpl/dbimpl.go @@ -2,6 +2,7 @@ package dbimpl import ( "context" + "errors" "fmt" "sync" @@ -12,7 +13,6 @@ import ( infraDB "github.com/grafana/grafana/pkg/infra/db" "github.com/grafana/grafana/pkg/infra/log" - "github.com/grafana/grafana/pkg/services/featuremgmt" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/storage/unified/sql/db" "github.com/grafana/grafana/pkg/storage/unified/sql/db/migrations" @@ -23,8 +23,17 @@ const ( dbTypePostgres = "postgres" ) -func ProvideResourceDB(grafanaDB infraDB.DB, cfg *setting.Cfg, features featuremgmt.FeatureToggles, tracer tracing.Tracer) (db.DBProvider, error) { - p, err := newResourceDBProvider(grafanaDB, cfg, features, tracer) +const grafanaDBInstrumentQueriesKey = "instrument_queries" + +var errGrafanaDBInstrumentedNotSupported = errors.New("the Resource API is " + + "attempting to leverage the database from core Grafana defined in the" + + " [database] INI section since a database configuration was 
not provided" + + " in the [resource_api] section. But we detected that the key `" + + grafanaDBInstrumentQueriesKey + "` is enabled in [database], and that" + + " setup is currently unsupported. Please, consider disabling that flag") + +func ProvideResourceDB(grafanaDB infraDB.DB, cfg *setting.Cfg, tracer tracing.Tracer) (db.DBProvider, error) { + p, err := newResourceDBProvider(grafanaDB, cfg, tracer) if err != nil { return nil, fmt.Errorf("provide Resource DB: %w", err) } @@ -54,41 +63,67 @@ type resourceDBProvider struct { logQueries bool } -func newResourceDBProvider(grafanaDB infraDB.DB, cfg *setting.Cfg, features featuremgmt.FeatureToggles, tracer tracing.Tracer) (p *resourceDBProvider, err error) { - // TODO: This should be renamed resource_api - getter := §ionGetter{ - DynamicSection: cfg.SectionWithEnvOverrides("resource_api"), - } +func newResourceDBProvider(grafanaDB infraDB.DB, cfg *setting.Cfg, tracer tracing.Tracer) (p *resourceDBProvider, err error) { + // Resource API has other configs in its section besides database ones, so + // we prefix them with "db_". We use the database config from core Grafana + // as fallback, and as it uses a dedicated INI section, then keys are not + // prefixed with "db_" + getter := newConfGetter(cfg.SectionWithEnvOverrides("resource_api"), "db_") + fallbackGetter := newConfGetter(cfg.SectionWithEnvOverrides("database"), "") p = &resourceDBProvider{ cfg: cfg, log: log.New("entity-db"), - logQueries: getter.Key("log_queries").MustBool(false), + logQueries: getter.Bool("log_queries"), migrateFunc: migrations.MigrateResourceStore, } - switch dbType := getter.Key("db_type").MustString(""); dbType { - case dbTypePostgres: + dbType := getter.String("type") + grafanaDBType := fallbackGetter.String("type") + switch { + // First try with the config in the "resource_api" section, which is + // specific to Unified Storage + case dbType == dbTypePostgres: p.registerMetrics = true p.engine, err = getEnginePostgres(getter, tracer) return p, err - case dbTypeMySQL: + case dbType == dbTypeMySQL: p.registerMetrics = true p.engine, err = getEngineMySQL(getter, tracer) return p, err - case "": + // TODO: add support for SQLite + + case dbType != "": + return p, fmt.Errorf("invalid db type specified: %s", dbType) + + // If we have an empty Resource API db config, try with the core Grafana + // database config + + case grafanaDBType == dbTypePostgres: + p.registerMetrics = true + p.engine, err = getEnginePostgres(fallbackGetter, tracer) + return p, err + + case grafanaDBType == dbTypeMySQL: + p.registerMetrics = true + p.engine, err = getEngineMySQL(fallbackGetter, tracer) + return p, err + + // TODO: add support for SQLite + + case grafanaDB != nil: // try to use the grafana db connection - if grafanaDB == nil { - return p, fmt.Errorf("no db connection provided") + + if fallbackGetter.Bool(grafanaDBInstrumentQueriesKey) { + return nil, errGrafanaDBInstrumentedNotSupported } p.engine = grafanaDB.GetEngine() return p, nil default: - // TODO: sqlite support - return p, fmt.Errorf("invalid db type specified: %s", dbType) + return p, fmt.Errorf("no db connection provided") } } @@ -102,7 +137,6 @@ func (p *resourceDBProvider) init(ctx context.Context) (db.DB, error) { _ = p.logQueries // TODO: configure SQL logging // TODO: change the migrator to use db.DB instead of xorm - // Skip migrations if feature flag is not enabled if p.migrateFunc != nil { err := p.migrateFunc(ctx, p.engine, p.cfg) if err != nil { diff --git a/pkg/storage/unified/sql/db/dbimpl/dbimpl_test.go 
b/pkg/storage/unified/sql/db/dbimpl/dbimpl_test.go new file mode 100644 index 00000000000..bb317509888 --- /dev/null +++ b/pkg/storage/unified/sql/db/dbimpl/dbimpl_test.go @@ -0,0 +1,159 @@ +package dbimpl + +import ( + "context" + "database/sql" + "net/http" + "testing" + + "github.com/google/uuid" + "github.com/stretchr/testify/require" + "go.opentelemetry.io/otel/trace" + traceNoop "go.opentelemetry.io/otel/trace/noop" + ini "gopkg.in/ini.v1" + + "github.com/grafana/grafana/pkg/bus" + infraDB "github.com/grafana/grafana/pkg/infra/db" + "github.com/grafana/grafana/pkg/infra/tracing" + "github.com/grafana/grafana/pkg/services/sqlstore" + "github.com/grafana/grafana/pkg/setting" +) + +type ( + // cfgSectionMap represents an INI section, mapping from an INI key to an + // INI value. + cfgSectionMap = map[string]string + // cfgMap is a map from INI section name to INI section contents. + cfgMap = map[string]cfgSectionMap +) + +// setupDBForGrafana modifies `m` in the following way: +// +// [database] +// type = sqlite3 +// path = unique-random-path +// +// After that, it initializes a temporary SQLite filesystem-backed database that +// is later deleted when the test finishes. +func setupDBForGrafana(t *testing.T, ctx context.Context, m cfgMap) { + dbSection, ok := m["database"] + if !ok { + dbSection = cfgSectionMap{} + m["database"] = dbSection + } + dbSection["type"] = "sqlite3" + dbSection["path"] = t.TempDir() + "/" + uuid.New().String() + + db, err := sql.Open("sqlite3", "file:"+dbSection["path"]) + require.NoError(t, err) + + _, err = db.ExecContext(ctx, ` + CREATE TABLE user ( + id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, + version INTEGER NOT NULL, + login TEXT NOT NULL, + email TEXT NOT NULL, + name TEXT NULL, + password TEXT NULL, + salt TEXT NULL, + rands TEXT NULL, + company TEXT NULL, + org_id INTEGER NOT NULL, + is_admin INTEGER NOT NULL, + email_verified INTEGER NULL, + theme TEXT NULL, + created DATETIME NOT NULL, + updated DATETIME NOT NULL, + help_flags1 INTEGER NOT NULL DEFAULT 0, + last_seen_at DATETIME NULL, + is_disabled INTEGER NOT NULL DEFAULT 0, + is_service_account BOOLEAN DEFAULT 0, + uid TEXT NULL + ); + CREATE TABLE org ( + id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, + version INTEGER NOT NULL, + name TEXT NOT NULL, + address1 TEXT NULL, + address2 TEXT NULL, + city TEXT NULL, + state TEXT NULL, + zip_code TEXT NULL, + country TEXT NULL, + billing_email TEXT NULL, + created DATETIME NOT NULL, + updated DATETIME NOT NULL + ); + CREATE TABLE org_user ( + id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, + org_id INTEGER NOT NULL, + user_id INTEGER NOT NULL, + role TEXT NOT NULL, + created DATETIME NOT NULL, + updated DATETIME NOT NULL + ); + `) + require.NoError(t, err) +} + +func newTestInfraDB(t *testing.T, m cfgMap) infraDB.DB { + t.Helper() + // nil migrations means no migrations + sqlstoreDB, err := sqlstore.ProvideService( + newCfgFromIniMap(t, m), // *setting.Cfg + featureTogglesNop{}, // featuremgmt.FeatureToggles + nil, // registry.DatabaseMigrator + nopBus{}, // github.com/grafana/grafana/pkg/bus.Bus + newNopTestGrafanaTracer(), + ) + require.NoError(t, err) + + return sqlstoreDB +} + +func newCfgFromIniMap(t *testing.T, m cfgMap) *setting.Cfg { + t.Helper() + cfg, err := setting.NewCfgFromINIFile(newTestINIFile(t, m)) + require.NoError(t, err) + return cfg +} + +func newTestINIFile(t *testing.T, m cfgMap) *ini.File { + t.Helper() + f := ini.Empty() + for sectionName, kvs := range m { + section, err := f.NewSection(sectionName) + require.NoError(t, 
err) + for k, v := range kvs { + _, err := section.NewKey(k, v) + require.NoError(t, err) + } + } + return f +} + +type ( + testGrafanaTracer struct { + trace.Tracer + } + featureTogglesNop struct{} + nopBus struct{} +) + +func (testGrafanaTracer) Inject(context.Context, http.Header, trace.Span) {} +func newNopTestGrafanaTracer() tracing.Tracer { + return testGrafanaTracer{traceNoop.NewTracerProvider().Tracer("test")} +} + +func (featureTogglesNop) IsEnabled(context.Context, string) bool { + return false +} +func (featureTogglesNop) IsEnabledGlobally(string) bool { + return false +} +func (featureTogglesNop) GetEnabled(context.Context) map[string]bool { + return map[string]bool{} +} + +func (nopBus) Publish(context.Context, bus.Msg) error { return nil } +func (nopBus) AddEventListener(bus.HandlerFunc) {} diff --git a/pkg/storage/unified/sql/db/dbimpl/regression_incident_2144_test.go b/pkg/storage/unified/sql/db/dbimpl/regression_incident_2144_test.go new file mode 100644 index 00000000000..1d0b6ba9b02 --- /dev/null +++ b/pkg/storage/unified/sql/db/dbimpl/regression_incident_2144_test.go @@ -0,0 +1,204 @@ +package dbimpl + +import ( + "context" + "database/sql" + "database/sql/driver" + "sync" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/grafana/grafana/pkg/util/testutil" +) + +// defined in the standard library in database/sql/ctxutil.go +const noIsolationLevelSupportErrStr = "sql: driver does not support non-" + + "default isolation level" + +var _ driver.Driver = driverWithoutIsolationLevel{} +var _ driver.Driver = driverWithIsolationLevel{} + +const ( + driverWithoutIsolationLevelName = "test driver without isolation levels" + driverWithIsolationLevelName = "test driver with isolation levels" +) + +var registerTestDriversOnce sync.Once + +func registerTestSQLDrivers() { + registerTestDriversOnce.Do(func() { + sql.Register(driverWithoutIsolationLevelName, driverWithoutIsolationLevel{}) + sql.Register(driverWithIsolationLevelName, driverWithIsolationLevel{}) + }) +} + +type ( + // without isolation level + + driverWithoutIsolationLevel struct{} + connWithoutIsolationLevel struct{} + + // with isolation level + + driverWithIsolationLevel struct{} + connWithIsolationLevel struct { + connWithoutIsolationLevel + } + + // common + + testStmt struct{} + testTx struct{} + testResults struct{} + testRows struct{} +) + +// driver.Driver + +func (driverWithoutIsolationLevel) Open(name string) (driver.Conn, error) { + return connWithoutIsolationLevel{}, nil +} + +func (driverWithIsolationLevel) Open(name string) (driver.Conn, error) { + return connWithIsolationLevel{}, nil +} + +// driver.Conn + +func (connWithoutIsolationLevel) Prepare(query string) (driver.Stmt, error) { + return testStmt{}, nil +} +func (connWithoutIsolationLevel) Close() error { + return nil +} +func (connWithoutIsolationLevel) Begin() (driver.Tx, error) { + return testTx{}, nil +} + +func (connWithIsolationLevel) BeginTx(context.Context, driver.TxOptions) (driver.Tx, error) { + return testTx{}, nil +} + +// driver.Stmt + +func (testStmt) Close() error { return nil } +func (testStmt) NumInput() int { return 0 } +func (testStmt) Exec(args []driver.Value) (driver.Result, error) { return testResults{}, nil } +func (testStmt) Query(args []driver.Value) (driver.Rows, error) { return testRows{}, nil } + +// driver.Tx + +func (testTx) Commit() error { return nil } +func (testTx) Rollback() error { return nil } + +// driver.Results + +func (testResults) LastInsertId() (int64, error) { return 1, nil } +func 
(testResults) RowsAffected() (int64, error) { return 1, nil } + +// driver.Rows + +func (testRows) Columns() []string { return nil } +func (testRows) Close() error { return nil } +func (testRows) Next(dest []driver.Value) error { return nil } + +func TestReproIncident2144IndependentOfGrafanaDB(t *testing.T) { + t.Parallel() + registerTestSQLDrivers() + txOpts := &sql.TxOptions{ + Isolation: sql.LevelSerializable, + } + + t.Run("driver without isolation level should fail", func(t *testing.T) { + t.Parallel() + ctx := testutil.NewDefaultTestContext(t) + + db, err := sql.Open(driverWithoutIsolationLevelName, "") + require.NoError(t, err) + require.NotNil(t, db) + + _, err = db.BeginTx(ctx, txOpts) + require.Error(t, err) + require.Equal(t, noIsolationLevelSupportErrStr, err.Error()) + }) + + t.Run("driver with isolation level should work", func(t *testing.T) { + t.Parallel() + ctx := testutil.NewDefaultTestContext(t) + + db, err := sql.Open(driverWithIsolationLevelName, "") + require.NoError(t, err) + require.NotNil(t, db) + + _, err = db.BeginTx(ctx, txOpts) + require.NoError(t, err) + }) +} + +func TestReproIncident2144UsingGrafanaDB(t *testing.T) { + t.Parallel() + txOpts := &sql.TxOptions{ + Isolation: sql.LevelSerializable, + } + + t.Run("core Grafana db without instrumentation preserves driver ability to use isolation levels", + func(t *testing.T) { + t.Parallel() + + t.Run("base behaviour is preserved", func(t *testing.T) { + t.Parallel() + ctx := testutil.NewDefaultTestContext(t) + cfgMap := cfgMap{} + setupDBForGrafana(t, ctx, cfgMap) + grafanaDB := newTestInfraDB(t, cfgMap) + db := grafanaDB.GetEngine().DB().DB + _, err := db.BeginTx(ctx, txOpts) + require.NoError(t, err) + }) + + t.Run("Resource API does not fail and correctly uses Grafana DB as fallback", + func(t *testing.T) { + t.Parallel() + ctx := testutil.NewDefaultTestContext(t) + cfgMap := cfgMap{} + cfg := newCfgFromIniMap(t, cfgMap) + setupDBForGrafana(t, ctx, cfgMap) + grafanaDB := newTestInfraDB(t, cfgMap) + resourceDB, err := ProvideResourceDB(grafanaDB, cfg, testGrafanaTracer{}) + require.NotNil(t, resourceDB) + require.NoError(t, err) + }) + }) + + t.Run("core Grafana db instrumentation removes driver ability to use isolation levels", + func(t *testing.T) { + t.Parallel() + ctx := testutil.NewDefaultTestContext(t) + cfgMap := cfgMap{ + "database": cfgSectionMap{ + grafanaDBInstrumentQueriesKey: "true", + }, + } + setupDBForGrafana(t, ctx, cfgMap) + grafanaDB := newTestInfraDB(t, cfgMap) + + t.Run("base failure caused by instrumentation", func(t *testing.T) { + t.Parallel() + ctx := testutil.NewDefaultTestContext(t) + db := grafanaDB.GetEngine().DB().DB + _, err := db.BeginTx(ctx, txOpts) + require.Error(t, err) + require.Equal(t, noIsolationLevelSupportErrStr, err.Error()) + }) + + t.Run("Resource API provides a reasonable error for this case", func(t *testing.T) { + t.Parallel() + cfg := newCfgFromIniMap(t, cfgMap) + resourceDB, err := ProvideResourceDB(grafanaDB, cfg, testGrafanaTracer{}) + require.Nil(t, resourceDB) + require.Error(t, err) + require.ErrorIs(t, err, errGrafanaDBInstrumentedNotSupported) + }) + }) +} diff --git a/pkg/storage/unified/sql/db/dbimpl/util.go b/pkg/storage/unified/sql/db/dbimpl/util.go index 4065121fe56..da142be7d04 100644 --- a/pkg/storage/unified/sql/db/dbimpl/util.go +++ b/pkg/storage/unified/sql/db/dbimpl/util.go @@ -14,17 +14,35 @@ import ( var errInvalidUTF8Sequence = errors.New("invalid UTF-8 sequence") +type confGetter interface { + Err() error + Bool(key string) bool + 
String(key string) string +} + +func newConfGetter(ds *setting.DynamicSection, keyPrefix string) confGetter { + return &sectionGetter{ + ds: ds, + keyPrefix: keyPrefix, + } +} + type sectionGetter struct { - *setting.DynamicSection - err error + ds *setting.DynamicSection + keyPrefix string + err error } func (g *sectionGetter) Err() error { return g.err } +func (g *sectionGetter) Bool(key string) bool { + return g.ds.Key(g.keyPrefix + key).MustBool(false) +} + func (g *sectionGetter) String(key string) string { - v := g.DynamicSection.Key(key).MustString("") + v := g.ds.Key(g.keyPrefix + key).MustString("") if !utf8.ValidString(v) { g.err = fmt.Errorf("value for key %q: %w", key, errInvalidUTF8Sequence) diff --git a/pkg/storage/unified/sql/db/dbimpl/util_test.go b/pkg/storage/unified/sql/db/dbimpl/util_test.go index 8364d0f653f..9fff4209e3e 100644 --- a/pkg/storage/unified/sql/db/dbimpl/util_test.go +++ b/pkg/storage/unified/sql/db/dbimpl/util_test.go @@ -17,35 +17,75 @@ func setSectionKeyValues(section *setting.DynamicSection, m map[string]string) { } } -func newTestSectionGetter(m map[string]string) *sectionGetter { +func newTestConfGetter(m map[string]string, keyPrefix string) confGetter { section := setting.NewCfg().SectionWithEnvOverrides("entity_api") setSectionKeyValues(section, m) - return &sectionGetter{ - DynamicSection: section, - } + return newConfGetter(section, keyPrefix) } func TestSectionGetter(t *testing.T) { t.Parallel() var ( - key = "the key" - val = string(invalidUTF8ByteSequence) + key = "the key" + keyBoolTrue = "I'm true" + keyBoolFalse = "not me!" + prefix = "this is some prefix" + val = string(invalidUTF8ByteSequence) ) - g := newTestSectionGetter(map[string]string{ - key: val, + t.Run("with prefix", func(t *testing.T) { + t.Parallel() + + g := newTestConfGetter(map[string]string{ + prefix + key: val, + prefix + keyBoolTrue: "YES", + prefix + keyBoolFalse: "0", + }, prefix) + + require.False(t, g.Bool("whatever bool")) + require.NoError(t, g.Err()) + + require.False(t, g.Bool(keyBoolFalse)) + require.NoError(t, g.Err()) + + require.True(t, g.Bool(keyBoolTrue)) + require.NoError(t, g.Err()) + + require.Empty(t, g.String("whatever string")) + require.NoError(t, g.Err()) + + require.Empty(t, g.String(key)) + require.Error(t, g.Err()) + require.ErrorIs(t, g.Err(), errInvalidUTF8Sequence) }) - v := g.String("whatever") - require.Empty(t, v) - require.NoError(t, g.Err()) + t.Run("without prefix", func(t *testing.T) { + t.Parallel() - v = g.String(key) - require.Empty(t, v) - require.Error(t, g.Err()) - require.ErrorIs(t, g.Err(), errInvalidUTF8Sequence) + g := newTestConfGetter(map[string]string{ + key: val, + keyBoolTrue: "true", + keyBoolFalse: "f", + }, "") + + require.False(t, g.Bool("whatever bool")) + require.NoError(t, g.Err()) + + require.False(t, g.Bool(keyBoolFalse)) + require.NoError(t, g.Err()) + + require.True(t, g.Bool(keyBoolTrue)) + require.NoError(t, g.Err()) + + require.Empty(t, g.String("whatever string")) + require.NoError(t, g.Err()) + + require.Empty(t, g.String(key)) + require.Error(t, g.Err()) + require.ErrorIs(t, g.Err(), errInvalidUTF8Sequence) + }) } func TestMakeDSN(t *testing.T) { diff --git a/pkg/storage/unified/sql/db/migrations/resource_mig.go b/pkg/storage/unified/sql/db/migrations/resource_mig.go index adfd75a0b73..15e13c08a38 100644 --- a/pkg/storage/unified/sql/db/migrations/resource_mig.go +++ b/pkg/storage/unified/sql/db/migrations/resource_mig.go @@ -10,8 +10,7 @@ func initResourceTables(mg *migrator.Migrator) string { marker := 
"Initialize resource tables" mg.AddMigration(marker, &migrator.RawSQLMigration{}) - tables := []migrator.Table{} - tables = append(tables, migrator.Table{ + resource_table := migrator.Table{ Name: "resource", Columns: []*migrator.Column{ // primary identifier @@ -33,9 +32,8 @@ func initResourceTables(mg *migrator.Migrator) string { Indices: []*migrator.Index{ {Cols: []string{"namespace", "group", "resource", "name"}, Type: migrator.UniqueIndex}, }, - }) - - tables = append(tables, migrator.Table{ + } + resource_history_table := migrator.Table{ Name: "resource_history", Columns: []*migrator.Column{ // primary identifier @@ -62,7 +60,9 @@ func initResourceTables(mg *migrator.Migrator) string { // index to support watch poller {Cols: []string{"resource_version"}, Type: migrator.IndexType}, }, - }) + } + + tables := []migrator.Table{resource_table, resource_history_table} // tables = append(tables, migrator.Table{ // Name: "resource_label_set", @@ -97,5 +97,13 @@ func initResourceTables(mg *migrator.Migrator) string { } } + mg.AddMigration("Add column previous_resource_version in resource_history", migrator.NewAddColumnMigration(resource_history_table, &migrator.Column{ + Name: "previous_resource_version", Type: migrator.DB_BigInt, Nullable: true, + })) + + mg.AddMigration("Add column previous_resource_version in resource", migrator.NewAddColumnMigration(resource_table, &migrator.Column{ + Name: "previous_resource_version", Type: migrator.DB_BigInt, Nullable: true, + })) + return marker } diff --git a/pkg/storage/unified/sql/queries.go b/pkg/storage/unified/sql/queries.go index 893169c3f3a..dc31b7726c4 100644 --- a/pkg/storage/unified/sql/queries.go +++ b/pkg/storage/unified/sql/queries.go @@ -70,6 +70,7 @@ func (r sqlResourceRequest) Validate() error { type historyPollResponse struct { Key resource.ResourceKey ResourceVersion int64 + PreviousRV *int64 Value []byte Action int } @@ -93,6 +94,10 @@ func (r *sqlResourceHistoryPollRequest) Validate() error { } func (r *sqlResourceHistoryPollRequest) Results() (*historyPollResponse, error) { + prevRV := r.Response.PreviousRV + if prevRV == nil { + *prevRV = int64(0) + } return &historyPollResponse{ Key: resource.ResourceKey{ Namespace: r.Response.Key.Namespace, @@ -101,6 +106,7 @@ func (r *sqlResourceHistoryPollRequest) Results() (*historyPollResponse, error) Name: r.Response.Key.Name, }, ResourceVersion: r.Response.ResourceVersion, + PreviousRV: prevRV, Value: r.Response.Value, Action: r.Response.Action, }, nil diff --git a/pkg/storage/unified/sql/queries_test.go b/pkg/storage/unified/sql/queries_test.go index b5ac7f57217..df7ed9167f7 100644 --- a/pkg/storage/unified/sql/queries_test.go +++ b/pkg/storage/unified/sql/queries_test.go @@ -104,6 +104,18 @@ func TestUnifiedStorageQueries(t *testing.T) { }, }, }, + sqlResourceHistoryPoll: { + { + Name: "single path", + Data: &sqlResourceHistoryPollRequest{ + SQLTemplate: mocks.NewTestingSQLTemplate(), + Resource: "res", + Group: "group", + SinceResourceVersion: 1234, + Response: new(historyPollResponse), + }, + }, + }, sqlResourceUpdateRV: { { @@ -143,7 +155,8 @@ func TestUnifiedStorageQueries(t *testing.T) { Data: &sqlResourceRequest{ SQLTemplate: mocks.NewTestingSQLTemplate(), WriteEvent: resource.WriteEvent{ - Key: &resource.ResourceKey{}, + Key: &resource.ResourceKey{}, + PreviousRV: 1234, }, }, }, diff --git a/pkg/storage/unified/sql/server.go b/pkg/storage/unified/sql/server.go index 58770875bc3..bdb108fd47e 100644 --- a/pkg/storage/unified/sql/server.go +++ b/pkg/storage/unified/sql/server.go @@ 
-1,6 +1,10 @@ package sql import ( + "context" + "errors" + + "github.com/grafana/authlib/claims" infraDB "github.com/grafana/grafana/pkg/infra/db" "github.com/grafana/grafana/pkg/infra/tracing" "github.com/grafana/grafana/pkg/services/featuremgmt" @@ -10,12 +14,12 @@ import ( ) // Creates a new ResourceServer -func NewResourceServer(db infraDB.DB, cfg *setting.Cfg, features featuremgmt.FeatureToggles, tracer tracing.Tracer) (resource.ResourceServer, error) { +func NewResourceServer(ctx context.Context, db infraDB.DB, cfg *setting.Cfg, features featuremgmt.FeatureToggles, tracer tracing.Tracer) (resource.ResourceServer, error) { opts := resource.ResourceServerOptions{ Tracer: tracer, } - eDB, err := dbimpl.ProvideResourceDB(db, cfg, features, tracer) + eDB, err := dbimpl.ProvideResourceDB(db, cfg, tracer) if err != nil { return nil, err } @@ -29,6 +33,28 @@ func NewResourceServer(db infraDB.DB, cfg *setting.Cfg, features featuremgmt.Fea if features.IsEnabledGlobally(featuremgmt.FlagUnifiedStorageSearch) { opts.Index = resource.NewResourceIndexServer() + server, err := resource.NewResourceServer(opts) + if err != nil { + return nil, err + } + // initialize the search index + indexer, ok := server.(resource.ResourceIndexer) + if !ok { + return nil, errors.New("index server does not implement ResourceIndexer") + } + _, err = indexer.Index(ctx) + return server, err + } + + if features.IsEnabledGlobally(featuremgmt.FlagKubernetesFolders) { + opts.WriteAccess = resource.WriteAccessHooks{ + Folder: func(ctx context.Context, user claims.AuthInfo, uid string) bool { + // #TODO build on the logic here + // #TODO only enable write access when the resource being written to the folder + // is another folder + return true + }, + } } return resource.NewResourceServer(opts) diff --git a/pkg/storage/unified/sql/service.go b/pkg/storage/unified/sql/service.go index 5b435b5f8d4..03bf7b3634e 100644 --- a/pkg/storage/unified/sql/service.go +++ b/pkg/storage/unified/sql/service.go @@ -87,7 +87,7 @@ func ProvideUnifiedStorageGrpcService( } func (s *service) start(ctx context.Context) error { - server, err := NewResourceServer(s.db, s.cfg, s.features, s.tracing) + server, err := NewResourceServer(ctx, s.db, s.cfg, s.features, s.tracing) if err != nil { return err } diff --git a/pkg/storage/unified/sql/test/integration_test.go b/pkg/storage/unified/sql/test/integration_test.go index 1b70c33b2f7..1ab5b765ef9 100644 --- a/pkg/storage/unified/sql/test/integration_test.go +++ b/pkg/storage/unified/sql/test/integration_test.go @@ -32,9 +32,8 @@ func newServer(t *testing.T) (sql.Backend, resource.ResourceServer) { dbstore := infraDB.InitTestDB(t) cfg := setting.NewCfg() - features := featuremgmt.WithFeatures() - eDB, err := dbimpl.ProvideResourceDB(dbstore, cfg, features, nil) + eDB, err := dbimpl.ProvideResourceDB(dbstore, cfg, nil) require.NoError(t, err) require.NotNil(t, eDB) diff --git a/pkg/storage/unified/sql/testdata/mysql--resource_history_insert-insert into resource_history.sql b/pkg/storage/unified/sql/testdata/mysql--resource_history_insert-insert into resource_history.sql index 27f5000fc9f..d76132ae625 100755 --- a/pkg/storage/unified/sql/testdata/mysql--resource_history_insert-insert into resource_history.sql +++ b/pkg/storage/unified/sql/testdata/mysql--resource_history_insert-insert into resource_history.sql @@ -5,6 +5,7 @@ INSERT INTO `resource_history` `resource`, `namespace`, `name`, + `previous_resource_version`, `value`, `action` ) @@ -14,6 +15,7 @@ INSERT INTO `resource_history` '', '', '', + 1234, 
diff --git a/pkg/storage/unified/sql/testdata/mysql--resource_history_insert-insert into resource_history.sql b/pkg/storage/unified/sql/testdata/mysql--resource_history_insert-insert into resource_history.sql
index 27f5000fc9f..d76132ae625 100755
--- a/pkg/storage/unified/sql/testdata/mysql--resource_history_insert-insert into resource_history.sql
+++ b/pkg/storage/unified/sql/testdata/mysql--resource_history_insert-insert into resource_history.sql
@@ -5,6 +5,7 @@ INSERT INTO `resource_history`
         `resource`,
         `namespace`,
         `name`,
+        `previous_resource_version`,
         `value`,
         `action`
     )
@@ -14,6 +15,7 @@ INSERT INTO `resource_history`
         '',
         '',
         '',
+        1234,
         '[]',
         'UNKNOWN'
     )
diff --git a/pkg/storage/unified/sql/testdata/mysql--resource_history_poll-single path.sql b/pkg/storage/unified/sql/testdata/mysql--resource_history_poll-single path.sql
new file mode 100755
index 00000000000..a29cf35d4da
--- /dev/null
+++ b/pkg/storage/unified/sql/testdata/mysql--resource_history_poll-single path.sql
@@ -0,0 +1,16 @@
+SELECT
+        `resource_version`,
+        `namespace`,
+        `group`,
+        `resource`,
+        `name`,
+        `value`,
+        `action`,
+        `previous_resource_version`
+    FROM `resource_history`
+    WHERE 1 = 1
+        AND `group` = 'group'
+        AND `resource` = 'res'
+        AND `resource_version` > 1234
+    ORDER BY `resource_version` ASC
+;
diff --git a/pkg/storage/unified/sql/testdata/mysql--resource_insert-simple.sql b/pkg/storage/unified/sql/testdata/mysql--resource_insert-simple.sql
index 0897963b19c..5bf3424e55b 100755
--- a/pkg/storage/unified/sql/testdata/mysql--resource_insert-simple.sql
+++ b/pkg/storage/unified/sql/testdata/mysql--resource_insert-simple.sql
@@ -5,6 +5,7 @@ INSERT INTO `resource`
         `resource`,
         `namespace`,
         `name`,
+        `previous_resource_version`,
         `value`,
         `action`
     )
@@ -14,6 +15,7 @@ INSERT INTO `resource`
         'rr',
         'nn',
         'name',
+        123,
         '[]',
         'ADDED'
     )
diff --git a/pkg/storage/unified/sql/testdata/mysql--resource_version_insert-single path.sql b/pkg/storage/unified/sql/testdata/mysql--resource_version_insert-single path.sql
index 350f77472ab..f99b2b00148 100755
--- a/pkg/storage/unified/sql/testdata/mysql--resource_version_insert-single path.sql
+++ b/pkg/storage/unified/sql/testdata/mysql--resource_version_insert-single path.sql
@@ -7,6 +7,6 @@ INSERT INTO `resource_version`
     VALUES (
         '',
         '',
-        1
+        2
     )
 ;
diff --git a/pkg/storage/unified/sql/testdata/postgres--resource_history_insert-insert into resource_history.sql b/pkg/storage/unified/sql/testdata/postgres--resource_history_insert-insert into resource_history.sql
index 643741bc3b1..a15a8db4b1e 100755
--- a/pkg/storage/unified/sql/testdata/postgres--resource_history_insert-insert into resource_history.sql
+++ b/pkg/storage/unified/sql/testdata/postgres--resource_history_insert-insert into resource_history.sql
@@ -5,6 +5,7 @@ INSERT INTO "resource_history"
         "resource",
         "namespace",
         "name",
+        "previous_resource_version",
         "value",
         "action"
     )
@@ -14,6 +15,7 @@ INSERT INTO "resource_history"
         '',
         '',
         '',
+        1234,
         '[]',
         'UNKNOWN'
     )
diff --git a/pkg/storage/unified/sql/testdata/postgres--resource_history_poll-single path.sql b/pkg/storage/unified/sql/testdata/postgres--resource_history_poll-single path.sql
new file mode 100755
index 00000000000..d038317381a
--- /dev/null
+++ b/pkg/storage/unified/sql/testdata/postgres--resource_history_poll-single path.sql
@@ -0,0 +1,16 @@
+SELECT
+        "resource_version",
+        "namespace",
+        "group",
+        "resource",
+        "name",
+        "value",
+        "action",
+        "previous_resource_version"
+    FROM "resource_history"
+    WHERE 1 = 1
+        AND "group" = 'group'
+        AND "resource" = 'res'
+        AND "resource_version" > 1234
+    ORDER BY "resource_version" ASC
+;
diff --git a/pkg/storage/unified/sql/testdata/postgres--resource_insert-simple.sql b/pkg/storage/unified/sql/testdata/postgres--resource_insert-simple.sql
index 9150eb59fef..fc2d22be1c4 100755
--- a/pkg/storage/unified/sql/testdata/postgres--resource_insert-simple.sql
+++ b/pkg/storage/unified/sql/testdata/postgres--resource_insert-simple.sql
@@ -5,6 +5,7 @@ INSERT INTO "resource"
         "resource",
         "namespace",
         "name",
+        "previous_resource_version",
         "value",
         "action"
     )
@@ -14,6 +15,7 @@ INSERT INTO "resource"
         'rr',
         'nn',
         'name',
+        123,
         '[]',
         'ADDED'
     )
diff --git a/pkg/storage/unified/sql/testdata/postgres--resource_version_insert-single path.sql b/pkg/storage/unified/sql/testdata/postgres--resource_version_insert-single path.sql
index 99003d5fefe..14b25955585 100755
--- a/pkg/storage/unified/sql/testdata/postgres--resource_version_insert-single path.sql
+++ b/pkg/storage/unified/sql/testdata/postgres--resource_version_insert-single path.sql
@@ -7,6 +7,6 @@ INSERT INTO "resource_version"
     VALUES (
         '',
         '',
-        1
+        2
     )
 ;
diff --git a/pkg/storage/unified/sql/testdata/sqlite--resource_history_insert-insert into resource_history.sql b/pkg/storage/unified/sql/testdata/sqlite--resource_history_insert-insert into resource_history.sql
index 643741bc3b1..a15a8db4b1e 100755
--- a/pkg/storage/unified/sql/testdata/sqlite--resource_history_insert-insert into resource_history.sql
+++ b/pkg/storage/unified/sql/testdata/sqlite--resource_history_insert-insert into resource_history.sql
@@ -5,6 +5,7 @@ INSERT INTO "resource_history"
         "resource",
         "namespace",
         "name",
+        "previous_resource_version",
         "value",
         "action"
     )
@@ -14,6 +15,7 @@ INSERT INTO "resource_history"
         '',
         '',
         '',
+        1234,
         '[]',
         'UNKNOWN'
     )
diff --git a/pkg/storage/unified/sql/testdata/sqlite--resource_history_poll-single path.sql b/pkg/storage/unified/sql/testdata/sqlite--resource_history_poll-single path.sql
new file mode 100755
index 00000000000..d038317381a
--- /dev/null
+++ b/pkg/storage/unified/sql/testdata/sqlite--resource_history_poll-single path.sql
@@ -0,0 +1,16 @@
+SELECT
+        "resource_version",
+        "namespace",
+        "group",
+        "resource",
+        "name",
+        "value",
+        "action",
+        "previous_resource_version"
+    FROM "resource_history"
+    WHERE 1 = 1
+        AND "group" = 'group'
+        AND "resource" = 'res'
+        AND "resource_version" > 1234
+    ORDER BY "resource_version" ASC
+;
diff --git a/pkg/storage/unified/sql/testdata/sqlite--resource_insert-simple.sql b/pkg/storage/unified/sql/testdata/sqlite--resource_insert-simple.sql
index 9150eb59fef..fc2d22be1c4 100755
--- a/pkg/storage/unified/sql/testdata/sqlite--resource_insert-simple.sql
+++ b/pkg/storage/unified/sql/testdata/sqlite--resource_insert-simple.sql
@@ -5,6 +5,7 @@ INSERT INTO "resource"
         "resource",
         "namespace",
         "name",
+        "previous_resource_version",
         "value",
         "action"
     )
@@ -14,6 +15,7 @@ INSERT INTO "resource"
         'rr',
         'nn',
         'name',
+        123,
         '[]',
         'ADDED'
     )
diff --git a/pkg/storage/unified/sql/testdata/sqlite--resource_version_insert-single path.sql b/pkg/storage/unified/sql/testdata/sqlite--resource_version_insert-single path.sql
index 99003d5fefe..14b25955585 100755
--- a/pkg/storage/unified/sql/testdata/sqlite--resource_version_insert-single path.sql
+++ b/pkg/storage/unified/sql/testdata/sqlite--resource_version_insert-single path.sql
@@ -7,6 +7,6 @@ INSERT INTO "resource_version"
     VALUES (
         '',
         '',
-        1
+        2
     )
 ;
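The new resource_history_poll query (identical across the three dialects above, modulo identifier quoting) is what a watcher would run to pick up every change after a known resource version, with previous_resource_version available for ordering and gap detection. A rough sketch of a consumer is below; the types and the direct database/sql usage are illustrative assumptions — the real store goes through a SQL template layer, not shown in this diff. The query is written in the sqlite flavor, which accepts both '?' placeholders and double-quoted identifiers:

package sql

import (
    "context"
    stdsql "database/sql"
)

// historyEvent mirrors the columns returned by resource_history_poll.
// previousRV lets a consumer detect a missed intermediate version of the
// same object before acting on an event.
type historyEvent struct {
    rv, previousRV int64
    namespace      string
    group          string
    resource       string
    name           string
    action         string
    value          []byte
}

// pollHistory reads every history row written after sinceRV for one
// group/resource pair, in ascending resource_version order.
func pollHistory(ctx context.Context, db *stdsql.DB, group, res string, sinceRV int64) ([]historyEvent, error) {
    rows, err := db.QueryContext(ctx, `
        SELECT "resource_version", "namespace", "group", "resource", "name",
               "value", "action", "previous_resource_version"
          FROM "resource_history"
         WHERE "group" = ? AND "resource" = ? AND "resource_version" > ?
         ORDER BY "resource_version" ASC`,
        group, res, sinceRV)
    if err != nil {
        return nil, err
    }
    defer rows.Close()

    var events []historyEvent
    for rows.Next() {
        var e historyEvent
        if err := rows.Scan(&e.rv, &e.namespace, &e.group, &e.resource,
            &e.name, &e.value, &e.action, &e.previousRV); err != nil {
            return nil, err
        }
        events = append(events, e)
    }
    return events, rows.Err()
}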
diff --git a/pkg/tsdb/grafanads/grafana.go b/pkg/tsdb/grafanads/grafana.go
index e3eb5c7f8bd..4a7cc09413b 100644
--- a/pkg/tsdb/grafanads/grafana.go
+++ b/pkg/tsdb/grafanads/grafana.go
@@ -15,8 +15,10 @@ import (
     "github.com/grafana/grafana/pkg/components/simplejson"
     "github.com/grafana/grafana/pkg/infra/log"
     "github.com/grafana/grafana/pkg/services/datasources"
+    "github.com/grafana/grafana/pkg/services/featuremgmt"
     "github.com/grafana/grafana/pkg/services/searchV2"
     "github.com/grafana/grafana/pkg/services/store"
+    "github.com/grafana/grafana/pkg/services/unifiedSearch"
     testdatasource "github.com/grafana/grafana/pkg/tsdb/grafana-testdata-datasource"
 )
@@ -51,15 +53,17 @@ var (
     )
 )

-func ProvideService(search searchV2.SearchService, store store.StorageService) *Service {
-    return newService(search, store)
+func ProvideService(search searchV2.SearchService, searchNext unifiedSearch.SearchService, store store.StorageService, features featuremgmt.FeatureToggles) *Service {
+    return newService(search, searchNext, store, features)
 }

-func newService(search searchV2.SearchService, store store.StorageService) *Service {
+func newService(search searchV2.SearchService, searchNext unifiedSearch.SearchService, store store.StorageService, features featuremgmt.FeatureToggles) *Service {
     s := &Service{
-        search: search,
-        store:  store,
-        log:    log.New("grafanads"),
+        search:     search,
+        searchNext: searchNext,
+        store:      store,
+        log:        log.New("grafanads"),
+        features:   features,
     }

     return s
@@ -67,9 +71,11 @@ func newService(search searchV2.SearchService, store store.StorageService) *Serv
 // Service exists regardless of user settings
 type Service struct {
-    search searchV2.SearchService
-    store  store.StorageService
-    log    log.Logger
+    search     searchV2.SearchService
+    searchNext unifiedSearch.SearchService
+    store      store.StorageService
+    log        log.Logger
+    features   featuremgmt.FeatureToggles
 }

 func DataSourceModel(orgId int64) *datasources.DataSource {
@@ -95,7 +101,7 @@ func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest)
         response.Responses[q.RefID] = s.doListQuery(ctx, q)
     case queryTypeRead:
         response.Responses[q.RefID] = s.doReadQuery(ctx, q)
-    case queryTypeSearch:
+    case queryTypeSearch, queryTypeSearchNext:
         response.Responses[q.RefID] = s.doSearchQuery(ctx, req, q)
     default:
         response.Responses[q.RefID] = backend.DataResponse{
@@ -177,6 +183,18 @@ func (s *Service) doRandomWalk(query backend.DataQuery) backend.DataResponse {
 }

 func (s *Service) doSearchQuery(ctx context.Context, req *backend.QueryDataRequest, query backend.DataQuery) backend.DataResponse {
+    m := requestModel{}
+    err := json.Unmarshal(query.JSON, &m)
+    if err != nil {
+        return backend.DataResponse{
+            Error: err,
+        }
+    }
+
+    if s.features.IsEnabled(ctx, featuremgmt.FlagUnifiedStorageSearch) {
+        return *s.searchNext.DoQuery(ctx, req.PluginContext.User, req.PluginContext.OrgID, m.SearchNext)
+    }
+
     searchReadinessCheckResp := s.search.IsReady(ctx, req.PluginContext.OrgID)
     if !searchReadinessCheckResp.IsReady {
         dashboardSearchNotServedRequestsCounter.With(prometheus.Labels{
@@ -192,17 +210,11 @@ func (s *Service) doSearchQuery(ctx context.Context, req *backend.QueryDataReque
         }
     }

-    m := requestModel{}
-    err := json.Unmarshal(query.JSON, &m)
-    if err != nil {
-        return backend.DataResponse{
-            Error: err,
-        }
-    }
     return *s.search.DoDashboardQuery(ctx, req.PluginContext.User, req.PluginContext.OrgID, m.Search)
 }

 type requestModel struct {
-    QueryType string                  `json:"queryType"`
-    Search    searchV2.DashboardQuery `json:"search,omitempty"`
+    QueryType  string                  `json:"queryType"`
+    Search     searchV2.DashboardQuery `json:"search,omitempty"`
+    SearchNext unifiedSearch.Query     `json:"searchNext,omitempty"`
 }
diff --git a/pkg/tsdb/grafanads/query.go b/pkg/tsdb/grafanads/query.go
index 8abaf7b5f8d..af0b7afdd7f 100644
--- a/pkg/tsdb/grafanads/query.go
+++ b/pkg/tsdb/grafanads/query.go
@@ -7,6 +7,9 @@ const (
     // QueryTypeList will list the files in a folder
     queryTypeSearch = "search"

+    // queryTypeSearchNext will perform a search query using the next generation search service
+    queryTypeSearchNext = "searchNext"
+
     // QueryTypeList will list the files in a folder
     queryTypeList = "list"
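Callers select the new code path purely through the queryType field of the query body; a minimal sketch of the wire shape doSearchQuery unmarshals is below. The contents of the searchNext payload are defined by unifiedSearch.Query, and the single field shown is an illustrative guess, not the real schema:

package grafanads

import "encoding/json"

// exampleSearchNext is a query body that would be routed to the
// next-generation search service when the unifiedStorageSearch feature
// toggle is enabled; unknown fields are simply ignored by Unmarshal.
var exampleSearchNext = []byte(`{
    "queryType": "searchNext",
    "searchNext": { "query": "payments dashboard" }
}`)

func decodeExample() (requestModel, error) {
    m := requestModel{}
    err := json.Unmarshal(exampleSearchNext, &m)
    return m, err
}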
diff --git a/public/api-enterprise-spec.json b/public/api-enterprise-spec.json
index e2d491516ff..7a4e13be50b 100644
--- a/public/api-enterprise-spec.json
+++ b/public/api-enterprise-spec.json
@@ -228,7 +228,7 @@
     },
     "/access-control/roles/{roleUID}/assignments": {
       "get": {
-        "description": "Get role assignments for the role with the given UID.\n\nYou need to have a permission with action `teams.roles:list` and scope `teams:id:*` and `users.roles:list` and scope `users:id:*`.",
+        "description": "Get role assignments for the role with the given UID.\nDoes not include role assignments mapped through group attribute sync.\n\nYou need to have a permission with action `teams.roles:list` and scope `teams:id:*` and `users.roles:list` and scope `users:id:*`.",
         "tags": [
           "access_control",
           "enterprise"
@@ -582,7 +582,7 @@
       }
     },
     "put": {
-      "description": "Update the user’s role assignments to match the provided set of UIDs. This will remove any assigned roles that aren’t in the request and add roles that are in the set but are not already assigned to the user.\nIf you want to add or remove a single role, consider using Add a user role assignment or Remove a user role assignment instead.\n\nYou need to have a permission with action `users.roles:add` and `users.roles:remove` and scope `permissions:type:delegate` for each. `permissions:type:delegate` scope ensures that users can only assign or unassign roles which have same, or a subset of permissions which the user has. For example, if a user does not have required permissions for creating users, they won’t be able to assign or unassign a role which will allow to do that. This is done to prevent escalation of privileges.",
+      "description": "Update the user’s role assignments to match the provided set of UIDs. This will remove any assigned roles that aren’t in the request and add roles that are in the set but are not already assigned to the user.\nRoles mapped through group attribute sync are not impacted.\nIf you want to add or remove a single role, consider using Add a user role assignment or Remove a user role assignment instead.\n\nYou need to have a permission with action `users.roles:add` and `users.roles:remove` and scope `permissions:type:delegate` for each. `permissions:type:delegate` scope ensures that users can only assign or unassign roles which have same, or a subset of permissions which the user has. For example, if a user does not have required permissions for creating users, they won’t be able to assign or unassign a role which will allow to do that. This is done to prevent escalation of privileges.",
       "tags": [
         "access_control",
         "enterprise"
@@ -3382,6 +3382,7 @@
       }
     },
     "CorrelationType": {
+      "description": "the type of correlation, either query for containing query information, or external for containing an external URL\n+enum",
       "type": "string"
     },
     "CreateAccessTokenResponseDTO": {
@@ -5419,6 +5420,9 @@
       "name": {
         "type": "string"
       },
+      "parentName": {
+        "type": "string"
+      },
       "refId": {
         "type": "string"
       },
@@ -5438,7 +5442,12 @@
           "DASHBOARD",
           "DATASOURCE",
           "FOLDER",
-          "LIBRARY_ELEMENT"
+          "LIBRARY_ELEMENT",
+          "ALERT_RULE",
+          "CONTACT_POINT",
+          "NOTIFICATION_POLICY",
+          "NOTIFICATION_TEMPLATE",
+          "MUTE_TIMING"
         ]
       }
     }
@@ -6559,6 +6568,9 @@
       "orientation": {
         "type": "string"
       },
+      "pdfShowTemplateVariables": {
+        "type": "boolean"
+      },
       "timeRange": {
         "$ref": "#/definitions/ReportTimeRange"
       }
@@ -8354,6 +8366,10 @@
       "type": "integer",
       "format": "int64"
     },
+    "ExternalSessionId": {
+      "type": "integer",
+      "format": "int64"
+    },
     "Id": {
       "type": "integer",
       "format": "int64"
@@ -8812,6 +8828,9 @@
       "teamId": {
         "type": "integer",
         "format": "int64"
+      },
+      "uid": {
+        "type": "string"
       }
     }
   }
diff --git a/public/api-merged.json b/public/api-merged.json
index a149ee3ead8..51e16c1393a 100644
--- a/public/api-merged.json
+++ b/public/api-merged.json
@@ -228,7 +228,7 @@
     },
     "/access-control/roles/{roleUID}/assignments": {
       "get": {
-        "description": "Get role assignments for the role with the given UID.\n\nYou need to have a permission with action `teams.roles:list` and scope `teams:id:*` and `users.roles:list` and scope `users:id:*`.",
+        "description": "Get role assignments for the role with the given UID.\nDoes not include role assignments mapped through group attribute sync.\n\nYou need to have a permission with action `teams.roles:list` and scope `teams:id:*` and `users.roles:list` and scope `users:id:*`.",
         "tags": [
           "access_control",
           "enterprise"
@@ -582,7 +582,7 @@
       }
     },
     "put": {
-      "description": "Update the user’s role assignments to match the provided set of UIDs. This will remove any assigned roles that aren’t in the request and add roles that are in the set but are not already assigned to the user.\nIf you want to add or remove a single role, consider using Add a user role assignment or Remove a user role assignment instead.\n\nYou need to have a permission with action `users.roles:add` and `users.roles:remove` and scope `permissions:type:delegate` for each. `permissions:type:delegate` scope ensures that users can only assign or unassign roles which have same, or a subset of permissions which the user has. For example, if a user does not have required permissions for creating users, they won’t be able to assign or unassign a role which will allow to do that. This is done to prevent escalation of privileges.",
+      "description": "Update the user’s role assignments to match the provided set of UIDs. This will remove any assigned roles that aren’t in the request and add roles that are in the set but are not already assigned to the user.\nRoles mapped through group attribute sync are not impacted.\nIf you want to add or remove a single role, consider using Add a user role assignment or Remove a user role assignment instead.\n\nYou need to have a permission with action `users.roles:add` and `users.roles:remove` and scope `permissions:type:delegate` for each. `permissions:type:delegate` scope ensures that users can only assign or unassign roles which have same, or a subset of permissions which the user has. For example, if a user does not have required permissions for creating users, they won’t be able to assign or unassign a role which will allow to do that. This is done to prevent escalation of privileges.",
       "tags": [
         "access_control",
         "enterprise"
@@ -16909,6 +16909,9 @@
       "name": {
         "type": "string"
       },
+      "parentName": {
+        "type": "string"
+      },
       "refId": {
         "type": "string"
       },
@@ -16928,7 +16931,12 @@
           "DASHBOARD",
           "DATASOURCE",
           "FOLDER",
-          "LIBRARY_ELEMENT"
+          "LIBRARY_ELEMENT",
+          "ALERT_RULE",
+          "CONTACT_POINT",
+          "NOTIFICATION_POLICY",
+          "NOTIFICATION_TEMPLATE",
+          "MUTE_TIMING"
         ]
       }
     }
@@ -19218,6 +19226,9 @@
       "orientation": {
         "type": "string"
       },
+      "pdfShowTemplateVariables": {
+        "type": "boolean"
+      },
       "timeRange": {
         "$ref": "#/definitions/ReportTimeRange"
       }
@@ -21919,6 +21930,10 @@
       "type": "integer",
       "format": "int64"
     },
+    "ExternalSessionId": {
+      "type": "integer",
+      "format": "int64"
+    },
     "Id": {
       "type": "integer",
       "format": "int64"
@@ -23149,6 +23164,9 @@
       "teamId": {
         "type": "integer",
         "format": "int64"
+      },
+      "uid": {
+        "type": "string"
       }
     }
   }
diff --git a/public/app/AppWrapper.tsx b/public/app/AppWrapper.tsx
index 91a9e5efd4b..d2580059d16 100644
--- a/public/app/AppWrapper.tsx
+++ b/public/app/AppWrapper.tsx
@@ -4,7 +4,7 @@ import { Provider } from 'react-redux';
 import { Switch, RouteComponentProps } from 'react-router-dom';
 import { CompatRoute, Navigate } from 'react-router-dom-v5-compat';

-import { config, navigationLogger, reportInteraction } from '@grafana/runtime';
+import { config, locationService, navigationLogger, reportInteraction } from '@grafana/runtime';
 import { ErrorBoundaryAlert, GlobalStyles, PortalContainer } from '@grafana/ui';
 import { getAppRoutes } from 'app/routes/routes';
 import { store } from 'app/store/store';
@@ -56,7 +56,6 @@ export class AppWrapper extends Component {
   renderRoute = (route: RouteDescriptor) => {
     const roles = route.roles ? route.roles() : [];
-
     return (
       <CompatRoute
         path={route.path}
         key={route.path}
         render={(props: RouteComponentProps) => {
+          const location = locationService.getLocation();
           // TODO[Router]: test this logic
           if (roles?.length) {
             if (!roles.some((r: string) => contextSrv.hasRole(r))) {
@@ -71,7 +71,7 @@
             }
           }

-          return ;
+          return ;
         }}
       />
     );
diff --git a/public/app/app.ts b/public/app/app.ts
index df42edc1df0..936b7b5293a 100644
--- a/public/app/app.ts
+++ b/public/app/app.ts
@@ -399,7 +399,6 @@ function handleRedirectTo(): void {
   }

   if (!contextSrv.user.isSignedIn) {
-    locationService.replace('/login');
     return;
   }
diff --git a/public/app/core/components/AppChrome/AppChrome.tsx b/public/app/core/components/AppChrome/AppChrome.tsx
index e41bc143940..133deae94d3 100644
--- a/public/app/core/components/AppChrome/AppChrome.tsx
+++ b/public/app/core/components/AppChrome/AppChrome.tsx
@@ -92,6 +92,9 @@ export function AppChrome({ children }: Props) {
       Skip to main content
+      {isSingleTopNav && menuDockedAndOpen && (
+        chrome.setMegaMenuOpen(false)} />
+      )}
{isSingleTopNav ? (
- {menuDockedAndOpen && ( + {!isSingleTopNav && menuDockedAndOpen && ( chrome.setMegaMenuOpen(false)} /> )} {!state.chromeless && ( diff --git a/public/app/core/components/AppChrome/MegaMenu/MegaMenuItemText.tsx b/public/app/core/components/AppChrome/MegaMenu/MegaMenuItemText.tsx index 84d090d8584..bb34a6fe969 100644 --- a/public/app/core/components/AppChrome/MegaMenu/MegaMenuItemText.tsx +++ b/public/app/core/components/AppChrome/MegaMenu/MegaMenuItemText.tsx @@ -78,14 +78,14 @@ const getStyles = (theme: GrafanaTheme2, isActive: Props['isActive']) => ({ }), wrapperBookmark: css({ '.pin-icon': { - display: 'none', + visibility: 'hidden', }, '&:hover, &:focus-within': { a: { width: 'calc(100% - 20px)', }, '.pin-icon': { - display: 'inline-flex', + visibility: 'visible', }, }, }), diff --git a/public/app/core/components/AppChrome/TopBar/SingleTopBar.tsx b/public/app/core/components/AppChrome/TopBar/SingleTopBar.tsx index ec10fcb7a58..e7c3def7054 100644 --- a/public/app/core/components/AppChrome/TopBar/SingleTopBar.tsx +++ b/public/app/core/components/AppChrome/TopBar/SingleTopBar.tsx @@ -9,7 +9,6 @@ import { useGrafana } from 'app/core/context/GrafanaContext'; import { contextSrv } from 'app/core/core'; import { t } from 'app/core/internationalization'; import { HOME_NAV_ID } from 'app/core/reducers/navModel'; -import { ScopesSelector } from 'app/features/scopes'; import { useSelector } from 'app/types'; import { Branding } from '../../Branding/Branding'; @@ -61,7 +60,6 @@ export const SingleTopBar = memo(function SingleTopBar({ )} - diff --git a/public/app/core/components/Page/PageHeader.tsx b/public/app/core/components/Page/PageHeader.tsx index c83d3d9d101..d729628189e 100644 --- a/public/app/core/components/Page/PageHeader.tsx +++ b/public/app/core/components/Page/PageHeader.tsx @@ -58,8 +58,8 @@ const getStyles = (theme: GrafanaTheme2) => { display: 'flex', flexDirection: 'row', maxWidth: '100%', + flex: 1, h1: { - display: 'flex', marginBottom: 0, }, }), diff --git a/public/app/core/navigation/GrafanaRoute.tsx b/public/app/core/navigation/GrafanaRoute.tsx index f3718d09aaa..52ab58ba691 100644 --- a/public/app/core/navigation/GrafanaRoute.tsx +++ b/public/app/core/navigation/GrafanaRoute.tsx @@ -25,7 +25,7 @@ export function GrafanaRoute(props: Props) { useEffect(() => { updateBodyClassNames(props.route); cleanupDOM(); - navigationLogger('GrafanaRoute', false, 'Mounted', props.match); + navigationLogger('GrafanaRoute', false, 'Mounted', props.route); return () => { navigationLogger('GrafanaRoute', false, 'Unmounted', props.route); diff --git a/public/app/features/admin/ldap/LdapDrawer.tsx b/public/app/features/admin/ldap/LdapDrawer.tsx index dc84858191b..0533fadb634 100644 --- a/public/app/features/admin/ldap/LdapDrawer.tsx +++ b/public/app/features/admin/ldap/LdapDrawer.tsx @@ -369,17 +369,14 @@ export const LdapDrawerComponent = ({ - { - setValue(`${serverConfig}.client_cert_value`, ''); - setMapCertConfigured({ ...mapCertConfigured, clientCertValue: false }); - }} + type="text" + {...register(`${serverConfig}.client_cert_value`)} /> @@ -401,35 +398,25 @@ export const LdapDrawerComponent = ({ {encryptionProvider === EncryptionProvider.FilePath && ( <> - { - setValue(`${serverConfig}.root_ca_cert`, ''); - setMapCertConfigured({ ...mapCertConfigured, rootCaCertPath: false }); - }} - value={watch(`${serverConfig}.root_ca_cert`)} - onChange={({ currentTarget: { value } }) => setValue(`${serverConfig}.root_ca_cert`, value)} + type="text" + {...register(`${serverConfig}.root_ca_cert`)} /> - 
{ - setValue(`${serverConfig}.client_cert`, ''); - setMapCertConfigured({ ...mapCertConfigured, clientCertPath: false }); - }} - value={watch(`${serverConfig}.client_cert`)} - onChange={({ currentTarget: { value } }) => setValue(`${serverConfig}.client_cert`, value)} + type="text" + {...register(`${serverConfig}.client_cert`)} /> diff --git a/public/app/features/admin/ldap/LdapSettingsPage.tsx b/public/app/features/admin/ldap/LdapSettingsPage.tsx index 1132b95265f..6c518cb973b 100644 --- a/public/app/features/admin/ldap/LdapSettingsPage.tsx +++ b/public/app/features/admin/ldap/LdapSettingsPage.tsx @@ -20,6 +20,7 @@ import { TextLink, Dropdown, MultiSelect, + SecretInput, } from '@grafana/ui'; import { FormPrompt } from 'app/core/components/FormPrompt/FormPrompt'; import { Page } from 'app/core/components/Page/Page'; @@ -98,14 +99,9 @@ export const LdapSettingsPage = () => { const [isLoading, setIsLoading] = useState(true); const [isDrawerOpen, setIsDrawerOpen] = useState(false); + const [isBindPasswordConfigured, setBindPasswordConfigured] = useState(false); const [mapKeyCertConfigured, setMapKeyCertConfigured] = useState({ - // values - rootCaCertValue: false, - clientCertValue: false, clientKeyCertValue: false, - // paths - rootCaCertPath: false, - clientCertPath: false, clientKeyCertPath: false, }); @@ -114,6 +110,7 @@ export const LdapSettingsPage = () => { control, formState: { isDirty }, getValues, + setValue, handleSubmit, register, reset, @@ -130,13 +127,10 @@ export const LdapSettingsPage = () => { serverConfig = payload.settings.config.servers[0]; } setMapKeyCertConfigured({ - rootCaCertValue: serverConfig.root_ca_cert_value?.length > 0, - clientCertValue: isOptionDefined(serverConfig.client_cert_value), clientKeyCertValue: isOptionDefined(serverConfig.client_key_value), - rootCaCertPath: isOptionDefined(serverConfig.root_ca_cert), - clientCertPath: isOptionDefined(serverConfig.client_cert), clientKeyCertPath: isOptionDefined(serverConfig.client_key), }); + setBindPasswordConfigured(isOptionDefined(serverConfig.bind_password)); reset(payload); setIsLoading(false); @@ -325,10 +319,15 @@ export const LdapSettingsPage = () => { /> - { + setValue(`${serverConfig}.bind_password`, ''); + setBindPasswordConfigured(false); + }} + value={watch(`${serverConfig}.bind_password`)} + onChange={({ currentTarget: { value } }) => setValue(`${serverConfig}.bind_password`, value)} /> import(/* webpackChunkName: "NotificationsListPage" */ 'app/features/alerting/unified/Receivers') + () => + import( + /* webpackChunkName: "ContactPoints" */ 'app/features/alerting/unified/components/contact-points/ContactPoints' + ) + ), + }, + { + path: '/alerting/notifications/receivers/new', + roles: evaluateAccess([ + AccessControlAction.AlertingNotificationsRead, + AccessControlAction.AlertingNotificationsExternalRead, + ...PERMISSIONS_CONTACT_POINTS, + ]), + component: importAlertingComponent( + () => + import( + /* webpackChunkName: "NewReceiverView" */ 'app/features/alerting/unified/components/receivers/NewReceiverView' + ) + ), + }, + { + path: '/alerting/notifications/receivers/:name/edit', + roles: evaluateAccess([ + AccessControlAction.AlertingNotificationsWrite, + AccessControlAction.AlertingNotificationsExternalWrite, + AccessControlAction.AlertingNotificationsRead, + AccessControlAction.AlertingNotificationsExternalRead, + // We check any contact point permission here because a user without edit permissions + // still has to be able to visit the "edit" page, because we don't have a separate view for 
edit vs view + // (we just disable the form instead) + ...PERMISSIONS_CONTACT_POINTS, + ]), + component: importAlertingComponent( + () => + import( + /* webpackChunkName: "EditContactPoint" */ 'app/features/alerting/unified/components/contact-points/EditContactPoint' + ) ), }, { @@ -118,60 +151,16 @@ export function getAlertingRoutes(cfg = config): RouteDescriptor[] { ), }, { - path: '/alerting/notifications/:type/new', - roles: evaluateAccess([ - AccessControlAction.AlertingNotificationsWrite, - AccessControlAction.AlertingNotificationsExternalWrite, - ...PERMISSIONS_CONTACT_POINTS_MODIFY, - ]), - component: importAlertingComponent( - () => import(/* webpackChunkName: "NotificationsListPage" */ 'app/features/alerting/unified/Receivers') - ), - }, - { - path: '/alerting/notifications/receivers/:id/edit', - roles: evaluateAccess([ - AccessControlAction.AlertingNotificationsWrite, - AccessControlAction.AlertingNotificationsExternalWrite, - AccessControlAction.AlertingNotificationsRead, - AccessControlAction.AlertingNotificationsExternalRead, - // We check any contact point permission here because a user without edit permissions - // still has to be able to visit the "edit" page, because we don't have a separate view for edit vs view - // (we just disable the form instead) - ...PERMISSIONS_CONTACT_POINTS, - ]), - component: importAlertingComponent( - () => import(/* webpackChunkName: "NotificationsListPage" */ 'app/features/alerting/unified/Receivers') - ), - }, - { - path: '/alerting/notifications/:type/:id/edit', + path: '/alerting/notifications/global-config', roles: evaluateAccess([ AccessControlAction.AlertingNotificationsWrite, AccessControlAction.AlertingNotificationsExternalWrite, ]), component: importAlertingComponent( - () => import(/* webpackChunkName: "NotificationsListPage" */ 'app/features/alerting/unified/Receivers') - ), - }, - { - path: '/alerting/notifications/:type/:id/duplicate', - roles: evaluateAccess([ - AccessControlAction.AlertingNotificationsWrite, - AccessControlAction.AlertingNotificationsExternalWrite, - ]), - component: importAlertingComponent( - () => import(/* webpackChunkName: "NotificationsListPage" */ 'app/features/alerting/unified/Receivers') - ), - }, - { - path: '/alerting/notifications/:type', - roles: evaluateAccess([ - AccessControlAction.AlertingNotificationsWrite, - AccessControlAction.AlertingNotificationsExternalWrite, - ]), - component: importAlertingComponent( - () => import(/* webpackChunkName: "NotificationsListPage" */ 'app/features/alerting/unified/Receivers') + () => + import( + /* webpackChunkName: "GlobalConfig" */ 'app/features/alerting/unified/components/contact-points/components/GlobalConfig' + ) ), }, { diff --git a/public/app/features/alerting/unified/Receivers.test.tsx b/public/app/features/alerting/unified/Receivers.test.tsx index c7619e39f18..6a4d4227e6f 100644 --- a/public/app/features/alerting/unified/Receivers.test.tsx +++ b/public/app/features/alerting/unified/Receivers.test.tsx @@ -1,3 +1,4 @@ +import { Route, Routes } from 'react-router-dom-v5-compat'; import { selectOptionInTest } from 'test/helpers/selectOptionInTest'; import { render, screen, waitFor, userEvent } from 'test/test-utils'; @@ -11,9 +12,10 @@ import { grantUserPermissions } from 'app/features/alerting/unified/mocks'; import { setupDataSources } from 'app/features/alerting/unified/testSetup/datasources'; import { AccessControlAction } from 'app/types'; -import ContactPoints from './Receivers'; - import 'core-js/stable/structured-clone'; +import ContactPoints 
from './components/contact-points/ContactPoints'; +import EditContactPoint from './components/contact-points/EditContactPoint'; +import NewReceiverView from './components/receivers/NewReceiverView'; const server = setupMswServer(); @@ -28,6 +30,21 @@ const saveContactPoint = async () => { return user.click(await screen.findByRole('button', { name: /save contact point/i })); }; +const setup = (location: string) => { + return render( + + } /> + } /> + } /> + , + { + historyOptions: { + initialEntries: [location], + }, + } + ); +}; + beforeEach(() => { grantUserPermissions([ AccessControlAction.AlertingNotificationsRead, @@ -41,18 +58,7 @@ beforeEach(() => { }); it('can save a contact point with a select dropdown', async () => { - const user = userEvent.setup(); - - render(, { - historyOptions: { - initialEntries: [ - { - pathname: `/alerting/notifications/receivers/new`, - search: `?alertmanager=${PROVISIONED_MIMIR_ALERTMANAGER_UID}`, - }, - ], - }, - }); + const { user } = setup(`/alerting/notifications/receivers/new?alertmanager=${PROVISIONED_MIMIR_ALERTMANAGER_UID}`); // Fill out contact point name const contactPointName = await screen.findByPlaceholderText(/name/i); @@ -75,16 +81,7 @@ it('can save a contact point with a select dropdown', async () => { }); it('can save existing Telegram contact point', async () => { - render(, { - historyOptions: { - initialEntries: [ - { - pathname: `/alerting/notifications/receivers/Telegram/edit`, - search: `?alertmanager=${PROVISIONED_MIMIR_ALERTMANAGER_UID}`, - }, - ], - }, - }); + setup(`/alerting/notifications/receivers/Telegram/edit?alertmanager=${PROVISIONED_MIMIR_ALERTMANAGER_UID}`); // Here, we're implicitly testing that our parsing of an existing Telegram integration works correctly // Our mock server will reject a request if we've sent the Chat ID as `0`, diff --git a/public/app/features/alerting/unified/Receivers.tsx b/public/app/features/alerting/unified/Receivers.tsx deleted file mode 100644 index 7b6e3b87052..00000000000 --- a/public/app/features/alerting/unified/Receivers.tsx +++ /dev/null @@ -1,24 +0,0 @@ -import { Route, Switch } from 'react-router-dom'; - -import { withErrorBoundary } from '@grafana/ui'; -import { SafeDynamicImport } from 'app/core/components/DynamicImports/SafeDynamicImport'; - -import { AlertmanagerPageWrapper } from './components/AlertingPageWrapper'; - -const ContactPointsV2 = SafeDynamicImport(() => import('./components/contact-points/ContactPoints')); -const EditContactPoint = SafeDynamicImport(() => import('./components/contact-points/EditContactPoint')); -const NewReceiverView = SafeDynamicImport(() => import('./components/receivers/NewReceiverView')); -const GlobalConfig = SafeDynamicImport(() => import('./components/contact-points/components/GlobalConfig')); - -const ContactPoints = (): JSX.Element => ( - - - - - - - - -); - -export default withErrorBoundary(ContactPoints, { style: 'page' }); diff --git a/public/app/features/alerting/unified/Silences.tsx b/public/app/features/alerting/unified/Silences.tsx index 5bfbb676278..bc257e65a2e 100644 --- a/public/app/features/alerting/unified/Silences.tsx +++ b/public/app/features/alerting/unified/Silences.tsx @@ -1,4 +1,5 @@ import { Route, Switch } from 'react-router-dom'; +import { useLocation } from 'react-router-dom-v5-compat'; import { withErrorBoundary } from '@grafana/ui'; import { @@ -30,25 +31,7 @@ const Silences = () => { - {({ location }) => { - const queryParams = new URLSearchParams(location.search); - - const potentialAlertRuleMatcher = 
parseQueryParamMatchers(queryParams.getAll('matcher')).find( - (m) => m.name === MATCHER_ALERT_RULE_UID - ); - - const potentialRuleUid = potentialAlertRuleMatcher?.value; - - const formValues = getDefaultSilenceFormValues(defaultsFromQuery(queryParams)); - - return ( - - ); - }} + @@ -69,3 +52,23 @@ function SilencesPage() { } export default withErrorBoundary(SilencesPage, { style: 'page' }); + +type SilencesEditorComponentProps = { + selectedAlertmanager: string; +}; +const SilencesEditorComponent = ({ selectedAlertmanager }: SilencesEditorComponentProps) => { + const location = useLocation(); + const queryParams = new URLSearchParams(location.search); + + const potentialAlertRuleMatcher = parseQueryParamMatchers(queryParams.getAll('matcher')).find( + (m) => m.name === MATCHER_ALERT_RULE_UID + ); + + const potentialRuleUid = potentialAlertRuleMatcher?.value; + + const formValues = getDefaultSilenceFormValues(defaultsFromQuery(queryParams)); + + return ( + + ); +}; diff --git a/public/app/features/alerting/unified/Templates.test.tsx b/public/app/features/alerting/unified/Templates.test.tsx index 6d695af5826..9d048d60495 100644 --- a/public/app/features/alerting/unified/Templates.test.tsx +++ b/public/app/features/alerting/unified/Templates.test.tsx @@ -1,4 +1,6 @@ +import { InitialEntry } from 'history/createMemoryHistory'; import * as React from 'react'; +import { Route, Routes } from 'react-router-dom-v5-compat'; import { Props } from 'react-virtualized-auto-sizer'; import { render, screen, within } from 'test/test-utils'; import { byRole } from 'testing-library-selector'; @@ -54,23 +56,35 @@ beforeEach(() => { grantUserPermissions([AccessControlAction.AlertingNotificationsRead, AccessControlAction.AlertingNotificationsWrite]); }); +const setup = (initialEntries: InitialEntry[]) => { + return render( + <> + + + } /> + + , + { + historyOptions: { initialEntries }, + } + ); +}; + describe('Templates routes', () => { it('allows duplication of template with spaces in name', async () => { - render(, { - historyOptions: { initialEntries: [navUrl.duplicate('template%20with%20spaces')] }, - }); + setup([navUrl.duplicate('template%20with%20spaces')]); expect(await screen.findByText('Edit payload')).toBeInTheDocument(); }); it('allows editing of template with spaces in name', async () => { - render(, { historyOptions: { initialEntries: [navUrl.edit('template%20with%20spaces')] } }); + setup([navUrl.edit('template%20with%20spaces')]); expect(await screen.findByText('Edit payload')).toBeInTheDocument(); }); it('renders empty template form', async () => { - render(, { historyOptions: { initialEntries: [navUrl.new] } }); + setup([navUrl.new]); const form = await ui.templateForm.find(); @@ -83,9 +97,7 @@ describe('Templates K8s API', () => { testWithFeatureToggles(['alertingApiServer']); it('form edit renders with correct form values', async () => { - render(, { - historyOptions: { initialEntries: [navUrl.edit('k8s-custom-email-resource-name')] }, - }); + setup([navUrl.edit('k8s-custom-email-resource-name')]); const form = await ui.templateForm.find(); @@ -97,9 +109,7 @@ describe('Templates K8s API', () => { }); it('renders duplicate template form with correct values', async () => { - render(, { - historyOptions: { initialEntries: [navUrl.duplicate('k8s-custom-email-resource-name')] }, - }); + setup([navUrl.duplicate('k8s-custom-email-resource-name')]); const form = await ui.templateForm.find(); @@ -111,15 +121,7 @@ describe('Templates K8s API', () => { }); it('updates a template', async () => { - 
const { user } = render( - <> - - - , - { - historyOptions: { initialEntries: [navUrl.edit('k8s-custom-email-resource-name')] }, - } - ); + const { user } = setup([navUrl.edit('k8s-custom-email-resource-name')]); const form = await ui.templateForm.find(); diff --git a/public/app/features/alerting/unified/Templates.tsx b/public/app/features/alerting/unified/Templates.tsx index f7aef31d9fc..dda183041d6 100644 --- a/public/app/features/alerting/unified/Templates.tsx +++ b/public/app/features/alerting/unified/Templates.tsx @@ -1,15 +1,11 @@ -import { Route, Switch } from 'react-router-dom'; +import { Routes, Route } from 'react-router-dom-v5-compat'; import { withErrorBoundary } from '@grafana/ui'; -import { SafeDynamicImport } from 'app/core/components/DynamicImports/SafeDynamicImport'; import { AlertmanagerPageWrapper } from './components/AlertingPageWrapper'; - -const EditMessageTemplate = SafeDynamicImport(() => import('./components/contact-points/EditMessageTemplate')); -const NewMessageTemplate = SafeDynamicImport(() => import('./components/contact-points/NewMessageTemplate')); -const DuplicateMessageTemplate = SafeDynamicImport( - () => import('./components/contact-points/DuplicateMessageTemplate') -); +import DuplicateMessageTemplate from './components/contact-points/DuplicateMessageTemplate'; +import EditMessageTemplate from './components/contact-points/EditMessageTemplate'; +import NewMessageTemplate from './components/contact-points/NewMessageTemplate'; const NotificationTemplates = (): JSX.Element => ( ( accessType="notification" pageNav={{ id: 'templates', text: 'Notification templates', subTitle: 'Create and edit notification templates' }} > - - - - - + + } /> + } /> + } /> + ); diff --git a/public/app/features/alerting/unified/components/contact-points/ContactPoints.test.tsx b/public/app/features/alerting/unified/components/contact-points/ContactPoints.test.tsx index cf5417fc4ac..76d8b271183 100644 --- a/public/app/features/alerting/unified/components/contact-points/ContactPoints.test.tsx +++ b/public/app/features/alerting/unified/components/contact-points/ContactPoints.test.tsx @@ -19,7 +19,7 @@ import { setupDataSources } from '../../testSetup/datasources'; import { DataSourceType, GRAFANA_RULES_SOURCE_NAME } from '../../utils/datasource'; import { ContactPoint } from './ContactPoint'; -import ContactPointsPageContents from './ContactPoints'; +import { ContactPointsPageContents } from './ContactPoints'; import setupMimirFlavoredServer, { MIMIR_DATASOURCE_UID } from './__mocks__/mimirFlavoredServer'; import setupVanillaAlertmanagerFlavoredServer, { VANILLA_ALERTMANAGER_DATASOURCE_UID, diff --git a/public/app/features/alerting/unified/components/contact-points/ContactPoints.tsx b/public/app/features/alerting/unified/components/contact-points/ContactPoints.tsx index 17c4f58cd9b..7f89081d217 100644 --- a/public/app/features/alerting/unified/components/contact-points/ContactPoints.tsx +++ b/public/app/features/alerting/unified/components/contact-points/ContactPoints.tsx @@ -12,6 +12,7 @@ import { TabContent, TabsBar, Text, + withErrorBoundary, } from '@grafana/ui'; import { contextSrv } from 'app/core/core'; import { t, Trans } from 'app/core/internationalization'; @@ -24,6 +25,7 @@ import { usePagination } from '../../hooks/usePagination'; import { useURLSearchParams } from '../../hooks/useURLSearchParams'; import { useAlertmanager } from '../../state/AlertmanagerContext'; import { GRAFANA_RULES_SOURCE_NAME } from '../../utils/datasource'; +import { AlertmanagerPageWrapper } from 
'../AlertingPageWrapper'; import { GrafanaAlertmanagerDeliveryWarning } from '../GrafanaAlertmanagerDeliveryWarning'; import { ContactPoint } from './ContactPoint'; @@ -179,7 +181,7 @@ const useTabQueryParam = () => { return [param, setParam] as const; }; -const ContactPointsPageContents = () => { +export const ContactPointsPageContents = () => { const { selectedAlertmanager } = useAlertmanager(); const [activeTab, setActiveTab] = useTabQueryParam(); @@ -242,4 +244,12 @@ const ContactPointsList = ({ contactPoints, search, pageSize = DEFAULT_PAGE_SIZE ); }; -export default ContactPointsPageContents; +function ContactPointsPage() { + return ( + + + + ); +} + +export default withErrorBoundary(ContactPointsPage, { style: 'page' }); diff --git a/public/app/features/alerting/unified/components/contact-points/DuplicateMessageTemplate.tsx b/public/app/features/alerting/unified/components/contact-points/DuplicateMessageTemplate.tsx index edd4edc2ecb..3ded1052b5d 100644 --- a/public/app/features/alerting/unified/components/contact-points/DuplicateMessageTemplate.tsx +++ b/public/app/features/alerting/unified/components/contact-points/DuplicateMessageTemplate.tsx @@ -1,4 +1,4 @@ -import { RouteChildrenProps } from 'react-router-dom'; +import { useParams } from 'react-router-dom-v5-compat'; import { Alert, LoadingPlaceholder } from '@grafana/ui'; import { EntityNotFound } from 'app/core/components/PageNotFound/EntityNotFound'; @@ -12,13 +12,12 @@ import { TemplateForm } from '../receivers/TemplateForm'; import { useGetNotificationTemplate, useNotificationTemplates } from './useNotificationTemplates'; -type Props = RouteChildrenProps<{ name: string }>; - const notFoundComponent = ; -const DuplicateMessageTemplate = ({ match }: Props) => { +const DuplicateMessageTemplate = () => { const { selectedAlertmanager } = useAlertmanager(); - const templateUid = match?.params.name ? decodeURIComponent(match?.params.name) : undefined; + const { name } = useParams<{ name: string }>(); + const templateUid = name ? 
decodeURIComponent(name) : undefined; const { currentData: template, diff --git a/public/app/features/alerting/unified/components/contact-points/EditContactPoint.tsx b/public/app/features/alerting/unified/components/contact-points/EditContactPoint.tsx index f85c3eeeef0..96a00fc1d33 100644 --- a/public/app/features/alerting/unified/components/contact-points/EditContactPoint.tsx +++ b/public/app/features/alerting/unified/components/contact-points/EditContactPoint.tsx @@ -1,18 +1,18 @@ -import { RouteChildrenProps } from 'react-router-dom'; +import { useParams } from 'react-router-dom-v5-compat'; -import { Alert, LoadingPlaceholder } from '@grafana/ui'; +import { Alert, LoadingPlaceholder, withErrorBoundary } from '@grafana/ui'; import { useGetContactPoint } from 'app/features/alerting/unified/components/contact-points/useContactPoints'; import { stringifyErrorLike } from 'app/features/alerting/unified/utils/misc'; import { useAlertmanager } from '../../state/AlertmanagerContext'; +import { AlertmanagerPageWrapper } from '../AlertingPageWrapper'; import { EditReceiverView } from '../receivers/EditReceiverView'; -type Props = RouteChildrenProps<{ name: string }>; - -const EditContactPoint = ({ match }: Props) => { +const EditContactPoint = () => { const { selectedAlertmanager } = useAlertmanager(); + const { name = '' } = useParams(); - const contactPointName = decodeURIComponent(match?.params.name!); + const contactPointName = decodeURIComponent(name); const { isLoading, error, @@ -42,4 +42,12 @@ const EditContactPoint = ({ match }: Props) => { return ; }; -export default EditContactPoint; +function EditContactPointPage() { + return ( + + + + ); +} + +export default withErrorBoundary(EditContactPointPage, { style: 'page' }); diff --git a/public/app/features/alerting/unified/components/contact-points/EditMessageTemplate.tsx b/public/app/features/alerting/unified/components/contact-points/EditMessageTemplate.tsx index 2e2cfc6273b..6c7df2a7fad 100644 --- a/public/app/features/alerting/unified/components/contact-points/EditMessageTemplate.tsx +++ b/public/app/features/alerting/unified/components/contact-points/EditMessageTemplate.tsx @@ -1,4 +1,4 @@ -import { useParams } from 'react-router-dom'; +import { useParams } from 'react-router-dom-v5-compat'; import { Alert, LoadingPlaceholder } from '@grafana/ui'; import { EntityNotFound } from 'app/core/components/PageNotFound/EntityNotFound'; diff --git a/public/app/features/alerting/unified/components/contact-points/components/GlobalConfig.tsx b/public/app/features/alerting/unified/components/contact-points/components/GlobalConfig.tsx index da56a3f5148..e5514b01d66 100644 --- a/public/app/features/alerting/unified/components/contact-points/components/GlobalConfig.tsx +++ b/public/app/features/alerting/unified/components/contact-points/components/GlobalConfig.tsx @@ -1,7 +1,8 @@ -import { Alert } from '@grafana/ui'; +import { Alert, withErrorBoundary } from '@grafana/ui'; import { useAlertmanagerConfig } from '../../../hooks/useAlertmanagerConfig'; import { useAlertmanager } from '../../../state/AlertmanagerContext'; +import { AlertmanagerPageWrapper } from '../../AlertingPageWrapper'; import { GlobalConfigForm } from '../../receivers/GlobalConfigForm'; const NewMessageTemplate = () => { @@ -27,4 +28,12 @@ const NewMessageTemplate = () => { return ; }; -export default NewMessageTemplate; +function NewMessageTemplatePage() { + return ( + + + + ); +} + +export default withErrorBoundary(NewMessageTemplatePage, { style: 'page' }); diff --git 
a/public/app/features/alerting/unified/components/receivers/NewReceiverView.tsx b/public/app/features/alerting/unified/components/receivers/NewReceiverView.tsx index c84a9492d26..4f2a3322da4 100644 --- a/public/app/features/alerting/unified/components/receivers/NewReceiverView.tsx +++ b/public/app/features/alerting/unified/components/receivers/NewReceiverView.tsx @@ -1,6 +1,8 @@ +import { withErrorBoundary } from '@grafana/ui'; import { useAlertmanager } from 'app/features/alerting/unified/state/AlertmanagerContext'; import { GRAFANA_RULES_SOURCE_NAME } from '../../utils/datasource'; +import { AlertmanagerPageWrapper } from '../AlertingPageWrapper'; import { CloudReceiverForm } from './form/CloudReceiverForm'; import { GrafanaReceiverForm } from './form/GrafanaReceiverForm'; @@ -14,4 +16,12 @@ const NewReceiverView = () => { } }; -export default NewReceiverView; +function NewReceiverViewPage() { + return ( + + + + ); +} + +export default withErrorBoundary(NewReceiverViewPage, { style: 'page' }); diff --git a/public/app/features/alerting/unified/components/rule-editor/query-and-alert-condition/SimpleCondition.tsx b/public/app/features/alerting/unified/components/rule-editor/query-and-alert-condition/SimpleCondition.tsx index 12b967cd743..7307675defd 100644 --- a/public/app/features/alerting/unified/components/rule-editor/query-and-alert-condition/SimpleCondition.tsx +++ b/public/app/features/alerting/unified/components/rule-editor/query-and-alert-condition/SimpleCondition.tsx @@ -143,7 +143,7 @@ export const SimpleConditionEditor = ({ type="number" width={10} onChange={onEvaluateValueChange} - value={simpleCondition.evaluator.params[0] || 0} + value={simpleCondition.evaluator.params[0]} /> )} diff --git a/public/app/features/alerting/unified/hooks/useCombinedRule.ts b/public/app/features/alerting/unified/hooks/useCombinedRule.ts index 7f0239142fc..a01cce491b8 100644 --- a/public/app/features/alerting/unified/hooks/useCombinedRule.ts +++ b/public/app/features/alerting/unified/hooks/useCombinedRule.ts @@ -1,6 +1,7 @@ import { useEffect, useMemo } from 'react'; import { useAsync } from 'react-use'; +import { isGrafanaRulesSource } from 'app/features/alerting/unified/utils/datasource'; import { CombinedRule, RuleIdentifier, RulesSource, RuleWithLocation } from 'app/types/unified-alerting'; import { RulerRuleGroupDTO } from 'app/types/unified-alerting-dto'; @@ -10,7 +11,8 @@ import { getDataSourceByName } from '../utils/datasource'; import * as ruleId from '../utils/rule-id'; import { isCloudRuleIdentifier, isGrafanaRuleIdentifier, isPrometheusRuleIdentifier } from '../utils/rules'; -import { attachRulerRulesToCombinedRules, combineRulesNamespaces } from './useCombinedRuleNamespaces'; +import { attachRulerRulesToCombinedRules, combineRulesNamespace } from './useCombinedRuleNamespaces'; +import { stringifyFolder, useFolder } from './useFolder'; export function useCloudCombinedRulesMatching( ruleName: string, @@ -116,6 +118,10 @@ export function useCombinedRule({ ruleIdentifier, limitAlerts }: Props): Request refetchOnMountOrArgChange: true, } ); + // in case of Grafana folder, we need to use the folder name instead of uid, as in promrules we don't use uid + const isGrafanaRule = isGrafanaRulesSource(ruleSourceName); + const folder = useFolder(isGrafanaRule ? ruleLocation?.namespace : undefined); + const namespaceName = isGrafanaRule && folder.folder ? 
stringifyFolder(folder.folder) : ruleLocation?.namespace; const [ fetchRulerRuleGroup, @@ -139,9 +145,9 @@ export function useCombinedRule({ ruleIdentifier, limitAlerts }: Props): Request return; } - const rulerConfig = rulerRuleGroup ? { [ruleLocation.namespace]: [rulerRuleGroup] } : {}; + const rulerConfig = rulerRuleGroup && namespaceName ? { [namespaceName]: [rulerRuleGroup] } : {}; - const combinedNamespaces = combineRulesNamespaces(ruleSource, promRuleNs, rulerConfig); + const combinedNamespaces = combineRulesNamespace(ruleSource, promRuleNs, rulerConfig); const combinedRules = combinedNamespaces.flatMap((ns) => ns.groups).flatMap((group) => group.rules); const matchingRule = combinedRules.find((rule) => @@ -149,7 +155,7 @@ export function useCombinedRule({ ruleIdentifier, limitAlerts }: Props): Request ); return matchingRule; - }, [ruleIdentifier, ruleSourceName, promRuleNs, rulerRuleGroup, ruleSource, ruleLocation]); + }, [ruleIdentifier, ruleSourceName, promRuleNs, rulerRuleGroup, ruleSource, ruleLocation, namespaceName]); return { loading: isLoadingDsFeatures || isLoadingPromRules || isLoadingRulerGroup, diff --git a/public/app/features/alerting/unified/hooks/useCombinedRuleNamespaces.ts b/public/app/features/alerting/unified/hooks/useCombinedRuleNamespaces.ts index a8d9ac4be77..c95a2b2ebe6 100644 --- a/public/app/features/alerting/unified/hooks/useCombinedRuleNamespaces.ts +++ b/public/app/features/alerting/unified/hooks/useCombinedRuleNamespaces.ts @@ -126,7 +126,7 @@ export function useCombinedRuleNamespaces( }, [promRulesResponses, rulerRulesResponses, rulesSources, grafanaPromRuleNamespaces]); } -export function combineRulesNamespaces( +export function combineRulesNamespace( rulesSource: RulesSource, promNamespaces: RuleNamespace[], rulerRules?: RulerRulesConfigDTO diff --git a/public/app/features/alerting/unified/hooks/useFolder.ts b/public/app/features/alerting/unified/hooks/useFolder.ts index c627453445b..e4fcd3882f8 100644 --- a/public/app/features/alerting/unified/hooks/useFolder.ts +++ b/public/app/features/alerting/unified/hooks/useFolder.ts @@ -32,3 +32,7 @@ export function useFolder(uid?: string): ReturnBag { loading: false, }; } + +export function stringifyFolder({ title, parents }: FolderDTO) { + return parents && parents?.length ? 
[...parents.map((p) => p.title), title].join('/') : title; +} diff --git a/public/app/features/alerting/unified/hooks/useSilenceNavData.test.tsx b/public/app/features/alerting/unified/hooks/useSilenceNavData.test.tsx index 3ba43193666..f06c47805d1 100644 --- a/public/app/features/alerting/unified/hooks/useSilenceNavData.test.tsx +++ b/public/app/features/alerting/unified/hooks/useSilenceNavData.test.tsx @@ -1,11 +1,11 @@ import { render } from '@testing-library/react'; -import { useRouteMatch } from 'react-router-dom'; +import { useMatch } from 'react-router-dom-v5-compat'; import { useSilenceNavData } from './useSilenceNavData'; -jest.mock('react-router-dom', () => ({ - ...jest.requireActual('react-router-dom'), - useRouteMatch: jest.fn(), +jest.mock('react-router-dom-v5-compat', () => ({ + ...jest.requireActual('react-router-dom-v5-compat'), + useMatch: jest.fn(), })); const setup = () => { @@ -21,7 +21,7 @@ const setup = () => { }; describe('useSilenceNavData', () => { it('should return correct nav data when route is "/alerting/silence/new"', () => { - (useRouteMatch as jest.Mock).mockReturnValue({ isExact: true, path: '/alerting/silence/new' }); + (useMatch as jest.Mock).mockImplementation((param) => param === '/alerting/silence/new'); const { result } = setup(); expect(result).toMatchObject({ @@ -30,7 +30,7 @@ describe('useSilenceNavData', () => { }); it('should return correct nav data when route is "/alerting/silence/:id/edit"', () => { - (useRouteMatch as jest.Mock).mockReturnValue({ isExact: true, path: '/alerting/silence/:id/edit' }); + (useMatch as jest.Mock).mockImplementation((param) => param === '/alerting/silence/:id/edit'); const { result } = setup(); expect(result).toMatchObject({ diff --git a/public/app/features/alerting/unified/hooks/useSilenceNavData.ts b/public/app/features/alerting/unified/hooks/useSilenceNavData.ts index 241e54a4c98..9df734e8aa2 100644 --- a/public/app/features/alerting/unified/hooks/useSilenceNavData.ts +++ b/public/app/features/alerting/unified/hooks/useSilenceNavData.ts @@ -1,5 +1,5 @@ import { useEffect, useState } from 'react'; -import { useRouteMatch } from 'react-router-dom'; +import { useMatch } from 'react-router-dom-v5-compat'; import { NavModelItem } from '@grafana/data'; @@ -8,18 +8,19 @@ const defaultPageNav: Partial = { }; export function useSilenceNavData() { - const { isExact, path } = useRouteMatch(); const [pageNav, setPageNav] = useState(); + const isNewPath = useMatch('/alerting/silence/new'); + const isEditPath = useMatch('/alerting/silence/:id/edit'); useEffect(() => { - if (path === '/alerting/silence/new') { + if (isNewPath) { setPageNav({ ...defaultPageNav, id: 'silence-new', text: 'Silence alert rule', subTitle: 'Configure silences to stop notifications from a particular alert rule', }); - } else if (path === '/alerting/silence/:id/edit') { + } else if (isEditPath) { setPageNav({ ...defaultPageNav, id: 'silence-edit', @@ -27,7 +28,7 @@ export function useSilenceNavData() { subTitle: 'Recreate existing silence to stop notifications from a particular alert rule', }); } - }, [path, isExact]); + }, [isEditPath, isNewPath]); return pageNav; } diff --git a/public/app/features/dashboard-scene/inspect/HelpWizard/HelpWizard.test.tsx b/public/app/features/dashboard-scene/inspect/HelpWizard/HelpWizard.test.tsx index da8dd9998f7..276c71f61dd 100644 --- a/public/app/features/dashboard-scene/inspect/HelpWizard/HelpWizard.test.tsx +++ b/public/app/features/dashboard-scene/inspect/HelpWizard/HelpWizard.test.tsx @@ -1,8 +1,11 @@ -import { 
render, screen } from '@testing-library/react'; +import userEvent from '@testing-library/user-event'; +import { render, screen } from 'test/test-utils'; import { FieldType, getDefaultTimeRange, LoadingState, toDataFrame } from '@grafana/data'; import { getPanelPlugin } from '@grafana/data/test/__mocks__/pluginMocks'; +import { config } from '@grafana/runtime'; import { SceneQueryRunner, SceneTimeRange, VizPanel, VizPanelMenu } from '@grafana/scenes'; +import { contextSrv } from 'app/core/services/context_srv'; import { DashboardScene } from '../../scene/DashboardScene'; import { VizPanelLinks, VizPanelLinksMenu } from '../../scene/PanelLinks'; @@ -11,12 +14,65 @@ import { DefaultGridLayoutManager } from '../../scene/layout-default/DefaultGrid import { HelpWizard } from './HelpWizard'; +jest.mock('./utils.ts', () => ({ + ...jest.requireActual('./utils.ts'), + getGithubMarkdown: () => new Uint8Array(1024 * 1024).toString(), +})); + async function setup() { const { panel } = await buildTestScene(); panel.getPlugin = () => getPanelPlugin({ skipDataQuery: false }); return render( {}} />); } + +describe('HelpWizard', () => { + it('should render support bundle info if user has support bundle access', async () => { + config.supportBundlesEnabled = true; + jest.spyOn(contextSrv, 'hasPermission').mockReturnValue(true); + + setup(); + expect(await screen.findByText(/You can also retrieve a support bundle/)).toBeInTheDocument(); + }); + + it('should not render support bundle info if user does not have support bundle access', async () => { + config.supportBundlesEnabled = false; + setup(); + + expect(screen.queryByText('You can also retrieve a support bundle')).not.toBeInTheDocument(); + }); + + it('should show error as alert', async () => { + setup(); + await userEvent.click(await screen.findByTestId('data-testid Tab Data')); + await userEvent.click((await screen.findAllByText('Copy to clipboard'))[0]); + expect(await screen.findByText(/Snapshot is too large/)).toBeInTheDocument(); + }); + + describe('support tab', () => { + it('should render', async () => { + setup(); + expect(await screen.findByText(/Modify the original data to hide sensitive information/)).toBeInTheDocument(); + }); + }); + + describe('data tab', () => { + it('should show "copy to clipboard" button if template is "GitHub comment"', async () => { + setup(); + await userEvent.click(await screen.findByTestId('data-testid Tab Data')); + expect(await screen.findByText('Copy to clipboard')).toBeInTheDocument(); + }); + + it('should show download button for other templates', async () => { + setup(); + await userEvent.click(await screen.findByTestId('data-testid Tab Data')); + await userEvent.click(await screen.findByRole('combobox')); + await userEvent.click(await screen.findByText(/Panel support snapshot/)); + expect(await screen.findByText(/^Download/)).toBeInTheDocument(); + }); + }); +}); + describe('SupportSnapshot', () => { it('Can render', async () => { setup(); diff --git a/public/app/features/dashboard-scene/inspect/HelpWizard/HelpWizard.tsx b/public/app/features/dashboard-scene/inspect/HelpWizard/HelpWizard.tsx index 59662f7c513..960df0696da 100644 --- a/public/app/features/dashboard-scene/inspect/HelpWizard/HelpWizard.tsx +++ b/public/app/features/dashboard-scene/inspect/HelpWizard/HelpWizard.tsx @@ -69,7 +69,7 @@ export function HelpWizard({ panel, onClose }: Props) { return ( ({ }), })); +jest.mock('react-router-dom-v5-compat', () => ({ + ...jest.requireActual('react-router-dom-v5-compat'), + useParams: 
diff --git a/public/app/features/dashboard-scene/pages/DashboardScenePage.test.tsx b/public/app/features/dashboard-scene/pages/DashboardScenePage.test.tsx
--- a/public/app/features/dashboard-scene/pages/DashboardScenePage.test.tsx
+++ b/public/app/features/dashboard-scene/pages/DashboardScenePage.test.tsx
 ({ }), }));
+jest.mock('react-router-dom-v5-compat', () => ({
+  ...jest.requireActual('react-router-dom-v5-compat'),
+  useParams: jest.fn().mockReturnValue({ uid: 'my-dash-uid' }),
+}));
+
 const getPluginLinkExtensionsMock = jest.mocked(getPluginLinkExtensions);

 function setup({ routeProps }: { routeProps?: Partial<GrafanaRouteComponentProps> } = {}) {
@@ -55,12 +61,6 @@ function setup({ routeProps }: { routeProps?: Partial<GrafanaRouteComponentProps>
   it('Can render dashboard', async () => {
     setup();
-    await waitForDashbordToRender();
+    await waitForDashboardToRender();

     expect(await screen.findByTitle('Panel A')).toBeInTheDocument();
     expect(await screen.findByText('Content A')).toBeInTheDocument();
@@ -175,7 +175,7 @@ describe('DashboardScenePage', () => {
   it('routeReloadCounter should trigger reload', async () => {
     const { rerender, props } = setup();

-    await waitForDashbordToRender();
+    await waitForDashboardToRender();

     expect(await screen.findByTitle('Panel A')).toBeInTheDocument();
@@ -196,7 +196,7 @@ describe('DashboardScenePage', () => {
   it('Can inspect panel', async () => {
     setup();

-    await waitForDashbordToRender();
+    await waitForDashboardToRender();

     expect(screen.queryByText('Inspect: Panel B')).not.toBeInTheDocument();
@@ -206,7 +206,7 @@ describe('DashboardScenePage', () => {
     const inspectMenuItem = await screen.findAllByText('Inspect');

-    act(() => fireEvent.click(inspectMenuItem[0]));
+    await userEvent.click(inspectMenuItem[0]);

     expect(await screen.findByText('Inspect: Panel B')).toBeInTheDocument();
@@ -218,7 +218,7 @@ describe('DashboardScenePage', () => {
   it('Can view panel in fullscreen', async () => {
     setup();

-    await waitForDashbordToRender();
+    await waitForDashboardToRender();

     expect(await screen.findByTitle('Panel A')).toBeInTheDocument();
@@ -239,7 +239,7 @@ describe('DashboardScenePage', () => {
   it('shows and hides empty state when panels are added and removed', async () => {
     setup();

-    await waitForDashbordToRender();
+    await waitForDashboardToRender();

     expect(await screen.queryByText('Start your new dashboard by adding a visualization')).not.toBeInTheDocument();
@@ -272,7 +272,7 @@ describe('DashboardScenePage', () => {

     setup();

-    await waitForDashbordToRender();
+    await waitForDashboardToRender();

     const panelAMenu = await screen.findByLabelText('Menu for panel with title Panel A');
     expect(panelAMenu).toBeInTheDocument();
@@ -283,21 +283,17 @@ describe('DashboardScenePage', () => {
   describe('home page', () => {
     it('should render the dashboard when the route is home', async () => {
+      (useParams as jest.Mock).mockReturnValue({});
       setup({
         routeProps: {
           route: {
             ...getRouteComponentProps().route,
             routeName: DashboardRoutes.Home,
           },
-          match: {
-            ...getRouteComponentProps().match,
-            path: '/',
-            params: {},
-          },
         },
       });

-      await waitForDashbordToRender();
+      await waitForDashboardToRender();

       expect(await screen.findByTitle('Panel A')).toBeInTheDocument();
       expect(await screen.findByText('Content A')).toBeInTheDocument();
@@ -328,7 +324,7 @@
 function CustomVizPanel(props: VizProps) {
   return <div>{props.options.content}</div>;
 }

-async function waitForDashbordToRender() {
+async function waitForDashboardToRender() {
   expect(await screen.findByText('Last 6 hours')).toBeInTheDocument();
   expect(await screen.findByTitle('Panel A')).toBeInTheDocument();
 }
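The test changes above follow the same recipe as the component diff that comes next: route params stop arriving through `props.match.params` and are read from the `useParams` hook instead, so tests mock the hook rather than assembling a `match` object. A reduced sketch of both halves (types simplified, helper name illustrative; not the PR's exact code):

```tsx
import { useParams } from 'react-router-dom-v5-compat';

// Component side: read params from the hook; every field may be undefined.
export function useDashboardRouteParams() {
  const { uid, slug, type } = useParams();
  return { uid, slug, type };
}

// Test side: swap the hook per test case, e.g.
// (useParams as jest.Mock).mockReturnValue({ uid: 'my-dash-uid' });
```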
diff --git a/public/app/features/dashboard-scene/pages/DashboardScenePage.tsx b/public/app/features/dashboard-scene/pages/DashboardScenePage.tsx
index ada4df7acac..59b0f8bd8b5 100644
--- a/public/app/features/dashboard-scene/pages/DashboardScenePage.tsx
+++ b/public/app/features/dashboard-scene/pages/DashboardScenePage.tsx
@@ -1,5 +1,6 @@
 // Libraries
 import { useEffect, useMemo } from 'react';
+import { useParams } from 'react-router-dom-v5-compat';
 import { usePrevious } from 'react-use';

 import { PageLayoutType } from '@grafana/data';
@@ -17,13 +18,15 @@ import { DashboardPrompt } from '../saving/DashboardPrompt';

 import { getDashboardScenePageStateManager } from './DashboardScenePageStateManager';

-export interface Props extends GrafanaRouteComponentProps<DashboardPageRouteParams> {}
+export interface Props
+  extends Omit<GrafanaRouteComponentProps<DashboardPageRouteParams>, 'match'> {}

-export function DashboardScenePage({ match, route, queryParams, history }: Props) {
-  const prevMatch = usePrevious(match);
+export function DashboardScenePage({ route, queryParams, history }: Props) {
+  const params = useParams();
+  const { type, slug, uid } = params;
+  const prevMatch = usePrevious({ params });
   const stateManager = getDashboardScenePageStateManager();
   const { dashboard, isLoading, loadError } = stateManager.useState();
-
   // After scene migration is complete and we get rid of old dashboard we should refactor dashboardWatcher so this route reload is not needed
   const routeReloadCounter = (history.location.state as any)?.routeReloadCounter;
@@ -31,14 +34,14 @@
   const comingFromExplore = useMemo(() => {
     return Boolean(store.getObject(DASHBOARD_FROM_LS_KEY));
     // eslint-disable-next-line react-hooks/exhaustive-deps
-  }, [match.params.uid, match.params.slug, match.params.type]);
+  }, [uid, slug, type]);

   useEffect(() => {
-    if (route.routeName === DashboardRoutes.Normal && match.params.type === 'snapshot') {
-      stateManager.loadSnapshot(match.params.slug!);
+    if (route.routeName === DashboardRoutes.Normal && type === 'snapshot') {
+      stateManager.loadSnapshot(slug!);
     } else {
       stateManager.loadDashboard({
-        uid: match.params.uid ?? '',
+        uid: uid ?? '',
         route: route.routeName as DashboardRoutes,
         urlFolderUid: queryParams.folderUid,
         keepDashboardFromExploreInLocalStorage: false,
@@ -48,15 +51,7 @@ export function DashboardScenePage({ route, queryParams, history }: Props) {
     return () => {
       stateManager.clearState();
     };
-  }, [
-    stateManager,
-    match.params.uid,
-    route.routeName,
-    queryParams.folderUid,
-    routeReloadCounter,
-    match.params.slug,
-    match.params.type,
-  ]);
+  }, [stateManager, uid, route.routeName, queryParams.folderUid, routeReloadCounter, slug, type]);

   // Effect that handles explore->dashboards workflow
   useEffect(() => {
@@ -84,9 +79,9 @@ export function DashboardScenePage({ route, queryParams, history }: Props) {
   }

   // Do not render anything when transitioning from one dashboard to another
-  // A bit tricky for transition to or from Home dashbord that does not have a uid in the url (but could have it in the dashboard model)
+  // A bit tricky for transition to or from Home dashboard that does not have a uid in the url (but could have it in the dashboard model)
   // if prevMatch is undefined we are going from normal route to home route or vice versa
-  if (match.params.type !== 'snapshot' && (!prevMatch || match.params.uid !== prevMatch?.params.uid)) {
+  if (type !== 'snapshot' && (!prevMatch || uid !== prevMatch?.params.uid)) {
     console.log('skipping rendering');
     return null;
   }
diff --git a/public/app/features/dashboard-scene/panel-edit/PanelEditor.test.ts b/public/app/features/dashboard-scene/panel-edit/PanelEditor.test.ts
index 78370296e0a..bc9d4be13b2 100644
--- a/public/app/features/dashboard-scene/panel-edit/PanelEditor.test.ts
+++ b/public/app/features/dashboard-scene/panel-edit/PanelEditor.test.ts
@@ -277,13 +277,17 @@ describe('PanelEditor', () => {
   describe('PanelDataPane', () => {
     it('should not exist if panel is skipDataQuery', async () => {
-      const { panelEditor } = await setup({ pluginSkipDataQuery: true });
+      const { panelEditor, panel } = await setup({ pluginSkipDataQuery: true });
       expect(panelEditor.state.dataPane).toBeUndefined();
+
+      expect(panel.state.$data).toBeUndefined();
     });

     it('should exist if panel is supporting querying', async () => {
-      const { panelEditor } = await setup({ pluginSkipDataQuery: false });
+      const { panelEditor, panel } = await setup({ pluginSkipDataQuery: false });
       expect(panelEditor.state.dataPane).toBeDefined();
+
+      expect(panel.state.$data).toBeDefined();
     });
   });
 });
diff --git a/public/app/features/dashboard-scene/panel-edit/PanelEditor.tsx b/public/app/features/dashboard-scene/panel-edit/PanelEditor.tsx
index ed0b3f624ae..1eb6db12bf7 100644
--- a/public/app/features/dashboard-scene/panel-edit/PanelEditor.tsx
+++ b/public/app/features/dashboard-scene/panel-edit/PanelEditor.tsx
@@ -2,18 +2,21 @@
 import * as H from 'history';
 import { debounce } from 'lodash';

 import { NavIndex, PanelPlugin } from '@grafana/data';
-import { locationService } from '@grafana/runtime';
+import { config, locationService } from '@grafana/runtime';
 import {
   PanelBuilders,
+  SceneDataTransformer,
   SceneObjectBase,
   SceneObjectRef,
   SceneObjectState,
   SceneObjectStateChangedEvent,
+  SceneQueryRunner,
   sceneUtils,
   VizPanel,
 } from '@grafana/scenes';
 import { Panel } from '@grafana/schema/dist/esm/index.gen';
 import { OptionFilter } from 'app/features/dashboard/components/PanelEditor/OptionsPaneOptions';
+import { getLastUsedDatasourceFromStorage } from 'app/features/dashboard/utils/dashboard';
 import { saveLibPanel } from 'app/features/library-panels/state/api';

 import { DashboardSceneChangeTracker } from '../saving/DashboardSceneChangeTracker';
@@ -183,13 +186,46 @@ export class PanelEditor extends SceneObjectBase<PanelEditorState> {
   private _updateDataPane(plugin: PanelPlugin) {
     const skipDataQuery = plugin.meta.skipDataQuery;

-    if (skipDataQuery && this.state.dataPane) {
-      locationService.partial({ tab: null }, true);
-      this.setState({ dataPane: undefined });
+    const panel = this.state.panelRef.resolve();
+
+    if (skipDataQuery) {
+      if (this.state.dataPane) {
+        locationService.partial({ tab: null }, true);
+        this.setState({ dataPane: undefined });
+      }
+
+      // clean up data provider when switching from data to non-data panel
+      if (panel.state.$data) {
+        panel.setState({
+          $data: undefined,
+        });
+      }
     }

-    if (!skipDataQuery && !this.state.dataPane) {
-      this.setState({ dataPane: PanelDataPane.createFor(this.getPanel()) });
+    if (!skipDataQuery) {
+      if (!this.state.dataPane) {
+        this.setState({ dataPane: PanelDataPane.createFor(this.getPanel()) });
+      }
+
+      // add data provider when switching from non-data to data panel
+      if (!panel.state.$data) {
+        let ds = getLastUsedDatasourceFromStorage(getDashboardSceneFor(this).state.uid!)?.datasourceUid;
+        if (!ds) {
+          ds = config.defaultDatasource;
+        }
+
+        panel.setState({
+          $data: new SceneDataTransformer({
+            $data: new SceneQueryRunner({
+              datasource: {
+                uid: ds,
+              },
+              queries: [{ refId: 'A' }],
+            }),
+            transformations: [],
+          }),
+        });
+      }
     }
   }
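The `_updateDataPane` rewrite is the substantive change here: when a panel moves from a `skipDataQuery` plugin back to a data-driven one, the editor now attaches a fresh query pipeline. A condensed sketch of just that wiring, using the same scene objects as the diff (the helper name and `datasourceUid` parameter are illustrative):

```tsx
import { SceneDataTransformer, SceneQueryRunner, VizPanel } from '@grafana/scenes';

// Give a panel that has no data provider an empty query pipeline:
// a SceneQueryRunner with one blank query (refId 'A'), wrapped in a
// SceneDataTransformer with no transformations.
function attachDefaultData(panel: VizPanel, datasourceUid: string) {
  if (panel.state.$data) {
    return; // already wired up
  }
  panel.setState({
    $data: new SceneDataTransformer({
      $data: new SceneQueryRunner({
        datasource: { uid: datasourceUid },
        queries: [{ refId: 'A' }],
      }),
      transformations: [],
    }),
  });
}
```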
diff --git a/public/app/features/dashboard-scene/panel-edit/PanelOptions.tsx b/public/app/features/dashboard-scene/panel-edit/PanelOptions.tsx
index 3428dbe707b..202eb942fee 100644
--- a/public/app/features/dashboard-scene/panel-edit/PanelOptions.tsx
+++ b/public/app/features/dashboard-scene/panel-edit/PanelOptions.tsx
@@ -73,7 +73,7 @@ export const PanelOptions = React.memo(({ panel, searchQuery, listMode, d
       }
     ),
     // eslint-disable-next-line react-hooks/exhaustive-deps
-    [searchQuery, panel, fieldConfig]
+    [data, searchQuery, panel, fieldConfig]
   );

   const isSearching = searchQuery.length > 0;
diff --git a/public/app/features/dashboard-scene/scene/DashboardScene.test.tsx b/public/app/features/dashboard-scene/scene/DashboardScene.test.tsx
index f7f49b5b5f8..8c35c9d8618 100644
--- a/public/app/features/dashboard-scene/scene/DashboardScene.test.tsx
+++ b/public/app/features/dashboard-scene/scene/DashboardScene.test.tsx
@@ -1,5 +1,5 @@
 import { CoreApp, GrafanaConfig, LoadingState, getDefaultTimeRange, locationUtil, store } from '@grafana/data';
-import { locationService } from '@grafana/runtime';
+import { locationService, RefreshEvent } from '@grafana/runtime';
 import {
   sceneGraph,
   SceneGridLayout,
@@ -750,6 +750,21 @@ describe('DashboardScene', () => {
       variable.setState({ name: 'A' });
       expect(scene.state.isDirty).toBe(false);
     });
+
+    it('should trigger scene RefreshEvent when a scene variable changes', () => {
+      const varA = new TestVariable({ name: 'A', query: 'A.*', value: 'A.AA', text: '', options: [], delayMs: 0 });
+      const scene = buildTestScene({
+        $variables: new SceneVariableSet({ variables: [varA] }),
+      });
+
+      scene.activate();
+
+      const eventHandler = jest.fn();
+      // this RefreshEvent is from the scenes library
+      scene.subscribeToEvent(RefreshEvent, eventHandler);
+      varA.changeValueTo('A.AB');
+      expect(eventHandler).toHaveBeenCalledTimes(1);
+    });
   });

   describe('When a dashboard is restored', () => {
diff --git a/public/app/features/dashboard-scene/scene/DashboardScene.tsx b/public/app/features/dashboard-scene/scene/DashboardScene.tsx
index 0d037f71285..aa3fee00f82 100644
--- a/public/app/features/dashboard-scene/scene/DashboardScene.tsx
+++ b/public/app/features/dashboard-scene/scene/DashboardScene.tsx
@@ -10,7 +10,7 @@ import {
   DataSourceGetTagKeysOptions,
   DataSourceGetTagValuesOptions,
 } from '@grafana/data';
-import { config, locationService } from '@grafana/runtime';
+import { config, locationService, RefreshEvent } from '@grafana/runtime';
 import {
   sceneGraph,
   SceneGridRow,
@@ -716,6 +716,9 @@ export class DashboardVariableDependency implements SceneVariableDependencyConfi
     if (hasChanged) {
       // Temp solution for some core panels (like dashlist) to know that variables have changed
       appEvents.publish(new VariablesChanged({ refreshAll: true, panelIds: [] }));
+      // Backwards compat with plugins that rely on the RefreshEvent when a
+      // variable changes. TODO: We should redirect plugin devs to use the VariablesChanged event
+      this._dashboard.publishEvent(new RefreshEvent());
     }

     if (variable.state.name === PANEL_SEARCH_VAR) {
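The `publishEvent(new RefreshEvent())` call above exists purely for backwards compatibility with panel plugins that refresh on that event. The consumption pattern on the plugin side looks roughly like this (a sketch following the documented `eventBus` prop usage, not code from this PR):

```tsx
import { useEffect } from 'react';
import { PanelProps } from '@grafana/data';
import { RefreshEvent } from '@grafana/runtime';

export function ExamplePanel({ eventBus }: PanelProps) {
  useEffect(() => {
    // Panels receive dashboard events through the eventBus prop; the dashboard
    // republishing RefreshEvent on variable changes keeps this subscription working.
    const sub = eventBus.getStream(RefreshEvent).subscribe(() => {
      // refetch or recompute panel-local state here
    });
    return () => sub.unsubscribe();
  }, [eventBus]);

  return null;
}
```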
diff --git a/public/app/features/dashboard-scene/scene/NavToolbarActions.tsx b/public/app/features/dashboard-scene/scene/NavToolbarActions.tsx
index 4b00fa431ba..7f952fbcf93 100644
--- a/public/app/features/dashboard-scene/scene/NavToolbarActions.tsx
+++ b/public/app/features/dashboard-scene/scene/NavToolbarActions.tsx
@@ -12,6 +12,7 @@ import {
   Dropdown,
   Icon,
   Menu,
+  Stack,
   ToolbarButton,
   ToolbarButtonRow,
   useStyles2,
@@ -23,6 +24,7 @@ import { contextSrv } from 'app/core/core';
 import { Trans, t } from 'app/core/internationalization';
 import { getDashboardSrv } from 'app/features/dashboard/services/DashboardSrv';
 import { playlistSrv } from 'app/features/playlist/PlaylistSrv';
+import { ScopesSelector } from 'app/features/scopes';

 import { shareDashboardType } from '../../dashboard/components/ShareModal/utils';
 import { PanelEditor, buildPanelEditScene } from '../panel-edit/PanelEditor';
@@ -67,6 +69,7 @@ export function ToolbarActions({ dashboard }: Props) {
   // Means we are not in settings view, fullscreen panel or edit panel
   const isShowingDashboard = !editview && !isViewingPanel && !isEditingPanel;
   const isEditingAndShowingDashboard = isEditing && isShowingDashboard;
+  const showScopesSelector = config.featureToggles.singleTopNav && config.featureToggles.scopeFilters;

   if (!isEditingPanel) {
     // This adds the presence indicators in enterprise
@@ -590,7 +593,12 @@ export function ToolbarActions({ dashboard }: Props) {
     lastGroup = action.group;
   }

-  return <ToolbarButtonRow alignment="right">{actionElements}</ToolbarButtonRow>;
+  return (
+    <Stack justifyContent="space-between">
+      {showScopesSelector && <ScopesSelector />}
+      {actionElements}
+    </Stack>
+  );
 }

 function addDynamicActions(
diff --git a/public/app/features/dashboard-scene/scene/PanelSearchLayout.tsx b/public/app/features/dashboard-scene/scene/PanelSearchLayout.tsx
index 06d2e65d0b8..679bca8cb59 100644
--- a/public/app/features/dashboard-scene/scene/PanelSearchLayout.tsx
+++ b/public/app/features/dashboard-scene/scene/PanelSearchLayout.tsx
@@ -3,7 +3,7 @@ import classNames from 'classnames';
 import { useEffect } from 'react';

 import { GrafanaTheme2 } from '@grafana/data';
-import { VizPanel, sceneGraph } from '@grafana/scenes';
+import { SceneGridRow, VizPanel, sceneGraph } from '@grafana/scenes';
 import { useStyles2 } from '@grafana/ui';
 import { Trans } from 'app/core/internationalization';

@@ -34,26 +34,33 @@ export function PanelSearchLayout({ dashboard, panelSearch = '', panelsPerRow }:

   for (const gridItem of bodyGrid.state.children) {
     if (gridItem instanceof DashboardGridItem) {
-      const panels = gridItem.state.repeatedPanels ?? [gridItem.state.body];
-      for (const panel of panels) {
-        const interpolatedTitle = panel.interpolate(panel.state.title, undefined, 'text').toLowerCase();
-        const interpolatedSearchString = sceneGraph.interpolate(dashboard, panelSearch).toLowerCase();
-        if (interpolatedTitle.includes(interpolatedSearchString)) {
-          filteredPanels.push(panel);
+      filterPanels(gridItem, dashboard, panelSearch, filteredPanels);
+    } else if (gridItem instanceof SceneGridRow) {
+      for (const rowItem of gridItem.state.children) {
+        if (rowItem instanceof DashboardGridItem) {
+          filterPanels(rowItem, dashboard, panelSearch, filteredPanels);
         }
       }
     }
   }

+  if (filteredPanels.length > 0) {
+    return (
+      <div
+        className={classNames(styles.grid, panelsPerRow ? styles.perRow : undefined)}
+        style={{ [panelsPerRowCSSVar]: panelsPerRow } as React.CSSProperties}
+      >
+        {filteredPanels.map((panel) => (
+          <panel.Component model={panel} key={panel.state.key!} />
+        ))}
+      </div>
+    );
+  }
+
   return (
-    <div
-      className={classNames(styles.grid, panelsPerRow ? styles.perRow : undefined)}
-      style={{ [panelsPerRowCSSVar]: panelsPerRow } as React.CSSProperties}
-    >
-      {filteredPanels.map((panel) => (
-        <panel.Component model={panel} key={panel.state.key!} />
-      ))}
-    </div>
+    <div className={styles.noHits}>
+      <Trans i18nKey="…">No matches found</Trans>
+    </div>
   );
 }

@@ -74,5 +81,37 @@ function getStyles(theme: GrafanaTheme2) {
     perRow: css({
      gridTemplateColumns: `repeat(var(${panelsPerRowCSSVar}, 3), 1fr)`,
     }),
+    noHits: css({
+      display: 'grid',
+      placeItems: 'center',
+    }),
   };
 }
+
+function filterPanels(
+  gridItem: DashboardGridItem,
+  dashboard: DashboardScene,
+  searchString: string,
+  filteredPanels: VizPanel[]
+) {
+  const interpolatedSearchString = sceneGraph.interpolate(dashboard, searchString).toLowerCase();
+
+  // activate an inactive repeat panel if one of its children will be matched
+  if (gridItem.state.variableName && !gridItem.isActive) {
+    const panel = gridItem.state.body;
+    const interpolatedTitle = panel.interpolate(panel.state.title, undefined, 'text').toLowerCase();
+    if (interpolatedTitle.includes(interpolatedSearchString)) {
+      gridItem.activate();
+    }
+  }
+
+  const panels = gridItem.state.repeatedPanels ?? [gridItem.state.body];
+  for (const panel of panels) {
+    const interpolatedTitle = panel.interpolate(panel.state.title, undefined, 'text').toLowerCase();
+    if (interpolatedTitle.includes(interpolatedSearchString)) {
+      filteredPanels.push(panel);
+    }
+  }
+
+  return filteredPanels;
+}
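Both the inline loop that was removed and the new `filterPanels` helper match on interpolated strings, so a repeated panel titled `$host - CPU` is searchable by the resolved variable value. The comparison reduces to the following (standalone sketch; the helper name and import paths are assumptions, the calls are the ones used in the diff):

```ts
import { VizPanel, sceneGraph } from '@grafana/scenes';
import { DashboardScene } from './DashboardScene';

// Case-insensitive, variable-aware title match: interpolate template
// variables in both the panel title and the search string before comparing.
function panelMatchesSearch(panel: VizPanel, dashboard: DashboardScene, search: string): boolean {
  const title = panel.interpolate(panel.state.title, undefined, 'text').toLowerCase();
  const query = sceneGraph.interpolate(dashboard, search).toLowerCase();
  return title.includes(query);
}
```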
diff --git a/public/app/features/dashboard-scene/scene/row-actions/RowActions.tsx b/public/app/features/dashboard-scene/scene/row-actions/RowActions.tsx
index a1cc833602b..7015fe78abb 100644
--- a/public/app/features/dashboard-scene/scene/row-actions/RowActions.tsx
+++ b/public/app/features/dashboard-scene/scene/row-actions/RowActions.tsx
@@ -54,16 +54,22 @@ export class RowActions extends SceneObjectBase<RowActionsState> {
       }
     }

-    if (repeat && !repeatBehavior) {
-      const repeatBehavior = new RowRepeaterBehavior({ variableName: repeat });
-      row.setState({ $behaviors: [...(row.state.$behaviors ?? []), repeatBehavior] });
-    } else if (repeatBehavior) {
-      repeatBehavior.removeBehavior();
-    }
-
     if (title !== row.state.title) {
       row.setState({ title });
     }
+
+    if (repeat) {
+      // Remove the repeat behavior if it exists,
+      // to retrigger repeat when adding the new one
+      if (repeatBehavior) {
+        repeatBehavior.removeBehavior();
+      }
+
+      repeatBehavior = new RowRepeaterBehavior({ variableName: repeat });
+      row.setState({ $behaviors: [...(row.state.$behaviors ?? []), repeatBehavior] });
+    } else {
+      repeatBehavior?.removeBehavior();
+    }
   };

   public onDelete = () => {
diff --git a/public/app/features/dashboard-scene/serialization/sceneVariablesSetToVariables.test.ts b/public/app/features/dashboard-scene/serialization/sceneVariablesSetToVariables.test.ts
index 454b7721a06..97750a6d500 100644
--- a/public/app/features/dashboard-scene/serialization/sceneVariablesSetToVariables.test.ts
+++ b/public/app/features/dashboard-scene/serialization/sceneVariablesSetToVariables.test.ts
@@ -190,6 +190,62 @@ describe('sceneVariablesSetToVariables', () => {
     `);
   });

+  it('should handle Query variable when sceneVariablesSetToVariables should discard options', () => {
+    const variable = new QueryVariable({
+      name: 'test',
+      label: 'test-label',
+      description: 'test-desc',
+      value: ['selected-value'],
+      text: ['selected-value-text'],
+      datasource: { uid: 'fake-std', type: 'fake-std' },
+      query: 'query',
+      options: [
+        { label: 'test', value: 'test' },
+        { label: 'test1', value: 'test1' },
+        { label: 'test2', value: 'test2' },
+      ],
+      includeAll: true,
+      allValue: 'test-all',
+      isMulti: true,
+    });
+
+    const set = new SceneVariableSet({
+      variables: [variable],
+    });
+    const result = sceneVariablesSetToVariables(set);
+    expect(result).toHaveLength(1);
+    expect(result[0].options).toEqual([]);
+  });
+
+  it('should handle Query variable when sceneVariablesSetToVariables should keep options', () => {
+    const variable = new QueryVariable({
+      name: 'test',
+      label: 'test-label',
+      description: 'test-desc',
+      value: ['test'],
+      text: ['test'],
+      datasource: { uid: 'fake-std', type: 'fake-std' },
+      query: 'query',
+      options: [
+        { label: 'test', value: 'test' },
+        { label: 'test1', value: 'test1' },
+        { label: 'test2', value: 'test2' },
+      ],
+      includeAll: true,
+      allValue: 'test-all',
+      isMulti: true,
+    });
+
+    const set = new SceneVariableSet({
+      variables: [variable],
+    });
+    const keepQueryOptions = true;
+    const result = sceneVariablesSetToVariables(set, keepQueryOptions);
+    expect(result).toHaveLength(1);
+    expect(result[0].options).not.toEqual([]);
+    expect(result[0].options?.length).toEqual(3);
+  });
+
   it('should handle DatasourceVariable', () => {
     const variable = new DataSourceVariable({
       name: 'test',
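The two tests above pin down the contract of the flag introduced in the next hunk: options on query variables are dropped by default and retained only on request. In caller terms (a usage sketch derived from the diff):

```ts
import { SceneVariableSet } from '@grafana/scenes';
import { sceneVariablesSetToVariables } from './sceneVariablesSetToVariables';

declare const set: SceneVariableSet; // some scene's variable set

// Persisting a dashboard: query-variable options are discarded (options: []).
const forSaveModel = sceneVariablesSetToVariables(set);

// Feeding templateSrv: pass keepQueryOptions so the current options survive.
const forTemplateSrv = sceneVariablesSetToVariables(set, true);
```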
diff --git a/public/app/features/dashboard-scene/serialization/sceneVariablesSetToVariables.ts b/public/app/features/dashboard-scene/serialization/sceneVariablesSetToVariables.ts
index 1ad5608a47a..2eacfecf359 100644
--- a/public/app/features/dashboard-scene/serialization/sceneVariablesSetToVariables.ts
+++ b/public/app/features/dashboard-scene/serialization/sceneVariablesSetToVariables.ts
@@ -4,7 +4,16 @@ import { VariableHide, VariableModel, VariableOption, VariableRefresh, VariableS

 import { getIntervalsQueryFromNewIntervalModel } from '../utils/utils';

-export function sceneVariablesSetToVariables(set: SceneVariables) {
+/**
+ * Converts a SceneVariables object into an array of VariableModel objects.
+ * @param set - The SceneVariables object containing the variables to convert.
+ * @param keepQueryOptions - (Optional) Whether to keep the options of query variables.
+ *   Set it to `false` when variables are saved in the dashboard model, and to `true`
+ *   when variables are consumed by templateSrv, which needs the options to stay in sync.
+ */
+export function sceneVariablesSetToVariables(set: SceneVariables, keepQueryOptions?: boolean) {
   const variables: VariableModel[] = [];
   for (const variable of set.state.variables) {
     const commonProperties = {
@@ -19,7 +28,7 @@ export function sceneVariablesSetToVariables(set: SceneVariables) {
     let options: VariableOption[] = [];
     // Not sure if we actually have to still support this option given
     // that it's not exposed in the UI
-    if (variable.state.refresh === VariableRefresh.never) {
+    if (variable.state.refresh === VariableRefresh.never || keepQueryOptions) {
       options = variableValueOptionsToVariableOptions(variable.state);
     }
     variables.push({
diff --git a/public/app/features/dashboard-scene/sharing/ExportButton/ExportButton.tsx b/public/app/features/dashboard-scene/sharing/ExportButton/ExportButton.tsx
index 52ec254b2d7..5fd21ab76c6 100644
--- a/public/app/features/dashboard-scene/sharing/ExportButton/ExportButton.tsx
+++ b/public/app/features/dashboard-scene/sharing/ExportButton/ExportButton.tsx
@@ -30,7 +30,7 @@ export default function ExportButton({ dashboard }: { dashboard: DashboardScene
         tooltip={t('export.menu.export-as-json-tooltip', 'Export')}
       >
         Export
-
+
diff --git a/public/app/features/dashboard-scene/sharing/ShareButton/ShareMenu.tsx b/public/app/features/dashboard-scene/sharing/ShareButton/ShareMenu.tsx
index b56c817c62c..0c1a6eddb60 100644
--- a/public/app/features/dashboard-scene/sharing/ShareButton/ShareMenu.tsx
+++ b/public/app/features/dashboard-scene/sharing/ShareButton/ShareMenu.tsx
@@ -47,7 +47,6 @@ export default function ShareMenu({ dashboard, panel }: { dashboard: DashboardSc
     testId: newShareButtonSelector.shareInternally,
     icon: 'building',
     label: t('share-dashboard.menu.share-internally-title', 'Share internally'),
-    description: t('share-dashboard.menu.share-internally-description', 'Link settings'),
     renderCondition: true,
     onClick: () => onMenuItemClick(shareDashboardType.link),
   });
diff --git a/public/app/features/dashboard-scene/solo/SoloPanelPage.tsx b/public/app/features/dashboard-scene/solo/SoloPanelPage.tsx
index 6fe49965f4d..6747828b974 100644
--- a/public/app/features/dashboard-scene/solo/SoloPanelPage.tsx
+++ b/public/app/features/dashboard-scene/solo/SoloPanelPage.tsx
@@ -1,6 +1,7 @@
 // Libraries
 import { css } from '@emotion/css';
 import { useEffect } from 'react';
+import { useParams } from 'react-router-dom-v5-compat';

 import { GrafanaTheme2 } from '@grafana/data';
 import { Alert, Spinner, useStyles2 } from '@grafana/ui';
@@ -20,14 +21,15 @@ export interface Props extends GrafanaRouteComponentProps
-    stateManager.loadDashboard({ uid: match.params.uid!, route: DashboardRoutes.Embedded });
+    stateManager.loadDashboard({ uid, route: DashboardRoutes.Embedded });
     return () => stateManager.clearState();
-  }, [stateManager, match, queryParams]);
+  }, [stateManager, queryParams, uid]);

   if (!queryParams.panelId) {
     return ;
diff --git a/public/app/features/dashboard-scene/utils/getVariablesCompatibility.ts b/public/app/features/dashboard-scene/utils/getVariablesCompatibility.ts
index 54c47284639..6a3d1efa16a 100644
--- a/public/app/features/dashboard-scene/utils/getVariablesCompatibility.ts
+++ b/public/app/features/dashboard-scene/utils/getVariablesCompatibility.ts
@@ -5,7 +5,13 @@ import { sceneVariablesSetToVariables } from '../serialization/sceneVariablesSet

 export function getVariablesCompatibility(sceneObject: SceneObject): TypedVariableModel[] {
   const set = sceneGraph.getVariables(sceneObject);
-  const legacyModels = sceneVariablesSetToVariables(set);
+  const keepQueryOptions = true;
+
+  // `sceneVariablesSetToVariables` is also used when transforming the scene to a save model;
+  // in that case query options are stripped out. When `getVariablesCompatibility` is called
+  // from `templateSrv`, however, it must return all variables in the scene, so options are kept.
+  const legacyModels = sceneVariablesSetToVariables(set, keepQueryOptions);

   // Sadly templateSrv.getVariables returns TypedVariableModel but sceneVariablesSetToVariables returns the persisted schema model
   // They look close to identical (differ in what is optional in some places).
diff --git a/public/app/features/dashboard/components/DashNav/DashNav.tsx b/public/app/features/dashboard/components/DashNav/DashNav.tsx
index 86a3986be31..3d92e3731c3 100644
--- a/public/app/features/dashboard/components/DashNav/DashNav.tsx
+++ b/public/app/features/dashboard/components/DashNav/DashNav.tsx
@@ -16,7 +16,6 @@ import {
   Badge,
 } from '@grafana/ui';
 import { updateNavIndex } from 'app/core/actions';
-import { AppChromeUpdate } from 'app/core/components/AppChrome/AppChromeUpdate';
 import { NavToolbarSeparator } from 'app/core/components/AppChrome/NavToolbar/NavToolbarSeparator';
 import config from 'app/core/config';
 import { useAppNotification } from 'app/core/copy/appNotification';
@@ -83,6 +82,7 @@ export const DashNav = memo((props) => {
   // this ensures the component rerenders when the location changes
   useLocation();
   const forceUpdate = useForceUpdate();
+  const isSingleTopNav = config.featureToggles.singleTopNav;

   // We don't really care about the event payload here, only that it triggers a re-render of this component
   useBusEvent(props.dashboard.events, DashboardMetaChangedEvent);
@@ -357,15 +357,11 @@ export const DashNav = memo((props) => {
   };

   return (
-    <AppChromeUpdate
-      actions={
-        <>
-          {renderLeftActions()}
-          <NavToolbarSeparator />
-          {renderRightActions()}
-        </>
-      }
-    />
+    <>
+      {renderLeftActions()}
+      {!isSingleTopNav && <NavToolbarSeparator />}
+      {renderRightActions()}
+    </>
   );
 });
diff --git a/public/app/features/dashboard/components/DashboardPermissions/AccessControlDashboardPermissions.tsx b/public/app/features/dashboard/components/DashboardPermissions/AccessControlDashboardPermissions.tsx
index 6b31090b447..303cae3ac2b 100644
--- a/public/app/features/dashboard/components/DashboardPermissions/AccessControlDashboardPermissions.tsx
+++ b/public/app/features/dashboard/components/DashboardPermissions/AccessControlDashboardPermissions.tsx
@@ -5,12 +5,12 @@ import { AccessControlAction } from 'app/types';

 import { SettingsPageProps } from '../DashboardSettings/types';

-export const AccessControlDashboardPermissions = ({ dashboard, sectionNav }: SettingsPageProps) => {
+export const AccessControlDashboardPermissions = ({ dashboard, sectionNav, toolbar }: SettingsPageProps) => {
   const canSetPermissions = contextSrv.hasPermission(AccessControlAction.DashboardsPermissionsWrite);
   const pageNav = sectionNav.node.parentItem;

   return (
-    <Page navModel={sectionNav} pageNav={pageNav}>
+    <Page navModel={sectionNav} pageNav={pageNav} toolbar={toolbar}>
       <Permissions resource="dashboards" resourceId={dashboard.uid} canSetPermissions={canSetPermissions} />
     </Page>
   );
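The permissions page above shows the shape all of these settings pages converge on under the `singleTopNav` toggle: the parent decides whether toolbar actions render through the old `AppChromeUpdate` or flow down as a `toolbar` prop instead. Sketched in isolation (assuming, as the surrounding hunks suggest, that `Page` accepts and renders a `toolbar` node; types reduced, names illustrative):

```tsx
import { ReactNode } from 'react';
import { config } from '@grafana/runtime';

// Parent side: with singleTopNav on, actions travel down as a prop;
// with it off, the page keeps mounting AppChromeUpdate itself.
function resolveToolbar(actions: ReactNode): ReactNode | undefined {
  return config.featureToggles.singleTopNav ? actions : undefined;
}

// Child side: a settings page simply forwards the prop, e.g.
// <Page navModel={sectionNav} toolbar={toolbar}>...</Page>
```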
diff --git a/public/app/features/dashboard/components/DashboardSettings/AnnotationsSettings.tsx b/public/app/features/dashboard/components/DashboardSettings/AnnotationsSettings.tsx
index 158d80b9dcc..e345cf1533d 100644
--- a/public/app/features/dashboard/components/DashboardSettings/AnnotationsSettings.tsx
+++ b/public/app/features/dashboard/components/DashboardSettings/AnnotationsSettings.tsx
@@ -7,7 +7,7 @@ import { AnnotationSettingsEdit, AnnotationSettingsList, newAnnotationName } fro

 import { SettingsPageProps } from './types';

-export function AnnotationsSettings({ dashboard, editIndex, sectionNav }: SettingsPageProps) {
+export function AnnotationsSettings({ dashboard, editIndex, sectionNav, toolbar }: SettingsPageProps) {
   const onNew = () => {
     const newAnnotation: AnnotationQuery = {
       name: newAnnotationName,
@@ -27,7 +27,7 @@ export function AnnotationsSettings({ dashboard, editIndex, sectionNav }: Settin
   const isEditing = editIndex != null && editIndex < dashboard.annotations.list.length;

   return (
-    <Page navModel={sectionNav}>
+    <Page navModel={sectionNav} toolbar={toolbar}>
       {!isEditing && <AnnotationSettingsList dashboard={dashboard} onNew={onNew} onEdit={onEdit} />}
       {isEditing && <AnnotationSettingsEdit dashboard={dashboard} editIdx={editIndex} />}
     </Page>
   );
diff --git a/public/app/features/dashboard/components/DashboardSettings/DashboardSettings.tsx b/public/app/features/dashboard/components/DashboardSettings/DashboardSettings.tsx
index adb6af01c37..ab2fd4d6a21 100644
--- a/public/app/features/dashboard/components/DashboardSettings/DashboardSettings.tsx
+++ b/public/app/features/dashboard/components/DashboardSettings/DashboardSettings.tsx
@@ -4,7 +4,7 @@ import { useLocation } from 'react-router-dom-v5-compat';

 import { locationUtil, NavModel, NavModelItem } from '@grafana/data';
 import { selectors } from '@grafana/e2e-selectors';
-import { locationService } from '@grafana/runtime';
+import { config, locationService } from '@grafana/runtime';
 import { Button, Stack, Text, ToolbarButtonRow } from '@grafana/ui';
 import { AppChromeUpdate } from 'app/core/components/AppChrome/AppChromeUpdate';
 import { Page } from 'app/core/components/Page/Page';
@@ -36,6 +36,7 @@ const onClose = () => locationService.partial({ editview: null, editIndex: null });

 export function DashboardSettings({ dashboard, editview, pageNav, sectionNav }: Props) {
   const [updateId, setUpdateId] = useState(0);
+  const isSingleTopNav = config.featureToggles.singleTopNav;
   useEffect(() => {
     dashboard.events.subscribe(DashboardMetaChangedEvent, () => setUpdateId((v) => v + 1));
   }, [dashboard]);
@@ -81,8 +82,15 @@ export function DashboardSettings({ dashboard, editview, pageNav, sectionNav }:
   return (
     <>
-      <AppChromeUpdate actions={<ToolbarButtonRow alignment="right">{actions}</ToolbarButtonRow>} />
-      <currentSection.component sectionNav={subSectionNav} dashboard={dashboard} editIndex={editIndex} />
+      {!isSingleTopNav && (
+        <AppChromeUpdate actions={<ToolbarButtonRow alignment="right">{actions}</ToolbarButtonRow>} />
+      )}
+      <currentSection.component
+        toolbar={isSingleTopNav ? <ToolbarButtonRow alignment="right">{actions}</ToolbarButtonRow> : undefined}
+        sectionNav={subSectionNav}
+        dashboard={dashboard}
+        editIndex={editIndex}
+      />
     </>
   );
 }
@@ -209,9 +217,9 @@ function getSectionNav(
   };
 }

-function MakeEditable({ dashboard, sectionNav }: SettingsPageProps) {
+function MakeEditable({ dashboard, sectionNav, toolbar }: SettingsPageProps) {
   return (
-    <Page navModel={sectionNav}>
+    <Page navModel={sectionNav} toolbar={toolbar}>
       Dashboard not editable