Mirror of https://github.com/grafana/grafana.git (synced 2025-02-25 18:55:37 -06:00)

commit 33fd104cfd
Merge remote-tracking branch 'origin/main' into drclau/unistor/replace-authenticators-3
@ -3863,13 +3863,9 @@ exports[`better eslint`] = {
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "0"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/TraceView.tsx:5381": [
|
||||
[0, 0, 0, "Styles should be written using objects.", "0"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "1"],
|
||||
[0, 0, 0, "Do not use any type assertions.", "2"],
|
||||
[0, 0, 0, "Do not use any type assertions.", "3"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/TracePageHeader/Actions/ActionButton.tsx:5381": [
|
||||
[0, 0, 0, "Styles should be written using objects.", "0"]
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "0"],
|
||||
[0, 0, 0, "Do not use any type assertions.", "1"],
|
||||
[0, 0, 0, "Do not use any type assertions.", "2"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/TracePageHeader/Actions/TracePageActions.tsx:5381": [
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "0"]
|
||||
@ -3882,69 +3878,24 @@ exports[`better eslint`] = {
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "4"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "5"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "6"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "7"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "8"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "9"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "10"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "11"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "12"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "13"]
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "7"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/TracePageHeader/SearchBar/TracePageSearchBar.tsx:5381": [
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "0"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "1"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "2"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "3"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "4"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "5"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "6"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "7"]
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "2"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/TracePageHeader/SpanFilters/SpanFilters.tsx:5381": [
|
||||
[0, 0, 0, "\'HorizontalGroup\' import from \'@grafana/ui\' is restricted from being used by a pattern. Use Stack component instead.", "0"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "1"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "2"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "3"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "4"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "5"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "6"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "7"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/TracePageHeader/SpanGraph/CanvasSpanGraph.tsx:5381": [
|
||||
[0, 0, 0, "Styles should be written using objects.", "0"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/TracePageHeader/SpanGraph/GraphTicks.tsx:5381": [
|
||||
[0, 0, 0, "Styles should be written using objects.", "0"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/TracePageHeader/SpanGraph/Scrubber.tsx:5381": [
|
||||
[0, 0, 0, "Styles should be written using objects.", "0"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "1"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "2"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "3"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "4"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/TracePageHeader/SpanGraph/TickLabels.tsx:5381": [
|
||||
[0, 0, 0, "Styles should be written using objects.", "0"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "1"]
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "1"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/TracePageHeader/SpanGraph/ViewingLayer.tsx:5381": [
|
||||
[0, 0, 0, "Styles should be written using objects.", "0"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "1"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "2"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "3"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "4"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "5"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "6"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "7"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "8"]
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "0"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/TracePageHeader/TracePageHeader.tsx:5381": [
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "0"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "1"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "2"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "3"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "4"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "5"]
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "2"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/TracePageHeader/index.tsx:5381": [
|
||||
[0, 0, 0, "Do not re-export imported variable (\`./TracePageHeader\`)", "0"]
|
||||
@ -3954,193 +3905,42 @@ exports[`better eslint`] = {
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "1"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "2"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/TraceTimelineViewer/SpanBarRow.tsx:5381": [
|
||||
[0, 0, 0, "Styles should be written using objects.", "0"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "1"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "2"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "3"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "4"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "5"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "6"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "7"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "8"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "9"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "10"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "11"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "12"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "13"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "14"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "15"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "16"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "17"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "18"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "19"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "20"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "21"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/TraceTimelineViewer/SpanDetail/AccordianKeyValues.tsx:5381": [
|
||||
[0, 0, 0, "Styles should be written using objects.", "0"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "1"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "2"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "3"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "4"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "5"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "6"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "7"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "8"]
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "0"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/TraceTimelineViewer/SpanDetail/AccordianLogs.tsx:5381": [
|
||||
[0, 0, 0, "Styles should be written using objects.", "0"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "1"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "2"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "3"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "0"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "1"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "2"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "3"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "4"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "5"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "6"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "7"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "8"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "9"]
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "5"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/TraceTimelineViewer/SpanDetail/AccordianReferences.tsx:5381": [
|
||||
[0, 0, 0, "Styles should be written using objects.", "0"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "1"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "2"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "3"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "4"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "5"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "6"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "7"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "8"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "9"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "10"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "11"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "12"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "13"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "14"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "15"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "16"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "17"]
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "0"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "1"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "2"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "3"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/TraceTimelineViewer/SpanDetail/AccordianText.tsx:5381": [
|
||||
[0, 0, 0, "Styles should be written using objects.", "0"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "1"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "2"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/TraceTimelineViewer/SpanDetail/KeyValuesTable.tsx:5381": [
|
||||
[0, 0, 0, "Styles should be written using objects.", "0"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "1"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "2"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "3"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "4"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "5"]
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "0"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "1"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/TraceTimelineViewer/SpanDetail/SpanFlameGraph.tsx:5381": [
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "0"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/TraceTimelineViewer/SpanDetail/TextList.tsx:5381": [
|
||||
[0, 0, 0, "Styles should be written using objects.", "0"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "1"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "2"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/TraceTimelineViewer/SpanDetail/index.tsx:5381": [
|
||||
[0, 0, 0, "Styles should be written using objects.", "0"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "1"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "2"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "3"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "4"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "5"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "6"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "7"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "8"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "9"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "10"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "11"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/TraceTimelineViewer/SpanDetailRow.tsx:5381": [
|
||||
[0, 0, 0, "Styles should be written using objects.", "0"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "1"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/TraceTimelineViewer/SpanLinks.tsx:5381": [
|
||||
[0, 0, 0, "Styles should be written using objects.", "0"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "1"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "2"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "3"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/TraceTimelineViewer/SpanTreeOffset.tsx:5381": [
|
||||
[0, 0, 0, "Styles should be written using objects.", "0"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "1"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "2"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "3"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "4"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "5"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/TraceTimelineViewer/Ticks.tsx:5381": [
|
||||
[0, 0, 0, "Styles should be written using objects.", "0"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "1"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "2"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "3"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/TraceTimelineViewer/TimelineHeaderRow/TimelineCollapser.tsx:5381": [
|
||||
[0, 0, 0, "Styles should be written using objects.", "0"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/TraceTimelineViewer/TimelineHeaderRow/TimelineColumnResizer.tsx:5381": [
|
||||
[0, 0, 0, "Styles should be written using objects.", "0"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "1"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "2"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "3"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "4"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "5"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "6"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "7"]
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "0"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/TraceTimelineViewer/TimelineHeaderRow/TimelineHeaderRow.tsx:5381": [
|
||||
[0, 0, 0, "Styles should be written using objects.", "0"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "1"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "2"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "3"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/TraceTimelineViewer/TimelineHeaderRow/TimelineViewingLayer.tsx:5381": [
|
||||
[0, 0, 0, "Styles should be written using objects.", "0"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "1"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "2"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "3"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "4"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "5"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "6"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "7"]
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "0"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/TraceTimelineViewer/TimelineHeaderRow/index.tsx:5381": [
|
||||
[0, 0, 0, "Do not re-export imported variable (\`./TimelineHeaderRow\`)", "0"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/TraceTimelineViewer/VirtualizedTraceView.tsx:5381": [
|
||||
[0, 0, 0, "Styles should be written using objects.", "0"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "1"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "2"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/TraceTimelineViewer/index.tsx:5381": [
|
||||
[0, 0, 0, "Styles should be written using objects.", "0"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/TraceTimelineViewer/utils.tsx:5381": [
|
||||
[0, 0, 0, "Do not re-export imported variable (\`../utils/date\`)", "0"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/common/BreakableText.tsx:5381": [
|
||||
[0, 0, 0, "Styles should be written using objects.", "0"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/common/CopyIcon.tsx:5381": [
|
||||
[0, 0, 0, "Styles should be written using objects.", "0"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/common/LabeledList.tsx:5381": [
|
||||
[0, 0, 0, "Styles should be written using objects.", "0"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "1"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "2"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "3"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/common/NewWindowIcon.tsx:5381": [
|
||||
[0, 0, 0, "Styles should be written using objects.", "0"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/common/TraceName.tsx:5381": [
|
||||
[0, 0, 0, "Styles should be written using objects.", "0"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/demo/trace-generators.ts:5381": [
|
||||
[0, 0, 0, "Do not use any type assertions.", "0"]
|
||||
],
|
||||
@ -4175,10 +3975,6 @@ exports[`better eslint`] = {
|
||||
"public/app/features/explore/TraceView/components/model/transform-trace-data.tsx:5381": [
|
||||
[0, 0, 0, "Do not use any type assertions.", "0"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/settings/SpanBarSettings.tsx:5381": [
|
||||
[0, 0, 0, "Styles should be written using objects.", "0"],
|
||||
[0, 0, 0, "Styles should be written using objects.", "1"]
|
||||
],
|
||||
"public/app/features/explore/TraceView/components/types/index.tsx:5381": [
|
||||
[0, 0, 0, "Do not re-export imported variable (\`./trace\`)", "0"],
|
||||
[0, 0, 0, "Do not re-export imported variable (\`../settings/SpanBarSettings\`)", "1"],
|
||||
@ -4593,8 +4389,7 @@ exports[`better eslint`] = {
|
||||
"public/app/features/migrate-to-cloud/onprem/NameCell.tsx:5381": [
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "0"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "1"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "2"],
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "3"]
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "2"]
|
||||
],
|
||||
"public/app/features/notifications/StoredNotifications.tsx:5381": [
|
||||
[0, 0, 0, "No untranslated strings. Wrap text with <Trans />", "0"]
|
||||
@ -5225,9 +5020,6 @@ exports[`better eslint`] = {
|
||||
"public/app/features/teams/state/reducers.ts:5381": [
|
||||
[0, 0, 0, "Do not use any type assertions.", "0"]
|
||||
],
|
||||
"public/app/features/teams/state/selectors.ts:5381": [
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "0"]
|
||||
],
|
||||
"public/app/features/templating/fieldAccessorCache.ts:5381": [
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "0"]
|
||||
],
|
||||
142 .drone.yml
@ -4332,142 +4332,6 @@ volumes:
|
||||
clone:
|
||||
retries: 3
|
||||
depends_on: []
|
||||
environment:
|
||||
EDITION: oss
|
||||
image_pull_secrets:
|
||||
- gcr
|
||||
- gar
|
||||
kind: pipeline
|
||||
name: release-test-frontend
|
||||
node:
|
||||
type: no-parallel
|
||||
platform:
|
||||
arch: amd64
|
||||
os: linux
|
||||
services: []
|
||||
steps:
|
||||
- commands:
|
||||
- echo $DRONE_RUNNER_NAME
|
||||
image: alpine:3.20.3
|
||||
name: identify-runner
|
||||
- commands:
|
||||
- yarn install --immutable || yarn install --immutable
|
||||
depends_on: []
|
||||
image: node:20.9.0-alpine
|
||||
name: yarn-install
|
||||
- commands:
|
||||
- apk add --update git bash
|
||||
- yarn betterer ci
|
||||
depends_on:
|
||||
- yarn-install
|
||||
image: node:20.9.0-alpine
|
||||
name: betterer-frontend
|
||||
- commands:
|
||||
- yarn run ci:test-frontend
|
||||
depends_on:
|
||||
- yarn-install
|
||||
environment:
|
||||
TEST_MAX_WORKERS: 50%
|
||||
image: node:20.9.0-alpine
|
||||
name: test-frontend
|
||||
trigger:
|
||||
event:
|
||||
exclude:
|
||||
- promote
|
||||
ref:
|
||||
exclude:
|
||||
- refs/tags/*-cloud*
|
||||
include:
|
||||
- refs/tags/v*
|
||||
type: docker
|
||||
volumes:
|
||||
- host:
|
||||
path: /var/run/docker.sock
|
||||
name: docker
|
||||
---
|
||||
clone:
|
||||
retries: 3
|
||||
depends_on: []
|
||||
environment:
|
||||
EDITION: oss
|
||||
image_pull_secrets:
|
||||
- gcr
|
||||
- gar
|
||||
kind: pipeline
|
||||
name: release-test-backend
|
||||
node:
|
||||
type: no-parallel
|
||||
platform:
|
||||
arch: amd64
|
||||
os: linux
|
||||
services: []
|
||||
steps:
|
||||
- commands:
|
||||
- echo $DRONE_RUNNER_NAME
|
||||
image: alpine:3.20.3
|
||||
name: identify-runner
|
||||
- commands:
|
||||
- '# It is required that code generated from Thema/CUE be committed and in sync
|
||||
with its inputs.'
|
||||
- '# The following command will fail if running code generators produces any diff
|
||||
in output.'
|
||||
- apk add --update make
|
||||
- CODEGEN_VERIFY=1 make gen-cue
|
||||
depends_on: []
|
||||
image: golang:1.23.1-alpine
|
||||
name: verify-gen-cue
|
||||
- commands:
|
||||
- '# It is required that generated jsonnet is committed and in sync with its inputs.'
|
||||
- '# The following command will fail if running code generators produces any diff
|
||||
in output.'
|
||||
- apk add --update make
|
||||
- CODEGEN_VERIFY=1 make gen-jsonnet
|
||||
depends_on: []
|
||||
image: golang:1.23.1-alpine
|
||||
name: verify-gen-jsonnet
|
||||
- commands:
|
||||
- apk add --update make
|
||||
- make gen-go
|
||||
depends_on:
|
||||
- verify-gen-cue
|
||||
image: golang:1.23.1-alpine
|
||||
name: wire-install
|
||||
- commands:
|
||||
- apk add --update build-base shared-mime-info shared-mime-info-lang
|
||||
- go list -f '{{.Dir}}/...' -m | xargs go test -short -covermode=atomic -timeout=5m
|
||||
depends_on:
|
||||
- wire-install
|
||||
image: golang:1.23.1-alpine
|
||||
name: test-backend
|
||||
- commands:
|
||||
- apk add --update build-base
|
||||
- go test -count=1 -covermode=atomic -timeout=5m -run '^TestIntegration' $(find
|
||||
./pkg -type f -name '*_test.go' -exec grep -l '^func TestIntegration' '{}' '+'
|
||||
| grep -o '\(.*\)/' | sort -u)
|
||||
depends_on:
|
||||
- wire-install
|
||||
image: golang:1.23.1-alpine
|
||||
name: test-backend-integration
|
||||
trigger:
|
||||
event:
|
||||
exclude:
|
||||
- promote
|
||||
ref:
|
||||
exclude:
|
||||
- refs/tags/*-cloud*
|
||||
include:
|
||||
- refs/tags/v*
|
||||
type: docker
|
||||
volumes:
|
||||
- host:
|
||||
path: /var/run/docker.sock
|
||||
name: docker
|
||||
---
|
||||
clone:
|
||||
retries: 3
|
||||
depends_on:
|
||||
- release-test-backend
|
||||
- release-test-frontend
|
||||
image_pull_secrets:
|
||||
- gcr
|
||||
- gar
|
||||
@ -4648,9 +4512,7 @@ volumes:
|
||||
---
|
||||
clone:
|
||||
retries: 3
|
||||
depends_on:
|
||||
- release-test-backend
|
||||
- release-test-frontend
|
||||
depends_on: []
|
||||
image_pull_secrets:
|
||||
- gcr
|
||||
- gar
|
||||
@ -6151,6 +6013,6 @@ kind: secret
|
||||
name: gcr_credentials
|
||||
---
|
||||
kind: signature
|
||||
hmac: 766cd43d479f82bdb5bbaa3b48ed87ad13ea71d3418deb5d0c89ec7b77ae0475
|
||||
hmac: e618274ea7a8bfbf3d5e151d459348aa9382fe63fe7fef76c997db3cba74779f
|
||||
|
||||
...
|
||||
1 .github/CODEOWNERS vendored
@ -298,6 +298,7 @@
/pkg/modules/ @grafana/grafana-app-platform-squad
/pkg/services/grpcserver/ @grafana/grafana-search-and-storage
/pkg/generated @grafana/grafana-app-platform-squad
/pkg/services/unifiedSearch/ @grafana/grafana-search-and-storage

# Alerting
/pkg/services/ngalert/ @grafana/alerting-backend
@ -2,6 +2,10 @@
---
name: Levitate / Detect breaking changes in PR

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

on:
pull_request:
paths:
@ -11,7 +15,7 @@ on:

jobs:
buildPR:
name: Build PR
name: Build PR packages artifacts
runs-on: ubuntu-latest
defaults:
run:
@ -27,7 +31,7 @@ jobs:

- name: Get yarn cache directory path
id: yarn-cache-dir-path
run: echo "dir=$(yarn config get cacheFolder)" >> $GITHUB_OUTPUT
run: echo "dir=$(yarn config get cacheFolder)" >> "$GITHUB_OUTPUT"

- name: Restore yarn cache
uses: actions/cache@v4
@ -57,7 +61,7 @@ jobs:
path: './pr/pr_built_packages.zip'

buildBase:
name: Build Base
name: Build Base packages artifacts
runs-on: ubuntu-latest
defaults:
run:
@ -75,7 +79,7 @@ jobs:

- name: Get yarn cache directory path
id: yarn-cache-dir-path
run: echo "dir=$(yarn config get cacheFolder)" >> $GITHUB_OUTPUT
run: echo "dir=$(yarn config get cacheFolder)" >> "$GITHUB_OUTPUT"

- name: Restore yarn cache
uses: actions/cache@v4
@ -105,7 +109,7 @@ jobs:
path: './base/base_built_packages.zip'

Detect:
name: Detect breaking changes
name: Detect breaking changes between PR and base
runs-on: ubuntu-latest
needs: ['buildPR', 'buildBase']
env:
@ -179,7 +183,7 @@ jobs:


Report:
name: Report breaking changes in PR
name: Report breaking changes in PR comment
runs-on: ubuntu-latest
needs: ['Detect']

@ -234,9 +238,9 @@ jobs:
echo 'levitate_markdown<<EOF'
cat levitate.md
echo EOF
} >> $GITHUB_OUTPUT
} >> "$GITHUB_OUTPUT"
else
echo "levitate_markdown=No breaking changes detected" >> $GITHUB_OUTPUT
echo "levitate_markdown=No breaking changes detected" >> "$GITHUB_OUTPUT"
fi


@ -253,7 +257,6 @@ jobs:
${{ steps.levitate-markdown.outputs.levitate_markdown }}

[Read our guideline](https://github.com/grafana/grafana/blob/main/contribute/breaking-changes-guide/breaking-changes-guide.md)
[Console output](${{ steps.levitate-run.outputs.job_link }})

* Your pull request merge won't be blocked.
GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }}
@ -359,5 +362,6 @@ jobs:
if [ "${{ steps.levitate-run.outputs.exit_code }}" -ne 0 ]; then
echo "Breaking changes detected. Please check the levitate report in your pull request. This workflow won't block merging."
fi

exit ${{ steps.levitate-run.outputs.exit_code }}
shell: bash
12 Makefile
@ -18,6 +18,9 @@ GO_BUILD_FLAGS += $(if $(GO_BUILD_DEV),-dev)
GO_BUILD_FLAGS += $(if $(GO_BUILD_TAGS),-build-tags=$(GO_BUILD_TAGS))
GO_BUILD_FLAGS += $(GO_RACE_FLAG)

# GNU xargs has flag -r, and BSD xargs (e.g. MacOS) has that behaviour by default
XARGSR = $(shell xargs --version 2>&1 | grep -q GNU && echo xargs -r || echo xargs)

targets := $(shell echo '$(sources)' | tr "," " ")

GO_INTEGRATION_TESTS := $(shell find ./pkg -type f -name '*_test.go' -exec grep -l '^func TestIntegration' '{}' '+' | grep -o '\(.*\)/' | sort -u)
@ -303,6 +306,15 @@ golangci-lint: $(GOLANGCI_LINT)
.PHONY: lint-go
lint-go: golangci-lint ## Run all code checks for backend. You can use GO_LINT_FILES to specify exact files to check

.PHONY: lint-go-diff
lint-go-diff: $(GOLANGCI_LINT)
git diff --name-only remotes/origin/main | \
grep '\.go$$' | \
$(XARGSR) dirname | \
sort -u | \
sed 's,^,./,' | \
$(XARGSR) $(GOLANGCI_LINT) run --config .golangci.toml

# with disabled SC1071 we are ignored some TCL,Expect `/usr/bin/env expect` scripts
.PHONY: shellcheck
shellcheck: $(SH_FILES) ## Run checks for shell scripts.
@ -1346,6 +1346,11 @@ notification_log_retention = 5d
# Duration for which a resolved alert state transition will continue to be sent to the Alertmanager.
resolved_alert_retention = 15m

# Defines the limit of how many alert rule versions
# should be stored in the database for each alert rule in an organization including the current one.
# 0 value means no limit
rule_version_record_limit = 0

[unified_alerting.screenshots]
# Enable screenshots in notifications. You must have either installed the Grafana image rendering
# plugin, or set up Grafana to use a remote rendering service.
@ -1560,8 +1565,8 @@ expire_time = 7
#################################### Internal Grafana Metrics ############
# Metrics available at HTTP URL /metrics and /metrics/plugins/:pluginId
[metrics]
enabled = true
interval_seconds = 10
enabled = true
interval_seconds = 10
# Disable total stats (stat_totals_*) metrics to be generated
disable_total_stats = false
# The interval at which the total stats collector will update the stats. Default is 1800 seconds.
@ -1335,6 +1335,11 @@
# Duration for which a resolved alert state transition will continue to be sent to the Alertmanager.
;resolved_alert_retention = 15m

# Defines the limit of how many alert rule versions
# should be stored in the database for each alert rule in an organization including the current one.
# 0 value means no limit
;rule_version_record_limit= 0

[unified_alerting.screenshots]
# Enable screenshots in notifications. You must have either installed the Grafana image rendering
# plugin, or set up Grafana to use a remote rendering service.
@ -566,6 +566,7 @@ Lists the roles that have been directly assigned to a given user. The list does
Query Parameters:

- `includeHidden`: Optional. Set to `true` to include roles that are `hidden`.
- `includeMapped`: Optional. Set to `true` to include roles that have been mapped through the group attribute sync feature.

#### Required permissions

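For illustration only, a minimal sketch of combining the two query parameters above; the endpoint path and user ID are placeholders, not taken from this diff:

```typescript
// Placeholder path: substitute the actual "list roles assigned to a user" endpoint.
const params = new URLSearchParams({ includeHidden: 'true', includeMapped: 'true' });
const url = `/api/access-control/users/<USER_ID>/roles?${params.toString()}`;
// -> /api/access-control/users/<USER_ID>/roles?includeHidden=true&includeMapped=true
```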
@ -104,6 +104,7 @@ With a Grafana Enterprise license, you also get access to premium data sources,
- [Sqlyze Datasource](/grafana/plugins/grafana-odbc-datasource)
- [SumoLogic](/grafana/plugins/grafana-sumologic-datasource)
- [Wavefront](/grafana/plugins/grafana-wavefront-datasource)
- [Zendesk](/grafana/plugins/grafana-zendesk-datasource)

## Try Grafana Enterprise

@ -48,7 +48,6 @@ Most [generally available](https://grafana.com/docs/release-life-cycle/#general-
| `angularDeprecationUI` | Display Angular warnings in dashboards and panels | Yes |
| `dashgpt` | Enable AI powered features in dashboards | Yes |
| `alertingInsights` | Show the new alerting insights landing page | Yes |
| `externalServiceAccounts` | Automatic service account and token setup for plugins | Yes |
| `panelMonitoring` | Enables panel monitoring through logs and measurements | Yes |
| `formatString` | Enable format string transformer | Yes |
| `transformationsVariableSupport` | Allows using variables in transformations | Yes |
@ -101,6 +100,7 @@ Most [generally available](https://grafana.com/docs/release-life-cycle/#general-
| `enableDatagridEditing` | Enables the edit functionality in the datagrid panel |
| `sqlDatasourceDatabaseSelection` | Enables previous SQL data source dataset dropdown behavior |
| `reportingRetries` | Enables rendering retries for the reporting feature |
| `externalServiceAccounts` | Automatic service account and token setup for plugins |
| `cloudWatchBatchQueries` | Runs CloudWatch metrics queries as separate batches |
| `teamHttpHeaders` | Enables LBAC for datasources to apply LogQL filtering of logs to the client requests for users in teams |
| `pdfTables` | Enables generating table data as PDF in reporting |
@ -111,6 +111,7 @@ Most [generally available](https://grafana.com/docs/release-life-cycle/#general-
| `ssoSettingsSAML` | Use the new SSO Settings API to configure the SAML connector |
| `accessActionSets` | Introduces action sets for resource permissions. Also ensures that all folder editors and admins can create subfolders without needing any additional permissions. |
| `azureMonitorPrometheusExemplars` | Allows configuration of Azure Monitor as a data source that can provide Prometheus exemplars |
| `ssoSettingsLDAP` | Use the new SSO Settings API to configure LDAP |
| `cloudwatchMetricInsightsCrossAccount` | Enables cross account observability for Cloudwatch Metric Insights query builder |
| `useSessionStorageForRedirection` | Use session storage for handling the redirection after login |

@ -181,6 +182,7 @@ Experimental features might be changed or removed without prior notice.
| `tableSharedCrosshair` | Enables shared crosshair in table panel |
| `kubernetesFeatureToggles` | Use the kubernetes API for feature toggle management in the frontend |
| `newFolderPicker` | Enables the nested folder picker without having nested folders enabled |
| `onPremToCloudMigrationsAlerts` | Enables the migration of alerts and its child resources to your Grafana Cloud stack. Requires `onPremToCloudMigrations` to be enabled in conjunction. |
| `sqlExpressions` | Enables using SQL and DuckDB functions as Expressions. |
| `nodeGraphDotLayout` | Changed the layout algorithm for the node graph |
| `kubernetesAggregator` | Enable grafana's embedded kube-aggregator |
@ -206,6 +208,7 @@ Experimental features might be changed or removed without prior notice.
| `appSidecar` | Enable the app sidecar feature that allows rendering 2 apps at the same time |
| `alertingQueryAndExpressionsStepMode` | Enables step mode for alerting queries and expressions |
| `rolePickerDrawer` | Enables the new role picker drawer design |
| `pluginsSriChecks` | Enables SRI checks for plugin assets |

## Development feature toggles

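For context (not part of the diff): in Grafana's frontend these toggles are usually read from the runtime config exposed by `@grafana/runtime`. A minimal sketch, assuming the `config.featureToggles` map and the `rolePickerDrawer` toggle listed above:

```typescript
import { config } from '@grafana/runtime';

// Hypothetical guard: render the new role picker drawer only when its toggle is on.
export function shouldUseRolePickerDrawer(): boolean {
  return Boolean(config.featureToggles.rolePickerDrawer);
}
```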
@ -53,6 +53,12 @@ role_attribute_path = contains(roles[*], 'admin') && 'Admin' || contains(roles[*
As an example, `<PROVIDER_DOMAIN>` can be `keycloak-demo.grafana.org`
and `<REALM_NAME>` can be `grafana`.

To configure the `kc_idp_hint` parameter for Keycloak, you need to change the `auth_url` configuration to include the `kc_idp_hint` parameter. For example if you want to hint the Google identity provider:

```ini
auth_url = https://<PROVIDER_DOMAIN>/realms/<REALM_NAME>/protocol/openid-connect/auth?kc_idp_hint=google
```

{{% admonition type="note" %}}
api_url is not required if the id_token contains all the necessary user information and can add latency to the login process.
It is useful as a fallback or if the user has more than 150 group memberships.
@ -2,6 +2,8 @@ import { Interception } from 'cypress/types/net-stubbing';
|
||||
import { load } from 'js-yaml';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
|
||||
import { selectors as rawSelectors } from '@grafana/e2e-selectors';
|
||||
|
||||
import { selectors } from '../../public/app/plugins/datasource/azuremonitor/e2e/selectors';
|
||||
import {
|
||||
AzureDataSourceJsonData,
|
||||
@ -75,12 +77,13 @@ const addAzureMonitorVariable = (
|
||||
isFirst: boolean,
|
||||
options?: { subscription?: string; resourceGroup?: string; namespace?: string; resource?: string; region?: string }
|
||||
) => {
|
||||
e2e.components.PageToolbar.item('Dashboard settings').click();
|
||||
e2e.components.NavToolbar.editDashboard.editButton().should('be.visible').click();
|
||||
e2e.components.NavToolbar.editDashboard.settingsButton().should('be.visible').click();
|
||||
e2e.components.Tab.title('Variables').click();
|
||||
if (isFirst) {
|
||||
e2e.pages.Dashboard.Settings.Variables.List.addVariableCTAV2().click();
|
||||
} else {
|
||||
e2e.pages.Dashboard.Settings.Variables.List.newButton().click();
|
||||
cy.get(`[data-testid="${rawSelectors.pages.Dashboard.Settings.Variables.List.newButton}"]`).click();
|
||||
}
|
||||
e2e.pages.Dashboard.Settings.Variables.Edit.General.generalNameInputV2().clear().type(name);
|
||||
e2e.components.DataSourcePicker.inputV2().type(`${dataSourceName}{enter}`);
|
||||
@ -113,7 +116,8 @@ const addAzureMonitorVariable = (
|
||||
break;
|
||||
}
|
||||
e2e.pages.Dashboard.Settings.Variables.Edit.General.submitButton().click();
|
||||
e2e.pages.Dashboard.Settings.Actions.close().click();
|
||||
e2e.components.NavToolbar.editDashboard.backToDashboardButton().click();
|
||||
e2e.components.NavToolbar.editDashboard.exitButton().click();
|
||||
};
|
||||
|
||||
const storageAcctName = 'azmonteststorage';
|
||||
@ -189,7 +193,8 @@ describe('Azure monitor datasource', () => {
|
||||
},
|
||||
timeout: 10000,
|
||||
});
|
||||
e2e.components.PanelEditor.applyButton().click();
|
||||
e2e.components.NavToolbar.editDashboard.backToDashboardButton().click();
|
||||
e2e.components.NavToolbar.editDashboard.exitButton().click();
|
||||
e2e.flows.addPanel({
|
||||
dataSourceName,
|
||||
visitDashboardAtStart: false,
|
||||
@ -209,7 +214,8 @@ describe('Azure monitor datasource', () => {
|
||||
},
|
||||
timeout: 10000,
|
||||
});
|
||||
e2e.components.PanelEditor.applyButton().click();
|
||||
e2e.components.NavToolbar.editDashboard.backToDashboardButton().click();
|
||||
e2e.components.NavToolbar.editDashboard.exitButton().click();
|
||||
e2e.flows.addPanel({
|
||||
dataSourceName,
|
||||
visitDashboardAtStart: false,
|
||||
@ -228,7 +234,8 @@ describe('Azure monitor datasource', () => {
|
||||
},
|
||||
timeout: 10000,
|
||||
});
|
||||
e2e.components.PanelEditor.applyButton().click();
|
||||
e2e.components.NavToolbar.editDashboard.backToDashboardButton().click();
|
||||
e2e.components.NavToolbar.editDashboard.exitButton().click();
|
||||
e2e.flows.addPanel({
|
||||
dataSourceName,
|
||||
visitDashboardAtStart: false,
|
||||
@ -275,25 +282,32 @@ describe('Azure monitor datasource', () => {
|
||||
namespace: '$namespace',
|
||||
region: '$region',
|
||||
});
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemLabels('subscription').click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemValueDropDownOptionTexts('grafanalabs-datasources-dev').click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemLabels('resourceGroups').parent().find('button').click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemLabels('subscription')
|
||||
.parent()
|
||||
.within(() => {
|
||||
cy.get('input').click();
|
||||
});
|
||||
e2e.components.Select.option().contains('grafanalabs-datasources-dev').click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemLabels('resourceGroups')
|
||||
.parent()
|
||||
.find('input')
|
||||
.type('cloud-plugins-e2e-test-azmon{downArrow}{enter}');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemLabels('namespaces').parent().find('button').click();
|
||||
.within(() => {
|
||||
cy.get('input').type('cloud-plugins-e2e-test-azmon{downArrow}{enter}');
|
||||
});
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemLabels('namespaces')
|
||||
.parent()
|
||||
.find('input')
|
||||
.type('microsoft.storage/storageaccounts{downArrow}{enter}');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemLabels('region').parent().find('button').click();
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemLabels('region').parent().find('input').type('uk south{downArrow}{enter}');
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemLabels('resource').parent().find('button').click();
|
||||
.within(() => {
|
||||
cy.get('input').type('microsoft.storage/storageaccounts{downArrow}{enter}');
|
||||
});
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemLabels('region')
|
||||
.parent()
|
||||
.within(() => {
|
||||
cy.get('input').type('uk south{downArrow}{enter}');
|
||||
});
|
||||
e2e.pages.Dashboard.SubMenu.submenuItemLabels('resource')
|
||||
.parent()
|
||||
.find('input')
|
||||
.type(`${storageAcctName}{downArrow}{enter}`);
|
||||
.within(() => {
|
||||
cy.get('input').type(`${storageAcctName}{downArrow}{enter}`);
|
||||
});
|
||||
e2e.flows.addPanel({
|
||||
dataSourceName,
|
||||
visitDashboardAtStart: false,
|
||||
|
@ -34,7 +34,7 @@ describe('Dashboard templating', () => {
`Server:sqlstring = 'A''A\\"A','BB\\\B','CCC'`,
`Server:date = NaN`,
`Server:text = All`,
`Server:queryparam = var-Server=A%27A%22A&var-Server=BB%5CB&var-Server=CCC`,
`Server:queryparam = var-Server=$__all`,
`1 < 2`,
`Example: from=now-6h&to=now`,
];
@ -34,7 +34,7 @@ describe('Templating', () => {

e2e.components.DashboardLinks.dropDown().should('be.visible').click().wait('@tagsTemplatingSearch');

verifyLinks('var-custom=%24__all');
verifyLinks('var-custom=$__all');

cy.get('body').click();

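The updated expectation apparently reflects that the `$__all` value is no longer percent-encoded in the verified links. A quick illustration of the two forms, using only standard URL encoding:

```typescript
// '$' percent-encodes to '%24', so the old assertion matched the encoded form.
const encoded = encodeURIComponent('$__all');
console.log(encoded); // '%24__all'
console.log(decodeURIComponent('var-custom=%24__all')); // 'var-custom=$__all'
```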
@ -49,7 +49,8 @@ export const importDashboard = (dashboardToImport: Dashboard, queryTimeout?: num
dashboardToImport.panels.forEach((panel) => {
// Look at the json data
e2e.components.Panels.Panel.menu(panel.title).click({ force: true }); // force click because menu is hidden and show on hover
e2e.components.Panels.Panel.menuItems('Inspect').should('be.visible').click();
e2e.components.Panels.Panel.menuItems('Inspect').trigger('mouseover', { force: true });
e2e.components.Panels.Panel.menuItems('Data').click({ force: true });
e2e.components.Tab.title('JSON').should('be.visible').click();
e2e.components.PanelInspector.Json.content().should('be.visible').contains('Panel JSON').click({ force: true });
e2e.components.Select.option().should('be.visible').contains('Panel data').click();
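The same open-inspector sequence recurs in several specs below (force-click the panel menu, hover the Inspect item to reveal its submenu, then click Data). A hedged sketch of how it could be factored into a helper; this helper is not part of the commit, and the import path mirrors the one used in the new spec further down:

```typescript
import { e2e } from '../utils';

// Hypothetical helper mirroring the repeated pattern above.
const openPanelDataInspector = (panelTitle: string) => {
  e2e.components.Panels.Panel.menu(panelTitle).click({ force: true }); // menu only shows on hover
  e2e.components.Panels.Panel.menuItems('Inspect').trigger('mouseover', { force: true });
  e2e.components.Panels.Panel.menuItems('Data').click({ force: true });
};
```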
@ -34,8 +34,10 @@ describe('Inspect drawer tests', () => {

e2e.flows.openDashboard({ uid: 'wfTJJL5Wz' });

// testing opening inspect drawer directly by clicking on Inspect in header menu
e2e.flows.openPanelMenuItem(e2e.flows.PanelMenuItems.Inspect, PANEL_UNDER_TEST);
e2e.components.Panels.Panel.title(PANEL_UNDER_TEST).scrollIntoView().should('be.visible');
e2e.components.Panels.Panel.menu(PANEL_UNDER_TEST).click({ force: true }); // force click because menu is hidden and show on hover
e2e.components.Panels.Panel.menuItems('Inspect').trigger('mouseover', { force: true });
e2e.components.Panels.Panel.menuItems('Data').click({ force: true });

expectDrawerTabsAndContent();

@ -49,7 +49,8 @@ export const importDashboard = (dashboardToImport: Dashboard, queryTimeout?: num
dashboardToImport.panels.forEach((panel) => {
// Look at the json data
e2e.components.Panels.Panel.menu(panel.title).click({ force: true }); // force click because menu is hidden and show on hover
e2e.components.Panels.Panel.menuItems('Inspect').should('be.visible').click();
e2e.components.Panels.Panel.menuItems('Inspect').trigger('mouseover', { force: true });
e2e.components.Panels.Panel.menuItems('Data').click({ force: true });
e2e.components.Tab.title('JSON').should('be.visible').click();
e2e.components.PanelInspector.Json.content().should('be.visible');
e2e.components.ReactMonacoEditor.editorLazy().should('be.visible');
@ -5,7 +5,10 @@ import { fromBaseUrl } from '../support/url';

const defaultUserPreferences = {
timezone: '', // "Default" option
} as const; // TODO: when we update typescript >4.9 change to `as const satisfies UserPreferencesDTO`
navbar: {
bookmarkUrls: [],
},
} as const satisfies UserPreferencesDTO; // TODO: when we update typescript >4.9 change to `as const satisfies UserPreferencesDTO`

// Only accept preferences we have defaults for as arguments. To allow a new preference to be set, add a default for it
type UserPreferences = Pick<UserPreferencesDTO, keyof typeof defaultUserPreferences>;
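A minimal sketch of the typing pattern used above, with invented types for illustration only: deriving the argument type from the defaults object means a preference becomes settable only once a default value exists for it.

```typescript
interface PreferencesDTO {
  timezone: string;
  navbar: { bookmarkUrls: string[] };
  theme: string; // has no default below, so it cannot be passed to the helper
}

const defaultPreferences = {
  timezone: '',
  navbar: { bookmarkUrls: [] },
} satisfies Partial<PreferencesDTO>;

// Same shape as the `UserPreferences` type in the diff.
type SettablePreferences = Pick<PreferencesDTO, keyof typeof defaultPreferences>;

const update: SettablePreferences = { timezone: 'utc', navbar: { bookmarkUrls: ['/admin'] } };
```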
62 e2e/various-suite/bookmarks.spec.ts (new file)
@ -0,0 +1,62 @@
import { e2e } from '../utils';
import { fromBaseUrl } from '../utils/support/url';

describe('Pin nav items', () => {
beforeEach(() => {
cy.viewport(1280, 800);
e2e.flows.login(Cypress.env('USERNAME'), Cypress.env('PASSWORD'));
cy.visit(fromBaseUrl('/'));
});
afterEach(() => {
e2e.flows.setDefaultUserPreferences();
});

it('should pin the selected menu item and add it as a Bookmarks menu item child', () => {
// Open, dock and check if the mega menu is visible
cy.get('[aria-label="Open menu"]').click();
cy.get('[aria-label="Dock menu"]').click();
e2e.components.NavMenu.Menu().should('be.visible');

// Check if the Bookmark section is visible
const bookmarkSection = cy.get('[href="/bookmarks"]');
bookmarkSection.should('be.visible');

// Click on the pin icon to add Administration to the Bookmarks section
const adminItem = cy.contains('a', 'Administration');
const bookmarkPinIcon = adminItem.siblings('button').should('have.attr', 'aria-label', 'Add to Bookmarks');
bookmarkPinIcon.click({ force: true });

// Check if the Administration menu item is visible in the Bookmarks section
cy.get('[aria-label="Expand section Bookmarks"]').click();
const bookmarks = cy.get('[href="/bookmarks"]').parentsUntil('li').siblings('ul');
bookmarks.within(() => {
cy.get('a').should('contain.text', 'Administration');
});
});

it('should unpin the item and remove it from the Bookmarks section', () => {
// Set Administration as a pinned item and reload the page
e2e.flows.setUserPreferences({ navbar: { bookmarkUrls: ['/admin'] } });
cy.reload();

// Open, dock and check if the mega menu is visible
cy.get('[aria-label="Open menu"]').click();
cy.get('[aria-label="Dock menu"]').click();
e2e.components.NavMenu.Menu().should('be.visible');

// Check if the Bookmark section is visible and open it
cy.get('[href="/bookmarks"]').should('be.visible');
cy.get('[aria-label="Expand section Bookmarks"]').click();

// Check if the Administration menu item is visible in the Bookmarks section
const bookmarks = cy.get('[href="/bookmarks"]').parentsUntil('li').siblings('ul').children();
const administrationIsPinned = bookmarks.filter('li').children().should('contain.text', 'Administration');

// Click on the pin icon to remove Administration from the Bookmarks section and check if it is removed
administrationIsPinned.within(() => {
cy.get('[aria-label="Remove from Bookmarks"]').click({ force: true });
});
cy.wait(500);
administrationIsPinned.should('not.exist');
});
});
@ -34,8 +34,10 @@ describe('Inspect drawer tests', () => {

e2e.flows.openDashboard({ uid: 'wfTJJL5Wz' });

// testing opening inspect drawer directly by clicking on Inspect in header menu
e2e.flows.openPanelMenuItem(e2e.flows.PanelMenuItems.Inspect, PANEL_UNDER_TEST);
e2e.components.Panels.Panel.title(PANEL_UNDER_TEST).scrollIntoView().should('be.visible');
e2e.components.Panels.Panel.menu(PANEL_UNDER_TEST).click({ force: true }); // force click because menu is hidden and show on hover
e2e.components.Panels.Panel.menuItems('Inspect').trigger('mouseover', { force: true });
e2e.components.Panels.Panel.menuItems('Data').click({ force: true });

expectDrawerTabsAndContent();

10 go.work.sum
@ -378,7 +378,6 @@ github.com/blevesearch/go-metrics v0.0.0-20201227073835-cf1acfcdf475/go.mod h1:9
|
||||
github.com/blevesearch/goleveldb v1.0.1 h1:iAtV2Cu5s0GD1lwUiekkFHe2gTMCCNVj2foPclDLIFI=
|
||||
github.com/blevesearch/goleveldb v1.0.1/go.mod h1:WrU8ltZbIp0wAoig/MHbrPCXSOLpe79nz5lv5nqfYrQ=
|
||||
github.com/blevesearch/mmap-go v1.0.3/go.mod h1:pYvKl/grLQrBxuaRYgoTssa4rVujYYeenDp++2E+yvs=
|
||||
github.com/blevesearch/segment v0.9.0 h1:5lG7yBCx98or7gK2cHMKPukPZ/31Kag7nONpoBt22Ac=
|
||||
github.com/blevesearch/snowball v0.6.1 h1:cDYjn/NCH+wwt2UdehaLpr2e4BwLIjN4V/TdLsL+B5A=
|
||||
github.com/blevesearch/snowball v0.6.1/go.mod h1:ZF0IBg5vgpeoUhnMza2v0A/z8m1cWPlwhke08LpNusg=
|
||||
github.com/blevesearch/stempel v0.2.0 h1:CYzVPaScODMvgE9o+kf6D4RJ/VRomyi9uHF+PtB+Afc=
|
||||
@ -491,8 +490,6 @@ github.com/elastic/go-sysinfo v1.11.2/go.mod h1:GKqR8bbMK/1ITnez9NIsIfXQr25aLhRJ
|
||||
github.com/elastic/go-windows v1.0.1 h1:AlYZOldA+UJ0/2nBuqWdo90GFCgG9xuyw9SYzGUtJm0=
|
||||
github.com/elastic/go-windows v1.0.1/go.mod h1:FoVvqWSun28vaDQPbj2Elfc0JahhPB7WQEGa3c814Ss=
|
||||
github.com/elazarl/goproxy v0.0.0-20230731152917-f99041a5c027/go.mod h1:Ro8st/ElPeALwNFlcTpWmkr6IoMFfkjXAvTHpevnDsM=
|
||||
github.com/elazarl/goproxy/ext v0.0.0-20190711103511-473e67f1d7d2/go.mod h1:gNh8nYJoAm43RfaxurUnxr+N1PwuFV3ZMl/efxlIlY8=
|
||||
github.com/ettle/strcase v0.1.1 h1:htFueZyVeE1XNnMEfbqp5r67qAN/4r6ya1ysq8Q+Zcw=
|
||||
github.com/expr-lang/expr v1.16.2 h1:JvMnzUs3LeVHBvGFcXYmXo+Q6DPDmzrlcSBO6Wy3w4s=
|
||||
github.com/expr-lang/expr v1.16.2/go.mod h1:uCkhfG+x7fcZ5A5sXHKuQ07jGZRl6J0FCAaf2k4PtVQ=
|
||||
github.com/fatih/structs v1.1.0 h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo=
|
||||
@ -587,8 +584,6 @@ github.com/grafana/alerting v0.0.0-20240830172655-aa466962ea18 h1:3cQ+d+fkNL2Eqp
|
||||
github.com/grafana/alerting v0.0.0-20240830172655-aa466962ea18/go.mod h1:GMLi6d09Xqo96fCVUjNk//rcjP5NKEdjOzfWIffD5r4=
|
||||
github.com/grafana/alerting v0.0.0-20240917171353-6c25eb6eff10 h1:oDbLKM34O+JUF9EQFS+9aYhdYoeNfUpXqNjFCLIxwF4=
|
||||
github.com/grafana/alerting v0.0.0-20240917171353-6c25eb6eff10/go.mod h1:GMLi6d09Xqo96fCVUjNk//rcjP5NKEdjOzfWIffD5r4=
|
||||
github.com/grafana/alerting v0.0.0-20240926233713-446ddd356f8d h1:HOK6RWTuVldWFtNbWHxPlTa2shZ+WsNJsxoRJhX56Zg=
|
||||
github.com/grafana/alerting v0.0.0-20240926233713-446ddd356f8d/go.mod h1:GMLi6d09Xqo96fCVUjNk//rcjP5NKEdjOzfWIffD5r4=
|
||||
github.com/grafana/gomemcache v0.0.0-20240229205252-cd6a66d6fb56/go.mod h1:PGk3RjYHpxMM8HFPhKKo+vve3DdlPUELZLSDEFehPuU=
|
||||
github.com/grafana/prometheus-alertmanager v0.25.1-0.20240625192351-66ec17e3aa45 h1:AJKOtDKAOg8XNFnIZSmqqqutoTSxVlRs6vekL2p2KEY=
|
||||
github.com/grafana/prometheus-alertmanager v0.25.1-0.20240625192351-66ec17e3aa45/go.mod h1:01sXtHoRwI8W324IPAzuxDFOmALqYLCOhvSC2fUHWXc=
|
||||
@ -852,6 +847,7 @@ github.com/stoewer/parquet-cli v0.0.7/go.mod h1:bskxHdj8q3H1EmfuCqjViFoeO3NEvs5l
|
||||
github.com/streadway/amqp v1.0.0 h1:kuuDrUJFZL1QYL9hUNuCxNObNzB0bV/ZG5jV3RWAQgo=
|
||||
github.com/streadway/handy v0.0.0-20200128134331-0f66f006fb2e h1:mOtuXaRAbVZsxAHVdPR3IjfmN8T1h2iczJLynhLybf8=
|
||||
github.com/substrait-io/substrait-go v0.4.2 h1:buDnjsb3qAqTaNbOR7VKmNgXf4lYQxWEcnSGUWBtmN8=
|
||||
github.com/syndtr/goleveldb v1.0.0 h1:fBdIW9lB4Iz0n9khmH8w27SJ3QEJ7+IgjPEwGSZiFdE=
|
||||
github.com/tdewolff/minify/v2 v2.12.9 h1:dvn5MtmuQ/DFMwqf5j8QhEVpPX6fi3WGImhv8RUB4zA=
|
||||
github.com/tdewolff/minify/v2 v2.12.9/go.mod h1:qOqdlDfL+7v0/fyymB+OP497nIxJYSvX4MQWA8OoiXU=
|
||||
github.com/tdewolff/parse/v2 v2.6.8 h1:mhNZXYCx//xG7Yq2e/kVLNZw4YfYmeHbhx+Zc0OvFMA=
|
||||
@ -1037,9 +1033,11 @@ golang.org/x/net v0.0.0-20201202161906-c7110b5ffcbb/go.mod h1:sp8m0HH+o8qH0wwXwY
|
||||
golang.org/x/net v0.0.0-20210726213435-c6fcb2dbf985/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20211123203042-d83791d6bcd9/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20211216030914-fe4d6282115f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
|
||||
golang.org/x/net v0.26.0/go.mod h1:5YKkiSynbBIh3p6iOc/vibscux0x38BZDkn8sCUPxHE=
|
||||
golang.org/x/net v0.27.0/go.mod h1:dDi0PyhWNoiUOrAS8uXv/vnScO4wnHQO4mj9fn/RytE=
|
||||
golang.org/x/net v0.28.0/go.mod h1:yqtgsTWOOnlGLG9GFRrK3++bGOUEkNBoHZc8MEDWPNg=
|
||||
golang.org/x/oauth2 v0.20.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
||||
golang.org/x/oauth2 v0.21.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
||||
golang.org/x/oauth2 v0.22.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
||||
golang.org/x/sys v0.0.0-20190204203706-41f3e6584952/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
@ -1051,6 +1049,7 @@ golang.org/x/sys v0.23.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.24.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457 h1:zf5N6UOrA487eEFacMePxjXAJctxKmyjKUsjA11Uzuk=
|
||||
golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457/go.mod h1:pRgIJT+bRLFKnoM1ldnzKoxTIn14Yxz928LQRYYgIN0=
|
||||
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/term v0.21.0/go.mod h1:ooXLefLobQVslOqselCNF4SxFAaoS6KujMbsGzSDmX0=
|
||||
golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI=
|
||||
golang.org/x/text v0.17.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
|
||||
@ -1070,6 +1069,7 @@ google.golang.org/genproto/googleapis/api v0.0.0-20240730163845-b1a4ccb954bf/go.
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20240814211410-ddb44dafa142/go.mod h1:d6be+8HhtEtucleCbxpPW9PA9XwISACu8nvpPqF0BVo=
|
||||
google.golang.org/genproto/googleapis/bytestream v0.0.0-20240730163845-b1a4ccb954bf h1:T4tsZBlZYXK3j40sQNP5MBO32I+rn6ypV1PpklsiV8k=
|
||||
google.golang.org/genproto/googleapis/bytestream v0.0.0-20240730163845-b1a4ccb954bf/go.mod h1:5/MT647Cn/GGhwTpXC7QqcaR5Cnee4v4MKCU1/nwnIQ=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20240528184218-531527333157/go.mod h1:EfXuqaE1J41VCDicxHzUDm+8rk+7ZdXzHV0IhO/I6s0=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20240604185151-ef581f913117/go.mod h1:EfXuqaE1J41VCDicxHzUDm+8rk+7ZdXzHV0IhO/I6s0=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20240701130421-f6361c86f094/go.mod h1:Ue6ibwXGpU+dqIcODieyLOcgj7z8+IcskoNIgZxtrFY=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20240722135656-d784300faade/go.mod h1:Ue6ibwXGpU+dqIcODieyLOcgj7z8+IcskoNIgZxtrFY=
|
||||
package.json
@ -69,9 +69,9 @@
|
||||
"releaseNotesUrl": "https://grafana.com/docs/grafana/next/release-notes/"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/core": "7.25.2",
|
||||
"@babel/preset-env": "7.25.4",
|
||||
"@babel/runtime": "7.25.6",
|
||||
"@babel/core": "7.25.7",
|
||||
"@babel/preset-env": "7.25.7",
|
||||
"@babel/runtime": "7.25.7",
|
||||
"@betterer/betterer": "5.4.0",
|
||||
"@betterer/cli": "5.4.0",
|
||||
"@betterer/eslint": "5.4.0",
|
||||
@ -82,11 +82,11 @@
|
||||
"@grafana/plugin-e2e": "^1.8.3",
|
||||
"@grafana/tsconfig": "^2.0.0",
|
||||
"@manypkg/get-packages": "^2.2.0",
|
||||
"@playwright/test": "1.47.2",
|
||||
"@playwright/test": "1.48.0",
|
||||
"@pmmmwh/react-refresh-webpack-plugin": "0.5.15",
|
||||
"@react-types/button": "3.9.6",
|
||||
"@react-types/menu": "3.9.11",
|
||||
"@react-types/overlays": "3.8.9",
|
||||
"@react-types/menu": "3.9.12",
|
||||
"@react-types/overlays": "3.8.10",
|
||||
"@react-types/shared": "3.24.1",
|
||||
"@rtk-query/codegen-openapi": "^1.2.0",
|
||||
"@rtsao/plugin-proposal-class-properties": "7.0.1-patch.1",
|
||||
@ -119,10 +119,10 @@
|
||||
"@types/jquery": "3.5.31",
|
||||
"@types/js-yaml": "^4.0.5",
|
||||
"@types/jsurl": "^1.2.28",
|
||||
"@types/lodash": "4.17.9",
|
||||
"@types/lodash": "4.17.10",
|
||||
"@types/logfmt": "^1.2.3",
|
||||
"@types/lucene": "^2",
|
||||
"@types/node": "20.16.9",
|
||||
"@types/node": "20.16.11",
|
||||
"@types/node-forge": "^1",
|
||||
"@types/ol-ext": "npm:@siedlerchr/types-ol-ext@3.2.4",
|
||||
"@types/pluralize": "^0.0.33",
|
||||
@ -181,7 +181,7 @@
|
||||
"eslint-plugin-jsx-a11y": "6.10.0",
|
||||
"eslint-plugin-lodash": "7.4.0",
|
||||
"eslint-plugin-no-barrel-files": "^1.1.0",
|
||||
"eslint-plugin-react": "7.37.0",
|
||||
"eslint-plugin-react": "7.37.1",
|
||||
"eslint-plugin-react-hooks": "4.6.0",
|
||||
"eslint-plugin-testing-library": "^6.2.2",
|
||||
"eslint-scope": "^8.0.0",
|
||||
@ -198,7 +198,7 @@
|
||||
"jest-canvas-mock": "2.5.2",
|
||||
"jest-date-mock": "1.0.10",
|
||||
"jest-environment-jsdom": "29.7.0",
|
||||
"jest-fail-on-console": "3.3.0",
|
||||
"jest-fail-on-console": "3.3.1",
|
||||
"jest-junit": "16.0.0",
|
||||
"jest-matcher-utils": "29.7.0",
|
||||
"jest-watch-typeahead": "^2.2.2",
|
||||
@ -221,7 +221,7 @@
|
||||
"react-test-renderer": "18.2.0",
|
||||
"redux-mock-store": "1.5.4",
|
||||
"rimraf": "6.0.1",
|
||||
"rudder-sdk-js": "2.48.18",
|
||||
"rudder-sdk-js": "2.48.19",
|
||||
"sass": "1.79.3",
|
||||
"sass-loader": "16.0.2",
|
||||
"smtp-tester": "^2.1.0",
|
||||
@ -246,17 +246,17 @@
|
||||
"yargs": "^17.5.1"
|
||||
},
|
||||
"dependencies": {
|
||||
"@emotion/css": "11.13.0",
|
||||
"@emotion/css": "11.13.4",
|
||||
"@emotion/react": "11.13.3",
|
||||
"@fingerprintjs/fingerprintjs": "^3.4.2",
|
||||
"@floating-ui/react": "0.26.24",
|
||||
"@formatjs/intl-durationformat": "^0.2.4",
|
||||
"@glideapps/glide-data-grid": "^6.0.0",
|
||||
"@grafana/aws-sdk": "0.4.2",
|
||||
"@grafana/aws-sdk": "0.5.0",
|
||||
"@grafana/azure-sdk": "0.0.3",
|
||||
"@grafana/data": "workspace:*",
|
||||
"@grafana/e2e-selectors": "workspace:*",
|
||||
"@grafana/experimental": "1.8.0",
|
||||
"@grafana/experimental": "2.1.1",
|
||||
"@grafana/faro-core": "^1.3.6",
|
||||
"@grafana/faro-web-sdk": "^1.3.6",
|
||||
"@grafana/faro-web-tracing": "^1.8.2",
|
||||
@ -268,14 +268,14 @@
|
||||
"@grafana/prometheus": "workspace:*",
|
||||
"@grafana/runtime": "workspace:*",
|
||||
"@grafana/saga-icons": "workspace:*",
|
||||
"@grafana/scenes": "5.16.2",
|
||||
"@grafana/scenes": "5.18.3",
|
||||
"@grafana/schema": "workspace:*",
|
||||
"@grafana/sql": "workspace:*",
|
||||
"@grafana/ui": "workspace:*",
|
||||
"@hello-pangea/dnd": "16.6.0",
|
||||
"@kusto/monaco-kusto": "^10.0.0",
|
||||
"@leeoniya/ufuzzy": "1.0.14",
|
||||
"@lezer/common": "1.2.1",
|
||||
"@lezer/common": "1.2.2",
|
||||
"@lezer/highlight": "1.2.1",
|
||||
"@lezer/lr": "1.3.3",
|
||||
"@locker/near-membrane-dom": "0.13.6",
|
||||
@ -287,12 +287,12 @@
|
||||
"@opentelemetry/exporter-collector": "0.25.0",
|
||||
"@opentelemetry/semantic-conventions": "1.27.0",
|
||||
"@popperjs/core": "2.11.8",
|
||||
"@react-aria/dialog": "3.5.17",
|
||||
"@react-aria/focus": "3.18.2",
|
||||
"@react-aria/overlays": "3.23.2",
|
||||
"@react-aria/utils": "3.25.2",
|
||||
"@react-aria/dialog": "3.5.18",
|
||||
"@react-aria/focus": "3.18.3",
|
||||
"@react-aria/overlays": "3.23.3",
|
||||
"@react-aria/utils": "3.25.3",
|
||||
"@react-awesome-query-builder/ui": "6.6.3",
|
||||
"@reduxjs/toolkit": "2.2.7",
|
||||
"@reduxjs/toolkit": "2.2.8",
|
||||
"@testing-library/react-hooks": "^8.0.1",
|
||||
"@visx/event": "3.3.0",
|
||||
"@visx/gradient": "3.3.0",
|
||||
@ -345,7 +345,7 @@
|
||||
"ml-regression-polynomial": "^3.0.0",
|
||||
"ml-regression-simple-linear": "^3.0.0",
|
||||
"moment": "2.30.1",
|
||||
"moment-timezone": "0.5.45",
|
||||
"moment-timezone": "0.5.46",
|
||||
"monaco-editor": "0.34.1",
|
||||
"moveable": "0.53.0",
|
||||
"nanoid": "^5.0.4",
|
||||
@ -354,7 +354,7 @@
|
||||
"ol-ext": "4.0.23",
|
||||
"pluralize": "^8.0.0",
|
||||
"prismjs": "1.29.0",
|
||||
"rc-slider": "11.1.6",
|
||||
"rc-slider": "11.1.7",
|
||||
"rc-time-picker": "3.7.3",
|
||||
"rc-tree": "5.9.0",
|
||||
"re-resizable": "6.10.0",
|
||||
|
@ -49,7 +49,7 @@
|
||||
"marked": "12.0.2",
|
||||
"marked-mangle": "1.1.9",
|
||||
"moment": "2.30.1",
|
||||
"moment-timezone": "0.5.45",
|
||||
"moment-timezone": "0.5.46",
|
||||
"ol": "7.4.0",
|
||||
"papaparse": "5.4.1",
|
||||
"react-use": "17.5.1",
|
||||
@ -65,8 +65,8 @@
|
||||
"@rollup/plugin-node-resolve": "15.3.0",
|
||||
"@types/dompurify": "^3.0.0",
|
||||
"@types/history": "4.7.11",
|
||||
"@types/lodash": "4.17.9",
|
||||
"@types/node": "20.16.9",
|
||||
"@types/lodash": "4.17.10",
|
||||
"@types/node": "20.16.11",
|
||||
"@types/papaparse": "5.3.14",
|
||||
"@types/react": "18.3.3",
|
||||
"@types/react-dom": "18.2.25",
|
||||
@ -34,6 +34,10 @@ export const limitTransformer: DataTransformerInfo<LimitTransformerOptions> = {
        limit = options.limitField;
      }
    }
    // Prevent negative limit
    if (limit < 0) {
      limit = 0;
    }
    return data.map((frame) => {
      if (frame.length > limit) {
        return {

@ -158,6 +158,7 @@ export interface FeatureToggles {
  newFolderPicker?: boolean;
  jitterAlertRulesWithinGroups?: boolean;
  onPremToCloudMigrations?: boolean;
  onPremToCloudMigrationsAlerts?: boolean;
  alertingSaveStatePeriodic?: boolean;
  promQLScope?: boolean;
  sqlExpressions?: boolean;
@ -219,4 +220,5 @@ export interface FeatureToggles {
  useSessionStorageForRedirection?: boolean;
  rolePickerDrawer?: boolean;
  unifiedStorageSearch?: boolean;
  pluginsSriChecks?: boolean;
}

@ -99,6 +99,7 @@ export interface PluginMeta<T extends KeyValue = {}> {
  angularDetected?: boolean;
  loadingStrategy?: PluginLoadingStrategy;
  extensions?: PluginExtensions;
  moduleHash?: string;
}

interface PluginDependencyInfo {
@ -40,7 +40,7 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@rollup/plugin-node-resolve": "15.3.0",
|
||||
"@types/node": "20.16.9",
|
||||
"@types/node": "20.16.11",
|
||||
"esbuild": "0.24.0",
|
||||
"rimraf": "6.0.1",
|
||||
"rollup": "^4.22.4",
|
||||
|
@ -43,7 +43,7 @@
|
||||
"not IE 11"
|
||||
],
|
||||
"dependencies": {
|
||||
"@emotion/css": "11.13.0",
|
||||
"@emotion/css": "11.13.4",
|
||||
"@grafana/data": "11.3.0-pre",
|
||||
"@grafana/ui": "11.3.0-pre",
|
||||
"@leeoniya/ufuzzy": "1.0.14",
|
||||
@ -56,9 +56,9 @@
|
||||
"tslib": "2.7.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/core": "7.25.2",
|
||||
"@babel/preset-env": "7.25.4",
|
||||
"@babel/preset-react": "7.24.7",
|
||||
"@babel/core": "7.25.7",
|
||||
"@babel/preset-env": "7.25.7",
|
||||
"@babel/preset-react": "7.25.7",
|
||||
"@grafana/tsconfig": "^2.0.0",
|
||||
"@rollup/plugin-node-resolve": "15.3.0",
|
||||
"@testing-library/dom": "10.0.0",
|
||||
@ -67,8 +67,8 @@
|
||||
"@testing-library/user-event": "14.5.2",
|
||||
"@types/d3": "^7",
|
||||
"@types/jest": "^29.5.4",
|
||||
"@types/lodash": "4.17.9",
|
||||
"@types/node": "20.16.9",
|
||||
"@types/lodash": "4.17.10",
|
||||
"@types/node": "20.16.11",
|
||||
"@types/react": "18.3.3",
|
||||
"@types/react-virtualized-auto-sizer": "1.0.4",
|
||||
"@types/tinycolor2": "1.4.6",
|
||||
|
@ -34,7 +34,7 @@
|
||||
"build": "yarn generate && rollup -c rollup.config.ts --configPlugin esbuild"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/core": "7.25.2",
|
||||
"@babel/core": "7.25.7",
|
||||
"@grafana/tsconfig": "^2.0.0",
|
||||
"@rollup/plugin-node-resolve": "^15.3.0",
|
||||
"@rollup/plugin-typescript": "^12.1.0",
|
||||
@ -45,7 +45,7 @@
|
||||
"@svgr/plugin-prettier": "^8.1.0",
|
||||
"@svgr/plugin-svgo": "^8.1.0",
|
||||
"@types/babel__core": "^7",
|
||||
"@types/node": "20.16.9",
|
||||
"@types/node": "20.16.11",
|
||||
"@types/react": "18.3.3",
|
||||
"@types/react-dom": "18.2.25",
|
||||
"esbuild": "0.24.0",
|
||||
|
@ -17,10 +17,10 @@
|
||||
"typecheck": "tsc --emitDeclarationOnly false --noEmit"
|
||||
},
|
||||
"dependencies": {
|
||||
"@emotion/css": "11.13.0",
|
||||
"@emotion/css": "11.13.4",
|
||||
"@grafana/data": "11.3.0-pre",
|
||||
"@grafana/e2e-selectors": "11.3.0-pre",
|
||||
"@grafana/experimental": "1.8.0",
|
||||
"@grafana/experimental": "2.1.1",
|
||||
"@grafana/runtime": "11.3.0-pre",
|
||||
"@grafana/schema": "11.3.0-pre",
|
||||
"@grafana/ui": "11.3.0-pre",
|
||||
@ -36,7 +36,7 @@
|
||||
"@testing-library/react": "15.0.2",
|
||||
"@testing-library/user-event": "14.5.2",
|
||||
"@types/jest": "^29.5.4",
|
||||
"@types/node": "20.16.9",
|
||||
"@types/node": "20.16.11",
|
||||
"@types/react": "18.3.3",
|
||||
"@types/systemjs": "6.15.1",
|
||||
"@types/testing-library__jest-dom": "5.14.9",
|
||||
|
@ -36,21 +36,21 @@
|
||||
"postpack": "mv package.json.bak package.json"
|
||||
},
|
||||
"dependencies": {
|
||||
"@emotion/css": "11.13.0",
|
||||
"@emotion/css": "11.13.4",
|
||||
"@floating-ui/react": "0.26.24",
|
||||
"@grafana/data": "11.3.0-pre",
|
||||
"@grafana/experimental": "1.8.0",
|
||||
"@grafana/faro-web-sdk": "1.10.1",
|
||||
"@grafana/experimental": "2.1.1",
|
||||
"@grafana/faro-web-sdk": "1.10.2",
|
||||
"@grafana/runtime": "11.3.0-pre",
|
||||
"@grafana/schema": "11.3.0-pre",
|
||||
"@grafana/ui": "11.3.0-pre",
|
||||
"@hello-pangea/dnd": "16.6.0",
|
||||
"@leeoniya/ufuzzy": "1.0.14",
|
||||
"@lezer/common": "1.2.1",
|
||||
"@lezer/common": "1.2.2",
|
||||
"@lezer/highlight": "1.2.1",
|
||||
"@lezer/lr": "1.4.2",
|
||||
"@prometheus-io/lezer-promql": "0.54.1",
|
||||
"@reduxjs/toolkit": "2.2.7",
|
||||
"@reduxjs/toolkit": "2.2.8",
|
||||
"d3": "7.9.0",
|
||||
"date-fns": "3.6.0",
|
||||
"debounce-promise": "3.1.2",
|
||||
@ -60,7 +60,7 @@
|
||||
"marked": "12.0.2",
|
||||
"marked-mangle": "1.1.9",
|
||||
"moment": "2.30.1",
|
||||
"moment-timezone": "0.5.45",
|
||||
"moment-timezone": "0.5.46",
|
||||
"monaco-promql": "1.7.4",
|
||||
"pluralize": "8.0.0",
|
||||
"prismjs": "1.29.0",
|
||||
@ -91,8 +91,8 @@
|
||||
"@types/eslint": "8.56.10",
|
||||
"@types/jest": "29.5.13",
|
||||
"@types/jquery": "3.5.31",
|
||||
"@types/lodash": "4.17.9",
|
||||
"@types/node": "20.16.9",
|
||||
"@types/lodash": "4.17.10",
|
||||
"@types/node": "20.16.11",
|
||||
"@types/pluralize": "^0.0.33",
|
||||
"@types/prismjs": "1.26.4",
|
||||
"@types/react": "18.3.3",
|
||||
@ -114,7 +114,7 @@
|
||||
"eslint-plugin-jsdoc": "48.11.0",
|
||||
"eslint-plugin-jsx-a11y": "6.10.0",
|
||||
"eslint-plugin-lodash": "7.4.0",
|
||||
"eslint-plugin-react": "7.37.0",
|
||||
"eslint-plugin-react": "7.37.1",
|
||||
"eslint-plugin-react-hooks": "4.6.0",
|
||||
"eslint-webpack-plugin": "4.2.0",
|
||||
"fork-ts-checker-webpack-plugin": "9.0.2",
|
||||
|
@ -57,7 +57,7 @@
|
||||
"@types/angular": "1.8.9",
|
||||
"@types/history": "4.7.11",
|
||||
"@types/jest": "29.5.13",
|
||||
"@types/lodash": "4.17.9",
|
||||
"@types/lodash": "4.17.10",
|
||||
"@types/react": "18.3.3",
|
||||
"@types/react-dom": "18.2.25",
|
||||
"@types/systemjs": "6.15.1",
|
||||
|
@ -46,6 +46,7 @@ export type AppPluginConfig = {
  loadingStrategy: PluginLoadingStrategy;
  dependencies: PluginDependencies;
  extensions: PluginExtensions;
  moduleHash?: string;
};

export type PreinstalledPlugin = {
@ -14,10 +14,10 @@
|
||||
"typecheck": "tsc --emitDeclarationOnly false --noEmit"
|
||||
},
|
||||
"dependencies": {
|
||||
"@emotion/css": "11.13.0",
|
||||
"@emotion/css": "11.13.4",
|
||||
"@grafana/data": "11.3.0-pre",
|
||||
"@grafana/e2e-selectors": "11.3.0-pre",
|
||||
"@grafana/experimental": "1.8.0",
|
||||
"@grafana/experimental": "2.1.1",
|
||||
"@grafana/runtime": "11.3.0-pre",
|
||||
"@grafana/ui": "11.3.0-pre",
|
||||
"@react-awesome-query-builder/ui": "6.6.3",
|
||||
@ -41,8 +41,8 @@
|
||||
"@testing-library/react-hooks": "^8.0.1",
|
||||
"@testing-library/user-event": "14.5.2",
|
||||
"@types/jest": "^29.5.4",
|
||||
"@types/lodash": "4.17.9",
|
||||
"@types/node": "20.16.9",
|
||||
"@types/lodash": "4.17.10",
|
||||
"@types/node": "20.16.11",
|
||||
"@types/react": "18.3.3",
|
||||
"@types/react-dom": "18.2.25",
|
||||
"@types/react-virtualized-auto-sizer": "1.0.4",
|
||||
|
@ -47,7 +47,7 @@
|
||||
"not IE 11"
|
||||
],
|
||||
"dependencies": {
|
||||
"@emotion/css": "11.13.0",
|
||||
"@emotion/css": "11.13.4",
|
||||
"@emotion/react": "11.13.3",
|
||||
"@emotion/serialize": "1.3.2",
|
||||
"@floating-ui/react": "0.26.24",
|
||||
@ -59,13 +59,13 @@
|
||||
"@leeoniya/ufuzzy": "1.0.14",
|
||||
"@monaco-editor/react": "4.6.0",
|
||||
"@popperjs/core": "2.11.8",
|
||||
"@react-aria/dialog": "3.5.17",
|
||||
"@react-aria/focus": "3.18.2",
|
||||
"@react-aria/overlays": "3.23.2",
|
||||
"@react-aria/utils": "3.25.2",
|
||||
"@react-aria/dialog": "3.5.18",
|
||||
"@react-aria/focus": "3.18.3",
|
||||
"@react-aria/overlays": "3.23.3",
|
||||
"@react-aria/utils": "3.25.3",
|
||||
"@tanstack/react-virtual": "^3.5.1",
|
||||
"@types/jquery": "3.5.31",
|
||||
"@types/lodash": "4.17.9",
|
||||
"@types/lodash": "4.17.10",
|
||||
"@types/react-table": "7.7.20",
|
||||
"ansicolor": "1.1.100",
|
||||
"calculate-size": "1.1.1",
|
||||
@ -87,13 +87,13 @@
|
||||
"prismjs": "1.29.0",
|
||||
"rc-cascader": "3.28.1",
|
||||
"rc-drawer": "7.2.0",
|
||||
"rc-slider": "11.1.6",
|
||||
"rc-slider": "11.1.7",
|
||||
"rc-time-picker": "^3.7.3",
|
||||
"rc-tooltip": "6.2.1",
|
||||
"react-calendar": "5.0.0",
|
||||
"react-colorful": "5.6.1",
|
||||
"react-custom-scrollbars-2": "4.5.0",
|
||||
"react-dropzone": "14.2.3",
|
||||
"react-dropzone": "14.2.9",
|
||||
"react-highlight-words": "0.20.0",
|
||||
"react-hook-form": "^7.49.2",
|
||||
"react-i18next": "^14.0.0",
|
||||
@ -115,7 +115,7 @@
|
||||
"uuid": "9.0.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/core": "7.25.2",
|
||||
"@babel/core": "7.25.7",
|
||||
"@faker-js/faker": "^9.0.0",
|
||||
"@grafana/tsconfig": "^2.0.0",
|
||||
"@rollup/plugin-node-resolve": "15.3.0",
|
||||
@ -146,7 +146,7 @@
|
||||
"@types/is-hotkey": "0.1.10",
|
||||
"@types/jest": "29.5.13",
|
||||
"@types/mock-raf": "1.0.6",
|
||||
"@types/node": "20.16.9",
|
||||
"@types/node": "20.16.11",
|
||||
"@types/prismjs": "1.26.4",
|
||||
"@types/react": "18.3.3",
|
||||
"@types/react-color": "3.0.12",
|
||||
|
@ -152,7 +152,13 @@ export const MenuItem = React.memo(
          className={itemStyle}
          rel={target === '_blank' ? 'noopener noreferrer' : undefined}
          href={url}
          onClick={onClick}
          onClick={(event) => {
            if (hasSubMenu && !isSubMenuOpen) {
              event.preventDefault();
              event.stopPropagation();
            }
            onClick?.(event);
          }}
          onMouseEnter={onMouseEnter}
          onMouseLeave={onMouseLeave}
          onKeyDown={handleKeys}

@ -40,6 +40,7 @@ export const TableCell = ({
  }

  if (cellProps.style) {
    cellProps.style.wordBreak = 'break-word';
    cellProps.style.minWidth = cellProps.style.width;
    const justifyContent = (cell.column as any).justifyContent;

@ -180,6 +180,7 @@ export class VizRepeater<V, D = {}> extends PureComponent<PropsWithDefaults<V, D
        const defaultVizHeight = (height + itemSpacing) / values.length - itemSpacing;
        repeaterStyle.flexDirection = 'column';
        repeaterStyle.height = `${height}px`;
        repeaterStyle.overflowX = 'hidden';
        itemStyles.marginBottom = `${itemSpacing}px`;
        vizWidth = width;
        vizHeight = clamp(defaultVizHeight, minVizHeight ?? 0, maxVizHeight ?? defaultVizHeight);
@ -187,6 +188,7 @@ export class VizRepeater<V, D = {}> extends PureComponent<PropsWithDefaults<V, D
      case VizOrientation.Vertical:
        repeaterStyle.flexDirection = 'row';
        repeaterStyle.justifyContent = 'space-between';
        repeaterStyle.overflowY = 'hidden';
        itemStyles.marginRight = `${itemSpacing}px`;
        vizHeight = height;
        vizWidth = Math.max(width / values.length - itemSpacing + itemSpacing / values.length, minVizWidth ?? 0);
@ -656,15 +656,15 @@ func (hs *HTTPServer) declareFixedRoles() error {

// Metadata helpers
// getAccessControlMetadata returns the accesscontrol metadata associated with a given resource
func (hs *HTTPServer) getAccessControlMetadata(c *contextmodel.ReqContext,
func getAccessControlMetadata(c *contextmodel.ReqContext,
    prefix string, resourceID string) ac.Metadata {
    ids := map[string]bool{resourceID: true}
    return hs.getMultiAccessControlMetadata(c, prefix, ids)[resourceID]
    return getMultiAccessControlMetadata(c, prefix, ids)[resourceID]
}

// getMultiAccessControlMetadata returns the accesscontrol metadata associated with a given set of resources
// Context must contain permissions in the given org (see LoadPermissionsMiddleware or AuthorizeInOrgMiddleware)
func (hs *HTTPServer) getMultiAccessControlMetadata(c *contextmodel.ReqContext,
func getMultiAccessControlMetadata(c *contextmodel.ReqContext,
    prefix string, resourceIDs map[string]bool) map[string]ac.Metadata {
    if !c.QueryBool("accesscontrol") {
        return map[string]ac.Metadata{}

@ -306,6 +306,10 @@ func (hs *HTTPServer) registerRoutes() {
        apiRoute.Group("/search-v2", hs.SearchV2HTTPService.RegisterHTTPRoutes)
    }

    if hs.Features.IsEnabledGlobally(featuremgmt.FlagUnifiedStorageSearch) {
        apiRoute.Group("/unified-search", hs.UnifiedSearchHTTPService.RegisterHTTPRoutes)
    }

    // current org
    apiRoute.Group("/org", func(orgRoute routing.RouteRegister) {
        userIDScope := ac.Scope("users", "id", ac.Parameter(":userId"))

@ -56,7 +56,7 @@ func (hs *HTTPServer) GetAPIKeys(c *contextmodel.ReqContext) response.Response {
        }
    }

    metadata := hs.getMultiAccessControlMetadata(c, "apikeys:id", ids)
    metadata := getMultiAccessControlMetadata(c, "apikeys:id", ids)
    if len(metadata) > 0 {
        for _, key := range result {
            key.AccessControl = metadata[strconv.FormatInt(key.ID, 10)]

@ -136,7 +136,7 @@ func (hs *HTTPServer) GetDataSourceById(c *contextmodel.ReqContext) response.Res
    dto := hs.convertModelToDtos(c.Req.Context(), dataSource)

    // Add accesscontrol metadata
    dto.AccessControl = hs.getAccessControlMetadata(c, datasources.ScopePrefix, dto.UID)
    dto.AccessControl = getAccessControlMetadata(c, datasources.ScopePrefix, dto.UID)

    return response.JSON(http.StatusOK, &dto)
}
@ -222,7 +222,7 @@ func (hs *HTTPServer) GetDataSourceByUID(c *contextmodel.ReqContext) response.Re
    dto := hs.convertModelToDtos(c.Req.Context(), ds)

    // Add accesscontrol metadata
    dto.AccessControl = hs.getAccessControlMetadata(c, datasources.ScopePrefix, dto.UID)
    dto.AccessControl = getAccessControlMetadata(c, datasources.ScopePrefix, dto.UID)

    return response.JSON(http.StatusOK, &dto)
}

@ -30,6 +30,7 @@ type PluginSetting struct {
    SignatureOrg string `json:"signatureOrg"`
    AngularDetected bool `json:"angularDetected"`
    LoadingStrategy plugins.LoadingStrategy `json:"loadingStrategy"`
    ModuleHash string `json:"moduleHash,omitempty"`
}

type PluginListItem struct {
@ -1,12 +1,16 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
k8sErrors "k8s.io/apimachinery/pkg/api/errors"
|
||||
v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
|
||||
"k8s.io/apimachinery/pkg/runtime/schema"
|
||||
"k8s.io/client-go/dynamic"
|
||||
|
||||
@ -28,6 +32,7 @@ import (
|
||||
"github.com/grafana/grafana/pkg/services/guardian"
|
||||
"github.com/grafana/grafana/pkg/services/libraryelements/model"
|
||||
"github.com/grafana/grafana/pkg/services/search"
|
||||
"github.com/grafana/grafana/pkg/services/user"
|
||||
"github.com/grafana/grafana/pkg/util"
|
||||
"github.com/grafana/grafana/pkg/util/errhttp"
|
||||
"github.com/grafana/grafana/pkg/web"
|
||||
@ -448,7 +453,7 @@ func (hs *HTTPServer) getFolderACMetadata(c *contextmodel.ReqContext, f *folder.
|
||||
folderIDs[p.UID] = true
|
||||
}
|
||||
|
||||
allMetadata := hs.getMultiAccessControlMetadata(c, dashboards.ScopeFoldersPrefix, folderIDs)
|
||||
allMetadata := getMultiAccessControlMetadata(c, dashboards.ScopeFoldersPrefix, folderIDs)
|
||||
metadata := map[string]bool{}
|
||||
// Flatten metadata - if any parent has a permission, the child folder inherits it
|
||||
for _, md := range allMetadata {
|
||||
@ -629,6 +634,9 @@ type folderK8sHandler struct {
|
||||
clientConfigProvider grafanaapiserver.DirectRestConfigProvider
|
||||
// #TODO check if it makes more sense to move this to FolderAPIBuilder
|
||||
accesscontrolService accesscontrol.Service
|
||||
userService user.Service
|
||||
// #TODO remove after we handle the nested folder case
|
||||
folderService folder.Service
|
||||
}
|
||||
|
||||
//-----------------------------------------------------------------------------------------
|
||||
@ -641,6 +649,8 @@ func newFolderK8sHandler(hs *HTTPServer) *folderK8sHandler {
|
||||
namespacer: request.GetNamespaceMapper(hs.Cfg),
|
||||
clientConfigProvider: hs.clientConfigProvider,
|
||||
accesscontrolService: hs.accesscontrolService,
|
||||
userService: hs.userService,
|
||||
folderService: hs.folderService,
|
||||
}
|
||||
}
|
||||
|
||||
@ -693,12 +703,13 @@ func (fk8s *folderK8sHandler) createFolder(c *contextmodel.ReqContext) {
|
||||
}
|
||||
|
||||
fk8s.accesscontrolService.ClearUserPermissionCache(c.SignedInUser)
|
||||
f, err := internalfolders.UnstructuredToLegacyFolderDTO(*out)
|
||||
folderDTO, err := fk8s.newToFolderDto(c, *out, c.SignedInUser.GetOrgID())
|
||||
if err != nil {
|
||||
fk8s.writeError(c, err)
|
||||
return
|
||||
}
|
||||
c.JSON(http.StatusOK, f)
|
||||
|
||||
c.JSON(http.StatusOK, folderDTO)
|
||||
}
|
||||
|
||||
// func (fk8s *folderK8sHandler) getFolder(c *contextmodel.ReqContext) {
|
||||
@ -713,13 +724,13 @@ func (fk8s *folderK8sHandler) createFolder(c *contextmodel.ReqContext) {
|
||||
// return
|
||||
// }
|
||||
|
||||
// f, err := internalfolders.UnstructuredToLegacyFolderDTO(*out)
|
||||
// if err != nil {
|
||||
// fk8s.writeError(c, err)
|
||||
// return
|
||||
// }
|
||||
// folderDTO, err := fk8s.newToFolderDto(c, *out)
|
||||
// if err != nil {
|
||||
// fk8s.writeError(c, err)
|
||||
// return
|
||||
// }
|
||||
|
||||
// c.JSON(http.StatusOK, f)
|
||||
// c.JSON(http.StatusOK, folderDTO)
|
||||
// }
|
||||
|
||||
// func (fk8s *folderK8sHandler) deleteFolder(c *contextmodel.ReqContext) {
|
||||
@ -755,13 +766,13 @@ func (fk8s *folderK8sHandler) createFolder(c *contextmodel.ReqContext) {
|
||||
// return
|
||||
// }
|
||||
|
||||
// f, err := internalfolders.UnstructuredToLegacyFolderDTO(*out)
|
||||
// if err != nil {
|
||||
// fk8s.writeError(c, err)
|
||||
// return
|
||||
// }
|
||||
// folderDTO, err := fk8s.newToFolderDto(c, *out)
|
||||
// if err != nil {
|
||||
// fk8s.writeError(c, err)
|
||||
// return
|
||||
// }
|
||||
|
||||
// c.JSON(http.StatusOK, f)
|
||||
// c.JSON(http.StatusOK, folderDTO)
|
||||
// }
|
||||
|
||||
//-----------------------------------------------------------------------------------------
|
||||
@ -786,3 +797,188 @@ func (fk8s *folderK8sHandler) writeError(c *contextmodel.ReqContext, err error)
|
||||
}
|
||||
errhttp.Write(c.Req.Context(), err, c.Resp)
|
||||
}
|
||||
|
||||
func (fk8s *folderK8sHandler) newToFolderDto(c *contextmodel.ReqContext, item unstructured.Unstructured, orgID int64) (dtos.Folder, error) {
|
||||
// #TODO revisit how/where we get orgID
|
||||
ctx := c.Req.Context()
|
||||
|
||||
f := internalfolders.UnstructuredToLegacyFolder(item, orgID)
|
||||
|
||||
fDTO, err := internalfolders.UnstructuredToLegacyFolderDTO(item)
|
||||
if err != nil {
|
||||
return dtos.Folder{}, err
|
||||
}
|
||||
|
||||
toID := func(rawIdentifier string) (int64, error) {
|
||||
parts := strings.Split(rawIdentifier, ":")
|
||||
if len(parts) < 2 {
|
||||
return 0, fmt.Errorf("invalid user identifier")
|
||||
}
|
||||
userID, err := strconv.ParseInt(parts[1], 10, 64)
|
||||
if err != nil {
|
||||
return 0, fmt.Errorf("faild to parse user identifier")
|
||||
}
|
||||
return userID, nil
|
||||
}
|
||||
|
||||
toDTO := func(fold *folder.Folder, checkCanView bool) (dtos.Folder, error) {
|
||||
g, err := guardian.NewByFolder(c.Req.Context(), fold, c.SignedInUser.GetOrgID(), c.SignedInUser)
|
||||
if err != nil {
|
||||
return dtos.Folder{}, err
|
||||
}
|
||||
|
||||
canEdit, _ := g.CanEdit()
|
||||
canSave, _ := g.CanSave()
|
||||
canAdmin, _ := g.CanAdmin()
|
||||
canDelete, _ := g.CanDelete()
|
||||
|
||||
// Finding creator and last updater of the folder
|
||||
updater, creator := anonString, anonString
|
||||
// #TODO refactor the various conversions of the folder so that we either set created by in folder.Folder or
|
||||
// we convert from unstructured to folder DTO without an intermediate conversion to folder.Folder
|
||||
if len(fDTO.CreatedBy) > 0 {
|
||||
id, err := toID(fDTO.CreatedBy)
|
||||
if err != nil {
|
||||
return dtos.Folder{}, err
|
||||
}
|
||||
creator = fk8s.getUserLogin(ctx, id)
|
||||
}
|
||||
if len(fDTO.UpdatedBy) > 0 {
|
||||
id, err := toID(fDTO.UpdatedBy)
|
||||
if err != nil {
|
||||
return dtos.Folder{}, err
|
||||
}
|
||||
updater = fk8s.getUserLogin(ctx, id)
|
||||
}
|
||||
|
||||
acMetadata, _ := fk8s.getFolderACMetadata(c, fold)
|
||||
|
||||
if checkCanView {
|
||||
canView, _ := g.CanView()
|
||||
if !canView {
|
||||
return dtos.Folder{
|
||||
UID: REDACTED,
|
||||
Title: REDACTED,
|
||||
}, nil
|
||||
}
|
||||
}
|
||||
metrics.MFolderIDsAPICount.WithLabelValues(metrics.NewToFolderDTO).Inc()
|
||||
|
||||
fDTO.CanSave = canSave
|
||||
fDTO.CanEdit = canEdit
|
||||
fDTO.CanAdmin = canAdmin
|
||||
fDTO.CanDelete = canDelete
|
||||
fDTO.CreatedBy = creator
|
||||
fDTO.UpdatedBy = updater
|
||||
fDTO.AccessControl = acMetadata
|
||||
fDTO.OrgID = f.OrgID
|
||||
// #TODO version doesn't seem to be used--confirm or set it properly
|
||||
fDTO.Version = 1
|
||||
|
||||
return *fDTO, nil
|
||||
}
|
||||
|
||||
// no need to check view permission for the starting folder since it's already checked by the callers
|
||||
folderDTO, err := toDTO(f, false)
|
||||
if err != nil {
|
||||
return dtos.Folder{}, err
|
||||
}
|
||||
|
||||
parents := []*folder.Folder{}
|
||||
if folderDTO.ParentUID != "" {
|
||||
parents, err = fk8s.folderService.GetParents(
|
||||
c.Req.Context(),
|
||||
folder.GetParentsQuery{
|
||||
UID: folderDTO.UID,
|
||||
OrgID: folderDTO.OrgID,
|
||||
})
|
||||
if err != nil {
|
||||
return dtos.Folder{}, err
|
||||
}
|
||||
}
|
||||
|
||||
// #TODO refactor so that we have just one function for converting to folder DTO
|
||||
toParentDTO := func(fold *folder.Folder, checkCanView bool) (dtos.Folder, error) {
|
||||
g, err := guardian.NewByFolder(c.Req.Context(), fold, c.SignedInUser.GetOrgID(), c.SignedInUser)
|
||||
if err != nil {
|
||||
return dtos.Folder{}, err
|
||||
}
|
||||
|
||||
if checkCanView {
|
||||
canView, _ := g.CanView()
|
||||
if !canView {
|
||||
return dtos.Folder{
|
||||
UID: REDACTED,
|
||||
Title: REDACTED,
|
||||
}, nil
|
||||
}
|
||||
}
|
||||
metrics.MFolderIDsAPICount.WithLabelValues(metrics.NewToFolderDTO).Inc()
|
||||
|
||||
return dtos.Folder{
|
||||
UID: fold.UID,
|
||||
Title: fold.Title,
|
||||
URL: fold.URL,
|
||||
}, nil
|
||||
}
|
||||
|
||||
folderDTO.Parents = make([]dtos.Folder, 0, len(parents))
|
||||
for _, f := range parents {
|
||||
DTO, err := toParentDTO(f, true)
|
||||
if err != nil {
|
||||
// #TODO add logging
|
||||
// fk8s.log.Error("failed to convert folder to DTO", "folder", f.UID, "org", f.OrgID, "error", err)
|
||||
continue
|
||||
}
|
||||
folderDTO.Parents = append(folderDTO.Parents, DTO)
|
||||
}
|
||||
|
||||
return folderDTO, nil
|
||||
}
|
||||
|
||||
func (fk8s *folderK8sHandler) getUserLogin(ctx context.Context, userID int64) string {
|
||||
ctx, span := tracer.Start(ctx, "api.getUserLogin")
|
||||
defer span.End()
|
||||
|
||||
query := user.GetUserByIDQuery{ID: userID}
|
||||
user, err := fk8s.userService.GetByID(ctx, &query)
|
||||
if err != nil {
|
||||
return anonString
|
||||
}
|
||||
return user.Login
|
||||
}
|
||||
|
||||
func (fk8s *folderK8sHandler) getFolderACMetadata(c *contextmodel.ReqContext, f *folder.Folder) (accesscontrol.Metadata, error) {
|
||||
if !c.QueryBool("accesscontrol") {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
var err error
|
||||
parents := []*folder.Folder{}
|
||||
if f.ParentUID != "" {
|
||||
parents, err = fk8s.folderService.GetParents(
|
||||
c.Req.Context(),
|
||||
folder.GetParentsQuery{
|
||||
UID: f.UID,
|
||||
OrgID: c.SignedInUser.GetOrgID(),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
folderIDs := map[string]bool{f.UID: true}
|
||||
for _, p := range parents {
|
||||
folderIDs[p.UID] = true
|
||||
}
|
||||
|
||||
allMetadata := getMultiAccessControlMetadata(c, dashboards.ScopeFoldersPrefix, folderIDs)
|
||||
metadata := map[string]bool{}
|
||||
// Flatten metadata - if any parent has a permission, the child folder inherits it
|
||||
for _, md := range allMetadata {
|
||||
for action := range md {
|
||||
metadata[action] = true
|
||||
}
|
||||
}
|
||||
return metadata, nil
|
||||
}
|
||||
|
@ -5,17 +5,20 @@ import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/mock"
|
||||
"github.com/stretchr/testify/require"
|
||||
clientrest "k8s.io/client-go/rest"
|
||||
|
||||
"github.com/grafana/grafana/pkg/api/dtos"
|
||||
"github.com/grafana/grafana/pkg/services/accesscontrol"
|
||||
"github.com/grafana/grafana/pkg/services/accesscontrol/actest"
|
||||
acmock "github.com/grafana/grafana/pkg/services/accesscontrol/mock"
|
||||
contextmodel "github.com/grafana/grafana/pkg/services/contexthandler/model"
|
||||
"github.com/grafana/grafana/pkg/services/dashboards"
|
||||
"github.com/grafana/grafana/pkg/services/featuremgmt"
|
||||
"github.com/grafana/grafana/pkg/services/folder"
|
||||
@ -523,3 +526,105 @@ func TestFolderGetAPIEndpoint(t *testing.T) {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
type mockClientConfigProvider struct {
|
||||
host string
|
||||
}
|
||||
|
||||
func (m mockClientConfigProvider) GetDirectRestConfig(c *contextmodel.ReqContext) *clientrest.Config {
|
||||
return &clientrest.Config{
|
||||
Host: m.host,
|
||||
}
|
||||
}
|
||||
|
||||
func (m mockClientConfigProvider) DirectlyServeHTTP(w http.ResponseWriter, r *http.Request) {}
|
||||
|
||||
func TestHTTPServer_FolderMetadataK8s(t *testing.T) {
|
||||
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
//nolint:errcheck
|
||||
fmt.Fprintln(w,
|
||||
`{
|
||||
"kind": "Folder",
|
||||
"apiVersion": "folder.grafana.app/v0alpha1",
|
||||
"metadata": {
|
||||
"name": "ady4yobv315a8e",
|
||||
"namespace": "default",
|
||||
"uid": "28f306ee-ada1-40f4-8011-b2d1df462aad",
|
||||
"creationTimestamp": "2024-09-17T04:16:35Z",
|
||||
"annotations": {
|
||||
"grafana.app/createdBy": "user:fdxsqt7t5ryf4a",
|
||||
"grafana.app/originName": "SQL",
|
||||
"grafana.app/originPath": "3"
|
||||
}
|
||||
},
|
||||
"spec": {
|
||||
"title": "Example folder 226"
|
||||
}
|
||||
}`)
|
||||
}))
|
||||
defer ts.Close()
|
||||
|
||||
mockClientConfigProvider := mockClientConfigProvider{
|
||||
host: ts.URL,
|
||||
}
|
||||
|
||||
setUpRBACGuardian(t)
|
||||
folderService := &foldertest.FakeService{}
|
||||
features := featuremgmt.WithFeatures(featuremgmt.FlagNestedFolders, featuremgmt.FlagKubernetesFolders)
|
||||
server := SetupAPITestServer(t, func(hs *HTTPServer) {
|
||||
hs.Cfg = setting.NewCfg()
|
||||
hs.folderService = folderService
|
||||
hs.QuotaService = quotatest.New(false, nil)
|
||||
hs.SearchService = &mockSearchService{
|
||||
ExpectedResult: model.HitList{},
|
||||
}
|
||||
hs.Features = features
|
||||
hs.clientConfigProvider = mockClientConfigProvider
|
||||
})
|
||||
|
||||
t.Run("Should attach access control metadata to folder response", func(t *testing.T) {
|
||||
folderService.ExpectedFolder = &folder.Folder{UID: "ady4yobv315a8e"}
|
||||
|
||||
req := server.NewGetRequest("/api/folders/ady4yobv315a8e?accesscontrol=true")
|
||||
webtest.RequestWithSignedInUser(req, &user.SignedInUser{UserID: 1, OrgID: 1, Permissions: map[int64]map[string][]string{
|
||||
1: accesscontrol.GroupScopesByActionContext(context.Background(), []accesscontrol.Permission{
|
||||
{Action: dashboards.ActionFoldersRead, Scope: dashboards.ScopeFoldersAll},
|
||||
{Action: dashboards.ActionFoldersWrite, Scope: dashboards.ScopeFoldersProvider.GetResourceScopeUID("ady4yobv315a8e")},
|
||||
}),
|
||||
}})
|
||||
|
||||
res, err := server.Send(req)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, http.StatusOK, res.StatusCode)
|
||||
defer func() { require.NoError(t, res.Body.Close()) }()
|
||||
|
||||
body := dtos.Folder{}
|
||||
require.NoError(t, json.NewDecoder(res.Body).Decode(&body))
|
||||
|
||||
assert.True(t, body.AccessControl[dashboards.ActionFoldersRead])
|
||||
assert.True(t, body.AccessControl[dashboards.ActionFoldersWrite])
|
||||
})
|
||||
|
||||
t.Run("Should not attach access control metadata to folder response", func(t *testing.T) {
|
||||
folderService.ExpectedFolder = &folder.Folder{UID: "ady4yobv315a8e"}
|
||||
|
||||
req := server.NewGetRequest("/api/folders/ady4yobv315a8e")
|
||||
webtest.RequestWithSignedInUser(req, &user.SignedInUser{UserID: 1, OrgID: 1, Permissions: map[int64]map[string][]string{
|
||||
1: accesscontrol.GroupScopesByActionContext(context.Background(), []accesscontrol.Permission{
|
||||
{Action: dashboards.ActionFoldersRead, Scope: dashboards.ScopeFoldersAll},
|
||||
{Action: dashboards.ActionFoldersWrite, Scope: dashboards.ScopeFoldersProvider.GetResourceScopeUID("ady4yobv315a8e")},
|
||||
}),
|
||||
}})
|
||||
|
||||
res, err := server.Send(req)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, http.StatusOK, res.StatusCode)
|
||||
defer func() { require.NoError(t, res.Body.Close()) }()
|
||||
|
||||
body := dtos.Folder{}
|
||||
require.NoError(t, json.NewDecoder(res.Body).Decode(&body))
|
||||
|
||||
assert.False(t, body.AccessControl[dashboards.ActionFoldersRead])
|
||||
assert.False(t, body.AccessControl[dashboards.ActionFoldersWrite])
|
||||
})
|
||||
}
|
||||
|
@ -145,6 +145,7 @@ func (hs *HTTPServer) getFrontendSettings(c *contextmodel.ReqContext) (*dtos.Fro
|
||||
AliasIDs: panel.AliasIDs,
|
||||
Info: panel.Info,
|
||||
Module: panel.Module,
|
||||
ModuleHash: hs.pluginAssets.ModuleHash(c.Req.Context(), panel),
|
||||
BaseURL: panel.BaseURL,
|
||||
SkipDataQuery: panel.SkipDataQuery,
|
||||
HideFromList: panel.HideFromList,
|
||||
@ -453,6 +454,7 @@ func (hs *HTTPServer) getFSDataSources(c *contextmodel.ReqContext, availablePlug
|
||||
JSONData: plugin.JSONData,
|
||||
Signature: plugin.Signature,
|
||||
Module: plugin.Module,
|
||||
ModuleHash: hs.pluginAssets.ModuleHash(c.Req.Context(), plugin),
|
||||
BaseURL: plugin.BaseURL,
|
||||
Angular: plugin.Angular,
|
||||
MultiValueFilterOperators: plugin.MultiValueFilterOperators,
|
||||
@ -538,8 +540,9 @@ func (hs *HTTPServer) getFSDataSources(c *contextmodel.ReqContext, availablePlug
|
||||
JSONData: ds.JSONData,
|
||||
Signature: ds.Signature,
|
||||
Module: ds.Module,
|
||||
BaseURL: ds.BaseURL,
|
||||
Angular: ds.Angular,
|
||||
// ModuleHash: hs.pluginAssets.ModuleHash(c.Req.Context(), ds),
|
||||
BaseURL: ds.BaseURL,
|
||||
Angular: ds.Angular,
|
||||
},
|
||||
}
|
||||
if ds.Name == grafanads.DatasourceName {
|
||||
@ -563,6 +566,7 @@ func (hs *HTTPServer) newAppDTO(ctx context.Context, plugin pluginstore.Plugin,
|
||||
LoadingStrategy: hs.pluginAssets.LoadingStrategy(ctx, plugin),
|
||||
Extensions: plugin.Extensions,
|
||||
Dependencies: plugin.Dependencies,
|
||||
ModuleHash: hs.pluginAssets.ModuleHash(ctx, plugin),
|
||||
}
|
||||
|
||||
if settings.Enabled {
|
||||
|
@ -18,6 +18,8 @@ import (
|
||||
"github.com/grafana/grafana/pkg/login/social/socialimpl"
|
||||
"github.com/grafana/grafana/pkg/plugins"
|
||||
"github.com/grafana/grafana/pkg/plugins/config"
|
||||
"github.com/grafana/grafana/pkg/plugins/manager/signature"
|
||||
"github.com/grafana/grafana/pkg/plugins/manager/signature/statickey"
|
||||
"github.com/grafana/grafana/pkg/plugins/pluginscdn"
|
||||
accesscontrolmock "github.com/grafana/grafana/pkg/services/accesscontrol/mock"
|
||||
"github.com/grafana/grafana/pkg/services/apiserver/endpoints/request"
|
||||
@ -51,10 +53,11 @@ func setupTestEnvironment(t *testing.T, cfg *setting.Cfg, features featuremgmt.F
|
||||
})
|
||||
}
|
||||
|
||||
pluginsCDN := pluginscdn.ProvideService(&config.PluginManagementCfg{
|
||||
pluginsCfg := &config.PluginManagementCfg{
|
||||
PluginsCDNURLTemplate: cfg.PluginsCDNURLTemplate,
|
||||
PluginSettings: cfg.PluginSettings,
|
||||
})
|
||||
}
|
||||
pluginsCDN := pluginscdn.ProvideService(pluginsCfg)
|
||||
|
||||
var pluginStore = pstore
|
||||
if pluginStore == nil {
|
||||
@ -68,7 +71,8 @@ func setupTestEnvironment(t *testing.T, cfg *setting.Cfg, features featuremgmt.F
|
||||
|
||||
var pluginsAssets = passets
|
||||
if pluginsAssets == nil {
|
||||
pluginsAssets = pluginassets.ProvideService(cfg, pluginsCDN)
|
||||
sig := signature.ProvideService(pluginsCfg, statickey.New())
|
||||
pluginsAssets = pluginassets.ProvideService(pluginsCfg, pluginsCDN, sig, pluginStore)
|
||||
}
|
||||
|
||||
hs := &HTTPServer{
|
||||
@ -240,6 +244,7 @@ func TestHTTPServer_GetFrontendSettings_apps(t *testing.T) {
|
||||
PluginList: []pluginstore.Plugin{
|
||||
{
|
||||
Module: fmt.Sprintf("/%s/module.js", "test-app"),
|
||||
// ModuleHash: "sha256-test",
|
||||
JSONData: plugins.JSONData{
|
||||
ID: "test-app",
|
||||
Info: plugins.Info{Version: "0.5.0"},
|
||||
@ -255,9 +260,7 @@ func TestHTTPServer_GetFrontendSettings_apps(t *testing.T) {
|
||||
Plugins: newAppSettings("test-app", false),
|
||||
}
|
||||
},
|
||||
pluginAssets: func() *pluginassets.Service {
|
||||
return pluginassets.ProvideService(setting.NewCfg(), pluginscdn.ProvideService(&config.PluginManagementCfg{}))
|
||||
},
|
||||
pluginAssets: newPluginAssets(),
|
||||
expected: settings{
|
||||
Apps: map[string]*plugins.AppDTO{
|
||||
"test-app": {
|
||||
@ -266,6 +269,7 @@ func TestHTTPServer_GetFrontendSettings_apps(t *testing.T) {
|
||||
Path: "/test-app/module.js",
|
||||
Version: "0.5.0",
|
||||
LoadingStrategy: plugins.LoadingStrategyScript,
|
||||
// ModuleHash: "sha256-test",
|
||||
},
|
||||
},
|
||||
},
|
||||
@ -277,6 +281,7 @@ func TestHTTPServer_GetFrontendSettings_apps(t *testing.T) {
|
||||
PluginList: []pluginstore.Plugin{
|
||||
{
|
||||
Module: fmt.Sprintf("/%s/module.js", "test-app"),
|
||||
// ModuleHash: "sha256-test",
|
||||
JSONData: plugins.JSONData{
|
||||
ID: "test-app",
|
||||
Info: plugins.Info{Version: "0.5.0"},
|
||||
@ -292,9 +297,7 @@ func TestHTTPServer_GetFrontendSettings_apps(t *testing.T) {
|
||||
Plugins: newAppSettings("test-app", true),
|
||||
}
|
||||
},
|
||||
pluginAssets: func() *pluginassets.Service {
|
||||
return pluginassets.ProvideService(setting.NewCfg(), pluginscdn.ProvideService(&config.PluginManagementCfg{}))
|
||||
},
|
||||
pluginAssets: newPluginAssets(),
|
||||
expected: settings{
|
||||
Apps: map[string]*plugins.AppDTO{
|
||||
"test-app": {
|
||||
@ -303,6 +306,7 @@ func TestHTTPServer_GetFrontendSettings_apps(t *testing.T) {
|
||||
Path: "/test-app/module.js",
|
||||
Version: "0.5.0",
|
||||
LoadingStrategy: plugins.LoadingStrategyScript,
|
||||
// ModuleHash: "sha256-test",
|
||||
},
|
||||
},
|
||||
},
|
||||
@ -330,9 +334,7 @@ func TestHTTPServer_GetFrontendSettings_apps(t *testing.T) {
|
||||
Plugins: newAppSettings("test-app", true),
|
||||
}
|
||||
},
|
||||
pluginAssets: func() *pluginassets.Service {
|
||||
return pluginassets.ProvideService(setting.NewCfg(), pluginscdn.ProvideService(&config.PluginManagementCfg{}))
|
||||
},
|
||||
pluginAssets: newPluginAssets(),
|
||||
expected: settings{
|
||||
Apps: map[string]*plugins.AppDTO{
|
||||
"test-app": {
|
||||
@ -368,15 +370,13 @@ func TestHTTPServer_GetFrontendSettings_apps(t *testing.T) {
|
||||
Plugins: newAppSettings("test-app", true),
|
||||
}
|
||||
},
|
||||
pluginAssets: func() *pluginassets.Service {
|
||||
return pluginassets.ProvideService(&setting.Cfg{
|
||||
PluginSettings: map[string]map[string]string{
|
||||
"test-app": {
|
||||
pluginassets.CreatePluginVersionCfgKey: pluginassets.CreatePluginVersionScriptSupportEnabled,
|
||||
},
|
||||
pluginAssets: newPluginAssetsWithConfig(&config.PluginManagementCfg{
|
||||
PluginSettings: map[string]map[string]string{
|
||||
"test-app": {
|
||||
pluginassets.CreatePluginVersionCfgKey: pluginassets.CreatePluginVersionScriptSupportEnabled,
|
||||
},
|
||||
}, pluginscdn.ProvideService(&config.PluginManagementCfg{}))
|
||||
},
|
||||
},
|
||||
}),
|
||||
expected: settings{
|
||||
Apps: map[string]*plugins.AppDTO{
|
||||
"test-app": {
|
||||
@ -412,9 +412,7 @@ func TestHTTPServer_GetFrontendSettings_apps(t *testing.T) {
|
||||
Plugins: newAppSettings("test-app", true),
|
||||
}
|
||||
},
|
||||
pluginAssets: func() *pluginassets.Service {
|
||||
return pluginassets.ProvideService(setting.NewCfg(), pluginscdn.ProvideService(&config.PluginManagementCfg{}))
|
||||
},
|
||||
pluginAssets: newPluginAssets(),
|
||||
expected: settings{
|
||||
Apps: map[string]*plugins.AppDTO{
|
||||
"test-app": {
|
||||
@ -456,3 +454,13 @@ func newAppSettings(id string, enabled bool) map[string]*pluginsettings.DTO {
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func newPluginAssets() func() *pluginassets.Service {
|
||||
return newPluginAssetsWithConfig(&config.PluginManagementCfg{})
|
||||
}
|
||||
|
||||
func newPluginAssetsWithConfig(pCfg *config.PluginManagementCfg) func() *pluginassets.Service {
|
||||
return func() *pluginassets.Service {
|
||||
return pluginassets.ProvideService(pCfg, pluginscdn.ProvideService(pCfg), signature.ProvideService(pCfg, statickey.New()), &pluginstore.FakePluginStore{})
|
||||
}
|
||||
}
|
||||
|
@ -104,6 +104,7 @@ import (
|
||||
"github.com/grafana/grafana/pkg/services/tag"
|
||||
"github.com/grafana/grafana/pkg/services/team"
|
||||
tempUser "github.com/grafana/grafana/pkg/services/temp_user"
|
||||
"github.com/grafana/grafana/pkg/services/unifiedSearch"
|
||||
"github.com/grafana/grafana/pkg/services/updatechecker"
|
||||
"github.com/grafana/grafana/pkg/services/user"
|
||||
"github.com/grafana/grafana/pkg/services/validations"
|
||||
@ -156,6 +157,7 @@ type HTTPServer struct {
|
||||
LivePushGateway *pushhttp.Gateway
|
||||
StorageService store.StorageService
|
||||
SearchV2HTTPService searchV2.SearchHTTPService
|
||||
UnifiedSearchHTTPService unifiedSearch.SearchHTTPService
|
||||
ContextHandler *contexthandler.ContextHandler
|
||||
LoggerMiddleware loggermw.Logger
|
||||
SQLStore db.DB
|
||||
@ -266,7 +268,7 @@ func ProvideHTTPServer(opts ServerOptions, cfg *setting.Cfg, routeRegister routi
|
||||
publicDashboardsApi *publicdashboardsApi.Api, userService user.Service, tempUserService tempUser.Service,
|
||||
loginAttemptService loginAttempt.Service, orgService org.Service, teamService team.Service,
|
||||
accesscontrolService accesscontrol.Service, navTreeService navtree.Service,
|
||||
annotationRepo annotations.Repository, tagService tag.Service, searchv2HTTPService searchV2.SearchHTTPService, oauthTokenService oauthtoken.OAuthTokenService,
|
||||
annotationRepo annotations.Repository, tagService tag.Service, searchv2HTTPService searchV2.SearchHTTPService, unifiedSearchHTTPService unifiedSearch.SearchHTTPService, oauthTokenService oauthtoken.OAuthTokenService,
|
||||
statsService stats.Service, authnService authn.Service, pluginsCDNService *pluginscdn.Service, promGatherer prometheus.Gatherer,
|
||||
starApi *starApi.API, promRegister prometheus.Registerer, clientConfigProvider grafanaapiserver.DirectRestConfigProvider, anonService anonymous.Service,
|
||||
userVerifier user.Verifier,
|
||||
@ -308,6 +310,7 @@ func ProvideHTTPServer(opts ServerOptions, cfg *setting.Cfg, routeRegister routi
|
||||
AccessControl: accessControl,
|
||||
DataProxy: dataSourceProxy,
|
||||
SearchV2HTTPService: searchv2HTTPService,
|
||||
UnifiedSearchHTTPService: unifiedSearchHTTPService,
|
||||
SearchService: searchService,
|
||||
Live: live,
|
||||
LivePushGateway: livePushGateway,
|
||||
|
@ -14,6 +14,7 @@ import (
|
||||
"github.com/grafana/grafana/pkg/apimachinery/identity"
|
||||
"github.com/grafana/grafana/pkg/infra/metrics"
|
||||
"github.com/grafana/grafana/pkg/infra/network"
|
||||
"github.com/grafana/grafana/pkg/middleware"
|
||||
"github.com/grafana/grafana/pkg/middleware/cookies"
|
||||
"github.com/grafana/grafana/pkg/services/auth"
|
||||
"github.com/grafana/grafana/pkg/services/authn"
|
||||
@ -181,6 +182,9 @@ func (hs *HTTPServer) tryAutoLogin(c *contextmodel.ReqContext) bool {
|
||||
for providerName, provider := range oauthInfos {
|
||||
if provider.AutoLogin || hs.Cfg.OAuthAutoLogin {
|
||||
redirectUrl := hs.Cfg.AppSubURL + "/login/" + providerName
|
||||
if hs.Features.IsEnabledGlobally(featuremgmt.FlagUseSessionStorageForRedirection) {
|
||||
redirectUrl += hs.getRedirectToForAutoLogin(c)
|
||||
}
|
||||
c.Logger.Info("OAuth auto login enabled. Redirecting to " + redirectUrl)
|
||||
c.Redirect(redirectUrl, 307)
|
||||
return true
|
||||
@ -189,6 +193,9 @@ func (hs *HTTPServer) tryAutoLogin(c *contextmodel.ReqContext) bool {
|
||||
|
||||
if samlAutoLogin {
|
||||
redirectUrl := hs.Cfg.AppSubURL + "/login/saml"
|
||||
if hs.Features.IsEnabledGlobally(featuremgmt.FlagUseSessionStorageForRedirection) {
|
||||
redirectUrl += hs.getRedirectToForAutoLogin(c)
|
||||
}
|
||||
c.Logger.Info("SAML auto login enabled. Redirecting to " + redirectUrl)
|
||||
c.Redirect(redirectUrl, 307)
|
||||
return true
|
||||
@ -197,6 +204,21 @@ func (hs *HTTPServer) tryAutoLogin(c *contextmodel.ReqContext) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func (hs *HTTPServer) getRedirectToForAutoLogin(c *contextmodel.ReqContext) string {
|
||||
redirectTo := c.Req.FormValue("redirectTo")
|
||||
if hs.Cfg.AppSubURL != "" && strings.HasPrefix(redirectTo, hs.Cfg.AppSubURL) {
|
||||
redirectTo = strings.TrimPrefix(redirectTo, hs.Cfg.AppSubURL)
|
||||
}
|
||||
|
||||
if redirectTo == "/" {
|
||||
return ""
|
||||
}
|
||||
|
||||
// remove any forceLogin=true params
|
||||
redirectTo = middleware.RemoveForceLoginParams(redirectTo)
|
||||
return "?redirectTo=" + url.QueryEscape(redirectTo)
|
||||
}
|
||||
|
||||
func (hs *HTTPServer) LoginAPIPing(c *contextmodel.ReqContext) response.Response {
|
||||
if c.IsSignedIn || c.IsAnonymous {
|
||||
return response.JSON(http.StatusOK, util.DynMap{"message": "Logged in"})
|
||||
@ -233,7 +255,7 @@ func (hs *HTTPServer) loginUserWithUser(user *user.User, c *contextmodel.ReqCont
|
||||
|
||||
hs.log.Debug("Got IP address from client address", "addr", addr, "ip", ip)
|
||||
ctx := context.WithValue(c.Req.Context(), loginservice.RequestURIKey{}, c.Req.RequestURI)
|
||||
userToken, err := hs.AuthTokenService.CreateToken(ctx, user, ip, c.Req.UserAgent())
|
||||
userToken, err := hs.AuthTokenService.CreateToken(ctx, &auth.CreateTokenCommand{User: user, ClientIP: ip, UserAgent: c.Req.UserAgent()})
|
||||
if err != nil {
|
||||
return fmt.Errorf("%v: %w", "failed to create auth token", err)
|
||||
}
|
||||
|
@ -6,6 +6,7 @@ import (
|
||||
"github.com/grafana/grafana/pkg/middleware/cookies"
|
||||
"github.com/grafana/grafana/pkg/services/authn"
|
||||
contextmodel "github.com/grafana/grafana/pkg/services/contexthandler/model"
|
||||
"github.com/grafana/grafana/pkg/services/featuremgmt"
|
||||
"github.com/grafana/grafana/pkg/web"
|
||||
)
|
||||
|
||||
@ -25,6 +26,7 @@ func (hs *HTTPServer) OAuthLogin(reqCtx *contextmodel.ReqContext) {
|
||||
}
|
||||
|
||||
code := reqCtx.Query("code")
|
||||
redirectTo := reqCtx.Query("redirectTo")
|
||||
|
||||
req := &authn.Request{HTTPRequest: reqCtx.Req}
|
||||
if code == "" {
|
||||
@ -36,6 +38,9 @@ func (hs *HTTPServer) OAuthLogin(reqCtx *contextmodel.ReqContext) {
|
||||
|
||||
cookies.WriteCookie(reqCtx.Resp, OauthStateCookieName, redirect.Extra[authn.KeyOAuthState], hs.Cfg.OAuthCookieMaxAge, hs.CookieOptionsFromCfg)
|
||||
|
||||
if hs.Features.IsEnabledGlobally(featuremgmt.FlagUseSessionStorageForRedirection) {
|
||||
cookies.WriteCookie(reqCtx.Resp, "redirectTo", redirectTo, hs.Cfg.OAuthCookieMaxAge, hs.CookieOptionsFromCfg)
|
||||
}
|
||||
if pkce := redirect.Extra[authn.KeyOAuthPKCE]; pkce != "" {
|
||||
cookies.WriteCookie(reqCtx.Resp, OauthPKCECookieName, pkce, hs.Cfg.OAuthCookieMaxAge, hs.CookieOptionsFromCfg)
|
||||
}
|
||||
|
@ -123,7 +123,7 @@ func (hs *HTTPServer) GetPluginList(c *contextmodel.ReqContext) response.Respons
|
||||
}
|
||||
|
||||
// Compute metadata
|
||||
pluginsMetadata := hs.getMultiAccessControlMetadata(c, pluginaccesscontrol.ScopeProvider.GetResourceScope(""), filteredPluginIDs)
|
||||
pluginsMetadata := getMultiAccessControlMetadata(c, pluginaccesscontrol.ScopeProvider.GetResourceScope(""), filteredPluginIDs)
|
||||
|
||||
// Prepare DTO
|
||||
result := make(dtos.PluginList, 0)
|
||||
@ -201,6 +201,7 @@ func (hs *HTTPServer) GetPluginSettingByID(c *contextmodel.ReqContext) response.
|
||||
Includes: plugin.Includes,
|
||||
BaseUrl: plugin.BaseURL,
|
||||
Module: plugin.Module,
|
||||
ModuleHash: hs.pluginAssets.ModuleHash(c.Req.Context(), plugin),
|
||||
DefaultNavUrl: path.Join(hs.Cfg.AppSubURL, plugin.DefaultNavURL),
|
||||
State: plugin.State,
|
||||
Signature: plugin.Signature,
|
||||
|
@ -27,6 +27,8 @@ import (
|
||||
"github.com/grafana/grafana/pkg/plugins/manager/fakes"
|
||||
"github.com/grafana/grafana/pkg/plugins/manager/filestore"
|
||||
"github.com/grafana/grafana/pkg/plugins/manager/registry"
|
||||
"github.com/grafana/grafana/pkg/plugins/manager/signature"
|
||||
"github.com/grafana/grafana/pkg/plugins/manager/signature/statickey"
|
||||
"github.com/grafana/grafana/pkg/plugins/pfs"
|
||||
"github.com/grafana/grafana/pkg/plugins/pluginscdn"
|
||||
ac "github.com/grafana/grafana/pkg/services/accesscontrol"
|
||||
@ -788,7 +790,6 @@ func Test_PluginsSettings(t *testing.T) {
|
||||
Info: plugins.Info{
|
||||
Version: "1.0.0",
|
||||
}}, plugins.ClassExternal, plugins.NewFakeFS())
|
||||
|
||||
pluginRegistry := &fakes.FakePluginRegistry{
|
||||
Store: map[string]*plugins.Plugin{
|
||||
p1.ID: p1,
|
||||
@ -843,8 +844,10 @@ func Test_PluginsSettings(t *testing.T) {
|
||||
ErrorCode: tc.errCode,
|
||||
})
|
||||
}
|
||||
pluginCDN := pluginscdn.ProvideService(&config.PluginManagementCfg{})
|
||||
hs.pluginAssets = pluginassets.ProvideService(hs.Cfg, pluginCDN)
|
||||
pCfg := &config.PluginManagementCfg{}
|
||||
pluginCDN := pluginscdn.ProvideService(pCfg)
|
||||
sig := signature.ProvideService(pCfg, statickey.New())
|
||||
hs.pluginAssets = pluginassets.ProvideService(pCfg, pluginCDN, sig, hs.pluginStore)
|
||||
hs.pluginErrorResolver = pluginerrs.ProvideStore(errTracker)
|
||||
var err error
|
||||
hs.pluginsUpdateChecker, err = updatechecker.ProvidePluginsService(hs.Cfg, nil, tracing.InitializeTracerForTest())
|
||||
|
@ -91,7 +91,7 @@ func (hs *HTTPServer) getUserUserProfile(c *contextmodel.ReqContext, userID int6
        userProfile.IsGrafanaAdminExternallySynced = login.IsGrafanaAdminExternallySynced(hs.Cfg, oauthInfo, authInfo.AuthModule)
    }

    userProfile.AccessControl = hs.getAccessControlMetadata(c, "global.users:id:", strconv.FormatInt(userID, 10))
    userProfile.AccessControl = getAccessControlMetadata(c, "global.users:id:", strconv.FormatInt(userID, 10))
    userProfile.AvatarURL = dtos.GetGravatarUrl(hs.Cfg, userProfile.Email)

    return response.JSON(http.StatusOK, userProfile)
@ -1407,22 +1407,25 @@ func RunWatchSemantics(ctx context.Context, t *testing.T, store storage.Interfac
            podsAfterEstablishingWatch: []*example.Pod{makePod("4"), makePod("5")},
            expectedEventsAfterEstablishingWatch: addEventsFromCreatedPods,
        },

        {
            name: "legacy, RV=0",
            resourceVersion: "0",
            initialPods: []*example.Pod{makePod("1"), makePod("2"), makePod("3")},
            expectedInitialEventsInRandomOrder: addEventsFromCreatedPods,
            podsAfterEstablishingWatch: []*example.Pod{makePod("4"), makePod("5")},
            expectedEventsAfterEstablishingWatch: addEventsFromCreatedPods,
        },
        {
            name: "legacy, RV=unset",
            initialPods: []*example.Pod{makePod("1"), makePod("2"), makePod("3")},
            expectedInitialEventsInRandomOrder: addEventsFromCreatedPods,
            podsAfterEstablishingWatch: []*example.Pod{makePod("4"), makePod("5")},
            expectedEventsAfterEstablishingWatch: addEventsFromCreatedPods,
        },
        // Not Supported by unistore because there is no way to differentiate between:
        // - SendInitialEvents=nil && resourceVersion=0
        // - sendInitialEvents=false && resourceVersion=0
        // This is a Legacy feature in k8s.io/apiserver/pkg/storage/etcd3/watcher_test.go#196
        // {
        //     name: "legacy, RV=0",
        //     resourceVersion: "0",
        //     initialPods: []*example.Pod{makePod("1"), makePod("2"), makePod("3")},
        //     expectedInitialEventsInRandomOrder: addEventsFromCreatedPods,
        //     podsAfterEstablishingWatch: []*example.Pod{makePod("4"), makePod("5")},
        //     expectedEventsAfterEstablishingWatch: addEventsFromCreatedPods,
        // },
        // {
        //     name: "legacy, RV=unset",
        //     initialPods: []*example.Pod{makePod("1"), makePod("2"), makePod("3")},
        //     expectedInitialEventsInRandomOrder: addEventsFromCreatedPods,
        //     podsAfterEstablishingWatch: []*example.Pod{makePod("4"), makePod("5")},
        //     expectedEventsAfterEstablishingWatch: addEventsFromCreatedPods,
        // },
    }
    for idx, scenario := range scenarios {
        t.Run(scenario.name, func(t *testing.T) {
@ -98,7 +98,7 @@ func writeRedirectCookie(c *contextmodel.ReqContext) {
}

// remove any forceLogin=true params
redirectTo = removeForceLoginParams(redirectTo)
redirectTo = RemoveForceLoginParams(redirectTo)
cookies.WriteCookie(c.Resp, "redirect_to", url.QueryEscape(redirectTo), 0, nil)
}

@ -113,13 +113,13 @@ func getRedirectToQueryParam(c *contextmodel.ReqContext) string {
}

// remove any forceLogin=true params
redirectTo = removeForceLoginParams(redirectTo)
redirectTo = RemoveForceLoginParams(redirectTo)
return "?redirectTo=" + url.QueryEscape(redirectTo)
}

var forceLoginParamsRegexp = regexp.MustCompile(`&?forceLogin=true`)

func removeForceLoginParams(str string) string {
func RemoveForceLoginParams(str string) string {
return forceLoginParamsRegexp.ReplaceAllString(str, "")
}
@ -138,7 +138,8 @@ func CanAdminPlugins(cfg *setting.Cfg, accessControl ac.AccessControl) func(c *c
}

func RoleAppPluginAuth(accessControl ac.AccessControl, ps pluginstore.Store, features featuremgmt.FeatureToggles,
logger log.Logger) func(c *contextmodel.ReqContext) {
logger log.Logger,
) func(c *contextmodel.ReqContext) {
return func(c *contextmodel.ReqContext) {
pluginID := web.Params(c.Req)[":id"]
p, exists := ps.Plugin(c.Req.Context(), pluginID)

@ -352,7 +352,7 @@ func TestRemoveForceLoginparams(t *testing.T) {
}
for i, tc := range tcs {
t.Run(fmt.Sprintf("testcase %d", i), func(t *testing.T) {
require.Equal(t, tc.exp, removeForceLoginParams(tc.inp))
require.Equal(t, tc.exp, RemoveForceLoginParams(tc.inp))
})
}
}
@ -22,19 +22,20 @@ func (e *TokenRevokedError) Unwrap() error { return ErrInvalidSessionToken }
|
||||
|
||||
// UserToken represents a user token
|
||||
type UserToken struct {
|
||||
Id int64
|
||||
UserId int64
|
||||
AuthToken string
|
||||
PrevAuthToken string
|
||||
UserAgent string
|
||||
ClientIp string
|
||||
AuthTokenSeen bool
|
||||
SeenAt int64
|
||||
RotatedAt int64
|
||||
CreatedAt int64
|
||||
UpdatedAt int64
|
||||
RevokedAt int64
|
||||
UnhashedToken string
|
||||
Id int64
|
||||
UserId int64
|
||||
ExternalSessionId int64
|
||||
AuthToken string
|
||||
PrevAuthToken string
|
||||
UserAgent string
|
||||
ClientIp string
|
||||
AuthTokenSeen bool
|
||||
SeenAt int64
|
||||
RotatedAt int64
|
||||
CreatedAt int64
|
||||
UpdatedAt int64
|
||||
RevokedAt int64
|
||||
UnhashedToken string
|
||||
}
|
||||
|
||||
const UrgentRotateTime = 1 * time.Minute
|
||||
|
@ -32,6 +32,7 @@ type PluginManagementCfg struct {
|
||||
type Features struct {
|
||||
ExternalCorePluginsEnabled bool
|
||||
SkipHostEnvVarsEnabled bool
|
||||
SriChecksEnabled bool
|
||||
}
|
||||
|
||||
// NewPluginManagementCfg returns a new PluginManagementCfg.
|
||||
|
@ -53,7 +53,7 @@ type PluginManifest struct {
|
||||
RootURLs []string `json:"rootUrls"`
|
||||
}
|
||||
|
||||
func (m *PluginManifest) isV2() bool {
|
||||
func (m *PluginManifest) IsV2() bool {
|
||||
return strings.HasPrefix(m.ManifestVersion, "2.")
|
||||
}
|
||||
|
||||
@ -107,34 +107,17 @@ func (s *Signature) readPluginManifest(ctx context.Context, body []byte) (*Plugi
|
||||
return &manifest, nil
|
||||
}
|
||||
|
||||
func (s *Signature) Calculate(ctx context.Context, src plugins.PluginSource, plugin plugins.FoundPlugin) (plugins.Signature, error) {
|
||||
if defaultSignature, exists := src.DefaultSignature(ctx); exists {
|
||||
return defaultSignature, nil
|
||||
}
|
||||
fsFiles, err := plugin.FS.Files()
|
||||
if err != nil {
|
||||
return plugins.Signature{}, fmt.Errorf("files: %w", err)
|
||||
}
|
||||
if len(fsFiles) == 0 {
|
||||
s.log.Warn("No plugin file information in directory", "pluginId", plugin.JSONData.ID)
|
||||
return plugins.Signature{
|
||||
Status: plugins.SignatureStatusInvalid,
|
||||
}, nil
|
||||
}
|
||||
var ErrSignatureTypeUnsigned = errors.New("plugin is unsigned")

f, err := plugin.FS.Open("MANIFEST.txt")
// ReadPluginManifestFromFS reads the plugin manifest from the provided plugins.FS.
// If the manifest is not found, it will return an error wrapping ErrSignatureTypeUnsigned.
func (s *Signature) ReadPluginManifestFromFS(ctx context.Context, pfs plugins.FS) (*PluginManifest, error) {
f, err := pfs.Open("MANIFEST.txt")
if err != nil {
if errors.Is(err, plugins.ErrFileNotExist) {
s.log.Debug("Could not find a MANIFEST.txt", "id", plugin.JSONData.ID, "error", err)
return plugins.Signature{
Status: plugins.SignatureStatusUnsigned,
}, nil
return nil, fmt.Errorf("%w: could not find a MANIFEST.txt", ErrSignatureTypeUnsigned)
}

s.log.Debug("Could not open MANIFEST.txt", "id", plugin.JSONData.ID, "error", err)
return plugins.Signature{
Status: plugins.SignatureStatusInvalid,
}, nil
return nil, fmt.Errorf("could not open MANIFEST.txt: %w", err)
}
defer func() {
if f == nil {
@ -147,21 +130,47 @@ func (s *Signature) Calculate(ctx context.Context, src plugins.PluginSource, plu
|
||||
|
||||
byteValue, err := io.ReadAll(f)
|
||||
if err != nil || len(byteValue) < 10 {
|
||||
s.log.Debug("MANIFEST.TXT is invalid", "id", plugin.JSONData.ID)
|
||||
return plugins.Signature{
|
||||
Status: plugins.SignatureStatusUnsigned,
|
||||
}, nil
|
||||
return nil, fmt.Errorf("%w: MANIFEST.txt is invalid", ErrSignatureTypeUnsigned)
|
||||
}
|
||||
|
||||
manifest, err := s.readPluginManifest(ctx, byteValue)
|
||||
if err != nil {
|
||||
s.log.Warn("Plugin signature invalid", "id", plugin.JSONData.ID, "error", err)
|
||||
return nil, err
|
||||
}
|
||||
return manifest, nil
|
||||
}
|
||||
|
||||
func (s *Signature) Calculate(ctx context.Context, src plugins.PluginSource, plugin plugins.FoundPlugin) (plugins.Signature, error) {
|
||||
if defaultSignature, exists := src.DefaultSignature(ctx); exists {
|
||||
return defaultSignature, nil
|
||||
}
|
||||
|
||||
manifest, err := s.ReadPluginManifestFromFS(ctx, plugin.FS)
|
||||
switch {
|
||||
case errors.Is(err, ErrSignatureTypeUnsigned):
|
||||
s.log.Warn("Plugin is unsigned", "id", plugin.JSONData.ID, "err", err)
|
||||
return plugins.Signature{
|
||||
Status: plugins.SignatureStatusUnsigned,
|
||||
}, nil
|
||||
case err != nil:
|
||||
s.log.Warn("Plugin signature is invalid", "id", plugin.JSONData.ID, "err", err)
|
||||
return plugins.Signature{
|
||||
Status: plugins.SignatureStatusInvalid,
|
||||
}, nil
|
||||
}
|
||||
|
||||
if !manifest.isV2() {
|
||||
if !manifest.IsV2() {
|
||||
return plugins.Signature{
|
||||
Status: plugins.SignatureStatusInvalid,
|
||||
}, nil
|
||||
}
|
||||
|
||||
fsFiles, err := plugin.FS.Files()
|
||||
if err != nil {
|
||||
return plugins.Signature{}, fmt.Errorf("files: %w", err)
|
||||
}
|
||||
if len(fsFiles) == 0 {
|
||||
s.log.Warn("No plugin file information in directory", "pluginId", plugin.JSONData.ID)
|
||||
return plugins.Signature{
|
||||
Status: plugins.SignatureStatusInvalid,
|
||||
}, nil
|
||||
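Since Calculate above now classifies the manifest-read failure by inspecting the wrapped sentinel, callers of the new exported helper can follow the same pattern. A minimal sketch, assuming the imports already used in this package; mapStatus is a hypothetical helper and not part of this change:

package signature

import (
	"context"
	"errors"

	"github.com/grafana/grafana/pkg/plugins"
)

// mapStatus is a hypothetical helper (not part of this change) that classifies the
// result of ReadPluginManifestFromFS the same way Calculate does above.
func mapStatus(ctx context.Context, s *Signature, pfs plugins.FS) plugins.SignatureStatus {
	_, err := s.ReadPluginManifestFromFS(ctx, pfs)
	switch {
	case errors.Is(err, ErrSignatureTypeUnsigned):
		// MANIFEST.txt is missing or unusable: the plugin is simply unsigned.
		return plugins.SignatureStatusUnsigned
	case err != nil:
		// The manifest exists but could not be read or verified: treat it as invalid.
		return plugins.SignatureStatusInvalid
	default:
		return plugins.SignatureStatusValid
	}
}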
@ -328,7 +337,7 @@ func (s *Signature) validateManifest(ctx context.Context, m PluginManifest, bloc
|
||||
if len(m.Files) == 0 {
|
||||
return invalidFieldErr{field: "files"}
|
||||
}
|
||||
if m.isV2() {
|
||||
if m.IsV2() {
|
||||
if len(m.SignedByOrg) == 0 {
|
||||
return invalidFieldErr{field: "signedByOrg"}
|
||||
}
|
||||
|
@ -19,6 +19,14 @@ import (
|
||||
"github.com/grafana/grafana/pkg/plugins/manager/signature/statickey"
|
||||
)
|
||||
|
||||
func provideDefaultTestService() *Signature {
|
||||
return provideTestServiceWithConfig(&config.PluginManagementCfg{})
|
||||
}
|
||||
|
||||
func provideTestServiceWithConfig(cfg *config.PluginManagementCfg) *Signature {
|
||||
return ProvideService(cfg, statickey.New())
|
||||
}
|
||||
|
||||
func TestReadPluginManifest(t *testing.T) {
|
||||
txt := `-----BEGIN PGP SIGNED MESSAGE-----
|
||||
Hash: SHA512
|
||||
@ -52,7 +60,7 @@ NR7DnB0CCQHO+4FlSPtXFTzNepoc+CytQyDAeOLMLmf2Tqhk2YShk+G/YlVX
|
||||
-----END PGP SIGNATURE-----`
|
||||
|
||||
t.Run("valid manifest", func(t *testing.T) {
|
||||
s := ProvideService(&config.PluginManagementCfg{}, statickey.New())
|
||||
s := provideDefaultTestService()
|
||||
manifest, err := s.readPluginManifest(context.Background(), []byte(txt))
|
||||
|
||||
require.NoError(t, err)
|
||||
@ -68,8 +76,8 @@ NR7DnB0CCQHO+4FlSPtXFTzNepoc+CytQyDAeOLMLmf2Tqhk2YShk+G/YlVX
|
||||
})
|
||||
|
||||
t.Run("invalid manifest", func(t *testing.T) {
|
||||
s := provideDefaultTestService()
|
||||
modified := strings.ReplaceAll(txt, "README.md", "xxxxxxxxxx")
|
||||
s := ProvideService(&config.PluginManagementCfg{}, statickey.New())
|
||||
_, err := s.readPluginManifest(context.Background(), []byte(modified))
|
||||
require.Error(t, err)
|
||||
})
|
||||
@ -107,7 +115,7 @@ khdr/tZ1PDgRxMqB/u+Vtbpl0xSxgblnrDOYMSI=
|
||||
-----END PGP SIGNATURE-----`
|
||||
|
||||
t.Run("valid manifest", func(t *testing.T) {
|
||||
s := ProvideService(&config.PluginManagementCfg{}, statickey.New())
|
||||
s := provideDefaultTestService()
|
||||
manifest, err := s.readPluginManifest(context.Background(), []byte(txt))
|
||||
|
||||
require.NoError(t, err)
|
||||
@ -126,6 +134,12 @@ khdr/tZ1PDgRxMqB/u+Vtbpl0xSxgblnrDOYMSI=
|
||||
}
|
||||
|
||||
func TestCalculate(t *testing.T) {
|
||||
parentDir, err := filepath.Abs("../")
|
||||
if err != nil {
|
||||
t.Errorf("could not construct absolute path of current dir")
|
||||
return
|
||||
}
|
||||
|
||||
t.Run("Validate root URL against App URL for non-private plugin if is specified in manifest", func(t *testing.T) {
|
||||
tcs := []struct {
|
||||
appURL string
|
||||
@ -147,15 +161,9 @@ func TestCalculate(t *testing.T) {
|
||||
},
|
||||
}
|
||||
|
||||
parentDir, err := filepath.Abs("../")
|
||||
if err != nil {
|
||||
t.Errorf("could not construct absolute path of current dir")
|
||||
return
|
||||
}
|
||||
|
||||
for _, tc := range tcs {
|
||||
basePath := filepath.Join(parentDir, "testdata/non-pvt-with-root-url/plugin")
|
||||
s := ProvideService(&config.PluginManagementCfg{GrafanaAppURL: tc.appURL}, statickey.New())
|
||||
s := provideTestServiceWithConfig(&config.PluginManagementCfg{GrafanaAppURL: tc.appURL})
|
||||
sig, err := s.Calculate(context.Background(), &fakes.FakePluginSource{
|
||||
PluginClassFunc: func(ctx context.Context) plugins.Class {
|
||||
return plugins.ClassExternal
|
||||
@ -183,7 +191,7 @@ func TestCalculate(t *testing.T) {
|
||||
basePath := "../testdata/renderer-added-file/plugin"
|
||||
|
||||
runningWindows = true
|
||||
s := ProvideService(&config.PluginManagementCfg{}, statickey.New())
|
||||
s := provideDefaultTestService()
|
||||
sig, err := s.Calculate(context.Background(), &fakes.FakePluginSource{
|
||||
PluginClassFunc: func(ctx context.Context) plugins.Class {
|
||||
return plugins.ClassExternal
|
||||
@ -247,7 +255,7 @@ func TestCalculate(t *testing.T) {
|
||||
toSlash = tc.platform.toSlashFunc()
|
||||
fromSlash = tc.platform.fromSlashFunc()
|
||||
|
||||
s := ProvideService(&config.PluginManagementCfg{}, statickey.New())
|
||||
s := provideDefaultTestService()
|
||||
pfs, err := tc.fsFactory()
|
||||
require.NoError(t, err)
|
||||
pfs, err = newPathSeparatorOverrideFS(string(tc.platform.separator), pfs)
|
||||
@ -721,7 +729,7 @@ func Test_validateManifest(t *testing.T) {
|
||||
}
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
s := ProvideService(&config.PluginManagementCfg{}, statickey.New())
|
||||
s := provideDefaultTestService()
|
||||
err := s.validateManifest(context.Background(), *tc.manifest, nil)
|
||||
require.Errorf(t, err, tc.expectedErr)
|
||||
})
|
||||
|
@ -262,6 +262,7 @@ type PluginMetaDTO struct {
|
||||
JSONData
|
||||
Signature SignatureStatus `json:"signature"`
|
||||
Module string `json:"module"`
|
||||
ModuleHash string `json:"moduleHash,omitempty"`
|
||||
BaseURL string `json:"baseUrl"`
|
||||
Angular AngularMeta `json:"angular"`
|
||||
MultiValueFilterOperators bool `json:"multiValueFilterOperators"`
|
||||
@ -314,6 +315,7 @@ type PanelDTO struct {
|
||||
Module string `json:"module"`
|
||||
Angular AngularMeta `json:"angular"`
|
||||
LoadingStrategy LoadingStrategy `json:"loadingStrategy"`
|
||||
ModuleHash string `json:"moduleHash,omitempty"`
|
||||
}
|
||||
|
||||
type AppDTO struct {
|
||||
@ -325,6 +327,7 @@ type AppDTO struct {
|
||||
LoadingStrategy LoadingStrategy `json:"loadingStrategy"`
|
||||
Extensions Extensions `json:"extensions"`
|
||||
Dependencies Dependencies `json:"dependencies"`
|
||||
ModuleHash string `json:"moduleHash,omitempty"`
|
||||
}
|
||||
|
||||
const (
|
||||
|
@ -50,13 +50,47 @@ func LegacyUpdateCommandToUnstructured(cmd folder.UpdateFolderCommand) unstructu
|
||||
return obj
|
||||
}
|
||||
|
||||
func UnstructuredToLegacyFolder(item unstructured.Unstructured) *folder.Folder {
|
||||
func UnstructuredToLegacyFolder(item unstructured.Unstructured, orgID int64) *folder.Folder {
|
||||
// #TODO reduce duplication of the different conversion functions
|
||||
spec := item.Object["spec"].(map[string]any)
|
||||
return &folder.Folder{
|
||||
UID: item.GetName(),
|
||||
Title: spec["title"].(string),
|
||||
// #TODO add other fields
|
||||
uid := item.GetName()
|
||||
title := spec["title"].(string)
|
||||
|
||||
meta, err := utils.MetaAccessor(&item)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
id, err := getLegacyID(meta)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
created, err := getCreated(meta)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
// avoid panic
|
||||
var createdTime time.Time
|
||||
if created != nil {
|
||||
createdTime = created.Local()
|
||||
}
|
||||
|
||||
f := &folder.Folder{
|
||||
UID: uid,
|
||||
Title: title,
|
||||
ID: id,
|
||||
ParentUID: meta.GetFolder(),
|
||||
// #TODO add created by field if necessary
|
||||
// CreatedBy: meta.GetCreatedBy(),
|
||||
// UpdatedBy: meta.GetCreatedBy(),
|
||||
URL: getURL(meta, title),
|
||||
Created: createdTime,
|
||||
Updated: createdTime,
|
||||
OrgID: orgID,
|
||||
}
|
||||
return f
|
||||
}
|
||||
|
||||
func UnstructuredToLegacyFolderDTO(item unstructured.Unstructured) (*dtos.Folder, error) {
|
||||
@ -79,6 +113,14 @@ func UnstructuredToLegacyFolderDTO(item unstructured.Unstructured) (*dtos.Folder
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// avoid panic
|
||||
var createdTime time.Time
|
||||
if created != nil {
|
||||
// #TODO Fix this time format. The legacy time format seems to be along the lines of time.Now()
|
||||
// which includes a part that represents a fraction of a second.
|
||||
createdTime = created.Local()
|
||||
}
|
||||
|
||||
dto := &dtos.Folder{
|
||||
UID: uid,
|
||||
Title: title,
|
||||
@ -87,20 +129,14 @@ func UnstructuredToLegacyFolderDTO(item unstructured.Unstructured) (*dtos.Folder
|
||||
// #TODO add back CreatedBy, UpdatedBy once we figure out how to access userService
|
||||
// to translate user ID into user login. meta.GetCreatedBy() only stores user ID
|
||||
// Could convert meta.GetCreatedBy() return value to a struct--id and name
|
||||
// CreatedBy: meta.GetCreatedBy(),
|
||||
// UpdatedBy: meta.GetCreatedBy(),
|
||||
URL: getURL(meta, title),
|
||||
CreatedBy: meta.GetCreatedBy(),
|
||||
UpdatedBy: meta.GetCreatedBy(),
|
||||
URL: getURL(meta, title),
|
||||
// #TODO get Created in format "2024-09-12T15:37:41.09466+02:00"
|
||||
Created: *created,
|
||||
Created: createdTime,
|
||||
// #TODO figure out whether we want to set "updated" and "updated by". Could replace with
|
||||
// meta.GetUpdatedTimestamp() but it currently gets overwritten in prepareObjectForStorage().
|
||||
Updated: *created,
|
||||
// #TODO figure out how to set these properly
|
||||
CanSave: true,
|
||||
CanEdit: true,
|
||||
CanAdmin: true,
|
||||
CanDelete: true,
|
||||
HasACL: false,
|
||||
Updated: createdTime,
|
||||
|
||||
// #TODO figure out about adding version, parents, orgID fields
|
||||
}
|
||||
@ -135,6 +171,9 @@ func convertToK8sResource(v *folder.Folder, namespacer request.NamespaceMapper)
|
||||
Timestamp: &v.Created,
|
||||
})
|
||||
}
|
||||
// #TODO: turns out these get overwritten by Unified Storage (see pkg/storage/unified/apistore/prepare.go)
|
||||
// We're going to have to align with that. For now we do need the user ID because the folder type stores it
|
||||
// as the only user identifier
|
||||
if v.CreatedBy > 0 {
|
||||
meta.SetCreatedBy(fmt.Sprintf("user:%d", v.CreatedBy))
|
||||
}
|
||||
|
@ -149,6 +149,7 @@ import (
|
||||
"github.com/grafana/grafana/pkg/services/team/teamimpl"
|
||||
tempuser "github.com/grafana/grafana/pkg/services/temp_user"
|
||||
"github.com/grafana/grafana/pkg/services/temp_user/tempuserimpl"
|
||||
"github.com/grafana/grafana/pkg/services/unifiedSearch"
|
||||
"github.com/grafana/grafana/pkg/services/updatechecker"
|
||||
"github.com/grafana/grafana/pkg/services/user"
|
||||
"github.com/grafana/grafana/pkg/services/user/userimpl"
|
||||
@ -229,6 +230,8 @@ var wireBasicSet = wire.NewSet(
|
||||
search.ProvideService,
|
||||
searchV2.ProvideService,
|
||||
searchV2.ProvideSearchHTTPService,
|
||||
unifiedSearch.ProvideService,
|
||||
unifiedSearch.ProvideSearchHTTPService,
|
||||
store.ProvideService,
|
||||
store.ProvideSystemUsersService,
|
||||
live.ProvideService,
|
||||
|
@ -173,10 +173,11 @@ type TeamRole struct {
|
||||
}
|
||||
|
||||
type UserRole struct {
|
||||
ID int64 `json:"id" xorm:"pk autoincr 'id'"`
|
||||
OrgID int64 `json:"orgId" xorm:"org_id"`
|
||||
RoleID int64 `json:"roleId" xorm:"role_id"`
|
||||
UserID int64 `json:"userId" xorm:"user_id"`
|
||||
ID int64 `json:"id" xorm:"pk autoincr 'id'"`
|
||||
OrgID int64 `json:"orgId" xorm:"org_id"`
|
||||
RoleID int64 `json:"roleId" xorm:"role_id"`
|
||||
UserID int64 `json:"userId" xorm:"user_id"`
|
||||
GroupMappingUID string `json:"groupMappingUID" xorm:"group_mapping_uid"`
|
||||
|
||||
Created time.Time
|
||||
}
|
||||
@ -455,6 +456,7 @@ const (
|
||||
ActionAlertingReceiversCreate = "alert.notifications.receivers:create"
|
||||
ActionAlertingReceiversUpdate = "alert.notifications.receivers:write"
|
||||
ActionAlertingReceiversDelete = "alert.notifications.receivers:delete"
|
||||
ActionAlertingReceiversTest = "alert.notifications.receivers:test"
|
||||
ActionAlertingReceiversPermissionsRead = "receivers.permissions:read"
|
||||
ActionAlertingReceiversPermissionsWrite = "receivers.permissions:write"
|
||||
|
||||
|
@ -11,6 +11,7 @@ import (
|
||||
"github.com/grafana/grafana/pkg/services/accesscontrol"
|
||||
contextmodel "github.com/grafana/grafana/pkg/services/contexthandler/model"
|
||||
"github.com/grafana/grafana/pkg/services/org"
|
||||
"github.com/grafana/grafana/pkg/services/team"
|
||||
"github.com/grafana/grafana/pkg/setting"
|
||||
"github.com/grafana/grafana/pkg/web"
|
||||
"go.opentelemetry.io/otel"
|
||||
@ -42,21 +43,27 @@ func (a *api) registerEndpoints() {
|
||||
licenseMW = nopMiddleware
|
||||
}
|
||||
|
||||
teamUIDResolver := team.MiddlewareTeamUIDResolver(a.service.teamService, ":teamID")
|
||||
teamUIDResolverResource := func() web.Handler { return func(c *contextmodel.ReqContext) {} }() // no-op
|
||||
if a.service.options.Resource == "teams" {
|
||||
teamUIDResolverResource = team.MiddlewareTeamUIDResolver(a.service.teamService, ":resourceID")
|
||||
}
|
||||
|
||||
a.router.Group(fmt.Sprintf("/api/access-control/%s", a.service.options.Resource), func(r routing.RouteRegister) {
|
||||
actionRead := fmt.Sprintf("%s.permissions:read", a.service.options.Resource)
|
||||
actionWrite := fmt.Sprintf("%s.permissions:write", a.service.options.Resource)
|
||||
scope := accesscontrol.Scope(a.service.options.Resource, a.service.options.ResourceAttribute, accesscontrol.Parameter(":resourceID"))
|
||||
r.Get("/description", auth(accesscontrol.EvalPermission(actionRead)), routing.Wrap(a.getDescription))
|
||||
r.Get("/:resourceID", auth(accesscontrol.EvalPermission(actionRead, scope)), routing.Wrap(a.getPermissions))
|
||||
r.Post("/:resourceID", licenseMW, auth(accesscontrol.EvalPermission(actionWrite, scope)), routing.Wrap(a.setPermissions))
|
||||
r.Get("/:resourceID", teamUIDResolverResource, auth(accesscontrol.EvalPermission(actionRead, scope)), routing.Wrap(a.getPermissions))
|
||||
r.Post("/:resourceID", teamUIDResolverResource, licenseMW, auth(accesscontrol.EvalPermission(actionWrite, scope)), routing.Wrap(a.setPermissions))
|
||||
if a.service.options.Assignments.Users {
|
||||
r.Post("/:resourceID/users/:userID", licenseMW, auth(accesscontrol.EvalPermission(actionWrite, scope)), routing.Wrap(a.setUserPermission))
|
||||
r.Post("/:resourceID/users/:userID", licenseMW, teamUIDResolverResource, auth(accesscontrol.EvalPermission(actionWrite, scope)), routing.Wrap(a.setUserPermission))
|
||||
}
|
||||
if a.service.options.Assignments.Teams {
|
||||
r.Post("/:resourceID/teams/:teamID", licenseMW, auth(accesscontrol.EvalPermission(actionWrite, scope)), routing.Wrap(a.setTeamPermission))
|
||||
r.Post("/:resourceID/teams/:teamID", licenseMW, teamUIDResolverResource, teamUIDResolver, auth(accesscontrol.EvalPermission(actionWrite, scope)), routing.Wrap(a.setTeamPermission))
|
||||
}
|
||||
if a.service.options.Assignments.BuiltInRoles {
|
||||
r.Post("/:resourceID/builtInRoles/:builtInRole", licenseMW, auth(accesscontrol.EvalPermission(actionWrite, scope)), routing.Wrap(a.setBuiltinRolePermission))
|
||||
r.Post("/:resourceID/builtInRoles/:builtInRole", teamUIDResolverResource, licenseMW, auth(accesscontrol.EvalPermission(actionWrite, scope)), routing.Wrap(a.setBuiltinRolePermission))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
@ -257,6 +257,7 @@ type setTeamPermissionTestCase struct {
|
||||
expectedStatus int
|
||||
permission string
|
||||
permissions []accesscontrol.Permission
|
||||
byUID bool
|
||||
}
|
||||
|
||||
func TestApi_setTeamPermission(t *testing.T) {
|
||||
@ -308,6 +309,20 @@ func TestApi_setTeamPermission(t *testing.T) {
|
||||
{Action: "dashboards.permissions:read", Scope: "dashboards:id:1"},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "should set View permission for team with id 1 but through UID",
|
||||
teamID: 1,
|
||||
resourceID: "1",
|
||||
expectedStatus: 200,
|
||||
permission: "View",
|
||||
byUID: true,
|
||||
permissions: []accesscontrol.Permission{
|
||||
{Action: "dashboards.permissions:read", Scope: "dashboards:id:1"},
|
||||
{Action: "dashboards.permissions:write", Scope: "dashboards:id:1"},
|
||||
{Action: accesscontrol.ActionTeamsRead, Scope: accesscontrol.ScopeTeamsAll},
|
||||
{Action: accesscontrol.ActionOrgUsersRead, Scope: accesscontrol.ScopeUsersAll},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
@ -316,10 +331,16 @@ func TestApi_setTeamPermission(t *testing.T) {
|
||||
server := setupTestServer(t, &user.SignedInUser{OrgID: 1, Permissions: map[int64]map[string][]string{1: accesscontrol.GroupScopesByActionContext(context.Background(), tt.permissions)}}, service)
|
||||
|
||||
// seed team
|
||||
_, err := teamSvc.CreateTeam(context.Background(), "test", "test@test.com", 1)
|
||||
team, err := teamSvc.CreateTeam(context.Background(), "test", "test@test.com", 1)
|
||||
require.NoError(t, err)
|
||||
|
||||
recorder := setPermission(t, server, testOptions.Resource, tt.resourceID, tt.permission, "teams", strconv.Itoa(int(tt.teamID)))
|
||||
assignTo := strconv.Itoa(int(tt.teamID))
|
||||
if tt.byUID {
|
||||
if team.ID == tt.teamID {
|
||||
assignTo = team.UID
|
||||
}
|
||||
}
|
||||
recorder := setPermission(t, server, testOptions.Resource, tt.resourceID, tt.permission, "teams", assignTo)
|
||||
assert.Equal(t, tt.expectedStatus, recorder.Code)
|
||||
|
||||
assert.Equal(t, tt.expectedStatus, recorder.Code)
|
||||
|
@ -17,8 +17,9 @@ import (
|
||||
)
|
||||
|
||||
var (
|
||||
errInvalidOrg = errutil.Unauthorized("anonymous.invalid-org")
|
||||
errInvalidID = errutil.Unauthorized("anonymous.invalid-id")
|
||||
errInvalidOrg = errutil.Unauthorized("anonymous.invalid-org")
|
||||
errInvalidID = errutil.Unauthorized("anonymous.invalid-id")
|
||||
errDeviceLimit = errutil.Unauthorized("anonymous.device-limit-reached", errutil.WithPublicMessage("Anonymous device limit reached. Contact Administrator"))
|
||||
)
|
||||
|
||||
var _ authn.ContextAwareClient = new(Anonymous)
|
||||
@ -51,7 +52,7 @@ func (a *Anonymous) Authenticate(ctx context.Context, r *authn.Request) (*authn.
|
||||
|
||||
if err := a.anonDeviceService.TagDevice(ctx, httpReqCopy, anonymous.AnonDeviceUI); err != nil {
|
||||
if errors.Is(err, anonstore.ErrDeviceLimitReached) {
|
||||
return nil, err
|
||||
return nil, errDeviceLimit.Errorf("limit reached for anonymous devices: %w", err)
|
||||
}
|
||||
|
||||
a.log.Warn("Failed to tag anonymous session", "error", err)
|
||||
|
@ -2,6 +2,7 @@ package anonimpl
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
@ -79,20 +80,29 @@ func (a *AnonDeviceService) usageStatFn(ctx context.Context) (map[string]any, er
}, nil
}

func (a *AnonDeviceService) tagDeviceUI(ctx context.Context, httpReq *http.Request, device *anonstore.Device) error {
func (a *AnonDeviceService) tagDeviceUI(ctx context.Context, device *anonstore.Device) error {
key := device.CacheKey()

if _, ok := a.localCache.Get(key); ok {
if val, ok := a.localCache.Get(key); ok {
if boolVal, ok := val.(bool); ok && !boolVal {
return anonstore.ErrDeviceLimitReached
}
return nil
}

a.localCache.SetDefault(key, struct{}{})
a.localCache.SetDefault(key, true)

if a.cfg.Env == setting.Dev {
a.log.Debug("Tagging device for UI", "deviceID", device.DeviceID, "device", device, "key", key)
}

if err := a.anonStore.CreateOrUpdateDevice(ctx, device); err != nil {
if errors.Is(err, anonstore.ErrDeviceLimitReached) {
a.localCache.SetDefault(key, false)
return err
}
// invalidate cache if there is an error
a.localCache.Delete(key)
return err
}
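The change above turns the local cache into a small positive/negative cache: true records a device that was tagged successfully, false records a device that already hit the limit, and either value short-circuits the store call on the next request. A self-contained sketch of that pattern with illustrative names (limitCache, tagOnce); the real service additionally caches true optimistically before writing and deletes the entry on unrelated errors:

package main

import (
	"errors"
	"fmt"
	"sync"
)

var errDeviceLimitReached = errors.New("device limit reached")

type limitCache struct {
	mu      sync.Mutex
	entries map[string]bool // true = tagged successfully, false = rejected by the device limit
}

// tagOnce consults the cache first and only calls store for unknown devices.
func (c *limitCache) tagOnce(key string, store func() error) error {
	c.mu.Lock()
	defer c.mu.Unlock()
	if ok, hit := c.entries[key]; hit {
		if !ok {
			return errDeviceLimitReached // remembered rejection, no store round trip
		}
		return nil // already tagged
	}
	if err := store(); err != nil {
		if errors.Is(err, errDeviceLimitReached) {
			c.entries[key] = false // remember the rejection for next time
		}
		return err
	}
	c.entries[key] = true
	return nil
}

func main() {
	c := &limitCache{entries: map[string]bool{}}
	fmt.Println(c.tagOnce("device-2", func() error { return errDeviceLimitReached })) // store reports the limit
	fmt.Println(c.tagOnce("device-2", func() error { return nil }))                   // answered from cache, still rejected
}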
@ -142,7 +152,7 @@ func (a *AnonDeviceService) TagDevice(ctx context.Context, httpReq *http.Request
|
||||
UpdatedAt: time.Now(),
|
||||
}
|
||||
|
||||
err = a.tagDeviceUI(ctx, httpReq, taggedDevice)
|
||||
err = a.tagDeviceUI(ctx, taggedDevice)
|
||||
if err != nil {
|
||||
a.log.Debug("Failed to tag device for UI", "error", err)
|
||||
return err
|
||||
|
@ -26,6 +26,10 @@ func TestMain(m *testing.M) {
|
||||
}
|
||||
|
||||
func TestIntegrationDeviceService_tag(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("skipping test in short mode")
|
||||
}
|
||||
|
||||
type tagReq struct {
|
||||
httpReq *http.Request
|
||||
kind anonymous.DeviceKind
|
||||
@ -152,6 +156,9 @@ func TestIntegrationDeviceService_tag(t *testing.T) {
|
||||
|
||||
// Ensure that the local cache prevents request from being tagged
|
||||
func TestIntegrationAnonDeviceService_localCacheSafety(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("skipping test in short mode")
|
||||
}
|
||||
store := db.InitTestDB(t)
|
||||
anonService := ProvideAnonymousDeviceService(&usagestats.UsageStatsMock{},
|
||||
&authntest.FakeService{}, store, setting.NewCfg(), orgtest.NewOrgServiceFake(), nil, actest.FakeAccessControl{}, &routing.RouteRegisterImpl{})
|
||||
@ -184,6 +191,10 @@ func TestIntegrationAnonDeviceService_localCacheSafety(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestIntegrationDeviceService_SearchDevice(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("skipping test in short mode")
|
||||
}
|
||||
|
||||
fixedTime := time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC) // Fixed timestamp for testing
|
||||
|
||||
testCases := []struct {
|
||||
@ -271,3 +282,88 @@ func TestIntegrationDeviceService_SearchDevice(t *testing.T) {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestIntegrationAnonDeviceService_DeviceLimitWithCache(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("skipping test in short mode")
|
||||
}
|
||||
// Setup test environment
|
||||
store := db.InitTestDB(t)
|
||||
cfg := setting.NewCfg()
|
||||
cfg.AnonymousDeviceLimit = 1 // Set device limit to 1 for testing
|
||||
anonService := ProvideAnonymousDeviceService(
|
||||
&usagestats.UsageStatsMock{},
|
||||
&authntest.FakeService{},
|
||||
store,
|
||||
cfg,
|
||||
orgtest.NewOrgServiceFake(),
|
||||
nil,
|
||||
actest.FakeAccessControl{},
|
||||
&routing.RouteRegisterImpl{},
|
||||
)
|
||||
|
||||
// Define test cases
|
||||
testCases := []struct {
|
||||
name string
|
||||
httpReq *http.Request
|
||||
expectedErr error
|
||||
}{
|
||||
{
|
||||
name: "first request should succeed",
|
||||
httpReq: &http.Request{
|
||||
Header: http.Header{
|
||||
"User-Agent": []string{"test"},
|
||||
"X-Forwarded-For": []string{"10.30.30.1"},
|
||||
http.CanonicalHeaderKey(deviceIDHeader): []string{"device1"},
|
||||
},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
name: "second request should fail due to device limit",
|
||||
httpReq: &http.Request{
|
||||
Header: http.Header{
|
||||
"User-Agent": []string{"test"},
|
||||
"X-Forwarded-For": []string{"10.30.30.2"},
|
||||
http.CanonicalHeaderKey(deviceIDHeader): []string{"device2"},
|
||||
},
|
||||
},
|
||||
expectedErr: anonstore.ErrDeviceLimitReached,
|
||||
},
|
||||
{
|
||||
name: "repeat request should hit cache and succeed",
|
||||
httpReq: &http.Request{
|
||||
Header: http.Header{
|
||||
"User-Agent": []string{"test"},
|
||||
"X-Forwarded-For": []string{"10.30.30.1"},
|
||||
http.CanonicalHeaderKey(deviceIDHeader): []string{"device1"},
|
||||
},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
name: "third request should hit cache and fail due to device limit",
|
||||
httpReq: &http.Request{
|
||||
Header: http.Header{
|
||||
"User-Agent": []string{"test"},
|
||||
"X-Forwarded-For": []string{"10.30.30.2"},
|
||||
http.CanonicalHeaderKey(deviceIDHeader): []string{"device2"},
|
||||
},
|
||||
},
|
||||
expectedErr: anonstore.ErrDeviceLimitReached,
|
||||
},
|
||||
}
|
||||
|
||||
// Run test cases
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
err := anonService.TagDevice(context.Background(), tc.httpReq, anonymous.AnonDeviceUI)
|
||||
if tc.expectedErr != nil {
|
||||
require.Error(t, err)
|
||||
assert.Equal(t, tc.expectedErr, err)
|
||||
} else {
|
||||
require.NoError(t, err)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
@ -20,8 +20,9 @@ const (

// Typed errors
var (
ErrUserTokenNotFound = errors.New("user token not found")
ErrInvalidSessionToken = usertoken.ErrInvalidSessionToken
ErrUserTokenNotFound = errors.New("user token not found")
ErrInvalidSessionToken = usertoken.ErrInvalidSessionToken
ErrExternalSessionNotFound = errors.New("external session not found")
)

type (
@ -65,10 +66,21 @@ type RotateCommand struct {
UserAgent string
}

type CreateTokenCommand struct {
User *user.User
ClientIP net.IP
UserAgent string
ExternalSession *ExternalSession
}

// UserTokenService are used for generating and validating user tokens
type UserTokenService interface {
CreateToken(ctx context.Context, user *user.User, clientIP net.IP, userAgent string) (*UserToken, error)
CreateToken(ctx context.Context, cmd *CreateTokenCommand) (*UserToken, error)
LookupToken(ctx context.Context, unhashedToken string) (*UserToken, error)
GetTokenByExternalSessionID(ctx context.Context, externalSessionID int64) (*UserToken, error)
GetExternalSession(ctx context.Context, extSessionID int64) (*ExternalSession, error)
FindExternalSessions(ctx context.Context, query *ListExternalSessionQuery) ([]*ExternalSession, error)

// RotateToken will always rotate a valid token
RotateToken(ctx context.Context, cmd RotateCommand) (*UserToken, error)
RevokeToken(ctx context.Context, token *UserToken, soft bool) error
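Callers now pass a single CreateTokenCommand instead of the old positional arguments, which also lets the login flow attach an external session to the token. A minimal sketch of the call site, assuming the package paths shown in this diff; createSession and the field values are placeholders:

package example

import (
	"context"
	"net"

	"github.com/grafana/grafana/pkg/services/auth"
	"github.com/grafana/grafana/pkg/services/user"
)

// createSession is a placeholder caller showing the new command-style API.
func createSession(ctx context.Context, tokens auth.UserTokenService, usr *user.User) (*auth.UserToken, error) {
	return tokens.CreateToken(ctx, &auth.CreateTokenCommand{
		User:      usr,
		ClientIP:  net.ParseIP("192.168.10.11"),
		UserAgent: "some user agent",
		// Optional: when set, the external session is stored in the same transaction
		// as the token (see the UserAuthTokenService change later in this diff).
		ExternalSession: &auth.ExternalSession{UserID: usr.ID, AuthModule: "test", UserAuthID: 1},
	})
}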
@ -14,10 +14,11 @@ import (
|
||||
"github.com/grafana/grafana/pkg/infra/db"
|
||||
"github.com/grafana/grafana/pkg/infra/log"
|
||||
"github.com/grafana/grafana/pkg/infra/serverlock"
|
||||
"github.com/grafana/grafana/pkg/infra/tracing"
|
||||
"github.com/grafana/grafana/pkg/models/usertoken"
|
||||
"github.com/grafana/grafana/pkg/services/auth"
|
||||
"github.com/grafana/grafana/pkg/services/quota"
|
||||
"github.com/grafana/grafana/pkg/services/user"
|
||||
"github.com/grafana/grafana/pkg/services/secrets"
|
||||
"github.com/grafana/grafana/pkg/setting"
|
||||
"github.com/grafana/grafana/pkg/util"
|
||||
)
|
||||
@ -28,10 +29,13 @@ var (
|
||||
errUserIDInvalid = errors.New("invalid user ID")
|
||||
)
|
||||
|
||||
var _ auth.UserTokenService = (*UserAuthTokenService)(nil)
|
||||
|
||||
func ProvideUserAuthTokenService(sqlStore db.DB,
|
||||
serverLockService *serverlock.ServerLockService,
|
||||
quotaService quota.Service,
|
||||
cfg *setting.Cfg) (*UserAuthTokenService, error) {
|
||||
quotaService quota.Service, secretService secrets.Service,
|
||||
cfg *setting.Cfg, tracer tracing.Tracer,
|
||||
) (*UserAuthTokenService, error) {
|
||||
s := &UserAuthTokenService{
|
||||
sqlStore: sqlStore,
|
||||
serverLockService: serverLockService,
|
||||
@ -39,6 +43,7 @@ func ProvideUserAuthTokenService(sqlStore db.DB,
|
||||
log: log.New("auth"),
|
||||
singleflight: new(singleflight.Group),
|
||||
}
|
||||
s.externalSessionStore = provideExternalSessionStore(sqlStore, secretService, tracer)
|
||||
|
||||
defaultLimits, err := readQuotaConfig(cfg)
|
||||
if err != nil {
|
||||
@ -57,31 +62,32 @@ func ProvideUserAuthTokenService(sqlStore db.DB,
|
||||
}
|
||||
|
||||
type UserAuthTokenService struct {
|
||||
sqlStore db.DB
|
||||
serverLockService *serverlock.ServerLockService
|
||||
cfg *setting.Cfg
|
||||
log log.Logger
|
||||
singleflight *singleflight.Group
|
||||
sqlStore db.DB
|
||||
serverLockService *serverlock.ServerLockService
|
||||
cfg *setting.Cfg
|
||||
log log.Logger
|
||||
externalSessionStore auth.ExternalSessionStore
|
||||
singleflight *singleflight.Group
|
||||
}
|
||||
|
||||
func (s *UserAuthTokenService) CreateToken(ctx context.Context, user *user.User, clientIP net.IP, userAgent string) (*auth.UserToken, error) {
|
||||
func (s *UserAuthTokenService) CreateToken(ctx context.Context, cmd *auth.CreateTokenCommand) (*auth.UserToken, error) {
|
||||
token, hashedToken, err := generateAndHashToken(s.cfg.SecretKey)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
now := getTime().Unix()
|
||||
clientIPStr := clientIP.String()
|
||||
if len(clientIP) == 0 {
|
||||
clientIPStr := cmd.ClientIP.String()
|
||||
if len(cmd.ClientIP) == 0 {
|
||||
clientIPStr = ""
|
||||
}
|
||||
|
||||
userAuthToken := userAuthToken{
|
||||
UserId: user.ID,
|
||||
UserId: cmd.User.ID,
|
||||
AuthToken: hashedToken,
|
||||
PrevAuthToken: hashedToken,
|
||||
ClientIp: clientIPStr,
|
||||
UserAgent: userAgent,
|
||||
UserAgent: cmd.UserAgent,
|
||||
RotatedAt: now,
|
||||
CreatedAt: now,
|
||||
UpdatedAt: now,
|
||||
@ -90,11 +96,21 @@ func (s *UserAuthTokenService) CreateToken(ctx context.Context, user *user.User,
|
||||
AuthTokenSeen: false,
|
||||
}
|
||||
|
||||
err = s.sqlStore.WithDbSession(ctx, func(dbSession *db.Session) error {
|
||||
_, err = dbSession.Insert(&userAuthToken)
|
||||
return err
|
||||
})
|
||||
err = s.sqlStore.InTransaction(ctx, func(ctx context.Context) error {
|
||||
if cmd.ExternalSession != nil {
|
||||
inErr := s.externalSessionStore.Create(ctx, cmd.ExternalSession)
|
||||
if inErr != nil {
|
||||
return inErr
|
||||
}
|
||||
userAuthToken.ExternalSessionId = cmd.ExternalSession.ID
|
||||
}
|
||||
|
||||
inErr := s.sqlStore.WithDbSession(ctx, func(dbSession *db.Session) error {
|
||||
_, err := dbSession.Insert(&userAuthToken)
|
||||
return err
|
||||
})
|
||||
return inErr
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -164,7 +180,6 @@ func (s *UserAuthTokenService) LookupToken(ctx context.Context, unhashedToken st
|
||||
|
||||
return err
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -190,7 +205,6 @@ func (s *UserAuthTokenService) LookupToken(ctx context.Context, unhashedToken st
|
||||
|
||||
return err
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -210,6 +224,38 @@ func (s *UserAuthTokenService) LookupToken(ctx context.Context, unhashedToken st
|
||||
return &userToken, err
|
||||
}
|
||||
|
||||
func (s *UserAuthTokenService) GetTokenByExternalSessionID(ctx context.Context, externalSessionID int64) (*auth.UserToken, error) {
|
||||
var token userAuthToken
|
||||
err := s.sqlStore.WithDbSession(ctx, func(dbSession *db.Session) error {
|
||||
exists, err := dbSession.Where("external_session_id = ?", externalSessionID).Get(&token)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if !exists {
|
||||
return auth.ErrUserTokenNotFound
|
||||
}
|
||||
|
||||
return nil
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var userToken auth.UserToken
|
||||
err = token.toUserToken(&userToken)
|
||||
|
||||
return &userToken, err
|
||||
}
|
||||
|
||||
func (s *UserAuthTokenService) GetExternalSession(ctx context.Context, extSessionID int64) (*auth.ExternalSession, error) {
|
||||
return s.externalSessionStore.Get(ctx, extSessionID)
|
||||
}
|
||||
|
||||
func (s *UserAuthTokenService) FindExternalSessions(ctx context.Context, query *auth.ListExternalSessionQuery) ([]*auth.ExternalSession, error) {
|
||||
return s.externalSessionStore.List(ctx, query)
|
||||
}
|
||||
|
||||
func (s *UserAuthTokenService) RotateToken(ctx context.Context, cmd auth.RotateCommand) (*auth.UserToken, error) {
|
||||
if cmd.UnHashedToken == "" {
|
||||
return nil, auth.ErrInvalidSessionToken
|
||||
@ -277,7 +323,6 @@ func (s *UserAuthTokenService) rotateToken(ctx context.Context, token *auth.User
|
||||
affected, err = res.RowsAffected()
|
||||
return err
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -305,6 +350,8 @@ func (s *UserAuthTokenService) RevokeToken(ctx context.Context, token *auth.User
|
||||
return err
|
||||
}
|
||||
|
||||
ctxLogger := s.log.FromContext(ctx)
|
||||
|
||||
var rowsAffected int64
|
||||
|
||||
if soft {
|
||||
@ -324,7 +371,13 @@ func (s *UserAuthTokenService) RevokeToken(ctx context.Context, token *auth.User
|
||||
return err
|
||||
}
|
||||
|
||||
ctxLogger := s.log.FromContext(ctx)
|
||||
if model.ExternalSessionId != 0 {
|
||||
err = s.externalSessionStore.Delete(ctx, model.ExternalSessionId)
|
||||
if err != nil {
|
||||
// Intentionally not returning error here, as the token has been revoked -> the background job will clean up orphaned external sessions
|
||||
ctxLogger.Warn("Failed to delete external session", "externalSessionID", model.ExternalSessionId, "err", err)
|
||||
}
|
||||
}
|
||||
|
||||
if rowsAffected == 0 {
|
||||
ctxLogger.Debug("User auth token not found/revoked", "tokenID", model.Id, "userID", model.UserId, "clientIP", model.ClientIp, "userAgent", model.UserAgent)
|
||||
@ -337,51 +390,75 @@ func (s *UserAuthTokenService) RevokeToken(ctx context.Context, token *auth.User
|
||||
}
|
||||
|
||||
func (s *UserAuthTokenService) RevokeAllUserTokens(ctx context.Context, userId int64) error {
|
||||
return s.sqlStore.WithDbSession(ctx, func(dbSession *db.Session) error {
|
||||
sql := `DELETE from user_auth_token WHERE user_id = ?`
|
||||
res, err := dbSession.Exec(sql, userId)
|
||||
return s.sqlStore.InTransaction(ctx, func(ctx context.Context) error {
|
||||
ctxLogger := s.log.FromContext(ctx)
|
||||
err := s.sqlStore.WithDbSession(ctx, func(dbSession *db.Session) error {
|
||||
sql := `DELETE from user_auth_token WHERE user_id = ?`
|
||||
res, err := dbSession.Exec(sql, userId)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
affected, err := res.RowsAffected()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ctxLogger.Debug("All user tokens for user revoked", "userID", userId, "count", affected)
|
||||
|
||||
return nil
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
affected, err := res.RowsAffected()
|
||||
err = s.externalSessionStore.DeleteExternalSessionsByUserID(ctx, userId)
|
||||
if err != nil {
|
||||
return err
|
||||
// Intentionally not returning error here, as the token has been revoked -> the background job will clean up orphaned external sessions
|
||||
ctxLogger.Warn("Failed to delete external sessions for user", "userID", userId, "err", err)
|
||||
}
|
||||
|
||||
s.log.FromContext(ctx).Debug("All user tokens for user revoked", "userID", userId, "count", affected)
|
||||
|
||||
return err
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
func (s *UserAuthTokenService) BatchRevokeAllUserTokens(ctx context.Context, userIds []int64) error {
|
||||
return s.sqlStore.WithTransactionalDbSession(ctx, func(dbSession *db.Session) error {
|
||||
return s.sqlStore.InTransaction(ctx, func(ctx context.Context) error {
|
||||
ctxLogger := s.log.FromContext(ctx)
|
||||
if len(userIds) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
user_id_params := strings.Repeat(",?", len(userIds)-1)
|
||||
sql := "DELETE from user_auth_token WHERE user_id IN (?" + user_id_params + ")"
|
||||
userIdParams := strings.Repeat(",?", len(userIds)-1)
|
||||
sql := "DELETE from user_auth_token WHERE user_id IN (?" + userIdParams + ")"
|
||||
|
||||
params := []any{sql}
|
||||
for _, v := range userIds {
|
||||
params = append(params, v)
|
||||
}
|
||||
|
||||
res, err := dbSession.Exec(params...)
|
||||
var affected int64
|
||||
|
||||
err := s.sqlStore.WithDbSession(ctx, func(dbSession *db.Session) error {
|
||||
res, inErr := dbSession.Exec(params...)
|
||||
if inErr != nil {
|
||||
return inErr
|
||||
}
|
||||
|
||||
affected, inErr = res.RowsAffected()
|
||||
return inErr
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
affected, err := res.RowsAffected()
|
||||
err = s.externalSessionStore.BatchDeleteExternalSessionsByUserIDs(ctx, userIds)
|
||||
if err != nil {
|
||||
return err
|
||||
ctxLogger.Warn("Failed to delete external sessions for users", "users", userIds, "err", err)
|
||||
}
|
||||
|
||||
s.log.FromContext(ctx).Debug("All user tokens for given users revoked", "usersCount", len(userIds), "count", affected)
|
||||
ctxLogger.Debug("All user tokens for given users revoked", "usersCount", len(userIds), "count", affected)
|
||||
|
||||
return err
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -3,20 +3,25 @@ package authimpl
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"net"
|
||||
"reflect"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/mock"
|
||||
"github.com/stretchr/testify/require"
|
||||
"golang.org/x/sync/singleflight"
|
||||
|
||||
"github.com/grafana/grafana/pkg/components/simplejson"
|
||||
"github.com/grafana/grafana/pkg/infra/db"
|
||||
"github.com/grafana/grafana/pkg/infra/log"
|
||||
"github.com/grafana/grafana/pkg/infra/tracing"
|
||||
"github.com/grafana/grafana/pkg/services/auth"
|
||||
"github.com/grafana/grafana/pkg/services/auth/authtest"
|
||||
"github.com/grafana/grafana/pkg/services/quota"
|
||||
"github.com/grafana/grafana/pkg/services/secrets/fakes"
|
||||
"github.com/grafana/grafana/pkg/services/user"
|
||||
"github.com/grafana/grafana/pkg/setting"
|
||||
"github.com/grafana/grafana/pkg/tests/testsuite"
|
||||
@ -36,8 +41,11 @@ func TestIntegrationUserAuthToken(t *testing.T) {
|
||||
|
||||
t.Run("When creating token", func(t *testing.T) {
|
||||
createToken := func() *auth.UserToken {
|
||||
userToken, err := ctx.tokenService.CreateToken(context.Background(), usr,
|
||||
net.ParseIP("192.168.10.11"), "some user agent")
|
||||
userToken, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{
|
||||
User: usr,
|
||||
ClientIP: net.ParseIP("192.168.10.11"),
|
||||
UserAgent: "some user agent",
|
||||
})
|
||||
require.Nil(t, err)
|
||||
require.NotNil(t, userToken)
|
||||
require.False(t, userToken.AuthTokenSeen)
|
||||
@ -109,8 +117,11 @@ func TestIntegrationUserAuthToken(t *testing.T) {
|
||||
userToken = createToken()
|
||||
|
||||
t.Run("When creating an additional token", func(t *testing.T) {
|
||||
userToken2, err := ctx.tokenService.CreateToken(context.Background(), usr,
|
||||
net.ParseIP("192.168.10.11"), "some user agent")
|
||||
userToken2, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{
|
||||
User: usr,
|
||||
ClientIP: net.ParseIP("192.168.10.11"),
|
||||
UserAgent: "some user agent",
|
||||
})
|
||||
require.Nil(t, err)
|
||||
require.NotNil(t, userToken2)
|
||||
|
||||
@ -156,8 +167,11 @@ func TestIntegrationUserAuthToken(t *testing.T) {
|
||||
for i := 0; i < 3; i++ {
|
||||
userId := usr.ID + int64(i+1)
|
||||
userIds = append(userIds, userId)
|
||||
_, err := ctx.tokenService.CreateToken(context.Background(), usr,
|
||||
net.ParseIP("192.168.10.11"), "some user agent")
|
||||
_, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{
|
||||
User: usr,
|
||||
ClientIP: net.ParseIP("192.168.10.11"),
|
||||
UserAgent: "some user agent",
|
||||
})
|
||||
require.Nil(t, err)
|
||||
}
|
||||
|
||||
@ -173,10 +187,89 @@ func TestIntegrationUserAuthToken(t *testing.T) {
|
||||
})
|
||||
})
|
||||
|
||||
t.Run("When creating token with external session", func(t *testing.T) {
|
||||
createToken := func() *auth.UserToken {
|
||||
userToken, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{
|
||||
User: usr,
|
||||
ClientIP: net.ParseIP("192.168.10.11"),
|
||||
UserAgent: "some user agent",
|
||||
ExternalSession: &auth.ExternalSession{UserID: usr.ID, AuthModule: "test", UserAuthID: 1},
|
||||
})
|
||||
require.Nil(t, err)
|
||||
require.NotNil(t, userToken)
|
||||
require.False(t, userToken.AuthTokenSeen)
|
||||
return userToken
|
||||
}
|
||||
|
||||
userToken := createToken()
|
||||
|
||||
t.Run("soft revoking existing token should remove the associated external session", func(t *testing.T) {
|
||||
err := ctx.tokenService.RevokeToken(context.Background(), userToken, true)
|
||||
require.Nil(t, err)
|
||||
|
||||
model, err := ctx.getAuthTokenByID(userToken.Id)
|
||||
require.Nil(t, err)
|
||||
require.NotNil(t, model)
|
||||
require.Greater(t, model.RevokedAt, int64(0))
|
||||
|
||||
extSess, err := ctx.getExternalSessionByID(userToken.ExternalSessionId)
|
||||
require.Nil(t, err)
|
||||
require.Nil(t, extSess)
|
||||
})
|
||||
|
||||
t.Run("revoking existing token should also remove the associated external session", func(t *testing.T) {
|
||||
err := ctx.tokenService.RevokeToken(context.Background(), userToken, false)
|
||||
require.Nil(t, err)
|
||||
|
||||
model, err := ctx.getAuthTokenByID(userToken.Id)
|
||||
require.Nil(t, err)
|
||||
require.Nil(t, model)
|
||||
|
||||
extSess, err := ctx.getExternalSessionByID(userToken.ExternalSessionId)
|
||||
require.Nil(t, err)
|
||||
require.Nil(t, extSess)
|
||||
})
|
||||
|
||||
t.Run("When revoking users tokens in a batch", func(t *testing.T) {
|
||||
t.Run("Can revoke all users tokens and associated external sessions", func(t *testing.T) {
|
||||
userIds := []int64{}
|
||||
extSessionIds := []int64{}
|
||||
for i := 0; i < 3; i++ {
|
||||
userId := usr.ID + int64(i+1)
|
||||
userIds = append(userIds, userId)
|
||||
token, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{
|
||||
User: usr,
|
||||
ClientIP: net.ParseIP("192.168.10.11"),
|
||||
UserAgent: "some user agent",
|
||||
ExternalSession: &auth.ExternalSession{UserID: userId, AuthModule: "test", UserAuthID: 1},
|
||||
})
|
||||
require.Nil(t, err)
|
||||
extSessionIds = append(extSessionIds, token.ExternalSessionId)
|
||||
}
|
||||
|
||||
err := ctx.tokenService.BatchRevokeAllUserTokens(context.Background(), userIds)
|
||||
require.Nil(t, err)
|
||||
|
||||
for i := 0; i < len(userIds); i++ {
|
||||
tokens, err := ctx.tokenService.GetUserTokens(context.Background(), userIds[i])
|
||||
require.Nil(t, err)
|
||||
require.Equal(t, 0, len(tokens))
|
||||
|
||||
extSess, err := ctx.getExternalSessionByID(extSessionIds[i])
|
||||
require.Nil(t, err)
|
||||
require.Nil(t, extSess)
|
||||
}
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
t.Run("expires correctly", func(t *testing.T) {
|
||||
ctx := createTestContext(t)
|
||||
userToken, err := ctx.tokenService.CreateToken(context.Background(), usr,
|
||||
net.ParseIP("192.168.10.11"), "some user agent")
|
||||
userToken, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{
|
||||
User: usr,
|
||||
ClientIP: net.ParseIP("192.168.10.11"),
|
||||
UserAgent: "some user agent",
|
||||
})
|
||||
require.Nil(t, err)
|
||||
|
||||
userToken, err = ctx.tokenService.LookupToken(context.Background(), userToken.UnhashedToken)
|
||||
@ -262,7 +355,11 @@ func TestIntegrationUserAuthToken(t *testing.T) {
|
||||
t.Run("can properly rotate tokens", func(t *testing.T) {
|
||||
getTime = func() time.Time { return now }
|
||||
ctx := createTestContext(t)
|
||||
userToken, err := ctx.tokenService.CreateToken(context.Background(), usr, net.ParseIP("192.168.10.11"), "some user agent")
|
||||
userToken, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{
|
||||
User: usr,
|
||||
ClientIP: net.ParseIP("192.168.10.11"),
|
||||
UserAgent: "some user agent",
|
||||
})
|
||||
require.Nil(t, err)
|
||||
|
||||
prevToken := userToken.AuthToken
|
||||
@ -335,8 +432,11 @@ func TestIntegrationUserAuthToken(t *testing.T) {
|
||||
|
||||
t.Run("keeps prev token valid for 1 minute after it is confirmed", func(t *testing.T) {
|
||||
getTime = func() time.Time { return now }
|
||||
userToken, err := ctx.tokenService.CreateToken(context.Background(), usr,
|
||||
net.ParseIP("192.168.10.11"), "some user agent")
|
||||
userToken, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{
|
||||
User: usr,
|
||||
ClientIP: net.ParseIP("192.168.10.11"),
|
||||
UserAgent: "some user agent",
|
||||
})
|
||||
require.Nil(t, err)
|
||||
require.NotNil(t, userToken)
|
||||
|
||||
@ -368,8 +468,11 @@ func TestIntegrationUserAuthToken(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("will not mark token unseen when prev and current are the same", func(t *testing.T) {
|
||||
userToken, err := ctx.tokenService.CreateToken(context.Background(), usr,
|
||||
net.ParseIP("192.168.10.11"), "some user agent")
|
||||
userToken, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{
|
||||
User: usr,
|
||||
ClientIP: net.ParseIP("192.168.10.11"),
|
||||
UserAgent: "some user agent",
|
||||
})
|
||||
require.Nil(t, err)
|
||||
require.NotNil(t, userToken)
|
||||
|
||||
@ -389,7 +492,11 @@ func TestIntegrationUserAuthToken(t *testing.T) {
|
||||
|
||||
t.Run("RotateToken", func(t *testing.T) {
|
||||
var prev string
|
||||
token, err := ctx.tokenService.CreateToken(context.Background(), usr, nil, "")
|
||||
token, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{
|
||||
User: usr,
|
||||
ClientIP: nil,
|
||||
UserAgent: "",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
t.Run("should rotate token when called with current auth token", func(t *testing.T) {
|
||||
prev = token.UnhashedToken
|
||||
@ -412,7 +519,11 @@ func TestIntegrationUserAuthToken(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("should return error when token is revoked", func(t *testing.T) {
|
||||
revokedToken, err := ctx.tokenService.CreateToken(context.Background(), usr, nil, "")
|
||||
revokedToken, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{
|
||||
User: usr,
|
||||
ClientIP: nil,
|
||||
UserAgent: "",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
// mark token as revoked
|
||||
err = ctx.sqlstore.WithDbSession(context.Background(), func(sess *db.Session) error {
|
||||
@ -426,7 +537,11 @@ func TestIntegrationUserAuthToken(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("should return error when token has expired", func(t *testing.T) {
|
||||
expiredToken, err := ctx.tokenService.CreateToken(context.Background(), usr, nil, "")
|
||||
expiredToken, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{
|
||||
User: usr,
|
||||
ClientIP: nil,
|
||||
UserAgent: "",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
// mark token as expired
|
||||
err = ctx.sqlstore.WithDbSession(context.Background(), func(sess *db.Session) error {
|
||||
@ -441,10 +556,18 @@ func TestIntegrationUserAuthToken(t *testing.T) {
|
||||
|
||||
t.Run("should only delete revoked tokens that are outside on specified window", func(t *testing.T) {
|
||||
usr := &user.User{ID: 100}
|
||||
token1, err := ctx.tokenService.CreateToken(context.Background(), usr, nil, "")
|
||||
token1, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{
|
||||
User: usr,
|
||||
ClientIP: nil,
|
||||
UserAgent: "",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
token2, err := ctx.tokenService.CreateToken(context.Background(), usr, nil, "")
|
||||
token2, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{
|
||||
User: usr,
|
||||
ClientIP: nil,
|
||||
UserAgent: "",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
getTime = func() time.Time {
|
||||
@ -474,18 +597,19 @@ func TestIntegrationUserAuthToken(t *testing.T) {
|
||||
|
||||
t.Run("When populating userAuthToken from UserToken should copy all properties", func(t *testing.T) {
|
||||
ut := auth.UserToken{
|
||||
Id: 1,
|
||||
UserId: 2,
|
||||
AuthToken: "a",
|
||||
PrevAuthToken: "b",
|
||||
UserAgent: "c",
|
||||
ClientIp: "d",
|
||||
AuthTokenSeen: true,
|
||||
SeenAt: 3,
|
||||
RotatedAt: 4,
|
||||
CreatedAt: 5,
|
||||
UpdatedAt: 6,
|
||||
UnhashedToken: "e",
|
||||
Id: 1,
|
||||
UserId: 2,
|
||||
AuthToken: "a",
|
||||
PrevAuthToken: "b",
|
||||
UserAgent: "c",
|
||||
ClientIp: "d",
|
||||
AuthTokenSeen: true,
|
||||
SeenAt: 3,
|
||||
RotatedAt: 4,
|
||||
CreatedAt: 5,
|
||||
UpdatedAt: 6,
|
||||
UnhashedToken: "e",
|
||||
ExternalSessionId: 7,
|
||||
}
|
||||
utBytes, err := json.Marshal(ut)
|
||||
require.Nil(t, err)
|
||||
@ -507,18 +631,19 @@ func TestIntegrationUserAuthToken(t *testing.T) {
|
||||
|
||||
t.Run("When populating userToken from userAuthToken should copy all properties", func(t *testing.T) {
|
||||
uat := userAuthToken{
|
||||
Id: 1,
|
||||
UserId: 2,
|
||||
AuthToken: "a",
|
||||
PrevAuthToken: "b",
|
||||
UserAgent: "c",
|
||||
ClientIp: "d",
|
||||
AuthTokenSeen: true,
|
||||
SeenAt: 3,
|
||||
RotatedAt: 4,
|
||||
CreatedAt: 5,
|
||||
UpdatedAt: 6,
|
||||
UnhashedToken: "e",
|
||||
Id: 1,
|
||||
UserId: 2,
|
||||
AuthToken: "a",
|
||||
PrevAuthToken: "b",
|
||||
UserAgent: "c",
|
||||
ClientIp: "d",
|
||||
AuthTokenSeen: true,
|
||||
SeenAt: 3,
|
||||
RotatedAt: 4,
|
||||
CreatedAt: 5,
|
||||
UpdatedAt: 6,
|
||||
UnhashedToken: "e",
|
||||
ExternalSessionId: 7,
|
||||
}
|
||||
uatBytes, err := json.Marshal(uat)
|
||||
require.Nil(t, err)
|
||||
@ -551,22 +676,27 @@ func createTestContext(t *testing.T) *testContext {
|
||||
TokenRotationIntervalMinutes: 10,
|
||||
}
|
||||
|
||||
extSessionStore := provideExternalSessionStore(sqlstore, &fakes.FakeSecretsService{}, tracing.InitializeTracerForTest())
|
||||
|
||||
tokenService := &UserAuthTokenService{
|
||||
sqlStore: sqlstore,
|
||||
cfg: cfg,
|
||||
log: log.New("test-logger"),
|
||||
singleflight: new(singleflight.Group),
|
||||
sqlStore: sqlstore,
|
||||
cfg: cfg,
|
||||
log: log.New("test-logger"),
|
||||
singleflight: new(singleflight.Group),
|
||||
externalSessionStore: extSessionStore,
|
||||
}
|
||||
|
||||
return &testContext{
|
||||
sqlstore: sqlstore,
|
||||
tokenService: tokenService,
|
||||
sqlstore: sqlstore,
|
||||
tokenService: tokenService,
|
||||
extSessionStore: &extSessionStore,
|
||||
}
|
||||
}
|
||||
|
||||
type testContext struct {
|
||||
sqlstore db.DB
|
||||
tokenService *UserAuthTokenService
|
||||
sqlstore db.DB
|
||||
tokenService *UserAuthTokenService
|
||||
extSessionStore *auth.ExternalSessionStore
|
||||
}
|
||||
|
||||
func (c *testContext) getAuthTokenByID(id int64) (*userAuthToken, error) {
|
||||
@ -585,6 +715,22 @@ func (c *testContext) getAuthTokenByID(id int64) (*userAuthToken, error) {
|
||||
return res, err
|
||||
}
|
||||
|
||||
func (c *testContext) getExternalSessionByID(ID int64) (*auth.ExternalSession, error) {
|
||||
var res *auth.ExternalSession
|
||||
err := c.sqlstore.WithDbSession(context.Background(), func(sess *db.Session) error {
|
||||
var t auth.ExternalSession
|
||||
found, err := sess.ID(ID).Get(&t)
|
||||
if err != nil || !found {
|
||||
return err
|
||||
}
|
||||
|
||||
res = &t
|
||||
return nil
|
||||
})
|
||||
|
||||
return res, err
|
||||
}
|
||||
|
||||
func (c *testContext) updateRotatedAt(id, rotatedAt int64) (bool, error) {
|
||||
hasRowsAffected := false
|
||||
err := c.sqlstore.WithDbSession(context.Background(), func(sess *db.Session) error {
|
||||
@ -609,8 +755,11 @@ func TestIntegrationTokenCount(t *testing.T) {
|
||||
user := &user.User{ID: int64(10)}
|
||||
|
||||
createToken := func() *auth.UserToken {
|
||||
userToken, err := ctx.tokenService.CreateToken(context.Background(), user,
|
||||
net.ParseIP("192.168.10.11"), "some user agent")
|
||||
userToken, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{
|
||||
User: user,
|
||||
ClientIP: net.ParseIP("192.168.10.11"),
|
||||
UserAgent: "some user agent",
|
||||
})
|
||||
require.Nil(t, err)
|
||||
require.NotNil(t, userToken)
|
||||
require.False(t, userToken.AuthTokenSeen)
|
||||
@ -637,3 +786,108 @@ func TestIntegrationTokenCount(t *testing.T) {
|
||||
require.Nil(t, err)
|
||||
require.Equal(t, int64(0), count)
|
||||
}
|
||||
|
||||
func TestRevokeAllUserTokens(t *testing.T) {
|
||||
t.Run("should not fail if the external sessions could not be removed", func(t *testing.T) {
|
||||
ctx := createTestContext(t)
|
||||
usr := &user.User{ID: int64(10)}
|
||||
|
||||
// Mock the external session store to return an error
|
||||
mockExternalSessionStore := &authtest.MockExternalSessionStore{}
|
||||
|
||||
mockExternalSessionStore.On("Create", mock.Anything, mock.IsType(&auth.ExternalSession{})).Run(func(args mock.Arguments) {
|
||||
extSession := args.Get(1).(*auth.ExternalSession)
|
||||
extSession.ID = 1
|
||||
}).Return(nil)
|
||||
mockExternalSessionStore.On("DeleteExternalSessionsByUserID", mock.Anything, usr.ID).Return(errors.New("some error"))
|
||||
ctx.tokenService.externalSessionStore = mockExternalSessionStore
|
||||
|
||||
userToken, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{
|
||||
User: usr,
|
||||
ClientIP: net.ParseIP("192.168.10.11"),
|
||||
UserAgent: "some user agent",
|
||||
ExternalSession: &auth.ExternalSession{UserID: usr.ID, AuthModule: "test", UserAuthID: 1},
|
||||
})
|
||||
require.Nil(t, err)
|
||||
require.NotNil(t, userToken)
|
||||
|
||||
err = ctx.tokenService.RevokeAllUserTokens(context.Background(), usr.ID)
|
||||
require.Nil(t, err)
|
||||
|
||||
model, err := ctx.getAuthTokenByID(userToken.Id)
|
||||
require.Nil(t, err)
|
||||
require.Nil(t, model)
|
||||
})
|
||||
}
|
||||
|
||||
func TestRevokeToken(t *testing.T) {
|
||||
t.Run("should not fail if the external sessions could not be removed", func(t *testing.T) {
|
||||
ctx := createTestContext(t)
|
||||
usr := &user.User{ID: int64(10)}
|
||||
mockExternalSessionStore := &authtest.MockExternalSessionStore{}
|
||||
|
||||
mockExternalSessionStore.On("Create", mock.Anything, mock.IsType(&auth.ExternalSession{})).Run(func(args mock.Arguments) {
|
||||
extSession := args.Get(1).(*auth.ExternalSession)
|
||||
extSession.ID = 2
|
||||
}).Return(nil)
|
||||
mockExternalSessionStore.On("Delete", mock.Anything, int64(2)).Return(errors.New("some error"))
|
||||
ctx.tokenService.externalSessionStore = mockExternalSessionStore
|
||||
|
||||
userToken, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{
|
||||
User: usr,
|
||||
ClientIP: net.ParseIP("192.168.10.11"),
|
||||
UserAgent: "some user agent",
|
||||
ExternalSession: &auth.ExternalSession{UserID: usr.ID, AuthModule: "test", UserAuthID: 1},
|
||||
})
|
||||
require.Nil(t, err)
|
||||
require.NotNil(t, userToken)
|
||||
|
||||
err = ctx.tokenService.RevokeToken(context.Background(), userToken, false)
|
||||
require.Nil(t, err)
|
||||
|
||||
model, err := ctx.getAuthTokenByID(userToken.Id)
|
||||
require.Nil(t, err)
|
||||
require.Nil(t, model)
|
||||
})
|
||||
}
|
||||
|
||||
func TestBatchRevokeAllUserTokens(t *testing.T) {
|
||||
t.Run("should not fail if the external sessions could not be removed", func(t *testing.T) {
|
||||
ctx := createTestContext(t)
|
||||
userIds := []int64{1, 2, 3}
|
||||
mockExternalSessionStore := &authtest.MockExternalSessionStore{}
|
||||
|
||||
mockExternalSessionStore.On("BatchDeleteExternalSessionsByUserIDs", mock.Anything, userIds).Return(errors.New("some error"))
|
||||
ctr := int64(0)
|
||||
mockExternalSessionStore.On("Create", mock.Anything, mock.IsType(&auth.ExternalSession{})).Run(func(args mock.Arguments) {
|
||||
extSession := args.Get(1).(*auth.ExternalSession)
|
||||
ctr += 1
|
||||
extSession.ID = ctr
|
||||
}).Return(nil)
|
||||
|
||||
ctx.tokenService.externalSessionStore = mockExternalSessionStore
|
||||
|
||||
for _, userID := range userIds {
|
||||
usr := &user.User{ID: userID}
|
||||
userToken, err := ctx.tokenService.CreateToken(context.Background(), &auth.CreateTokenCommand{
|
||||
User: usr,
|
||||
ClientIP: net.ParseIP("192.168.10.11"),
|
||||
UserAgent: "some user agent",
|
||||
ExternalSession: &auth.ExternalSession{UserID: usr.ID, AuthModule: "test", UserAuthID: 1},
|
||||
})
|
||||
require.Nil(t, err)
|
||||
require.NotNil(t, userToken)
|
||||
}
|
||||
|
||||
// Batch revoke all user tokens
|
||||
err := ctx.tokenService.BatchRevokeAllUserTokens(context.Background(), userIds)
|
||||
require.Nil(t, err)
|
||||
|
||||
// Verify that the tokens have been revoked
|
||||
for _, userID := range userIds {
|
||||
tokens, err := ctx.tokenService.GetUserTokens(context.Background(), userID)
|
||||
require.Nil(t, err)
|
||||
require.Equal(t, 0, len(tokens))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
244
pkg/services/auth/authimpl/external_session_store.go
Normal file
@ -0,0 +1,244 @@
|
||||
package authimpl
|
||||
|
||||
import (
|
||||
"context"
|
||||
"crypto/sha256"
|
||||
"encoding/base64"
|
||||
|
||||
"github.com/grafana/grafana/pkg/infra/db"
|
||||
"github.com/grafana/grafana/pkg/infra/tracing"
|
||||
"github.com/grafana/grafana/pkg/services/auth"
|
||||
"github.com/grafana/grafana/pkg/services/secrets"
|
||||
)
|
||||
|
||||
var _ auth.ExternalSessionStore = (*store)(nil)
|
||||
|
||||
type store struct {
|
||||
sqlStore db.DB
|
||||
secretsService secrets.Service
|
||||
tracer tracing.Tracer
|
||||
}
|
||||
|
||||
func provideExternalSessionStore(sqlStore db.DB, secretService secrets.Service, tracer tracing.Tracer) auth.ExternalSessionStore {
|
||||
return &store{
|
||||
sqlStore: sqlStore,
|
||||
secretsService: secretService,
|
||||
tracer: tracer,
|
||||
}
|
||||
}
|
||||
|
||||
func (s *store) Get(ctx context.Context, extSessionID int64) (*auth.ExternalSession, error) {
|
||||
ctx, span := s.tracer.Start(ctx, "externalsession.Get")
|
||||
defer span.End()
|
||||
|
||||
externalSession := &auth.ExternalSession{ID: extSessionID}
|
||||
|
||||
err := s.sqlStore.WithDbSession(ctx, func(sess *db.Session) error {
|
||||
found, err := sess.Get(externalSession)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if !found {
|
||||
return auth.ErrExternalSessionNotFound
|
||||
}
|
||||
return nil
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
err = s.decryptSecrets(externalSession)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return externalSession, nil
|
||||
}
|
||||
|
||||
func (s *store) List(ctx context.Context, query *auth.ListExternalSessionQuery) ([]*auth.ExternalSession, error) {
|
||||
ctx, span := s.tracer.Start(ctx, "externalsession.List")
|
||||
defer span.End()
|
||||
|
||||
externalSession := &auth.ExternalSession{}
|
||||
if query.ID != 0 {
|
||||
externalSession.ID = query.ID
|
||||
}
|
||||
|
||||
hash := sha256.New()
|
||||
|
||||
if query.SessionID != "" {
|
||||
hash.Write([]byte(query.SessionID))
|
||||
externalSession.SessionIDHash = base64.RawStdEncoding.EncodeToString(hash.Sum(nil))
|
||||
}
|
||||
|
||||
if query.NameID != "" {
|
||||
hash.Reset()
|
||||
hash.Write([]byte(query.NameID))
|
||||
externalSession.NameIDHash = base64.RawStdEncoding.EncodeToString(hash.Sum(nil))
|
||||
}
|
||||
|
||||
queryResult := make([]*auth.ExternalSession, 0)
|
||||
err := s.sqlStore.WithDbSession(ctx, func(sess *db.Session) error {
|
||||
return sess.Find(&queryResult, externalSession)
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for _, extSession := range queryResult {
|
||||
err := s.decryptSecrets(extSession)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return queryResult, nil
|
||||
}
|
||||
|
||||
func (s *store) Create(ctx context.Context, extSession *auth.ExternalSession) error {
|
||||
ctx, span := s.tracer.Start(ctx, "externalsession.Create")
|
||||
defer span.End()
|
||||
|
||||
var err error
|
||||
clone := extSession.Clone()
|
||||
|
||||
clone.AccessToken, err = s.encryptAndEncode(extSession.AccessToken)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
clone.RefreshToken, err = s.encryptAndEncode(extSession.RefreshToken)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
clone.IDToken, err = s.encryptAndEncode(extSession.IDToken)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if extSession.NameID != "" {
|
||||
hash := sha256.New()
|
||||
hash.Write([]byte(extSession.NameID))
|
||||
clone.NameIDHash = base64.RawStdEncoding.EncodeToString(hash.Sum(nil))
|
||||
}
|
||||
|
||||
clone.NameID, err = s.encryptAndEncode(extSession.NameID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if extSession.SessionID != "" {
|
||||
hash := sha256.New()
|
||||
hash.Write([]byte(extSession.SessionID))
|
||||
clone.SessionIDHash = base64.RawStdEncoding.EncodeToString(hash.Sum(nil))
|
||||
}
|
||||
|
||||
clone.SessionID, err = s.encryptAndEncode(extSession.SessionID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = s.sqlStore.WithDbSession(ctx, func(sess *db.Session) error {
|
||||
_, err := sess.Insert(clone)
|
||||
return err
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
extSession.ID = clone.ID
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *store) Delete(ctx context.Context, ID int64) error {
|
||||
ctx, span := s.tracer.Start(ctx, "externalsession.Delete")
|
||||
defer span.End()
|
||||
|
||||
externalSession := &auth.ExternalSession{ID: ID}
|
||||
err := s.sqlStore.WithDbSession(ctx, func(sess *db.Session) error {
|
||||
_, err := sess.Delete(externalSession)
|
||||
return err
|
||||
})
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *store) DeleteExternalSessionsByUserID(ctx context.Context, userID int64) error {
|
||||
ctx, span := s.tracer.Start(ctx, "externalsession.DeleteExternalSessionsByUserID")
|
||||
defer span.End()
|
||||
|
||||
externalSession := &auth.ExternalSession{UserID: userID}
|
||||
err := s.sqlStore.WithDbSession(ctx, func(sess *db.Session) error {
|
||||
_, err := sess.Delete(externalSession)
|
||||
return err
|
||||
})
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *store) BatchDeleteExternalSessionsByUserIDs(ctx context.Context, userIDs []int64) error {
|
||||
ctx, span := s.tracer.Start(ctx, "externalsession.BatchDeleteExternalSessionsByUserIDs")
|
||||
defer span.End()
|
||||
|
||||
externalSession := &auth.ExternalSession{}
|
||||
err := s.sqlStore.WithDbSession(ctx, func(sess *db.Session) error {
|
||||
_, err := sess.In("user_id", userIDs).Delete(externalSession)
|
||||
return err
|
||||
})
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *store) decryptSecrets(extSession *auth.ExternalSession) error {
|
||||
var err error
|
||||
extSession.AccessToken, err = s.decodeAndDecrypt(extSession.AccessToken)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
extSession.RefreshToken, err = s.decodeAndDecrypt(extSession.RefreshToken)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
extSession.IDToken, err = s.decodeAndDecrypt(extSession.IDToken)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
extSession.NameID, err = s.decodeAndDecrypt(extSession.NameID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
extSession.SessionID, err = s.decodeAndDecrypt(extSession.SessionID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *store) encryptAndEncode(str string) (string, error) {
|
||||
if str == "" {
|
||||
return "", nil
|
||||
}
|
||||
|
||||
encrypted, err := s.secretsService.Encrypt(context.Background(), []byte(str), secrets.WithoutScope())
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return base64.StdEncoding.EncodeToString(encrypted), nil
|
||||
}
|
||||
|
||||
func (s *store) decodeAndDecrypt(str string) (string, error) {
|
||||
// Bail out if empty string since it'll cause a segfault in Decrypt
|
||||
if str == "" {
|
||||
return "", nil
|
||||
}
|
||||
decoded, err := base64.StdEncoding.DecodeString(str)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
decrypted, err := s.secretsService.Decrypt(context.Background(), decoded)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return string(decrypted), nil
|
||||
}
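For reference, a minimal round-trip sketch of the encryptAndEncode / decodeAndDecrypt helpers above. It assumes the fake secrets service from pkg/services/secrets/fakes (the same one the tests below construct) is a reasonable stand-in for the real service; this is an illustration only and is not part of the change.

package main

import (
	"context"
	"encoding/base64"
	"fmt"

	"github.com/grafana/grafana/pkg/services/secrets"
	"github.com/grafana/grafana/pkg/services/secrets/fakes"
)

func main() {
	svc := fakes.NewFakeSecretsService()

	// Mirror store.encryptAndEncode: encrypt, then base64-encode for storage.
	encrypted, err := svc.Encrypt(context.Background(), []byte("access-token"), secrets.WithoutScope())
	if err != nil {
		panic(err)
	}
	stored := base64.StdEncoding.EncodeToString(encrypted)

	// Mirror store.decodeAndDecrypt: base64-decode, then decrypt on read.
	decoded, err := base64.StdEncoding.DecodeString(stored)
	if err != nil {
		panic(err)
	}
	plain, err := svc.Decrypt(context.Background(), decoded)
	if err != nil {
		panic(err)
	}

	fmt.Println(string(plain)) // prints "access-token"
}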
|
228
pkg/services/auth/authimpl/external_session_store_test.go
Normal file
@ -0,0 +1,228 @@
|
||||
package authimpl
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
|
||||
"github.com/grafana/grafana/pkg/infra/db"
|
||||
"github.com/grafana/grafana/pkg/infra/tracing"
|
||||
"github.com/grafana/grafana/pkg/services/auth"
|
||||
"github.com/grafana/grafana/pkg/services/secrets/fakes"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestGetExternalSession(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("skipping integration test")
|
||||
}
|
||||
t.Run("returns existing external session", func(t *testing.T) {
|
||||
store := setupTest(t)
|
||||
|
||||
extSession := &auth.ExternalSession{
|
||||
AccessToken: "access-token",
|
||||
}
|
||||
|
||||
err := store.Create(context.Background(), extSession)
|
||||
require.NoError(t, err)
|
||||
|
||||
actual, err := store.Get(context.Background(), extSession.ID)
|
||||
require.NoError(t, err)
|
||||
require.EqualValues(t, extSession.ID, actual.ID)
|
||||
require.EqualValues(t, extSession.AccessToken, actual.AccessToken)
|
||||
})
|
||||
|
||||
t.Run("returns not found if the external session is missing", func(t *testing.T) {
|
||||
store := setupTest(t)
|
||||
|
||||
_, err := store.Get(context.Background(), 999)
|
||||
require.ErrorIs(t, err, auth.ErrExternalSessionNotFound)
|
||||
})
|
||||
}
|
||||
|
||||
func TestFindExternalSessions(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("skipping integration test")
|
||||
}
|
||||
|
||||
t.Run("returns external sessions by ID", func(t *testing.T) {
|
||||
store := setupTest(t)
|
||||
|
||||
extSession := &auth.ExternalSession{
|
||||
AccessToken: "access-token",
|
||||
}
|
||||
|
||||
err := store.Create(context.Background(), extSession)
|
||||
require.NoError(t, err)
|
||||
|
||||
query := &auth.ListExternalSessionQuery{ID: extSession.ID}
|
||||
actual, err := store.List(context.Background(), query)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, actual, 1)
|
||||
require.EqualValues(t, extSession.ID, actual[0].ID)
|
||||
require.EqualValues(t, extSession.AccessToken, actual[0].AccessToken)
|
||||
})
|
||||
|
||||
t.Run("returns external sessions by SessionID", func(t *testing.T) {
|
||||
store := setupTest(t)
|
||||
|
||||
extSession := &auth.ExternalSession{
|
||||
SessionID: "session-index",
|
||||
}
|
||||
err := store.Create(context.Background(), extSession)
|
||||
require.NoError(t, err)
|
||||
|
||||
query := &auth.ListExternalSessionQuery{SessionID: extSession.SessionID}
|
||||
actual, err := store.List(context.Background(), query)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, actual, 1)
|
||||
require.EqualValues(t, extSession.ID, actual[0].ID)
|
||||
require.EqualValues(t, extSession.SessionID, actual[0].SessionID)
|
||||
})
|
||||
|
||||
t.Run("returns external sessions by NameID", func(t *testing.T) {
|
||||
store := setupTest(t)
|
||||
|
||||
extSession := &auth.ExternalSession{
|
||||
NameID: "name-id",
|
||||
}
|
||||
|
||||
err := store.Create(context.Background(), extSession)
|
||||
require.NoError(t, err)
|
||||
|
||||
query := &auth.ListExternalSessionQuery{NameID: extSession.NameID}
|
||||
actual, err := store.List(context.Background(), query)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, actual, 1)
|
||||
require.EqualValues(t, extSession.ID, actual[0].ID)
|
||||
require.EqualValues(t, extSession.NameID, actual[0].NameID)
|
||||
})
|
||||
|
||||
t.Run("returns empty result if no external sessions match the query", func(t *testing.T) {
|
||||
store := setupTest(t)
|
||||
|
||||
query := &auth.ListExternalSessionQuery{ID: 999}
|
||||
actual, err := store.List(context.Background(), query)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, actual, 0)
|
||||
})
|
||||
}
|
||||
|
||||
func TestDeleteExternalSessionsByUserID(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("skipping integration test")
|
||||
}
|
||||
|
||||
t.Run("deletes all external sessions for a given user ID", func(t *testing.T) {
|
||||
store := setupTest(t)
|
||||
|
||||
userID := int64(1)
|
||||
extSession1 := &auth.ExternalSession{
|
||||
UserID: userID,
|
||||
AccessToken: "access-token-1",
|
||||
}
|
||||
extSession2 := &auth.ExternalSession{
|
||||
UserID: userID,
|
||||
AccessToken: "access-token-2",
|
||||
}
|
||||
|
||||
err := store.Create(context.Background(), extSession1)
|
||||
require.NoError(t, err)
|
||||
err = store.Create(context.Background(), extSession2)
|
||||
require.NoError(t, err)
|
||||
|
||||
err = store.DeleteExternalSessionsByUserID(context.Background(), userID)
|
||||
require.NoError(t, err)
|
||||
|
||||
query := &auth.ListExternalSessionQuery{}
|
||||
actual, err := store.List(context.Background(), query)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, actual, 0)
|
||||
})
|
||||
|
||||
t.Run("returns no error if no external sessions exist for the given user ID", func(t *testing.T) {
|
||||
store := setupTest(t)
|
||||
|
||||
userID := int64(999)
|
||||
err := store.DeleteExternalSessionsByUserID(context.Background(), userID)
|
||||
require.NoError(t, err)
|
||||
})
|
||||
}
|
||||
|
||||
func TestDeleteExternalSession(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("skipping integration test")
|
||||
}
|
||||
|
||||
t.Run("deletes an existing external session", func(t *testing.T) {
|
||||
store := setupTest(t)
|
||||
|
||||
extSession := &auth.ExternalSession{
|
||||
AccessToken: "access-token",
|
||||
}
|
||||
|
||||
err := store.Create(context.Background(), extSession)
|
||||
require.NoError(t, err)
|
||||
|
||||
err = store.Delete(context.Background(), extSession.ID)
|
||||
require.NoError(t, err)
|
||||
|
||||
_, err = store.Get(context.Background(), extSession.ID)
|
||||
require.ErrorIs(t, err, auth.ErrExternalSessionNotFound)
|
||||
})
|
||||
|
||||
t.Run("returns no error if the external session does not exist", func(t *testing.T) {
|
||||
store := setupTest(t)
|
||||
|
||||
err := store.Delete(context.Background(), 999)
|
||||
require.NoError(t, err)
|
||||
})
|
||||
}
|
||||
|
||||
func TestBatchDeleteExternalSessionsByUserIDs(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("skipping integration test")
|
||||
}
|
||||
|
||||
t.Run("deletes all external sessions for given user IDs", func(t *testing.T) {
|
||||
store := setupTest(t)
|
||||
|
||||
userID1 := int64(1)
|
||||
userID2 := int64(2)
|
||||
extSession1 := &auth.ExternalSession{
|
||||
UserID: userID1,
|
||||
AccessToken: "access-token-1",
|
||||
}
|
||||
extSession2 := &auth.ExternalSession{
|
||||
UserID: userID2,
|
||||
AccessToken: "access-token-2",
|
||||
}
|
||||
|
||||
err := store.Create(context.Background(), extSession1)
|
||||
require.NoError(t, err)
|
||||
err = store.Create(context.Background(), extSession2)
|
||||
require.NoError(t, err)
|
||||
|
||||
err = store.BatchDeleteExternalSessionsByUserIDs(context.Background(), []int64{userID1, userID2})
|
||||
require.NoError(t, err)
|
||||
|
||||
query := &auth.ListExternalSessionQuery{}
|
||||
actual, err := store.List(context.Background(), query)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, actual, 0)
|
||||
})
|
||||
|
||||
t.Run("returns no error if no external sessions exist for the given user IDs", func(t *testing.T) {
|
||||
store := setupTest(t)
|
||||
|
||||
err := store.BatchDeleteExternalSessionsByUserIDs(context.Background(), []int64{999, 1000})
|
||||
require.NoError(t, err)
|
||||
})
|
||||
}
|
||||
|
||||
func setupTest(t *testing.T) *store {
|
||||
sqlStore := db.InitTestDB(t)
|
||||
secretService := fakes.NewFakeSecretsService()
|
||||
tracer := tracing.InitializeTracerForTest()
|
||||
externalSessionStore := provideExternalSessionStore(sqlStore, secretService, tracer).(*store)
|
||||
return externalSessionStore
|
||||
}
|
@ -7,19 +7,20 @@ import (
|
||||
)
|
||||
|
||||
type userAuthToken struct {
|
||||
Id int64
|
||||
UserId int64
|
||||
AuthToken string
|
||||
PrevAuthToken string
|
||||
UserAgent string
|
||||
ClientIp string
|
||||
AuthTokenSeen bool
|
||||
SeenAt int64
|
||||
RotatedAt int64
|
||||
CreatedAt int64
|
||||
UpdatedAt int64
|
||||
RevokedAt int64
|
||||
UnhashedToken string `xorm:"-"`
|
||||
Id int64
|
||||
UserId int64
|
||||
AuthToken string
|
||||
PrevAuthToken string
|
||||
UserAgent string
|
||||
ClientIp string
|
||||
AuthTokenSeen bool
|
||||
SeenAt int64
|
||||
RotatedAt int64
|
||||
CreatedAt int64
|
||||
UpdatedAt int64
|
||||
RevokedAt int64
|
||||
UnhashedToken string `xorm:"-"`
|
||||
ExternalSessionId int64
|
||||
}
|
||||
|
||||
func userAuthTokenFromUserToken(ut *auth.UserToken) (*userAuthToken, error) {
|
||||
@ -46,6 +47,7 @@ func (uat *userAuthToken) fromUserToken(ut *auth.UserToken) error {
|
||||
uat.UpdatedAt = ut.UpdatedAt
|
||||
uat.RevokedAt = ut.RevokedAt
|
||||
uat.UnhashedToken = ut.UnhashedToken
|
||||
uat.ExternalSessionId = ut.ExternalSessionId
|
||||
|
||||
return nil
|
||||
}
|
||||
@ -68,5 +70,6 @@ func (uat *userAuthToken) toUserToken(ut *auth.UserToken) error {
|
||||
ut.UpdatedAt = uat.UpdatedAt
|
||||
ut.RevokedAt = uat.RevokedAt
|
||||
ut.UnhashedToken = uat.UnhashedToken
|
||||
ut.ExternalSessionId = uat.ExternalSessionId
|
||||
return nil
|
||||
}
|
||||
|
@ -16,6 +16,9 @@ func (s *UserAuthTokenService) Run(ctx context.Context) error {
|
||||
if _, err := s.deleteExpiredTokens(ctx, maxInactiveLifetime, maxLifetime); err != nil {
|
||||
s.log.Error("An error occurred while deleting expired tokens", "err", err)
|
||||
}
|
||||
if err := s.deleteOrphanedExternalSessions(ctx); err != nil {
|
||||
s.log.Error("An error occurred while deleting orphaned external sessions", "err", err)
|
||||
}
|
||||
})
|
||||
if err != nil {
|
||||
s.log.Error("Failed to lock and execute cleanup of expired auth token", "error", err)
|
||||
@ -28,6 +31,9 @@ func (s *UserAuthTokenService) Run(ctx context.Context) error {
|
||||
if _, err := s.deleteExpiredTokens(ctx, maxInactiveLifetime, maxLifetime); err != nil {
|
||||
s.log.Error("An error occurred while deleting expired tokens", "err", err)
|
||||
}
|
||||
if err := s.deleteOrphanedExternalSessions(ctx); err != nil {
|
||||
s.log.Error("An error occurred while deleting orphaned external sessions", "err", err)
|
||||
}
|
||||
})
|
||||
if err != nil {
|
||||
s.log.Error("Failed to lock and execute cleanup of expired auth token", "error", err)
|
||||
@ -66,3 +72,29 @@ func (s *UserAuthTokenService) deleteExpiredTokens(ctx context.Context, maxInact
|
||||
|
||||
return affected, err
|
||||
}
|
||||
|
||||
func (s *UserAuthTokenService) deleteOrphanedExternalSessions(ctx context.Context) error {
|
||||
s.log.Debug("Starting cleanup of external sessions")
|
||||
|
||||
var affected int64
|
||||
err := s.sqlStore.WithDbSession(ctx, func(dbSession *db.Session) error {
|
||||
sql := `DELETE FROM user_external_session WHERE NOT EXISTS (SELECT 1 FROM user_auth_token WHERE user_external_session.id = user_auth_token.external_session_id)`
|
||||
|
||||
res, err := dbSession.Exec(sql)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
affected, err = res.RowsAffected()
|
||||
if err != nil {
|
||||
s.log.Error("Failed to cleanup orphaned external sessions", "error", err)
|
||||
return nil
|
||||
}
|
||||
|
||||
s.log.Debug("Cleanup of orphaned external sessions done", "count", affected)
|
||||
|
||||
return nil
|
||||
})
|
||||
|
||||
return err
|
||||
}
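A minimal sketch of asserting the cleanup's effect on a single row, reusing createTestContext and getExternalSessionByID from the token-service tests earlier in this diff; the helper name assertOrphanRemoved is hypothetical and the snippet is illustrative only, not part of the change.

func assertOrphanRemoved(t *testing.T, ctx *testContext, orphanID int64) {
	// deleteOrphanedExternalSessions removes external sessions that no
	// user_auth_token row references via external_session_id.
	err := ctx.tokenService.deleteOrphanedExternalSessions(context.Background())
	require.NoError(t, err)

	// getExternalSessionByID returns (nil, nil) when the row no longer exists.
	sess, err := ctx.getExternalSessionByID(orphanID)
	require.NoError(t, err)
	require.Nil(t, sess, "orphaned external session should have been deleted")
}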
|
||||
|
@ -9,9 +9,14 @@ import (
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/grafana/grafana/pkg/infra/db"
|
||||
"github.com/grafana/grafana/pkg/services/auth"
|
||||
)
|
||||
|
||||
func TestUserAuthTokenCleanup(t *testing.T) {
|
||||
func TestIntegrationUserAuthTokenCleanup(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("skipping integration test")
|
||||
}
|
||||
|
||||
setup := func() *testContext {
|
||||
ctx := createTestContext(t)
|
||||
maxInactiveLifetime, _ := time.ParseDuration("168h")
|
||||
@ -75,3 +80,61 @@ func TestUserAuthTokenCleanup(t *testing.T) {
|
||||
require.Equal(t, int64(3), affected)
|
||||
})
|
||||
}
|
||||
|
||||
func TestIntegrationOrphanedExternalSessionsCleanup(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("skipping integration test")
|
||||
}
|
||||
|
||||
setup := func() *testContext {
|
||||
ctx := createTestContext(t)
|
||||
return ctx
|
||||
}
|
||||
|
||||
insertExternalSession := func(ctx *testContext, id int64) {
|
||||
es := &auth.ExternalSession{ID: id, UserAuthID: 1, UserID: 1}
|
||||
err := ctx.sqlstore.WithDbSession(context.Background(), func(sess *db.Session) error {
|
||||
_, err := sess.Insert(es)
|
||||
require.Nil(t, err)
|
||||
return nil
|
||||
})
|
||||
require.NoError(t, err)
|
||||
}
|
||||
|
||||
insertAuthToken := func(ctx *testContext, token string, externalSessionId int64) {
|
||||
ut := userAuthToken{AuthToken: token, PrevAuthToken: fmt.Sprintf("old%s", token), ExternalSessionId: externalSessionId}
|
||||
err := ctx.sqlstore.WithDbSession(context.Background(), func(sess *db.Session) error {
|
||||
_, err := sess.Insert(&ut)
|
||||
require.Nil(t, err)
|
||||
return nil
|
||||
})
|
||||
require.NoError(t, err)
|
||||
}
|
||||
|
||||
t.Run("should delete orphaned external sessions", func(t *testing.T) {
|
||||
ctx := setup()
|
||||
|
||||
// insert three external sessions
|
||||
for i := int64(1); i <= 3; i++ {
|
||||
insertExternalSession(ctx, i)
|
||||
}
|
||||
|
||||
// insert two auth tokens linked to external sessions
|
||||
insertAuthToken(ctx, "token1", 1)
|
||||
insertAuthToken(ctx, "token2", 2)
|
||||
|
||||
// delete orphaned external sessions
|
||||
err := ctx.tokenService.deleteOrphanedExternalSessions(context.Background())
|
||||
require.NoError(t, err)
|
||||
|
||||
// verify that only the orphaned external session is deleted
|
||||
var count int64
|
||||
err = ctx.sqlstore.WithDbSession(context.Background(), func(sess *db.Session) error {
|
||||
count, err = sess.Count(&auth.ExternalSession{})
|
||||
require.Nil(t, err)
|
||||
return nil
|
||||
})
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, int64(2), count)
|
||||
})
|
||||
}
|
||||
|
162
pkg/services/auth/authtest/external_session_store_mock.go
Normal file
@ -0,0 +1,162 @@
|
||||
// Code generated by mockery v2.42.1. DO NOT EDIT.
|
||||
|
||||
package authtest
|
||||
|
||||
import (
|
||||
context "context"
|
||||
|
||||
auth "github.com/grafana/grafana/pkg/services/auth"
|
||||
|
||||
mock "github.com/stretchr/testify/mock"
|
||||
)
|
||||
|
||||
// MockExternalSessionStore is an autogenerated mock type for the ExternalSessionStore type
|
||||
type MockExternalSessionStore struct {
|
||||
mock.Mock
|
||||
}
|
||||
|
||||
// BatchDeleteExternalSessionsByUserIDs provides a mock function with given fields: ctx, userIDs
|
||||
func (_m *MockExternalSessionStore) BatchDeleteExternalSessionsByUserIDs(ctx context.Context, userIDs []int64) error {
|
||||
ret := _m.Called(ctx, userIDs)
|
||||
|
||||
if len(ret) == 0 {
|
||||
panic("no return value specified for BatchDeleteExternalSessionsByUserIDs")
|
||||
}
|
||||
|
||||
var r0 error
|
||||
if rf, ok := ret.Get(0).(func(context.Context, []int64) error); ok {
|
||||
r0 = rf(ctx, userIDs)
|
||||
} else {
|
||||
r0 = ret.Error(0)
|
||||
}
|
||||
|
||||
return r0
|
||||
}
|
||||
|
||||
// Create provides a mock function with given fields: ctx, extSession
func (_m *MockExternalSessionStore) Create(ctx context.Context, extSession *auth.ExternalSession) error {
	ret := _m.Called(ctx, extSession)

	if len(ret) == 0 {
		panic("no return value specified for Create")
	}

	var r0 error
	if rf, ok := ret.Get(0).(func(context.Context, *auth.ExternalSession) error); ok {
		r0 = rf(ctx, extSession)
	} else {
		r0 = ret.Error(0)
	}

	return r0
}
|
||||
|
||||
// Delete provides a mock function with given fields: ctx, ID
|
||||
func (_m *MockExternalSessionStore) Delete(ctx context.Context, ID int64) error {
|
||||
ret := _m.Called(ctx, ID)
|
||||
|
||||
if len(ret) == 0 {
|
||||
panic("no return value specified for Delete")
|
||||
}
|
||||
|
||||
var r0 error
|
||||
if rf, ok := ret.Get(0).(func(context.Context, int64) error); ok {
|
||||
r0 = rf(ctx, ID)
|
||||
} else {
|
||||
r0 = ret.Error(0)
|
||||
}
|
||||
|
||||
return r0
|
||||
}
|
||||
|
||||
// DeleteExternalSessionsByUserID provides a mock function with given fields: ctx, userID
|
||||
func (_m *MockExternalSessionStore) DeleteExternalSessionsByUserID(ctx context.Context, userID int64) error {
|
||||
ret := _m.Called(ctx, userID)
|
||||
|
||||
if len(ret) == 0 {
|
||||
panic("no return value specified for DeleteExternalSessionsByUserID")
|
||||
}
|
||||
|
||||
var r0 error
|
||||
if rf, ok := ret.Get(0).(func(context.Context, int64) error); ok {
|
||||
r0 = rf(ctx, userID)
|
||||
} else {
|
||||
r0 = ret.Error(0)
|
||||
}
|
||||
|
||||
return r0
|
||||
}
|
||||
|
||||
// Get provides a mock function with given fields: ctx, ID
|
||||
func (_m *MockExternalSessionStore) Get(ctx context.Context, ID int64) (*auth.ExternalSession, error) {
|
||||
ret := _m.Called(ctx, ID)
|
||||
|
||||
if len(ret) == 0 {
|
||||
panic("no return value specified for Get")
|
||||
}
|
||||
|
||||
var r0 *auth.ExternalSession
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(0).(func(context.Context, int64) (*auth.ExternalSession, error)); ok {
|
||||
return rf(ctx, ID)
|
||||
}
|
||||
if rf, ok := ret.Get(0).(func(context.Context, int64) *auth.ExternalSession); ok {
|
||||
r0 = rf(ctx, ID)
|
||||
} else {
|
||||
if ret.Get(0) != nil {
|
||||
r0 = ret.Get(0).(*auth.ExternalSession)
|
||||
}
|
||||
}
|
||||
|
||||
if rf, ok := ret.Get(1).(func(context.Context, int64) error); ok {
|
||||
r1 = rf(ctx, ID)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// List provides a mock function with given fields: ctx, query
|
||||
func (_m *MockExternalSessionStore) List(ctx context.Context, query *auth.ListExternalSessionQuery) ([]*auth.ExternalSession, error) {
|
||||
ret := _m.Called(ctx, query)
|
||||
|
||||
if len(ret) == 0 {
|
||||
panic("no return value specified for List")
|
||||
}
|
||||
|
||||
var r0 []*auth.ExternalSession
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(0).(func(context.Context, *auth.ListExternalSessionQuery) ([]*auth.ExternalSession, error)); ok {
|
||||
return rf(ctx, query)
|
||||
}
|
||||
if rf, ok := ret.Get(0).(func(context.Context, *auth.ListExternalSessionQuery) []*auth.ExternalSession); ok {
|
||||
r0 = rf(ctx, query)
|
||||
} else {
|
||||
if ret.Get(0) != nil {
|
||||
r0 = ret.Get(0).([]*auth.ExternalSession)
|
||||
}
|
||||
}
|
||||
|
||||
if rf, ok := ret.Get(1).(func(context.Context, *auth.ListExternalSessionQuery) error); ok {
|
||||
r1 = rf(ctx, query)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// NewMockExternalSessionStore creates a new instance of MockExternalSessionStore. It also registers a testing interface on the mock and a cleanup function to assert the mock's expectations.
// The first argument is typically a *testing.T value.
|
||||
func NewMockExternalSessionStore(t interface {
|
||||
mock.TestingT
|
||||
Cleanup(func())
|
||||
}) *MockExternalSessionStore {
|
||||
mock := &MockExternalSessionStore{}
|
||||
mock.Mock.Test(t)
|
||||
|
||||
t.Cleanup(func() { mock.AssertExpectations(t) })
|
||||
|
||||
return mock
|
||||
}
|
@ -11,26 +11,28 @@ import (
|
||||
"github.com/grafana/grafana/pkg/services/auth"
|
||||
"github.com/grafana/grafana/pkg/services/datasources"
|
||||
"github.com/grafana/grafana/pkg/services/login"
|
||||
"github.com/grafana/grafana/pkg/services/user"
|
||||
)
|
||||
|
||||
type FakeUserAuthTokenService struct {
|
||||
CreateTokenProvider func(ctx context.Context, user *user.User, clientIP net.IP, userAgent string) (*auth.UserToken, error)
|
||||
RotateTokenProvider func(ctx context.Context, cmd auth.RotateCommand) (*auth.UserToken, error)
|
||||
TryRotateTokenProvider func(ctx context.Context, token *auth.UserToken, clientIP net.IP, userAgent string) (bool, *auth.UserToken, error)
|
||||
LookupTokenProvider func(ctx context.Context, unhashedToken string) (*auth.UserToken, error)
|
||||
RevokeTokenProvider func(ctx context.Context, token *auth.UserToken, soft bool) error
|
||||
RevokeAllUserTokensProvider func(ctx context.Context, userID int64) error
|
||||
ActiveTokenCountProvider func(ctx context.Context, userID *int64) (int64, error)
|
||||
GetUserTokenProvider func(ctx context.Context, userID, userTokenID int64) (*auth.UserToken, error)
|
||||
GetUserTokensProvider func(ctx context.Context, userID int64) ([]*auth.UserToken, error)
|
||||
GetUserRevokedTokensProvider func(ctx context.Context, userID int64) ([]*auth.UserToken, error)
|
||||
BatchRevokedTokenProvider func(ctx context.Context, userIDs []int64) error
|
||||
CreateTokenProvider func(ctx context.Context, cmd *auth.CreateTokenCommand) (*auth.UserToken, error)
|
||||
RotateTokenProvider func(ctx context.Context, cmd auth.RotateCommand) (*auth.UserToken, error)
|
||||
GetTokenByExternalSessionIDProvider func(ctx context.Context, externalSessionID int64) (*auth.UserToken, error)
|
||||
GetExternalSessionProvider func(ctx context.Context, externalSessionID int64) (*auth.ExternalSession, error)
|
||||
FindExternalSessionsProvider func(ctx context.Context, query *auth.ListExternalSessionQuery) ([]*auth.ExternalSession, error)
|
||||
TryRotateTokenProvider func(ctx context.Context, token *auth.UserToken, clientIP net.IP, userAgent string) (bool, *auth.UserToken, error)
|
||||
LookupTokenProvider func(ctx context.Context, unhashedToken string) (*auth.UserToken, error)
|
||||
RevokeTokenProvider func(ctx context.Context, token *auth.UserToken, soft bool) error
|
||||
RevokeAllUserTokensProvider func(ctx context.Context, userID int64) error
|
||||
ActiveTokenCountProvider func(ctx context.Context, userID *int64) (int64, error)
|
||||
GetUserTokenProvider func(ctx context.Context, userID, userTokenID int64) (*auth.UserToken, error)
|
||||
GetUserTokensProvider func(ctx context.Context, userID int64) ([]*auth.UserToken, error)
|
||||
GetUserRevokedTokensProvider func(ctx context.Context, userID int64) ([]*auth.UserToken, error)
|
||||
BatchRevokedTokenProvider func(ctx context.Context, userIDs []int64) error
|
||||
}
|
||||
|
||||
func NewFakeUserAuthTokenService() *FakeUserAuthTokenService {
|
||||
return &FakeUserAuthTokenService{
|
||||
CreateTokenProvider: func(ctx context.Context, user *user.User, clientIP net.IP, userAgent string) (*auth.UserToken, error) {
|
||||
CreateTokenProvider: func(ctx context.Context, cmd *auth.CreateTokenCommand) (*auth.UserToken, error) {
|
||||
return &auth.UserToken{
|
||||
UserId: 0,
|
||||
UnhashedToken: "",
|
||||
@ -72,14 +74,26 @@ func (s *FakeUserAuthTokenService) Init() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *FakeUserAuthTokenService) CreateToken(ctx context.Context, user *user.User, clientIP net.IP, userAgent string) (*auth.UserToken, error) {
|
||||
return s.CreateTokenProvider(context.Background(), user, clientIP, userAgent)
|
||||
func (s *FakeUserAuthTokenService) CreateToken(ctx context.Context, cmd *auth.CreateTokenCommand) (*auth.UserToken, error) {
|
||||
return s.CreateTokenProvider(context.Background(), cmd)
|
||||
}
|
||||
|
||||
func (s *FakeUserAuthTokenService) RotateToken(ctx context.Context, cmd auth.RotateCommand) (*auth.UserToken, error) {
|
||||
return s.RotateTokenProvider(ctx, cmd)
|
||||
}
|
||||
|
||||
func (s *FakeUserAuthTokenService) GetTokenByExternalSessionID(ctx context.Context, externalSessionID int64) (*auth.UserToken, error) {
|
||||
return s.GetTokenByExternalSessionIDProvider(ctx, externalSessionID)
|
||||
}
|
||||
|
||||
func (s *FakeUserAuthTokenService) GetExternalSession(ctx context.Context, externalSessionID int64) (*auth.ExternalSession, error) {
|
||||
return s.GetExternalSessionProvider(ctx, externalSessionID)
|
||||
}
|
||||
|
||||
func (s *FakeUserAuthTokenService) FindExternalSessions(ctx context.Context, query *auth.ListExternalSessionQuery) ([]*auth.ExternalSession, error) {
|
||||
return s.FindExternalSessionsProvider(context.Background(), query)
|
||||
}
|
||||
|
||||
func (s *FakeUserAuthTokenService) LookupToken(ctx context.Context, unhashedToken string) (*auth.UserToken, error) {
|
||||
return s.LookupTokenProvider(context.Background(), unhashedToken)
|
||||
}
|
||||
|
66
pkg/services/auth/external_session.go
Normal file
@ -0,0 +1,66 @@
|
||||
package auth

import (
	"context"
	"time"
)

type ExternalSession struct {
	ID            int64     `xorm:"pk autoincr 'id'"`
	UserID        int64     `xorm:"user_id"`
	UserAuthID    int64     `xorm:"user_auth_id"`
	AuthModule    string    `xorm:"auth_module"`
	AccessToken   string    `xorm:"access_token"`
	IDToken       string    `xorm:"id_token"`
	RefreshToken  string    `xorm:"refresh_token"`
	SessionID     string    `xorm:"session_id"`
	SessionIDHash string    `xorm:"session_id_hash"`
	NameID        string    `xorm:"name_id"`
	NameIDHash    string    `xorm:"name_id_hash"`
	ExpiresAt     time.Time `xorm:"expires_at"`
	CreatedAt     time.Time `xorm:"created 'created_at'"`
}

func (e *ExternalSession) TableName() string {
	return "user_external_session"
}

func (e *ExternalSession) Clone() *ExternalSession {
	return &ExternalSession{
		ID:            e.ID,
		UserID:        e.UserID,
		UserAuthID:    e.UserAuthID,
		AuthModule:    e.AuthModule,
		AccessToken:   e.AccessToken,
		IDToken:       e.IDToken,
		RefreshToken:  e.RefreshToken,
		SessionID:     e.SessionID,
		SessionIDHash: e.SessionIDHash,
		NameID:        e.NameID,
		NameIDHash:    e.NameIDHash,
		ExpiresAt:     e.ExpiresAt,
		CreatedAt:     e.CreatedAt,
	}
}

type ListExternalSessionQuery struct {
	ID        int64
	NameID    string
	SessionID string
}

//go:generate mockery --name ExternalSessionStore --structname MockExternalSessionStore --outpkg authtest --filename external_session_store_mock.go --output ./authtest/
type ExternalSessionStore interface {
	// Get returns the external session
	Get(ctx context.Context, ID int64) (*ExternalSession, error)
	// List returns all external sessions for the given query
	List(ctx context.Context, query *ListExternalSessionQuery) ([]*ExternalSession, error)
	// Create creates a new external session for a user
	Create(ctx context.Context, extSession *ExternalSession) error
	// Delete deletes an external session
	Delete(ctx context.Context, ID int64) error
	// DeleteExternalSessionsByUserID deletes all external sessions for the given user ID
	DeleteExternalSessionsByUserID(ctx context.Context, userID int64) error
	// BatchDeleteExternalSessionsByUserIDs deletes external sessions for the given user IDs
	BatchDeleteExternalSessionsByUserIDs(ctx context.Context, userIDs []int64) error
}
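As a quick illustration of this contract (not part of the diff), a consumer can stub the store with the generated mock from pkg/services/auth/authtest; the expectations follow the same pattern the token-service tests in this change use, and the test name below is hypothetical.

package example

import (
	"context"
	"testing"

	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"

	"github.com/grafana/grafana/pkg/services/auth"
	"github.com/grafana/grafana/pkg/services/auth/authtest"
)

func TestExternalSessionStoreContract(t *testing.T) {
	store := authtest.NewMockExternalSessionStore(t)

	// Create is expected to assign an ID to the session it receives.
	store.On("Create", mock.Anything, mock.IsType(&auth.ExternalSession{})).Run(func(args mock.Arguments) {
		args.Get(1).(*auth.ExternalSession).ID = 1
	}).Return(nil)
	store.On("Get", mock.Anything, int64(1)).Return(&auth.ExternalSession{ID: 1, UserID: 10}, nil)

	sess := &auth.ExternalSession{UserID: 10, AuthModule: "test"}
	require.NoError(t, store.Create(context.Background(), sess))

	got, err := store.Get(context.Background(), sess.ID)
	require.NoError(t, err)
	require.Equal(t, int64(10), got.UserID)
}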
|
@ -32,9 +32,10 @@ const (
|
||||
)
|
||||
|
||||
const (
|
||||
MetaKeyUsername = "username"
|
||||
MetaKeyAuthModule = "authModule"
|
||||
MetaKeyIsLogin = "isLogin"
|
||||
MetaKeyUsername = "username"
|
||||
MetaKeyAuthModule = "authModule"
|
||||
MetaKeyIsLogin = "isLogin"
|
||||
defaultRedirectToCookieKey = "redirect_to"
|
||||
)
|
||||
|
||||
// ClientParams are hints to the auth service about how to handle the identity management
|
||||
@ -74,9 +75,11 @@ type FetchPermissionsParams struct {
|
||||
Roles []string
|
||||
}
|
||||
|
||||
type PostAuthHookFn func(ctx context.Context, identity *Identity, r *Request) error
|
||||
type PostLoginHookFn func(ctx context.Context, identity *Identity, r *Request, err error)
|
||||
type PreLogoutHookFn func(ctx context.Context, requester identity.Requester, sessionToken *usertoken.UserToken) error
|
||||
type (
|
||||
PostAuthHookFn func(ctx context.Context, identity *Identity, r *Request) error
|
||||
PostLoginHookFn func(ctx context.Context, identity *Identity, r *Request, err error)
|
||||
PreLogoutHookFn func(ctx context.Context, requester identity.Requester, sessionToken *usertoken.UserToken) error
|
||||
)
|
||||
|
||||
type Authenticator interface {
|
||||
// Authenticate authenticates a request
|
||||
@ -233,41 +236,52 @@ type RedirectValidator func(url string) error
|
||||
// HandleLoginResponse is a utility function to perform common operations after a successful login and returns response.NormalResponse
|
||||
func HandleLoginResponse(r *http.Request, w http.ResponseWriter, cfg *setting.Cfg, identity *Identity, validator RedirectValidator, features featuremgmt.FeatureToggles) *response.NormalResponse {
|
||||
result := map[string]any{"message": "Logged in"}
|
||||
result["redirectUrl"] = handleLogin(r, w, cfg, identity, validator, features)
|
||||
result["redirectUrl"] = handleLogin(r, w, cfg, identity, validator, features, "")
|
||||
return response.JSON(http.StatusOK, result)
|
||||
}
|
||||
|
||||
// HandleLoginRedirect is a utility function to perform common operations after a successful login and redirects
|
||||
func HandleLoginRedirect(r *http.Request, w http.ResponseWriter, cfg *setting.Cfg, identity *Identity, validator RedirectValidator, features featuremgmt.FeatureToggles) {
|
||||
redirectURL := handleLogin(r, w, cfg, identity, validator, features)
|
||||
redirectURL := handleLogin(r, w, cfg, identity, validator, features, "redirectTo")
|
||||
http.Redirect(w, r, redirectURL, http.StatusFound)
|
||||
}
|
||||
|
||||
// HandleLoginRedirectResponse is a utility function to perform common operations after a successful login and return a response.RedirectResponse
|
||||
func HandleLoginRedirectResponse(r *http.Request, w http.ResponseWriter, cfg *setting.Cfg, identity *Identity, validator RedirectValidator, features featuremgmt.FeatureToggles) *response.RedirectResponse {
|
||||
return response.Redirect(handleLogin(r, w, cfg, identity, validator, features))
|
||||
func HandleLoginRedirectResponse(r *http.Request, w http.ResponseWriter, cfg *setting.Cfg, identity *Identity, validator RedirectValidator, features featuremgmt.FeatureToggles, redirectToCookieName string) *response.RedirectResponse {
|
||||
return response.Redirect(handleLogin(r, w, cfg, identity, validator, features, redirectToCookieName))
|
||||
}
|
||||
|
||||
func handleLogin(r *http.Request, w http.ResponseWriter, cfg *setting.Cfg, identity *Identity, validator RedirectValidator, features featuremgmt.FeatureToggles) string {
|
||||
func handleLogin(r *http.Request, w http.ResponseWriter, cfg *setting.Cfg, identity *Identity, validator RedirectValidator, features featuremgmt.FeatureToggles, redirectToCookieName string) string {
|
||||
WriteSessionCookie(w, cfg, identity.SessionToken)
|
||||
|
||||
redirectURL := cfg.AppSubURL + "/"
|
||||
if features.IsEnabledGlobally(featuremgmt.FlagUseSessionStorageForRedirection) {
|
||||
return cfg.AppSubURL + "/"
|
||||
if redirectToCookieName != "" {
|
||||
scopedRedirectToCookie, err := r.Cookie(redirectToCookieName)
|
||||
if err == nil {
|
||||
redirectTo, _ := url.QueryUnescape(scopedRedirectToCookie.Value)
|
||||
if redirectTo != "" && validator(redirectTo) == nil {
|
||||
redirectURL = cfg.AppSubURL + redirectTo
|
||||
}
|
||||
cookies.DeleteCookie(w, redirectToCookieName, cookieOptions(cfg))
|
||||
}
|
||||
}
|
||||
return redirectURL
|
||||
}
|
||||
|
||||
redirectURL := cfg.AppSubURL + "/"
|
||||
redirectURL = cfg.AppSubURL + "/"
|
||||
if redirectTo := getRedirectURL(r); len(redirectTo) > 0 {
|
||||
if validator(redirectTo) == nil {
|
||||
redirectURL = redirectTo
|
||||
}
|
||||
cookies.DeleteCookie(w, "redirect_to", cookieOptions(cfg))
|
||||
cookies.DeleteCookie(w, defaultRedirectToCookieKey, cookieOptions(cfg))
|
||||
}
|
||||
|
||||
return redirectURL
|
||||
}
|
||||
|
||||
func getRedirectURL(r *http.Request) string {
|
||||
cookie, err := r.Cookie("redirect_to")
|
||||
cookie, err := r.Cookie(defaultRedirectToCookieKey)
|
||||
if err != nil {
|
||||
return ""
|
||||
}
|
||||
|
@ -22,6 +22,7 @@ import (
|
||||
"github.com/grafana/grafana/pkg/services/auth"
|
||||
"github.com/grafana/grafana/pkg/services/authn"
|
||||
"github.com/grafana/grafana/pkg/services/authn/clients"
|
||||
"github.com/grafana/grafana/pkg/services/login"
|
||||
"github.com/grafana/grafana/pkg/services/user"
|
||||
"github.com/grafana/grafana/pkg/setting"
|
||||
"github.com/grafana/grafana/pkg/web"
|
||||
@ -52,8 +53,8 @@ func ProvideIdentitySynchronizer(s *Service) authn.IdentitySynchronizer {
|
||||
}
|
||||
|
||||
func ProvideService(
|
||||
cfg *setting.Cfg, tracer tracing.Tracer,
|
||||
sessionService auth.UserTokenService, usageStats usagestats.Service, registerer prometheus.Registerer,
|
||||
cfg *setting.Cfg, tracer tracing.Tracer, sessionService auth.UserTokenService,
|
||||
usageStats usagestats.Service, registerer prometheus.Registerer, authTokenService login.AuthInfoService,
|
||||
) *Service {
|
||||
s := &Service{
|
||||
log: log.New("authn.service"),
|
||||
@ -64,6 +65,7 @@ func ProvideService(
|
||||
tracer: tracer,
|
||||
metrics: newMetrics(registerer),
|
||||
sessionService: sessionService,
|
||||
authTokenService: authTokenService,
|
||||
preLogoutHooks: newQueue[authn.PreLogoutHookFn](),
|
||||
postAuthHooks: newQueue[authn.PostAuthHookFn](),
|
||||
postLoginHooks: newQueue[authn.PostLoginHookFn](),
|
||||
@ -85,7 +87,8 @@ type Service struct {
|
||||
tracer tracing.Tracer
|
||||
metrics *metrics
|
||||
|
||||
sessionService auth.UserTokenService
|
||||
sessionService auth.UserTokenService
|
||||
authTokenService login.AuthInfoService
|
||||
|
||||
// postAuthHooks are called after a successful authentication. They can modify the identity.
|
||||
postAuthHooks *queue[authn.PostAuthHookFn]
|
||||
@ -238,7 +241,9 @@ func (s *Service) Login(ctx context.Context, client string, r *authn.Request) (i
|
||||
s.log.FromContext(ctx).Debug("Failed to parse ip from address", "client", c.Name(), "id", id.ID, "addr", addr, "error", err)
|
||||
}
|
||||
|
||||
sessionToken, err := s.sessionService.CreateToken(ctx, &user.User{ID: userID}, ip, r.HTTPRequest.UserAgent())
|
||||
externalSession := s.resolveExternalSessionFromIdentity(ctx, id, userID)
|
||||
|
||||
sessionToken, err := s.sessionService.CreateToken(ctx, &auth.CreateTokenCommand{User: &user.User{ID: userID}, ClientIP: ip, UserAgent: r.HTTPRequest.UserAgent(), ExternalSession: externalSession})
|
||||
if err != nil {
|
||||
s.metrics.failedLogin.WithLabelValues(client).Inc()
|
||||
s.log.FromContext(ctx).Error("Failed to create session", "client", client, "id", id.ID, "err", err)
|
||||
@ -403,7 +408,8 @@ func (s *Service) resolveIdenity(ctx context.Context, orgID int64, typedID strin
|
||||
AllowGlobalOrg: true,
|
||||
FetchSyncedUser: true,
|
||||
SyncPermissions: true,
|
||||
}}, nil
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
|
||||
if claims.IsIdentityType(t, claims.TypeServiceAccount) {
|
||||
@ -415,7 +421,8 @@ func (s *Service) resolveIdenity(ctx context.Context, orgID int64, typedID strin
|
||||
AllowGlobalOrg: true,
|
||||
FetchSyncedUser: true,
|
||||
SyncPermissions: true,
|
||||
}}, nil
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
|
||||
resolver, ok := s.idenityResolverClients[string(t)]
|
||||
@ -482,3 +489,35 @@ func orgIDFromHeader(req *http.Request) int64 {
|
||||
}
|
||||
return id
|
||||
}
|
||||
|
||||
func (s *Service) resolveExternalSessionFromIdentity(ctx context.Context, identity *authn.Identity, userID int64) *auth.ExternalSession {
|
||||
if identity.OAuthToken == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
info, err := s.authTokenService.GetAuthInfo(ctx, &login.GetAuthInfoQuery{AuthId: identity.GetAuthID(), UserId: userID})
|
||||
if err != nil {
|
||||
s.log.FromContext(ctx).Info("Failed to get auth info", "error", err, "authID", identity.GetAuthID(), "userID", userID)
|
||||
return nil
|
||||
}
|
||||
|
||||
extSession := &auth.ExternalSession{
|
||||
AuthModule: identity.GetAuthenticatedBy(),
|
||||
UserAuthID: info.Id,
|
||||
UserID: userID,
|
||||
}
|
||||
extSession.AccessToken = identity.OAuthToken.AccessToken
|
||||
extSession.RefreshToken = identity.OAuthToken.RefreshToken
|
||||
extSession.ExpiresAt = identity.OAuthToken.Expiry
|
||||
|
||||
if idToken, ok := identity.OAuthToken.Extra("id_token").(string); ok && idToken != "" {
|
||||
extSession.IDToken = idToken
|
||||
}
|
||||
|
||||
// As of https://openid.net/specs/openid-connect-session-1_0.html
|
||||
if sessionState, ok := identity.OAuthToken.Extra("session_state").(string); ok && sessionState != "" {
|
||||
extSession.SessionID = sessionState
|
||||
}
|
||||
|
||||
return extSession
|
||||
}
|
||||
|
@ -3,7 +3,6 @@ package authnimpl
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"net"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"slices"
|
||||
@ -24,7 +23,6 @@ import (
|
||||
"github.com/grafana/grafana/pkg/services/auth/authtest"
|
||||
"github.com/grafana/grafana/pkg/services/authn"
|
||||
"github.com/grafana/grafana/pkg/services/authn/authntest"
|
||||
"github.com/grafana/grafana/pkg/services/user"
|
||||
"github.com/grafana/grafana/pkg/setting"
|
||||
)
|
||||
|
||||
@ -399,11 +397,11 @@ func TestService_Login(t *testing.T) {
|
||||
ExpectedIdentity: tt.expectedClientIdentity,
|
||||
})
|
||||
svc.sessionService = &authtest.FakeUserAuthTokenService{
|
||||
CreateTokenProvider: func(ctx context.Context, user *user.User, clientIP net.IP, userAgent string) (*auth.UserToken, error) {
|
||||
CreateTokenProvider: func(ctx context.Context, cmd *auth.CreateTokenCommand) (*auth.UserToken, error) {
|
||||
if tt.expectedSessionErr != nil {
|
||||
return nil, tt.expectedSessionErr
|
||||
}
|
||||
return &auth.UserToken{UserId: user.ID}, nil
|
||||
return &auth.UserToken{UserId: cmd.User.ID}, nil
|
||||
},
|
||||
}
|
||||
})
|
||||
|
@ -44,7 +44,7 @@ type Identity struct {
|
||||
// IsGrafanaAdmin is true if the entity is a Grafana admin.
|
||||
IsGrafanaAdmin *bool
|
||||
// AuthenticatedBy is the name of the authentication client that was used to authenticate the current Identity.
|
||||
// For example, "password", "apikey", "auth_ldap" or "auth_azuread".
|
||||
// For example, "password", "apikey", "ldap" or "oauth_azuread".
|
||||
AuthenticatedBy string
|
||||
// AuthId is the unique identifier for the entity in the external system.
|
||||
// Empty if the identity is provided by Grafana.
|
||||
|
@ -13,6 +13,8 @@ import (
|
||||
contextmodel "github.com/grafana/grafana/pkg/services/contexthandler/model"
|
||||
"github.com/grafana/grafana/pkg/util"
|
||||
"github.com/grafana/grafana/pkg/web"
|
||||
|
||||
"go.opentelemetry.io/otel/codes"
|
||||
)
|
||||
|
||||
type CloudMigrationAPI struct {
|
||||
@ -78,6 +80,9 @@ func (cma *CloudMigrationAPI) GetToken(c *contextmodel.ReqContext) response.Resp
|
||||
|
||||
token, err := cma.cloudMigrationService.GetToken(ctx)
|
||||
if err != nil {
|
||||
span.SetStatus(codes.Error, "fetching cloud migration access token")
|
||||
span.RecordError(err)
|
||||
|
||||
if !errors.Is(err, cloudmigration.ErrTokenNotFound) {
|
||||
logger.Error("fetching cloud migration access token", "err", err.Error())
|
||||
}
|
||||
@ -112,7 +117,10 @@ func (cma *CloudMigrationAPI) CreateToken(c *contextmodel.ReqContext) response.R
|
||||
|
||||
resp, err := cma.cloudMigrationService.CreateToken(ctx)
|
||||
if err != nil {
|
||||
span.SetStatus(codes.Error, "creating gcom access token")
|
||||
span.RecordError(err)
|
||||
logger.Error("creating gcom access token", "err", err.Error())
|
||||
|
||||
return response.ErrOrFallback(http.StatusInternalServerError, "creating gcom access token", err)
|
||||
}
|
||||
|
||||
@ -137,11 +145,17 @@ func (cma *CloudMigrationAPI) DeleteToken(c *contextmodel.ReqContext) response.R
|
||||
|
||||
uid := web.Params(c.Req)[":uid"]
|
||||
if err := util.ValidateUID(uid); err != nil {
|
||||
span.SetStatus(codes.Error, "invalid migration uid")
|
||||
span.RecordError(err)
|
||||
|
||||
return response.Error(http.StatusBadRequest, "invalid migration uid", err)
|
||||
}
|
||||
|
||||
if err := cma.cloudMigrationService.DeleteToken(ctx, uid); err != nil {
|
||||
span.SetStatus(codes.Error, "deleting cloud migration token")
|
||||
span.RecordError(err)
|
||||
logger.Error("deleting cloud migration token", "err", err.Error())
|
||||
|
||||
return response.ErrOrFallback(http.StatusInternalServerError, "deleting cloud migration token", err)
|
||||
}
|
||||
|
||||
@ -163,6 +177,9 @@ func (cma *CloudMigrationAPI) GetSessionList(c *contextmodel.ReqContext) respons
|
||||
|
||||
sl, err := cma.cloudMigrationService.GetSessionList(ctx)
|
||||
if err != nil {
|
||||
span.SetStatus(codes.Error, "session list error")
|
||||
span.RecordError(err)
|
||||
|
||||
return response.ErrOrFallback(http.StatusInternalServerError, "session list error", err)
|
||||
}
|
||||
|
||||
@ -185,11 +202,17 @@ func (cma *CloudMigrationAPI) GetSession(c *contextmodel.ReqContext) response.Re
|
||||
|
||||
uid := web.Params(c.Req)[":uid"]
|
||||
if err := util.ValidateUID(uid); err != nil {
|
||||
span.SetStatus(codes.Error, "invalid session uid")
|
||||
span.RecordError(err)
|
||||
|
||||
return response.Error(http.StatusBadRequest, "invalid session uid", err)
|
||||
}
|
||||
|
||||
s, err := cma.cloudMigrationService.GetSession(ctx, uid)
|
||||
if err != nil {
|
||||
span.SetStatus(codes.Error, "session not found")
|
||||
span.RecordError(err)
|
||||
|
||||
return response.ErrOrFallback(http.StatusNotFound, "session not found", err)
|
||||
}
|
||||
|
||||
@ -217,12 +240,18 @@ func (cma *CloudMigrationAPI) CreateSession(c *contextmodel.ReqContext) response
|
||||
|
||||
cmd := CloudMigrationSessionRequestDTO{}
|
||||
if err := web.Bind(c.Req, &cmd); err != nil {
|
||||
span.SetStatus(codes.Error, "bad request data")
|
||||
span.RecordError(err)
|
||||
|
||||
return response.ErrOrFallback(http.StatusBadRequest, "bad request data", err)
|
||||
}
|
||||
s, err := cma.cloudMigrationService.CreateSession(ctx, cloudmigration.CloudMigrationSessionRequest{
|
||||
AuthToken: cmd.AuthToken,
|
||||
})
|
||||
if err != nil {
|
||||
span.SetStatus(codes.Error, "session creation error")
|
||||
span.RecordError(err)
|
||||
|
||||
return response.ErrOrFallback(http.StatusInternalServerError, "session creation error", err)
|
||||
}
|
||||
|
||||
@ -250,11 +279,17 @@ func (cma *CloudMigrationAPI) DeleteSession(c *contextmodel.ReqContext) response
|
||||
|
||||
uid := web.Params(c.Req)[":uid"]
|
||||
if err := util.ValidateUID(uid); err != nil {
|
||||
span.SetStatus(codes.Error, "invalid session uid")
|
||||
span.RecordError(err)
|
||||
|
||||
return response.ErrOrFallback(http.StatusBadRequest, "invalid session uid", err)
|
||||
}
|
||||
|
||||
_, err := cma.cloudMigrationService.DeleteSession(ctx, uid)
|
||||
if err != nil {
|
||||
span.SetStatus(codes.Error, "session delete error")
|
||||
span.RecordError(err)
|
||||
|
||||
return response.ErrOrFallback(http.StatusInternalServerError, "session delete error", err)
|
||||
}
|
||||
return response.Empty(http.StatusOK)
|
||||
@ -278,11 +313,17 @@ func (cma *CloudMigrationAPI) CreateSnapshot(c *contextmodel.ReqContext) respons
|
||||
uid := web.Params(c.Req)[":uid"]
|
||||
|
||||
if err := util.ValidateUID(uid); err != nil {
|
||||
span.SetStatus(codes.Error, "invalid session uid")
|
||||
span.RecordError(err)
|
||||
|
||||
return response.ErrOrFallback(http.StatusBadRequest, "invalid session uid", err)
|
||||
}
|
||||
|
||||
ss, err := cma.cloudMigrationService.CreateSnapshot(ctx, c.SignedInUser, uid)
|
||||
if err != nil {
|
||||
span.SetStatus(codes.Error, "error creating snapshot")
|
||||
span.RecordError(err)
|
||||
|
||||
return response.ErrOrFallback(http.StatusInternalServerError, "error creating snapshot", err)
|
||||
}
|
||||
|
||||
@ -307,9 +348,15 @@ func (cma *CloudMigrationAPI) GetSnapshot(c *contextmodel.ReqContext) response.R
|
||||
|
||||
sessUid, snapshotUid := web.Params(c.Req)[":uid"], web.Params(c.Req)[":snapshotUid"]
|
||||
if err := util.ValidateUID(sessUid); err != nil {
|
||||
span.SetStatus(codes.Error, "invalid session uid")
|
||||
span.RecordError(err)
|
||||
|
||||
return response.ErrOrFallback(http.StatusBadRequest, "invalid session uid", err)
|
||||
}
|
||||
if err := util.ValidateUID(snapshotUid); err != nil {
|
||||
span.SetStatus(codes.Error, "invalid snapshot uid")
|
||||
span.RecordError(err)
|
||||
|
||||
return response.ErrOrFallback(http.StatusBadRequest, "invalid snapshot uid", err)
|
||||
}
|
||||
|
||||
@ -327,6 +374,9 @@ func (cma *CloudMigrationAPI) GetSnapshot(c *contextmodel.ReqContext) response.R
|
||||
}
|
||||
snapshot, err := cma.cloudMigrationService.GetSnapshot(ctx, q)
|
||||
if err != nil {
|
||||
span.SetStatus(codes.Error, "error retrieving snapshot")
|
||||
span.RecordError(err)
|
||||
|
||||
return response.ErrOrFallback(http.StatusInternalServerError, "error retrieving snapshot", err)
|
||||
}
|
||||
|
||||
@ -335,11 +385,12 @@ func (cma *CloudMigrationAPI) GetSnapshot(c *contextmodel.ReqContext) response.R
|
||||
dtoResults := make([]MigrateDataResponseItemDTO, len(results))
|
||||
for i := 0; i < len(results); i++ {
|
||||
dtoResults[i] = MigrateDataResponseItemDTO{
|
||||
Name: results[i].Name,
|
||||
Type: MigrateDataType(results[i].Type),
|
||||
RefID: results[i].RefID,
|
||||
Status: ItemStatus(results[i].Status),
|
||||
Message: results[i].Error,
|
||||
Name: results[i].Name,
|
||||
Type: MigrateDataType(results[i].Type),
|
||||
RefID: results[i].RefID,
|
||||
Status: ItemStatus(results[i].Status),
|
||||
Message: results[i].Error,
|
||||
ParentName: results[i].ParentName,
|
||||
}
|
||||
}
|
||||
|
||||
@ -386,6 +437,9 @@ func (cma *CloudMigrationAPI) GetSnapshotList(c *contextmodel.ReqContext) respon
|
||||
|
||||
uid := web.Params(c.Req)[":uid"]
|
||||
if err := util.ValidateUID(uid); err != nil {
|
||||
span.SetStatus(codes.Error, "invalid session uid")
|
||||
span.RecordError(err)
|
||||
|
||||
return response.ErrOrFallback(http.StatusBadRequest, "invalid session uid", err)
|
||||
}
|
||||
q := cloudmigration.ListSnapshotsQuery{
|
||||
@ -403,6 +457,9 @@ func (cma *CloudMigrationAPI) GetSnapshotList(c *contextmodel.ReqContext) respon
|
||||
|
||||
snapshotList, err := cma.cloudMigrationService.GetSnapshotList(ctx, q)
|
||||
if err != nil {
|
||||
span.SetStatus(codes.Error, "error retrieving snapshot list")
|
||||
span.RecordError(err)
|
||||
|
||||
return response.ErrOrFallback(http.StatusInternalServerError, "error retrieving snapshot list", err)
|
||||
}
|
||||
|
||||
@ -438,13 +495,22 @@ func (cma *CloudMigrationAPI) UploadSnapshot(c *contextmodel.ReqContext) respons
|
||||
|
||||
sessUid, snapshotUid := web.Params(c.Req)[":uid"], web.Params(c.Req)[":snapshotUid"]
|
||||
if err := util.ValidateUID(sessUid); err != nil {
|
||||
span.SetStatus(codes.Error, "invalid session uid")
|
||||
span.RecordError(err)
|
||||
|
||||
return response.ErrOrFallback(http.StatusBadRequest, "invalid session uid", err)
|
||||
}
|
||||
if err := util.ValidateUID(snapshotUid); err != nil {
|
||||
span.SetStatus(codes.Error, "invalid snapshot uid")
|
||||
span.RecordError(err)
|
||||
|
||||
return response.ErrOrFallback(http.StatusBadRequest, "invalid snapshot uid", err)
|
||||
}
|
||||
|
||||
if err := cma.cloudMigrationService.UploadSnapshot(ctx, sessUid, snapshotUid); err != nil {
|
||||
span.SetStatus(codes.Error, "error uploading snapshot")
|
||||
span.RecordError(err)
|
||||
|
||||
return response.ErrOrFallback(http.StatusInternalServerError, "error uploading snapshot", err)
|
||||
}
|
||||
|
||||
@ -468,13 +534,22 @@ func (cma *CloudMigrationAPI) CancelSnapshot(c *contextmodel.ReqContext) respons
|
||||
|
||||
sessUid, snapshotUid := web.Params(c.Req)[":uid"], web.Params(c.Req)[":snapshotUid"]
|
||||
if err := util.ValidateUID(sessUid); err != nil {
|
||||
span.SetStatus(codes.Error, "invalid session uid")
|
||||
span.RecordError(err)
|
||||
|
||||
return response.ErrOrFallback(http.StatusBadRequest, "invalid session uid", err)
|
||||
}
|
||||
if err := util.ValidateUID(snapshotUid); err != nil {
|
||||
span.SetStatus(codes.Error, "invalid snapshot uid")
|
||||
span.RecordError(err)
|
||||
|
||||
return response.ErrOrFallback(http.StatusBadRequest, "invalid snapshot uid", err)
|
||||
}
|
||||
|
||||
if err := cma.cloudMigrationService.CancelSnapshot(ctx, sessUid, snapshotUid); err != nil {
|
||||
span.SetStatus(codes.Error, "error canceling snapshot")
|
||||
span.RecordError(err)
|
||||
|
||||
return response.ErrOrFallback(http.StatusInternalServerError, "error canceling snapshot", err)
|
||||
}
|
||||
|
||||
|
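The handlers above repeat the same failure sequence: set the span status, record the error, and return an error response carrying the same message. A minimal sketch of a helper that could factor this out; the helper name spanError and its placement alongside the handlers are assumptions, not part of this change:

package api // assumed placement, next to the handlers above

import (
	"go.opentelemetry.io/otel/codes"
	"go.opentelemetry.io/otel/trace"

	"github.com/grafana/grafana/pkg/api/response"
)

// spanError marks the span as failed, attaches the error event, and builds
// the HTTP error response, mirroring the repeated pattern in the handlers.
func spanError(span trace.Span, status int, msg string, err error) response.Response {
	span.SetStatus(codes.Error, msg)
	span.RecordError(err)
	return response.ErrOrFallback(status, msg, err)
}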
@ -345,7 +345,7 @@ func TestCloudMigrationAPI_GetSnapshot(t *testing.T) {
|
||||
requestUrl: "/api/cloudmigration/migration/1234/snapshot/1",
|
||||
basicRole: org.RoleAdmin,
|
||||
expectedHttpResult: http.StatusOK,
|
||||
expectedBody: `{"uid":"fake_uid","status":"CREATING","sessionUid":"1234","created":"0001-01-01T00:00:00Z","finished":"0001-01-01T00:00:00Z","results":[],"stats":{"types":{},"statuses":{},"total":0}}`,
|
||||
expectedBody: `{"uid":"fake_uid","status":"CREATING","sessionUid":"1234","created":"0001-01-01T00:00:00Z","finished":"0001-01-01T00:00:00Z","results":[{"name":"dashboard name","parentName":"dashboard parent name","type":"DASHBOARD","refId":"123","status":"PENDING"},{"name":"datasource name","parentName":"dashboard parent name","type":"DATASOURCE","refId":"456","status":"OK"}],"stats":{"types":{},"statuses":{},"total":0}}`,
|
||||
},
|
||||
{
|
||||
desc: "should return 403 if the user is not an admin",
|
||||
|
@ -106,7 +106,8 @@ type MigrateDataResponseDTO struct {
|
||||
}
|
||||
|
||||
type MigrateDataResponseItemDTO struct {
|
||||
Name string `json:"name"`
|
||||
Name string `json:"name"`
|
||||
ParentName string `json:"parentName"`
|
||||
// required:true
|
||||
Type MigrateDataType `json:"type"`
|
||||
// required:true
|
||||
@ -120,10 +121,15 @@ type MigrateDataResponseItemDTO struct {
|
||||
type MigrateDataType string
|
||||
|
||||
const (
|
||||
DashboardDataType MigrateDataType = "DASHBOARD"
|
||||
DatasourceDataType MigrateDataType = "DATASOURCE"
|
||||
FolderDataType MigrateDataType = "FOLDER"
|
||||
LibraryElementDataType MigrateDataType = "LIBRARY_ELEMENT"
|
||||
DashboardDataType MigrateDataType = "DASHBOARD"
|
||||
DatasourceDataType MigrateDataType = "DATASOURCE"
|
||||
FolderDataType MigrateDataType = "FOLDER"
|
||||
LibraryElementDataType MigrateDataType = "LIBRARY_ELEMENT"
|
||||
AlertRuleType MigrateDataType = "ALERT_RULE"
|
||||
ContactPointType MigrateDataType = "CONTACT_POINT"
|
||||
NotificationPolicyType MigrateDataType = "NOTIFICATION_POLICY"
|
||||
NotificationTemplateType MigrateDataType = "NOTIFICATION_TEMPLATE"
|
||||
MuteTimingType MigrateDataType = "MUTE_TIMING"
|
||||
)
|
||||
|
||||
// swagger:enum ItemStatus
|
||||
|
@ -12,6 +12,7 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
|
||||
"github.com/grafana/grafana/pkg/api/routing"
|
||||
"github.com/grafana/grafana/pkg/infra/db"
|
||||
"github.com/grafana/grafana/pkg/infra/kvstore"
|
||||
@ -27,6 +28,7 @@ import (
|
||||
"github.com/grafana/grafana/pkg/services/folder"
|
||||
"github.com/grafana/grafana/pkg/services/gcom"
|
||||
"github.com/grafana/grafana/pkg/services/libraryelements"
|
||||
"github.com/grafana/grafana/pkg/services/ngalert"
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration/pluginstore"
|
||||
"github.com/grafana/grafana/pkg/services/secrets"
|
||||
secretskv "github.com/grafana/grafana/pkg/services/secrets/kvstore"
|
||||
@ -35,6 +37,7 @@ import (
|
||||
"github.com/grafana/grafana/pkg/util"
|
||||
"github.com/prometheus/client_golang/prometheus"
|
||||
"go.opentelemetry.io/otel/attribute"
|
||||
"go.opentelemetry.io/otel/codes"
|
||||
"go.opentelemetry.io/otel/trace"
|
||||
)
|
||||
|
||||
@ -62,6 +65,7 @@ type Service struct {
|
||||
secretsService secrets.Service
|
||||
kvStore *kvstore.NamespacedKVStore
|
||||
libraryElementsService libraryelements.Service
|
||||
ngAlert *ngalert.AlertNG
|
||||
|
||||
api *api.CloudMigrationAPI
|
||||
tracer tracing.Tracer
|
||||
@ -83,6 +87,7 @@ var _ cloudmigration.Service = (*Service)(nil)
|
||||
// builds the service and API, and configures routes
|
||||
func ProvideService(
|
||||
cfg *setting.Cfg,
|
||||
httpClientProvider *httpclient.Provider,
|
||||
features featuremgmt.FeatureToggles,
|
||||
db db.DB,
|
||||
dsService datasources.DataSourceService,
|
||||
@ -96,6 +101,7 @@ func ProvideService(
|
||||
pluginStore pluginstore.Store,
|
||||
kvStore kvstore.KVStore,
|
||||
libraryElementsService libraryelements.Service,
|
||||
ngAlert *ngalert.AlertNG,
|
||||
) (cloudmigration.Service, error) {
|
||||
if !features.IsEnabledGlobally(featuremgmt.FlagOnPremToCloudMigrations) {
|
||||
return &NoopServiceImpl{}, nil
|
||||
@ -115,18 +121,33 @@ func ProvideService(
|
||||
pluginStore: pluginStore,
|
||||
kvStore: kvstore.WithNamespace(kvStore, 0, "cloudmigration"),
|
||||
libraryElementsService: libraryElementsService,
|
||||
ngAlert: ngAlert,
|
||||
}
|
||||
s.api = api.RegisterApi(routeRegister, s, tracer)
|
||||
|
||||
s.objectStorage = objectstorage.NewS3()
|
||||
httpClientS3, err := httpClientProvider.New()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("creating http client for S3: %w", err)
|
||||
}
|
||||
s.objectStorage = objectstorage.NewS3(httpClientS3, tracer)
|
||||
|
||||
if !cfg.CloudMigration.IsDeveloperMode {
|
||||
c, err := gmsclient.NewGMSClient(cfg)
|
||||
httpClientGMS, err := httpClientProvider.New()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("creating http client for GMS: %w", err)
|
||||
}
|
||||
|
||||
c, err := gmsclient.NewGMSClient(cfg, httpClientGMS)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("initializing GMS client: %w", err)
|
||||
}
|
||||
s.gmsClient = c
|
||||
s.gcomService = gcom.New(gcom.Config{ApiURL: cfg.GrafanaComAPIURL, Token: cfg.CloudMigration.GcomAPIToken})
|
||||
|
||||
httpClientGcom, err := httpClientProvider.New()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("creating http client for GCOM: %w", err)
|
||||
}
|
||||
s.gcomService = gcom.New(gcom.Config{ApiURL: cfg.GrafanaComAPIURL, Token: cfg.CloudMigration.GcomAPIToken}, httpClientGcom)
|
||||
} else {
|
||||
s.gmsClient = gmsclient.NewInMemoryClient()
|
||||
s.gcomService = &gcomStub{policies: map[string]gcom.AccessPolicy{}, token: nil}
|
||||
@ -169,7 +190,8 @@ func (s *Service) GetToken(ctx context.Context) (gcom.TokenView, error) {
|
||||
RequestID: requestID,
|
||||
Region: instance.RegionSlug,
|
||||
AccessPolicyName: accessPolicyName,
|
||||
TokenName: accessTokenName})
|
||||
TokenName: accessTokenName,
|
||||
})
|
||||
if err != nil {
|
||||
return gcom.TokenView{}, fmt.Errorf("listing tokens: %w", err)
|
||||
}
|
||||
@ -279,9 +301,6 @@ func (s *Service) CreateToken(ctx context.Context) (cloudmigration.CreateAccessT
|
||||
}
|
||||
|
||||
func (s *Service) findAccessPolicyByName(ctx context.Context, regionSlug, accessPolicyName string) (*gcom.AccessPolicy, error) {
|
||||
ctx, span := s.tracer.Start(ctx, "CloudMigrationService.findAccessPolicyByName")
|
||||
defer span.End()
|
||||
|
||||
accessPolicies, err := s.gcomService.ListAccessPolicies(ctx, gcom.ListAccessPoliciesParams{
|
||||
RequestID: tracing.TraceIDFromContext(ctx, false),
|
||||
Region: regionSlug,
|
||||
@ -341,7 +360,7 @@ func (s *Service) DeleteToken(ctx context.Context, tokenID string) error {
|
||||
}
|
||||
|
||||
func (s *Service) GetSession(ctx context.Context, uid string) (*cloudmigration.CloudMigrationSession, error) {
|
||||
ctx, span := s.tracer.Start(ctx, "CloudMigrationService.GetMigration")
|
||||
ctx, span := s.tracer.Start(ctx, "CloudMigrationService.GetSession")
|
||||
defer span.End()
|
||||
migration, err := s.store.GetMigrationSessionByUID(ctx, uid)
|
||||
if err != nil {
|
||||
@ -352,6 +371,9 @@ func (s *Service) GetSession(ctx context.Context, uid string) (*cloudmigration.C
|
||||
}
|
||||
|
||||
func (s *Service) GetSessionList(ctx context.Context) (*cloudmigration.CloudMigrationSessionListResponse, error) {
|
||||
ctx, span := s.tracer.Start(ctx, "CloudMigrationService.GetSessionList")
|
||||
defer span.End()
|
||||
|
||||
values, err := s.store.GetCloudMigrationSessionList(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("retrieving session list from store: %w", err)
|
||||
@ -370,7 +392,7 @@ func (s *Service) GetSessionList(ctx context.Context) (*cloudmigration.CloudMigr
|
||||
}
|
||||
|
||||
func (s *Service) CreateSession(ctx context.Context, cmd cloudmigration.CloudMigrationSessionRequest) (*cloudmigration.CloudMigrationSessionResponse, error) {
|
||||
ctx, span := s.tracer.Start(ctx, "CloudMigrationService.createMigration")
|
||||
ctx, span := s.tracer.Start(ctx, "CloudMigrationService.CreateSession")
|
||||
defer span.End()
|
||||
|
||||
base64Token := cmd.AuthToken
|
||||
@ -405,6 +427,9 @@ func (s *Service) CreateSession(ctx context.Context, cmd cloudmigration.CloudMig
|
||||
}
|
||||
|
||||
func (s *Service) DeleteSession(ctx context.Context, sessionUID string) (*cloudmigration.CloudMigrationSession, error) {
|
||||
ctx, span := s.tracer.Start(ctx, "CloudMigrationService.DeleteSession")
|
||||
defer span.End()
|
||||
|
||||
session, snapshots, err := s.store.DeleteMigrationSessionByUID(ctx, sessionUID)
|
||||
if err != nil {
|
||||
s.report(ctx, session, gmsclient.EventDisconnect, 0, err)
|
||||
@ -470,26 +495,36 @@ func (s *Service) CreateSnapshot(ctx context.Context, signedInUser *user.SignedI
|
||||
s.cancelMutex.Unlock()
|
||||
}()
|
||||
|
||||
ctx, cancelFunc := context.WithCancel(context.Background())
|
||||
// Create a context out of the span context to ensure the trace is propagated
|
||||
asyncCtx := trace.ContextWithSpanContext(context.Background(), span.SpanContext())
|
||||
asyncCtx, asyncSpan := s.tracer.Start(asyncCtx, "CloudMigrationService.CreateSnapshotAsync")
|
||||
defer asyncSpan.End()
|
||||
|
||||
asyncCtx, cancelFunc := context.WithCancel(asyncCtx)
|
||||
s.cancelFunc = cancelFunc
|
||||
|
||||
s.report(ctx, session, gmsclient.EventStartBuildingSnapshot, 0, nil)
|
||||
s.report(asyncCtx, session, gmsclient.EventStartBuildingSnapshot, 0, nil)
|
||||
|
||||
start := time.Now()
|
||||
err := s.buildSnapshot(ctx, signedInUser, initResp.MaxItemsPerPartition, initResp.Metadata, snapshot)
|
||||
err := s.buildSnapshot(asyncCtx, signedInUser, initResp.MaxItemsPerPartition, initResp.Metadata, snapshot)
|
||||
if err != nil {
|
||||
asyncSpan.SetStatus(codes.Error, "error building snapshot")
|
||||
asyncSpan.RecordError(err)
|
||||
s.log.Error("building snapshot", "err", err.Error())
|
||||
|
||||
// Update status to error with retries
|
||||
if err := s.updateSnapshotWithRetries(context.Background(), cloudmigration.UpdateSnapshotCmd{
|
||||
if err := s.updateSnapshotWithRetries(asyncCtx, cloudmigration.UpdateSnapshotCmd{
|
||||
UID: snapshot.UID,
|
||||
SessionID: sessionUid,
|
||||
Status: cloudmigration.SnapshotStatusError,
|
||||
}); err != nil {
|
||||
s.log.Error("critical failure during snapshot creation - please report any error logs")
|
||||
asyncSpan.RecordError(err)
|
||||
}
|
||||
}
|
||||
|
||||
s.report(ctx, session, gmsclient.EventDoneBuildingSnapshot, time.Since(start), err)
|
||||
span.SetStatus(codes.Ok, "snapshot built")
|
||||
s.report(asyncCtx, session, gmsclient.EventDoneBuildingSnapshot, time.Since(start), err)
|
||||
}()
|
||||
|
||||
return &snapshot, nil
|
||||
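CreateSnapshot now derives the goroutine's context from the span context instead of the incoming request, so the background work is not cancelled when the HTTP request finishes while its spans still join the same trace. A self-contained sketch of that pattern, assuming a standard OpenTelemetry tracer (the names here are illustrative):

package tracesketch

import (
	"context"

	"go.opentelemetry.io/otel/trace"
)

// startDetached returns a context that is not cancelled with reqCtx but still
// carries reqCtx's span context, so spans started from it join the same trace.
func startDetached(reqCtx context.Context, tracer trace.Tracer, name string) (context.Context, trace.Span) {
	// Base the new context on Background so request cancellation does not
	// propagate, then re-attach the caller's span context for trace linkage.
	asyncCtx := trace.ContextWithSpanContext(context.Background(), trace.SpanContextFromContext(reqCtx))
	return tracer.Start(asyncCtx, name)
}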
@ -624,32 +659,48 @@ func (s *Service) UploadSnapshot(ctx context.Context, sessionUid string, snapsho
|
||||
s.cancelMutex.Unlock()
|
||||
}()
|
||||
|
||||
ctx, cancelFunc := context.WithCancel(context.Background())
|
||||
s.cancelFunc = cancelFunc
|
||||
// Create a context out of the span context to ensure the trace is propagated
|
||||
asyncCtx := trace.ContextWithSpanContext(context.Background(), span.SpanContext())
|
||||
asyncCtx, asyncSpan := s.tracer.Start(asyncCtx, "CloudMigrationService.UploadSnapshot")
|
||||
defer asyncSpan.End()
|
||||
|
||||
s.report(ctx, session, gmsclient.EventStartUploadingSnapshot, 0, nil)
|
||||
asyncCtx, s.cancelFunc = context.WithCancel(asyncCtx)
|
||||
|
||||
s.report(asyncCtx, session, gmsclient.EventStartUploadingSnapshot, 0, nil)
|
||||
|
||||
start := time.Now()
|
||||
err := s.uploadSnapshot(ctx, session, snapshot, uploadUrl)
|
||||
err := s.uploadSnapshot(asyncCtx, session, snapshot, uploadUrl)
|
||||
if err != nil {
|
||||
asyncSpan.SetStatus(codes.Error, "error uploading snapshot")
|
||||
asyncSpan.RecordError(err)
|
||||
|
||||
s.log.Error("uploading snapshot", "err", err.Error())
|
||||
// Update status to error with retries
|
||||
if err := s.updateSnapshotWithRetries(context.Background(), cloudmigration.UpdateSnapshotCmd{
|
||||
if err := s.updateSnapshotWithRetries(asyncCtx, cloudmigration.UpdateSnapshotCmd{
|
||||
UID: snapshot.UID,
|
||||
SessionID: sessionUid,
|
||||
Status: cloudmigration.SnapshotStatusError,
|
||||
}); err != nil {
|
||||
asyncSpan.RecordError(err)
|
||||
s.log.Error("critical failure during snapshot upload - please report any error logs")
|
||||
}
|
||||
}
|
||||
|
||||
s.report(ctx, session, gmsclient.EventDoneUploadingSnapshot, time.Since(start), err)
|
||||
s.report(asyncCtx, session, gmsclient.EventDoneUploadingSnapshot, time.Since(start), err)
|
||||
}()
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *Service) CancelSnapshot(ctx context.Context, sessionUid string, snapshotUid string) (err error) {
|
||||
ctx, span := s.tracer.Start(ctx, "CloudMigrationService.CancelSnapshot",
|
||||
trace.WithAttributes(
|
||||
attribute.String("sessionUid", sessionUid),
|
||||
attribute.String("snapshotUid", snapshotUid),
|
||||
),
|
||||
)
|
||||
defer span.End()
|
||||
|
||||
// The cancel func itself is protected by a mutex in the async threads, so it may or may not be set by the time CancelSnapshot is called
|
||||
// Attempt to cancel and recover from the panic if the cancel function is nil
|
||||
defer func() {
|
||||
@ -684,6 +735,9 @@ func (s *Service) report(
|
||||
d time.Duration,
|
||||
evtErr error,
|
||||
) {
|
||||
ctx, span := s.tracer.Start(ctx, "CloudMigrationService.report")
|
||||
defer span.End()
|
||||
|
||||
id, err := s.getLocalEventId(ctx)
|
||||
if err != nil {
|
||||
s.log.Error("failed to report event", "type", t, "error", err.Error())
|
||||
@ -738,6 +792,9 @@ func (s *Service) getLocalEventId(ctx context.Context) (string, error) {
|
||||
}
|
||||
|
||||
func (s *Service) deleteLocalFiles(snapshots []cloudmigration.CloudMigrationSnapshot) error {
|
||||
_, span := s.tracer.Start(context.Background(), "CloudMigrationService.deleteLocalFiles")
|
||||
defer span.End()
|
||||
|
||||
var err error
|
||||
for _, snapshot := range snapshots {
|
||||
err = os.RemoveAll(snapshot.LocalDir)
|
||||
|
@ -2,18 +2,24 @@ package cloudmigrationimpl
|
||||
|
||||
import (
|
||||
"context"
|
||||
"maps"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"slices"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/grafana/grafana/pkg/api/routing"
|
||||
"github.com/grafana/grafana/pkg/bus"
|
||||
"github.com/grafana/grafana/pkg/components/simplejson"
|
||||
"github.com/grafana/grafana/pkg/infra/db"
|
||||
"github.com/grafana/grafana/pkg/infra/httpclient"
|
||||
"github.com/grafana/grafana/pkg/infra/kvstore"
|
||||
"github.com/grafana/grafana/pkg/infra/tracing"
|
||||
"github.com/grafana/grafana/pkg/plugins"
|
||||
"github.com/grafana/grafana/pkg/services/accesscontrol/actest"
|
||||
"github.com/grafana/grafana/pkg/services/annotations/annotationstest"
|
||||
"github.com/grafana/grafana/pkg/services/cloudmigration"
|
||||
"github.com/grafana/grafana/pkg/services/cloudmigration/gmsclient"
|
||||
"github.com/grafana/grafana/pkg/services/contexthandler/ctxkey"
|
||||
@ -26,7 +32,12 @@ import (
|
||||
"github.com/grafana/grafana/pkg/services/folder/foldertest"
|
||||
libraryelementsfake "github.com/grafana/grafana/pkg/services/libraryelements/fake"
|
||||
libraryelements "github.com/grafana/grafana/pkg/services/libraryelements/model"
|
||||
"github.com/grafana/grafana/pkg/services/ngalert"
|
||||
"github.com/grafana/grafana/pkg/services/ngalert/metrics"
|
||||
ngalertstore "github.com/grafana/grafana/pkg/services/ngalert/store"
|
||||
ngalertfakes "github.com/grafana/grafana/pkg/services/ngalert/tests/fakes"
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration/pluginstore"
|
||||
"github.com/grafana/grafana/pkg/services/quota/quotatest"
|
||||
secretsfakes "github.com/grafana/grafana/pkg/services/secrets/fakes"
|
||||
secretskv "github.com/grafana/grafana/pkg/services/secrets/kvstore"
|
||||
"github.com/grafana/grafana/pkg/services/user"
|
||||
@ -391,6 +402,7 @@ func Test_NonCoreDataSourcesHaveWarning(t *testing.T) {
|
||||
Results: []cloudmigration.CloudMigrationResource{
|
||||
{
|
||||
Name: "1 name",
|
||||
ParentName: "1 parent name",
|
||||
Type: cloudmigration.DatasourceDataType,
|
||||
RefID: "1", // this will be core
|
||||
Status: cloudmigration.ItemStatusOK,
|
||||
@ -398,6 +410,7 @@ func Test_NonCoreDataSourcesHaveWarning(t *testing.T) {
|
||||
},
|
||||
{
|
||||
Name: "2 name",
|
||||
ParentName: "",
|
||||
Type: cloudmigration.DatasourceDataType,
|
||||
RefID: "2", // this will be non-core
|
||||
Status: cloudmigration.ItemStatusOK,
|
||||
@ -405,6 +418,7 @@ func Test_NonCoreDataSourcesHaveWarning(t *testing.T) {
|
||||
},
|
||||
{
|
||||
Name: "3 name",
|
||||
ParentName: "3 parent name",
|
||||
Type: cloudmigration.DatasourceDataType,
|
||||
RefID: "3", // this will be non-core with an error
|
||||
Status: cloudmigration.ItemStatusError,
|
||||
@ -413,6 +427,7 @@ func Test_NonCoreDataSourcesHaveWarning(t *testing.T) {
|
||||
},
|
||||
{
|
||||
Name: "4 name",
|
||||
ParentName: "4 folder name",
|
||||
Type: cloudmigration.DatasourceDataType,
|
||||
RefID: "4", // this will be deleted
|
||||
Status: cloudmigration.ItemStatusOK,
|
||||
@ -563,6 +578,122 @@ func TestReportEvent(t *testing.T) {
|
||||
require.Equal(t, 1, gmsMock.reportEventCalled)
|
||||
})
|
||||
}
|
||||
func TestGetFolderNamesForFolderUIDs(t *testing.T) {
|
||||
s := setUpServiceTest(t, false).(*Service)
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
t.Cleanup(cancel)
|
||||
|
||||
user := &user.SignedInUser{OrgID: 1}
|
||||
|
||||
testcases := []struct {
|
||||
folders []*folder.Folder
|
||||
folderUIDs []string
|
||||
expectedFolderNames []string
|
||||
}{
|
||||
{
|
||||
folders: []*folder.Folder{
|
||||
{UID: "folderUID-A", Title: "Folder A", OrgID: 1},
|
||||
{UID: "folderUID-B", Title: "Folder B", OrgID: 1},
|
||||
},
|
||||
folderUIDs: []string{"folderUID-A", "folderUID-B"},
|
||||
expectedFolderNames: []string{"Folder A", "Folder B"},
|
||||
},
|
||||
{
|
||||
folders: []*folder.Folder{
|
||||
{UID: "folderUID-A", Title: "Folder A", OrgID: 1},
|
||||
},
|
||||
folderUIDs: []string{"folderUID-A"},
|
||||
expectedFolderNames: []string{"Folder A"},
|
||||
},
|
||||
{
|
||||
folders: []*folder.Folder{},
|
||||
folderUIDs: []string{"folderUID-A"},
|
||||
expectedFolderNames: []string{""},
|
||||
},
|
||||
{
|
||||
folders: []*folder.Folder{
|
||||
{UID: "folderUID-A", Title: "Folder A", OrgID: 1},
|
||||
},
|
||||
folderUIDs: []string{"folderUID-A", "folderUID-B"},
|
||||
expectedFolderNames: []string{"Folder A", ""},
|
||||
},
|
||||
{
|
||||
folders: []*folder.Folder{},
|
||||
folderUIDs: []string{""},
|
||||
expectedFolderNames: []string{""},
|
||||
},
|
||||
{
|
||||
folders: []*folder.Folder{},
|
||||
folderUIDs: []string{},
|
||||
expectedFolderNames: []string{},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testcases {
|
||||
s.folderService = &foldertest.FakeService{ExpectedFolders: tc.folders}
|
||||
|
||||
folderUIDsToFolders, err := s.getFolderNamesForFolderUIDs(ctx, user, tc.folderUIDs)
|
||||
require.NoError(t, err)
|
||||
|
||||
resFolderNames := slices.Collect(maps.Values(folderUIDsToFolders))
|
||||
require.Len(t, resFolderNames, len(tc.expectedFolderNames))
|
||||
|
||||
require.ElementsMatch(t, resFolderNames, tc.expectedFolderNames)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetParentNames(t *testing.T) {
|
||||
s := setUpServiceTest(t, false).(*Service)
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
t.Cleanup(cancel)
|
||||
|
||||
user := &user.SignedInUser{OrgID: 1}
|
||||
libraryElementFolderUID := "folderUID-A"
|
||||
testcases := []struct {
|
||||
fakeFolders []*folder.Folder
|
||||
folders []folder.CreateFolderCommand
|
||||
dashboards []dashboards.Dashboard
|
||||
libraryElements []libraryElement
|
||||
expectedDashParentNames []string
|
||||
expectedFoldParentNames []string
|
||||
}{
|
||||
{
|
||||
fakeFolders: []*folder.Folder{
|
||||
{UID: "folderUID-A", Title: "Folder A", OrgID: 1, ParentUID: ""},
|
||||
{UID: "folderUID-B", Title: "Folder B", OrgID: 1, ParentUID: "folderUID-A"},
|
||||
},
|
||||
folders: []folder.CreateFolderCommand{
|
||||
{UID: "folderUID-C", Title: "Folder A", OrgID: 1, ParentUID: "folderUID-A"},
|
||||
},
|
||||
dashboards: []dashboards.Dashboard{
|
||||
{UID: "dashboardUID-0", OrgID: 1, FolderUID: ""},
|
||||
{UID: "dashboardUID-1", OrgID: 1, FolderUID: "folderUID-A"},
|
||||
{UID: "dashboardUID-2", OrgID: 1, FolderUID: "folderUID-B"},
|
||||
},
|
||||
libraryElements: []libraryElement{
|
||||
{UID: "libraryElementUID-0", FolderUID: &libraryElementFolderUID},
|
||||
{UID: "libraryElementUID-1"},
|
||||
},
|
||||
expectedDashParentNames: []string{"", "Folder A", "Folder B"},
|
||||
expectedFoldParentNames: []string{"Folder A"},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testcases {
|
||||
s.folderService = &foldertest.FakeService{ExpectedFolders: tc.fakeFolders}
|
||||
|
||||
dataUIDsToParentNamesByType, err := s.getParentNames(ctx, user, tc.dashboards, tc.folders, tc.libraryElements)
|
||||
require.NoError(t, err)
|
||||
|
||||
resDashParentNames := slices.Collect(maps.Values(dataUIDsToParentNamesByType[cloudmigration.DashboardDataType]))
|
||||
require.Len(t, resDashParentNames, len(tc.expectedDashParentNames))
|
||||
require.ElementsMatch(t, resDashParentNames, tc.expectedDashParentNames)
|
||||
|
||||
resFoldParentNames := slices.Collect(maps.Values(dataUIDsToParentNamesByType[cloudmigration.FolderDataType]))
|
||||
require.Len(t, resFoldParentNames, len(tc.expectedFoldParentNames))
|
||||
require.ElementsMatch(t, resFoldParentNames, tc.expectedFoldParentNames)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetLibraryElementsCommands(t *testing.T) {
|
||||
s := setUpServiceTest(t, false).(*Service)
|
||||
@ -642,8 +773,29 @@ func setUpServiceTest(t *testing.T, withDashboardMock bool) cloudmigration.Servi
|
||||
},
|
||||
}
|
||||
|
||||
featureToggles := featuremgmt.WithFeatures(featuremgmt.FlagOnPremToCloudMigrations, featuremgmt.FlagDashboardRestore)
|
||||
|
||||
kvStore := kvstore.ProvideService(sqlStore)
|
||||
|
||||
bus := bus.ProvideBus(tracer)
|
||||
fakeAccessControl := actest.FakeAccessControl{}
|
||||
fakeAccessControlService := actest.FakeService{}
|
||||
alertMetrics := metrics.NewNGAlert(prometheus.NewRegistry())
|
||||
|
||||
ruleStore, err := ngalertstore.ProvideDBStore(cfg, featureToggles, sqlStore, mockFolder, dashboardService, fakeAccessControl)
|
||||
require.NoError(t, err)
|
||||
|
||||
ng, err := ngalert.ProvideService(
|
||||
cfg, featureToggles, nil, nil, rr, sqlStore, kvStore, nil, nil, quotatest.New(false, nil),
|
||||
secretsService, nil, alertMetrics, mockFolder, fakeAccessControl, dashboardService, nil, bus, fakeAccessControlService,
|
||||
annotationstest.NewFakeAnnotationsRepo(), &pluginstore.FakePluginStore{}, tracer, ruleStore,
|
||||
httpclient.NewProvider(), ngalertfakes.NewFakeReceiverPermissionsService(),
|
||||
)
|
||||
require.NoError(t, err)
|
||||
|
||||
s, err := ProvideService(
|
||||
cfg,
|
||||
httpclient.NewProvider(),
|
||||
featuremgmt.WithFeatures(
|
||||
featuremgmt.FlagOnPremToCloudMigrations,
|
||||
featuremgmt.FlagDashboardRestore),
|
||||
@ -659,6 +811,7 @@ func setUpServiceTest(t *testing.T, withDashboardMock bool) cloudmigration.Servi
|
||||
&pluginstore.FakePluginStore{},
|
||||
kvstore.ProvideService(sqlStore),
|
||||
&libraryelementsfake.LibraryElementService{},
|
||||
ng,
|
||||
)
|
||||
require.NoError(t, err)
|
||||
|
||||
|
@ -98,10 +98,28 @@ func (m FakeServiceImpl) GetSnapshot(ctx context.Context, query cloudmigration.G
|
||||
if m.ReturnError {
|
||||
return nil, fmt.Errorf("mock error")
|
||||
}
|
||||
cloudMigrationResources := []cloudmigration.CloudMigrationResource{
|
||||
{
|
||||
Type: cloudmigration.DashboardDataType,
|
||||
RefID: "123",
|
||||
Status: cloudmigration.ItemStatusPending,
|
||||
Name: "dashboard name",
|
||||
ParentName: "dashboard parent name",
|
||||
},
|
||||
{
|
||||
Type: cloudmigration.DatasourceDataType,
|
||||
RefID: "456",
|
||||
Status: cloudmigration.ItemStatusOK,
|
||||
Name: "datasource name",
|
||||
ParentName: "dashboard parent name",
|
||||
},
|
||||
}
|
||||
|
||||
return &cloudmigration.CloudMigrationSnapshot{
|
||||
UID: "fake_uid",
|
||||
SessionUID: "fake_uid",
|
||||
Status: cloudmigration.SnapshotStatusCreating,
|
||||
Resources: cloudMigrationResources,
|
||||
}, nil
|
||||
}
|
||||
|
||||
|
@ -23,9 +23,21 @@ import (
|
||||
"github.com/grafana/grafana/pkg/services/user"
|
||||
"github.com/grafana/grafana/pkg/util/retryer"
|
||||
"golang.org/x/crypto/nacl/box"
|
||||
|
||||
"go.opentelemetry.io/otel/codes"
|
||||
)
|
||||
|
||||
var currentMigrationTypes = []cloudmigration.MigrateDataType{
|
||||
cloudmigration.DatasourceDataType,
|
||||
cloudmigration.FolderDataType,
|
||||
cloudmigration.LibraryElementDataType,
|
||||
cloudmigration.DashboardDataType,
|
||||
}
|
||||
|
||||
func (s *Service) getMigrationDataJSON(ctx context.Context, signedInUser *user.SignedInUser) (*cloudmigration.MigrateDataRequest, error) {
|
||||
ctx, span := s.tracer.Start(ctx, "CloudMigrationService.getMigrationDataJSON")
|
||||
defer span.End()
|
||||
|
||||
// Data sources
|
||||
dataSources, err := s.getDataSourceCommands(ctx)
|
||||
if err != nil {
|
||||
@ -95,14 +107,24 @@ func (s *Service) getMigrationDataJSON(ctx context.Context, signedInUser *user.S
|
||||
})
|
||||
}
|
||||
|
||||
// Obtain the names of parent elements for the dashboard, folder, and library element data types
|
||||
parentNamesByType, err := s.getParentNames(ctx, signedInUser, dashs, folders, libraryElements)
|
||||
if err != nil {
|
||||
s.log.Error("Failed to get parent folder names", "err", err)
|
||||
}
|
||||
|
||||
migrationData := &cloudmigration.MigrateDataRequest{
|
||||
Items: migrationDataSlice,
|
||||
Items: migrationDataSlice,
|
||||
ItemParentNames: parentNamesByType,
|
||||
}
|
||||
|
||||
return migrationData, nil
|
||||
}
|
||||
|
||||
func (s *Service) getDataSourceCommands(ctx context.Context) ([]datasources.AddDataSourceCommand, error) {
|
||||
ctx, span := s.tracer.Start(ctx, "CloudMigrationService.getDataSourceCommands")
|
||||
defer span.End()
|
||||
|
||||
dataSources, err := s.dsService.GetAllDataSources(ctx, &datasources.GetAllDataSourcesQuery{})
|
||||
if err != nil {
|
||||
s.log.Error("Failed to get all datasources", "err", err)
|
||||
@ -141,6 +163,9 @@ func (s *Service) getDataSourceCommands(ctx context.Context) ([]datasources.AddD
|
||||
|
||||
// getDashboardAndFolderCommands returns the json payloads required by the dashboard and folder creation APIs
|
||||
func (s *Service) getDashboardAndFolderCommands(ctx context.Context, signedInUser *user.SignedInUser) ([]dashboards.Dashboard, []folder.CreateFolderCommand, error) {
|
||||
ctx, span := s.tracer.Start(ctx, "CloudMigrationService.getDashboardAndFolderCommands")
|
||||
defer span.End()
|
||||
|
||||
dashs, err := s.dashboardService.GetAllDashboards(ctx)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
@ -196,6 +221,9 @@ type libraryElement struct {
|
||||
|
||||
// getLibraryElementsCommands returns the json payloads required by the library elements creation API
|
||||
func (s *Service) getLibraryElementsCommands(ctx context.Context, signedInUser *user.SignedInUser) ([]libraryElement, error) {
|
||||
ctx, span := s.tracer.Start(ctx, "CloudMigrationService.getLibraryElementsCommands")
|
||||
defer span.End()
|
||||
|
||||
const perPage = 100
|
||||
|
||||
cmds := make([]libraryElement, 0)
|
||||
@ -242,6 +270,9 @@ func (s *Service) getLibraryElementsCommands(ctx context.Context, signedInUser *
|
||||
|
||||
// asynchronous process for writing the snapshot to the filesystem and updating the snapshot status
|
||||
func (s *Service) buildSnapshot(ctx context.Context, signedInUser *user.SignedInUser, maxItemsPerPartition uint32, metadata []byte, snapshotMeta cloudmigration.CloudMigrationSnapshot) error {
|
||||
ctx, span := s.tracer.Start(ctx, "CloudMigrationService.buildSnapshot")
|
||||
defer span.End()
|
||||
|
||||
// TODO -- make sure we can only build one snapshot at a time
|
||||
s.buildSnapshotMutex.Lock()
|
||||
defer s.buildSnapshotMutex.Unlock()
|
||||
@ -289,20 +320,21 @@ func (s *Service) buildSnapshot(ctx context.Context, signedInUser *user.SignedIn
|
||||
Data: item.Data,
|
||||
})
|
||||
|
||||
parentName := ""
|
||||
if _, exists := migrationData.ItemParentNames[item.Type]; exists {
|
||||
parentName = migrationData.ItemParentNames[item.Type][item.RefID]
|
||||
}
|
||||
|
||||
localSnapshotResource[i] = cloudmigration.CloudMigrationResource{
|
||||
Name: item.Name,
|
||||
Type: item.Type,
|
||||
RefID: item.RefID,
|
||||
Status: cloudmigration.ItemStatusPending,
|
||||
Name: item.Name,
|
||||
Type: item.Type,
|
||||
RefID: item.RefID,
|
||||
Status: cloudmigration.ItemStatusPending,
|
||||
ParentName: parentName,
|
||||
}
|
||||
}
|
||||
|
||||
for _, resourceType := range []cloudmigration.MigrateDataType{
|
||||
cloudmigration.DatasourceDataType,
|
||||
cloudmigration.FolderDataType,
|
||||
cloudmigration.LibraryElementDataType,
|
||||
cloudmigration.DashboardDataType,
|
||||
} {
|
||||
for _, resourceType := range currentMigrationTypes {
|
||||
for chunk := range slices.Chunk(resourcesGroupedByType[resourceType], int(maxItemsPerPartition)) {
|
||||
if err := snapshotWriter.Write(string(resourceType), chunk); err != nil {
|
||||
return fmt.Errorf("writing resources to snapshot writer: resourceType=%s %w", resourceType, err)
|
||||
@ -339,6 +371,9 @@ func (s *Service) buildSnapshot(ctx context.Context, signedInUser *user.SignedIn
|
||||
|
||||
// asynchronous process for uploading the snapshot and updating the snapshot status
|
||||
func (s *Service) uploadSnapshot(ctx context.Context, session *cloudmigration.CloudMigrationSession, snapshotMeta *cloudmigration.CloudMigrationSnapshot, uploadUrl string) (err error) {
|
||||
ctx, span := s.tracer.Start(ctx, "CloudMigrationService.uploadSnapshot")
|
||||
defer span.End()
|
||||
|
||||
// TODO -- make sure we can only upload one snapshot at a time
|
||||
s.buildSnapshotMutex.Lock()
|
||||
defer s.buildSnapshotMutex.Unlock()
|
||||
@ -361,37 +396,61 @@ func (s *Service) uploadSnapshot(ctx context.Context, session *cloudmigration.Cl
|
||||
}
|
||||
}()
|
||||
|
||||
_, readIndexSpan := s.tracer.Start(ctx, "CloudMigrationService.uploadSnapshot.readIndex")
|
||||
index, err := snapshot.ReadIndex(indexFile)
|
||||
if err != nil {
|
||||
readIndexSpan.SetStatus(codes.Error, "reading index from file")
|
||||
readIndexSpan.RecordError(err)
|
||||
readIndexSpan.End()
|
||||
|
||||
return fmt.Errorf("reading index from file: %w", err)
|
||||
}
|
||||
readIndexSpan.End()
|
||||
|
||||
s.log.Debug(fmt.Sprintf("uploadSnapshot: read index file in %d ms", time.Since(start).Milliseconds()))
|
||||
|
||||
uploadCtx, uploadSpan := s.tracer.Start(ctx, "CloudMigrationService.uploadSnapshot.uploadDataFiles")
|
||||
// Upload the data files.
|
||||
for _, fileNames := range index.Items {
|
||||
for _, fileName := range fileNames {
|
||||
filePath := filepath.Join(snapshotMeta.LocalDir, fileName)
|
||||
key := fmt.Sprintf("%d/snapshots/%s/%s", session.StackID, snapshotMeta.GMSSnapshotUID, fileName)
|
||||
if err := s.uploadUsingPresignedURL(ctx, uploadUrl, key, filePath); err != nil {
|
||||
if err := s.uploadUsingPresignedURL(uploadCtx, uploadUrl, key, filePath); err != nil {
|
||||
uploadSpan.SetStatus(codes.Error, "uploading snapshot data file using presigned url")
|
||||
uploadSpan.RecordError(err)
|
||||
uploadSpan.End()
|
||||
|
||||
return fmt.Errorf("uploading snapshot file using presigned url: %w", err)
|
||||
}
|
||||
s.log.Debug(fmt.Sprintf("uploadSnapshot: uploaded %s in %d ms", fileName, time.Since(start).Milliseconds()))
|
||||
}
|
||||
}
|
||||
uploadSpan.End()
|
||||
|
||||
s.log.Debug(fmt.Sprintf("uploadSnapshot: uploaded all data files in %d ms", time.Since(start).Milliseconds()))
|
||||
|
||||
uploadCtx, uploadSpan = s.tracer.Start(ctx, "CloudMigrationService.uploadSnapshot.uploadIndex")
|
||||
|
||||
// Upload the index file. Must be done after uploading the data files.
|
||||
key := fmt.Sprintf("%d/snapshots/%s/%s", session.StackID, snapshotMeta.GMSSnapshotUID, "index.json")
|
||||
if _, err := indexFile.Seek(0, 0); err != nil {
|
||||
uploadSpan.SetStatus(codes.Error, "seeking to beginning of index file")
|
||||
uploadSpan.RecordError(err)
|
||||
uploadSpan.End()
|
||||
|
||||
return fmt.Errorf("seeking to beginning of index file: %w", err)
|
||||
}
|
||||
|
||||
if err := s.objectStorage.PresignedURLUpload(ctx, uploadUrl, key, indexFile); err != nil {
|
||||
if err := s.objectStorage.PresignedURLUpload(uploadCtx, uploadUrl, key, indexFile); err != nil {
|
||||
uploadSpan.SetStatus(codes.Error, "uploading index file using presigned url")
|
||||
uploadSpan.RecordError(err)
|
||||
uploadSpan.End()
|
||||
|
||||
return fmt.Errorf("uploading file using presigned url: %w", err)
|
||||
}
|
||||
|
||||
uploadSpan.End()
|
||||
|
||||
s.log.Debug(fmt.Sprintf("uploadSnapshot: uploaded index file in %d ms", time.Since(start).Milliseconds()))
|
||||
s.log.Info("successfully uploaded snapshot", "snapshotUid", snapshotMeta.UID, "cloud_snapshotUid", snapshotMeta.GMSSnapshotUID)
|
||||
|
||||
@ -408,6 +467,9 @@ func (s *Service) uploadSnapshot(ctx context.Context, session *cloudmigration.Cl
|
||||
}
|
||||
|
||||
func (s *Service) uploadUsingPresignedURL(ctx context.Context, uploadURL, key string, filePath string) (err error) {
|
||||
ctx, span := s.tracer.Start(ctx, "CloudMigrationService.uploadUsingPresignedURL")
|
||||
defer span.End()
|
||||
|
||||
// The directory that contains the file can be set in the configuration, therefore the directory can be any directory.
|
||||
// nolint:gosec
|
||||
file, err := os.Open(filePath)
|
||||
@ -486,3 +548,74 @@ func sortFolders(input []folder.CreateFolderCommand) []folder.CreateFolderComman
|
||||
|
||||
return input
|
||||
}
|
||||
|
||||
// getFolderNamesForFolderUIDs queries the folders service to obtain folder names for a list of folderUIDs
|
||||
func (s *Service) getFolderNamesForFolderUIDs(ctx context.Context, signedInUser *user.SignedInUser, folderUIDs []string) (map[string](string), error) {
|
||||
folders, err := s.folderService.GetFolders(ctx, folder.GetFoldersQuery{
|
||||
UIDs: folderUIDs,
|
||||
SignedInUser: signedInUser,
|
||||
WithFullpathUIDs: true,
|
||||
})
|
||||
if err != nil {
|
||||
s.log.Error("Failed to obtain folders from folder UIDs", "err", err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
folderUIDsToNames := make(map[string](string), len(folderUIDs))
|
||||
for _, folderUID := range folderUIDs {
|
||||
folderUIDsToNames[folderUID] = ""
|
||||
}
|
||||
for _, f := range folders {
|
||||
folderUIDsToNames[f.UID] = f.Title
|
||||
}
|
||||
return folderUIDsToNames, nil
|
||||
}
|
||||
|
||||
// getParentNames finds the parent names for resources and returns a map of data type: {data UID : parentName}
|
||||
// for dashboards, folders and library elements - the parent is the parent folder
|
||||
func (s *Service) getParentNames(ctx context.Context, signedInUser *user.SignedInUser, dashboards []dashboards.Dashboard, folders []folder.CreateFolderCommand, libraryElements []libraryElement) (map[cloudmigration.MigrateDataType]map[string](string), error) {
|
||||
parentNamesByType := make(map[cloudmigration.MigrateDataType]map[string](string))
|
||||
for _, dataType := range currentMigrationTypes {
|
||||
parentNamesByType[dataType] = make(map[string]string)
|
||||
}
|
||||
|
||||
// Obtain list of unique folderUIDs
|
||||
parentFolderUIDsSet := make(map[string]struct{}, len(dashboards)+len(folders)+len(libraryElements))
|
||||
for _, dashboard := range dashboards {
|
||||
parentFolderUIDsSet[dashboard.FolderUID] = struct{}{}
|
||||
}
|
||||
for _, f := range folders {
|
||||
parentFolderUIDsSet[f.ParentUID] = struct{}{}
|
||||
}
|
||||
for _, libraryElement := range libraryElements {
|
||||
if libraryElement.FolderUID != nil {
|
||||
parentFolderUIDsSet[*libraryElement.FolderUID] = struct{}{}
|
||||
}
|
||||
}
|
||||
parentFolderUIDsSlice := make([]string, 0, len(parentFolderUIDsSet))
|
||||
for parentFolderUID := range parentFolderUIDsSet {
|
||||
parentFolderUIDsSlice = append(parentFolderUIDsSlice, parentFolderUID)
|
||||
}
|
||||
|
||||
// Obtain folder names given a list of folderUIDs
|
||||
foldersUIDsToFolderName, err := s.getFolderNamesForFolderUIDs(ctx, signedInUser, parentFolderUIDsSlice)
|
||||
if err != nil {
|
||||
s.log.Error("Failed to get parent folder names from folder UIDs", "err", err)
|
||||
return parentNamesByType, err
|
||||
}
|
||||
|
||||
// Prepare map of {data type: {data UID : parentName}}
|
||||
for _, dashboard := range dashboards {
|
||||
parentNamesByType[cloudmigration.DashboardDataType][dashboard.UID] = foldersUIDsToFolderName[dashboard.FolderUID]
|
||||
}
|
||||
for _, f := range folders {
|
||||
parentNamesByType[cloudmigration.FolderDataType][f.UID] = foldersUIDsToFolderName[f.ParentUID]
|
||||
}
|
||||
for _, libraryElement := range libraryElements {
|
||||
if libraryElement.FolderUID != nil {
|
||||
parentNamesByType[cloudmigration.LibraryElementDataType][libraryElement.UID] = foldersUIDsToFolderName[*libraryElement.FolderUID]
|
||||
}
|
||||
}
|
||||
|
||||
return parentNamesByType, err
|
||||
}
|
||||
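For reference, getParentNames produces a per-type lookup from resource UID to parent folder title. With the fixtures used in the tests earlier in this diff, the result would look roughly like the fragment below (illustrative values only, not a complete file):

parentNamesByType := map[cloudmigration.MigrateDataType]map[string]string{
	cloudmigration.DashboardDataType:      {"dashboardUID-0": "", "dashboardUID-1": "Folder A", "dashboardUID-2": "Folder B"},
	cloudmigration.FolderDataType:         {"folderUID-C": "Folder A"},
	cloudmigration.LibraryElementDataType: {"libraryElementUID-0": "Folder A"},
}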
|
@ -18,19 +18,21 @@ import (
|
||||
)
|
||||
|
||||
// NewGMSClient returns an implementation of Client that queries GrafanaMigrationService
|
||||
func NewGMSClient(cfg *setting.Cfg) (Client, error) {
|
||||
func NewGMSClient(cfg *setting.Cfg, httpClient *http.Client) (Client, error) {
|
||||
if cfg.CloudMigration.GMSDomain == "" {
|
||||
return nil, fmt.Errorf("missing GMS domain")
|
||||
}
|
||||
return &gmsClientImpl{
|
||||
cfg: cfg,
|
||||
log: log.New(logPrefix),
|
||||
cfg: cfg,
|
||||
log: log.New(logPrefix),
|
||||
httpClient: httpClient,
|
||||
}, nil
|
||||
}
|
||||
|
||||
type gmsClientImpl struct {
|
||||
cfg *setting.Cfg
|
||||
log *log.ConcreteLogger
|
||||
cfg *setting.Cfg
|
||||
log *log.ConcreteLogger
|
||||
httpClient *http.Client
|
||||
|
||||
getStatusMux sync.Mutex
|
||||
getStatusLastQueried time.Time
|
||||
@ -40,8 +42,11 @@ func (c *gmsClientImpl) ValidateKey(ctx context.Context, cm cloudmigration.Cloud
|
||||
// TODO: there is a lot of boilerplate code in these methods, we should consolidate them when we have a gardening period
|
||||
path := fmt.Sprintf("%s/api/v1/validate-key", c.buildBasePath(cm.ClusterSlug))
|
||||
|
||||
ctx, cancel := context.WithTimeout(ctx, c.cfg.CloudMigration.GMSValidateKeyTimeout)
|
||||
defer cancel()
|
||||
|
||||
// validation is an empty POST to GMS with the authorization header included
|
||||
req, err := http.NewRequest("POST", path, bytes.NewReader(nil))
|
||||
req, err := http.NewRequestWithContext(ctx, "POST", path, bytes.NewReader(nil))
|
||||
if err != nil {
|
||||
c.log.Error("error creating http request for token validation", "err", err.Error())
|
||||
return fmt.Errorf("http request error: %w", err)
|
||||
@ -49,10 +54,7 @@ func (c *gmsClientImpl) ValidateKey(ctx context.Context, cm cloudmigration.Cloud
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
req.Header.Set("Authorization", fmt.Sprintf("Bearer %d:%s", cm.StackID, cm.AuthToken))
|
||||
|
||||
client := &http.Client{
|
||||
Timeout: c.cfg.CloudMigration.GMSValidateKeyTimeout,
|
||||
}
|
||||
resp, err := client.Do(req)
|
||||
resp, err := c.httpClient.Do(req)
|
||||
if err != nil {
|
||||
c.log.Error("error sending http request for token validation", "err", err.Error())
|
||||
return fmt.Errorf("http request error: %w", err)
|
||||
@ -74,8 +76,11 @@ func (c *gmsClientImpl) ValidateKey(ctx context.Context, cm cloudmigration.Cloud
|
||||
func (c *gmsClientImpl) StartSnapshot(ctx context.Context, session cloudmigration.CloudMigrationSession) (out *cloudmigration.StartSnapshotResponse, err error) {
|
||||
path := fmt.Sprintf("%s/api/v1/start-snapshot", c.buildBasePath(session.ClusterSlug))
|
||||
|
||||
ctx, cancel := context.WithTimeout(ctx, c.cfg.CloudMigration.GMSStartSnapshotTimeout)
|
||||
defer cancel()
|
||||
|
||||
// Send the request to cms with the associated auth token
|
||||
req, err := http.NewRequest(http.MethodPost, path, nil)
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodPost, path, nil)
|
||||
if err != nil {
|
||||
c.log.Error("error creating http request to start snapshot", "err", err.Error())
|
||||
return nil, fmt.Errorf("http request error: %w", err)
|
||||
@ -83,10 +88,7 @@ func (c *gmsClientImpl) StartSnapshot(ctx context.Context, session cloudmigratio
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
req.Header.Set("Authorization", fmt.Sprintf("Bearer %d:%s", session.StackID, session.AuthToken))
|
||||
|
||||
client := &http.Client{
|
||||
Timeout: c.cfg.CloudMigration.GMSStartSnapshotTimeout,
|
||||
}
|
||||
resp, err := client.Do(req)
|
||||
resp, err := c.httpClient.Do(req)
|
||||
if err != nil {
|
||||
c.log.Error("error sending http request to start snapshot", "err", err.Error())
|
||||
return nil, fmt.Errorf("http request error: %w", err)
|
||||
@ -119,8 +121,11 @@ func (c *gmsClientImpl) GetSnapshotStatus(ctx context.Context, session cloudmigr
|
||||
|
||||
path := fmt.Sprintf("%s/api/v1/snapshots/%s/status?offset=%d", c.buildBasePath(session.ClusterSlug), snapshot.GMSSnapshotUID, offset)
|
||||
|
||||
ctx, cancel := context.WithTimeout(ctx, c.cfg.CloudMigration.GMSGetSnapshotStatusTimeout)
|
||||
defer cancel()
|
||||
|
||||
// Send the request to gms with the associated auth token
|
||||
req, err := http.NewRequest(http.MethodGet, path, nil)
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodGet, path, nil)
|
||||
if err != nil {
|
||||
c.log.Error("error creating http request to get snapshot status", "err", err.Error())
|
||||
return nil, fmt.Errorf("http request error: %w", err)
|
||||
@ -128,11 +133,8 @@ func (c *gmsClientImpl) GetSnapshotStatus(ctx context.Context, session cloudmigr
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
req.Header.Set("Authorization", fmt.Sprintf("Bearer %d:%s", session.StackID, session.AuthToken))
|
||||
|
||||
client := &http.Client{
|
||||
Timeout: c.cfg.CloudMigration.GMSGetSnapshotStatusTimeout,
|
||||
}
|
||||
c.getStatusLastQueried = time.Now()
|
||||
resp, err := client.Do(req)
|
||||
resp, err := c.httpClient.Do(req)
|
||||
if err != nil {
|
||||
c.log.Error("error sending http request to get snapshot status", "err", err.Error())
|
||||
return nil, fmt.Errorf("http request error: %w", err)
|
||||
@ -163,8 +165,11 @@ func (c *gmsClientImpl) GetSnapshotStatus(ctx context.Context, session cloudmigr
|
||||
func (c *gmsClientImpl) CreatePresignedUploadUrl(ctx context.Context, session cloudmigration.CloudMigrationSession, snapshot cloudmigration.CloudMigrationSnapshot) (string, error) {
|
||||
path := fmt.Sprintf("%s/api/v1/snapshots/%s/create-upload-url", c.buildBasePath(session.ClusterSlug), snapshot.GMSSnapshotUID)
|
||||
|
||||
ctx, cancel := context.WithTimeout(ctx, c.cfg.CloudMigration.GMSCreateUploadUrlTimeout)
|
||||
defer cancel()
|
||||
|
||||
// Send the request to gms with the associated auth token
|
||||
req, err := http.NewRequest(http.MethodPost, path, nil)
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodPost, path, nil)
|
||||
if err != nil {
|
||||
c.log.Error("error creating http request to create upload url", "err", err.Error())
|
||||
return "", fmt.Errorf("http request error: %w", err)
|
||||
@ -172,10 +177,7 @@ func (c *gmsClientImpl) CreatePresignedUploadUrl(ctx context.Context, session cl
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
req.Header.Set("Authorization", fmt.Sprintf("Bearer %d:%s", session.StackID, session.AuthToken))
|
||||
|
||||
client := &http.Client{
|
||||
Timeout: c.cfg.CloudMigration.GMSCreateUploadUrlTimeout,
|
||||
}
|
||||
resp, err := client.Do(req)
|
||||
resp, err := c.httpClient.Do(req)
|
||||
if err != nil {
|
||||
c.log.Error("error sending http request to create an upload url", "err", err.Error())
|
||||
return "", fmt.Errorf("http request error: %w", err)
|
||||
@ -208,6 +210,9 @@ func (c *gmsClientImpl) ReportEvent(ctx context.Context, session cloudmigration.
|
||||
return
|
||||
}
|
||||
|
||||
ctx, cancel := context.WithTimeout(ctx, c.cfg.CloudMigration.GMSReportEventTimeout)
|
||||
defer cancel()
|
||||
|
||||
path := fmt.Sprintf("%s/api/v1/events", c.buildBasePath(session.ClusterSlug))
|
||||
|
||||
var buf bytes.Buffer
|
||||
@ -216,7 +221,7 @@ func (c *gmsClientImpl) ReportEvent(ctx context.Context, session cloudmigration.
|
||||
return
|
||||
}
|
||||
// Send the request to gms with the associated auth token
|
||||
req, err := http.NewRequest(http.MethodPost, path, &buf)
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodPost, path, &buf)
|
||||
if err != nil {
|
||||
c.log.Error("error creating http request to report event", "err", err.Error())
|
||||
return
|
||||
@ -224,10 +229,7 @@ func (c *gmsClientImpl) ReportEvent(ctx context.Context, session cloudmigration.
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
req.Header.Set("Authorization", fmt.Sprintf("Bearer %d:%s", session.StackID, session.AuthToken))
|
||||
|
||||
client := &http.Client{
|
||||
Timeout: c.cfg.CloudMigration.GMSReportEventTimeout,
|
||||
}
|
||||
resp, err := client.Do(req)
|
||||
resp, err := c.httpClient.Do(req)
|
||||
if err != nil {
|
||||
c.log.Error("error sending http request for report event", "err", err.Error())
|
||||
return
|
||||
|
@ -1,6 +1,7 @@
|
||||
package gmsclient
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"testing"
|
||||
|
||||
"github.com/grafana/grafana/pkg/setting"
|
||||
@ -16,7 +17,9 @@ func Test_buildBasePath(t *testing.T) {
|
||||
CloudMigration: setting.CloudMigrationSettings{
|
||||
GMSDomain: "",
|
||||
},
|
||||
})
|
||||
},
|
||||
http.DefaultClient,
|
||||
)
|
||||
require.Error(t, err)
|
||||
|
||||
// Domain is required
|
||||
@ -24,7 +27,9 @@ func Test_buildBasePath(t *testing.T) {
|
||||
CloudMigration: setting.CloudMigrationSettings{
|
||||
GMSDomain: "non-empty",
|
||||
},
|
||||
})
|
||||
},
|
||||
http.DefaultClient,
|
||||
)
|
||||
require.NoError(t, err)
|
||||
client := c.(*gmsClientImpl)
|
||||
|
||||
|
@ -75,15 +75,21 @@ type CloudMigrationResource struct {
|
||||
Error string `xorm:"error_string" json:"error"`
|
||||
|
||||
SnapshotUID string `xorm:"snapshot_uid"`
|
||||
ParentName string `xorm:"parent_name" json:"parentName"`
|
||||
}
|
||||
|
||||
type MigrateDataType string
|
||||
|
||||
const (
|
||||
DashboardDataType MigrateDataType = "DASHBOARD"
|
||||
DatasourceDataType MigrateDataType = "DATASOURCE"
|
||||
FolderDataType MigrateDataType = "FOLDER"
|
||||
LibraryElementDataType MigrateDataType = "LIBRARY_ELEMENT"
|
||||
DashboardDataType MigrateDataType = "DASHBOARD"
|
||||
DatasourceDataType MigrateDataType = "DATASOURCE"
|
||||
FolderDataType MigrateDataType = "FOLDER"
|
||||
LibraryElementDataType MigrateDataType = "LIBRARY_ELEMENT"
|
||||
AlertRuleType MigrateDataType = "ALERT_RULE"
|
||||
ContactPointType MigrateDataType = "CONTACT_POINT"
|
||||
NotificationPolicyType MigrateDataType = "NOTIFICATION_POLICY"
|
||||
NotificationTemplateType MigrateDataType = "NOTIFICATION_TEMPLATE"
|
||||
MuteTimingType MigrateDataType = "MUTE_TIMING"
|
||||
)
|
||||
|
||||
type ItemStatus string
|
||||
@ -185,7 +191,8 @@ type Base64HGInstance struct {
|
||||
// GMS domain structs
|
||||
|
||||
type MigrateDataRequest struct {
|
||||
Items []MigrateDataRequestItem
|
||||
Items []MigrateDataRequestItem
|
||||
ItemParentNames map[MigrateDataType]map[string](string)
|
||||
}
|
||||
|
||||
type MigrateDataRequestItem struct {
|
||||
|
@ -9,15 +9,26 @@ import (
|
||||
"mime/multipart"
|
||||
"net/http"
|
||||
"net/url"
|
||||
|
||||
"github.com/grafana/grafana/pkg/infra/tracing"
|
||||
|
||||
"go.opentelemetry.io/otel/attribute"
|
||||
)
|
||||
|
||||
type S3 struct{}
|
||||
type S3 struct {
|
||||
httpClient *http.Client
|
||||
tracer tracing.Tracer
|
||||
}
|
||||
|
||||
func NewS3() *S3 {
|
||||
return &S3{}
|
||||
func NewS3(httpClient *http.Client, tracer tracing.Tracer) *S3 {
|
||||
return &S3{httpClient: httpClient, tracer: tracer}
|
||||
}
|
||||
|
||||
func (s3 *S3) PresignedURLUpload(ctx context.Context, presignedURL, key string, reader io.Reader) (err error) {
|
||||
ctx, span := s3.tracer.Start(ctx, "objectstorage.S3.PresignedURLUpload")
|
||||
span.SetAttributes(attribute.String("key", key))
|
||||
defer span.End()
|
||||
|
||||
url, err := url.Parse(presignedURL)
|
||||
if err != nil {
|
||||
return fmt.Errorf("parsing presigned url")
|
||||
@ -68,13 +79,13 @@ func (s3 *S3) PresignedURLUpload(ctx context.Context, presignedURL, key string,
|
||||
|
||||
endpoint := fmt.Sprintf("%s://%s%s", url.Scheme, url.Host, url.Path)
|
||||
|
||||
request, err := http.NewRequest(http.MethodPost, endpoint, buffer)
|
||||
request, err := http.NewRequestWithContext(ctx, http.MethodPost, endpoint, buffer)
|
||||
if err != nil {
|
||||
return fmt.Errorf("creating http request: %w", err)
|
||||
}
|
||||
request.Header.Set("Content-Type", writer.FormDataContentType())
|
||||
httpClient := http.Client{}
|
||||
response, err := httpClient.Do(request)
|
||||
|
||||
response, err := s3.httpClient.Do(request)
|
||||
if err != nil {
|
||||
return fmt.Errorf("sending http request: %w", err)
|
||||
}
|
||||
|
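NewS3 now takes the HTTP client and tracer as dependencies instead of creating an http.Client inside PresignedURLUpload. The outline below illustrates only the injected dependencies: the real implementation builds a multipart POST form as shown above, whereas this sketch simply streams the file, and the function name is hypothetical:

package objectstoragesketch

import (
	"context"
	"fmt"
	"net/http"
	"os"

	"go.opentelemetry.io/otel/attribute"
	"go.opentelemetry.io/otel/trace"
)

// uploadWithInjectedClient mirrors the dependency change to NewS3: the span
// comes from the injected tracer and the request goes through the shared client.
func uploadWithInjectedClient(ctx context.Context, tracer trace.Tracer, client *http.Client, presignedURL, key, filePath string) error {
	ctx, span := tracer.Start(ctx, "objectstoragesketch.upload")
	span.SetAttributes(attribute.String("key", key))
	defer span.End()

	// nolint:gosec -- the path is caller-controlled, as in the original code.
	f, err := os.Open(filePath)
	if err != nil {
		return fmt.Errorf("opening file: %w", err)
	}
	defer func() { _ = f.Close() }()

	req, err := http.NewRequestWithContext(ctx, http.MethodPut, presignedURL, f)
	if err != nil {
		return fmt.Errorf("creating http request: %w", err)
	}

	resp, err := client.Do(req)
	if err != nil {
		return fmt.Errorf("sending http request: %w", err)
	}
	defer func() { _ = resp.Body.Close() }()

	if resp.StatusCode >= 300 {
		return fmt.Errorf("unexpected status: %s", resp.Status)
	}
	return nil
}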
11
pkg/services/cloudmigration/slicesext/slicesext.go
Normal file
@ -0,0 +1,11 @@
|
||||
package slicesext
|
||||
|
||||
func Map[T any, U any](xs []T, f func(T) U) []U {
|
||||
out := make([]U, 0, len(xs))
|
||||
|
||||
for _, x := range xs {
|
||||
out = append(out, f(x))
|
||||
}
|
||||
|
||||
return out
|
||||
}
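A short usage example for the new helper; this is a hypothetical Go Example function, separate from the test file that follows:

package slicesext_test

import (
	"fmt"

	"github.com/grafana/grafana/pkg/services/cloudmigration/slicesext"
)

// ExampleMap projects each element through the given function.
func ExampleMap() {
	doubled := slicesext.Map([]int{1, 2, 3}, func(i int) int { return i * 2 })
	fmt.Println(doubled)
	// Output: [2 4 6]
}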
|
36
pkg/services/cloudmigration/slicesext/slicesext_test.go
Normal file
@ -0,0 +1,36 @@
|
||||
package slicesext_test
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/grafana/grafana/pkg/services/cloudmigration/slicesext"
|
||||
)
|
||||
|
||||
func TestMap(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
t.Run("mapping a nil slice does nothing and returns an empty slice", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
require.Empty(t, slicesext.Map[any, any](nil, nil))
|
||||
})
|
||||
|
||||
t.Run("mapping a non-nil slice with a nil function panics", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
require.Panics(t, func() { slicesext.Map[int, any]([]int{1, 2, 3}, nil) })
|
||||
})
|
||||
|
||||
t.Run("mapping a non-nil slice with a non-nil function returns the mapped slice", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
original := []int{1, 2, 3}
|
||||
expected := []string{"1", "2", "3"}
|
||||
fn := func(i int) string { return strconv.Itoa(i) }
|
||||
|
||||
require.ElementsMatch(t, expected, slicesext.Map(original, fn))
|
||||
})
|
||||
}
|
@ -255,7 +255,7 @@ func (s *Service) AddDataSource(ctx context.Context, cmd *datasources.AddDataSou
}

var dataSource *datasources.DataSource
return dataSource, s.db.InTransaction(ctx, func(ctx context.Context) error {
err = s.db.InTransaction(ctx, func(ctx context.Context) error {
var err error

cmd.EncryptedSecureJsonData = make(map[string][]byte)
@ -293,12 +293,18 @@ func (s *Service) AddDataSource(ctx context.Context, cmd *datasources.AddDataSou
if cmd.UserID != 0 {
permissions = append(permissions, accesscontrol.SetResourcePermissionCommand{UserID: cmd.UserID, Permission: "Admin"})
}
_, err = s.permissionsService.SetPermissions(ctx, cmd.OrgID, dataSource.UID, permissions...)
return err
if _, err = s.permissionsService.SetPermissions(ctx, cmd.OrgID, dataSource.UID, permissions...); err != nil {
return err
}
}

return nil
})
if err != nil {
return nil, err
}

return dataSource, nil
}

// This will valid validate the instance settings return a version that is safe to be saved
@ -3,6 +3,7 @@ package service
import (
"context"
"encoding/json"
"errors"
"fmt"
"io"
"net/http"
@ -104,6 +105,27 @@ func TestService_AddDataSource(t *testing.T) {
require.EqualError(t, err, "[datasource.urlInvalid] max length is 255")
})

t.Run("should fail if the datasource managed permissions fail", func(t *testing.T) {
dsService := initDSService(t)
enableRBACManagedPermissions(t, dsService.cfg)
dsService.permissionsService = &actest.FakePermissionsService{
ExpectedErr: errors.New("failed to set datasource permissions"),
}
dsService.pluginStore = &pluginstore.FakePluginStore{
PluginList: []pluginstore.Plugin{},
}

cmd := &datasources.AddDataSourceCommand{
OrgID: 1,
Type: datasources.DS_TESTDATA,
Name: "test",
}

ds, err := dsService.AddDataSource(context.Background(), cmd)
assert.Nil(t, ds)
assert.ErrorContains(t, err, "failed to set datasource permissions")
})

t.Run("if a plugin has an API version defined (EXPERIMENTAL)", func(t *testing.T) {
t.Run("should success to run admission hooks", func(t *testing.T) {
dsService := initDSService(t)
@ -580,11 +602,8 @@ func TestService_DeleteDataSource(t *testing.T) {
permissionSvc := acmock.NewMockedPermissionsService()
permissionSvc.On("SetPermissions", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return([]accesscontrol.ResourcePermission{}, nil).Once()
permissionSvc.On("DeleteResourcePermissions", mock.Anything, mock.Anything, mock.Anything).Return(nil).Once()

f := ini.Empty()
f.Section("rbac").Key("resources_with_managed_permissions_on_creation").SetValue("datasource")
cfg, err := setting.NewCfgFromINIFile(f)
require.NoError(t, err)
cfg := &setting.Cfg{}
enableRBACManagedPermissions(t, cfg)
dsService, err := ProvideService(sqlStore, secretsService, secretsStore, cfg, featuremgmt.WithFeatures(), acmock.New(), permissionSvc, quotaService, &pluginstore.FakePluginStore{}, &pluginfakes.FakePluginClient{}, nil)
require.NoError(t, err)

@ -1521,6 +1540,15 @@ func initDSService(t *testing.T) *Service {
return dsService
}

func enableRBACManagedPermissions(t testing.TB, cfg *setting.Cfg) {
t.Helper()
f := ini.Empty()
f.Section("rbac").Key("resources_with_managed_permissions_on_creation").SetValue("datasource")
tempCfg, err := setting.NewCfgFromINIFile(f)
cfg.RBAC = tempCfg.RBAC
require.NoError(t, err)
}

const caCert string = `-----BEGIN CERTIFICATE-----
MIIDATCCAemgAwIBAgIJAMQ5hC3CPDTeMA0GCSqGSIb3DQEBCwUAMBcxFTATBgNV
BAMMDGNhLWs4cy1zdGhsbTAeFw0xNjEwMjcwODQyMjdaFw00NDAzMTQwODQyMjda
@ -430,11 +430,10 @@ var (
Owner: grafanaObservabilityLogsSquad,
},
{
Name: "pluginsFrontendSandbox",
Description: "Enables the plugins frontend sandbox",
Stage: FeatureStageExperimental,
FrontendOnly: true,
Owner: grafanaPluginsPlatformSquad,
Name: "pluginsFrontendSandbox",
Description: "Enables the plugins frontend sandbox",
Stage: FeatureStageExperimental,
Owner: grafanaPluginsPlatformSquad,
},
{
Name: "frontendSandboxMonitorOnly",
@ -675,9 +674,8 @@ var (
Name: "externalServiceAccounts",
Description: "Automatic service account and token setup for plugins",
HideFromAdminPage: true,
Stage: FeatureStageGeneralAvailability,
Stage: FeatureStagePublicPreview,
Owner: identityAccessTeam,
Expression: "true", // enabled by default
},
{
Name: "panelMonitoring",
@ -1063,6 +1061,12 @@ var (
Owner: grafanaOperatorExperienceSquad,
Expression: "false",
},
{
Name: "onPremToCloudMigrationsAlerts",
Description: "Enables the migration of alerts and its child resources to your Grafana Cloud stack. Requires `onPremToCloudMigrations` to be enabled in conjunction.",
Stage: FeatureStageExperimental,
Owner: grafanaOperatorExperienceSquad,
},
{
Name: "alertingSaveStatePeriodic",
Description: "Writes the state periodically to the database, asynchronous to rule evaluation",
@ -1324,12 +1328,12 @@ var (
Expression: "true",
},
{
Name: "ssoSettingsLDAP",
Description: "Use the new SSO Settings API to configure LDAP",
Stage: FeatureStageExperimental,
Owner: identityAccessTeam,
HideFromDocs: true,
HideFromAdminPage: true,
Name: "ssoSettingsLDAP",
Description: "Use the new SSO Settings API to configure LDAP",
Stage: FeatureStagePublicPreview,
Owner: identityAccessTeam,
AllowSelfServe: true,
RequiresRestart: true,
},
{
Name: "failWrongDSUID",
@ -1514,6 +1518,12 @@ var (
HideFromDocs: true,
HideFromAdminPage: true,
},
{
Name: "pluginsSriChecks",
Description: "Enables SRI checks for plugin assets",
Stage: FeatureStageExperimental,
Owner: grafanaPluginsPlatformSquad,
},
}
)
Some files were not shown because too many files have changed in this diff.