Merge remote-tracking branch 'origin/main' into resource-store

Ryan McKinley 2024-06-19 22:59:26 +03:00
commit c17140f263
67 changed files with 3097 additions and 1179 deletions


@ -6134,10 +6134,6 @@ exports[`better eslint`] = {
[0, 0, 0, "Do not re-export imported variable (\`./thirdArgAfterSearchQuery\`)", "4"],
[0, 0, 0, "Do not re-export imported variable (\`./withinStringQuery\`)", "5"]
],
"public/app/plugins/datasource/cloudwatch/components/CheatSheet/LogsCheatSheet.tsx:5381": [
[0, 0, 0, "Styles should be written using objects.", "0"],
[0, 0, 0, "Styles should be written using objects.", "1"]
],
"public/app/plugins/datasource/cloudwatch/components/ConfigEditor/XrayLinkConfig.tsx:5381": [
[0, 0, 0, "Styles should be written using objects.", "0"]
],
@ -6660,9 +6656,6 @@ exports[`better eslint`] = {
"public/app/plugins/datasource/jaeger/components/QueryEditor.tsx:5381": [
[0, 0, 0, "\'HorizontalGroup\' import from \'@grafana/ui\' is restricted from being used by a pattern. Use Stack component instead.", "0"]
],
"public/app/plugins/datasource/jaeger/configuration/ConfigEditor.tsx:5381": [
[0, 0, 0, "Styles should be written using objects.", "0"]
],
"public/app/plugins/datasource/jaeger/configuration/TraceIdTimeParams.tsx:5381": [
[0, 0, 0, "Styles should be written using objects.", "0"],
[0, 0, 0, "Styles should be written using objects.", "1"]
@ -6878,9 +6871,6 @@ exports[`better eslint`] = {
"public/app/plugins/datasource/tempo/webpack.config.ts:5381": [
[0, 0, 0, "Do not re-export imported variable (\`config\`)", "0"]
],
"public/app/plugins/datasource/zipkin/ConfigEditor.tsx:5381": [
[0, 0, 0, "Styles should be written using objects.", "0"]
],
"public/app/plugins/datasource/zipkin/QueryField.tsx:5381": [
[0, 0, 0, "\'HorizontalGroup\' import from \'@grafana/ui\' is restricted from being used by a pattern. Use Stack component instead.", "0"],
[0, 0, 0, "Do not use any type assertions.", "1"],
@ -7988,11 +7978,6 @@ exports[`no gf-form usage`] = {
"public/app/plugins/datasource/elasticsearch/configuration/DataLinks.tsx:5381": [
[0, 0, 0, "gf-form usage has been deprecated. Use a component from @grafana/ui or custom CSS instead.", "5381"]
],
"public/app/plugins/datasource/grafana-pyroscope-datasource/ConfigEditor.tsx:5381": [
[0, 0, 0, "gf-form usage has been deprecated. Use a component from @grafana/ui or custom CSS instead.", "5381"],
[0, 0, 0, "gf-form usage has been deprecated. Use a component from @grafana/ui or custom CSS instead.", "5381"],
[0, 0, 0, "gf-form usage has been deprecated. Use a component from @grafana/ui or custom CSS instead.", "5381"]
],
"public/app/plugins/datasource/influxdb/components/editor/annotation/AnnotationEditor.tsx:5381": [
[0, 0, 0, "gf-form usage has been deprecated. Use a component from @grafana/ui or custom CSS instead.", "5381"]
],


@ -4952,6 +4952,6 @@ kind: secret
name: gcr_credentials
---
kind: signature
hmac: 043028c50d984e1ea98a294c6746df1388cb0b7d7976f82f3dd0004fc493bafc
hmac: ba86e9c1fb16bb20bff8d56f158ea31f32c3e44f6d517a04ae774fc28f9101e7
...

.github/CODEOWNERS (62 changed lines)

@ -76,8 +76,8 @@
/pkg/bus/ @grafana/grafana-search-and-storage
/pkg/cmd/ @grafana/grafana-backend-group
/pkg/cmd/grafana/apiserver @grafana/grafana-app-platform-squad
/pkg/components/apikeygen/ @grafana/identity-access-team
/pkg/components/satokengen/ @grafana/identity-access-team
/pkg/components/apikeygen/ @grafana/identity-squad
/pkg/components/satokengen/ @grafana/identity-squad
/pkg/components/dashdiffs/ @grafana/grafana-app-platform-squad
/pkg/components/imguploader/ @grafana/alerting-backend
/pkg/components/loki/ @grafana/grafana-backend-group
@ -104,12 +104,12 @@
/pkg/server/ @grafana/grafana-backend-group
/pkg/apiserver @grafana/grafana-app-platform-squad
/pkg/apimachinery @grafana/grafana-app-platform-squad
/pkg/apimachinery/identity/ @grafana/identity-access-team
/pkg/apimachinery/identity/ @grafana/identity-squad
/pkg/apimachinery/errutil/ @grafana/grafana-backend-group
/pkg/promlib @grafana/observability-metrics
/pkg/storage/ @grafana/grafana-search-and-storage
/pkg/services/annotations/ @grafana/grafana-search-and-storage
/pkg/services/apikey/ @grafana/identity-access-team
/pkg/services/apikey/ @grafana/identity-squad
/pkg/services/cleanup/ @grafana/grafana-backend-group
/pkg/services/contexthandler/ @grafana/grafana-backend-group
/pkg/services/correlations/ @grafana/explore-squad
@ -137,14 +137,14 @@
/pkg/services/secrets/ @grafana/grafana-operator-experience-squad
/pkg/services/shorturls/ @grafana/grafana-backend-group
/pkg/services/sqlstore/ @grafana/grafana-search-and-storage
/pkg/services/ssosettings/ @grafana/identity-access-team
/pkg/services/ssosettings/ @grafana/identity-squad
/pkg/services/star/ @grafana/grafana-search-and-storage
/pkg/services/stats/ @grafana/grafana-backend-group
/pkg/services/tag/ @grafana/grafana-search-and-storage
/pkg/services/team/ @grafana/identity-access-team
/pkg/services/team/ @grafana/access-squad
/pkg/services/temp_user/ @grafana/grafana-backend-group
/pkg/services/updatechecker/ @grafana/grafana-backend-group
/pkg/services/user/ @grafana/identity-access-team
/pkg/services/user/ @grafana/access-squad
/pkg/services/validations/ @grafana/grafana-backend-group
/pkg/setting/ @grafana/grafana-backend-services-squad
/pkg/tests/ @grafana/grafana-backend-services-squad
@ -401,9 +401,9 @@ playwright.config.ts @grafana/plugins-platform-frontend
# Temp owners until Enterprise team takes over
/public/app/features/migrate-to-cloud @grafana/grafana-frontend-platform
/public/app/features/auth-config/ @grafana/identity-access-team
/public/app/features/auth-config/ @grafana/identity-squad
/public/app/features/annotations/ @grafana/dashboards-squad
/public/app/features/api-keys/ @grafana/identity-access-team
/public/app/features/api-keys/ @grafana/identity-squad
/public/app/features/canvas/ @grafana/dataviz-squad
/public/app/features/geo/ @grafana/dataviz-squad
/public/app/features/visualization/data-hover/ @grafana/dataviz-squad
@ -438,14 +438,14 @@ playwright.config.ts @grafana/plugins-platform-frontend
/public/app/features/sandbox/ @grafana/grafana-frontend-platform
/public/app/features/browse-dashboards/ @grafana/grafana-frontend-platform
/public/app/features/search/ @grafana/grafana-frontend-platform
/public/app/features/serviceaccounts/ @grafana/identity-access-team
/public/app/features/serviceaccounts/ @grafana/identity-squad
/public/app/features/storage/ @grafana/grafana-app-platform-squad
/public/app/features/teams/ @grafana/identity-access-team
/public/app/features/teams/ @grafana/access-squad
/public/app/features/templating/ @grafana/dashboards-squad
/public/app/features/trails/ @torkelo
/public/app/features/transformers/ @grafana/dataviz-squad
/public/app/features/transformers/timeSeriesTable/ @grafana/dataviz-squad @grafana/app-o11y-visualizations
/public/app/features/users/ @grafana/identity-access-team
/public/app/features/users/ @grafana/access-squad
/public/app/features/variables/ @grafana/dashboards-squad
/public/app/plugins/panel/alertlist/ @grafana/alerting-frontend
/public/app/plugins/panel/annolist/ @grafana/grafana-frontend-platform
@ -522,7 +522,7 @@ playwright.config.ts @grafana/plugins-platform-frontend
/scripts/benchmark-access-control.sh @grafana/identity-access-team
/scripts/benchmark-access-control.sh @grafana/access-squad
/scripts/check-breaking-changes.sh @grafana/plugins-platform-frontend
/scripts/ci-* @grafana/grafana-release-guild
/scripts/circle-* @grafana/grafana-release-guild
@ -606,23 +606,23 @@ playwright.config.ts @grafana/plugins-platform-frontend
/grafana-mixin/ @grafana/grafana-backend-services-squad
# Grafana authentication and authorization
/pkg/login/ @grafana/identity-access-team
/pkg/services/accesscontrol/ @grafana/identity-access-team
/pkg/login/ @grafana/identity-squad
/pkg/services/accesscontrol/ @grafana/access-squad
/pkg/services/anonymous/ @grafana/identity-access-team
/pkg/services/auth/ @grafana/identity-access-team
/pkg/services/authn/ @grafana/identity-access-team
/pkg/services/authz/ @grafana/identity-access-team
/pkg/services/signingkeys/ @grafana/identity-access-team
/pkg/services/dashboards/accesscontrol.go @grafana/identity-access-team
/pkg/services/datasources/guardian/ @grafana/identity-access-team
/pkg/services/guardian/ @grafana/identity-access-team
/pkg/services/ldap/ @grafana/identity-access-team
/pkg/services/login/ @grafana/identity-access-team
/pkg/services/loginattempt/ @grafana/identity-access-team
/pkg/services/auth/ @grafana/identity-squad
/pkg/services/authn/ @grafana/identity-squad
/pkg/services/authz/ @grafana/access-squad
/pkg/services/signingkeys/ @grafana/identity-squad
/pkg/services/dashboards/accesscontrol.go @grafana/access-squad
/pkg/services/datasources/guardian/ @grafana/access-squad
/pkg/services/guardian/ @grafana/access-squad
/pkg/services/ldap/ @grafana/identity-squad
/pkg/services/login/ @grafana/identity-squad
/pkg/services/loginattempt/ @grafana/identity-squad
/pkg/services/extsvcauth/ @grafana/identity-access-team
/pkg/services/oauthtoken/ @grafana/identity-access-team
/pkg/services/serviceaccounts/ @grafana/identity-access-team
/public/app/core/components/RolePicker/ @grafana/identity-access-team
/pkg/services/oauthtoken/ @grafana/identity-squad
/pkg/services/serviceaccounts/ @grafana/identity-squad
/public/app/core/components/RolePicker/ @grafana/access-squad
# Support bundles
/public/app/features/support-bundles/ @grafana/identity-access-team
@ -726,9 +726,9 @@ embed.go @grafana/grafana-as-code
# Conf
/conf/defaults.ini @torkelo
/conf/sample.ini @torkelo
/conf/ldap.toml @grafana/identity-access-team
/conf/ldap_multiple.toml @grafana/identity-access-team
/conf/provisioning/access-control/ @grafana/identity-access-team
/conf/ldap.toml @grafana/identity-squad
/conf/ldap_multiple.toml @grafana/identity-squad
/conf/provisioning/access-control/ @grafana/access-squad
/conf/provisioning/alerting/ @grafana/alerting-backend
/conf/provisioning/dashboards/ @grafana/dashboards-squad
/conf/provisioning/datasources/ @grafana/plugins-platform-backend


@ -273,8 +273,8 @@ datasources:
# Field with an internal link pointing to a logs data source in Grafana.
# datasourceUid value must match the uid value of the logs data source.
datasourceUid: 'loki'
spanStartTimeShift: '1h'
spanEndTimeShift: '-1h'
spanStartTimeShift: '-1h'
spanEndTimeShift: '1h'
tags: ['job', 'instance', 'pod', 'namespace']
filterByTraceID: false
filterBySpanID: false


@ -298,6 +298,9 @@ This transformation has the following options:
- **Numeric** - attempts to make the values numbers
- **String** - will make the values strings
- **Time** - attempts to parse the values as time
  - The input is parsed according to the [Moment.js parsing format](https://momentjs.com/docs/#/parsing/)
  - Numeric input is parsed as a Unix epoch timestamp in milliseconds, so you must multiply your input by 1000 if it's in seconds (see the sketch after this list)
  - Shows an option to specify a date format as an input string, such as yyyy-mm-dd or DD MM YYYY hh:mm:ss
- **Boolean** - will make the values booleans
- **Enum** - will make the values enums
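
For example, a field of Unix timestamps in seconds has to be scaled to milliseconds before the **Time** conversion interprets it correctly. A minimal sketch of that scaling, with illustrative values and variable names that are not part of the transformation itself:

```ts
// Unix epoch timestamps in seconds, as many backends report them.
const epochSeconds = [1718800000, 1718803600];

// The Time conversion expects milliseconds, so scale by 1000 first
// (for example in the query itself or with a calculation transformation).
const epochMilliseconds = epochSeconds.map((s) => s * 1000);

console.log(new Date(epochMilliseconds[0]).toISOString()); // 2024-06-19T12:26:40.000Z
```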


@ -56,8 +56,12 @@ export const filterByValueTransformer: DataTransformerInfo<FilterByValueTransfor
interpolatedFilters.push(
...filters.map((filter) => {
if (filter.config.id === ValueMatcherID.between) {
const interpolatedFrom = ctx.interpolate(filter.config.options.from);
const interpolatedTo = ctx.interpolate(filter.config.options.to);
if (typeof filter.config.options.from === 'string') {
filter.config.options.from = ctx.interpolate(filter.config.options.from);
}
if (typeof filter.config.options.to === 'string') {
filter.config.options.to = ctx.interpolate(filter.config.options.to);
}
const newFilter = {
...filter,
@ -65,8 +69,8 @@ export const filterByValueTransformer: DataTransformerInfo<FilterByValueTransfor
...filter.config,
options: {
...filter.config.options,
to: interpolatedTo,
from: interpolatedFrom,
to: filter.config.options.to,
from: filter.config.options.from,
},
},
};
@ -76,12 +80,14 @@ export const filterByValueTransformer: DataTransformerInfo<FilterByValueTransfor
// Due to colliding syntaxes, interpolating regex filters will cause issues.
return filter;
} else if (filter.config.options.value) {
const interpolatedValue = ctx.interpolate(filter.config.options.value);
if (typeof filter.config.options.value === 'string') {
filter.config.options.value = ctx.interpolate(filter.config.options.value);
}
const newFilter = {
...filter,
config: { ...filter.config, options: { ...filter.config.options, value: interpolatedValue } },
config: { ...filter.config, options: { ...filter.config.options, value: filter.config.options.value } },
};
newFilter.config.options.value! = interpolatedValue;
return newFilter;
}


@ -197,4 +197,5 @@ export interface FeatureToggles {
openSearchBackendFlowEnabled?: boolean;
ssoSettingsLDAP?: boolean;
databaseReadReplica?: boolean;
zanzana?: boolean;
}


@ -1,7 +1,9 @@
// Core Grafana history https://github.com/grafana/grafana/blob/v11.0.0-preview/public/app/plugins/datasource/prometheus/components/PromCheatSheet.tsx
import { css } from '@emotion/css';
import React from 'react';
import { QueryEditorHelpProps } from '@grafana/data';
import { GrafanaTheme2, QueryEditorHelpProps } from '@grafana/data';
import { useStyles2 } from '@grafana/ui';
import { PromQuery } from '../types';
@ -29,23 +31,44 @@ const CHEAT_SHEET_ITEMS = [
},
];
export const PromCheatSheet = (props: QueryEditorHelpProps<PromQuery>) => (
<div>
<h2>PromQL Cheat Sheet</h2>
{CHEAT_SHEET_ITEMS.map((item, index) => (
<div className="cheat-sheet-item" key={index}>
<div className="cheat-sheet-item__title">{item.title}</div>
{item.expression ? (
<button
type="button"
className="cheat-sheet-item__example"
onClick={(e) => props.onClickExample({ refId: 'A', expr: item.expression })}
>
<code>{item.expression}</code>
</button>
) : null}
<div className="cheat-sheet-item__label">{item.label}</div>
</div>
))}
</div>
);
export const PromCheatSheet = (props: QueryEditorHelpProps<PromQuery>) => {
const styles = useStyles2(getStyles);
return (
<div>
<h2>PromQL Cheat Sheet</h2>
{CHEAT_SHEET_ITEMS.map((item, index) => (
<div className={styles.cheatSheetItem} key={index}>
<div className={styles.cheatSheetItemTitle}>{item.title}</div>
{item.expression ? (
<button
type="button"
className={styles.cheatSheetExample}
onClick={(e) => props.onClickExample({ refId: 'A', expr: item.expression })}
>
<code>{item.expression}</code>
</button>
) : null}
{item.label}
</div>
))}
</div>
);
};
const getStyles = (theme: GrafanaTheme2) => ({
cheatSheetItem: css({
margin: theme.spacing(3, 0),
}),
cheatSheetItemTitle: css({
fontSize: theme.typography.h3.fontSize,
}),
cheatSheetExample: css({
margin: theme.spacing(0.5, 0),
// element is interactive, clear button styles
textAlign: 'left',
border: 'none',
background: 'transparent',
display: 'block',
}),
});


@ -4,7 +4,8 @@ import { isEqual } from 'lodash';
import React, { memo, useCallback } from 'react';
import { usePrevious } from 'react-use';
import { InlineFormLabel, RadioButtonGroup } from '@grafana/ui';
import { GrafanaTheme2 } from '@grafana/data';
import { InlineFormLabel, RadioButtonGroup, useStyles2 } from '@grafana/ui';
import { PrometheusDatasource } from '../datasource';
import { PromQuery } from '../types';
@ -21,6 +22,7 @@ export interface PromExploreExtraFieldProps {
export const PromExploreExtraField = memo(({ query, datasource, onChange, onRunQuery }: PromExploreExtraFieldProps) => {
const rangeOptions = getQueryTypeOptions(true);
const prevQuery = usePrevious(query);
const styles = useStyles2(getStyles);
const onExemplarChange = useCallback(
(exemplar: boolean) => {
@ -59,7 +61,8 @@ export const PromExploreExtraField = memo(({ query, datasource, onChange, onRunQ
<div
data-testid={promExploreExtraFieldTestIds.queryTypeField}
className={cx(
'gf-form explore-input-margin',
'gf-form',
styles.queryTypeField,
css({
flexWrap: 'nowrap',
})
@ -144,3 +147,9 @@ export const promExploreExtraFieldTestIds = {
stepField: 'prom-editor-extra-field-step',
queryTypeField: 'prom-editor-extra-field-query-type',
};
const getStyles = (theme: GrafanaTheme2) => ({
queryTypeField: css({
marginRight: theme.spacing(0.5),
}),
});


@ -1,5 +1,5 @@
// Core Grafana history https://github.com/grafana/grafana/blob/v11.0.0-preview/public/app/plugins/datasource/prometheus/components/PromQueryField.tsx
import { cx } from '@emotion/css';
import { css, cx } from '@emotion/css';
import React, { ReactNode } from 'react';
import { isDataFrame, QueryEditorProps, QueryHint, TimeRange, toLegacyResponseData } from '@grafana/data';
@ -265,8 +265,12 @@ class PromQueryFieldClass extends React.PureComponent<PromQueryFieldProps, PromQ
{ExtraFieldElement}
{hint ? (
<div className="query-row-break">
<div className="prom-query-field-info text-warning">
<div
className={css({
flexBasis: '100%',
})}
>
<div className="text-warning">
{hint.label}{' '}
{hint.fix ? (
<button


@ -552,7 +552,7 @@ describe('PrometheusDatasource', () => {
config.featureToggles.promQLScope = undefined;
});
it('should call replace function 2 times', () => {
it('should call replace function 3 times', () => {
const query: PromQuery = {
expr: 'test{job="testjob"}',
format: 'time_series',
@ -563,7 +563,7 @@ describe('PrometheusDatasource', () => {
replaceMock.mockReturnValue(interval);
const queries = ds.interpolateVariablesInQueries([query], { Interval: { text: interval, value: interval } });
expect(templateSrvStub.replace).toBeCalledTimes(2);
expect(templateSrvStub.replace).toBeCalledTimes(3);
expect(queries[0].interval).toBe(interval);
});
@ -753,6 +753,55 @@ describe('PrometheusDatasource', () => {
const result = ds.applyTemplateVariables(query, {}, filters);
expect(result).toMatchObject({ expr: 'test{job="99", k1="v1", k2!="v2"} > 99' });
});
it('should replace variables in ad-hoc filters', () => {
const searchPattern = /\$A/g;
replaceMock.mockImplementation((a: string) => a?.replace(searchPattern, '99') ?? a);
const query = {
expr: 'test',
refId: 'A',
};
const filters = [
{
key: 'job',
operator: '=~',
value: '$A',
},
];
const result = ds.applyTemplateVariables(query, {}, filters);
expect(result).toMatchObject({ expr: 'test{job=~"99"}' });
});
it('should replace variables in adhoc filters on backend when promQLScope is enabled', () => {
config.featureToggles.promQLScope = true;
const searchPattern = /\$A/g;
replaceMock.mockImplementation((a: string) => a?.replace(searchPattern, '99') ?? a);
const query = {
expr: 'test',
refId: 'A',
};
const filters = [
{
key: 'job',
operator: '=~',
value: '$A',
},
];
const result = ds.applyTemplateVariables(query, {}, filters);
expect(result).toMatchObject({
expr: 'test',
adhocFilters: [
{
key: 'job',
operator: 'regex-match',
value: '99',
},
],
});
});
});
describe('metricFindQuery', () => {


@ -659,14 +659,19 @@ export class PrometheusDatasource
if (queries && queries.length) {
expandedQueries = queries.map((query) => {
const interpolatedQuery = this.templateSrv.replace(query.expr, scopedVars, this.interpolateQueryExpr);
const replacedInterpolatedQuery = config.featureToggles.promQLScope
? interpolatedQuery
: this.templateSrv.replace(
this.enhanceExprWithAdHocFilters(filters, interpolatedQuery),
scopedVars,
this.interpolateQueryExpr
);
const expandedQuery = {
...query,
...(config.featureToggles.promQLScope ? { adhocFilters: this.generateScopeFilters(filters) } : {}),
datasource: this.getRef(),
expr: config.featureToggles.promQLScope
? interpolatedQuery
: this.enhanceExprWithAdHocFilters(filters, interpolatedQuery),
expr: replacedInterpolatedQuery,
interval: this.templateSrv.replace(query.interval, scopedVars),
};
@ -824,7 +829,11 @@ export class PrometheusDatasource
return [];
}
return filters.map((f) => ({ ...f, operator: scopeFilterOperatorMap[f.operator] }));
return filters.map((f) => ({
...f,
value: this.templateSrv.replace(f.value, {}, this.interpolateQueryExpr),
operator: scopeFilterOperatorMap[f.operator],
}));
}
enhanceExprWithAdHocFilters(filters: AdHocVariableFilter[] | undefined, expr: string) {
@ -865,12 +874,21 @@ export class PrometheusDatasource
};
// interpolate expression
// We need a first replace to evaluate variables before applying adhoc filters
// This is required for an expression like `metric > $VAR` where $VAR is a float to which we must not add adhoc filters
const expr = this.templateSrv.replace(target.expr, variables, this.interpolateQueryExpr);
// Apply ad-hoc filters
// When ad-hoc filters are applied, we replace again the variables in case the ad-hoc filters also reference a variable
const exprWithAdhoc = config.featureToggles.promQLScope
? expr
: this.templateSrv.replace(this.enhanceExprWithAdHocFilters(filters, expr), variables, this.interpolateQueryExpr);
return {
...target,
...(config.featureToggles.promQLScope ? { adhocFilters: this.generateScopeFilters(filters) } : {}),
expr: config.featureToggles.promQLScope ? expr : this.enhanceExprWithAdHocFilters(filters, expr),
expr: exprWithAdhoc,
interval: this.templateSrv.replace(target.interval, variables),
legendFormat: this.templateSrv.replace(target.legendFormat, variables),
};


@ -75,8 +75,12 @@ export const PromQueryBuilder = React.memo<PromQueryBuilderProps>((props) => {
<MetricsLabelsSection query={query} onChange={onChange} datasource={datasource} />
</EditorRow>
{initHints.length ? (
<div className="query-row-break">
<div className="prom-query-field-info text-warning">
<div
className={css({
flexBasis: '100%',
})}
>
<div className="text-warning">
{initHints[0].label}{' '}
{initHints[0].fix ? (
<button type="button" className={'text-warning'}>


@ -657,6 +657,7 @@ function getValueStyles(
width: `${width}px`,
display: 'flex',
alignItems: 'center',
textWrap: 'nowrap',
lineHeight: VALUE_LINE_HEIGHT,
};


@ -1,6 +1,6 @@
import { css, cx } from '@emotion/css';
import { isString } from 'lodash';
import React, { useCallback, useState } from 'react';
import React, { useCallback, useId, useState } from 'react';
import { getTimeZoneInfo, GrafanaTheme2, TimeZone } from '@grafana/data';
import { selectors } from '@grafana/e2e-selectors';
@ -9,8 +9,8 @@ import { useStyles2 } from '../../../themes';
import { t, Trans } from '../../../utils/i18n';
import { Button } from '../../Button';
import { Field } from '../../Forms/Field';
import { RadioButtonGroup } from '../../Forms/RadioButtonGroup/RadioButtonGroup';
import { Select } from '../../Select/Select';
import { Tab, TabContent, TabsBar } from '../../Tabs';
import { TimeZonePicker } from '../TimeZonePicker';
import { TimeZoneDescription } from '../TimeZonePicker/TimeZoneDescription';
import { TimeZoneOffset } from '../TimeZonePicker/TimeZoneOffset';
@ -36,6 +36,10 @@ export const TimePickerFooter = (props: Props) => {
const [isEditing, setEditing] = useState(false);
const [editMode, setEditMode] = useState('tz');
const timeSettingsId = useId();
const timeZoneSettingsId = useId();
const fiscalYearSettingsId = useId();
const onToggleChangeTimeSettings = useCallback(
(event?: React.MouseEvent) => {
if (event) {
@ -78,62 +82,79 @@ export const TimePickerFooter = (props: Props) => {
variant="secondary"
onClick={onToggleChangeTimeSettings}
size="sm"
aria-expanded={isEditing}
aria-controls={timeSettingsId}
icon={isEditing ? 'angle-up' : 'angle-down'}
>
<Trans i18nKey="time-picker.footer.change-settings-button">Change time settings</Trans>
</Button>
</section>
{isEditing ? (
<div className={style.editContainer}>
<div>
<RadioButtonGroup
value={editMode}
options={[
{ label: t('time-picker.footer.time-zone-option', 'Time zone'), value: 'tz' },
{ label: t('time-picker.footer.fiscal-year-option', 'Fiscal year'), value: 'fy' },
]}
onChange={setEditMode}
></RadioButtonGroup>
</div>
{editMode === 'tz' ? (
<section
data-testid={selectors.components.TimeZonePicker.containerV2}
className={cx(style.timeZoneContainer, style.timeSettingContainer)}
>
<TimeZonePicker
includeInternal={true}
onChange={(timeZone) => {
onToggleChangeTimeSettings();
if (isString(timeZone)) {
onChangeTimeZone(timeZone);
}
}}
onBlur={onToggleChangeTimeSettings}
menuShouldPortal={false}
/>
</section>
) : (
<section
data-testid={selectors.components.TimeZonePicker.containerV2}
className={cx(style.timeZoneContainer, style.timeSettingContainer)}
>
<Field
className={style.fiscalYearField}
label={t('time-picker.footer.fiscal-year-start', 'Fiscal year start month')}
<div className={style.editContainer} id={timeSettingsId}>
<TabsBar>
<Tab
label={t('time-picker.footer.time-zone-option', 'Time zone')}
active={editMode === 'tz'}
onChangeTab={() => {
setEditMode('tz');
}}
aria-controls={timeZoneSettingsId}
/>
<Tab
label={t('time-picker.footer.fiscal-year-option', 'Fiscal year')}
active={editMode === 'fy'}
onChangeTab={() => {
setEditMode('fy');
}}
aria-controls={fiscalYearSettingsId}
/>
</TabsBar>
<TabContent>
{editMode === 'tz' ? (
<section
role="tabpanel"
data-testid={selectors.components.TimeZonePicker.containerV2}
id={timeZoneSettingsId}
className={cx(style.timeZoneContainer, style.timeSettingContainer)}
>
<Select
value={fiscalYearStartMonth}
menuShouldPortal={false}
options={monthOptions}
onChange={(value) => {
if (onChangeFiscalYearStartMonth) {
onChangeFiscalYearStartMonth(value.value ?? 0);
<TimeZonePicker
includeInternal={true}
onChange={(timeZone) => {
onToggleChangeTimeSettings();
if (isString(timeZone)) {
onChangeTimeZone(timeZone);
}
}}
onBlur={onToggleChangeTimeSettings}
menuShouldPortal={false}
/>
</Field>
</section>
)}
</section>
) : (
<section
role="tabpanel"
data-testid={selectors.components.TimeZonePicker.containerV2}
id={fiscalYearSettingsId}
className={cx(style.timeZoneContainer, style.timeSettingContainer)}
>
<Field
className={style.fiscalYearField}
label={t('time-picker.footer.fiscal-year-start', 'Fiscal year start month')}
>
<Select
value={fiscalYearStartMonth}
menuShouldPortal={false}
options={monthOptions}
onChange={(value) => {
if (onChangeFiscalYearStartMonth) {
onChangeFiscalYearStartMonth(value.value ?? 0);
}
}}
/>
</Field>
</section>
)}
</TabContent>
</div>
) : null}
</div>
@ -143,7 +164,7 @@ export const TimePickerFooter = (props: Props) => {
const getStyle = (theme: GrafanaTheme2) => ({
container: css({
borderTop: `1px solid ${theme.colors.border.weak}`,
padding: '11px',
padding: theme.spacing(1.5),
display: 'flex',
flexDirection: 'row',
justifyContent: 'space-between',
@ -151,7 +172,8 @@ const getStyle = (theme: GrafanaTheme2) => ({
}),
editContainer: css({
borderTop: `1px solid ${theme.colors.border.weak}`,
padding: '11px',
padding: theme.spacing(1.5),
paddingTop: 0,
justifyContent: 'space-between',
alignItems: 'center',
}),


@ -11,6 +11,7 @@ import { getElementStyles } from './elements';
import { getExtraStyles } from './extra';
import { getFontStyles } from './fonts';
import { getFormElementStyles } from './forms';
import { getJsonFormatterStyles } from './jsonFormatter';
import { getLegacySelectStyles } from './legacySelect';
import { getMarkdownStyles } from './markdownStyles';
import { getPageStyles } from './page';
@ -31,6 +32,7 @@ export function GlobalStyles() {
getExtraStyles(theme),
getFontStyles(theme),
getFormElementStyles(theme),
getJsonFormatterStyles(theme),
getPageStyles(theme),
getCardStyles(theme),
getAgularPanelStyles(theme),


@ -4,7 +4,7 @@ import { GrafanaTheme2 } from '@grafana/data';
export function getCodeStyles(theme: GrafanaTheme2) {
return css({
'code, pre': {
'code, pre, kbd, samp': {
...theme.typography.code,
fontSize: theme.typography.bodySmall.fontSize,
backgroundColor: theme.colors.background.primary,
@ -26,6 +26,7 @@ export function getCodeStyles(theme: GrafanaTheme2) {
wordBreak: 'break-all',
wordWrap: 'break-word',
whiteSpace: 'pre-wrap',
overflow: 'auto',
padding: '10px',
code: {


@ -40,6 +40,10 @@ export function getElementStyles(theme: GrafanaTheme2) {
margin: theme.spacing(0, 0, 2),
},
textarea: {
overflow: 'auto',
},
button: {
letterSpacing: theme.typography.body.letterSpacing,
@ -67,6 +71,82 @@ export function getElementStyles(theme: GrafanaTheme2) {
fontStyle: 'normal',
},
'audio, canvas, progress, video': {
display: 'inline-block',
verticalAlign: 'baseline',
},
// Prevent modern browsers from displaying `audio` without controls.
// Remove excess height in iOS 5 devices.
'audio:not([controls])': {
display: 'none',
height: 0,
},
// Address styling not present in Safari.
'abbr[title]': {
borderBottom: '1px dotted',
},
dfn: {
fontStyle: 'italic',
},
// Prevent `sub` and `sup` affecting `line-height` in all browsers.
'sub, sup': {
fontSize: '75%',
lineHeight: 0,
position: 'relative',
verticalAlign: 'baseline',
},
sup: {
top: '-0.5em',
},
sub: {
bottom: '-0.25em',
},
// 1. Correct color not being inherited.
// Known issue: affects color of disabled elements.
// 2. Correct font properties not being inherited.
// 3. Address margins set differently in Firefox 4+, Safari, and Chrome.
'button, input, optgroup, select, textarea': {
color: 'inherit',
font: 'inherit',
margin: 0,
},
// Don't inherit the `font-weight` (applied by a rule above).
// NOTE: the default cannot safely be changed in Chrome and Safari on OS X.
optgroup: {
fontWeight: 'bold',
},
// 1. Avoid the WebKit bug in Android 4.0.* where (2) destroys native `audio`
// and `video` controls.
// 2. Correct inability to style clickable `input` types in iOS.
// 3. Improve usability and consistency of cursor style between image-type
// `input` and others.
'button, html input[type="button"], input[type="submit"]': {
WebkitAppearance: 'button',
cursor: 'pointer',
},
// Remove inner padding and search cancel button in Safari and Chrome on OS X.
// Safari (but not Chrome) clips the cancel button when the search input has
// padding (and `textfield` appearance).
'input[type="search"]::-webkit-search-cancel-button, input[type="search"]::-webkit-search-decoration': {
WebkitAppearance: 'none',
},
table: {
borderCollapse: 'collapse',
borderSpacing: 0,
},
'td, th': {
padding: 0,
},
// Utility classes
'.muted': {
color: theme.colors.text.secondary,


@ -0,0 +1,125 @@
import { css } from '@emotion/react';
import { GrafanaTheme2 } from '@grafana/data';
export function getJsonFormatterStyles(theme: GrafanaTheme2) {
return css({
'.json-formatter-row': {
fontFamily: 'monospace',
'&, a, a:hover': {
color: theme.colors.text.primary,
textDecoration: 'none',
},
'.json-formatter-row': {
marginLeft: theme.spacing(2),
},
'.json-formatter-children': {
'&.json-formatter-empty': {
opacity: 0.5,
marginLeft: theme.spacing(2),
'&::after': {
display: 'none',
},
'&.json-formatter-object::after': {
content: "'No properties'",
},
'&.json-formatter-array::after': {
content: "'[]'",
},
},
},
'.json-formatter-string': {
color: theme.isDark ? '#23d662' : 'green',
whiteSpace: 'pre-wrap',
wordWrap: 'break-word',
wordBreak: 'break-all',
},
'.json-formatter-number': {
color: theme.isDark ? theme.colors.primary.text : theme.colors.primary.main,
},
'.json-formatter-boolean': {
color: theme.isDark ? theme.colors.primary.text : theme.colors.error.main,
},
'.json-formatter-null': {
color: theme.isDark ? '#eec97d' : '#855a00',
},
'.json-formatter-undefined': {
color: theme.isDark ? 'rgb(239, 143, 190)' : 'rgb(202, 11, 105)',
},
'.json-formatter-function': {
color: theme.isDark ? '#fd48cb' : '#ff20ed',
},
'.json-formatter-url': {
textDecoration: 'underline',
color: theme.isDark ? '#027bff' : theme.colors.primary.main,
cursor: 'pointer',
},
'.json-formatter-bracket': {
color: theme.isDark ? '#9494ff' : theme.colors.primary.main,
},
'.json-formatter-key': {
color: theme.isDark ? '#23a0db' : '#00008b',
cursor: 'pointer',
paddingRight: theme.spacing(0.25),
marginRight: theme.spacing(0.5),
},
'.json-formatter-constructor-name': {
cursor: 'pointer',
},
'.json-formatter-array-comma': {
marginRight: theme.spacing(0.5),
},
'.json-formatter-toggler': {
lineHeight: '16px',
fontSize: theme.typography.size.xs,
verticalAlign: 'middle',
opacity: 0.6,
cursor: 'pointer',
paddingRight: theme.spacing(0.25),
'&::after': {
display: 'inline-block',
transition: 'transform 100ms ease-in',
content: "'►'",
},
},
// Inline preview on hover (optional)
'> a > .json-formatter-preview-text': {
opacity: 0,
transition: 'opacity 0.15s ease-in',
fontStyle: 'italic',
},
'&:hover > a > .json-formatter-preview-text': {
opacity: 0.6,
},
// Open state
'&.json-formatter-open': {
'> .json-formatter-toggler-link .json-formatter-toggler::after': {
transform: 'rotate(90deg)',
},
'> .json-formatter-children::after': {
display: 'inline-block',
},
'> a > .json-formatter-preview-text': {
display: 'none',
},
'&.json-formatter-empty::after': {
display: 'block',
},
},
},
});
}


@ -12,6 +12,7 @@ import (
"google.golang.org/grpc"
"google.golang.org/grpc/credentials/insecure"
"github.com/grafana/grafana/pkg/infra/db"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/services/authz/zanzana"
@ -22,9 +23,16 @@ import (
type ZanzanaClient interface{}
func ProvideZanzana(cfg *setting.Cfg) (ZanzanaClient, error) {
var client *zanzana.Client
// ProvideZanzana is used to register the ZanzanaClient.
// It will also start an embedded ZanzanaServer if mode is set to "embedded".
func ProvideZanzana(cfg *setting.Cfg, db db.DB, features featuremgmt.FeatureToggles) (ZanzanaClient, error) {
if !features.IsEnabledGlobally(featuremgmt.FlagZanzana) {
return zanzana.NoopClient{}, nil
}
logger := log.New("zanzana")
var client *zanzana.Client
switch cfg.Zanzana.Mode {
case setting.ZanzanaModeClient:
conn, err := grpc.NewClient(cfg.Zanzana.Addr, grpc.WithTransportCredentials(insecure.NewCredentials()))
@ -33,10 +41,16 @@ func ProvideZanzana(cfg *setting.Cfg) (ZanzanaClient, error) {
}
client = zanzana.NewClient(openfgav1.NewOpenFGAServiceClient(conn))
case setting.ZanzanaModeEmbedded:
srv, err := zanzana.NewServer(zanzana.NewStore())
store, err := zanzana.NewEmbeddedStore(cfg, db, logger)
if err != nil {
return nil, fmt.Errorf("failed to start zanzana: %w", err)
}
srv, err := zanzana.NewServer(store, logger)
if err != nil {
return nil, fmt.Errorf("failed to start zanzana: %w", err)
}
channel := &inprocgrpc.Channel{}
openfgav1.RegisterOpenFGAServiceServer(channel, srv)
client = zanzana.NewClient(openfgav1.NewOpenFGAServiceClient(channel))
@ -77,7 +91,12 @@ type Zanzana struct {
}
func (z *Zanzana) start(ctx context.Context) error {
srv, err := zanzana.NewServer(zanzana.NewStore())
store, err := zanzana.NewStore(z.cfg, z.logger)
if err != nil {
return fmt.Errorf("failed to initilize zanana store: %w", err)
}
srv, err := zanzana.NewServer(store, z.logger)
if err != nil {
return fmt.Errorf("failed to start zanzana: %w", err)
}


@ -12,3 +12,5 @@ type Client struct {
func NewClient(c openfgav1.OpenFGAServiceClient) *Client {
return &Client{c}
}
type NoopClient struct{}


@ -10,12 +10,10 @@ import (
// zanzanaLogger is a grafana logger wrapper compatible with OpenFGA logger interface
type zanzanaLogger struct {
logger *log.ConcreteLogger
logger log.Logger
}
func newZanzanaLogger() *zanzanaLogger {
logger := log.New("openfga-server")
func newZanzanaLogger(logger log.Logger) *zanzanaLogger {
return &zanzanaLogger{
logger: logger,
}
@ -23,7 +21,8 @@ func newZanzanaLogger() *zanzanaLogger {
// Simple converter for zap logger fields
func zapFieldsToArgs(fields []zap.Field) []any {
args := make([]any, 0)
// We need to pre-allocate space for keys and values
args := make([]any, 0, len(fields)*2)
for _, f := range fields {
args = append(args, f.Key)
if f.Interface != nil {


@ -3,13 +3,15 @@ package zanzana
import (
"github.com/openfga/openfga/pkg/server"
"github.com/openfga/openfga/pkg/storage"
"github.com/grafana/grafana/pkg/infra/log"
)
func NewServer(store storage.OpenFGADatastore) (*server.Server, error) {
func NewServer(store storage.OpenFGADatastore, logger log.Logger) (*server.Server, error) {
// FIXME(kalleep): add support for more options, configure logging, tracing etc
opts := []server.OpenFGAServiceV1Option{
server.WithDatastore(store),
server.WithLogger(newZanzanaLogger()),
server.WithLogger(newZanzanaLogger(logger)),
}
// FIXME(kalleep): Interceptors


@ -1,13 +1,214 @@
package zanzana
import (
"errors"
"fmt"
"strings"
"time"
"github.com/openfga/openfga/assets"
"github.com/openfga/openfga/pkg/storage"
"github.com/openfga/openfga/pkg/storage/memory"
"github.com/openfga/openfga/pkg/storage/mysql"
"github.com/openfga/openfga/pkg/storage/postgres"
"github.com/openfga/openfga/pkg/storage/sqlcommon"
"xorm.io/xorm"
"github.com/grafana/grafana/pkg/infra/db"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/services/sqlstore"
"github.com/grafana/grafana/pkg/services/sqlstore/migrator"
"github.com/grafana/grafana/pkg/setting"
)
// FIXME(kalleep): Add support for postgres, mysql and sqlite data stores.
// Postgres and mysql are already implemented by openFGA so we just need to hook up migrations for them.
// FIXME(kalleep): Add sqlite data store.
// There is no support for sqlite atm but we are working on adding it: https://github.com/openfga/openfga/pull/1615
func NewStore() storage.OpenFGADatastore {
return memory.New()
func NewStore(cfg *setting.Cfg, logger log.Logger) (storage.OpenFGADatastore, error) {
grafanaDBCfg, zanzanaDBCfg, err := parseConfig(cfg, logger)
if err != nil {
return nil, fmt.Errorf("failed to parse database config: %w", err)
}
switch grafanaDBCfg.Type {
case migrator.SQLite:
return memory.New(), nil
case migrator.MySQL:
// For mysql we need to pass parseTime parameter in connection string
connStr := grafanaDBCfg.ConnectionString + "&parseTime=true"
if err := runMigrations(cfg, migrator.MySQL, connStr, assets.MySQLMigrationDir); err != nil {
return nil, fmt.Errorf("failed to run migrations: %w", err)
}
return mysql.New(connStr, zanzanaDBCfg)
case migrator.Postgres:
if err := runMigrations(cfg, migrator.Postgres, grafanaDBCfg.ConnectionString, assets.PostgresMigrationDir); err != nil {
return nil, fmt.Errorf("failed to run migrations: %w", err)
}
return postgres.New(grafanaDBCfg.ConnectionString, zanzanaDBCfg)
}
// Should never happen
return nil, fmt.Errorf("unsupported database engine: %s", grafanaDBCfg.Type)
}
func NewEmbeddedStore(cfg *setting.Cfg, db db.DB, logger log.Logger) (storage.OpenFGADatastore, error) {
grafanaDBCfg, zanzanaDBCfg, err := parseConfig(cfg, logger)
if err != nil {
return nil, fmt.Errorf("failed to parse database config: %w", err)
}
m := migrator.NewMigrator(db.GetEngine(), cfg)
switch grafanaDBCfg.Type {
case migrator.SQLite:
// FIXME(kalleep): At the moment sqlite is not a supported data store.
// So we just return in memory store for now.
return memory.New(), nil
case migrator.MySQL:
if err := runMigrationsWithMigrator(m, cfg, assets.MySQLMigrationDir); err != nil {
return nil, fmt.Errorf("failed to run migrations: %w", err)
}
// For mysql we need to pass parseTime parameter in connection string
return mysql.New(grafanaDBCfg.ConnectionString+"&parseTime=true", zanzanaDBCfg)
case migrator.Postgres:
if err := runMigrationsWithMigrator(m, cfg, assets.PostgresMigrationDir); err != nil {
return nil, fmt.Errorf("failed to run migrations: %w", err)
}
return postgres.New(grafanaDBCfg.ConnectionString, zanzanaDBCfg)
}
// Should never happen
return nil, fmt.Errorf("unsupported database engine: %s", db.GetDialect().DriverName())
}
func parseConfig(cfg *setting.Cfg, logger log.Logger) (*sqlstore.DatabaseConfig, *sqlcommon.Config, error) {
sec := cfg.Raw.Section("database")
grafanaDBCfg, err := sqlstore.NewDatabaseConfig(cfg, nil)
if err != nil {
return nil, nil, err
}
zanzanaDBCfg := &sqlcommon.Config{
Logger: newZanzanaLogger(logger),
// MaxTuplesPerWriteField: 0,
// MaxTypesPerModelField: 0,
MaxOpenConns: grafanaDBCfg.MaxOpenConn,
MaxIdleConns: grafanaDBCfg.MaxIdleConn,
ConnMaxLifetime: time.Duration(grafanaDBCfg.ConnMaxLifetime) * time.Second,
ExportMetrics: sec.Key("instrument_queries").MustBool(false),
}
return grafanaDBCfg, zanzanaDBCfg, nil
}
func runMigrations(cfg *setting.Cfg, typ, connStr, path string) error {
engine, err := xorm.NewEngine(typ, connStr)
if err != nil {
return fmt.Errorf("failed to parse database config: %w", err)
}
m := migrator.NewMigrator(engine, cfg)
m.AddCreateMigration()
return runMigrationsWithMigrator(m, cfg, path)
}
func runMigrationsWithMigrator(m *migrator.Migrator, cfg *setting.Cfg, path string) error {
migrations, err := getMigrations(path)
if err != nil {
return err
}
for _, mig := range migrations {
m.AddMigration(mig.name, mig.migration)
}
sec := cfg.Raw.Section("database")
return m.Start(
sec.Key("migration_locking").MustBool(true),
sec.Key("locking_attempt_timeout_sec").MustInt(),
)
}
type migration struct {
name string
migration migrator.Migration
}
func getMigrations(path string) ([]migration, error) {
entries, err := assets.EmbedMigrations.ReadDir(path)
if err != nil {
return nil, fmt.Errorf("failed to read migration dir: %w", err)
}
// parseStatements extracts statements from a sql file so we can execute
// them as separate migrations. OpenFGA uses Goose as its migration engine,
// and Goose uses a single sql file for both up and down migrations.
// Grafana only supports up migrations, so we strip out the down migration
// and parse each individual statement.
parseStatements := func(data []byte) ([]string, error) {
scripts := strings.Split(strings.TrimPrefix(string(data), "-- +goose Up"), "-- +goose Down")
if len(scripts) != 2 {
return nil, errors.New("malformed migration file")
}
// We assume that up migrations are always before down migrations
parts := strings.SplitAfter(scripts[0], ";")
stmts := make([]string, 0, len(parts))
for _, p := range parts {
p = strings.TrimSpace(p)
if p != "" {
stmts = append(stmts, p)
}
}
return stmts, nil
}
formatName := func(name string) string {
// Each migration file starts with XXX where X is a number.
// We remove that part and prefix each migration with "zanzana".
return strings.TrimSuffix("zanzana"+name[3:], ".sql")
}
migrations := make([]migration, 0, len(entries))
for _, e := range entries {
data, err := assets.EmbedMigrations.ReadFile(path + "/" + e.Name())
if err != nil {
return nil, fmt.Errorf("failed to read migration file: %w", err)
}
stmts, err := parseStatements(data)
if err != nil {
return nil, fmt.Errorf("failed to parse migration: %w", err)
}
migrations = append(migrations, migration{
name: formatName(e.Name()),
migration: &rawMigration{stmts: stmts},
})
}
return migrations, nil
}
var _ migrator.CodeMigration = (*rawMigration)(nil)
type rawMigration struct {
stmts []string
migrator.MigrationBase
}
func (m *rawMigration) Exec(sess *xorm.Session, migrator *migrator.Migrator) error {
for _, stmt := range m.stmts {
if _, err := sess.Exec(stmt); err != nil {
return fmt.Errorf("failed to run migration: %w", err)
}
}
return nil
}
func (m *rawMigration) SQL(dialect migrator.Dialect) string {
return strings.Join(m.stmts, "\n")
}


@ -40,19 +40,28 @@ func RegisterApi(
// registerEndpoints Registers Endpoints on Grafana Router
func (cma *CloudMigrationAPI) registerEndpoints() {
cma.routeRegister.Group("/api/cloudmigration", func(cloudMigrationRoute routing.RouteRegister) {
// destination instance endpoints for token management
cloudMigrationRoute.Get("/token", routing.Wrap(cma.GetToken))
cloudMigrationRoute.Post("/token", routing.Wrap(cma.CreateToken))
cloudMigrationRoute.Delete("/token/:uid", routing.Wrap(cma.DeleteToken))
// on-prem instance endpoints for managing GMS sessions
cloudMigrationRoute.Get("/migration", routing.Wrap(cma.GetSessionList))
cloudMigrationRoute.Post("/migration", routing.Wrap(cma.CreateSession))
cloudMigrationRoute.Get("/migration/:uid", routing.Wrap(cma.GetSession))
cloudMigrationRoute.Delete("/migration/:uid", routing.Wrap(cma.DeleteSession))
// TODO new APIs for snapshot management to replace these
// sync approach to data migration
cloudMigrationRoute.Post("/migration/:uid/run", routing.Wrap(cma.RunMigration))
cloudMigrationRoute.Get("/migration/:uid/run", routing.Wrap(cma.GetMigrationRunList))
cloudMigrationRoute.Get("/migration/run/:runUID", routing.Wrap(cma.GetMigrationRun))
cloudMigrationRoute.Get("/token", routing.Wrap(cma.GetToken))
cloudMigrationRoute.Post("/token", routing.Wrap(cma.CreateToken))
cloudMigrationRoute.Delete("/token/:uid", routing.Wrap(cma.DeleteToken))
// async approach to data migration using snapshots
cloudMigrationRoute.Post("/migration/:uid/snapshot", routing.Wrap(cma.CreateSnapshot))
cloudMigrationRoute.Get("/migration/:uid/snapshot/:snapshotUid", routing.Wrap(cma.GetSnapshot))
cloudMigrationRoute.Get("/migration/:uid/snapshots", routing.Wrap(cma.GetSnapshotList))
cloudMigrationRoute.Post("/migration/:uid/snapshot/:snapshotUid/upload", routing.Wrap(cma.UploadSnapshot))
cloudMigrationRoute.Post("/migration/:uid/snapshot/:snapshotUid/cancel", routing.Wrap(cma.CancelSnapshot))
}, middleware.ReqOrgAdmin)
}
@ -121,6 +130,7 @@ func (cma *CloudMigrationAPI) CreateToken(c *contextmodel.ReqContext) response.R
//
// Responses:
// 204: cloudMigrationDeleteTokenResponse
// 400: badRequestError
// 401: unauthorisedError
// 403: forbiddenError
// 500: internalServerError
@ -170,6 +180,7 @@ func (cma *CloudMigrationAPI) GetSessionList(c *contextmodel.ReqContext) respons
//
// Responses:
// 200: cloudMigrationSessionResponse
// 400: badRequestError
// 401: unauthorisedError
// 403: forbiddenError
// 500: internalServerError
@ -201,6 +212,7 @@ func (cma *CloudMigrationAPI) GetSession(c *contextmodel.ReqContext) response.Re
//
// Responses:
// 200: cloudMigrationSessionResponse
// 400: badRequestError
// 401: unauthorisedError
// 403: forbiddenError
// 500: internalServerError
@ -235,6 +247,7 @@ func (cma *CloudMigrationAPI) CreateSession(c *contextmodel.ReqContext) response
//
// Responses:
// 200: cloudMigrationRunResponse
// 400: badRequestError
// 401: unauthorisedError
// 403: forbiddenError
// 500: internalServerError
@ -261,6 +274,7 @@ func (cma *CloudMigrationAPI) RunMigration(c *contextmodel.ReqContext) response.
//
// Responses:
// 200: cloudMigrationRunResponse
// 400: badRequestError
// 401: unauthorisedError
// 403: forbiddenError
// 500: internalServerError
@ -293,6 +307,7 @@ func (cma *CloudMigrationAPI) GetMigrationRun(c *contextmodel.ReqContext) respon
//
// Responses:
// 200: cloudMigrationRunListResponse
// 400: badRequestError
// 401: unauthorisedError
// 403: forbiddenError
// 500: internalServerError
@ -314,7 +329,7 @@ func (cma *CloudMigrationAPI) GetMigrationRunList(c *contextmodel.ReqContext) re
for i := 0; i < len(runList.Runs); i++ {
runs[i] = MigrateDataResponseListDTO{runList.Runs[i].RunUID}
}
return response.JSON(http.StatusOK, SnapshotListDTO{
return response.JSON(http.StatusOK, CloudMigrationRunListDTO{
Runs: runs,
})
}
@ -326,6 +341,7 @@ func (cma *CloudMigrationAPI) GetMigrationRunList(c *contextmodel.ReqContext) re
// Responses:
// 200
// 400: badRequestError
// 401: unauthorisedError
// 403: forbiddenError
// 500: internalServerError
func (cma *CloudMigrationAPI) DeleteSession(c *contextmodel.ReqContext) response.Response {
@ -343,3 +359,194 @@ func (cma *CloudMigrationAPI) DeleteSession(c *contextmodel.ReqContext) response
}
return response.Empty(http.StatusOK)
}
// swagger:route POST /cloudmigration/migration/{uid}/snapshot migrations createSnapshot
//
// Trigger the creation of an instance snapshot associated with the provided session.
// If the snapshot initialization is successful, the snapshot uid is returned.
//
// Responses:
// 200: createSnapshotResponse
// 400: badRequestError
// 401: unauthorisedError
// 403: forbiddenError
// 500: internalServerError
func (cma *CloudMigrationAPI) CreateSnapshot(c *contextmodel.ReqContext) response.Response {
ctx, span := cma.tracer.Start(c.Req.Context(), "MigrationAPI.CreateSnapshot")
defer span.End()
uid := web.Params(c.Req)[":uid"]
if err := util.ValidateUID(uid); err != nil {
return response.ErrOrFallback(http.StatusBadRequest, "invalid session uid", err)
}
ss, err := cma.cloudMigrationService.CreateSnapshot(ctx, uid)
if err != nil {
return response.ErrOrFallback(http.StatusInternalServerError, "error creating snapshot", err)
}
return response.JSON(http.StatusOK, CreateSnapshotResponseDTO{
SnapshotUID: ss.UID,
})
}
// swagger:route GET /cloudmigration/migration/{uid}/snapshot/{snapshotUid} migrations getSnapshot
//
// Get metadata about a snapshot, including where it is in its processing and final results.
//
// Responses:
// 200: getSnapshotResponse
// 400: badRequestError
// 401: unauthorisedError
// 403: forbiddenError
// 500: internalServerError
func (cma *CloudMigrationAPI) GetSnapshot(c *contextmodel.ReqContext) response.Response {
ctx, span := cma.tracer.Start(c.Req.Context(), "MigrationAPI.GetSnapshot")
defer span.End()
sessUid, snapshotUid := web.Params(c.Req)[":uid"], web.Params(c.Req)[":snapshotUid"]
if err := util.ValidateUID(sessUid); err != nil {
return response.ErrOrFallback(http.StatusBadRequest, "invalid session uid", err)
}
if err := util.ValidateUID(snapshotUid); err != nil {
return response.ErrOrFallback(http.StatusBadRequest, "invalid snapshot uid", err)
}
snapshot, err := cma.cloudMigrationService.GetSnapshot(ctx, sessUid, snapshotUid)
if err != nil {
return response.ErrOrFallback(http.StatusInternalServerError, "error retrieving snapshot", err)
}
result, err := snapshot.GetSnapshotResult()
if err != nil {
return response.ErrOrFallback(http.StatusInternalServerError, "error snapshot reading snapshot results", err)
}
dtoResults := make([]MigrateDataResponseItemDTO, len(result))
for i := 0; i < len(result); i++ {
dtoResults[i] = MigrateDataResponseItemDTO{
Type: MigrateDataType(result[i].Type),
RefID: result[i].RefID,
Status: ItemStatus(result[i].Status),
Error: result[i].Error,
}
}
respDto := GetSnapshotResponseDTO{
SnapshotDTO: SnapshotDTO{
SnapshotUID: snapshot.UID,
Status: fromSnapshotStatus(snapshot.Status),
SessionUID: sessUid,
Created: snapshot.Created,
Finished: snapshot.Finished,
},
Results: dtoResults,
}
return response.JSON(http.StatusOK, respDto)
}
// swagger:route GET /cloudmigration/migration/{uid}/snapshots migrations getSnapshotList
//
// Get a list of snapshots for a session.
//
// Responses:
// 200: snapshotListResponse
// 400: badRequestError
// 401: unauthorisedError
// 403: forbiddenError
// 500: internalServerError
func (cma *CloudMigrationAPI) GetSnapshotList(c *contextmodel.ReqContext) response.Response {
ctx, span := cma.tracer.Start(c.Req.Context(), "MigrationAPI.GetShapshotList")
defer span.End()
uid := web.Params(c.Req)[":uid"]
if err := util.ValidateUID(uid); err != nil {
return response.ErrOrFallback(http.StatusBadRequest, "invalid session uid", err)
}
q := cloudmigration.ListSnapshotsQuery{
SessionUID: uid,
Limit: c.QueryInt("limit"),
Offset: c.QueryInt("offset"),
}
if q.Limit == 0 {
q.Limit = 100
}
snapshotList, err := cma.cloudMigrationService.GetSnapshotList(ctx, q)
if err != nil {
return response.ErrOrFallback(http.StatusInternalServerError, "error retrieving snapshot list", err)
}
dtos := make([]SnapshotDTO, len(snapshotList))
for i := 0; i < len(snapshotList); i++ {
dtos[i] = SnapshotDTO{
SnapshotUID: snapshotList[i].UID,
Status: fromSnapshotStatus(snapshotList[i].Status),
SessionUID: uid,
Created: snapshotList[i].Created,
Finished: snapshotList[i].Finished,
}
}
return response.JSON(http.StatusOK, SnapshotListResponseDTO{
Snapshots: dtos,
})
}
// swagger:route POST /cloudmigration/migration/{uid}/snapshot/{snapshotUid}/upload migrations uploadSnapshot
//
// Upload a snapshot to the Grafana Migration Service for processing.
//
// Responses:
// 200:
// 400: badRequestError
// 401: unauthorisedError
// 403: forbiddenError
// 500: internalServerError
func (cma *CloudMigrationAPI) UploadSnapshot(c *contextmodel.ReqContext) response.Response {
ctx, span := cma.tracer.Start(c.Req.Context(), "MigrationAPI.UploadSnapshot")
defer span.End()
sessUid, snapshotUid := web.Params(c.Req)[":uid"], web.Params(c.Req)[":snapshotUid"]
if err := util.ValidateUID(sessUid); err != nil {
return response.ErrOrFallback(http.StatusBadRequest, "invalid session uid", err)
}
if err := util.ValidateUID(snapshotUid); err != nil {
return response.ErrOrFallback(http.StatusBadRequest, "invalid snapshot uid", err)
}
if err := cma.cloudMigrationService.UploadSnapshot(ctx, sessUid, snapshotUid); err != nil {
return response.ErrOrFallback(http.StatusInternalServerError, "error uploading snapshot", err)
}
return response.JSON(http.StatusOK, nil)
}
// swagger:route POST /cloudmigration/migration/{uid}/snapshot/{snapshotUid}/cancel migrations cancelSnapshot
//
// Cancel a snapshot, wherever it is in its processing chain.
// TODO: Implement
//
// Responses:
// 200:
// 400: badRequestError
// 401: unauthorisedError
// 403: forbiddenError
// 500: internalServerError
func (cma *CloudMigrationAPI) CancelSnapshot(c *contextmodel.ReqContext) response.Response {
_, span := cma.tracer.Start(c.Req.Context(), "MigrationAPI.CancelSnapshot")
defer span.End()
sessUid, snapshotUid := web.Params(c.Req)[":uid"], web.Params(c.Req)[":snapshotUid"]
if err := util.ValidateUID(sessUid); err != nil {
return response.ErrOrFallback(http.StatusBadRequest, "invalid session uid", err)
}
if err := util.ValidateUID(snapshotUid); err != nil {
return response.ErrOrFallback(http.StatusBadRequest, "invalid snapshot uid", err)
}
// Implement
return response.JSON(http.StatusOK, nil)
}


@ -0,0 +1,21 @@
[sample token] // NOT A REAL TOKEN
eyJUb2tlbiI6ImNvbXBsZXRlbHlfZmFrZV90b2tlbl9jZG9peTFhYzdwdXlwZCIsIkluc3RhbmNlIjp7IlN0YWNrSUQiOjEyMzQ1LCJTbHVnIjoic3R1Ymluc3RhbmNlIiwiUmVnaW9uU2x1ZyI6ImZha2UtcmVnaW9uIiwiQ2x1c3RlclNsdWciOiJmYWtlLWNsdXNlciJ9fQ==
[create session]
curl -X POST -H "Content-Type: application/json" \
http://admin:admin@localhost:3000/api/cloudmigration/migration \
-d '{"AuthToken":"eyJUb2tlbiI6ImNvbXBsZXRlbHlfZmFrZV90b2tlbl9jZG9peTFhYzdwdXlwZCIsIkluc3RhbmNlIjp7IlN0YWNrSUQiOjEyMzQ1LCJTbHVnIjoic3R1Ymluc3RhbmNlIiwiUmVnaW9uU2x1ZyI6ImZha2UtcmVnaW9uIiwiQ2x1c3RlclNsdWciOiJmYWtlLWNsdXNlciJ9fQ=="}'
[create snapshot]
curl -X POST -H "Content-Type: application/json" \
http://admin:admin@localhost:3000/api/cloudmigration/migration/{sessionUid}/snapshot
[get snapshot list]
curl -X GET "http://admin:admin@localhost:3000/api/cloudmigration/migration/{sessionUid}/snapshots?limit=100&offset=0"
[get snapshot]
curl -X GET http://admin:admin@localhost:3000/api/cloudmigration/migration/{sessionUid}/snapshot/{snapshotUid}
[upload snapshot]
curl -X POST -H "Content-Type: application/json" \
http://admin:admin@localhost:3000/api/cloudmigration/migration/{sessionUid}/snapshot/{snapshotUid}/upload


@ -38,7 +38,6 @@ type CreateAccessTokenResponseDTO struct {
// swagger:parameters deleteCloudMigrationToken
type DeleteCloudMigrationToken struct {
// UID of a cloud migration token
//
// in: path
UID string `json:"uid"`
}
@ -67,7 +66,6 @@ type CloudMigrationSessionListResponseDTO struct {
// swagger:parameters getSession
type GetCloudMigrationSessionRequest struct {
// UID of a migration session
//
// in: path
UID string `json:"uid"`
}
@ -92,7 +90,6 @@ type CloudMigrationSessionRequestDTO struct {
// swagger:parameters runCloudMigration
type RunCloudMigrationRequest struct {
// UID of a migration
//
// in: path
UID string `json:"uid"`
}
@ -138,7 +135,6 @@ const (
// swagger:parameters getCloudMigrationRun
type GetMigrationRunParams struct {
// RunUID of a migration run
//
// in: path
RunUID string `json:"runUID"`
}
@ -146,7 +142,6 @@ type GetMigrationRunParams struct {
// swagger:parameters getCloudMigrationRunList
type GetCloudMigrationRunList struct {
// UID of a migration
//
// in: path
UID string `json:"uid"`
}
@ -154,10 +149,10 @@ type GetCloudMigrationRunList struct {
// swagger:response cloudMigrationRunListResponse
type CloudMigrationRunListResponse struct {
// in: body
Body SnapshotListDTO
Body CloudMigrationRunListDTO
}
type SnapshotListDTO struct {
type CloudMigrationRunListDTO struct {
Runs []MigrateDataResponseListDTO `json:"runs"`
}
@ -168,7 +163,6 @@ type MigrateDataResponseListDTO struct {
// swagger:parameters deleteSession
type DeleteMigrationSessionRequest struct {
// UID of a migration session
//
// in: path
UID string `json:"uid"`
}
@ -207,3 +201,138 @@ func convertMigrateDataResponseToDTO(r cloudmigration.MigrateDataResponse) Migra
Items: items,
}
}
// Base snapshot without results
type SnapshotDTO struct {
SnapshotUID string `json:"uid"`
Status SnapshotStatus `json:"status"`
SessionUID string `json:"sessionUid"`
Created time.Time `json:"created"`
Finished time.Time `json:"finished"`
}
// swagger:enum SnapshotStatus
type SnapshotStatus string
const (
SnapshotStatusInitializing SnapshotStatus = "INITIALIZING"
SnapshotStatusCreating SnapshotStatus = "CREATING"
SnapshotStatusPendingUpload SnapshotStatus = "PENDING_UPLOAD"
SnapshotStatusUploading SnapshotStatus = "UPLOADING"
SnapshotStatusPendingProcessing SnapshotStatus = "PENDING_PROCESSING"
SnapshotStatusProcessing SnapshotStatus = "PROCESSING"
SnapshotStatusFinished SnapshotStatus = "FINISHED"
SnapshotStatusError SnapshotStatus = "ERROR"
SnapshotStatusUnknown SnapshotStatus = "UNKNOWN"
)
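// The statuses above describe the snapshot lifecycle as implemented by the service
// code elsewhere in this commit: a snapshot is INITIALIZING while GMS is contacted,
// CREATING while resources are written to the local directory, and PENDING_UPLOAD once
// the build finishes. An upload moves it through UPLOADING to PENDING_PROCESSING, and
// GMS-side progress is then surfaced as PROCESSING and finally FINISHED (or ERROR).
// UNKNOWN is the fallback for any status the conversion below does not recognize.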
func fromSnapshotStatus(status cloudmigration.SnapshotStatus) SnapshotStatus {
switch status {
case cloudmigration.SnapshotStatusInitializing:
return SnapshotStatusInitializing
case cloudmigration.SnapshotStatusCreating:
return SnapshotStatusCreating
case cloudmigration.SnapshotStatusPendingUpload:
return SnapshotStatusPendingUpload
case cloudmigration.SnapshotStatusUploading:
return SnapshotStatusUploading
case cloudmigration.SnapshotStatusPendingProcessing:
return SnapshotStatusPendingProcessing
case cloudmigration.SnapshotStatusProcessing:
return SnapshotStatusProcessing
case cloudmigration.SnapshotStatusFinished:
return SnapshotStatusFinished
case cloudmigration.SnapshotStatusError:
return SnapshotStatusError
default:
return SnapshotStatusUnknown
}
}
// swagger:parameters createSnapshot
type CreateSnapshotRequest struct {
// UID of a session
// in: path
UID string `json:"uid"`
}
// swagger:response createSnapshotResponse
type CreateSnapshotResponse struct {
// in: body
Body CreateSnapshotResponseDTO
}
type CreateSnapshotResponseDTO struct {
SnapshotUID string `json:"uid"`
}
// swagger:parameters getSnapshot
type GetSnapshotParams struct {
// Session UID of a session
// in: path
UID string `json:"uid"`
// UID of a snapshot
// in: path
SnapshotUID string `json:"snapshotUid"`
}
// swagger:response getSnapshotResponse
type GetSnapshotResponse struct {
// in: body
Body GetSnapshotResponseDTO
}
type GetSnapshotResponseDTO struct {
SnapshotDTO
Results []MigrateDataResponseItemDTO `json:"results"`
}
// swagger:parameters getShapshotList
type GetSnapshotListParams struct {
// Offset is used for pagination with limit
// in:query
// required:false
// default: 0
Offset int `json:"offset"`
// Max limit for results returned.
// in:query
// required:false
// default: 100
Limit int `json:"limit"`
// Session UID of a session
// in: path
UID string `json:"uid"`
}
// swagger:response snapshotListResponse
type SnapshotListResponse struct {
// in: body
Body SnapshotListResponseDTO
}
type SnapshotListResponseDTO struct {
Snapshots []SnapshotDTO `json:"snapshots"`
}
// swagger:parameters uploadSnapshot
type UploadSnapshotParams struct {
// Session UID of a session
// in: path
UID string `json:"uid"`
// UID of a snapshot
// in: path
SnapshotUID string `json:"snapshotUid"`
}
// swagger:parameters cancelSnapshot
type CancelSnapshotParams struct {
// Session UID of a session
// in: path
UID string `json:"uid"`
// UID of a snapshot
// in: path
SnapshotUID string `json:"snapshotUid"`
}

View File

@ -22,5 +22,10 @@ type Service interface {
RunMigration(ctx context.Context, migUID string) (*MigrateDataResponse, error)
GetMigrationStatus(ctx context.Context, runUID string) (*CloudMigrationSnapshot, error)
GetMigrationRunList(ctx context.Context, migUID string) (*SnapshotList, error)
GetMigrationRunList(ctx context.Context, migUID string) (*CloudMigrationRunList, error)
CreateSnapshot(ctx context.Context, sessionUid string) (*CloudMigrationSnapshot, error)
GetSnapshot(ctx context.Context, sessionUid string, snapshotUid string) (*CloudMigrationSnapshot, error)
GetSnapshotList(ctx context.Context, query ListSnapshotsQuery) ([]CloudMigrationSnapshot, error)
UploadSnapshot(ctx context.Context, sessionUid string, snapshotUid string) error
}

View File

@ -7,6 +7,9 @@ import (
"errors"
"fmt"
"net/http"
"os"
"path/filepath"
"sync"
"time"
"github.com/grafana/grafana/pkg/api/response"
@ -17,7 +20,6 @@ import (
"github.com/grafana/grafana/pkg/services/cloudmigration"
"github.com/grafana/grafana/pkg/services/cloudmigration/api"
"github.com/grafana/grafana/pkg/services/cloudmigration/gmsclient"
"github.com/grafana/grafana/pkg/services/contexthandler"
"github.com/grafana/grafana/pkg/services/dashboards"
"github.com/grafana/grafana/pkg/services/datasources"
"github.com/grafana/grafana/pkg/services/featuremgmt"
@ -25,6 +27,7 @@ import (
"github.com/grafana/grafana/pkg/services/gcom"
"github.com/grafana/grafana/pkg/services/secrets"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/util"
"github.com/prometheus/client_golang/prometheus"
"go.opentelemetry.io/otel/attribute"
"go.opentelemetry.io/otel/trace"
@ -37,6 +40,9 @@ type Service struct {
log *log.ConcreteLogger
cfg *setting.Cfg
buildSnapshotMutex sync.Mutex
buildSnapshotError bool
features featuremgmt.FeatureToggles
gmsClient gmsclient.Client
@ -398,7 +404,6 @@ func (s *Service) RunMigration(ctx context.Context, uid string) (*cloudmigration
return nil, fmt.Errorf("migrate data error: %w", err)
}
// TODO update cloud migration run schema to treat the result as a first-class citizen
respData, err := json.Marshal(resp)
if err != nil {
s.log.Error("error marshalling migration response data: %w", err)
@ -419,135 +424,6 @@ func (s *Service) RunMigration(ctx context.Context, uid string) (*cloudmigration
return resp, nil
}
func (s *Service) getMigrationDataJSON(ctx context.Context) (*cloudmigration.MigrateDataRequest, error) {
// Data sources
dataSources, err := s.getDataSources(ctx)
if err != nil {
s.log.Error("Failed to get datasources", "err", err)
return nil, err
}
// Dashboards
dashboards, err := s.getDashboards(ctx)
if err != nil {
s.log.Error("Failed to get dashboards", "err", err)
return nil, err
}
// Folders
folders, err := s.getFolders(ctx)
if err != nil {
s.log.Error("Failed to get folders", "err", err)
return nil, err
}
migrationDataSlice := make(
[]cloudmigration.MigrateDataRequestItem, 0,
len(dataSources)+len(dashboards)+len(folders),
)
for _, ds := range dataSources {
migrationDataSlice = append(migrationDataSlice, cloudmigration.MigrateDataRequestItem{
Type: cloudmigration.DatasourceDataType,
RefID: ds.UID,
Name: ds.Name,
Data: ds,
})
}
for _, dashboard := range dashboards {
dashboard.Data.Del("id")
migrationDataSlice = append(migrationDataSlice, cloudmigration.MigrateDataRequestItem{
Type: cloudmigration.DashboardDataType,
RefID: dashboard.UID,
Name: dashboard.Title,
Data: map[string]any{"dashboard": dashboard.Data},
})
}
for _, f := range folders {
migrationDataSlice = append(migrationDataSlice, cloudmigration.MigrateDataRequestItem{
Type: cloudmigration.FolderDataType,
RefID: f.UID,
Name: f.Title,
Data: f,
})
}
migrationData := &cloudmigration.MigrateDataRequest{
Items: migrationDataSlice,
}
return migrationData, nil
}
func (s *Service) getDataSources(ctx context.Context) ([]datasources.AddDataSourceCommand, error) {
dataSources, err := s.dsService.GetAllDataSources(ctx, &datasources.GetAllDataSourcesQuery{})
if err != nil {
s.log.Error("Failed to get all datasources", "err", err)
return nil, err
}
result := []datasources.AddDataSourceCommand{}
for _, dataSource := range dataSources {
// Decrypt secure json to send raw credentials
decryptedData, err := s.secretsService.DecryptJsonData(ctx, dataSource.SecureJsonData)
if err != nil {
s.log.Error("Failed to decrypt secure json data", "err", err)
return nil, err
}
dataSourceCmd := datasources.AddDataSourceCommand{
OrgID: dataSource.OrgID,
Name: dataSource.Name,
Type: dataSource.Type,
Access: dataSource.Access,
URL: dataSource.URL,
User: dataSource.User,
Database: dataSource.Database,
BasicAuth: dataSource.BasicAuth,
BasicAuthUser: dataSource.BasicAuthUser,
WithCredentials: dataSource.WithCredentials,
IsDefault: dataSource.IsDefault,
JsonData: dataSource.JsonData,
SecureJsonData: decryptedData,
ReadOnly: dataSource.ReadOnly,
UID: dataSource.UID,
}
result = append(result, dataSourceCmd)
}
return result, err
}
func (s *Service) getFolders(ctx context.Context) ([]folder.Folder, error) {
reqCtx := contexthandler.FromContext(ctx)
folders, err := s.folderService.GetFolders(ctx, folder.GetFoldersQuery{
SignedInUser: reqCtx.SignedInUser,
})
if err != nil {
return nil, err
}
result := make([]folder.Folder, len(folders))
for i, folder := range folders {
result[i] = *folder
}
return result, nil
}
func (s *Service) getDashboards(ctx context.Context) ([]dashboards.Dashboard, error) {
dashs, err := s.dashboardService.GetAllDashboards(ctx)
if err != nil {
return nil, err
}
result := make([]dashboards.Dashboard, len(dashs))
for i, dashboard := range dashs {
result[i] = *dashboard
}
return result, nil
}
func (s *Service) createMigrationRun(ctx context.Context, cmr cloudmigration.CloudMigrationSnapshot) (string, error) {
uid, err := s.store.CreateMigrationRun(ctx, cmr)
if err != nil {
@ -565,13 +441,13 @@ func (s *Service) GetMigrationStatus(ctx context.Context, runUID string) (*cloud
return cmr, nil
}
func (s *Service) GetMigrationRunList(ctx context.Context, migUID string) (*cloudmigration.SnapshotList, error) {
func (s *Service) GetMigrationRunList(ctx context.Context, migUID string) (*cloudmigration.CloudMigrationRunList, error) {
runs, err := s.store.GetMigrationStatusList(ctx, migUID)
if err != nil {
return nil, fmt.Errorf("retrieving migration statuses from db: %w", err)
}
runList := &cloudmigration.SnapshotList{Runs: []cloudmigration.MigrateDataResponseList{}}
runList := &cloudmigration.CloudMigrationRunList{Runs: []cloudmigration.MigrateDataResponseList{}}
for _, s := range runs {
runList.Runs = append(runList.Runs, cloudmigration.MigrateDataResponseList{
RunUID: s.UID,
@ -589,6 +465,123 @@ func (s *Service) DeleteSession(ctx context.Context, uid string) (*cloudmigratio
return c, nil
}
func (s *Service) CreateSnapshot(ctx context.Context, sessionUid string) (*cloudmigration.CloudMigrationSnapshot, error) {
ctx, span := s.tracer.Start(ctx, "CloudMigrationService.CreateSnapshot")
defer span.End()
// fetch session for the gms auth token
session, err := s.store.GetMigrationSessionByUID(ctx, sessionUid)
if err != nil {
return nil, fmt.Errorf("fetching migration session for uid %s: %w", sessionUid, err)
}
// query gms to establish new snapshot
initResp, err := s.gmsClient.InitializeSnapshot(ctx, *session)
if err != nil {
return nil, fmt.Errorf("initializing snapshot with GMS for session %s: %w", sessionUid, err)
}
// create new directory for snapshot writing
snapshotUid := util.GenerateShortUID()
dir := filepath.Join("cloudmigration.snapshots", fmt.Sprintf("snapshot-%s-%s", snapshotUid, initResp.GMSSnapshotUID))
err = os.MkdirAll(dir, 0750)
if err != nil {
return nil, fmt.Errorf("creating snapshot directory: %w", err)
}
// save snapshot to the db
snapshot := cloudmigration.CloudMigrationSnapshot{
UID: snapshotUid,
SessionUID: sessionUid,
Status: cloudmigration.SnapshotStatusInitializing,
EncryptionKey: initResp.EncryptionKey,
UploadURL: initResp.UploadURL,
GMSSnapshotUID: initResp.GMSSnapshotUID,
LocalDir: dir,
}
uid, err := s.store.CreateSnapshot(ctx, snapshot)
if err != nil {
return nil, fmt.Errorf("saving snapshot: %w", err)
}
snapshot.UID = uid
// start building the snapshot asynchronously while we return a success response to the client
go s.buildSnapshot(context.Background(), snapshot)
return &snapshot, nil
}
// GetSnapshot returns the on-prem version of a snapshot, supplemented with processing status from GMS
func (s *Service) GetSnapshot(ctx context.Context, sessionUid string, snapshotUid string) (*cloudmigration.CloudMigrationSnapshot, error) {
ctx, span := s.tracer.Start(ctx, "CloudMigrationService.GetSnapshot")
defer span.End()
snapshot, err := s.store.GetSnapshotByUID(ctx, snapshotUid)
if err != nil {
return nil, fmt.Errorf("fetching snapshot for uid %s: %w", snapshotUid, err)
}
session, err := s.store.GetMigrationSessionByUID(ctx, sessionUid)
if err != nil {
return nil, fmt.Errorf("fetching session for uid %s: %w", sessionUid, err)
}
if snapshot.ShouldQueryGMS() {
// ask GMS for status if it's in the cloud
snapshotMeta, err := s.gmsClient.GetSnapshotStatus(ctx, *session, *snapshot)
if err != nil {
return nil, fmt.Errorf("error fetching snapshot status from GMS: sessionUid: %s, snapshotUid: %s", sessionUid, snapshotUid)
}
// grab any result available
// TODO: figure out a more intelligent way to do this, will depend on GMS apis
snapshot.Result = snapshotMeta.Result
if snapshotMeta.Status == cloudmigration.SnapshotStatusFinished {
// we need to update the snapshot in our db before reporting anything finished to the client
if err := s.store.UpdateSnapshot(ctx, cloudmigration.UpdateSnapshotCmd{
UID: snapshot.UID,
Status: cloudmigration.SnapshotStatusFinished,
Result: snapshot.Result,
}); err != nil {
return nil, fmt.Errorf("error updating snapshot status: %w", err)
}
}
}
return snapshot, nil
}
func (s *Service) GetSnapshotList(ctx context.Context, query cloudmigration.ListSnapshotsQuery) ([]cloudmigration.CloudMigrationSnapshot, error) {
ctx, span := s.tracer.Start(ctx, "CloudMigrationService.GetSnapshotList")
defer span.End()
snapshotList, err := s.store.GetSnapshotList(ctx, query)
if err != nil {
return nil, fmt.Errorf("fetching snapshots for session uid %s: %w", query.SessionUID, err)
}
return snapshotList, nil
}
func (s *Service) UploadSnapshot(ctx context.Context, sessionUid string, snapshotUid string) error {
ctx, span := s.tracer.Start(ctx, "CloudMigrationService.UploadSnapshot")
defer span.End()
snapshot, err := s.GetSnapshot(ctx, sessionUid, snapshotUid)
if err != nil {
return fmt.Errorf("fetching snapshot with uid %s: %w", snapshotUid, err)
}
s.log.Info("Uploading snapshot with GMS ID %s in local directory %s to url %s", snapshot.GMSSnapshotUID, snapshot.LocalDir, snapshot.UploadURL)
s.log.Debug("UploadSnapshot not yet implemented, faking it")
// start uploading the snapshot asynchronously while we return a success response to the client
go s.uploadSnapshot(context.Background(), *snapshot)
return nil
}
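// What follows is an illustrative sketch only, not part of this commit: the upload is
// currently faked, and the real GMS upload protocol is not defined here. It assumes the
// snapshot payload is a single file in the snapshot's local directory and that the
// pre-signed UploadURL accepts a plain HTTP POST; the "snapshot.json" file name is
// hypothetical. Only packages already imported in this file are used.
func (s *Service) uploadSnapshotPayload(localDir, uploadURL string) error {
	// open the hypothetical payload written by the snapshot build step
	f, err := os.Open(filepath.Join(localDir, "snapshot.json"))
	if err != nil {
		return fmt.Errorf("opening snapshot payload: %w", err)
	}
	defer func() { _ = f.Close() }()

	// stream the payload to the pre-signed URL returned by GMS at snapshot creation
	resp, err := http.Post(uploadURL, "application/json", f)
	if err != nil {
		return fmt.Errorf("uploading snapshot payload: %w", err)
	}
	defer func() { _ = resp.Body.Close() }()

	if resp.StatusCode >= 300 {
		return fmt.Errorf("unexpected status code %d from snapshot upload", resp.StatusCode)
	}
	return nil
}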
func (s *Service) parseCloudMigrationConfig() (string, error) {
if s.cfg == nil {
return "", fmt.Errorf("cfg cannot be nil")

View File

@ -44,7 +44,7 @@ func (s *NoopServiceImpl) GetMigrationStatus(ctx context.Context, runUID string)
return nil, cloudmigration.ErrFeatureDisabledError
}
func (s *NoopServiceImpl) GetMigrationRunList(ctx context.Context, uid string) (*cloudmigration.SnapshotList, error) {
func (s *NoopServiceImpl) GetMigrationRunList(ctx context.Context, uid string) (*cloudmigration.CloudMigrationRunList, error) {
return nil, cloudmigration.ErrFeatureDisabledError
}
@ -59,3 +59,19 @@ func (s *NoopServiceImpl) CreateMigrationRun(context.Context, cloudmigration.Clo
func (s *NoopServiceImpl) RunMigration(context.Context, string) (*cloudmigration.MigrateDataResponse, error) {
return nil, cloudmigration.ErrFeatureDisabledError
}
func (s *NoopServiceImpl) CreateSnapshot(ctx context.Context, sessionUid string) (*cloudmigration.CloudMigrationSnapshot, error) {
return nil, cloudmigration.ErrFeatureDisabledError
}
func (s *NoopServiceImpl) GetSnapshot(ctx context.Context, sessionUid string, snapshotUid string) (*cloudmigration.CloudMigrationSnapshot, error) {
return nil, cloudmigration.ErrFeatureDisabledError
}
func (s *NoopServiceImpl) GetSnapshotList(ctx context.Context, query cloudmigration.ListSnapshotsQuery) ([]cloudmigration.CloudMigrationSnapshot, error) {
return nil, cloudmigration.ErrFeatureDisabledError
}
func (s *NoopServiceImpl) UploadSnapshot(ctx context.Context, sessionUid string, snapshotUid string) error {
return cloudmigration.ErrFeatureDisabledError
}

View File

@ -8,6 +8,7 @@ import (
"github.com/grafana/grafana/pkg/services/cloudmigration"
"github.com/grafana/grafana/pkg/services/gcom"
"github.com/grafana/grafana/pkg/util"
)
var fixedDate = time.Date(2024, 6, 5, 17, 30, 40, 0, time.UTC)
@ -122,14 +123,61 @@ func (m FakeServiceImpl) GetMigrationStatus(_ context.Context, _ string) (*cloud
}, nil
}
func (m FakeServiceImpl) GetMigrationRunList(_ context.Context, _ string) (*cloudmigration.SnapshotList, error) {
func (m FakeServiceImpl) GetMigrationRunList(_ context.Context, _ string) (*cloudmigration.CloudMigrationRunList, error) {
if m.ReturnError {
return nil, fmt.Errorf("mock error")
}
return &cloudmigration.SnapshotList{
return &cloudmigration.CloudMigrationRunList{
Runs: []cloudmigration.MigrateDataResponseList{
{RunUID: "fake_run_uid_1"},
{RunUID: "fake_run_uid_2"},
},
}, nil
}
func (m FakeServiceImpl) CreateSnapshot(ctx context.Context, sessionUid string) (*cloudmigration.CloudMigrationSnapshot, error) {
if m.ReturnError {
return nil, fmt.Errorf("mock error")
}
return &cloudmigration.CloudMigrationSnapshot{
UID: util.GenerateShortUID(),
SessionUID: sessionUid,
Status: cloudmigration.SnapshotStatusUnknown,
}, nil
}
func (m FakeServiceImpl) GetSnapshot(ctx context.Context, sessionUid string, snapshotUid string) (*cloudmigration.CloudMigrationSnapshot, error) {
if m.ReturnError {
return nil, fmt.Errorf("mock error")
}
return &cloudmigration.CloudMigrationSnapshot{
UID: util.GenerateShortUID(),
SessionUID: sessionUid,
Status: cloudmigration.SnapshotStatusUnknown,
}, nil
}
func (m FakeServiceImpl) GetSnapshotList(ctx context.Context, query cloudmigration.ListSnapshotsQuery) ([]cloudmigration.CloudMigrationSnapshot, error) {
if m.ReturnError {
return nil, fmt.Errorf("mock error")
}
return []cloudmigration.CloudMigrationSnapshot{
{
UID: util.GenerateShortUID(),
SessionUID: query.SessionUID,
Status: cloudmigration.SnapshotStatusUnknown,
},
{
UID: util.GenerateShortUID(),
SessionUID: query.SessionUID,
Status: cloudmigration.SnapshotStatusUnknown,
},
}, nil
}
func (m FakeServiceImpl) UploadSnapshot(ctx context.Context, sessionUid string, snapshotUid string) error {
if m.ReturnError {
return fmt.Errorf("mock error")
}
return nil
}

View File

@ -0,0 +1,234 @@
package cloudmigrationimpl
import (
"context"
"time"
"github.com/grafana/grafana/pkg/services/cloudmigration"
"github.com/grafana/grafana/pkg/services/contexthandler"
"github.com/grafana/grafana/pkg/services/dashboards"
"github.com/grafana/grafana/pkg/services/datasources"
"github.com/grafana/grafana/pkg/services/folder"
"github.com/grafana/grafana/pkg/util/retryer"
)
func (s *Service) getMigrationDataJSON(ctx context.Context) (*cloudmigration.MigrateDataRequest, error) {
// Data sources
dataSources, err := s.getDataSources(ctx)
if err != nil {
s.log.Error("Failed to get datasources", "err", err)
return nil, err
}
// Dashboards
dashboards, err := s.getDashboards(ctx)
if err != nil {
s.log.Error("Failed to get dashboards", "err", err)
return nil, err
}
// Folders
folders, err := s.getFolders(ctx)
if err != nil {
s.log.Error("Failed to get folders", "err", err)
return nil, err
}
migrationDataSlice := make(
[]cloudmigration.MigrateDataRequestItem, 0,
len(dataSources)+len(dashboards)+len(folders),
)
for _, ds := range dataSources {
migrationDataSlice = append(migrationDataSlice, cloudmigration.MigrateDataRequestItem{
Type: cloudmigration.DatasourceDataType,
RefID: ds.UID,
Name: ds.Name,
Data: ds,
})
}
for _, dashboard := range dashboards {
dashboard.Data.Del("id")
migrationDataSlice = append(migrationDataSlice, cloudmigration.MigrateDataRequestItem{
Type: cloudmigration.DashboardDataType,
RefID: dashboard.UID,
Name: dashboard.Title,
Data: map[string]any{"dashboard": dashboard.Data},
})
}
for _, f := range folders {
migrationDataSlice = append(migrationDataSlice, cloudmigration.MigrateDataRequestItem{
Type: cloudmigration.FolderDataType,
RefID: f.UID,
Name: f.Title,
Data: f,
})
}
migrationData := &cloudmigration.MigrateDataRequest{
Items: migrationDataSlice,
}
return migrationData, nil
}
func (s *Service) getDataSources(ctx context.Context) ([]datasources.AddDataSourceCommand, error) {
dataSources, err := s.dsService.GetAllDataSources(ctx, &datasources.GetAllDataSourcesQuery{})
if err != nil {
s.log.Error("Failed to get all datasources", "err", err)
return nil, err
}
result := []datasources.AddDataSourceCommand{}
for _, dataSource := range dataSources {
// Decrypt secure json to send raw credentials
decryptedData, err := s.secretsService.DecryptJsonData(ctx, dataSource.SecureJsonData)
if err != nil {
s.log.Error("Failed to decrypt secure json data", "err", err)
return nil, err
}
dataSourceCmd := datasources.AddDataSourceCommand{
OrgID: dataSource.OrgID,
Name: dataSource.Name,
Type: dataSource.Type,
Access: dataSource.Access,
URL: dataSource.URL,
User: dataSource.User,
Database: dataSource.Database,
BasicAuth: dataSource.BasicAuth,
BasicAuthUser: dataSource.BasicAuthUser,
WithCredentials: dataSource.WithCredentials,
IsDefault: dataSource.IsDefault,
JsonData: dataSource.JsonData,
SecureJsonData: decryptedData,
ReadOnly: dataSource.ReadOnly,
UID: dataSource.UID,
}
result = append(result, dataSourceCmd)
}
return result, err
}
func (s *Service) getFolders(ctx context.Context) ([]folder.Folder, error) {
reqCtx := contexthandler.FromContext(ctx)
folders, err := s.folderService.GetFolders(ctx, folder.GetFoldersQuery{
SignedInUser: reqCtx.SignedInUser,
})
if err != nil {
return nil, err
}
result := make([]folder.Folder, len(folders))
for i, folder := range folders {
result[i] = *folder
}
return result, nil
}
func (s *Service) getDashboards(ctx context.Context) ([]dashboards.Dashboard, error) {
dashs, err := s.dashboardService.GetAllDashboards(ctx)
if err != nil {
return nil, err
}
result := make([]dashboards.Dashboard, len(dashs))
for i, dashboard := range dashs {
result[i] = *dashboard
}
return result, nil
}
// asynchronous process for writing the snapshot to the filesystem and updating the snapshot status
func (s *Service) buildSnapshot(ctx context.Context, snapshotMeta cloudmigration.CloudMigrationSnapshot) {
// TODO -- make sure we can only build one snapshot at a time
s.buildSnapshotMutex.Lock()
defer s.buildSnapshotMutex.Unlock()
s.buildSnapshotError = false
// update snapshot status to creating, add some retries since this is a background task
if err := retryer.Retry(func() (retryer.RetrySignal, error) {
err := s.store.UpdateSnapshot(ctx, cloudmigration.UpdateSnapshotCmd{
UID: snapshotMeta.UID,
Status: cloudmigration.SnapshotStatusCreating,
})
return retryer.FuncComplete, err
}, 10, time.Millisecond*100, time.Second*10); err != nil {
s.log.Error("failed to set snapshot status to 'creating'", "err", err)
s.buildSnapshotError = true
return
}
// build snapshot
// just sleep for now to simulate snapshot creation happening
// need to do a couple of fancy things when we implement this:
// - some sort of regular check-in so we know we haven't timed out
// - a channel to listen for cancel events
// - retries baked into the snapshot writing process?
s.log.Debug("snapshot meta", "snapshot", snapshotMeta)
time.Sleep(3 * time.Second)
// update snapshot status to pending upload with retry
if err := retryer.Retry(func() (retryer.RetrySignal, error) {
err := s.store.UpdateSnapshot(ctx, cloudmigration.UpdateSnapshotCmd{
UID: snapshotMeta.UID,
Status: cloudmigration.SnapshotStatusPendingUpload,
})
return retryer.FuncComplete, err
}, 10, time.Millisecond*100, time.Second*10); err != nil {
s.log.Error("failed to set snapshot status to 'pending upload'", "err", err)
s.buildSnapshotError = true
}
}
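// The TODO list in buildSnapshot above sketches what the real implementation will need:
// regular check-ins, a way to react to cancel events, and retries around the writing
// itself. A minimal sketch of the check-in/cancel part follows; it is not part of this
// commit. writeNextBatch and checkIn are hypothetical callbacks standing in for the real
// snapshot-writing and heartbeat logic, and the 5-second interval is an arbitrary choice.
// Only packages already imported in this file are used.
func buildWithCheckIns(ctx context.Context, writeNextBatch func() (bool, error), checkIn func()) error {
	ticker := time.NewTicker(5 * time.Second)
	defer ticker.Stop()
	for {
		select {
		case <-ctx.Done():
			// a cancel event arrived; stop writing and surface the cancellation
			return ctx.Err()
		case <-ticker.C:
			// periodic check-in so the rest of the system knows the build has not timed out
			checkIn()
		default:
			// keep writing resources until the snapshot is complete
			done, err := writeNextBatch()
			if err != nil {
				return err
			}
			if done {
				return nil
			}
		}
	}
}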
// asynchronous process for uploading the snapshot to GMS and updating the snapshot status
func (s *Service) uploadSnapshot(ctx context.Context, snapshotMeta cloudmigration.CloudMigrationSnapshot) {
// TODO -- make sure we can only upload one snapshot at a time
s.buildSnapshotMutex.Lock()
defer s.buildSnapshotMutex.Unlock()
s.buildSnapshotError = false
// update snapshot status to uploading, add some retries since this is a background task
if err := retryer.Retry(func() (retryer.RetrySignal, error) {
err := s.store.UpdateSnapshot(ctx, cloudmigration.UpdateSnapshotCmd{
UID: snapshotMeta.UID,
Status: cloudmigration.SnapshotStatusUploading,
})
return retryer.FuncComplete, err
}, 10, time.Millisecond*100, time.Second*10); err != nil {
s.log.Error("failed to set snapshot status to 'creating'", "err", err)
s.buildSnapshotError = true
return
}
// upload snapshot
// just sleep for now to simulate snapshot creation happening
s.log.Debug("snapshot meta", "snapshot", snapshotMeta)
time.Sleep(3 * time.Second)
// update snapshot status to pending processing with retry
if err := retryer.Retry(func() (retryer.RetrySignal, error) {
err := s.store.UpdateSnapshot(ctx, cloudmigration.UpdateSnapshotCmd{
UID: snapshotMeta.UID,
Status: cloudmigration.SnapshotStatusPendingProcessing,
})
return retryer.FuncComplete, err
}, 10, time.Millisecond*100, time.Second*10); err != nil {
s.log.Error("failed to set snapshot status to 'pending upload'", "err", err)
s.buildSnapshotError = true
}
// simulate the rest
// processing
time.Sleep(3 * time.Second)
if err := s.store.UpdateSnapshot(ctx, cloudmigration.UpdateSnapshotCmd{
UID: snapshotMeta.UID,
Status: cloudmigration.SnapshotStatusProcessing,
}); err != nil {
s.log.Error("updating snapshot", "err", err)
}
// end here as the GetSnapshot handler will fill in the rest when called
}

View File

@ -15,4 +15,9 @@ type store interface {
CreateMigrationRun(ctx context.Context, cmr cloudmigration.CloudMigrationSnapshot) (string, error)
GetMigrationStatus(ctx context.Context, cmrUID string) (*cloudmigration.CloudMigrationSnapshot, error)
GetMigrationStatusList(ctx context.Context, migrationUID string) ([]*cloudmigration.CloudMigrationSnapshot, error)
CreateSnapshot(ctx context.Context, snapshot cloudmigration.CloudMigrationSnapshot) (string, error)
UpdateSnapshot(ctx context.Context, snapshot cloudmigration.UpdateSnapshotCmd) error
GetSnapshotByUID(ctx context.Context, uid string) (*cloudmigration.CloudMigrationSnapshot, error)
GetSnapshotList(ctx context.Context, query cloudmigration.ListSnapshotsQuery) ([]cloudmigration.CloudMigrationSnapshot, error)
}

View File

@ -146,6 +146,106 @@ func (ss *sqlStore) GetMigrationStatusList(ctx context.Context, migrationUID str
return runs, nil
}
func (ss *sqlStore) CreateSnapshot(ctx context.Context, snapshot cloudmigration.CloudMigrationSnapshot) (string, error) {
if err := ss.encryptKey(ctx, &snapshot); err != nil {
return "", err
}
if snapshot.Result == nil {
snapshot.Result = make([]byte, 0)
}
if snapshot.UID == "" {
snapshot.UID = util.GenerateShortUID()
}
err := ss.db.WithDbSession(ctx, func(sess *sqlstore.DBSession) error {
snapshot.Created = time.Now()
snapshot.Updated = time.Now()
_, err := sess.Insert(&snapshot)
if err != nil {
return err
}
return nil
})
if err != nil {
return "", err
}
return snapshot.UID, nil
}
// UpdateSnapshot takes a snapshot object containing a uid and updates a subset of fields in the database.
func (ss *sqlStore) UpdateSnapshot(ctx context.Context, update cloudmigration.UpdateSnapshotCmd) error {
if update.UID == "" {
return fmt.Errorf("missing snapshot uid")
}
err := ss.db.InTransaction(ctx, func(ctx context.Context) error {
// Update status if set
if err := ss.db.WithDbSession(ctx, func(sess *sqlstore.DBSession) error {
if update.Status != "" {
rawSQL := "UPDATE cloud_migration_snapshot SET status=? WHERE uid=?"
if _, err := sess.Exec(rawSQL, update.Status, update.UID); err != nil {
return fmt.Errorf("updating snapshot status for uid %s: %w", update.UID, err)
}
}
return nil
}); err != nil {
return err
}
// Update result if set
if err := ss.db.WithDbSession(ctx, func(sess *sqlstore.DBSession) error {
if len(update.Result) > 0 {
rawSQL := "UPDATE cloud_migration_snapshot SET result=? WHERE uid=?"
if _, err := sess.Exec(rawSQL, update.Result, update.UID); err != nil {
return fmt.Errorf("updating snapshot result for uid %s: %w", update.UID, err)
}
}
return nil
}); err != nil {
return err
}
return nil
})
return err
}
func (ss *sqlStore) GetSnapshotByUID(ctx context.Context, uid string) (*cloudmigration.CloudMigrationSnapshot, error) {
var snapshot cloudmigration.CloudMigrationSnapshot
err := ss.db.WithDbSession(ctx, func(sess *db.Session) error {
exist, err := sess.Where("uid=?", uid).Get(&snapshot)
if err != nil {
return err
}
if !exist {
return cloudmigration.ErrSnapshotNotFound
}
return nil
})
if err := ss.decryptKey(ctx, &snapshot); err != nil {
return &snapshot, err
}
return &snapshot, err
}
func (ss *sqlStore) GetSnapshotList(ctx context.Context, query cloudmigration.ListSnapshotsQuery) ([]cloudmigration.CloudMigrationSnapshot, error) {
var runs = make([]cloudmigration.CloudMigrationSnapshot, 0)
err := ss.db.WithDbSession(ctx, func(sess *db.Session) error {
sess.Limit(query.Limit, query.Offset)
return sess.Find(&runs, &cloudmigration.CloudMigrationSnapshot{
SessionUID: query.SessionUID,
})
})
if err != nil {
return nil, err
}
return runs, nil
}
func (ss *sqlStore) encryptToken(ctx context.Context, cm *cloudmigration.CloudMigrationSession) error {
s, err := ss.secretsService.Encrypt(ctx, []byte(cm.AuthToken), secrets.WithoutScope())
if err != nil {
@ -171,3 +271,29 @@ func (ss *sqlStore) decryptToken(ctx context.Context, cm *cloudmigration.CloudMi
return nil
}
func (ss *sqlStore) encryptKey(ctx context.Context, snapshot *cloudmigration.CloudMigrationSnapshot) error {
s, err := ss.secretsService.Encrypt(ctx, []byte(snapshot.EncryptionKey), secrets.WithoutScope())
if err != nil {
return fmt.Errorf("encrypting key: %w", err)
}
snapshot.EncryptionKey = base64.StdEncoding.EncodeToString(s)
return nil
}
func (ss *sqlStore) decryptKey(ctx context.Context, snapshot *cloudmigration.CloudMigrationSnapshot) error {
decoded, err := base64.StdEncoding.DecodeString(snapshot.EncryptionKey)
if err != nil {
return fmt.Errorf("key could not be decoded")
}
t, err := ss.secretsService.Decrypt(ctx, decoded)
if err != nil {
return fmt.Errorf("decrypting key: %w", err)
}
snapshot.EncryptionKey = string(t)
return nil
}

View File

@ -152,7 +152,6 @@ func Test_GetMigrationStatusList(t *testing.T) {
list, err := s.GetMigrationStatusList(ctx, "qwerty")
require.NoError(t, err)
require.Equal(t, 2, len(list))
// TODO validate that this is ok
})
t.Run("returns no error if migration was not found, just empty list", func(t *testing.T) {
@ -188,11 +187,11 @@ func setUpTest(t *testing.T) (*sqlstore.SQLStore, *sqlStore) {
// insert cloud migration run test data
_, err = testDB.GetSqlxSession().Exec(ctx, `
INSERT INTO
cloud_migration_snapshot (session_uid, uid, result, created, updated, finished)
cloud_migration_snapshot (session_uid, uid, result, created, updated, finished, status)
VALUES
('qwerty', 'poiuy', ?, '2024-03-25 15:30:36.000', '2024-03-27 15:30:43.000', '2024-03-27 15:30:43.000'),
('qwerty', 'lkjhg', ?, '2024-03-25 15:30:36.000', '2024-03-27 15:30:43.000', '2024-03-27 15:30:43.000'),
('zxcvbn', 'mnbvvc', ?, '2024-03-25 15:30:36.000', '2024-03-27 15:30:43.000', '2024-03-27 15:30:43.000');
('qwerty', 'poiuy', ?, '2024-03-25 15:30:36.000', '2024-03-27 15:30:43.000', '2024-03-27 15:30:43.000', 'finished'),
('qwerty', 'lkjhg', ?, '2024-03-25 15:30:36.000', '2024-03-27 15:30:43.000', '2024-03-27 15:30:43.000', 'finished'),
('zxcvbn', 'mnbvvc', ?, '2024-03-25 15:30:36.000', '2024-03-27 15:30:43.000', '2024-03-27 15:30:43.000', 'finished');
`,
[]byte("ERROR"),
[]byte("OK"),

View File

@ -9,6 +9,8 @@ import (
type Client interface {
ValidateKey(context.Context, cloudmigration.CloudMigrationSession) error
MigrateData(context.Context, cloudmigration.CloudMigrationSession, cloudmigration.MigrateDataRequest) (*cloudmigration.MigrateDataResponse, error)
InitializeSnapshot(context.Context, cloudmigration.CloudMigrationSession) (*cloudmigration.InitializeSnapshotResponse, error)
GetSnapshotStatus(context.Context, cloudmigration.CloudMigrationSession, cloudmigration.CloudMigrationSnapshot) (*cloudmigration.CloudMigrationSnapshot, error)
}
const logPrefix = "cloudmigration.gmsclient"

View File

@ -111,6 +111,14 @@ func (c *gmsClientImpl) MigrateData(ctx context.Context, cm cloudmigration.Cloud
return &result, nil
}
func (c *gmsClientImpl) InitializeSnapshot(context.Context, cloudmigration.CloudMigrationSession) (*cloudmigration.InitializeSnapshotResponse, error) {
panic("not implemented")
}
func (c *gmsClientImpl) GetSnapshotStatus(context.Context, cloudmigration.CloudMigrationSession, cloudmigration.CloudMigrationSnapshot) (*cloudmigration.CloudMigrationSnapshot, error) {
panic("not implemented")
}
func convertRequestToDTO(request cloudmigration.MigrateDataRequest) MigrateDataRequestDTO {
items := make([]MigrateDataRequestItemDTO, len(request.Items))
for i := 0; i < len(request.Items); i++ {

View File

@ -2,9 +2,12 @@ package gmsclient
import (
"context"
"encoding/json"
"math/rand"
"time"
"github.com/grafana/grafana/pkg/services/cloudmigration"
"github.com/grafana/grafana/pkg/util"
)
// NewInMemoryClient returns an implementation of Client that returns canned responses
@ -12,7 +15,9 @@ func NewInMemoryClient() Client {
return &memoryClientImpl{}
}
type memoryClientImpl struct{}
type memoryClientImpl struct {
snapshot *cloudmigration.InitializeSnapshotResponse
}
func (c *memoryClientImpl) ValidateKey(ctx context.Context, cm cloudmigration.CloudMigrationSession) error {
return nil
@ -43,3 +48,50 @@ func (c *memoryClientImpl) MigrateData(
return &result, nil
}
func (c *memoryClientImpl) InitializeSnapshot(context.Context, cloudmigration.CloudMigrationSession) (*cloudmigration.InitializeSnapshotResponse, error) {
c.snapshot = &cloudmigration.InitializeSnapshotResponse{
EncryptionKey: util.GenerateShortUID(),
GMSSnapshotUID: util.GenerateShortUID(),
UploadURL: "localhost:3000",
}
return c.snapshot, nil
}
func (c *memoryClientImpl) GetSnapshotStatus(ctx context.Context, session cloudmigration.CloudMigrationSession, snapshot cloudmigration.CloudMigrationSnapshot) (*cloudmigration.CloudMigrationSnapshot, error) {
// just fake an entire response
gmsSnapshot := cloudmigration.CloudMigrationSnapshot{
Status: cloudmigration.SnapshotStatusFinished,
GMSSnapshotUID: util.GenerateShortUID(),
Result: []byte{},
Finished: time.Now(),
}
result := []cloudmigration.MigrateDataResponseItem{
{
Type: cloudmigration.DashboardDataType,
RefID: util.GenerateShortUID(),
Status: cloudmigration.ItemStatusOK,
},
{
Type: cloudmigration.DatasourceDataType,
RefID: util.GenerateShortUID(),
Status: cloudmigration.ItemStatusError,
Error: "fake error",
},
{
Type: cloudmigration.FolderDataType,
RefID: util.GenerateShortUID(),
Status: cloudmigration.ItemStatusOK,
},
}
b, err := json.Marshal(result)
if err != nil {
return nil, err
}
gmsSnapshot.Result = b
return &gmsSnapshot, nil
}

View File

@ -15,6 +15,7 @@ var (
ErrMigrationRunNotFound = errutil.NotFound("cloudmigrations.migrationRunNotFound").Errorf("Migration run not found")
ErrMigrationNotDeleted = errutil.Internal("cloudmigrations.sessionNotDeleted").Errorf("Session not deleted")
ErrTokenNotFound = errutil.NotFound("cloudmigrations.tokenNotFound").Errorf("Token not found")
ErrSnapshotNotFound = errutil.NotFound("cloudmigrations.snapshotNotFound").Errorf("Snapshot not found")
)
// CloudMigration domain structs
@ -31,26 +32,64 @@ type CloudMigrationSession struct {
}
type CloudMigrationSnapshot struct {
ID int64 `xorm:"pk autoincr 'id'"`
UID string `xorm:"uid"`
SessionUID string `xorm:"session_uid"`
Result []byte //store raw gms response body
Created time.Time
Updated time.Time
Finished time.Time
ID int64 `xorm:"pk autoincr 'id'"`
UID string `xorm:"uid"`
SessionUID string `xorm:"session_uid"`
Status SnapshotStatus
EncryptionKey string `xorm:"encryption_key"` // stored in the unified secrets table
UploadURL string `xorm:"upload_url"`
LocalDir string `xorm:"local_directory"`
GMSSnapshotUID string `xorm:"gms_snapshot_uid"`
ErrorString string `xorm:"error_string"`
Created time.Time
Updated time.Time
Finished time.Time
// []MigrateDataResponseItem
Result []byte `xorm:"result"` //store raw gms response body
}
func (r CloudMigrationSnapshot) GetResult() (*MigrateDataResponse, error) {
type SnapshotStatus string
const (
SnapshotStatusInitializing = "initializing"
SnapshotStatusCreating = "creating"
SnapshotStatusPendingUpload = "pending_upload"
SnapshotStatusUploading = "uploading"
SnapshotStatusPendingProcessing = "pending_processing"
SnapshotStatusProcessing = "processing"
SnapshotStatusFinished = "finished"
SnapshotStatusError = "error"
SnapshotStatusUnknown = "unknown"
)
// Deprecated: use GetSnapshotResult for the async workflow instead.
func (s CloudMigrationSnapshot) GetResult() (*MigrateDataResponse, error) {
var result MigrateDataResponse
err := json.Unmarshal(r.Result, &result)
err := json.Unmarshal(s.Result, &result)
if err != nil {
return nil, errors.New("could not parse result of run")
}
result.RunUID = r.UID
result.RunUID = s.UID
return &result, nil
}
type SnapshotList struct {
func (s CloudMigrationSnapshot) ShouldQueryGMS() bool {
return s.Status == SnapshotStatusPendingProcessing || s.Status == SnapshotStatusProcessing
}
func (s CloudMigrationSnapshot) GetSnapshotResult() ([]MigrateDataResponseItem, error) {
var result []MigrateDataResponseItem
if len(s.Result) > 0 {
err := json.Unmarshal(s.Result, &result)
if err != nil {
return nil, errors.New("could not parse result of run")
}
}
return result, nil
}
type CloudMigrationRunList struct {
Runs []MigrateDataResponseList
}
@ -69,6 +108,18 @@ type CloudMigrationSessionListResponse struct {
Sessions []CloudMigrationSessionResponse
}
type ListSnapshotsQuery struct {
SessionUID string
Offset int
Limit int
}
type UpdateSnapshotCmd struct {
UID string
Status SnapshotStatus
Result []byte //store raw gms response body
}
// access token
type CreateAccessTokenResponse struct {
@ -140,3 +191,13 @@ type MigrateDataResponseItem struct {
Status ItemStatus
Error string
}
type CreateSessionResponse struct {
SnapshotUid string
}
type InitializeSnapshotResponse struct {
EncryptionKey string
UploadURL string
GMSSnapshotUID string
}

View File

@ -709,7 +709,7 @@ var (
AllowSelfServe: false,
RequiresRestart: true,
},
FeatureFlag{
{
Name: "disableClassicHTTPHistogram",
Description: "Disables classic HTTP Histogram (use with enableNativeHTTPHistogram)",
Stage: FeatureStageExperimental,
@ -1343,6 +1343,14 @@ var (
Owner: grafanaBackendServicesSquad,
Expression: "false", // enabled by default
},
{
Name: "zanzana",
Description: "Use openFGA as authorization engine.",
Stage: FeatureStageExperimental,
Owner: identityAccessTeam,
HideFromDocs: true,
HideFromAdminPage: true,
},
}
)

View File

@ -178,3 +178,4 @@ authZGRPCServer,experimental,@grafana/identity-access-team,false,false,false
openSearchBackendFlowEnabled,preview,@grafana/aws-datasources,false,false,false
ssoSettingsLDAP,experimental,@grafana/identity-access-team,false,false,false
databaseReadReplica,experimental,@grafana/grafana-backend-services-squad,false,false,false
zanzana,experimental,@grafana/identity-access-team,false,false,false


View File

@ -722,4 +722,8 @@ const (
// FlagDatabaseReadReplica
// Use a read replica for some database queries.
FlagDatabaseReadReplica = "databaseReadReplica"
// FlagZanzana
// Use openFGA as authorization engine.
FlagZanzana = "zanzana"
)

View File

@ -2305,6 +2305,20 @@
"stage": "experimental",
"codeowner": "@grafana/hosted-grafana-team"
}
},
{
"metadata": {
"name": "zanzana",
"resourceVersion": "1718787304727",
"creationTimestamp": "2024-06-19T08:55:04Z"
},
"spec": {
"description": "Use openFGA as authorization engine.",
"stage": "experimental",
"codeowner": "@grafana/identity-access-team",
"hideFromAdminPage": true,
"hideFromDocs": true
}
}
]
}

View File

@ -5,7 +5,7 @@ import (
)
func addCloudMigrationsMigrations(mg *Migrator) {
// v1 - synchronous workflow
// --- v1 - synchronous workflow
migrationTable := Table{
Name: "cloud_migration",
Columns: []*Column{
@ -65,7 +65,7 @@ func addCloudMigrationsMigrations(mg *Migrator) {
Cols: []string{"uid"}, Type: UniqueIndex,
}))
// v2 - asynchronous workflow refactor
// --- v2 - asynchronous workflow refactor
sessionTable := Table{
Name: "cloud_migration_session",
Columns: []*Column{
@ -120,4 +120,23 @@ func addCloudMigrationsMigrations(mg *Migrator) {
"updated": "updated",
"finished": "finished",
})
// --- add new columns to snapshots table
uploadUrlColumn := Column{Name: "upload_url", Type: DB_Text, Nullable: true}
mg.AddMigration("add snapshot upload_url column", NewAddColumnMigration(migrationSnapshotTable, &uploadUrlColumn))
statusColumn := Column{Name: "status", Type: DB_Text, Nullable: false}
mg.AddMigration("add snapshot status column", NewAddColumnMigration(migrationSnapshotTable, &statusColumn))
localDirColumn := Column{Name: "local_directory", Type: DB_Text, Nullable: true}
mg.AddMigration("add snapshot local_directory column", NewAddColumnMigration(migrationSnapshotTable, &localDirColumn))
gmsSnapshotUIDColumn := Column{Name: "gms_snapshot_uid", Type: DB_Text, Nullable: true}
mg.AddMigration("add snapshot gms_snapshot_uid column", NewAddColumnMigration(migrationSnapshotTable, &gmsSnapshotUIDColumn))
encryptionKeyColumn := Column{Name: "encryption_key", Type: DB_Text, Nullable: true}
mg.AddMigration("add snapshot encryption_key column", NewAddColumnMigration(migrationSnapshotTable, &encryptionKeyColumn))
errorStringColumn := Column{Name: "error_string", Type: DB_Text, Nullable: true}
mg.AddMigration("add snapshot error_string column", NewAddColumnMigration(migrationSnapshotTable, &errorStringColumn))
}

View File

@ -3159,6 +3159,17 @@
}
}
},
"CloudMigrationRunListDTO": {
"type": "object",
"properties": {
"runs": {
"type": "array",
"items": {
"$ref": "#/definitions/MigrateDataResponseListDTO"
}
}
}
},
"CloudMigrationSessionListResponseDTO": {
"type": "object",
"properties": {
@ -3632,6 +3643,14 @@
}
}
},
"CreateSnapshotResponseDTO": {
"type": "object",
"properties": {
"uid": {
"type": "string"
}
}
},
"CreateTeamCommand": {
"type": "object",
"properties": {
@ -4726,6 +4745,45 @@
}
]
},
"GetSnapshotResponseDTO": {
"type": "object",
"properties": {
"created": {
"type": "string",
"format": "date-time"
},
"finished": {
"type": "string",
"format": "date-time"
},
"results": {
"type": "array",
"items": {
"$ref": "#/definitions/MigrateDataResponseItemDTO"
}
},
"sessionUid": {
"type": "string"
},
"status": {
"type": "string",
"enum": [
"INITIALIZING",
"CREATING",
"PENDING_UPLOAD",
"UPLOADING",
"PENDING_PROCESSING",
"PROCESSING",
"FINISHED",
"ERROR",
"UNKNOWN"
]
},
"uid": {
"type": "string"
}
}
},
"Hit": {
"type": "object",
"properties": {
@ -7084,13 +7142,47 @@
"type": "integer",
"format": "int64"
},
"SnapshotListDTO": {
"SnapshotDTO": {
"description": "Base snapshot without results",
"type": "object",
"properties": {
"runs": {
"created": {
"type": "string",
"format": "date-time"
},
"finished": {
"type": "string",
"format": "date-time"
},
"sessionUid": {
"type": "string"
},
"status": {
"type": "string",
"enum": [
"INITIALIZING",
"CREATING",
"PENDING_UPLOAD",
"UPLOADING",
"PENDING_PROCESSING",
"PROCESSING",
"FINISHED",
"ERROR",
"UNKNOWN"
]
},
"uid": {
"type": "string"
}
}
},
"SnapshotListResponseDTO": {
"type": "object",
"properties": {
"snapshots": {
"type": "array",
"items": {
"$ref": "#/definitions/MigrateDataResponseListDTO"
"$ref": "#/definitions/SnapshotDTO"
}
}
}
@ -8394,7 +8486,7 @@
"cloudMigrationRunListResponse": {
"description": "",
"schema": {
"$ref": "#/definitions/SnapshotListDTO"
"$ref": "#/definitions/CloudMigrationRunListDTO"
}
},
"cloudMigrationRunResponse": {
@ -8559,6 +8651,12 @@
"$ref": "#/definitions/ServiceAccountDTO"
}
},
"createSnapshotResponse": {
"description": "",
"schema": {
"$ref": "#/definitions/CreateSnapshotResponseDTO"
}
},
"createTeamResponse": {
"description": "",
"schema": {
@ -9126,6 +9224,12 @@
}
}
},
"getSnapshotResponse": {
"description": "",
"schema": {
"$ref": "#/definitions/GetSnapshotResponseDTO"
}
},
"getStatusResponse": {
"description": ""
},
@ -9582,6 +9686,12 @@
"$ref": "#/definitions/RoleAssignmentsDTO"
}
},
"snapshotListResponse": {
"description": "",
"schema": {
"$ref": "#/definitions/SnapshotListResponseDTO"
}
},
"unauthorisedError": {
"description": "UnauthorizedError is returned when the request is not authenticated.",
"schema": {

View File

@ -2336,6 +2336,9 @@
"200": {
"$ref": "#/responses/cloudMigrationSessionResponse"
},
"400": {
"$ref": "#/responses/badRequestError"
},
"401": {
"$ref": "#/responses/unauthorisedError"
},
@ -2368,6 +2371,9 @@
"200": {
"$ref": "#/responses/cloudMigrationRunResponse"
},
"400": {
"$ref": "#/responses/badRequestError"
},
"401": {
"$ref": "#/responses/unauthorisedError"
},
@ -2400,6 +2406,9 @@
"200": {
"$ref": "#/responses/cloudMigrationSessionResponse"
},
"400": {
"$ref": "#/responses/badRequestError"
},
"401": {
"$ref": "#/responses/unauthorisedError"
},
@ -2427,6 +2436,9 @@
}
],
"responses": {
"400": {
"$ref": "#/responses/badRequestError"
},
"401": {
"$ref": "#/responses/unauthorisedError"
},
@ -2459,6 +2471,9 @@
"200": {
"$ref": "#/responses/cloudMigrationRunListResponse"
},
"400": {
"$ref": "#/responses/badRequestError"
},
"401": {
"$ref": "#/responses/unauthorisedError"
},
@ -2490,6 +2505,223 @@
"200": {
"$ref": "#/responses/cloudMigrationRunResponse"
},
"400": {
"$ref": "#/responses/badRequestError"
},
"401": {
"$ref": "#/responses/unauthorisedError"
},
"403": {
"$ref": "#/responses/forbiddenError"
},
"500": {
"$ref": "#/responses/internalServerError"
}
}
}
},
"/cloudmigration/migration/{uid}/snapshot": {
"post": {
"description": "If the snapshot initialization is successful, the snapshot uid is returned.",
"tags": [
"migrations"
],
"summary": "Trigger the creation of an instance snapshot associated with the provided session.",
"operationId": "createSnapshot",
"parameters": [
{
"type": "string",
"description": "UID of a session",
"name": "uid",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"$ref": "#/responses/createSnapshotResponse"
},
"400": {
"$ref": "#/responses/badRequestError"
},
"401": {
"$ref": "#/responses/unauthorisedError"
},
"403": {
"$ref": "#/responses/forbiddenError"
},
"500": {
"$ref": "#/responses/internalServerError"
}
}
}
},
"/cloudmigration/migration/{uid}/snapshot/{snapshotUid}": {
"get": {
"tags": [
"migrations"
],
"summary": "Get metadata about a snapshot, including where it is in its processing and final results.",
"operationId": "getSnapshot",
"parameters": [
{
"type": "string",
"description": "Session UID of a session",
"name": "uid",
"in": "path",
"required": true
},
{
"type": "string",
"description": "UID of a snapshot",
"name": "snapshotUid",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"$ref": "#/responses/getSnapshotResponse"
},
"400": {
"$ref": "#/responses/badRequestError"
},
"401": {
"$ref": "#/responses/unauthorisedError"
},
"403": {
"$ref": "#/responses/forbiddenError"
},
"500": {
"$ref": "#/responses/internalServerError"
}
}
}
},
"/cloudmigration/migration/{uid}/snapshot/{snapshotUid}/cancel": {
"post": {
"description": "TODO: Implement",
"tags": [
"migrations"
],
"summary": "Cancel a snapshot, wherever it is in its processing chain.",
"operationId": "cancelSnapshot",
"parameters": [
{
"type": "string",
"description": "Session UID of a session",
"name": "uid",
"in": "path",
"required": true
},
{
"type": "string",
"description": "UID of a snapshot",
"name": "snapshotUid",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "(empty)"
},
"400": {
"$ref": "#/responses/badRequestError"
},
"401": {
"$ref": "#/responses/unauthorisedError"
},
"403": {
"$ref": "#/responses/forbiddenError"
},
"500": {
"$ref": "#/responses/internalServerError"
}
}
}
},
"/cloudmigration/migration/{uid}/snapshot/{snapshotUid}/upload": {
"post": {
"tags": [
"migrations"
],
"summary": "Upload a snapshot to the Grafana Migration Service for processing.",
"operationId": "uploadSnapshot",
"parameters": [
{
"type": "string",
"description": "Session UID of a session",
"name": "uid",
"in": "path",
"required": true
},
{
"type": "string",
"description": "UID of a snapshot",
"name": "snapshotUid",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "(empty)"
},
"400": {
"$ref": "#/responses/badRequestError"
},
"401": {
"$ref": "#/responses/unauthorisedError"
},
"403": {
"$ref": "#/responses/forbiddenError"
},
"500": {
"$ref": "#/responses/internalServerError"
}
}
}
},
"/cloudmigration/migration/{uid}/snapshots": {
"get": {
"tags": [
"migrations"
],
"summary": "Get a list of snapshots for a session.",
"operationId": "getShapshotList",
"parameters": [
{
"type": "integer",
"format": "int64",
"default": 0,
"description": "Offset is used for pagination with limit",
"name": "offset",
"in": "query"
},
{
"type": "integer",
"format": "int64",
"default": 100,
"description": "Max limit for results returned.",
"name": "limit",
"in": "query"
},
{
"type": "string",
"description": "Session UID of a session",
"name": "uid",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"$ref": "#/responses/snapshotListResponse"
},
"400": {
"$ref": "#/responses/badRequestError"
},
"401": {
"$ref": "#/responses/unauthorisedError"
},
@ -2569,6 +2801,9 @@
"204": {
"$ref": "#/responses/cloudMigrationDeleteTokenResponse"
},
"400": {
"$ref": "#/responses/badRequestError"
},
"401": {
"$ref": "#/responses/unauthorisedError"
},
@ -13336,6 +13571,17 @@
}
}
},
"CloudMigrationRunListDTO": {
"type": "object",
"properties": {
"runs": {
"type": "array",
"items": {
"$ref": "#/definitions/MigrateDataResponseListDTO"
}
}
}
},
"CloudMigrationSessionListResponseDTO": {
"type": "object",
"properties": {
@ -13877,6 +14123,14 @@
}
}
},
"CreateSnapshotResponseDTO": {
"type": "object",
"properties": {
"uid": {
"type": "string"
}
}
},
"CreateTeamCommand": {
"type": "object",
"properties": {
@ -15270,6 +15524,45 @@
}
]
},
"GetSnapshotResponseDTO": {
"type": "object",
"properties": {
"created": {
"type": "string",
"format": "date-time"
},
"finished": {
"type": "string",
"format": "date-time"
},
"results": {
"type": "array",
"items": {
"$ref": "#/definitions/MigrateDataResponseItemDTO"
}
},
"sessionUid": {
"type": "string"
},
"status": {
"type": "string",
"enum": [
"INITIALIZING",
"CREATING",
"PENDING_UPLOAD",
"UPLOADING",
"PENDING_PROCESSING",
"PROCESSING",
"FINISHED",
"ERROR",
"UNKNOWN"
]
},
"uid": {
"type": "string"
}
}
},
"GettableAlertmanagers": {
"type": "object",
"properties": {
@ -19985,13 +20278,47 @@
"SmtpNotEnabled": {
"$ref": "#/definitions/ResponseDetails"
},
"SnapshotListDTO": {
"SnapshotDTO": {
"description": "Base snapshot without results",
"type": "object",
"properties": {
"runs": {
"created": {
"type": "string",
"format": "date-time"
},
"finished": {
"type": "string",
"format": "date-time"
},
"sessionUid": {
"type": "string"
},
"status": {
"type": "string",
"enum": [
"INITIALIZING",
"CREATING",
"PENDING_UPLOAD",
"UPLOADING",
"PENDING_PROCESSING",
"PROCESSING",
"FINISHED",
"ERROR",
"UNKNOWN"
]
},
"uid": {
"type": "string"
}
}
},
"SnapshotListResponseDTO": {
"type": "object",
"properties": {
"snapshots": {
"type": "array",
"items": {
"$ref": "#/definitions/MigrateDataResponseListDTO"
"$ref": "#/definitions/SnapshotDTO"
}
}
}
@ -22434,7 +22761,7 @@
"cloudMigrationRunListResponse": {
"description": "(empty)",
"schema": {
"$ref": "#/definitions/SnapshotListDTO"
"$ref": "#/definitions/CloudMigrationRunListDTO"
}
},
"cloudMigrationRunResponse": {
@ -22599,6 +22926,12 @@
"$ref": "#/definitions/ServiceAccountDTO"
}
},
"createSnapshotResponse": {
"description": "(empty)",
"schema": {
"$ref": "#/definitions/CreateSnapshotResponseDTO"
}
},
"createTeamResponse": {
"description": "(empty)",
"schema": {
@ -23166,6 +23499,12 @@
}
}
},
"getSnapshotResponse": {
"description": "(empty)",
"schema": {
"$ref": "#/definitions/GetSnapshotResponseDTO"
}
},
"getStatusResponse": {
"description": "(empty)"
},
@ -23631,6 +23970,12 @@
"$ref": "#/definitions/RoleAssignmentsDTO"
}
},
"snapshotListResponse": {
"description": "(empty)",
"schema": {
"$ref": "#/definitions/SnapshotListResponseDTO"
}
},
"unauthorisedError": {
"description": "UnauthorizedError is returned when the request is not authenticated.",
"schema": {

View File

@ -338,7 +338,8 @@ export function EditCloudGroupModal(props: ModalProps): React.ReactElement {
</Stack>
</Field>
{checkEvaluationIntervalGlobalLimit(watch('groupInterval')).exceedsLimit && (
{/* if we're dealing with a Grafana-managed group, check if the evaluation interval is valid / permitted */}
{isGrafanaManagedGroup && checkEvaluationIntervalGlobalLimit(watch('groupInterval')).exceedsLimit && (
<EvaluationIntervalLimitExceeded />
)}

View File

@ -7,6 +7,7 @@ import { Icon, Tooltip, useStyles2 } from '@grafana/ui/src';
import { CombinedRule } from '../../../../../types/unified-alerting';
import { checkEvaluationIntervalGlobalLimit } from '../../utils/config';
import { isGrafanaRulerRule } from '../../utils/rules';
interface RuleConfigStatusProps {
rule: CombinedRule;
@ -14,11 +15,11 @@ interface RuleConfigStatusProps {
export function RuleConfigStatus({ rule }: RuleConfigStatusProps) {
const styles = useStyles2(getStyles);
const isGrafanaManagedRule = isGrafanaRulerRule(rule.rulerRule);
const { exceedsLimit } = useMemo(
() => checkEvaluationIntervalGlobalLimit(rule.group.interval),
[rule.group.interval]
);
const exceedsLimit = useMemo(() => {
return isGrafanaManagedRule ? checkEvaluationIntervalGlobalLimit(rule.group.interval).exceedsLimit : false;
}, [rule.group.interval, isGrafanaManagedRule]);
if (!exceedsLimit) {
return null;

View File

@ -0,0 +1,69 @@
import { totalFromStats } from './RuleStats';
describe('RuleStats', () => {
it('should count 0', () => {
expect(
totalFromStats({
alerting: 0,
error: 0,
inactive: 0,
nodata: 0,
paused: 0,
pending: 0,
recording: 0,
})
).toBe(0);
});
it('should count rules', () => {
expect(
totalFromStats({
alerting: 2,
error: 0,
inactive: 0,
nodata: 0,
paused: 0,
pending: 2,
recording: 2,
})
).toBe(6);
});
it('should not count rule health as a rule', () => {
expect(
totalFromStats({
alerting: 0,
error: 1,
inactive: 1,
nodata: 0,
paused: 0,
pending: 0,
recording: 0,
})
).toBe(1);
expect(
totalFromStats({
alerting: 0,
error: 0,
inactive: 0,
nodata: 1,
paused: 0,
pending: 0,
recording: 1,
})
).toBe(1);
expect(
totalFromStats({
alerting: 0,
error: 0,
inactive: 1,
nodata: 0,
paused: 1,
pending: 0,
recording: 0,
})
).toBe(1);
});
});

View File

@ -1,4 +1,4 @@
import { isUndefined, omitBy, sum } from 'lodash';
import { isUndefined, omitBy, pick, sum } from 'lodash';
import pluralize from 'pluralize';
import React, { Fragment } from 'react';
@ -27,24 +27,12 @@ const emptyStats: Required<AlertGroupTotals> = {
};
export const RuleStats = ({ namespaces }: Props) => {
const stats = { ...emptyStats };
// sum all totals for all namespaces
namespaces.forEach(({ groups }) => {
groups.forEach((group) => {
const groupTotals = omitBy(group.totals, isUndefined);
for (let key in groupTotals) {
// @ts-ignore
stats[key] += groupTotals[key];
}
});
});
const stats = statsFromNamespaces(namespaces);
const total = totalFromStats(stats);
const statsComponents = getComponentsFromStats(stats);
const hasStats = Boolean(statsComponents.length);
const total = sum(Object.values(stats));
statsComponents.unshift(
<Fragment key="total">
{total} {pluralize('rule', total)}
@ -66,6 +54,32 @@ interface RuleGroupStatsProps {
group: CombinedRuleGroup;
}
function statsFromNamespaces(namespaces: CombinedRuleNamespace[]): AlertGroupTotals {
const stats = { ...emptyStats };
// sum all totals for all namespaces
namespaces.forEach(({ groups }) => {
groups.forEach((group) => {
const groupTotals = omitBy(group.totals, isUndefined);
for (let key in groupTotals) {
// @ts-ignore
stats[key] += groupTotals[key];
}
});
});
return stats;
}
export function totalFromStats(stats: AlertGroupTotals): number {
// countable stats pick only the states that represent a single rule; health indicators such as "error" and "nodata"
// are not counted here because those rules are already counted by their state
const countableStats = pick(stats, ['alerting', 'pending', 'inactive', 'recording']);
const total = sum(Object.values(countableStats));
return total;
}
export const RuleGroupStats = ({ group }: RuleGroupStatsProps) => {
const stats = group.totals;
const evaluationInterval = group?.interval;

View File

@ -14,15 +14,17 @@ const AlertRulesToolbarButton = React.lazy(
export function initAlerting() {
const grafanaRulesPermissions = getRulesPermissions(GRAFANA_RULES_SOURCE_NAME);
const alertingEnabled = config.unifiedAlertingEnabled;
if (contextSrv.hasPermission(grafanaRulesPermissions.read)) {
addCustomRightAction({
show: () => config.unifiedAlertingEnabled,
component: ({ dashboard }) => (
<React.Suspense fallback={null} key="alert-rules-button">
{dashboard && <AlertRulesToolbarButton dashboardUid={dashboard.uid} />}
</React.Suspense>
),
show: () => alertingEnabled,
component: ({ dashboard }) =>
alertingEnabled ? (
<React.Suspense fallback={null} key="alert-rules-button">
{dashboard && <AlertRulesToolbarButton dashboardUid={dashboard.uid} />}
</React.Suspense>
) : null,
index: -2,
});
}

View File

@ -196,6 +196,9 @@ This transformation has the following options:
- **Numeric** - attempts to make the values numbers
- **String** - will make the values strings
- **Time** - attempts to parse the values as time
  - The input is parsed according to the [Moment.js parsing format](https://momentjs.com/docs/#/parsing/)
  - Numeric input is parsed as a Unix epoch timestamp in milliseconds. If your input is in seconds, multiply it by 1000 first (see the sketch after this list).
  - Shows an option to specify a DateFormat for string input, such as yyyy-mm-dd or DD MM YYYY hh:mm:ss
- **Boolean** - will make the values booleans
- **Enum** - will make the values enums
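A small sketch of the two input shapes described in the Time bullet above; the values and variable names are illustrative and not part of the transformation code:

import moment from 'moment';

// Numeric input must be a Unix epoch in milliseconds, so second-resolution
// timestamps are scaled by 1000 before conversion.
const epochSeconds = 1718841600;
const epochMillis = epochSeconds * 1000;

// String input can instead be parsed with an explicit DateFormat, as described above.
const parsedMillis = moment('19 06 2024 12:00:00', 'DD MM YYYY hh:mm:ss').valueOf();

console.log(epochMillis, parsedMillis);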

View File

@ -3,7 +3,8 @@ import { stripIndent, stripIndents } from 'common-tags';
import Prism from 'prismjs';
import React, { useState } from 'react';
import { Collapse } from '@grafana/ui';
import { GrafanaTheme2 } from '@grafana/data';
import { Collapse, useStyles2 } from '@grafana/ui';
import { flattenTokens } from '@grafana/ui/src/slate-plugins/slate-prism';
import tokenizer from '../../language/cloudwatch-logs/syntax';
@ -350,14 +351,6 @@ function renderHighlightedMarkup(code: string, keyPrefix: string) {
return <div className="slate-query-field">{spans}</div>;
}
const exampleCategory = css`
margin-top: 5px;
`;
const link = css`
text-decoration: underline;
`;
type Props = {
onClickExample: (query: CloudWatchQuery) => void;
query: CloudWatchQuery;
@ -366,6 +359,7 @@ type Props = {
const LogsCheatSheet = (props: Props) => {
const [isCommandsOpen, setIsCommandsOpen] = useState(false);
const [isQueriesOpen, setIsQueriesOpen] = useState(false);
const styles = useStyles2(getStyles);
return (
<div>
@ -385,7 +379,7 @@ const LogsCheatSheet = (props: Props) => {
<p>{item.description}</p>
<button
type="button"
className="cheat-sheet-item__example"
className={styles.cheatSheetExample}
key={item.expr}
onClick={() =>
props.onClickExample({
@ -415,13 +409,13 @@ const LogsCheatSheet = (props: Props) => {
>
{QUERIES.map((cat, i) => (
<div key={`cat-${i}`}>
<div className={`cheat-sheet-item__title ${cx(exampleCategory)}`}>{cat.category}</div>
<div className={cx(styles.cheatSheetItemTitle, styles.exampleCategory)}>{cat.category}</div>
{cat.examples.map((item, j) => (
<div className="cheat-sheet-item" key={`item-${j}`}>
<div className={styles.cheatSheetItem} key={`item-${j}`}>
<h4>{item.title}</h4>
<button
type="button"
className="cheat-sheet-item__example"
className={styles.cheatSheetExample}
key={item.expr}
onClick={() =>
props.onClickExample({
@ -445,7 +439,7 @@ const LogsCheatSheet = (props: Props) => {
<div>
Note: If you are seeing masked data, you may have CloudWatch logs data protection enabled.{' '}
<a
className={cx(link)}
className={styles.link}
href="https://grafana.com/docs/grafana/latest/datasources/aws-cloudwatch/#cloudwatch-logs-data-protection"
target="_blank"
rel="noreferrer"
@ -459,3 +453,26 @@ const LogsCheatSheet = (props: Props) => {
};
export default LogsCheatSheet;
const getStyles = (theme: GrafanaTheme2) => ({
exampleCategory: css({
marginTop: '5px',
}),
link: css({
textDecoration: 'underline',
}),
cheatSheetItem: css({
margin: theme.spacing(3, 0),
}),
cheatSheetItemTitle: css({
fontSize: theme.typography.h3.fontSize,
}),
cheatSheetExample: css({
margin: theme.spacing(0.5, 0),
// element is interactive, clear button styles
textAlign: 'left',
border: 'none',
background: 'transparent',
display: 'block',
}),
});
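The same style migration recurs in the files below (Pyroscope, InfluxDB, Jaeger, Loki, Parca, Zipkin): deprecated cheat-sheet-item class names and css template literals give way to theme-aware object styles resolved with useStyles2. A minimal standalone sketch of that pattern; ExampleItem is a hypothetical component, not from this commit:

import { css } from '@emotion/css';
import React from 'react';

import { GrafanaTheme2 } from '@grafana/data';
import { useStyles2 } from '@grafana/ui';

// Object-style Emotion rules, resolved against the active theme via useStyles2.
const getStyles = (theme: GrafanaTheme2) => ({
  item: css({
    margin: theme.spacing(3, 0),
  }),
});

export const ExampleItem = ({ children }: { children: React.ReactNode }) => {
  const styles = useStyles2(getStyles);
  return <div className={styles.item}>{children}</div>;
};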

View File

@ -1,8 +1,26 @@
import { css } from '@emotion/css';
import React from 'react';
import { DataSourcePluginOptionsEditorProps } from '@grafana/data';
import { DataSourcePluginOptionsEditorProps, GrafanaTheme2 } from '@grafana/data';
import {
AdvancedHttpSettings,
Auth,
ConfigSection,
ConfigSubSection,
ConnectionSettings,
DataSourceDescription,
convertLegacyAuthProps,
} from '@grafana/experimental';
import { config } from '@grafana/runtime';
import { DataSourceHttpSettings, EventsWithValidation, LegacyForms, regexValidation } from '@grafana/ui';
import {
Divider,
EventsWithValidation,
LegacyForms,
SecureSocksProxySettings,
Stack,
regexValidation,
useStyles2,
} from '@grafana/ui';
import { PyroscopeDataSourceOptions } from './types';
@ -10,21 +28,43 @@ interface Props extends DataSourcePluginOptionsEditorProps<PyroscopeDataSourceOp
export const ConfigEditor = (props: Props) => {
const { options, onOptionsChange } = props;
const styles = useStyles2(getStyles);
return (
<>
<DataSourceHttpSettings
defaultUrl={'http://localhost:4040'}
dataSourceConfig={options}
showAccessOptions={false}
onChange={onOptionsChange}
secureSocksDSProxyEnabled={config.secureSocksDSProxyEnabled}
<div className={styles.container}>
<DataSourceDescription
dataSourceName="Pyroscope"
docsLink="https://grafana.com/docs/grafana/latest/datasources/pyroscope"
hasRequiredFields={false}
/>
<h3 className="page-heading">Querying</h3>
<div className="gf-form-group">
<div className="gf-form-inline">
<div className="gf-form">
<Divider spacing={4} />
<ConnectionSettings config={options} onChange={onOptionsChange} urlPlaceholder="http://localhost:4040" />
<Divider spacing={4} />
<Auth
{...convertLegacyAuthProps({
config: options,
onChange: onOptionsChange,
})}
/>
<Divider spacing={4} />
<ConfigSection
title="Additional settings"
description="Additional settings are optional settings that can be configured for more control over your data source."
isCollapsible={true}
isInitiallyOpen={false}
>
<Stack gap={5} direction="column">
<AdvancedHttpSettings config={options} onChange={onOptionsChange} />
{config.secureSocksDSProxyEnabled && (
<SecureSocksProxySettings options={options} onOptionsChange={onOptionsChange} />
)}
<ConfigSubSection title="Querying">
<LegacyForms.FormField
label="Minimal step"
labelWidth={13}
@ -55,9 +95,16 @@ export const ConfigEditor = (props: Props) => {
}
tooltip="Minimal step used for metric query. Should be the same or higher as the scrape interval setting in the Pyroscope database."
/>
</div>
</div>
</div>
</>
</ConfigSubSection>
</Stack>
</ConfigSection>
</div>
);
};
const getStyles = (theme: GrafanaTheme2) => ({
container: css({
marginBottom: theme.spacing(2),
maxWidth: '900px',
}),
});
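The config editors below (Jaeger, Parca, Zipkin) follow the same shape as this Pyroscope change: the legacy DataSourceHttpSettings block is replaced by ConnectionSettings plus Auth fed through convertLegacyAuthProps, with proxy and HTTP options moved into the collapsible "Additional settings" section. A stripped-down sketch of that skeleton; ExampleConfigEditor is hypothetical and the real editors add their own fields:

import React from 'react';

import { DataSourcePluginOptionsEditorProps } from '@grafana/data';
import { Auth, ConnectionSettings, convertLegacyAuthProps } from '@grafana/experimental';
import { Divider } from '@grafana/ui';

// Connection URL and auth settings rendered with the new @grafana/experimental components.
export const ExampleConfigEditor = ({ options, onOptionsChange }: DataSourcePluginOptionsEditorProps) => (
  <>
    <ConnectionSettings config={options} onChange={onOptionsChange} urlPlaceholder="http://localhost:4040" />
    <Divider spacing={4} />
    <Auth
      {...convertLegacyAuthProps({
        config: options,
        onChange: onOptionsChange,
      })}
    />
  </>
);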

View File

@ -1,5 +1,9 @@
import { css } from '@emotion/css';
import React from 'react';
import { GrafanaTheme2 } from '@grafana/data';
import { useStyles2 } from '@grafana/ui';
const CHEAT_SHEET_ITEMS = [
{
title: 'Getting started',
@ -8,14 +12,27 @@ const CHEAT_SHEET_ITEMS = [
},
];
export const InfluxCheatSheet = () => (
<div>
<h2>InfluxDB Cheat Sheet</h2>
{CHEAT_SHEET_ITEMS.map((item) => (
<div className="cheat-sheet-item" key={item.title}>
<div className="cheat-sheet-item__title">{item.title}</div>
<div className="cheat-sheet-item__label">{item.label}</div>
</div>
))}
</div>
);
export const InfluxCheatSheet = () => {
const styles = useStyles2(getStyles);
return (
<div>
<h2>InfluxDB Cheat Sheet</h2>
{CHEAT_SHEET_ITEMS.map((item) => (
<div className={styles.cheatSheetItem} key={item.title}>
<div className={styles.cheatSheetItemTitle}>{item.title}</div>
{item.label}
</div>
))}
</div>
);
};
const getStyles = (theme: GrafanaTheme2) => ({
cheatSheetItem: css({
margin: theme.spacing(3, 0),
}),
cheatSheetItemTitle: css({
fontSize: theme.typography.h3.fontSize,
}),
});

View File

@ -2,10 +2,17 @@ import { css } from '@emotion/css';
import React from 'react';
import { DataSourcePluginOptionsEditorProps, GrafanaTheme2 } from '@grafana/data';
import { ConfigSection, DataSourceDescription } from '@grafana/experimental';
import {
AdvancedHttpSettings,
Auth,
ConfigSection,
ConnectionSettings,
DataSourceDescription,
convertLegacyAuthProps,
} from '@grafana/experimental';
import { NodeGraphSection, SpanBarSection, TraceToLogsSection, TraceToMetricsSection } from '@grafana/o11y-ds-frontend';
import { config } from '@grafana/runtime';
import { DataSourceHttpSettings, useStyles2, Divider, Stack } from '@grafana/ui';
import { useStyles2, Divider, Stack, SecureSocksProxySettings } from '@grafana/ui';
import { TraceIdTimeParams } from './TraceIdTimeParams';
@ -24,14 +31,17 @@ export const ConfigEditor = ({ options, onOptionsChange }: Props) => {
<Divider spacing={4} />
<DataSourceHttpSettings
defaultUrl="http://localhost:16686"
dataSourceConfig={options}
showAccessOptions={false}
onChange={onOptionsChange}
secureSocksDSProxyEnabled={config.secureSocksDSProxyEnabled}
<ConnectionSettings config={options} onChange={onOptionsChange} urlPlaceholder="http://localhost:16686" />
<Divider spacing={4} />
<Auth
{...convertLegacyAuthProps({
config: options,
onChange: onOptionsChange,
})}
/>
<Divider spacing={4} />
<TraceToLogsSection options={options} onOptionsChange={onOptionsChange} />
<Divider spacing={4} />
@ -45,6 +55,12 @@ export const ConfigEditor = ({ options, onOptionsChange }: Props) => {
isInitiallyOpen={false}
>
<Stack gap={5} direction="column">
<AdvancedHttpSettings config={options} onChange={onOptionsChange} />
{config.secureSocksDSProxyEnabled && (
<SecureSocksProxySettings options={options} onOptionsChange={onOptionsChange} />
)}
<NodeGraphSection options={options} onOptionsChange={onOptionsChange} />
<SpanBarSection options={options} onOptionsChange={onOptionsChange} />
<TraceIdTimeParams options={options} onOptionsChange={onOptionsChange} />
@ -55,9 +71,8 @@ export const ConfigEditor = ({ options, onOptionsChange }: Props) => {
};
const getStyles = (theme: GrafanaTheme2) => ({
container: css`
label: container;
margin-bottom: ${theme.spacing(2)};
max-width: 900px;
`,
container: css({
marginBottom: theme.spacing(2),
maxWidth: '900px',
}),
});

View File

@ -1,8 +1,10 @@
import { css } from '@emotion/css';
import { shuffle } from 'lodash';
import React, { PureComponent } from 'react';
import { QueryEditorHelpProps } from '@grafana/data';
import { GrafanaTheme2, QueryEditorHelpProps } from '@grafana/data';
import { reportInteraction } from '@grafana/runtime';
import { Themeable2, withTheme2 } from '@grafana/ui';
import LokiLanguageProvider from '../LanguageProvider';
import { escapeLabelValueInExactSelector } from '../languageUtils';
@ -37,7 +39,10 @@ const LOGQL_EXAMPLES = [
},
];
export default class LokiCheatSheet extends PureComponent<QueryEditorHelpProps<LokiQuery>, { userExamples: string[] }> {
class UnthemedLokiCheatSheet extends PureComponent<
QueryEditorHelpProps<LokiQuery> & Themeable2,
{ userExamples: string[] }
> {
declare userLabelTimer: ReturnType<typeof setTimeout>;
state = {
userExamples: [],
@ -75,7 +80,8 @@ export default class LokiCheatSheet extends PureComponent<QueryEditorHelpProps<L
};
renderExpression(expr: string) {
const { onClickExample } = this.props;
const { onClickExample, theme } = this.props;
const styles = getStyles(theme);
const onClick = (query: LokiQuery) => {
onClickExample(query);
reportInteraction('grafana_loki_cheatsheet_example_clicked', {});
@ -84,7 +90,7 @@ export default class LokiCheatSheet extends PureComponent<QueryEditorHelpProps<L
return (
<button
type="button"
className="cheat-sheet-item__example"
className={styles.cheatSheetExample}
key={expr}
onClick={() => onClick({ refId: 'A', expr })}
>
@ -95,55 +101,72 @@ export default class LokiCheatSheet extends PureComponent<QueryEditorHelpProps<L
render() {
const { userExamples } = this.state;
const { theme } = this.props;
const hasUserExamples = userExamples.length > 0;
const styles = getStyles(theme);
return (
<div>
<h2>Loki Cheat Sheet</h2>
<div className="cheat-sheet-item">
<div className="cheat-sheet-item__title">See your logs</div>
<div className="cheat-sheet-item__label">
Start by selecting a log stream from the Label browser, or alternatively you can write a stream selector
into the query field.
</div>
<div className={styles.cheatSheetItem}>
<div className={styles.cheatSheetItemTitle}>See your logs</div>
Start by selecting a log stream from the Label browser, or alternatively you can write a stream selector into
the query field.
{hasUserExamples ? (
<div>
<div className="cheat-sheet-item__label">Here are some example streams from your logs:</div>
Here are some example streams from your logs:
{userExamples.map((example) => this.renderExpression(example))}
</div>
) : (
<div>
<div className="cheat-sheet-item__label">Here is an example of a log stream:</div>
Here is an example of a log stream:
{this.renderExpression(DEFAULT_EXAMPLES[0])}
</div>
)}
</div>
<div className="cheat-sheet-item">
<div className="cheat-sheet-item__title">Combine stream selectors</div>
<div className={styles.cheatSheetItem}>
<div className={styles.cheatSheetItemTitle}>Combine stream selectors</div>
{this.renderExpression('{app="cassandra",namespace="prod"}')}
<div className="cheat-sheet-item__label">Returns all log lines from streams that have both labels.</div>
Returns all log lines from streams that have both labels.
</div>
<div className="cheat-sheet-item">
<div className="cheat-sheet-item__title">Filtering for search terms.</div>
<div className={styles.cheatSheetItem}>
<div className={styles.cheatSheetItemTitle}>Filtering for search terms.</div>
{this.renderExpression('{app="cassandra"} |~ "(duration|latency)s*(=|is|of)s*[d.]+"')}
{this.renderExpression('{app="cassandra"} |= "exact match"')}
{this.renderExpression('{app="cassandra"} != "do not match"')}
<div className="cheat-sheet-item__label">
<a href="https://grafana.com/docs/loki/latest/logql/#log-pipeline" target="logql">
LogQL
</a>{' '}
supports exact and regular expression filters.
</div>
<a href="https://grafana.com/docs/loki/latest/logql/#log-pipeline" target="logql">
LogQL
</a>{' '}
supports exact and regular expression filters.
</div>
{LOGQL_EXAMPLES.map((item) => (
<div className="cheat-sheet-item" key={item.expression}>
<div className="cheat-sheet-item__title">{item.title}</div>
<div className={styles.cheatSheetItem} key={item.expression}>
<div className={styles.cheatSheetItemTitle}>{item.title}</div>
{this.renderExpression(item.expression)}
<div className="cheat-sheet-item__label">{item.label}</div>
{item.label}
</div>
))}
</div>
);
}
}
export default withTheme2(UnthemedLokiCheatSheet);
const getStyles = (theme: GrafanaTheme2) => ({
cheatSheetItem: css({
margin: theme.spacing(3, 0),
}),
cheatSheetItemTitle: css({
fontSize: theme.typography.h3.fontSize,
}),
cheatSheetExample: css({
margin: theme.spacing(0.5, 0),
// element is interactive, clear button styles
textAlign: 'left',
border: 'none',
background: 'transparent',
display: 'block',
}),
});

View File

@ -1,8 +1,17 @@
import { css } from '@emotion/css';
import React from 'react';
import { DataSourcePluginOptionsEditorProps } from '@grafana/data';
import { DataSourcePluginOptionsEditorProps, GrafanaTheme2 } from '@grafana/data';
import {
AdvancedHttpSettings,
Auth,
ConfigSection,
ConnectionSettings,
DataSourceDescription,
convertLegacyAuthProps,
} from '@grafana/experimental';
import { config } from '@grafana/runtime';
import { DataSourceHttpSettings } from '@grafana/ui';
import { Divider, SecureSocksProxySettings, Stack, useStyles2 } from '@grafana/ui';
import { ParcaDataSourceOptions } from './types';
@ -10,16 +19,50 @@ interface Props extends DataSourcePluginOptionsEditorProps<ParcaDataSourceOption
export const ConfigEditor = (props: Props) => {
const { options, onOptionsChange } = props;
const styles = useStyles2(getStyles);
return (
<>
<DataSourceHttpSettings
defaultUrl={'http://localhost:7070'}
dataSourceConfig={options}
showAccessOptions={false}
onChange={onOptionsChange}
secureSocksDSProxyEnabled={config.secureSocksDSProxyEnabled}
<div className={styles.container}>
<DataSourceDescription
dataSourceName="Parca"
docsLink="https://grafana.com/docs/grafana/latest/datasources/parca"
hasRequiredFields={false}
/>
</>
<Divider spacing={4} />
<ConnectionSettings config={options} onChange={onOptionsChange} urlPlaceholder="http://localhost:7070" />
<Divider spacing={4} />
<Auth
{...convertLegacyAuthProps({
config: options,
onChange: onOptionsChange,
})}
/>
<Divider spacing={4} />
<ConfigSection
title="Additional settings"
description="Additional settings are optional settings that can be configured for more control over your data source."
isCollapsible={true}
isInitiallyOpen={false}
>
<Stack gap={5} direction="column">
<AdvancedHttpSettings config={options} onChange={onOptionsChange} />
{config.secureSocksDSProxyEnabled && (
<SecureSocksProxySettings options={options} onOptionsChange={onOptionsChange} />
)}
</Stack>
</ConfigSection>
</div>
);
};
const getStyles = (theme: GrafanaTheme2) => ({
container: css({
marginBottom: theme.spacing(2),
maxWidth: '900px',
}),
});

View File

@ -70,9 +70,7 @@ export const ConfigEditor = ({ options, onOptionsChange }: Props) => {
<AdvancedHttpSettings config={options} onChange={onOptionsChange} />
{config.secureSocksDSProxyEnabled && (
<>
<SecureSocksProxySettings options={options} onOptionsChange={onOptionsChange} />
</>
<SecureSocksProxySettings options={options} onOptionsChange={onOptionsChange} />
)}
<ConfigSubSection
@ -125,7 +123,6 @@ export const ConfigEditor = ({ options, onOptionsChange }: Props) => {
const getStyles = (theme: GrafanaTheme2) => ({
container: css({
label: 'container',
marginBottom: theme.spacing(2),
maxWidth: '900px',
}),

View File

@ -2,10 +2,17 @@ import { css } from '@emotion/css';
import React from 'react';
import { DataSourcePluginOptionsEditorProps, GrafanaTheme2 } from '@grafana/data';
import { ConfigSection, DataSourceDescription } from '@grafana/experimental';
import {
AdvancedHttpSettings,
Auth,
ConfigSection,
ConnectionSettings,
DataSourceDescription,
convertLegacyAuthProps,
} from '@grafana/experimental';
import { NodeGraphSection, SpanBarSection, TraceToLogsSection, TraceToMetricsSection } from '@grafana/o11y-ds-frontend';
import { config } from '@grafana/runtime';
import { DataSourceHttpSettings, useStyles2, Divider, Stack } from '@grafana/ui';
import { useStyles2, Divider, Stack, SecureSocksProxySettings } from '@grafana/ui';
export type Props = DataSourcePluginOptionsEditorProps;
@ -22,14 +29,17 @@ export const ConfigEditor = ({ options, onOptionsChange }: Props) => {
<Divider spacing={4} />
<DataSourceHttpSettings
defaultUrl="http://localhost:9411"
dataSourceConfig={options}
showAccessOptions={false}
onChange={onOptionsChange}
secureSocksDSProxyEnabled={config.secureSocksDSProxyEnabled}
<ConnectionSettings config={options} onChange={onOptionsChange} urlPlaceholder="http://localhost:9411" />
<Divider spacing={4} />
<Auth
{...convertLegacyAuthProps({
config: options,
onChange: onOptionsChange,
})}
/>
<Divider spacing={4} />
<TraceToLogsSection options={options} onOptionsChange={onOptionsChange} />
<Divider spacing={4} />
@ -43,6 +53,12 @@ export const ConfigEditor = ({ options, onOptionsChange }: Props) => {
isInitiallyOpen={false}
>
<Stack gap={5} direction="column">
<AdvancedHttpSettings config={options} onChange={onOptionsChange} />
{config.secureSocksDSProxyEnabled && (
<SecureSocksProxySettings options={options} onOptionsChange={onOptionsChange} />
)}
<NodeGraphSection options={options} onOptionsChange={onOptionsChange} />
<SpanBarSection options={options} onOptionsChange={onOptionsChange} />
</Stack>
@ -52,9 +68,8 @@ export const ConfigEditor = ({ options, onOptionsChange }: Props) => {
};
const getStyles = (theme: GrafanaTheme2) => ({
container: css`
label: container;
margin-bottom: ${theme.spacing(2)};
max-width: 900px;
`,
container: css({
marginBottom: theme.spacing(2),
maxWidth: '900px',
}),
});

View File

@ -200,7 +200,7 @@
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/SnapshotListDTO"
"$ref": "#/components/schemas/CloudMigrationRunListDTO"
}
}
},
@ -424,6 +424,16 @@
},
"description": "(empty)"
},
"createSnapshotResponse": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/CreateSnapshotResponseDTO"
}
}
},
"description": "(empty)"
},
"createTeamResponse": {
"content": {
"application/json": {
@ -1251,6 +1261,16 @@
},
"description": "(empty)"
},
"getSnapshotResponse": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/GetSnapshotResponseDTO"
}
}
},
"description": "(empty)"
},
"getStatusResponse": {
"description": "(empty)"
},
@ -1912,6 +1932,16 @@
},
"description": "(empty)"
},
"snapshotListResponse": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/SnapshotListResponseDTO"
}
}
},
"description": "(empty)"
},
"unauthorisedError": {
"content": {
"application/json": {
@ -3683,6 +3713,17 @@
},
"type": "object"
},
"CloudMigrationRunListDTO": {
"properties": {
"runs": {
"items": {
"$ref": "#/components/schemas/MigrateDataResponseListDTO"
},
"type": "array"
}
},
"type": "object"
},
"CloudMigrationSessionListResponseDTO": {
"properties": {
"sessions": {
@ -4224,6 +4265,14 @@
},
"type": "object"
},
"CreateSnapshotResponseDTO": {
"properties": {
"uid": {
"type": "string"
}
},
"type": "object"
},
"CreateTeamCommand": {
"properties": {
"email": {
@ -5617,6 +5666,45 @@
],
"title": "Get home dashboard response."
},
"GetSnapshotResponseDTO": {
"properties": {
"created": {
"format": "date-time",
"type": "string"
},
"finished": {
"format": "date-time",
"type": "string"
},
"results": {
"items": {
"$ref": "#/components/schemas/MigrateDataResponseItemDTO"
},
"type": "array"
},
"sessionUid": {
"type": "string"
},
"status": {
"enum": [
"INITIALIZING",
"CREATING",
"PENDING_UPLOAD",
"UPLOADING",
"PENDING_PROCESSING",
"PROCESSING",
"FINISHED",
"ERROR",
"UNKNOWN"
],
"type": "string"
},
"uid": {
"type": "string"
}
},
"type": "object"
},
"GettableAlertmanagers": {
"properties": {
"data": {
@ -10331,11 +10419,45 @@
"SmtpNotEnabled": {
"$ref": "#/components/schemas/ResponseDetails"
},
"SnapshotListDTO": {
"SnapshotDTO": {
"description": "Base snapshot without results",
"properties": {
"runs": {
"created": {
"format": "date-time",
"type": "string"
},
"finished": {
"format": "date-time",
"type": "string"
},
"sessionUid": {
"type": "string"
},
"status": {
"enum": [
"INITIALIZING",
"CREATING",
"PENDING_UPLOAD",
"UPLOADING",
"PENDING_PROCESSING",
"PROCESSING",
"FINISHED",
"ERROR",
"UNKNOWN"
],
"type": "string"
},
"uid": {
"type": "string"
}
},
"type": "object"
},
"SnapshotListResponseDTO": {
"properties": {
"snapshots": {
"items": {
"$ref": "#/components/schemas/MigrateDataResponseListDTO"
"$ref": "#/components/schemas/SnapshotDTO"
},
"type": "array"
}
@ -15165,6 +15287,9 @@
"200": {
"$ref": "#/components/responses/cloudMigrationSessionResponse"
},
"400": {
"$ref": "#/components/responses/badRequestError"
},
"401": {
"$ref": "#/components/responses/unauthorisedError"
},
@ -15199,6 +15324,9 @@
"200": {
"$ref": "#/components/responses/cloudMigrationRunResponse"
},
"400": {
"$ref": "#/components/responses/badRequestError"
},
"401": {
"$ref": "#/components/responses/unauthorisedError"
},
@ -15230,6 +15358,9 @@
}
],
"responses": {
"400": {
"$ref": "#/components/responses/badRequestError"
},
"401": {
"$ref": "#/components/responses/unauthorisedError"
},
@ -15262,6 +15393,9 @@
"200": {
"$ref": "#/components/responses/cloudMigrationSessionResponse"
},
"400": {
"$ref": "#/components/responses/badRequestError"
},
"401": {
"$ref": "#/components/responses/unauthorisedError"
},
@ -15296,6 +15430,9 @@
"200": {
"$ref": "#/components/responses/cloudMigrationRunListResponse"
},
"400": {
"$ref": "#/components/responses/badRequestError"
},
"401": {
"$ref": "#/components/responses/unauthorisedError"
},
@ -15329,6 +15466,9 @@
"200": {
"$ref": "#/components/responses/cloudMigrationRunResponse"
},
"400": {
"$ref": "#/components/responses/badRequestError"
},
"401": {
"$ref": "#/components/responses/unauthorisedError"
},
@ -15345,6 +15485,240 @@
]
}
},
"/cloudmigration/migration/{uid}/snapshot": {
"post": {
"description": "If the snapshot initialization is successful, the snapshot uid is returned.",
"operationId": "createSnapshot",
"parameters": [
{
"description": "UID of a session",
"in": "path",
"name": "uid",
"required": true,
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"$ref": "#/components/responses/createSnapshotResponse"
},
"400": {
"$ref": "#/components/responses/badRequestError"
},
"401": {
"$ref": "#/components/responses/unauthorisedError"
},
"403": {
"$ref": "#/components/responses/forbiddenError"
},
"500": {
"$ref": "#/components/responses/internalServerError"
}
},
"summary": "Trigger the creation of an instance snapshot associated with the provided session.",
"tags": [
"migrations"
]
}
},
"/cloudmigration/migration/{uid}/snapshot/{snapshotUid}": {
"get": {
"operationId": "getSnapshot",
"parameters": [
{
"description": "Session UID of a session",
"in": "path",
"name": "uid",
"required": true,
"schema": {
"type": "string"
}
},
{
"description": "UID of a snapshot",
"in": "path",
"name": "snapshotUid",
"required": true,
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"$ref": "#/components/responses/getSnapshotResponse"
},
"400": {
"$ref": "#/components/responses/badRequestError"
},
"401": {
"$ref": "#/components/responses/unauthorisedError"
},
"403": {
"$ref": "#/components/responses/forbiddenError"
},
"500": {
"$ref": "#/components/responses/internalServerError"
}
},
"summary": "Get metadata about a snapshot, including where it is in its processing and final results.",
"tags": [
"migrations"
]
}
},
"/cloudmigration/migration/{uid}/snapshot/{snapshotUid}/cancel": {
"post": {
"description": "TODO: Implement",
"operationId": "cancelSnapshot",
"parameters": [
{
"description": "Session UID of a session",
"in": "path",
"name": "uid",
"required": true,
"schema": {
"type": "string"
}
},
{
"description": "UID of a snapshot",
"in": "path",
"name": "snapshotUid",
"required": true,
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"description": "(empty)"
},
"400": {
"$ref": "#/components/responses/badRequestError"
},
"401": {
"$ref": "#/components/responses/unauthorisedError"
},
"403": {
"$ref": "#/components/responses/forbiddenError"
},
"500": {
"$ref": "#/components/responses/internalServerError"
}
},
"summary": "Cancel a snapshot, wherever it is in its processing chain.",
"tags": [
"migrations"
]
}
},
"/cloudmigration/migration/{uid}/snapshot/{snapshotUid}/upload": {
"post": {
"operationId": "uploadSnapshot",
"parameters": [
{
"description": "Session UID of a session",
"in": "path",
"name": "uid",
"required": true,
"schema": {
"type": "string"
}
},
{
"description": "UID of a snapshot",
"in": "path",
"name": "snapshotUid",
"required": true,
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"description": "(empty)"
},
"400": {
"$ref": "#/components/responses/badRequestError"
},
"401": {
"$ref": "#/components/responses/unauthorisedError"
},
"403": {
"$ref": "#/components/responses/forbiddenError"
},
"500": {
"$ref": "#/components/responses/internalServerError"
}
},
"summary": "Upload a snapshot to the Grafana Migration Service for processing.",
"tags": [
"migrations"
]
}
},
"/cloudmigration/migration/{uid}/snapshots": {
"get": {
"operationId": "getShapshotList",
"parameters": [
{
"description": "Offset is used for pagination with limit",
"in": "query",
"name": "offset",
"schema": {
"default": 0,
"format": "int64",
"type": "integer"
}
},
{
"description": "Max limit for results returned.",
"in": "query",
"name": "limit",
"schema": {
"default": 100,
"format": "int64",
"type": "integer"
}
},
{
"description": "Session UID of a session",
"in": "path",
"name": "uid",
"required": true,
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"$ref": "#/components/responses/snapshotListResponse"
},
"400": {
"$ref": "#/components/responses/badRequestError"
},
"401": {
"$ref": "#/components/responses/unauthorisedError"
},
"403": {
"$ref": "#/components/responses/forbiddenError"
},
"500": {
"$ref": "#/components/responses/internalServerError"
}
},
"summary": "Get a list of snapshots for a session.",
"tags": [
"migrations"
]
}
},
"/cloudmigration/token": {
"get": {
"operationId": "getCloudMigrationToken",
@ -15410,6 +15784,9 @@
"204": {
"$ref": "#/components/responses/cloudMigrationDeleteTokenResponse"
},
"400": {
"$ref": "#/components/responses/badRequestError"
},
"401": {
"$ref": "#/components/responses/unauthorisedError"
},
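The spec changes above add a snapshot lifecycle (create, inspect, upload, cancel, list). A rough client-side sketch of that flow, using only the paths, response fields, and status values defined in the spec; base URL, auth headers, polling, and error handling are omitted:

// baseUrl and sessionUid are placeholders supplied by the caller.
async function createAndUploadSnapshot(baseUrl: string, sessionUid: string): Promise<void> {
  // POST /cloudmigration/migration/{uid}/snapshot -> CreateSnapshotResponseDTO { uid }
  const createRes = await fetch(`${baseUrl}/cloudmigration/migration/${sessionUid}/snapshot`, { method: 'POST' });
  const { uid: snapshotUid } = await createRes.json();

  // GET /cloudmigration/migration/{uid}/snapshot/{snapshotUid} -> GetSnapshotResponseDTO (status, results)
  const statusRes = await fetch(`${baseUrl}/cloudmigration/migration/${sessionUid}/snapshot/${snapshotUid}`);
  const snapshot = await statusRes.json();

  // POST .../snapshot/{snapshotUid}/upload once the snapshot reports PENDING_UPLOAD
  if (snapshot.status === 'PENDING_UPLOAD') {
    await fetch(`${baseUrl}/cloudmigration/migration/${sessionUid}/snapshot/${snapshotUid}/upload`, {
      method: 'POST',
    });
  }
}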

View File

@ -198,6 +198,7 @@ $doc-font-size: $font-size-sm;
.graph-legend {
display: flex;
flex: 0 1 auto;
flex-wrap: wrap;
max-height: 35%;
margin: 0;
text-align: center;
@ -1846,3 +1847,20 @@ $easing: cubic-bezier(0, 0, 0.265, 1);
line-height: calc(3em / 4);
vertical-align: -15%;
}
.cheat-sheet-item {
margin: $space-lg 0;
}
.cheat-sheet-item__title {
font-size: $font-size-h3;
}
.cheat-sheet-item__example {
margin: $space-xs 0;
// element is interactive, clear button styles
text-align: left;
border: none;
background: transparent;
display: block;
}

View File

@ -8,7 +8,6 @@
@import 'mixins/forms';
// BASE
@import 'base/normalize';
@import 'base/reboot';
@import 'base/type';
@import 'base/forms';
@ -31,7 +30,6 @@
@import 'components/infobox';
@import 'components/query_editor';
@import 'components/query_part';
@import 'components/json_explorer';
@import 'components/dashboard_grid';
@import 'components/add_data_source';
@import 'components/panel_header';
@ -40,7 +38,6 @@
@import 'pages/dashboard';
@import 'pages/alerting';
@import 'pages/history';
@import 'pages/explore';
// ANGULAR
@import 'angular';

View File

@ -1,424 +0,0 @@
/*! normalize.css commit fe56763 | MIT License | github.com/necolas/normalize.css */
//
// 1. Set default font family to sans-serif.
// 2. Prevent iOS and IE text size adjust after device orientation change,
// without disabling user zoom.
//
html {
font-family: sans-serif; // 1
-ms-text-size-adjust: 100%; // 2
-webkit-text-size-adjust: 100%; // 2
}
//
// Remove default margin.
//
body {
margin: 0;
}
// HTML5 display definitions
// ==========================================================================
//
// Correct `block` display not defined for any HTML5 element in IE 8/9.
// Correct `block` display not defined for `details` or `summary` in IE 10/11
// and Firefox.
// Correct `block` display not defined for `main` in IE 11.
//
article,
aside,
details,
figcaption,
figure,
footer,
header,
main,
menu,
nav,
section {
display: block;
}
//
// 1. Correct `inline-block` display not defined in IE 8/9.
// 2. Normalize vertical alignment of `progress` in Chrome, Firefox, and Opera.
//
audio,
canvas,
progress,
video {
display: inline-block; // 1
vertical-align: baseline; // 2
}
//
// Prevent modern browsers from displaying `audio` without controls.
// Remove excess height in iOS 5 devices.
//
audio:not([controls]) {
display: none;
height: 0;
}
//
// Address `[hidden]` styling not present in IE 8/9/10.
// Hide the `template` element in IE 8/9/10/11, Safari, and Firefox < 22.
//
[hidden],
template {
display: none;
}
// Links
// ==========================================================================
//
// Remove the gray background color from active links in IE 10.
//
a {
background-color: transparent;
}
//
// Improve readability of focused elements when they are also in an
// active/hover state.
//
a {
&:active {
outline: 0;
}
&:hover {
outline: 0;
}
}
// Text-level semantics
// ==========================================================================
//
// Address styling not present in IE 8/9/10/11, Safari, and Chrome.
//
abbr[title] {
border-bottom: 1px dotted;
}
//
// Address style set to `bolder` in Firefox 4+, Safari, and Chrome.
//
b,
strong {
font-weight: bold;
}
//
// Address styling not present in Safari and Chrome.
//
dfn {
font-style: italic;
}
//
// Address variable `h1` font-size and margin within `section` and `article`
// contexts in Firefox 4+, Safari, and Chrome.
//
h1 {
font-size: 2em;
margin: 0.67em 0;
}
//
// Address styling not present in IE 8/9.
//
mark {
background: #ff0;
color: #000;
}
//
// Address inconsistent and variable font size in all browsers.
//
small {
font-size: 80%;
}
//
// Prevent `sub` and `sup` affecting `line-height` in all browsers.
//
sub,
sup {
font-size: 75%;
line-height: 0;
position: relative;
vertical-align: baseline;
}
sup {
top: -0.5em;
}
sub {
bottom: -0.25em;
}
// Embedded content
// ==========================================================================
//
// Remove border when inside `a` element in IE 8/9/10.
//
img {
border: 0;
}
//
// Correct overflow not hidden in IE 9/10/11.
//
svg:not(:root) {
overflow: hidden;
}
// Grouping content
// ==========================================================================
//
// Address margin not present in IE 8/9 and Safari.
//
figure {
margin: 1em 40px;
}
//
// Address differences between Firefox and other browsers.
//
hr {
box-sizing: content-box;
height: 0;
}
//
// Contain overflow in all browsers.
//
pre {
overflow: auto;
}
//
// Address odd `em`-unit font size rendering in all browsers.
//
code,
kbd,
pre,
samp {
font-family: monospace, monospace;
font-size: 1em;
}
// Forms
// ==========================================================================
//
// Known limitation: by default, Chrome and Safari on OS X allow very limited
// styling of `select`, unless a `border` property is set.
//
//
// 1. Correct color not being inherited.
// Known issue: affects color of disabled elements.
// 2. Correct font properties not being inherited.
// 3. Address margins set differently in Firefox 4+, Safari, and Chrome.
//
button,
input,
optgroup,
select,
textarea {
color: inherit; // 1
font: inherit; // 2
margin: 0; // 3
}
//
// Address `overflow` set to `hidden` in IE 8/9/10/11.
//
button {
overflow: visible;
}
//
// Address inconsistent `text-transform` inheritance for `button` and `select`.
// All other form control elements do not inherit `text-transform` values.
// Correct `button` style inheritance in Firefox, IE 8/9/10/11, and Opera.
// Correct `select` style inheritance in Firefox.
//
button,
select {
text-transform: none;
}
//
// 1. Avoid the WebKit bug in Android 4.0.* where (2) destroys native `audio`
// and `video` controls.
// 2. Correct inability to style clickable `input` types in iOS.
// 3. Improve usability and consistency of cursor style between image-type
// `input` and others.
//
button,
html input[type='button'], // 1
input[type='reset'],
input[type='submit'] {
-webkit-appearance: button; // 2
cursor: pointer; // 3
}
//
// Re-set default cursor for disabled elements.
//
button[disabled],
html input[disabled] {
cursor: default;
}
//
// Remove inner padding and border in Firefox 4+.
//
button::-moz-focus-inner,
input::-moz-focus-inner {
border: 0;
padding: 0;
}
//
// Address Firefox 4+ setting `line-height` on `input` using `!important` in
// the UA stylesheet.
//
input {
line-height: normal;
}
//
// It's recommended that you don't attempt to style these elements.
// Firefox's implementation doesn't respect box-sizing, padding, or width.
//
// 1. Address box sizing set to `content-box` in IE 8/9/10.
// 2. Remove excess padding in IE 8/9/10.
//
input[type='checkbox'],
input[type='radio'] {
box-sizing: border-box; // 1
padding: 0; // 2
}
//
// Fix the cursor style for Chrome's increment/decrement buttons. For certain
// `font-size` values of the `input`, it causes the cursor style of the
// decrement button to change from `default` to `text`.
//
input[type='number']::-webkit-inner-spin-button,
input[type='number']::-webkit-outer-spin-button {
height: auto;
}
//
// Address `appearance` set to `searchfield` in Safari and Chrome.
//
input[type='search'] {
-webkit-appearance: textfield;
}
//
// Remove inner padding and search cancel button in Safari and Chrome on OS X.
// Safari (but not Chrome) clips the cancel button when the search input has
// padding (and `textfield` appearance).
//
input[type='search']::-webkit-search-cancel-button,
input[type='search']::-webkit-search-decoration {
-webkit-appearance: none;
}
//
// Define consistent border, margin, and padding.
//
fieldset {
border: 1px solid #c0c0c0;
margin: 0 2px;
padding: 0.35em 0.625em 0.75em;
}
//
// 1. Correct `color` not being inherited in IE 8/9/10/11.
// 2. Remove padding so people aren't caught out if they zero out fieldsets.
//
legend {
border: 0; // 1
padding: 0; // 2
}
//
// Remove default vertical scrollbar in IE 8/9/10/11.
//
textarea {
overflow: auto;
}
//
// Don't inherit the `font-weight` (applied by a rule above).
// NOTE: the default cannot safely be changed in Chrome and Safari on OS X.
//
optgroup {
font-weight: bold;
}
// Tables
// ==========================================================================
//
// Remove most spacing between table cells.
//
table {
border-collapse: collapse;
border-spacing: 0;
}
td,
th {
padding: 0;
}

View File

@ -292,7 +292,7 @@ legend {
margin-bottom: $space-sm;
font-size: $space-lg;
line-height: inherit;
// border: 0;
border: 0;
}
input[type='search'] {

View File

@ -1,122 +0,0 @@
.json-formatter-row {
font-family: monospace;
&,
a,
a:hover {
color: $json-explorer-default-color;
text-decoration: none;
}
.json-formatter-row {
margin-left: $space-md;
}
.json-formatter-children {
&.json-formatter-empty {
opacity: 0.5;
margin-left: $space-md;
&::after {
display: none;
}
&.json-formatter-object::after {
content: 'No properties';
}
&.json-formatter-array::after {
content: '[]';
}
}
}
.json-formatter-string {
color: $json-explorer-string-color;
white-space: pre-wrap;
word-wrap: break-word;
word-break: break-all;
}
.json-formatter-number {
color: $json-explorer-number-color;
}
.json-formatter-boolean {
color: $json-explorer-boolean-color;
}
.json-formatter-null {
color: $json-explorer-null-color;
}
.json-formatter-undefined {
color: $json-explorer-undefined-color;
}
.json-formatter-function {
color: $json-explorer-function-color;
}
.json-formatter-date {
background-color: fade($json-explorer-default-color, 5%);
}
.json-formatter-url {
text-decoration: underline;
color: $json-explorer-url-color;
cursor: pointer;
}
.json-formatter-bracket {
color: $json-explorer-bracket-color;
}
.json-formatter-key {
color: $json-explorer-key-color;
cursor: pointer;
padding-right: $space-xxs;
margin-right: 4px;
}
.json-formatter-constructor-name {
cursor: pointer;
}
.json-formatter-array-comma {
margin-right: 4px;
}
.json-formatter-toggler {
line-height: 16px;
font-size: $font-size-xs;
vertical-align: middle;
opacity: $json-explorer-toggler-opacity;
cursor: pointer;
padding-right: $space-xxs;
&::after {
display: inline-block;
transition: transform $json-explorer-rotate-time ease-in;
content: '';
}
}
// Inline preview on hover (optional)
> a > .json-formatter-preview-text {
opacity: 0;
transition: opacity 0.15s ease-in;
font-style: italic;
}
&:hover > a > .json-formatter-preview-text {
opacity: 0.6;
}
// Open state
&.json-formatter-open {
> .json-formatter-toggler-link .json-formatter-toggler::after {
transform: rotate(90deg);
}
> .json-formatter-children::after {
display: inline-block;
}
> a > .json-formatter-preview-text {
display: none;
}
&.json-formatter-empty::after {
display: block;
}
}
}

View File

@ -1,155 +0,0 @@
// TODO: this is used in Loki & Prometheus, move it
.explore-input-margin {
margin-right: 4px;
}
.graph-legend {
flex-wrap: wrap;
}
// TODO: move to Loki and Prometheus
.query-row-break {
flex-basis: 100%;
}
// TODO: Prometheus-specifics, to be extracted to datasource soon
.explore {
.prom-query-field-info {
margin: 0.25em 0.5em 0.5em;
display: flex;
details {
margin-left: 1em;
}
}
}
// ReactTable basic overrides (does not include pivot/groups/filters)
// When integrating ReactTable as new panel plugin, move to _panel_table.scss
.ReactTable {
border: none;
}
.ReactTable .rt-table {
// Allow some space for the no-data text
min-height: 90px;
}
.ReactTable .rt-thead.-header {
box-shadow: none;
background: $list-item-bg;
border-top: 2px solid $body-bg;
border-bottom: 2px solid $body-bg;
height: 2em;
}
.ReactTable .rt-thead.-header .rt-th {
text-align: left;
color: $blue;
font-weight: $font-weight-semi-bold;
}
.ReactTable .rt-thead .rt-td,
.ReactTable .rt-thead .rt-th {
padding: 0.45em 0 0.45em 1.1em;
border-right: none;
box-shadow: none;
}
.ReactTable .rt-tbody .rt-td {
padding: 0.45em 0 0.45em 1.1em;
border-bottom: 2px solid $body-bg;
border-right: 2px solid $body-bg;
}
.ReactTable .rt-tbody .rt-td:last-child {
border-right: none;
}
.ReactTable .-pagination {
border-top: none;
box-shadow: none;
margin-top: $space-sm;
}
.ReactTable .-pagination .-btn {
color: $blue;
background: $list-item-bg;
}
.ReactTable .-pagination input,
.ReactTable .-pagination select {
color: $input-color;
background-color: $input-bg;
}
.ReactTable .-loading {
background: $input-bg;
}
.ReactTable .-loading.-active {
opacity: 0.8;
}
.ReactTable .-loading > div {
color: $input-color;
}
.ReactTable .rt-tr .rt-td:last-child {
text-align: right;
}
.ReactTable .rt-noData {
top: 60px;
z-index: inherit;
}
// React-component cascade fix: show "loading" when loading children
.rc-cascader-menu-item-loading:after {
position: absolute;
right: 12px;
content: 'loading';
color: #767980;
font-style: italic;
}
// React-component cascade fix: vertical alignment issue with Safari
.rc-cascader-menu {
vertical-align: top;
// To fix cascader button width issue in windows + firefox
scrollbar-width: thin;
}
// TODO Experimental
.cheat-sheet-item {
margin: $space-lg 0;
}
.cheat-sheet-item__title {
font-size: $font-size-h3;
}
.cheat-sheet-item__example {
margin: $space-xs 0;
// element is interactive, clear button styles
text-align: left;
border: none;
background: transparent;
display: block;
}
.query-type-toggle {
margin-left: 5px;
.btn.active {
background-color: $input-bg;
background-image: none;
background-clip: padding-box;
border: $input-border;
border-radius: $input-border-radius;
@include box-shadow($input-box-shadow);
color: $input-color;
}
}