From ce2209585c1e1e885e1f02c29d004e8abde61c84 Mon Sep 17 00:00:00 2001 From: corpglory-dev Date: Fri, 1 Feb 2019 14:32:40 +0300 Subject: [PATCH 01/38] Remove version.ts --- .../config_ctrl.ts | 2 +- .../version.test.ts | 53 ------------------- .../version.ts | 34 ------------ 3 files changed, 1 insertion(+), 88 deletions(-) delete mode 100644 public/app/plugins/datasource/grafana-azure-monitor-datasource/version.test.ts delete mode 100644 public/app/plugins/datasource/grafana-azure-monitor-datasource/version.ts diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/config_ctrl.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/config_ctrl.ts index 98fe5a87a56..4ee5c94fad6 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/config_ctrl.ts +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/config_ctrl.ts @@ -1,6 +1,6 @@ import AzureLogAnalyticsDatasource from './azure_log_analytics/azure_log_analytics_datasource'; import config from 'app/core/config'; -import { isVersionGtOrEq } from './version'; +import { isVersionGtOrEq } from 'app/core/utils/version'; export class AzureMonitorConfigCtrl { static templateUrl = 'public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/config.html'; diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/version.test.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/version.test.ts deleted file mode 100644 index 17a6ce9bb0b..00000000000 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/version.test.ts +++ /dev/null @@ -1,53 +0,0 @@ -import { SemVersion, isVersionGtOrEq } from './version'; - -describe('SemVersion', () => { - let version = '1.0.0-alpha.1'; - - describe('parsing', () => { - it('should parse version properly', () => { - const semver = new SemVersion(version); - expect(semver.major).toBe(1); - expect(semver.minor).toBe(0); - expect(semver.patch).toBe(0); - expect(semver.meta).toBe('alpha.1'); - }); - }); - - describe('comparing', () => { - beforeEach(() => { - version = '3.4.5'; - }); - - it('should detect greater version properly', () => { - const semver = new SemVersion(version); - const cases = [ - { value: '3.4.5', expected: true }, - { value: '3.4.4', expected: true }, - { value: '3.4.6', expected: false }, - { value: '4', expected: false }, - { value: '3.5', expected: false }, - ]; - cases.forEach(testCase => { - expect(semver.isGtOrEq(testCase.value)).toBe(testCase.expected); - }); - }); - }); - - describe('isVersionGtOrEq', () => { - it('should compare versions properly (a >= b)', () => { - const cases = [ - { values: ['3.4.5', '3.4.5'], expected: true }, - { values: ['3.4.5', '3.4.4'], expected: true }, - { values: ['3.4.5', '3.4.6'], expected: false }, - { values: ['3.4', '3.4.0'], expected: true }, - { values: ['3', '3.0.0'], expected: true }, - { values: ['3.1.1-beta1', '3.1'], expected: true }, - { values: ['3.4.5', '4'], expected: false }, - { values: ['3.4.5', '3.5'], expected: false }, - ]; - cases.forEach(testCase => { - expect(isVersionGtOrEq(testCase.values[0], testCase.values[1])).toBe(testCase.expected); - }); - }); - }); -}); diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/version.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/version.ts deleted file mode 100644 index 1131e1d2ab8..00000000000 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/version.ts +++ /dev/null @@ -1,34 +0,0 @@ -import _ from 'lodash'; - 
-const versionPattern = /^(\d+)(?:\.(\d+))?(?:\.(\d+))?(?:-([0-9A-Za-z\.]+))?/; - -export class SemVersion { - major: number; - minor: number; - patch: number; - meta: string; - - constructor(version: string) { - const match = versionPattern.exec(version); - if (match) { - this.major = Number(match[1]); - this.minor = Number(match[2] || 0); - this.patch = Number(match[3] || 0); - this.meta = match[4]; - } - } - - isGtOrEq(version: string): boolean { - const compared = new SemVersion(version); - return !(this.major < compared.major || this.minor < compared.minor || this.patch < compared.patch); - } - - isValid(): boolean { - return _.isNumber(this.major); - } -} - -export function isVersionGtOrEq(a: string, b: string): boolean { - const aSemver = new SemVersion(a); - return aSemver.isGtOrEq(b); -} From cf60ae79c31d6c0745b47e77e62e2d0605a19b73 Mon Sep 17 00:00:00 2001 From: corpglory-dev Date: Fri, 1 Feb 2019 14:47:17 +0300 Subject: [PATCH 02/38] Move prism to app/features/explore --- .../editor => features/explore}/slate-plugins/prism/index.tsx | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename public/app/{plugins/datasource/grafana-azure-monitor-datasource/editor => features/explore}/slate-plugins/prism/index.tsx (100%) diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/slate-plugins/prism/index.tsx b/public/app/features/explore/slate-plugins/prism/index.tsx similarity index 100% rename from public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/slate-plugins/prism/index.tsx rename to public/app/features/explore/slate-plugins/prism/index.tsx From bdd59de877f677d8831f64d438a146408097faed Mon Sep 17 00:00:00 2001 From: corpglory-dev Date: Fri, 1 Feb 2019 14:47:33 +0300 Subject: [PATCH 03/38] Remove newline && runner plugins --- .../editor/slate-plugins/newline.ts | 35 ------------------- .../editor/slate-plugins/runner.ts | 14 -------- 2 files changed, 49 deletions(-) delete mode 100644 public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/slate-plugins/newline.ts delete mode 100644 public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/slate-plugins/runner.ts diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/slate-plugins/newline.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/slate-plugins/newline.ts deleted file mode 100644 index d484d93a542..00000000000 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/slate-plugins/newline.ts +++ /dev/null @@ -1,35 +0,0 @@ -function getIndent(text) { - let offset = text.length - text.trimLeft().length; - if (offset) { - let indent = text[0]; - while (--offset) { - indent += text[0]; - } - return indent; - } - return ''; -} - -export default function NewlinePlugin() { - return { - onKeyDown(event, change) { - const { value } = change; - if (!value.isCollapsed) { - return undefined; - } - - if (event.key === 'Enter' && !event.shiftKey) { - event.preventDefault(); - - const { startBlock } = value; - const currentLineText = startBlock.text; - const indent = getIndent(currentLineText); - - return change - .splitBlock() - .insertText(indent) - .focus(); - } - }, - }; -} diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/slate-plugins/runner.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/slate-plugins/runner.ts deleted file mode 100644 index 068bd9f0ad1..00000000000 --- 
a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/slate-plugins/runner.ts +++ /dev/null @@ -1,14 +0,0 @@ -export default function RunnerPlugin({ handler }) { - return { - onKeyDown(event) { - // Handle enter - if (handler && event.key === 'Enter' && event.shiftKey) { - // Submit on Enter - event.preventDefault(); - handler(event); - return true; - } - return undefined; - }, - }; -} From 9a3f4def98fcb89855ec278c924f1897d1b7357e Mon Sep 17 00:00:00 2001 From: corpglory-dev Date: Fri, 1 Feb 2019 14:49:04 +0300 Subject: [PATCH 04/38] Use slate-plugins from app/features/explore --- .../editor/query_field.tsx | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/query_field.tsx b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/query_field.tsx index 1c883a40c31..f93912f069e 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/query_field.tsx +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/query_field.tsx @@ -1,12 +1,9 @@ -import PluginPrism from './slate-plugins/prism'; -// import PluginPrism from 'slate-prism'; -// import Prism from 'prismjs'; +import PluginPrism from 'app/features/explore/slate-plugins/prism'; import BracesPlugin from 'app/features/explore/slate-plugins/braces'; import ClearPlugin from 'app/features/explore/slate-plugins/clear'; -// Custom plugins (new line on Enter and run on Shift+Enter) -import NewlinePlugin from './slate-plugins/newline'; -import RunnerPlugin from './slate-plugins/runner'; +import NewlinePlugin from 'app/features/explore/slate-plugins/newline'; +import RunnerPlugin from 'app/features/explore/slate-plugins/runner'; import Typeahead from './typeahead'; From 6d03766acecab8914d344a929708a22470838a43 Mon Sep 17 00:00:00 2001 From: corpglory-dev Date: Fri, 1 Feb 2019 14:54:38 +0300 Subject: [PATCH 05/38] Remove extra newline --- .../grafana-azure-monitor-datasource/editor/query_field.tsx | 1 - 1 file changed, 1 deletion(-) diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/query_field.tsx b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/query_field.tsx index f93912f069e..400126f7e55 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/query_field.tsx +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/query_field.tsx @@ -1,5 +1,4 @@ import PluginPrism from 'app/features/explore/slate-plugins/prism'; - import BracesPlugin from 'app/features/explore/slate-plugins/braces'; import ClearPlugin from 'app/features/explore/slate-plugins/clear'; import NewlinePlugin from 'app/features/explore/slate-plugins/newline'; From f2d2712a9547ac8bcd6ef8b69f85d41d2d19dd51 Mon Sep 17 00:00:00 2001 From: Alexander Zobnin Date: Wed, 30 Jan 2019 14:21:51 +0300 Subject: [PATCH 06/38] azuremonitor: add more builtin functions and operators --- .../editor/KustoQueryField.tsx | 26 +- .../editor/editor_component.tsx | 2 +- .../editor/kusto.ts | 114 ------ .../editor/kusto/kusto.ts | 355 ++++++++++++++++++ 4 files changed, 379 insertions(+), 118 deletions(-) delete mode 100644 public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/kusto.ts create mode 100644 public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/kusto/kusto.ts diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx 
b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx index 849cf62efe0..fa79d4bdb99 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx @@ -6,7 +6,7 @@ import QueryField from './query_field'; import debounce from 'app/features/explore/utils/debounce'; import { getNextCharacter } from 'app/features/explore/utils/dom'; -import { FUNCTIONS, KEYWORDS } from './kusto'; +import { KEYWORDS, functionTokens, operatorTokens, grafanaMacros } from './kusto/kusto'; // import '../sass/editor.base.scss'; @@ -260,10 +260,20 @@ export default class KustoQueryField extends QueryField { label: 'Keywords', items: KEYWORDS.map(wrapText) }, + { + prefixMatch: true, + label: 'Operators', + items: operatorTokens.map((s: any) => { s.type = 'function'; return s; }) + }, { prefixMatch: true, label: 'Functions', - items: FUNCTIONS.map((s: any) => { s.type = 'function'; return s; }) + items: functionTokens.map((s: any) => { s.type = 'function'; return s; }) + }, + { + prefixMatch: true, + label: 'Macros', + items: grafanaMacros.map((s: any) => { s.type = 'function'; return s; }) } ]; } @@ -276,10 +286,20 @@ export default class KustoQueryField extends QueryField { label: 'Keywords', items: KEYWORDS.map(wrapText) }, + { + prefixMatch: true, + label: 'Operators', + items: operatorTokens.map((s: any) => { s.type = 'function'; return s; }) + }, { prefixMatch: true, label: 'Functions', - items: FUNCTIONS.map((s: any) => { s.type = 'function'; return s; }) + items: functionTokens.map((s: any) => { s.type = 'function'; return s; }) + }, + { + prefixMatch: true, + label: 'Macros', + items: grafanaMacros.map((s: any) => { s.type = 'function'; return s; }) } ]; } diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/editor_component.tsx b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/editor_component.tsx index da7db58567f..59e4ab12c81 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/editor_component.tsx +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/editor_component.tsx @@ -1,5 +1,5 @@ import KustoQueryField from './KustoQueryField'; -import Kusto from './kusto'; +import Kusto from './kusto/kusto'; import React, { Component } from 'react'; import coreModule from 'app/core/core_module'; diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/kusto.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/kusto.ts deleted file mode 100644 index 647ebb8024a..00000000000 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/kusto.ts +++ /dev/null @@ -1,114 +0,0 @@ -export const FUNCTIONS = [ - { text: 'countof', display: 'countof()', hint: '' }, - { text: 'bin', display: 'bin()', hint: '' }, - { text: 'extentid', display: 'extentid()', hint: '' }, - { text: 'extract', display: 'extract()', hint: '' }, - { text: 'extractjson', display: 'extractjson()', hint: '' }, - { text: 'floor', display: 'floor()', hint: '' }, - { text: 'iif', display: 'iif()', hint: '' }, - { text: 'isnull', display: 'isnull()', hint: '' }, - { text: 'isnotnull', display: 'isnotnull()', hint: '' }, - { text: 'notnull', display: 'notnull()', hint: '' }, - { text: 'isempty', display: 'isempty()', hint: '' }, - { text: 'isnotempty', display: 'isnotempty()', hint: '' }, - { text: 'notempty', display: 
'notempty()', hint: '' }, - { text: 'now', display: 'now()', hint: '' }, - { text: 're2', display: 're2()', hint: '' }, - { text: 'strcat', display: 'strcat()', hint: '' }, - { text: 'strlen', display: 'strlen()', hint: '' }, - { text: 'toupper', display: 'toupper()', hint: '' }, - { text: 'tostring', display: 'tostring()', hint: '' }, - { text: 'count', display: 'count()', hint: '' }, - { text: 'cnt', display: 'cnt()', hint: '' }, - { text: 'sum', display: 'sum()', hint: '' }, - { text: 'min', display: 'min()', hint: '' }, - { text: 'max', display: 'max()', hint: '' }, - { text: 'avg', display: 'avg()', hint: '' }, - { - text: '$__timeFilter', - display: '$__timeFilter()', - hint: 'Macro that uses the selected timerange in Grafana to filter the query.', - }, - { - text: '$__escapeMulti', - display: '$__escapeMulti()', - hint: 'Macro to escape multi-value template variables that contain illegal characters.', - }, - { text: '$__contains', display: '$__contains()', hint: 'Macro for multi-value template variables.' }, -]; - -export const KEYWORDS = [ - 'by', - 'on', - 'contains', - 'notcontains', - 'containscs', - 'notcontainscs', - 'startswith', - 'has', - 'matches', - 'regex', - 'true', - 'false', - 'and', - 'or', - 'typeof', - 'int', - 'string', - 'date', - 'datetime', - 'time', - 'long', - 'real', - '​boolean', - 'bool', - // add some more keywords - 'where', - 'order', -]; - -// Kusto operators -// export const OPERATORS = ['+', '-', '*', '/', '>', '<', '==', '<>', '<=', '>=', '~', '!~']; - -export const DURATION = ['SECONDS', 'MINUTES', 'HOURS', 'DAYS', 'WEEKS', 'MONTHS', 'YEARS']; - -const tokenizer = { - comment: { - pattern: /(^|[^\\:])\/\/.*/, - lookbehind: true, - greedy: true, - }, - 'function-context': { - pattern: /[a-z0-9_]+\([^)]*\)?/i, - inside: {}, - }, - duration: { - pattern: new RegExp(`${DURATION.join('?|')}?`, 'i'), - alias: 'number', - }, - builtin: new RegExp(`\\b(?:${FUNCTIONS.map(f => f.text).join('|')})(?=\\s*\\()`, 'i'), - string: { - pattern: /(["'])(?:\\(?:\r\n|[\s\S])|(?!\1)[^\\\r\n])*\1/, - greedy: true, - }, - keyword: new RegExp(`\\b(?:${KEYWORDS.join('|')}|\\*)\\b`, 'i'), - boolean: /\b(?:true|false)\b/, - number: /\b0x[\da-f]+\b|(?:\b\d+\.?\d*|\B\.\d+)(?:e[+-]?\d+)?/i, - operator: /-|\+|\*|\/|>|<|==|<=?|>=?|<>|!~|~|=|\|/, - punctuation: /[{};(),.:]/, - variable: /(\[\[(.+?)\]\])|(\$(.+?))\b/, -}; - -tokenizer['function-context'].inside = { - argument: { - pattern: /[a-z0-9_]+(?=:)/i, - alias: 'symbol', - }, - duration: tokenizer.duration, - number: tokenizer.number, - builtin: tokenizer.builtin, - string: tokenizer.string, - variable: tokenizer.variable, -}; - -export default tokenizer; diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/kusto/kusto.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/kusto/kusto.ts new file mode 100644 index 00000000000..e2a1142597b --- /dev/null +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/kusto/kusto.ts @@ -0,0 +1,355 @@ +/* tslint:disable:max-line-length */ +export const operatorTokens = [ + { text: "!between", hint: "Matches the input that is outside the inclusive range." }, + { text: "as", hint: "Binds a name to the operator's input tabular expression." }, + { text: "between", hint: "Matches the input that is inside the inclusive range." }, + { text: "consume", hint: "The `consume` operator consumes the tabular data stream handed to it. 
It is\r\nmostly used for triggering the query side-effect without actually returning\r\nthe results back to the caller." }, + { text: "count", hint: "Returns the number of records in the input record set." }, + { text: "datatable", hint: "Returns a table whose schema and values are defined in the query itself." }, + { text: "distinct", hint: "Produces a table with the distinct combination of the provided columns of the input table." }, + { text: "evaluate", hint: "Invokes a service-side query extension (plugin)." }, + { text: "extend", hint: "Create calculated columns and append them to the result set." }, + { text: "externaldata", hint: "Returns a table whose schema is defined in the query itself, and whose data is read from an external raw file." }, + { text: "facet", hint: "Returns a set of tables, one for each specified column.\r\nEach table specifies the list of values taken by its column.\r\nAn additional table can be created by using the `with` clause." }, + { text: "find", hint: "Finds rows that match a predicate across a set of tables." }, + { text: "fork", hint: "Runs multiple consumer operators in parallel." }, + { text: "getschema", hint: "Produce a table that represents a tabular schema of the input." }, + { text: "in", hint: "Filters a recordset based on the provided set of values." }, + { text: "invoke", hint: "Invokes lambda that receives the source of `invoke` as tabular parameter argument." }, + { text: "join", hint: "Merge the rows of two tables to form a new table by matching values of the specified column(s) from each table." }, + { text: "limit", hint: "Return up to the specified number of rows." }, + { text: "make-series", hint: "Create series of specified aggregated values along specified axis." }, + { text: "mvexpand", hint: "Expands multi-value array or property bag." }, + { text: "order", hint: "Sort the rows of the input table into order by one or more columns." }, + { text: "parse", hint: "Evaluates a string expression and parses its value into one or more calculated columns." }, + { text: "print", hint: "Evaluates one or more scalar expressions and inserts the results (as a single-row table with as many columns as there are expressions) into the output." }, + { text: "project", hint: "Select the columns to include, rename or drop, and insert new computed columns." }, + { text: "project-away", hint: "Select what columns to exclude from the input." }, + { text: "project-rename", hint: "Renames columns in the result output." }, + { text: "range", hint: "Generates a single-column table of values." }, + { text: "reduce", hint: "Groups a set of strings together based on values similarity." }, + { text: "render", hint: "Instructs the user agent to render the results of the query in a particular way." }, + { text: "sample", hint: "Returns up to the specified number of random rows from the input table." }, + { text: "sample-distinct", hint: "Returns a single column that contains up to the specified number of distinct values of the requested column." }, + { text: "search", hint: "The search operator provides a multi-table/multi-column search experience." }, + { text: "serialize", hint: "Marks that order of the input row set is safe for window functions usage." }, + { text: "sort", hint: "Sort the rows of the input table into order by one or more columns." }, + { text: "summarize", hint: "Produces a table that aggregates the content of the input table." }, + { text: "take", hint: "Return up to the specified number of rows." 
}, + { text: "top", hint: "Returns the first *N* records sorted by the specified columns." }, + { text: "top-hitters", hint: "Returns an approximation of the first *N* results (assuming skewed distribution of the input)." }, + { text: "top-nested", hint: "Produces hierarchical top results, where each level is a drill-down based on previous level values." }, + { text: "union", hint: "Takes two or more tables and returns the rows of all of them." }, + { text: "where", hint: "Filters a table to the subset of rows that satisfy a predicate." }, +]; + +export const functionTokens = [ + { text: "abs", hint: "Calculates the absolute value of the input." }, + { text: "acos", hint: "Returns the angle whose cosine is the specified number (the inverse operation of [`cos()`](cosfunction.md)) ." }, + { text: "ago", hint: "Subtracts the given timespan from the current UTC clock time." }, + { text: "any", hint: "Returns random non-empty value from the specified expression values." }, + { text: "arg_max", hint: "Finds a row in the group that maximizes *ExprToMaximize*, and returns the value of *ExprToReturn* (or `*` to return the entire row)." }, + { text: "arg_min", hint: "Finds a row in the group that minimizes *ExprToMinimize*, and returns the value of *ExprToReturn* (or `*` to return the entire row)." }, + { text: "argmax", hint: "Finds a row in the group that maximizes *ExprToMaximize*, and returns the value of *ExprToReturn* (or `*` to return the entire row)." }, + { text: "argmin", hint: "Finds a row in the group that minimizes *ExprToMinimize*, and returns the value of *ExprToReturn* (or `*` to return the entire row)." }, + { text: "array_concat", hint: "Concatenates a number of dynamic arrays to a single array." }, + { text: "array_length", hint: "Calculates the number of elements in a dynamic array." }, + { text: "array_slice", hint: "Extracts a slice of a dynamic array." }, + { text: "array_split", hint: "Splits an array to multiple arrays according to the split indices and packs the generated array in a dynamic array." }, + { text: "asin", hint: "Returns the angle whose sine is the specified number (the inverse operation of [`sin()`](sinfunction.md)) ." }, + { text: "assert", hint: "Checks for a condition; if the condition is false, outputs error messages and fails the query." }, + { text: "atan", hint: "Returns the angle whose tangent is the specified number (the inverse operation of [`tan()`](tanfunction.md)) ." }, + { text: "atan2", hint: "Calculates the angle, in radians, between the positive x-axis and the ray from the origin to the point (y, x)." }, + { text: "avg", hint: "Calculates the average of *Expr* across the group." }, + { text: "avgif", hint: "Calculates the [average](avg-aggfunction.md) of *Expr* across the group for which *Predicate* evaluates to `true`." }, + { text: "bag_keys", hint: "Enumerates all the root keys in a dynamic property-bag object." }, + { text: "base64_decodestring", hint: "Decodes a base64 string to a UTF-8 string" }, + { text: "base64_encodestring", hint: "Encodes a string as base64 string" }, + { text: "beta_cdf", hint: "Returns the standard cumulative beta distribution function." }, + { text: "beta_inv", hint: "Returns the inverse of the beta cumulative probability beta density function." }, + { text: "beta_pdf", hint: "Returns the probability density beta function." }, + { text: "bin", hint: "Rounds values down to an integer multiple of a given bin size." 
}, + { text: "bin_at", hint: "Rounds values down to a fixed-size \'bin\', with control over the bin's starting point.\r\n(See also [`bin function`](./binfunction.md).)" }, + { text: "bin_auto", hint: "Rounds values down to a fixed-size \'bin\', with control over the bin size and starting point provided by a query property." }, + { text: "binary_and", hint: "Returns a result of the bitwise `and` operation between two values." }, + { text: "binary_not", hint: "Returns a bitwise negation of the input value." }, + { text: "binary_or", hint: "Returns a result of the bitwise `or` operation of the two values." }, + { text: "binary_shift_left", hint: "Returns binary shift left operation on a pair of numbers." }, + { text: "binary_shift_right", hint: "Returns binary shift right operation on a pair of numbers." }, + { text: "binary_xor", hint: "Returns a result of the bitwise `xor` operation of the two values." }, + { text: "buildschema", hint: "Returns the minimal schema that admits all values of *DynamicExpr*." }, + { text: "case", hint: "Evaluates a list of predicates and returns the first result expression whose predicate is satisfied." }, + { text: "ceiling", hint: "Calculates the smallest integer greater than, or equal to, the specified numeric expression." }, + { text: "cluster", hint: "Changes the reference of the query to a remote cluster." }, + { text: "coalesce", hint: "Evaluates a list of expressions and returns the first non-null (or non-empty for string) expression." }, + { text: "cos", hint: "Returns the cosine function." }, + { text: "cot", hint: "Calculates the trigonometric cotangent of the specified angle, in radians." }, + { text: "count", hint: "Returns a count of the records per summarization group (or in total if summarization is done without grouping)." }, + { text: "countif", hint: "Returns a count of rows for which *Predicate* evaluates to `true`." }, + { text: "countof", hint: "Counts occurrences of a substring in a string. Plain string matches may overlap; regex matches do not." }, + { text: "current_principal", hint: "Returns the current principal running this query." }, + { text: "cursor_after", hint: "A predicate over the records of a table to compare their ingestion time\r\nagainst a database cursor." }, + { text: "cursor_before_or_at", hint: "A predicate over the records of a table to compare their ingestion time\r\nagainst a database cursor." }, + { text: "database", hint: "Changes the reference of the query to a specific database within the cluster scope." }, + { text: "datetime_add", hint: "Calculates a new [datetime](./scalar-data-types/datetime.md) from a specified datepart multiplied by a specified amount, added to a specified [datetime](./scalar-data-types/datetime.md)." }, + { text: "datetime_diff", hint: "Calculates calendarian difference between two [datetime](./scalar-data-types/datetime.md) values." }, + { text: "datetime_part", hint: "Extracts the requested date part as an integer value." }, + { text: "dayofmonth", hint: "Returns the integer number representing the day number of the given month" }, + { text: "dayofweek", hint: "Returns the integer number of days since the preceding Sunday, as a `timespan`." }, + { text: "dayofyear", hint: "Returns the integer number represents the day number of the given year." }, + { text: "dcount", hint: "Returns an estimate of the number of distinct values of *Expr* in the group." 
}, + { text: "dcount_hll", hint: "Calculates the dcount from hll results (which was generated by [hll](hll-aggfunction.md) or [hll_merge](hll-merge-aggfunction.md))." }, + { text: "dcountif", hint: "Returns an estimate of the number of distinct values of *Expr* of rows for which *Predicate* evaluates to `true`." }, + { text: "degrees", hint: "Converts angle value in radians into value in degrees, using formula `degrees = (180 / PI ) * angle_in_radians`" }, + { text: "distance", hint: "Returns the distance between two points in meters." }, + { text: "endofday", hint: "Returns the end of the day containing the date, shifted by an offset, if provided." }, + { text: "endofmonth", hint: "Returns the end of the month containing the date, shifted by an offset, if provided." }, + { text: "endofweek", hint: "Returns the end of the week containing the date, shifted by an offset, if provided." }, + { text: "endofyear", hint: "Returns the end of the year containing the date, shifted by an offset, if provided." }, + { text: "estimate_data_size", hint: "Returns an estimated data size of the selected columns of the tabular expression." }, + { text: "exp", hint: "The base-e exponential function of x, which is e raised to the power x: e^x." }, + { text: "exp10", hint: "The base-10 exponential function of x, which is 10 raised to the power x: 10^x. \r\n**Syntax**" }, + { text: "exp2", hint: "The base-2 exponential function of x, which is 2 raised to the power x: 2^x." }, + { text: "extent_id", hint: "Returns a unique identifier that identifies the data shard (\"extent\") that the current record resides in." }, + { text: "extent_tags", hint: "Returns a dynamic array with the [tags](../management/extents-overview.md#extent-tagging) of the data shard (\"extent\") that the current record resides in." }, + { text: "extract", hint: "Get a match for a [regular expression](./re2.md) from a text string." }, + { text: "extract_all", hint: "Get all matches for a [regular expression](./re2.md) from a text string." }, + { text: "extractjson", hint: "Get a specified element out of a JSON text using a path expression." }, + { text: "floor", hint: "An alias for [`bin()`](binfunction.md)." }, + { text: "format_datetime", hint: "Formats a datetime parameter based on the format pattern parameter." }, + { text: "format_timespan", hint: "Formats a timespan parameter based on the format pattern parameter." }, + { text: "gamma", hint: "Computes [gamma function](https://en.wikipedia.org/wiki/Gamma_function)" }, + { text: "getmonth", hint: "Get the month number (1-12) from a datetime." }, + { text: "gettype", hint: "Returns the runtime type of its single argument." }, + { text: "getyear", hint: "Returns the year part of the `datetime` argument." }, + { text: "hash", hint: "Returns a hash value for the input value." }, + { text: "hash_sha256", hint: "Returns a sha256 hash value for the input value." }, + { text: "hll", hint: "Calculates the Intermediate results of [dcount](dcount-aggfunction.md) across the group." }, + { text: "hll_merge", hint: "Merges hll results (scalar version of the aggregate version [`hll_merge()`](hll-merge-aggfunction.md))." }, + { text: "hourofday", hint: "Returns the integer number representing the hour number of the given date" }, + { text: "iff", hint: "Evaluates the first argument (the predicate), and returns the value of either the second or third arguments, depending on whether the predicate evaluated to `true` (second) or `false` (third)." 
}, + { text: "iif", hint: "Evaluates the first argument (the predicate), and returns the value of either the second or third arguments, depending on whether the predicate evaluated to `true` (second) or `false` (third)." }, + { text: "indexof", hint: "Function reports the zero-based index of the first occurrence of a specified string within input string." }, + { text: "ingestion_time", hint: "Retrieves the record's `$IngestionTime` hidden `datetime` column, or null." }, + { text: "iscolumnexists", hint: "Returns a boolean value indicating if the given string argument exists in the schema produced by the preceding tabular operator." }, + { text: "isempty", hint: "Returns `true` if the argument is an empty string or is null." }, + { text: "isfinite", hint: "Returns whether input is a finite value (is neither infinite nor NaN)." }, + { text: "isinf", hint: "Returns whether input is an infinite (positive or negative) value." }, + { text: "isnan", hint: "Returns whether input is Not-a-Number (NaN) value." }, + { text: "isnotempty", hint: "Returns `true` if the argument is not an empty string nor it is a null." }, + { text: "isnotnull", hint: "Returns `true` if the argument is not null." }, + { text: "isnull", hint: "Evaluates its sole argument and returns a `bool` value indicating if the argument evaluates to a null value." }, + { text: "log", hint: "Returns the natural logarithm function." }, + { text: "log10", hint: "Returns the common (base-10) logarithm function." }, + { text: "log2", hint: "Returns the base-2 logarithm function." }, + { text: "loggamma", hint: "Computes log of absolute value of the [gamma function](https://en.wikipedia.org/wiki/Gamma_function)" }, + { text: "make_datetime", hint: "Creates a [datetime](./scalar-data-types/datetime.md) scalar value from the specified date and time." }, + { text: "make_dictionary", hint: "Returns a `dynamic` (JSON) property-bag (dictionary) of all the values of *Expr* in the group." }, + { text: "make_string", hint: "Returns the string generated by the Unicode characters." }, + { text: "make_timespan", hint: "Creates a [timespan](./scalar-data-types/timespan.md) scalar value from the specified time period." }, + { text: "makelist", hint: "Returns a `dynamic` (JSON) array of all the values of *Expr* in the group." }, + { text: "makeset", hint: "Returns a `dynamic` (JSON) array of the set of distinct values that *Expr* takes in the group." }, + { text: "materialize", hint: "Allows caching a sub-query result during the time of query execution in a way that other subqueries can reference the partial result." }, + { text: "max", hint: "Returns the maximum value across the group." }, + { text: "max_of", hint: "Returns the maximum value of several evaluated numeric expressions." }, + { text: "merge_tdigests", hint: "Merges tdigest results (scalar version of the aggregate version [`merge_tdigests()`](merge-tdigests-aggfunction.md))." }, + { text: "min", hint: "Returns the minimum value agross the group." }, + { text: "min_of", hint: "Returns the minimum value of several evaluated numeric expressions." }, + { text: "monthofyear", hint: "Returns the integer number represents the month number of the given year." }, + { text: "next", hint: "Returns the value of a column in a row that it at some offset following the\r\ncurrent row in a [serialized row set](./windowsfunctions.md#serialized-row-set)." }, + { text: "not", hint: "Reverses the value of its `bool` argument." 
}, + { text: "now", hint: "Returns the current UTC clock time, optionally offset by a given timespan.\r\nThis function can be used multiple times in a statement and the clock time being referenced will be the same for all instances." }, + { text: "pack", hint: "Creates a `dynamic` object (property bag) from a list of names and values." }, + { text: "pack_all", hint: "Creates a `dynamic` object (property bag) from all the columns of the tabular expression." }, + { text: "pack_array", hint: "Packs all input values into a dynamic array." }, + { text: "parse_ipv4", hint: "Converts input to integer (signed 64-bit) number representation." }, + { text: "parse_json", hint: "Interprets a `string` as a [JSON value](https://json.org/)) and returns the value as [`dynamic`](./scalar-data-types/dynamic.md). \r\nIt is superior to using [extractjson() function](./extractjsonfunction.md)\r\nwhen you need to extract more than one element of a JSON compound object." }, + { text: "parse_path", hint: "Parses a file path `string` and returns a [`dynamic`](./scalar-data-types/dynamic.md) object that contains the following parts of the path: \r\nScheme, RootPath, DirectoryPath, DirectoryName, FileName, Extension, AlternateDataStreamName.\r\nIn addition to the simple paths with both types of slashes, supports paths with schemas (e.g. \"file://...\"), shared paths (e.g. \"\\\\shareddrive\\users...\"), long paths (e.g \"\\\\?\\C:...\"\"), alternate data streams (e.g. \"file1.exe:file2.exe\")" }, + { text: "parse_url", hint: "Parses an absolute URL `string` and returns a [`dynamic`](./scalar-data-types/dynamic.md) object contains all parts of the URL (Scheme, Host, Port, Path, Username, Password, Query Parameters, Fragment)." }, + { text: "parse_urlquery", hint: "Parses a url query `string` and returns a [`dynamic`](./scalar-data-types/dynamic.md) object contains the Query parameters." }, + { text: "parse_user_agent", hint: "Interprets a user-agent string, which identifies the user's browser and provides certain system details to servers hosting the websites the user visits. The result is returned as [`dynamic`](./scalar-data-types/dynamic.md)." }, + { text: "parse_version", hint: "Converts input string representation of version to a comparable decimal number." }, + { text: "parse_xml", hint: "Interprets a `string` as a XML value, converts the value to a [JSON value](https://json.org/) and returns the value as [`dynamic`](./scalar-data-types/dynamic.md)." }, + { text: "percentile", hint: "Returns an estimate for the specified [nearest-rank percentile](#nearest-rank-percentile) of the population defined by *Expr*. \r\nThe accuracy depends on the density of population in the region of the percentile." }, + { text: "percentile_tdigest", hint: "Calculates the percentile result from tdigest results (which was generated by [tdigest](tdigest-aggfunction.md) or [merge-tdigests](merge-tdigests-aggfunction.md))" }, + { text: "percentrank_tdigest", hint: "Calculates the approximate rank of the value in a set where rank is expressed as percentage of set's size. \r\nThis function can be viewed as the inverse of the percentile." }, + { text: "pi", hint: "Returns the constant value of Pi (π)." }, + { text: "point", hint: "Returns a dynamic array representation of a point." }, + { text: "pow", hint: "Returns a result of raising to power" }, + { text: "prev", hint: "Returns the value of a column in a row that it at some offset prior to the\r\ncurrent row in a [serialized row set](./windowsfunctions.md#serialized-row-set)." 
}, + { text: "radians", hint: "Converts angle value in degrees into value in radians, using formula `radians = (PI / 180 ) * angle_in_degrees`" }, + { text: "rand", hint: "Returns a random number." }, + { text: "range", hint: "Generates a dynamic array holding a series of equally-spaced values." }, + { text: "repeat", hint: "Generates a dynamic array holding a series of equal values." }, + { text: "replace", hint: "Replace all regex matches with another string." }, + { text: "reverse", hint: "Function makes reverse of input string." }, + { text: "round", hint: "Returns the rounded source to the specified precision." }, + { text: "row_cumsum", hint: "Calculates the cumulative sum of a column in a [serialized row set](./windowsfunctions.md#serialized-row-set)." }, + { text: "row_number", hint: "Returns the current row's index in a [serialized row set](./windowsfunctions.md#serialized-row-set).\r\nThe row index starts by default at `1` for the first row, and is incremented by `1` for each additional row.\r\nOptionally, the row index can start at a different value than `1`.\r\nAdditionally, the row index may be reset according to some provided predicate." }, + { text: "series_add", hint: "Calculates the element-wise addition of two numeric series inputs." }, + { text: "series_decompose", hint: "Applies a decomposition transformation on a series." }, + { text: "series_decompose_anomalies", hint: "Anomaly Detection based on series decomposition (refer to [series_decompose()](series-decomposefunction.md))" }, + { text: "series_decompose_forecast", hint: "Forecast based on series decomposition." }, + { text: "series_divide", hint: "Calculates the element-wise division of two numeric series inputs." }, + { text: "series_equals", hint: "Calculates the element-wise equals (`==`) logic operation of two numeric series inputs." }, + { text: "series_fill_backward", hint: "Performs backward fill interpolation of missing values in a series." }, + { text: "series_fill_const", hint: "Replaces missing values in a series with a specified constant value." }, + { text: "series_fill_forward", hint: "Performs forward fill interpolation of missing values in a series." }, + { text: "series_fill_linear", hint: "Performs linear interpolation of missing values in a series." }, + { text: "series_fir", hint: "Applies a Finite Impulse Response filter on a series." }, + { text: "series_fit_2lines", hint: "Applies two segments linear regression on a series, returning multiple columns." }, + { text: "series_fit_2lines_dynamic", hint: "Applies two segments linear regression on a series, returning dynamic object." }, + { text: "series_fit_line", hint: "Applies linear regression on a series, returning multiple columns." }, + { text: "series_fit_line_dynamic", hint: "Applies linear regression on a series, returning dynamic object." }, + { text: "series_greater", hint: "Calculates the element-wise greater (`>`) logic operation of two numeric series inputs." }, + { text: "series_greater_equals", hint: "Calculates the element-wise greater or equals (`>=`) logic operation of two numeric series inputs." }, + { text: "series_iir", hint: "Applies a Infinite Impulse Response filter on a series." }, + { text: "series_less", hint: "Calculates the element-wise less (`<`) logic operation of two numeric series inputs." }, + { text: "series_less_equals", hint: "Calculates the element-wise less or equal (`<=`) logic operation of two numeric series inputs." 
}, + { text: "series_multiply", hint: "Calculates the element-wise multiplication of two numeric series inputs." }, + { text: "series_not_equals", hint: "Calculates the element-wise not equals (`!=`) logic operation of two numeric series inputs." }, + { text: "series_outliers", hint: "Scores anomaly points in a series." }, + { text: "series_periods_detect", hint: "Finds the most significant periods that exist in a time series." }, + { text: "series_periods_validate", hint: "Checks whether a time series contains periodic patterns of given lengths." }, + { text: "series_seasonal", hint: "Calculates the seasonal component of a series according to the detected or given seasonal period." }, + { text: "series_stats", hint: "Returns statistics for a series in multiple columns." }, + { text: "series_stats_dynamic", hint: "Returns statistics for a series in dynamic object." }, + { text: "series_subtract", hint: "Calculates the element-wise subtraction of two numeric series inputs." }, + { text: "sign", hint: "Sign of a numeric expression" }, + { text: "sin", hint: "Returns the sine function." }, + { text: "split", hint: "Splits a given string according to a given delimiter and returns a string array with the contained substrings." }, + { text: "sqrt", hint: "Returns the square root function." }, + { text: "startofday", hint: "Returns the start of the day containing the date, shifted by an offset, if provided." }, + { text: "startofmonth", hint: "Returns the start of the month containing the date, shifted by an offset, if provided." }, + { text: "startofweek", hint: "Returns the start of the week containing the date, shifted by an offset, if provided." }, + { text: "startofyear", hint: "Returns the start of the year containing the date, shifted by an offset, if provided." }, + { text: "stdev", hint: "Calculates the standard deviation of *Expr* across the group, considering the group as a [sample](https://en.wikipedia.org/wiki/Sample_%28statistics%29)." }, + { text: "stdevif", hint: "Calculates the [stdev](stdev-aggfunction.md) of *Expr* across the group for which *Predicate* evaluates to `true`." }, + { text: "stdevp", hint: "Calculates the standard deviation of *Expr* across the group, considering the group as a [population](https://en.wikipedia.org/wiki/Statistical_population)." }, + { text: "strcat", hint: "Concatenates between 1 and 64 arguments." }, + { text: "strcat_array", hint: "Creates a concatenated string of array values using specified delimiter." }, + { text: "strcat_delim", hint: "Concatenates between 2 and 64 arguments, with delimiter, provided as first argument." }, + { text: "strcmp", hint: "Compares two strings." }, + { text: "string_size", hint: "Returns the size, in bytes, of the input string." }, + { text: "strlen", hint: "Returns the length, in characters, of the input string." }, + { text: "strrep", hint: "Repeats given [string](./scalar-data-types/string.md) provided amount of times." }, + { text: "substring", hint: "Extracts a substring from a source string starting from some index to the end of the string." }, + { text: "sum", hint: "Calculates the sum of *Expr* across the group." }, + { text: "sumif", hint: "Returns a sum of *Expr* for which *Predicate* evaluates to `true`." }, + { text: "table", hint: "References specific table using an query-time evaluated string-expression." }, + { text: "tan", hint: "Returns the tangent function." }, + { text: "tdigest", hint: "Calculates the Intermediate results of [`percentiles()`](percentiles-aggfunction.md) across the group." 
}, + { text: "tdigest_merge", hint: "Merges tdigest results (scalar version of the aggregate version [`tdigest_merge()`](tdigest-merge-aggfunction.md))." }, + { text: "tobool", hint: "Converts input to boolean (signed 8-bit) representation." }, + { text: "todatetime", hint: "Converts input to [datetime](./scalar-data-types/datetime.md) scalar." }, + { text: "todecimal", hint: "Converts input to decimal number representation." }, + { text: "todouble", hint: "Converts the input to a value of type `real`. (`todouble()` and `toreal()` are synonyms.)" }, + { text: "todynamic", hint: "Interprets a `string` as a [JSON value](https://json.org/) and returns the value as [`dynamic`](./scalar-data-types/dynamic.md)." }, + { text: "toguid", hint: "Converts input to [`guid`](./scalar-data-types/guid.md) representation." }, + { text: "tohex", hint: "Converts input to a hexadecimal string." }, + { text: "toint", hint: "Converts input to integer (signed 32-bit) number representation." }, + { text: "tolong", hint: "Converts input to long (signed 64-bit) number representation." }, + { text: "tolower", hint: "Converts input string to lower case." }, + { text: "toscalar", hint: "Returns a scalar constant value of the evaluated expression." }, + { text: "tostring", hint: "Converts input to a string representation." }, + { text: "totimespan", hint: "Converts input to [timespan](./scalar-data-types/timespan.md) scalar." }, + { text: "toupper", hint: "Converts a string to upper case." }, + { text: "translate", hint: "Replaces a set of characters ('searchList') with another set of characters ('replacementList') in a given a string.\r\nThe function searches for characters in the 'searchList' and replaces them with the corresponding characters in 'replacementList'" }, + { text: "treepath", hint: "Enumerates all the path expressions that identify leaves in a dynamic object." }, + { text: "trim", hint: "Removes all leading and trailing matches of the specified regular expression." }, + { text: "trim_end", hint: "Removes trailing match of the specified regular expression." }, + { text: "trim_start", hint: "Removes leading match of the specified regular expression." }, + { text: "url_decode", hint: "The function converts encoded URL into a to regular URL representation." }, + { text: "url_encode", hint: "The function converts characters of the input URL into a format that can be transmitted over the Internet." }, + { text: "variance", hint: "Calculates the variance of *Expr* across the group, considering the group as a [sample](https://en.wikipedia.org/wiki/Sample_%28statistics%29)." }, + { text: "varianceif", hint: "Calculates the [variance](variance-aggfunction.md) of *Expr* across the group for which *Predicate* evaluates to `true`." }, + { text: "variancep", hint: "Calculates the variance of *Expr* across the group, considering the group as a [population](https://en.wikipedia.org/wiki/Statistical_population)." }, + { text: "weekofyear", hint: "Returns the integer number represents the week number." }, + { text: "welch_test", hint: "Computes the p_value of the [Welch-test function](https://en.wikipedia.org/wiki/Welch%27s_t-test)" }, + { text: "zip", hint: "The `zip` function accepts any number of `dynamic` arrays, and returns an\r\narray whose elements are each an array holding the elements of the input\r\narrays of the same index." 
}, +]; + +export const KEYWORDS = [ + 'by', + 'on', + 'contains', + 'notcontains', + 'containscs', + 'notcontainscs', + 'startswith', + 'has', + 'matches', + 'regex', + 'true', + 'false', + 'and', + 'or', + 'typeof', + 'int', + 'string', + 'date', + 'datetime', + 'time', + 'long', + 'real', + '​boolean', + 'bool', +]; + +export const grafanaMacros = [ + { text: '$__timeFilter', display: '$__timeFilter()', hint: 'Macro that uses the selected timerange in Grafana to filter the query.', }, + { text: '$__escapeMulti', display: '$__escapeMulti()', hint: 'Macro to escape multi-value template variables that contain illegal characters.', }, + { text: '$__contains', display: '$__contains()', hint: 'Macro for multi-value template variables.' }, +]; + +// Kusto operators +// export const OPERATORS = ['+', '-', '*', '/', '>', '<', '==', '<>', '<=', '>=', '~', '!~']; + +export const DURATION = ['SECONDS', 'MINUTES', 'HOURS', 'DAYS', 'WEEKS', 'MONTHS', 'YEARS']; + +const tokenizer = { + comment: { + pattern: /(^|[^\\:])\/\/.*/, + lookbehind: true, + greedy: true, + }, + 'function-context': { + pattern: /[a-z0-9_]+\([^)]*\)?/i, + inside: {}, + }, + duration: { + pattern: new RegExp(`${DURATION.join('?|')}?`, 'i'), + alias: 'number', + }, + builtin: new RegExp(`\\b(?:${functionTokens.map(f => f.text).join('|')})(?=\\s*\\()`, 'i'), + string: { + pattern: /(["'])(?:\\(?:\r\n|[\s\S])|(?!\1)[^\\\r\n])*\1/, + greedy: true, + }, + keyword: new RegExp(`\\b(?:${KEYWORDS.join('|')}|${operatorTokens.map(f => f.text).join('|')}|\\*)\\b`, 'i'), + boolean: /\b(?:true|false)\b/, + number: /\b0x[\da-f]+\b|(?:\b\d+\.?\d*|\B\.\d+)(?:e[+-]?\d+)?/i, + operator: /-|\+|\*|\/|>|<|==|<=?|>=?|<>|!~|~|=|\|/, + punctuation: /[{};(),.:]/, + variable: /(\[\[(.+?)\]\])|(\$(.+?))\b/, +}; + +tokenizer['function-context'].inside = { + argument: { + pattern: /[a-z0-9_]+(?=:)/i, + alias: 'symbol', + }, + duration: tokenizer.duration, + number: tokenizer.number, + builtin: tokenizer.builtin, + string: tokenizer.string, + variable: tokenizer.variable, +}; + +// console.log(tokenizer.builtin); + +export default tokenizer; + +// function escapeRegExp(str: string): string { +// return str.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& means the whole matched string +// } From 0c3657da7e41f4d895cbc7f32eda87695bbb25f9 Mon Sep 17 00:00:00 2001 From: Alexander Zobnin Date: Wed, 30 Jan 2019 15:24:37 +0300 Subject: [PATCH 07/38] azuremonitor: suggest tables initially --- .../editor/KustoQueryField.tsx | 90 ++++++++++++++----- .../editor/editor_component.tsx | 8 +- .../partials/query.editor.html | 1 + .../query_ctrl.ts | 4 +- 4 files changed, 79 insertions(+), 24 deletions(-) diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx index fa79d4bdb99..c8f96fba211 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx @@ -1,3 +1,4 @@ +import _ from 'lodash'; import Plain from 'slate-plain-serializer'; import QueryField from './query_field'; @@ -25,21 +26,43 @@ interface SuggestionGroup { skipFilter?: boolean; } +interface KustoSchema { + Databases: { + Default?: KustoDBSchema; + }; + Plugins?: any[]; +} + +interface KustoDBSchema { + Name?: string; + Functions?: any; + Tables?: any; +} + +const defaultSchema = () => ({ + Databases: { + Default: {} + } +}); + const cleanText = 
s => s.replace(/[{}[\]="(),!~+\-*/^%]/g, '').trim(); const wrapText = text => ({ text }); export default class KustoQueryField extends QueryField { fields: any; events: any; + schema: KustoSchema; constructor(props, context) { super(props, context); + this.schema = defaultSchema(); this.onTypeahead = debounce(this.onTypeahead, TYPEAHEAD_DELAY); } componentDidMount() { this.updateMenu(); + this.fetchSchema(); } onTypeahead = () => { @@ -128,7 +151,13 @@ export default class KustoQueryField extends QueryField { suggestionGroups = this._getKeywordSuggestions(); } else if (Plain.serialize(this.state.value) === '') { typeaheadContext = 'context-new'; - suggestionGroups = this._getInitialSuggestions(); + if (this.schema) { + suggestionGroups = this._getInitialSuggestions(); + } else { + this.fetchSchema(); + setTimeout(this.onTypeahead, 0); + return; + } } let results = 0; @@ -263,7 +292,7 @@ export default class KustoQueryField extends QueryField { { prefixMatch: true, label: 'Operators', - items: operatorTokens.map((s: any) => { s.type = 'function'; return s; }) + items: operatorTokens }, { prefixMatch: true, @@ -274,34 +303,46 @@ export default class KustoQueryField extends QueryField { prefixMatch: true, label: 'Macros', items: grafanaMacros.map((s: any) => { s.type = 'function'; return s; }) + }, + { + prefixMatch: true, + label: 'Tables', + items: _.map(this.schema.Databases.Default.Tables, (t: any) => ({ text: t.Name })) } ]; } private _getInitialSuggestions(): SuggestionGroup[] { - // TODO: return datbase tables as an initial suggestion return [ { prefixMatch: true, - label: 'Keywords', - items: KEYWORDS.map(wrapText) - }, - { - prefixMatch: true, - label: 'Operators', - items: operatorTokens.map((s: any) => { s.type = 'function'; return s; }) - }, - { - prefixMatch: true, - label: 'Functions', - items: functionTokens.map((s: any) => { s.type = 'function'; return s; }) - }, - { - prefixMatch: true, - label: 'Macros', - items: grafanaMacros.map((s: any) => { s.type = 'function'; return s; }) + label: 'Tables', + items: _.map(this.schema.Databases.Default.Tables, (t: any) => ({ text: t.Name })) } ]; + + // return [ + // { + // prefixMatch: true, + // label: 'Keywords', + // items: KEYWORDS.map(wrapText) + // }, + // { + // prefixMatch: true, + // label: 'Operators', + // items: operatorTokens.map((s: any) => { s.type = 'function'; return s; }) + // }, + // { + // prefixMatch: true, + // label: 'Functions', + // items: functionTokens.map((s: any) => { s.type = 'function'; return s; }) + // }, + // { + // prefixMatch: true, + // label: 'Macros', + // items: grafanaMacros.map((s: any) => { s.type = 'function'; return s; }) + // } + // ]; } private async _fetchEvents() { @@ -329,4 +370,13 @@ export default class KustoQueryField extends QueryField { // Stub this.fields = []; } + + private async fetchSchema() { + const schema = await this.props.getSchema(); + if (schema) { + this.schema = schema; + } else { + this.schema = defaultSchema(); + } + } } diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/editor_component.tsx b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/editor_component.tsx index 59e4ab12c81..7787f029ee7 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/editor_component.tsx +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/editor_component.tsx @@ -31,7 +31,7 @@ class Editor extends Component { }; render() { - const { request, variables } = this.props; + const { 
request, variables, getSchema } = this.props; const { edited, query } = this.state; return ( @@ -45,6 +45,7 @@ class Editor extends Component { placeholder="Enter a query" request={request} templateVariables={variables} + getSchema={getSchema} /> ); @@ -54,6 +55,9 @@ class Editor extends Component { coreModule.directive('kustoEditor', [ 'reactDirective', reactDirective => { - return reactDirective(Editor, ['change', 'database', 'execute', 'query', 'request', 'variables']); + return reactDirective(Editor, [ + 'change', 'database', 'execute', 'query', 'request', 'variables', + ['getSchema', { watchDepth: 'reference' }] + ]); }, ]); diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/query.editor.html b/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/query.editor.html index 49f02ec8355..592fccdcda9 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/query.editor.html +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/query.editor.html @@ -130,6 +130,7 @@ change="ctrl.onLogAnalyticsQueryChange" execute="ctrl.onLogAnalyticsQueryExecute" variables="ctrl.templateVariables" + getSchema="ctrl.getAzureLogAnalyticsSchema" /> diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/query_ctrl.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/query_ctrl.ts index fd42c172f11..b3aa5f9f6e9 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/query_ctrl.ts +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/query_ctrl.ts @@ -304,7 +304,7 @@ export class AzureMonitorQueryCtrl extends QueryCtrl { /* Azure Log Analytics */ - getWorkspaces() { + getWorkspaces = () => { return this.datasource.azureLogAnalyticsDatasource .getWorkspaces() .then(list => { @@ -316,7 +316,7 @@ export class AzureMonitorQueryCtrl extends QueryCtrl { .catch(this.handleQueryCtrlError.bind(this)); } - getAzureLogAnalyticsSchema() { + getAzureLogAnalyticsSchema = () => { return this.getWorkspaces() .then(() => { return this.datasource.azureLogAnalyticsDatasource.getSchema(this.target.azureLogAnalytics.workspace); From df9ecc68162ae678a8f22acda868b06fb0433f0d Mon Sep 17 00:00:00 2001 From: Alexander Zobnin Date: Thu, 31 Jan 2019 17:44:00 +0300 Subject: [PATCH 08/38] azuremonitor: don't go back to dashboard if escape pressed in the editor --- public/app/core/services/keybindingSrv.ts | 18 ++++++++++++++++++ .../editor/KustoQueryField.tsx | 2 +- .../editor/query_field.tsx | 14 ++++++++++++++ public/app/routes/GrafanaCtrl.ts | 3 +++ 4 files changed, 36 insertions(+), 1 deletion(-) diff --git a/public/app/core/services/keybindingSrv.ts b/public/app/core/services/keybindingSrv.ts index 989746fd067..6d790baa336 100644 --- a/public/app/core/services/keybindingSrv.ts +++ b/public/app/core/services/keybindingSrv.ts @@ -139,6 +139,10 @@ export class KeybindingSrv { ); } + unbind(keyArg: string, keyType?: string) { + Mousetrap.unbind(keyArg, keyType); + } + showDashEditView() { const search = _.extend(this.$location.search(), { editview: 'settings' }); this.$location.search(search); @@ -293,3 +297,17 @@ export class KeybindingSrv { } coreModule.service('keybindingSrv', KeybindingSrv); + +/** + * Code below exports the service to react components + */ + +let singletonInstance: KeybindingSrv; + +export function setKeybindingSrv(instance: KeybindingSrv) { + singletonInstance = instance; +} + +export function getKeybindingSrv(): KeybindingSrv { + return 
singletonInstance; +} diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx index c8f96fba211..719d57b9b6a 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx @@ -61,7 +61,7 @@ export default class KustoQueryField extends QueryField { } componentDidMount() { - this.updateMenu(); + super.componentDidMount(); this.fetchSchema(); } diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/query_field.tsx b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/query_field.tsx index 1c883a40c31..0acd53cabff 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/query_field.tsx +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/query_field.tsx @@ -9,6 +9,7 @@ import NewlinePlugin from './slate-plugins/newline'; import RunnerPlugin from './slate-plugins/runner'; import Typeahead from './typeahead'; +import { getKeybindingSrv, KeybindingSrv } from 'app/core/services/keybindingSrv'; import { Block, Document, Text, Value } from 'slate'; import { Editor } from 'slate-react'; @@ -61,6 +62,7 @@ class QueryField extends React.Component { menuEl: any; plugins: any; resetTimer: any; + keybindingSrv: KeybindingSrv = getKeybindingSrv(); constructor(props, context) { super(props, context); @@ -90,6 +92,7 @@ class QueryField extends React.Component { } componentWillUnmount() { + this.restoreEscapeKeyBinding(); clearTimeout(this.resetTimer); } @@ -218,6 +221,7 @@ class QueryField extends React.Component { if (onBlur) { onBlur(); } + this.restoreEscapeKeyBinding(); }; handleFocus = () => { @@ -225,8 +229,18 @@ class QueryField extends React.Component { if (onFocus) { onFocus(); } + // Don't go back to dashboard if Escape pressed inside the editor. 
+ this.removeEscapeKeyBinding(); }; + removeEscapeKeyBinding() { + this.keybindingSrv.unbind('esc', 'keydown'); + } + + restoreEscapeKeyBinding() { + this.keybindingSrv.setupGlobal(); + } + onClickItem = item => { const { suggestions } = this.state; if (!suggestions || suggestions.length === 0) { diff --git a/public/app/routes/GrafanaCtrl.ts b/public/app/routes/GrafanaCtrl.ts index 70bdf49e5e4..e50abdc0710 100644 --- a/public/app/routes/GrafanaCtrl.ts +++ b/public/app/routes/GrafanaCtrl.ts @@ -10,6 +10,7 @@ import appEvents from 'app/core/app_events'; import { BackendSrv, setBackendSrv } from 'app/core/services/backend_srv'; import { TimeSrv, setTimeSrv } from 'app/features/dashboard/services/TimeSrv'; import { DatasourceSrv, setDatasourceSrv } from 'app/features/plugins/datasource_srv'; +import { KeybindingSrv, setKeybindingSrv } from 'app/core/services/keybindingSrv'; import { AngularLoader, setAngularLoader } from 'app/core/services/AngularLoader'; import { configureStore } from 'app/store/configureStore'; @@ -25,6 +26,7 @@ export class GrafanaCtrl { backendSrv: BackendSrv, timeSrv: TimeSrv, datasourceSrv: DatasourceSrv, + keybindingSrv: KeybindingSrv, angularLoader: AngularLoader ) { // make angular loader service available to react components @@ -32,6 +34,7 @@ export class GrafanaCtrl { setBackendSrv(backendSrv); setDatasourceSrv(datasourceSrv); setTimeSrv(timeSrv); + setKeybindingSrv(keybindingSrv); configureStore(); $scope.init = () => { From ad821cf6296b224d3995e1473e0db6533e1fed0c Mon Sep 17 00:00:00 2001 From: Alexander Zobnin Date: Thu, 31 Jan 2019 20:23:40 +0300 Subject: [PATCH 09/38] azuremonitor: where clause autocomplete --- .../editor/KustoQueryField.tsx | 49 ++++++++++++++++--- 1 file changed, 43 insertions(+), 6 deletions(-) diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx index 719d57b9b6a..33be370ada3 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx @@ -11,7 +11,7 @@ import { KEYWORDS, functionTokens, operatorTokens, grafanaMacros } from './kusto // import '../sass/editor.base.scss'; -const TYPEAHEAD_DELAY = 500; +const TYPEAHEAD_DELAY = 100; interface Suggestion { text: string; @@ -104,12 +104,13 @@ export default class KustoQueryField extends QueryField { this._fetchFields(); return; } - } else if (modelPrefix.match(/(facet\s$)/i)) { - typeaheadContext = 'context-facet'; - if (this.fields) { - suggestionGroups = this._getKeywordSuggestions(); + } else if (modelPrefix.match(/(where\s$)/i)) { + typeaheadContext = 'context-where'; + const fullQuery = Plain.serialize(this.state.value); + const table = this.getTableFromContext(fullQuery); + if (table) { + suggestionGroups = this.getWhereSuggestions(table); } else { - this._fetchFields(); return; } } else if (modelPrefix.match(/(,\s*$)/)) { @@ -345,6 +346,35 @@ export default class KustoQueryField extends QueryField { // ]; } + private getWhereSuggestions(table: string): SuggestionGroup[] { + const tableSchema = this.schema.Databases.Default.Tables[table]; + if (tableSchema) { + return [ + { + prefixMatch: true, + label: 'Fields', + items: _.map(tableSchema.OrderedColumns, (f: any) => ({ + text: f.Name, + hint: f.Type + })) + } + ]; + } else { + return []; + } + } + + private getTableFromContext(query: string) { + const tablePattern 
= /^\s*(\w+)\s*|/g; + const normalizedQuery = normalizeQuery(query); + const match = tablePattern.exec(normalizedQuery); + if (match && match.length > 1 && match[0] && match[1]) { + return match[1]; + } else { + return null; + } + } + private async _fetchEvents() { // const query = 'events'; // const result = await this.request(query); @@ -380,3 +410,10 @@ export default class KustoQueryField extends QueryField { } } } + +function normalizeQuery(query: string): string { + const commentPattern = /\/\/.*$/gm; + let normalizedQuery = query.replace(commentPattern, ''); + normalizedQuery = normalizedQuery.replace('\n', ' '); + return normalizedQuery; +} From dd8ca70151672293b48267a800b2e3682e8a79c5 Mon Sep 17 00:00:00 2001 From: Alexander Zobnin Date: Mon, 4 Feb 2019 18:51:56 +0300 Subject: [PATCH 10/38] azuremonitor: use kusto editor for App Insights --- .../app_insights/app_insights_datasource.ts | 9 +++++++ .../app_insights/response_parser.ts | 26 +++++++++++++++++++ .../editor/KustoQueryField.tsx | 14 +++++++++- .../editor/editor_component.tsx | 22 ++++++++++++---- .../partials/query.editor.html | 15 ++++++++--- .../query_ctrl.ts | 14 ++++++++++ 6 files changed, 91 insertions(+), 9 deletions(-) diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/app_insights/app_insights_datasource.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/app_insights/app_insights_datasource.ts index 950fa73a16b..97f76d229fb 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/app_insights/app_insights_datasource.ts +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/app_insights/app_insights_datasource.ts @@ -224,4 +224,13 @@ export default class AppInsightsDatasource { return new ResponseParser(result).parseGroupBys(); }); } + + getQuerySchema() { + const url = `${this.baseUrl}/query/schema`; + return this.doRequest(url).then(result => { + const schema = new ResponseParser(result).parseQuerySchema(); + // console.log(schema); + return schema; + }); + } } diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/app_insights/response_parser.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/app_insights/response_parser.ts index 848472cf101..fa96e4a2e3e 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/app_insights/response_parser.ts +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/app_insights/response_parser.ts @@ -199,6 +199,32 @@ export default class ResponseParser { return ResponseParser.toTextValueList(this.results.supportedGroupBy); } + parseQuerySchema() { + const result = { + Type: 'AppInsights', + Tables: {} + }; + if (this.results && this.results.data && this.results.data.Tables) { + for (let i = 0; i < this.results.data.Tables[0].Rows.length; i++) { + const column = this.results.data.Tables[0].Rows[i]; + const columnTable = column[0]; + const columnName = column[1]; + const columnType = column[2]; + if (result.Tables[columnTable]) { + result.Tables[columnTable].OrderedColumns.push({ Name: columnName, Type: columnType }); + } else { + result.Tables[columnTable] = { + Name: columnTable, + OrderedColumns: [ + { Name: columnName, Type: columnType } + ] + }; + } + } + } + return result; + } + static toTextValueList(values) { const list: any[] = []; for (let i = 0; i < values.length; i++) { diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx 
b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx index 33be370ada3..09573f29047 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx @@ -402,8 +402,11 @@ export default class KustoQueryField extends QueryField { } private async fetchSchema() { - const schema = await this.props.getSchema(); + let schema = await this.props.getSchema(); if (schema) { + if (schema.Type === 'AppInsights') { + schema = castSchema(schema); + } this.schema = schema; } else { this.schema = defaultSchema(); @@ -411,6 +414,15 @@ export default class KustoQueryField extends QueryField { } } +/** + * Cast schema from App Insights to default Kusto schema + */ +function castSchema(schema) { + const defaultSchemaTemplate = defaultSchema(); + defaultSchemaTemplate.Databases.Default = schema; + return defaultSchemaTemplate; +} + function normalizeQuery(query: string): string { const commentPattern = /\/\/.*$/gm; let normalizedQuery = query.replace(commentPattern, ''); diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/editor_component.tsx b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/editor_component.tsx index 7787f029ee7..bdc85f1577d 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/editor_component.tsx +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/editor_component.tsx @@ -4,7 +4,20 @@ import Kusto from './kusto/kusto'; import React, { Component } from 'react'; import coreModule from 'app/core/core_module'; -class Editor extends Component { +interface EditorProps { + index: number; + placeholder?: string; + change: (value: string, index: number) => void; + variables: () => string[] | string[]; + getSchema?: () => Promise; + execute?: () => void; +} + +class Editor extends Component { + static defaultProps = { + placeholder: 'Enter a query' + }; + constructor(props) { super(props); this.state = { @@ -31,7 +44,7 @@ class Editor extends Component { }; render() { - const { request, variables, getSchema } = this.props; + const { variables, getSchema, placeholder } = this.props; const { edited, query } = this.state; return ( @@ -42,8 +55,7 @@ class Editor extends Component { onQueryChange={this.onChangeQuery} prismLanguage="kusto" prismDefinition={Kusto} - placeholder="Enter a query" - request={request} + placeholder={placeholder} templateVariables={variables} getSchema={getSchema} /> @@ -56,7 +68,7 @@ coreModule.directive('kustoEditor', [ 'reactDirective', reactDirective => { return reactDirective(Editor, [ - 'change', 'database', 'execute', 'query', 'request', 'variables', + 'change', 'database', 'execute', 'query', 'variables', 'placeholder', ['getSchema', { watchDepth: 'reference' }] ]); }, diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/query.editor.html b/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/query.editor.html index 592fccdcda9..6299947b30a 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/query.editor.html +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/query.editor.html @@ -124,8 +124,6 @@
-
+ +
+
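The markup above presumably mirrors the Log Analytics kusto-editor binding added earlier in the series, pointed at the new App Insights handlers in query_ctrl.ts below. A rough sketch of what the element would look like (attribute names are inferred from those handlers, not copied from this hunk):

    <kusto-editor
      query="ctrl.target.appInsights.rawQueryString"
      change="ctrl.onAppInsightsQueryChange"
      execute="ctrl.onAppInsightsQueryExecute"
      variables="ctrl.templateVariables"
      getSchema="ctrl.getAppInsightsQuerySchema"
      placeholder="'Write an Application Insights query'"
    />
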
diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/query_ctrl.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/query_ctrl.ts index b3aa5f9f6e9..cee67d11ab3 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/query_ctrl.ts +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/query_ctrl.ts @@ -345,6 +345,7 @@ export class AzureMonitorQueryCtrl extends QueryCtrl { } return interval; } + getAppInsightsMetricNames() { if (!this.datasource.appInsightsDatasource.isConfigured()) { return; @@ -377,6 +378,19 @@ export class AzureMonitorQueryCtrl extends QueryCtrl { .catch(this.handleQueryCtrlError.bind(this)); } + onAppInsightsQueryChange = (nextQuery: string) => { + this.target.appInsights.rawQueryString = nextQuery; + } + + onAppInsightsQueryExecute = () => { + return this.refresh(); + } + + getAppInsightsQuerySchema = () => { + return this.datasource.appInsightsDatasource.getQuerySchema() + .catch(this.handleQueryCtrlError.bind(this)); + } + getAppInsightsGroupBySegments(query) { return _.map(this.target.appInsights.groupByOptions, option => { return { text: option, value: option }; From 99ff8e68ffbd851163d539891b884fbf66da67ca Mon Sep 17 00:00:00 2001 From: Alexander Zobnin Date: Mon, 4 Feb 2019 19:20:18 +0300 Subject: [PATCH 11/38] azuremonitor: fix where suggestions --- .../editor/KustoQueryField.tsx | 28 +++++++++++++------ 1 file changed, 19 insertions(+), 9 deletions(-) diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx index 09573f29047..9b2df96fcdd 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx @@ -99,12 +99,12 @@ export default class KustoQueryField extends QueryField { if (wrapperClasses.contains('function-context')) { typeaheadContext = 'context-function'; if (this.fields) { - suggestionGroups = this._getKeywordSuggestions(); + suggestionGroups = this.getKeywordSuggestions(); } else { this._fetchFields(); return; } - } else if (modelPrefix.match(/(where\s$)/i)) { + } else if (modelPrefix.match(/(where\s(\w+\b)?$)/i)) { typeaheadContext = 'context-where'; const fullQuery = Plain.serialize(this.state.value); const table = this.getTableFromContext(fullQuery); @@ -116,7 +116,7 @@ export default class KustoQueryField extends QueryField { } else if (modelPrefix.match(/(,\s*$)/)) { typeaheadContext = 'context-multiple-fields'; if (this.fields) { - suggestionGroups = this._getKeywordSuggestions(); + suggestionGroups = this.getKeywordSuggestions(); } else { this._fetchFields(); return; @@ -124,7 +124,7 @@ export default class KustoQueryField extends QueryField { } else if (modelPrefix.match(/(from\s$)/i)) { typeaheadContext = 'context-from'; if (this.events) { - suggestionGroups = this._getKeywordSuggestions(); + suggestionGroups = this.getKeywordSuggestions(); } else { this._fetchEvents(); return; @@ -132,7 +132,7 @@ export default class KustoQueryField extends QueryField { } else if (modelPrefix.match(/(^select\s\w*$)/i)) { typeaheadContext = 'context-select'; if (this.fields) { - suggestionGroups = this._getKeywordSuggestions(); + suggestionGroups = this.getKeywordSuggestions(); } else { this._fetchFields(); return; @@ -140,16 +140,19 @@ export default class KustoQueryField extends QueryField { } else if 
(modelPrefix.match(/from\s\S+\s\w*$/i)) { prefix = ''; typeaheadContext = 'context-since'; - suggestionGroups = this._getKeywordSuggestions(); + suggestionGroups = this.getKeywordSuggestions(); // } else if (modelPrefix.match(/\d+\s\w*$/)) { // typeaheadContext = 'context-number'; // suggestionGroups = this._getAfterNumberSuggestions(); } else if (modelPrefix.match(/ago\b/i) || modelPrefix.match(/facet\b/i) || modelPrefix.match(/\$__timefilter\b/i)) { typeaheadContext = 'context-timeseries'; - suggestionGroups = this._getKeywordSuggestions(); + suggestionGroups = this.getKeywordSuggestions(); } else if (prefix && !wrapperClasses.contains('argument')) { + if (modelPrefix.match(/\s$/i)) { + prefix = ''; + } typeaheadContext = 'context-builtin'; - suggestionGroups = this._getKeywordSuggestions(); + suggestionGroups = this.getKeywordSuggestions(); } else if (Plain.serialize(this.state.value) === '') { typeaheadContext = 'context-new'; if (this.schema) { @@ -159,6 +162,12 @@ export default class KustoQueryField extends QueryField { setTimeout(this.onTypeahead, 0); return; } + } else { + typeaheadContext = 'context-builtin'; + if (modelPrefix.match(/\s$/i)) { + prefix = ''; + } + suggestionGroups = this.getKeywordSuggestions(); } let results = 0; @@ -178,6 +187,7 @@ export default class KustoQueryField extends QueryField { .filter(group => group.items.length > 0); // console.log('onTypeahead', selection.anchorNode, wrapperClasses, text, offset, prefix, typeaheadContext); + // console.log('onTypeahead', modelPrefix, prefix, typeaheadContext); this.setState({ typeaheadPrefix: prefix, @@ -283,7 +293,7 @@ export default class KustoQueryField extends QueryField { // ]; // } - private _getKeywordSuggestions(): SuggestionGroup[] { + private getKeywordSuggestions(): SuggestionGroup[] { return [ { prefixMatch: true, From 4b5bfd3da54e8ee681da9c6d74750714b7a882ff Mon Sep 17 00:00:00 2001 From: Alexander Zobnin Date: Tue, 5 Feb 2019 14:01:06 +0300 Subject: [PATCH 12/38] azuremonitor: more autocomplete suggestions for built-in functions --- .../editor/KustoQueryField.tsx | 197 ++++++++---------- 1 file changed, 89 insertions(+), 108 deletions(-) diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx index 9b2df96fcdd..bbe34b8f46a 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx @@ -96,57 +96,51 @@ export default class KustoQueryField extends QueryField { const wrapperClasses = wrapperNode.classList; let typeaheadContext: string | null = null; + // Built-in functions if (wrapperClasses.contains('function-context')) { typeaheadContext = 'context-function'; - if (this.fields) { - suggestionGroups = this.getKeywordSuggestions(); - } else { - this._fetchFields(); - return; - } + suggestionGroups = this.getColumnSuggestions(); + + // where } else if (modelPrefix.match(/(where\s(\w+\b)?$)/i)) { typeaheadContext = 'context-where'; - const fullQuery = Plain.serialize(this.state.value); - const table = this.getTableFromContext(fullQuery); - if (table) { - suggestionGroups = this.getWhereSuggestions(table); - } else { - return; - } - } else if (modelPrefix.match(/(,\s*$)/)) { - typeaheadContext = 'context-multiple-fields'; - if (this.fields) { - suggestionGroups = this.getKeywordSuggestions(); - } else { - this._fetchFields(); - 
return; - } - } else if (modelPrefix.match(/(from\s$)/i)) { - typeaheadContext = 'context-from'; - if (this.events) { - suggestionGroups = this.getKeywordSuggestions(); - } else { - this._fetchEvents(); - return; - } - } else if (modelPrefix.match(/(^select\s\w*$)/i)) { - typeaheadContext = 'context-select'; - if (this.fields) { - suggestionGroups = this.getKeywordSuggestions(); - } else { - this._fetchFields(); - return; - } - } else if (modelPrefix.match(/from\s\S+\s\w*$/i)) { - prefix = ''; - typeaheadContext = 'context-since'; - suggestionGroups = this.getKeywordSuggestions(); - // } else if (modelPrefix.match(/\d+\s\w*$/)) { - // typeaheadContext = 'context-number'; - // suggestionGroups = this._getAfterNumberSuggestions(); - } else if (modelPrefix.match(/ago\b/i) || modelPrefix.match(/facet\b/i) || modelPrefix.match(/\$__timefilter\b/i)) { - typeaheadContext = 'context-timeseries'; - suggestionGroups = this.getKeywordSuggestions(); + suggestionGroups = this.getColumnSuggestions(); + + // summarize by + } else if (modelPrefix.match(/(summarize\s(\w+\b)?$)/i)) { + typeaheadContext = 'context-summarize'; + suggestionGroups = this.getFunctionSuggestions(); + } else if (modelPrefix.match(/(summarize\s(.+\s)?by\s+([^,\s]+,\s*)*([^,\s]+\b)?$)/i)) { + typeaheadContext = 'context-summarize-by'; + suggestionGroups = this.getColumnSuggestions(); + + // order by, top X by, ... by ... + } else if (modelPrefix.match(/(by\s+([^,\s]+,\s*)*([^,\s]+\b)?$)/i)) { + typeaheadContext = 'context-by'; + suggestionGroups = this.getColumnSuggestions(); + + // join + } else if (modelPrefix.match(/(on\s(.+\b)?$)/i)) { + typeaheadContext = 'context-join-on'; + suggestionGroups = this.getColumnSuggestions(); + } else if (modelPrefix.match(/(join\s+(\(\s+)?(\w+\b)?$)/i)) { + typeaheadContext = 'context-join'; + suggestionGroups = this.getTableSuggestions(); + + // distinct + } else if (modelPrefix.match(/(distinct\s(.+\b)?$)/i)) { + typeaheadContext = 'context-distinct'; + suggestionGroups = this.getColumnSuggestions(); + + // database() + } else if (modelPrefix.match(/(database\(\"(\w+)\"\)\.(.+\b)?$)/i)) { + typeaheadContext = 'context-database-table'; + const db = this.getDBFromDatabaseFunction(modelPrefix); + console.log(db); + suggestionGroups = this.getTableSuggestions(db); + prefix = prefix.replace('.', ''); + + // built-in } else if (prefix && !wrapperClasses.contains('argument')) { if (modelPrefix.match(/\s$/i)) { prefix = ''; @@ -156,7 +150,7 @@ export default class KustoQueryField extends QueryField { } else if (Plain.serialize(this.state.value) === '') { typeaheadContext = 'context-new'; if (this.schema) { - suggestionGroups = this._getInitialSuggestions(); + suggestionGroups = this.getInitialSuggestions(); } else { this.fetchSchema(); setTimeout(this.onTypeahead, 0); @@ -187,7 +181,7 @@ export default class KustoQueryField extends QueryField { .filter(group => group.items.length > 0); // console.log('onTypeahead', selection.anchorNode, wrapperClasses, text, offset, prefix, typeaheadContext); - // console.log('onTypeahead', modelPrefix, prefix, typeaheadContext); + console.log('onTypeahead', modelPrefix, prefix, typeaheadContext); this.setState({ typeaheadPrefix: prefix, @@ -293,6 +287,10 @@ export default class KustoQueryField extends QueryField { // ]; // } + private getInitialSuggestions(): SuggestionGroup[] { + return this.getTableSuggestions(); + } + private getKeywordSuggestions(): SuggestionGroup[] { return [ { @@ -323,50 +321,28 @@ export default class KustoQueryField extends QueryField { ]; } 
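  // Illustrative mapping, not part of the original change: sample inputs and the typeahead
  // context the prefix checks above resolve them to ("<cursor>" marks the caret position;
  // table and column names are made up):
  //
  //   AzureActivity | where <cursor>                        -> 'context-where'           (column suggestions)
  //   AzureActivity | summarize <cursor>                    -> 'context-summarize'       (function suggestions)
  //   AzureActivity | summarize avg(Duration) by <cursor>   -> 'context-summarize-by'    (column suggestions)
  //   AzureActivity | top 10 by <cursor>                    -> 'context-by'              (column suggestions)
  //   AzureActivity | join <cursor>                         -> 'context-join'            (table suggestions)
  //   AzureActivity | join Requests on <cursor>             -> 'context-join-on'         (column suggestions)
  //   AzureActivity | distinct <cursor>                     -> 'context-distinct'        (column suggestions)
  //   database("Samples").<cursor>                          -> 'context-database-table'  (tables in "Samples")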
- private _getInitialSuggestions(): SuggestionGroup[] { + private getFunctionSuggestions(): SuggestionGroup[] { return [ { prefixMatch: true, - label: 'Tables', - items: _.map(this.schema.Databases.Default.Tables, (t: any) => ({ text: t.Name })) + label: 'Functions', + items: functionTokens.map((s: any) => { s.type = 'function'; return s; }) + }, + { + prefixMatch: true, + label: 'Macros', + items: grafanaMacros.map((s: any) => { s.type = 'function'; return s; }) } ]; - - // return [ - // { - // prefixMatch: true, - // label: 'Keywords', - // items: KEYWORDS.map(wrapText) - // }, - // { - // prefixMatch: true, - // label: 'Operators', - // items: operatorTokens.map((s: any) => { s.type = 'function'; return s; }) - // }, - // { - // prefixMatch: true, - // label: 'Functions', - // items: functionTokens.map((s: any) => { s.type = 'function'; return s; }) - // }, - // { - // prefixMatch: true, - // label: 'Macros', - // items: grafanaMacros.map((s: any) => { s.type = 'function'; return s; }) - // } - // ]; } - private getWhereSuggestions(table: string): SuggestionGroup[] { - const tableSchema = this.schema.Databases.Default.Tables[table]; - if (tableSchema) { + getTableSuggestions(db = 'Default'): SuggestionGroup[] { + if (this.schema.Databases[db]) { return [ { prefixMatch: true, - label: 'Fields', - items: _.map(tableSchema.OrderedColumns, (f: any) => ({ - text: f.Name, - hint: f.Type - })) + label: 'Tables', + items: _.map(this.schema.Databases[db].Tables, (t: any) => ({ text: t.Name })) } ]; } else { @@ -374,7 +350,28 @@ export default class KustoQueryField extends QueryField { } } - private getTableFromContext(query: string) { + private getColumnSuggestions(): SuggestionGroup[] { + const table = this.getTableFromContext(); + if (table) { + const tableSchema = this.schema.Databases.Default.Tables[table]; + if (tableSchema) { + return [ + { + prefixMatch: true, + label: 'Fields', + items: _.map(tableSchema.OrderedColumns, (f: any) => ({ + text: f.Name, + hint: f.Type + })) + } + ]; + } + } + return []; + } + + private getTableFromContext() { + const query = Plain.serialize(this.state.value); const tablePattern = /^\s*(\w+)\s*|/g; const normalizedQuery = normalizeQuery(query); const match = tablePattern.exec(normalizedQuery); @@ -385,30 +382,14 @@ export default class KustoQueryField extends QueryField { } } - private async _fetchEvents() { - // const query = 'events'; - // const result = await this.request(query); - - // if (result === undefined) { - // this.events = []; - // } else { - // this.events = result; - // } - // setTimeout(this.onTypeahead, 0); - - //Stub - this.events = []; - } - - private async _fetchFields() { - // const query = 'fields'; - // const result = await this.request(query); - - // this.fields = result || []; - - // setTimeout(this.onTypeahead, 0); - // Stub - this.fields = []; + private getDBFromDatabaseFunction(prefix: string) { + const databasePattern = /database\(\"(\w+)\"\)/gi; + const match = databasePattern.exec(prefix); + if (match && match.length > 1 && match[0] && match[1]) { + return match[1]; + } else { + return null; + } } private async fetchSchema() { From 181b4f9e80fbe7a6aa39c957dc79c863dcdbf11a Mon Sep 17 00:00:00 2001 From: Alexander Zobnin Date: Tue, 5 Feb 2019 14:39:24 +0300 Subject: [PATCH 13/38] azuremonitor: improve autocomplete experence --- .../editor/KustoQueryField.tsx | 24 ++++++++++--------- .../editor/query_field.tsx | 6 ++--- 2 files changed, 16 insertions(+), 14 deletions(-) diff --git 
a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx index bbe34b8f46a..0a484794e8f 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx @@ -65,7 +65,7 @@ export default class KustoQueryField extends QueryField { this.fetchSchema(); } - onTypeahead = () => { + onTypeahead = (force = false) => { const selection = window.getSelection(); if (selection.anchorNode) { const wrapperNode = selection.anchorNode.parentElement; @@ -140,14 +140,8 @@ export default class KustoQueryField extends QueryField { suggestionGroups = this.getTableSuggestions(db); prefix = prefix.replace('.', ''); - // built-in - } else if (prefix && !wrapperClasses.contains('argument')) { - if (modelPrefix.match(/\s$/i)) { - prefix = ''; - } - typeaheadContext = 'context-builtin'; - suggestionGroups = this.getKeywordSuggestions(); - } else if (Plain.serialize(this.state.value) === '') { + // new + } else if (normalizeQuery(Plain.serialize(this.state.value)).match(/^\s*\w*$/i)) { typeaheadContext = 'context-new'; if (this.schema) { suggestionGroups = this.getInitialSuggestions(); @@ -156,7 +150,15 @@ export default class KustoQueryField extends QueryField { setTimeout(this.onTypeahead, 0); return; } - } else { + + // built-in + } else if (prefix && !wrapperClasses.contains('argument')) { + if (modelPrefix.match(/\s$/i)) { + prefix = ''; + } + typeaheadContext = 'context-builtin'; + suggestionGroups = this.getKeywordSuggestions(); + } else if (force === true) { typeaheadContext = 'context-builtin'; if (modelPrefix.match(/\s$/i)) { prefix = ''; @@ -181,7 +183,7 @@ export default class KustoQueryField extends QueryField { .filter(group => group.items.length > 0); // console.log('onTypeahead', selection.anchorNode, wrapperClasses, text, offset, prefix, typeaheadContext); - console.log('onTypeahead', modelPrefix, prefix, typeaheadContext); + // console.log('onTypeahead', prefix, typeaheadContext); this.setState({ typeaheadPrefix: prefix, diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/query_field.tsx b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/query_field.tsx index 0acd53cabff..e62337c4982 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/query_field.tsx +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/query_field.tsx @@ -104,11 +104,11 @@ class QueryField extends React.Component { const changed = value.document !== this.state.value.document; this.setState({ value }, () => { if (changed) { + // call typeahead only if query changed + window.requestAnimationFrame(this.onTypeahead); this.onChangeQuery(); } }); - - window.requestAnimationFrame(this.onTypeahead); }; request = (url?) 
=> { @@ -143,7 +143,7 @@ class QueryField extends React.Component { case ' ': { if (event.ctrlKey) { event.preventDefault(); - this.onTypeahead(); + this.onTypeahead(true); return true; } break; From 4caea91164bed003b589476e77ceeae64949b568 Mon Sep 17 00:00:00 2001 From: Alexander Zobnin Date: Wed, 6 Feb 2019 13:00:26 +0300 Subject: [PATCH 14/38] azuremonitor: fix autocomplete menu height --- .../editor/query_field.tsx | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/query_field.tsx b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/query_field.tsx index e62337c4982..adab7fc5414 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/query_field.tsx +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/query_field.tsx @@ -105,7 +105,7 @@ class QueryField extends React.Component { this.setState({ value }, () => { if (changed) { // call typeahead only if query changed - window.requestAnimationFrame(this.onTypeahead); + requestAnimationFrame(() => this.onTypeahead()); this.onChangeQuery(); } }); @@ -283,12 +283,18 @@ class QueryField extends React.Component { const rect = node.parentElement.getBoundingClientRect(); const scrollX = window.scrollX; const scrollY = window.scrollY; + const screenHeight = window.innerHeight; + + const menuLeft = rect.left + scrollX - 2; + const menuTop = rect.top + scrollY + rect.height + 4; + const menuHeight = screenHeight - menuTop - 10; // Write DOM requestAnimationFrame(() => { menu.style.opacity = 1; - menu.style.top = `${rect.top + scrollY + rect.height + 4}px`; - menu.style.left = `${rect.left + scrollX - 2}px`; + menu.style.top = `${menuTop}px`; + menu.style.left = `${menuLeft}px`; + menu.style.maxHeight = `${menuHeight}px`; }); } }; From e4446f0340eef3edf3e6d54f50364fdc821732cc Mon Sep 17 00:00:00 2001 From: Alexander Zobnin Date: Wed, 6 Feb 2019 13:52:35 +0300 Subject: [PATCH 15/38] azuremonitor: improve autocomplete UX --- .../editor/KustoQueryField.tsx | 20 +++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx index 0a484794e8f..2a578176674 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/KustoQueryField.tsx @@ -65,7 +65,7 @@ export default class KustoQueryField extends QueryField { this.fetchSchema(); } - onTypeahead = (force = false) => { + onTypeahead = (force?: boolean) => { const selection = window.getSelection(); if (selection.anchorNode) { const wrapperNode = selection.anchorNode.parentElement; @@ -152,14 +152,17 @@ export default class KustoQueryField extends QueryField { } // built-in - } else if (prefix && !wrapperClasses.contains('argument')) { + } else if (prefix && !wrapperClasses.contains('argument') && !force) { + // Use only last typed word as a prefix for searching if (modelPrefix.match(/\s$/i)) { prefix = ''; + return; } + prefix = getLastWord(prefix); typeaheadContext = 'context-builtin'; suggestionGroups = this.getKeywordSuggestions(); } else if (force === true) { - typeaheadContext = 'context-builtin'; + typeaheadContext = 'context-builtin-forced'; if (modelPrefix.match(/\s$/i)) { prefix = ''; } @@ -183,7 +186,7 @@ export default 
class KustoQueryField extends QueryField { .filter(group => group.items.length > 0); // console.log('onTypeahead', selection.anchorNode, wrapperClasses, text, offset, prefix, typeaheadContext); - // console.log('onTypeahead', prefix, typeaheadContext); + // console.log('onTypeahead', prefix, typeaheadContext, force); this.setState({ typeaheadPrefix: prefix, @@ -422,3 +425,12 @@ function normalizeQuery(query: string): string { normalizedQuery = normalizedQuery.replace('\n', ' '); return normalizedQuery; } + +function getLastWord(str: string): string { + const lastWordPattern = /(?:.*\s)?([^\s]+\s*)$/gi; + const match = lastWordPattern.exec(str); + if (match && match.length > 1) { + return match[1]; + } + return ''; +} From 0e228d582d53b22a8f9bc1a7448bdc485b4f04e0 Mon Sep 17 00:00:00 2001 From: Daniel Lee Date: Fri, 8 Feb 2019 17:20:31 +0100 Subject: [PATCH 16/38] azuremonitor: builds a query and sends it to Azure on the backend Lots of edge cases not covered and the response is not parsed. It only handles one service and will have to be refactored to handle multiple --- pkg/cmd/grafana-server/main.go | 1 + pkg/models/datasource.go | 2 +- pkg/tsdb/azuremonitor/azuremonitor.go | 251 +++++++++++++++++++++ pkg/tsdb/azuremonitor/azuremonitor_test.go | 61 +++++ pkg/tsdb/azuremonitor/types.go | 72 ++++++ 5 files changed, 386 insertions(+), 1 deletion(-) create mode 100644 pkg/tsdb/azuremonitor/azuremonitor.go create mode 100644 pkg/tsdb/azuremonitor/azuremonitor_test.go create mode 100644 pkg/tsdb/azuremonitor/types.go diff --git a/pkg/cmd/grafana-server/main.go b/pkg/cmd/grafana-server/main.go index 3bdaf0cc80e..d371d4e91da 100644 --- a/pkg/cmd/grafana-server/main.go +++ b/pkg/cmd/grafana-server/main.go @@ -19,6 +19,7 @@ import ( _ "github.com/grafana/grafana/pkg/services/alerting/conditions" _ "github.com/grafana/grafana/pkg/services/alerting/notifiers" "github.com/grafana/grafana/pkg/setting" + _ "github.com/grafana/grafana/pkg/tsdb/azuremonitor" _ "github.com/grafana/grafana/pkg/tsdb/cloudwatch" _ "github.com/grafana/grafana/pkg/tsdb/elasticsearch" _ "github.com/grafana/grafana/pkg/tsdb/graphite" diff --git a/pkg/models/datasource.go b/pkg/models/datasource.go index e1cb185d92a..22c53dfa0dd 100644 --- a/pkg/models/datasource.go +++ b/pkg/models/datasource.go @@ -23,7 +23,7 @@ const ( DS_ACCESS_DIRECT = "direct" DS_ACCESS_PROXY = "proxy" DS_STACKDRIVER = "stackdriver" - DS_AZURE_MONITOR = "azure-monitor" + DS_AZURE_MONITOR = "grafana-azure-monitor-datasource" ) var ( diff --git a/pkg/tsdb/azuremonitor/azuremonitor.go b/pkg/tsdb/azuremonitor/azuremonitor.go new file mode 100644 index 00000000000..93fd8ed8110 --- /dev/null +++ b/pkg/tsdb/azuremonitor/azuremonitor.go @@ -0,0 +1,251 @@ +package azuremonitor + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "io/ioutil" + "net/http" + "net/url" + "path" + "time" + + // "github.com/grafana/grafana/pkg/components/null" + "github.com/grafana/grafana/pkg/api/pluginproxy" + "github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/log" + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/setting" + "github.com/grafana/grafana/pkg/tsdb" + "github.com/opentracing/opentracing-go" + "golang.org/x/net/context/ctxhttp" +) + +var ( + slog log.Logger +) + +// AzureMonitorExecutor executes queries for the Azure Monitor datasource - all four services +type AzureMonitorExecutor struct { + httpClient *http.Client + dsInfo *models.DataSource +} + +// 
NewAzureMonitorExecutor initializes a http client +func NewAzureMonitorExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { + httpClient, err := dsInfo.GetHttpClient() + if err != nil { + return nil, err + } + + return &AzureMonitorExecutor{ + httpClient: httpClient, + dsInfo: dsInfo, + }, nil +} + +func init() { + slog = log.New("tsdb.azuremonitor") + tsdb.RegisterTsdbQueryEndpoint("grafana-azure-monitor-datasource", NewAzureMonitorExecutor) +} + +// Query takes in the frontend queries, parses them into the query format +// expected by chosen Azure Monitor service (Azure Monitor, App Insights etc.) +// executes the queries against the API and parses the response into +// the right format +func (e *AzureMonitorExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { + var result *tsdb.Response + var err error + queryType := tsdbQuery.Queries[0].Model.Get("queryType").MustString("") + + switch queryType { + case "azureMonitorTimeSeriesQuery": + case "Azure Monitor": + fallthrough + default: + result, err = e.executeTimeSeriesQuery(ctx, tsdbQuery) + } + + return result, err +} + +func (e *AzureMonitorExecutor) executeTimeSeriesQuery(ctx context.Context, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { + result := &tsdb.Response{ + Results: make(map[string]*tsdb.QueryResult), + } + + queries, err := e.buildQueries(tsdbQuery) + if err != nil { + return nil, err + } + + for _, query := range queries { + queryRes, resp, err := e.executeQuery(ctx, query, tsdbQuery) + if err != nil { + return nil, err + } + err = e.parseResponse(queryRes, resp, query) + if err != nil { + queryRes.Error = err + } + result.Results[query.RefID] = queryRes + } + + return result, nil +} + +func (e *AzureMonitorExecutor) buildQueries(tsdbQuery *tsdb.TsdbQuery) ([]*AzureMonitorQuery, error) { + azureMonitorQueries := []*AzureMonitorQuery{} + startTime, err := tsdbQuery.TimeRange.ParseFrom() + if err != nil { + return nil, err + } + + endTime, err := tsdbQuery.TimeRange.ParseTo() + if err != nil { + return nil, err + } + + for _, query := range tsdbQuery.Queries { + var target string + + azureMonitorTarget := query.Model.Get("azureMonitor").MustMap() + + resourceGroup := azureMonitorTarget["resourceGroup"].(string) + metricDefinition := azureMonitorTarget["metricDefinition"].(string) + resourceName := azureMonitorTarget["resourceName"].(string) + azureURL := fmt.Sprintf("resourceGroups/%s/providers/%s/%s/providers/microsoft.insights/metrics", resourceGroup, metricDefinition, resourceName) + + alias := azureMonitorTarget["alias"].(string) + + params := url.Values{} + params.Add("api-version", "2018-01-01") + params.Add("timespan", fmt.Sprintf("%v/%v", startTime.UTC().Format(time.RFC3339), endTime.UTC().Format(time.RFC3339))) + params.Add("interval", azureMonitorTarget["timeGrain"].(string)) + params.Add("aggregation", azureMonitorTarget["aggregation"].(string)) + params.Add("metricnames", azureMonitorTarget["metricName"].(string)) + target = params.Encode() + + if setting.Env == setting.DEV { + slog.Debug("Azuremonitor request", "params", params) + } + + azureMonitorQueries = append(azureMonitorQueries, &AzureMonitorQuery{ + URL: azureURL, + Target: target, + Params: params, + RefID: query.RefId, + Alias: alias, + }) + } + + return azureMonitorQueries, nil +} + +func (e *AzureMonitorExecutor) executeQuery(ctx context.Context, query *AzureMonitorQuery, tsdbQuery *tsdb.TsdbQuery) (*tsdb.QueryResult, AzureMonitorResponse, error) { + queryResult 
:= &tsdb.QueryResult{Meta: simplejson.New(), RefId: query.RefID} + + req, err := e.createRequest(ctx, e.dsInfo) + if err != nil { + queryResult.Error = err + return queryResult, AzureMonitorResponse{}, nil + } + + req.URL.Path = path.Join(req.URL.Path, query.URL) + req.URL.RawQuery = query.Params.Encode() + queryResult.Meta.Set("rawQuery", req.URL.RawQuery) + + span, ctx := opentracing.StartSpanFromContext(ctx, "azuremonitor query") + span.SetTag("target", query.Target) + span.SetTag("from", tsdbQuery.TimeRange.From) + span.SetTag("until", tsdbQuery.TimeRange.To) + span.SetTag("datasource_id", e.dsInfo.Id) + span.SetTag("org_id", e.dsInfo.OrgId) + + defer span.Finish() + + opentracing.GlobalTracer().Inject( + span.Context(), + opentracing.HTTPHeaders, + opentracing.HTTPHeadersCarrier(req.Header)) + + res, err := ctxhttp.Do(ctx, e.httpClient, req) + if err != nil { + queryResult.Error = err + return queryResult, AzureMonitorResponse{}, nil + } + + data, err := e.unmarshalResponse(res) + if err != nil { + queryResult.Error = err + return queryResult, AzureMonitorResponse{}, nil + } + + return queryResult, data, nil +} + +func (e *AzureMonitorExecutor) createRequest(ctx context.Context, dsInfo *models.DataSource) (*http.Request, error) { + // find plugin + plugin, ok := plugins.DataSources[dsInfo.Type] + if !ok { + return nil, errors.New("Unable to find datasource plugin Azure Monitor") + } + + var azureMonitorRoute *plugins.AppPluginRoute + for _, route := range plugin.Routes { + if route.Path == "azuremonitor" { + azureMonitorRoute = route + break + } + } + + cloudName := dsInfo.JsonData.Get("cloudName").MustString("azuremonitor") + subscriptionID := dsInfo.JsonData.Get("subscriptionId").MustString() + proxyPass := fmt.Sprintf("%s/subscriptions/%s", cloudName, subscriptionID) + + u, _ := url.Parse(dsInfo.Url) + u.Path = path.Join(u.Path, "render") + + req, err := http.NewRequest(http.MethodGet, u.String(), nil) + if err != nil { + slog.Error("Failed to create request", "error", err) + return nil, fmt.Errorf("Failed to create request. 
error: %v", err) + } + + req.Header.Set("Content-Type", "application/json") + req.Header.Set("User-Agent", fmt.Sprintf("Grafana/%s", setting.BuildVersion)) + + pluginproxy.ApplyRoute(ctx, req, proxyPass, azureMonitorRoute, dsInfo) + + return req, nil +} + +func (e *AzureMonitorExecutor) unmarshalResponse(res *http.Response) (AzureMonitorResponse, error) { + body, err := ioutil.ReadAll(res.Body) + defer res.Body.Close() + if err != nil { + return AzureMonitorResponse{}, err + } + + if res.StatusCode/100 != 2 { + slog.Error("Request failed", "status", res.Status, "body", string(body)) + return AzureMonitorResponse{}, fmt.Errorf(string(body)) + } + + var data AzureMonitorResponse + err = json.Unmarshal(body, &data) + if err != nil { + slog.Error("Failed to unmarshal AzureMonitor response", "error", err, "status", res.Status, "body", string(body)) + return AzureMonitorResponse{}, err + } + + return data, nil +} + +func (e *AzureMonitorExecutor) parseResponse(queryRes *tsdb.QueryResult, data AzureMonitorResponse, query *AzureMonitorQuery) error { + slog.Info("AzureMonitor", "Response", data) + + return nil +} diff --git a/pkg/tsdb/azuremonitor/azuremonitor_test.go b/pkg/tsdb/azuremonitor/azuremonitor_test.go new file mode 100644 index 00000000000..787e0ae1586 --- /dev/null +++ b/pkg/tsdb/azuremonitor/azuremonitor_test.go @@ -0,0 +1,61 @@ +package azuremonitor + +import ( + "fmt" + "testing" + "time" + + "github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/tsdb" + + . "github.com/smartystreets/goconvey/convey" +) + +func TestAzureMonitor(t *testing.T) { + Convey("AzureMonitor", t, func() { + executor := &AzureMonitorExecutor{} + + Convey("Parse queries from frontend and build AzureMonitor API queries", func() { + fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local) + tsdbQuery := &tsdb.TsdbQuery{ + TimeRange: &tsdb.TimeRange{ + From: fmt.Sprintf("%v", fromStart.Unix()*1000), + To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000), + }, + Queries: []*tsdb.Query{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "azureMonitor": map[string]interface{}{ + "timeGrain": "PT1M", + "aggregation": "Average", + "resourceGroup": "grafanastaging", + "resourceName": "grafana", + "metricDefinition": "Microsoft.Compute/virtualMachines", + "metricName": "Percentage CPU", + "alias": "testalias", + "queryType": "Azure Monitor", + }, + }), + RefId: "A", + }, + }, + } + Convey("and is a normal query", func() { + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + + So(len(queries), ShouldEqual, 1) + So(queries[0].RefID, ShouldEqual, "A") + So(queries[0].URL, ShouldEqual, "resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics") + So(queries[0].Target, ShouldEqual, "aggregation=Average&api-version=2018-01-01&interval=PT1M&metricnames=Percentage+CPU×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z") + So(len(queries[0].Params), ShouldEqual, 5) + So(queries[0].Params["timespan"][0], ShouldEqual, "2018-03-15T13:00:00Z/2018-03-15T13:34:00Z") + So(queries[0].Params["api-version"][0], ShouldEqual, "2018-01-01") + So(queries[0].Params["aggregation"][0], ShouldEqual, "Average") + So(queries[0].Params["metricnames"][0], ShouldEqual, "Percentage CPU") + So(queries[0].Params["interval"][0], ShouldEqual, "PT1M") + So(queries[0].Alias, ShouldEqual, "testalias") + }) + }) + }) +} diff --git a/pkg/tsdb/azuremonitor/types.go b/pkg/tsdb/azuremonitor/types.go new 
file mode 100644 index 00000000000..fc99ede6512 --- /dev/null +++ b/pkg/tsdb/azuremonitor/types.go @@ -0,0 +1,72 @@ +package azuremonitor + +import ( + "net/url" + "time" +) + +// AzureMonitorQuery is the query for all the services as they have similar queries +// with a url, a querystring and an alias field +type AzureMonitorQuery struct { + URL string + Target string + Params url.Values + RefID string + Alias string +} + +// AzureMonitorResponse is the json response from the Azure Monitor API +type AzureMonitorResponse struct { + Cost int `json:"cost"` + Timespan string `json:"timespan"` + Interval string `json:"interval"` + Value []struct { + ID string `json:"id"` + Type string `json:"type"` + Name struct { + Value string `json:"value"` + LocalizedValue string `json:"localizedValue"` + } `json:"name"` + Unit string `json:"unit"` + Timeseries []struct { + Metadatavalues []struct { + Name struct { + Value string `json:"value"` + LocalizedValue string `json:"localizedValue"` + } `json:"name"` + Value string `json:"value"` + } `json:"metadatavalues"` + Data []struct { + TimeStamp time.Time `json:"timeStamp"` + Average float64 `json:"average"` + } `json:"data"` + } `json:"timeseries"` + } `json:"value"` + Namespace string `json:"namespace"` + Resourceregion string `json:"resourceregion"` +} + +// ApplicationInsightsResponse is the json response from the Application Insights API +type ApplicationInsightsResponse struct { + Tables []struct { + TableName string `json:"TableName"` + Columns []struct { + ColumnName string `json:"ColumnName"` + DataType string `json:"DataType"` + ColumnType string `json:"ColumnType"` + } `json:"Columns"` + Rows [][]interface{} `json:"Rows"` + } `json:"Tables"` +} + +// AzureLogAnalyticsResponse is the json response object from the Azure Log Analytics API. +type AzureLogAnalyticsResponse struct { + Tables []struct { + Name string `json:"name"` + Columns []struct { + Name string `json:"name"` + Type string `json:"type"` + } `json:"columns"` + Rows [][]interface{} `json:"rows"` + } `json:"tables"` +} From 10194df11270d961d97d3d084afdda2ce669c835 Mon Sep 17 00:00:00 2001 From: Daniel Lee Date: Fri, 8 Feb 2019 18:15:17 +0100 Subject: [PATCH 17/38] azuremonitor: simple alerting for Azure Monitor API Lots of edge cases and functionality left to implement but a simple query works for alerting now. 
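For context, a minimal sketch of the request shape an alert evaluation hands to this
executor. The resource and metric values are borrowed from the test below and are
illustrative only, not a working configuration:

    package main

    import (
        "fmt"
        "time"

        "github.com/grafana/grafana/pkg/components/simplejson"
        "github.com/grafana/grafana/pkg/tsdb"
    )

    func main() {
        // Time range and query model as the alerting engine would supply them to
        // AzureMonitorExecutor.Query (sample values only).
        now := time.Now()
        tsdbQuery := &tsdb.TsdbQuery{
            TimeRange: &tsdb.TimeRange{
                From: fmt.Sprintf("%v", now.Add(-5*time.Minute).Unix()*1000),
                To:   fmt.Sprintf("%v", now.Unix()*1000),
            },
            Queries: []*tsdb.Query{
                {
                    RefId: "A",
                    Model: simplejson.NewFromAny(map[string]interface{}{
                        "queryType": "Azure Monitor",
                        "azureMonitor": map[string]interface{}{
                            "resourceGroup":    "grafanastaging",
                            "metricDefinition": "Microsoft.Compute/virtualMachines",
                            "resourceName":     "grafana",
                            "metricName":       "Percentage CPU",
                            "aggregation":      "Average",
                            "timeGrain":        "PT1M",
                            "alias":            "",
                        },
                    }),
                },
            },
        }

        fmt.Println(tsdbQuery.Queries[0].Model.Get("azureMonitor").MustMap()["metricName"])
    }

buildQueries unpacks exactly these azureMonitor fields into the metrics request parameters,
and parseResponse maps the returned series onto the tsdb points that alert conditions evaluate.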
--- pkg/tsdb/azuremonitor/azuremonitor.go | 39 +++++++++++---- pkg/tsdb/azuremonitor/azuremonitor_test.go | 34 ++++++++++++++ .../test-data/1-azure-monitor-response.json | 47 +++++++++++++++++++ pkg/tsdb/azuremonitor/types.go | 11 +++-- .../plugin.json | 3 +- 5 files changed, 118 insertions(+), 16 deletions(-) create mode 100644 pkg/tsdb/azuremonitor/test-data/1-azure-monitor-response.json diff --git a/pkg/tsdb/azuremonitor/azuremonitor.go b/pkg/tsdb/azuremonitor/azuremonitor.go index 93fd8ed8110..8ef959bed9c 100644 --- a/pkg/tsdb/azuremonitor/azuremonitor.go +++ b/pkg/tsdb/azuremonitor/azuremonitor.go @@ -11,8 +11,8 @@ import ( "path" "time" - // "github.com/grafana/grafana/pkg/components/null" "github.com/grafana/grafana/pkg/api/pluginproxy" + "github.com/grafana/grafana/pkg/components/null" "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/models" @@ -113,10 +113,12 @@ func (e *AzureMonitorExecutor) buildQueries(tsdbQuery *tsdb.TsdbQuery) ([]*Azure azureMonitorTarget := query.Model.Get("azureMonitor").MustMap() - resourceGroup := azureMonitorTarget["resourceGroup"].(string) - metricDefinition := azureMonitorTarget["metricDefinition"].(string) - resourceName := azureMonitorTarget["resourceName"].(string) - azureURL := fmt.Sprintf("resourceGroups/%s/providers/%s/%s/providers/microsoft.insights/metrics", resourceGroup, metricDefinition, resourceName) + urlComponents := make(map[string]string) + urlComponents["resourceGroup"] = azureMonitorTarget["resourceGroup"].(string) + urlComponents["metricDefinition"] = azureMonitorTarget["metricDefinition"].(string) + urlComponents["resourceName"] = azureMonitorTarget["resourceName"].(string) + + azureURL := fmt.Sprintf("resourceGroups/%s/providers/%s/%s/providers/microsoft.insights/metrics", urlComponents["resourceGroup"], urlComponents["metricDefinition"], urlComponents["resourceName"]) alias := azureMonitorTarget["alias"].(string) @@ -133,11 +135,12 @@ func (e *AzureMonitorExecutor) buildQueries(tsdbQuery *tsdb.TsdbQuery) ([]*Azure } azureMonitorQueries = append(azureMonitorQueries, &AzureMonitorQuery{ - URL: azureURL, - Target: target, - Params: params, - RefID: query.RefId, - Alias: alias, + URL: azureURL, + UrlComponents: urlComponents, + Target: target, + Params: params, + RefID: query.RefId, + Alias: alias, }) } @@ -247,5 +250,21 @@ func (e *AzureMonitorExecutor) unmarshalResponse(res *http.Response) (AzureMonit func (e *AzureMonitorExecutor) parseResponse(queryRes *tsdb.QueryResult, data AzureMonitorResponse, query *AzureMonitorQuery) error { slog.Info("AzureMonitor", "Response", data) + for _, series := range data.Value { + points := make([]tsdb.TimePoint, 0) + + defaultMetricName := fmt.Sprintf("%s.%s", query.UrlComponents["resourceName"], series.Name.LocalizedValue) + + for _, point := range series.Timeseries[0].Data { + value := point.Average + points = append(points, tsdb.NewTimePoint(null.FloatFrom(value), float64((point.TimeStamp).Unix())*1000)) + } + + queryRes.Series = append(queryRes.Series, &tsdb.TimeSeries{ + Name: defaultMetricName, + Points: points, + }) + } + return nil } diff --git a/pkg/tsdb/azuremonitor/azuremonitor_test.go b/pkg/tsdb/azuremonitor/azuremonitor_test.go index 787e0ae1586..1b8f69aa64a 100644 --- a/pkg/tsdb/azuremonitor/azuremonitor_test.go +++ b/pkg/tsdb/azuremonitor/azuremonitor_test.go @@ -1,7 +1,9 @@ package azuremonitor import ( + "encoding/json" "fmt" + "io/ioutil" "testing" "time" @@ -57,5 +59,37 @@ func TestAzureMonitor(t 
*testing.T) { So(queries[0].Alias, ShouldEqual, "testalias") }) }) + + Convey("Parse AzureMonitor API response in the time series format", func() { + Convey("when data from query aggregated to one time series", func() { + data, err := loadTestFile("./test-data/1-azure-monitor-response.json") + So(err, ShouldBeNil) + So(data.Interval, ShouldEqual, "PT1M") + + res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} + query := &AzureMonitorQuery{ + UrlComponents: map[string]string{ + "resourceName": "grafana", + }, + } + err = executor.parseResponse(res, data, query) + So(err, ShouldBeNil) + + So(len(res.Series), ShouldEqual, 1) + So(res.Series[0].Name, ShouldEqual, "grafana.Percentage CPU") + So(len(res.Series[0].Points), ShouldEqual, 5) + }) + }) }) } + +func loadTestFile(path string) (AzureMonitorResponse, error) { + var data AzureMonitorResponse + + jsonBody, err := ioutil.ReadFile(path) + if err != nil { + return data, err + } + err = json.Unmarshal(jsonBody, &data) + return data, err +} diff --git a/pkg/tsdb/azuremonitor/test-data/1-azure-monitor-response.json b/pkg/tsdb/azuremonitor/test-data/1-azure-monitor-response.json new file mode 100644 index 00000000000..febb47f2047 --- /dev/null +++ b/pkg/tsdb/azuremonitor/test-data/1-azure-monitor-response.json @@ -0,0 +1,47 @@ +{ + "cost": 0, + "timespan": "2019-02-08T10:13:50Z\/2019-02-08T16:13:50Z", + "interval": "PT1M", + "value": [ + { + "id": "\/subscriptions\/44693801-6ee6-49de-9b2d-9106972f9572\/resourceGroups\/grafanastaging\/providers\/Microsoft.Compute\/virtualMachines\/grafana\/providers\/Microsoft.Insights\/metrics\/Percentage CPU", + "type": "Microsoft.Insights\/metrics", + "name": { + "value": "Percentage CPU", + "localizedValue": "Percentage CPU" + }, + "unit": "Percent", + "timeseries": [ + { + "metadatavalues": [ + + ], + "data": [ + { + "timeStamp": "2019-02-08T10:13:00Z", + "average": 2.0875 + }, + { + "timeStamp": "2019-02-08T10:14:00Z", + "average": 2.1525 + }, + { + "timeStamp": "2019-02-08T10:15:00Z", + "average": 2.155 + }, + { + "timeStamp": "2019-02-08T10:16:00Z", + "average": 3.6925 + }, + { + "timeStamp": "2019-02-08T10:17:00Z", + "average": 2.44 + } + ] + } + ] + } + ], + "namespace": "Microsoft.Compute\/virtualMachines", + "resourceregion": "westeurope" +} diff --git a/pkg/tsdb/azuremonitor/types.go b/pkg/tsdb/azuremonitor/types.go index fc99ede6512..5b1b7255d62 100644 --- a/pkg/tsdb/azuremonitor/types.go +++ b/pkg/tsdb/azuremonitor/types.go @@ -8,11 +8,12 @@ import ( // AzureMonitorQuery is the query for all the services as they have similar queries // with a url, a querystring and an alias field type AzureMonitorQuery struct { - URL string - Target string - Params url.Values - RefID string - Alias string + URL string + UrlComponents map[string]string + Target string + Params url.Values + RefID string + Alias string } // AzureMonitorResponse is the json response from the Azure Monitor API diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/plugin.json b/public/app/plugins/datasource/grafana-azure-monitor-datasource/plugin.json index 76a56f2baaa..e4f48c581e3 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/plugin.json +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/plugin.json @@ -158,5 +158,6 @@ }, "metrics": true, - "annotations": true + "annotations": true, + "alerting": true } From a5e5db20e171fb248da54ec97d1b0b071d51d46c Mon Sep 17 00:00:00 2001 From: Daniel Lee Date: Sat, 9 Feb 2019 21:52:44 +0100 Subject: [PATCH 18/38] 
azuremonitor: add support for aggregations on backend --- pkg/tsdb/azuremonitor/azuremonitor.go | 16 ++- pkg/tsdb/azuremonitor/azuremonitor_test.go | 103 +++++++++++++++++- ...json => 1-azure-monitor-response-avg.json} | 0 .../2-azure-monitor-response-total.json | 47 ++++++++ .../3-azure-monitor-response-maximum.json | 47 ++++++++ .../4-azure-monitor-response-minimum.json | 47 ++++++++ .../5-azure-monitor-response-count.json | 47 ++++++++ pkg/tsdb/azuremonitor/types.go | 6 +- 8 files changed, 309 insertions(+), 4 deletions(-) rename pkg/tsdb/azuremonitor/test-data/{1-azure-monitor-response.json => 1-azure-monitor-response-avg.json} (100%) create mode 100644 pkg/tsdb/azuremonitor/test-data/2-azure-monitor-response-total.json create mode 100644 pkg/tsdb/azuremonitor/test-data/3-azure-monitor-response-maximum.json create mode 100644 pkg/tsdb/azuremonitor/test-data/4-azure-monitor-response-minimum.json create mode 100644 pkg/tsdb/azuremonitor/test-data/5-azure-monitor-response-count.json diff --git a/pkg/tsdb/azuremonitor/azuremonitor.go b/pkg/tsdb/azuremonitor/azuremonitor.go index 8ef959bed9c..3c6ae9b0013 100644 --- a/pkg/tsdb/azuremonitor/azuremonitor.go +++ b/pkg/tsdb/azuremonitor/azuremonitor.go @@ -256,7 +256,21 @@ func (e *AzureMonitorExecutor) parseResponse(queryRes *tsdb.QueryResult, data Az defaultMetricName := fmt.Sprintf("%s.%s", query.UrlComponents["resourceName"], series.Name.LocalizedValue) for _, point := range series.Timeseries[0].Data { - value := point.Average + var value float64 + switch query.Params.Get("aggregation") { + case "Average": + value = point.Average + case "Total": + value = point.Total + case "Maximum": + value = point.Maximum + case "Minimum": + value = point.Minimum + case "Count": + value = point.Count + default: + value = point.Count + } points = append(points, tsdb.NewTimePoint(null.FloatFrom(value), float64((point.TimeStamp).Unix())*1000)) } diff --git a/pkg/tsdb/azuremonitor/azuremonitor_test.go b/pkg/tsdb/azuremonitor/azuremonitor_test.go index 1b8f69aa64a..73f2d83090f 100644 --- a/pkg/tsdb/azuremonitor/azuremonitor_test.go +++ b/pkg/tsdb/azuremonitor/azuremonitor_test.go @@ -4,6 +4,7 @@ import ( "encoding/json" "fmt" "io/ioutil" + "net/url" "testing" "time" @@ -61,8 +62,8 @@ func TestAzureMonitor(t *testing.T) { }) Convey("Parse AzureMonitor API response in the time series format", func() { - Convey("when data from query aggregated to one time series", func() { - data, err := loadTestFile("./test-data/1-azure-monitor-response.json") + Convey("when data from query aggregated as average to one time series", func() { + data, err := loadTestFile("./test-data/1-azure-monitor-response-avg.json") So(err, ShouldBeNil) So(data.Interval, ShouldEqual, "PT1M") @@ -71,6 +72,9 @@ func TestAzureMonitor(t *testing.T) { UrlComponents: map[string]string{ "resourceName": "grafana", }, + Params: url.Values{ + "aggregation": {"Average"}, + }, } err = executor.parseResponse(res, data, query) So(err, ShouldBeNil) @@ -78,6 +82,101 @@ func TestAzureMonitor(t *testing.T) { So(len(res.Series), ShouldEqual, 1) So(res.Series[0].Name, ShouldEqual, "grafana.Percentage CPU") So(len(res.Series[0].Points), ShouldEqual, 5) + + So(res.Series[0].Points[0][0].Float64, ShouldEqual, 2.0875) + So(res.Series[0].Points[0][1].Float64, ShouldEqual, 1549620780000) + + So(res.Series[0].Points[1][0].Float64, ShouldEqual, 2.1525) + So(res.Series[0].Points[1][1].Float64, ShouldEqual, 1549620840000) + + So(res.Series[0].Points[2][0].Float64, ShouldEqual, 2.155) + So(res.Series[0].Points[2][1].Float64, 
ShouldEqual, 1549620900000) + + So(res.Series[0].Points[3][0].Float64, ShouldEqual, 3.6925) + So(res.Series[0].Points[3][1].Float64, ShouldEqual, 1549620960000) + + So(res.Series[0].Points[4][0].Float64, ShouldEqual, 2.44) + So(res.Series[0].Points[4][1].Float64, ShouldEqual, 1549621020000) + }) + + Convey("when data from query aggregated as total to one time series", func() { + data, err := loadTestFile("./test-data/2-azure-monitor-response-total.json") + So(err, ShouldBeNil) + + res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} + query := &AzureMonitorQuery{ + UrlComponents: map[string]string{ + "resourceName": "grafana", + }, + Params: url.Values{ + "aggregation": {"Total"}, + }, + } + err = executor.parseResponse(res, data, query) + So(err, ShouldBeNil) + + So(res.Series[0].Points[0][0].Float64, ShouldEqual, 8.26) + So(res.Series[0].Points[0][1].Float64, ShouldEqual, 1549718940000) + }) + + Convey("when data from query aggregated as maximum to one time series", func() { + data, err := loadTestFile("./test-data/3-azure-monitor-response-maximum.json") + So(err, ShouldBeNil) + + res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} + query := &AzureMonitorQuery{ + UrlComponents: map[string]string{ + "resourceName": "grafana", + }, + Params: url.Values{ + "aggregation": {"Maximum"}, + }, + } + err = executor.parseResponse(res, data, query) + So(err, ShouldBeNil) + + So(res.Series[0].Points[0][0].Float64, ShouldEqual, 3.07) + So(res.Series[0].Points[0][1].Float64, ShouldEqual, 1549722360000) + }) + + Convey("when data from query aggregated as minimum to one time series", func() { + data, err := loadTestFile("./test-data/4-azure-monitor-response-minimum.json") + So(err, ShouldBeNil) + + res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} + query := &AzureMonitorQuery{ + UrlComponents: map[string]string{ + "resourceName": "grafana", + }, + Params: url.Values{ + "aggregation": {"Minimum"}, + }, + } + err = executor.parseResponse(res, data, query) + So(err, ShouldBeNil) + + So(res.Series[0].Points[0][0].Float64, ShouldEqual, 1.51) + So(res.Series[0].Points[0][1].Float64, ShouldEqual, 1549723380000) + }) + + Convey("when data from query aggregated as Count to one time series", func() { + data, err := loadTestFile("./test-data/5-azure-monitor-response-count.json") + So(err, ShouldBeNil) + + res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} + query := &AzureMonitorQuery{ + UrlComponents: map[string]string{ + "resourceName": "grafana", + }, + Params: url.Values{ + "aggregation": {"Count"}, + }, + } + err = executor.parseResponse(res, data, query) + So(err, ShouldBeNil) + + So(res.Series[0].Points[0][0].Float64, ShouldEqual, 4) + So(res.Series[0].Points[0][1].Float64, ShouldEqual, 1549723440000) }) }) }) diff --git a/pkg/tsdb/azuremonitor/test-data/1-azure-monitor-response.json b/pkg/tsdb/azuremonitor/test-data/1-azure-monitor-response-avg.json similarity index 100% rename from pkg/tsdb/azuremonitor/test-data/1-azure-monitor-response.json rename to pkg/tsdb/azuremonitor/test-data/1-azure-monitor-response-avg.json diff --git a/pkg/tsdb/azuremonitor/test-data/2-azure-monitor-response-total.json b/pkg/tsdb/azuremonitor/test-data/2-azure-monitor-response-total.json new file mode 100644 index 00000000000..1002bbf7d18 --- /dev/null +++ b/pkg/tsdb/azuremonitor/test-data/2-azure-monitor-response-total.json @@ -0,0 +1,47 @@ +{ + "cost": 0, + "timespan": "2019-02-09T13:29:41Z\/2019-02-09T19:29:41Z", + "interval": "PT1M", + "value": [ + { + "id": 
"\/subscriptions\/44693801-6ee6-49de-9b2d-9106972f9572\/resourceGroups\/grafanastaging\/providers\/Microsoft.Compute\/virtualMachines\/grafana\/providers\/Microsoft.Insights\/metrics\/Percentage CPU", + "type": "Microsoft.Insights\/metrics", + "name": { + "value": "Percentage CPU", + "localizedValue": "Percentage CPU" + }, + "unit": "Percent", + "timeseries": [ + { + "metadatavalues": [ + + ], + "data": [ + { + "timeStamp": "2019-02-09T13:29:00Z", + "total": 8.26 + }, + { + "timeStamp": "2019-02-09T13:30:00Z", + "total": 8.7 + }, + { + "timeStamp": "2019-02-09T13:31:00Z", + "total": 14.82 + }, + { + "timeStamp": "2019-02-09T13:32:00Z", + "total": 10.07 + }, + { + "timeStamp": "2019-02-09T13:33:00Z", + "total": 8.52 + } + ] + } + ] + } + ], + "namespace": "Microsoft.Compute\/virtualMachines", + "resourceregion": "westeurope" +} diff --git a/pkg/tsdb/azuremonitor/test-data/3-azure-monitor-response-maximum.json b/pkg/tsdb/azuremonitor/test-data/3-azure-monitor-response-maximum.json new file mode 100644 index 00000000000..3ca83c99932 --- /dev/null +++ b/pkg/tsdb/azuremonitor/test-data/3-azure-monitor-response-maximum.json @@ -0,0 +1,47 @@ +{ + "cost": 0, + "timespan": "2019-02-09T14:26:12Z\/2019-02-09T20:26:12Z", + "interval": "PT1M", + "value": [ + { + "id": "\/subscriptions\/44693801-6ee6-49de-9b2d-9106972f9572\/resourceGroups\/grafanastaging\/providers\/Microsoft.Compute\/virtualMachines\/grafana\/providers\/Microsoft.Insights\/metrics\/Percentage CPU", + "type": "Microsoft.Insights\/metrics", + "name": { + "value": "Percentage CPU", + "localizedValue": "Percentage CPU" + }, + "unit": "Percent", + "timeseries": [ + { + "metadatavalues": [ + + ], + "data": [ + { + "timeStamp": "2019-02-09T14:26:00Z", + "maximum": 3.07 + }, + { + "timeStamp": "2019-02-09T14:27:00Z", + "maximum": 2.92 + }, + { + "timeStamp": "2019-02-09T14:28:00Z", + "maximum": 2.87 + }, + { + "timeStamp": "2019-02-09T14:29:00Z", + "maximum": 2.27 + }, + { + "timeStamp": "2019-02-09T14:30:00Z", + "maximum": 2.52 + } + ] + } + ] + } + ], + "namespace": "Microsoft.Compute\/virtualMachines", + "resourceregion": "westeurope" +} diff --git a/pkg/tsdb/azuremonitor/test-data/4-azure-monitor-response-minimum.json b/pkg/tsdb/azuremonitor/test-data/4-azure-monitor-response-minimum.json new file mode 100644 index 00000000000..5e5f99cc498 --- /dev/null +++ b/pkg/tsdb/azuremonitor/test-data/4-azure-monitor-response-minimum.json @@ -0,0 +1,47 @@ +{ + "cost": 0, + "timespan": "2019-02-09T14:43:21Z\/2019-02-09T20:43:21Z", + "interval": "PT1M", + "value": [ + { + "id": "\/subscriptions\/44693801-6ee6-49de-9b2d-9106972f9572\/resourceGroups\/grafanastaging\/providers\/Microsoft.Compute\/virtualMachines\/grafana\/providers\/Microsoft.Insights\/metrics\/Percentage CPU", + "type": "Microsoft.Insights\/metrics", + "name": { + "value": "Percentage CPU", + "localizedValue": "Percentage CPU" + }, + "unit": "Percent", + "timeseries": [ + { + "metadatavalues": [ + + ], + "data": [ + { + "timeStamp": "2019-02-09T14:43:00Z", + "minimum": 1.51 + }, + { + "timeStamp": "2019-02-09T14:44:00Z", + "minimum": 2.38 + }, + { + "timeStamp": "2019-02-09T14:45:00Z", + "minimum": 1.69 + }, + { + "timeStamp": "2019-02-09T14:46:00Z", + "minimum": 2.27 + }, + { + "timeStamp": "2019-02-09T14:47:00Z", + "minimum": 1.96 + } + ] + } + ] + } + ], + "namespace": "Microsoft.Compute\/virtualMachines", + "resourceregion": "westeurope" +} diff --git a/pkg/tsdb/azuremonitor/test-data/5-azure-monitor-response-count.json 
b/pkg/tsdb/azuremonitor/test-data/5-azure-monitor-response-count.json new file mode 100644 index 00000000000..f024a5f2518 --- /dev/null +++ b/pkg/tsdb/azuremonitor/test-data/5-azure-monitor-response-count.json @@ -0,0 +1,47 @@ +{ + "cost": 0, + "timespan": "2019-02-09T14:44:52Z\/2019-02-09T20:44:52Z", + "interval": "PT1M", + "value": [ + { + "id": "\/subscriptions\/44693801-6ee6-49de-9b2d-9106972f9572\/resourceGroups\/grafanastaging\/providers\/Microsoft.Compute\/virtualMachines\/grafana\/providers\/Microsoft.Insights\/metrics\/Percentage CPU", + "type": "Microsoft.Insights\/metrics", + "name": { + "value": "Percentage CPU", + "localizedValue": "Percentage CPU" + }, + "unit": "Percent", + "timeseries": [ + { + "metadatavalues": [ + + ], + "data": [ + { + "timeStamp": "2019-02-09T14:44:00Z", + "count": 4 + }, + { + "timeStamp": "2019-02-09T14:45:00Z", + "count": 4 + }, + { + "timeStamp": "2019-02-09T14:46:00Z", + "count": 4 + }, + { + "timeStamp": "2019-02-09T14:47:00Z", + "count": 4 + }, + { + "timeStamp": "2019-02-09T14:48:00Z", + "count": 4 + } + ] + } + ] + } + ], + "namespace": "Microsoft.Compute\/virtualMachines", + "resourceregion": "westeurope" +} diff --git a/pkg/tsdb/azuremonitor/types.go b/pkg/tsdb/azuremonitor/types.go index 5b1b7255d62..b547c71f185 100644 --- a/pkg/tsdb/azuremonitor/types.go +++ b/pkg/tsdb/azuremonitor/types.go @@ -39,7 +39,11 @@ type AzureMonitorResponse struct { } `json:"metadatavalues"` Data []struct { TimeStamp time.Time `json:"timeStamp"` - Average float64 `json:"average"` + Average float64 `json:"average,omitempty"` + Total float64 `json:"total,omitempty"` + Count float64 `json:"count,omitempty"` + Maximum float64 `json:"maximum,omitempty"` + Minimum float64 `json:"minimum,omitempty"` } `json:"data"` } `json:"timeseries"` } `json:"value"` From b816f35c41f004e652a114b0ca39263f02ee1843 Mon Sep 17 00:00:00 2001 From: Daniel Lee Date: Sun, 10 Feb 2019 00:23:12 +0100 Subject: [PATCH 19/38] azuremonitor: handle multi-dimensions on backend --- pkg/tsdb/azuremonitor/azuremonitor.go | 62 ++++++--- pkg/tsdb/azuremonitor/azuremonitor_test.go | 27 ++++ pkg/tsdb/azuremonitor/legend-key.go | 11 ++ .../1-azure-monitor-response-avg.json | 2 +- .../2-azure-monitor-response-total.json | 2 +- .../3-azure-monitor-response-maximum.json | 2 +- .../4-azure-monitor-response-minimum.json | 2 +- .../5-azure-monitor-response-count.json | 2 +- ...zure-monitor-response-multi-dimension.json | 128 ++++++++++++++++++ pkg/tsdb/azuremonitor/url-builder.go | 28 ++++ pkg/tsdb/azuremonitor/url-builder_test.go | 45 ++++++ 11 files changed, 287 insertions(+), 24 deletions(-) create mode 100644 pkg/tsdb/azuremonitor/legend-key.go create mode 100644 pkg/tsdb/azuremonitor/test-data/6-azure-monitor-response-multi-dimension.json create mode 100644 pkg/tsdb/azuremonitor/url-builder.go create mode 100644 pkg/tsdb/azuremonitor/url-builder_test.go diff --git a/pkg/tsdb/azuremonitor/azuremonitor.go b/pkg/tsdb/azuremonitor/azuremonitor.go index 3c6ae9b0013..ef1376f5ed5 100644 --- a/pkg/tsdb/azuremonitor/azuremonitor.go +++ b/pkg/tsdb/azuremonitor/azuremonitor.go @@ -9,6 +9,7 @@ import ( "net/http" "net/url" "path" + "strings" "time" "github.com/grafana/grafana/pkg/api/pluginproxy" @@ -24,7 +25,7 @@ import ( ) var ( - slog log.Logger + azlog log.Logger ) // AzureMonitorExecutor executes queries for the Azure Monitor datasource - all four services @@ -47,7 +48,7 @@ func NewAzureMonitorExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, } func init() { - slog = log.New("tsdb.azuremonitor") + azlog 
= log.New("tsdb.azuremonitor") tsdb.RegisterTsdbQueryEndpoint("grafana-azure-monitor-datasource", NewAzureMonitorExecutor) } @@ -61,7 +62,6 @@ func (e *AzureMonitorExecutor) Query(ctx context.Context, dsInfo *models.DataSou queryType := tsdbQuery.Queries[0].Model.Get("queryType").MustString("") switch queryType { - case "azureMonitorTimeSeriesQuery": case "Azure Monitor": fallthrough default: @@ -112,26 +112,39 @@ func (e *AzureMonitorExecutor) buildQueries(tsdbQuery *tsdb.TsdbQuery) ([]*Azure var target string azureMonitorTarget := query.Model.Get("azureMonitor").MustMap() + azlog.Debug("AzureMonitor", "target", azureMonitorTarget) urlComponents := make(map[string]string) - urlComponents["resourceGroup"] = azureMonitorTarget["resourceGroup"].(string) - urlComponents["metricDefinition"] = azureMonitorTarget["metricDefinition"].(string) - urlComponents["resourceName"] = azureMonitorTarget["resourceName"].(string) + urlComponents["resourceGroup"] = fmt.Sprintf("%v", azureMonitorTarget["resourceGroup"]) + urlComponents["metricDefinition"] = fmt.Sprintf("%v", azureMonitorTarget["metricDefinition"]) + urlComponents["resourceName"] = fmt.Sprintf("%v", azureMonitorTarget["resourceName"]) - azureURL := fmt.Sprintf("resourceGroups/%s/providers/%s/%s/providers/microsoft.insights/metrics", urlComponents["resourceGroup"], urlComponents["metricDefinition"], urlComponents["resourceName"]) + ub := URLBuilder{ + ResourceGroup: urlComponents["resourceGroup"], + MetricDefinition: urlComponents["metricDefinition"], + ResourceName: urlComponents["resourceName"], + } + azureURL := ub.Build() - alias := azureMonitorTarget["alias"].(string) + alias := fmt.Sprintf("%v", azureMonitorTarget["alias"]) params := url.Values{} params.Add("api-version", "2018-01-01") params.Add("timespan", fmt.Sprintf("%v/%v", startTime.UTC().Format(time.RFC3339), endTime.UTC().Format(time.RFC3339))) - params.Add("interval", azureMonitorTarget["timeGrain"].(string)) - params.Add("aggregation", azureMonitorTarget["aggregation"].(string)) - params.Add("metricnames", azureMonitorTarget["metricName"].(string)) + params.Add("interval", fmt.Sprintf("%v", azureMonitorTarget["timeGrain"])) + params.Add("aggregation", fmt.Sprintf("%v", azureMonitorTarget["aggregation"])) + params.Add("metricnames", fmt.Sprintf("%v", azureMonitorTarget["metricName"])) + + dimension := fmt.Sprintf("%v", azureMonitorTarget["dimension"]) + dimensionFilter := strings.TrimSpace(fmt.Sprintf("%v", azureMonitorTarget["dimensionFilter"])) + if azureMonitorTarget["dimension"] != nil && azureMonitorTarget["dimensionFilter"] != nil && dimensionFilter != "" { + params.Add("$filter", fmt.Sprintf("%s eq '%s'", dimension, dimensionFilter)) + } + target = params.Encode() if setting.Env == setting.DEV { - slog.Debug("Azuremonitor request", "params", params) + azlog.Debug("Azuremonitor request", "params", params) } azureMonitorQueries = append(azureMonitorQueries, &AzureMonitorQuery{ @@ -174,6 +187,7 @@ func (e *AzureMonitorExecutor) executeQuery(ctx context.Context, query *AzureMon opentracing.HTTPHeaders, opentracing.HTTPHeadersCarrier(req.Header)) + azlog.Debug("AzureMonitor", "Request URL", req.URL.String()) res, err := ctxhttp.Do(ctx, e.httpClient, req) if err != nil { queryResult.Error = err @@ -213,7 +227,7 @@ func (e *AzureMonitorExecutor) createRequest(ctx context.Context, dsInfo *models req, err := http.NewRequest(http.MethodGet, u.String(), nil) if err != nil { - slog.Error("Failed to create request", "error", err) + azlog.Error("Failed to create request", "error", err) 
return nil, fmt.Errorf("Failed to create request. error: %v", err) } @@ -233,14 +247,14 @@ func (e *AzureMonitorExecutor) unmarshalResponse(res *http.Response) (AzureMonit } if res.StatusCode/100 != 2 { - slog.Error("Request failed", "status", res.Status, "body", string(body)) + azlog.Error("Request failed", "status", res.Status, "body", string(body)) return AzureMonitorResponse{}, fmt.Errorf(string(body)) } var data AzureMonitorResponse err = json.Unmarshal(body, &data) if err != nil { - slog.Error("Failed to unmarshal AzureMonitor response", "error", err, "status", res.Status, "body", string(body)) + azlog.Error("Failed to unmarshal AzureMonitor response", "error", err, "status", res.Status, "body", string(body)) return AzureMonitorResponse{}, err } @@ -248,14 +262,24 @@ func (e *AzureMonitorExecutor) unmarshalResponse(res *http.Response) (AzureMonit } func (e *AzureMonitorExecutor) parseResponse(queryRes *tsdb.QueryResult, data AzureMonitorResponse, query *AzureMonitorQuery) error { - slog.Info("AzureMonitor", "Response", data) + azlog.Debug("AzureMonitor", "Response", data) - for _, series := range data.Value { + if len(data.Value) == 0 { + return nil + } + + for _, series := range data.Value[0].Timeseries { points := make([]tsdb.TimePoint, 0) - defaultMetricName := fmt.Sprintf("%s.%s", query.UrlComponents["resourceName"], series.Name.LocalizedValue) + metadataName := "" + metadataValue := "" + if len(series.Metadatavalues) > 0 { + metadataName = series.Metadatavalues[0].Name.LocalizedValue + metadataValue = series.Metadatavalues[0].Value + } + defaultMetricName := formatLegendKey(query.UrlComponents["resourceName"], data.Value[0].Name.LocalizedValue, metadataName, metadataValue) - for _, point := range series.Timeseries[0].Data { + for _, point := range series.Data { var value float64 switch query.Params.Get("aggregation") { case "Average": diff --git a/pkg/tsdb/azuremonitor/azuremonitor_test.go b/pkg/tsdb/azuremonitor/azuremonitor_test.go index 73f2d83090f..760fecd0630 100644 --- a/pkg/tsdb/azuremonitor/azuremonitor_test.go +++ b/pkg/tsdb/azuremonitor/azuremonitor_test.go @@ -178,6 +178,33 @@ func TestAzureMonitor(t *testing.T) { So(res.Series[0].Points[0][0].Float64, ShouldEqual, 4) So(res.Series[0].Points[0][1].Float64, ShouldEqual, 1549723440000) }) + + Convey("when data from query aggregated as total and has dimension filter", func() { + data, err := loadTestFile("./test-data/6-azure-monitor-response-multi-dimension.json") + So(err, ShouldBeNil) + + res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} + query := &AzureMonitorQuery{ + UrlComponents: map[string]string{ + "resourceName": "grafana", + }, + Params: url.Values{ + "aggregation": {"Average"}, + }, + } + err = executor.parseResponse(res, data, query) + So(err, ShouldBeNil) + So(len(res.Series), ShouldEqual, 3) + + So(res.Series[0].Name, ShouldEqual, "grafana{blobtype=PageBlob}.Blob Count") + So(res.Series[0].Points[0][0].Float64, ShouldEqual, 3) + + So(res.Series[1].Name, ShouldEqual, "grafana{blobtype=BlockBlob}.Blob Count") + So(res.Series[1].Points[0][0].Float64, ShouldEqual, 1) + + So(res.Series[2].Name, ShouldEqual, "grafana{blobtype=Azure Data Lake Storage}.Blob Count") + So(res.Series[2].Points[0][0].Float64, ShouldEqual, 0) + }) }) }) } diff --git a/pkg/tsdb/azuremonitor/legend-key.go b/pkg/tsdb/azuremonitor/legend-key.go new file mode 100644 index 00000000000..7d5cdbbcdd3 --- /dev/null +++ b/pkg/tsdb/azuremonitor/legend-key.go @@ -0,0 +1,11 @@ +package azuremonitor + +import "fmt" + +// formatLegendKey 
builds the legend key or timeseries name +func formatLegendKey(resourceName string, metricName string, metadataName string, metadataValue string) string { + if len(metadataName) > 0 { + return fmt.Sprintf("%s{%s=%s}.%s", resourceName, metadataName, metadataValue, metricName) + } + return fmt.Sprintf("%s.%s", resourceName, metricName) +} diff --git a/pkg/tsdb/azuremonitor/test-data/1-azure-monitor-response-avg.json b/pkg/tsdb/azuremonitor/test-data/1-azure-monitor-response-avg.json index febb47f2047..5fc84f6afa6 100644 --- a/pkg/tsdb/azuremonitor/test-data/1-azure-monitor-response-avg.json +++ b/pkg/tsdb/azuremonitor/test-data/1-azure-monitor-response-avg.json @@ -4,7 +4,7 @@ "interval": "PT1M", "value": [ { - "id": "\/subscriptions\/44693801-6ee6-49de-9b2d-9106972f9572\/resourceGroups\/grafanastaging\/providers\/Microsoft.Compute\/virtualMachines\/grafana\/providers\/Microsoft.Insights\/metrics\/Percentage CPU", + "id": "\/subscriptions\/xxx\/resourceGroups\/grafanastaging\/providers\/Microsoft.Compute\/virtualMachines\/grafana\/providers\/Microsoft.Insights\/metrics\/Percentage CPU", "type": "Microsoft.Insights\/metrics", "name": { "value": "Percentage CPU", diff --git a/pkg/tsdb/azuremonitor/test-data/2-azure-monitor-response-total.json b/pkg/tsdb/azuremonitor/test-data/2-azure-monitor-response-total.json index 1002bbf7d18..d0b22f1b02c 100644 --- a/pkg/tsdb/azuremonitor/test-data/2-azure-monitor-response-total.json +++ b/pkg/tsdb/azuremonitor/test-data/2-azure-monitor-response-total.json @@ -4,7 +4,7 @@ "interval": "PT1M", "value": [ { - "id": "\/subscriptions\/44693801-6ee6-49de-9b2d-9106972f9572\/resourceGroups\/grafanastaging\/providers\/Microsoft.Compute\/virtualMachines\/grafana\/providers\/Microsoft.Insights\/metrics\/Percentage CPU", + "id": "\/subscriptions\/xxx\/resourceGroups\/grafanastaging\/providers\/Microsoft.Compute\/virtualMachines\/grafana\/providers\/Microsoft.Insights\/metrics\/Percentage CPU", "type": "Microsoft.Insights\/metrics", "name": { "value": "Percentage CPU", diff --git a/pkg/tsdb/azuremonitor/test-data/3-azure-monitor-response-maximum.json b/pkg/tsdb/azuremonitor/test-data/3-azure-monitor-response-maximum.json index 3ca83c99932..1e46cceb2be 100644 --- a/pkg/tsdb/azuremonitor/test-data/3-azure-monitor-response-maximum.json +++ b/pkg/tsdb/azuremonitor/test-data/3-azure-monitor-response-maximum.json @@ -4,7 +4,7 @@ "interval": "PT1M", "value": [ { - "id": "\/subscriptions\/44693801-6ee6-49de-9b2d-9106972f9572\/resourceGroups\/grafanastaging\/providers\/Microsoft.Compute\/virtualMachines\/grafana\/providers\/Microsoft.Insights\/metrics\/Percentage CPU", + "id": "\/subscriptions\/xxx\/resourceGroups\/grafanastaging\/providers\/Microsoft.Compute\/virtualMachines\/grafana\/providers\/Microsoft.Insights\/metrics\/Percentage CPU", "type": "Microsoft.Insights\/metrics", "name": { "value": "Percentage CPU", diff --git a/pkg/tsdb/azuremonitor/test-data/4-azure-monitor-response-minimum.json b/pkg/tsdb/azuremonitor/test-data/4-azure-monitor-response-minimum.json index 5e5f99cc498..16310614214 100644 --- a/pkg/tsdb/azuremonitor/test-data/4-azure-monitor-response-minimum.json +++ b/pkg/tsdb/azuremonitor/test-data/4-azure-monitor-response-minimum.json @@ -4,7 +4,7 @@ "interval": "PT1M", "value": [ { - "id": "\/subscriptions\/44693801-6ee6-49de-9b2d-9106972f9572\/resourceGroups\/grafanastaging\/providers\/Microsoft.Compute\/virtualMachines\/grafana\/providers\/Microsoft.Insights\/metrics\/Percentage CPU", + "id": 
"\/subscriptions\/xxx\/resourceGroups\/grafanastaging\/providers\/Microsoft.Compute\/virtualMachines\/grafana\/providers\/Microsoft.Insights\/metrics\/Percentage CPU", "type": "Microsoft.Insights\/metrics", "name": { "value": "Percentage CPU", diff --git a/pkg/tsdb/azuremonitor/test-data/5-azure-monitor-response-count.json b/pkg/tsdb/azuremonitor/test-data/5-azure-monitor-response-count.json index f024a5f2518..91afc33f070 100644 --- a/pkg/tsdb/azuremonitor/test-data/5-azure-monitor-response-count.json +++ b/pkg/tsdb/azuremonitor/test-data/5-azure-monitor-response-count.json @@ -4,7 +4,7 @@ "interval": "PT1M", "value": [ { - "id": "\/subscriptions\/44693801-6ee6-49de-9b2d-9106972f9572\/resourceGroups\/grafanastaging\/providers\/Microsoft.Compute\/virtualMachines\/grafana\/providers\/Microsoft.Insights\/metrics\/Percentage CPU", + "id": "\/subscriptions\/xxx\/resourceGroups\/grafanastaging\/providers\/Microsoft.Compute\/virtualMachines\/grafana\/providers\/Microsoft.Insights\/metrics\/Percentage CPU", "type": "Microsoft.Insights\/metrics", "name": { "value": "Percentage CPU", diff --git a/pkg/tsdb/azuremonitor/test-data/6-azure-monitor-response-multi-dimension.json b/pkg/tsdb/azuremonitor/test-data/6-azure-monitor-response-multi-dimension.json new file mode 100644 index 00000000000..dddcef0e79c --- /dev/null +++ b/pkg/tsdb/azuremonitor/test-data/6-azure-monitor-response-multi-dimension.json @@ -0,0 +1,128 @@ +{ + "cost": 0, + "timespan": "2019-02-09T15:21:39Z\/2019-02-09T21:21:39Z", + "interval": "PT1H", + "value": [ + { + "id": "\/subscriptions\/xxx\/resourceGroups\/grafanastaging\/providers\/Microsoft.Storage\/storageAccounts\/grafanastaging\/blobServices\/default\/providers\/Microsoft.Insights\/metrics\/BlobCount", + "type": "Microsoft.Insights\/metrics", + "name": { + "value": "BlobCount", + "localizedValue": "Blob Count" + }, + "unit": "Count", + "timeseries": [ + { + "metadatavalues": [ + { + "name": { + "value": "blobtype", + "localizedValue": "blobtype" + }, + "value": "PageBlob" + } + ], + "data": [ + { + "timeStamp": "2019-02-09T15:21:00Z", + "average": 3 + }, + { + "timeStamp": "2019-02-09T16:21:00Z", + "average": 3 + }, + { + "timeStamp": "2019-02-09T17:21:00Z", + "average": 3 + }, + { + "timeStamp": "2019-02-09T18:21:00Z", + "average": 3 + }, + { + "timeStamp": "2019-02-09T19:21:00Z", + "average": 3 + }, + { + "timeStamp": "2019-02-09T20:21:00Z" + } + ] + }, + { + "metadatavalues": [ + { + "name": { + "value": "blobtype", + "localizedValue": "blobtype" + }, + "value": "BlockBlob" + } + ], + "data": [ + { + "timeStamp": "2019-02-09T15:21:00Z", + "average": 1 + }, + { + "timeStamp": "2019-02-09T16:21:00Z", + "average": 1 + }, + { + "timeStamp": "2019-02-09T17:21:00Z", + "average": 1 + }, + { + "timeStamp": "2019-02-09T18:21:00Z", + "average": 1 + }, + { + "timeStamp": "2019-02-09T19:21:00Z", + "average": 1 + }, + { + "timeStamp": "2019-02-09T20:21:00Z" + } + ] + }, + { + "metadatavalues": [ + { + "name": { + "value": "blobtype", + "localizedValue": "blobtype" + }, + "value": "Azure Data Lake Storage" + } + ], + "data": [ + { + "timeStamp": "2019-02-09T15:21:00Z", + "average": 0 + }, + { + "timeStamp": "2019-02-09T16:21:00Z", + "average": 0 + }, + { + "timeStamp": "2019-02-09T17:21:00Z", + "average": 0 + }, + { + "timeStamp": "2019-02-09T18:21:00Z", + "average": 0 + }, + { + "timeStamp": "2019-02-09T19:21:00Z", + "average": 0 + }, + { + "timeStamp": "2019-02-09T20:21:00Z" + } + ] + } + ] + } + ], + "namespace": "Microsoft.Storage\/storageAccounts\/blobServices", + "resourceregion": 
"westeurope" +} diff --git a/pkg/tsdb/azuremonitor/url-builder.go b/pkg/tsdb/azuremonitor/url-builder.go new file mode 100644 index 00000000000..1ccbbc2bf81 --- /dev/null +++ b/pkg/tsdb/azuremonitor/url-builder.go @@ -0,0 +1,28 @@ +package azuremonitor + +import ( + "fmt" + "strings" +) + +// URLBuilder builds the URL for calling the Azure Monitor API +type URLBuilder struct { + ResourceGroup string + MetricDefinition string + ResourceName string +} + +// Build checks the metric definition property to see which form of the url +// should be returned +func (ub *URLBuilder) Build() string { + + if strings.Count(ub.MetricDefinition, "/") > 1 { + rn := strings.Split(ub.ResourceName, "/") + lastIndex := strings.LastIndex(ub.MetricDefinition, "/") + service := ub.MetricDefinition[lastIndex+1:] + md := ub.MetricDefinition[0:lastIndex] + return fmt.Sprintf("resourceGroups/%s/providers/%s/%s/%s/%s/providers/microsoft.insights/metrics", ub.ResourceGroup, md, rn[0], service, rn[1]) + } + + return fmt.Sprintf("resourceGroups/%s/providers/%s/%s/providers/microsoft.insights/metrics", ub.ResourceGroup, ub.MetricDefinition, ub.ResourceName) +} diff --git a/pkg/tsdb/azuremonitor/url-builder_test.go b/pkg/tsdb/azuremonitor/url-builder_test.go new file mode 100644 index 00000000000..baf9b34d7eb --- /dev/null +++ b/pkg/tsdb/azuremonitor/url-builder_test.go @@ -0,0 +1,45 @@ +package azuremonitor + +import ( + "testing" + + . "github.com/smartystreets/goconvey/convey" +) + +func TestURLBuilder(t *testing.T) { + Convey("AzureMonitor URL Builder", t, func() { + + Convey("when metric definition is in the short form", func() { + ub := &URLBuilder{ + ResourceGroup: "rg", + MetricDefinition: "Microsoft.Compute/virtualMachines", + ResourceName: "rn", + } + + url := ub.Build() + So(url, ShouldEqual, "resourceGroups/rg/providers/Microsoft.Compute/virtualMachines/rn/providers/microsoft.insights/metrics") + }) + + Convey("when metric definition is Microsoft.Storage/storageAccounts/blobServices", func() { + ub := &URLBuilder{ + ResourceGroup: "rg", + MetricDefinition: "Microsoft.Storage/storageAccounts/blobServices", + ResourceName: "rn1/default", + } + + url := ub.Build() + So(url, ShouldEqual, "resourceGroups/rg/providers/Microsoft.Storage/storageAccounts/rn1/blobServices/default/providers/microsoft.insights/metrics") + }) + + Convey("when metric definition is Microsoft.Storage/storageAccounts/fileServices", func() { + ub := &URLBuilder{ + ResourceGroup: "rg", + MetricDefinition: "Microsoft.Storage/storageAccounts/fileServices", + ResourceName: "rn1/default", + } + + url := ub.Build() + So(url, ShouldEqual, "resourceGroups/rg/providers/Microsoft.Storage/storageAccounts/rn1/fileServices/default/providers/microsoft.insights/metrics") + }) + }) +} From b94de101cd95b23f0266cd61e311f09ae8ad7592 Mon Sep 17 00:00:00 2001 From: Daniel Lee Date: Sun, 10 Feb 2019 01:18:16 +0100 Subject: [PATCH 20/38] azuremonitor: refactor azure monitor api code into own file --- .../azuremonitor/azuremonitor-datasource.go | 266 +++++++++++++++++ ...est.go => azuremonitor-datasource_test.go} | 8 +- pkg/tsdb/azuremonitor/azuremonitor.go | 274 ++---------------- 3 files changed, 288 insertions(+), 260 deletions(-) create mode 100644 pkg/tsdb/azuremonitor/azuremonitor-datasource.go rename pkg/tsdb/azuremonitor/{azuremonitor_test.go => azuremonitor-datasource_test.go} (97%) diff --git a/pkg/tsdb/azuremonitor/azuremonitor-datasource.go b/pkg/tsdb/azuremonitor/azuremonitor-datasource.go new file mode 100644 index 00000000000..99e45bb58b8 --- /dev/null 
+++ b/pkg/tsdb/azuremonitor/azuremonitor-datasource.go @@ -0,0 +1,266 @@ +package azuremonitor + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "io/ioutil" + "net/http" + "net/url" + "path" + "strings" + "time" + + "github.com/grafana/grafana/pkg/api/pluginproxy" + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/setting" + opentracing "github.com/opentracing/opentracing-go" + "golang.org/x/net/context/ctxhttp" + + "github.com/grafana/grafana/pkg/components/null" + "github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/tsdb" +) + +type AzureMonitorDatasource struct { + httpClient *http.Client + dsInfo *models.DataSource +} + +func (e *AzureMonitorDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []*tsdb.Query, timeRange *tsdb.TimeRange) (*tsdb.Response, error) { + result := &tsdb.Response{ + Results: make(map[string]*tsdb.QueryResult), + } + + queries, err := e.buildQueries(originalQueries, timeRange) + if err != nil { + return nil, err + } + + for _, query := range queries { + queryRes, resp, err := e.executeQuery(ctx, query, originalQueries, timeRange) + if err != nil { + return nil, err + } + azlog.Debug("AzureMonitor", "Response", resp) + + err = e.parseResponse(queryRes, resp, query) + if err != nil { + queryRes.Error = err + } + result.Results[query.RefID] = queryRes + } + + return result, nil +} + +func (e *AzureMonitorDatasource) buildQueries(queries []*tsdb.Query, timeRange *tsdb.TimeRange) ([]*AzureMonitorQuery, error) { + azureMonitorQueries := []*AzureMonitorQuery{} + startTime, err := timeRange.ParseFrom() + if err != nil { + return nil, err + } + + endTime, err := timeRange.ParseTo() + if err != nil { + return nil, err + } + + for _, query := range queries { + var target string + + azureMonitorTarget := query.Model.Get("azureMonitor").MustMap() + azlog.Debug("AzureMonitor", "target", azureMonitorTarget) + + urlComponents := make(map[string]string) + urlComponents["resourceGroup"] = fmt.Sprintf("%v", azureMonitorTarget["resourceGroup"]) + urlComponents["metricDefinition"] = fmt.Sprintf("%v", azureMonitorTarget["metricDefinition"]) + urlComponents["resourceName"] = fmt.Sprintf("%v", azureMonitorTarget["resourceName"]) + + ub := URLBuilder{ + ResourceGroup: urlComponents["resourceGroup"], + MetricDefinition: urlComponents["metricDefinition"], + ResourceName: urlComponents["resourceName"], + } + azureURL := ub.Build() + + alias := fmt.Sprintf("%v", azureMonitorTarget["alias"]) + + params := url.Values{} + params.Add("api-version", "2018-01-01") + params.Add("timespan", fmt.Sprintf("%v/%v", startTime.UTC().Format(time.RFC3339), endTime.UTC().Format(time.RFC3339))) + params.Add("interval", fmt.Sprintf("%v", azureMonitorTarget["timeGrain"])) + params.Add("aggregation", fmt.Sprintf("%v", azureMonitorTarget["aggregation"])) + params.Add("metricnames", fmt.Sprintf("%v", azureMonitorTarget["metricName"])) + + dimension := fmt.Sprintf("%v", azureMonitorTarget["dimension"]) + dimensionFilter := strings.TrimSpace(fmt.Sprintf("%v", azureMonitorTarget["dimensionFilter"])) + if azureMonitorTarget["dimension"] != nil && azureMonitorTarget["dimensionFilter"] != nil && dimensionFilter != "" { + params.Add("$filter", fmt.Sprintf("%s eq '%s'", dimension, dimensionFilter)) + } + + target = params.Encode() + + if setting.Env == setting.DEV { + azlog.Debug("Azuremonitor request", "params", params) + } + + azureMonitorQueries = append(azureMonitorQueries, 
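+		// Illustrative example (not part of the original change), using the values from
+		// url-builder_test.go and azuremonitor-datasource_test.go: with resourceGroup "rg",
+		// metricDefinition "Microsoft.Compute/virtualMachines" and resourceName "rn",
+		// ub.Build() returns
+		//   "resourceGroups/rg/providers/Microsoft.Compute/virtualMachines/rn/providers/microsoft.insights/metrics"
+		// and the encoded target holds api-version, timespan, interval, aggregation and
+		// metricnames; when dimension "blob" and dimensionFilter "*" are set it also
+		// carries $filter=blob eq '*'.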
&AzureMonitorQuery{ + URL: azureURL, + UrlComponents: urlComponents, + Target: target, + Params: params, + RefID: query.RefId, + Alias: alias, + }) + } + + return azureMonitorQueries, nil +} + +func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, query *AzureMonitorQuery, queries []*tsdb.Query, timeRange *tsdb.TimeRange) (*tsdb.QueryResult, AzureMonitorResponse, error) { + queryResult := &tsdb.QueryResult{Meta: simplejson.New(), RefId: query.RefID} + + req, err := e.createRequest(ctx, e.dsInfo) + if err != nil { + queryResult.Error = err + return queryResult, AzureMonitorResponse{}, nil + } + + req.URL.Path = path.Join(req.URL.Path, query.URL) + req.URL.RawQuery = query.Params.Encode() + queryResult.Meta.Set("rawQuery", req.URL.RawQuery) + + span, ctx := opentracing.StartSpanFromContext(ctx, "azuremonitor query") + span.SetTag("target", query.Target) + span.SetTag("from", timeRange.From) + span.SetTag("until", timeRange.To) + span.SetTag("datasource_id", e.dsInfo.Id) + span.SetTag("org_id", e.dsInfo.OrgId) + + defer span.Finish() + + opentracing.GlobalTracer().Inject( + span.Context(), + opentracing.HTTPHeaders, + opentracing.HTTPHeadersCarrier(req.Header)) + + azlog.Debug("AzureMonitor", "Request URL", req.URL.String()) + res, err := ctxhttp.Do(ctx, e.httpClient, req) + if err != nil { + queryResult.Error = err + return queryResult, AzureMonitorResponse{}, nil + } + + data, err := e.unmarshalResponse(res) + if err != nil { + queryResult.Error = err + return queryResult, AzureMonitorResponse{}, nil + } + + return queryResult, data, nil +} + +func (e *AzureMonitorDatasource) createRequest(ctx context.Context, dsInfo *models.DataSource) (*http.Request, error) { + // find plugin + plugin, ok := plugins.DataSources[dsInfo.Type] + if !ok { + return nil, errors.New("Unable to find datasource plugin Azure Monitor") + } + + var azureMonitorRoute *plugins.AppPluginRoute + for _, route := range plugin.Routes { + if route.Path == "azuremonitor" { + azureMonitorRoute = route + break + } + } + + cloudName := dsInfo.JsonData.Get("cloudName").MustString("azuremonitor") + subscriptionID := dsInfo.JsonData.Get("subscriptionId").MustString() + proxyPass := fmt.Sprintf("%s/subscriptions/%s", cloudName, subscriptionID) + + u, _ := url.Parse(dsInfo.Url) + u.Path = path.Join(u.Path, "render") + + req, err := http.NewRequest(http.MethodGet, u.String(), nil) + if err != nil { + azlog.Error("Failed to create request", "error", err) + return nil, fmt.Errorf("Failed to create request. 
error: %v", err) + } + + req.Header.Set("Content-Type", "application/json") + req.Header.Set("User-Agent", fmt.Sprintf("Grafana/%s", setting.BuildVersion)) + + pluginproxy.ApplyRoute(ctx, req, proxyPass, azureMonitorRoute, dsInfo) + + return req, nil +} + +func (e *AzureMonitorDatasource) unmarshalResponse(res *http.Response) (AzureMonitorResponse, error) { + body, err := ioutil.ReadAll(res.Body) + defer res.Body.Close() + if err != nil { + return AzureMonitorResponse{}, err + } + + if res.StatusCode/100 != 2 { + azlog.Error("Request failed", "status", res.Status, "body", string(body)) + return AzureMonitorResponse{}, fmt.Errorf(string(body)) + } + + var data AzureMonitorResponse + err = json.Unmarshal(body, &data) + if err != nil { + azlog.Error("Failed to unmarshal AzureMonitor response", "error", err, "status", res.Status, "body", string(body)) + return AzureMonitorResponse{}, err + } + + return data, nil +} + +func (e *AzureMonitorDatasource) parseResponse(queryRes *tsdb.QueryResult, data AzureMonitorResponse, query *AzureMonitorQuery) error { + if len(data.Value) == 0 { + return nil + } + + for _, series := range data.Value[0].Timeseries { + points := make([]tsdb.TimePoint, 0) + + metadataName := "" + metadataValue := "" + if len(series.Metadatavalues) > 0 { + metadataName = series.Metadatavalues[0].Name.LocalizedValue + metadataValue = series.Metadatavalues[0].Value + } + defaultMetricName := formatLegendKey(query.UrlComponents["resourceName"], data.Value[0].Name.LocalizedValue, metadataName, metadataValue) + + for _, point := range series.Data { + var value float64 + switch query.Params.Get("aggregation") { + case "Average": + value = point.Average + case "Total": + value = point.Total + case "Maximum": + value = point.Maximum + case "Minimum": + value = point.Minimum + case "Count": + value = point.Count + default: + value = point.Count + } + points = append(points, tsdb.NewTimePoint(null.FloatFrom(value), float64((point.TimeStamp).Unix())*1000)) + } + + queryRes.Series = append(queryRes.Series, &tsdb.TimeSeries{ + Name: defaultMetricName, + Points: points, + }) + } + + return nil +} diff --git a/pkg/tsdb/azuremonitor/azuremonitor_test.go b/pkg/tsdb/azuremonitor/azuremonitor-datasource_test.go similarity index 97% rename from pkg/tsdb/azuremonitor/azuremonitor_test.go rename to pkg/tsdb/azuremonitor/azuremonitor-datasource_test.go index 760fecd0630..331a084033f 100644 --- a/pkg/tsdb/azuremonitor/azuremonitor_test.go +++ b/pkg/tsdb/azuremonitor/azuremonitor-datasource_test.go @@ -14,9 +14,9 @@ import ( . 
"github.com/smartystreets/goconvey/convey" ) -func TestAzureMonitor(t *testing.T) { - Convey("AzureMonitor", t, func() { - executor := &AzureMonitorExecutor{} +func TestAzureMonitorDatasource(t *testing.T) { + Convey("AzureMonitorDatasource", t, func() { + executor := &AzureMonitorDatasource{} Convey("Parse queries from frontend and build AzureMonitor API queries", func() { fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local) @@ -44,7 +44,7 @@ func TestAzureMonitor(t *testing.T) { }, } Convey("and is a normal query", func() { - queries, err := executor.buildQueries(tsdbQuery) + queries, err := executor.buildQueries(tsdbQuery.Queries, tsdbQuery.TimeRange) So(err, ShouldBeNil) So(len(queries), ShouldEqual, 1) diff --git a/pkg/tsdb/azuremonitor/azuremonitor.go b/pkg/tsdb/azuremonitor/azuremonitor.go index ef1376f5ed5..32d4a6f0f29 100644 --- a/pkg/tsdb/azuremonitor/azuremonitor.go +++ b/pkg/tsdb/azuremonitor/azuremonitor.go @@ -2,26 +2,12 @@ package azuremonitor import ( "context" - "encoding/json" - "errors" "fmt" - "io/ioutil" "net/http" - "net/url" - "path" - "strings" - "time" - "github.com/grafana/grafana/pkg/api/pluginproxy" - "github.com/grafana/grafana/pkg/components/null" - "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/models" - "github.com/grafana/grafana/pkg/plugins" - "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/tsdb" - "github.com/opentracing/opentracing-go" - "golang.org/x/net/context/ctxhttp" ) var ( @@ -59,250 +45,26 @@ func init() { func (e *AzureMonitorExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { var result *tsdb.Response var err error - queryType := tsdbQuery.Queries[0].Model.Get("queryType").MustString("") - switch queryType { - case "Azure Monitor": - fallthrough - default: - result, err = e.executeTimeSeriesQuery(ctx, tsdbQuery) + azureMonitorQueries := make([]*tsdb.Query, 0) + + for _, query := range tsdbQuery.Queries { + queryType := query.Model.Get("queryType").MustString("") + + switch queryType { + case "Azure Monitor": + azureMonitorQueries = append(azureMonitorQueries, query) + default: + return nil, fmt.Errorf("Alerting not supported for %s", queryType) + } } + azDatasource := &AzureMonitorDatasource{ + httpClient: e.httpClient, + dsInfo: e.dsInfo, + } + + result, err = azDatasource.executeTimeSeriesQuery(ctx, azureMonitorQueries, tsdbQuery.TimeRange) + return result, err } - -func (e *AzureMonitorExecutor) executeTimeSeriesQuery(ctx context.Context, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { - result := &tsdb.Response{ - Results: make(map[string]*tsdb.QueryResult), - } - - queries, err := e.buildQueries(tsdbQuery) - if err != nil { - return nil, err - } - - for _, query := range queries { - queryRes, resp, err := e.executeQuery(ctx, query, tsdbQuery) - if err != nil { - return nil, err - } - err = e.parseResponse(queryRes, resp, query) - if err != nil { - queryRes.Error = err - } - result.Results[query.RefID] = queryRes - } - - return result, nil -} - -func (e *AzureMonitorExecutor) buildQueries(tsdbQuery *tsdb.TsdbQuery) ([]*AzureMonitorQuery, error) { - azureMonitorQueries := []*AzureMonitorQuery{} - startTime, err := tsdbQuery.TimeRange.ParseFrom() - if err != nil { - return nil, err - } - - endTime, err := tsdbQuery.TimeRange.ParseTo() - if err != nil { - return nil, err - } - - for _, query := range tsdbQuery.Queries { - var target string - - 
azureMonitorTarget := query.Model.Get("azureMonitor").MustMap() - azlog.Debug("AzureMonitor", "target", azureMonitorTarget) - - urlComponents := make(map[string]string) - urlComponents["resourceGroup"] = fmt.Sprintf("%v", azureMonitorTarget["resourceGroup"]) - urlComponents["metricDefinition"] = fmt.Sprintf("%v", azureMonitorTarget["metricDefinition"]) - urlComponents["resourceName"] = fmt.Sprintf("%v", azureMonitorTarget["resourceName"]) - - ub := URLBuilder{ - ResourceGroup: urlComponents["resourceGroup"], - MetricDefinition: urlComponents["metricDefinition"], - ResourceName: urlComponents["resourceName"], - } - azureURL := ub.Build() - - alias := fmt.Sprintf("%v", azureMonitorTarget["alias"]) - - params := url.Values{} - params.Add("api-version", "2018-01-01") - params.Add("timespan", fmt.Sprintf("%v/%v", startTime.UTC().Format(time.RFC3339), endTime.UTC().Format(time.RFC3339))) - params.Add("interval", fmt.Sprintf("%v", azureMonitorTarget["timeGrain"])) - params.Add("aggregation", fmt.Sprintf("%v", azureMonitorTarget["aggregation"])) - params.Add("metricnames", fmt.Sprintf("%v", azureMonitorTarget["metricName"])) - - dimension := fmt.Sprintf("%v", azureMonitorTarget["dimension"]) - dimensionFilter := strings.TrimSpace(fmt.Sprintf("%v", azureMonitorTarget["dimensionFilter"])) - if azureMonitorTarget["dimension"] != nil && azureMonitorTarget["dimensionFilter"] != nil && dimensionFilter != "" { - params.Add("$filter", fmt.Sprintf("%s eq '%s'", dimension, dimensionFilter)) - } - - target = params.Encode() - - if setting.Env == setting.DEV { - azlog.Debug("Azuremonitor request", "params", params) - } - - azureMonitorQueries = append(azureMonitorQueries, &AzureMonitorQuery{ - URL: azureURL, - UrlComponents: urlComponents, - Target: target, - Params: params, - RefID: query.RefId, - Alias: alias, - }) - } - - return azureMonitorQueries, nil -} - -func (e *AzureMonitorExecutor) executeQuery(ctx context.Context, query *AzureMonitorQuery, tsdbQuery *tsdb.TsdbQuery) (*tsdb.QueryResult, AzureMonitorResponse, error) { - queryResult := &tsdb.QueryResult{Meta: simplejson.New(), RefId: query.RefID} - - req, err := e.createRequest(ctx, e.dsInfo) - if err != nil { - queryResult.Error = err - return queryResult, AzureMonitorResponse{}, nil - } - - req.URL.Path = path.Join(req.URL.Path, query.URL) - req.URL.RawQuery = query.Params.Encode() - queryResult.Meta.Set("rawQuery", req.URL.RawQuery) - - span, ctx := opentracing.StartSpanFromContext(ctx, "azuremonitor query") - span.SetTag("target", query.Target) - span.SetTag("from", tsdbQuery.TimeRange.From) - span.SetTag("until", tsdbQuery.TimeRange.To) - span.SetTag("datasource_id", e.dsInfo.Id) - span.SetTag("org_id", e.dsInfo.OrgId) - - defer span.Finish() - - opentracing.GlobalTracer().Inject( - span.Context(), - opentracing.HTTPHeaders, - opentracing.HTTPHeadersCarrier(req.Header)) - - azlog.Debug("AzureMonitor", "Request URL", req.URL.String()) - res, err := ctxhttp.Do(ctx, e.httpClient, req) - if err != nil { - queryResult.Error = err - return queryResult, AzureMonitorResponse{}, nil - } - - data, err := e.unmarshalResponse(res) - if err != nil { - queryResult.Error = err - return queryResult, AzureMonitorResponse{}, nil - } - - return queryResult, data, nil -} - -func (e *AzureMonitorExecutor) createRequest(ctx context.Context, dsInfo *models.DataSource) (*http.Request, error) { - // find plugin - plugin, ok := plugins.DataSources[dsInfo.Type] - if !ok { - return nil, errors.New("Unable to find datasource plugin Azure Monitor") - } - - var 
azureMonitorRoute *plugins.AppPluginRoute - for _, route := range plugin.Routes { - if route.Path == "azuremonitor" { - azureMonitorRoute = route - break - } - } - - cloudName := dsInfo.JsonData.Get("cloudName").MustString("azuremonitor") - subscriptionID := dsInfo.JsonData.Get("subscriptionId").MustString() - proxyPass := fmt.Sprintf("%s/subscriptions/%s", cloudName, subscriptionID) - - u, _ := url.Parse(dsInfo.Url) - u.Path = path.Join(u.Path, "render") - - req, err := http.NewRequest(http.MethodGet, u.String(), nil) - if err != nil { - azlog.Error("Failed to create request", "error", err) - return nil, fmt.Errorf("Failed to create request. error: %v", err) - } - - req.Header.Set("Content-Type", "application/json") - req.Header.Set("User-Agent", fmt.Sprintf("Grafana/%s", setting.BuildVersion)) - - pluginproxy.ApplyRoute(ctx, req, proxyPass, azureMonitorRoute, dsInfo) - - return req, nil -} - -func (e *AzureMonitorExecutor) unmarshalResponse(res *http.Response) (AzureMonitorResponse, error) { - body, err := ioutil.ReadAll(res.Body) - defer res.Body.Close() - if err != nil { - return AzureMonitorResponse{}, err - } - - if res.StatusCode/100 != 2 { - azlog.Error("Request failed", "status", res.Status, "body", string(body)) - return AzureMonitorResponse{}, fmt.Errorf(string(body)) - } - - var data AzureMonitorResponse - err = json.Unmarshal(body, &data) - if err != nil { - azlog.Error("Failed to unmarshal AzureMonitor response", "error", err, "status", res.Status, "body", string(body)) - return AzureMonitorResponse{}, err - } - - return data, nil -} - -func (e *AzureMonitorExecutor) parseResponse(queryRes *tsdb.QueryResult, data AzureMonitorResponse, query *AzureMonitorQuery) error { - azlog.Debug("AzureMonitor", "Response", data) - - if len(data.Value) == 0 { - return nil - } - - for _, series := range data.Value[0].Timeseries { - points := make([]tsdb.TimePoint, 0) - - metadataName := "" - metadataValue := "" - if len(series.Metadatavalues) > 0 { - metadataName = series.Metadatavalues[0].Name.LocalizedValue - metadataValue = series.Metadatavalues[0].Value - } - defaultMetricName := formatLegendKey(query.UrlComponents["resourceName"], data.Value[0].Name.LocalizedValue, metadataName, metadataValue) - - for _, point := range series.Data { - var value float64 - switch query.Params.Get("aggregation") { - case "Average": - value = point.Average - case "Total": - value = point.Total - case "Maximum": - value = point.Maximum - case "Minimum": - value = point.Minimum - case "Count": - value = point.Count - default: - value = point.Count - } - points = append(points, tsdb.NewTimePoint(null.FloatFrom(value), float64((point.TimeStamp).Unix())*1000)) - } - - queryRes.Series = append(queryRes.Series, &tsdb.TimeSeries{ - Name: defaultMetricName, - Points: points, - }) - } - - return nil -} From 452c4f5b9be8a5a8927066dc29f2df11dedcddd9 Mon Sep 17 00:00:00 2001 From: Daniel Lee Date: Sun, 10 Feb 2019 01:47:38 +0100 Subject: [PATCH 21/38] azuremonitor: add test for dimension filter --- .../azuremonitor/azuremonitor-datasource.go | 9 ++++- .../azuremonitor-datasource_test.go | 39 +++++++++++++++---- 2 files changed, 38 insertions(+), 10 deletions(-) diff --git a/pkg/tsdb/azuremonitor/azuremonitor-datasource.go b/pkg/tsdb/azuremonitor/azuremonitor-datasource.go index 99e45bb58b8..2da97514dd8 100644 --- a/pkg/tsdb/azuremonitor/azuremonitor-datasource.go +++ b/pkg/tsdb/azuremonitor/azuremonitor-datasource.go @@ -24,11 +24,16 @@ import ( "github.com/grafana/grafana/pkg/tsdb" ) +// AzureMonitorDatasource calls 
the Azure Monitor API - one of the four API's supported type AzureMonitorDatasource struct { httpClient *http.Client dsInfo *models.DataSource } +// executeTimeSeriesQuery does the following: +// 1. build the AzureMonitor url and querystring for each query +// 2. executes each query by calling the Azure Monitor API +// 3. parses the responses for each query into the timeseries format func (e *AzureMonitorDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []*tsdb.Query, timeRange *tsdb.TimeRange) (*tsdb.Response, error) { result := &tsdb.Response{ Results: make(map[string]*tsdb.QueryResult), @@ -95,9 +100,9 @@ func (e *AzureMonitorDatasource) buildQueries(queries []*tsdb.Query, timeRange * params.Add("aggregation", fmt.Sprintf("%v", azureMonitorTarget["aggregation"])) params.Add("metricnames", fmt.Sprintf("%v", azureMonitorTarget["metricName"])) - dimension := fmt.Sprintf("%v", azureMonitorTarget["dimension"]) + dimension := strings.TrimSpace(fmt.Sprintf("%v", azureMonitorTarget["dimension"])) dimensionFilter := strings.TrimSpace(fmt.Sprintf("%v", azureMonitorTarget["dimensionFilter"])) - if azureMonitorTarget["dimension"] != nil && azureMonitorTarget["dimensionFilter"] != nil && dimensionFilter != "" { + if azureMonitorTarget["dimension"] != nil && azureMonitorTarget["dimensionFilter"] != nil && len(dimension) > 0 && len(dimensionFilter) > 0 { params.Add("$filter", fmt.Sprintf("%s eq '%s'", dimension, dimensionFilter)) } diff --git a/pkg/tsdb/azuremonitor/azuremonitor-datasource_test.go b/pkg/tsdb/azuremonitor/azuremonitor-datasource_test.go index 331a084033f..0c95cabb3ec 100644 --- a/pkg/tsdb/azuremonitor/azuremonitor-datasource_test.go +++ b/pkg/tsdb/azuremonitor/azuremonitor-datasource_test.go @@ -16,7 +16,7 @@ import ( func TestAzureMonitorDatasource(t *testing.T) { Convey("AzureMonitorDatasource", t, func() { - executor := &AzureMonitorDatasource{} + datasource := &AzureMonitorDatasource{} Convey("Parse queries from frontend and build AzureMonitor API queries", func() { fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local) @@ -44,7 +44,7 @@ func TestAzureMonitorDatasource(t *testing.T) { }, } Convey("and is a normal query", func() { - queries, err := executor.buildQueries(tsdbQuery.Queries, tsdbQuery.TimeRange) + queries, err := datasource.buildQueries(tsdbQuery.Queries, tsdbQuery.TimeRange) So(err, ShouldBeNil) So(len(queries), ShouldEqual, 1) @@ -59,6 +59,29 @@ func TestAzureMonitorDatasource(t *testing.T) { So(queries[0].Params["interval"][0], ShouldEqual, "PT1M") So(queries[0].Alias, ShouldEqual, "testalias") }) + + Convey("and has a dimension filter", func() { + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "azureMonitor": map[string]interface{}{ + "timeGrain": "PT1M", + "aggregation": "Average", + "resourceGroup": "grafanastaging", + "resourceName": "grafana", + "metricDefinition": "Microsoft.Compute/virtualMachines", + "metricName": "Percentage CPU", + "alias": "testalias", + "queryType": "Azure Monitor", + "dimension": "blob", + "dimensionFilter": "*", + }, + }) + + queries, err := datasource.buildQueries(tsdbQuery.Queries, tsdbQuery.TimeRange) + So(err, ShouldBeNil) + + So(queries[0].Target, ShouldEqual, "%24filter=blob+eq+%27%2A%27&aggregation=Average&api-version=2018-01-01&interval=PT1M&metricnames=Percentage+CPU×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z") + + }) }) Convey("Parse AzureMonitor API response in the time series format", func() { @@ -76,7 +99,7 @@ func 
TestAzureMonitorDatasource(t *testing.T) { "aggregation": {"Average"}, }, } - err = executor.parseResponse(res, data, query) + err = datasource.parseResponse(res, data, query) So(err, ShouldBeNil) So(len(res.Series), ShouldEqual, 1) @@ -112,7 +135,7 @@ func TestAzureMonitorDatasource(t *testing.T) { "aggregation": {"Total"}, }, } - err = executor.parseResponse(res, data, query) + err = datasource.parseResponse(res, data, query) So(err, ShouldBeNil) So(res.Series[0].Points[0][0].Float64, ShouldEqual, 8.26) @@ -132,7 +155,7 @@ func TestAzureMonitorDatasource(t *testing.T) { "aggregation": {"Maximum"}, }, } - err = executor.parseResponse(res, data, query) + err = datasource.parseResponse(res, data, query) So(err, ShouldBeNil) So(res.Series[0].Points[0][0].Float64, ShouldEqual, 3.07) @@ -152,7 +175,7 @@ func TestAzureMonitorDatasource(t *testing.T) { "aggregation": {"Minimum"}, }, } - err = executor.parseResponse(res, data, query) + err = datasource.parseResponse(res, data, query) So(err, ShouldBeNil) So(res.Series[0].Points[0][0].Float64, ShouldEqual, 1.51) @@ -172,7 +195,7 @@ func TestAzureMonitorDatasource(t *testing.T) { "aggregation": {"Count"}, }, } - err = executor.parseResponse(res, data, query) + err = datasource.parseResponse(res, data, query) So(err, ShouldBeNil) So(res.Series[0].Points[0][0].Float64, ShouldEqual, 4) @@ -192,7 +215,7 @@ func TestAzureMonitorDatasource(t *testing.T) { "aggregation": {"Average"}, }, } - err = executor.parseResponse(res, data, query) + err = datasource.parseResponse(res, data, query) So(err, ShouldBeNil) So(len(res.Series), ShouldEqual, 3) From 60327953a2edf245d8fe8f1a5d394dc18931f20c Mon Sep 17 00:00:00 2001 From: Daniel Lee Date: Mon, 11 Feb 2019 01:17:37 +0100 Subject: [PATCH 22/38] azuremonitor: handles timegrain set to auto on backend --- .../azuremonitor/azuremonitor-datasource.go | 34 ++++++++++- .../azuremonitor-datasource_test.go | 20 +++++++ pkg/tsdb/azuremonitor/time-grain.go | 53 ++++++++++++++++ pkg/tsdb/azuremonitor/time-grain_test.go | 60 +++++++++++++++++++ 4 files changed, 165 insertions(+), 2 deletions(-) create mode 100644 pkg/tsdb/azuremonitor/time-grain.go create mode 100644 pkg/tsdb/azuremonitor/time-grain_test.go diff --git a/pkg/tsdb/azuremonitor/azuremonitor-datasource.go b/pkg/tsdb/azuremonitor/azuremonitor-datasource.go index 2da97514dd8..3405c3bbd1f 100644 --- a/pkg/tsdb/azuremonitor/azuremonitor-datasource.go +++ b/pkg/tsdb/azuremonitor/azuremonitor-datasource.go @@ -30,6 +30,11 @@ type AzureMonitorDatasource struct { dsInfo *models.DataSource } +var ( + // 1m, 5m, 15m, 30m, 1h, 6h, 12h, 1d in milliseconds + allowedIntervalsMS = []int64{60000, 300000, 900000, 1800000, 3600000, 21600000, 43200000, 86400000} +) + // executeTimeSeriesQuery does the following: // 1. build the AzureMonitor url and querystring for each query // 2. 
executes each query by calling the Azure Monitor API @@ -49,7 +54,7 @@ func (e *AzureMonitorDatasource) executeTimeSeriesQuery(ctx context.Context, ori if err != nil { return nil, err } - azlog.Debug("AzureMonitor", "Response", resp) + // azlog.Debug("AzureMonitor", "Response", resp) err = e.parseResponse(queryRes, resp, query) if err != nil { @@ -93,10 +98,20 @@ func (e *AzureMonitorDatasource) buildQueries(queries []*tsdb.Query, timeRange * alias := fmt.Sprintf("%v", azureMonitorTarget["alias"]) + timeGrain := fmt.Sprintf("%v", azureMonitorTarget["timeGrain"]) + if timeGrain == "auto" { + autoInSeconds := e.findClosestAllowedIntervalMs(query.IntervalMs) / 1000 + tg := &TimeGrain{} + timeGrain, err = tg.createISO8601DurationFromInterval(fmt.Sprintf("%vs", autoInSeconds)) + if err != nil { + return nil, err + } + } + params := url.Values{} params.Add("api-version", "2018-01-01") params.Add("timespan", fmt.Sprintf("%v/%v", startTime.UTC().Format(time.RFC3339), endTime.UTC().Format(time.RFC3339))) - params.Add("interval", fmt.Sprintf("%v", azureMonitorTarget["timeGrain"])) + params.Add("interval", timeGrain) params.Add("aggregation", fmt.Sprintf("%v", azureMonitorTarget["aggregation"])) params.Add("metricnames", fmt.Sprintf("%v", azureMonitorTarget["metricName"])) @@ -269,3 +284,18 @@ func (e *AzureMonitorDatasource) parseResponse(queryRes *tsdb.QueryResult, data return nil } + +func (e *AzureMonitorDatasource) findClosestAllowedIntervalMs(intervalMs int64) int64 { + closest := allowedIntervalsMS[0] + + for i, allowed := range allowedIntervalsMS { + if intervalMs > allowed { + if i+1 < len(allowedIntervalsMS) { + closest = allowedIntervalsMS[i+1] + } else { + closest = allowed + } + } + } + return closest +} diff --git a/pkg/tsdb/azuremonitor/azuremonitor-datasource_test.go b/pkg/tsdb/azuremonitor/azuremonitor-datasource_test.go index 0c95cabb3ec..9aba4eb617b 100644 --- a/pkg/tsdb/azuremonitor/azuremonitor-datasource_test.go +++ b/pkg/tsdb/azuremonitor/azuremonitor-datasource_test.go @@ -229,6 +229,26 @@ func TestAzureMonitorDatasource(t *testing.T) { So(res.Series[2].Points[0][0].Float64, ShouldEqual, 0) }) }) + + Convey("Find closest allowed interval for auto time grain", func() { + intervals := map[string]int64{ + "3m": 180000, + "5m": 300000, + "10m": 600000, + "15m": 900000, + "1d": 86400000, + "2d": 172800000, + } + + closest := datasource.findClosestAllowedIntervalMs(intervals["3m"]) + So(closest, ShouldEqual, intervals["5m"]) + + closest = datasource.findClosestAllowedIntervalMs(intervals["10m"]) + So(closest, ShouldEqual, intervals["15m"]) + + closest = datasource.findClosestAllowedIntervalMs(intervals["2d"]) + So(closest, ShouldEqual, intervals["1d"]) + }) }) } diff --git a/pkg/tsdb/azuremonitor/time-grain.go b/pkg/tsdb/azuremonitor/time-grain.go new file mode 100644 index 00000000000..22da2872bab --- /dev/null +++ b/pkg/tsdb/azuremonitor/time-grain.go @@ -0,0 +1,53 @@ +package azuremonitor + +import ( + "fmt" + "strconv" + "strings" +) + +// TimeGrain handles convertions between +// the ISO 8601 Duration format (PT1H), Kbn units (1h) and Time Grains (1 hour) +// Also handles using the automatic Grafana interval to calculate a ISO 8601 Duration. 
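+//
+// Illustrative examples (not part of the original change), matching the cases in
+// time-grain_test.go below:
+//   "100ms" -> "PT1M"  (rounded up: one minute is the minimum Azure Monitor interval)
+//   "59s"   -> "PT1M"
+//   "10m"   -> "PT10M"
+//   "2h"    -> "PT2H"   (via createISO8601Duration(2, "h"))
+//   "2d"    -> "P2D"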
+type TimeGrain struct{} + +var ( + smallTimeUnits = []string{"hour", "minute", "h", "m"} +) + +func (tg *TimeGrain) createISO8601DurationFromInterval(interval string) (string, error) { + if strings.Contains(interval, "ms") { + return "PT1M", nil + } + + timeValueString := interval[0 : len(interval)-1] + timeValue, err := strconv.Atoi(timeValueString) + if err != nil { + return "", fmt.Errorf("Could not parse interval %v to an ISO 8061 duration", interval) + } + + unit := interval[len(interval)-1:] + + if unit == "s" { + toMinutes := (timeValue * 60) % 60 + + // mimumum interval is 1m for Azure Monitor + if toMinutes < 1 { + toMinutes = 1 + } + + return tg.createISO8601Duration(toMinutes, "m"), nil + } + + return tg.createISO8601Duration(timeValue, unit), nil +} + +func (tg *TimeGrain) createISO8601Duration(timeValue int, timeUnit string) string { + for _, smallTimeUnit := range smallTimeUnits { + if timeUnit == smallTimeUnit { + return fmt.Sprintf("PT%v%v", timeValue, strings.ToUpper(timeUnit[0:1])) + } + } + + return fmt.Sprintf("P%v%v", timeValue, strings.ToUpper(timeUnit[0:1])) +} diff --git a/pkg/tsdb/azuremonitor/time-grain_test.go b/pkg/tsdb/azuremonitor/time-grain_test.go new file mode 100644 index 00000000000..be8d0b10a0c --- /dev/null +++ b/pkg/tsdb/azuremonitor/time-grain_test.go @@ -0,0 +1,60 @@ +package azuremonitor + +import ( + "testing" + + . "github.com/smartystreets/goconvey/convey" +) + +func TestTimeGrain(t *testing.T) { + Convey("TimeGrain", t, func() { + tgc := &TimeGrain{} + + Convey("create ISO 8601 Duration", func() { + Convey("when given a time unit smaller than a day", func() { + minuteKbnDuration := tgc.createISO8601Duration(1, "m") + hourKbnDuration := tgc.createISO8601Duration(2, "h") + minuteDuration := tgc.createISO8601Duration(1, "minute") + hourDuration := tgc.createISO8601Duration(2, "hour") + + Convey("should convert it to a time duration", func() { + So(minuteKbnDuration, ShouldEqual, "PT1M") + So(hourKbnDuration, ShouldEqual, "PT2H") + + So(minuteDuration, ShouldEqual, "PT1M") + So(hourDuration, ShouldEqual, "PT2H") + }) + }) + + Convey("when given the day time unit", func() { + kbnDuration := tgc.createISO8601Duration(1, "d") + duration := tgc.createISO8601Duration(2, "day") + + Convey("should convert it to a date duration", func() { + So(kbnDuration, ShouldEqual, "P1D") + So(duration, ShouldEqual, "P2D") + }) + }) + }) + + Convey("create ISO 8601 Duration from Grafana interval", func() { + Convey("and interval is less than a minute", func() { + durationMS, _ := tgc.createISO8601DurationFromInterval("100ms") + durationS, _ := tgc.createISO8601DurationFromInterval("59s") + Convey("should be rounded up to a minute as is the minimum interval for Azure Monitor", func() { + So(durationMS, ShouldEqual, "PT1M") + So(durationS, ShouldEqual, "PT1M") + }) + }) + + Convey("and interval is more than a minute", func() { + durationM, _ := tgc.createISO8601DurationFromInterval("10m") + durationD, _ := tgc.createISO8601DurationFromInterval("2d") + Convey("should be rounded up to a minute as is the minimum interval for Azure Monitor", func() { + So(durationM, ShouldEqual, "PT10M") + So(durationD, ShouldEqual, "P2D") + }) + }) + }) + }) +} From d6904ba9b412254fd82106f3bd0b6ad754f61107 Mon Sep 17 00:00:00 2001 From: Daniel Lee Date: Mon, 11 Feb 2019 01:22:15 +0100 Subject: [PATCH 23/38] azuremonitor: small refactoring --- pkg/tsdb/azuremonitor/azuremonitor-datasource.go | 11 +++++++++++ pkg/tsdb/azuremonitor/legend-key.go | 11 ----------- 2 files changed, 11 
insertions(+), 11 deletions(-) delete mode 100644 pkg/tsdb/azuremonitor/legend-key.go diff --git a/pkg/tsdb/azuremonitor/azuremonitor-datasource.go b/pkg/tsdb/azuremonitor/azuremonitor-datasource.go index 3405c3bbd1f..079910e1b66 100644 --- a/pkg/tsdb/azuremonitor/azuremonitor-datasource.go +++ b/pkg/tsdb/azuremonitor/azuremonitor-datasource.go @@ -285,6 +285,9 @@ func (e *AzureMonitorDatasource) parseResponse(queryRes *tsdb.QueryResult, data return nil } +// findClosestAllowedIntervalMs is used for the auto time grain setting. +// It finds the closest time grain from the list of allowed time grains for Azure Monitor +// using the Grafana interval in milliseconds func (e *AzureMonitorDatasource) findClosestAllowedIntervalMs(intervalMs int64) int64 { closest := allowedIntervalsMS[0] @@ -299,3 +302,11 @@ func (e *AzureMonitorDatasource) findClosestAllowedIntervalMs(intervalMs int64) } return closest } + +// formatLegendKey builds the legend key or timeseries name +func formatLegendKey(resourceName string, metricName string, metadataName string, metadataValue string) string { + if len(metadataName) > 0 { + return fmt.Sprintf("%s{%s=%s}.%s", resourceName, metadataName, metadataValue, metricName) + } + return fmt.Sprintf("%s.%s", resourceName, metricName) +} diff --git a/pkg/tsdb/azuremonitor/legend-key.go b/pkg/tsdb/azuremonitor/legend-key.go deleted file mode 100644 index 7d5cdbbcdd3..00000000000 --- a/pkg/tsdb/azuremonitor/legend-key.go +++ /dev/null @@ -1,11 +0,0 @@ -package azuremonitor - -import "fmt" - -// formatLegendKey builds the legend key or timeseries name -func formatLegendKey(resourceName string, metricName string, metadataName string, metadataValue string) string { - if len(metadataName) > 0 { - return fmt.Sprintf("%s{%s=%s}.%s", resourceName, metadataName, metadataValue, metricName) - } - return fmt.Sprintf("%s.%s", resourceName, metricName) -} From a54484638d9c940d0b74fb836832ea7da59d621a Mon Sep 17 00:00:00 2001 From: Daniel Lee Date: Mon, 11 Feb 2019 11:25:51 +0100 Subject: [PATCH 24/38] interval: make the FormatDuration function public A useful function that was ported from kbn.ts and can be used to convert milliseconds into a kbn unit --- pkg/tsdb/interval.go | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/pkg/tsdb/interval.go b/pkg/tsdb/interval.go index fd6adee39d7..7819ef3ecad 100644 --- a/pkg/tsdb/interval.go +++ b/pkg/tsdb/interval.go @@ -59,11 +59,11 @@ func (ic *intervalCalculator) Calculate(timerange *TimeRange, minInterval time.D interval := time.Duration((to - from) / defaultRes) if interval < minInterval { - return Interval{Text: formatDuration(minInterval), Value: minInterval} + return Interval{Text: FormatDuration(minInterval), Value: minInterval} } rounded := roundInterval(interval) - return Interval{Text: formatDuration(rounded), Value: rounded} + return Interval{Text: FormatDuration(rounded), Value: rounded} } func GetIntervalFrom(dsInfo *models.DataSource, queryModel *simplejson.Json, defaultInterval time.Duration) (time.Duration, error) { @@ -89,7 +89,8 @@ func GetIntervalFrom(dsInfo *models.DataSource, queryModel *simplejson.Json, def return parsedInterval, nil } -func formatDuration(inter time.Duration) string { +// FormatDuration converts a duration into the kbn format e.g. 
1m 2h or 3d +func FormatDuration(inter time.Duration) string { if inter >= year { return fmt.Sprintf("%dy", inter/year) } From 0b74860f55890d27125b22ff97112b682e4b7f6f Mon Sep 17 00:00:00 2001 From: Daniel Lee Date: Mon, 11 Feb 2019 13:27:08 +0100 Subject: [PATCH 25/38] azuremonitor: fix auto interval calculation on backend Not needed for alerting (as the query IntervalMs will always be 0) but needed later when called from the frontend. --- .../azuremonitor/azuremonitor-datasource.go | 8 +++---- .../azuremonitor-datasource_test.go | 6 ++--- pkg/tsdb/azuremonitor/time-grain.go | 23 +++++++++---------- pkg/tsdb/azuremonitor/time-grain_test.go | 21 +++++++++++++---- pkg/tsdb/azuremonitor/url-builder.go | 6 ++--- pkg/tsdb/azuremonitor/url-builder_test.go | 6 ++--- pkg/tsdb/interval_test.go | 10 ++++---- 7 files changed, 45 insertions(+), 35 deletions(-) diff --git a/pkg/tsdb/azuremonitor/azuremonitor-datasource.go b/pkg/tsdb/azuremonitor/azuremonitor-datasource.go index 079910e1b66..5def94aebf6 100644 --- a/pkg/tsdb/azuremonitor/azuremonitor-datasource.go +++ b/pkg/tsdb/azuremonitor/azuremonitor-datasource.go @@ -89,7 +89,7 @@ func (e *AzureMonitorDatasource) buildQueries(queries []*tsdb.Query, timeRange * urlComponents["metricDefinition"] = fmt.Sprintf("%v", azureMonitorTarget["metricDefinition"]) urlComponents["resourceName"] = fmt.Sprintf("%v", azureMonitorTarget["resourceName"]) - ub := URLBuilder{ + ub := urlBuilder{ ResourceGroup: urlComponents["resourceGroup"], MetricDefinition: urlComponents["metricDefinition"], ResourceName: urlComponents["resourceName"], @@ -100,9 +100,9 @@ func (e *AzureMonitorDatasource) buildQueries(queries []*tsdb.Query, timeRange * timeGrain := fmt.Sprintf("%v", azureMonitorTarget["timeGrain"]) if timeGrain == "auto" { - autoInSeconds := e.findClosestAllowedIntervalMs(query.IntervalMs) / 1000 + autoInterval := e.findClosestAllowedIntervalMS(query.IntervalMs) tg := &TimeGrain{} - timeGrain, err = tg.createISO8601DurationFromInterval(fmt.Sprintf("%vs", autoInSeconds)) + timeGrain, err = tg.createISO8601DurationFromIntervalMS(autoInterval) if err != nil { return nil, err } @@ -288,7 +288,7 @@ func (e *AzureMonitorDatasource) parseResponse(queryRes *tsdb.QueryResult, data // findClosestAllowedIntervalMs is used for the auto time grain setting.
// It finds the closest time grain from the list of allowed time grains for Azure Monitor // using the Grafana interval in milliseconds -func (e *AzureMonitorDatasource) findClosestAllowedIntervalMs(intervalMs int64) int64 { +func (e *AzureMonitorDatasource) findClosestAllowedIntervalMS(intervalMs int64) int64 { closest := allowedIntervalsMS[0] for i, allowed := range allowedIntervalsMS { diff --git a/pkg/tsdb/azuremonitor/azuremonitor-datasource_test.go b/pkg/tsdb/azuremonitor/azuremonitor-datasource_test.go index 9aba4eb617b..b8d1d6cc266 100644 --- a/pkg/tsdb/azuremonitor/azuremonitor-datasource_test.go +++ b/pkg/tsdb/azuremonitor/azuremonitor-datasource_test.go @@ -240,13 +240,13 @@ func TestAzureMonitorDatasource(t *testing.T) { "2d": 172800000, } - closest := datasource.findClosestAllowedIntervalMs(intervals["3m"]) + closest := datasource.findClosestAllowedIntervalMS(intervals["3m"]) So(closest, ShouldEqual, intervals["5m"]) - closest = datasource.findClosestAllowedIntervalMs(intervals["10m"]) + closest = datasource.findClosestAllowedIntervalMS(intervals["10m"]) So(closest, ShouldEqual, intervals["15m"]) - closest = datasource.findClosestAllowedIntervalMs(intervals["2d"]) + closest = datasource.findClosestAllowedIntervalMS(intervals["2d"]) So(closest, ShouldEqual, intervals["1d"]) }) }) diff --git a/pkg/tsdb/azuremonitor/time-grain.go b/pkg/tsdb/azuremonitor/time-grain.go index 22da2872bab..e6a15aef64f 100644 --- a/pkg/tsdb/azuremonitor/time-grain.go +++ b/pkg/tsdb/azuremonitor/time-grain.go @@ -4,6 +4,9 @@ import ( "fmt" "strconv" "strings" + "time" + + "github.com/grafana/grafana/pkg/tsdb" ) // TimeGrain handles convertions between @@ -15,28 +18,24 @@ var ( smallTimeUnits = []string{"hour", "minute", "h", "m"} ) -func (tg *TimeGrain) createISO8601DurationFromInterval(interval string) (string, error) { - if strings.Contains(interval, "ms") { +func (tg *TimeGrain) createISO8601DurationFromIntervalMS(interval int64) (string, error) { + formatted := tsdb.FormatDuration(time.Duration(interval) * time.Millisecond) + + if strings.Contains(formatted, "ms") { return "PT1M", nil } - timeValueString := interval[0 : len(interval)-1] + timeValueString := formatted[0 : len(formatted)-1] timeValue, err := strconv.Atoi(timeValueString) if err != nil { return "", fmt.Errorf("Could not parse interval %v to an ISO 8061 duration", interval) } - unit := interval[len(interval)-1:] - - if unit == "s" { - toMinutes := (timeValue * 60) % 60 + unit := formatted[len(formatted)-1:] + if unit == "s" && timeValue < 60 { // mimumum interval is 1m for Azure Monitor - if toMinutes < 1 { - toMinutes = 1 - } - - return tg.createISO8601Duration(toMinutes, "m"), nil + return "PT1M", nil } return tg.createISO8601Duration(timeValue, unit), nil diff --git a/pkg/tsdb/azuremonitor/time-grain_test.go b/pkg/tsdb/azuremonitor/time-grain_test.go index be8d0b10a0c..2df3c92b0ff 100644 --- a/pkg/tsdb/azuremonitor/time-grain_test.go +++ b/pkg/tsdb/azuremonitor/time-grain_test.go @@ -37,10 +37,14 @@ func TestTimeGrain(t *testing.T) { }) }) - Convey("create ISO 8601 Duration from Grafana interval", func() { + Convey("create ISO 8601 Duration from Grafana interval in milliseconds", func() { Convey("and interval is less than a minute", func() { - durationMS, _ := tgc.createISO8601DurationFromInterval("100ms") - durationS, _ := tgc.createISO8601DurationFromInterval("59s") + durationMS, err := tgc.createISO8601DurationFromIntervalMS(100) + So(err, ShouldBeNil) + + durationS, err := tgc.createISO8601DurationFromIntervalMS(59999) + So(err, 
ShouldBeNil) + Convey("should be rounded up to a minute as is the minimum interval for Azure Monitor", func() { So(durationMS, ShouldEqual, "PT1M") So(durationS, ShouldEqual, "PT1M") @@ -48,8 +52,15 @@ func TestTimeGrain(t *testing.T) { }) Convey("and interval is more than a minute", func() { - durationM, _ := tgc.createISO8601DurationFromInterval("10m") - durationD, _ := tgc.createISO8601DurationFromInterval("2d") + intervals := map[string]int64{ + "10m": 600000, + "2d": 172800000, + } + durationM, err := tgc.createISO8601DurationFromIntervalMS(intervals["10m"]) + So(err, ShouldBeNil) + durationD, err := tgc.createISO8601DurationFromIntervalMS(intervals["2d"]) + So(err, ShouldBeNil) + Convey("should be rounded up to a minute as is the minimum interval for Azure Monitor", func() { So(durationM, ShouldEqual, "PT10M") So(durationD, ShouldEqual, "P2D") diff --git a/pkg/tsdb/azuremonitor/url-builder.go b/pkg/tsdb/azuremonitor/url-builder.go index 1ccbbc2bf81..c252048f517 100644 --- a/pkg/tsdb/azuremonitor/url-builder.go +++ b/pkg/tsdb/azuremonitor/url-builder.go @@ -5,8 +5,8 @@ import ( "strings" ) -// URLBuilder builds the URL for calling the Azure Monitor API -type URLBuilder struct { +// urlBuilder builds the URL for calling the Azure Monitor API +type urlBuilder struct { ResourceGroup string MetricDefinition string ResourceName string @@ -14,7 +14,7 @@ type URLBuilder struct { // Build checks the metric definition property to see which form of the url // should be returned -func (ub *URLBuilder) Build() string { +func (ub *urlBuilder) Build() string { if strings.Count(ub.MetricDefinition, "/") > 1 { rn := strings.Split(ub.ResourceName, "/") diff --git a/pkg/tsdb/azuremonitor/url-builder_test.go b/pkg/tsdb/azuremonitor/url-builder_test.go index baf9b34d7eb..85c4f81bc83 100644 --- a/pkg/tsdb/azuremonitor/url-builder_test.go +++ b/pkg/tsdb/azuremonitor/url-builder_test.go @@ -10,7 +10,7 @@ func TestURLBuilder(t *testing.T) { Convey("AzureMonitor URL Builder", t, func() { Convey("when metric definition is in the short form", func() { - ub := &URLBuilder{ + ub := &urlBuilder{ ResourceGroup: "rg", MetricDefinition: "Microsoft.Compute/virtualMachines", ResourceName: "rn", @@ -21,7 +21,7 @@ func TestURLBuilder(t *testing.T) { }) Convey("when metric definition is Microsoft.Storage/storageAccounts/blobServices", func() { - ub := &URLBuilder{ + ub := &urlBuilder{ ResourceGroup: "rg", MetricDefinition: "Microsoft.Storage/storageAccounts/blobServices", ResourceName: "rn1/default", @@ -32,7 +32,7 @@ func TestURLBuilder(t *testing.T) { }) Convey("when metric definition is Microsoft.Storage/storageAccounts/fileServices", func() { - ub := &URLBuilder{ + ub := &urlBuilder{ ResourceGroup: "rg", MetricDefinition: "Microsoft.Storage/storageAccounts/fileServices", ResourceName: "rn1/default", diff --git a/pkg/tsdb/interval_test.go b/pkg/tsdb/interval_test.go index 941b08dd554..4cd3fcea532 100644 --- a/pkg/tsdb/interval_test.go +++ b/pkg/tsdb/interval_test.go @@ -51,11 +51,11 @@ func TestInterval(t *testing.T) { }) Convey("Format value", func() { - So(formatDuration(time.Second*61), ShouldEqual, "1m") - So(formatDuration(time.Millisecond*30), ShouldEqual, "30ms") - So(formatDuration(time.Hour*23), ShouldEqual, "23h") - So(formatDuration(time.Hour*24), ShouldEqual, "1d") - So(formatDuration(time.Hour*24*367), ShouldEqual, "1y") + So(FormatDuration(time.Second*61), ShouldEqual, "1m") + So(FormatDuration(time.Millisecond*30), ShouldEqual, "30ms") + So(FormatDuration(time.Hour*23), ShouldEqual, "23h") + 
So(FormatDuration(time.Hour*24), ShouldEqual, "1d") + So(FormatDuration(time.Hour*24*367), ShouldEqual, "1y") }) }) } From e53f41e511a92dd12eb0ee3604fb494a572f6311 Mon Sep 17 00:00:00 2001 From: Daniel Lee Date: Mon, 11 Feb 2019 14:10:01 +0100 Subject: [PATCH 26/38] changelog: adds note for #15131 --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9db747d6ed5..afb3fbbaab4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,7 @@ * **Templating**: Add json formatting to variable interpolation [#15291](https://github.com/grafana/grafana/issues/15291), thx [@mtanda](https://github.com/mtanda) * **Login**: Anonymous usage stats for token auth [#15288](https://github.com/grafana/grafana/issues/15288) * **Alerting**: Fixes crash bug when alert notifier folders are missing [#15295](https://github.com/grafana/grafana/issues/15295) +* **AzureMonitor**: improve autocomplete for Log Analytics and App Insights editor [#15131](https://github.com/grafana/grafana/issues/15131) ### 6.0.0-beta1 fixes From 82e330a1c53756c11a23b14c6b9a4a23d7332ef6 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Mon, 11 Feb 2019 14:01:39 +0100 Subject: [PATCH 27/38] update changelog --- CHANGELOG.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index afb3fbbaab4..23cf3e2167f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,7 +1,7 @@ # 6.0.0-beta2 (unreleased) ### Minor -* **Pushover**: Adds support for images in pushover notifier [#10780](https://github.com/grafana/grafana/issues/10780), thx [@jpenalbae](https://github.com/jpenalbae) +* **Alerting**: Adds support for images in pushover notifier [#10780](https://github.com/grafana/grafana/issues/10780), thx [@jpenalbae](https://github.com/jpenalbae) * **Graphite/InfluxDB/OpenTSDB**: Fix always take dashboard timezone into consideration when handle custom time ranges [#15284](https://github.com/grafana/grafana/issues/15284) * **Stackdriver**: Template variables in filters using globbing format [#15182](https://github.com/grafana/grafana/issues/15182) * **Cloudwatch**: Add `resource_arns` template variable query function [#8207](https://github.com/grafana/grafana/issues/8207), thx [@jeroenvollenbrock](https://github.com/jeroenvollenbrock) @@ -13,12 +13,12 @@ * **Annotations**: Support PATCH verb in annotations http api [#12546](https://github.com/grafana/grafana/issues/12546), thx [@SamuelToh](https://github.com/SamuelToh) * **Templating**: Add json formatting to variable interpolation [#15291](https://github.com/grafana/grafana/issues/15291), thx [@mtanda](https://github.com/mtanda) * **Login**: Anonymous usage stats for token auth [#15288](https://github.com/grafana/grafana/issues/15288) -* **Alerting**: Fixes crash bug when alert notifier folders are missing [#15295](https://github.com/grafana/grafana/issues/15295) * **AzureMonitor**: improve autocomplete for Log Analytics and App Insights editor [#15131](https://github.com/grafana/grafana/issues/15131) ### 6.0.0-beta1 fixes * **Postgres**: Fix default port not added when port not configured [#15189](https://github.com/grafana/grafana/issues/15189) +* **Alerting**: Fixes crash bug when alert notifier folders are missing [#15295](https://github.com/grafana/grafana/issues/15295) # 6.0.0-beta1 (2019-01-30) From a7c44c2ce749c496b0be7eebfb4b6a91d9c6b224 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Mon, 11 Feb 2019 14:03:45 +0100 Subject: [PATCH 28/38] changelog: add notes about closing #14432 --- 
CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 23cf3e2167f..ab721afe144 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,7 @@ * **Templating**: Add json formatting to variable interpolation [#15291](https://github.com/grafana/grafana/issues/15291), thx [@mtanda](https://github.com/mtanda) * **Login**: Anonymous usage stats for token auth [#15288](https://github.com/grafana/grafana/issues/15288) * **AzureMonitor**: improve autocomplete for Log Analytics and App Insights editor [#15131](https://github.com/grafana/grafana/issues/15131) +* **LDAP**: Fix IPA/FreeIPA v4.6.4 does not allow LDAP searches with empty attributes [#14432](https://github.com/grafana/grafana/issues/14432) ### 6.0.0-beta1 fixes From 9472d7e60083b6fdd903696beb657ea34fb2b497 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Mon, 11 Feb 2019 14:06:39 +0100 Subject: [PATCH 29/38] changelog: add notes about closing #15219 --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index ab721afe144..a1919fb9e3b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,7 @@ * **Postgres**: Fix default port not added when port not configured [#15189](https://github.com/grafana/grafana/issues/15189) * **Alerting**: Fixes crash bug when alert notifier folders are missing [#15295](https://github.com/grafana/grafana/issues/15295) +* **Dashboard**: Fix save provisioned dashboard modal [#15219](https://github.com/grafana/grafana/pull/15219) # 6.0.0-beta1 (2019-01-30) From 63f465f0ac261ceb2d10a65cc080e67503b0e9b0 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Mon, 11 Feb 2019 14:10:30 +0100 Subject: [PATCH 30/38] changelog: add notes about closing #15122 --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index a1919fb9e3b..b8c316c403f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -21,6 +21,7 @@ * **Postgres**: Fix default port not added when port not configured [#15189](https://github.com/grafana/grafana/issues/15189) * **Alerting**: Fixes crash bug when alert notifier folders are missing [#15295](https://github.com/grafana/grafana/issues/15295) * **Dashboard**: Fix save provisioned dashboard modal [#15219](https://github.com/grafana/grafana/pull/15219) +* **Dashboard**: Fix having a long query in prometheus dashboard query editor blocks 30% of the query field when on OSX and having native scrollbars [#15122](https://github.com/grafana/grafana/issues/15122) # 6.0.0-beta1 (2019-01-30) From 1f0c7727f462bb53e3815e2240651e1b466c4386 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Mon, 11 Feb 2019 14:12:25 +0100 Subject: [PATCH 31/38] changelog: add notes about closing #15222 --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index b8c316c403f..5bb307a8a36 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -22,6 +22,7 @@ * **Alerting**: Fixes crash bug when alert notifier folders are missing [#15295](https://github.com/grafana/grafana/issues/15295) * **Dashboard**: Fix save provisioned dashboard modal [#15219](https://github.com/grafana/grafana/pull/15219) * **Dashboard**: Fix having a long query in prometheus dashboard query editor blocks 30% of the query field when on OSX and having native scrollbars [#15122](https://github.com/grafana/grafana/issues/15122) +* **Explore**: Fix issue with wrapping on long queries [#15222](https://github.com/grafana/grafana/issues/15222) # 6.0.0-beta1 (2019-01-30) From 757a98257d25296c75a29c5f589f517ac22d800e 
Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Mon, 11 Feb 2019 14:13:52 +0100 Subject: [PATCH 32/38] changelog: add notes about closing #15223 --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5bb307a8a36..d41f3cf271c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -23,6 +23,7 @@ * **Dashboard**: Fix save provisioned dashboard modal [#15219](https://github.com/grafana/grafana/pull/15219) * **Dashboard**: Fix having a long query in prometheus dashboard query editor blocks 30% of the query field when on OSX and having native scrollbars [#15122](https://github.com/grafana/grafana/issues/15122) * **Explore**: Fix issue with wrapping on long queries [#15222](https://github.com/grafana/grafana/issues/15222) +* **Explore**: Fix cut & paste adds newline before and after selection [#15223](https://github.com/grafana/grafana/issues/15223) # 6.0.0-beta1 (2019-01-30) From 8769b7aa5757881f6960cadc61856bfab071a544 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Mon, 11 Feb 2019 14:16:05 +0100 Subject: [PATCH 33/38] changelog: add notes about closing #15258 --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index d41f3cf271c..6221b7bcc93 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -24,6 +24,7 @@ * **Dashboard**: Fix having a long query in prometheus dashboard query editor blocks 30% of the query field when on OSX and having native scrollbars [#15122](https://github.com/grafana/grafana/issues/15122) * **Explore**: Fix issue with wrapping on long queries [#15222](https://github.com/grafana/grafana/issues/15222) * **Explore**: Fix cut & paste adds newline before and after selection [#15223](https://github.com/grafana/grafana/issues/15223) +* **Dataproxy**: Fix global datasource proxy timeout not added to correct http client [#15258](https://github.com/grafana/grafana/issues/15258) [#5699](https://github.com/grafana/grafana/issues/5699) # 6.0.0-beta1 (2019-01-30) From ac345312a46f86f520af0b5399b08df29685b1d9 Mon Sep 17 00:00:00 2001 From: Daniel Lee Date: Mon, 11 Feb 2019 14:42:12 +0100 Subject: [PATCH 34/38] azuremonitor: don't use make for maps and array --- pkg/tsdb/azuremonitor/azuremonitor-datasource.go | 6 +++--- pkg/tsdb/azuremonitor/azuremonitor.go | 2 +- pkg/tsdb/azuremonitor/time-grain.go | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/pkg/tsdb/azuremonitor/azuremonitor-datasource.go b/pkg/tsdb/azuremonitor/azuremonitor-datasource.go index 5def94aebf6..cae8d8bfb73 100644 --- a/pkg/tsdb/azuremonitor/azuremonitor-datasource.go +++ b/pkg/tsdb/azuremonitor/azuremonitor-datasource.go @@ -41,7 +41,7 @@ var ( // 3. 
parses the responses for each query into the timeseries format func (e *AzureMonitorDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []*tsdb.Query, timeRange *tsdb.TimeRange) (*tsdb.Response, error) { result := &tsdb.Response{ - Results: make(map[string]*tsdb.QueryResult), + Results: map[string]*tsdb.QueryResult{}, } queries, err := e.buildQueries(originalQueries, timeRange) @@ -84,7 +84,7 @@ func (e *AzureMonitorDatasource) buildQueries(queries []*tsdb.Query, timeRange * azureMonitorTarget := query.Model.Get("azureMonitor").MustMap() azlog.Debug("AzureMonitor", "target", azureMonitorTarget) - urlComponents := make(map[string]string) + urlComponents := map[string]string{} urlComponents["resourceGroup"] = fmt.Sprintf("%v", azureMonitorTarget["resourceGroup"]) urlComponents["metricDefinition"] = fmt.Sprintf("%v", azureMonitorTarget["metricDefinition"]) urlComponents["resourceName"] = fmt.Sprintf("%v", azureMonitorTarget["resourceName"]) @@ -247,7 +247,7 @@ func (e *AzureMonitorDatasource) parseResponse(queryRes *tsdb.QueryResult, data } for _, series := range data.Value[0].Timeseries { - points := make([]tsdb.TimePoint, 0) + points := []tsdb.TimePoint{} metadataName := "" metadataValue := "" diff --git a/pkg/tsdb/azuremonitor/azuremonitor.go b/pkg/tsdb/azuremonitor/azuremonitor.go index 32d4a6f0f29..31a42d21a12 100644 --- a/pkg/tsdb/azuremonitor/azuremonitor.go +++ b/pkg/tsdb/azuremonitor/azuremonitor.go @@ -46,7 +46,7 @@ func (e *AzureMonitorExecutor) Query(ctx context.Context, dsInfo *models.DataSou var result *tsdb.Response var err error - azureMonitorQueries := make([]*tsdb.Query, 0) + var azureMonitorQueries []*tsdb.Query for _, query := range tsdbQuery.Queries { queryType := query.Model.Get("queryType").MustString("") diff --git a/pkg/tsdb/azuremonitor/time-grain.go b/pkg/tsdb/azuremonitor/time-grain.go index e6a15aef64f..425e39b6208 100644 --- a/pkg/tsdb/azuremonitor/time-grain.go +++ b/pkg/tsdb/azuremonitor/time-grain.go @@ -34,7 +34,7 @@ func (tg *TimeGrain) createISO8601DurationFromIntervalMS(interval int64) (string unit := formatted[len(formatted)-1:] if unit == "s" && timeValue < 60 { - // mimumum interval is 1m for Azure Monitor + // minimum interval is 1m for Azure Monitor return "PT1M", nil } From 4408817e65cb1d6447928ff935df8f3ab19a1f58 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Torkel=20=C3=96degaard?= Date: Mon, 11 Feb 2019 15:13:20 +0100 Subject: [PATCH 35/38] Fixed double page class on api keys and org details page --- public/app/features/api-keys/ApiKeysPage.tsx | 8 +- .../__snapshots__/ApiKeysPage.test.tsx.snap | 196 +++++++++--------- public/app/features/org/OrgDetailsPage.tsx | 20 +- .../OrgDetailsPage.test.tsx.snap | 28 +-- 4 files changed, 119 insertions(+), 133 deletions(-) diff --git a/public/app/features/api-keys/ApiKeysPage.tsx b/public/app/features/api-keys/ApiKeysPage.tsx index 41b9b0c8a55..f0d6fa8d267 100644 --- a/public/app/features/api-keys/ApiKeysPage.tsx +++ b/public/app/features/api-keys/ApiKeysPage.tsx @@ -107,7 +107,7 @@ export class ApiKeysPage extends PureComponent { renderEmptyList() { const { isAdding } = this.state; return ( -
+ <> {!isAdding && ( { /> )} {this.renderAddApiKeyForm()} -
+ ); } @@ -183,7 +183,7 @@ export class ApiKeysPage extends PureComponent { const { apiKeys, searchQuery } = this.props; return ( -
+ <>
+ ); } diff --git a/public/app/features/api-keys/__snapshots__/ApiKeysPage.test.tsx.snap b/public/app/features/api-keys/__snapshots__/ApiKeysPage.test.tsx.snap index f40894426ae..9a9daab76c3 100644 --- a/public/app/features/api-keys/__snapshots__/ApiKeysPage.test.tsx.snap +++ b/public/app/features/api-keys/__snapshots__/ApiKeysPage.test.tsx.snap @@ -35,118 +35,114 @@ exports[`Render should render CTA if there are no API keys 1`] = ` -
- - + +
-
- -
- Add API Key -
-
+ +
+ Add API Key +
+ +
-
- - Key name - - + +
+
+ + Role + + + - - - - - -
-
- -
+ Viewer + + + + +
- -
- -
+
+ +
+
+ +
+
`; diff --git a/public/app/features/org/OrgDetailsPage.tsx b/public/app/features/org/OrgDetailsPage.tsx index ee644f0006f..236558db40a 100644 --- a/public/app/features/org/OrgDetailsPage.tsx +++ b/public/app/features/org/OrgDetailsPage.tsx @@ -36,18 +36,16 @@ export class OrgDetailsPage extends PureComponent { return ( -
- {!isLoading && ( -
- this.onOrgNameChange(name)} - onSubmit={this.onUpdateOrganization} - orgName={organization.name} - /> - -
- )} + {!isLoading && ( +
+ this.onOrgNameChange(name)} + onSubmit={this.onUpdateOrganization} + orgName={organization.name} + /> +
+ )} ); diff --git a/public/app/features/org/__snapshots__/OrgDetailsPage.test.tsx.snap b/public/app/features/org/__snapshots__/OrgDetailsPage.test.tsx.snap index 9e13a73901e..2339975ca8b 100644 --- a/public/app/features/org/__snapshots__/OrgDetailsPage.test.tsx.snap +++ b/public/app/features/org/__snapshots__/OrgDetailsPage.test.tsx.snap @@ -15,11 +15,7 @@ exports[`Render should render component 1`] = ` > -
- + /> `; @@ -39,19 +35,15 @@ exports[`Render should render organization and preferences 1`] = ` -
-
- - -
+
+ +
From 93f1a48641b9e9219fa5f8869757fcdb4d1187ae Mon Sep 17 00:00:00 2001 From: Daniel Lee Date: Mon, 11 Feb 2019 15:21:02 +0100 Subject: [PATCH 36/38] changelog: adds note for #14623 --- CHANGELOG.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6221b7bcc93..a82fc7050b4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,8 @@ # 6.0.0-beta2 (unreleased) +### New Features +* **AzureMonitor**: Enable alerting by converting Azure Monitor API to Go [#14623](https://github.com/grafana/grafana/issues/14623) + ### Minor * **Alerting**: Adds support for images in pushover notifier [#10780](https://github.com/grafana/grafana/issues/10780), thx [@jpenalbae](https://github.com/jpenalbae) * **Graphite/InfluxDB/OpenTSDB**: Fix always take dashboard timezone into consideration when handle custom time ranges [#15284](https://github.com/grafana/grafana/issues/15284) From c4fa64e6dc082bdb813edb13f34652f4163b9cfe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Torkel=20=C3=96degaard?= Date: Mon, 11 Feb 2019 15:23:17 +0100 Subject: [PATCH 37/38] Updated lint-staged --- package.json | 2 +- yarn.lock | 286 ++++++++++++++++++++++----------------------------- 2 files changed, 122 insertions(+), 166 deletions(-) diff --git a/package.json b/package.json index fae51a1d856..2f44291a86a 100644 --- a/package.json +++ b/package.json @@ -68,7 +68,7 @@ "husky": "^0.14.3", "jest": "^23.6.0", "jest-date-mock": "^1.0.6", - "lint-staged": "^6.0.0", + "lint-staged": "^8.1.3", "load-grunt-tasks": "3.5.2", "mini-css-extract-plugin": "^0.4.0", "mocha": "^4.0.1", diff --git a/yarn.lock b/yarn.lock index df2e1cea37e..2fb4a5d3ee2 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1040,6 +1040,20 @@ resolved "https://registry.yarnpkg.com/@emotion/weak-memoize/-/weak-memoize-0.1.3.tgz#b700d97385fa91affed60c71dfd51c67e9dad762" integrity sha512-QsYGKdhhuDFNq7bjm2r44y0mp5xW3uO3csuTPDWZc0OIiMQv+AIY5Cqwd4mJiC5N8estVl7qlvOx1hbtOuUWbw== +"@iamstarkov/listr-update-renderer@0.4.1": + version "0.4.1" + resolved "https://registry.yarnpkg.com/@iamstarkov/listr-update-renderer/-/listr-update-renderer-0.4.1.tgz#d7c48092a2dcf90fd672b6c8b458649cb350c77e" + integrity sha512-IJyxQWsYDEkf8C8QthBn5N8tIUR9V9je6j3sMIpAkonaadjbvxmRC6RAhpa3RKxndhNnU2M6iNbtJwd7usQYIA== + dependencies: + chalk "^1.1.3" + cli-truncate "^0.2.1" + elegant-spinner "^1.0.1" + figures "^1.7.0" + indent-string "^3.0.0" + log-symbols "^1.0.2" + log-update "^2.3.0" + strip-ansi "^3.0.1" + "@icons/material@^0.2.4": version "0.2.4" resolved "https://registry.yarnpkg.com/@icons/material/-/material-0.2.4.tgz#e90c9f71768b3736e76d7dd6783fc6c2afa88bc8" @@ -2468,7 +2482,7 @@ ansi-colors@^3.0.0: resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-3.2.3.tgz#57d35b8686e851e2cc04c403f1c00203976a1813" integrity sha512-LEHHyuhlPY3TmuUYMh2oz89lTShfvgbmzaBcxve9t/9Wuy7Dwf4yoAKcND7KFT1HAQfqZ12qtc+DUrBMeKF9nw== -ansi-escapes@^1.0.0, ansi-escapes@^1.1.0: +ansi-escapes@^1.1.0: version "1.4.0" resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-1.4.0.tgz#d3a8a83b319aa67793662b13e761c7911422306e" integrity sha1-06ioOzGapneTZisT52HHkRQiMG4= @@ -2525,11 +2539,6 @@ ansistyles@~0.1.3: resolved "https://registry.yarnpkg.com/ansistyles/-/ansistyles-0.1.3.tgz#5de60415bda071bb37127854c864f41b23254539" integrity sha1-XeYEFb2gcbs3EnhUyGT0GyMlRTk= -any-observable@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/any-observable/-/any-observable-0.2.0.tgz#c67870058003579009083f54ac0abafb5c33d242" - integrity sha1-xnhwBYADV5AJCD9UrAq6+1wz0kI= - 
any-observable@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/any-observable/-/any-observable-0.3.0.tgz#af933475e5806a67d0d7df090dd5e8bef65d119b" @@ -2548,11 +2557,6 @@ app-root-dir@^1.0.2: resolved "https://registry.yarnpkg.com/app-root-dir/-/app-root-dir-1.0.2.tgz#38187ec2dea7577fff033ffcb12172692ff6e118" integrity sha1-OBh+wt6nV3//Az/8sSFyaS/24Rg= -app-root-path@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/app-root-path/-/app-root-path-2.1.0.tgz#98bf6599327ecea199309866e8140368fd2e646a" - integrity sha1-mL9lmTJ+zqGZMJhm6BQDaP0uZGo= - append-transform@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/append-transform/-/append-transform-0.4.0.tgz#d76ebf8ca94d276e247a36bad44a4b74ab611991" @@ -4588,7 +4592,7 @@ chalk@^1.0.0, chalk@^1.1.1, chalk@^1.1.3, chalk@~1.1.1: strip-ansi "^3.0.0" supports-color "^2.0.0" -chalk@^2.0.0, chalk@^2.0.1, chalk@^2.1.0, chalk@^2.3.0, chalk@^2.3.2, chalk@^2.4.1, chalk@^2.4.2: +chalk@^2.0.0, chalk@^2.0.1, chalk@^2.1.0, chalk@^2.3.0, chalk@^2.3.1, chalk@^2.3.2, chalk@^2.4.1, chalk@^2.4.2: version "2.4.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== @@ -4783,7 +4787,7 @@ cli-columns@^3.1.2: string-width "^2.0.0" strip-ansi "^3.0.1" -cli-cursor@^1.0.1, cli-cursor@^1.0.2: +cli-cursor@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-1.0.2.tgz#64da3f7d56a54412e59794bd62dc35295e8f2987" integrity sha1-ZNo/fValRBLll5S9Ytw1KV6PKYc= @@ -4797,11 +4801,6 @@ cli-cursor@^2.0.0, cli-cursor@^2.1.0: dependencies: restore-cursor "^2.0.0" -cli-spinners@^0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-0.1.2.tgz#bb764d88e185fb9e1e6a2a1f19772318f605e31c" - integrity sha1-u3ZNiOGF+54eaiofGXcjGPYF4xw= - cli-table2@~0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/cli-table2/-/cli-table2-0.2.0.tgz#2d1ef7f218a0e786e214540562d4bd177fe32d97" @@ -5075,7 +5074,7 @@ comma-separated-tokens@^1.0.0: dependencies: trim "0.0.1" -commander@2, commander@^2.11.0, commander@^2.12.1, commander@^2.13.0, commander@^2.19.0, commander@^2.8.1, commander@^2.9.0: +commander@2, commander@^2.12.1, commander@^2.13.0, commander@^2.14.1, commander@^2.19.0, commander@^2.8.1, commander@^2.9.0: version "2.19.0" resolved "https://registry.yarnpkg.com/commander/-/commander-2.19.0.tgz#f6198aa84e5b83c46054b94ddedbfed5ee9ff12a" integrity sha512-6tvAOO+D6OENvRAh524Dh9jcfKTYDQAqvqezbCW82xj5X0pSrcpxtvRKHLG0yBY6SD7PSDrJaj+0AiOcKVd1Xg== @@ -5312,7 +5311,7 @@ cosmiconfig@^4.0.0: parse-json "^4.0.0" require-from-string "^2.0.1" -cosmiconfig@^5.0.5, cosmiconfig@^5.0.7: +cosmiconfig@^5.0.2, cosmiconfig@^5.0.5, cosmiconfig@^5.0.7: version "5.0.7" resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-5.0.7.tgz#39826b292ee0d78eda137dfa3173bd1c21a43b04" integrity sha512-PcLqxTKiDmNT6pSpy4N6KtuPwb53W+2tzNvwOZw0WH9N6O0vLIBq0x8aj8Oj75ere4YcGi48bDFCL+3fRJdlNA== @@ -6085,7 +6084,7 @@ debug@^3.1.0, debug@^3.2.5: dependencies: ms "^2.1.1" -debug@^4.1.0: +debug@^4.0.1, debug@^4.1.0: version "4.1.1" resolved "https://registry.yarnpkg.com/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791" integrity sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw== @@ -6915,7 +6914,7 @@ escape-html@^1.0.3, escape-html@~1.0.3: resolved 
"https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" integrity sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg= -escape-string-regexp@1.0.5, escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: +escape-string-regexp@1.0.5, escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.4, escape-string-regexp@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= @@ -7129,19 +7128,6 @@ execa@^0.7.0: signal-exit "^3.0.0" strip-eof "^1.0.0" -execa@^0.8.0: - version "0.8.0" - resolved "https://registry.yarnpkg.com/execa/-/execa-0.8.0.tgz#d8d76bbc1b55217ed190fd6dd49d3c774ecfc8da" - integrity sha1-2NdrvBtVIX7RkP1t1J08d07PyNo= - dependencies: - cross-spawn "^5.0.1" - get-stream "^3.0.0" - is-stream "^1.1.0" - npm-run-path "^2.0.0" - p-finally "^1.0.0" - signal-exit "^3.0.0" - strip-eof "^1.0.0" - execa@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8" @@ -7631,6 +7617,11 @@ flush-write-stream@^1.0.0: inherits "^2.0.1" readable-stream "^2.0.4" +fn-name@~2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/fn-name/-/fn-name-2.0.1.tgz#5214d7537a4d06a4a301c0cc262feb84188002e7" + integrity sha1-UhTXU3pNBqSjAcDMJi/rhBiAAuc= + follow-redirects@^1.0.0, follow-redirects@^1.2.5: version "1.6.1" resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.6.1.tgz#514973c44b5757368bad8bddfe52f81f015c94cb" @@ -7848,6 +7839,15 @@ fuse.js@^3.0.1, fuse.js@^3.3.0: resolved "https://registry.yarnpkg.com/fuse.js/-/fuse.js-3.3.0.tgz#1e4fe172a60687230fb54a5cb247eb96e2e7e885" integrity sha512-ESBRkGLWMuVkapqYCcNO1uqMg5qbCKkgb+VS6wsy17Rix0/cMS9kSOZoYkjH8Ko//pgJ/EEGu0GTjk2mjX2LGQ== +g-status@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/g-status/-/g-status-2.0.2.tgz#270fd32119e8fc9496f066fe5fe88e0a6bc78b97" + integrity sha512-kQoE9qH+T1AHKgSSD0Hkv98bobE90ILQcXAF4wvGgsr7uFqNvwmh8j+Lq3l0RVt3E3HjSbv2B9biEGcEtpHLCA== + dependencies: + arrify "^1.0.1" + matcher "^1.0.0" + simple-git "^1.85.0" + gauge@~2.7.3: version "2.7.4" resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7" @@ -9497,13 +9497,6 @@ is-object@^1.0.1: resolved "https://registry.yarnpkg.com/is-object/-/is-object-1.0.1.tgz#8952688c5ec2ffd6b03ecc85e769e02903083470" integrity sha1-iVJojF7C/9awPsyF52ngKQMINHA= -is-observable@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/is-observable/-/is-observable-0.2.0.tgz#b361311d83c6e5d726cabf5e250b0237106f5ae2" - integrity sha1-s2ExHYPG5dcmyr9eJQsCNxBvWuI= - dependencies: - symbol-observable "^0.2.2" - is-observable@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-observable/-/is-observable-1.1.0.tgz#b3e986c8f44de950867cab5403f5a3465005975e" @@ -9917,11 +9910,6 @@ jest-environment-node@^23.4.0: jest-mock "^23.2.0" jest-util "^23.4.0" -jest-get-type@^21.2.0: - version "21.2.0" - resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-21.2.0.tgz#f6376ab9db4b60d81e39f30749c6c466f40d4a23" - integrity sha512-y2fFw3C+D0yjNSDp7ab1kcd6NUYfy3waPTlD8yWkAtiocJdBRQqNoRqVfMNxgj+IjT0V5cBIHJO0z9vuSSZ43Q== - jest-get-type@^22.1.0: version "22.4.3" resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-22.4.3.tgz#e3a8504d8479342dd4420236b322869f18900ce4" @@ -10094,16 +10082,6 @@ jest-util@^23.4.0: slash "^1.0.0" 
source-map "^0.6.0" -jest-validate@^21.1.0: - version "21.2.1" - resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-21.2.1.tgz#cc0cbca653cd54937ba4f2a111796774530dd3c7" - integrity sha512-k4HLI1rZQjlU+EC682RlQ6oZvLrE5SCh3brseQc24vbZTxzT/k/3urar5QMCVgjadmSO7lECeGdc6YxnM3yEGg== - dependencies: - chalk "^2.0.1" - jest-get-type "^21.2.0" - leven "^2.1.0" - pretty-format "^21.2.1" - jest-validate@^23.6.0: version "23.6.0" resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-23.6.0.tgz#36761f99d1ed33fcd425b4e4c5595d62b6597474" @@ -10566,51 +10544,42 @@ libnpx@^10.2.0: y18n "^4.0.0" yargs "^11.0.0" -lint-staged@^6.0.0: - version "6.1.1" - resolved "https://registry.yarnpkg.com/lint-staged/-/lint-staged-6.1.1.tgz#cd08c4d9b8ccc2d37198d1c47ce77d22be6cf324" - integrity sha512-M/7bwLdXbeG7ZNLcasGeLMBDg60/w6obj3KOtINwJyxAxb53XGY0yH5FSZlWklEzuVbTtqtIfAajh6jYIN90AA== +lint-staged@^8.1.3: + version "8.1.3" + resolved "https://registry.yarnpkg.com/lint-staged/-/lint-staged-8.1.3.tgz#bb069db5466c0fe16710216e633a84f2b362fa60" + integrity sha512-6TGkikL1B+6mIOuSNq2TV6oP21IhPMnV8q0cf9oYZ296ArTVNcbFh1l1pfVOHHbBIYLlziWNsQ2q45/ffmJ4AA== dependencies: - app-root-path "^2.0.0" - chalk "^2.1.0" - commander "^2.11.0" - cosmiconfig "^4.0.0" + "@iamstarkov/listr-update-renderer" "0.4.1" + chalk "^2.3.1" + commander "^2.14.1" + cosmiconfig "^5.0.2" debug "^3.1.0" dedent "^0.7.0" - execa "^0.8.0" + del "^3.0.0" + execa "^1.0.0" find-parent-dir "^0.3.0" + g-status "^2.0.2" is-glob "^4.0.0" - jest-validate "^21.1.0" - listr "^0.13.0" - lodash "^4.17.4" - log-symbols "^2.0.0" - minimatch "^3.0.0" + is-windows "^1.0.2" + listr "^0.14.2" + lodash "^4.17.5" + log-symbols "^2.2.0" + micromatch "^3.1.8" npm-which "^3.0.1" p-map "^1.1.1" path-is-inside "^1.0.2" pify "^3.0.0" - staged-git-files "1.0.0" - stringify-object "^3.2.0" + please-upgrade-node "^3.0.2" + staged-git-files "1.1.2" + string-argv "^0.0.2" + stringify-object "^3.2.2" + yup "^0.26.10" listr-silent-renderer@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/listr-silent-renderer/-/listr-silent-renderer-1.1.1.tgz#924b5a3757153770bf1a8e3fbf74b8bbf3f9242e" integrity sha1-kktaN1cVN3C/Go4/v3S4u/P5JC4= -listr-update-renderer@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/listr-update-renderer/-/listr-update-renderer-0.4.0.tgz#344d980da2ca2e8b145ba305908f32ae3f4cc8a7" - integrity sha1-NE2YDaLKLosUW6MFkI8yrj9MyKc= - dependencies: - chalk "^1.1.3" - cli-truncate "^0.2.1" - elegant-spinner "^1.0.1" - figures "^1.7.0" - indent-string "^3.0.0" - log-symbols "^1.0.2" - log-update "^1.0.2" - strip-ansi "^3.0.1" - listr-update-renderer@^0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/listr-update-renderer/-/listr-update-renderer-0.5.0.tgz#4ea8368548a7b8aecb7e06d8c95cb45ae2ede6a2" @@ -10625,16 +10594,6 @@ listr-update-renderer@^0.5.0: log-update "^2.3.0" strip-ansi "^3.0.1" -listr-verbose-renderer@^0.4.0: - version "0.4.1" - resolved "https://registry.yarnpkg.com/listr-verbose-renderer/-/listr-verbose-renderer-0.4.1.tgz#8206f4cf6d52ddc5827e5fd14989e0e965933a35" - integrity sha1-ggb0z21S3cWCfl/RSYng6WWTOjU= - dependencies: - chalk "^1.1.3" - cli-cursor "^1.0.2" - date-fns "^1.27.2" - figures "^1.7.0" - listr-verbose-renderer@^0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/listr-verbose-renderer/-/listr-verbose-renderer-0.5.0.tgz#f1132167535ea4c1261102b9f28dac7cba1e03db" @@ -10645,30 +10604,7 @@ listr-verbose-renderer@^0.5.0: date-fns "^1.27.2" figures "^2.0.0" -listr@^0.13.0: - version 
"0.13.0" - resolved "https://registry.yarnpkg.com/listr/-/listr-0.13.0.tgz#20bb0ba30bae660ee84cc0503df4be3d5623887d" - integrity sha1-ILsLowuuZg7oTMBQPfS+PVYjiH0= - dependencies: - chalk "^1.1.3" - cli-truncate "^0.2.1" - figures "^1.7.0" - indent-string "^2.1.0" - is-observable "^0.2.0" - is-promise "^2.1.0" - is-stream "^1.1.0" - listr-silent-renderer "^1.1.1" - listr-update-renderer "^0.4.0" - listr-verbose-renderer "^0.4.0" - log-symbols "^1.0.2" - log-update "^1.0.2" - ora "^0.2.3" - p-map "^1.1.1" - rxjs "^5.4.2" - stream-to-observable "^0.2.0" - strip-ansi "^3.0.1" - -listr@^0.14.1: +listr@^0.14.1, listr@^0.14.2: version "0.14.3" resolved "https://registry.yarnpkg.com/listr/-/listr-0.14.3.tgz#2fea909604e434be464c50bddba0d496928fa586" integrity sha512-RmAl7su35BFd/xoMamRjpIE4j3v+L28o8CT5YhAXQJm1fD+1l9ngXY8JAQRJ+tFK2i5njvi0iRUKV09vPwA0iA== @@ -10949,14 +10885,6 @@ log-symbols@^2.0.0, log-symbols@^2.1.0, log-symbols@^2.2.0: dependencies: chalk "^2.0.1" -log-update@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/log-update/-/log-update-1.0.2.tgz#19929f64c4093d2d2e7075a1dad8af59c296b8d1" - integrity sha1-GZKfZMQJPS0ucHWh2tivWcKWuNE= - dependencies: - ansi-escapes "^1.0.0" - cli-cursor "^1.0.2" - log-update@^2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/log-update/-/log-update-2.3.0.tgz#88328fd7d1ce7938b29283746f0b1bc126b24708" @@ -11154,6 +11082,13 @@ marksy@^6.1.0: he "^1.1.1" marked "^0.3.12" +matcher@^1.0.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/matcher/-/matcher-1.1.1.tgz#51d8301e138f840982b338b116bb0c09af62c1c2" + integrity sha512-+BmqxWIubKTRKNWx/ahnCkk3mG8m7OturVlqq6HiojGJTd5hVYbgZm6WzcYPCoB+KBT4Vd6R7WSRG2OADNaCjg== + dependencies: + escape-string-regexp "^1.0.4" + material-colors@^1.2.1: version "1.2.6" resolved "https://registry.yarnpkg.com/material-colors/-/material-colors-1.2.6.tgz#6d1958871126992ceecc72f4bcc4d8f010865f46" @@ -12512,16 +12447,6 @@ optionator@^0.8.1: type-check "~0.3.2" wordwrap "~1.0.0" -ora@^0.2.3: - version "0.2.3" - resolved "https://registry.yarnpkg.com/ora/-/ora-0.2.3.tgz#37527d220adcd53c39b73571d754156d5db657a4" - integrity sha1-N1J9Igrc1Tw5tzVx11QVbV22V6Q= - dependencies: - chalk "^1.1.1" - cli-cursor "^1.0.2" - cli-spinners "^0.1.2" - object-assign "^4.0.1" - ordered-ast-traverse@~1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/ordered-ast-traverse/-/ordered-ast-traverse-1.1.1.tgz#6843a170bc0eee8b520cc8ddc1ddd3aa30fa057c" @@ -13023,6 +12948,13 @@ pkg-up@^1.0.0: dependencies: find-up "^1.0.0" +please-upgrade-node@^3.0.2: + version "3.1.1" + resolved "https://registry.yarnpkg.com/please-upgrade-node/-/please-upgrade-node-3.1.1.tgz#ed320051dfcc5024fae696712c8288993595e8ac" + integrity sha512-KY1uHnQ2NlQHqIJQpnh/i54rKkuxCEBx+voJIS/Mvb+L2iYd2NMotwduhKTMjfC1uKoX3VXOxLjIYG66dfJTVQ== + dependencies: + semver-compare "^1.0.0" + pluralize@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/pluralize/-/pluralize-1.2.1.tgz#d1a21483fd22bb41e58a12fa3421823140897c45" @@ -13560,14 +13492,6 @@ pretty-error@^2.0.2, pretty-error@^2.1.1: renderkid "^2.0.1" utila "~0.4" -pretty-format@^21.2.1: - version "21.2.1" - resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-21.2.1.tgz#ae5407f3cf21066cd011aa1ba5fce7b6a2eddb36" - integrity sha512-ZdWPGYAnYfcVP8yKA3zFjCn8s4/17TeYH28MXuC8vTp0o21eXjbFGcOAXZEaDaOFJjc3h2qa7HQNHNshhvoh2A== - dependencies: - ansi-regex "^3.0.0" - ansi-styles "^3.2.0" - pretty-format@^23.6.0: version "23.6.0" resolved 
"https://registry.yarnpkg.com/pretty-format/-/pretty-format-23.6.0.tgz#5eaac8eeb6b33b987b7fe6097ea6a8a146ab5760" @@ -13670,6 +13594,11 @@ prop-types@15.x, prop-types@^15.5.10, prop-types@^15.5.4, prop-types@^15.5.8, pr loose-envify "^1.3.1" object-assign "^4.1.1" +property-expr@^1.5.0: + version "1.5.1" + resolved "https://registry.yarnpkg.com/property-expr/-/property-expr-1.5.1.tgz#22e8706894a0c8e28d58735804f6ba3a3673314f" + integrity sha512-CGuc0VUTGthpJXL36ydB6jnbyOf/rAHFvmVrJlH+Rg0DqqLFQGAP6hIaxD/G0OAmBJPhXDHuEJigrp0e0wFV6g== + property-information@^5.0.0, property-information@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/property-information/-/property-information-5.0.1.tgz#c3b09f4f5750b1634c0b24205adbf78f18bdf94f" @@ -15078,7 +15007,7 @@ rx-lite@^3.1.2: resolved "https://registry.yarnpkg.com/rx-lite/-/rx-lite-3.1.2.tgz#19ce502ca572665f3b647b10939f97fd1615f102" integrity sha1-Gc5QLKVyZl87ZHsQk5+X/RYV8QI= -rxjs@^5.4.2, rxjs@^5.5.2: +rxjs@^5.5.2: version "5.5.12" resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-5.5.12.tgz#6fa61b8a77c3d793dbaf270bee2f43f652d741cc" integrity sha512-xx2itnL5sBbqeeiVgNPVuQQ1nC8Jp2WfNJhXWHmElW9YmrpS9UVnNzhP3EH3HFqexO5Tlp8GhYY+WEcqcVMvGw== @@ -15247,6 +15176,11 @@ selfsigned@^1.9.1: dependencies: node-forge "0.7.5" +semver-compare@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/semver-compare/-/semver-compare-1.0.0.tgz#0dee216a1c941ab37e9efb1788f6afc5ff5537fc" + integrity sha1-De4hahyUGrN+nvsXiPavxf9VN/w= + semver-diff@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/semver-diff/-/semver-diff-2.1.0.tgz#4bbb8437c8d37e4b0cf1a68fd726ec6d645d6d36" @@ -15482,6 +15416,13 @@ simple-get@^2.7.0: once "^1.3.1" simple-concat "^1.0.0" +simple-git@^1.85.0: + version "1.107.0" + resolved "https://registry.yarnpkg.com/simple-git/-/simple-git-1.107.0.tgz#12cffaf261c14d6f450f7fdb86c21ccee968b383" + integrity sha512-t4OK1JRlp4ayKRfcW6owrWcRVLyHRUlhGd0uN6ZZTqfDq8a5XpcUdOKiGRNobHEuMtNqzp0vcJNvhYWwh5PsQA== + dependencies: + debug "^4.0.1" + simple-is@~0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/simple-is/-/simple-is-0.2.0.tgz#2abb75aade39deb5cc815ce10e6191164850baf0" @@ -15929,10 +15870,10 @@ stack-utils@^1.0.1: resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-1.0.2.tgz#33eba3897788558bebfc2db059dc158ec36cebb8" integrity sha512-MTX+MeG5U994cazkjd/9KNAapsHnibjMLnfXodlkXw76JEea0UiNzrqidzo1emMwk7w5Qhc9jd4Bn9TBb1MFwA== -staged-git-files@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/staged-git-files/-/staged-git-files-1.0.0.tgz#cdb847837c1fcc52c08a872d4883cc0877668a80" - integrity sha1-zbhHg3wfzFLAioctSIPMCHdmioA= +staged-git-files@1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/staged-git-files/-/staged-git-files-1.1.2.tgz#4326d33886dc9ecfa29a6193bf511ba90a46454b" + integrity sha512-0Eyrk6uXW6tg9PYkhi/V/J4zHp33aNyi2hOCmhFLqLTIhbgqWn5jlSzI+IU0VqrZq6+DbHcabQl/WP6P3BG0QA== static-extend@^0.1.1: version "0.1.2" @@ -16009,13 +15950,6 @@ stream-shift@^1.0.0: resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.0.tgz#d5c752825e5367e786f78e18e445ea223a155952" integrity sha1-1cdSgl5TZ+eG944Y5EXqIjoVWVI= -stream-to-observable@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/stream-to-observable/-/stream-to-observable-0.2.0.tgz#59d6ea393d87c2c0ddac10aa0d561bc6ba6f0e10" - integrity sha1-WdbqOT2HwsDdrBCqDVYbxrpvDhA= - dependencies: - any-observable "^0.2.0" - strict-uri-encode@^1.0.0: version "1.1.0" resolved 
"https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz#279b225df1d582b1f54e65addd4352e18faa0713" @@ -16026,6 +15960,11 @@ strict-uri-encode@^2.0.0: resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-2.0.0.tgz#b9c7330c7042862f6b142dc274bbcc5866ce3546" integrity sha1-ucczDHBChi9rFC3CdLvMWGbONUY= +string-argv@^0.0.2: + version "0.0.2" + resolved "https://registry.yarnpkg.com/string-argv/-/string-argv-0.0.2.tgz#dac30408690c21f3c3630a3ff3a05877bdcbd736" + integrity sha1-2sMECGkMIfPDYwo/86BYd73L1zY= + string-length@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/string-length/-/string-length-2.0.0.tgz#d40dbb686a3ace960c1cffca562bf2c45f8363ed" @@ -16131,7 +16070,7 @@ stringifier@^1.3.0: traverse "^0.6.6" type-name "^2.0.1" -stringify-object@^3.2.0: +stringify-object@^3.2.2: version "3.3.0" resolved "https://registry.yarnpkg.com/stringify-object/-/stringify-object-3.3.0.tgz#703065aefca19300d3ce88af4f5b3956d7556629" integrity sha512-rHqiFh1elqCQ9WPLIC8I0Q/g/wj5J1eMkyoiD6eoQApWHP0FtlK7rqnhmabL5VUY9JQCcqwwvlOaSuutekgyrw== @@ -16336,11 +16275,6 @@ symbol-observable@1.0.1: resolved "https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-1.0.1.tgz#8340fc4702c3122df5d22288f88283f513d3fdd4" integrity sha1-g0D8RwLDEi310iKI+IKD9RPT/dQ= -symbol-observable@^0.2.2: - version "0.2.4" - resolved "https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-0.2.4.tgz#95a83db26186d6af7e7a18dbd9760a2f86d08f40" - integrity sha1-lag9smGG1q9+ehjb2XYKL4bQj0A= - symbol-observable@^1.1.0, symbol-observable@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-1.2.0.tgz#c22688aed4eab3cdc2dfeacbb561660560a00804" @@ -16358,6 +16292,11 @@ symbol.prototype.description@^1.0.0: dependencies: has-symbols "^1.0.0" +synchronous-promise@^2.0.5: + version "2.0.6" + resolved "https://registry.yarnpkg.com/synchronous-promise/-/synchronous-promise-2.0.6.tgz#de76e0ea2b3558c1e673942e47e714a930fa64aa" + integrity sha512-TyOuWLwkmtPL49LHCX1caIwHjRzcVd62+GF6h8W/jHOeZUFHpnd2XJDVuUlaTaLPH1nuu2M69mfHr5XbQJnf/g== + systemjs-plugin-css@^0.1.36: version "0.1.37" resolved "https://registry.yarnpkg.com/systemjs-plugin-css/-/systemjs-plugin-css-0.1.37.tgz#684847252ca69b7da24a1201094c86274324e82f" @@ -16649,6 +16588,11 @@ toposort@^1.0.0: resolved "https://registry.yarnpkg.com/toposort/-/toposort-1.0.7.tgz#2e68442d9f64ec720b8cc89e6443ac6caa950029" integrity sha1-LmhELZ9k7HILjMieZEOsbKqVACk= +toposort@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/toposort/-/toposort-2.0.2.tgz#ae21768175d1559d48bef35420b2f4962f09c330" + integrity sha1-riF2gXXRVZ1IvvNUILL0li8JwzA= + touch@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/touch/-/touch-2.0.2.tgz#ca0b2a3ae3211246a61b16ba9e6cbf1596287164" @@ -18075,6 +18019,18 @@ yeoman-generator@^2.0.5: through2 "^2.0.0" yeoman-environment "^2.0.5" +yup@^0.26.10: + version "0.26.10" + resolved "https://registry.yarnpkg.com/yup/-/yup-0.26.10.tgz#3545839663289038faf25facfc07e11fd67c0cb1" + integrity sha512-keuNEbNSnsOTOuGCt3UJW69jDE3O4P+UHAakO7vSeFMnjaitcmlbij/a3oNb9g1Y1KvSKH/7O1R2PQ4m4TRylw== + dependencies: + "@babel/runtime" "7.0.0" + fn-name "~2.0.1" + lodash "^4.17.10" + property-expr "^1.5.0" + synchronous-promise "^2.0.5" + toposort "^2.0.2" + zip-stream@^1.1.0: version "1.2.0" resolved "https://registry.yarnpkg.com/zip-stream/-/zip-stream-1.2.0.tgz#a8bc45f4c1b49699c6b90198baacaacdbcd4ba04" From b93cdf56fb7d5828900ad60f5f2fc42e420adf00 Mon Sep 17 00:00:00 
2001 From: =?UTF-8?q?Torkel=20=C3=96degaard?= Date: Mon, 11 Feb 2019 16:26:02 +0100 Subject: [PATCH 38/38] Removed double page container --- public/app/features/teams/TeamList.tsx | 4 +- public/app/features/teams/TeamPages.tsx | 2 +- .../__snapshots__/TeamList.test.tsx.snap | 582 +++++++++--------- .../__snapshots__/TeamPages.test.tsx.snap | 22 +- 4 files changed, 297 insertions(+), 313 deletions(-) diff --git a/public/app/features/teams/TeamList.tsx b/public/app/features/teams/TeamList.tsx index efd279184d4..2e399b34860 100644 --- a/public/app/features/teams/TeamList.tsx +++ b/public/app/features/teams/TeamList.tsx @@ -86,7 +86,7 @@ export class TeamList extends PureComponent { const { teams, searchQuery } = this.props; return ( -
+ <>
-
+ ); } diff --git a/public/app/features/teams/TeamPages.tsx b/public/app/features/teams/TeamPages.tsx index ebbde595601..7a38197ff71 100644 --- a/public/app/features/teams/TeamPages.tsx +++ b/public/app/features/teams/TeamPages.tsx @@ -84,7 +84,7 @@ export class TeamPages extends PureComponent { return ( - {team && Object.keys(team).length !== 0 &&
{this.renderPage()}
} + {team && Object.keys(team).length !== 0 && this.renderPage()}
); diff --git a/public/app/features/teams/__snapshots__/TeamList.test.tsx.snap b/public/app/features/teams/__snapshots__/TeamList.test.tsx.snap index 812fe05c424..5d969cd9d83 100644 --- a/public/app/features/teams/__snapshots__/TeamList.test.tsx.snap +++ b/public/app/features/teams/__snapshots__/TeamList.test.tsx.snap @@ -36,320 +36,316 @@ exports[`Render should render teams table 1`] = ` isLoading={false} >
-
- -
-
+ - - - - - - - + + + + + + + + + + + + + + + + + + + +
- - Name - - Email - - Members - + +
+ + + + + + + + + + + + + + + - - - + + + - - - - - - + + - - - - - - + + - - - - - - + + - - - - - - + + + + + - - - - - - -
+ + Name + + Email + + Members + +
+ + + + + + test-1 + + + + test-1@test.com + + + + 1 + + + -
- - - - - - - test-1 - - - - test-1@test.com - - - - 1 - - - -
- - - - - - - test-2 - - - - test-2@test.com - - - - 2 - - - -
- - - - - - - test-3 - - - - test-3@test.com - - - - 3 - - - -
- - - - - - - test-4 - - - - test-4@test.com - - - - 4 - - - -
- +
+ - - - - - - test-5 - - - - test-5@test.com - - - - 5 - - - -
-
+ + +
+ + test-3 + + + + test-3@test.com + + + + 3 + + + +
+ + + + + + test-4 + + + + test-4@test.com + + + + 4 + + + +
+ + + + + + test-5 + + + + test-5@test.com + + + + 5 + + + +
diff --git a/public/app/features/teams/__snapshots__/TeamPages.test.tsx.snap b/public/app/features/teams/__snapshots__/TeamPages.test.tsx.snap index 0c09eb3f82d..70f37cea4c5 100644 --- a/public/app/features/teams/__snapshots__/TeamPages.test.tsx.snap +++ b/public/app/features/teams/__snapshots__/TeamPages.test.tsx.snap @@ -17,11 +17,7 @@ exports[`Render should render group sync page 1`] = ` -
- -
+
`; @@ -33,13 +29,9 @@ exports[`Render should render member page if team not empty 1`] = ` -
- -
+
`; @@ -51,11 +43,7 @@ exports[`Render should render settings and preferences page 1`] = ` -
- -
+
`;