Merge pull request #11348 from grafana/11306-sql-table-time

Postgres, MySQL, MSSQL minor refactoring/unification
This commit is contained in:
Daniel Lee
2018-03-23 14:23:31 +01:00
committed by GitHub
24 changed files with 6891 additions and 638 deletions

View File

@@ -53,7 +53,7 @@
"iconColor": "#6ed0e0",
"limit": 100,
"name": "Deploys",
"rawQuery": "SELECT\n time_sec as time,\n description as [text],\n tags\n FROM [event]\n WHERE $__unixEpochFilter(time_sec) AND tags='deploy'\n ORDER BY 1 ASC\n ",
"rawQuery": "SELECT\n $__time(time_sec),\n description as [text],\n tags\n FROM [event]\n WHERE $__unixEpochFilter(time_sec) AND tags='deploy'\n ORDER BY 1 ASC\n ",
"showIn": 0,
"tags": [],
"type": "tags"
@@ -65,7 +65,7 @@
"iconColor": "rgba(255, 96, 96, 1)",
"limit": 100,
"name": "Tickets",
"rawQuery": "SELECT\n time_sec as time,\n description as [text],\n tags\n FROM [event]\n WHERE $__unixEpochFilter(time_sec) AND tags='ticket'\n ORDER BY 1 ASC\n ",
"rawQuery": "SELECT\n $__time(time_sec),\n description as [text],\n tags\n FROM [event]\n WHERE $__unixEpochFilter(time_sec) AND tags='ticket'\n ORDER BY 1 ASC\n ",
"showIn": 0,
"tags": [],
"type": "tags"
@@ -76,8 +76,20 @@
"hide": false,
"iconColor": "#7eb26d",
"limit": 100,
"name": "Metric Values",
"rawQuery": "SELECT \n time, \n measurement as text, \n '' as tags\nFROM\n metric_values \nORDER BY 1",
"name": "Metric Values timeEpoch macro",
"rawQuery": "SELECT \n $__timeEpoch(time), \n measurement as text, \n '' as tags\nFROM\n metric_values \nWHERE\n $__timeFilter(time)\nORDER BY 1",
"showIn": 0,
"tags": [],
"type": "tags"
},
{
"datasource": "${DS_MSSQL_TEST}",
"enable": false,
"hide": false,
"iconColor": "#1f78c1",
"limit": 100,
"name": "Metric Values native time",
"rawQuery": "SELECT \n time, \n measurement as text, \n '' as tags\nFROM\n metric_values \nWHERE\n $__timeFilter(time)\nORDER BY 1",
"showIn": 0,
"tags": [],
"type": "tags"
@@ -88,7 +100,7 @@
"gnetId": null,
"graphTooltip": 0,
"id": null,
"iteration": 1521481503341,
"iteration": 1521715844826,
"links": [],
"panels": [
{
@@ -138,6 +150,222 @@
"transform": "table",
"type": "table"
},
{
"columns": [],
"datasource": "${DS_MSSQL_TEST}",
"fontSize": "100%",
"gridPos": {
"h": 3,
"w": 6,
"x": 0,
"y": 4
},
"id": 32,
"links": [],
"pageSize": null,
"scroll": true,
"showHeader": true,
"sort": {
"col": 0,
"desc": true
},
"styles": [
{
"alias": "Time",
"dateFormat": "YYYY-MM-DD HH:mm:ss",
"pattern": "time",
"type": "date"
},
{
"alias": "",
"colorMode": null,
"colors": [
"rgba(245, 54, 54, 0.9)",
"rgba(237, 129, 40, 0.89)",
"rgba(50, 172, 45, 0.97)"
],
"decimals": 2,
"pattern": "/.*/",
"thresholds": [],
"type": "number",
"unit": "short"
}
],
"targets": [
{
"alias": "",
"format": "table",
"rawSql": "SELECT cast(null as bigint) as time",
"refId": "A",
"target": ""
}
],
"title": "cast(null as bigint) as time",
"transform": "table",
"type": "table"
},
{
"columns": [],
"datasource": "${DS_MSSQL_TEST}",
"fontSize": "100%",
"gridPos": {
"h": 3,
"w": 6,
"x": 6,
"y": 4
},
"id": 33,
"links": [],
"pageSize": null,
"scroll": true,
"showHeader": true,
"sort": {
"col": 0,
"desc": true
},
"styles": [
{
"alias": "Time",
"dateFormat": "YYYY-MM-DD HH:mm:ss",
"pattern": "time",
"type": "date"
},
{
"alias": "",
"colorMode": null,
"colors": [
"rgba(245, 54, 54, 0.9)",
"rgba(237, 129, 40, 0.89)",
"rgba(50, 172, 45, 0.97)"
],
"decimals": 2,
"pattern": "/.*/",
"thresholds": [],
"type": "number",
"unit": "short"
}
],
"targets": [
{
"alias": "",
"format": "table",
"rawSql": "SELECT cast(null as datetime) as time",
"refId": "A",
"target": ""
}
],
"title": "cast(null as datetime) as time",
"transform": "table",
"type": "table"
},
{
"columns": [],
"datasource": "${DS_MSSQL_TEST}",
"fontSize": "100%",
"gridPos": {
"h": 3,
"w": 6,
"x": 12,
"y": 4
},
"id": 34,
"links": [],
"pageSize": null,
"scroll": true,
"showHeader": true,
"sort": {
"col": 0,
"desc": true
},
"styles": [
{
"alias": "Time",
"dateFormat": "YYYY-MM-DD HH:mm:ss",
"pattern": "time",
"type": "date"
},
{
"alias": "",
"colorMode": null,
"colors": [
"rgba(245, 54, 54, 0.9)",
"rgba(237, 129, 40, 0.89)",
"rgba(50, 172, 45, 0.97)"
],
"decimals": 2,
"pattern": "/.*/",
"thresholds": [],
"type": "number",
"unit": "short"
}
],
"targets": [
{
"alias": "",
"format": "table",
"rawSql": "SELECT GETDATE() as time",
"refId": "A",
"target": ""
}
],
"title": "GETDATE() as time",
"transform": "table",
"type": "table"
},
{
"columns": [],
"datasource": "${DS_MSSQL_TEST}",
"fontSize": "100%",
"gridPos": {
"h": 3,
"w": 6,
"x": 18,
"y": 4
},
"id": 35,
"links": [],
"pageSize": null,
"scroll": true,
"showHeader": true,
"sort": {
"col": 0,
"desc": true
},
"styles": [
{
"alias": "Time",
"dateFormat": "YYYY-MM-DD HH:mm:ss",
"pattern": "time",
"type": "date"
},
{
"alias": "",
"colorMode": null,
"colors": [
"rgba(245, 54, 54, 0.9)",
"rgba(237, 129, 40, 0.89)",
"rgba(50, 172, 45, 0.97)"
],
"decimals": 2,
"pattern": "/.*/",
"thresholds": [],
"type": "number",
"unit": "short"
}
],
"targets": [
{
"alias": "",
"format": "table",
"rawSql": "SELECT GETUTCDATE() as time",
"refId": "A",
"target": ""
}
],
"title": "GETUTCDATE() as time",
"transform": "table",
"type": "table"
},
{
"aliasColors": {},
"bars": false,
@@ -149,7 +377,7 @@
"h": 9,
"w": 8,
"x": 0,
"y": 4
"y": 7
},
"id": 7,
"legend": {
@@ -228,7 +456,7 @@
"h": 9,
"w": 8,
"x": 8,
"y": 4
"y": 7
},
"id": 9,
"legend": {
@@ -307,7 +535,7 @@
"h": 9,
"w": 8,
"x": 16,
"y": 4
"y": 7
},
"id": 10,
"legend": {
@@ -386,7 +614,7 @@
"h": 9,
"w": 8,
"x": 0,
"y": 13
"y": 16
},
"id": 16,
"legend": {
@@ -465,7 +693,7 @@
"h": 9,
"w": 8,
"x": 8,
"y": 13
"y": 16
},
"id": 12,
"legend": {
@@ -544,7 +772,7 @@
"h": 9,
"w": 8,
"x": 16,
"y": 13
"y": 16
},
"id": 13,
"legend": {
@@ -623,7 +851,7 @@
"h": 8,
"w": 12,
"x": 0,
"y": 22
"y": 25
},
"id": 27,
"legend": {
@@ -655,13 +883,13 @@
{
"alias": "",
"format": "time_series",
"rawSql": "SELECT \n $__timeGroup(time, '$summarize') as time, \n measurement + ' - value one' as metric, \n avg(valueOne) as valueOne\nFROM\n metric_values \nWHERE\n $__timeFilter(time)\nGROUP BY \n $__timeGroup(time, '$summarize'), \n measurement \nORDER BY 1",
"rawSql": "SELECT \n $__timeGroup(time, '$summarize') as time, \n measurement + ' - value one' as metric, \n avg(valueOne) as valueOne\nFROM\n metric_values \nWHERE\n $__timeFilter(time) AND\n ($metric = 'ALL' OR measurement = $metric)\nGROUP BY \n $__timeGroup(time, '$summarize'), \n measurement \nORDER BY 1",
"refId": "A"
},
{
"alias": "",
"format": "time_series",
"rawSql": "SELECT \n $__timeGroup(time, '$summarize') as time, \n measurement + ' - value two' as metric, \n avg(valueTwo) as valueTwo \nFROM\n metric_values \nGROUP BY \n $__timeGroup(time, '$summarize'), \n measurement \nORDER BY 1",
"rawSql": "SELECT \n $__timeGroup(time, '$summarize') as time, \n measurement + ' - value two' as metric, \n avg(valueTwo) as valueTwo \nFROM\n metric_values\nWHERE\n $__timeFilter(time) AND\n ($metric = 'ALL' OR measurement = $metric)\nGROUP BY \n $__timeGroup(time, '$summarize'), \n measurement \nORDER BY 1",
"refId": "B"
}
],
@@ -712,7 +940,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 22
"y": 25
},
"id": 5,
"legend": {
@@ -734,7 +962,19 @@
"pointradius": 3,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"seriesOverrides": [
{
"alias": "MovingAverageValueOne",
"dashes": true,
"lines": false
},
{
"alias": "MovingAverageValueTwo",
"dashes": true,
"lines": false,
"yaxis": 1
}
],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
@@ -742,8 +982,14 @@
{
"alias": "",
"format": "time_series",
"rawSql": "SELECT \n $__timeGroup(time, '$summarize') as time, \n avg(valueOne) as valueOne, \n avg(valueTwo) as valueTwo \nFROM\n metric_values \nGROUP BY \n $__timeGroup(time, '$summarize')\nORDER BY 1",
"rawSql": "SELECT \n $__timeGroup(time, '$summarize') as time, \n avg(valueOne) as valueOne, \n avg(valueTwo) as valueTwo \nFROM\n metric_values \nWHERE \n $__timeFilter(time) AND \n ($metric = 'ALL' OR measurement = $metric)\nGROUP BY \n $__timeGroup(time, '$summarize')\nORDER BY 1",
"refId": "A"
},
{
"alias": "",
"format": "time_series",
"rawSql": "SELECT \n time,\n avg(valueOne) OVER (ORDER BY time ROWS BETWEEN 6 PRECEDING AND 6 FOLLOWING) as MovingAverageValueOne,\n avg(valueTwo) OVER (ORDER BY time ROWS BETWEEN 6 PRECEDING AND 6 FOLLOWING) as MovingAverageValueTwo\nFROM\n metric_values \nWHERE \n $__timeFilter(time) AND \n ($metric = 'ALL' OR measurement = $metric)\nORDER BY 1",
"refId": "B"
}
],
"thresholds": [],
@@ -793,7 +1039,7 @@
"h": 8,
"w": 12,
"x": 0,
"y": 30
"y": 33
},
"id": 4,
"legend": {
@@ -825,13 +1071,13 @@
{
"alias": "",
"format": "time_series",
"rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values ORDER BY 1",
"rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
"refId": "A"
},
{
"alias": "",
"format": "time_series",
"rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values ORDER BY 1",
"rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
"refId": "B"
}
],
@@ -882,7 +1128,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 30
"y": 33
},
"id": 28,
"legend": {
@@ -963,7 +1209,7 @@
"h": 8,
"w": 12,
"x": 0,
"y": 38
"y": 41
},
"id": 19,
"legend": {
@@ -995,13 +1241,13 @@
{
"alias": "",
"format": "time_series",
"rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values ORDER BY 1",
"rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
"refId": "A"
},
{
"alias": "",
"format": "time_series",
"rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values ORDER BY 1",
"rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
"refId": "B"
}
],
@@ -1052,7 +1298,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 38
"y": 41
},
"id": 18,
"legend": {
@@ -1082,7 +1328,7 @@
{
"alias": "",
"format": "time_series",
"rawSql": "SELECT $__timeEpoch(time), valueOne, valueTwo FROM metric_values ORDER BY 1",
"rawSql": "SELECT $__timeEpoch(time), valueOne, valueTwo FROM metric_values\nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
"refId": "A"
}
],
@@ -1133,7 +1379,7 @@
"h": 8,
"w": 12,
"x": 0,
"y": 46
"y": 49
},
"id": 17,
"legend": {
@@ -1165,13 +1411,13 @@
{
"alias": "",
"format": "time_series",
"rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values ORDER BY 1",
"rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
"refId": "A"
},
{
"alias": "",
"format": "time_series",
"rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values ORDER BY 1",
"rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
"refId": "B"
}
],
@@ -1222,7 +1468,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 46
"y": 49
},
"id": 20,
"legend": {
@@ -1252,7 +1498,7 @@
{
"alias": "",
"format": "time_series",
"rawSql": "SELECT $__timeEpoch(time), valueOne, valueTwo FROM metric_values ORDER BY 1",
"rawSql": "SELECT $__timeEpoch(time), valueOne, valueTwo FROM metric_values\nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
"refId": "A"
}
],
@@ -1303,7 +1549,7 @@
"h": 8,
"w": 12,
"x": 0,
"y": 54
"y": 57
},
"id": 29,
"legend": {
@@ -1335,7 +1581,7 @@
{
"alias": "",
"format": "time_series",
"rawSql": "DECLARE \n @from int = $__unixEpochFrom(),\n @to int = $__unixEpochTo()\n \nEXEC dbo.sp_test_epoch @from, @to",
"rawSql": "DECLARE\n @from int = $__unixEpochFrom(), \n @to int = $__unixEpochTo(), \n @interval nvarchar(50) = '$summarize', \n @metric nvarchar(200) = $metric\n \nEXEC dbo.sp_test_epoch @from, @to, @interval, @metric",
"refId": "A"
}
],
@@ -1386,7 +1632,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 54
"y": 57
},
"id": 30,
"legend": {
@@ -1418,7 +1664,7 @@
{
"alias": "",
"format": "time_series",
"rawSql": "DECLARE \n @from datetime = $__timeFrom(),\n @to datetime = $__timeTo()\n \nEXEC dbo.sp_test_datetime @from, @to",
"rawSql": "DECLARE\n @from datetime = $__timeFrom(), \n @to datetime = $__timeTo(), \n @interval nvarchar(50) = '$summarize', \n @metric nvarchar(200) = $metric\n \nEXEC dbo.sp_test_datetime @from, @to, @interval, @metric",
"refId": "A"
}
],
@@ -1469,7 +1715,7 @@
"h": 8,
"w": 12,
"x": 0,
"y": 62
"y": 65
},
"id": 14,
"legend": {
@@ -1499,13 +1745,13 @@
{
"alias": "",
"format": "time_series",
"rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values ORDER BY 1",
"rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values \nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
"refId": "A"
},
{
"alias": "",
"format": "time_series",
"rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values ORDER BY 1",
"rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values \nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
"refId": "B"
}
],
@@ -1559,7 +1805,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 62
"y": 65
},
"id": 15,
"legend": {
@@ -1589,7 +1835,7 @@
{
"alias": "",
"format": "time_series",
"rawSql": "SELECT $__timeEpoch(time), valueOne, valueTwo FROM metric_values ORDER BY 1",
"rawSql": "SELECT $__timeEpoch(time), valueOne, valueTwo FROM metric_values\nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
"refId": "A"
}
],
@@ -1642,7 +1888,7 @@
"h": 8,
"w": 12,
"x": 0,
"y": 70
"y": 73
},
"id": 25,
"legend": {
@@ -1672,13 +1918,13 @@
{
"alias": "",
"format": "time_series",
"rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values ORDER BY 1",
"rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values \nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
"refId": "A"
},
{
"alias": "",
"format": "time_series",
"rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values ORDER BY 1",
"rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values \nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
"refId": "B"
}
],
@@ -1732,7 +1978,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 70
"y": 73
},
"id": 22,
"legend": {
@@ -1762,7 +2008,7 @@
{
"alias": "",
"format": "time_series",
"rawSql": "SELECT $__timeEpoch(time), valueOne, valueTwo FROM metric_values ORDER BY 1",
"rawSql": "SELECT $__timeEpoch(time), valueOne, valueTwo FROM metric_values\nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
"refId": "A"
}
],
@@ -1815,7 +2061,7 @@
"h": 8,
"w": 12,
"x": 0,
"y": 78
"y": 81
},
"id": 21,
"legend": {
@@ -1845,13 +2091,13 @@
{
"alias": "",
"format": "time_series",
"rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values ORDER BY 1",
"rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values \nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
"refId": "A"
},
{
"alias": "",
"format": "time_series",
"rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values ORDER BY 1",
"rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values \nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
"refId": "B"
}
],
@@ -1905,7 +2151,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 78
"y": 81
},
"id": 26,
"legend": {
@@ -1935,7 +2181,7 @@
{
"alias": "",
"format": "time_series",
"rawSql": "SELECT $__timeEpoch(time), valueOne, valueTwo FROM metric_values ORDER BY 1",
"rawSql": "SELECT $__timeEpoch(time), valueOne, valueTwo FROM metric_values \nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
"refId": "A"
}
],
@@ -1988,7 +2234,7 @@
"h": 8,
"w": 12,
"x": 0,
"y": 86
"y": 89
},
"id": 23,
"legend": {
@@ -2018,13 +2264,13 @@
{
"alias": "",
"format": "time_series",
"rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values ORDER BY 1",
"rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values\nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
"refId": "A"
},
{
"alias": "",
"format": "time_series",
"rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values ORDER BY 1",
"rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values \nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
"refId": "B"
}
],
@@ -2078,7 +2324,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 86
"y": 89
},
"id": 24,
"legend": {
@@ -2108,7 +2354,7 @@
{
"alias": "",
"format": "time_series",
"rawSql": "SELECT $__timeEpoch(time), valueOne, valueTwo FROM metric_values ORDER BY 1",
"rawSql": "SELECT $__timeEpoch(time), valueOne, valueTwo FROM metric_values \nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
"refId": "A"
}
],
@@ -2157,6 +2403,26 @@
"tags": [],
"templating": {
"list": [
{
"allValue": "'ALL'",
"current": {},
"datasource": "${DS_MSSQL_TEST}",
"hide": 0,
"includeAll": true,
"label": "Metric",
"multi": false,
"name": "metric",
"options": [],
"query": "SELECT DISTINCT measurement FROM metric_values",
"refresh": 1,
"regex": "",
"sort": 0,
"tagValuesQuery": "",
"tags": [],
"tagsQuery": "",
"type": "query",
"useTags": false
},
{
"auto": false,
"auto_count": 30,
@@ -2208,7 +2474,7 @@
},
"time": {
"from": "2018-03-15T12:30:00.000Z",
"to": "2018-03-15T13:55:00.000Z"
"to": "2018-03-15T13:55:01.000Z"
},
"timepicker": {
"refresh_intervals": [
@@ -2238,5 +2504,5 @@
"timezone": "",
"title": "Microsoft SQL Server Data Source Test",
"uid": "GlAqcPgmz",
"version": 37
"version": 57
}

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -73,25 +73,20 @@ func (m *MsSqlMacroEngine) evaluateMacro(name string, args []string) (string, er
return "", fmt.Errorf("missing time column argument for macro %v", name)
}
return fmt.Sprintf("%s AS time", args[0]), nil
case "__utcTime":
if len(args) == 0 {
return "", fmt.Errorf("missing time column argument for macro %v", name)
}
return fmt.Sprintf("DATEADD(second, DATEDIFF(second,GETDATE(),GETUTCDATE()), %s) AS time", args[0]), nil
case "__timeEpoch":
if len(args) == 0 {
return "", fmt.Errorf("missing time column argument for macro %v", name)
}
return fmt.Sprintf("DATEDIFF(second, {d '1970-01-01'}, DATEADD(second, DATEDIFF(second,GETDATE(),GETUTCDATE()), %s) ) AS time", args[0]), nil
return fmt.Sprintf("DATEDIFF(second, '1970-01-01', %s) AS time", args[0]), nil
case "__timeFilter":
if len(args) == 0 {
return "", fmt.Errorf("missing time column argument for macro %v", name)
}
return fmt.Sprintf("%s >= DATEADD(s, %d+DATEDIFF(second,GETUTCDATE(),GETDATE()), '1970-01-01') AND %s <= DATEADD(s, %d+DATEDIFF(second,GETUTCDATE(),GETDATE()), '1970-01-01')", args[0], uint64(m.TimeRange.GetFromAsMsEpoch()/1000), args[0], uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil
return fmt.Sprintf("%s >= DATEADD(s, %d, '1970-01-01') AND %s <= DATEADD(s, %d, '1970-01-01')", args[0], uint64(m.TimeRange.GetFromAsMsEpoch()/1000), args[0], uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil
case "__timeFrom":
return fmt.Sprintf("DATEADD(second, %d+DATEDIFF(second,GETUTCDATE(),GETDATE()), '1970-01-01')", uint64(m.TimeRange.GetFromAsMsEpoch()/1000)), nil
return fmt.Sprintf("DATEADD(second, %d, '1970-01-01')", uint64(m.TimeRange.GetFromAsMsEpoch()/1000)), nil
case "__timeTo":
return fmt.Sprintf("DATEADD(second, %d+DATEDIFF(second,GETUTCDATE(),GETDATE()), '1970-01-01')", uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil
return fmt.Sprintf("DATEADD(second, %d, '1970-01-01')", uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil
case "__timeGroup":
if len(args) < 2 {
return "", fmt.Errorf("macro %v needs time column and interval", name)
@@ -113,7 +108,7 @@ func (m *MsSqlMacroEngine) evaluateMacro(name string, args []string) (string, er
m.Query.Model.Set("fillValue", floatVal)
}
}
return fmt.Sprintf("cast(cast(DATEDIFF(second, {d '1970-01-01'}, DATEADD(second, DATEDIFF(second,GETDATE(),GETUTCDATE()), %s))/%.0f as int)*%.0f as int)", args[0], interval.Seconds(), interval.Seconds()), nil
return fmt.Sprintf("CAST(ROUND(DATEDIFF(second, '1970-01-01', %s)/%.1f, 0) as bigint)*%.0f", args[0], interval.Seconds(), interval.Seconds()), nil
case "__unixEpochFilter":
if len(args) == 0 {
return "", fmt.Errorf("missing time column argument for macro %v", name)

View File

@@ -25,46 +25,39 @@ func TestMacroEngine(t *testing.T) {
So(sql, ShouldEqual, "select time_column AS time")
})
Convey("interpolate __utcTime function", func() {
sql, err := engine.Interpolate(query, nil, "select $__utcTime(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, "select DATEADD(second, DATEDIFF(second,GETDATE(),GETUTCDATE()), time_column) AS time")
})
Convey("interpolate __timeEpoch function", func() {
sql, err := engine.Interpolate(query, nil, "select $__timeEpoch(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, "select DATEDIFF(second, {d '1970-01-01'}, DATEADD(second, DATEDIFF(second,GETDATE(),GETUTCDATE()), time_column) ) AS time")
So(sql, ShouldEqual, "select DATEDIFF(second, '1970-01-01', time_column) AS time")
})
Convey("interpolate __timeEpoch function wrapped in aggregation", func() {
sql, err := engine.Interpolate(query, nil, "select min($__timeEpoch(time_column))")
So(err, ShouldBeNil)
So(sql, ShouldEqual, "select min(DATEDIFF(second, {d '1970-01-01'}, DATEADD(second, DATEDIFF(second,GETDATE(),GETUTCDATE()), time_column) ) AS time)")
So(sql, ShouldEqual, "select min(DATEDIFF(second, '1970-01-01', time_column) AS time)")
})
Convey("interpolate __timeFilter function", func() {
sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, "WHERE time_column >= DATEADD(s, 18446744066914186738+DATEDIFF(second,GETUTCDATE(),GETDATE()), '1970-01-01') AND time_column <= DATEADD(s, 18446744066914187038+DATEDIFF(second,GETUTCDATE(),GETDATE()), '1970-01-01')")
So(sql, ShouldEqual, "WHERE time_column >= DATEADD(s, 18446744066914186738, '1970-01-01') AND time_column <= DATEADD(s, 18446744066914187038, '1970-01-01')")
})
Convey("interpolate __timeGroup function", func() {
sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m')")
So(err, ShouldBeNil)
So(sql, ShouldEqual, "GROUP BY cast(cast(DATEDIFF(second, {d '1970-01-01'}, DATEADD(second, DATEDIFF(second,GETDATE(),GETUTCDATE()), time_column))/300 as int)*300 as int)")
So(sql, ShouldEqual, "GROUP BY CAST(ROUND(DATEDIFF(second, '1970-01-01', time_column)/300.0, 0) as bigint)*300")
})
Convey("interpolate __timeGroup function with spaces around arguments", func() {
sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column , '5m')")
So(err, ShouldBeNil)
So(sql, ShouldEqual, "GROUP BY cast(cast(DATEDIFF(second, {d '1970-01-01'}, DATEADD(second, DATEDIFF(second,GETDATE(),GETUTCDATE()), time_column))/300 as int)*300 as int)")
So(sql, ShouldEqual, "GROUP BY CAST(ROUND(DATEDIFF(second, '1970-01-01', time_column)/300.0, 0) as bigint)*300")
})
Convey("interpolate __timeGroup function with fill (value = NULL)", func() {
@@ -97,21 +90,21 @@ func TestMacroEngine(t *testing.T) {
sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, "select DATEADD(second, 18446744066914186738+DATEDIFF(second,GETUTCDATE(),GETDATE()), '1970-01-01')")
So(sql, ShouldEqual, "select DATEADD(second, 18446744066914186738, '1970-01-01')")
})
Convey("interpolate __timeTo function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, "select DATEADD(second, 18446744066914187038+DATEDIFF(second,GETUTCDATE(),GETDATE()), '1970-01-01')")
So(sql, ShouldEqual, "select DATEADD(second, 18446744066914187038, '1970-01-01')")
})
Convey("interpolate __unixEpochFilter function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(18446744066914186738)")
sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, "select 18446744066914186738 >= 18446744066914186738 AND 18446744066914186738 <= 18446744066914187038")
So(sql, ShouldEqual, "select time_column >= 18446744066914186738 AND time_column <= 18446744066914187038")
})
Convey("interpolate __unixEpochFrom function", func() {

View File

@@ -119,15 +119,10 @@ func (e MssqlQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Rows,
return err
}
// convert column named time to unix timestamp to make
// native datetime mssql types work in annotation queries
if timeIndex != -1 {
switch value := values[timeIndex].(type) {
case time.Time:
values[timeIndex] = float64(value.Unix())
}
}
// converts column named time to unix timestamp in milliseconds
// to make native mssql datetime types and epoch dates work in
// annotation and table queries.
tsdb.ConvertSqlTimeColumnToEpochMs(values, timeIndex)
table.Rows = append(table.Rows, values)
}
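tsdb.ConvertSqlTimeColumnToEpochMs replaces the old switch that only handled time.Time values. The annotation tests added further down in this diff pin the formats it is expected to normalize; all of the following time expressions come back as epoch milliseconds (1521062406 is 2018-03-14 21:20:06 UTC in epoch seconds):

    SELECT CAST('2018-03-14 21:20:06' AS DATETIME) as time, ...  -- native datetime
    SELECT 1521062406 as time, ...                               -- epoch seconds (bigint)
    SELECT cast(1521062406 as int) as time, ...                  -- epoch seconds (int)
    SELECT 1521062406000 as time, ...                            -- epoch milliseconds (passed through)
    SELECT cast(null as bigint) as time, ...                     -- NULL stays nil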

View File

@@ -19,6 +19,8 @@ import (
// and set up a MSSQL db named grafanatest and a user/password grafana/Password!
// Use the docker/blocks/mssql_tests/docker-compose.yaml to spin up a
// preconfigured MSSQL server suitable for running these tests.
There's also a dashboard.json in the same directory that you can import into Grafana
// once you've created a datasource for the test server/database.
// If needed, change the variable below to the IP address of the database.
var serverIP string = "localhost"
@@ -37,44 +39,44 @@ func TestMSSQL(t *testing.T) {
sess := x.NewSession()
defer sess.Close()
fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC)
fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
Convey("Given a table with different native data types", func() {
sql := `
IF OBJECT_ID('dbo.[mssql_types]', 'U') IS NOT NULL
DROP TABLE dbo.[mssql_types]
IF OBJECT_ID('dbo.[mssql_types]', 'U') IS NOT NULL
DROP TABLE dbo.[mssql_types]
CREATE TABLE [mssql_types] (
c_bit bit,
CREATE TABLE [mssql_types] (
c_bit bit,
c_tinyint tinyint,
c_smallint smallint,
c_int int,
c_bigint bigint,
c_tinyint tinyint,
c_smallint smallint,
c_int int,
c_bigint bigint,
c_money money,
c_smallmoney smallmoney,
c_numeric numeric(10,5),
c_real real,
c_decimal decimal(10,2),
c_float float,
c_money money,
c_smallmoney smallmoney,
c_numeric numeric(10,5),
c_real real,
c_decimal decimal(10,2),
c_float float,
c_char char(10),
c_varchar varchar(10),
c_text text,
c_char char(10),
c_varchar varchar(10),
c_text text,
c_nchar nchar(12),
c_nvarchar nvarchar(12),
c_ntext ntext,
c_nchar nchar(12),
c_nvarchar nvarchar(12),
c_ntext ntext,
c_datetime datetime,
c_datetime2 datetime2,
c_smalldatetime smalldatetime,
c_date date,
c_time time,
c_datetimeoffset datetimeoffset
)
`
c_datetime datetime,
c_datetime2 datetime2,
c_smalldatetime smalldatetime,
c_date date,
c_time time,
c_datetimeoffset datetimeoffset
)
`
_, err := sess.Exec(sql)
So(err, ShouldBeNil)
@@ -87,14 +89,14 @@ func TestMSSQL(t *testing.T) {
d2 := dt2.Format(dt2Format)
sql = fmt.Sprintf(`
INSERT INTO [mssql_types]
SELECT
1, 5, 20020, 980300, 1420070400, '$20000.15', '£2.15', 12345.12,
1.11, 2.22, 3.33,
'char10', 'varchar10', 'text',
N'☺nchar12☺', N'☺nvarchar12☺', N'☺text☺',
CAST('%s' AS DATETIME), CAST('%s' AS DATETIME2), CAST('%s' AS SMALLDATETIME), CAST('%s' AS DATE), CAST('%s' AS TIME), SWITCHOFFSET(CAST('%s' AS DATETIMEOFFSET), '-07:00')
`, d, d2, d, d, d, d2)
INSERT INTO [mssql_types]
SELECT
1, 5, 20020, 980300, 1420070400, '$20000.15', '£2.15', 12345.12,
1.11, 2.22, 3.33,
'char10', 'varchar10', 'text',
N'☺nchar12☺', N'☺nvarchar12☺', N'☺text☺',
CAST('%s' AS DATETIME), CAST('%s' AS DATETIME2), CAST('%s' AS SMALLDATETIME), CAST('%s' AS DATE), CAST('%s' AS TIME), SWITCHOFFSET(CAST('%s' AS DATETIMEOFFSET), '-07:00')
`, d, d2, d, d, d, d2)
_, err = sess.Exec(sql)
So(err, ShouldBeNil)
@@ -151,14 +153,14 @@ func TestMSSQL(t *testing.T) {
Convey("Given a table with metrics that lacks data for some series ", func() {
sql := `
IF OBJECT_ID('dbo.[metric]', 'U') IS NOT NULL
DROP TABLE dbo.[metric]
IF OBJECT_ID('dbo.[metric]', 'U') IS NOT NULL
DROP TABLE dbo.[metric]
CREATE TABLE [metric] (
time datetime,
value int
)
`
CREATE TABLE [metric] (
time datetime,
value int
)
`
_, err := sess.Exec(sql)
So(err, ShouldBeNil)
@@ -186,14 +188,8 @@ func TestMSSQL(t *testing.T) {
})
}
dtFormat := "2006-01-02 15:04:05.999999999"
for _, s := range series {
sql = fmt.Sprintf(`
INSERT INTO metric (time, value)
VALUES(CAST('%s' AS DATETIME), %d)
`, s.Time.Format(dtFormat), s.Value)
_, err = sess.Exec(sql)
_, err = sess.Insert(s)
So(err, ShouldBeNil)
}
@@ -211,22 +207,32 @@ func TestMSSQL(t *testing.T) {
}
resp, err := endpoint.Query(nil, nil, query)
queryResult := resp.Results["A"]
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
points := queryResult.Series[0].Points
So(len(points), ShouldEqual, 6)
So(len(points), ShouldEqual, 4)
actualValueFirst := points[0][0].Float64
actualTimeFirst := time.Unix(int64(points[0][1].Float64)/1000, 0)
So(actualValueFirst, ShouldEqual, 15)
So(actualTimeFirst, ShouldEqual, fromStart)
dt := fromStart
actualValueLast := points[3][0].Float64
actualTimeLast := time.Unix(int64(points[3][1].Float64)/1000, 0)
So(actualValueLast, ShouldEqual, 20)
So(actualTimeLast, ShouldEqual, fromStart.Add(25*time.Minute))
for i := 0; i < 3; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
So(aValue, ShouldEqual, 15)
So(aTime, ShouldEqual, dt)
dt = dt.Add(5 * time.Minute)
}
// adjust for 5 minute gap
dt = dt.Add(5 * time.Minute)
for i := 3; i < 6; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
So(aValue, ShouldEqual, 20)
So(aTime, ShouldEqual, dt)
dt = dt.Add(5 * time.Minute)
}
})
Convey("When doing a metric query using timeGroup with NULL fill enabled", func() {
@@ -247,33 +253,34 @@ func TestMSSQL(t *testing.T) {
}
resp, err := endpoint.Query(nil, nil, query)
queryResult := resp.Results["A"]
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
points := queryResult.Series[0].Points
So(len(points), ShouldEqual, 7)
actualValueFirst := points[0][0].Float64
actualTimeFirst := time.Unix(int64(points[0][1].Float64)/1000, 0)
So(actualValueFirst, ShouldEqual, 15)
So(actualTimeFirst, ShouldEqual, fromStart)
actualNullPoint := points[3][0]
actualNullTime := time.Unix(int64(points[3][1].Float64)/1000, 0)
So(actualNullPoint.Valid, ShouldBeFalse)
So(actualNullTime, ShouldEqual, fromStart.Add(15*time.Minute))
dt := fromStart
actualValueLast := points[5][0].Float64
actualTimeLast := time.Unix(int64(points[5][1].Float64)/1000, 0)
So(actualValueLast, ShouldEqual, 20)
So(actualTimeLast, ShouldEqual, fromStart.Add(25*time.Minute))
for i := 0; i < 3; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
So(aValue, ShouldEqual, 15)
So(aTime, ShouldEqual, dt)
dt = dt.Add(5 * time.Minute)
}
actualLastNullPoint := points[6][0]
actualLastNullTime := time.Unix(int64(points[6][1].Float64)/1000, 0)
So(actualLastNullPoint.Valid, ShouldBeFalse)
So(actualLastNullTime, ShouldEqual, fromStart.Add(30*time.Minute))
So(points[3][0].Valid, ShouldBeFalse)
// adjust for 5 minute gap
dt = dt.Add(5 * time.Minute)
for i := 4; i < 7; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
So(aValue, ShouldEqual, 20)
So(aTime, ShouldEqual, dt)
dt = dt.Add(5 * time.Minute)
}
})
Convey("When doing a metric query using timeGroup with float fill enabled", func() {
@@ -294,53 +301,44 @@ func TestMSSQL(t *testing.T) {
}
resp, err := endpoint.Query(nil, nil, query)
queryResult := resp.Results["A"]
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
points := queryResult.Series[0].Points
So(points[6][0].Float64, ShouldEqual, 1.5)
So(points[3][0].Float64, ShouldEqual, 1.5)
})
})
Convey("Given a table with metrics having multiple values and measurements", func() {
sql := `
IF OBJECT_ID('dbo.[metric_values]', 'U') IS NOT NULL
DROP TABLE dbo.[metric_values]
CREATE TABLE [metric_values] (
time datetime,
measurement nvarchar(100),
valueOne int,
valueTwo int,
)
`
_, err := sess.Exec(sql)
So(err, ShouldBeNil)
type metricValues struct {
type metric_values struct {
Time time.Time
Measurement string
ValueOne int64
ValueTwo int64
ValueOne int64 `xorm:"integer 'valueOne'"`
ValueTwo int64 `xorm:"integer 'valueTwo'"`
}
if exist, err := sess.IsTableExist(metric_values{}); err != nil || exist {
So(err, ShouldBeNil)
sess.DropTable(metric_values{})
}
err := sess.CreateTable(metric_values{})
So(err, ShouldBeNil)
rand.Seed(time.Now().Unix())
rnd := func(min, max int64) int64 {
return rand.Int63n(max-min) + min
}
series := []*metricValues{}
series := []*metric_values{}
for _, t := range genTimeRangeByInterval(fromStart.Add(-30*time.Minute), 90*time.Minute, 5*time.Minute) {
series = append(series, &metricValues{
series = append(series, &metric_values{
Time: t,
Measurement: "Metric A",
ValueOne: rnd(0, 100),
ValueTwo: rnd(0, 100),
})
series = append(series, &metricValues{
series = append(series, &metric_values{
Time: t,
Measurement: "Metric B",
ValueOne: rnd(0, 100),
@@ -348,14 +346,8 @@ func TestMSSQL(t *testing.T) {
})
}
dtFormat := "2006-01-02 15:04:05"
for _, s := range series {
sql = fmt.Sprintf(`
INSERT metric_values (time, measurement, valueOne, valueTwo)
VALUES(CAST('%s' AS DATETIME), '%s', %d, %d)
`, s.Time.Format(dtFormat), s.Measurement, s.ValueOne, s.ValueTwo)
_, err = sess.Exec(sql)
_, err = sess.Insert(s)
So(err, ShouldBeNil)
}
@@ -373,8 +365,8 @@ func TestMSSQL(t *testing.T) {
}
resp, err := endpoint.Query(nil, nil, query)
queryResult := resp.Results["A"]
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Series), ShouldEqual, 2)
@@ -396,8 +388,8 @@ func TestMSSQL(t *testing.T) {
}
resp, err := endpoint.Query(nil, nil, query)
queryResult := resp.Results["A"]
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Series), ShouldEqual, 2)
@@ -407,45 +399,55 @@ func TestMSSQL(t *testing.T) {
Convey("Given a stored procedure that takes @from and @to in epoch time", func() {
sql := `
IF object_id('sp_test_epoch') IS NOT NULL
DROP PROCEDURE sp_test_epoch
`
IF object_id('sp_test_epoch') IS NOT NULL
DROP PROCEDURE sp_test_epoch
`
_, err := sess.Exec(sql)
So(err, ShouldBeNil)
sql = `
CREATE PROCEDURE sp_test_epoch(
@from int,
@to int
) AS
BEGIN
SELECT
cast(cast(DATEDIFF(second, {d '1970-01-01'}, DATEADD(second, DATEDIFF(second,GETDATE(),GETUTCDATE()), time))/600 as int)*600 as int) as time,
measurement + ' - value one' as metric,
avg(valueOne) as value
FROM
metric_values
WHERE
time >= DATEADD(s, @from, '1970-01-01') AND time <= DATEADD(s, @to, '1970-01-01')
GROUP BY
cast(cast(DATEDIFF(second, {d '1970-01-01'}, DATEADD(second, DATEDIFF(second,GETDATE(),GETUTCDATE()), time))/600 as int)*600 as int),
measurement
UNION ALL
SELECT
cast(cast(DATEDIFF(second, {d '1970-01-01'}, DATEADD(second, DATEDIFF(second,GETDATE(),GETUTCDATE()), time))/600 as int)*600 as int) as time,
measurement + ' - value two' as metric,
avg(valueTwo) as value
FROM
metric_values
WHERE
time >= DATEADD(s, @from, '1970-01-01') AND time <= DATEADD(s, @to, '1970-01-01')
GROUP BY
cast(cast(DATEDIFF(second, {d '1970-01-01'}, DATEADD(second, DATEDIFF(second,GETDATE(),GETUTCDATE()), time))/600 as int)*600 as int),
measurement
ORDER BY 1
END
`
CREATE PROCEDURE sp_test_epoch(
@from int,
@to int,
@interval nvarchar(50) = '5m',
@metric nvarchar(200) = 'ALL'
) AS
BEGIN
DECLARE @dInterval int
SELECT @dInterval = 300
IF @interval = '10m'
SELECT @dInterval = 600
SELECT
CAST(ROUND(DATEDIFF(second, '1970-01-01', time)/CAST(@dInterval as float), 0) as bigint)*@dInterval as time,
measurement + ' - value one' as metric,
avg(valueOne) as value
FROM
metric_values
WHERE
time BETWEEN DATEADD(s, @from, '1970-01-01') AND DATEADD(s, @to, '1970-01-01') AND
(@metric = 'ALL' OR measurement = @metric)
GROUP BY
CAST(ROUND(DATEDIFF(second, '1970-01-01', time)/CAST(@dInterval as float), 0) as bigint)*@dInterval,
measurement
UNION ALL
SELECT
CAST(ROUND(DATEDIFF(second, '1970-01-01', time)/CAST(@dInterval as float), 0) as bigint)*@dInterval as time,
measurement + ' - value two' as metric,
avg(valueTwo) as value
FROM
metric_values
WHERE
time BETWEEN DATEADD(s, @from, '1970-01-01') AND DATEADD(s, @to, '1970-01-01') AND
(@metric = 'ALL' OR measurement = @metric)
GROUP BY
CAST(ROUND(DATEDIFF(second, '1970-01-01', time)/CAST(@dInterval as float), 0) as bigint)*@dInterval,
measurement
ORDER BY 1
END
`
_, err = sess.Exec(sql)
So(err, ShouldBeNil)
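The rewritten procedure takes the summarize interval and metric filter as parameters with defaults, matching the updated dashboard panel earlier in this diff, which invokes it as (the panel's rawSql, unescaped):

    DECLARE
      @from int = $__unixEpochFrom(),
      @to int = $__unixEpochTo(),
      @interval nvarchar(50) = '$summarize',
      @metric nvarchar(200) = $metric

    EXEC dbo.sp_test_epoch @from, @to, @interval, @metric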
@@ -456,10 +458,10 @@ func TestMSSQL(t *testing.T) {
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": `DECLARE
@from int = $__unixEpochFrom(),
@to int = $__unixEpochTo()
@from int = $__unixEpochFrom(),
@to int = $__unixEpochTo()
EXEC dbo.sp_test_epoch @from, @to`,
EXEC dbo.sp_test_epoch @from, @to`,
"format": "time_series",
}),
RefId: "A",
@@ -474,6 +476,7 @@ func TestMSSQL(t *testing.T) {
resp, err := endpoint.Query(nil, nil, query)
queryResult := resp.Results["A"]
So(err, ShouldBeNil)
fmt.Println("query", "sql", queryResult.Meta)
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Series), ShouldEqual, 4)
@@ -486,45 +489,55 @@ func TestMSSQL(t *testing.T) {
Convey("Given a stored procedure that takes @from and @to in datetime", func() {
sql := `
IF object_id('sp_test_datetime') IS NOT NULL
DROP PROCEDURE sp_test_datetime
`
IF object_id('sp_test_datetime') IS NOT NULL
DROP PROCEDURE sp_test_datetime
`
_, err := sess.Exec(sql)
So(err, ShouldBeNil)
sql = `
CREATE PROCEDURE sp_test_datetime(
@from datetime,
@to datetime
) AS
BEGIN
SELECT
cast(cast(DATEDIFF(second, {d '1970-01-01'}, time)/600 as int)*600 as int) as time,
measurement + ' - value one' as metric,
avg(valueOne) as value
FROM
metric_values
WHERE
time >= @from AND time <= @to
GROUP BY
cast(cast(DATEDIFF(second, {d '1970-01-01'}, time)/600 as int)*600 as int),
measurement
UNION ALL
SELECT
cast(cast(DATEDIFF(second, {d '1970-01-01'}, time)/600 as int)*600 as int) as time,
measurement + ' - value two' as metric,
avg(valueTwo) as value
FROM
metric_values
WHERE
time >= @from AND time <= @to
GROUP BY
cast(cast(DATEDIFF(second, {d '1970-01-01'}, time)/600 as int)*600 as int),
measurement
ORDER BY 1
END
`
CREATE PROCEDURE sp_test_datetime(
@from datetime,
@to datetime,
@interval nvarchar(50) = '5m',
@metric nvarchar(200) = 'ALL'
) AS
BEGIN
DECLARE @dInterval int
SELECT @dInterval = 300
IF @interval = '10m'
SELECT @dInterval = 600
SELECT
CAST(ROUND(DATEDIFF(second, '1970-01-01', time)/CAST(@dInterval as float), 0) as bigint)*@dInterval as time,
measurement + ' - value one' as metric,
avg(valueOne) as value
FROM
metric_values
WHERE
time BETWEEN @from AND @to AND
(@metric = 'ALL' OR measurement = @metric)
GROUP BY
CAST(ROUND(DATEDIFF(second, '1970-01-01', time)/CAST(@dInterval as float), 0) as bigint)*@dInterval,
measurement
UNION ALL
SELECT
CAST(ROUND(DATEDIFF(second, '1970-01-01', time)/CAST(@dInterval as float), 0) as bigint)*@dInterval as time,
measurement + ' - value two' as metric,
avg(valueTwo) as value
FROM
metric_values
WHERE
time BETWEEN @from AND @to AND
(@metric = 'ALL' OR measurement = @metric)
GROUP BY
CAST(ROUND(DATEDIFF(second, '1970-01-01', time)/CAST(@dInterval as float), 0) as bigint)*@dInterval,
measurement
ORDER BY 1
END
`
_, err = sess.Exec(sql)
So(err, ShouldBeNil)
@@ -535,10 +548,10 @@ func TestMSSQL(t *testing.T) {
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": `DECLARE
@from int = $__unixEpochFrom(),
@to int = $__unixEpochTo()
@from int = $__unixEpochFrom(),
@to int = $__unixEpochTo()
EXEC dbo.sp_test_epoch @from, @to`,
EXEC dbo.sp_test_epoch @from, @to`,
"format": "time_series",
}),
RefId: "A",
@@ -570,7 +583,7 @@ func TestMSSQL(t *testing.T) {
DROP TABLE dbo.[event]
CREATE TABLE [event] (
time_sec bigint,
time_sec int,
description nvarchar(100),
tags nvarchar(100),
)
@@ -654,12 +667,191 @@ func TestMSSQL(t *testing.T) {
So(err, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 3)
})
Convey("When doing an annotation query with a time column in datetime format", func() {
dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC)
dtFormat := "2006-01-02 15:04:05.999999999"
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": fmt.Sprintf(`SELECT
CAST('%s' AS DATETIME) as time,
'message' as text,
'tag1,tag2' as tags
`, dt.Format(dtFormat)),
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
So(columns[0].(float64), ShouldEqual, float64(dt.Unix()*1000))
})
Convey("When doing an annotation query with a time column in epoch second format should return ms", func() {
dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC)
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": fmt.Sprintf(`SELECT
%d as time,
'message' as text,
'tag1,tag2' as tags
`, dt.Unix()),
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
So(columns[0].(int64), ShouldEqual, int64(dt.Unix()*1000))
})
Convey("When doing an annotation query with a time column in epoch second format (int) should return ms", func() {
dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC)
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": fmt.Sprintf(`SELECT
cast(%d as int) as time,
'message' as text,
'tag1,tag2' as tags
`, dt.Unix()),
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
So(columns[0].(int64), ShouldEqual, int64(dt.Unix()*1000))
})
Convey("When doing an annotation query with a time column in epoch millisecond format should return ms", func() {
dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC)
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": fmt.Sprintf(`SELECT
%d as time,
'message' as text,
'tag1,tag2' as tags
`, dt.Unix()*1000),
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
So(columns[0].(float64), ShouldEqual, float64(dt.Unix()*1000))
})
Convey("When doing an annotation query with a time column holding a bigint null value should return nil", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": `SELECT
cast(null as bigint) as time,
'message' as text,
'tag1,tag2' as tags
`,
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
So(columns[0], ShouldBeNil)
})
Convey("When doing an annotation query with a time column holding a datetime null value should return nil", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": `SELECT
cast(null as datetime) as time,
'message' as text,
'tag1,tag2' as tags
`,
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
So(columns[0], ShouldBeNil)
})
})
})
}
func InitMSSQLTestDB(t *testing.T) *xorm.Engine {
x, err := xorm.NewEngine(sqlutil.TestDB_Mssql.DriverName, strings.Replace(sqlutil.TestDB_Mssql.ConnStr, "localhost", serverIP, 1))
x.DatabaseTZ = time.UTC
x.TZLocation = time.UTC
// x.ShowSQL()
@@ -667,8 +859,6 @@ func InitMSSQLTestDB(t *testing.T) *xorm.Engine {
t.Fatalf("Failed to init mssql db %v", err)
}
sqlutil.CleanDB(x)
return x
}

View File

@@ -68,7 +68,7 @@ func replaceAllStringSubmatchFunc(re *regexp.Regexp, str string, repl func([]str
func (m *MySqlMacroEngine) evaluateMacro(name string, args []string) (string, error) {
switch name {
case "__time":
case "__timeEpoch", "__time":
if len(args) == 0 {
return "", fmt.Errorf("missing time column argument for macro %v", name)
}
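This makes $__timeEpoch an alias of $__time in the MySQL engine, so query text written with either macro name now takes the same code path, in line with the macro set the MSSQL engine exposes above. A minimal usage sketch (table and column names hypothetical):

    -- both lines are now interpolated identically
    SELECT $__time(createdAt), value FROM metric
    SELECT $__timeEpoch(createdAt), value FROM metric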

View File

@@ -81,7 +81,7 @@ func (e MysqlQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Rows,
// check if there is a column named time
for i, col := range columnNames {
switch col {
case "time_sec":
case "time", "time_sec":
timeIndex = i
}
}
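The MySQL table transform now treats a column named either time or time_sec as the time index, so both the legacy annotation convention and a native datetime column get normalized (a sketch; table and column names hypothetical):

    SELECT UNIX_TIMESTAMP(created_at) as time_sec, msg as text, tags FROM events  -- legacy epoch convention
    SELECT created_at as time, msg as text, tags FROM events                      -- native datetime column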
@@ -96,13 +96,10 @@ func (e MysqlQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Rows,
return err
}
// for annotations, convert to epoch
if timeIndex != -1 {
switch value := values[timeIndex].(type) {
case time.Time:
values[timeIndex] = float64(value.UnixNano() / 1e9)
}
}
// converts column named time to unix timestamp in milliseconds to make
// native mysql datetime types and epoch dates work in
// annotation and table queries.
tsdb.ConvertSqlTimeColumnToEpochMs(values, timeIndex)
table.Rows = append(table.Rows, values)
}
@@ -185,9 +182,37 @@ func (e MysqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core.
return err
}
rowData := NewStringStringScan(columnNames)
columnTypes, err := rows.ColumnTypes()
if err != nil {
return err
}
rowLimit := 1000000
rowCount := 0
timeIndex := -1
metricIndex := -1
// check columns of resultset: a column named time is mandatory
// the first text column is treated as metric name unless a column named metric is present
for i, col := range columnNames {
switch col {
case "time", "time_sec":
timeIndex = i
case "metric":
metricIndex = i
default:
if metricIndex == -1 {
switch columnTypes[i].DatabaseTypeName() {
case "CHAR", "VARCHAR", "TINYTEXT", "TEXT", "MEDIUMTEXT", "LONGTEXT":
metricIndex = i
}
}
}
}
if timeIndex == -1 {
return fmt.Errorf("Found no column named time or time_sec")
}
fillMissing := query.Model.Get("fill").MustBool(false)
var fillInterval float64
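Taken together, the column-detection rules above define the resultset contract for MySQL time series: a column named time (or time_sec) is required, an optional column named metric names the series (otherwise the first text-typed column is used), and the remaining numeric columns supply the values. A conforming query sketch (all names hypothetical):

    SELECT
      UNIX_TIMESTAMP(created_at) as time,
      hostname as metric,      -- optional; drop it and the first text column is picked instead
      avg(load1) as value
    FROM server_stats
    GROUP BY 1, 2
    ORDER BY 1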
@@ -198,53 +223,90 @@ func (e MysqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core.
fillValue.Float64 = query.Model.Get("fillValue").MustFloat64()
fillValue.Valid = true
}
}
for ; rows.Next(); rowCount++ {
for rows.Next() {
var timestamp float64
var value null.Float
var metric string
if rowCount > rowLimit {
return fmt.Errorf("MySQL query row limit exceeded, limit %d", rowLimit)
return fmt.Errorf("PostgreSQL query row limit exceeded, limit %d", rowLimit)
}
err := rowData.Update(rows.Rows)
values, err := e.getTypedRowData(rows)
if err != nil {
e.log.Error("MySQL response parsing", "error", err)
return fmt.Errorf("MySQL response parsing error %v", err)
return err
}
if rowData.metric == "" {
rowData.metric = "Unknown"
switch columnValue := values[timeIndex].(type) {
case int64:
timestamp = float64(columnValue * 1000)
case float64:
timestamp = columnValue * 1000
case time.Time:
timestamp = float64(columnValue.UnixNano() / 1e6)
default:
return fmt.Errorf("Invalid type for column time, must be of type timestamp or unix timestamp, got: %T %v", columnValue, columnValue)
}
if !rowData.time.Valid {
return fmt.Errorf("Found row with no time value")
}
series, exist := pointsBySeries[rowData.metric]
if exist == false {
series = &tsdb.TimeSeries{Name: rowData.metric}
pointsBySeries[rowData.metric] = series
seriesByQueryOrder.PushBack(rowData.metric)
}
if fillMissing {
var intervalStart float64
if exist == false {
intervalStart = float64(tsdbQuery.TimeRange.MustGetFrom().UnixNano() / 1e6)
if metricIndex >= 0 {
if columnValue, ok := values[metricIndex].(string); ok == true {
metric = columnValue
} else {
intervalStart = series.Points[len(series.Points)-1][1].Float64 + fillInterval
}
// align interval start
intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval
for i := intervalStart; i < rowData.time.Float64; i += fillInterval {
series.Points = append(series.Points, tsdb.TimePoint{fillValue, null.FloatFrom(i)})
rowCount++
return fmt.Errorf("Column metric must be of type char,varchar or text, got: %T %v", values[metricIndex], values[metricIndex])
}
}
series.Points = append(series.Points, tsdb.TimePoint{rowData.value, rowData.time})
for i, col := range columnNames {
if i == timeIndex || i == metricIndex {
continue
}
switch columnValue := values[i].(type) {
case int64:
value = null.FloatFrom(float64(columnValue))
case float64:
value = null.FloatFrom(columnValue)
case nil:
value.Valid = false
default:
return fmt.Errorf("Value column must have numeric datatype, column: %s type: %T value: %v", col, columnValue, columnValue)
}
if metricIndex == -1 {
metric = col
}
series, exist := pointsBySeries[metric]
if exist == false {
series = &tsdb.TimeSeries{Name: metric}
pointsBySeries[metric] = series
seriesByQueryOrder.PushBack(metric)
}
if fillMissing {
var intervalStart float64
if exist == false {
intervalStart = float64(tsdbQuery.TimeRange.MustGetFrom().UnixNano() / 1e6)
} else {
intervalStart = series.Points[len(series.Points)-1][1].Float64 + fillInterval
}
// align interval start
intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval
for i := intervalStart; i < timestamp; i += fillInterval {
series.Points = append(series.Points, tsdb.TimePoint{fillValue, null.FloatFrom(i)})
rowCount++
}
}
series.Points = append(series.Points, tsdb.TimePoint{value, null.FloatFrom(timestamp)})
e.log.Debug("Rows", "metric", metric, "time", timestamp, "value", value)
rowCount++
}
}
for elem := seriesByQueryOrder.Front(); elem != nil; elem = elem.Next() {
@@ -269,62 +331,3 @@ func (e MysqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core.
result.Meta.Set("rowCount", rowCount)
return nil
}
type stringStringScan struct {
rowPtrs []interface{}
rowValues []string
columnNames []string
columnCount int
time null.Float
value null.Float
metric string
}
func NewStringStringScan(columnNames []string) *stringStringScan {
s := &stringStringScan{
columnCount: len(columnNames),
columnNames: columnNames,
rowPtrs: make([]interface{}, len(columnNames)),
rowValues: make([]string, len(columnNames)),
}
for i := 0; i < s.columnCount; i++ {
s.rowPtrs[i] = new(sql.RawBytes)
}
return s
}
func (s *stringStringScan) Update(rows *sql.Rows) error {
if err := rows.Scan(s.rowPtrs...); err != nil {
return err
}
s.time = null.FloatFromPtr(nil)
s.value = null.FloatFromPtr(nil)
for i := 0; i < s.columnCount; i++ {
if rb, ok := s.rowPtrs[i].(*sql.RawBytes); ok {
s.rowValues[i] = string(*rb)
switch s.columnNames[i] {
case "time_sec":
if sec, err := strconv.ParseInt(s.rowValues[i], 10, 64); err == nil {
s.time = null.FloatFrom(float64(sec * 1000))
}
case "value":
if value, err := strconv.ParseFloat(s.rowValues[i], 64); err == nil {
s.value = null.FloatFrom(value)
}
case "metric":
s.metric = s.rowValues[i]
}
*rb = nil // reset pointer to discard current value to avoid a bug
} else {
return fmt.Errorf("Cannot convert index %d column %s to type *sql.RawBytes", i, s.columnNames[i])
}
}
return nil
}

View File

@@ -1,6 +1,8 @@
package mysql
import (
"fmt"
"math/rand"
"testing"
"time"
@@ -14,6 +16,10 @@ import (
// To run this test, remove the Skip from SkipConvey
// and set up a MySQL db named grafana_tests and a user/password grafana/password
// Use the docker/blocks/mysql_tests/docker-compose.yaml to spin up a
// preconfigured MySQL server suitable for running these tests.
There's also a dashboard.json in the same directory that you can import into Grafana
// once you've created a datasource for the test server/database.
func TestMySQL(t *testing.T) {
SkipConvey("MySQL", t, func() {
x := InitMySQLTestDB(t)
@@ -29,110 +35,621 @@ func TestMySQL(t *testing.T) {
sess := x.NewSession()
defer sess.Close()
sql := "CREATE TABLE `mysql_types` ("
sql += "`atinyint` tinyint(1) NOT NULL,"
sql += "`avarchar` varchar(3) NOT NULL,"
sql += "`achar` char(3),"
sql += "`amediumint` mediumint NOT NULL,"
sql += "`asmallint` smallint NOT NULL,"
sql += "`abigint` bigint NOT NULL,"
sql += "`aint` int(11) NOT NULL,"
sql += "`adouble` double(10,2),"
sql += "`anewdecimal` decimal(10,2),"
sql += "`afloat` float(10,2) NOT NULL,"
sql += "`atimestamp` timestamp NOT NULL,"
sql += "`adatetime` datetime NOT NULL,"
sql += "`atime` time NOT NULL,"
// sql += "`ayear` year," // Crashes xorm when running cleandb
sql += "`abit` bit(1),"
sql += "`atinytext` tinytext,"
sql += "`atinyblob` tinyblob,"
sql += "`atext` text,"
sql += "`ablob` blob,"
sql += "`amediumtext` mediumtext,"
sql += "`amediumblob` mediumblob,"
sql += "`alongtext` longtext,"
sql += "`alongblob` longblob,"
sql += "`aenum` enum('val1', 'val2'),"
sql += "`aset` set('a', 'b', 'c', 'd'),"
sql += "`adate` date,"
sql += "`time_sec` datetime(6),"
sql += "`aintnull` int(11),"
sql += "`afloatnull` float(10,2),"
sql += "`avarcharnull` varchar(3),"
sql += "`adecimalnull` decimal(10,2)"
sql += ") ENGINE=InnoDB DEFAULT CHARSET=latin1;"
_, err := sess.Exec(sql)
So(err, ShouldBeNil)
fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.Local)
sql = "INSERT INTO `mysql_types` "
sql += "(`atinyint`, `avarchar`, `achar`, `amediumint`, `asmallint`, `abigint`, `aint`, `adouble`, "
sql += "`anewdecimal`, `afloat`, `adatetime`, `atimestamp`, `atime`, `abit`, `atinytext`, "
sql += "`atinyblob`, `atext`, `ablob`, `amediumtext`, `amediumblob`, `alongtext`, `alongblob`, "
sql += "`aenum`, `aset`, `adate`, `time_sec`) "
sql += "VALUES(1, 'abc', 'def', 1, 10, 100, 1420070400, 1.11, "
sql += "2.22, 3.33, now(), current_timestamp(), '11:11:11', 1, 'tinytext', "
sql += "'tinyblob', 'text', 'blob', 'mediumtext', 'mediumblob', 'longtext', 'longblob', "
sql += "'val2', 'a,b', curdate(), '2018-01-01 00:01:01.123456');"
_, err = sess.Exec(sql)
So(err, ShouldBeNil)
Convey("Query with Table format should map MySQL column types to Go types", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": "SELECT * FROM mysql_types",
"format": "table",
}),
RefId: "A",
},
},
Convey("Given a table with different native data types", func() {
if exists, err := sess.IsTableExist("mysql_types"); err != nil || exists {
So(err, ShouldBeNil)
sess.DropTable("mysql_types")
}
resp, err := endpoint.Query(nil, nil, query)
queryResult := resp.Results["A"]
sql := "CREATE TABLE `mysql_types` ("
sql += "`atinyint` tinyint(1) NOT NULL,"
sql += "`avarchar` varchar(3) NOT NULL,"
sql += "`achar` char(3),"
sql += "`amediumint` mediumint NOT NULL,"
sql += "`asmallint` smallint NOT NULL,"
sql += "`abigint` bigint NOT NULL,"
sql += "`aint` int(11) NOT NULL,"
sql += "`adouble` double(10,2),"
sql += "`anewdecimal` decimal(10,2),"
sql += "`afloat` float(10,2) NOT NULL,"
sql += "`atimestamp` timestamp NOT NULL,"
sql += "`adatetime` datetime NOT NULL,"
sql += "`atime` time NOT NULL,"
sql += "`ayear` year," // Crashes xorm when running cleandb
sql += "`abit` bit(1),"
sql += "`atinytext` tinytext,"
sql += "`atinyblob` tinyblob,"
sql += "`atext` text,"
sql += "`ablob` blob,"
sql += "`amediumtext` mediumtext,"
sql += "`amediumblob` mediumblob,"
sql += "`alongtext` longtext,"
sql += "`alongblob` longblob,"
sql += "`aenum` enum('val1', 'val2'),"
sql += "`aset` set('a', 'b', 'c', 'd'),"
sql += "`adate` date,"
sql += "`time_sec` datetime(6),"
sql += "`aintnull` int(11),"
sql += "`afloatnull` float(10,2),"
sql += "`avarcharnull` varchar(3),"
sql += "`adecimalnull` decimal(10,2)"
sql += ") ENGINE=InnoDB DEFAULT CHARSET=latin1;"
_, err := sess.Exec(sql)
So(err, ShouldBeNil)
column := queryResult.Tables[0].Rows[0]
sql = "INSERT INTO `mysql_types` "
sql += "(`atinyint`, `avarchar`, `achar`, `amediumint`, `asmallint`, `abigint`, `aint`, `adouble`, "
sql += "`anewdecimal`, `afloat`, `adatetime`, `atimestamp`, `atime`, `ayear`, `abit`, `atinytext`, "
sql += "`atinyblob`, `atext`, `ablob`, `amediumtext`, `amediumblob`, `alongtext`, `alongblob`, "
sql += "`aenum`, `aset`, `adate`, `time_sec`) "
sql += "VALUES(1, 'abc', 'def', 1, 10, 100, 1420070400, 1.11, "
sql += "2.22, 3.33, now(), current_timestamp(), '11:11:11', '2018', 1, 'tinytext', "
sql += "'tinyblob', 'text', 'blob', 'mediumtext', 'mediumblob', 'longtext', 'longblob', "
sql += "'val2', 'a,b', curdate(), '2018-01-01 00:01:01.123456');"
_, err = sess.Exec(sql)
So(err, ShouldBeNil)
So(*column[0].(*int8), ShouldEqual, 1)
So(column[1].(string), ShouldEqual, "abc")
So(column[2].(string), ShouldEqual, "def")
So(*column[3].(*int32), ShouldEqual, 1)
So(*column[4].(*int16), ShouldEqual, 10)
So(*column[5].(*int64), ShouldEqual, 100)
So(*column[6].(*int32), ShouldEqual, 1420070400)
So(column[7].(float64), ShouldEqual, 1.11)
So(column[8].(float64), ShouldEqual, 2.22)
So(*column[9].(*float32), ShouldEqual, 3.33)
_, offset := time.Now().Zone()
So(column[10].(time.Time), ShouldHappenWithin, time.Duration(10*time.Second), time.Now().Add(time.Duration(offset)*time.Second))
So(column[11].(time.Time), ShouldHappenWithin, time.Duration(10*time.Second), time.Now().Add(time.Duration(offset)*time.Second))
So(column[12].(string), ShouldEqual, "11:11:11")
So(*column[13].(*[]byte), ShouldHaveSameTypeAs, []byte{1})
So(column[14].(string), ShouldEqual, "tinytext")
So(column[15].(string), ShouldEqual, "tinyblob")
So(column[16].(string), ShouldEqual, "text")
So(column[17].(string), ShouldEqual, "blob")
So(column[18].(string), ShouldEqual, "mediumtext")
So(column[19].(string), ShouldEqual, "mediumblob")
So(column[20].(string), ShouldEqual, "longtext")
So(column[21].(string), ShouldEqual, "longblob")
So(column[22].(string), ShouldEqual, "val2")
So(column[23].(string), ShouldEqual, "a,b")
So(column[24].(time.Time).Format("2006-01-02T00:00:00Z"), ShouldEqual, time.Now().Format("2006-01-02T00:00:00Z"))
So(column[25].(float64), ShouldEqual, 1514764861)
So(column[26], ShouldEqual, nil)
So(column[27], ShouldEqual, nil)
So(column[28], ShouldEqual, "")
So(column[29], ShouldEqual, nil)
Convey("Query with Table format should map MySQL column types to Go types", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": "SELECT * FROM mysql_types",
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
column := queryResult.Tables[0].Rows[0]
So(*column[0].(*int8), ShouldEqual, 1)
So(column[1].(string), ShouldEqual, "abc")
So(column[2].(string), ShouldEqual, "def")
So(*column[3].(*int32), ShouldEqual, 1)
So(*column[4].(*int16), ShouldEqual, 10)
So(*column[5].(*int64), ShouldEqual, 100)
So(*column[6].(*int32), ShouldEqual, 1420070400)
So(column[7].(float64), ShouldEqual, 1.11)
So(column[8].(float64), ShouldEqual, 2.22)
So(*column[9].(*float32), ShouldEqual, 3.33)
_, offset := time.Now().Zone()
So(column[10].(time.Time), ShouldHappenWithin, time.Duration(10*time.Second), time.Now().Add(time.Duration(offset)*time.Second))
So(column[11].(time.Time), ShouldHappenWithin, time.Duration(10*time.Second), time.Now().Add(time.Duration(offset)*time.Second))
So(column[12].(string), ShouldEqual, "11:11:11")
So(column[13].(int64), ShouldEqual, 2018)
So(*column[14].(*[]byte), ShouldHaveSameTypeAs, []byte{1})
So(column[15].(string), ShouldEqual, "tinytext")
So(column[16].(string), ShouldEqual, "tinyblob")
So(column[17].(string), ShouldEqual, "text")
So(column[18].(string), ShouldEqual, "blob")
So(column[19].(string), ShouldEqual, "mediumtext")
So(column[20].(string), ShouldEqual, "mediumblob")
So(column[21].(string), ShouldEqual, "longtext")
So(column[22].(string), ShouldEqual, "longblob")
So(column[23].(string), ShouldEqual, "val2")
So(column[24].(string), ShouldEqual, "a,b")
So(column[25].(time.Time).Format("2006-01-02T00:00:00Z"), ShouldEqual, time.Now().Format("2006-01-02T00:00:00Z"))
So(column[26].(float64), ShouldEqual, float64(1514764861000))
So(column[27], ShouldEqual, nil)
So(column[28], ShouldEqual, nil)
So(column[29], ShouldEqual, "")
So(column[30], ShouldEqual, nil)
})
})
Convey("Given a table with metrics that lacks data for some series ", func() {
type metric struct {
Time time.Time
Value int64
}
if exist, err := sess.IsTableExist(metric{}); err != nil || exist {
So(err, ShouldBeNil)
sess.DropTable(metric{})
}
err := sess.CreateTable(metric{})
So(err, ShouldBeNil)
series := []*metric{}
firstRange := genTimeRangeByInterval(fromStart, 10*time.Minute, 10*time.Second)
secondRange := genTimeRangeByInterval(fromStart.Add(20*time.Minute), 10*time.Minute, 10*time.Second)
for _, t := range firstRange {
series = append(series, &metric{
Time: t,
Value: 15,
})
}
for _, t := range secondRange {
series = append(series, &metric{
Time: t,
Value: 20,
})
}
for _, s := range series {
_, err = sess.Insert(s)
So(err, ShouldBeNil)
}
Convey("When doing a metric query using timeGroup", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": "SELECT $__timeGroup(time, '5m') as time_sec, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1",
"format": "time_series",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
points := queryResult.Series[0].Points
So(len(points), ShouldEqual, 6)
dt := fromStart
for i := 0; i < 3; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
So(aValue, ShouldEqual, 15)
So(aTime, ShouldEqual, dt)
dt = dt.Add(5 * time.Minute)
}
// adjust for 5 minute gap
dt = dt.Add(5 * time.Minute)
for i := 3; i < 6; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
So(aValue, ShouldEqual, 20)
So(aTime, ShouldEqual, dt)
dt = dt.Add(5 * time.Minute)
}
})
Convey("When doing a metric query using timeGroup with NULL fill enabled", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": "SELECT $__timeGroup(time, '5m', NULL) as time_sec, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1",
"format": "time_series",
}),
RefId: "A",
},
},
TimeRange: &tsdb.TimeRange{
From: fmt.Sprintf("%v", fromStart.Unix()*1000),
To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
points := queryResult.Series[0].Points
So(len(points), ShouldEqual, 7)
dt := fromStart
for i := 0; i < 3; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
So(aValue, ShouldEqual, 15)
So(aTime, ShouldEqual, dt)
dt = dt.Add(5 * time.Minute)
}
So(points[3][0].Valid, ShouldBeFalse)
// adjust for 5 minute gap
dt = dt.Add(5 * time.Minute)
for i := 4; i < 7; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
So(aValue, ShouldEqual, 20)
So(aTime, ShouldEqual, dt)
dt = dt.Add(5 * time.Minute)
}
})
Convey("When doing a metric query using timeGroup with float fill enabled", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": "SELECT $__timeGroup(time, '5m', 1.5) as time_sec, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1",
"format": "time_series",
}),
RefId: "A",
},
},
TimeRange: &tsdb.TimeRange{
From: fmt.Sprintf("%v", fromStart.Unix()*1000),
To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
points := queryResult.Series[0].Points
So(points[3][0].Float64, ShouldEqual, 1.5)
})
})
Convey("Given a table with metrics having multiple values and measurements", func() {
type metric_values struct {
Time time.Time
Measurement string
ValueOne int64 `xorm:"integer 'valueOne'"`
ValueTwo int64 `xorm:"integer 'valueTwo'"`
}
if exist, err := sess.IsTableExist(metric_values{}); err != nil || exist {
So(err, ShouldBeNil)
sess.DropTable(metric_values{})
}
err := sess.CreateTable(metric_values{})
So(err, ShouldBeNil)
rand.Seed(time.Now().Unix())
rnd := func(min, max int64) int64 {
return rand.Int63n(max-min) + min
}
series := []*metric_values{}
for _, t := range genTimeRangeByInterval(fromStart.Add(-30*time.Minute), 90*time.Minute, 5*time.Minute) {
series = append(series, &metric_values{
Time: t,
Measurement: "Metric A",
ValueOne: rnd(0, 100),
ValueTwo: rnd(0, 100),
})
series = append(series, &metric_values{
Time: t,
Measurement: "Metric B",
ValueOne: rnd(0, 100),
ValueTwo: rnd(0, 100),
})
}
for _, s := range series {
_, err := sess.Insert(s)
So(err, ShouldBeNil)
}
Convey("When doing a metric query grouping by time and select metric column should return correct series", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": `SELECT $__time(time), CONCAT(measurement, ' - value one') as metric, valueOne FROM metric_values ORDER BY 1`,
"format": "time_series",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Series), ShouldEqual, 2)
So(queryResult.Series[0].Name, ShouldEqual, "Metric B - value one")
So(queryResult.Series[1].Name, ShouldEqual, "Metric A - value one")
})
Convey("When doing a metric query grouping by time should return correct series", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": `SELECT $__time(time), valueOne, valueTwo FROM metric_values ORDER BY 1`,
"format": "time_series",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Series), ShouldEqual, 2)
So(queryResult.Series[0].Name, ShouldEqual, "valueOne")
So(queryResult.Series[1].Name, ShouldEqual, "valueTwo")
})
})
Convey("Given a table with event data", func() {
type event struct {
TimeSec int64
Description string
Tags string
}
if exist, err := sess.IsTableExist(event{}); err != nil || exist {
So(err, ShouldBeNil)
sess.DropTable(event{})
}
err := sess.CreateTable(event{})
So(err, ShouldBeNil)
events := []*event{}
for _, t := range genTimeRangeByInterval(fromStart.Add(-20*time.Minute), 60*time.Minute, 25*time.Minute) {
events = append(events, &event{
TimeSec: t.Unix(),
Description: "Someone deployed something",
Tags: "deploy",
})
events = append(events, &event{
TimeSec: t.Add(5 * time.Minute).Unix(),
Description: "New support ticket registered",
Tags: "ticket",
})
}
for _, e := range events {
_, err = sess.Insert(e)
So(err, ShouldBeNil)
}
Convey("When doing an annotation query of deploy events should return expected result", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": `SELECT time_sec, description as text, tags FROM event WHERE $__unixEpochFilter(time_sec) AND tags='deploy' ORDER BY 1 ASC`,
"format": "table",
}),
RefId: "Deploys",
},
},
TimeRange: &tsdb.TimeRange{
From: fmt.Sprintf("%v", fromStart.Add(-20*time.Minute).Unix()*1000),
To: fmt.Sprintf("%v", fromStart.Add(40*time.Minute).Unix()*1000),
},
}
resp, err := endpoint.Query(nil, nil, query)
queryResult := resp.Results["Deploys"]
So(err, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 3)
})
Convey("When doing an annotation query of ticket events should return expected result", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": `SELECT time_sec, description as text, tags FROM event WHERE $__unixEpochFilter(time_sec) AND tags='ticket' ORDER BY 1 ASC`,
"format": "table",
}),
RefId: "Tickets",
},
},
TimeRange: &tsdb.TimeRange{
From: fmt.Sprintf("%v", fromStart.Add(-20*time.Minute).Unix()*1000),
To: fmt.Sprintf("%v", fromStart.Add(40*time.Minute).Unix()*1000),
},
}
resp, err := endpoint.Query(nil, nil, query)
queryResult := resp.Results["Tickets"]
So(err, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 3)
})
Convey("When doing an annotation query with a time column in datetime format", func() {
dt := time.Date(2018, 3, 14, 21, 20, 6, 0, time.UTC)
dtFormat := "2006-01-02 15:04:05.999999999"
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": fmt.Sprintf(`SELECT
CAST('%s' as datetime) as time_sec,
'message' as text,
'tag1,tag2' as tags
`, dt.Format(dtFormat)),
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
So(columns[0].(float64), ShouldEqual, float64(dt.Unix()*1000))
})
Convey("When doing an annotation query with a time column in epoch second format should return ms", func() {
dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC)
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": fmt.Sprintf(`SELECT
%d as time_sec,
'message' as text,
'tag1,tag2' as tags
`, dt.Unix()),
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
So(columns[0].(int64), ShouldEqual, dt.Unix()*1000)
})
Convey("When doing an annotation query with a time column in epoch second format (signed integer) should return ms", func() {
dt := time.Date(2018, 3, 14, 21, 20, 6, 0, time.Local)
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": fmt.Sprintf(`SELECT
CAST('%d' as signed integer) as time_sec,
'message' as text,
'tag1,tag2' as tags
`, dt.Unix()),
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
So(columns[0].(int64), ShouldEqual, int64(dt.Unix()*1000))
})
Convey("When doing an annotation query with a time column in epoch millisecond format should return ms", func() {
dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC)
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": fmt.Sprintf(`SELECT
%d as time_sec,
'message' as text,
'tag1,tag2' as tags
`, dt.Unix()*1000),
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
So(columns[0].(int64), ShouldEqual, dt.Unix()*1000)
})
Convey("When doing an annotation query with a time column holding a unsigned integer null value should return nil", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": `SELECT
cast(null as unsigned integer) as time_sec,
'message' as text,
'tag1,tag2' as tags
`,
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
So(columns[0], ShouldBeNil)
})
Convey("When doing an annotation query with a time column holding a DATETIME null value should return nil", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": `SELECT
cast(null as DATETIME) as time_sec,
'message' as text,
'tag1,tag2' as tags
`,
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
So(columns[0], ShouldBeNil)
})
})
})
}
func InitMySQLTestDB(t *testing.T) *xorm.Engine {
x, err := xorm.NewEngine(sqlutil.TestDB_Mysql.DriverName, sqlutil.TestDB_Mysql.ConnStr+"&parseTime=true")
x.DatabaseTZ = time.Local
x.TZLocation = time.Local
// x.ShowSQL()
@@ -140,7 +657,18 @@ func InitMySQLTestDB(t *testing.T) *xorm.Engine {
t.Fatalf("Failed to init mysql db %v", err)
}
sqlutil.CleanDB(x)
return x
}
func genTimeRangeByInterval(from time.Time, duration time.Duration, interval time.Duration) []time.Time {
durationSec := int64(duration.Seconds())
intervalSec := int64(interval.Seconds())
timeRange := []time.Time{}
for i := int64(0); i < durationSec; i += intervalSec {
timeRange = append(timeRange, from)
from = from.Add(time.Duration(int64(time.Second) * intervalSec))
}
return timeRange
}
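
As a usage sketch (self-contained, mirroring the helper above): a 10 minute duration at a 10 second interval yields 60 timestamps with the end point excluded, which is what the metric fixtures in these tests rely on.

package main

import (
	"fmt"
	"time"
)

// genTimeRangeByInterval returns one timestamp per interval, starting at
// from and covering duration (the end point itself is excluded).
func genTimeRangeByInterval(from time.Time, duration, interval time.Duration) []time.Time {
	durationSec := int64(duration.Seconds())
	intervalSec := int64(interval.Seconds())
	timeRange := []time.Time{}
	for i := int64(0); i < durationSec; i += intervalSec {
		timeRange = append(timeRange, from)
		from = from.Add(time.Duration(int64(time.Second) * intervalSec))
	}
	return timeRange
}

func main() {
	fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC)
	rng := genTimeRangeByInterval(fromStart, 10*time.Minute, 10*time.Second)
	fmt.Println(len(rng))        // 60
	fmt.Println(rng[len(rng)-1]) // 2018-03-15 13:09:50 +0000 UTC
}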

View File

@@ -63,7 +63,6 @@ func (e *PostgresQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSo
}
func (e PostgresQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, tsdbQuery *tsdb.TsdbQuery) error {
columnNames, err := rows.Columns()
if err != nil {
return err
@@ -100,14 +99,10 @@ func (e PostgresQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Ro
return err
}
// convert column named time to unix timestamp to make
// native datetime postgres types work in annotation queries
if timeIndex != -1 {
switch value := values[timeIndex].(type) {
case time.Time:
values[timeIndex] = float64(value.UnixNano() / 1e9)
}
}
// converts column named time to unix timestamp in milliseconds to make
// native postgres datetime types and epoch dates work in
// annotation and table queries.
tsdb.ConvertSqlTimeColumnToEpochMs(values, timeIndex)
table.Rows = append(table.Rows, values)
}
@@ -118,7 +113,6 @@ func (e PostgresQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Ro
}
func (e PostgresQueryEndpoint) getTypedRowData(rows *core.Rows) (tsdb.RowValues, error) {
types, err := rows.ColumnTypes()
if err != nil {
return nil, err
@@ -209,7 +203,6 @@ func (e PostgresQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *co
fillValue.Float64 = query.Model.Get("fillValue").MustFloat64()
fillValue.Valid = true
}
}
for rows.Next() {

View File

@@ -1,6 +1,8 @@
package postgres
import (
"fmt"
"math/rand"
"testing"
"time"
@@ -14,7 +16,11 @@ import (
)
// To run this test, remove the Skip from SkipConvey
// and set up a PostgreSQL db named grafanatest and a user/password grafanatest/grafanatest
// Use the docker/blocks/postgres_tests/docker-compose.yaml to spin up a
// preconfigured Postgres server suitable for running these tests.
// There's also a dashboard.json in the same directory that you can import into Grafana
// once you've created a datasource for the test server/database.
func TestPostgres(t *testing.T) {
SkipConvey("PostgreSQL", t, func() {
x := InitPostgresTestDB(t)
@@ -30,88 +36,599 @@ func TestPostgres(t *testing.T) {
sess := x.NewSession()
defer sess.Close()
sql := `
CREATE TABLE postgres_types(
c00_smallint smallint,
c01_integer integer,
c02_bigint bigint,
fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
c03_real real,
c04_double double precision,
c05_decimal decimal(10,2),
c06_numeric numeric(10,2),
Convey("Given a table with different native data types", func() {
sql := `
DROP TABLE IF EXISTS postgres_types;
CREATE TABLE postgres_types(
c00_smallint smallint,
c01_integer integer,
c02_bigint bigint,
c07_char char(10),
c08_varchar varchar(10),
c09_text text,
c03_real real,
c04_double double precision,
c05_decimal decimal(10,2),
c06_numeric numeric(10,2),
c10_timestamp timestamp without time zone,
c11_timestamptz timestamp with time zone,
c12_date date,
c13_time time without time zone,
c14_timetz time with time zone,
c15_interval interval
);
`
_, err := sess.Exec(sql)
So(err, ShouldBeNil)
c07_char char(10),
c08_varchar varchar(10),
c09_text text,
sql = `
INSERT INTO postgres_types VALUES(
1,2,3,
4.5,6.7,1.1,1.2,
'char10','varchar10','text',
c10_timestamp timestamp without time zone,
c11_timestamptz timestamp with time zone,
c12_date date,
c13_time time without time zone,
c14_timetz time with time zone,
now(),now(),now(),now(),now(),'15m'::interval
);
`
_, err = sess.Exec(sql)
So(err, ShouldBeNil)
Convey("Query with Table format should map PostgreSQL column types to Go types", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": "SELECT * FROM postgres_types",
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
queryResult := resp.Results["A"]
c15_interval interval
);
`
_, err := sess.Exec(sql)
So(err, ShouldBeNil)
column := queryResult.Tables[0].Rows[0]
So(column[0].(int64), ShouldEqual, 1)
So(column[1].(int64), ShouldEqual, 2)
So(column[2].(int64), ShouldEqual, 3)
So(column[3].(float64), ShouldEqual, 4.5)
So(column[4].(float64), ShouldEqual, 6.7)
// libpq doesn't properly convert decimal, numeric and char to Go types but returns []uint8 instead
// So(column[5].(float64), ShouldEqual, 1.1)
// So(column[6].(float64), ShouldEqual, 1.2)
// So(column[7].(string), ShouldEqual, "char")
So(column[8].(string), ShouldEqual, "varchar10")
So(column[9].(string), ShouldEqual, "text")
sql = `
INSERT INTO postgres_types VALUES(
1,2,3,
4.5,6.7,1.1,1.2,
'char10','varchar10','text',
So(column[10].(time.Time), ShouldHaveSameTypeAs, time.Now())
So(column[11].(time.Time), ShouldHaveSameTypeAs, time.Now())
So(column[12].(time.Time), ShouldHaveSameTypeAs, time.Now())
So(column[13].(time.Time), ShouldHaveSameTypeAs, time.Now())
So(column[14].(time.Time), ShouldHaveSameTypeAs, time.Now())
now(),now(),now(),now(),now(),'15m'::interval
);
`
_, err = sess.Exec(sql)
So(err, ShouldBeNil)
// libpq doesn't properly convert interval to Go types but returns []uint8 instead
// So(column[15].(time.Time), ShouldHaveSameTypeAs, time.Now())
Convey("When doing a table query should map Postgres column types to Go types", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": "SELECT * FROM postgres_types",
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
column := queryResult.Tables[0].Rows[0]
So(column[0].(int64), ShouldEqual, 1)
So(column[1].(int64), ShouldEqual, 2)
So(column[2].(int64), ShouldEqual, 3)
So(column[3].(float64), ShouldEqual, 4.5)
So(column[4].(float64), ShouldEqual, 6.7)
So(column[5].(float64), ShouldEqual, 1.1)
So(column[6].(float64), ShouldEqual, 1.2)
So(column[7].(string), ShouldEqual, "char10 ")
So(column[8].(string), ShouldEqual, "varchar10")
So(column[9].(string), ShouldEqual, "text")
So(column[10].(time.Time), ShouldHaveSameTypeAs, time.Now())
So(column[11].(time.Time), ShouldHaveSameTypeAs, time.Now())
So(column[12].(time.Time), ShouldHaveSameTypeAs, time.Now())
So(column[13].(time.Time), ShouldHaveSameTypeAs, time.Now())
So(column[14].(time.Time), ShouldHaveSameTypeAs, time.Now())
So(column[15].(string), ShouldEqual, "00:15:00")
})
})
Convey("Given a table with metrics that lacks data for some series ", func() {
sql := `
DROP TABLE IF EXISTS metric;
CREATE TABLE metric (
time timestamp,
value integer
)
`
_, err := sess.Exec(sql)
So(err, ShouldBeNil)
type metric struct {
Time time.Time
Value int64
}
series := []*metric{}
firstRange := genTimeRangeByInterval(fromStart, 10*time.Minute, 10*time.Second)
secondRange := genTimeRangeByInterval(fromStart.Add(20*time.Minute), 10*time.Minute, 10*time.Second)
for _, t := range firstRange {
series = append(series, &metric{
Time: t,
Value: 15,
})
}
for _, t := range secondRange {
series = append(series, &metric{
Time: t,
Value: 20,
})
}
for _, s := range series {
_, err = sess.Insert(s)
So(err, ShouldBeNil)
}
Convey("When doing a metric query using timeGroup", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": "SELECT $__timeGroup(time, '5m'), avg(value) as value FROM metric GROUP BY 1 ORDER BY 1",
"format": "time_series",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
points := queryResult.Series[0].Points
So(len(points), ShouldEqual, 6)
dt := fromStart
for i := 0; i < 3; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
So(aValue, ShouldEqual, 15)
So(aTime, ShouldEqual, dt)
dt = dt.Add(5 * time.Minute)
}
// adjust for 5 minute gap
dt = dt.Add(5 * time.Minute)
for i := 3; i < 6; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
So(aValue, ShouldEqual, 20)
So(aTime, ShouldEqual, dt)
dt = dt.Add(5 * time.Minute)
}
})
Convey("When doing a metric query using timeGroup with NULL fill enabled", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": "SELECT $__timeGroup(time, '5m', NULL), avg(value) as value FROM metric GROUP BY 1 ORDER BY 1",
"format": "time_series",
}),
RefId: "A",
},
},
TimeRange: &tsdb.TimeRange{
From: fmt.Sprintf("%v", fromStart.Unix()*1000),
To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
points := queryResult.Series[0].Points
So(len(points), ShouldEqual, 7)
dt := fromStart
for i := 0; i < 3; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
So(aValue, ShouldEqual, 15)
So(aTime, ShouldEqual, dt)
dt = dt.Add(5 * time.Minute)
}
So(points[3][0].Valid, ShouldBeFalse)
// adjust for 5 minute gap
dt = dt.Add(5 * time.Minute)
for i := 4; i < 7; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
So(aValue, ShouldEqual, 20)
So(aTime, ShouldEqual, dt)
dt = dt.Add(5 * time.Minute)
}
})
Convey("When doing a metric query using timeGroup with float fill enabled", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": "SELECT $__timeGroup(time, '5m', 1.5), avg(value) as value FROM metric GROUP BY 1 ORDER BY 1",
"format": "time_series",
}),
RefId: "A",
},
},
TimeRange: &tsdb.TimeRange{
From: fmt.Sprintf("%v", fromStart.Unix()*1000),
To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
points := queryResult.Series[0].Points
So(points[3][0].Float64, ShouldEqual, 1.5)
})
})
Convey("Given a table with metrics having multiple values and measurements", func() {
type metric_values struct {
Time time.Time
Measurement string
ValueOne int64 `xorm:"integer 'valueOne'"`
ValueTwo int64 `xorm:"integer 'valueTwo'"`
}
if exist, err := sess.IsTableExist(metric_values{}); err != nil || exist {
So(err, ShouldBeNil)
sess.DropTable(metric_values{})
}
err := sess.CreateTable(metric_values{})
So(err, ShouldBeNil)
rand.Seed(time.Now().Unix())
rnd := func(min, max int64) int64 {
return rand.Int63n(max-min) + min
}
series := []*metric_values{}
for _, t := range genTimeRangeByInterval(fromStart.Add(-30*time.Minute), 90*time.Minute, 5*time.Minute) {
series = append(series, &metric_values{
Time: t,
Measurement: "Metric A",
ValueOne: rnd(0, 100),
ValueTwo: rnd(0, 100),
})
series = append(series, &metric_values{
Time: t,
Measurement: "Metric B",
ValueOne: rnd(0, 100),
ValueTwo: rnd(0, 100),
})
}
for _, s := range series {
_, err := sess.Insert(s)
So(err, ShouldBeNil)
}
Convey("When doing a metric query grouping by time and select metric column should return correct series", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": `SELECT $__timeEpoch(time), measurement || ' - value one' as metric, "valueOne" FROM metric_values ORDER BY 1`,
"format": "time_series",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Series), ShouldEqual, 2)
So(queryResult.Series[0].Name, ShouldEqual, "Metric A - value one")
So(queryResult.Series[1].Name, ShouldEqual, "Metric B - value one")
})
Convey("When doing a metric query grouping by time should return correct series", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": `SELECT $__timeEpoch(time), "valueOne", "valueTwo" FROM metric_values ORDER BY 1`,
"format": "time_series",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Series), ShouldEqual, 2)
So(queryResult.Series[0].Name, ShouldEqual, "valueOne")
So(queryResult.Series[1].Name, ShouldEqual, "valueTwo")
})
})
Convey("Given a table with event data", func() {
type event struct {
TimeSec int64
Description string
Tags string
}
if exist, err := sess.IsTableExist(event{}); err != nil || exist {
So(err, ShouldBeNil)
sess.DropTable(event{})
}
err := sess.CreateTable(event{})
So(err, ShouldBeNil)
events := []*event{}
for _, t := range genTimeRangeByInterval(fromStart.Add(-20*time.Minute), 60*time.Minute, 25*time.Minute) {
events = append(events, &event{
TimeSec: t.Unix(),
Description: "Someone deployed something",
Tags: "deploy",
})
events = append(events, &event{
TimeSec: t.Add(5 * time.Minute).Unix(),
Description: "New support ticket registered",
Tags: "ticket",
})
}
for _, e := range events {
_, err = sess.Insert(e)
So(err, ShouldBeNil)
}
Convey("When doing an annotation query of deploy events should return expected result", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": `SELECT "time_sec" as time, description as text, tags FROM event WHERE $__unixEpochFilter(time_sec) AND tags='deploy' ORDER BY 1 ASC`,
"format": "table",
}),
RefId: "Deploys",
},
},
TimeRange: &tsdb.TimeRange{
From: fmt.Sprintf("%v", fromStart.Add(-20*time.Minute).Unix()*1000),
To: fmt.Sprintf("%v", fromStart.Add(40*time.Minute).Unix()*1000),
},
}
resp, err := endpoint.Query(nil, nil, query)
queryResult := resp.Results["Deploys"]
So(err, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 3)
})
Convey("When doing an annotation query of ticket events should return expected result", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": `SELECT "time_sec" as time, description as text, tags FROM event WHERE $__unixEpochFilter(time_sec) AND tags='ticket' ORDER BY 1 ASC`,
"format": "table",
}),
RefId: "Tickets",
},
},
TimeRange: &tsdb.TimeRange{
From: fmt.Sprintf("%v", fromStart.Add(-20*time.Minute).Unix()*1000),
To: fmt.Sprintf("%v", fromStart.Add(40*time.Minute).Unix()*1000),
},
}
resp, err := endpoint.Query(nil, nil, query)
queryResult := resp.Results["Tickets"]
So(err, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 3)
})
Convey("When doing an annotation query with a time column in datetime format", func() {
dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC)
dtFormat := "2006-01-02 15:04:05.999999999"
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": fmt.Sprintf(`SELECT
CAST('%s' AS TIMESTAMP) as time,
'message' as text,
'tag1,tag2' as tags
`, dt.Format(dtFormat)),
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
So(columns[0].(float64), ShouldEqual, float64(dt.Unix()*1000))
})
Convey("When doing an annotation query with a time column in epoch second format should return ms", func() {
dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC)
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": fmt.Sprintf(`SELECT
%d as time,
'message' as text,
'tag1,tag2' as tags
`, dt.Unix()),
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
So(columns[0].(int64), ShouldEqual, int64(dt.Unix()*1000))
})
Convey("When doing an annotation query with a time column in epoch second format (int) should return ms", func() {
dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC)
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": fmt.Sprintf(`SELECT
cast(%d as bigint) as time,
'message' as text,
'tag1,tag2' as tags
`, dt.Unix()),
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
So(columns[0].(int64), ShouldEqual, int64(dt.Unix()*1000))
})
Convey("When doing an annotation query with a time column in epoch millisecond format should return ms", func() {
dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC)
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": fmt.Sprintf(`SELECT
%d as time,
'message' as text,
'tag1,tag2' as tags
`, dt.Unix()*1000),
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
So(columns[0].(int64), ShouldEqual, dt.Unix()*1000)
})
Convey("When doing an annotation query with a time column holding a bigint null value should return nil", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": `SELECT
cast(null as bigint) as time,
'message' as text,
'tag1,tag2' as tags
`,
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
So(columns[0], ShouldBeNil)
})
Convey("When doing an annotation query with a time column holding a timestamp null value should return nil", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": `SELECT
cast(null as timestamp) as time,
'message' as text,
'tag1,tag2' as tags
`,
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
So(columns[0], ShouldBeNil)
})
})
})
}
func InitPostgresTestDB(t *testing.T) *xorm.Engine {
x, err := xorm.NewEngine(sqlutil.TestDB_Postgres.DriverName, sqlutil.TestDB_Postgres.ConnStr)
x.DatabaseTZ = time.UTC
x.TZLocation = time.UTC
// x.ShowSQL()
@@ -119,7 +636,18 @@ func InitPostgresTestDB(t *testing.T) *xorm.Engine {
t.Fatalf("Failed to init postgres db %v", err)
}
sqlutil.CleanDB(x)
return x
}
func genTimeRangeByInterval(from time.Time, duration time.Duration, interval time.Duration) []time.Time {
durationSec := int64(duration.Seconds())
intervalSec := int64(interval.Seconds())
timeRange := []time.Time{}
for i := int64(0); i < durationSec; i += intervalSec {
timeRange = append(timeRange, from)
from = from.Add(time.Duration(int64(time.Second) * intervalSec))
}
return timeRange
}

View File

@@ -3,6 +3,7 @@ package tsdb
import (
"context"
"sync"
"time"
"github.com/go-xorm/core"
"github.com/go-xorm/xorm"
@@ -133,3 +134,30 @@ func (e *DefaultSqlEngine) Query(
return result, nil
}
// ConvertSqlTimeColumnToEpochMs converts a column named time to a unix timestamp in milliseconds
// to make native datetime types and epoch dates work in annotation and table queries.
func ConvertSqlTimeColumnToEpochMs(values RowValues, timeIndex int) {
if timeIndex >= 0 {
switch value := values[timeIndex].(type) {
case time.Time:
values[timeIndex] = EpochPrecisionToMs(float64(value.Unix()))
case *time.Time:
if value != nil {
values[timeIndex] = EpochPrecisionToMs(float64((*value).Unix()))
}
case int64:
values[timeIndex] = int64(EpochPrecisionToMs(float64(value)))
case *int64:
if value != nil {
values[timeIndex] = int64(EpochPrecisionToMs(float64(*value)))
}
case float64:
values[timeIndex] = EpochPrecisionToMs(value)
case *float64:
if value != nil {
values[timeIndex] = EpochPrecisionToMs(*value)
}
}
}
}

View File

@@ -0,0 +1,46 @@
package tsdb
import (
"testing"
"time"
. "github.com/smartystreets/goconvey/convey"
)
func TestSqlEngine(t *testing.T) {
Convey("SqlEngine", t, func() {
Convey("Given row values with time columns when converting them", func() {
dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC)
fixtures := make([]interface{}, 8)
fixtures[0] = dt
fixtures[1] = dt.Unix() * 1000
fixtures[2] = dt.Unix()
fixtures[3] = float64(dt.Unix() * 1000)
fixtures[4] = float64(dt.Unix())
var nilDt *time.Time
var nilInt64 *int64
var nilFloat64 *float64
fixtures[5] = nilDt
fixtures[6] = nilInt64
fixtures[7] = nilFloat64
for i := range fixtures {
ConvertSqlTimeColumnToEpochMs(fixtures, i)
}
Convey("Should convert sql time columns to epoch time in ms ", func() {
expected := float64(dt.Unix() * 1000)
So(fixtures[0].(float64), ShouldEqual, expected)
So(fixtures[1].(int64), ShouldEqual, expected)
So(fixtures[2].(int64), ShouldEqual, expected)
So(fixtures[3].(float64), ShouldEqual, expected)
So(fixtures[4].(float64), ShouldEqual, expected)
So(fixtures[5], ShouldBeNil)
So(fixtures[6], ShouldBeNil)
So(fixtures[7], ShouldBeNil)
})
})
})
}

View File

@@ -88,3 +88,13 @@ func (tr *TimeRange) ParseTo() (time.Time, error) {
return time.Time{}, fmt.Errorf("cannot parse to value %s", tr.To)
}
// EpochPrecisionToMs converts epoch precision to millisecond, if needed.
// Only seconds to milliseconds supported right now
func EpochPrecisionToMs(value float64) float64 {
if int64(value)/1e10 == 0 {
return float64(value * 1e3)
}
return float64(value)
}
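
A minimal sketch of the heuristic above: values whose integer part is below 1e10 are treated as second-precision epochs and scaled, anything larger is assumed to already be in milliseconds (1e10 seconds falls in the year 2286, so realistic second-precision epochs stay below the threshold).

package main

import "fmt"

// EpochPrecisionToMs mirrors the helper above: values below 1e10 are
// assumed to be epoch seconds and scaled to milliseconds; larger values
// are assumed to already be milliseconds and pass through unchanged.
func EpochPrecisionToMs(value float64) float64 {
	if int64(value)/1e10 == 0 {
		return value * 1e3
	}
	return value
}

func main() {
	fmt.Println(EpochPrecisionToMs(1492750877))    // seconds -> 1.492750877e+12
	fmt.Println(EpochPrecisionToMs(1492750877000)) // already ms -> 1.492750877e+12
}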

View File

@@ -20,23 +20,22 @@
<pre class="gf-form-pre alert alert-info"><h6>Annotation Query Format</h6>
An annotation is an event that is overlaid on top of graphs. The query can have up to three columns per row; the <b>time</b> column is mandatory. Annotation rendering is expensive so it is important to limit the number of rows returned.
- column with alias: <b>time</b> for the annotation event time (in UTC). Use unix timestamp in seconds or any native date data type.
- column with alias: <b>time</b> for the annotation event time. Use epoch time or any native date data type.
- column with alias: <b>text</b> for the annotation text.
- column with alias: <b>tags</b> for annotation tags. This is a comma separated string of tags e.g. 'tag1,tag2'.
Macros:
- $__time(column) -&gt; column AS time
- $__utcTime(column) -&gt; DATEADD(second, DATEDIFF(second, GETDATE(), GETUTCDATE()), column) AS time
- $__timeEpoch(column) -&gt; DATEDIFF(second, {d '1970-01-01'}, DATEADD(second, DATEDIFF(second, GETDATE(), GETUTCDATE()), column) ) AS time
- $__timeFilter(column) -&gt; column &gt; DATEADD(s, 1492750877+DATEDIFF(second, GETUTCDATE(), GETDATE()), '1970-01-01') AND column &lt; DATEADD(s, 1492750877+DATEDIFF(second, GETUTCDATE(), GETDATE()), '1970-01-01')
- $__unixEpochFilter(column) -&gt; column &gt; 1492750877 AND column &lt; 1492750877
- $__timeEpoch(column) -&gt; DATEDIFF(second, '1970-01-01', column) AS time
- $__timeFilter(column) -&gt; column &gt;= DATEADD(s, 1492750877, '1970-01-01') AND column &lt;= DATEADD(s, 1492751177, '1970-01-01')
- $__unixEpochFilter(column) -&gt; column &gt;= 1492750877 AND column &lt;= 1492750877
Or build your own conditionals using these macros which just return the values:
- $__timeFrom() -&gt; DATEADD(second, 1492750877+DATEDIFF(second, GETUTCDATE(), GETDATE()), '1970-01-01')
- $__timeTo() -&gt; DATEADD(second, 1492750877+DATEDIFF(second, GETUTCDATE(), GETDATE()), '1970-01-01')
- $__unixEpochFrom() -&gt; 1492750877
- $__unixEpochTo() -&gt; 1492750877
- $__timeFrom() -&gt; DATEADD(second, 1492750877, '1970-01-01')
- $__timeTo() -&gt; DATEADD(second, 1492750877, '1970-01-01')
- $__unixEpochFrom() -&gt; 1492750877
- $__unixEpochTo() -&gt; 1492750877
</pre>
</div>
</div>

View File

@@ -48,15 +48,22 @@ Table:
Macros:
- $__time(column) -&gt; column AS time
- $__utcTime(column) -&gt; DATEADD(second, DATEDIFF(second, GETDATE(), GETUTCDATE()), column) AS time
- $__timeEpoch(column) -&gt; DATEDIFF(second, {d '1970-01-01'}, DATEADD(second, DATEDIFF(second, GETDATE(), GETUTCDATE()), column) ) AS time
- $__timeFilter(column) -&gt; column &gt; DATEADD(s, 1492750877+DATEDIFF(second, GETUTCDATE(), GETDATE()), '1970-01-01') AND column &lt; DATEADD(s, 1492750877+DATEDIFF(second, GETUTCDATE(), GETDATE()), '1970-01-01')
- $__unixEpochFilter(column) -&gt; column &gt; 1492750877 AND column &lt; 1492750877
- $__timeGroup(column, '5m'[, fillvalue]) -&gt; cast(cast(DATEDIFF(second, {d '1970-01-01'}, DATEADD(second, DATEDIFF(second, GETDATE(), GETUTCDATE()), column))/300 as int)*300 as int). Providing a <i>fillValue</i> of <i>NULL</i> or floating value will automatically fill empty series in timerange with that value.
- $__timeEpoch(column) -&gt; DATEDIFF(second, '1970-01-01', column) AS time
- $__timeFilter(column) -&gt; column &gt;= DATEADD(s, 1492750877, '1970-01-01') AND column &lt;= DATEADD(s, 1492751177, '1970-01-01')
- $__unixEpochFilter(column) -&gt; column &gt;= 1492750877 AND column &lt;= 1492750877
- $__timeGroup(column, '5m'[, fillvalue]) -&gt; CAST(ROUND(DATEDIFF(second, '1970-01-01', column)/300.0, 0) as bigint)*300. Providing a <i>fillValue</i> of <i>NULL</i> or a float value will automatically fill empty series in the time range with that value.
Example of group by and order by with $__timeGroup:
SELECT
$__timeGroup(date_time_col, '1h') AS time,
sum(value) as value
FROM yourtable
GROUP BY $__timeGroup(date_time_col, '1h')
ORDER BY 1
Or build your own conditionals using these macros which just return the values:
- $__timeFrom() -&gt; DATEADD(second, 1492750877+DATEDIFF(second, GETUTCDATE(), GETDATE()), '1970-01-01')
- $__timeTo() -&gt; DATEADD(second, 1492750877+DATEDIFF(second, GETUTCDATE(), GETDATE()), '1970-01-01')
- $__timeFrom() -&gt; DATEADD(second, 1492750877, '1970-01-01')
- $__timeTo() -&gt; DATEADD(second, 1492750877, '1970-01-01')
- $__unixEpochFrom() -&gt; 1492750877
- $__unixEpochTo() -&gt; 1492750877
</pre>
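
The bucketing behind $__timeGroup is plain integer arithmetic on epoch seconds; a hypothetical standalone sketch (not Grafana code) of the '5m' case:

package main

import (
	"fmt"
	"time"
)

// timeGroup floors an epoch-seconds timestamp to the start of its
// interval-wide bucket, the same arithmetic the $__timeGroup macro
// expands to in SQL (e.g. CAST(.../300 AS bigint)*300 for '5m').
func timeGroup(epochSec int64, interval time.Duration) int64 {
	width := int64(interval.Seconds())
	return epochSec / width * width
}

func main() {
	t := time.Date(2018, 3, 15, 13, 7, 42, 0, time.UTC)
	bucket := timeGroup(t.Unix(), 5*time.Minute)
	fmt.Println(time.Unix(bucket, 0).UTC()) // 2018-03-15 13:05:00 +0000 UTC
}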

View File

@@ -128,7 +128,7 @@ export default class ResponseParser {
const row = table.rows[i];
list.push({
annotation: options.annotation,
time: Math.floor(row[timeColumnIndex]) * 1000,
time: Math.floor(row[timeColumnIndex]),
text: row[textColumnIndex],
tags: row[tagsColumnIndex] ? row[tagsColumnIndex].trim().split(/\s*,\s*/) : [],
});

View File

@@ -1,26 +1,21 @@
import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common';
import moment from 'moment';
import helpers from 'test/specs/helpers';
import { MssqlDatasource } from '../datasource';
import { TemplateSrvStub } from 'test/specs/helpers';
import { CustomVariable } from 'app/features/templating/custom_variable';
import q from 'q';
describe('MSSQLDatasource', function() {
var ctx = new helpers.ServiceTestContext();
var instanceSettings = { name: 'mssql' };
const ctx: any = {
backendSrv: {},
templateSrv: new TemplateSrvStub(),
};
beforeEach(angularMocks.module('grafana.core'));
beforeEach(angularMocks.module('grafana.services'));
beforeEach(ctx.providePhase(['backendSrv']));
beforeEach(function() {
ctx.$q = q;
ctx.instanceSettings = { name: 'mssql' };
beforeEach(
angularMocks.inject(function($q, $rootScope, $httpBackend, $injector) {
ctx.$q = $q;
ctx.$httpBackend = $httpBackend;
ctx.$rootScope = $rootScope;
ctx.ds = $injector.instantiate(MssqlDatasource, { instanceSettings: instanceSettings });
$httpBackend.when('GET', /\.html$/).respond('');
})
);
ctx.ds = new MssqlDatasource(ctx.instanceSettings, ctx.backendSrv, ctx.$q, ctx.templateSrv);
});
describe('When performing annotationQuery', function() {
let results;
@@ -46,9 +41,9 @@ describe('MSSQLDatasource', function() {
{
columns: [{ text: 'time' }, { text: 'text' }, { text: 'tags' }],
rows: [
[1432288355, 'some text', 'TagA,TagB'],
[1432288390, 'some text2', ' TagB , TagC'],
[1432288400, 'some text3'],
[1521545610656, 'some text', 'TagA,TagB'],
[1521546251185, 'some text2', ' TagB , TagC'],
[1521546501378, 'some text3'],
],
},
],
@@ -56,27 +51,27 @@ describe('MSSQLDatasource', function() {
},
};
beforeEach(function() {
ctx.backendSrv.datasourceRequest = function(options) {
beforeEach(() => {
ctx.backendSrv.datasourceRequest = options => {
return ctx.$q.when({ data: response, status: 200 });
};
ctx.ds.annotationQuery(options).then(function(data) {
return ctx.ds.annotationQuery(options).then(data => {
results = data;
});
ctx.$rootScope.$apply();
});
it('should return annotation list', function() {
expect(results.length).to.be(3);
expect(results.length).toBe(3);
expect(results[0].text).to.be('some text');
expect(results[0].tags[0]).to.be('TagA');
expect(results[0].tags[1]).to.be('TagB');
expect(results[0].text).toBe('some text');
expect(results[0].tags[0]).toBe('TagA');
expect(results[0].tags[1]).toBe('TagB');
expect(results[1].tags[0]).to.be('TagB');
expect(results[1].tags[1]).to.be('TagC');
expect(results[1].tags[0]).toBe('TagB');
expect(results[1].tags[1]).toBe('TagC');
expect(results[2].tags.length).to.be(0);
expect(results[2].tags.length).toBe(0);
});
});
@@ -104,16 +99,16 @@ describe('MSSQLDatasource', function() {
ctx.backendSrv.datasourceRequest = function(options) {
return ctx.$q.when({ data: response, status: 200 });
};
ctx.ds.metricFindQuery(query).then(function(data) {
return ctx.ds.metricFindQuery(query).then(function(data) {
results = data;
});
ctx.$rootScope.$apply();
});
it('should return list of all column values', function() {
expect(results.length).to.be(6);
expect(results[0].text).to.be('aTitle');
expect(results[5].text).to.be('some text3');
expect(results.length).toBe(6);
expect(results[0].text).toBe('aTitle');
expect(results[5].text).toBe('some text3');
});
});
@@ -141,18 +136,18 @@ describe('MSSQLDatasource', function() {
ctx.backendSrv.datasourceRequest = function(options) {
return ctx.$q.when({ data: response, status: 200 });
};
ctx.ds.metricFindQuery(query).then(function(data) {
return ctx.ds.metricFindQuery(query).then(function(data) {
results = data;
});
ctx.$rootScope.$apply();
});
it('should return list of as text, value', function() {
expect(results.length).to.be(3);
expect(results[0].text).to.be('aTitle');
expect(results[0].value).to.be('value1');
expect(results[2].text).to.be('aTitle3');
expect(results[2].value).to.be('value3');
expect(results.length).toBe(3);
expect(results[0].text).toBe('aTitle');
expect(results[0].value).toBe('value1');
expect(results[2].text).toBe('aTitle3');
expect(results[2].value).toBe('value3');
});
});
@@ -180,16 +175,16 @@ describe('MSSQLDatasource', function() {
ctx.backendSrv.datasourceRequest = function(options) {
return ctx.$q.when({ data: response, status: 200 });
};
ctx.ds.metricFindQuery(query).then(function(data) {
return ctx.ds.metricFindQuery(query).then(function(data) {
results = data;
});
ctx.$rootScope.$apply();
});
it('should return list of unique keys', function() {
expect(results.length).to.be(1);
expect(results[0].text).to.be('aTitle');
expect(results[0].value).to.be('same');
expect(results.length).toBe(1);
expect(results[0].text).toBe('aTitle');
expect(results[0].value).toBe('same');
});
});
@@ -200,33 +195,33 @@ describe('MSSQLDatasource', function() {
describe('and value is a string', () => {
it('should return an unquoted value', () => {
expect(ctx.ds.interpolateVariable('abc', ctx.variable)).to.eql('abc');
expect(ctx.ds.interpolateVariable('abc', ctx.variable)).toEqual('abc');
});
});
describe('and value is a number', () => {
it('should return an unquoted value', () => {
expect(ctx.ds.interpolateVariable(1000, ctx.variable)).to.eql(1000);
expect(ctx.ds.interpolateVariable(1000, ctx.variable)).toEqual(1000);
});
});
describe('and value is an array of strings', () => {
it('should return comma separated quoted values', () => {
expect(ctx.ds.interpolateVariable(['a', 'b', 'c'], ctx.variable)).to.eql("'a','b','c'");
expect(ctx.ds.interpolateVariable(['a', 'b', 'c'], ctx.variable)).toEqual("'a','b','c'");
});
});
describe('and variable allows multi-value and value is a string', () => {
it('should return a quoted value', () => {
ctx.variable.multi = true;
expect(ctx.ds.interpolateVariable('abc', ctx.variable)).to.eql("'abc'");
expect(ctx.ds.interpolateVariable('abc', ctx.variable)).toEqual("'abc'");
});
});
describe('and variable allows all and value is a string', () => {
it('should return a quoted value', () => {
ctx.variable.includeAll = true;
expect(ctx.ds.interpolateVariable('abc', ctx.variable)).to.eql("'abc'");
expect(ctx.ds.interpolateVariable('abc', ctx.variable)).toEqual("'abc'");
});
});
});

View File

@@ -18,15 +18,16 @@
<div class="gf-form" ng-show="ctrl.showHelp">
<pre class="gf-form-pre alert alert-info"><h6>Annotation Query Format</h6>
An annotation is an event that is overlaid on top of graphs. The query can have up to four columns per row; the time_sec column is mandatory. Annotation rendering is expensive so it is important to limit the number of rows returned.
An annotation is an event that is overlaid on top of graphs. The query can have up to three columns per row; the <i>time</i> or <i>time_sec</i> column is mandatory. Annotation rendering is expensive so it is important to limit the number of rows returned.
- column with alias: <b>time_sec</b> for the annotation event. Format is UTC in seconds, use UNIX_TIMESTAMP(column)
- column with alias: <b>time</b> or <i>time_sec</i> for the annotation event time. Use epoch time or any native date data type.
- column with alias: <b>text</b> for the annotation text
- column with alias: <b>tags</b> for annotation tags. This is a comma separated string of tags e.g. 'tag1,tag2'
Macros:
- $__time(column) -&gt; UNIX_TIMESTAMP(column) as time_sec
- $__time(column) -&gt; UNIX_TIMESTAMP(column) as time (or as time_sec)
- $__timeEpoch(column) -&gt; UNIX_TIMESTAMP(column) as time (or as time_sec)
- $__timeFilter(column) -&gt; UNIX_TIMESTAMP(time_date_time) &gt; 1492750877 AND UNIX_TIMESTAMP(time_date_time) &lt; 1492750877
- $__unixEpochFilter(column) -&gt; time_unix_epoch &gt; 1492750877 AND time_unix_epoch &lt; 1492750877

View File

@@ -38,15 +38,16 @@
<div class="gf-form" ng-show="ctrl.showHelp">
<pre class="gf-form-pre alert alert-info">Time series:
- return column named time_sec (UTC in seconds), use UNIX_TIMESTAMP(column)
- return column named value for the time point value
- return column named metric to represent the series name
- return column named time or time_sec (in UTC), as a unix timestamp or any SQL native date data type. You can use the macros below.
- return column(s) with numeric datatype as values
- (Optional: return column named <i>metric</i> to represent the series name. If no column named metric is found, the column name of the value column is used as the series name)
Table:
- return any set of columns
Macros:
- $__time(column) -&gt; UNIX_TIMESTAMP(column) as time_sec
- $__timeEpoch(column) -&gt; UNIX_TIMESTAMP(column) as time_sec
- $__timeFilter(column) -&gt; UNIX_TIMESTAMP(time_date_time) &ge; 1492750877 AND UNIX_TIMESTAMP(time_date_time) &le; 1492750877
- $__unixEpochFilter(column) -&gt; time_unix_epoch &gt; 1492750877 AND time_unix_epoch &lt; 1492750877
- $__timeGroup(column,'5m') -&gt; cast(cast(UNIX_TIMESTAMP(column)/(300) as signed)*300 as signed)

View File

@@ -113,7 +113,7 @@ export default class ResponseParser {
let tagsColumnIndex = -1;
for (let i = 0; i < table.columns.length; i++) {
if (table.columns[i].text === 'time_sec') {
if (table.columns[i].text === 'time_sec' || table.columns[i].text === 'time') {
timeColumnIndex = i;
} else if (table.columns[i].text === 'title') {
return this.$q.reject({
@@ -137,7 +137,7 @@ export default class ResponseParser {
const row = table.rows[i];
list.push({
annotation: options.annotation,
time: Math.floor(row[timeColumnIndex]) * 1000,
time: Math.floor(row[timeColumnIndex]),
text: row[textColumnIndex] ? row[textColumnIndex].toString() : '',
tags: row[tagsColumnIndex] ? row[tagsColumnIndex].trim().split(/\s*,\s*/) : [],
});

View File

@@ -18,15 +18,16 @@
<div class="gf-form" ng-show="ctrl.showHelp">
<pre class="gf-form-pre alert alert-info"><h6>Annotation Query Format</h6>
An annotation is an event that is overlaid on top of graphs. The query can have up to four columns per row; the time column is mandatory. Annotation rendering is expensive so it is important to limit the number of rows returned.
An annotation is an event that is overlaid on top of graphs. The query can have up to three columns per row; the time column is mandatory. Annotation rendering is expensive so it is important to limit the number of rows returned.
- column with alias: <b>time</b> for the annotation event. Format is UTC in seconds, use extract(epoch from column) as "time"
- column with alias: <b>time</b> for the annotation event time. Use epoch time or any native date data type.
- column with alias: <b>text</b> for the annotation text
- column with alias: <b>tags</b> for annotation tags. This is a comma separated string of tags e.g. 'tag1,tag2'
Macros:
- $__time(column) -&gt; column as "time"
- $__timeEpoch(column) -&gt; extract(epoch from column) as "time"
- $__timeFilter(column) -&gt; column &ge; to_timestamp(1492750877) AND column &le; to_timestamp(1492750877)
- $__unixEpochFilter(column) -&gt; column &gt; 1492750877 AND column &lt; 1492750877

View File

@@ -134,7 +134,7 @@ export default class ResponseParser {
const row = table.rows[i];
list.push({
annotation: options.annotation,
time: Math.floor(row[timeColumnIndex]) * 1000,
time: Math.floor(row[timeColumnIndex]),
title: row[titleColumnIndex],
text: row[textColumnIndex],
tags: row[tagsColumnIndex] ? row[tagsColumnIndex].trim().split(/\s*,\s*/) : [],