Mirror of https://github.com/grafana/grafana.git (synced 2025-02-25 18:55:37 -06:00)
tslint: changing vars -> const (#13034)

commit 314b645857
parent 9b978b7203
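The commit applies the tslint-driven var -> const/let cleanup (the prefer-const style) across the test files below: a var binding that is never reassigned becomes const, while one that is only assigned later (for example inside beforeEach) becomes let. A minimal TypeScript sketch of the pattern, using names echoed from the diff rather than an exact excerpt:

// Before the change: everything declared with var
var model;                           // only assigned later, in beforeEach
var panel: any = { type: 'graph' };  // never reassigned

// After the change, per prefer-const
let model;                             // reassigned later, so let
const panel: any = { type: 'graph' };  // never reassigned, so const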
@@ -5,7 +5,7 @@ import { ThresholdMapper } from '../threshold_mapper';
  describe('ThresholdMapper', () => {
  describe('with greater than evaluator', () => {
  it('can map query conditions to thresholds', () => {
- var panel: any = {
+ const panel: any = {
  type: 'graph',
  alert: {
  conditions: [
@@ -17,7 +17,7 @@ describe('ThresholdMapper', () => {
  },
  };

- var updated = ThresholdMapper.alertToGraphThresholds(panel);
+ const updated = ThresholdMapper.alertToGraphThresholds(panel);
  expect(updated).toBe(true);
  expect(panel.thresholds[0].op).toBe('gt');
  expect(panel.thresholds[0].value).toBe(100);
@@ -26,7 +26,7 @@ describe('ThresholdMapper', () => {

  describe('with outside range evaluator', () => {
  it('can map query conditions to thresholds', () => {
- var panel: any = {
+ const panel: any = {
  type: 'graph',
  alert: {
  conditions: [
@@ -38,7 +38,7 @@ describe('ThresholdMapper', () => {
  },
  };

- var updated = ThresholdMapper.alertToGraphThresholds(panel);
+ const updated = ThresholdMapper.alertToGraphThresholds(panel);
  expect(updated).toBe(true);
  expect(panel.thresholds[0].op).toBe('lt');
  expect(panel.thresholds[0].value).toBe(100);
@@ -50,7 +50,7 @@ describe('ThresholdMapper', () => {

  describe('with inside range evaluator', () => {
  it('can map query conditions to thresholds', () => {
- var panel: any = {
+ const panel: any = {
  type: 'graph',
  alert: {
  conditions: [
@@ -62,7 +62,7 @@ describe('ThresholdMapper', () => {
  },
  };

- var updated = ThresholdMapper.alertToGraphThresholds(panel);
+ const updated = ThresholdMapper.alertToGraphThresholds(panel);
  expect(updated).toBe(true);
  expect(panel.thresholds[0].op).toBe('gt');
  expect(panel.thresholds[0].value).toBe(100);

@@ -6,7 +6,7 @@ jest.mock('app/core/services/context_srv', () => ({}));

  describe('DashboardModel', function() {
  describe('when creating new dashboard model defaults only', function() {
- var model;
+ let model;

  beforeEach(function() {
  model = new DashboardModel({}, {});
@@ -27,7 +27,7 @@ describe('DashboardModel', function() {
  });

  describe('when getting next panel id', function() {
- var model;
+ let model;

  beforeEach(function() {
  model = new DashboardModel({
@@ -42,16 +42,16 @@ describe('DashboardModel', function() {

  describe('getSaveModelClone', function() {
  it('should sort keys', () => {
- var model = new DashboardModel({});
- var saveModel = model.getSaveModelClone();
- var keys = _.keys(saveModel);
+ const model = new DashboardModel({});
+ const saveModel = model.getSaveModelClone();
+ const keys = _.keys(saveModel);

  expect(keys[0]).toBe('annotations');
  expect(keys[1]).toBe('autoUpdate');
  });

  it('should remove add panel panels', () => {
- var model = new DashboardModel({});
+ const model = new DashboardModel({});
  model.addPanel({
  type: 'add-panel',
  });
@@ -61,15 +61,15 @@ describe('DashboardModel', function() {
  model.addPanel({
  type: 'add-panel',
  });
- var saveModel = model.getSaveModelClone();
- var panels = saveModel.panels;
+ const saveModel = model.getSaveModelClone();
+ const panels = saveModel.panels;

  expect(panels.length).toBe(1);
  });
  });

  describe('row and panel manipulation', function() {
- var dashboard;
+ let dashboard;

  beforeEach(function() {
  dashboard = new DashboardModel({});
@@ -82,7 +82,7 @@ describe('DashboardModel', function() {
  });

  it('duplicate panel should try to add to the right if there is space', function() {
- var panel = { id: 10, gridPos: { x: 0, y: 0, w: 6, h: 2 } };
+ const panel = { id: 10, gridPos: { x: 0, y: 0, w: 6, h: 2 } };

  dashboard.addPanel(panel);
  dashboard.duplicatePanel(dashboard.panels[0]);
@@ -96,7 +96,7 @@ describe('DashboardModel', function() {
  });

  it('duplicate panel should remove repeat data', function() {
- var panel = {
+ const panel = {
  id: 10,
  gridPos: { x: 0, y: 0, w: 6, h: 2 },
  repeat: 'asd',
@@ -112,7 +112,7 @@ describe('DashboardModel', function() {
  });

  describe('Given editable false dashboard', function() {
- var model;
+ let model;

  beforeEach(function() {
  model = new DashboardModel({ editable: false });
@@ -124,14 +124,14 @@ describe('DashboardModel', function() {
  });

  it('getSaveModelClone should remove meta', function() {
- var clone = model.getSaveModelClone();
+ const clone = model.getSaveModelClone();
  expect(clone.meta).toBe(undefined);
  });
  });

  describe('when loading dashboard with old influxdb query schema', function() {
- var model;
- var target;
+ let model;
+ let target;

  beforeEach(function() {
  model = new DashboardModel({
@@ -197,7 +197,7 @@ describe('DashboardModel', function() {
  });

  describe('when creating dashboard model with missing list for annoations or templating', function() {
- var model;
+ let model;

  beforeEach(function() {
  model = new DashboardModel({
@@ -222,7 +222,7 @@ describe('DashboardModel', function() {
  });

  describe('Formatting epoch timestamp when timezone is set as utc', function() {
- var dashboard;
+ let dashboard;

  beforeEach(function() {
  dashboard = new DashboardModel({ timezone: 'utc' });
@@ -242,7 +242,7 @@ describe('DashboardModel', function() {
  });

  describe('updateSubmenuVisibility with empty lists', function() {
- var model;
+ let model;

  beforeEach(function() {
  model = new DashboardModel({});
@@ -255,7 +255,7 @@ describe('DashboardModel', function() {
  });

  describe('updateSubmenuVisibility with annotation', function() {
- var model;
+ let model;

  beforeEach(function() {
  model = new DashboardModel({
@@ -272,7 +272,7 @@ describe('DashboardModel', function() {
  });

  describe('updateSubmenuVisibility with template var', function() {
- var model;
+ let model;

  beforeEach(function() {
  model = new DashboardModel({
@@ -289,7 +289,7 @@ describe('DashboardModel', function() {
  });

  describe('updateSubmenuVisibility with hidden template var', function() {
- var model;
+ let model;

  beforeEach(function() {
  model = new DashboardModel({
@@ -306,7 +306,7 @@ describe('DashboardModel', function() {
  });

  describe('updateSubmenuVisibility with hidden annotation toggle', function() {
- var dashboard;
+ let dashboard;

  beforeEach(function() {
  dashboard = new DashboardModel({
@@ -323,7 +323,7 @@ describe('DashboardModel', function() {
  });

  describe('When collapsing row', function() {
- var dashboard;
+ let dashboard;

  beforeEach(function() {
  dashboard = new DashboardModel({
@@ -365,7 +365,7 @@ describe('DashboardModel', function() {
  });

  describe('When expanding row', function() {
- var dashboard;
+ let dashboard;

  beforeEach(function() {
  dashboard = new DashboardModel({

@ -10,7 +10,7 @@ import { DashboardExporter } from '../export/exporter';
|
||||
import { DashboardModel } from '../dashboard_model';
|
||||
|
||||
describe('given dashboard with repeated panels', () => {
|
||||
var dash, exported;
|
||||
let dash, exported;
|
||||
|
||||
beforeEach(done => {
|
||||
dash = {
|
||||
@ -89,7 +89,7 @@ describe('given dashboard with repeated panels', () => {
|
||||
config.buildInfo.version = '3.0.2';
|
||||
|
||||
//Stubs test function calls
|
||||
var datasourceSrvStub = { get: jest.fn(arg => getStub(arg)) };
|
||||
const datasourceSrvStub = { get: jest.fn(arg => getStub(arg)) };
|
||||
|
||||
config.panels['graph'] = {
|
||||
id: 'graph',
|
||||
@ -110,7 +110,7 @@ describe('given dashboard with repeated panels', () => {
|
||||
};
|
||||
|
||||
dash = new DashboardModel(dash, {});
|
||||
var exporter = new DashboardExporter(datasourceSrvStub);
|
||||
const exporter = new DashboardExporter(datasourceSrvStub);
|
||||
exporter.makeExportable(dash).then(clean => {
|
||||
exported = clean;
|
||||
done();
|
||||
@ -118,12 +118,12 @@ describe('given dashboard with repeated panels', () => {
|
||||
});
|
||||
|
||||
it('should replace datasource refs', () => {
|
||||
var panel = exported.panels[0];
|
||||
const panel = exported.panels[0];
|
||||
expect(panel.datasource).toBe('${DS_GFDB}');
|
||||
});
|
||||
|
||||
it('should replace datasource refs in collapsed row', () => {
|
||||
var panel = exported.panels[5].panels[0];
|
||||
const panel = exported.panels[5].panels[0];
|
||||
expect(panel.datasource).toBe('${DS_GFDB}');
|
||||
});
|
||||
|
||||
@ -145,7 +145,7 @@ describe('given dashboard with repeated panels', () => {
|
||||
});
|
||||
|
||||
it('should add datasource to required', () => {
|
||||
var require = _.find(exported.__requires, { name: 'TestDB' });
|
||||
const require = _.find(exported.__requires, { name: 'TestDB' });
|
||||
expect(require.name).toBe('TestDB');
|
||||
expect(require.id).toBe('testdb');
|
||||
expect(require.type).toBe('datasource');
|
||||
@ -153,52 +153,52 @@ describe('given dashboard with repeated panels', () => {
|
||||
});
|
||||
|
||||
it('should not add built in datasources to required', () => {
|
||||
var require = _.find(exported.__requires, { name: 'Mixed' });
|
||||
const require = _.find(exported.__requires, { name: 'Mixed' });
|
||||
expect(require).toBe(undefined);
|
||||
});
|
||||
|
||||
it('should add datasources used in mixed mode', () => {
|
||||
var require = _.find(exported.__requires, { name: 'OtherDB' });
|
||||
const require = _.find(exported.__requires, { name: 'OtherDB' });
|
||||
expect(require).not.toBe(undefined);
|
||||
});
|
||||
|
||||
it('should add graph panel to required', () => {
|
||||
var require = _.find(exported.__requires, { name: 'Graph' });
|
||||
const require = _.find(exported.__requires, { name: 'Graph' });
|
||||
expect(require.name).toBe('Graph');
|
||||
expect(require.id).toBe('graph');
|
||||
expect(require.version).toBe('1.1.0');
|
||||
});
|
||||
|
||||
it('should add table panel to required', () => {
|
||||
var require = _.find(exported.__requires, { name: 'Table' });
|
||||
const require = _.find(exported.__requires, { name: 'Table' });
|
||||
expect(require.name).toBe('Table');
|
||||
expect(require.id).toBe('table');
|
||||
expect(require.version).toBe('1.1.1');
|
||||
});
|
||||
|
||||
it('should add heatmap panel to required', () => {
|
||||
var require = _.find(exported.__requires, { name: 'Heatmap' });
|
||||
const require = _.find(exported.__requires, { name: 'Heatmap' });
|
||||
expect(require.name).toBe('Heatmap');
|
||||
expect(require.id).toBe('heatmap');
|
||||
expect(require.version).toBe('1.1.2');
|
||||
});
|
||||
|
||||
it('should add grafana version', () => {
|
||||
var require = _.find(exported.__requires, { name: 'Grafana' });
|
||||
const require = _.find(exported.__requires, { name: 'Grafana' });
|
||||
expect(require.type).toBe('grafana');
|
||||
expect(require.id).toBe('grafana');
|
||||
expect(require.version).toBe('3.0.2');
|
||||
});
|
||||
|
||||
it('should add constant template variables as inputs', () => {
|
||||
var input = _.find(exported.__inputs, { name: 'VAR_PREFIX' });
|
||||
const input = _.find(exported.__inputs, { name: 'VAR_PREFIX' });
|
||||
expect(input.type).toBe('constant');
|
||||
expect(input.label).toBe('prefix');
|
||||
expect(input.value).toBe('collectd');
|
||||
});
|
||||
|
||||
it('should templatize constant variables', () => {
|
||||
var variable = _.find(exported.templating.list, { name: 'prefix' });
|
||||
const variable = _.find(exported.templating.list, { name: 'prefix' });
|
||||
expect(variable.query).toBe('${VAR_PREFIX}');
|
||||
expect(variable.current.text).toBe('${VAR_PREFIX}');
|
||||
expect(variable.current.value).toBe('${VAR_PREFIX}');
|
||||
@ -208,7 +208,7 @@ describe('given dashboard with repeated panels', () => {
|
||||
});
|
||||
|
||||
// Stub responses
|
||||
var stubs = [];
|
||||
const stubs = [];
|
||||
stubs['gfdb'] = {
|
||||
name: 'gfdb',
|
||||
meta: { id: 'testdb', info: { version: '1.2.1' }, name: 'TestDB' },
|
||||
|
@ -4,12 +4,12 @@ import { describe, it, expect } from 'test/lib/common';
|
||||
describe('saving dashboard as', () => {
|
||||
function scenario(name, panel, verify) {
|
||||
describe(name, () => {
|
||||
var json = {
|
||||
const json = {
|
||||
title: 'name',
|
||||
panels: [panel],
|
||||
};
|
||||
|
||||
var mockDashboardSrv = {
|
||||
const mockDashboardSrv = {
|
||||
getCurrent: function() {
|
||||
return {
|
||||
id: 5,
|
||||
@ -21,8 +21,8 @@ describe('saving dashboard as', () => {
|
||||
},
|
||||
};
|
||||
|
||||
var ctrl = new SaveDashboardAsModalCtrl(mockDashboardSrv);
|
||||
var ctx: any = {
|
||||
const ctrl = new SaveDashboardAsModalCtrl(mockDashboardSrv);
|
||||
const ctx: any = {
|
||||
clone: ctrl.clone,
|
||||
ctrl: ctrl,
|
||||
panel: panel,
|
||||
@ -35,14 +35,14 @@ describe('saving dashboard as', () => {
|
||||
}
|
||||
|
||||
scenario('default values', {}, ctx => {
|
||||
var clone = ctx.clone;
|
||||
const clone = ctx.clone;
|
||||
expect(clone.id).toBe(null);
|
||||
expect(clone.title).toBe('name Copy');
|
||||
expect(clone.editable).toBe(true);
|
||||
expect(clone.hideControls).toBe(false);
|
||||
});
|
||||
|
||||
var graphPanel = {
|
||||
const graphPanel = {
|
||||
id: 1,
|
||||
type: 'graph',
|
||||
alert: { rule: 1 },
|
||||
|
@ -1,12 +1,12 @@
|
||||
import { SaveProvisionedDashboardModalCtrl } from '../save_provisioned_modal';
|
||||
|
||||
describe('SaveProvisionedDashboardModalCtrl', () => {
|
||||
var json = {
|
||||
const json = {
|
||||
title: 'name',
|
||||
id: 5,
|
||||
};
|
||||
|
||||
var mockDashboardSrv = {
|
||||
const mockDashboardSrv = {
|
||||
getCurrent: function() {
|
||||
return {
|
||||
id: 5,
|
||||
@ -18,7 +18,7 @@ describe('SaveProvisionedDashboardModalCtrl', () => {
|
||||
},
|
||||
};
|
||||
|
||||
var ctrl = new SaveProvisionedDashboardModalCtrl(mockDashboardSrv);
|
||||
const ctrl = new SaveProvisionedDashboardModalCtrl(mockDashboardSrv);
|
||||
|
||||
it('should remove id from dashboard model', () => {
|
||||
expect(ctrl.dash.id).toBeUndefined();
|
||||
|
@ -4,7 +4,7 @@ import config from 'app/core/config';
|
||||
import { LinkSrv } from 'app/features/panellinks/link_srv';
|
||||
|
||||
describe('ShareModalCtrl', () => {
|
||||
var ctx = <any>{
|
||||
const ctx = <any>{
|
||||
timeSrv: {
|
||||
timeRange: () => {
|
||||
return { from: new Date(1000), to: new Date(2000) };
|
||||
@ -68,8 +68,8 @@ describe('ShareModalCtrl', () => {
|
||||
ctx.scope.panel = { id: 22 };
|
||||
|
||||
ctx.scope.init();
|
||||
var base = 'http://dashboards.grafana.com/render/d-solo/abcdefghi/my-dash';
|
||||
var params = '?from=1000&to=2000&orgId=1&panelId=22&width=1000&height=500&tz=UTC';
|
||||
const base = 'http://dashboards.grafana.com/render/d-solo/abcdefghi/my-dash';
|
||||
const params = '?from=1000&to=2000&orgId=1&panelId=22&width=1000&height=500&tz=UTC';
|
||||
expect(ctx.scope.imageUrl).toContain(base + params);
|
||||
});
|
||||
|
||||
@ -79,8 +79,8 @@ describe('ShareModalCtrl', () => {
|
||||
ctx.scope.panel = { id: 22 };
|
||||
|
||||
ctx.scope.init();
|
||||
var base = 'http://dashboards.grafana.com/render/dashboard-solo/script/my-dash.js';
|
||||
var params = '?from=1000&to=2000&orgId=1&panelId=22&width=1000&height=500&tz=UTC';
|
||||
const base = 'http://dashboards.grafana.com/render/dashboard-solo/script/my-dash.js';
|
||||
const params = '?from=1000&to=2000&orgId=1&panelId=22&width=1000&height=500&tz=UTC';
|
||||
expect(ctx.scope.imageUrl).toContain(base + params);
|
||||
});
|
||||
|
||||
|
@ -3,25 +3,25 @@ import '../time_srv';
|
||||
import moment from 'moment';
|
||||
|
||||
describe('timeSrv', function() {
|
||||
var rootScope = {
|
||||
const rootScope = {
|
||||
$on: jest.fn(),
|
||||
onAppEvent: jest.fn(),
|
||||
appEvent: jest.fn(),
|
||||
};
|
||||
|
||||
var timer = {
|
||||
const timer = {
|
||||
register: jest.fn(),
|
||||
cancel: jest.fn(),
|
||||
cancelAll: jest.fn(),
|
||||
};
|
||||
|
||||
var location = {
|
||||
let location = {
|
||||
search: jest.fn(() => ({})),
|
||||
};
|
||||
|
||||
var timeSrv;
|
||||
let timeSrv;
|
||||
|
||||
var _dashboard: any = {
|
||||
const _dashboard: any = {
|
||||
time: { from: 'now-6h', to: 'now' },
|
||||
getTimezone: jest.fn(() => 'browser'),
|
||||
};
|
||||
@ -34,14 +34,14 @@ describe('timeSrv', function() {
|
||||
describe('timeRange', function() {
|
||||
it('should return unparsed when parse is false', function() {
|
||||
timeSrv.setTime({ from: 'now', to: 'now-1h' });
|
||||
var time = timeSrv.timeRange();
|
||||
const time = timeSrv.timeRange();
|
||||
expect(time.raw.from).toBe('now');
|
||||
expect(time.raw.to).toBe('now-1h');
|
||||
});
|
||||
|
||||
it('should return parsed when parse is true', function() {
|
||||
timeSrv.setTime({ from: 'now', to: 'now-1h' });
|
||||
var time = timeSrv.timeRange();
|
||||
const time = timeSrv.timeRange();
|
||||
expect(moment.isMoment(time.from)).toBe(true);
|
||||
expect(moment.isMoment(time.to)).toBe(true);
|
||||
});
|
||||
@ -58,7 +58,7 @@ describe('timeSrv', function() {
|
||||
|
||||
timeSrv = new TimeSrv(rootScope, jest.fn(), location, timer, { isGrafanaVisibile: jest.fn() });
|
||||
timeSrv.init(_dashboard);
|
||||
var time = timeSrv.timeRange();
|
||||
const time = timeSrv.timeRange();
|
||||
expect(time.raw.from).toBe('now-2d');
|
||||
expect(time.raw.to).toBe('now');
|
||||
});
|
||||
@ -74,7 +74,7 @@ describe('timeSrv', function() {
|
||||
timeSrv = new TimeSrv(rootScope, jest.fn(), location, timer, { isGrafanaVisibile: jest.fn() });
|
||||
|
||||
timeSrv.init(_dashboard);
|
||||
var time = timeSrv.timeRange();
|
||||
const time = timeSrv.timeRange();
|
||||
expect(time.from.valueOf()).toEqual(new Date('2014-04-10T05:20:10Z').getTime());
|
||||
expect(time.to.valueOf()).toEqual(new Date('2014-05-20T03:10:22Z').getTime());
|
||||
});
|
||||
@ -90,7 +90,7 @@ describe('timeSrv', function() {
|
||||
timeSrv = new TimeSrv(rootScope, jest.fn(), location, timer, { isGrafanaVisibile: jest.fn() });
|
||||
|
||||
timeSrv.init(_dashboard);
|
||||
var time = timeSrv.timeRange();
|
||||
const time = timeSrv.timeRange();
|
||||
expect(time.from.valueOf()).toEqual(new Date('2014-04-10T00:00:00Z').getTime());
|
||||
expect(time.to.valueOf()).toEqual(new Date('2014-05-20T00:00:00Z').getTime());
|
||||
});
|
||||
@ -106,7 +106,7 @@ describe('timeSrv', function() {
|
||||
timeSrv = new TimeSrv(rootScope, jest.fn(), location, timer, { isGrafanaVisibile: jest.fn() });
|
||||
|
||||
timeSrv.init(_dashboard);
|
||||
var time = timeSrv.timeRange();
|
||||
const time = timeSrv.timeRange();
|
||||
expect(time.from.valueOf()).toEqual(1410337646373);
|
||||
expect(time.to.valueOf()).toEqual(1410337665699);
|
||||
});
|
||||
|
@ -37,7 +37,7 @@ describe('when updating view state', () => {
|
||||
});
|
||||
|
||||
it('should update querystring and view state', () => {
|
||||
var updateState = { fullscreen: true, edit: true, panelId: 1 };
|
||||
const updateState = { fullscreen: true, edit: true, panelId: 1 };
|
||||
|
||||
viewState.update(updateState);
|
||||
|
||||
|
@ -7,9 +7,9 @@ jest.mock('angular', () => {
|
||||
});
|
||||
|
||||
describe('linkSrv', function() {
|
||||
var linkSrv;
|
||||
var templateSrvMock = {};
|
||||
var timeSrvMock = {};
|
||||
let linkSrv;
|
||||
const templateSrvMock = {};
|
||||
const timeSrvMock = {};
|
||||
|
||||
beforeEach(() => {
|
||||
linkSrv = new LinkSrv(templateSrvMock, timeSrvMock);
|
||||
@ -17,29 +17,29 @@ describe('linkSrv', function() {
|
||||
|
||||
describe('when appending query strings', function() {
|
||||
it('add ? to URL if not present', function() {
|
||||
var url = linkSrv.appendToQueryString('http://example.com', 'foo=bar');
|
||||
const url = linkSrv.appendToQueryString('http://example.com', 'foo=bar');
|
||||
expect(url).toBe('http://example.com?foo=bar');
|
||||
});
|
||||
|
||||
it('do not add & to URL if ? is present but query string is empty', function() {
|
||||
var url = linkSrv.appendToQueryString('http://example.com?', 'foo=bar');
|
||||
const url = linkSrv.appendToQueryString('http://example.com?', 'foo=bar');
|
||||
expect(url).toBe('http://example.com?foo=bar');
|
||||
});
|
||||
|
||||
it('add & to URL if query string is present', function() {
|
||||
var url = linkSrv.appendToQueryString('http://example.com?foo=bar', 'hello=world');
|
||||
const url = linkSrv.appendToQueryString('http://example.com?foo=bar', 'hello=world');
|
||||
expect(url).toBe('http://example.com?foo=bar&hello=world');
|
||||
});
|
||||
|
||||
it('do not change the URL if there is nothing to append', function() {
|
||||
_.each(['', undefined, null], function(toAppend) {
|
||||
var url1 = linkSrv.appendToQueryString('http://example.com', toAppend);
|
||||
const url1 = linkSrv.appendToQueryString('http://example.com', toAppend);
|
||||
expect(url1).toBe('http://example.com');
|
||||
|
||||
var url2 = linkSrv.appendToQueryString('http://example.com?', toAppend);
|
||||
const url2 = linkSrv.appendToQueryString('http://example.com?', toAppend);
|
||||
expect(url2).toBe('http://example.com?');
|
||||
|
||||
var url3 = linkSrv.appendToQueryString('http://example.com?foo=bar', toAppend);
|
||||
const url3 = linkSrv.appendToQueryString('http://example.com?foo=bar', toAppend);
|
||||
expect(url3).toBe('http://example.com?foo=bar');
|
||||
});
|
||||
});
|
||||
|
@ -3,21 +3,21 @@ import { AdhocVariable } from '../adhoc_variable';
|
||||
describe('AdhocVariable', function() {
|
||||
describe('when serializing to url', function() {
|
||||
it('should set return key value and op separated by pipe', function() {
|
||||
var variable = new AdhocVariable({
|
||||
const variable = new AdhocVariable({
|
||||
filters: [
|
||||
{ key: 'key1', operator: '=', value: 'value1' },
|
||||
{ key: 'key2', operator: '!=', value: 'value2' },
|
||||
{ key: 'key3', operator: '=', value: 'value3a|value3b|value3c' },
|
||||
],
|
||||
});
|
||||
var urlValue = variable.getValueForUrl();
|
||||
const urlValue = variable.getValueForUrl();
|
||||
expect(urlValue).toMatchObject(['key1|=|value1', 'key2|!=|value2', 'key3|=|value3a__gfp__value3b__gfp__value3c']);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when deserializing from url', function() {
|
||||
it('should restore filters', function() {
|
||||
var variable = new AdhocVariable({});
|
||||
const variable = new AdhocVariable({});
|
||||
variable.setValueFromUrl(['key1|=|value1', 'key2|!=|value2', 'key3|=|value3a__gfp__value3b__gfp__value3c']);
|
||||
|
||||
expect(variable.filters[0].key).toBe('key1');
|
||||
|
@ -3,7 +3,7 @@ import { QueryVariable } from '../query_variable';
|
||||
describe('QueryVariable', () => {
|
||||
describe('when creating from model', () => {
|
||||
it('should set defaults', () => {
|
||||
var variable = new QueryVariable({}, null, null, null, null);
|
||||
const variable = new QueryVariable({}, null, null, null, null);
|
||||
expect(variable.datasource).toBe(null);
|
||||
expect(variable.refresh).toBe(0);
|
||||
expect(variable.sort).toBe(0);
|
||||
@ -15,13 +15,13 @@ describe('QueryVariable', () => {
|
||||
});
|
||||
|
||||
it('get model should copy changes back to model', () => {
|
||||
var variable = new QueryVariable({}, null, null, null, null);
|
||||
const variable = new QueryVariable({}, null, null, null, null);
|
||||
variable.options = [{ text: 'test' }];
|
||||
variable.datasource = 'google';
|
||||
variable.regex = 'asd';
|
||||
variable.sort = 50;
|
||||
|
||||
var model = variable.getSaveModel();
|
||||
const model = variable.getSaveModel();
|
||||
expect(model.options.length).toBe(1);
|
||||
expect(model.options[0].text).toBe('test');
|
||||
expect(model.datasource).toBe('google');
|
||||
@ -30,11 +30,11 @@ describe('QueryVariable', () => {
|
||||
});
|
||||
|
||||
it('if refresh != 0 then remove options in presisted mode', () => {
|
||||
var variable = new QueryVariable({}, null, null, null, null);
|
||||
const variable = new QueryVariable({}, null, null, null, null);
|
||||
variable.options = [{ text: 'test' }];
|
||||
variable.refresh = 1;
|
||||
|
||||
var model = variable.getSaveModel();
|
||||
const model = variable.getSaveModel();
|
||||
expect(model.options.length).toBe(0);
|
||||
});
|
||||
});
|
||||
@ -69,7 +69,7 @@ describe('QueryVariable', () => {
|
||||
});
|
||||
|
||||
it('should return in same order', () => {
|
||||
var i = 0;
|
||||
let i = 0;
|
||||
expect(result.length).toBe(11);
|
||||
expect(result[i++].text).toBe('');
|
||||
expect(result[i++].text).toBe('0');
|
||||
@ -90,7 +90,7 @@ describe('QueryVariable', () => {
|
||||
});
|
||||
|
||||
it('should return in same order', () => {
|
||||
var i = 0;
|
||||
let i = 0;
|
||||
expect(result.length).toBe(11);
|
||||
expect(result[i++].text).toBe('');
|
||||
expect(result[i++].text).toBe('0');
|
||||
|
@ -1,7 +1,7 @@
|
||||
import { TemplateSrv } from '../template_srv';
|
||||
|
||||
describe('templateSrv', function() {
|
||||
var _templateSrv;
|
||||
let _templateSrv;
|
||||
|
||||
function initTemplateSrv(variables) {
|
||||
_templateSrv = new TemplateSrv();
|
||||
@ -14,7 +14,7 @@ describe('templateSrv', function() {
|
||||
});
|
||||
|
||||
it('should initialize template data', function() {
|
||||
var target = _templateSrv.replace('this.[[test]].filters');
|
||||
const target = _templateSrv.replace('this.[[test]].filters');
|
||||
expect(target).toBe('this.oogle.filters');
|
||||
});
|
||||
});
|
||||
@ -25,42 +25,42 @@ describe('templateSrv', function() {
|
||||
});
|
||||
|
||||
it('should replace $test with scoped value', function() {
|
||||
var target = _templateSrv.replace('this.$test.filters', {
|
||||
const target = _templateSrv.replace('this.$test.filters', {
|
||||
test: { value: 'mupp', text: 'asd' },
|
||||
});
|
||||
expect(target).toBe('this.mupp.filters');
|
||||
});
|
||||
|
||||
it('should replace ${test} with scoped value', function() {
|
||||
var target = _templateSrv.replace('this.${test}.filters', {
|
||||
const target = _templateSrv.replace('this.${test}.filters', {
|
||||
test: { value: 'mupp', text: 'asd' },
|
||||
});
|
||||
expect(target).toBe('this.mupp.filters');
|
||||
});
|
||||
|
||||
it('should replace ${test:glob} with scoped value', function() {
|
||||
var target = _templateSrv.replace('this.${test:glob}.filters', {
|
||||
const target = _templateSrv.replace('this.${test:glob}.filters', {
|
||||
test: { value: 'mupp', text: 'asd' },
|
||||
});
|
||||
expect(target).toBe('this.mupp.filters');
|
||||
});
|
||||
|
||||
it('should replace $test with scoped text', function() {
|
||||
var target = _templateSrv.replaceWithText('this.$test.filters', {
|
||||
const target = _templateSrv.replaceWithText('this.$test.filters', {
|
||||
test: { value: 'mupp', text: 'asd' },
|
||||
});
|
||||
expect(target).toBe('this.asd.filters');
|
||||
});
|
||||
|
||||
it('should replace ${test} with scoped text', function() {
|
||||
var target = _templateSrv.replaceWithText('this.${test}.filters', {
|
||||
const target = _templateSrv.replaceWithText('this.${test}.filters', {
|
||||
test: { value: 'mupp', text: 'asd' },
|
||||
});
|
||||
expect(target).toBe('this.asd.filters');
|
||||
});
|
||||
|
||||
it('should replace ${test:glob} with scoped text', function() {
|
||||
var target = _templateSrv.replaceWithText('this.${test:glob}.filters', {
|
||||
const target = _templateSrv.replaceWithText('this.${test:glob}.filters', {
|
||||
test: { value: 'mupp', text: 'asd' },
|
||||
});
|
||||
expect(target).toBe('this.asd.filters');
|
||||
@ -81,17 +81,17 @@ describe('templateSrv', function() {
|
||||
});
|
||||
|
||||
it('should return filters if datasourceName match', function() {
|
||||
var filters = _templateSrv.getAdhocFilters('oogle');
|
||||
const filters = _templateSrv.getAdhocFilters('oogle');
|
||||
expect(filters).toMatchObject([1]);
|
||||
});
|
||||
|
||||
it('should return empty array if datasourceName does not match', function() {
|
||||
var filters = _templateSrv.getAdhocFilters('oogleasdasd');
|
||||
const filters = _templateSrv.getAdhocFilters('oogleasdasd');
|
||||
expect(filters).toMatchObject([]);
|
||||
});
|
||||
|
||||
it('should return filters when datasourceName match via data source variable', function() {
|
||||
var filters = _templateSrv.getAdhocFilters('logstash');
|
||||
const filters = _templateSrv.getAdhocFilters('logstash');
|
||||
expect(filters).toMatchObject([2]);
|
||||
});
|
||||
});
|
||||
@ -108,37 +108,37 @@ describe('templateSrv', function() {
|
||||
});
|
||||
|
||||
it('should replace $test with globbed value', function() {
|
||||
var target = _templateSrv.replace('this.$test.filters', {}, 'glob');
|
||||
const target = _templateSrv.replace('this.$test.filters', {}, 'glob');
|
||||
expect(target).toBe('this.{value1,value2}.filters');
|
||||
});
|
||||
|
||||
it('should replace ${test} with globbed value', function() {
|
||||
var target = _templateSrv.replace('this.${test}.filters', {}, 'glob');
|
||||
const target = _templateSrv.replace('this.${test}.filters', {}, 'glob');
|
||||
expect(target).toBe('this.{value1,value2}.filters');
|
||||
});
|
||||
|
||||
it('should replace ${test:glob} with globbed value', function() {
|
||||
var target = _templateSrv.replace('this.${test:glob}.filters', {});
|
||||
const target = _templateSrv.replace('this.${test:glob}.filters', {});
|
||||
expect(target).toBe('this.{value1,value2}.filters');
|
||||
});
|
||||
|
||||
it('should replace $test with piped value', function() {
|
||||
var target = _templateSrv.replace('this=$test', {}, 'pipe');
|
||||
const target = _templateSrv.replace('this=$test', {}, 'pipe');
|
||||
expect(target).toBe('this=value1|value2');
|
||||
});
|
||||
|
||||
it('should replace ${test} with piped value', function() {
|
||||
var target = _templateSrv.replace('this=${test}', {}, 'pipe');
|
||||
const target = _templateSrv.replace('this=${test}', {}, 'pipe');
|
||||
expect(target).toBe('this=value1|value2');
|
||||
});
|
||||
|
||||
it('should replace ${test:pipe} with piped value', function() {
|
||||
var target = _templateSrv.replace('this=${test:pipe}', {});
|
||||
const target = _templateSrv.replace('this=${test:pipe}', {});
|
||||
expect(target).toBe('this=value1|value2');
|
||||
});
|
||||
|
||||
it('should replace ${test:pipe} with piped value and $test with globbed value', function() {
|
||||
var target = _templateSrv.replace('${test:pipe},$test', {}, 'glob');
|
||||
const target = _templateSrv.replace('${test:pipe},$test', {}, 'glob');
|
||||
expect(target).toBe('value1|value2,{value1,value2}');
|
||||
});
|
||||
});
|
||||
@ -156,22 +156,22 @@ describe('templateSrv', function() {
|
||||
});
|
||||
|
||||
it('should replace $test with formatted all value', function() {
|
||||
var target = _templateSrv.replace('this.$test.filters', {}, 'glob');
|
||||
const target = _templateSrv.replace('this.$test.filters', {}, 'glob');
|
||||
expect(target).toBe('this.{value1,value2}.filters');
|
||||
});
|
||||
|
||||
it('should replace ${test} with formatted all value', function() {
|
||||
var target = _templateSrv.replace('this.${test}.filters', {}, 'glob');
|
||||
const target = _templateSrv.replace('this.${test}.filters', {}, 'glob');
|
||||
expect(target).toBe('this.{value1,value2}.filters');
|
||||
});
|
||||
|
||||
it('should replace ${test:glob} with formatted all value', function() {
|
||||
var target = _templateSrv.replace('this.${test:glob}.filters', {});
|
||||
const target = _templateSrv.replace('this.${test:glob}.filters', {});
|
||||
expect(target).toBe('this.{value1,value2}.filters');
|
||||
});
|
||||
|
||||
it('should replace ${test:pipe} with piped value and $test with globbed value', function() {
|
||||
var target = _templateSrv.replace('${test:pipe},$test', {}, 'glob');
|
||||
const target = _templateSrv.replace('${test:pipe},$test', {}, 'glob');
|
||||
expect(target).toBe('value1|value2,{value1,value2}');
|
||||
});
|
||||
});
|
||||
@ -190,22 +190,22 @@ describe('templateSrv', function() {
|
||||
});
|
||||
|
||||
it('should replace $test with formatted all value', function() {
|
||||
var target = _templateSrv.replace('this.$test.filters', {}, 'glob');
|
||||
const target = _templateSrv.replace('this.$test.filters', {}, 'glob');
|
||||
expect(target).toBe('this.*.filters');
|
||||
});
|
||||
|
||||
it('should replace ${test} with formatted all value', function() {
|
||||
var target = _templateSrv.replace('this.${test}.filters', {}, 'glob');
|
||||
const target = _templateSrv.replace('this.${test}.filters', {}, 'glob');
|
||||
expect(target).toBe('this.*.filters');
|
||||
});
|
||||
|
||||
it('should replace ${test:glob} with formatted all value', function() {
|
||||
var target = _templateSrv.replace('this.${test:glob}.filters', {});
|
||||
const target = _templateSrv.replace('this.${test:glob}.filters', {});
|
||||
expect(target).toBe('this.*.filters');
|
||||
});
|
||||
|
||||
it('should not escape custom all value', function() {
|
||||
var target = _templateSrv.replace('this.$test', {}, 'regex');
|
||||
const target = _templateSrv.replace('this.$test', {}, 'regex');
|
||||
expect(target).toBe('this.*');
|
||||
});
|
||||
});
|
||||
@ -213,70 +213,70 @@ describe('templateSrv', function() {
|
||||
describe('lucene format', function() {
|
||||
it('should properly escape $test with lucene escape sequences', function() {
|
||||
initTemplateSrv([{ type: 'query', name: 'test', current: { value: 'value/4' } }]);
|
||||
var target = _templateSrv.replace('this:$test', {}, 'lucene');
|
||||
const target = _templateSrv.replace('this:$test', {}, 'lucene');
|
||||
expect(target).toBe('this:value\\/4');
|
||||
});
|
||||
|
||||
it('should properly escape ${test} with lucene escape sequences', function() {
|
||||
initTemplateSrv([{ type: 'query', name: 'test', current: { value: 'value/4' } }]);
|
||||
var target = _templateSrv.replace('this:${test}', {}, 'lucene');
|
||||
const target = _templateSrv.replace('this:${test}', {}, 'lucene');
|
||||
expect(target).toBe('this:value\\/4');
|
||||
});
|
||||
|
||||
it('should properly escape ${test:lucene} with lucene escape sequences', function() {
|
||||
initTemplateSrv([{ type: 'query', name: 'test', current: { value: 'value/4' } }]);
|
||||
var target = _templateSrv.replace('this:${test:lucene}', {});
|
||||
const target = _templateSrv.replace('this:${test:lucene}', {});
|
||||
expect(target).toBe('this:value\\/4');
|
||||
});
|
||||
});
|
||||
|
||||
describe('format variable to string values', function() {
|
||||
it('single value should return value', function() {
|
||||
var result = _templateSrv.formatValue('test');
|
||||
const result = _templateSrv.formatValue('test');
|
||||
expect(result).toBe('test');
|
||||
});
|
||||
|
||||
it('multi value and glob format should render glob string', function() {
|
||||
var result = _templateSrv.formatValue(['test', 'test2'], 'glob');
|
||||
const result = _templateSrv.formatValue(['test', 'test2'], 'glob');
|
||||
expect(result).toBe('{test,test2}');
|
||||
});
|
||||
|
||||
it('multi value and lucene should render as lucene expr', function() {
|
||||
var result = _templateSrv.formatValue(['test', 'test2'], 'lucene');
|
||||
const result = _templateSrv.formatValue(['test', 'test2'], 'lucene');
|
||||
expect(result).toBe('("test" OR "test2")');
|
||||
});
|
||||
|
||||
it('multi value and regex format should render regex string', function() {
|
||||
var result = _templateSrv.formatValue(['test.', 'test2'], 'regex');
|
||||
const result = _templateSrv.formatValue(['test.', 'test2'], 'regex');
|
||||
expect(result).toBe('(test\\.|test2)');
|
||||
});
|
||||
|
||||
it('multi value and pipe should render pipe string', function() {
|
||||
var result = _templateSrv.formatValue(['test', 'test2'], 'pipe');
|
||||
const result = _templateSrv.formatValue(['test', 'test2'], 'pipe');
|
||||
expect(result).toBe('test|test2');
|
||||
});
|
||||
|
||||
it('multi value and distributed should render distributed string', function() {
|
||||
var result = _templateSrv.formatValue(['test', 'test2'], 'distributed', {
|
||||
const result = _templateSrv.formatValue(['test', 'test2'], 'distributed', {
|
||||
name: 'build',
|
||||
});
|
||||
expect(result).toBe('test,build=test2');
|
||||
});
|
||||
|
||||
it('multi value and distributed should render when not string', function() {
|
||||
var result = _templateSrv.formatValue(['test'], 'distributed', {
|
||||
const result = _templateSrv.formatValue(['test'], 'distributed', {
|
||||
name: 'build',
|
||||
});
|
||||
expect(result).toBe('test');
|
||||
});
|
||||
|
||||
it('multi value and csv format should render csv string', function() {
|
||||
var result = _templateSrv.formatValue(['test', 'test2'], 'csv');
|
||||
const result = _templateSrv.formatValue(['test', 'test2'], 'csv');
|
||||
expect(result).toBe('test,test2');
|
||||
});
|
||||
|
||||
it('slash should be properly escaped in regex format', function() {
|
||||
var result = _templateSrv.formatValue('Gi3/14', 'regex');
|
||||
const result = _templateSrv.formatValue('Gi3/14', 'regex');
|
||||
expect(result).toBe('Gi3\\/14');
|
||||
});
|
||||
});
|
||||
@ -287,7 +287,7 @@ describe('templateSrv', function() {
|
||||
});
|
||||
|
||||
it('should return true if exists', function() {
|
||||
var result = _templateSrv.variableExists('$test');
|
||||
const result = _templateSrv.variableExists('$test');
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
});
|
||||
@ -298,17 +298,17 @@ describe('templateSrv', function() {
|
||||
});
|
||||
|
||||
it('should insert html', function() {
|
||||
var result = _templateSrv.highlightVariablesAsHtml('$test');
|
||||
const result = _templateSrv.highlightVariablesAsHtml('$test');
|
||||
expect(result).toBe('<span class="template-variable">$test</span>');
|
||||
});
|
||||
|
||||
it('should insert html anywhere in string', function() {
|
||||
var result = _templateSrv.highlightVariablesAsHtml('this $test ok');
|
||||
const result = _templateSrv.highlightVariablesAsHtml('this $test ok');
|
||||
expect(result).toBe('this <span class="template-variable">$test</span> ok');
|
||||
});
|
||||
|
||||
it('should ignore if variables does not exist', function() {
|
||||
var result = _templateSrv.highlightVariablesAsHtml('this $google ok');
|
||||
const result = _templateSrv.highlightVariablesAsHtml('this $google ok');
|
||||
expect(result).toBe('this $google ok');
|
||||
});
|
||||
});
|
||||
@ -319,7 +319,7 @@ describe('templateSrv', function() {
|
||||
});
|
||||
|
||||
it('should set current value and update template data', function() {
|
||||
var target = _templateSrv.replace('this.[[test]].filters');
|
||||
const target = _templateSrv.replace('this.[[test]].filters');
|
||||
expect(target).toBe('this.muuuu.filters');
|
||||
});
|
||||
});
|
||||
@ -339,7 +339,7 @@ describe('templateSrv', function() {
|
||||
});
|
||||
|
||||
it('should set multiple url params', function() {
|
||||
var params = {};
|
||||
const params = {};
|
||||
_templateSrv.fillVariableValuesForUrl(params);
|
||||
expect(params['var-test']).toMatchObject(['val1', 'val2']);
|
||||
});
|
||||
@ -360,7 +360,7 @@ describe('templateSrv', function() {
|
||||
});
|
||||
|
||||
it('should not include template variable value in url', function() {
|
||||
var params = {};
|
||||
const params = {};
|
||||
_templateSrv.fillVariableValuesForUrl(params);
|
||||
expect(params['var-test']).toBe(undefined);
|
||||
});
|
||||
@ -382,7 +382,7 @@ describe('templateSrv', function() {
|
||||
});
|
||||
|
||||
it('should not include template variable value in url', function() {
|
||||
var params = {};
|
||||
const params = {};
|
||||
_templateSrv.fillVariableValuesForUrl(params);
|
||||
expect(params['var-test']).toBe(undefined);
|
||||
});
|
||||
@ -394,7 +394,7 @@ describe('templateSrv', function() {
|
||||
});
|
||||
|
||||
it('should set scoped value as url params', function() {
|
||||
var params = {};
|
||||
const params = {};
|
||||
_templateSrv.fillVariableValuesForUrl(params, {
|
||||
test: { value: 'val1' },
|
||||
});
|
||||
@ -408,7 +408,7 @@ describe('templateSrv', function() {
|
||||
});
|
||||
|
||||
it('should not set scoped value as url params', function() {
|
||||
var params = {};
|
||||
const params = {};
|
||||
_templateSrv.fillVariableValuesForUrl(params, {
|
||||
test: { name: 'test', value: 'val1', skipUrlSync: true },
|
||||
});
|
||||
@ -435,7 +435,7 @@ describe('templateSrv', function() {
|
||||
});
|
||||
|
||||
it('should replace with text except for grafanaVariables', function() {
|
||||
var target = _templateSrv.replaceWithText('Server: $server, period: $period');
|
||||
const target = _templateSrv.replaceWithText('Server: $server, period: $period');
|
||||
expect(target).toBe('Server: All, period: 13m');
|
||||
});
|
||||
});
|
||||
@ -446,7 +446,7 @@ describe('templateSrv', function() {
|
||||
});
|
||||
|
||||
it('should replace $__interval_ms with interval milliseconds', function() {
|
||||
var target = _templateSrv.replace('10 * $__interval_ms', {
|
||||
const target = _templateSrv.replace('10 * $__interval_ms', {
|
||||
__interval_ms: { text: '100', value: '100' },
|
||||
});
|
||||
expect(target).toBe('10 * 100');
|
||||
|
@ -2,38 +2,38 @@ import { containsVariable, assignModelProperties } from '../variable';
|
||||
|
||||
describe('containsVariable', function() {
|
||||
describe('when checking if a string contains a variable', function() {
|
||||
it('should find it with $var syntax', function() {
|
||||
var contains = containsVariable('this.$test.filters', 'test');
|
||||
it('should find it with $const syntax', function() {
|
||||
const contains = containsVariable('this.$test.filters', 'test');
|
||||
expect(contains).toBe(true);
|
||||
});
|
||||
|
||||
it('should not find it if only part matches with $var syntax', function() {
|
||||
var contains = containsVariable('this.$serverDomain.filters', 'server');
|
||||
it('should not find it if only part matches with $const syntax', function() {
|
||||
const contains = containsVariable('this.$serverDomain.filters', 'server');
|
||||
expect(contains).toBe(false);
|
||||
});
|
||||
|
||||
it('should find it if it ends with variable and passing multiple test strings', function() {
|
||||
var contains = containsVariable('show field keys from $pgmetric', 'test string2', 'pgmetric');
|
||||
const contains = containsVariable('show field keys from $pgmetric', 'test string2', 'pgmetric');
|
||||
expect(contains).toBe(true);
|
||||
});
|
||||
|
||||
it('should find it with [[var]] syntax', function() {
|
||||
var contains = containsVariable('this.[[test]].filters', 'test');
|
||||
const contains = containsVariable('this.[[test]].filters', 'test');
|
||||
expect(contains).toBe(true);
|
||||
});
|
||||
|
||||
it('should find it when part of segment', function() {
|
||||
var contains = containsVariable('metrics.$env.$group-*', 'group');
|
||||
const contains = containsVariable('metrics.$env.$group-*', 'group');
|
||||
expect(contains).toBe(true);
|
||||
});
|
||||
|
||||
it('should find it its the only thing', function() {
|
||||
var contains = containsVariable('$env', 'env');
|
||||
const contains = containsVariable('$env', 'env');
|
||||
expect(contains).toBe(true);
|
||||
});
|
||||
|
||||
it('should be able to pass in multiple test strings', function() {
|
||||
var contains = containsVariable('asd', 'asd2.$env', 'env');
|
||||
const contains = containsVariable('asd', 'asd2.$env', 'env');
|
||||
expect(contains).toBe(true);
|
||||
});
|
||||
});
|
||||
@ -41,14 +41,14 @@ describe('containsVariable', function() {
|
||||
|
||||
describe('assignModelProperties', function() {
|
||||
it('only set properties defined in defaults', function() {
|
||||
var target: any = { test: 'asd' };
|
||||
const target: any = { test: 'asd' };
|
||||
assignModelProperties(target, { propA: 1, propB: 2 }, { propB: 0 });
|
||||
expect(target.propB).toBe(2);
|
||||
expect(target.test).toBe('asd');
|
||||
});
|
||||
|
||||
it('use default value if not found on source', function() {
|
||||
var target: any = { test: 'asd' };
|
||||
const target: any = { test: 'asd' };
|
||||
assignModelProperties(target, { propA: 1, propB: 2 }, { propC: 10 });
|
||||
expect(target.propC).toBe(10);
|
||||
});
|
||||
|
@ -4,7 +4,7 @@ import moment from 'moment';
|
||||
import $q from 'q';
|
||||
|
||||
describe('VariableSrv', function() {
|
||||
var ctx = <any>{
|
||||
const ctx = <any>{
|
||||
datasourceSrv: {},
|
||||
timeSrv: {
|
||||
timeRange: () => {},
|
||||
@ -33,7 +33,7 @@ describe('VariableSrv', function() {
|
||||
|
||||
function describeUpdateVariable(desc, fn) {
|
||||
describe(desc, () => {
|
||||
var scenario: any = {};
|
||||
const scenario: any = {};
|
||||
scenario.setup = function(setupFn) {
|
||||
scenario.setupFn = setupFn;
|
||||
};
|
||||
@ -41,7 +41,7 @@ describe('VariableSrv', function() {
|
||||
beforeEach(async () => {
|
||||
scenario.setupFn();
|
||||
|
||||
var ds: any = {};
|
||||
const ds: any = {};
|
||||
ds.metricFindQuery = () => Promise.resolve(scenario.queryResult);
|
||||
|
||||
ctx.variableSrv = new VariableSrv(ctx.$rootScope, $q, ctx.$location, ctx.$injector, ctx.templateSrv);
|
||||
@ -100,7 +100,7 @@ describe('VariableSrv', function() {
|
||||
auto_count: 10,
|
||||
};
|
||||
|
||||
var range = {
|
||||
const range = {
|
||||
from: moment(new Date())
|
||||
.subtract(7, 'days')
|
||||
.toDate(),
|
||||
@ -118,7 +118,7 @@ describe('VariableSrv', function() {
|
||||
});
|
||||
|
||||
it('should set $__auto_interval_test', () => {
|
||||
var call = ctx.templateSrv.setGrafanaVariable.mock.calls[0];
|
||||
const call = ctx.templateSrv.setGrafanaVariable.mock.calls[0];
|
||||
expect(call[0]).toBe('$__auto_interval_test');
|
||||
expect(call[1]).toBe('12h');
|
||||
});
|
||||
@ -126,7 +126,7 @@ describe('VariableSrv', function() {
|
||||
// updateAutoValue() gets called twice: once directly once via VariableSrv.validateVariableSelectionState()
|
||||
// So use lastCall instead of a specific call number
|
||||
it('should set $__auto_interval', () => {
|
||||
var call = ctx.templateSrv.setGrafanaVariable.mock.calls.pop();
|
||||
const call = ctx.templateSrv.setGrafanaVariable.mock.calls.pop();
|
||||
expect(call[0]).toBe('$__auto_interval');
|
||||
expect(call[1]).toBe('12h');
|
||||
});
|
||||
@ -503,10 +503,10 @@ describe('VariableSrv', function() {
|
||||
});
|
||||
|
||||
describe('multiple interval variables with auto', () => {
|
||||
var variable1, variable2;
|
||||
let variable1, variable2;
|
||||
|
||||
beforeEach(() => {
|
||||
var range = {
|
||||
const range = {
|
||||
from: moment(new Date())
|
||||
.subtract(7, 'days')
|
||||
.toDate(),
|
||||
@ -515,7 +515,7 @@ describe('VariableSrv', function() {
|
||||
ctx.timeSrv.timeRange = () => range;
|
||||
ctx.templateSrv.setGrafanaVariable = jest.fn();
|
||||
|
||||
var variableModel1 = {
|
||||
const variableModel1 = {
|
||||
type: 'interval',
|
||||
query: '1s,2h,5h,1d',
|
||||
name: 'variable1',
|
||||
@ -525,7 +525,7 @@ describe('VariableSrv', function() {
|
||||
variable1 = ctx.variableSrv.createVariableFromModel(variableModel1);
|
||||
ctx.variableSrv.addVariable(variable1);
|
||||
|
||||
var variableModel2 = {
|
||||
const variableModel2 = {
|
||||
type: 'interval',
|
||||
query: '1s,2h,5h',
|
||||
name: 'variable2',
|
||||
@ -550,14 +550,14 @@ describe('VariableSrv', function() {
|
||||
});
|
||||
|
||||
it('should correctly set $__auto_interval_variableX', () => {
|
||||
var variable1Set,
|
||||
let variable1Set,
|
||||
variable2Set,
|
||||
legacySet,
|
||||
unknownSet = false;
|
||||
// updateAutoValue() gets called repeatedly: once directly once via VariableSrv.validateVariableSelectionState()
|
||||
// So check that all calls are valid rather than expect a specific number and/or ordering of calls
|
||||
for (var i = 0; i < ctx.templateSrv.setGrafanaVariable.mock.calls.length; i++) {
|
||||
var call = ctx.templateSrv.setGrafanaVariable.mock.calls[i];
|
||||
for (let i = 0; i < ctx.templateSrv.setGrafanaVariable.mock.calls.length; i++) {
|
||||
const call = ctx.templateSrv.setGrafanaVariable.mock.calls[i];
|
||||
switch (call[0]) {
|
||||
case '$__auto_interval_variable1':
|
||||
expect(call[1]).toBe('12h');
|
||||
|
@ -26,7 +26,7 @@ describe('VariableSrv init', function() {
|
||||
|
||||
function describeInitScenario(desc, fn) {
|
||||
describe(desc, () => {
|
||||
var scenario: any = {
|
||||
const scenario: any = {
|
||||
urlParams: {},
|
||||
setup: setupFn => {
|
||||
scenario.setupFn = setupFn;
|
||||
@ -92,7 +92,7 @@ describe('VariableSrv init', function() {
|
||||
});
|
||||
|
||||
describe('given dependent variables', () => {
|
||||
var variableList = [
|
||||
const variableList = [
|
||||
{
|
||||
name: 'app',
|
||||
type: 'query',
|
||||
@ -110,7 +110,7 @@ describe('VariableSrv init', function() {
|
||||
},
|
||||
];
|
||||
|
||||
describeInitScenario('when setting parent var from url', scenario => {
|
||||
describeInitScenario('when setting parent const from url', scenario => {
|
||||
scenario.setup(() => {
|
||||
scenario.variables = _.cloneDeep(variableList);
|
||||
scenario.urlParams['var-app'] = 'google';
|
||||
@ -148,7 +148,7 @@ describe('VariableSrv init', function() {
|
||||
});
|
||||
|
||||
it('should update current value', () => {
|
||||
var variable = ctx.variableSrv.variables[0];
|
||||
const variable = ctx.variableSrv.variables[0];
|
||||
expect(variable.options.length).toBe(2);
|
||||
});
|
||||
});
|
||||
@ -172,7 +172,7 @@ describe('VariableSrv init', function() {
|
||||
});
|
||||
|
||||
it('should update current value', () => {
|
||||
var variable = ctx.variableSrv.variables[0];
|
||||
const variable = ctx.variableSrv.variables[0];
|
||||
expect(variable.current.value.length).toBe(2);
|
||||
expect(variable.current.value[0]).toBe('val2');
|
||||
expect(variable.current.value[1]).toBe('val1');
|
||||
@ -182,7 +182,7 @@ describe('VariableSrv init', function() {
|
||||
});
|
||||
|
||||
it('should set options that are not in value to selected false', () => {
|
||||
var variable = ctx.variableSrv.variables[0];
|
||||
const variable = ctx.variableSrv.variables[0];
|
||||
expect(variable.options[2].selected).toBe(false);
|
||||
});
|
||||
});
|
||||
@ -206,7 +206,7 @@ describe('VariableSrv init', function() {
|
||||
});
|
||||
|
||||
it('should update current value', () => {
|
||||
var variable = ctx.variableSrv.variables[0];
|
||||
const variable = ctx.variableSrv.variables[0];
|
||||
expect(variable.current.value.length).toBe(2);
|
||||
expect(variable.current.value[0]).toBe('val2');
|
||||
expect(variable.current.value[1]).toBe('val1');
|
||||
@ -216,7 +216,7 @@ describe('VariableSrv init', function() {
|
||||
});
|
||||
|
||||
it('should set options that are not in value to selected false', () => {
|
||||
var variable = ctx.variableSrv.variables[0];
|
||||
const variable = ctx.variableSrv.variables[0];
|
||||
expect(variable.options[2].selected).toBe(false);
|
||||
});
|
||||
});
|
||||
|
@ -35,9 +35,9 @@ describe('CloudWatchDatasource', function() {
|
||||
});
|
||||
|
||||
describe('When performing CloudWatch query', function() {
|
||||
var requestParams;
|
||||
let requestParams;
|
||||
|
||||
var query = {
|
||||
const query = {
|
||||
range: { from: 'now-1h', to: 'now' },
|
||||
rangeRaw: { from: 1483228800, to: 1483232400 },
|
||||
targets: [
|
||||
@ -54,7 +54,7 @@ describe('CloudWatchDatasource', function() {
|
||||
],
|
||||
};
|
||||
|
||||
var response = {
|
||||
const response = {
|
||||
timings: [null],
|
||||
results: {
|
||||
A: {
|
||||
@ -82,7 +82,7 @@ describe('CloudWatchDatasource', function() {
|
||||
|
||||
it('should generate the correct query', function(done) {
|
||||
ctx.ds.query(query).then(function() {
|
||||
var params = requestParams.queries[0];
|
||||
const params = requestParams.queries[0];
|
||||
expect(params.namespace).toBe(query.targets[0].namespace);
|
||||
expect(params.metricName).toBe(query.targets[0].metricName);
|
||||
expect(params.dimensions['InstanceId']).toBe('i-12345678');
|
||||
@ -97,7 +97,7 @@ describe('CloudWatchDatasource', function() {
|
||||
period: '10m',
|
||||
};
|
||||
|
||||
var query = {
|
||||
const query = {
|
||||
range: { from: 'now-1h', to: 'now' },
|
||||
rangeRaw: { from: 1483228800, to: 1483232400 },
|
||||
targets: [
|
||||
@ -115,14 +115,14 @@ describe('CloudWatchDatasource', function() {
|
||||
};
|
||||
|
||||
ctx.ds.query(query).then(function() {
|
||||
var params = requestParams.queries[0];
|
||||
const params = requestParams.queries[0];
|
||||
expect(params.period).toBe('600');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should cancel query for invalid extended statistics', function() {
|
||||
var query = {
|
||||
const query = {
|
||||
range: { from: 'now-1h', to: 'now' },
|
||||
rangeRaw: { from: 1483228800, to: 1483232400 },
|
||||
targets: [
|
||||
@ -152,7 +152,7 @@ describe('CloudWatchDatasource', function() {
|
||||
|
||||
describe('When query region is "default"', function() {
|
||||
it('should return the datasource region if empty or "default"', function() {
|
||||
var defaultRegion = instanceSettings.jsonData.defaultRegion;
|
||||
const defaultRegion = instanceSettings.jsonData.defaultRegion;
|
||||
|
||||
expect(ctx.ds.getActualRegion()).toBe(defaultRegion);
|
||||
expect(ctx.ds.getActualRegion('')).toBe(defaultRegion);
|
||||
@ -163,7 +163,7 @@ describe('CloudWatchDatasource', function() {
|
||||
expect(ctx.ds.getActualRegion('some-fake-region-1')).toBe('some-fake-region-1');
|
||||
});
|
||||
|
||||
var requestParams;
|
||||
let requestParams;
|
||||
beforeEach(function() {
|
||||
ctx.ds.performTimeSeriesQuery = jest.fn(request => {
|
||||
requestParams = request;
|
||||
@ -172,7 +172,7 @@ describe('CloudWatchDatasource', function() {
|
||||
});
|
||||
|
||||
it('should query for the datasource region if empty or "default"', function(done) {
|
||||
var query = {
|
||||
const query = {
|
||||
range: { from: 'now-1h', to: 'now' },
|
||||
rangeRaw: { from: 1483228800, to: 1483232400 },
|
||||
targets: [
|
||||
@ -197,7 +197,7 @@ describe('CloudWatchDatasource', function() {
|
||||
});
|
||||
|
||||
describe('When performing CloudWatch query for extended statistics', function() {
|
||||
var query = {
|
||||
const query = {
|
||||
range: { from: 'now-1h', to: 'now' },
|
||||
rangeRaw: { from: 1483228800, to: 1483232400 },
|
||||
targets: [
|
||||
@ -215,7 +215,7 @@ describe('CloudWatchDatasource', function() {
|
||||
],
|
||||
};
|
||||
|
||||
var response = {
|
||||
const response = {
|
||||
timings: [null],
|
||||
results: {
|
||||
A: {
|
||||
@ -379,10 +379,10 @@ describe('CloudWatchDatasource', function() {
|
||||
});
|
||||
|
||||
it('should caclculate the correct period', function() {
|
||||
var hourSec = 60 * 60;
|
||||
var daySec = hourSec * 24;
|
||||
var start = 1483196400 * 1000;
|
||||
var testData: any[] = [
|
||||
const hourSec = 60 * 60;
|
||||
const daySec = hourSec * 24;
|
||||
const start = 1483196400 * 1000;
|
||||
const testData: any[] = [
|
||||
[
|
||||
{ period: 60, namespace: 'AWS/EC2' },
|
||||
{ range: { from: new Date(start), to: new Date(start + 3600 * 1000) } },
|
||||
|
@ -53,7 +53,7 @@ describe('ElasticDatasource', function() {
|
||||
});
|
||||
|
||||
it('should translate index pattern to current day', function() {
|
||||
var requestOptions;
|
||||
let requestOptions;
|
||||
ctx.backendSrv.datasourceRequest = jest.fn(options => {
|
||||
requestOptions = options;
|
||||
return Promise.resolve({ data: {} });
|
||||
@ -61,13 +61,13 @@ describe('ElasticDatasource', function() {
|
||||
|
||||
ctx.ds.testDatasource();
|
||||
|
||||
var today = moment.utc().format('YYYY.MM.DD');
|
||||
const today = moment.utc().format('YYYY.MM.DD');
|
||||
expect(requestOptions.url).toBe('http://es.com/asd-' + today + '/_mapping');
|
||||
});
|
||||
});
|
||||
|
||||
describe('When issuing metric query with interval pattern', function() {
|
||||
var requestOptions, parts, header;
|
||||
let requestOptions, parts, header;
|
||||
|
||||
beforeEach(() => {
|
||||
createDatasource({
|
||||
@ -104,13 +104,13 @@ describe('ElasticDatasource', function() {
|
||||
});
|
||||
|
||||
it('should json escape lucene query', function() {
|
||||
var body = angular.fromJson(parts[1]);
|
||||
const body = angular.fromJson(parts[1]);
|
||||
expect(body.query.bool.filter[1].query_string.query).toBe('escape\\:test');
|
||||
});
|
||||
});
|
||||
|
||||
describe('When issuing document query', function() {
|
||||
var requestOptions, parts, header;
|
||||
let requestOptions, parts, header;
|
||||
|
||||
beforeEach(function() {
|
||||
createDatasource({
|
||||
@ -147,7 +147,7 @@ describe('ElasticDatasource', function() {
|
||||
});
|
||||
|
||||
it('should set size', function() {
|
||||
var body = angular.fromJson(parts[1]);
|
||||
const body = angular.fromJson(parts[1]);
|
||||
expect(body.size).toBe(500);
|
||||
});
|
||||
});
|
||||
@ -210,7 +210,7 @@ describe('ElasticDatasource', function() {
|
||||
query: '*',
|
||||
})
|
||||
.then(fieldObjects => {
|
||||
var fields = _.map(fieldObjects, 'text');
|
||||
const fields = _.map(fieldObjects, 'text');
|
||||
expect(fields).toEqual([
|
||||
'@timestamp',
|
||||
'beat.name.raw',
|
||||
@ -232,7 +232,7 @@ describe('ElasticDatasource', function() {
|
||||
type: 'number',
|
||||
})
|
||||
.then(fieldObjects => {
|
||||
var fields = _.map(fieldObjects, 'text');
|
||||
const fields = _.map(fieldObjects, 'text');
|
||||
expect(fields).toEqual(['system.cpu.system', 'system.cpu.user', 'system.process.cpu.total']);
|
||||
});
|
||||
|
||||
@ -243,14 +243,14 @@ describe('ElasticDatasource', function() {
|
||||
type: 'date',
|
||||
})
|
||||
.then(fieldObjects => {
|
||||
var fields = _.map(fieldObjects, 'text');
|
||||
const fields = _.map(fieldObjects, 'text');
|
||||
expect(fields).toEqual(['@timestamp']);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('When issuing aggregation query on es5.x', function() {
|
||||
var requestOptions, parts, header;
|
||||
let requestOptions, parts, header;
|
||||
|
||||
beforeEach(function() {
|
||||
createDatasource({
|
||||
@ -287,13 +287,13 @@ describe('ElasticDatasource', function() {
|
||||
});
|
||||
|
||||
it('should set size to 0', function() {
|
||||
var body = angular.fromJson(parts[1]);
|
||||
const body = angular.fromJson(parts[1]);
|
||||
expect(body.size).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('When issuing metricFind query on es5.x', function() {
|
||||
var requestOptions, parts, header, body, results;
|
||||
let requestOptions, parts, header, body, results;
|
||||
|
||||
beforeEach(() => {
|
||||
createDatasource({
|
||||
|
@ -6,8 +6,8 @@ import { IndexPattern } from '../index_pattern';
|
||||
describe('IndexPattern', () => {
|
||||
describe('when getting index for today', () => {
|
||||
test('should return correct index name', () => {
|
||||
var pattern = new IndexPattern('[asd-]YYYY.MM.DD', 'Daily');
|
||||
var expected = 'asd-' + moment.utc().format('YYYY.MM.DD');
|
||||
const pattern = new IndexPattern('[asd-]YYYY.MM.DD', 'Daily');
|
||||
const expected = 'asd-' + moment.utc().format('YYYY.MM.DD');
|
||||
|
||||
expect(pattern.getIndexForToday()).toBe(expected);
|
||||
});
|
||||
@ -16,20 +16,20 @@ describe('IndexPattern', () => {
|
||||
describe('when getting index list for time range', () => {
|
||||
describe('no interval', () => {
|
||||
test('should return correct index', () => {
|
||||
var pattern = new IndexPattern('my-metrics', null);
|
||||
var from = new Date(2015, 4, 30, 1, 2, 3);
|
||||
var to = new Date(2015, 5, 1, 12, 5, 6);
|
||||
const pattern = new IndexPattern('my-metrics', null);
|
||||
const from = new Date(2015, 4, 30, 1, 2, 3);
|
||||
const to = new Date(2015, 5, 1, 12, 5, 6);
|
||||
expect(pattern.getIndexList(from, to)).toEqual('my-metrics');
|
||||
});
|
||||
});
|
||||
|
||||
describe('daily', () => {
|
||||
test('should return correct index list', () => {
|
||||
var pattern = new IndexPattern('[asd-]YYYY.MM.DD', 'Daily');
|
||||
var from = new Date(1432940523000);
|
||||
var to = new Date(1433153106000);
|
||||
const pattern = new IndexPattern('[asd-]YYYY.MM.DD', 'Daily');
|
||||
const from = new Date(1432940523000);
|
||||
const to = new Date(1433153106000);
|
||||
|
||||
var expected = ['asd-2015.05.29', 'asd-2015.05.30', 'asd-2015.05.31', 'asd-2015.06.01'];
|
||||
const expected = ['asd-2015.05.29', 'asd-2015.05.30', 'asd-2015.05.31', 'asd-2015.06.01'];
|
||||
|
||||
expect(pattern.getIndexList(from, to)).toEqual(expected);
|
||||
});
|
||||
|
@ -1,14 +1,14 @@
|
||||
import { ElasticQueryBuilder } from '../query_builder';
|
||||
|
||||
describe('ElasticQueryBuilder', () => {
|
||||
var builder;
|
||||
let builder;
|
||||
|
||||
beforeEach(() => {
|
||||
builder = new ElasticQueryBuilder({ timeField: '@timestamp' });
|
||||
});
|
||||
|
||||
it('with defaults', () => {
|
||||
var query = builder.build({
|
||||
const query = builder.build({
|
||||
metrics: [{ type: 'Count', id: '0' }],
|
||||
timeField: '@timestamp',
|
||||
bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '1' }],
|
||||
@ -19,12 +19,12 @@ describe('ElasticQueryBuilder', () => {
|
||||
});
|
||||
|
||||
it('with defaults on es5.x', () => {
|
||||
var builder_5x = new ElasticQueryBuilder({
|
||||
const builder_5x = new ElasticQueryBuilder({
|
||||
timeField: '@timestamp',
|
||||
esVersion: 5,
|
||||
});
|
||||
|
||||
var query = builder_5x.build({
|
||||
const query = builder_5x.build({
|
||||
metrics: [{ type: 'Count', id: '0' }],
|
||||
timeField: '@timestamp',
|
||||
bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '1' }],
|
||||
@ -35,7 +35,7 @@ describe('ElasticQueryBuilder', () => {
|
||||
});
|
||||
|
||||
it('with multiple bucket aggs', () => {
|
||||
var query = builder.build({
|
||||
const query = builder.build({
|
||||
metrics: [{ type: 'count', id: '1' }],
|
||||
timeField: '@timestamp',
|
||||
bucketAggs: [
|
||||
@ -49,7 +49,7 @@ describe('ElasticQueryBuilder', () => {
|
||||
});
|
||||
|
||||
it('with select field', () => {
|
||||
var query = builder.build(
|
||||
const query = builder.build(
|
||||
{
|
||||
metrics: [{ type: 'avg', field: '@value', id: '1' }],
|
||||
bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '2' }],
|
||||
@ -58,12 +58,12 @@ describe('ElasticQueryBuilder', () => {
|
||||
1000
|
||||
);
|
||||
|
||||
var aggs = query.aggs['2'].aggs;
|
||||
const aggs = query.aggs['2'].aggs;
|
||||
expect(aggs['1'].avg.field).toBe('@value');
|
||||
});
|
||||
|
||||
it('with term agg and order by metric agg', () => {
|
||||
var query = builder.build(
|
||||
const query = builder.build(
|
||||
{
|
||||
metrics: [{ type: 'count', id: '1' }, { type: 'avg', field: '@value', id: '5' }],
|
||||
bucketAggs: [
|
||||
@ -80,15 +80,15 @@ describe('ElasticQueryBuilder', () => {
|
||||
1000
|
||||
);
|
||||
|
||||
var firstLevel = query.aggs['2'];
|
||||
var secondLevel = firstLevel.aggs['3'];
|
||||
const firstLevel = query.aggs['2'];
|
||||
const secondLevel = firstLevel.aggs['3'];
|
||||
|
||||
expect(firstLevel.aggs['5'].avg.field).toBe('@value');
|
||||
expect(secondLevel.aggs['5'].avg.field).toBe('@value');
|
||||
});
|
||||
|
||||
it('with metric percentiles', () => {
|
||||
var query = builder.build(
|
||||
const query = builder.build(
|
||||
{
|
||||
metrics: [
|
||||
{
|
||||
@ -106,14 +106,14 @@ describe('ElasticQueryBuilder', () => {
|
||||
1000
|
||||
);
|
||||
|
||||
var firstLevel = query.aggs['3'];
|
||||
const firstLevel = query.aggs['3'];
|
||||
|
||||
expect(firstLevel.aggs['1'].percentiles.field).toBe('@load_time');
|
||||
expect(firstLevel.aggs['1'].percentiles.percents).toEqual([1, 2, 3, 4]);
|
||||
});
|
||||
|
||||
it('with filters aggs', () => {
|
||||
var query = builder.build({
|
||||
const query = builder.build({
|
||||
metrics: [{ type: 'count', id: '1' }],
|
||||
timeField: '@timestamp',
|
||||
bucketAggs: [
|
||||
@ -134,11 +134,11 @@ describe('ElasticQueryBuilder', () => {
|
||||
});
|
||||
|
||||
it('with filters aggs on es5.x', () => {
|
||||
var builder_5x = new ElasticQueryBuilder({
|
||||
const builder_5x = new ElasticQueryBuilder({
|
||||
timeField: '@timestamp',
|
||||
esVersion: 5,
|
||||
});
|
||||
var query = builder_5x.build({
|
||||
const query = builder_5x.build({
|
||||
metrics: [{ type: 'count', id: '1' }],
|
||||
timeField: '@timestamp',
|
||||
bucketAggs: [
|
||||
@ -159,7 +159,7 @@ describe('ElasticQueryBuilder', () => {
|
||||
});
|
||||
|
||||
it('with raw_document metric', () => {
|
||||
var query = builder.build({
|
||||
const query = builder.build({
|
||||
metrics: [{ type: 'raw_document', id: '1', settings: {} }],
|
||||
timeField: '@timestamp',
|
||||
bucketAggs: [],
|
||||
@ -168,7 +168,7 @@ describe('ElasticQueryBuilder', () => {
|
||||
expect(query.size).toBe(500);
|
||||
});
|
||||
it('with raw_document metric size set', () => {
|
||||
var query = builder.build({
|
||||
const query = builder.build({
|
||||
metrics: [{ type: 'raw_document', id: '1', settings: { size: 1337 } }],
|
||||
timeField: '@timestamp',
|
||||
bucketAggs: [],
|
||||
@ -178,7 +178,7 @@ describe('ElasticQueryBuilder', () => {
|
||||
});
|
||||
|
||||
it('with moving average', () => {
|
||||
var query = builder.build({
|
||||
const query = builder.build({
|
||||
metrics: [
|
||||
{
|
||||
id: '3',
|
||||
@ -195,7 +195,7 @@ describe('ElasticQueryBuilder', () => {
|
||||
bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '3' }],
|
||||
});
|
||||
|
||||
var firstLevel = query.aggs['3'];
|
||||
const firstLevel = query.aggs['3'];
|
||||
|
||||
expect(firstLevel.aggs['2']).not.toBe(undefined);
|
||||
expect(firstLevel.aggs['2'].moving_avg).not.toBe(undefined);
|
||||
@ -203,7 +203,7 @@ describe('ElasticQueryBuilder', () => {
|
||||
});
|
||||
|
||||
it('with broken moving average', () => {
|
||||
var query = builder.build({
|
||||
const query = builder.build({
|
||||
metrics: [
|
||||
{
|
||||
id: '3',
|
||||
@ -224,7 +224,7 @@ describe('ElasticQueryBuilder', () => {
|
||||
bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '3' }],
|
||||
});
|
||||
|
||||
var firstLevel = query.aggs['3'];
|
||||
const firstLevel = query.aggs['3'];
|
||||
|
||||
expect(firstLevel.aggs['2']).not.toBe(undefined);
|
||||
expect(firstLevel.aggs['2'].moving_avg).not.toBe(undefined);
|
||||
@ -233,7 +233,7 @@ describe('ElasticQueryBuilder', () => {
|
||||
});
|
||||
|
||||
it('with derivative', () => {
|
||||
var query = builder.build({
|
||||
const query = builder.build({
|
||||
metrics: [
|
||||
{
|
||||
id: '3',
|
||||
@ -249,7 +249,7 @@ describe('ElasticQueryBuilder', () => {
|
||||
bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '3' }],
|
||||
});
|
||||
|
||||
var firstLevel = query.aggs['3'];
|
||||
const firstLevel = query.aggs['3'];
|
||||
|
||||
expect(firstLevel.aggs['2']).not.toBe(undefined);
|
||||
expect(firstLevel.aggs['2'].derivative).not.toBe(undefined);
|
||||
@ -257,7 +257,7 @@ describe('ElasticQueryBuilder', () => {
|
||||
});
|
||||
|
||||
it('with histogram', () => {
|
||||
var query = builder.build({
|
||||
const query = builder.build({
|
||||
metrics: [{ id: '1', type: 'count' }],
|
||||
bucketAggs: [
|
||||
{
|
||||
@ -269,7 +269,7 @@ describe('ElasticQueryBuilder', () => {
|
||||
],
|
||||
});
|
||||
|
||||
var firstLevel = query.aggs['3'];
|
||||
const firstLevel = query.aggs['3'];
|
||||
expect(firstLevel.histogram.field).toBe('bytes');
|
||||
expect(firstLevel.histogram.interval).toBe(10);
|
||||
expect(firstLevel.histogram.min_doc_count).toBe(2);
|
||||
@ -277,7 +277,7 @@ describe('ElasticQueryBuilder', () => {
|
||||
});
|
||||
|
||||
it('with adhoc filters', () => {
|
||||
var query = builder.build(
|
||||
const query = builder.build(
|
||||
{
|
||||
metrics: [{ type: 'Count', id: '0' }],
|
||||
timeField: '@timestamp',
|
||||
|
@ -3,7 +3,7 @@ import * as queryDef from '../query_def';
|
||||
describe('ElasticQueryDef', () => {
|
||||
describe('getPipelineAggOptions', () => {
|
||||
describe('with zero targets', () => {
|
||||
var response = queryDef.getPipelineAggOptions([]);
|
||||
const response = queryDef.getPipelineAggOptions([]);
|
||||
|
||||
test('should return zero', () => {
|
||||
expect(response.length).toBe(0);
|
||||
@ -11,11 +11,11 @@ describe('ElasticQueryDef', () => {
|
||||
});
|
||||
|
||||
describe('with count and sum targets', () => {
|
||||
var targets = {
|
||||
const targets = {
|
||||
metrics: [{ type: 'count', field: '@value' }, { type: 'sum', field: '@value' }],
|
||||
};
|
||||
|
||||
var response = queryDef.getPipelineAggOptions(targets);
|
||||
const response = queryDef.getPipelineAggOptions(targets);
|
||||
|
||||
test('should return zero', () => {
|
||||
expect(response.length).toBe(2);
|
||||
@ -23,11 +23,11 @@ describe('ElasticQueryDef', () => {
|
||||
});
|
||||
|
||||
describe('with count and moving average targets', () => {
|
||||
var targets = {
|
||||
const targets = {
|
||||
metrics: [{ type: 'count', field: '@value' }, { type: 'moving_avg', field: '@value' }],
|
||||
};
|
||||
|
||||
var response = queryDef.getPipelineAggOptions(targets);
|
||||
const response = queryDef.getPipelineAggOptions(targets);
|
||||
|
||||
test('should return one', () => {
|
||||
expect(response.length).toBe(1);
|
||||
@ -35,11 +35,11 @@ describe('ElasticQueryDef', () => {
|
||||
});
|
||||
|
||||
describe('with derivatives targets', () => {
|
||||
var targets = {
|
||||
const targets = {
|
||||
metrics: [{ type: 'derivative', field: '@value' }],
|
||||
};
|
||||
|
||||
var response = queryDef.getPipelineAggOptions(targets);
|
||||
const response = queryDef.getPipelineAggOptions(targets);
|
||||
|
||||
test('should return zero', () => {
|
||||
expect(response.length).toBe(0);
|
||||
@ -49,7 +49,7 @@ describe('ElasticQueryDef', () => {
|
||||
|
||||
describe('isPipelineMetric', () => {
|
||||
describe('moving_avg', () => {
|
||||
var result = queryDef.isPipelineAgg('moving_avg');
|
||||
const result = queryDef.isPipelineAgg('moving_avg');
|
||||
|
||||
test('is pipe line metric', () => {
|
||||
expect(result).toBe(true);
|
||||
@ -57,7 +57,7 @@ describe('ElasticQueryDef', () => {
|
||||
});
|
||||
|
||||
describe('count', () => {
|
||||
var result = queryDef.isPipelineAgg('count');
|
||||
const result = queryDef.isPipelineAgg('count');
|
||||
|
||||
test('is not pipe line metric', () => {
|
||||
expect(result).toBe(false);
|
||||
|
@ -8,7 +8,7 @@ class DataObservable {
|
||||
}
|
||||
|
||||
subscribe(options) {
|
||||
var observable = liveSrv.subscribe(this.target.stream);
|
||||
const observable = liveSrv.subscribe(this.target.stream);
|
||||
return observable.subscribe(data => {
|
||||
console.log('grafana stream ds data!', data);
|
||||
});
|
||||
@ -26,8 +26,8 @@ export class GrafanaStreamDS {
|
||||
return Promise.resolve({ data: [] });
|
||||
}
|
||||
|
||||
var target = options.targets[0];
|
||||
var observable = new DataObservable(target);
|
||||
const target = options.targets[0];
|
||||
const observable = new DataObservable(target);
|
||||
|
||||
return Promise.resolve(observable);
|
||||
}
|
||||
|
@ -324,7 +324,7 @@ function accessScenario(name, url, fn) {
|
||||
|
||||
it('tracing headers should be added', () => {
|
||||
ctx.instanceSettings.url = url;
|
||||
var ds = new GraphiteDatasource(ctx.instanceSettings, ctx.$q, ctx.backendSrv, ctx.templateSrv);
|
||||
const ds = new GraphiteDatasource(ctx.instanceSettings, ctx.$q, ctx.backendSrv, ctx.templateSrv);
|
||||
ds.addTracingHeaders(httpOptions, options);
|
||||
fn(httpOptions);
|
||||
});
|
||||
|
@ -2,7 +2,7 @@ import gfunc from '../gfunc';
|
||||
|
||||
describe('when creating func instance from func names', function() {
|
||||
it('should return func instance', function() {
|
||||
var func = gfunc.createFuncInstance('sumSeries');
|
||||
const func = gfunc.createFuncInstance('sumSeries');
|
||||
expect(func).toBeTruthy();
|
||||
expect(func.def.name).toEqual('sumSeries');
|
||||
expect(func.def.params.length).toEqual(1);
|
||||
@ -11,18 +11,18 @@ describe('when creating func instance from func names', function() {
|
||||
});
|
||||
|
||||
it('should return func instance with shortName', function() {
|
||||
var func = gfunc.createFuncInstance('sum');
|
||||
const func = gfunc.createFuncInstance('sum');
|
||||
expect(func).toBeTruthy();
|
||||
});
|
||||
|
||||
it('should return func instance from funcDef', function() {
|
||||
var func = gfunc.createFuncInstance('sum');
|
||||
var func2 = gfunc.createFuncInstance(func.def);
|
||||
const func = gfunc.createFuncInstance('sum');
|
||||
const func2 = gfunc.createFuncInstance(func.def);
|
||||
expect(func2).toBeTruthy();
|
||||
});
|
||||
|
||||
it('func instance should have text representation', function() {
|
||||
var func = gfunc.createFuncInstance('groupByNode');
|
||||
const func = gfunc.createFuncInstance('groupByNode');
|
||||
func.params[0] = 5;
|
||||
func.params[1] = 'avg';
|
||||
func.updateText();
|
||||
@ -32,62 +32,62 @@ describe('when creating func instance from func names', function() {
|
||||
|
||||
describe('when rendering func instance', function() {
|
||||
it('should handle single metric param', function() {
|
||||
var func = gfunc.createFuncInstance('sumSeries');
|
||||
const func = gfunc.createFuncInstance('sumSeries');
|
||||
expect(func.render('hello.metric')).toEqual('sumSeries(hello.metric)');
|
||||
});
|
||||
|
||||
it('should include default params if options enable it', function() {
|
||||
var func = gfunc.createFuncInstance('scaleToSeconds', {
|
||||
const func = gfunc.createFuncInstance('scaleToSeconds', {
|
||||
withDefaultParams: true,
|
||||
});
|
||||
expect(func.render('hello')).toEqual('scaleToSeconds(hello, 1)');
|
||||
});
|
||||
|
||||
it('should handle int or interval params with number', function() {
|
||||
var func = gfunc.createFuncInstance('movingMedian');
|
||||
const func = gfunc.createFuncInstance('movingMedian');
|
||||
func.params[0] = '5';
|
||||
expect(func.render('hello')).toEqual('movingMedian(hello, 5)');
|
||||
});
|
||||
|
||||
it('should handle int or interval params with interval string', function() {
|
||||
var func = gfunc.createFuncInstance('movingMedian');
|
||||
const func = gfunc.createFuncInstance('movingMedian');
|
||||
func.params[0] = '5min';
|
||||
expect(func.render('hello')).toEqual("movingMedian(hello, '5min')");
|
||||
});
|
||||
|
||||
it('should never quote boolean paramater', function() {
|
||||
var func = gfunc.createFuncInstance('sortByName');
|
||||
const func = gfunc.createFuncInstance('sortByName');
|
||||
func.params[0] = '$natural';
|
||||
expect(func.render('hello')).toEqual('sortByName(hello, $natural)');
|
||||
});
|
||||
|
||||
it('should never quote int paramater', function() {
|
||||
var func = gfunc.createFuncInstance('maximumAbove');
|
||||
const func = gfunc.createFuncInstance('maximumAbove');
|
||||
func.params[0] = '$value';
|
||||
expect(func.render('hello')).toEqual('maximumAbove(hello, $value)');
|
||||
});
|
||||
|
||||
it('should never quote node paramater', function() {
|
||||
var func = gfunc.createFuncInstance('aliasByNode');
|
||||
const func = gfunc.createFuncInstance('aliasByNode');
|
||||
func.params[0] = '$node';
|
||||
expect(func.render('hello')).toEqual('aliasByNode(hello, $node)');
|
||||
});
|
||||
|
||||
it('should handle metric param and int param and string param', function() {
|
||||
var func = gfunc.createFuncInstance('groupByNode');
|
||||
const func = gfunc.createFuncInstance('groupByNode');
|
||||
func.params[0] = 5;
|
||||
func.params[1] = 'avg';
|
||||
expect(func.render('hello.metric')).toEqual("groupByNode(hello.metric, 5, 'avg')");
|
||||
});
|
||||
|
||||
it('should handle function with no metric param', function() {
|
||||
var func = gfunc.createFuncInstance('randomWalk');
|
||||
const func = gfunc.createFuncInstance('randomWalk');
|
||||
func.params[0] = 'test';
|
||||
expect(func.render(undefined)).toEqual("randomWalk('test')");
|
||||
});
|
||||
|
||||
it('should handle function multiple series params', function() {
|
||||
var func = gfunc.createFuncInstance('asPercent');
|
||||
const func = gfunc.createFuncInstance('asPercent');
|
||||
func.params[0] = '#B';
|
||||
expect(func.render('#A')).toEqual('asPercent(#A, #B)');
|
||||
});
|
||||
@ -95,14 +95,14 @@ describe('when rendering func instance', function() {
|
||||
|
||||
describe('when requesting function definitions', function() {
|
||||
it('should return function definitions', function() {
|
||||
var funcIndex = gfunc.getFuncDefs('1.0');
|
||||
const funcIndex = gfunc.getFuncDefs('1.0');
|
||||
expect(Object.keys(funcIndex).length).toBeGreaterThan(8);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when updating func param', function() {
|
||||
it('should update param value and update text representation', function() {
|
||||
var func = gfunc.createFuncInstance('summarize', {
|
||||
const func = gfunc.createFuncInstance('summarize', {
|
||||
withDefaultParams: true,
|
||||
});
|
||||
func.updateParam('1h', 0);
|
||||
@ -111,7 +111,7 @@ describe('when updating func param', function() {
|
||||
});
|
||||
|
||||
it('should parse numbers as float', function() {
|
||||
var func = gfunc.createFuncInstance('scale');
|
||||
const func = gfunc.createFuncInstance('scale');
|
||||
func.updateParam('0.001', 0);
|
||||
expect(func.params[0]).toBe('0.001');
|
||||
});
|
||||
@ -119,13 +119,13 @@ describe('when updating func param', function() {
|
||||
|
||||
describe('when updating func param with optional second parameter', function() {
|
||||
it('should update value and text', function() {
|
||||
var func = gfunc.createFuncInstance('aliasByNode');
|
||||
const func = gfunc.createFuncInstance('aliasByNode');
|
||||
func.updateParam('1', 0);
|
||||
expect(func.params[0]).toBe('1');
|
||||
});
|
||||
|
||||
it('should slit text and put value in second param', function() {
|
||||
var func = gfunc.createFuncInstance('aliasByNode');
|
||||
const func = gfunc.createFuncInstance('aliasByNode');
|
||||
func.updateParam('4,-5', 0);
|
||||
expect(func.params[0]).toBe('4');
|
||||
expect(func.params[1]).toBe('-5');
|
||||
@ -133,7 +133,7 @@ describe('when updating func param with optional second parameter', function() {
|
||||
});
|
||||
|
||||
it('should remove second param when empty string is set', function() {
|
||||
var func = gfunc.createFuncInstance('aliasByNode');
|
||||
const func = gfunc.createFuncInstance('aliasByNode');
|
||||
func.updateParam('4,-5', 0);
|
||||
func.updateParam('', 1);
|
||||
expect(func.params[0]).toBe('4');
|
||||
|
@ -2,8 +2,8 @@ import { Lexer } from '../lexer';
|
||||
|
||||
describe('when lexing graphite expression', function() {
|
||||
it('should tokenize metric expression', function() {
|
||||
var lexer = new Lexer('metric.test.*.asd.count');
|
||||
var tokens = lexer.tokenize();
|
||||
const lexer = new Lexer('metric.test.*.asd.count');
|
||||
const tokens = lexer.tokenize();
|
||||
expect(tokens[0].value).toBe('metric');
|
||||
expect(tokens[1].value).toBe('.');
|
||||
expect(tokens[2].type).toBe('identifier');
|
||||
@ -12,36 +12,36 @@ describe('when lexing graphite expression', function() {
|
||||
});
|
||||
|
||||
it('should tokenize metric expression with dash', function() {
|
||||
var lexer = new Lexer('metric.test.se1-server-*.asd.count');
|
||||
var tokens = lexer.tokenize();
|
||||
const lexer = new Lexer('metric.test.se1-server-*.asd.count');
|
||||
const tokens = lexer.tokenize();
|
||||
expect(tokens[4].type).toBe('identifier');
|
||||
expect(tokens[4].value).toBe('se1-server-*');
|
||||
});
|
||||
|
||||
it('should tokenize metric expression with dash2', function() {
|
||||
var lexer = new Lexer('net.192-168-1-1.192-168-1-9.ping_value.*');
|
||||
var tokens = lexer.tokenize();
|
||||
const lexer = new Lexer('net.192-168-1-1.192-168-1-9.ping_value.*');
|
||||
const tokens = lexer.tokenize();
|
||||
expect(tokens[0].value).toBe('net');
|
||||
expect(tokens[2].value).toBe('192-168-1-1');
|
||||
});
|
||||
|
||||
it('should tokenize metric expression with equal sign', function() {
|
||||
var lexer = new Lexer('apps=test');
|
||||
var tokens = lexer.tokenize();
|
||||
const lexer = new Lexer('apps=test');
|
||||
const tokens = lexer.tokenize();
|
||||
expect(tokens[0].value).toBe('apps=test');
|
||||
});
|
||||
|
||||
it('simple function2', function() {
|
||||
var lexer = new Lexer('offset(test.metric, -100)');
|
||||
var tokens = lexer.tokenize();
|
||||
const lexer = new Lexer('offset(test.metric, -100)');
|
||||
const tokens = lexer.tokenize();
|
||||
expect(tokens[2].type).toBe('identifier');
|
||||
expect(tokens[4].type).toBe('identifier');
|
||||
expect(tokens[6].type).toBe('number');
|
||||
});
|
||||
|
||||
it('should tokenize metric expression with curly braces', function() {
|
||||
var lexer = new Lexer('metric.se1-{first, second}.count');
|
||||
var tokens = lexer.tokenize();
|
||||
const lexer = new Lexer('metric.se1-{first, second}.count');
|
||||
const tokens = lexer.tokenize();
|
||||
expect(tokens.length).toBe(10);
|
||||
expect(tokens[3].type).toBe('{');
|
||||
expect(tokens[4].value).toBe('first');
|
||||
@ -50,8 +50,8 @@ describe('when lexing graphite expression', function() {
|
||||
});
|
||||
|
||||
it('should tokenize metric expression with number segments', function() {
|
||||
var lexer = new Lexer('metric.10.12_10.test');
|
||||
var tokens = lexer.tokenize();
|
||||
const lexer = new Lexer('metric.10.12_10.test');
|
||||
const tokens = lexer.tokenize();
|
||||
expect(tokens[0].type).toBe('identifier');
|
||||
expect(tokens[2].type).toBe('identifier');
|
||||
expect(tokens[2].value).toBe('10');
|
||||
@ -60,16 +60,16 @@ describe('when lexing graphite expression', function() {
|
||||
});
|
||||
|
||||
it('should tokenize metric expression with segment that start with number', function() {
|
||||
var lexer = new Lexer('metric.001-server');
|
||||
var tokens = lexer.tokenize();
|
||||
const lexer = new Lexer('metric.001-server');
|
||||
const tokens = lexer.tokenize();
|
||||
expect(tokens[0].type).toBe('identifier');
|
||||
expect(tokens[2].type).toBe('identifier');
|
||||
expect(tokens.length).toBe(3);
|
||||
});
|
||||
|
||||
it('should tokenize func call with numbered metric and number arg', function() {
|
||||
var lexer = new Lexer('scale(metric.10, 15)');
|
||||
var tokens = lexer.tokenize();
|
||||
const lexer = new Lexer('scale(metric.10, 15)');
|
||||
const tokens = lexer.tokenize();
|
||||
expect(tokens[0].type).toBe('identifier');
|
||||
expect(tokens[2].type).toBe('identifier');
|
||||
expect(tokens[2].value).toBe('metric');
|
||||
@ -79,24 +79,24 @@ describe('when lexing graphite expression', function() {
|
||||
});
|
||||
|
||||
it('should tokenize metric with template parameter', function() {
|
||||
var lexer = new Lexer('metric.[[server]].test');
|
||||
var tokens = lexer.tokenize();
|
||||
const lexer = new Lexer('metric.[[server]].test');
|
||||
const tokens = lexer.tokenize();
|
||||
expect(tokens[2].type).toBe('identifier');
|
||||
expect(tokens[2].value).toBe('[[server]]');
|
||||
expect(tokens[4].type).toBe('identifier');
|
||||
});
|
||||
|
||||
it('should tokenize metric with question mark', function() {
|
||||
var lexer = new Lexer('metric.server_??.test');
|
||||
var tokens = lexer.tokenize();
|
||||
const lexer = new Lexer('metric.server_??.test');
|
||||
const tokens = lexer.tokenize();
|
||||
expect(tokens[2].type).toBe('identifier');
|
||||
expect(tokens[2].value).toBe('server_??');
|
||||
expect(tokens[4].type).toBe('identifier');
|
||||
});
|
||||
|
||||
it('should handle error with unterminated string', function() {
|
||||
var lexer = new Lexer("alias(metric, 'asd)");
|
||||
var tokens = lexer.tokenize();
|
||||
const lexer = new Lexer("alias(metric, 'asd)");
|
||||
const tokens = lexer.tokenize();
|
||||
expect(tokens[0].value).toBe('alias');
|
||||
expect(tokens[1].value).toBe('(');
|
||||
expect(tokens[2].value).toBe('metric');
|
||||
@ -107,15 +107,15 @@ describe('when lexing graphite expression', function() {
|
||||
});
|
||||
|
||||
it('should handle float parameters', function() {
|
||||
var lexer = new Lexer('alias(metric, 0.002)');
|
||||
var tokens = lexer.tokenize();
|
||||
const lexer = new Lexer('alias(metric, 0.002)');
|
||||
const tokens = lexer.tokenize();
|
||||
expect(tokens[4].type).toBe('number');
|
||||
expect(tokens[4].value).toBe('0.002');
|
||||
});
|
||||
|
||||
it('should handle bool parameters', function() {
|
||||
var lexer = new Lexer('alias(metric, true, false)');
|
||||
var tokens = lexer.tokenize();
|
||||
const lexer = new Lexer('alias(metric, true, false)');
|
||||
const tokens = lexer.tokenize();
|
||||
expect(tokens[4].type).toBe('bool');
|
||||
expect(tokens[4].value).toBe('true');
|
||||
expect(tokens[6].type).toBe('bool');
|
||||
|
@ -2,8 +2,8 @@ import { Parser } from '../parser';
|
||||
|
||||
describe('when parsing', function() {
|
||||
it('simple metric expression', function() {
|
||||
var parser = new Parser('metric.test.*.asd.count');
|
||||
var rootNode = parser.getAst();
|
||||
const parser = new Parser('metric.test.*.asd.count');
|
||||
const rootNode = parser.getAst();
|
||||
|
||||
expect(rootNode.type).toBe('metric');
|
||||
expect(rootNode.segments.length).toBe(5);
|
||||
@ -11,8 +11,8 @@ describe('when parsing', function() {
|
||||
});
|
||||
|
||||
it('simple metric expression with numbers in segments', function() {
|
||||
var parser = new Parser('metric.10.15_20.5');
|
||||
var rootNode = parser.getAst();
|
||||
const parser = new Parser('metric.10.15_20.5');
|
||||
const rootNode = parser.getAst();
|
||||
|
||||
expect(rootNode.type).toBe('metric');
|
||||
expect(rootNode.segments.length).toBe(4);
|
||||
@ -22,8 +22,8 @@ describe('when parsing', function() {
|
||||
});
|
||||
|
||||
it('simple metric expression with curly braces', function() {
|
||||
var parser = new Parser('metric.se1-{count, max}');
|
||||
var rootNode = parser.getAst();
|
||||
const parser = new Parser('metric.se1-{count, max}');
|
||||
const rootNode = parser.getAst();
|
||||
|
||||
expect(rootNode.type).toBe('metric');
|
||||
expect(rootNode.segments.length).toBe(2);
|
||||
@ -31,8 +31,8 @@ describe('when parsing', function() {
|
||||
});
|
||||
|
||||
it('simple metric expression with curly braces at start of segment and with post chars', function() {
|
||||
var parser = new Parser('metric.{count, max}-something.count');
|
||||
var rootNode = parser.getAst();
|
||||
const parser = new Parser('metric.{count, max}-something.count');
|
||||
const rootNode = parser.getAst();
|
||||
|
||||
expect(rootNode.type).toBe('metric');
|
||||
expect(rootNode.segments.length).toBe(3);
|
||||
@ -40,31 +40,31 @@ describe('when parsing', function() {
|
||||
});
|
||||
|
||||
it('simple function', function() {
|
||||
var parser = new Parser('sum(test)');
|
||||
var rootNode = parser.getAst();
|
||||
const parser = new Parser('sum(test)');
|
||||
const rootNode = parser.getAst();
|
||||
expect(rootNode.type).toBe('function');
|
||||
expect(rootNode.params.length).toBe(1);
|
||||
});
|
||||
|
||||
it('simple function2', function() {
|
||||
var parser = new Parser('offset(test.metric, -100)');
|
||||
var rootNode = parser.getAst();
|
||||
const parser = new Parser('offset(test.metric, -100)');
|
||||
const rootNode = parser.getAst();
|
||||
expect(rootNode.type).toBe('function');
|
||||
expect(rootNode.params[0].type).toBe('metric');
|
||||
expect(rootNode.params[1].type).toBe('number');
|
||||
});
|
||||
|
||||
it('simple function with string arg', function() {
|
||||
var parser = new Parser("randomWalk('test')");
|
||||
var rootNode = parser.getAst();
|
||||
const parser = new Parser("randomWalk('test')");
|
||||
const rootNode = parser.getAst();
|
||||
expect(rootNode.type).toBe('function');
|
||||
expect(rootNode.params.length).toBe(1);
|
||||
expect(rootNode.params[0].type).toBe('string');
|
||||
});
|
||||
|
||||
it('function with multiple args', function() {
|
||||
var parser = new Parser("sum(test, 1, 'test')");
|
||||
var rootNode = parser.getAst();
|
||||
const parser = new Parser("sum(test, 1, 'test')");
|
||||
const rootNode = parser.getAst();
|
||||
|
||||
expect(rootNode.type).toBe('function');
|
||||
expect(rootNode.params.length).toBe(3);
|
||||
@ -74,8 +74,8 @@ describe('when parsing', function() {
|
||||
});
|
||||
|
||||
it('function with nested function', function() {
|
||||
var parser = new Parser('sum(scaleToSeconds(test, 1))');
|
||||
var rootNode = parser.getAst();
|
||||
const parser = new Parser('sum(scaleToSeconds(test, 1))');
|
||||
const rootNode = parser.getAst();
|
||||
|
||||
expect(rootNode.type).toBe('function');
|
||||
expect(rootNode.params.length).toBe(1);
|
||||
@ -87,8 +87,8 @@ describe('when parsing', function() {
|
||||
});
|
||||
|
||||
it('function with multiple series', function() {
|
||||
var parser = new Parser('sum(test.test.*.count, test.timers.*.count)');
|
||||
var rootNode = parser.getAst();
|
||||
const parser = new Parser('sum(test.test.*.count, test.timers.*.count)');
|
||||
const rootNode = parser.getAst();
|
||||
|
||||
expect(rootNode.type).toBe('function');
|
||||
expect(rootNode.params.length).toBe(2);
|
||||
@ -97,8 +97,8 @@ describe('when parsing', function() {
|
||||
});
|
||||
|
||||
it('function with templated series', function() {
|
||||
var parser = new Parser('sum(test.[[server]].count)');
|
||||
var rootNode = parser.getAst();
|
||||
const parser = new Parser('sum(test.[[server]].count)');
|
||||
const rootNode = parser.getAst();
|
||||
|
||||
expect(rootNode.message).toBe(undefined);
|
||||
expect(rootNode.params[0].type).toBe('metric');
|
||||
@ -107,54 +107,54 @@ describe('when parsing', function() {
|
||||
});
|
||||
|
||||
it('invalid metric expression', function() {
|
||||
var parser = new Parser('metric.test.*.asd.');
|
||||
var rootNode = parser.getAst();
|
||||
const parser = new Parser('metric.test.*.asd.');
|
||||
const rootNode = parser.getAst();
|
||||
|
||||
expect(rootNode.message).toBe('Expected metric identifier instead found end of string');
|
||||
expect(rootNode.pos).toBe(19);
|
||||
});
|
||||
|
||||
it('invalid function expression missing closing parenthesis', function() {
|
||||
var parser = new Parser('sum(test');
|
||||
var rootNode = parser.getAst();
|
||||
const parser = new Parser('sum(test');
|
||||
const rootNode = parser.getAst();
|
||||
|
||||
expect(rootNode.message).toBe('Expected closing parenthesis instead found end of string');
|
||||
expect(rootNode.pos).toBe(9);
|
||||
});
|
||||
|
||||
it('unclosed string in function', function() {
|
||||
var parser = new Parser("sum('test)");
|
||||
var rootNode = parser.getAst();
|
||||
const parser = new Parser("sum('test)");
|
||||
const rootNode = parser.getAst();
|
||||
|
||||
expect(rootNode.message).toBe('Unclosed string parameter');
|
||||
expect(rootNode.pos).toBe(11);
|
||||
});
|
||||
|
||||
it('handle issue #69', function() {
|
||||
var parser = new Parser('cactiStyle(offset(scale(net.192-168-1-1.192-168-1-9.ping_value.*,0.001),-100))');
|
||||
var rootNode = parser.getAst();
|
||||
const parser = new Parser('cactiStyle(offset(scale(net.192-168-1-1.192-168-1-9.ping_value.*,0.001),-100))');
|
||||
const rootNode = parser.getAst();
|
||||
expect(rootNode.type).toBe('function');
|
||||
});
|
||||
|
||||
it('handle float function arguments', function() {
|
||||
var parser = new Parser('scale(test, 0.002)');
|
||||
var rootNode = parser.getAst();
|
||||
const parser = new Parser('scale(test, 0.002)');
|
||||
const rootNode = parser.getAst();
|
||||
expect(rootNode.type).toBe('function');
|
||||
expect(rootNode.params[1].type).toBe('number');
|
||||
expect(rootNode.params[1].value).toBe(0.002);
|
||||
});
|
||||
|
||||
it('handle curly brace pattern at start', function() {
|
||||
var parser = new Parser('{apps}.test');
|
||||
var rootNode = parser.getAst();
|
||||
const parser = new Parser('{apps}.test');
|
||||
const rootNode = parser.getAst();
|
||||
expect(rootNode.type).toBe('metric');
|
||||
expect(rootNode.segments[0].value).toBe('{apps}');
|
||||
expect(rootNode.segments[1].value).toBe('test');
|
||||
});
|
||||
|
||||
it('series parameters', function() {
|
||||
var parser = new Parser('asPercent(#A, #B)');
|
||||
var rootNode = parser.getAst();
|
||||
const parser = new Parser('asPercent(#A, #B)');
|
||||
const rootNode = parser.getAst();
|
||||
expect(rootNode.type).toBe('function');
|
||||
expect(rootNode.params[0].type).toBe('series-ref');
|
||||
expect(rootNode.params[0].value).toBe('#A');
|
||||
@ -162,8 +162,8 @@ describe('when parsing', function() {
|
||||
});
|
||||
|
||||
it('series parameters, issue 2788', function() {
|
||||
var parser = new Parser("summarize(diffSeries(#A, #B), '10m', 'sum', false)");
|
||||
var rootNode = parser.getAst();
|
||||
const parser = new Parser("summarize(diffSeries(#A, #B), '10m', 'sum', false)");
|
||||
const rootNode = parser.getAst();
|
||||
expect(rootNode.type).toBe('function');
|
||||
expect(rootNode.params[0].type).toBe('function');
|
||||
expect(rootNode.params[1].value).toBe('10m');
|
||||
@ -171,8 +171,8 @@ describe('when parsing', function() {
|
||||
});
|
||||
|
||||
it('should parse metric expression with ip number segments', function() {
|
||||
var parser = new Parser('5.10.123.5');
|
||||
var rootNode = parser.getAst();
|
||||
const parser = new Parser('5.10.123.5');
|
||||
const rootNode = parser.getAst();
|
||||
expect(rootNode.segments[0].value).toBe('5');
|
||||
expect(rootNode.segments[1].value).toBe('10');
|
||||
expect(rootNode.segments[2].value).toBe('123');
|
||||
|
@ -1,11 +1,11 @@
|
||||
import InfluxQuery from '../influx_query';
|
||||
|
||||
describe('InfluxQuery', function() {
|
||||
var templateSrv = { replace: val => val };
|
||||
const templateSrv = { replace: val => val };
|
||||
|
||||
describe('render series with mesurement only', function() {
|
||||
it('should generate correct query', function() {
|
||||
var query = new InfluxQuery(
|
||||
const query = new InfluxQuery(
|
||||
{
|
||||
measurement: 'cpu',
|
||||
},
|
||||
@ -13,14 +13,14 @@ describe('InfluxQuery', function() {
|
||||
{}
|
||||
);
|
||||
|
||||
var queryText = query.render();
|
||||
const queryText = query.render();
|
||||
expect(queryText).toBe('SELECT mean("value") FROM "cpu" WHERE $timeFilter GROUP BY time($__interval) fill(null)');
|
||||
});
|
||||
});
|
||||
|
||||
describe('render series with policy only', function() {
|
||||
it('should generate correct query', function() {
|
||||
var query = new InfluxQuery(
|
||||
const query = new InfluxQuery(
|
||||
{
|
||||
measurement: 'cpu',
|
||||
policy: '5m_avg',
|
||||
@ -29,7 +29,7 @@ describe('InfluxQuery', function() {
|
||||
{}
|
||||
);
|
||||
|
||||
var queryText = query.render();
|
||||
const queryText = query.render();
|
||||
expect(queryText).toBe(
|
||||
'SELECT mean("value") FROM "5m_avg"."cpu" WHERE $timeFilter GROUP BY time($__interval) fill(null)'
|
||||
);
|
||||
@ -38,7 +38,7 @@ describe('InfluxQuery', function() {
|
||||
|
||||
describe('render series with math and alias', function() {
|
||||
it('should generate correct query', function() {
|
||||
var query = new InfluxQuery(
|
||||
const query = new InfluxQuery(
|
||||
{
|
||||
measurement: 'cpu',
|
||||
select: [
|
||||
@ -54,7 +54,7 @@ describe('InfluxQuery', function() {
|
||||
{}
|
||||
);
|
||||
|
||||
var queryText = query.render();
|
||||
const queryText = query.render();
|
||||
expect(queryText).toBe(
|
||||
'SELECT mean("value") /100 AS "text" FROM "cpu" WHERE $timeFilter GROUP BY time($__interval) fill(null)'
|
||||
);
|
||||
@ -63,7 +63,7 @@ describe('InfluxQuery', function() {
|
||||
|
||||
describe('series with single tag only', function() {
|
||||
it('should generate correct query', function() {
|
||||
var query = new InfluxQuery(
|
||||
const query = new InfluxQuery(
|
||||
{
|
||||
measurement: 'cpu',
|
||||
groupBy: [{ type: 'time', params: ['auto'] }],
|
||||
@ -73,7 +73,7 @@ describe('InfluxQuery', function() {
|
||||
{}
|
||||
);
|
||||
|
||||
var queryText = query.render();
|
||||
const queryText = query.render();
|
||||
|
||||
expect(queryText).toBe(
|
||||
'SELECT mean("value") FROM "cpu" WHERE ("hostname" = \'server\\\\1\') AND $timeFilter' +
|
||||
@ -82,7 +82,7 @@ describe('InfluxQuery', function() {
|
||||
});
|
||||
|
||||
it('should switch regex operator with tag value is regex', function() {
|
||||
var query = new InfluxQuery(
|
||||
const query = new InfluxQuery(
|
||||
{
|
||||
measurement: 'cpu',
|
||||
groupBy: [{ type: 'time', params: ['auto'] }],
|
||||
@ -92,7 +92,7 @@ describe('InfluxQuery', function() {
|
||||
{}
|
||||
);
|
||||
|
||||
var queryText = query.render();
|
||||
const queryText = query.render();
|
||||
expect(queryText).toBe(
|
||||
'SELECT mean("value") FROM "cpu" WHERE ("app" =~ /e.*/) AND $timeFilter GROUP BY time($__interval)'
|
||||
);
|
||||
@ -101,7 +101,7 @@ describe('InfluxQuery', function() {
|
||||
|
||||
describe('series with multiple tags only', function() {
|
||||
it('should generate correct query', function() {
|
||||
var query = new InfluxQuery(
|
||||
const query = new InfluxQuery(
|
||||
{
|
||||
measurement: 'cpu',
|
||||
groupBy: [{ type: 'time', params: ['auto'] }],
|
||||
@ -111,7 +111,7 @@ describe('InfluxQuery', function() {
|
||||
{}
|
||||
);
|
||||
|
||||
var queryText = query.render();
|
||||
const queryText = query.render();
|
||||
expect(queryText).toBe(
|
||||
'SELECT mean("value") FROM "cpu" WHERE ("hostname" = \'server1\' AND "app" = \'email\') AND ' +
|
||||
'$timeFilter GROUP BY time($__interval)'
|
||||
@ -121,7 +121,7 @@ describe('InfluxQuery', function() {
|
||||
|
||||
describe('series with tags OR condition', function() {
|
||||
it('should generate correct query', function() {
|
||||
var query = new InfluxQuery(
|
||||
const query = new InfluxQuery(
|
||||
{
|
||||
measurement: 'cpu',
|
||||
groupBy: [{ type: 'time', params: ['auto'] }],
|
||||
@ -131,7 +131,7 @@ describe('InfluxQuery', function() {
|
||||
{}
|
||||
);
|
||||
|
||||
var queryText = query.render();
|
||||
const queryText = query.render();
|
||||
expect(queryText).toBe(
|
||||
'SELECT mean("value") FROM "cpu" WHERE ("hostname" = \'server1\' OR "hostname" = \'server2\') AND ' +
|
||||
'$timeFilter GROUP BY time($__interval)'
|
||||
@ -141,7 +141,7 @@ describe('InfluxQuery', function() {
|
||||
|
||||
describe('query with value condition', function() {
|
||||
it('should not quote value', function() {
|
||||
var query = new InfluxQuery(
|
||||
const query = new InfluxQuery(
|
||||
{
|
||||
measurement: 'cpu',
|
||||
groupBy: [],
|
||||
@ -151,14 +151,14 @@ describe('InfluxQuery', function() {
|
||||
{}
|
||||
);
|
||||
|
||||
var queryText = query.render();
|
||||
const queryText = query.render();
|
||||
expect(queryText).toBe('SELECT mean("value") FROM "cpu" WHERE ("value" > 5) AND $timeFilter');
|
||||
});
|
||||
});
|
||||
|
||||
describe('series with groupByTag', function() {
|
||||
it('should generate correct query', function() {
|
||||
var query = new InfluxQuery(
|
||||
const query = new InfluxQuery(
|
||||
{
|
||||
measurement: 'cpu',
|
||||
tags: [],
|
||||
@ -168,14 +168,14 @@ describe('InfluxQuery', function() {
|
||||
{}
|
||||
);
|
||||
|
||||
var queryText = query.render();
|
||||
const queryText = query.render();
|
||||
expect(queryText).toBe('SELECT mean("value") FROM "cpu" WHERE $timeFilter GROUP BY time($__interval), "host"');
|
||||
});
|
||||
});
|
||||
|
||||
describe('render series without group by', function() {
|
||||
it('should generate correct query', function() {
|
||||
var query = new InfluxQuery(
|
||||
const query = new InfluxQuery(
|
||||
{
|
||||
measurement: 'cpu',
|
||||
select: [[{ type: 'field', params: ['value'] }]],
|
||||
@ -184,14 +184,14 @@ describe('InfluxQuery', function() {
|
||||
templateSrv,
|
||||
{}
|
||||
);
|
||||
var queryText = query.render();
|
||||
const queryText = query.render();
|
||||
expect(queryText).toBe('SELECT "value" FROM "cpu" WHERE $timeFilter');
|
||||
});
|
||||
});
|
||||
|
||||
describe('render series without group by and fill', function() {
|
||||
it('should generate correct query', function() {
|
||||
var query = new InfluxQuery(
|
||||
const query = new InfluxQuery(
|
||||
{
|
||||
measurement: 'cpu',
|
||||
select: [[{ type: 'field', params: ['value'] }]],
|
||||
@ -200,14 +200,14 @@ describe('InfluxQuery', function() {
|
||||
templateSrv,
|
||||
{}
|
||||
);
|
||||
var queryText = query.render();
|
||||
const queryText = query.render();
|
||||
expect(queryText).toBe('SELECT "value" FROM "cpu" WHERE $timeFilter GROUP BY time($__interval) fill(0)');
|
||||
});
|
||||
});
|
||||
|
||||
describe('when adding group by part', function() {
|
||||
it('should add tag before fill', function() {
|
||||
var query = new InfluxQuery(
|
||||
const query = new InfluxQuery(
|
||||
{
|
||||
measurement: 'cpu',
|
||||
groupBy: [{ type: 'time' }, { type: 'fill' }],
|
||||
@ -224,7 +224,7 @@ describe('InfluxQuery', function() {
|
||||
});
|
||||
|
||||
it('should add tag last if no fill', function() {
|
||||
var query = new InfluxQuery(
|
||||
const query = new InfluxQuery(
|
||||
{
|
||||
measurement: 'cpu',
|
||||
groupBy: [],
|
||||
@ -241,7 +241,7 @@ describe('InfluxQuery', function() {
|
||||
|
||||
describe('when adding select part', function() {
|
||||
it('should add mean after after field', function() {
|
||||
var query = new InfluxQuery(
|
||||
const query = new InfluxQuery(
|
||||
{
|
||||
measurement: 'cpu',
|
||||
select: [[{ type: 'field', params: ['value'] }]],
|
||||
@ -256,7 +256,7 @@ describe('InfluxQuery', function() {
|
||||
});
|
||||
|
||||
it('should replace sum by mean', function() {
|
||||
var query = new InfluxQuery(
|
||||
const query = new InfluxQuery(
|
||||
{
|
||||
measurement: 'cpu',
|
||||
select: [[{ type: 'field', params: ['value'] }, { type: 'mean' }]],
|
||||
@ -271,7 +271,7 @@ describe('InfluxQuery', function() {
|
||||
});
|
||||
|
||||
it('should add math before alias', function() {
|
||||
var query = new InfluxQuery(
|
||||
const query = new InfluxQuery(
|
||||
{
|
||||
measurement: 'cpu',
|
||||
select: [[{ type: 'field', params: ['value'] }, { type: 'mean' }, { type: 'alias' }]],
|
||||
@ -286,7 +286,7 @@ describe('InfluxQuery', function() {
|
||||
});
|
||||
|
||||
it('should add math last', function() {
|
||||
var query = new InfluxQuery(
|
||||
const query = new InfluxQuery(
|
||||
{
|
||||
measurement: 'cpu',
|
||||
select: [[{ type: 'field', params: ['value'] }, { type: 'mean' }]],
|
||||
@ -301,7 +301,7 @@ describe('InfluxQuery', function() {
|
||||
});
|
||||
|
||||
it('should replace math', function() {
|
||||
var query = new InfluxQuery(
|
||||
const query = new InfluxQuery(
|
||||
{
|
||||
measurement: 'cpu',
|
||||
select: [[{ type: 'field', params: ['value'] }, { type: 'mean' }, { type: 'math' }]],
|
||||
@ -316,7 +316,7 @@ describe('InfluxQuery', function() {
|
||||
});
|
||||
|
||||
it('should add math when one only query part', function() {
|
||||
var query = new InfluxQuery(
|
||||
const query = new InfluxQuery(
|
||||
{
|
||||
measurement: 'cpu',
|
||||
select: [[{ type: 'field', params: ['value'] }]],
|
||||
@ -332,9 +332,9 @@ describe('InfluxQuery', function() {
|
||||
|
||||
describe('when render adhoc filters', function() {
|
||||
it('should generate correct query segment', function() {
|
||||
var query = new InfluxQuery({ measurement: 'cpu' }, templateSrv, {});
|
||||
const query = new InfluxQuery({ measurement: 'cpu' }, templateSrv, {});
|
||||
|
||||
var queryText = query.renderAdhocFilters([
|
||||
const queryText = query.renderAdhocFilters([
|
||||
{ key: 'key1', operator: '=', value: 'value1' },
|
||||
{ key: 'key2', operator: '!=', value: 'value2' },
|
||||
]);
|
||||
|
@ -2,7 +2,7 @@ import InfluxSeries from '../influx_series';
|
||||
|
||||
describe('when generating timeseries from influxdb response', function() {
|
||||
describe('given multiple fields for series', function() {
|
||||
var options = {
|
||||
const options = {
|
||||
alias: '',
|
||||
series: [
|
||||
{
|
||||
@ -15,8 +15,8 @@ describe('when generating timeseries from influxdb response', function() {
|
||||
};
|
||||
describe('and no alias', function() {
|
||||
it('should generate multiple datapoints for each column', function() {
|
||||
var series = new InfluxSeries(options);
|
||||
var result = series.getTimeSeries();
|
||||
const series = new InfluxSeries(options);
|
||||
const result = series.getTimeSeries();
|
||||
|
||||
expect(result.length).toBe(3);
|
||||
expect(result[0].target).toBe('cpu.mean {app: test, server: server1}');
|
||||
@ -42,8 +42,8 @@ describe('when generating timeseries from influxdb response', function() {
|
||||
describe('and simple alias', function() {
|
||||
it('should use alias', function() {
|
||||
options.alias = 'new series';
|
||||
var series = new InfluxSeries(options);
|
||||
var result = series.getTimeSeries();
|
||||
const series = new InfluxSeries(options);
|
||||
const result = series.getTimeSeries();
|
||||
|
||||
expect(result[0].target).toBe('new series');
|
||||
expect(result[1].target).toBe('new series');
|
||||
@ -54,8 +54,8 @@ describe('when generating timeseries from influxdb response', function() {
|
||||
describe('and alias patterns', function() {
|
||||
it('should replace patterns', function() {
|
||||
options.alias = 'alias: $m -> $tag_server ([[measurement]])';
|
||||
var series = new InfluxSeries(options);
|
||||
var result = series.getTimeSeries();
|
||||
const series = new InfluxSeries(options);
|
||||
const result = series.getTimeSeries();
|
||||
|
||||
expect(result[0].target).toBe('alias: cpu -> server1 (cpu)');
|
||||
expect(result[1].target).toBe('alias: cpu -> server1 (cpu)');
|
||||
@ -65,7 +65,7 @@ describe('when generating timeseries from influxdb response', function() {
|
||||
});
|
||||
|
||||
describe('given measurement with default fieldname', function() {
|
||||
var options = {
|
||||
const options = {
|
||||
series: [
|
||||
{
|
||||
name: 'cpu',
|
||||
@ -84,8 +84,8 @@ describe('when generating timeseries from influxdb response', function() {
|
||||
|
||||
describe('and no alias', function() {
|
||||
it('should generate label with no field', function() {
|
||||
var series = new InfluxSeries(options);
|
||||
var result = series.getTimeSeries();
|
||||
const series = new InfluxSeries(options);
|
||||
const result = series.getTimeSeries();
|
||||
|
||||
expect(result[0].target).toBe('cpu {app: test, server: server1}');
|
||||
expect(result[1].target).toBe('cpu {app: test2, server: server2}');
|
||||
@ -94,7 +94,7 @@ describe('when generating timeseries from influxdb response', function() {
|
||||
});
|
||||
|
||||
describe('given two series', function() {
|
||||
var options = {
|
||||
const options = {
|
||||
alias: '',
|
||||
series: [
|
||||
{
|
||||
@ -114,8 +114,8 @@ describe('when generating timeseries from influxdb response', function() {
|
||||
|
||||
describe('and no alias', function() {
|
||||
it('should generate two time series', function() {
|
||||
var series = new InfluxSeries(options);
|
||||
var result = series.getTimeSeries();
|
||||
const series = new InfluxSeries(options);
|
||||
const result = series.getTimeSeries();
|
||||
|
||||
expect(result.length).toBe(2);
|
||||
expect(result[0].target).toBe('cpu.mean {app: test, server: server1}');
|
||||
@ -135,8 +135,8 @@ describe('when generating timeseries from influxdb response', function() {
|
||||
describe('and simple alias', function() {
|
||||
it('should use alias', function() {
|
||||
options.alias = 'new series';
|
||||
var series = new InfluxSeries(options);
|
||||
var result = series.getTimeSeries();
|
||||
const series = new InfluxSeries(options);
|
||||
const result = series.getTimeSeries();
|
||||
|
||||
expect(result[0].target).toBe('new series');
|
||||
});
|
||||
@ -145,8 +145,8 @@ describe('when generating timeseries from influxdb response', function() {
|
||||
describe('and alias patterns', function() {
|
||||
it('should replace patterns', function() {
|
||||
options.alias = 'alias: $m -> $tag_server ([[measurement]])';
|
||||
var series = new InfluxSeries(options);
|
||||
var result = series.getTimeSeries();
|
||||
const series = new InfluxSeries(options);
|
||||
const result = series.getTimeSeries();
|
||||
|
||||
expect(result[0].target).toBe('alias: cpu -> server1 (cpu)');
|
||||
expect(result[1].target).toBe('alias: cpu -> server2 (cpu)');
|
||||
@ -155,7 +155,7 @@ describe('when generating timeseries from influxdb response', function() {
|
||||
});
|
||||
|
||||
describe('given measurement with dots', function() {
|
||||
var options = {
|
||||
const options = {
|
||||
alias: '',
|
||||
series: [
|
||||
{
|
||||
@ -169,15 +169,15 @@ describe('when generating timeseries from influxdb response', function() {
|
||||
|
||||
it('should replace patterns', function() {
|
||||
options.alias = 'alias: $1 -> [[3]]';
|
||||
var series = new InfluxSeries(options);
|
||||
var result = series.getTimeSeries();
|
||||
const series = new InfluxSeries(options);
|
||||
const result = series.getTimeSeries();
|
||||
|
||||
expect(result[0].target).toBe('alias: prod -> count');
|
||||
});
|
||||
});
|
||||
|
||||
describe('given table response', function() {
|
||||
var options = {
|
||||
const options = {
|
||||
alias: '',
|
||||
series: [
|
||||
{
|
||||
@ -190,8 +190,8 @@ describe('when generating timeseries from influxdb response', function() {
|
||||
};
|
||||
|
||||
it('should return table', function() {
|
||||
var series = new InfluxSeries(options);
|
||||
var table = series.getTable();
|
||||
const series = new InfluxSeries(options);
|
||||
const table = series.getTable();
|
||||
|
||||
expect(table.type).toBe('table');
|
||||
expect(table.columns.length).toBe(5);
|
||||
@ -201,7 +201,7 @@ describe('when generating timeseries from influxdb response', function() {
|
||||
});
|
||||
|
||||
describe('given table response from SHOW CARDINALITY', function() {
|
||||
var options = {
|
||||
const options = {
|
||||
alias: '',
|
||||
series: [
|
||||
{
|
||||
@ -213,8 +213,8 @@ describe('when generating timeseries from influxdb response', function() {
|
||||
};
|
||||
|
||||
it('should return table', function() {
|
||||
var series = new InfluxSeries(options);
|
||||
var table = series.getTable();
|
||||
const series = new InfluxSeries(options);
|
||||
const table = series.getTable();
|
||||
|
||||
expect(table.type).toBe('table');
|
||||
expect(table.columns.length).toBe(1);
|
||||
@ -225,7 +225,7 @@ describe('when generating timeseries from influxdb response', function() {
|
||||
|
||||
describe('given annotation response', function() {
|
||||
describe('with empty tagsColumn', function() {
|
||||
var options = {
|
||||
const options = {
|
||||
alias: '',
|
||||
annotation: {},
|
||||
series: [
|
||||
@ -239,15 +239,15 @@ describe('when generating timeseries from influxdb response', function() {
|
||||
};
|
||||
|
||||
it('should multiple tags', function() {
|
||||
var series = new InfluxSeries(options);
|
||||
var annotations = series.getAnnotations();
|
||||
const series = new InfluxSeries(options);
|
||||
const annotations = series.getAnnotations();
|
||||
|
||||
expect(annotations[0].tags.length).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('given annotation response', function() {
|
||||
var options = {
|
||||
const options = {
|
||||
alias: '',
|
||||
annotation: {
|
||||
tagsColumn: 'datacenter, source',
|
||||
@ -263,8 +263,8 @@ describe('when generating timeseries from influxdb response', function() {
|
||||
};
|
||||
|
||||
it('should multiple tags', function() {
|
||||
var series = new InfluxSeries(options);
|
||||
var annotations = series.getAnnotations();
|
||||
const series = new InfluxSeries(options);
|
||||
const annotations = series.getAnnotations();
|
||||
|
||||
expect(annotations[0].tags.length).toBe(2);
|
||||
expect(annotations[0].tags[0]).toBe('America');
|
||||
|
@@ -3,139 +3,139 @@ import { InfluxQueryBuilder } from '../query_builder';
describe('InfluxQueryBuilder', function() {
describe('when building explore queries', function() {
it('should only have measurement condition in tag keys query given query with measurement', function() {
var builder = new InfluxQueryBuilder({ measurement: 'cpu', tags: [] });
var query = builder.buildExploreQuery('TAG_KEYS');
const builder = new InfluxQueryBuilder({ measurement: 'cpu', tags: [] });
const query = builder.buildExploreQuery('TAG_KEYS');
expect(query).toBe('SHOW TAG KEYS FROM "cpu"');
});

it('should handle regex measurement in tag keys query', function() {
var builder = new InfluxQueryBuilder({
const builder = new InfluxQueryBuilder({
measurement: '/.*/',
tags: [],
});
var query = builder.buildExploreQuery('TAG_KEYS');
const query = builder.buildExploreQuery('TAG_KEYS');
expect(query).toBe('SHOW TAG KEYS FROM /.*/');
});

it('should have no conditions in tags keys query given query with no measurement or tag', function() {
var builder = new InfluxQueryBuilder({ measurement: '', tags: [] });
var query = builder.buildExploreQuery('TAG_KEYS');
const builder = new InfluxQueryBuilder({ measurement: '', tags: [] });
const query = builder.buildExploreQuery('TAG_KEYS');
expect(query).toBe('SHOW TAG KEYS');
});

it('should have where condition in tag keys query with tags', function() {
var builder = new InfluxQueryBuilder({
const builder = new InfluxQueryBuilder({
measurement: '',
tags: [{ key: 'host', value: 'se1' }],
});
var query = builder.buildExploreQuery('TAG_KEYS');
const query = builder.buildExploreQuery('TAG_KEYS');
expect(query).toBe('SHOW TAG KEYS WHERE "host" = \'se1\'');
});

it('should have no conditions in measurement query for query with no tags', function() {
var builder = new InfluxQueryBuilder({ measurement: '', tags: [] });
var query = builder.buildExploreQuery('MEASUREMENTS');
const builder = new InfluxQueryBuilder({ measurement: '', tags: [] });
const query = builder.buildExploreQuery('MEASUREMENTS');
expect(query).toBe('SHOW MEASUREMENTS LIMIT 100');
});

it('should have no conditions in measurement query for query with no tags and empty query', function() {
var builder = new InfluxQueryBuilder({ measurement: '', tags: [] });
var query = builder.buildExploreQuery('MEASUREMENTS', undefined, '');
const builder = new InfluxQueryBuilder({ measurement: '', tags: [] });
const query = builder.buildExploreQuery('MEASUREMENTS', undefined, '');
expect(query).toBe('SHOW MEASUREMENTS LIMIT 100');
});

it('should have WITH MEASUREMENT in measurement query for non-empty query with no tags', function() {
var builder = new InfluxQueryBuilder({ measurement: '', tags: [] });
var query = builder.buildExploreQuery('MEASUREMENTS', undefined, 'something');
const builder = new InfluxQueryBuilder({ measurement: '', tags: [] });
const query = builder.buildExploreQuery('MEASUREMENTS', undefined, 'something');
expect(query).toBe('SHOW MEASUREMENTS WITH MEASUREMENT =~ /something/ LIMIT 100');
});

it('should have WITH MEASUREMENT WHERE in measurement query for non-empty query with tags', function() {
var builder = new InfluxQueryBuilder({
const builder = new InfluxQueryBuilder({
measurement: '',
tags: [{ key: 'app', value: 'email' }],
});
var query = builder.buildExploreQuery('MEASUREMENTS', undefined, 'something');
const query = builder.buildExploreQuery('MEASUREMENTS', undefined, 'something');
expect(query).toBe('SHOW MEASUREMENTS WITH MEASUREMENT =~ /something/ WHERE "app" = \'email\' LIMIT 100');
});

it('should have where condition in measurement query for query with tags', function() {
var builder = new InfluxQueryBuilder({
const builder = new InfluxQueryBuilder({
measurement: '',
tags: [{ key: 'app', value: 'email' }],
});
var query = builder.buildExploreQuery('MEASUREMENTS');
const query = builder.buildExploreQuery('MEASUREMENTS');
expect(query).toBe('SHOW MEASUREMENTS WHERE "app" = \'email\' LIMIT 100');
});

it('should have where tag name IN filter in tag values query for query with one tag', function() {
var builder = new InfluxQueryBuilder({
const builder = new InfluxQueryBuilder({
measurement: '',
tags: [{ key: 'app', value: 'asdsadsad' }],
});
var query = builder.buildExploreQuery('TAG_VALUES', 'app');
const query = builder.buildExploreQuery('TAG_VALUES', 'app');
expect(query).toBe('SHOW TAG VALUES WITH KEY = "app"');
});

it('should have measurement tag condition and tag name IN filter in tag values query', function() {
var builder = new InfluxQueryBuilder({
const builder = new InfluxQueryBuilder({
measurement: 'cpu',
tags: [{ key: 'app', value: 'email' }, { key: 'host', value: 'server1' }],
});
var query = builder.buildExploreQuery('TAG_VALUES', 'app');
const query = builder.buildExploreQuery('TAG_VALUES', 'app');
expect(query).toBe('SHOW TAG VALUES FROM "cpu" WITH KEY = "app" WHERE "host" = \'server1\'');
});

it('should select from policy correctly if policy is specified', function() {
var builder = new InfluxQueryBuilder({
const builder = new InfluxQueryBuilder({
measurement: 'cpu',
policy: 'one_week',
tags: [{ key: 'app', value: 'email' }, { key: 'host', value: 'server1' }],
});
var query = builder.buildExploreQuery('TAG_VALUES', 'app');
const query = builder.buildExploreQuery('TAG_VALUES', 'app');
expect(query).toBe('SHOW TAG VALUES FROM "one_week"."cpu" WITH KEY = "app" WHERE "host" = \'server1\'');
});

it('should not include policy when policy is default', function() {
var builder = new InfluxQueryBuilder({
const builder = new InfluxQueryBuilder({
measurement: 'cpu',
policy: 'default',
tags: [],
});
var query = builder.buildExploreQuery('TAG_VALUES', 'app');
const query = builder.buildExploreQuery('TAG_VALUES', 'app');
expect(query).toBe('SHOW TAG VALUES FROM "cpu" WITH KEY = "app"');
});

it('should switch to regex operator in tag condition', function() {
var builder = new InfluxQueryBuilder({
const builder = new InfluxQueryBuilder({
measurement: 'cpu',
tags: [{ key: 'host', value: '/server.*/' }],
});
var query = builder.buildExploreQuery('TAG_VALUES', 'app');
const query = builder.buildExploreQuery('TAG_VALUES', 'app');
expect(query).toBe('SHOW TAG VALUES FROM "cpu" WITH KEY = "app" WHERE "host" =~ /server.*/');
});

it('should build show field query', function() {
var builder = new InfluxQueryBuilder({
const builder = new InfluxQueryBuilder({
measurement: 'cpu',
tags: [{ key: 'app', value: 'email' }],
});
var query = builder.buildExploreQuery('FIELDS');
const query = builder.buildExploreQuery('FIELDS');
expect(query).toBe('SHOW FIELD KEYS FROM "cpu"');
});

it('should build show field query with regexp', function() {
var builder = new InfluxQueryBuilder({
const builder = new InfluxQueryBuilder({
measurement: '/$var/',
tags: [{ key: 'app', value: 'email' }],
});
var query = builder.buildExploreQuery('FIELDS');
const query = builder.buildExploreQuery('FIELDS');
expect(query).toBe('SHOW FIELD KEYS FROM /$var/');
});

it('should build show retention policies query', function() {
var builder = new InfluxQueryBuilder({ measurement: 'cpu', tags: [] }, 'site');
var query = builder.buildExploreQuery('RETENTION POLICIES');
const builder = new InfluxQueryBuilder({ measurement: 'cpu', tags: [] }, 'site');
const query = builder.buildExploreQuery('RETENTION POLICIES');
expect(query).toBe('SHOW RETENTION POLICIES on "site"');
});
});
@@ -3,7 +3,7 @@ import queryPart from '../query_part';
describe('InfluxQueryPart', () => {
describe('series with measurement only', () => {
it('should handle nested function parts', () => {
var part = queryPart.create({
const part = queryPart.create({
type: 'derivative',
params: ['10s'],
});
@@ -13,7 +13,7 @@ describe('InfluxQueryPart', () => {
});

it('should nest spread function', () => {
var part = queryPart.create({
const part = queryPart.create({
type: 'spread',
});

@@ -22,7 +22,7 @@ describe('InfluxQueryPart', () => {
});

it('should handle suffix parts', () => {
var part = queryPart.create({
const part = queryPart.create({
type: 'math',
params: ['/ 100'],
});
@@ -32,7 +32,7 @@ describe('InfluxQueryPart', () => {
});

it('should handle alias parts', () => {
var part = queryPart.create({
const part = queryPart.create({
type: 'alias',
params: ['test'],
});
@@ -42,7 +42,7 @@ describe('InfluxQueryPart', () => {
});

it('should nest distinct when count is selected', () => {
var selectParts = [
const selectParts = [
queryPart.create({
type: 'field',
category: queryPart.getCategories().Fields,
@@ -52,7 +52,7 @@ describe('InfluxQueryPart', () => {
category: queryPart.getCategories().Aggregations,
}),
];
var partModel = queryPart.create({
const partModel = queryPart.create({
type: 'distinct',
category: queryPart.getCategories().Aggregations,
});
@@ -64,7 +64,7 @@ describe('InfluxQueryPart', () => {
});

it('should convert to count distinct when distinct is selected and count added', () => {
var selectParts = [
const selectParts = [
queryPart.create({
type: 'field',
category: queryPart.getCategories().Fields,
@@ -74,7 +74,7 @@ describe('InfluxQueryPart', () => {
category: queryPart.getCategories().Aggregations,
}),
];
var partModel = queryPart.create({
const partModel = queryPart.create({
type: 'count',
category: queryPart.getCategories().Aggregations,
});
@@ -86,7 +86,7 @@ describe('InfluxQueryPart', () => {
});

it('should replace count distinct if an aggregation is selected', () => {
var selectParts = [
const selectParts = [
queryPart.create({
type: 'field',
category: queryPart.getCategories().Fields,
@@ -100,7 +100,7 @@ describe('InfluxQueryPart', () => {
category: queryPart.getCategories().Aggregations,
}),
];
var partModel = queryPart.create({
const partModel = queryPart.create({
type: 'mean',
category: queryPart.getCategories().Selectors,
});
@@ -112,7 +112,7 @@ describe('InfluxQueryPart', () => {
});

it('should not allowed nested counts when count distinct is selected', () => {
var selectParts = [
const selectParts = [
queryPart.create({
type: 'field',
category: queryPart.getCategories().Fields,
@@ -126,7 +126,7 @@ describe('InfluxQueryPart', () => {
category: queryPart.getCategories().Aggregations,
}),
];
var partModel = queryPart.create({
const partModel = queryPart.create({
type: 'count',
category: queryPart.getCategories().Aggregations,
});
@@ -139,7 +139,7 @@ describe('InfluxQueryPart', () => {
});

it('should not remove count distinct when distinct is added', () => {
var selectParts = [
const selectParts = [
queryPart.create({
type: 'field',
category: queryPart.getCategories().Fields,
@@ -153,7 +153,7 @@ describe('InfluxQueryPart', () => {
category: queryPart.getCategories().Aggregations,
}),
];
var partModel = queryPart.create({
const partModel = queryPart.create({
type: 'distinct',
category: queryPart.getCategories().Aggregations,
});
@@ -166,7 +166,7 @@ describe('InfluxQueryPart', () => {
});

it('should remove distinct when sum aggregation is selected', () => {
var selectParts = [
const selectParts = [
queryPart.create({
type: 'field',
category: queryPart.getCategories().Fields,
@@ -176,7 +176,7 @@ describe('InfluxQueryPart', () => {
category: queryPart.getCategories().Aggregations,
}),
];
var partModel = queryPart.create({
const partModel = queryPart.create({
type: 'sum',
category: queryPart.getCategories().Aggregations,
});
@@ -5,8 +5,8 @@ describe('influxdb response parser', () => {
const parser = new ResponseParser();

describe('SHOW TAG response', () => {
var query = 'SHOW TAG KEYS FROM "cpu"';
var response = {
const query = 'SHOW TAG KEYS FROM "cpu"';
const response = {
results: [
{
series: [
@@ -20,7 +20,7 @@ describe('influxdb response parser', () => {
],
};

var result = parser.parse(query, response);
const result = parser.parse(query, response);

it('expects three results', () => {
expect(_.size(result)).toBe(3);
@@ -28,10 +28,10 @@ describe('influxdb response parser', () => {
});

describe('SHOW TAG VALUES response', () => {
var query = 'SHOW TAG VALUES FROM "cpu" WITH KEY = "hostname"';
const query = 'SHOW TAG VALUES FROM "cpu" WITH KEY = "hostname"';

describe('response from 0.10.0', () => {
var response = {
const response = {
results: [
{
series: [
@@ -45,7 +45,7 @@ describe('influxdb response parser', () => {
],
};

var result = parser.parse(query, response);
const result = parser.parse(query, response);

it('should get two responses', () => {
expect(_.size(result)).toBe(2);
@@ -55,7 +55,7 @@ describe('influxdb response parser', () => {
});

describe('response from 0.12.0', () => {
var response = {
const response = {
results: [
{
series: [
@@ -74,7 +74,7 @@ describe('influxdb response parser', () => {
],
};

var result = parser.parse(query, response);
const result = parser.parse(query, response);

it('should get two responses', () => {
expect(_.size(result)).toBe(3);
@@ -86,8 +86,8 @@ describe('influxdb response parser', () => {
});

describe('SELECT response', () => {
var query = 'SELECT "usage_iowait" FROM "cpu" LIMIT 10';
var response = {
const query = 'SELECT "usage_iowait" FROM "cpu" LIMIT 10';
const response = {
results: [
{
series: [
@@ -101,7 +101,7 @@ describe('influxdb response parser', () => {
],
};

var result = parser.parse(query, response);
const result = parser.parse(query, response);

it('should return second column', () => {
expect(_.size(result)).toBe(3);
@@ -112,10 +112,10 @@ describe('influxdb response parser', () => {
});

describe('SHOW FIELD response', () => {
var query = 'SHOW FIELD KEYS FROM "cpu"';
const query = 'SHOW FIELD KEYS FROM "cpu"';

describe('response from pre-1.0', () => {
var response = {
const response = {
results: [
{
series: [
@@ -129,7 +129,7 @@ describe('influxdb response parser', () => {
],
};

var result = parser.parse(query, response);
const result = parser.parse(query, response);

it('should get two responses', () => {
expect(_.size(result)).toBe(1);
@@ -137,7 +137,7 @@ describe('influxdb response parser', () => {
});

describe('response from 1.0', () => {
var response = {
const response = {
results: [
{
series: [
@@ -151,7 +151,7 @@ describe('influxdb response parser', () => {
],
};

var result = parser.parse(query, response);
const result = parser.parse(query, response);

it('should return first column', () => {
expect(_.size(result)).toBe(1);
@@ -16,8 +16,8 @@ describe('opentsdb', () => {
});

describe('When performing metricFindQuery', () => {
var results;
var requestOptions;
let results;
let requestOptions;

beforeEach(async () => {
ctx.backendSrv.datasourceRequest = await function(options) {
@@ -1,7 +1,7 @@
import { OpenTsQueryCtrl } from '../query_ctrl';

describe('OpenTsQueryCtrl', () => {
var ctx = <any>{
const ctx = <any>{
target: { target: '' },
datasource: {
tsdbVersion: '',
@@ -409,14 +409,14 @@ const timeSrv = {

describe('PrometheusDatasource', () => {
describe('When querying prometheus with one target using query editor target spec', async () => {
var results;
var query = {
let results;
const query = {
range: { from: time({ seconds: 63 }), to: time({ seconds: 183 }) },
targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
interval: '60s',
};
// Interval alignment with step
var urlExpected =
const urlExpected =
'proxied/api/v1/query_range?query=' + encodeURIComponent('test{job="testjob"}') + '&start=60&end=240&step=60';

beforeEach(async () => {
@@ -453,12 +453,12 @@ describe('PrometheusDatasource', () => {
});
});
describe('When querying prometheus with one target which return multiple series', () => {
var results;
var start = 60;
var end = 360;
var step = 60;
let results;
const start = 60;
const end = 360;
const step = 60;

var query = {
const query = {
range: { from: time({ seconds: start }), to: time({ seconds: end }) },
targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
interval: '60s',
@@ -505,7 +505,7 @@ describe('PrometheusDatasource', () => {
expect(results.data[0].datapoints[1][0]).toBe(3846);
});
it('should fill null after last datapoint in response', () => {
var length = (end - start) / step + 1;
const length = (end - start) / step + 1;
expect(results.data[0].datapoints[length - 2][1]).toBe((end - step * 1) * 1000);
expect(results.data[0].datapoints[length - 2][0]).toBe(3848);
expect(results.data[0].datapoints[length - 1][1]).toBe(end * 1000);
@@ -521,9 +521,9 @@ describe('PrometheusDatasource', () => {
});
});
describe('When querying prometheus with one target and instant = true', () => {
var results;
var urlExpected = 'proxied/api/v1/query?query=' + encodeURIComponent('test{job="testjob"}') + '&time=123';
var query = {
let results;
const urlExpected = 'proxied/api/v1/query?query=' + encodeURIComponent('test{job="testjob"}') + '&time=123';
const query = {
range: { from: time({ seconds: 63 }), to: time({ seconds: 123 }) },
targets: [{ expr: 'test{job="testjob"}', format: 'time_series', instant: true }],
interval: '60s',
@@ -563,9 +563,9 @@ describe('PrometheusDatasource', () => {
});
});
describe('When performing annotationQuery', () => {
var results;
let results;

var options = {
const options = {
annotation: {
expr: 'ALERTS{alertstate="firing"}',
tagKeys: 'job',
@@ -617,8 +617,8 @@ describe('PrometheusDatasource', () => {
});

describe('When resultFormat is table and instant = true', () => {
var results;
var query = {
let results;
const query = {
range: { from: time({ seconds: 63 }), to: time({ seconds: 123 }) },
targets: [{ expr: 'test{job="testjob"}', format: 'time_series', instant: true }],
interval: '60s',
@@ -653,7 +653,7 @@ describe('PrometheusDatasource', () => {
});

describe('The "step" query parameter', () => {
var response = {
const response = {
status: 'success',
data: {
data: {
@@ -686,13 +686,13 @@ describe('PrometheusDatasource', () => {
});

it('step should never go below 1', async () => {
var query = {
const query = {
// 6 minute range
range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
targets: [{ expr: 'test' }],
interval: '100ms',
};
var urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=1';
const urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=1';
backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
ctx.ds = new PrometheusDatasource(instanceSettings, q, <any>backendSrv, templateSrv, timeSrv);
await ctx.ds.query(query);
@@ -702,7 +702,7 @@ describe('PrometheusDatasource', () => {
});

it('should be auto interval when greater than min interval', async () => {
var query = {
const query = {
// 6 minute range
range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
targets: [
@@ -713,7 +713,7 @@ describe('PrometheusDatasource', () => {
],
interval: '10s',
};
var urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=10';
const urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=10';
backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
ctx.ds = new PrometheusDatasource(instanceSettings, q, <any>backendSrv, templateSrv, timeSrv);
await ctx.ds.query(query);
@@ -722,15 +722,15 @@ describe('PrometheusDatasource', () => {
expect(res.url).toBe(urlExpected);
});
it('should result in querying fewer than 11000 data points', async () => {
var query = {
const query = {
// 6 hour range
range: { from: time({ hours: 1 }), to: time({ hours: 7 }) },
targets: [{ expr: 'test' }],
interval: '1s',
};
var end = 7 * 60 * 60;
var start = 60 * 60;
var urlExpected = 'proxied/api/v1/query_range?query=test&start=' + start + '&end=' + end + '&step=2';
const end = 7 * 60 * 60;
const start = 60 * 60;
const urlExpected = 'proxied/api/v1/query_range?query=test&start=' + start + '&end=' + end + '&step=2';
backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
ctx.ds = new PrometheusDatasource(instanceSettings, q, <any>backendSrv, templateSrv, timeSrv);
await ctx.ds.query(query);
@@ -739,7 +739,7 @@ describe('PrometheusDatasource', () => {
expect(res.url).toBe(urlExpected);
});
it('should not apply min interval when interval * intervalFactor greater', async () => {
var query = {
const query = {
// 6 minute range
range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
targets: [
@@ -752,7 +752,7 @@ describe('PrometheusDatasource', () => {
interval: '5s',
};
// times get rounded up to interval
var urlExpected = 'proxied/api/v1/query_range?query=test&start=50&end=450&step=50';
const urlExpected = 'proxied/api/v1/query_range?query=test&start=50&end=450&step=50';
backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
ctx.ds = new PrometheusDatasource(instanceSettings, q, <any>backendSrv, templateSrv, timeSrv);
await ctx.ds.query(query);
@@ -761,7 +761,7 @@ describe('PrometheusDatasource', () => {
expect(res.url).toBe(urlExpected);
});
it('should apply min interval when interval * intervalFactor smaller', async () => {
var query = {
const query = {
// 6 minute range
range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
targets: [
@@ -773,7 +773,7 @@ describe('PrometheusDatasource', () => {
],
interval: '5s',
};
var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=60&end=420&step=15';
const urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=60&end=420&step=15';
backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
ctx.ds = new PrometheusDatasource(instanceSettings, q, <any>backendSrv, templateSrv, timeSrv);
await ctx.ds.query(query);
@@ -782,7 +782,7 @@ describe('PrometheusDatasource', () => {
expect(res.url).toBe(urlExpected);
});
it('should apply intervalFactor to auto interval when greater', async () => {
var query = {
const query = {
// 6 minute range
range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
targets: [
@@ -795,7 +795,7 @@ describe('PrometheusDatasource', () => {
interval: '10s',
};
// times get aligned to interval
var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=0&end=500&step=100';
const urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=0&end=500&step=100';
backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
ctx.ds = new PrometheusDatasource(instanceSettings, q, <any>backendSrv, templateSrv, timeSrv);
await ctx.ds.query(query);
@@ -804,7 +804,7 @@ describe('PrometheusDatasource', () => {
expect(res.url).toBe(urlExpected);
});
it('should not not be affected by the 11000 data points limit when large enough', async () => {
var query = {
const query = {
// 1 week range
range: { from: time({}), to: time({ hours: 7 * 24 }) },
targets: [
@@ -815,9 +815,9 @@ describe('PrometheusDatasource', () => {
],
interval: '10s',
};
var end = 7 * 24 * 60 * 60;
var start = 0;
var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=' + start + '&end=' + end + '&step=100';
const end = 7 * 24 * 60 * 60;
const start = 0;
const urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=' + start + '&end=' + end + '&step=100';
backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
ctx.ds = new PrometheusDatasource(instanceSettings, q, <any>backendSrv, templateSrv, timeSrv);
await ctx.ds.query(query);
@@ -826,7 +826,7 @@ describe('PrometheusDatasource', () => {
expect(res.url).toBe(urlExpected);
});
it('should be determined by the 11000 data points limit when too small', async () => {
var query = {
const query = {
// 1 week range
range: { from: time({}), to: time({ hours: 7 * 24 }) },
targets: [
@@ -837,9 +837,9 @@ describe('PrometheusDatasource', () => {
],
interval: '5s',
};
var end = 7 * 24 * 60 * 60;
var start = 0;
var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=' + start + '&end=' + end + '&step=60';
const end = 7 * 24 * 60 * 60;
const start = 0;
const urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=' + start + '&end=' + end + '&step=60';
backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
ctx.ds = new PrometheusDatasource(instanceSettings, q, <any>backendSrv, templateSrv, timeSrv);
await ctx.ds.query(query);
@@ -850,7 +850,7 @@ describe('PrometheusDatasource', () => {
});

describe('The __interval and __interval_ms template variables', () => {
var response = {
const response = {
status: 'success',
data: {
data: {
@@ -861,7 +861,7 @@ describe('PrometheusDatasource', () => {
};

it('should be unchanged when auto interval is greater than min interval', async () => {
var query = {
const query = {
// 6 minute range
range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
targets: [
@@ -877,7 +877,7 @@ describe('PrometheusDatasource', () => {
},
};

var urlExpected =
const urlExpected =
'proxied/api/v1/query_range?query=' +
encodeURIComponent('rate(test[$__interval])') +
'&start=60&end=420&step=10';
@@ -902,7 +902,7 @@ describe('PrometheusDatasource', () => {
});
});
it('should be min interval when it is greater than auto interval', async () => {
var query = {
const query = {
// 6 minute range
range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
targets: [
@@ -917,7 +917,7 @@ describe('PrometheusDatasource', () => {
__interval_ms: { text: 5 * 1000, value: 5 * 1000 },
},
};
var urlExpected =
const urlExpected =
'proxied/api/v1/query_range?query=' +
encodeURIComponent('rate(test[$__interval])') +
'&start=60&end=420&step=10';
@@ -941,7 +941,7 @@ describe('PrometheusDatasource', () => {
});
});
it('should account for intervalFactor', async () => {
var query = {
const query = {
// 6 minute range
range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
targets: [
@@ -957,7 +957,7 @@ describe('PrometheusDatasource', () => {
__interval_ms: { text: 10 * 1000, value: 10 * 1000 },
},
};
var urlExpected =
const urlExpected =
'proxied/api/v1/query_range?query=' +
encodeURIComponent('rate(test[$__interval])') +
'&start=0&end=500&step=100';
@@ -986,7 +986,7 @@ describe('PrometheusDatasource', () => {
expect(query.scopedVars.__interval_ms.value).toBe(10 * 1000);
});
it('should be interval * intervalFactor when greater than min interval', async () => {
var query = {
const query = {
// 6 minute range
range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
targets: [
@@ -1002,7 +1002,7 @@ describe('PrometheusDatasource', () => {
__interval_ms: { text: 5 * 1000, value: 5 * 1000 },
},
};
var urlExpected =
const urlExpected =
'proxied/api/v1/query_range?query=' +
encodeURIComponent('rate(test[$__interval])') +
'&start=50&end=450&step=50';
@@ -1027,7 +1027,7 @@ describe('PrometheusDatasource', () => {
});
});
it('should be min interval when greater than interval * intervalFactor', async () => {
var query = {
const query = {
// 6 minute range
range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
targets: [
@@ -1043,7 +1043,7 @@ describe('PrometheusDatasource', () => {
__interval_ms: { text: 5 * 1000, value: 5 * 1000 },
},
};
var urlExpected =
const urlExpected =
'proxied/api/v1/query_range?query=' +
encodeURIComponent('rate(test[$__interval])') +
'&start=60&end=420&step=15';
@@ -1067,7 +1067,7 @@ describe('PrometheusDatasource', () => {
});
});
it('should be determined by the 11000 data points limit, accounting for intervalFactor', async () => {
var query = {
const query = {
// 1 week range
range: { from: time({}), to: time({ hours: 7 * 24 }) },
targets: [
@@ -1082,9 +1082,9 @@ describe('PrometheusDatasource', () => {
__interval_ms: { text: 5 * 1000, value: 5 * 1000 },
},
};
var end = 7 * 24 * 60 * 60;
var start = 0;
var urlExpected =
const end = 7 * 24 * 60 * 60;
const start = 0;
const urlExpected =
'proxied/api/v1/query_range?query=' +
encodeURIComponent('rate(test[$__interval])') +
'&start=' +
@@ -1115,7 +1115,7 @@ describe('PrometheusDatasource', () => {
});

describe('PrometheusDatasource for POST', () => {
// var ctx = new helpers.ServiceTestContext();
// const ctx = new helpers.ServiceTestContext();
const instanceSettings = {
url: 'proxied',
directUrl: 'direct',
@@ -1125,15 +1125,15 @@ describe('PrometheusDatasource for POST', () => {
};

describe('When querying prometheus with one target using query editor target spec', () => {
var results;
var urlExpected = 'proxied/api/v1/query_range';
var dataExpected = {
let results;
const urlExpected = 'proxied/api/v1/query_range';
const dataExpected = {
query: 'test{job="testjob"}',
start: 1 * 60,
end: 3 * 60,
step: 60,
};
var query = {
const query = {
range: { from: time({ minutes: 1, seconds: 3 }), to: time({ minutes: 2, seconds: 3 }) },
targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
interval: '60s',
@@ -11,7 +11,7 @@ describe('Prometheus Result Transformer', () => {
});

describe('When resultFormat is table', () => {
var response = {
const response = {
status: 'success',
data: {
resultType: 'matrix',
@@ -33,7 +33,7 @@ describe('Prometheus Result Transformer', () => {
};

it('should return table model', () => {
var table = ctx.resultTransformer.transformMetricDataToTable(response.data.result);
const table = ctx.resultTransformer.transformMetricDataToTable(response.data.result);
expect(table.type).toBe('table');
expect(table.rows).toEqual([
[1443454528000, 'test', '', 'testjob', 3846],
@@ -49,7 +49,7 @@ describe('Prometheus Result Transformer', () => {
});

it('should column title include refId if response count is more than 2', () => {
var table = ctx.resultTransformer.transformMetricDataToTable(response.data.result, 2, 'B');
const table = ctx.resultTransformer.transformMetricDataToTable(response.data.result, 2, 'B');
expect(table.type).toBe('table');
expect(table.columns).toMatchObject([
{ text: 'Time', type: 'time' },
@@ -62,7 +62,7 @@ describe('Prometheus Result Transformer', () => {
});

describe('When resultFormat is table and instant = true', () => {
var response = {
const response = {
status: 'success',
data: {
resultType: 'vector',
@@ -76,7 +76,7 @@ describe('Prometheus Result Transformer', () => {
};

it('should return table model', () => {
var table = ctx.resultTransformer.transformMetricDataToTable(response.data.result);
const table = ctx.resultTransformer.transformMetricDataToTable(response.data.result);
expect(table.type).toBe('table');
expect(table.rows).toEqual([[1443454528000, 'test', 'testjob', 3846]]);
expect(table.columns).toMatchObject([
@@ -89,7 +89,7 @@ describe('Prometheus Result Transformer', () => {
});

describe('When resultFormat is heatmap', () => {
var response = {
const response = {
status: 'success',
data: {
resultType: 'matrix',
@@ -1,11 +1,11 @@
import { DataProcessor } from '../data_processor';

describe('Graph DataProcessor', function() {
var panel: any = {
const panel: any = {
xaxis: {},
};

var processor = new DataProcessor(panel);
const processor = new DataProcessor(panel);

describe('Given default xaxis options and query that returns docs', () => {
beforeEach(() => {
@@ -29,7 +29,7 @@ describe('Graph DataProcessor', function() {
});

describe('getDataFieldNames(', () => {
var dataList = [
const dataList = [
{
type: 'docs',
datapoints: [
@@ -46,7 +46,7 @@ describe('Graph DataProcessor', function() {
];

it('Should return all field names', () => {
var fields = processor.getDataFieldNames(dataList, false);
const fields = processor.getDataFieldNames(dataList, false);
expect(fields).toContain('hostname');
expect(fields).toContain('valueField');
expect(fields).toContain('nested.prop1');
@@ -54,7 +54,7 @@ describe('Graph DataProcessor', function() {
});

it('Should return all number fields', () => {
var fields = processor.getDataFieldNames(dataList, true);
const fields = processor.getDataFieldNames(dataList, true);
expect(fields).toContain('valueField');
expect(fields).toContain('nested.value2');
});
@@ -243,7 +243,7 @@ describe('grafanaGraph', function() {
});

it('should apply axis transform, autoscaling (if necessary) and ticks', function() {
var axisAutoscale = ctx.plotOptions.yaxes[0];
const axisAutoscale = ctx.plotOptions.yaxes[0];
expect(axisAutoscale.transform(100)).toBe(2);
expect(axisAutoscale.inverseTransform(-3)).toBeCloseTo(0.001);
expect(axisAutoscale.min).toBeCloseTo(0.001);
@@ -256,7 +256,7 @@ describe('grafanaGraph', function() {
expect(axisAutoscale.ticks[axisAutoscale.ticks.length - 1]).toBe(10000);
}

var axisFixedscale = ctx.plotOptions.yaxes[1];
const axisFixedscale = ctx.plotOptions.yaxes[1];
expect(axisFixedscale.min).toBe(0.05);
expect(axisFixedscale.max).toBe(1500);
expect(axisFixedscale.ticks.length).toBe(5);
@@ -278,7 +278,7 @@ describe('grafanaGraph', function() {
});

it('should not set min and max and should create some fake ticks', function() {
var axisAutoscale = ctx.plotOptions.yaxes[0];
const axisAutoscale = ctx.plotOptions.yaxes[0];
expect(axisAutoscale.transform(100)).toBe(2);
expect(axisAutoscale.inverseTransform(-3)).toBeCloseTo(0.001);
expect(axisAutoscale.min).toBe(undefined);
@@ -304,7 +304,7 @@ describe('grafanaGraph', function() {
});
});
it('should set min to 0.1 and add a tick for 0.1', function() {
var axisAutoscale = ctx.plotOptions.yaxes[0];
const axisAutoscale = ctx.plotOptions.yaxes[0];
expect(axisAutoscale.transform(100)).toBe(2);
expect(axisAutoscale.inverseTransform(-3)).toBeCloseTo(0.001);
expect(axisAutoscale.min).toBe(0.1);
@@ -331,7 +331,7 @@ describe('grafanaGraph', function() {
});

it('should regenerate ticks so that if fits on the y-axis', function() {
var axisAutoscale = ctx.plotOptions.yaxes[0];
const axisAutoscale = ctx.plotOptions.yaxes[0];
expect(axisAutoscale.min).toBe(0.1);
expect(axisAutoscale.ticks.length).toBe(8);
expect(axisAutoscale.ticks[0]).toBe(0.1);
@@ -432,7 +432,7 @@ describe('grafanaGraph', function() {
});

it('should show percentage', function() {
var axis = ctx.plotOptions.yaxes[0];
const axis = ctx.plotOptions.yaxes[0];
expect(axis.tickFormatter(100, axis)).toBe('100%');
});
});
@@ -448,7 +448,7 @@ describe('grafanaGraph', function() {
});

it('should format dates as hours minutes', function() {
var axis = ctx.plotOptions.xaxis;
const axis = ctx.plotOptions.xaxis;
expect(axis.timeformat).toBe('%H:%M');
});
});
@@ -462,7 +462,7 @@ describe('grafanaGraph', function() {
});

it('should format dates as month days', function() {
var axis = ctx.plotOptions.xaxis;
const axis = ctx.plotOptions.xaxis;
expect(axis.timeformat).toBe('%m/%d');
});
});
@@ -43,7 +43,7 @@ describe('GraphCtrl', () => {

describe('when time series are outside range', () => {
beforeEach(() => {
var data = [
const data = [
{
target: 'test.cpu1',
datapoints: [[45, 1234567890], [60, 1234567899]],
@@ -61,14 +61,14 @@ describe('GraphCtrl', () => {

describe('when time series are inside range', () => {
beforeEach(() => {
var range = {
const range = {
from: moment()
.subtract(1, 'days')
.valueOf(),
to: moment().valueOf(),
};

var data = [
const data = [
{
target: 'test.cpu1',
datapoints: [[45, range.from + 1000], [60, range.from + 10000]],
@@ -86,7 +86,7 @@ describe('GraphCtrl', () => {

describe('datapointsCount given 2 series', () => {
beforeEach(() => {
var data = [{ target: 'test.cpu1', datapoints: [] }, { target: 'test.cpu2', datapoints: [] }];
const data = [{ target: 'test.cpu1', datapoints: [] }, { target: 'test.cpu2', datapoints: [] }];
ctx.ctrl.onDataReceived(data);
});
@@ -3,18 +3,18 @@ jest.mock('app/core/core', () => ({}));
import $ from 'jquery';
import GraphTooltip from '../graph_tooltip';

var scope = {
const scope = {
appEvent: jest.fn(),
onAppEvent: jest.fn(),
ctrl: {},
};

var elem = $('<div></div>');
var dashboard = {};
var getSeriesFn;
const elem = $('<div></div>');
const dashboard = {};
const getSeriesFn = () => {};

function describeSharedTooltip(desc, fn) {
var ctx: any = {};
const ctx: any = {};
ctx.ctrl = scope.ctrl;
ctx.ctrl.panel = {
tooltip: {
@@ -31,7 +31,7 @@ function describeSharedTooltip(desc, fn) {
describe(desc, function() {
beforeEach(function() {
ctx.setupFn();
var tooltip = new GraphTooltip(elem, dashboard, scope, getSeriesFn);
const tooltip = new GraphTooltip(elem, dashboard, scope, getSeriesFn);
ctx.results = tooltip.getMultiSeriesPlotHoverInfo(ctx.data, ctx.pos);
});

@@ -40,28 +40,28 @@ function describeSharedTooltip(desc, fn) {
}

describe('findHoverIndexFromData', function() {
var tooltip = new GraphTooltip(elem, dashboard, scope, getSeriesFn);
var series = {
const tooltip = new GraphTooltip(elem, dashboard, scope, getSeriesFn);
const series = {
data: [[100, 0], [101, 0], [102, 0], [103, 0], [104, 0], [105, 0], [106, 0], [107, 0]],
};

it('should return 0 if posX out of lower bounds', function() {
var posX = 99;
const posX = 99;
expect(tooltip.findHoverIndexFromData(posX, series)).toBe(0);
});

it('should return n - 1 if posX out of upper bounds', function() {
var posX = 108;
const posX = 108;
expect(tooltip.findHoverIndexFromData(posX, series)).toBe(series.data.length - 1);
});

it('should return i if posX in series', function() {
var posX = 104;
const posX = 104;
expect(tooltip.findHoverIndexFromData(posX, series)).toBe(4);
});

it('should return i if posX not in series and i + 1 > posX', function() {
var posX = 104.9;
const posX = 104.9;
expect(tooltip.findHoverIndexFromData(posX, series)).toBe(4);
});
});
@@ -5,7 +5,7 @@ import { ThresholdManager } from '../threshold_manager';
describe('ThresholdManager', function() {
function plotOptionsScenario(desc, func) {
describe(desc, function() {
var ctx: any = {
const ctx: any = {
panel: {
thresholds: [],
},
@@ -17,9 +17,9 @@ describe('ThresholdManager', function() {

ctx.setup = function(thresholds, data) {
ctx.panel.thresholds = thresholds;
var manager = new ThresholdManager(ctx.panelCtrl);
const manager = new ThresholdManager(ctx.panelCtrl);
if (data !== undefined) {
var element = angular.element('<div grafana-graph><div>');
const element = angular.element('<div grafana-graph><div>');
manager.prepare(element, data);
}
manager.addFlotOptions(ctx.options, ctx.panel);
@@ -34,7 +34,7 @@ describe('ThresholdManager', function() {
ctx.setup([{ op: 'gt', value: 300, fill: true, line: true, colorMode: 'critical' }]);

it('should add fill for threshold with fill: true', function() {
var markings = ctx.options.grid.markings;
const markings = ctx.options.grid.markings;

expect(markings[0].yaxis.from).toBe(300);
expect(markings[0].yaxis.to).toBe(Infinity);
@@ -42,7 +42,7 @@ describe('ThresholdManager', function() {
});

it('should add line', function() {
var markings = ctx.options.grid.markings;
const markings = ctx.options.grid.markings;
expect(markings[1].yaxis.from).toBe(300);
expect(markings[1].yaxis.to).toBe(300);
expect(markings[1].color).toBe('rgba(237, 46, 24, 0.60)');
@@ -56,13 +56,13 @@ describe('ThresholdManager', function() {
]);

it('should add fill for first thresholds to next threshold', function() {
var markings = ctx.options.grid.markings;
const markings = ctx.options.grid.markings;
expect(markings[0].yaxis.from).toBe(200);
expect(markings[0].yaxis.to).toBe(300);
});

it('should add fill for last thresholds to infinity', function() {
var markings = ctx.options.grid.markings;
const markings = ctx.options.grid.markings;
expect(markings[1].yaxis.from).toBe(300);
expect(markings[1].yaxis.to).toBe(Infinity);
});
@@ -75,13 +75,13 @@ describe('ThresholdManager', function() {
]);

it('should add fill for first thresholds to next threshold', function() {
var markings = ctx.options.grid.markings;
const markings = ctx.options.grid.markings;
expect(markings[0].yaxis.from).toBe(300);
expect(markings[0].yaxis.to).toBe(200);
});

it('should add fill for last thresholds to itself', function() {
var markings = ctx.options.grid.markings;
const markings = ctx.options.grid.markings;
expect(markings[1].yaxis.from).toBe(200);
expect(markings[1].yaxis.to).toBe(200);
});
@@ -94,20 +94,20 @@ describe('ThresholdManager', function() {
]);

it('should add fill for first thresholds to next threshold', function() {
var markings = ctx.options.grid.markings;
const markings = ctx.options.grid.markings;
expect(markings[0].yaxis.from).toBe(300);
expect(markings[0].yaxis.to).toBe(Infinity);
});

it('should add fill for last thresholds to itself', function() {
var markings = ctx.options.grid.markings;
const markings = ctx.options.grid.markings;
expect(markings[1].yaxis.from).toBe(200);
expect(markings[1].yaxis.to).toBe(-Infinity);
});
});

plotOptionsScenario('for threshold on two Y axes', ctx => {
var data = new Array(2);
const data = new Array(2);
data[0] = new TimeSeries({
datapoints: [[0, 1], [300, 2]],
alias: 'left',
@@ -127,12 +127,12 @@ describe('ThresholdManager', function() {
);

it('should add first threshold for left axis', function() {
var markings = ctx.options.grid.markings;
const markings = ctx.options.grid.markings;
expect(markings[0].yaxis.from).toBe(100);
});

it('should add second threshold for right axis', function() {
var markings = ctx.options.grid.markings;
const markings = ctx.options.grid.markings;
expect(markings[1].y2axis.from).toBe(200);
});
});
@@ -25,7 +25,7 @@ describe('HeatmapCtrl', function() {

describe('when time series are outside range', function() {
beforeEach(function() {
var data = [
const data = [
{
target: 'test.cpu1',
datapoints: [[45, 1234567890], [60, 1234567899]],
@@ -43,14 +43,14 @@ describe('HeatmapCtrl', function() {

describe('when time series are inside range', function() {
beforeEach(function() {
var range = {
const range = {
from: moment()
.subtract(1, 'days')
.valueOf(),
to: moment().valueOf(),
};

var data = [
const data = [
{
target: 'test.cpu1',
datapoints: [[45, range.from + 1000], [60, range.from + 10000]],
@@ -68,7 +68,7 @@ describe('HeatmapCtrl', function() {

describe('datapointsCount given 2 series', function() {
beforeEach(function() {
var data = [{ target: 'test.cpu1', datapoints: [] }, { target: 'test.cpu2', datapoints: [] }];
const data = [{ target: 'test.cpu1', datapoints: [] }, { target: 'test.cpu2', datapoints: [] }];
ctx.ctrl.onDataReceived(data);
});
@@ -3,7 +3,7 @@ import { getColorForValue } from '../module';
describe('grafanaSingleStat', function() {
describe('legacy thresholds', () => {
describe('positive thresholds', () => {
var data: any = {
const data: any = {
colorMap: ['green', 'yellow', 'red'],
thresholds: [20, 50],
};
@@ -39,7 +39,7 @@ describe('grafanaSingleStat', function() {
});

describe('negative thresholds', () => {
var data: any = {
const data: any = {
colorMap: ['green', 'yellow', 'red'],
thresholds: [0, 20],
};
@@ -58,7 +58,7 @@ describe('grafanaSingleStat', function() {
});

describe('negative thresholds', () => {
var data: any = {
const data: any = {
colorMap: ['green', 'yellow', 'red'],
thresholds: [-27, 20],
};
@ -4,7 +4,7 @@ import { TableRenderer } from '../renderer';
|
||||
|
||||
describe('when rendering table', () => {
|
||||
describe('given 13 columns', () => {
|
||||
var table = new TableModel();
|
||||
const table = new TableModel();
|
||||
table.columns = [
|
||||
{ text: 'Time' },
|
||||
{ text: 'Value' },
|
||||
@ -24,7 +24,7 @@ describe('when rendering table', () => {
|
||||
[1388556366666, 1230, 40, undefined, '', '', 'my.host.com', 'host1', ['value1', 'value2'], 1, 2, 1, 2],
|
||||
];
|
||||
|
||||
var panel = {
|
||||
const panel = {
|
||||
pageSize: 10,
|
||||
styles: [
|
||||
{
|
||||
@ -163,11 +163,11 @@ describe('when rendering table', () => {
|
||||
],
|
||||
};
|
||||
|
||||
var sanitize = function(value) {
|
||||
const sanitize = function(value) {
|
||||
return 'sanitized';
|
||||
};
|
||||
|
||||
var templateSrv = {
|
||||
const templateSrv = {
|
||||
replace: function(value, scopedVars) {
|
||||
if (scopedVars) {
|
||||
// For testing variables replacement in link
|
||||
@ -179,75 +179,75 @@ describe('when rendering table', () => {
|
||||
},
|
||||
};
|
||||
|
||||
var renderer = new TableRenderer(panel, table, 'utc', sanitize, templateSrv);
|
||||
const renderer = new TableRenderer(panel, table, 'utc', sanitize, templateSrv);
|
||||
|
||||
it('time column should be formated', () => {
|
||||
var html = renderer.renderCell(0, 0, 1388556366666);
|
||||
const html = renderer.renderCell(0, 0, 1388556366666);
|
||||
expect(html).toBe('<td>2014-01-01T06:06:06Z</td>');
|
||||
});
|
||||
|
||||
it('undefined time column should be rendered as -', () => {
|
||||
var html = renderer.renderCell(0, 0, undefined);
|
||||
const html = renderer.renderCell(0, 0, undefined);
|
||||
expect(html).toBe('<td>-</td>');
|
||||
});
|
||||
|
||||
it('null time column should be rendered as -', () => {
|
||||
var html = renderer.renderCell(0, 0, null);
|
||||
const html = renderer.renderCell(0, 0, null);
|
||||
expect(html).toBe('<td>-</td>');
|
||||
});
|
||||
|
||||
it('number column with unit specified should ignore style unit', () => {
|
||||
var html = renderer.renderCell(5, 0, 1230);
|
||||
const html = renderer.renderCell(5, 0, 1230);
|
||||
expect(html).toBe('<td>1.23 kbps</td>');
|
||||
});
|
||||
|
||||
it('number column should be formated', () => {
|
||||
var html = renderer.renderCell(1, 0, 1230);
|
||||
const html = renderer.renderCell(1, 0, 1230);
|
||||
expect(html).toBe('<td>1.230 s</td>');
|
||||
});
|
||||
|
||||
it('number style should ignore string values', () => {
|
||||
var html = renderer.renderCell(1, 0, 'asd');
|
||||
const html = renderer.renderCell(1, 0, 'asd');
|
||||
expect(html).toBe('<td>asd</td>');
|
||||
});
|
||||
|
||||
it('colored cell should have style', () => {
|
||||
var html = renderer.renderCell(2, 0, 40);
|
||||
const html = renderer.renderCell(2, 0, 40);
|
||||
expect(html).toBe('<td style="color:green">40.0</td>');
|
||||
});
|
||||
|
||||
it('colored cell should have style', () => {
|
||||
var html = renderer.renderCell(2, 0, 55);
const html = renderer.renderCell(2, 0, 55);
expect(html).toBe('<td style="color:orange">55.0</td>');
});

it('colored cell should have style', () => {
var html = renderer.renderCell(2, 0, 85);
const html = renderer.renderCell(2, 0, 85);
expect(html).toBe('<td style="color:red">85.0</td>');
});

it('unformated undefined should be rendered as string', () => {
var html = renderer.renderCell(3, 0, 'value');
const html = renderer.renderCell(3, 0, 'value');
expect(html).toBe('<td>value</td>');
});

it('string style with escape html should return escaped html', () => {
var html = renderer.renderCell(4, 0, '&breaking <br /> the <br /> row');
const html = renderer.renderCell(4, 0, '&breaking <br /> the <br /> row');
expect(html).toBe('<td>&amp;breaking &lt;br /&gt; the &lt;br /&gt; row</td>');
});

it('undefined formater should return escaped html', () => {
var html = renderer.renderCell(3, 0, '&breaking <br /> the <br /> row');
const html = renderer.renderCell(3, 0, '&breaking <br /> the <br /> row');
expect(html).toBe('<td>&amp;breaking &lt;br /&gt; the &lt;br /&gt; row</td>');
});

it('undefined value should render as -', () => {
var html = renderer.renderCell(3, 0, undefined);
const html = renderer.renderCell(3, 0, undefined);
expect(html).toBe('<td></td>');
});

it('sanitized value should render as', () => {
var html = renderer.renderCell(6, 0, 'text <a href="http://google.com">link</a>');
const html = renderer.renderCell(6, 0, 'text <a href="http://google.com">link</a>');
expect(html).toBe('<td>sanitized</td>');
});

@ -264,8 +264,8 @@ describe('when rendering table', () => {
});

it('link should render as', () => {
var html = renderer.renderCell(7, 0, 'host1');
var expectedHtml = `
const html = renderer.renderCell(7, 0, 'host1');
const expectedHtml = `
<td class="table-panel-cell-link">
<a href="/dashboard?param=host1&param_1=1230&param_2=40"
target="_blank" data-link-tooltip data-original-title="host1 1230 my.host.com" data-placement="right">
@ -277,87 +277,87 @@ describe('when rendering table', () => {
});

it('Array column should not use number as formatter', () => {
var html = renderer.renderCell(8, 0, ['value1', 'value2']);
const html = renderer.renderCell(8, 0, ['value1', 'value2']);
expect(html).toBe('<td>value1, value2</td>');
});

it('numeric value should be mapped to text', () => {
var html = renderer.renderCell(9, 0, 1);
const html = renderer.renderCell(9, 0, 1);
expect(html).toBe('<td>on</td>');
});

it('string numeric value should be mapped to text', () => {
var html = renderer.renderCell(9, 0, '0');
const html = renderer.renderCell(9, 0, '0');
expect(html).toBe('<td>off</td>');
});

it('string value should be mapped to text', () => {
var html = renderer.renderCell(9, 0, 'HELLO WORLD');
const html = renderer.renderCell(9, 0, 'HELLO WORLD');
expect(html).toBe('<td>HELLO GRAFANA</td>');
});

it('array column value should be mapped to text', () => {
var html = renderer.renderCell(9, 0, ['value1', 'value2']);
const html = renderer.renderCell(9, 0, ['value1', 'value2']);
expect(html).toBe('<td>value3, value4</td>');
});

it('value should be mapped to text (range)', () => {
var html = renderer.renderCell(10, 0, 2);
const html = renderer.renderCell(10, 0, 2);
expect(html).toBe('<td>on</td>');
});

it('value should be mapped to text (range)', () => {
var html = renderer.renderCell(10, 0, 5);
const html = renderer.renderCell(10, 0, 5);
expect(html).toBe('<td>off</td>');
});

it('array column value should not be mapped to text', () => {
var html = renderer.renderCell(10, 0, ['value1', 'value2']);
const html = renderer.renderCell(10, 0, ['value1', 'value2']);
expect(html).toBe('<td>value1, value2</td>');
});

it('value should be mapped to text and colored cell should have style', () => {
var html = renderer.renderCell(11, 0, 1);
const html = renderer.renderCell(11, 0, 1);
expect(html).toBe('<td style="color:orange">on</td>');
});

it('value should be mapped to text and colored cell should have style', () => {
var html = renderer.renderCell(11, 0, '1');
const html = renderer.renderCell(11, 0, '1');
expect(html).toBe('<td style="color:orange">on</td>');
});

it('value should be mapped to text and colored cell should have style', () => {
var html = renderer.renderCell(11, 0, 0);
const html = renderer.renderCell(11, 0, 0);
expect(html).toBe('<td style="color:green">off</td>');
});

it('value should be mapped to text and colored cell should have style', () => {
var html = renderer.renderCell(11, 0, '0');
const html = renderer.renderCell(11, 0, '0');
expect(html).toBe('<td style="color:green">off</td>');
});

it('value should be mapped to text and colored cell should have style', () => {
var html = renderer.renderCell(11, 0, '2.1');
const html = renderer.renderCell(11, 0, '2.1');
expect(html).toBe('<td style="color:red">2.1</td>');
});

it('value should be mapped to text (range) and colored cell should have style', () => {
var html = renderer.renderCell(12, 0, 0);
const html = renderer.renderCell(12, 0, 0);
expect(html).toBe('<td style="color:green">0</td>');
});

it('value should be mapped to text (range) and colored cell should have style', () => {
var html = renderer.renderCell(12, 0, 1);
const html = renderer.renderCell(12, 0, 1);
expect(html).toBe('<td style="color:green">on</td>');
});

it('value should be mapped to text (range) and colored cell should have style', () => {
var html = renderer.renderCell(12, 0, 4);
const html = renderer.renderCell(12, 0, 4);
expect(html).toBe('<td style="color:orange">off</td>');
});

it('value should be mapped to text (range) and colored cell should have style', () => {
var html = renderer.renderCell(12, 0, '7.1');
const html = renderer.renderCell(12, 0, '7.1');
expect(html).toBe('<td style="color:red">7.1</td>');
});
});
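
The mapping cases above combine two column-style features: value-to-text mapping and threshold-based coloring. As a rough, hypothetical sketch of how those two rules compose (illustrative names like mapCellValue and ValueMap are assumptions, not the table panel's actual renderer):

interface ValueMap { value: string; text: string; }

// Maps a raw cell value to display text, then colors it by the last threshold it reaches.
// colors is assumed to hold one more entry than thresholds (base color first).
function mapCellValue(value: string | number, valueMaps: ValueMap[], thresholds: number[], colors: string[]): string {
  const mapped = valueMaps.find(m => m.value === String(value));
  const text = mapped ? mapped.text : String(value);

  const numeric = typeof value === 'number' ? value : parseFloat(value);
  let color = colors[0];
  for (let i = 0; i < thresholds.length; i++) {
    if (!isNaN(numeric) && numeric >= thresholds[i]) {
      color = colors[i + 1];
    }
  }

  return '<td style="color:' + color + '">' + text + '</td>';
}

// e.g. mapCellValue(1, [{ value: '1', text: 'on' }], [1, 2], ['green', 'orange', 'red'])
// would yield '<td style="color:orange">on</td>', matching the shape of the expectations above.
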
@ -1,11 +1,11 @@
import { transformers, transformDataToTable } from '../transformers';

describe('when transforming time series table', () => {
var table;
let table;

describe('given 2 time series', () => {
var time = new Date().getTime();
var timeSeries = [
const time = new Date().getTime();
const timeSeries = [
{
target: 'series1',
datapoints: [[12.12, time], [14.44, time + 1]],
@ -17,7 +17,7 @@ describe('when transforming time series table', () => {
];

describe('timeseries_to_rows', () => {
var panel = {
const panel = {
transform: 'timeseries_to_rows',
sort: { col: 0, desc: true },
};
@ -43,7 +43,7 @@ describe('when transforming time series table', () => {
});

describe('timeseries_to_columns', () => {
var panel = {
const panel = {
transform: 'timeseries_to_columns',
};

@ -70,7 +70,7 @@ describe('when transforming time series table', () => {
});

describe('timeseries_aggregations', () => {
var panel = {
const panel = {
transform: 'timeseries_aggregations',
sort: { col: 0, desc: true },
columns: [{ text: 'Max', value: 'max' }, { text: 'Min', value: 'min' }],
@ -99,12 +99,12 @@ describe('when transforming time series table', () => {
describe('table data sets', () => {
describe('Table', () => {
const transform = 'table';
var panel = {
const panel = {
transform,
};
var time = new Date().getTime();
const time = new Date().getTime();

var nonTableData = [
const nonTableData = [
{
type: 'foo',
columns: [{ text: 'Time' }, { text: 'Label Key 1' }, { text: 'Value' }],
@ -112,7 +112,7 @@ describe('when transforming time series table', () => {
},
];

var singleQueryData = [
const singleQueryData = [
{
type: 'table',
columns: [{ text: 'Time' }, { text: 'Label Key 1' }, { text: 'Value' }],
@ -120,7 +120,7 @@ describe('when transforming time series table', () => {
},
];

var multipleQueriesDataSameLabels = [
const multipleQueriesDataSameLabels = [
{
type: 'table',
columns: [{ text: 'Time' }, { text: 'Label Key 1' }, { text: 'Label Key 2' }, { text: 'Value #A' }],
@ -143,7 +143,7 @@ describe('when transforming time series table', () => {
},
];

var multipleQueriesDataDifferentLabels = [
const multipleQueriesDataDifferentLabels = [
{
type: 'table',
columns: [{ text: 'Time' }, { text: 'Label Key 1' }, { text: 'Value #A' }],
@ -163,14 +163,14 @@ describe('when transforming time series table', () => {

describe('getColumns', function() {
it('should return data columns given a single query', function() {
var columns = transformers[transform].getColumns(singleQueryData);
const columns = transformers[transform].getColumns(singleQueryData);
expect(columns[0].text).toBe('Time');
expect(columns[1].text).toBe('Label Key 1');
expect(columns[2].text).toBe('Value');
});

it('should return the union of data columns given a multiple queries', function() {
var columns = transformers[transform].getColumns(multipleQueriesDataSameLabels);
const columns = transformers[transform].getColumns(multipleQueriesDataSameLabels);
expect(columns[0].text).toBe('Time');
expect(columns[1].text).toBe('Label Key 1');
expect(columns[2].text).toBe('Label Key 2');
@ -179,7 +179,7 @@ describe('when transforming time series table', () => {
});

it('should return the union of data columns given a multiple queries with different labels', function() {
var columns = transformers[transform].getColumns(multipleQueriesDataDifferentLabels);
const columns = transformers[transform].getColumns(multipleQueriesDataDifferentLabels);
expect(columns[0].text).toBe('Time');
expect(columns[1].text).toBe('Label Key 1');
expect(columns[2].text).toBe('Value #A');
@ -263,7 +263,7 @@ describe('when transforming time series table', () => {

describe('doc data sets', () => {
describe('JSON Data', () => {
var panel = {
const panel = {
transform: 'json',
columns: [
{ text: 'Timestamp', value: 'timestamp' },
@ -271,7 +271,7 @@ describe('when transforming time series table', () => {
{ text: 'nested.level2', value: 'nested.level2' },
],
};
var rawData = [
const rawData = [
{
type: 'docs',
datapoints: [
@ -288,7 +288,7 @@ describe('when transforming time series table', () => {

describe('getColumns', function() {
it('should return nested properties', function() {
var columns = transformers['json'].getColumns(rawData);
const columns = transformers['json'].getColumns(rawData);
expect(columns[0].text).toBe('timestamp');
expect(columns[1].text).toBe('message');
expect(columns[2].text).toBe('nested.level2');
@ -319,8 +319,8 @@ describe('when transforming time series table', () => {

describe('annotation data', () => {
describe('Annnotations', () => {
var panel = { transform: 'annotations' };
var rawData = {
const panel = { transform: 'annotations' };
const rawData = {
annotations: [
{
time: 1000,
@ -18,5 +18,5 @@ jest.mock('app/features/plugins/plugin_loader', () => ({}));

configure({ adapter: new Adapter() });

var global = <any>window;
const global = <any>window;
global.$ = global.jQuery = $;
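
The remaining hunks apply the same rule as the files above: loop counters move to let, single-assignment bindings to const. A small standalone TypeScript illustration of why the two keywords are split this way (not taken from these files):

const panel = { transform: 'table' };
panel.transform = 'json'; // allowed: const prevents rebinding, not mutation of the object
// panel = { transform: 'json' }; // compile error: cannot assign to 'panel' because it is a constant

for (let i = 0; i < 3; i++) {
  // let is required here: i is reassigned on every iteration
  console.log(i);
}
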
@ -5,7 +5,7 @@ import { angularMocks, sinon } from '../lib/common';
import { PanelModel } from 'app/features/dashboard/panel_model';

export function ControllerTestContext() {
var self = this;
const self = this;

this.datasource = {};
this.$element = {};
@ -58,7 +58,7 @@ export function ControllerTestContext() {
$rootScope.onAppEvent = sinon.spy();
$rootScope.colors = [];

for (var i = 0; i < 50; i++) {
for (let i = 0; i < 50; i++) {
$rootScope.colors.push('#' + i);
}

@ -88,7 +88,7 @@ export function ControllerTestContext() {
self.scope.onAppEvent = sinon.spy();

$rootScope.colors = [];
for (var i = 0; i < 50; i++) {
for (let i = 0; i < 50; i++) {
$rootScope.colors.push('#' + i);
}

@ -107,7 +107,7 @@ export function ControllerTestContext() {
}

export function ServiceTestContext() {
var self = this;
const self = this;
self.templateSrv = new TemplateSrvStub();
self.timeSrv = new TimeSrvStub();
self.datasourceSrv = {};
@ -195,7 +195,7 @@ export function TemplateSrvStub() {
};
}

var allDeps = {
const allDeps = {
ContextSrvStub,
TemplateSrvStub,
TimeSrvStub,