From d83f886519c1e20d3eadfa03a575ed8d0022cdea Mon Sep 17 00:00:00 2001 From: Patrick O'Carroll Date: Tue, 3 Apr 2018 16:36:43 +0200 Subject: [PATCH 001/105] migrated jquery.flot.events to ts --- .../plugins/panel/graph/jquery.flot.events.js | 604 ---------------- .../plugins/panel/graph/jquery.flot.events.ts | 663 ++++++++++++++++++ 2 files changed, 663 insertions(+), 604 deletions(-) delete mode 100644 public/app/plugins/panel/graph/jquery.flot.events.js create mode 100644 public/app/plugins/panel/graph/jquery.flot.events.ts diff --git a/public/app/plugins/panel/graph/jquery.flot.events.js b/public/app/plugins/panel/graph/jquery.flot.events.js deleted file mode 100644 index 3ea3ca8f330..00000000000 --- a/public/app/plugins/panel/graph/jquery.flot.events.js +++ /dev/null @@ -1,604 +0,0 @@ -define([ - 'jquery', - 'lodash', - 'angular', - 'tether-drop', -], -function ($, _, angular, Drop) { - 'use strict'; - - function createAnnotationToolip(element, event, plot) { - var injector = angular.element(document).injector(); - var content = document.createElement('div'); - content.innerHTML = ''; - - injector.invoke(["$compile", "$rootScope", function($compile, $rootScope) { - var eventManager = plot.getOptions().events.manager; - var tmpScope = $rootScope.$new(true); - tmpScope.event = event; - tmpScope.onEdit = function() { - eventManager.editEvent(event); - }; - - $compile(content)(tmpScope); - tmpScope.$digest(); - tmpScope.$destroy(); - - var drop = new Drop({ - target: element[0], - content: content, - position: "bottom center", - classes: 'drop-popover drop-popover--annotation', - openOn: 'hover', - hoverCloseDelay: 200, - tetherOptions: { - constraints: [{to: 'window', pin: true, attachment: "both"}] - } - }); - - drop.open(); - - drop.on('close', function() { - setTimeout(function() { - drop.destroy(); - }); - }); - }]); - } - - var markerElementToAttachTo = null; - - function createEditPopover(element, event, plot) { - var eventManager = plot.getOptions().events.manager; - if (eventManager.editorOpen) { - // update marker element to attach to (needed in case of legend on the right - // when there is a double render pass and the initial marker element is removed) - markerElementToAttachTo = element; - return; - } - - // mark as openend - eventManager.editorOpened(); - // set marker element to attache to - markerElementToAttachTo = element; - - // wait for element to be attached and positioned - setTimeout(function() { - - var injector = angular.element(document).injector(); - var content = document.createElement('div'); - content.innerHTML = ''; - - injector.invoke(["$compile", "$rootScope", function($compile, $rootScope) { - var scope = $rootScope.$new(true); - var drop; - - scope.event = event; - scope.panelCtrl = eventManager.panelCtrl; - scope.close = function() { - drop.close(); - }; - - $compile(content)(scope); - scope.$digest(); - - drop = new Drop({ - target: markerElementToAttachTo[0], - content: content, - position: "bottom center", - classes: 'drop-popover drop-popover--form', - openOn: 'click', - tetherOptions: { - constraints: [{to: 'window', pin: true, attachment: "both"}] - } - }); - - drop.open(); - eventManager.editorOpened(); - - drop.on('close', function() { - // need timeout here in order call drop.destroy - setTimeout(function() { - eventManager.editorClosed(); - scope.$destroy(); - drop.destroy(); - }); - }); - }]); - - }, 100); - } - - /* - * jquery.flot.events - * - * description: Flot plugin for adding events/markers to the plot - * version: 0.2.5 - * authors: - 
* Alexander Wunschik - * Joel Oughton - * Nicolas Joseph - * - * website: https://github.com/mojoaxel/flot-events - * - * released under MIT License and GPLv2+ - */ - - /** - * A class that allows for the drawing an remove of some object - */ - var DrawableEvent = function(object, drawFunc, clearFunc, moveFunc, left, top, width, height) { - var _object = object; - var _drawFunc = drawFunc; - var _clearFunc = clearFunc; - var _moveFunc = moveFunc; - var _position = { left: left, top: top }; - var _width = width; - var _height = height; - - this.width = function() { return _width; }; - this.height = function() { return _height; }; - this.position = function() { return _position; }; - this.draw = function() { _drawFunc(_object); }; - this.clear = function() { _clearFunc(_object); }; - this.getObject = function() { return _object; }; - this.moveTo = function(position) { - _position = position; - _moveFunc(_object, _position); - }; - }; - - /** - * Event class that stores options (eventType, min, max, title, description) and the object to draw. - */ - var VisualEvent = function(options, drawableEvent) { - var _parent; - var _options = options; - var _drawableEvent = drawableEvent; - var _hidden = false; - - this.visual = function() { return _drawableEvent; }; - this.getOptions = function() { return _options; }; - this.getParent = function() { return _parent; }; - this.isHidden = function() { return _hidden; }; - this.hide = function() { _hidden = true; }; - this.unhide = function() { _hidden = false; }; - }; - - /** - * A Class that handles the event-markers inside the given plot - */ - var EventMarkers = function(plot) { - var _events = []; - - this._types = []; - this._plot = plot; - this.eventsEnabled = false; - - this.getEvents = function() { - return _events; - }; - - this.setTypes = function(types) { - return this._types = types; - }; - - /** - * create internal objects for the given events - */ - this.setupEvents = function(events) { - var that = this; - var parts = _.partition(events, 'isRegion'); - var regions = parts[0]; - events = parts[1]; - - $.each(events, function(index, event) { - var ve = new VisualEvent(event, that._buildDiv(event)); - _events.push(ve); - }); - - $.each(regions, function (index, event) { - var vre = new VisualEvent(event, that._buildRegDiv(event)); - _events.push(vre); - }); - - _events.sort(function(a, b) { - var ao = a.getOptions(), bo = b.getOptions(); - if (ao.min > bo.min) { return 1; } - if (ao.min < bo.min) { return -1; } - return 0; - }); - }; - - /** - * draw the events to the plot - */ - this.drawEvents = function() { - var that = this; - // var o = this._plot.getPlotOffset(); - - $.each(_events, function(index, event) { - // check event is inside the graph range - if (that._insidePlot(event.getOptions().min) && !event.isHidden()) { - event.visual().draw(); - } else { - event.visual().getObject().hide(); - } - }); - }; - - /** - * update the position of the event-markers (e.g. 
after scrolling or zooming) - */ - this.updateEvents = function() { - var that = this; - var o = this._plot.getPlotOffset(), left, top; - var xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; - - $.each(_events, function(index, event) { - top = o.top + that._plot.height() - event.visual().height(); - left = xaxis.p2c(event.getOptions().min) + o.left - event.visual().width() / 2; - event.visual().moveTo({ top: top, left: left }); - }); - }; - - /** - * remove all events from the plot - */ - this._clearEvents = function() { - $.each(_events, function(index, val) { - val.visual().clear(); - }); - _events = []; - }; - - /** - * create a DOM element for the given event - */ - this._buildDiv = function(event) { - var that = this; - - var container = this._plot.getPlaceholder(); - var o = this._plot.getPlotOffset(); - var axes = this._plot.getAxes(); - var xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; - var yaxis, top, left, color, markerSize, markerShow, lineStyle, lineWidth; - var markerTooltip; - - // determine the y axis used - if (axes.yaxis && axes.yaxis.used) { yaxis = axes.yaxis; } - if (axes.yaxis2 && axes.yaxis2.used) { yaxis = axes.yaxis2; } - - // map the eventType to a types object - var eventTypeId = event.eventType; - - if (this._types === null || !this._types[eventTypeId] || !this._types[eventTypeId].color) { - color = '#666'; - } else { - color = this._types[eventTypeId].color; - } - - if (this._types === null || !this._types[eventTypeId] || !this._types[eventTypeId].markerSize) { - markerSize = 8; //default marker size - } else { - markerSize = this._types[eventTypeId].markerSize; - } - - if (this._types === null || !this._types[eventTypeId] || this._types[eventTypeId].markerShow === undefined) { - markerShow = true; - } else { - markerShow = this._types[eventTypeId].markerShow; - } - - if (this._types === null || !this._types[eventTypeId] || this._types[eventTypeId].markerTooltip === undefined) { - markerTooltip = true; - } else { - markerTooltip = this._types[eventTypeId].markerTooltip; - } - - if (this._types == null || !this._types[eventTypeId] || !this._types[eventTypeId].lineStyle) { - lineStyle = 'dashed'; //default line style - } else { - lineStyle = this._types[eventTypeId].lineStyle.toLowerCase(); - } - - if (this._types == null || !this._types[eventTypeId] || this._types[eventTypeId].lineWidth === undefined) { - lineWidth = 1; //default line width - } else { - lineWidth = this._types[eventTypeId].lineWidth; - } - - var topOffset = xaxis.options.eventSectionHeight || 0; - topOffset = topOffset / 3; - - top = o.top + this._plot.height() + topOffset; - left = xaxis.p2c(event.min) + o.left; - - var line = $('
').css({ - "position": "absolute", - "opacity": 0.8, - "left": left + 'px', - "top": 8, - "width": lineWidth + "px", - "height": this._plot.height() + topOffset * 0.8, - "border-left-width": lineWidth + "px", - "border-left-style": lineStyle, - "border-left-color": color, - "color": color - }) - .appendTo(container); - - if (markerShow) { - var marker = $('
').css({ - "position": "absolute", - "left": (-markerSize - Math.round(lineWidth / 2)) + "px", - "font-size": 0, - "line-height": 0, - "width": 0, - "height": 0, - "border-left": markerSize+"px solid transparent", - "border-right": markerSize+"px solid transparent" - }); - - marker.appendTo(line); - - if (this._types[eventTypeId] && this._types[eventTypeId].position && this._types[eventTypeId].position.toUpperCase() === 'BOTTOM') { - marker.css({ - "top": top-markerSize-8 +"px", - "border-top": "none", - "border-bottom": markerSize+"px solid " + color - }); - } else { - marker.css({ - "top": "0px", - "border-top": markerSize+"px solid " + color, - "border-bottom": "none" - }); - } - - marker.data({ - "event": event - }); - - var mouseenter = function() { - createAnnotationToolip(marker, $(this).data("event"), that._plot); - }; - - if (event.editModel) { - createEditPopover(marker, event.editModel, that._plot); - } - - var mouseleave = function() { - that._plot.clearSelection(); - }; - - if (markerTooltip) { - marker.css({ "cursor": "help" }); - marker.hover(mouseenter, mouseleave); - } - } - - var drawableEvent = new DrawableEvent( - line, - function drawFunc(obj) { obj.show(); }, - function(obj) { obj.remove(); }, - function(obj, position) { - obj.css({ - top: position.top, - left: position.left - }); - }, - left, - top, - line.width(), - line.height() - ); - - return drawableEvent; - }; - - /** - * create a DOM element for the given region - */ - this._buildRegDiv = function (event) { - var that = this; - - var container = this._plot.getPlaceholder(); - var o = this._plot.getPlotOffset(); - var axes = this._plot.getAxes(); - var xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; - var yaxis, top, left, lineWidth, regionWidth, lineStyle, color, markerTooltip; - - // determine the y axis used - if (axes.yaxis && axes.yaxis.used) { yaxis = axes.yaxis; } - if (axes.yaxis2 && axes.yaxis2.used) { yaxis = axes.yaxis2; } - - // map the eventType to a types object - var eventTypeId = event.eventType; - - if (this._types === null || !this._types[eventTypeId] || !this._types[eventTypeId].color) { - color = '#666'; - } else { - color = this._types[eventTypeId].color; - } - - if (this._types === null || !this._types[eventTypeId] || this._types[eventTypeId].markerTooltip === undefined) { - markerTooltip = true; - } else { - markerTooltip = this._types[eventTypeId].markerTooltip; - } - - if (this._types == null || !this._types[eventTypeId] || this._types[eventTypeId].lineWidth === undefined) { - lineWidth = 1; //default line width - } else { - lineWidth = this._types[eventTypeId].lineWidth; - } - - if (this._types == null || !this._types[eventTypeId] || !this._types[eventTypeId].lineStyle) { - lineStyle = 'dashed'; //default line style - } else { - lineStyle = this._types[eventTypeId].lineStyle.toLowerCase(); - } - - var topOffset = 2; - top = o.top + this._plot.height() + topOffset; - - var timeFrom = Math.min(event.min, event.timeEnd); - var timeTo = Math.max(event.min, event.timeEnd); - left = xaxis.p2c(timeFrom) + o.left; - var right = xaxis.p2c(timeTo) + o.left; - regionWidth = right - left; - - _.each([left, right], function(position) { - var line = $('
').css({ - "position": "absolute", - "opacity": 0.8, - "left": position + 'px', - "top": 8, - "width": lineWidth + "px", - "height": that._plot.height() + topOffset, - "border-left-width": lineWidth + "px", - "border-left-style": lineStyle, - "border-left-color": color, - "color": color - }); - line.appendTo(container); - }); - - var region = $('
').css({ - "position": "absolute", - "opacity": 0.5, - "left": left + 'px', - "top": top, - "width": Math.round(regionWidth + lineWidth) + "px", - "height": "0.5rem", - "border-left-color": color, - "color": color, - "background-color": color - }); - region.appendTo(container); - - region.data({ - "event": event - }); - - var mouseenter = function () { - createAnnotationToolip(region, $(this).data("event"), that._plot); - }; - - if (event.editModel) { - createEditPopover(region, event.editModel, that._plot); - } - - var mouseleave = function () { - that._plot.clearSelection(); - }; - - if (markerTooltip) { - region.css({ "cursor": "help" }); - region.hover(mouseenter, mouseleave); - } - - var drawableEvent = new DrawableEvent( - region, - function drawFunc(obj) { obj.show(); }, - function (obj) { obj.remove(); }, - function (obj, position) { - obj.css({ - top: position.top, - left: position.left - }); - }, - left, - top, - region.width(), - region.height() - ); - - return drawableEvent; - }; - - /** - * check if the event is inside visible range - */ - this._insidePlot = function(x) { - var xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; - var xc = xaxis.p2c(x); - return xc > 0 && xc < xaxis.p2c(xaxis.max); - }; - }; - - /** - * initialize the plugin for the given plot - */ - function init(plot) { - /*jshint validthis:true */ - var that = this; - var eventMarkers = new EventMarkers(plot); - - plot.getEvents = function() { - return eventMarkers._events; - }; - - plot.hideEvents = function() { - $.each(eventMarkers._events, function(index, event) { - event.visual().getObject().hide(); - }); - }; - - plot.showEvents = function() { - plot.hideEvents(); - $.each(eventMarkers._events, function(index, event) { - event.hide(); - }); - - that.eventMarkers.drawEvents(); - }; - - // change events on an existing plot - plot.setEvents = function(events) { - if (eventMarkers.eventsEnabled) { - eventMarkers.setupEvents(events); - } - }; - - plot.hooks.processOptions.push(function(plot, options) { - // enable the plugin - if (options.events.data != null) { - eventMarkers.eventsEnabled = true; - } - }); - - plot.hooks.draw.push(function(plot) { - var options = plot.getOptions(); - - if (eventMarkers.eventsEnabled) { - // check for first run - if (eventMarkers.getEvents().length < 1) { - eventMarkers.setTypes(options.events.types); - eventMarkers.setupEvents(options.events.data); - } else { - eventMarkers.updateEvents(); - } - } - - eventMarkers.drawEvents(); - }); - } - - var defaultOptions = { - events: { - data: null, - types: null, - xaxis: 1, - position: 'BOTTOM' - } - }; - - $.plot.plugins.push({ - init: init, - options: defaultOptions, - name: "events", - version: "0.2.5" - }); -}); diff --git a/public/app/plugins/panel/graph/jquery.flot.events.ts b/public/app/plugins/panel/graph/jquery.flot.events.ts new file mode 100644 index 00000000000..642883ff75c --- /dev/null +++ b/public/app/plugins/panel/graph/jquery.flot.events.ts @@ -0,0 +1,663 @@ +import $ from 'jquery'; +import _ from 'lodash'; +import angular from 'angular'; +import Drop from 'tether-drop'; + +function createAnnotationToolip(element, event, plot) { + let injector = angular.element(document).injector(); + let content = document.createElement('div'); + content.innerHTML = ''; + + injector.invoke([ + '$compile', + '$rootScope', + function($compile, $rootScope) { + let eventManager = plot.getOptions().events.manager; + let tmpScope = $rootScope.$new(true); + tmpScope.event = event; + tmpScope.onEdit = function() { + 
eventManager.editEvent(event); + }; + + $compile(content)(tmpScope); + tmpScope.$digest(); + tmpScope.$destroy(); + + let drop = new Drop({ + target: element[0], + content: content, + position: 'bottom center', + classes: 'drop-popover drop-popover--annotation', + openOn: 'hover', + hoverCloseDelay: 200, + tetherOptions: { + constraints: [{ to: 'window', pin: true, attachment: 'both' }], + }, + }); + + drop.open(); + + drop.on('close', function() { + setTimeout(function() { + drop.destroy(); + }); + }); + }, + ]); +} + +let markerElementToAttachTo = null; + +function createEditPopover(element, event, plot) { + let eventManager = plot.getOptions().events.manager; + if (eventManager.editorOpen) { + // update marker element to attach to (needed in case of legend on the right + // when there is a double render pass and the inital marker element is removed) + markerElementToAttachTo = element; + return; + } + + // mark as openend + eventManager.editorOpened(); + // set marker elment to attache to + markerElementToAttachTo = element; + + // wait for element to be attached and positioned + setTimeout(function() { + let injector = angular.element(document).injector(); + let content = document.createElement('div'); + content.innerHTML = ''; + + injector.invoke([ + '$compile', + '$rootScope', + function($compile, $rootScope) { + let scope = $rootScope.$new(true); + let drop; + + scope.event = event; + scope.panelCtrl = eventManager.panelCtrl; + scope.close = function() { + drop.close(); + }; + + $compile(content)(scope); + scope.$digest(); + + drop = new Drop({ + target: markerElementToAttachTo[0], + content: content, + position: 'bottom center', + classes: 'drop-popover drop-popover--form', + openOn: 'click', + tetherOptions: { + constraints: [{ to: 'window', pin: true, attachment: 'both' }], + }, + }); + + drop.open(); + eventManager.editorOpened(); + + drop.on('close', function() { + // need timeout here in order call drop.destroy + setTimeout(function() { + eventManager.editorClosed(); + scope.$destroy(); + drop.destroy(); + }); + }); + }, + ]); + }, 100); +} + +/* + * jquery.flot.events + * + * description: Flot plugin for adding events/markers to the plot + * version: 0.2.5 + * authors: + * Alexander Wunschik + * Joel Oughton + * Nicolas Joseph + * + * website: https://github.com/mojoaxel/flot-events + * + * released under MIT License and GPLv2+ + */ + +/** + * A class that allows for the drawing an remove of some object + */ +let DrawableEvent = function(object, drawFunc, clearFunc, moveFunc, left, top, width, height) { + let _object = object; + let _drawFunc = drawFunc; + let _clearFunc = clearFunc; + let _moveFunc = moveFunc; + let _position = { left: left, top: top }; + let _width = width; + let _height = height; + + this.width = function() { + return _width; + }; + this.height = function() { + return _height; + }; + this.position = function() { + return _position; + }; + this.draw = function() { + _drawFunc(_object); + }; + this.clear = function() { + _clearFunc(_object); + }; + this.getObject = function() { + return _object; + }; + this.moveTo = function(position) { + _position = position; + _moveFunc(_object, _position); + }; +}; + +/** + * Event class that stores options (eventType, min, max, title, description) and the object to draw. 
+ */ +let VisualEvent = function(options, drawableEvent) { + let _parent; + let _options = options; + let _drawableEvent = drawableEvent; + let _hidden = false; + + this.visual = function() { + return _drawableEvent; + }; + this.getOptions = function() { + return _options; + }; + this.getParent = function() { + return _parent; + }; + this.isHidden = function() { + return _hidden; + }; + this.hide = function() { + _hidden = true; + }; + this.unhide = function() { + _hidden = false; + }; +}; + +/** + * A Class that handles the event-markers inside the given plot + */ +let EventMarkers = function(plot) { + let _events = []; + + this._types = []; + this._plot = plot; + this.eventsEnabled = false; + + this.getEvents = function() { + return _events; + }; + + this.setTypes = function(types) { + return (this._types = types); + }; + + /** + * create internal objects for the given events + */ + this.setupEvents = function(events) { + let that = this; + let parts = _.partition(events, 'isRegion'); + let regions = parts[0]; + events = parts[1]; + + $.each(events, function(index, event) { + let ve = new VisualEvent(event, that._buildDiv(event)); + _events.push(ve); + }); + + $.each(regions, function(index, event) { + let vre = new VisualEvent(event, that._buildRegDiv(event)); + _events.push(vre); + }); + + _events.sort(function(a, b) { + let ao = a.getOptions(), + bo = b.getOptions(); + if (ao.min > bo.min) { + return 1; + } + if (ao.min < bo.min) { + return -1; + } + return 0; + }); + }; + + /** + * draw the events to the plot + */ + this.drawEvents = function() { + let that = this; + // let o = this._plot.getPlotOffset(); + + $.each(_events, function(index, event) { + // check event is inside the graph range + if (that._insidePlot(event.getOptions().min) && !event.isHidden()) { + event.visual().draw(); + } else { + event + .visual() + .getObject() + .hide(); + } + }); + }; + + /** + * update the position of the event-markers (e.g. 
after scrolling or zooming) + */ + this.updateEvents = function() { + let that = this; + let o = this._plot.getPlotOffset(), + left, + top; + let xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; + + $.each(_events, function(index, event) { + top = o.top + that._plot.height() - event.visual().height(); + left = xaxis.p2c(event.getOptions().min) + o.left - event.visual().width() / 2; + event.visual().moveTo({ top: top, left: left }); + }); + }; + + /** + * remove all events from the plot + */ + this._clearEvents = function() { + $.each(_events, function(index, val) { + val.visual().clear(); + }); + _events = []; + }; + + /** + * create a DOM element for the given event + */ + this._buildDiv = function(event) { + let that = this; + + let container = this._plot.getPlaceholder(); + let o = this._plot.getPlotOffset(); + let axes = this._plot.getAxes(); + let xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; + let yaxis, top, left, color, markerSize, markerShow, lineStyle, lineWidth; + let markerTooltip; + + // determine the y axis used + if (axes.yaxis && axes.yaxis.used) { + yaxis = axes.yaxis; + } + if (axes.yaxis2 && axes.yaxis2.used) { + yaxis = axes.yaxis2; + } + + // map the eventType to a types object + let eventTypeId = event.eventType; + + if (this._types === null || !this._types[eventTypeId] || !this._types[eventTypeId].color) { + color = '#666'; + } else { + color = this._types[eventTypeId].color; + } + + if (this._types === null || !this._types[eventTypeId] || !this._types[eventTypeId].markerSize) { + markerSize = 8; //default marker size + } else { + markerSize = this._types[eventTypeId].markerSize; + } + + if (this._types === null || !this._types[eventTypeId] || this._types[eventTypeId].markerShow === undefined) { + markerShow = true; + } else { + markerShow = this._types[eventTypeId].markerShow; + } + + if (this._types === null || !this._types[eventTypeId] || this._types[eventTypeId].markerTooltip === undefined) { + markerTooltip = true; + } else { + markerTooltip = this._types[eventTypeId].markerTooltip; + } + + if (this._types == null || !this._types[eventTypeId] || !this._types[eventTypeId].lineStyle) { + lineStyle = 'dashed'; //default line style + } else { + lineStyle = this._types[eventTypeId].lineStyle.toLowerCase(); + } + + if (this._types == null || !this._types[eventTypeId] || this._types[eventTypeId].lineWidth === undefined) { + lineWidth = 1; //default line width + } else { + lineWidth = this._types[eventTypeId].lineWidth; + } + + let topOffset = xaxis.options.eventSectionHeight || 0; + topOffset = topOffset / 3; + + top = o.top + this._plot.height() + topOffset; + left = xaxis.p2c(event.min) + o.left; + + let line = $('
') + .css({ + position: 'absolute', + opacity: 0.8, + left: left + 'px', + top: 8, + width: lineWidth + 'px', + height: this._plot.height() + topOffset * 0.8, + 'border-left-width': lineWidth + 'px', + 'border-left-style': lineStyle, + 'border-left-color': color, + color: color, + }) + .appendTo(container); + + if (markerShow) { + let marker = $('
').css({ + position: 'absolute', + left: -markerSize - Math.round(lineWidth / 2) + 'px', + 'font-size': 0, + 'line-height': 0, + width: 0, + height: 0, + 'border-left': markerSize + 'px solid transparent', + 'border-right': markerSize + 'px solid transparent', + }); + + marker.appendTo(line); + + if ( + this._types[eventTypeId] && + this._types[eventTypeId].position && + this._types[eventTypeId].position.toUpperCase() === 'BOTTOM' + ) { + marker.css({ + top: top - markerSize - 8 + 'px', + 'border-top': 'none', + 'border-bottom': markerSize + 'px solid ' + color, + }); + } else { + marker.css({ + top: '0px', + 'border-top': markerSize + 'px solid ' + color, + 'border-bottom': 'none', + }); + } + + marker.data({ + event: event, + }); + + let mouseenter = function() { + createAnnotationToolip(marker, $(this).data('event'), that._plot); + }; + + if (event.editModel) { + createEditPopover(marker, event.editModel, that._plot); + } + + let mouseleave = function() { + that._plot.clearSelection(); + }; + + if (markerTooltip) { + marker.css({ cursor: 'help' }); + marker.hover(mouseenter, mouseleave); + } + } + + let drawableEvent = new DrawableEvent( + line, + function drawFunc(obj) { + obj.show(); + }, + function(obj) { + obj.remove(); + }, + function(obj, position) { + obj.css({ + top: position.top, + left: position.left, + }); + }, + left, + top, + line.width(), + line.height() + ); + + return drawableEvent; + }; + + /** + * create a DOM element for the given region + */ + this._buildRegDiv = function(event) { + let that = this; + + let container = this._plot.getPlaceholder(); + let o = this._plot.getPlotOffset(); + let axes = this._plot.getAxes(); + let xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; + let yaxis, top, left, lineWidth, regionWidth, lineStyle, color, markerTooltip; + + // determine the y axis used + if (axes.yaxis && axes.yaxis.used) { + yaxis = axes.yaxis; + } + if (axes.yaxis2 && axes.yaxis2.used) { + yaxis = axes.yaxis2; + } + + // map the eventType to a types object + let eventTypeId = event.eventType; + + if (this._types === null || !this._types[eventTypeId] || !this._types[eventTypeId].color) { + color = '#666'; + } else { + color = this._types[eventTypeId].color; + } + + if (this._types === null || !this._types[eventTypeId] || this._types[eventTypeId].markerTooltip === undefined) { + markerTooltip = true; + } else { + markerTooltip = this._types[eventTypeId].markerTooltip; + } + + if (this._types == null || !this._types[eventTypeId] || this._types[eventTypeId].lineWidth === undefined) { + lineWidth = 1; //default line width + } else { + lineWidth = this._types[eventTypeId].lineWidth; + } + + if (this._types == null || !this._types[eventTypeId] || !this._types[eventTypeId].lineStyle) { + lineStyle = 'dashed'; //default line style + } else { + lineStyle = this._types[eventTypeId].lineStyle.toLowerCase(); + } + + let topOffset = 2; + top = o.top + this._plot.height() + topOffset; + + let timeFrom = Math.min(event.min, event.timeEnd); + let timeTo = Math.max(event.min, event.timeEnd); + left = xaxis.p2c(timeFrom) + o.left; + let right = xaxis.p2c(timeTo) + o.left; + regionWidth = right - left; + + _.each([left, right], function(position) { + let line = $('
').css({ + position: 'absolute', + opacity: 0.8, + left: position + 'px', + top: 8, + width: lineWidth + 'px', + height: that._plot.height() + topOffset, + 'border-left-width': lineWidth + 'px', + 'border-left-style': lineStyle, + 'border-left-color': color, + color: color, + }); + line.appendTo(container); + }); + + let region = $('
').css({ + position: 'absolute', + opacity: 0.5, + left: left + 'px', + top: top, + width: Math.round(regionWidth + lineWidth) + 'px', + height: '0.5rem', + 'border-left-color': color, + color: color, + 'background-color': color, + }); + region.appendTo(container); + + region.data({ + event: event, + }); + + let mouseenter = function() { + createAnnotationToolip(region, $(this).data('event'), that._plot); + }; + + if (event.editModel) { + createEditPopover(region, event.editModel, that._plot); + } + + let mouseleave = function() { + that._plot.clearSelection(); + }; + + if (markerTooltip) { + region.css({ cursor: 'help' }); + region.hover(mouseenter, mouseleave); + } + + let drawableEvent = new DrawableEvent( + region, + function drawFunc(obj) { + obj.show(); + }, + function(obj) { + obj.remove(); + }, + function(obj, position) { + obj.css({ + top: position.top, + left: position.left, + }); + }, + left, + top, + region.width(), + region.height() + ); + + return drawableEvent; + }; + + /** + * check if the event is inside visible range + */ + this._insidePlot = function(x) { + let xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; + let xc = xaxis.p2c(x); + return xc > 0 && xc < xaxis.p2c(xaxis.max); + }; +}; + +/** + * initialize the plugin for the given plot + */ +function init(plot) { + /*jshint validthis:true */ + let that = this; + let eventMarkers = new EventMarkers(plot); + + plot.getEvents = function() { + return eventMarkers._events; + }; + + plot.hideEvents = function() { + $.each(eventMarkers._events, function(index, event) { + event + .visual() + .getObject() + .hide(); + }); + }; + + plot.showEvents = function() { + plot.hideEvents(); + $.each(eventMarkers._events, function(index, event) { + event.hide(); + }); + + that.eventMarkers.drawEvents(); + }; + + // change events on an existing plot + plot.setEvents = function(events) { + if (eventMarkers.eventsEnabled) { + eventMarkers.setupEvents(events); + } + }; + + plot.hooks.processOptions.push(function(plot, options) { + // enable the plugin + if (options.events.data != null) { + eventMarkers.eventsEnabled = true; + } + }); + + plot.hooks.draw.push(function(plot) { + let options = plot.getOptions(); + + if (eventMarkers.eventsEnabled) { + // check for first run + if (eventMarkers.getEvents().length < 1) { + eventMarkers.setTypes(options.events.types); + eventMarkers.setupEvents(options.events.data); + } else { + eventMarkers.updateEvents(); + } + } + + eventMarkers.drawEvents(); + }); +} + +let defaultOptions = { + events: { + data: null, + types: null, + xaxis: 1, + position: 'BOTTOM', + }, +}; + +$.plot.plugins.push({ + init: init, + options: defaultOptions, + name: 'events', + version: '0.2.5', +}); From b2027af4cb3cdca3b84e3bf7547bf90ab38a240e Mon Sep 17 00:00:00 2001 From: Patrick O'Carroll Date: Tue, 10 Apr 2018 14:16:56 +0200 Subject: [PATCH 002/105] wrote classes --- .../plugins/panel/graph/jquery.flot.events.ts | 258 +++++++++--------- 1 file changed, 133 insertions(+), 125 deletions(-) diff --git a/public/app/plugins/panel/graph/jquery.flot.events.ts b/public/app/plugins/panel/graph/jquery.flot.events.ts index 642883ff75c..9dfe0a8573f 100644 --- a/public/app/plugins/panel/graph/jquery.flot.events.ts +++ b/public/app/plugins/panel/graph/jquery.flot.events.ts @@ -1,9 +1,10 @@ +import angular from 'angular'; import $ from 'jquery'; import _ from 'lodash'; -import angular from 'angular'; import Drop from 'tether-drop'; -function createAnnotationToolip(element, event, plot) { +/** @ngInject */ +export 
function createAnnotationToolip(element, event, plot) { let injector = angular.element(document).injector(); let content = document.createElement('div'); content.innerHTML = ''; @@ -48,7 +49,8 @@ function createAnnotationToolip(element, event, plot) { let markerElementToAttachTo = null; -function createEditPopover(element, event, plot) { +/** @ngInject */ +export function createEditPopover(element, event, plot) { let eventManager = plot.getOptions().events.manager; if (eventManager.editorOpen) { // update marker element to attach to (needed in case of legend on the right @@ -129,106 +131,130 @@ function createEditPopover(element, event, plot) { /** * A class that allows for the drawing an remove of some object */ -let DrawableEvent = function(object, drawFunc, clearFunc, moveFunc, left, top, width, height) { - let _object = object; - let _drawFunc = drawFunc; - let _clearFunc = clearFunc; - let _moveFunc = moveFunc; - let _position = { left: left, top: top }; - let _width = width; - let _height = height; +export class DrawableEvent { + _object: any; + _drawFunc: any; + _clearFunc: any; + _moveFunc: any; + _position: any; + _width: any; + _height: any; - this.width = function() { - return _width; - }; - this.height = function() { - return _height; - }; - this.position = function() { - return _position; - }; - this.draw = function() { - _drawFunc(_object); - }; - this.clear = function() { - _clearFunc(_object); - }; - this.getObject = function() { - return _object; - }; - this.moveTo = function(position) { - _position = position; - _moveFunc(_object, _position); - }; -}; + /** @ngInject */ + constructor(object, drawFunc, clearFunc, moveFunc, left, top, width, height) { + this._object = object; + this._drawFunc = drawFunc; + this._clearFunc = clearFunc; + this._moveFunc = moveFunc; + this._position = { left: left, top: top }; + this._width = width; + this._height = height; + } + + width() { + return this._width; + } + height() { + return this._height; + } + position() { + return this._position; + } + draw() { + this._drawFunc(this._object); + } + clear() { + this._clearFunc(this._object); + } + getObject() { + return this._object; + } + moveTo(position) { + this._position = position; + this._moveFunc(this._object, this._position); + } +} /** * Event class that stores options (eventType, min, max, title, description) and the object to draw. 
*/ -let VisualEvent = function(options, drawableEvent) { - let _parent; - let _options = options; - let _drawableEvent = drawableEvent; - let _hidden = false; +export class VisualEvent { + _parent: any; + _options: any; + _drawableEvent: any; + _hidden: any; - this.visual = function() { - return _drawableEvent; - }; - this.getOptions = function() { - return _options; - }; - this.getParent = function() { - return _parent; - }; - this.isHidden = function() { - return _hidden; - }; - this.hide = function() { - _hidden = true; - }; - this.unhide = function() { - _hidden = false; - }; -}; + /** @ngInject */ + constructor(options, drawableEvent) { + this._options = options; + this._drawableEvent = drawableEvent; + this._hidden = false; + } + + visual() { + return this._drawableEvent; + } + getOptions() { + return this._options; + } + getParent() { + return this._parent; + } + isHidden() { + return this._hidden; + } + hide() { + this._hidden = true; + } + unhide() { + this._hidden = false; + } +} /** * A Class that handles the event-markers inside the given plot */ -let EventMarkers = function(plot) { - let _events = []; +export class EventMarkers { + _events: any; + _types: any; + _plot: any; + eventsEnabled: any; - this._types = []; - this._plot = plot; - this.eventsEnabled = false; + /** @ngInject */ + constructor(plot) { + this._events = []; + this._types = []; + this._plot = plot; + this.eventsEnabled = false; + } - this.getEvents = function() { - return _events; - }; + getEvents() { + return this._events; + } - this.setTypes = function(types) { + setTypes(types) { return (this._types = types); - }; + } /** * create internal objects for the given events */ - this.setupEvents = function(events) { - let that = this; + setupEvents(events) { let parts = _.partition(events, 'isRegion'); let regions = parts[0]; events = parts[1]; - $.each(events, function(index, event) { - let ve = new VisualEvent(event, that._buildDiv(event)); - _events.push(ve); + $.each(events, (index, event) => { + let ve = new VisualEvent(event, this._buildDiv(event)); + this._events.push(ve); }); - $.each(regions, function(index, event) { - let vre = new VisualEvent(event, that._buildRegDiv(event)); - _events.push(vre); + $.each(regions, (index, event) => { + let vre = new VisualEvent(event, this._buildRegDiv(event)); + this._events.push(vre); }); - _events.sort(function(a, b) { + this._events.sort((a, b) => { let ao = a.getOptions(), bo = b.getOptions(); if (ao.min > bo.min) { @@ -239,18 +265,17 @@ let EventMarkers = function(plot) { } return 0; }); - }; + } /** * draw the events to the plot */ - this.drawEvents = function() { - let that = this; - // let o = this._plot.getPlotOffset(); + drawEvents() { + // var o = this._plot.getPlotOffset(); - $.each(_events, function(index, event) { + $.each(this._events, (index, event) => { // check event is inside the graph range - if (that._insidePlot(event.getOptions().min) && !event.isHidden()) { + if (this._insidePlot(event.getOptions().min) && !event.isHidden()) { event.visual().draw(); } else { event @@ -259,56 +284,46 @@ let EventMarkers = function(plot) { .hide(); } }); - }; + } /** * update the position of the event-markers (e.g. 
after scrolling or zooming) */ - this.updateEvents = function() { - let that = this; + updateEvents() { let o = this._plot.getPlotOffset(), left, top; let xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; - $.each(_events, function(index, event) { - top = o.top + that._plot.height() - event.visual().height(); + $.each(this._events, (index, event) => { + top = o.top + this._plot.height() - event.visual().height(); left = xaxis.p2c(event.getOptions().min) + o.left - event.visual().width() / 2; event.visual().moveTo({ top: top, left: left }); }); - }; + } /** * remove all events from the plot */ - this._clearEvents = function() { - $.each(_events, function(index, val) { + _clearEvents() { + $.each(this._events, (index, val) => { val.visual().clear(); }); - _events = []; - }; + this._events = []; + } /** * create a DOM element for the given event */ - this._buildDiv = function(event) { + _buildDiv(event) { let that = this; let container = this._plot.getPlaceholder(); let o = this._plot.getPlotOffset(); - let axes = this._plot.getAxes(); let xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; - let yaxis, top, left, color, markerSize, markerShow, lineStyle, lineWidth; + let top, left, color, markerSize, markerShow, lineStyle, lineWidth; let markerTooltip; - // determine the y axis used - if (axes.yaxis && axes.yaxis.used) { - yaxis = axes.yaxis; - } - if (axes.yaxis2 && axes.yaxis2.used) { - yaxis = axes.yaxis2; - } - // map the eventType to a types object let eventTypeId = event.eventType; @@ -444,27 +459,18 @@ let EventMarkers = function(plot) { ); return drawableEvent; - }; + } /** * create a DOM element for the given region */ - this._buildRegDiv = function(event) { + _buildRegDiv(event) { let that = this; let container = this._plot.getPlaceholder(); let o = this._plot.getPlotOffset(); - let axes = this._plot.getAxes(); let xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; - let yaxis, top, left, lineWidth, regionWidth, lineStyle, color, markerTooltip; - - // determine the y axis used - if (axes.yaxis && axes.yaxis.used) { - yaxis = axes.yaxis; - } - if (axes.yaxis2 && axes.yaxis2.used) { - yaxis = axes.yaxis2; - } + let top, left, lineWidth, regionWidth, lineStyle, color, markerTooltip; // map the eventType to a types object let eventTypeId = event.eventType; @@ -502,14 +508,14 @@ let EventMarkers = function(plot) { let right = xaxis.p2c(timeTo) + o.left; regionWidth = right - left; - _.each([left, right], function(position) { + _.each([left, right], position => { let line = $('
').css({ position: 'absolute', opacity: 0.8, left: position + 'px', top: 8, width: lineWidth + 'px', - height: that._plot.height() + topOffset, + height: this._plot.height() + topOffset, 'border-left-width': lineWidth + 'px', 'border-left-style': lineStyle, 'border-left-color': color, @@ -573,22 +579,24 @@ let EventMarkers = function(plot) { ); return drawableEvent; - }; + } /** * check if the event is inside visible range */ - this._insidePlot = function(x) { + _insidePlot(x) { let xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; let xc = xaxis.p2c(x); return xc > 0 && xc < xaxis.p2c(xaxis.max); - }; -}; + } +} /** * initialize the plugin for the given plot */ -function init(plot) { + +/** @ngInject */ +export function init(plot) { /*jshint validthis:true */ let that = this; let eventMarkers = new EventMarkers(plot); @@ -598,7 +606,7 @@ function init(plot) { }; plot.hideEvents = function() { - $.each(eventMarkers._events, function(index, event) { + $.each(eventMarkers._events, (index, event) => { event .visual() .getObject() @@ -608,7 +616,7 @@ function init(plot) { plot.showEvents = function() { plot.hideEvents(); - $.each(eventMarkers._events, function(index, event) { + $.each(eventMarkers._events, (index, event) => { event.hide(); }); From 664944980a86c7082ce20e007a8789e949543453 Mon Sep 17 00:00:00 2001 From: Mitsuhiro Tanda Date: Mon, 16 Apr 2018 15:27:25 +0900 Subject: [PATCH 003/105] update aws-sdk-go --- Gopkg.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Gopkg.toml b/Gopkg.toml index 1768059f0b8..6c91ec37221 100644 --- a/Gopkg.toml +++ b/Gopkg.toml @@ -36,7 +36,7 @@ ignored = [ [[constraint]] name = "github.com/aws/aws-sdk-go" - version = "1.12.65" + version = "1.13.56" [[constraint]] branch = "master" From 077cf9a343dcc2f0d54607776cb998404870d565 Mon Sep 17 00:00:00 2001 From: Mitsuhiro Tanda Date: Mon, 16 Apr 2018 15:32:39 +0900 Subject: [PATCH 004/105] dep ensure --- Gopkg.lock | 15 +- .../aws/aws-sdk-go/aws/client/client.go | 4 +- .../aws/aws-sdk-go/aws/client/logger.go | 102 ++- .../aws/client/metadata/client_info.go | 1 + .../aws-sdk-go/aws/credentials/credentials.go | 18 +- .../github.com/aws/aws-sdk-go/aws/csm/doc.go | 46 + .../aws/aws-sdk-go/aws/csm/enable.go | 67 ++ .../aws/aws-sdk-go/aws/csm/metric.go | 51 ++ .../aws/aws-sdk-go/aws/csm/metricChan.go | 54 ++ .../aws/aws-sdk-go/aws/csm/reporter.go | 230 +++++ .../aws/aws-sdk-go/aws/endpoints/defaults.go | 75 +- .../github.com/aws/aws-sdk-go/aws/logger.go | 6 + .../aws/aws-sdk-go/aws/request/handlers.go | 18 + .../aws/aws-sdk-go/aws/request/request.go | 9 +- .../aws/aws-sdk-go/aws/request/request_1_7.go | 2 +- .../aws/aws-sdk-go/aws/request/request_1_8.go | 2 +- .../aws/request/request_pagination.go | 15 +- .../aws/aws-sdk-go/aws/session/env_config.go | 20 + .../aws/aws-sdk-go/aws/session/session.go | 26 +- .../aws/aws-sdk-go/aws/signer/v4/v4.go | 13 +- .../github.com/aws/aws-sdk-go/aws/version.go | 2 +- .../private/protocol/eventstream/debug.go | 144 ++++ .../private/protocol/eventstream/decode.go | 199 +++++ .../private/protocol/eventstream/encode.go | 114 +++ .../private/protocol/eventstream/error.go | 23 + .../eventstream/eventstreamapi/api.go | 160 ++++ .../eventstream/eventstreamapi/error.go | 24 + .../private/protocol/eventstream/header.go | 166 ++++ .../protocol/eventstream/header_value.go | 501 +++++++++++ .../private/protocol/eventstream/message.go | 103 +++ .../aws-sdk-go/private/protocol/payload.go | 81 ++ .../aws-sdk-go/private/protocol/rest/build.go | 8 +- 
.../private/protocol/rest/unmarshal.go | 2 +- .../aws-sdk-go/service/cloudwatch/service.go | 6 +- .../aws/aws-sdk-go/service/ec2/api.go | 94 ++- .../aws/aws-sdk-go/service/ec2/service.go | 6 +- .../aws/aws-sdk-go/service/s3/api.go | 796 ++++++++++++++++++ .../aws/aws-sdk-go/service/s3/service.go | 8 +- .../aws/aws-sdk-go/service/sts/service.go | 6 +- .../shurcooL/sanitized_anchor_name/LICENSE | 21 + .../shurcooL/sanitized_anchor_name/main.go | 29 + 41 files changed, 3183 insertions(+), 84 deletions(-) create mode 100644 vendor/github.com/aws/aws-sdk-go/aws/csm/doc.go create mode 100644 vendor/github.com/aws/aws-sdk-go/aws/csm/enable.go create mode 100644 vendor/github.com/aws/aws-sdk-go/aws/csm/metric.go create mode 100644 vendor/github.com/aws/aws-sdk-go/aws/csm/metricChan.go create mode 100644 vendor/github.com/aws/aws-sdk-go/aws/csm/reporter.go create mode 100644 vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/debug.go create mode 100644 vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/decode.go create mode 100644 vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/encode.go create mode 100644 vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/error.go create mode 100644 vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi/api.go create mode 100644 vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi/error.go create mode 100644 vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/header.go create mode 100644 vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/header_value.go create mode 100644 vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/message.go create mode 100644 vendor/github.com/aws/aws-sdk-go/private/protocol/payload.go create mode 100644 vendor/github.com/shurcooL/sanitized_anchor_name/LICENSE create mode 100644 vendor/github.com/shurcooL/sanitized_anchor_name/main.go diff --git a/Gopkg.lock b/Gopkg.lock index 5acaf2a542c..6f08e208ecd 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -32,6 +32,7 @@ "aws/credentials/ec2rolecreds", "aws/credentials/endpointcreds", "aws/credentials/stscreds", + "aws/csm", "aws/defaults", "aws/ec2metadata", "aws/endpoints", @@ -43,6 +44,8 @@ "internal/shareddefaults", "private/protocol", "private/protocol/ec2query", + "private/protocol/eventstream", + "private/protocol/eventstream/eventstreamapi", "private/protocol/query", "private/protocol/query/queryutil", "private/protocol/rest", @@ -54,8 +57,8 @@ "service/s3", "service/sts" ] - revision = "c7cd1ebe87257cde9b65112fc876b0339ea0ac30" - version = "v1.13.49" + revision = "fde4ded7becdeae4d26bf1212916aabba79349b4" + version = "v1.14.12" [[projects]] branch = "master" @@ -424,6 +427,12 @@ revision = "1744e2970ca51c86172c8190fadad617561ed6e7" version = "v1.0.0" +[[projects]] + branch = "master" + name = "github.com/shurcooL/sanitized_anchor_name" + packages = ["."] + revision = "86672fcb3f950f35f2e675df2240550f2a50762f" + [[projects]] name = "github.com/smartystreets/assertions" packages = [ @@ -670,6 +679,6 @@ [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "85cc057e0cc074ab5b43bd620772d63d51e07b04e8782fcfe55e6929d2fc40f7" + inputs-digest = "cb8e7fd81f23ec987fc4d5dd9d31ae0f1164bc2f30cbea2fe86e0d97dd945beb" solver-name = "gps-cdcl" solver-version = 1 diff --git a/vendor/github.com/aws/aws-sdk-go/aws/client/client.go b/vendor/github.com/aws/aws-sdk-go/aws/client/client.go index 3271a18e80e..212fe25e71e 100644 --- 
a/vendor/github.com/aws/aws-sdk-go/aws/client/client.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/client/client.go @@ -91,6 +91,6 @@ func (c *Client) AddDebugHandlers() { return } - c.Handlers.Send.PushFrontNamed(request.NamedHandler{Name: "awssdk.client.LogRequest", Fn: logRequest}) - c.Handlers.Send.PushBackNamed(request.NamedHandler{Name: "awssdk.client.LogResponse", Fn: logResponse}) + c.Handlers.Send.PushFrontNamed(LogHTTPRequestHandler) + c.Handlers.Send.PushBackNamed(LogHTTPResponseHandler) } diff --git a/vendor/github.com/aws/aws-sdk-go/aws/client/logger.go b/vendor/github.com/aws/aws-sdk-go/aws/client/logger.go index e223c54cc6c..ce9fb896d94 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/client/logger.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/client/logger.go @@ -44,12 +44,22 @@ func (reader *teeReaderCloser) Close() error { return reader.Source.Close() } +// LogHTTPRequestHandler is a SDK request handler to log the HTTP request sent +// to a service. Will include the HTTP request body if the LogLevel of the +// request matches LogDebugWithHTTPBody. +var LogHTTPRequestHandler = request.NamedHandler{ + Name: "awssdk.client.LogRequest", + Fn: logRequest, +} + func logRequest(r *request.Request) { logBody := r.Config.LogLevel.Matches(aws.LogDebugWithHTTPBody) bodySeekable := aws.IsReaderSeekable(r.Body) - dumpedBody, err := httputil.DumpRequestOut(r.HTTPRequest, logBody) + + b, err := httputil.DumpRequestOut(r.HTTPRequest, logBody) if err != nil { - r.Config.Logger.Log(fmt.Sprintf(logReqErrMsg, r.ClientInfo.ServiceName, r.Operation.Name, err)) + r.Config.Logger.Log(fmt.Sprintf(logReqErrMsg, + r.ClientInfo.ServiceName, r.Operation.Name, err)) return } @@ -63,7 +73,28 @@ func logRequest(r *request.Request) { r.ResetBody() } - r.Config.Logger.Log(fmt.Sprintf(logReqMsg, r.ClientInfo.ServiceName, r.Operation.Name, string(dumpedBody))) + r.Config.Logger.Log(fmt.Sprintf(logReqMsg, + r.ClientInfo.ServiceName, r.Operation.Name, string(b))) +} + +// LogHTTPRequestHeaderHandler is a SDK request handler to log the HTTP request sent +// to a service. Will only log the HTTP request's headers. The request payload +// will not be read. +var LogHTTPRequestHeaderHandler = request.NamedHandler{ + Name: "awssdk.client.LogRequestHeader", + Fn: logRequestHeader, +} + +func logRequestHeader(r *request.Request) { + b, err := httputil.DumpRequestOut(r.HTTPRequest, false) + if err != nil { + r.Config.Logger.Log(fmt.Sprintf(logReqErrMsg, + r.ClientInfo.ServiceName, r.Operation.Name, err)) + return + } + + r.Config.Logger.Log(fmt.Sprintf(logReqMsg, + r.ClientInfo.ServiceName, r.Operation.Name, string(b))) } const logRespMsg = `DEBUG: Response %s/%s Details: @@ -76,27 +107,44 @@ const logRespErrMsg = `DEBUG ERROR: Response %s/%s: %s -----------------------------------------------------` +// LogHTTPResponseHandler is a SDK request handler to log the HTTP response +// received from a service. Will include the HTTP response body if the LogLevel +// of the request matches LogDebugWithHTTPBody. 
+var LogHTTPResponseHandler = request.NamedHandler{ + Name: "awssdk.client.LogResponse", + Fn: logResponse, +} + func logResponse(r *request.Request) { lw := &logWriter{r.Config.Logger, bytes.NewBuffer(nil)} - r.HTTPResponse.Body = &teeReaderCloser{ - Reader: io.TeeReader(r.HTTPResponse.Body, lw), - Source: r.HTTPResponse.Body, + + logBody := r.Config.LogLevel.Matches(aws.LogDebugWithHTTPBody) + if logBody { + r.HTTPResponse.Body = &teeReaderCloser{ + Reader: io.TeeReader(r.HTTPResponse.Body, lw), + Source: r.HTTPResponse.Body, + } } handlerFn := func(req *request.Request) { - body, err := httputil.DumpResponse(req.HTTPResponse, false) + b, err := httputil.DumpResponse(req.HTTPResponse, false) if err != nil { - lw.Logger.Log(fmt.Sprintf(logRespErrMsg, req.ClientInfo.ServiceName, req.Operation.Name, err)) + lw.Logger.Log(fmt.Sprintf(logRespErrMsg, + req.ClientInfo.ServiceName, req.Operation.Name, err)) return } - b, err := ioutil.ReadAll(lw.buf) - if err != nil { - lw.Logger.Log(fmt.Sprintf(logRespErrMsg, req.ClientInfo.ServiceName, req.Operation.Name, err)) - return - } - lw.Logger.Log(fmt.Sprintf(logRespMsg, req.ClientInfo.ServiceName, req.Operation.Name, string(body))) - if req.Config.LogLevel.Matches(aws.LogDebugWithHTTPBody) { + lw.Logger.Log(fmt.Sprintf(logRespMsg, + req.ClientInfo.ServiceName, req.Operation.Name, string(b))) + + if logBody { + b, err := ioutil.ReadAll(lw.buf) + if err != nil { + lw.Logger.Log(fmt.Sprintf(logRespErrMsg, + req.ClientInfo.ServiceName, req.Operation.Name, err)) + return + } + lw.Logger.Log(string(b)) } } @@ -110,3 +158,27 @@ func logResponse(r *request.Request) { Name: handlerName, Fn: handlerFn, }) } + +// LogHTTPResponseHeaderHandler is a SDK request handler to log the HTTP +// response received from a service. Will only log the HTTP response's headers. +// The response payload will not be read. +var LogHTTPResponseHeaderHandler = request.NamedHandler{ + Name: "awssdk.client.LogResponseHeader", + Fn: logResponseHeader, +} + +func logResponseHeader(r *request.Request) { + if r.Config.Logger == nil { + return + } + + b, err := httputil.DumpResponse(r.HTTPResponse, false) + if err != nil { + r.Config.Logger.Log(fmt.Sprintf(logRespErrMsg, + r.ClientInfo.ServiceName, r.Operation.Name, err)) + return + } + + r.Config.Logger.Log(fmt.Sprintf(logRespMsg, + r.ClientInfo.ServiceName, r.Operation.Name, string(b))) +} diff --git a/vendor/github.com/aws/aws-sdk-go/aws/client/metadata/client_info.go b/vendor/github.com/aws/aws-sdk-go/aws/client/metadata/client_info.go index 4778056ddfd..920e9fddf87 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/client/metadata/client_info.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/client/metadata/client_info.go @@ -3,6 +3,7 @@ package metadata // ClientInfo wraps immutable data from the client.Client structure. 
type ClientInfo struct { ServiceName string + ServiceID string APIVersion string Endpoint string SigningName string diff --git a/vendor/github.com/aws/aws-sdk-go/aws/credentials/credentials.go b/vendor/github.com/aws/aws-sdk-go/aws/credentials/credentials.go index 42416fc2f0f..ed086992f62 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/credentials/credentials.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/credentials/credentials.go @@ -178,7 +178,8 @@ func (e *Expiry) IsExpired() bool { type Credentials struct { creds Value forceRefresh bool - m sync.Mutex + + m sync.RWMutex provider Provider } @@ -201,6 +202,17 @@ func NewCredentials(provider Provider) *Credentials { // If Credentials.Expire() was called the credentials Value will be force // expired, and the next call to Get() will cause them to be refreshed. func (c *Credentials) Get() (Value, error) { + // Check the cached credentials first with just the read lock. + c.m.RLock() + if !c.isExpired() { + creds := c.creds + c.m.RUnlock() + return creds, nil + } + c.m.RUnlock() + + // Credentials are expired need to retrieve the credentials taking the full + // lock. c.m.Lock() defer c.m.Unlock() @@ -234,8 +246,8 @@ func (c *Credentials) Expire() { // If the Credentials were forced to be expired with Expire() this will // reflect that override. func (c *Credentials) IsExpired() bool { - c.m.Lock() - defer c.m.Unlock() + c.m.RLock() + defer c.m.RUnlock() return c.isExpired() } diff --git a/vendor/github.com/aws/aws-sdk-go/aws/csm/doc.go b/vendor/github.com/aws/aws-sdk-go/aws/csm/doc.go new file mode 100644 index 00000000000..152d785b362 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/aws/csm/doc.go @@ -0,0 +1,46 @@ +// Package csm provides Client Side Monitoring (CSM) which enables sending metrics +// via UDP connection. Using the Start function will enable the reporting of +// metrics on a given port. If Start is called, with different parameters, again, +// a panic will occur. +// +// Pause can be called to pause any metrics publishing on a given port. Sessions +// that have had their handlers modified via InjectHandlers may still be used. +// However, the handlers will act as a no-op meaning no metrics will be published. +// +// Example: +// r, err := csm.Start("clientID", ":31000") +// if err != nil { +// panic(fmt.Errorf("failed starting CSM: %v", err)) +// } +// +// sess, err := session.NewSession(&aws.Config{}) +// if err != nil { +// panic(fmt.Errorf("failed loading session: %v", err)) +// } +// +// r.InjectHandlers(&sess.Handlers) +// +// client := s3.New(sess) +// resp, err := client.GetObject(&s3.GetObjectInput{ +// Bucket: aws.String("bucket"), +// Key: aws.String("key"), +// }) +// +// // Will pause monitoring +// r.Pause() +// resp, err = client.GetObject(&s3.GetObjectInput{ +// Bucket: aws.String("bucket"), +// Key: aws.String("key"), +// }) +// +// // Resume monitoring +// r.Continue() +// +// Start returns a Reporter that is used to enable or disable monitoring. If +// access to the Reporter is required later, calling Get will return the Reporter +// singleton. 
+// +// Example: +// r := csm.Get() +// r.Continue() +package csm diff --git a/vendor/github.com/aws/aws-sdk-go/aws/csm/enable.go b/vendor/github.com/aws/aws-sdk-go/aws/csm/enable.go new file mode 100644 index 00000000000..2f0c6eac9a8 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/aws/csm/enable.go @@ -0,0 +1,67 @@ +package csm + +import ( + "fmt" + "sync" +) + +var ( + lock sync.Mutex +) + +// Client side metric handler names +const ( + APICallMetricHandlerName = "awscsm.SendAPICallMetric" + APICallAttemptMetricHandlerName = "awscsm.SendAPICallAttemptMetric" +) + +// Start will start the a long running go routine to capture +// client side metrics. Calling start multiple time will only +// start the metric listener once and will panic if a different +// client ID or port is passed in. +// +// Example: +// r, err := csm.Start("clientID", "127.0.0.1:8094") +// if err != nil { +// panic(fmt.Errorf("expected no error, but received %v", err)) +// } +// sess := session.NewSession() +// r.InjectHandlers(sess.Handlers) +// +// svc := s3.New(sess) +// out, err := svc.GetObject(&s3.GetObjectInput{ +// Bucket: aws.String("bucket"), +// Key: aws.String("key"), +// }) +func Start(clientID string, url string) (*Reporter, error) { + lock.Lock() + defer lock.Unlock() + + if sender == nil { + sender = newReporter(clientID, url) + } else { + if sender.clientID != clientID { + panic(fmt.Errorf("inconsistent client IDs. %q was expected, but received %q", sender.clientID, clientID)) + } + + if sender.url != url { + panic(fmt.Errorf("inconsistent URLs. %q was expected, but received %q", sender.url, url)) + } + } + + if err := connect(url); err != nil { + sender = nil + return nil, err + } + + return sender, nil +} + +// Get will return a reporter if one exists, if one does not exist, nil will +// be returned. 
+func Get() *Reporter { + lock.Lock() + defer lock.Unlock() + + return sender +} diff --git a/vendor/github.com/aws/aws-sdk-go/aws/csm/metric.go b/vendor/github.com/aws/aws-sdk-go/aws/csm/metric.go new file mode 100644 index 00000000000..4b0d630e4c1 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/aws/csm/metric.go @@ -0,0 +1,51 @@ +package csm + +import ( + "strconv" + "time" +) + +type metricTime time.Time + +func (t metricTime) MarshalJSON() ([]byte, error) { + ns := time.Duration(time.Time(t).UnixNano()) + return []byte(strconv.FormatInt(int64(ns/time.Millisecond), 10)), nil +} + +type metric struct { + ClientID *string `json:"ClientId,omitempty"` + API *string `json:"Api,omitempty"` + Service *string `json:"Service,omitempty"` + Timestamp *metricTime `json:"Timestamp,omitempty"` + Type *string `json:"Type,omitempty"` + Version *int `json:"Version,omitempty"` + + AttemptCount *int `json:"AttemptCount,omitempty"` + Latency *int `json:"Latency,omitempty"` + + Fqdn *string `json:"Fqdn,omitempty"` + UserAgent *string `json:"UserAgent,omitempty"` + AttemptLatency *int `json:"AttemptLatency,omitempty"` + + SessionToken *string `json:"SessionToken,omitempty"` + Region *string `json:"Region,omitempty"` + AccessKey *string `json:"AccessKey,omitempty"` + HTTPStatusCode *int `json:"HttpStatusCode,omitempty"` + XAmzID2 *string `json:"XAmzId2,omitempty"` + XAmzRequestID *string `json:"XAmznRequestId,omitempty"` + + AWSException *string `json:"AwsException,omitempty"` + AWSExceptionMessage *string `json:"AwsExceptionMessage,omitempty"` + SDKException *string `json:"SdkException,omitempty"` + SDKExceptionMessage *string `json:"SdkExceptionMessage,omitempty"` + + DestinationIP *string `json:"DestinationIp,omitempty"` + ConnectionReused *int `json:"ConnectionReused,omitempty"` + + AcquireConnectionLatency *int `json:"AcquireConnectionLatency,omitempty"` + ConnectLatency *int `json:"ConnectLatency,omitempty"` + RequestLatency *int `json:"RequestLatency,omitempty"` + DNSLatency *int `json:"DnsLatency,omitempty"` + TCPLatency *int `json:"TcpLatency,omitempty"` + SSLLatency *int `json:"SslLatency,omitempty"` +} diff --git a/vendor/github.com/aws/aws-sdk-go/aws/csm/metricChan.go b/vendor/github.com/aws/aws-sdk-go/aws/csm/metricChan.go new file mode 100644 index 00000000000..514fc3739a5 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/aws/csm/metricChan.go @@ -0,0 +1,54 @@ +package csm + +import ( + "sync/atomic" +) + +const ( + runningEnum = iota + pausedEnum +) + +var ( + // MetricsChannelSize of metrics to hold in the channel + MetricsChannelSize = 100 +) + +type metricChan struct { + ch chan metric + paused int64 +} + +func newMetricChan(size int) metricChan { + return metricChan{ + ch: make(chan metric, size), + } +} + +func (ch *metricChan) Pause() { + atomic.StoreInt64(&ch.paused, pausedEnum) +} + +func (ch *metricChan) Continue() { + atomic.StoreInt64(&ch.paused, runningEnum) +} + +func (ch *metricChan) IsPaused() bool { + v := atomic.LoadInt64(&ch.paused) + return v == pausedEnum +} + +// Push will push metrics to the metric channel if the channel +// is not paused +func (ch *metricChan) Push(m metric) bool { + if ch.IsPaused() { + return false + } + + select { + case ch.ch <- m: + return true + default: + return false + } +} diff --git a/vendor/github.com/aws/aws-sdk-go/aws/csm/reporter.go b/vendor/github.com/aws/aws-sdk-go/aws/csm/reporter.go new file mode 100644 index 00000000000..1484c8fc5b1 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/aws/csm/reporter.go @@ -0,0 +1,230 @@ 
+package csm + +import ( + "encoding/json" + "net" + "time" + + "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/aws/awserr" + "github.com/aws/aws-sdk-go/aws/request" +) + +const ( + // DefaultPort is used when no port is specified + DefaultPort = "31000" +) + +// Reporter will gather metrics of API requests made and +// send those metrics to the CSM endpoint. +type Reporter struct { + clientID string + url string + conn net.Conn + metricsCh metricChan + done chan struct{} +} + +var ( + sender *Reporter +) + +func connect(url string) error { + const network = "udp" + if err := sender.connect(network, url); err != nil { + return err + } + + if sender.done == nil { + sender.done = make(chan struct{}) + go sender.start() + } + + return nil +} + +func newReporter(clientID, url string) *Reporter { + return &Reporter{ + clientID: clientID, + url: url, + metricsCh: newMetricChan(MetricsChannelSize), + } +} + +func (rep *Reporter) sendAPICallAttemptMetric(r *request.Request) { + if rep == nil { + return + } + + now := time.Now() + creds, _ := r.Config.Credentials.Get() + + m := metric{ + ClientID: aws.String(rep.clientID), + API: aws.String(r.Operation.Name), + Service: aws.String(r.ClientInfo.ServiceID), + Timestamp: (*metricTime)(&now), + UserAgent: aws.String(r.HTTPRequest.Header.Get("User-Agent")), + Region: r.Config.Region, + Type: aws.String("ApiCallAttempt"), + Version: aws.Int(1), + + XAmzRequestID: aws.String(r.RequestID), + + AttemptCount: aws.Int(r.RetryCount + 1), + AttemptLatency: aws.Int(int(now.Sub(r.AttemptTime).Nanoseconds() / int64(time.Millisecond))), + AccessKey: aws.String(creds.AccessKeyID), + } + + if r.HTTPResponse != nil { + m.HTTPStatusCode = aws.Int(r.HTTPResponse.StatusCode) + } + + if r.Error != nil { + if awserr, ok := r.Error.(awserr.Error); ok { + setError(&m, awserr) + } + } + + rep.metricsCh.Push(m) +} + +func setError(m *metric, err awserr.Error) { + msg := err.Message() + code := err.Code() + + switch code { + case "RequestError", + "SerializationError", + request.CanceledErrorCode: + + m.SDKException = &code + m.SDKExceptionMessage = &msg + default: + m.AWSException = &code + m.AWSExceptionMessage = &msg + } +} + +func (rep *Reporter) sendAPICallMetric(r *request.Request) { + if rep == nil { + return + } + + now := time.Now() + m := metric{ + ClientID: aws.String(rep.clientID), + API: aws.String(r.Operation.Name), + Service: aws.String(r.ClientInfo.ServiceID), + Timestamp: (*metricTime)(&now), + Type: aws.String("ApiCall"), + AttemptCount: aws.Int(r.RetryCount + 1), + Latency: aws.Int(int(time.Now().Sub(r.Time) / time.Millisecond)), + XAmzRequestID: aws.String(r.RequestID), + } + + // TODO: Probably want to figure something out for logging dropped + // metrics + rep.metricsCh.Push(m) +} + +func (rep *Reporter) connect(network, url string) error { + if rep.conn != nil { + rep.conn.Close() + } + + conn, err := net.Dial(network, url) + if err != nil { + return awserr.New("UDPError", "Could not connect", err) + } + + rep.conn = conn + + return nil +} + +func (rep *Reporter) close() { + if rep.done != nil { + close(rep.done) + } + + rep.metricsCh.Pause() +} + +func (rep *Reporter) start() { + defer func() { + rep.metricsCh.Pause() + }() + + for { + select { + case <-rep.done: + rep.done = nil + return + case m := <-rep.metricsCh.ch: + // TODO: What to do with this error? 
Probably should just log + b, err := json.Marshal(m) + if err != nil { + continue + } + + rep.conn.Write(b) + } + } +} + +// Pause will pause the metric channel preventing any new metrics from +// being added. +func (rep *Reporter) Pause() { + lock.Lock() + defer lock.Unlock() + + if rep == nil { + return + } + + rep.close() +} + +// Continue will reopen the metric channel and allow for monitoring +// to be resumed. +func (rep *Reporter) Continue() { + lock.Lock() + defer lock.Unlock() + if rep == nil { + return + } + + if !rep.metricsCh.IsPaused() { + return + } + + rep.metricsCh.Continue() +} + +// InjectHandlers will will enable client side metrics and inject the proper +// handlers to handle how metrics are sent. +// +// Example: +// // Start must be called in order to inject the correct handlers +// r, err := csm.Start("clientID", "127.0.0.1:8094") +// if err != nil { +// panic(fmt.Errorf("expected no error, but received %v", err)) +// } +// +// sess := session.NewSession() +// r.InjectHandlers(&sess.Handlers) +// +// // create a new service client with our client side metric session +// svc := s3.New(sess) +func (rep *Reporter) InjectHandlers(handlers *request.Handlers) { + if rep == nil { + return + } + + apiCallHandler := request.NamedHandler{Name: APICallMetricHandlerName, Fn: rep.sendAPICallMetric} + handlers.Complete.PushFrontNamed(apiCallHandler) + + apiCallAttemptHandler := request.NamedHandler{Name: APICallAttemptMetricHandlerName, Fn: rep.sendAPICallAttemptMetric} + handlers.AfterRetry.PushFrontNamed(apiCallAttemptHandler) +} diff --git a/vendor/github.com/aws/aws-sdk-go/aws/endpoints/defaults.go b/vendor/github.com/aws/aws-sdk-go/aws/endpoints/defaults.go index 857f677dd10..c472a57fad2 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/endpoints/defaults.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/endpoints/defaults.go @@ -48,6 +48,7 @@ const ( A4bServiceID = "a4b" // A4b. AcmServiceID = "acm" // Acm. AcmPcaServiceID = "acm-pca" // AcmPca. + ApiMediatailorServiceID = "api.mediatailor" // ApiMediatailor. ApiPricingServiceID = "api.pricing" // ApiPricing. ApigatewayServiceID = "apigateway" // Apigateway. ApplicationAutoscalingServiceID = "application-autoscaling" // ApplicationAutoscaling. @@ -130,6 +131,7 @@ const ( ModelsLexServiceID = "models.lex" // ModelsLex. MonitoringServiceID = "monitoring" // Monitoring. MturkRequesterServiceID = "mturk-requester" // MturkRequester. + NeptuneServiceID = "neptune" // Neptune. OpsworksServiceID = "opsworks" // Opsworks. OpsworksCmServiceID = "opsworks-cm" // OpsworksCm. OrganizationsServiceID = "organizations" // Organizations. 
@@ -307,6 +309,16 @@ var awsPartition = partition{ "us-west-2": endpoint{}, }, }, + "api.mediatailor": service{ + + Endpoints: endpoints{ + "ap-northeast-1": endpoint{}, + "ap-southeast-1": endpoint{}, + "ap-southeast-2": endpoint{}, + "eu-west-1": endpoint{}, + "us-east-1": endpoint{}, + }, + }, "api.pricing": service{ Defaults: endpoint{ CredentialScope: credentialScope{ @@ -434,6 +446,7 @@ var awsPartition = partition{ Endpoints: endpoints{ "ap-northeast-1": endpoint{}, "ap-northeast-2": endpoint{}, + "ap-south-1": endpoint{}, "ap-southeast-1": endpoint{}, "ap-southeast-2": endpoint{}, "ca-central-1": endpoint{}, @@ -1046,6 +1059,7 @@ var awsPartition = partition{ "elasticfilesystem": service{ Endpoints: endpoints{ + "ap-northeast-2": endpoint{}, "ap-southeast-2": endpoint{}, "eu-central-1": endpoint{}, "eu-west-1": endpoint{}, @@ -1242,11 +1256,13 @@ var awsPartition = partition{ Endpoints: endpoints{ "ap-northeast-1": endpoint{}, + "ap-northeast-2": endpoint{}, "ap-south-1": endpoint{}, "ap-southeast-1": endpoint{}, "ap-southeast-2": endpoint{}, "eu-central-1": endpoint{}, "eu-west-1": endpoint{}, + "eu-west-2": endpoint{}, "us-east-1": endpoint{}, "us-east-2": endpoint{}, "us-west-2": endpoint{}, @@ -1509,8 +1525,10 @@ var awsPartition = partition{ Endpoints: endpoints{ "ap-northeast-1": endpoint{}, + "ap-northeast-2": endpoint{}, "ap-southeast-1": endpoint{}, "ap-southeast-2": endpoint{}, + "eu-central-1": endpoint{}, "eu-west-1": endpoint{}, "us-east-1": endpoint{}, "us-west-2": endpoint{}, @@ -1622,6 +1640,35 @@ var awsPartition = partition{ "us-east-1": endpoint{}, }, }, + "neptune": service{ + + Endpoints: endpoints{ + "eu-west-1": endpoint{ + Hostname: "rds.eu-west-1.amazonaws.com", + CredentialScope: credentialScope{ + Region: "eu-west-1", + }, + }, + "us-east-1": endpoint{ + Hostname: "rds.us-east-1.amazonaws.com", + CredentialScope: credentialScope{ + Region: "us-east-1", + }, + }, + "us-east-2": endpoint{ + Hostname: "rds.us-east-2.amazonaws.com", + CredentialScope: credentialScope{ + Region: "us-east-2", + }, + }, + "us-west-2": endpoint{ + Hostname: "rds.us-west-2.amazonaws.com", + CredentialScope: credentialScope{ + Region: "us-west-2", + }, + }, + }, + }, "opsworks": service{ Endpoints: endpoints{ @@ -1805,10 +1852,11 @@ var awsPartition = partition{ "runtime.sagemaker": service{ Endpoints: endpoints{ - "eu-west-1": endpoint{}, - "us-east-1": endpoint{}, - "us-east-2": endpoint{}, - "us-west-2": endpoint{}, + "ap-northeast-1": endpoint{}, + "eu-west-1": endpoint{}, + "us-east-1": endpoint{}, + "us-east-2": endpoint{}, + "us-west-2": endpoint{}, }, }, "s3": service{ @@ -1873,10 +1921,11 @@ var awsPartition = partition{ "sagemaker": service{ Endpoints: endpoints{ - "eu-west-1": endpoint{}, - "us-east-1": endpoint{}, - "us-east-2": endpoint{}, - "us-west-2": endpoint{}, + "ap-northeast-1": endpoint{}, + "eu-west-1": endpoint{}, + "us-east-1": endpoint{}, + "us-east-2": endpoint{}, + "us-west-2": endpoint{}, }, }, "sdb": service{ @@ -2081,6 +2130,10 @@ var awsPartition = partition{ "eu-west-1": endpoint{}, "eu-west-2": endpoint{}, "eu-west-3": endpoint{}, + "fips-us-east-1": endpoint{}, + "fips-us-east-2": endpoint{}, + "fips-us-west-1": endpoint{}, + "fips-us-west-2": endpoint{}, "sa-east-1": endpoint{}, "us-east-1": endpoint{ SSLCommonName: "queue.{dnsSuffix}", @@ -2507,13 +2560,15 @@ var awscnPartition = partition{ "ecr": service{ Endpoints: endpoints{ - "cn-north-1": endpoint{}, + "cn-north-1": endpoint{}, + "cn-northwest-1": endpoint{}, }, }, "ecs": service{ Endpoints: 
endpoints{ - "cn-north-1": endpoint{}, + "cn-north-1": endpoint{}, + "cn-northwest-1": endpoint{}, }, }, "elasticache": service{ diff --git a/vendor/github.com/aws/aws-sdk-go/aws/logger.go b/vendor/github.com/aws/aws-sdk-go/aws/logger.go index 3babb5abdb6..6ed15b2ecc2 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/logger.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/logger.go @@ -71,6 +71,12 @@ const ( // LogDebugWithRequestErrors states the SDK should log when service requests fail // to build, send, validate, or unmarshal. LogDebugWithRequestErrors + + // LogDebugWithEventStreamBody states the SDK should log EventStream + // request and response bodys. This should be used to log the EventStream + // wire unmarshaled message content of requests and responses made while + // using the SDK Will also enable LogDebug. + LogDebugWithEventStreamBody ) // A Logger is a minimalistic interface for the SDK to log messages to. Should diff --git a/vendor/github.com/aws/aws-sdk-go/aws/request/handlers.go b/vendor/github.com/aws/aws-sdk-go/aws/request/handlers.go index 802ac88ad5c..605a72d3c94 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/request/handlers.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/request/handlers.go @@ -14,6 +14,7 @@ type Handlers struct { Send HandlerList ValidateResponse HandlerList Unmarshal HandlerList + UnmarshalStream HandlerList UnmarshalMeta HandlerList UnmarshalError HandlerList Retry HandlerList @@ -30,6 +31,7 @@ func (h *Handlers) Copy() Handlers { Send: h.Send.copy(), ValidateResponse: h.ValidateResponse.copy(), Unmarshal: h.Unmarshal.copy(), + UnmarshalStream: h.UnmarshalStream.copy(), UnmarshalError: h.UnmarshalError.copy(), UnmarshalMeta: h.UnmarshalMeta.copy(), Retry: h.Retry.copy(), @@ -45,6 +47,7 @@ func (h *Handlers) Clear() { h.Send.Clear() h.Sign.Clear() h.Unmarshal.Clear() + h.UnmarshalStream.Clear() h.UnmarshalMeta.Clear() h.UnmarshalError.Clear() h.ValidateResponse.Clear() @@ -172,6 +175,21 @@ func (l *HandlerList) SwapNamed(n NamedHandler) (swapped bool) { return swapped } +// Swap will swap out all handlers matching the name passed in. The matched +// handlers will be swapped in. True is returned if the handlers were swapped. +func (l *HandlerList) Swap(name string, replace NamedHandler) bool { + var swapped bool + + for i := 0; i < len(l.list); i++ { + if l.list[i].Name == name { + l.list[i] = replace + swapped = true + } + } + + return swapped +} + // SetBackNamed will replace the named handler if it exists in the handler list. // If the handler does not exist the handler will be added to the end of the list. func (l *HandlerList) SetBackNamed(n NamedHandler) { diff --git a/vendor/github.com/aws/aws-sdk-go/aws/request/request.go b/vendor/github.com/aws/aws-sdk-go/aws/request/request.go index 69b7a01ad74..75f0fe07780 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/request/request.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/request/request.go @@ -46,6 +46,7 @@ type Request struct { Handlers Handlers Retryer + AttemptTime time.Time Time time.Time Operation *Operation HTTPRequest *http.Request @@ -121,6 +122,7 @@ func New(cfg aws.Config, clientInfo metadata.ClientInfo, handlers Handlers, Handlers: handlers.Copy(), Retryer: retryer, + AttemptTime: time.Now(), Time: time.Now(), ExpireTime: 0, Operation: operation, @@ -368,9 +370,9 @@ func (r *Request) Build() error { return r.Error } -// Sign will sign the request returning error if errors are encountered. +// Sign will sign the request, returning error if errors are encountered. 
// -// Send will build the request prior to signing. All Sign Handlers will +// Sign will build the request prior to signing. All Sign Handlers will // be executed in the order they were set. func (r *Request) Sign() error { r.Build() @@ -440,7 +442,7 @@ func (r *Request) GetBody() io.ReadSeeker { return r.safeBody } -// Send will send the request returning error if errors are encountered. +// Send will send the request, returning error if errors are encountered. // // Send will sign the request prior to sending. All Send Handlers will // be executed in the order they were set. @@ -461,6 +463,7 @@ func (r *Request) Send() error { }() for { + r.AttemptTime = time.Now() if aws.BoolValue(r.Retryable) { if r.Config.LogLevel.Matches(aws.LogDebugWithRequestRetries) { r.Config.Logger.Log(fmt.Sprintf("DEBUG: Retrying Request %s/%s, attempt %d", diff --git a/vendor/github.com/aws/aws-sdk-go/aws/request/request_1_7.go b/vendor/github.com/aws/aws-sdk-go/aws/request/request_1_7.go index 869b97a1a0f..e36e468b7c6 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/request/request_1_7.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/request/request_1_7.go @@ -21,7 +21,7 @@ func (noBody) WriteTo(io.Writer) (int64, error) { return 0, nil } var NoBody = noBody{} // ResetBody rewinds the request body back to its starting position, and -// set's the HTTP Request body reference. When the body is read prior +// sets the HTTP Request body reference. When the body is read prior // to being sent in the HTTP request it will need to be rewound. // // ResetBody will automatically be called by the SDK's build handler, but if diff --git a/vendor/github.com/aws/aws-sdk-go/aws/request/request_1_8.go b/vendor/github.com/aws/aws-sdk-go/aws/request/request_1_8.go index c32fc69bc56..7c6a8000f67 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/request/request_1_8.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/request/request_1_8.go @@ -11,7 +11,7 @@ import ( var NoBody = http.NoBody // ResetBody rewinds the request body back to its starting position, and -// set's the HTTP Request body reference. When the body is read prior +// sets the HTTP Request body reference. When the body is read prior // to being sent in the HTTP request it will need to be rewound. // // ResetBody will automatically be called by the SDK's build handler, but if diff --git a/vendor/github.com/aws/aws-sdk-go/aws/request/request_pagination.go b/vendor/github.com/aws/aws-sdk-go/aws/request/request_pagination.go index 159518a75cd..a633ed5acfa 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/request/request_pagination.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/request/request_pagination.go @@ -35,8 +35,12 @@ type Pagination struct { // NewRequest should always be built from the same API operations. It is // undefined if different API operations are returned on subsequent calls. NewRequest func() (*Request, error) + // EndPageOnSameToken, when enabled, will allow the paginator to stop on + // token that are the same as its previous tokens. + EndPageOnSameToken bool started bool + prevTokens []interface{} nextTokens []interface{} err error @@ -49,7 +53,15 @@ type Pagination struct { // // Will always return true if Next has not been called yet. 
func (p *Pagination) HasNextPage() bool { - return !(p.started && len(p.nextTokens) == 0) + if !p.started { + return true + } + + hasNextPage := len(p.nextTokens) != 0 + if p.EndPageOnSameToken { + return hasNextPage && !awsutil.DeepEqual(p.nextTokens, p.prevTokens) + } + return hasNextPage } // Err returns the error Pagination encountered when retrieving the next page. @@ -96,6 +108,7 @@ func (p *Pagination) Next() bool { return false } + p.prevTokens = p.nextTokens p.nextTokens = req.nextPageTokens() p.curPage = req.Data diff --git a/vendor/github.com/aws/aws-sdk-go/aws/session/env_config.go b/vendor/github.com/aws/aws-sdk-go/aws/session/env_config.go index 12b452177a8..82e04d76cde 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/session/env_config.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/session/env_config.go @@ -96,9 +96,23 @@ type envConfig struct { // // AWS_CA_BUNDLE=$HOME/my_custom_ca_bundle CustomCABundle string + + csmEnabled string + CSMEnabled bool + CSMPort string + CSMClientID string } var ( + csmEnabledEnvKey = []string{ + "AWS_CSM_ENABLED", + } + csmPortEnvKey = []string{ + "AWS_CSM_PORT", + } + csmClientIDEnvKey = []string{ + "AWS_CSM_CLIENT_ID", + } credAccessEnvKey = []string{ "AWS_ACCESS_KEY_ID", "AWS_ACCESS_KEY", @@ -157,6 +171,12 @@ func envConfigLoad(enableSharedConfig bool) envConfig { setFromEnvVal(&cfg.Creds.SecretAccessKey, credSecretEnvKey) setFromEnvVal(&cfg.Creds.SessionToken, credSessionEnvKey) + // CSM environment variables + setFromEnvVal(&cfg.csmEnabled, csmEnabledEnvKey) + setFromEnvVal(&cfg.CSMPort, csmPortEnvKey) + setFromEnvVal(&cfg.CSMClientID, csmClientIDEnvKey) + cfg.CSMEnabled = len(cfg.csmEnabled) > 0 + // Require logical grouping of credentials if len(cfg.Creds.AccessKeyID) == 0 || len(cfg.Creds.SecretAccessKey) == 0 { cfg.Creds = credentials.Value{} diff --git a/vendor/github.com/aws/aws-sdk-go/aws/session/session.go b/vendor/github.com/aws/aws-sdk-go/aws/session/session.go index 259b5c0fecc..51f30556301 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/session/session.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/session/session.go @@ -15,6 +15,7 @@ import ( "github.com/aws/aws-sdk-go/aws/corehandlers" "github.com/aws/aws-sdk-go/aws/credentials" "github.com/aws/aws-sdk-go/aws/credentials/stscreds" + "github.com/aws/aws-sdk-go/aws/csm" "github.com/aws/aws-sdk-go/aws/defaults" "github.com/aws/aws-sdk-go/aws/endpoints" "github.com/aws/aws-sdk-go/aws/request" @@ -81,10 +82,16 @@ func New(cfgs ...*aws.Config) *Session { r.Error = err }) } + return s } - return deprecatedNewSession(cfgs...) + s := deprecatedNewSession(cfgs...) 
+ if envCfg.CSMEnabled { + enableCSM(&s.Handlers, envCfg.CSMClientID, envCfg.CSMPort, s.Config.Logger) + } + + return s } // NewSession returns a new Session created from SDK defaults, config files, @@ -300,10 +307,22 @@ func deprecatedNewSession(cfgs ...*aws.Config) *Session { } initHandlers(s) - return s } +func enableCSM(handlers *request.Handlers, clientID string, port string, logger aws.Logger) { + logger.Log("Enabling CSM") + if len(port) == 0 { + port = csm.DefaultPort + } + + r, err := csm.Start(clientID, "127.0.0.1:"+port) + if err != nil { + return + } + r.InjectHandlers(handlers) +} + func newSession(opts Options, envCfg envConfig, cfgs ...*aws.Config) (*Session, error) { cfg := defaults.Config() handlers := defaults.Handlers() @@ -343,6 +362,9 @@ func newSession(opts Options, envCfg envConfig, cfgs ...*aws.Config) (*Session, } initHandlers(s) + if envCfg.CSMEnabled { + enableCSM(&s.Handlers, envCfg.CSMClientID, envCfg.CSMPort, s.Config.Logger) + } // Setup HTTP client with custom cert bundle if enabled if opts.CustomCABundle != nil { diff --git a/vendor/github.com/aws/aws-sdk-go/aws/signer/v4/v4.go b/vendor/github.com/aws/aws-sdk-go/aws/signer/v4/v4.go index 6e46376125b..f3586131538 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/signer/v4/v4.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/signer/v4/v4.go @@ -135,6 +135,7 @@ var requiredSignedHeaders = rules{ "X-Amz-Server-Side-Encryption-Customer-Key-Md5": struct{}{}, "X-Amz-Storage-Class": struct{}{}, "X-Amz-Website-Redirect-Location": struct{}{}, + "X-Amz-Content-Sha256": struct{}{}, }, }, patterns{"X-Amz-Meta-"}, @@ -671,8 +672,15 @@ func (ctx *signingCtx) buildSignature() { func (ctx *signingCtx) buildBodyDigest() error { hash := ctx.Request.Header.Get("X-Amz-Content-Sha256") if hash == "" { - if ctx.unsignedPayload || (ctx.isPresign && ctx.ServiceName == "s3") { + includeSHA256Header := ctx.unsignedPayload || + ctx.ServiceName == "s3" || + ctx.ServiceName == "glacier" + + s3Presign := ctx.isPresign && ctx.ServiceName == "s3" + + if ctx.unsignedPayload || s3Presign { hash = "UNSIGNED-PAYLOAD" + includeSHA256Header = !s3Presign } else if ctx.Body == nil { hash = emptyStringSHA256 } else { @@ -681,7 +689,8 @@ func (ctx *signingCtx) buildBodyDigest() error { } hash = hex.EncodeToString(makeSha256Reader(ctx.Body)) } - if ctx.unsignedPayload || ctx.ServiceName == "s3" || ctx.ServiceName == "glacier" { + + if includeSHA256Header { ctx.Request.Header.Set("X-Amz-Content-Sha256", hash) } } diff --git a/vendor/github.com/aws/aws-sdk-go/aws/version.go b/vendor/github.com/aws/aws-sdk-go/aws/version.go index befbff7df07..c108466609e 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/version.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/version.go @@ -5,4 +5,4 @@ package aws const SDKName = "aws-sdk-go" // SDKVersion is the version of this SDK -const SDKVersion = "1.13.49" +const SDKVersion = "1.14.12" diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/debug.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/debug.go new file mode 100644 index 00000000000..ecc7bf82fa2 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/debug.go @@ -0,0 +1,144 @@ +package eventstream + +import ( + "bytes" + "encoding/base64" + "encoding/json" + "fmt" + "strconv" +) + +type decodedMessage struct { + rawMessage + Headers decodedHeaders `json:"headers"` +} +type jsonMessage struct { + Length json.Number `json:"total_length"` + HeadersLen json.Number `json:"headers_length"` + PreludeCRC 
json.Number `json:"prelude_crc"` + Headers decodedHeaders `json:"headers"` + Payload []byte `json:"payload"` + CRC json.Number `json:"message_crc"` +} + +func (d *decodedMessage) UnmarshalJSON(b []byte) (err error) { + var jsonMsg jsonMessage + if err = json.Unmarshal(b, &jsonMsg); err != nil { + return err + } + + d.Length, err = numAsUint32(jsonMsg.Length) + if err != nil { + return err + } + d.HeadersLen, err = numAsUint32(jsonMsg.HeadersLen) + if err != nil { + return err + } + d.PreludeCRC, err = numAsUint32(jsonMsg.PreludeCRC) + if err != nil { + return err + } + d.Headers = jsonMsg.Headers + d.Payload = jsonMsg.Payload + d.CRC, err = numAsUint32(jsonMsg.CRC) + if err != nil { + return err + } + + return nil +} + +func (d *decodedMessage) MarshalJSON() ([]byte, error) { + jsonMsg := jsonMessage{ + Length: json.Number(strconv.Itoa(int(d.Length))), + HeadersLen: json.Number(strconv.Itoa(int(d.HeadersLen))), + PreludeCRC: json.Number(strconv.Itoa(int(d.PreludeCRC))), + Headers: d.Headers, + Payload: d.Payload, + CRC: json.Number(strconv.Itoa(int(d.CRC))), + } + + return json.Marshal(jsonMsg) +} + +func numAsUint32(n json.Number) (uint32, error) { + v, err := n.Int64() + if err != nil { + return 0, fmt.Errorf("failed to get int64 json number, %v", err) + } + + return uint32(v), nil +} + +func (d decodedMessage) Message() Message { + return Message{ + Headers: Headers(d.Headers), + Payload: d.Payload, + } +} + +type decodedHeaders Headers + +func (hs *decodedHeaders) UnmarshalJSON(b []byte) error { + var jsonHeaders []struct { + Name string `json:"name"` + Type valueType `json:"type"` + Value interface{} `json:"value"` + } + + decoder := json.NewDecoder(bytes.NewReader(b)) + decoder.UseNumber() + if err := decoder.Decode(&jsonHeaders); err != nil { + return err + } + + var headers Headers + for _, h := range jsonHeaders { + value, err := valueFromType(h.Type, h.Value) + if err != nil { + return err + } + headers.Set(h.Name, value) + } + (*hs) = decodedHeaders(headers) + + return nil +} + +func valueFromType(typ valueType, val interface{}) (Value, error) { + switch typ { + case trueValueType: + return BoolValue(true), nil + case falseValueType: + return BoolValue(false), nil + case int8ValueType: + v, err := val.(json.Number).Int64() + return Int8Value(int8(v)), err + case int16ValueType: + v, err := val.(json.Number).Int64() + return Int16Value(int16(v)), err + case int32ValueType: + v, err := val.(json.Number).Int64() + return Int32Value(int32(v)), err + case int64ValueType: + v, err := val.(json.Number).Int64() + return Int64Value(v), err + case bytesValueType: + v, err := base64.StdEncoding.DecodeString(val.(string)) + return BytesValue(v), err + case stringValueType: + v, err := base64.StdEncoding.DecodeString(val.(string)) + return StringValue(string(v)), err + case timestampValueType: + v, err := val.(json.Number).Int64() + return TimestampValue(timeFromEpochMilli(v)), err + case uuidValueType: + v, err := base64.StdEncoding.DecodeString(val.(string)) + var tv UUIDValue + copy(tv[:], v) + return tv, err + default: + panic(fmt.Sprintf("unknown type, %s, %T", typ.String(), val)) + } +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/decode.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/decode.go new file mode 100644 index 00000000000..4b972b2d666 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/decode.go @@ -0,0 +1,199 @@ +package eventstream + +import ( + "bytes" + "encoding/binary" + "encoding/hex" + 
"encoding/json" + "fmt" + "hash" + "hash/crc32" + "io" + + "github.com/aws/aws-sdk-go/aws" +) + +// Decoder provides decoding of an Event Stream messages. +type Decoder struct { + r io.Reader + logger aws.Logger +} + +// NewDecoder initializes and returns a Decoder for decoding event +// stream messages from the reader provided. +func NewDecoder(r io.Reader) *Decoder { + return &Decoder{ + r: r, + } +} + +// Decode attempts to decode a single message from the event stream reader. +// Will return the event stream message, or error if Decode fails to read +// the message from the stream. +func (d *Decoder) Decode(payloadBuf []byte) (m Message, err error) { + reader := d.r + if d.logger != nil { + debugMsgBuf := bytes.NewBuffer(nil) + reader = io.TeeReader(reader, debugMsgBuf) + defer func() { + logMessageDecode(d.logger, debugMsgBuf, m, err) + }() + } + + crc := crc32.New(crc32IEEETable) + hashReader := io.TeeReader(reader, crc) + + prelude, err := decodePrelude(hashReader, crc) + if err != nil { + return Message{}, err + } + + if prelude.HeadersLen > 0 { + lr := io.LimitReader(hashReader, int64(prelude.HeadersLen)) + m.Headers, err = decodeHeaders(lr) + if err != nil { + return Message{}, err + } + } + + if payloadLen := prelude.PayloadLen(); payloadLen > 0 { + buf, err := decodePayload(payloadBuf, io.LimitReader(hashReader, int64(payloadLen))) + if err != nil { + return Message{}, err + } + m.Payload = buf + } + + msgCRC := crc.Sum32() + if err := validateCRC(reader, msgCRC); err != nil { + return Message{}, err + } + + return m, nil +} + +// UseLogger specifies the Logger that that the decoder should use to log the +// message decode to. +func (d *Decoder) UseLogger(logger aws.Logger) { + d.logger = logger +} + +func logMessageDecode(logger aws.Logger, msgBuf *bytes.Buffer, msg Message, decodeErr error) { + w := bytes.NewBuffer(nil) + defer func() { logger.Log(w.String()) }() + + fmt.Fprintf(w, "Raw message:\n%s\n", + hex.Dump(msgBuf.Bytes())) + + if decodeErr != nil { + fmt.Fprintf(w, "Decode error: %v\n", decodeErr) + return + } + + rawMsg, err := msg.rawMessage() + if err != nil { + fmt.Fprintf(w, "failed to create raw message, %v\n", err) + return + } + + decodedMsg := decodedMessage{ + rawMessage: rawMsg, + Headers: decodedHeaders(msg.Headers), + } + + fmt.Fprintf(w, "Decoded message:\n") + encoder := json.NewEncoder(w) + if err := encoder.Encode(decodedMsg); err != nil { + fmt.Fprintf(w, "failed to generate decoded message, %v\n", err) + } +} + +func decodePrelude(r io.Reader, crc hash.Hash32) (messagePrelude, error) { + var p messagePrelude + + var err error + p.Length, err = decodeUint32(r) + if err != nil { + return messagePrelude{}, err + } + + p.HeadersLen, err = decodeUint32(r) + if err != nil { + return messagePrelude{}, err + } + + if err := p.ValidateLens(); err != nil { + return messagePrelude{}, err + } + + preludeCRC := crc.Sum32() + if err := validateCRC(r, preludeCRC); err != nil { + return messagePrelude{}, err + } + + p.PreludeCRC = preludeCRC + + return p, nil +} + +func decodePayload(buf []byte, r io.Reader) ([]byte, error) { + w := bytes.NewBuffer(buf[0:0]) + + _, err := io.Copy(w, r) + return w.Bytes(), err +} + +func decodeUint8(r io.Reader) (uint8, error) { + type byteReader interface { + ReadByte() (byte, error) + } + + if br, ok := r.(byteReader); ok { + v, err := br.ReadByte() + return uint8(v), err + } + + var b [1]byte + _, err := io.ReadFull(r, b[:]) + return uint8(b[0]), err +} +func decodeUint16(r io.Reader) (uint16, error) { + var b [2]byte + bs := 
b[:] + _, err := io.ReadFull(r, bs) + if err != nil { + return 0, err + } + return binary.BigEndian.Uint16(bs), nil +} +func decodeUint32(r io.Reader) (uint32, error) { + var b [4]byte + bs := b[:] + _, err := io.ReadFull(r, bs) + if err != nil { + return 0, err + } + return binary.BigEndian.Uint32(bs), nil +} +func decodeUint64(r io.Reader) (uint64, error) { + var b [8]byte + bs := b[:] + _, err := io.ReadFull(r, bs) + if err != nil { + return 0, err + } + return binary.BigEndian.Uint64(bs), nil +} + +func validateCRC(r io.Reader, expect uint32) error { + msgCRC, err := decodeUint32(r) + if err != nil { + return err + } + + if msgCRC != expect { + return ChecksumError{} + } + + return nil +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/encode.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/encode.go new file mode 100644 index 00000000000..150a60981d8 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/encode.go @@ -0,0 +1,114 @@ +package eventstream + +import ( + "bytes" + "encoding/binary" + "hash" + "hash/crc32" + "io" +) + +// Encoder provides EventStream message encoding. +type Encoder struct { + w io.Writer + + headersBuf *bytes.Buffer +} + +// NewEncoder initializes and returns an Encoder to encode Event Stream +// messages to an io.Writer. +func NewEncoder(w io.Writer) *Encoder { + return &Encoder{ + w: w, + headersBuf: bytes.NewBuffer(nil), + } +} + +// Encode encodes a single EventStream message to the io.Writer the Encoder +// was created with. An error is returned if writing the message fails. +func (e *Encoder) Encode(msg Message) error { + e.headersBuf.Reset() + + err := encodeHeaders(e.headersBuf, msg.Headers) + if err != nil { + return err + } + + crc := crc32.New(crc32IEEETable) + hashWriter := io.MultiWriter(e.w, crc) + + headersLen := uint32(e.headersBuf.Len()) + payloadLen := uint32(len(msg.Payload)) + + if err := encodePrelude(hashWriter, crc, headersLen, payloadLen); err != nil { + return err + } + + if headersLen > 0 { + if _, err := io.Copy(hashWriter, e.headersBuf); err != nil { + return err + } + } + + if payloadLen > 0 { + if _, err := hashWriter.Write(msg.Payload); err != nil { + return err + } + } + + msgCRC := crc.Sum32() + return binary.Write(e.w, binary.BigEndian, msgCRC) +} + +func encodePrelude(w io.Writer, crc hash.Hash32, headersLen, payloadLen uint32) error { + p := messagePrelude{ + Length: minMsgLen + headersLen + payloadLen, + HeadersLen: headersLen, + } + if err := p.ValidateLens(); err != nil { + return err + } + + err := binaryWriteFields(w, binary.BigEndian, + p.Length, + p.HeadersLen, + ) + if err != nil { + return err + } + + p.PreludeCRC = crc.Sum32() + err = binary.Write(w, binary.BigEndian, p.PreludeCRC) + if err != nil { + return err + } + + return nil +} + +func encodeHeaders(w io.Writer, headers Headers) error { + for _, h := range headers { + hn := headerName{ + Len: uint8(len(h.Name)), + } + copy(hn.Name[:hn.Len], h.Name) + if err := hn.encode(w); err != nil { + return err + } + + if err := h.Value.encode(w); err != nil { + return err + } + } + + return nil +} + +func binaryWriteFields(w io.Writer, order binary.ByteOrder, vs ...interface{}) error { + for _, v := range vs { + if err := binary.Write(w, order, v); err != nil { + return err + } + } + return nil +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/error.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/error.go new file mode 100644 index 
00000000000..5481ef30796 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/error.go @@ -0,0 +1,23 @@ +package eventstream + +import "fmt" + +// LengthError provides the error for items being larger than a maximum length. +type LengthError struct { + Part string + Want int + Have int + Value interface{} +} + +func (e LengthError) Error() string { + return fmt.Sprintf("%s length invalid, %d/%d, %v", + e.Part, e.Want, e.Have, e.Value) +} + +// ChecksumError provides the error for message checksum invalidation errors. +type ChecksumError struct{} + +func (e ChecksumError) Error() string { + return "message checksum mismatch" +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi/api.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi/api.go new file mode 100644 index 00000000000..4a4e64c713e --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi/api.go @@ -0,0 +1,160 @@ +package eventstreamapi + +import ( + "fmt" + "io" + + "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/private/protocol" + "github.com/aws/aws-sdk-go/private/protocol/eventstream" +) + +// Unmarshaler provides the interface for unmarshaling a EventStream +// message into a SDK type. +type Unmarshaler interface { + UnmarshalEvent(protocol.PayloadUnmarshaler, eventstream.Message) error +} + +// EventStream headers with specific meaning to async API functionality. +const ( + MessageTypeHeader = `:message-type` // Identifies type of message. + EventMessageType = `event` + ErrorMessageType = `error` + ExceptionMessageType = `exception` + + // Message Events + EventTypeHeader = `:event-type` // Identifies message event type e.g. "Stats". + + // Message Error + ErrorCodeHeader = `:error-code` + ErrorMessageHeader = `:error-message` + + // Message Exception + ExceptionTypeHeader = `:exception-type` +) + +// EventReader provides reading from the EventStream of an reader. +type EventReader struct { + reader io.ReadCloser + decoder *eventstream.Decoder + + unmarshalerForEventType func(string) (Unmarshaler, error) + payloadUnmarshaler protocol.PayloadUnmarshaler + + payloadBuf []byte +} + +// NewEventReader returns a EventReader built from the reader and unmarshaler +// provided. Use ReadStream method to start reading from the EventStream. +func NewEventReader( + reader io.ReadCloser, + payloadUnmarshaler protocol.PayloadUnmarshaler, + unmarshalerForEventType func(string) (Unmarshaler, error), +) *EventReader { + return &EventReader{ + reader: reader, + decoder: eventstream.NewDecoder(reader), + payloadUnmarshaler: payloadUnmarshaler, + unmarshalerForEventType: unmarshalerForEventType, + payloadBuf: make([]byte, 10*1024), + } +} + +// UseLogger instructs the EventReader to use the logger and log level +// specified. +func (r *EventReader) UseLogger(logger aws.Logger, logLevel aws.LogLevelType) { + if logger != nil && logLevel.Matches(aws.LogDebugWithEventStreamBody) { + r.decoder.UseLogger(logger) + } +} + +// ReadEvent attempts to read a message from the EventStream and return the +// unmarshaled event value that the message is for. +// +// For EventStream API errors check if the returned error satisfies the +// awserr.Error interface to get the error's Code and Message components. +// +// EventUnmarshalers called with EventStream messages must take copies of the +// message's Payload. The payload will is reused between events read. 
+func (r *EventReader) ReadEvent() (event interface{}, err error) { + msg, err := r.decoder.Decode(r.payloadBuf) + if err != nil { + return nil, err + } + defer func() { + // Reclaim payload buffer for next message read. + r.payloadBuf = msg.Payload[0:0] + }() + + typ, err := GetHeaderString(msg, MessageTypeHeader) + if err != nil { + return nil, err + } + + switch typ { + case EventMessageType: + return r.unmarshalEventMessage(msg) + case ErrorMessageType: + return nil, r.unmarshalErrorMessage(msg) + default: + return nil, fmt.Errorf("unknown eventstream message type, %v", typ) + } +} + +func (r *EventReader) unmarshalEventMessage( + msg eventstream.Message, +) (event interface{}, err error) { + eventType, err := GetHeaderString(msg, EventTypeHeader) + if err != nil { + return nil, err + } + + ev, err := r.unmarshalerForEventType(eventType) + if err != nil { + return nil, err + } + + err = ev.UnmarshalEvent(r.payloadUnmarshaler, msg) + if err != nil { + return nil, err + } + + return ev, nil +} + +func (r *EventReader) unmarshalErrorMessage(msg eventstream.Message) (err error) { + var msgErr messageError + + msgErr.code, err = GetHeaderString(msg, ErrorCodeHeader) + if err != nil { + return err + } + + msgErr.msg, err = GetHeaderString(msg, ErrorMessageHeader) + if err != nil { + return err + } + + return msgErr +} + +// Close closes the EventReader's EventStream reader. +func (r *EventReader) Close() error { + return r.reader.Close() +} + +// GetHeaderString returns the value of the header as a string. If the header +// is not set or the value is not a string an error will be returned. +func GetHeaderString(msg eventstream.Message, headerName string) (string, error) { + headerVal := msg.Headers.Get(headerName) + if headerVal == nil { + return "", fmt.Errorf("error header %s not present", headerName) + } + + v, ok := headerVal.Get().(string) + if !ok { + return "", fmt.Errorf("error header value is not a string, %T", headerVal) + } + + return v, nil +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi/error.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi/error.go new file mode 100644 index 00000000000..5ea5a988b63 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi/error.go @@ -0,0 +1,24 @@ +package eventstreamapi + +import "fmt" + +type messageError struct { + code string + msg string +} + +func (e messageError) Code() string { + return e.code +} + +func (e messageError) Message() string { + return e.msg +} + +func (e messageError) Error() string { + return fmt.Sprintf("%s: %s", e.code, e.msg) +} + +func (e messageError) OrigErr() error { + return nil +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/header.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/header.go new file mode 100644 index 00000000000..3b44dde2f32 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/header.go @@ -0,0 +1,166 @@ +package eventstream + +import ( + "encoding/binary" + "fmt" + "io" +) + +// Headers are a collection of EventStream header values. +type Headers []Header + +// Header is a single EventStream Key Value header pair. +type Header struct { + Name string + Value Value +} + +// Set associates the name with a value. If the header name already exists in +// the Headers the value will be replaced with the new one. 
+func (hs *Headers) Set(name string, value Value) { + var i int + for ; i < len(*hs); i++ { + if (*hs)[i].Name == name { + (*hs)[i].Value = value + return + } + } + + *hs = append(*hs, Header{ + Name: name, Value: value, + }) +} + +// Get returns the Value associated with the header. Nil is returned if the +// value does not exist. +func (hs Headers) Get(name string) Value { + for i := 0; i < len(hs); i++ { + if h := hs[i]; h.Name == name { + return h.Value + } + } + return nil +} + +// Del deletes the value in the Headers if it exists. +func (hs *Headers) Del(name string) { + for i := 0; i < len(*hs); i++ { + if (*hs)[i].Name == name { + copy((*hs)[i:], (*hs)[i+1:]) + (*hs) = (*hs)[:len(*hs)-1] + } + } +} + +func decodeHeaders(r io.Reader) (Headers, error) { + hs := Headers{} + + for { + name, err := decodeHeaderName(r) + if err != nil { + if err == io.EOF { + // EOF while getting header name means no more headers + break + } + return nil, err + } + + value, err := decodeHeaderValue(r) + if err != nil { + return nil, err + } + + hs.Set(name, value) + } + + return hs, nil +} + +func decodeHeaderName(r io.Reader) (string, error) { + var n headerName + + var err error + n.Len, err = decodeUint8(r) + if err != nil { + return "", err + } + + name := n.Name[:n.Len] + if _, err := io.ReadFull(r, name); err != nil { + return "", err + } + + return string(name), nil +} + +func decodeHeaderValue(r io.Reader) (Value, error) { + var raw rawValue + + typ, err := decodeUint8(r) + if err != nil { + return nil, err + } + raw.Type = valueType(typ) + + var v Value + + switch raw.Type { + case trueValueType: + v = BoolValue(true) + case falseValueType: + v = BoolValue(false) + case int8ValueType: + var tv Int8Value + err = tv.decode(r) + v = tv + case int16ValueType: + var tv Int16Value + err = tv.decode(r) + v = tv + case int32ValueType: + var tv Int32Value + err = tv.decode(r) + v = tv + case int64ValueType: + var tv Int64Value + err = tv.decode(r) + v = tv + case bytesValueType: + var tv BytesValue + err = tv.decode(r) + v = tv + case stringValueType: + var tv StringValue + err = tv.decode(r) + v = tv + case timestampValueType: + var tv TimestampValue + err = tv.decode(r) + v = tv + case uuidValueType: + var tv UUIDValue + err = tv.decode(r) + v = tv + default: + panic(fmt.Sprintf("unknown value type %d", raw.Type)) + } + + // Error could be EOF, let caller deal with it + return v, err +} + +const maxHeaderNameLen = 255 + +type headerName struct { + Len uint8 + Name [maxHeaderNameLen]byte +} + +func (v headerName) encode(w io.Writer) error { + if err := binary.Write(w, binary.BigEndian, v.Len); err != nil { + return err + } + + _, err := w.Write(v.Name[:v.Len]) + return err +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/header_value.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/header_value.go new file mode 100644 index 00000000000..d7786f92ce5 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/header_value.go @@ -0,0 +1,501 @@ +package eventstream + +import ( + "encoding/base64" + "encoding/binary" + "fmt" + "io" + "strconv" + "time" +) + +const maxHeaderValueLen = 1<<15 - 1 // 2^15-1 or 32KB - 1 + +// valueType is the EventStream header value type. 
+type valueType uint8 + +// Header value types +const ( + trueValueType valueType = iota + falseValueType + int8ValueType // Byte + int16ValueType // Short + int32ValueType // Integer + int64ValueType // Long + bytesValueType + stringValueType + timestampValueType + uuidValueType +) + +func (t valueType) String() string { + switch t { + case trueValueType: + return "bool" + case falseValueType: + return "bool" + case int8ValueType: + return "int8" + case int16ValueType: + return "int16" + case int32ValueType: + return "int32" + case int64ValueType: + return "int64" + case bytesValueType: + return "byte_array" + case stringValueType: + return "string" + case timestampValueType: + return "timestamp" + case uuidValueType: + return "uuid" + default: + return fmt.Sprintf("unknown value type %d", uint8(t)) + } +} + +type rawValue struct { + Type valueType + Len uint16 // Only set for variable length slices + Value []byte // byte representation of value, BigEndian encoding. +} + +func (r rawValue) encodeScalar(w io.Writer, v interface{}) error { + return binaryWriteFields(w, binary.BigEndian, + r.Type, + v, + ) +} + +func (r rawValue) encodeFixedSlice(w io.Writer, v []byte) error { + binary.Write(w, binary.BigEndian, r.Type) + + _, err := w.Write(v) + return err +} + +func (r rawValue) encodeBytes(w io.Writer, v []byte) error { + if len(v) > maxHeaderValueLen { + return LengthError{ + Part: "header value", + Want: maxHeaderValueLen, Have: len(v), + Value: v, + } + } + r.Len = uint16(len(v)) + + err := binaryWriteFields(w, binary.BigEndian, + r.Type, + r.Len, + ) + if err != nil { + return err + } + + _, err = w.Write(v) + return err +} + +func (r rawValue) encodeString(w io.Writer, v string) error { + if len(v) > maxHeaderValueLen { + return LengthError{ + Part: "header value", + Want: maxHeaderValueLen, Have: len(v), + Value: v, + } + } + r.Len = uint16(len(v)) + + type stringWriter interface { + WriteString(string) (int, error) + } + + err := binaryWriteFields(w, binary.BigEndian, + r.Type, + r.Len, + ) + if err != nil { + return err + } + + if sw, ok := w.(stringWriter); ok { + _, err = sw.WriteString(v) + } else { + _, err = w.Write([]byte(v)) + } + + return err +} + +func decodeFixedBytesValue(r io.Reader, buf []byte) error { + _, err := io.ReadFull(r, buf) + return err +} + +func decodeBytesValue(r io.Reader) ([]byte, error) { + var raw rawValue + var err error + raw.Len, err = decodeUint16(r) + if err != nil { + return nil, err + } + + buf := make([]byte, raw.Len) + _, err = io.ReadFull(r, buf) + if err != nil { + return nil, err + } + + return buf, nil +} + +func decodeStringValue(r io.Reader) (string, error) { + v, err := decodeBytesValue(r) + return string(v), err +} + +// Value represents the abstract header value. +type Value interface { + Get() interface{} + String() string + valueType() valueType + encode(io.Writer) error +} + +// An BoolValue provides eventstream encoding, and representation +// of a Go bool value. +type BoolValue bool + +// Get returns the underlying type +func (v BoolValue) Get() interface{} { + return bool(v) +} + +// valueType returns the EventStream header value type value. +func (v BoolValue) valueType() valueType { + if v { + return trueValueType + } + return falseValueType +} + +func (v BoolValue) String() string { + return strconv.FormatBool(bool(v)) +} + +// encode encodes the BoolValue into an eventstream binary value +// representation. 
+func (v BoolValue) encode(w io.Writer) error { + return binary.Write(w, binary.BigEndian, v.valueType()) +} + +// An Int8Value provides eventstream encoding, and representation of a Go +// int8 value. +type Int8Value int8 + +// Get returns the underlying value. +func (v Int8Value) Get() interface{} { + return int8(v) +} + +// valueType returns the EventStream header value type value. +func (Int8Value) valueType() valueType { + return int8ValueType +} + +func (v Int8Value) String() string { + return fmt.Sprintf("0x%02x", int8(v)) +} + +// encode encodes the Int8Value into an eventstream binary value +// representation. +func (v Int8Value) encode(w io.Writer) error { + raw := rawValue{ + Type: v.valueType(), + } + + return raw.encodeScalar(w, v) +} + +func (v *Int8Value) decode(r io.Reader) error { + n, err := decodeUint8(r) + if err != nil { + return err + } + + *v = Int8Value(n) + return nil +} + +// An Int16Value provides eventstream encoding, and representation of a Go +// int16 value. +type Int16Value int16 + +// Get returns the underlying value. +func (v Int16Value) Get() interface{} { + return int16(v) +} + +// valueType returns the EventStream header value type value. +func (Int16Value) valueType() valueType { + return int16ValueType +} + +func (v Int16Value) String() string { + return fmt.Sprintf("0x%04x", int16(v)) +} + +// encode encodes the Int16Value into an eventstream binary value +// representation. +func (v Int16Value) encode(w io.Writer) error { + raw := rawValue{ + Type: v.valueType(), + } + return raw.encodeScalar(w, v) +} + +func (v *Int16Value) decode(r io.Reader) error { + n, err := decodeUint16(r) + if err != nil { + return err + } + + *v = Int16Value(n) + return nil +} + +// An Int32Value provides eventstream encoding, and representation of a Go +// int32 value. +type Int32Value int32 + +// Get returns the underlying value. +func (v Int32Value) Get() interface{} { + return int32(v) +} + +// valueType returns the EventStream header value type value. +func (Int32Value) valueType() valueType { + return int32ValueType +} + +func (v Int32Value) String() string { + return fmt.Sprintf("0x%08x", int32(v)) +} + +// encode encodes the Int32Value into an eventstream binary value +// representation. +func (v Int32Value) encode(w io.Writer) error { + raw := rawValue{ + Type: v.valueType(), + } + return raw.encodeScalar(w, v) +} + +func (v *Int32Value) decode(r io.Reader) error { + n, err := decodeUint32(r) + if err != nil { + return err + } + + *v = Int32Value(n) + return nil +} + +// An Int64Value provides eventstream encoding, and representation of a Go +// int64 value. +type Int64Value int64 + +// Get returns the underlying value. +func (v Int64Value) Get() interface{} { + return int64(v) +} + +// valueType returns the EventStream header value type value. +func (Int64Value) valueType() valueType { + return int64ValueType +} + +func (v Int64Value) String() string { + return fmt.Sprintf("0x%016x", int64(v)) +} + +// encode encodes the Int64Value into an eventstream binary value +// representation. +func (v Int64Value) encode(w io.Writer) error { + raw := rawValue{ + Type: v.valueType(), + } + return raw.encodeScalar(w, v) +} + +func (v *Int64Value) decode(r io.Reader) error { + n, err := decodeUint64(r) + if err != nil { + return err + } + + *v = Int64Value(n) + return nil +} + +// An BytesValue provides eventstream encoding, and representation of a Go +// byte slice. +type BytesValue []byte + +// Get returns the underlying value. 
+func (v BytesValue) Get() interface{} { + return []byte(v) +} + +// valueType returns the EventStream header value type value. +func (BytesValue) valueType() valueType { + return bytesValueType +} + +func (v BytesValue) String() string { + return base64.StdEncoding.EncodeToString([]byte(v)) +} + +// encode encodes the BytesValue into an eventstream binary value +// representation. +func (v BytesValue) encode(w io.Writer) error { + raw := rawValue{ + Type: v.valueType(), + } + + return raw.encodeBytes(w, []byte(v)) +} + +func (v *BytesValue) decode(r io.Reader) error { + buf, err := decodeBytesValue(r) + if err != nil { + return err + } + + *v = BytesValue(buf) + return nil +} + +// An StringValue provides eventstream encoding, and representation of a Go +// string. +type StringValue string + +// Get returns the underlying value. +func (v StringValue) Get() interface{} { + return string(v) +} + +// valueType returns the EventStream header value type value. +func (StringValue) valueType() valueType { + return stringValueType +} + +func (v StringValue) String() string { + return string(v) +} + +// encode encodes the StringValue into an eventstream binary value +// representation. +func (v StringValue) encode(w io.Writer) error { + raw := rawValue{ + Type: v.valueType(), + } + + return raw.encodeString(w, string(v)) +} + +func (v *StringValue) decode(r io.Reader) error { + s, err := decodeStringValue(r) + if err != nil { + return err + } + + *v = StringValue(s) + return nil +} + +// An TimestampValue provides eventstream encoding, and representation of a Go +// timestamp. +type TimestampValue time.Time + +// Get returns the underlying value. +func (v TimestampValue) Get() interface{} { + return time.Time(v) +} + +// valueType returns the EventStream header value type value. +func (TimestampValue) valueType() valueType { + return timestampValueType +} + +func (v TimestampValue) epochMilli() int64 { + nano := time.Time(v).UnixNano() + msec := nano / int64(time.Millisecond) + return msec +} + +func (v TimestampValue) String() string { + msec := v.epochMilli() + return strconv.FormatInt(msec, 10) +} + +// encode encodes the TimestampValue into an eventstream binary value +// representation. +func (v TimestampValue) encode(w io.Writer) error { + raw := rawValue{ + Type: v.valueType(), + } + + msec := v.epochMilli() + return raw.encodeScalar(w, msec) +} + +func (v *TimestampValue) decode(r io.Reader) error { + n, err := decodeUint64(r) + if err != nil { + return err + } + + *v = TimestampValue(timeFromEpochMilli(int64(n))) + return nil +} + +func timeFromEpochMilli(t int64) time.Time { + secs := t / 1e3 + msec := t % 1e3 + return time.Unix(secs, msec*int64(time.Millisecond)) +} + +// An UUIDValue provides eventstream encoding, and representation of a UUID +// value. +type UUIDValue [16]byte + +// Get returns the underlying value. +func (v UUIDValue) Get() interface{} { + return v[:] +} + +// valueType returns the EventStream header value type value. +func (UUIDValue) valueType() valueType { + return uuidValueType +} + +func (v UUIDValue) String() string { + return fmt.Sprintf(`%X-%X-%X-%X-%X`, v[0:4], v[4:6], v[6:8], v[8:10], v[10:]) +} + +// encode encodes the UUIDValue into an eventstream binary value +// representation. 
+func (v UUIDValue) encode(w io.Writer) error { + raw := rawValue{ + Type: v.valueType(), + } + + return raw.encodeFixedSlice(w, v[:]) +} + +func (v *UUIDValue) decode(r io.Reader) error { + tv := (*v)[:] + return decodeFixedBytesValue(r, tv) +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/message.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/message.go new file mode 100644 index 00000000000..2dc012a66e2 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/message.go @@ -0,0 +1,103 @@ +package eventstream + +import ( + "bytes" + "encoding/binary" + "hash/crc32" +) + +const preludeLen = 8 +const preludeCRCLen = 4 +const msgCRCLen = 4 +const minMsgLen = preludeLen + preludeCRCLen + msgCRCLen +const maxPayloadLen = 1024 * 1024 * 16 // 16MB +const maxHeadersLen = 1024 * 128 // 128KB +const maxMsgLen = minMsgLen + maxHeadersLen + maxPayloadLen + +var crc32IEEETable = crc32.MakeTable(crc32.IEEE) + +// A Message provides the eventstream message representation. +type Message struct { + Headers Headers + Payload []byte +} + +func (m *Message) rawMessage() (rawMessage, error) { + var raw rawMessage + + if len(m.Headers) > 0 { + var headers bytes.Buffer + if err := encodeHeaders(&headers, m.Headers); err != nil { + return rawMessage{}, err + } + raw.Headers = headers.Bytes() + raw.HeadersLen = uint32(len(raw.Headers)) + } + + raw.Length = raw.HeadersLen + uint32(len(m.Payload)) + minMsgLen + + hash := crc32.New(crc32IEEETable) + binaryWriteFields(hash, binary.BigEndian, raw.Length, raw.HeadersLen) + raw.PreludeCRC = hash.Sum32() + + binaryWriteFields(hash, binary.BigEndian, raw.PreludeCRC) + + if raw.HeadersLen > 0 { + hash.Write(raw.Headers) + } + + // Read payload bytes and update hash for it as well. + if len(m.Payload) > 0 { + raw.Payload = m.Payload + hash.Write(raw.Payload) + } + + raw.CRC = hash.Sum32() + + return raw, nil +} + +type messagePrelude struct { + Length uint32 + HeadersLen uint32 + PreludeCRC uint32 +} + +func (p messagePrelude) PayloadLen() uint32 { + return p.Length - p.HeadersLen - minMsgLen +} + +func (p messagePrelude) ValidateLens() error { + if p.Length == 0 || p.Length > maxMsgLen { + return LengthError{ + Part: "message prelude", + Want: maxMsgLen, + Have: int(p.Length), + } + } + if p.HeadersLen > maxHeadersLen { + return LengthError{ + Part: "message headers", + Want: maxHeadersLen, + Have: int(p.HeadersLen), + } + } + if payloadLen := p.PayloadLen(); payloadLen > maxPayloadLen { + return LengthError{ + Part: "message payload", + Want: maxPayloadLen, + Have: int(payloadLen), + } + } + + return nil +} + +type rawMessage struct { + messagePrelude + + Headers []byte + Payload []byte + + CRC uint32 +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/payload.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/payload.go new file mode 100644 index 00000000000..e21614a1250 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/payload.go @@ -0,0 +1,81 @@ +package protocol + +import ( + "io" + "io/ioutil" + "net/http" + + "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/aws/client/metadata" + "github.com/aws/aws-sdk-go/aws/request" +) + +// PayloadUnmarshaler provides the interface for unmarshaling a payload's +// reader into a SDK shape. +type PayloadUnmarshaler interface { + UnmarshalPayload(io.Reader, interface{}) error +} + +// HandlerPayloadUnmarshal implements the PayloadUnmarshaler from a +// HandlerList. 
This provides the support for unmarshaling a payload reader to +// a shape without needing a SDK request first. +type HandlerPayloadUnmarshal struct { + Unmarshalers request.HandlerList +} + +// UnmarshalPayload unmarshals the io.Reader payload into the SDK shape using +// the Unmarshalers HandlerList provided. Returns an error if unable +// unmarshaling fails. +func (h HandlerPayloadUnmarshal) UnmarshalPayload(r io.Reader, v interface{}) error { + req := &request.Request{ + HTTPRequest: &http.Request{}, + HTTPResponse: &http.Response{ + StatusCode: 200, + Header: http.Header{}, + Body: ioutil.NopCloser(r), + }, + Data: v, + } + + h.Unmarshalers.Run(req) + + return req.Error +} + +// PayloadMarshaler provides the interface for marshaling a SDK shape into and +// io.Writer. +type PayloadMarshaler interface { + MarshalPayload(io.Writer, interface{}) error +} + +// HandlerPayloadMarshal implements the PayloadMarshaler from a HandlerList. +// This provides support for marshaling a SDK shape into an io.Writer without +// needing a SDK request first. +type HandlerPayloadMarshal struct { + Marshalers request.HandlerList +} + +// MarshalPayload marshals the SDK shape into the io.Writer using the +// Marshalers HandlerList provided. Returns an error if unable if marshal +// fails. +func (h HandlerPayloadMarshal) MarshalPayload(w io.Writer, v interface{}) error { + req := request.New( + aws.Config{}, + metadata.ClientInfo{}, + request.Handlers{}, + nil, + &request.Operation{HTTPMethod: "GET"}, + v, + nil, + ) + + h.Marshalers.Run(req) + + if req.Error != nil { + return req.Error + } + + io.Copy(w, req.GetBody()) + + return nil +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/rest/build.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/rest/build.go index c405288d742..f761e0b3a5b 100644 --- a/vendor/github.com/aws/aws-sdk-go/private/protocol/rest/build.go +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/rest/build.go @@ -20,8 +20,10 @@ import ( "github.com/aws/aws-sdk-go/private/protocol" ) -// RFC822 returns an RFC822 formatted timestamp for AWS protocols -const RFC822 = "Mon, 2 Jan 2006 15:04:05 GMT" +// RFC1123GMT is a RFC1123 (RFC822) formated timestame. This format is not +// using the standard library's time.RFC1123 due to the desire to always use +// GMT as the timezone. 
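// A hypothetical illustration of the point above (not part of this change; the
// time value is an arbitrary example): formatting with this layout always emits
// a literal "GMT" suffix,
//
//	time.Unix(0, 0).UTC().Format(RFC1123GMT) // "Thu, 1 Jan 1970 00:00:00 GMT"
//
// whereas time.RFC1123 would render the zone abbreviation of the time value
// ("UTC" here) rather than the fixed "GMT" this constant is meant to guarantee.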
+const RFC1123GMT = "Mon, 2 Jan 2006 15:04:05 GMT" // Whether the byte value can be sent without escaping in AWS URLs var noEscape [256]bool @@ -270,7 +272,7 @@ func convertType(v reflect.Value, tag reflect.StructTag) (str string, err error) case float64: str = strconv.FormatFloat(value, 'f', -1, 64) case time.Time: - str = value.UTC().Format(RFC822) + str = value.UTC().Format(RFC1123GMT) case aws.JSONValue: if len(value) == 0 { return "", errValueNotSet diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/rest/unmarshal.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/rest/unmarshal.go index 823f045eed7..9d4e7626775 100644 --- a/vendor/github.com/aws/aws-sdk-go/private/protocol/rest/unmarshal.go +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/rest/unmarshal.go @@ -198,7 +198,7 @@ func unmarshalHeader(v reflect.Value, header string, tag reflect.StructTag) erro } v.Set(reflect.ValueOf(&f)) case *time.Time: - t, err := time.Parse(RFC822, header) + t, err := time.Parse(time.RFC1123, header) if err != nil { return err } diff --git a/vendor/github.com/aws/aws-sdk-go/service/cloudwatch/service.go b/vendor/github.com/aws/aws-sdk-go/service/cloudwatch/service.go index 4b0aa76edcd..0d478662240 100644 --- a/vendor/github.com/aws/aws-sdk-go/service/cloudwatch/service.go +++ b/vendor/github.com/aws/aws-sdk-go/service/cloudwatch/service.go @@ -29,8 +29,9 @@ var initRequest func(*request.Request) // Service information constants const ( - ServiceName = "monitoring" // Service endpoint prefix API calls made to. - EndpointsID = ServiceName // Service ID for Regions and Endpoints metadata. + ServiceName = "monitoring" // Name of service. + EndpointsID = ServiceName // ID to lookup a service endpoint with. + ServiceID = "CloudWatch" // ServiceID is a unique identifer of a specific service. ) // New creates a new instance of the CloudWatch client with a session. @@ -55,6 +56,7 @@ func newClient(cfg aws.Config, handlers request.Handlers, endpoint, signingRegio cfg, metadata.ClientInfo{ ServiceName: ServiceName, + ServiceID: ServiceID, SigningName: signingName, SigningRegion: signingRegion, Endpoint: endpoint, diff --git a/vendor/github.com/aws/aws-sdk-go/service/ec2/api.go b/vendor/github.com/aws/aws-sdk-go/service/ec2/api.go index 99d12a66e42..b48e40e205c 100644 --- a/vendor/github.com/aws/aws-sdk-go/service/ec2/api.go +++ b/vendor/github.com/aws/aws-sdk-go/service/ec2/api.go @@ -2268,11 +2268,7 @@ func (c *EC2) CancelSpotInstanceRequestsRequest(input *CancelSpotInstanceRequest // CancelSpotInstanceRequests API operation for Amazon Elastic Compute Cloud. // -// Cancels one or more Spot Instance requests. Spot Instances are instances -// that Amazon EC2 starts on your behalf when the maximum price that you specify -// exceeds the current Spot price. For more information, see Spot Instance Requests -// (http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/spot-requests.html) in -// the Amazon EC2 User Guide for Linux Instances. +// Cancels one or more Spot Instance requests. // // Canceling a Spot Instance request does not terminate running Spot Instances // associated with the request. @@ -4179,8 +4175,8 @@ func (c *EC2) CreateNetworkInterfacePermissionRequest(input *CreateNetworkInterf // CreateNetworkInterfacePermission API operation for Amazon Elastic Compute Cloud. // -// Grants an AWS authorized partner account permission to attach the specified -// network interface to an instance in their account. 
+// Grants an AWS-authorized account permission to attach the specified network +// interface to an instance in their account. // // You can grant permission to a single AWS account only, and only one account // at a time. @@ -13675,11 +13671,7 @@ func (c *EC2) DescribeSpotInstanceRequestsRequest(input *DescribeSpotInstanceReq // DescribeSpotInstanceRequests API operation for Amazon Elastic Compute Cloud. // -// Describes the Spot Instance requests that belong to your account. Spot Instances -// are instances that Amazon EC2 launches when the Spot price that you specify -// exceeds the current Spot price. For more information, see Spot Instance Requests -// (http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/spot-requests.html) in -// the Amazon EC2 User Guide for Linux Instances. +// Describes the specified Spot Instance requests. // // You can use DescribeSpotInstanceRequests to find a running Spot Instance // by examining the response. If the status of the Spot Instance is fulfilled, @@ -21367,9 +21359,9 @@ func (c *EC2) RequestSpotInstancesRequest(input *RequestSpotInstancesInput) (req // RequestSpotInstances API operation for Amazon Elastic Compute Cloud. // -// Creates a Spot Instance request. Spot Instances are instances that Amazon -// EC2 launches when the maximum price that you specify exceeds the current -// Spot price. For more information, see Spot Instance Requests (http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/spot-requests.html) +// Creates a Spot Instance request. +// +// For more information, see Spot Instance Requests (http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/spot-requests.html) // in the Amazon EC2 User Guide for Linux Instances. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions @@ -37615,7 +37607,7 @@ type DescribeInstancesInput struct { // The maximum number of results to return in a single call. To retrieve the // remaining results, make another call with the returned NextToken value. This // value can be between 5 and 1000. You cannot specify this parameter and the - // instance IDs parameter or tag filters in the same call. + // instance IDs parameter in the same call. MaxResults *int64 `locationName:"maxResults" type:"integer"` // The token to request the next page of results. @@ -66458,19 +66450,23 @@ type StateReason struct { // The message for the state change. // - // * Server.InsufficientInstanceCapacity: There was insufficient instance - // capacity to satisfy the launch request. + // * Server.InsufficientInstanceCapacity: There was insufficient capacity + // available to satisfy the launch request. // - // * Server.InternalError: An internal error occurred during instance launch, - // resulting in termination. + // * Server.InternalError: An internal error caused the instance to terminate + // during launch. // // * Server.ScheduledStop: The instance was stopped due to a scheduled retirement. // - // * Server.SpotInstanceTermination: A Spot Instance was terminated due to - // an increase in the Spot price. + // * Server.SpotInstanceShutdown: The instance was stopped because the number + // of Spot requests with a maximum price equal to or higher than the Spot + // price exceeded available capacity or because of an increase in the Spot + // price. // - // * Client.InternalError: A client error caused the instance to terminate - // on launch. 
+ // * Server.SpotInstanceTermination: The instance was terminated because + // the number of Spot requests with a maximum price equal to or higher than + // the Spot price exceeded available capacity or because of an increase in + // the Spot price. // // * Client.InstanceInitiatedShutdown: The instance was shut down using the // shutdown -h command from the instance. @@ -66478,14 +66474,17 @@ type StateReason struct { // * Client.InstanceTerminated: The instance was terminated or rebooted during // AMI creation. // + // * Client.InternalError: A client error caused the instance to terminate + // during launch. + // + // * Client.InvalidSnapshot.NotFound: The specified snapshot was not found. + // // * Client.UserInitiatedShutdown: The instance was shut down using the Amazon // EC2 API. // // * Client.VolumeLimitExceeded: The limit on the number of EBS volumes or // total storage was exceeded. Decrease usage or request an increase in your - // limits. - // - // * Client.InvalidSnapshot.NotFound: The specified snapshot was not found. + // account limits. Message *string `locationName:"message" type:"string"` } @@ -66969,7 +66968,7 @@ type TagSpecification struct { _ struct{} `type:"structure"` // The type of resource to tag. Currently, the resource types that support tagging - // on creation are instance and volume. + // on creation are instance, snapshot, and volume. ResourceType *string `locationName:"resourceType" type:"string" enum:"ResourceType"` // The tags to apply to the resource. @@ -70694,6 +70693,9 @@ const ( // InstanceTypeI316xlarge is a InstanceType enum value InstanceTypeI316xlarge = "i3.16xlarge" + // InstanceTypeI3Metal is a InstanceType enum value + InstanceTypeI3Metal = "i3.metal" + // InstanceTypeHi14xlarge is a InstanceType enum value InstanceTypeHi14xlarge = "hi1.4xlarge" @@ -70754,6 +70756,24 @@ const ( // InstanceTypeC518xlarge is a InstanceType enum value InstanceTypeC518xlarge = "c5.18xlarge" + // InstanceTypeC5dLarge is a InstanceType enum value + InstanceTypeC5dLarge = "c5d.large" + + // InstanceTypeC5dXlarge is a InstanceType enum value + InstanceTypeC5dXlarge = "c5d.xlarge" + + // InstanceTypeC5d2xlarge is a InstanceType enum value + InstanceTypeC5d2xlarge = "c5d.2xlarge" + + // InstanceTypeC5d4xlarge is a InstanceType enum value + InstanceTypeC5d4xlarge = "c5d.4xlarge" + + // InstanceTypeC5d9xlarge is a InstanceType enum value + InstanceTypeC5d9xlarge = "c5d.9xlarge" + + // InstanceTypeC5d18xlarge is a InstanceType enum value + InstanceTypeC5d18xlarge = "c5d.18xlarge" + // InstanceTypeCc14xlarge is a InstanceType enum value InstanceTypeCc14xlarge = "cc1.4xlarge" @@ -70832,6 +70852,24 @@ const ( // InstanceTypeM524xlarge is a InstanceType enum value InstanceTypeM524xlarge = "m5.24xlarge" + // InstanceTypeM5dLarge is a InstanceType enum value + InstanceTypeM5dLarge = "m5d.large" + + // InstanceTypeM5dXlarge is a InstanceType enum value + InstanceTypeM5dXlarge = "m5d.xlarge" + + // InstanceTypeM5d2xlarge is a InstanceType enum value + InstanceTypeM5d2xlarge = "m5d.2xlarge" + + // InstanceTypeM5d4xlarge is a InstanceType enum value + InstanceTypeM5d4xlarge = "m5d.4xlarge" + + // InstanceTypeM5d12xlarge is a InstanceType enum value + InstanceTypeM5d12xlarge = "m5d.12xlarge" + + // InstanceTypeM5d24xlarge is a InstanceType enum value + InstanceTypeM5d24xlarge = "m5d.24xlarge" + // InstanceTypeH12xlarge is a InstanceType enum value InstanceTypeH12xlarge = "h1.2xlarge" diff --git a/vendor/github.com/aws/aws-sdk-go/service/ec2/service.go 
b/vendor/github.com/aws/aws-sdk-go/service/ec2/service.go index ba4433d388e..6acbc43fe3d 100644 --- a/vendor/github.com/aws/aws-sdk-go/service/ec2/service.go +++ b/vendor/github.com/aws/aws-sdk-go/service/ec2/service.go @@ -29,8 +29,9 @@ var initRequest func(*request.Request) // Service information constants const ( - ServiceName = "ec2" // Service endpoint prefix API calls made to. - EndpointsID = ServiceName // Service ID for Regions and Endpoints metadata. + ServiceName = "ec2" // Name of service. + EndpointsID = ServiceName // ID to lookup a service endpoint with. + ServiceID = "EC2" // ServiceID is a unique identifer of a specific service. ) // New creates a new instance of the EC2 client with a session. @@ -55,6 +56,7 @@ func newClient(cfg aws.Config, handlers request.Handlers, endpoint, signingRegio cfg, metadata.ClientInfo{ ServiceName: ServiceName, + ServiceID: ServiceID, SigningName: signingName, SigningRegion: signingRegion, Endpoint: endpoint, diff --git a/vendor/github.com/aws/aws-sdk-go/service/s3/api.go b/vendor/github.com/aws/aws-sdk-go/service/s3/api.go index a27823fdfb5..07fc06af1f9 100644 --- a/vendor/github.com/aws/aws-sdk-go/service/s3/api.go +++ b/vendor/github.com/aws/aws-sdk-go/service/s3/api.go @@ -3,14 +3,21 @@ package s3 import ( + "bytes" "fmt" "io" + "sync" + "sync/atomic" "time" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/awsutil" + "github.com/aws/aws-sdk-go/aws/client" "github.com/aws/aws-sdk-go/aws/request" "github.com/aws/aws-sdk-go/private/protocol" + "github.com/aws/aws-sdk-go/private/protocol/eventstream" + "github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi" + "github.com/aws/aws-sdk-go/private/protocol/rest" "github.com/aws/aws-sdk-go/private/protocol/restxml" ) @@ -6017,6 +6024,88 @@ func (c *S3) RestoreObjectWithContext(ctx aws.Context, input *RestoreObjectInput return out, req.Send() } +const opSelectObjectContent = "SelectObjectContent" + +// SelectObjectContentRequest generates a "aws/request.Request" representing the +// client's request for the SelectObjectContent operation. The "output" return +// value will be populated with the request's response once the request completes +// successfuly. +// +// Use "Send" method on the returned Request to send the API call to the service. +// the "output" return value is not valid until after Send returns without error. +// +// See SelectObjectContent for more information on using the SelectObjectContent +// API call, and error handling. +// +// This method is useful when you want to inject custom logic or configuration +// into the SDK's request lifecycle. Such as custom headers, or retry logic. +// +// +// // Example sending a request using the SelectObjectContentRequest method. 
+// req, resp := client.SelectObjectContentRequest(params) +// +// err := req.Send() +// if err == nil { // resp is now filled +// fmt.Println(resp) +// } +// +// See also, https://docs.aws.amazon.com/goto/WebAPI/s3-2006-03-01/SelectObjectContent +func (c *S3) SelectObjectContentRequest(input *SelectObjectContentInput) (req *request.Request, output *SelectObjectContentOutput) { + op := &request.Operation{ + Name: opSelectObjectContent, + HTTPMethod: "POST", + HTTPPath: "/{Bucket}/{Key+}?select&select-type=2", + } + + if input == nil { + input = &SelectObjectContentInput{} + } + + output = &SelectObjectContentOutput{} + req = c.newRequest(op, input, output) + req.Handlers.Send.Swap(client.LogHTTPResponseHandler.Name, client.LogHTTPResponseHeaderHandler) + req.Handlers.Unmarshal.Swap(restxml.UnmarshalHandler.Name, rest.UnmarshalHandler) + req.Handlers.Unmarshal.PushBack(output.runEventStreamLoop) + return +} + +// SelectObjectContent API operation for Amazon Simple Storage Service. +// +// This operation filters the contents of an Amazon S3 object based on a simple +// Structured Query Language (SQL) statement. In the request, along with the +// SQL expression, you must also specify a data serialization format (JSON or +// CSV) of the object. Amazon S3 uses this to parse object data into records, +// and returns only records that match the specified SQL expression. You must +// also specify the data serialization format for the response. +// +// Returns awserr.Error for service API and SDK errors. Use runtime type assertions +// with awserr.Error's Code and Message methods to get detailed information about +// the error. +// +// See the AWS API reference guide for Amazon Simple Storage Service's +// API operation SelectObjectContent for usage and error information. +// See also, https://docs.aws.amazon.com/goto/WebAPI/s3-2006-03-01/SelectObjectContent +func (c *S3) SelectObjectContent(input *SelectObjectContentInput) (*SelectObjectContentOutput, error) { + req, out := c.SelectObjectContentRequest(input) + return out, req.Send() +} + +// SelectObjectContentWithContext is the same as SelectObjectContent with the addition of +// the ability to pass a context and additional request options. +// +// See SelectObjectContent for details on how to use this API operation. +// +// The context must be non-nil and will be used for request cancellation. If +// the context is nil a panic will occur. In the future the SDK may create +// sub-contexts for http.Requests. See https://golang.org/pkg/context/ +// for more information on using Contexts. +func (c *S3) SelectObjectContentWithContext(ctx aws.Context, input *SelectObjectContentInput, opts ...request.Option) (*SelectObjectContentOutput, error) { + req, out := c.SelectObjectContentRequest(input) + req.SetContext(ctx) + req.ApplyOptions(opts...) + return out, req.Send() +} + const opUploadPart = "UploadPart" // UploadPartRequest generates a "aws/request.Request" representing the @@ -7474,6 +7563,32 @@ func (s *Condition) SetKeyPrefixEquals(v string) *Condition { return s } +type ContinuationEvent struct { + _ struct{} `type:"structure"` +} + +// String returns the string representation +func (s ContinuationEvent) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s ContinuationEvent) GoString() string { + return s.String() +} + +// The ContinuationEvent is and event in the SelectObjectContentEventStream group of events. 
+func (s *ContinuationEvent) eventSelectObjectContentEventStream() {} + +// UnmarshalEvent unmarshals the EventStream Message into the ContinuationEvent value. +// This method is only used internally within the SDK's EventStream handling. +func (s *ContinuationEvent) UnmarshalEvent( + payloadUnmarshaler protocol.PayloadUnmarshaler, + msg eventstream.Message, +) error { + return nil +} + type CopyObjectInput struct { _ struct{} `type:"structure"` @@ -9919,6 +10034,32 @@ func (s *EncryptionConfiguration) SetReplicaKmsKeyID(v string) *EncryptionConfig return s } +type EndEvent struct { + _ struct{} `type:"structure"` +} + +// String returns the string representation +func (s EndEvent) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s EndEvent) GoString() string { + return s.String() +} + +// The EndEvent is and event in the SelectObjectContentEventStream group of events. +func (s *EndEvent) eventSelectObjectContentEventStream() {} + +// UnmarshalEvent unmarshals the EventStream Message into the EndEvent value. +// This method is only used internally within the SDK's EventStream handling. +func (s *EndEvent) UnmarshalEvent( + payloadUnmarshaler protocol.PayloadUnmarshaler, + msg eventstream.Message, +) error { + return nil +} + type Error struct { _ struct{} `type:"structure"` @@ -16380,6 +16521,87 @@ func (s *Part) SetSize(v int64) *Part { return s } +type Progress struct { + _ struct{} `type:"structure"` + + // Current number of uncompressed object bytes processed. + BytesProcessed *int64 `type:"long"` + + // Current number of bytes of records payload data returned. + BytesReturned *int64 `type:"long"` + + // Current number of object bytes scanned. + BytesScanned *int64 `type:"long"` +} + +// String returns the string representation +func (s Progress) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s Progress) GoString() string { + return s.String() +} + +// SetBytesProcessed sets the BytesProcessed field's value. +func (s *Progress) SetBytesProcessed(v int64) *Progress { + s.BytesProcessed = &v + return s +} + +// SetBytesReturned sets the BytesReturned field's value. +func (s *Progress) SetBytesReturned(v int64) *Progress { + s.BytesReturned = &v + return s +} + +// SetBytesScanned sets the BytesScanned field's value. +func (s *Progress) SetBytesScanned(v int64) *Progress { + s.BytesScanned = &v + return s +} + +type ProgressEvent struct { + _ struct{} `type:"structure" payload:"Details"` + + // The Progress event details. + Details *Progress `locationName:"Details" type:"structure"` +} + +// String returns the string representation +func (s ProgressEvent) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s ProgressEvent) GoString() string { + return s.String() +} + +// SetDetails sets the Details field's value. +func (s *ProgressEvent) SetDetails(v *Progress) *ProgressEvent { + s.Details = v + return s +} + +// The ProgressEvent is and event in the SelectObjectContentEventStream group of events. +func (s *ProgressEvent) eventSelectObjectContentEventStream() {} + +// UnmarshalEvent unmarshals the EventStream Message into the ProgressEvent value. +// This method is only used internally within the SDK's EventStream handling. 
+func (s *ProgressEvent) UnmarshalEvent( + payloadUnmarshaler protocol.PayloadUnmarshaler, + msg eventstream.Message, +) error { + if err := payloadUnmarshaler.UnmarshalPayload( + bytes.NewReader(msg.Payload), s, + ); err != nil { + return fmt.Errorf("failed to unmarshal payload, %v", err) + } + return nil +} + type PutBucketAccelerateConfigurationInput struct { _ struct{} `type:"structure" payload:"AccelerateConfiguration"` @@ -18622,6 +18844,45 @@ func (s *QueueConfigurationDeprecated) SetQueue(v string) *QueueConfigurationDep return s } +type RecordsEvent struct { + _ struct{} `type:"structure" payload:"Payload"` + + // The byte array of partial, one or more result records. + // + // Payload is automatically base64 encoded/decoded by the SDK. + Payload []byte `type:"blob"` +} + +// String returns the string representation +func (s RecordsEvent) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s RecordsEvent) GoString() string { + return s.String() +} + +// SetPayload sets the Payload field's value. +func (s *RecordsEvent) SetPayload(v []byte) *RecordsEvent { + s.Payload = v + return s +} + +// The RecordsEvent is and event in the SelectObjectContentEventStream group of events. +func (s *RecordsEvent) eventSelectObjectContentEventStream() {} + +// UnmarshalEvent unmarshals the EventStream Message into the RecordsEvent value. +// This method is only used internally within the SDK's EventStream handling. +func (s *RecordsEvent) UnmarshalEvent( + payloadUnmarshaler protocol.PayloadUnmarshaler, + msg eventstream.Message, +) error { + s.Payload = make([]byte, len(msg.Payload)) + copy(s.Payload, msg.Payload) + return nil +} + type Redirect struct { _ struct{} `type:"structure"` @@ -18939,6 +19200,30 @@ func (s *RequestPaymentConfiguration) SetPayer(v string) *RequestPaymentConfigur return s } +type RequestProgress struct { + _ struct{} `type:"structure"` + + // Specifies whether periodic QueryProgress frames should be sent. Valid values: + // TRUE, FALSE. Default value: FALSE. + Enabled *bool `type:"boolean"` +} + +// String returns the string representation +func (s RequestProgress) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s RequestProgress) GoString() string { + return s.String() +} + +// SetEnabled sets the Enabled field's value. +func (s *RequestProgress) SetEnabled(v bool) *RequestProgress { + s.Enabled = &v + return s +} + type RestoreObjectInput struct { _ struct{} `type:"structure" payload:"RestoreRequest"` @@ -19392,6 +19677,436 @@ func (s SSES3) GoString() string { return s.String() } +// SelectObjectContentEventStream provides handling of EventStreams for +// the SelectObjectContent API. +// +// Use this type to receive SelectObjectContentEventStream events. The events +// can be read from the Events channel member. +// +// The events that can be received are: +// +// * ContinuationEvent +// * EndEvent +// * ProgressEvent +// * RecordsEvent +// * StatsEvent +type SelectObjectContentEventStream struct { + // Reader is the EventStream reader for the SelectObjectContentEventStream + // events. This value is automatically set by the SDK when the API call is made + // Use this member when unit testing your code with the SDK to mock out the + // EventStream Reader. + // + // Must not be nil. + Reader SelectObjectContentEventStreamReader + + // StreamCloser is the io.Closer for the EventStream connection. For HTTP + // EventStream this is the response Body. 
The stream will be closed when + // the Close method of the EventStream is called. + StreamCloser io.Closer +} + +// Close closes the EventStream. This will also cause the Events channel to be +// closed. You can use the closing of the Events channel to terminate your +// application's read from the API's EventStream. +// +// Will close the underlying EventStream reader. For EventStream over HTTP +// connection this will also close the HTTP connection. +// +// Close must be called when done using the EventStream API. Not calling Close +// may result in resource leaks. +func (es *SelectObjectContentEventStream) Close() (err error) { + es.Reader.Close() + return es.Err() +} + +// Err returns any error that occurred while reading EventStream Events from +// the service API's response. Returns nil if there were no errors. +func (es *SelectObjectContentEventStream) Err() error { + if err := es.Reader.Err(); err != nil { + return err + } + es.StreamCloser.Close() + + return nil +} + +// Events returns a channel to read EventStream Events from the +// SelectObjectContent API. +// +// These events are: +// +// * ContinuationEvent +// * EndEvent +// * ProgressEvent +// * RecordsEvent +// * StatsEvent +func (es *SelectObjectContentEventStream) Events() <-chan SelectObjectContentEventStreamEvent { + return es.Reader.Events() +} + +// SelectObjectContentEventStreamEvent groups together all EventStream +// events read from the SelectObjectContent API. +// +// These events are: +// +// * ContinuationEvent +// * EndEvent +// * ProgressEvent +// * RecordsEvent +// * StatsEvent +type SelectObjectContentEventStreamEvent interface { + eventSelectObjectContentEventStream() +} + +// SelectObjectContentEventStreamReader provides the interface for reading EventStream +// Events from the SelectObjectContent API. The +// default implementation for this interface will be SelectObjectContentEventStream. +// +// The reader's Close method must allow multiple concurrent calls. +// +// These events are: +// +// * ContinuationEvent +// * EndEvent +// * ProgressEvent +// * RecordsEvent +// * StatsEvent +type SelectObjectContentEventStreamReader interface { + // Returns a channel of events as they are read from the event stream. + Events() <-chan SelectObjectContentEventStreamEvent + + // Close will close the underlying event stream reader. For event stream over + // HTTP this will also close the HTTP connection. + Close() error + + // Returns any error that has occured while reading from the event stream. + Err() error +} + +type readSelectObjectContentEventStream struct { + eventReader *eventstreamapi.EventReader + stream chan SelectObjectContentEventStreamEvent + errVal atomic.Value + + done chan struct{} + closeOnce sync.Once +} + +func newReadSelectObjectContentEventStream( + reader io.ReadCloser, + unmarshalers request.HandlerList, + logger aws.Logger, + logLevel aws.LogLevelType, +) *readSelectObjectContentEventStream { + r := &readSelectObjectContentEventStream{ + stream: make(chan SelectObjectContentEventStreamEvent), + done: make(chan struct{}), + } + + r.eventReader = eventstreamapi.NewEventReader( + reader, + protocol.HandlerPayloadUnmarshal{ + Unmarshalers: unmarshalers, + }, + r.unmarshalerForEventType, + ) + r.eventReader.UseLogger(logger, logLevel) + + return r +} + +// Close will close the underlying event stream reader. For EventStream over +// HTTP this will also close the HTTP connection. 
+func (r *readSelectObjectContentEventStream) Close() error { + r.closeOnce.Do(r.safeClose) + + return r.Err() +} + +func (r *readSelectObjectContentEventStream) safeClose() { + close(r.done) + err := r.eventReader.Close() + if err != nil { + r.errVal.Store(err) + } +} + +func (r *readSelectObjectContentEventStream) Err() error { + if v := r.errVal.Load(); v != nil { + return v.(error) + } + + return nil +} + +func (r *readSelectObjectContentEventStream) Events() <-chan SelectObjectContentEventStreamEvent { + return r.stream +} + +func (r *readSelectObjectContentEventStream) readEventStream() { + defer close(r.stream) + + for { + event, err := r.eventReader.ReadEvent() + if err != nil { + if err == io.EOF { + return + } + select { + case <-r.done: + // If closed already ignore the error + return + default: + } + r.errVal.Store(err) + return + } + + select { + case r.stream <- event.(SelectObjectContentEventStreamEvent): + case <-r.done: + return + } + } +} + +func (r *readSelectObjectContentEventStream) unmarshalerForEventType( + eventType string, +) (eventstreamapi.Unmarshaler, error) { + switch eventType { + case "Cont": + return &ContinuationEvent{}, nil + + case "End": + return &EndEvent{}, nil + + case "Progress": + return &ProgressEvent{}, nil + + case "Records": + return &RecordsEvent{}, nil + + case "Stats": + return &StatsEvent{}, nil + default: + return nil, fmt.Errorf( + "unknown event type name, %s, for SelectObjectContentEventStream", eventType) + } +} + +// Request to filter the contents of an Amazon S3 object based on a simple Structured +// Query Language (SQL) statement. In the request, along with the SQL expression, +// you must also specify a data serialization format (JSON or CSV) of the object. +// Amazon S3 uses this to parse object data into records, and returns only records +// that match the specified SQL expression. You must also specify the data serialization +// format for the response. For more information, go to S3Select API Documentation +// (https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectSELECTContent.html) +type SelectObjectContentInput struct { + _ struct{} `locationName:"SelectObjectContentRequest" type:"structure" xmlURI:"http://s3.amazonaws.com/doc/2006-03-01/"` + + // The S3 Bucket. + // + // Bucket is a required field + Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"` + + // The expression that is used to query the object. + // + // Expression is a required field + Expression *string `type:"string" required:"true"` + + // The type of the provided expression (e.g., SQL). + // + // ExpressionType is a required field + ExpressionType *string `type:"string" required:"true" enum:"ExpressionType"` + + // Describes the format of the data in the object that is being queried. + // + // InputSerialization is a required field + InputSerialization *InputSerialization `type:"structure" required:"true"` + + // The Object Key. + // + // Key is a required field + Key *string `location:"uri" locationName:"Key" min:"1" type:"string" required:"true"` + + // Describes the format of the data that you want Amazon S3 to return in response. + // + // OutputSerialization is a required field + OutputSerialization *OutputSerialization `type:"structure" required:"true"` + + // Specifies if periodic request progress information should be enabled. + RequestProgress *RequestProgress `type:"structure"` + + // The SSE Algorithm used to encrypt the object. 
For more information, go to + // Server-Side Encryption (Using Customer-Provided Encryption Keys (https://docs.aws.amazon.com/AmazonS3/latest/dev/ServerSideEncryptionCustomerKeys.html) + SSECustomerAlgorithm *string `location:"header" locationName:"x-amz-server-side-encryption-customer-algorithm" type:"string"` + + // The SSE Customer Key. For more information, go to Server-Side Encryption + // (Using Customer-Provided Encryption Keys (https://docs.aws.amazon.com/AmazonS3/latest/dev/ServerSideEncryptionCustomerKeys.html) + SSECustomerKey *string `location:"header" locationName:"x-amz-server-side-encryption-customer-key" type:"string"` + + // The SSE Customer Key MD5. For more information, go to Server-Side Encryption + // (Using Customer-Provided Encryption Keys (https://docs.aws.amazon.com/AmazonS3/latest/dev/ServerSideEncryptionCustomerKeys.html) + SSECustomerKeyMD5 *string `location:"header" locationName:"x-amz-server-side-encryption-customer-key-MD5" type:"string"` +} + +// String returns the string representation +func (s SelectObjectContentInput) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s SelectObjectContentInput) GoString() string { + return s.String() +} + +// Validate inspects the fields of the type to determine if they are valid. +func (s *SelectObjectContentInput) Validate() error { + invalidParams := request.ErrInvalidParams{Context: "SelectObjectContentInput"} + if s.Bucket == nil { + invalidParams.Add(request.NewErrParamRequired("Bucket")) + } + if s.Expression == nil { + invalidParams.Add(request.NewErrParamRequired("Expression")) + } + if s.ExpressionType == nil { + invalidParams.Add(request.NewErrParamRequired("ExpressionType")) + } + if s.InputSerialization == nil { + invalidParams.Add(request.NewErrParamRequired("InputSerialization")) + } + if s.Key == nil { + invalidParams.Add(request.NewErrParamRequired("Key")) + } + if s.Key != nil && len(*s.Key) < 1 { + invalidParams.Add(request.NewErrParamMinLen("Key", 1)) + } + if s.OutputSerialization == nil { + invalidParams.Add(request.NewErrParamRequired("OutputSerialization")) + } + + if invalidParams.Len() > 0 { + return invalidParams + } + return nil +} + +// SetBucket sets the Bucket field's value. +func (s *SelectObjectContentInput) SetBucket(v string) *SelectObjectContentInput { + s.Bucket = &v + return s +} + +func (s *SelectObjectContentInput) getBucket() (v string) { + if s.Bucket == nil { + return v + } + return *s.Bucket +} + +// SetExpression sets the Expression field's value. +func (s *SelectObjectContentInput) SetExpression(v string) *SelectObjectContentInput { + s.Expression = &v + return s +} + +// SetExpressionType sets the ExpressionType field's value. +func (s *SelectObjectContentInput) SetExpressionType(v string) *SelectObjectContentInput { + s.ExpressionType = &v + return s +} + +// SetInputSerialization sets the InputSerialization field's value. +func (s *SelectObjectContentInput) SetInputSerialization(v *InputSerialization) *SelectObjectContentInput { + s.InputSerialization = v + return s +} + +// SetKey sets the Key field's value. +func (s *SelectObjectContentInput) SetKey(v string) *SelectObjectContentInput { + s.Key = &v + return s +} + +// SetOutputSerialization sets the OutputSerialization field's value. +func (s *SelectObjectContentInput) SetOutputSerialization(v *OutputSerialization) *SelectObjectContentInput { + s.OutputSerialization = v + return s +} + +// SetRequestProgress sets the RequestProgress field's value. 
+func (s *SelectObjectContentInput) SetRequestProgress(v *RequestProgress) *SelectObjectContentInput { + s.RequestProgress = v + return s +} + +// SetSSECustomerAlgorithm sets the SSECustomerAlgorithm field's value. +func (s *SelectObjectContentInput) SetSSECustomerAlgorithm(v string) *SelectObjectContentInput { + s.SSECustomerAlgorithm = &v + return s +} + +// SetSSECustomerKey sets the SSECustomerKey field's value. +func (s *SelectObjectContentInput) SetSSECustomerKey(v string) *SelectObjectContentInput { + s.SSECustomerKey = &v + return s +} + +func (s *SelectObjectContentInput) getSSECustomerKey() (v string) { + if s.SSECustomerKey == nil { + return v + } + return *s.SSECustomerKey +} + +// SetSSECustomerKeyMD5 sets the SSECustomerKeyMD5 field's value. +func (s *SelectObjectContentInput) SetSSECustomerKeyMD5(v string) *SelectObjectContentInput { + s.SSECustomerKeyMD5 = &v + return s +} + +type SelectObjectContentOutput struct { + _ struct{} `type:"structure" payload:"Payload"` + + // Use EventStream to use the API's stream. + EventStream *SelectObjectContentEventStream `type:"structure"` +} + +// String returns the string representation +func (s SelectObjectContentOutput) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s SelectObjectContentOutput) GoString() string { + return s.String() +} + +// SetEventStream sets the EventStream field's value. +func (s *SelectObjectContentOutput) SetEventStream(v *SelectObjectContentEventStream) *SelectObjectContentOutput { + s.EventStream = v + return s +} + +func (s *SelectObjectContentOutput) runEventStreamLoop(r *request.Request) { + if r.Error != nil { + return + } + reader := newReadSelectObjectContentEventStream( + r.HTTPResponse.Body, + r.Handlers.UnmarshalStream, + r.Config.Logger, + r.Config.LogLevel.Value(), + ) + go reader.readEventStream() + + eventStream := &SelectObjectContentEventStream{ + StreamCloser: r.HTTPResponse.Body, + Reader: reader, + } + s.EventStream = eventStream +} + // Describes the parameters for Select job types. type SelectParameters struct { _ struct{} `type:"structure"` @@ -19696,6 +20411,87 @@ func (s *SseKmsEncryptedObjects) SetStatus(v string) *SseKmsEncryptedObjects { return s } +type Stats struct { + _ struct{} `type:"structure"` + + // Total number of uncompressed object bytes processed. + BytesProcessed *int64 `type:"long"` + + // Total number of bytes of records payload data returned. + BytesReturned *int64 `type:"long"` + + // Total number of object bytes scanned. + BytesScanned *int64 `type:"long"` +} + +// String returns the string representation +func (s Stats) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s Stats) GoString() string { + return s.String() +} + +// SetBytesProcessed sets the BytesProcessed field's value. +func (s *Stats) SetBytesProcessed(v int64) *Stats { + s.BytesProcessed = &v + return s +} + +// SetBytesReturned sets the BytesReturned field's value. +func (s *Stats) SetBytesReturned(v int64) *Stats { + s.BytesReturned = &v + return s +} + +// SetBytesScanned sets the BytesScanned field's value. +func (s *Stats) SetBytesScanned(v int64) *Stats { + s.BytesScanned = &v + return s +} + +type StatsEvent struct { + _ struct{} `type:"structure" payload:"Details"` + + // The Stats event details. 
+ Details *Stats `locationName:"Details" type:"structure"` +} + +// String returns the string representation +func (s StatsEvent) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s StatsEvent) GoString() string { + return s.String() +} + +// SetDetails sets the Details field's value. +func (s *StatsEvent) SetDetails(v *Stats) *StatsEvent { + s.Details = v + return s +} + +// The StatsEvent is and event in the SelectObjectContentEventStream group of events. +func (s *StatsEvent) eventSelectObjectContentEventStream() {} + +// UnmarshalEvent unmarshals the EventStream Message into the StatsEvent value. +// This method is only used internally within the SDK's EventStream handling. +func (s *StatsEvent) UnmarshalEvent( + payloadUnmarshaler protocol.PayloadUnmarshaler, + msg eventstream.Message, +) error { + if err := payloadUnmarshaler.UnmarshalPayload( + bytes.NewReader(msg.Payload), s, + ); err != nil { + return fmt.Errorf("failed to unmarshal payload, %v", err) + } + return nil +} + type StorageClassAnalysis struct { _ struct{} `type:"structure"` diff --git a/vendor/github.com/aws/aws-sdk-go/service/s3/service.go b/vendor/github.com/aws/aws-sdk-go/service/s3/service.go index 614e477d3bb..20de53f29d7 100644 --- a/vendor/github.com/aws/aws-sdk-go/service/s3/service.go +++ b/vendor/github.com/aws/aws-sdk-go/service/s3/service.go @@ -29,8 +29,9 @@ var initRequest func(*request.Request) // Service information constants const ( - ServiceName = "s3" // Service endpoint prefix API calls made to. - EndpointsID = ServiceName // Service ID for Regions and Endpoints metadata. + ServiceName = "s3" // Name of service. + EndpointsID = ServiceName // ID to lookup a service endpoint with. + ServiceID = "S3" // ServiceID is a unique identifer of a specific service. ) // New creates a new instance of the S3 client with a session. @@ -55,6 +56,7 @@ func newClient(cfg aws.Config, handlers request.Handlers, endpoint, signingRegio cfg, metadata.ClientInfo{ ServiceName: ServiceName, + ServiceID: ServiceID, SigningName: signingName, SigningRegion: signingRegion, Endpoint: endpoint, @@ -71,6 +73,8 @@ func newClient(cfg aws.Config, handlers request.Handlers, endpoint, signingRegio svc.Handlers.UnmarshalMeta.PushBackNamed(restxml.UnmarshalMetaHandler) svc.Handlers.UnmarshalError.PushBackNamed(restxml.UnmarshalErrorHandler) + svc.Handlers.UnmarshalStream.PushBackNamed(restxml.UnmarshalHandler) + // Run custom client initialization if present if initClient != nil { initClient(svc.Client) diff --git a/vendor/github.com/aws/aws-sdk-go/service/sts/service.go b/vendor/github.com/aws/aws-sdk-go/service/sts/service.go index 1ee5839e046..185c914d1b3 100644 --- a/vendor/github.com/aws/aws-sdk-go/service/sts/service.go +++ b/vendor/github.com/aws/aws-sdk-go/service/sts/service.go @@ -29,8 +29,9 @@ var initRequest func(*request.Request) // Service information constants const ( - ServiceName = "sts" // Service endpoint prefix API calls made to. - EndpointsID = ServiceName // Service ID for Regions and Endpoints metadata. + ServiceName = "sts" // Name of service. + EndpointsID = ServiceName // ID to lookup a service endpoint with. + ServiceID = "STS" // ServiceID is a unique identifer of a specific service. ) // New creates a new instance of the STS client with a session. 
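The vendored changes above add the S3 SelectObjectContent API together with its event-stream plumbing. A minimal consumer sketch follows for orientation; it assumes the rest of aws-sdk-go at this revision, and the bucket name, object key, SQL text, the session helper, the CSV serialization types and the ExpressionTypeSql constant are illustrative assumptions that do not appear in this diff.

package main

import (
	"fmt"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/s3"
)

func main() {
	svc := s3.New(session.Must(session.NewSession()))

	// Issue the select; the field names come from the generated API in this
	// diff, while the concrete values are placeholders.
	out, err := svc.SelectObjectContent(&s3.SelectObjectContentInput{
		Bucket:              aws.String("example-bucket"),
		Key:                 aws.String("example.csv"),
		Expression:          aws.String("SELECT * FROM S3Object s"),
		ExpressionType:      aws.String(s3.ExpressionTypeSql), // assumed enum value "SQL"
		InputSerialization:  &s3.InputSerialization{CSV: &s3.CSVInput{}},
		OutputSerialization: &s3.OutputSerialization{CSV: &s3.CSVOutput{}},
	})
	if err != nil {
		fmt.Println("SelectObjectContent failed:", err)
		return
	}
	// Close releases the event stream and, for HTTP, the response body.
	defer out.EventStream.Close()

	// Events() yields RecordsEvent, ProgressEvent, StatsEvent, ContinuationEvent
	// and EndEvent values until the stream is drained or closed.
	for ev := range out.EventStream.Events() {
		switch e := ev.(type) {
		case *s3.RecordsEvent:
			fmt.Printf("%s", e.Payload) // partial result records
		case *s3.StatsEvent:
			fmt.Println("bytes scanned:", aws.Int64Value(e.Details.BytesScanned))
		}
	}
	if err := out.EventStream.Err(); err != nil {
		fmt.Println("event stream error:", err)
	}
}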
@@ -55,6 +56,7 @@ func newClient(cfg aws.Config, handlers request.Handlers, endpoint, signingRegio cfg, metadata.ClientInfo{ ServiceName: ServiceName, + ServiceID: ServiceID, SigningName: signingName, SigningRegion: signingRegion, Endpoint: endpoint, diff --git a/vendor/github.com/shurcooL/sanitized_anchor_name/LICENSE b/vendor/github.com/shurcooL/sanitized_anchor_name/LICENSE new file mode 100644 index 00000000000..c35c17af980 --- /dev/null +++ b/vendor/github.com/shurcooL/sanitized_anchor_name/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2015 Dmitri Shuralyov + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/shurcooL/sanitized_anchor_name/main.go b/vendor/github.com/shurcooL/sanitized_anchor_name/main.go new file mode 100644 index 00000000000..6a77d124317 --- /dev/null +++ b/vendor/github.com/shurcooL/sanitized_anchor_name/main.go @@ -0,0 +1,29 @@ +// Package sanitized_anchor_name provides a func to create sanitized anchor names. +// +// Its logic can be reused by multiple packages to create interoperable anchor names +// and links to those anchors. +// +// At this time, it does not try to ensure that generated anchor names +// are unique, that responsibility falls on the caller. +package sanitized_anchor_name // import "github.com/shurcooL/sanitized_anchor_name" + +import "unicode" + +// Create returns a sanitized anchor name for the given text. 
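// Purely illustrative examples (not from the upstream package) of what the
// function below produces: letters and digits are lower-cased, and any run of
// other characters collapses into a single dash, with no leading dash emitted:
//
//	Create("Hello, World!")   // "hello-world"
//	Create("  2.1 Overview")  // "2-1-overview"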
+func Create(text string) string { + var anchorName []rune + var futureDash = false + for _, r := range text { + switch { + case unicode.IsLetter(r) || unicode.IsNumber(r): + if futureDash && len(anchorName) > 0 { + anchorName = append(anchorName, '-') + } + futureDash = false + anchorName = append(anchorName, unicode.ToLower(r)) + default: + futureDash = true + } + } + return string(anchorName) +} From 40ed235b3ba29de41081ba59110824caa0671801 Mon Sep 17 00:00:00 2001 From: Mitsuhiro Tanda Date: Mon, 16 Apr 2018 16:50:13 +0900 Subject: [PATCH 005/105] support GetMetricData --- pkg/metrics/metrics.go | 8 + pkg/tsdb/cloudwatch/cloudwatch.go | 215 +++++++++++++++--- pkg/tsdb/cloudwatch/types.go | 4 + .../datasource/cloudwatch/datasource.ts | 20 +- .../cloudwatch/partials/query.parameter.html | 45 ++-- .../cloudwatch/query_parameter_ctrl.ts | 3 + 6 files changed, 246 insertions(+), 49 deletions(-) diff --git a/pkg/metrics/metrics.go b/pkg/metrics/metrics.go index 4dd84c12151..a8d9f7308fa 100644 --- a/pkg/metrics/metrics.go +++ b/pkg/metrics/metrics.go @@ -44,6 +44,7 @@ var ( M_Alerting_Notification_Sent *prometheus.CounterVec M_Aws_CloudWatch_GetMetricStatistics prometheus.Counter M_Aws_CloudWatch_ListMetrics prometheus.Counter + M_Aws_CloudWatch_GetMetricData prometheus.Counter M_DB_DataSource_QueryById prometheus.Counter // Timers @@ -218,6 +219,12 @@ func init() { Namespace: exporterName, }) + M_Aws_CloudWatch_GetMetricData = prometheus.NewCounter(prometheus.CounterOpts{ + Name: "aws_cloudwatch_get_metric_data_total", + Help: "counter for getting metric data time series from aws", + Namespace: exporterName, + }) + M_DB_DataSource_QueryById = prometheus.NewCounter(prometheus.CounterOpts{ Name: "db_datasource_query_by_id_total", Help: "counter for getting datasource by id", @@ -307,6 +314,7 @@ func initMetricVars() { M_Alerting_Notification_Sent, M_Aws_CloudWatch_GetMetricStatistics, M_Aws_CloudWatch_ListMetrics, + M_Aws_CloudWatch_GetMetricData, M_DB_DataSource_QueryById, M_Alerting_Active_Alerts, M_StatTotal_Dashboards, diff --git a/pkg/tsdb/cloudwatch/cloudwatch.go b/pkg/tsdb/cloudwatch/cloudwatch.go index 8af97575ae9..54634bc0614 100644 --- a/pkg/tsdb/cloudwatch/cloudwatch.go +++ b/pkg/tsdb/cloudwatch/cloudwatch.go @@ -14,6 +14,7 @@ import ( "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/tsdb" + "golang.org/x/sync/errgroup" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/request" @@ -88,48 +89,63 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo Results: make(map[string]*tsdb.QueryResult), } - errCh := make(chan error, 1) - resCh := make(chan *tsdb.QueryResult, 1) + eg, ectx := errgroup.WithContext(ctx) - currentlyExecuting := 0 + getMetricDataQueries := make(map[string]map[string]*CloudWatchQuery) for i, model := range queryContext.Queries { queryType := model.Model.Get("type").MustString() if queryType != "timeSeriesQuery" && queryType != "" { continue } - currentlyExecuting++ - go func(refId string, index int) { - queryRes, err := e.executeQuery(ctx, queryContext.Queries[index].Model, queryContext) - currentlyExecuting-- - if err != nil { - errCh <- err - } else { - queryRes.RefId = refId - resCh <- queryRes + + query, err := parseQuery(queryContext.Queries[i].Model) + if err != nil { + return nil, err + } + query.RefId = queryContext.Queries[i].RefId + + if query.Id != "" { + if _, ok := getMetricDataQueries[query.Region]; !ok { + getMetricDataQueries[query.Region] = 
make(map[string]*CloudWatchQuery) } - }(model.RefId, i) + getMetricDataQueries[query.Region][query.Id] = query + continue + } + + eg.Go(func() error { + queryRes, err := e.executeQuery(ectx, query, queryContext) + if err != nil { + return err + } + result.Results[queryRes.RefId] = queryRes + return nil + }) } - for currentlyExecuting != 0 { - select { - case res := <-resCh: - result.Results[res.RefId] = res - case err := <-errCh: - return result, err - case <-ctx.Done(): - return result, ctx.Err() + if len(getMetricDataQueries) > 0 { + for region, getMetricDataQuery := range getMetricDataQueries { + q := getMetricDataQuery + eg.Go(func() error { + queryResponses, err := e.executeGetMetricDataQuery(ectx, region, q, queryContext) + if err != nil { + return err + } + for _, queryRes := range queryResponses { + result.Results[queryRes.RefId] = queryRes + } + return nil + }) } } + if err := eg.Wait(); err != nil { + return nil, err + } + return result, nil } -func (e *CloudWatchExecutor) executeQuery(ctx context.Context, parameters *simplejson.Json, queryContext *tsdb.TsdbQuery) (*tsdb.QueryResult, error) { - query, err := parseQuery(parameters) - if err != nil { - return nil, err - } - +func (e *CloudWatchExecutor) executeQuery(ctx context.Context, query *CloudWatchQuery, queryContext *tsdb.TsdbQuery) (*tsdb.QueryResult, error) { client, err := e.getClient(query.Region) if err != nil { return nil, err @@ -201,6 +217,139 @@ func (e *CloudWatchExecutor) executeQuery(ctx context.Context, parameters *simpl return queryRes, nil } +func (e *CloudWatchExecutor) executeGetMetricDataQuery(ctx context.Context, region string, queries map[string]*CloudWatchQuery, queryContext *tsdb.TsdbQuery) ([]*tsdb.QueryResult, error) { + queryResponses := make([]*tsdb.QueryResult, 0) + + // validate query + for _, query := range queries { + if !(len(query.Statistics) == 1 && len(query.ExtendedStatistics) == 0) && + !(len(query.Statistics) == 0 && len(query.ExtendedStatistics) == 1) { + return queryResponses, errors.New("Statistics count should be 1") + } + } + + client, err := e.getClient(region) + if err != nil { + return queryResponses, err + } + + startTime, err := queryContext.TimeRange.ParseFrom() + if err != nil { + return queryResponses, err + } + + endTime, err := queryContext.TimeRange.ParseTo() + if err != nil { + return queryResponses, err + } + + params := &cloudwatch.GetMetricDataInput{ + StartTime: aws.Time(startTime), + EndTime: aws.Time(endTime), + ScanBy: aws.String("TimestampAscending"), + } + for _, query := range queries { + // 1 minutes resolutin metrics is stored for 15 days, 15 * 24 * 60 = 21600 + if query.HighResolution && (((endTime.Unix() - startTime.Unix()) / int64(query.Period)) > 21600) { + return nil, errors.New("too long query period") + } + + mdq := &cloudwatch.MetricDataQuery{ + Id: aws.String(query.Id), + ReturnData: aws.Bool(query.ReturnData), + } + if query.Expression != "" { + mdq.Expression = aws.String(query.Expression) + } else { + mdq.MetricStat = &cloudwatch.MetricStat{ + Metric: &cloudwatch.Metric{ + Namespace: aws.String(query.Namespace), + MetricName: aws.String(query.MetricName), + }, + Period: aws.Int64(int64(query.Period)), + } + for _, d := range query.Dimensions { + mdq.MetricStat.Metric.Dimensions = append(mdq.MetricStat.Metric.Dimensions, + &cloudwatch.Dimension{ + Name: d.Name, + Value: d.Value, + }) + } + if len(query.Statistics) == 1 { + mdq.MetricStat.Stat = query.Statistics[0] + } else { + mdq.MetricStat.Stat = query.ExtendedStatistics[0] + } + } + 
params.MetricDataQueries = append(params.MetricDataQueries, mdq) + } + + nextToken := "" + mdr := make(map[string]*cloudwatch.MetricDataResult) + for { + if nextToken != "" { + params.NextToken = aws.String(nextToken) + } + resp, err := client.GetMetricDataWithContext(ctx, params) + if err != nil { + return queryResponses, err + } + metrics.M_Aws_CloudWatch_GetMetricData.Add(float64(len(params.MetricDataQueries))) + + for _, r := range resp.MetricDataResults { + if _, ok := mdr[*r.Id]; !ok { + mdr[*r.Id] = r + } else { + mdr[*r.Id].Timestamps = append(mdr[*r.Id].Timestamps, r.Timestamps...) + mdr[*r.Id].Values = append(mdr[*r.Id].Values, r.Values...) + } + } + + if resp.NextToken == nil || *resp.NextToken == "" { + break + } + nextToken = *resp.NextToken + } + + for i, r := range mdr { + if *r.StatusCode != "Complete" { + return queryResponses, fmt.Errorf("Part of query is failed: %s", *r.StatusCode) + } + + queryRes := tsdb.NewQueryResult() + queryRes.RefId = queries[i].RefId + query := queries[*r.Id] + + series := tsdb.TimeSeries{ + Tags: map[string]string{}, + Points: make([]tsdb.TimePoint, 0), + } + for _, d := range query.Dimensions { + series.Tags[*d.Name] = *d.Value + } + s := "" + if len(query.Statistics) == 1 { + s = *query.Statistics[0] + } else { + s = *query.ExtendedStatistics[0] + } + series.Name = formatAlias(query, s, series.Tags) + + for j, t := range r.Timestamps { + expectedTimestamp := r.Timestamps[j].Add(time.Duration(query.Period) * time.Second) + if j > 0 && expectedTimestamp.Before(*t) { + series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), float64(expectedTimestamp.Unix()*1000))) + } + series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(*r.Values[j]), float64((*t).Unix())*1000)) + } + + queryRes.Series = append(queryRes.Series, &series) + queryResponses = append(queryResponses, queryRes) + } + + return queryResponses, nil +} + func parseDimensions(model *simplejson.Json) ([]*cloudwatch.Dimension, error) { var result []*cloudwatch.Dimension @@ -257,6 +406,9 @@ func parseQuery(model *simplejson.Json) (*CloudWatchQuery, error) { return nil, err } + id := model.Get("id").MustString("") + expression := model.Get("expression").MustString("") + dimensions, err := parseDimensions(model) if err != nil { return nil, err @@ -295,6 +447,7 @@ func parseQuery(model *simplejson.Json) (*CloudWatchQuery, error) { alias = "{{metric}}_{{stat}}" } + returnData := model.Get("returnData").MustBool(false) highResolution := model.Get("highResolution").MustBool(false) return &CloudWatchQuery{ @@ -306,11 +459,18 @@ func parseQuery(model *simplejson.Json) (*CloudWatchQuery, error) { ExtendedStatistics: aws.StringSlice(extendedStatistics), Period: period, Alias: alias, + Id: id, + Expression: expression, + ReturnData: returnData, HighResolution: highResolution, }, nil } func formatAlias(query *CloudWatchQuery, stat string, dimensions map[string]string) string { + if len(query.Id) > 0 && len(query.Expression) > 0 { + return query.Id + } + data := map[string]string{} data["region"] = query.Region data["namespace"] = query.Namespace @@ -338,6 +498,7 @@ func formatAlias(query *CloudWatchQuery, stat string, dimensions map[string]stri func parseResponse(resp *cloudwatch.GetMetricStatisticsOutput, query *CloudWatchQuery) (*tsdb.QueryResult, error) { queryRes := tsdb.NewQueryResult() + queryRes.RefId = query.RefId var value float64 for _, s := range append(query.Statistics, query.ExtendedStatistics...) 
{ series := tsdb.TimeSeries{ diff --git a/pkg/tsdb/cloudwatch/types.go b/pkg/tsdb/cloudwatch/types.go index 0737b64686d..1225fb9b31b 100644 --- a/pkg/tsdb/cloudwatch/types.go +++ b/pkg/tsdb/cloudwatch/types.go @@ -5,6 +5,7 @@ import ( ) type CloudWatchQuery struct { + RefId string Region string Namespace string MetricName string @@ -13,5 +14,8 @@ type CloudWatchQuery struct { ExtendedStatistics []*string Period int Alias string + Id string + Expression string + ReturnData bool HighResolution bool } diff --git a/public/app/plugins/datasource/cloudwatch/datasource.ts b/public/app/plugins/datasource/cloudwatch/datasource.ts index 4101759ec1d..74100e5d69a 100644 --- a/public/app/plugins/datasource/cloudwatch/datasource.ts +++ b/public/app/plugins/datasource/cloudwatch/datasource.ts @@ -30,7 +30,9 @@ export default class CloudWatchDatasource { var queries = _.filter(options.targets, item => { return ( - item.hide !== true && !!item.region && !!item.namespace && !!item.metricName && !_.isEmpty(item.statistics) + (item.id !== '' || item.hide !== true) && + ((!!item.region && !!item.namespace && !!item.metricName && !_.isEmpty(item.statistics)) || + item.expression.length > 0) ); }).map(item => { item.region = this.templateSrv.replace(this.getActualRegion(item.region), options.scopedVars); @@ -38,6 +40,9 @@ export default class CloudWatchDatasource { item.metricName = this.templateSrv.replace(item.metricName, options.scopedVars); item.dimensions = this.convertDimensionFormat(item.dimensions, options.scopedVars); item.period = String(this.getPeriod(item, options)); // use string format for period in graph query, and alerting + item.id = this.templateSrv.replace(item.id, options.scopedVars); + item.expression = this.templateSrv.replace(item.expression, options.scopedVars); + item.returnData = typeof item.hide === 'undefined' ? true : !item.hide; return _.extend( { @@ -384,11 +389,11 @@ export default class CloudWatchDatasource { var currentVariables = !_.isArray(variable.current.value) ? [variable.current] : variable.current.value.map(v => { - return { - text: v, - value: v, - }; - }); + return { + text: v, + value: v, + }; + }); let useSelectedVariables = selectedVariables.some(s => { return s.value === currentVariables[0].value; @@ -399,6 +404,9 @@ export default class CloudWatchDatasource { scopedVar[variable.name] = v; t.refId = target.refId + '_' + v.value; t.dimensions[dimensionKey] = templateSrv.replace(t.dimensions[dimensionKey], scopedVar); + if (target.id) { + t.id = target.id + window.btoa(v.value).replace(/=/g, '0'); // generate unique id + } return t; }); } diff --git a/public/app/plugins/datasource/cloudwatch/partials/query.parameter.html b/public/app/plugins/datasource/cloudwatch/partials/query.parameter.html index 81bad39e23a..57a59f80265 100644 --- a/public/app/plugins/datasource/cloudwatch/partials/query.parameter.html +++ b/public/app/plugins/datasource/cloudwatch/partials/query.parameter.html @@ -1,4 +1,4 @@ -
[Markup lost in extraction: the query.parameter.html hunks survive only as bare +/- markers. Four hunks touched the template (around lines 1, 20, 31 and 54; the hunk at line 31 grows from 18 to 31 lines), and the only legible fragment is the alias help text, "Alias replacement variables: {{metric}} ...". Judging from the parseQuery and query_parameter_ctrl.ts changes in this same patch, these hunks add the Id and Expression inputs to the CloudWatch query editor form.]
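To make the new query model concrete, here is a minimal sketch of two targets as they might appear in a panel's JSON after this patch. The field names (refId, region, namespace, metricName, dimensions, statistics, period, plus the new id, expression, returnData and highResolution) follow parseQuery and datasource.ts above; the concrete values, including the ids "cpu" and "cpu_x2" and the expression "cpu * 2", are illustrative assumptions, not output of the actual editor.

// Sketch only, with assumed values: a plain metric query that other queries can
// reference by id, and a metric math query built on top of it.
const cpuTarget = {
  refId: 'A',
  region: 'us-east-1',
  namespace: 'AWS/EC2',
  metricName: 'CPUUtilization',
  dimensions: { InstanceId: 'i-0123456789abcdef0' },
  statistics: ['Average'],
  period: '300', // datasource.ts sends period as a string
  id: 'cpu', // the id other queries use to reference this series
  expression: '',
  returnData: false, // input series only, not drawn on the graph
  highResolution: false,
};

const scaledTarget = {
  refId: 'B',
  region: 'us-east-1',
  namespace: '',
  metricName: '',
  dimensions: {},
  statistics: ['Average'], // executeGetMetricDataQuery still expects exactly one statistic
  period: '300',
  id: 'cpu_x2',
  expression: 'cpu * 2', // metric math over the series whose id is "cpu"
  returnData: true,
  highResolution: false,
};

console.log([cpuTarget, scaledTarget]);

Because both targets carry an id, executeTimeSeriesQuery collects them into the per-region GetMetricData batch instead of issuing individual GetMetricStatistics calls, and a later patch in this series adds an explicit error when an expression is used without an id.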
diff --git a/public/app/plugins/datasource/cloudwatch/query_parameter_ctrl.ts b/public/app/plugins/datasource/cloudwatch/query_parameter_ctrl.ts index 0b47ebd7069..689cf270feb 100644 --- a/public/app/plugins/datasource/cloudwatch/query_parameter_ctrl.ts +++ b/public/app/plugins/datasource/cloudwatch/query_parameter_ctrl.ts @@ -27,6 +27,9 @@ export class CloudWatchQueryParameterCtrl { target.dimensions = target.dimensions || {}; target.period = target.period || ''; target.region = target.region || 'default'; + target.id = target.id || ''; + target.expression = target.expression || ''; + target.returnData = target.returnData || false; target.highResolution = target.highResolution || false; $scope.regionSegment = uiSegmentSrv.getSegmentForValue($scope.target.region, 'select region'); From 4c59be4f5b0c4b3841a8190ec79139a581c3db84 Mon Sep 17 00:00:00 2001 From: Mitsuhiro Tanda Date: Fri, 22 Jun 2018 16:25:04 +0900 Subject: [PATCH 006/105] generate unique id when variable is multi --- .../plugins/datasource/cloudwatch/datasource.ts | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/public/app/plugins/datasource/cloudwatch/datasource.ts b/public/app/plugins/datasource/cloudwatch/datasource.ts index 74100e5d69a..41e335dc320 100644 --- a/public/app/plugins/datasource/cloudwatch/datasource.ts +++ b/public/app/plugins/datasource/cloudwatch/datasource.ts @@ -389,11 +389,11 @@ export default class CloudWatchDatasource { var currentVariables = !_.isArray(variable.current.value) ? [variable.current] : variable.current.value.map(v => { - return { - text: v, - value: v, - }; - }); + return { + text: v, + value: v, + }; + }); let useSelectedVariables = selectedVariables.some(s => { return s.value === currentVariables[0].value; @@ -404,8 +404,10 @@ export default class CloudWatchDatasource { scopedVar[variable.name] = v; t.refId = target.refId + '_' + v.value; t.dimensions[dimensionKey] = templateSrv.replace(t.dimensions[dimensionKey], scopedVar); - if (target.id) { + if (variable.multi && target.id) { t.id = target.id + window.btoa(v.value).replace(/=/g, '0'); // generate unique id + } else { + t.id = target.id; } return t; }); From 77220456b6c5478e3dc0addfc3838008fb1ea2a5 Mon Sep 17 00:00:00 2001 From: Mitsuhiro Tanda Date: Fri, 22 Jun 2018 16:35:17 +0900 Subject: [PATCH 007/105] improve error message --- pkg/tsdb/cloudwatch/cloudwatch.go | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pkg/tsdb/cloudwatch/cloudwatch.go b/pkg/tsdb/cloudwatch/cloudwatch.go index 54634bc0614..38fbac3aa29 100644 --- a/pkg/tsdb/cloudwatch/cloudwatch.go +++ b/pkg/tsdb/cloudwatch/cloudwatch.go @@ -112,6 +112,10 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo continue } + if query.Id == "" && query.Expression != "" { + return nil, fmt.Errorf("Invalid query: id should be set if using expression") + } + eg.Go(func() error { queryRes, err := e.executeQuery(ectx, query, queryContext) if err != nil { From 4ee4ca99be159862a8990034e0087417174dfd09 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Tue, 10 Jul 2018 12:54:45 +0200 Subject: [PATCH 008/105] Prevent scroll on focus for iframe --- public/app/core/components/scroll/page_scroll.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/public/app/core/components/scroll/page_scroll.ts b/public/app/core/components/scroll/page_scroll.ts index e6db344a4d6..0cb36eba914 100644 --- a/public/app/core/components/scroll/page_scroll.ts +++ b/public/app/core/components/scroll/page_scroll.ts @@ -29,11 
+29,11 @@ export function pageScrollbar() { scope.$on('$routeChangeSuccess', () => { lastPos = 0; elem[0].scrollTop = 0; - elem[0].focus(); + elem[0].focus({ preventScroll: true }); }); elem[0].tabIndex = -1; - elem[0].focus(); + elem[0].focus({ preventScroll: true }); }, }; } From 4d2dd22095ba4f0f96a2d524cb43a7d71e771e5f Mon Sep 17 00:00:00 2001 From: David Date: Wed, 11 Jul 2018 12:29:19 +0200 Subject: [PATCH 009/105] Fix css loading in plugins (#12573) - allow css loader to be imported again (wasnt prefixed by plugin) --- public/app/features/plugins/plugin_loader.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/public/app/features/plugins/plugin_loader.ts b/public/app/features/plugins/plugin_loader.ts index f999ee7e2ff..20023e27b5c 100644 --- a/public/app/features/plugins/plugin_loader.ts +++ b/public/app/features/plugins/plugin_loader.ts @@ -56,7 +56,7 @@ System.config({ css: 'vendor/plugin-css/css.js', }, meta: { - 'plugin*': { + '*': { esModule: true, authorization: true, loader: 'plugin-loader', From cfa555b5695d76bf7e4033190f216885a38a38ad Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Wed, 11 Jul 2018 12:31:07 +0200 Subject: [PATCH 010/105] Add folder name to dashboard title (#12545) * Add folder name to dashboard title. No display on medium displays and below * Compare folderId instead * folderId bigger than 0 --- public/app/features/dashboard/dashnav/dashnav.html | 2 +- public/sass/components/_navbar.scss | 8 ++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/public/app/features/dashboard/dashnav/dashnav.html b/public/app/features/dashboard/dashnav/dashnav.html index 269d4b0bada..6ec272b5ca4 100644 --- a/public/app/features/dashboard/dashnav/dashnav.html +++ b/public/app/features/dashboard/dashnav/dashnav.html @@ -3,7 +3,7 @@ diff --git a/public/sass/components/_navbar.scss b/public/sass/components/_navbar.scss index 1a453b15069..0b7e3a79dcd 100644 --- a/public/sass/components/_navbar.scss +++ b/public/sass/components/_navbar.scss @@ -85,6 +85,14 @@ // icon hidden on smaller screens display: none; } + + &--folder { + color: $text-color-weak; + + @include media-breakpoint-down(md) { + display: none; + } + } } .navbar-buttons { From 2fbf2bba4e61af9ad7ea0c870364138387bd4400 Mon Sep 17 00:00:00 2001 From: Rollulus Rouloul Date: Wed, 11 Jul 2018 12:33:19 +0200 Subject: [PATCH 011/105] fix: requests/sec instead of requets (#12557) --- public/app/core/utils/kbn.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/public/app/core/utils/kbn.ts b/public/app/core/utils/kbn.ts index 4302e62e3e0..463025567cd 100644 --- a/public/app/core/utils/kbn.ts +++ b/public/app/core/utils/kbn.ts @@ -957,7 +957,7 @@ kbn.getUnitFormats = function() { text: 'throughput', submenu: [ { text: 'ops/sec (ops)', value: 'ops' }, - { text: 'requets/sec (rps)', value: 'reqps' }, + { text: 'requests/sec (rps)', value: 'reqps' }, { text: 'reads/sec (rps)', value: 'rps' }, { text: 'writes/sec (wps)', value: 'wps' }, { text: 'I/O ops/sec (iops)', value: 'iops' }, From 2c22a7b4ba12966cac8e092167cb45c5fdadfeb0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Torkel=20=C3=96degaard?= Date: Wed, 11 Jul 2018 13:31:34 +0200 Subject: [PATCH 012/105] Don't build-all for PRs --- .circleci/config.yml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index f1d161c3cac..44f34d42926 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -246,7 +246,7 @@ workflows: test-and-build: jobs: - build-all: - 
filters: *filter-not-release + filters: *filter-only-master - build-enterprise: filters: *filter-only-master - codespell: @@ -270,9 +270,7 @@ workflows: - gometalinter - mysql-integration-test - postgres-integration-test - filters: - branches: - only: master + filters: *filter-only-master - deploy-enterprise-master: requires: - build-all From 24f395f986d413606675303988c2eb4d93694853 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=81ukasz=20Gryglicki?= Date: Wed, 11 Jul 2018 19:06:36 +0200 Subject: [PATCH 013/105] Add support for skipping variable value in URL, fixes #12174 (#12541) * New rebase Signed-off-by: Lukasz Gryglicki * Lint Signed-off-by: Lukasz Gryglicki --- .../app/features/templating/adhoc_variable.ts | 2 + .../features/templating/constant_variable.ts | 2 + .../features/templating/custom_variable.ts | 2 + .../templating/datasource_variable.ts | 2 + .../features/templating/interval_variable.ts | 2 + .../app/features/templating/query_variable.ts | 2 + .../templating/specs/template_srv.jest.ts | 57 +++++++++++++++++++ .../app/features/templating/template_srv.ts | 6 ++ 8 files changed, 75 insertions(+) diff --git a/public/app/features/templating/adhoc_variable.ts b/public/app/features/templating/adhoc_variable.ts index babeaf1f34e..9f8bd4c39a7 100644 --- a/public/app/features/templating/adhoc_variable.ts +++ b/public/app/features/templating/adhoc_variable.ts @@ -3,6 +3,7 @@ import { Variable, assignModelProperties, variableTypes } from './variable'; export class AdhocVariable implements Variable { filters: any[]; + skipUrlSync: boolean; defaults = { type: 'adhoc', @@ -11,6 +12,7 @@ export class AdhocVariable implements Variable { hide: 0, datasource: null, filters: [], + skipUrlSync: false, }; /** @ngInject **/ diff --git a/public/app/features/templating/constant_variable.ts b/public/app/features/templating/constant_variable.ts index f2fb4294537..e727c6e98af 100644 --- a/public/app/features/templating/constant_variable.ts +++ b/public/app/features/templating/constant_variable.ts @@ -4,6 +4,7 @@ export class ConstantVariable implements Variable { query: string; options: any[]; current: any; + skipUrlSync: boolean; defaults = { type: 'constant', @@ -13,6 +14,7 @@ export class ConstantVariable implements Variable { query: '', current: {}, options: [], + skipUrlSync: false, }; /** @ngInject **/ diff --git a/public/app/features/templating/custom_variable.ts b/public/app/features/templating/custom_variable.ts index c15178f6644..4490a41a38f 100644 --- a/public/app/features/templating/custom_variable.ts +++ b/public/app/features/templating/custom_variable.ts @@ -7,6 +7,7 @@ export class CustomVariable implements Variable { includeAll: boolean; multi: boolean; current: any; + skipUrlSync: boolean; defaults = { type: 'custom', @@ -19,6 +20,7 @@ export class CustomVariable implements Variable { includeAll: false, multi: false, allValue: null, + skipUrlSync: false, }; /** @ngInject **/ diff --git a/public/app/features/templating/datasource_variable.ts b/public/app/features/templating/datasource_variable.ts index 4c326a94e3b..519ce21e4d4 100644 --- a/public/app/features/templating/datasource_variable.ts +++ b/public/app/features/templating/datasource_variable.ts @@ -7,6 +7,7 @@ export class DatasourceVariable implements Variable { options: any; current: any; refresh: any; + skipUrlSync: boolean; defaults = { type: 'datasource', @@ -18,6 +19,7 @@ export class DatasourceVariable implements Variable { options: [], query: '', refresh: 1, + skipUrlSync: false, }; /** @ngInject **/ diff --git 
a/public/app/features/templating/interval_variable.ts b/public/app/features/templating/interval_variable.ts index 3faac316f98..b932819a7b7 100644 --- a/public/app/features/templating/interval_variable.ts +++ b/public/app/features/templating/interval_variable.ts @@ -11,6 +11,7 @@ export class IntervalVariable implements Variable { query: string; refresh: number; current: any; + skipUrlSync: boolean; defaults = { type: 'interval', @@ -24,6 +25,7 @@ export class IntervalVariable implements Variable { auto: false, auto_min: '10s', auto_count: 30, + skipUrlSync: false, }; /** @ngInject **/ diff --git a/public/app/features/templating/query_variable.ts b/public/app/features/templating/query_variable.ts index 54bd7bb660c..5ddd6d32864 100644 --- a/public/app/features/templating/query_variable.ts +++ b/public/app/features/templating/query_variable.ts @@ -22,6 +22,7 @@ export class QueryVariable implements Variable { tagsQuery: string; tagValuesQuery: string; tags: any[]; + skipUrlSync: boolean; defaults = { type: 'query', @@ -42,6 +43,7 @@ export class QueryVariable implements Variable { useTags: false, tagsQuery: '', tagValuesQuery: '', + skipUrlSync: false, }; /** @ngInject **/ diff --git a/public/app/features/templating/specs/template_srv.jest.ts b/public/app/features/templating/specs/template_srv.jest.ts index 59915776b4f..86b6aa7ec99 100644 --- a/public/app/features/templating/specs/template_srv.jest.ts +++ b/public/app/features/templating/specs/template_srv.jest.ts @@ -345,6 +345,49 @@ describe('templateSrv', function() { }); }); + describe('fillVariableValuesForUrl skip url sync', function() { + beforeEach(function() { + initTemplateSrv([ + { + name: 'test', + skipUrlSync: true, + current: { value: 'value' }, + getValueForUrl: function() { + return this.current.value; + }, + }, + ]); + }); + + it('should not include template variable value in url', function() { + var params = {}; + _templateSrv.fillVariableValuesForUrl(params); + expect(params['var-test']).toBe(undefined); + }); + }); + + describe('fillVariableValuesForUrl with multi value with skip url sync', function() { + beforeEach(function() { + initTemplateSrv([ + { + type: 'query', + name: 'test', + skipUrlSync: true, + current: { value: ['val1', 'val2'] }, + getValueForUrl: function() { + return this.current.value; + }, + }, + ]); + }); + + it('should not include template variable value in url', function() { + var params = {}; + _templateSrv.fillVariableValuesForUrl(params); + expect(params['var-test']).toBe(undefined); + }); + }); + describe('fillVariableValuesForUrl with multi value and scopedVars', function() { beforeEach(function() { initTemplateSrv([{ type: 'query', name: 'test', current: { value: ['val1', 'val2'] } }]); @@ -359,6 +402,20 @@ describe('templateSrv', function() { }); }); + describe('fillVariableValuesForUrl with multi value, scopedVars and skip url sync', function() { + beforeEach(function() { + initTemplateSrv([{ type: 'query', name: 'test', current: { value: ['val1', 'val2'] } }]); + }); + + it('should not set scoped value as url params', function() { + var params = {}; + _templateSrv.fillVariableValuesForUrl(params, { + test: { name: 'test', value: 'val1', skipUrlSync: true }, + }); + expect(params['var-test']).toBe(undefined); + }); + }); + describe('replaceWithText', function() { beforeEach(function() { initTemplateSrv([ diff --git a/public/app/features/templating/template_srv.ts b/public/app/features/templating/template_srv.ts index cdabe577f96..fc79d12ff9e 100644 --- 
a/public/app/features/templating/template_srv.ts +++ b/public/app/features/templating/template_srv.ts @@ -250,8 +250,14 @@ export class TemplateSrv { fillVariableValuesForUrl(params, scopedVars) { _.each(this.variables, function(variable) { if (scopedVars && scopedVars[variable.name] !== void 0) { + if (scopedVars[variable.name].skipUrlSync) { + return; + } params['var-' + variable.name] = scopedVars[variable.name].value; } else { + if (variable.skipUrlSync) { + return; + } params['var-' + variable.name] = variable.getValueForUrl(); } }); From b4e0ace7a2f5d50d84745726349bd4a475aa7776 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Torkel=20=C3=96degaard?= Date: Wed, 11 Jul 2018 10:58:06 -0700 Subject: [PATCH 014/105] fix: folder picker did not notify parent that the initial folder had been changed, fixes #12543 (#12554) --- .../dashboard/folder_picker/folder_picker.ts | 23 +++++++------------ 1 file changed, 8 insertions(+), 15 deletions(-) diff --git a/public/app/features/dashboard/folder_picker/folder_picker.ts b/public/app/features/dashboard/folder_picker/folder_picker.ts index 28338c29d33..352b29d27a0 100644 --- a/public/app/features/dashboard/folder_picker/folder_picker.ts +++ b/public/app/features/dashboard/folder_picker/folder_picker.ts @@ -104,10 +104,7 @@ export class FolderPickerCtrl { appEvents.emit('alert-success', ['Folder Created', 'OK']); this.closeCreateFolder(); - this.folder = { - text: result.title, - value: result.id, - }; + this.folder = { text: result.title, value: result.id }; this.onFolderChange(this.folder); }); } @@ -149,17 +146,14 @@ export class FolderPickerCtrl { folder = result.length > 0 ? result[0] : resetFolder; } } - this.folder = folder; - this.onFolderLoad(); - }); - } - private onFolderLoad() { - if (this.onLoad) { - this.onLoad({ - $folder: { id: this.folder.value, title: this.folder.text }, - }); - } + this.folder = folder; + + // if this is not the same as our initial value notify parent + if (this.folder.id !== this.initialFolderId) { + this.onChange({ $folder: { id: this.folder.value, title: this.folder.text } }); + } + }); } } @@ -176,7 +170,6 @@ export function folderPicker() { labelClass: '@', rootName: '@', onChange: '&', - onLoad: '&', onCreateFolder: '&', enterFolderCreation: '&', exitFolderCreation: '&', From 18a8290c65007bf86c074dc5f3f2b1bdb7c6c3d4 Mon Sep 17 00:00:00 2001 From: Mitsuhiro Tanda Date: Thu, 12 Jul 2018 03:13:47 +0900 Subject: [PATCH 015/105] (prometheus) prevent error to use $__interval_ms in query (#12533) * prevent error to use $__interval_ms in query * add test * prevent error to use $__interval_ms in query --- .../app/features/panel/metrics_panel_ctrl.ts | 2 +- .../datasource/prometheus/datasource.ts | 2 +- .../prometheus/specs/datasource_specs.ts | 36 +++++++++---------- 3 files changed, 20 insertions(+), 20 deletions(-) diff --git a/public/app/features/panel/metrics_panel_ctrl.ts b/public/app/features/panel/metrics_panel_ctrl.ts index 75c0de3bc6e..6eb6d3b3b00 100644 --- a/public/app/features/panel/metrics_panel_ctrl.ts +++ b/public/app/features/panel/metrics_panel_ctrl.ts @@ -222,7 +222,7 @@ class MetricsPanelCtrl extends PanelCtrl { // and add built in variables interval and interval_ms var scopedVars = Object.assign({}, this.panel.scopedVars, { __interval: { text: this.interval, value: this.interval }, - __interval_ms: { text: this.intervalMs, value: this.intervalMs }, + __interval_ms: { text: String(this.intervalMs), value: String(this.intervalMs) }, }); var metricsQuery = { diff --git 
a/public/app/plugins/datasource/prometheus/datasource.ts b/public/app/plugins/datasource/prometheus/datasource.ts index d7d33264c99..88d6141696d 100644 --- a/public/app/plugins/datasource/prometheus/datasource.ts +++ b/public/app/plugins/datasource/prometheus/datasource.ts @@ -196,7 +196,7 @@ export class PrometheusDatasource { interval = adjustedInterval; scopedVars = Object.assign({}, options.scopedVars, { __interval: { text: interval + 's', value: interval + 's' }, - __interval_ms: { text: interval * 1000, value: interval * 1000 }, + __interval_ms: { text: String(interval * 1000), value: String(interval * 1000) }, }); } query.step = interval; diff --git a/public/app/plugins/datasource/prometheus/specs/datasource_specs.ts b/public/app/plugins/datasource/prometheus/specs/datasource_specs.ts index c5da671b757..09aa934dd63 100644 --- a/public/app/plugins/datasource/prometheus/specs/datasource_specs.ts +++ b/public/app/plugins/datasource/prometheus/specs/datasource_specs.ts @@ -452,7 +452,7 @@ describe('PrometheusDatasource', function() { interval: '10s', scopedVars: { __interval: { text: '10s', value: '10s' }, - __interval_ms: { text: 10 * 1000, value: 10 * 1000 }, + __interval_ms: { text: String(10 * 1000), value: String(10 * 1000) }, }, }; var urlExpected = @@ -463,8 +463,8 @@ describe('PrometheusDatasource', function() { expect(query.scopedVars.__interval.text).to.be('10s'); expect(query.scopedVars.__interval.value).to.be('10s'); - expect(query.scopedVars.__interval_ms.text).to.be(10 * 1000); - expect(query.scopedVars.__interval_ms.value).to.be(10 * 1000); + expect(query.scopedVars.__interval_ms.text).to.be(String(10 * 1000)); + expect(query.scopedVars.__interval_ms.value).to.be(String(10 * 1000)); }); it('should be min interval when it is greater than auto interval', function() { var query = { @@ -479,7 +479,7 @@ describe('PrometheusDatasource', function() { interval: '5s', scopedVars: { __interval: { text: '5s', value: '5s' }, - __interval_ms: { text: 5 * 1000, value: 5 * 1000 }, + __interval_ms: { text: String(5 * 1000), value: String(5 * 1000) }, }, }; var urlExpected = @@ -490,8 +490,8 @@ describe('PrometheusDatasource', function() { expect(query.scopedVars.__interval.text).to.be('5s'); expect(query.scopedVars.__interval.value).to.be('5s'); - expect(query.scopedVars.__interval_ms.text).to.be(5 * 1000); - expect(query.scopedVars.__interval_ms.value).to.be(5 * 1000); + expect(query.scopedVars.__interval_ms.text).to.be(String(5 * 1000)); + expect(query.scopedVars.__interval_ms.value).to.be(String(5 * 1000)); }); it('should account for intervalFactor', function() { var query = { @@ -507,7 +507,7 @@ describe('PrometheusDatasource', function() { interval: '10s', scopedVars: { __interval: { text: '10s', value: '10s' }, - __interval_ms: { text: 10 * 1000, value: 10 * 1000 }, + __interval_ms: { text: String(10 * 1000), value: String(10 * 1000) }, }, }; var urlExpected = @@ -518,8 +518,8 @@ describe('PrometheusDatasource', function() { expect(query.scopedVars.__interval.text).to.be('10s'); expect(query.scopedVars.__interval.value).to.be('10s'); - expect(query.scopedVars.__interval_ms.text).to.be(10 * 1000); - expect(query.scopedVars.__interval_ms.value).to.be(10 * 1000); + expect(query.scopedVars.__interval_ms.text).to.be(String(10 * 1000)); + expect(query.scopedVars.__interval_ms.value).to.be(String(10 * 1000)); }); it('should be interval * intervalFactor when greater than min interval', function() { var query = { @@ -535,7 +535,7 @@ describe('PrometheusDatasource', function() { interval: 
'5s', scopedVars: { __interval: { text: '5s', value: '5s' }, - __interval_ms: { text: 5 * 1000, value: 5 * 1000 }, + __interval_ms: { text: String(5 * 1000), value: String(5 * 1000) }, }, }; var urlExpected = @@ -546,8 +546,8 @@ describe('PrometheusDatasource', function() { expect(query.scopedVars.__interval.text).to.be('5s'); expect(query.scopedVars.__interval.value).to.be('5s'); - expect(query.scopedVars.__interval_ms.text).to.be(5 * 1000); - expect(query.scopedVars.__interval_ms.value).to.be(5 * 1000); + expect(query.scopedVars.__interval_ms.text).to.be(String(5 * 1000)); + expect(query.scopedVars.__interval_ms.value).to.be(String(5 * 1000)); }); it('should be min interval when greater than interval * intervalFactor', function() { var query = { @@ -563,7 +563,7 @@ describe('PrometheusDatasource', function() { interval: '5s', scopedVars: { __interval: { text: '5s', value: '5s' }, - __interval_ms: { text: 5 * 1000, value: 5 * 1000 }, + __interval_ms: { text: String(5 * 1000), value: String(5 * 1000) }, }, }; var urlExpected = @@ -574,8 +574,8 @@ describe('PrometheusDatasource', function() { expect(query.scopedVars.__interval.text).to.be('5s'); expect(query.scopedVars.__interval.value).to.be('5s'); - expect(query.scopedVars.__interval_ms.text).to.be(5 * 1000); - expect(query.scopedVars.__interval_ms.value).to.be(5 * 1000); + expect(query.scopedVars.__interval_ms.text).to.be(String(5 * 1000)); + expect(query.scopedVars.__interval_ms.value).to.be(String(5 * 1000)); }); it('should be determined by the 11000 data points limit, accounting for intervalFactor', function() { var query = { @@ -590,7 +590,7 @@ describe('PrometheusDatasource', function() { interval: '5s', scopedVars: { __interval: { text: '5s', value: '5s' }, - __interval_ms: { text: 5 * 1000, value: 5 * 1000 }, + __interval_ms: { text: String(5 * 1000), value: String(5 * 1000) }, }, }; var end = 7 * 24 * 60 * 60; @@ -609,8 +609,8 @@ describe('PrometheusDatasource', function() { expect(query.scopedVars.__interval.text).to.be('5s'); expect(query.scopedVars.__interval.value).to.be('5s'); - expect(query.scopedVars.__interval_ms.text).to.be(5 * 1000); - expect(query.scopedVars.__interval_ms.value).to.be(5 * 1000); + expect(query.scopedVars.__interval_ms.text).to.be(String(5 * 1000)); + expect(query.scopedVars.__interval_ms.value).to.be(String(5 * 1000)); }); }); }); From c03764ff8a47bcc9bcd007de2345baa53d80294e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Torkel=20=C3=96degaard?= Date: Wed, 11 Jul 2018 11:23:07 -0700 Subject: [PATCH 016/105] Refactor team pages to react & design change (#12574) * Rewriting team pages in react * teams to react progress * teams: getting team by id returns same DTO as search, needed for AvatarUrl * teams: progress on new team pages * fix: team test * listing team members and removing team members now works * teams: team member page now works * ux: fixed adding team member issue * refactoring TeamPicker to conform to react coding styles better * teams: very close to being done with team page rewrite * minor style tweak * ux: polish to team pages * feature: team pages in react & everything working * fix: removed flickering when changing tabs by always rendering PageHeader --- pkg/api/alerting_test.go | 2 +- pkg/api/annotations_test.go | 2 +- pkg/api/dashboard_snapshot_test.go | 2 +- pkg/api/dashboard_test.go | 4 +- pkg/api/team.go | 1 + pkg/api/team_test.go | 2 +- pkg/models/team.go | 16 +- pkg/services/guardian/guardian.go | 10 +- pkg/services/guardian/guardian_util_test.go | 6 +- pkg/services/sqlstore/team.go | 
44 +++-- .../ManageDashboards/FolderPermissions.tsx | 4 +- public/app/containers/Teams/TeamGroupSync.tsx | 149 +++++++++++++++++ public/app/containers/Teams/TeamList.tsx | 125 ++++++++++++++ public/app/containers/Teams/TeamMembers.tsx | 144 ++++++++++++++++ public/app/containers/Teams/TeamPages.tsx | 77 +++++++++ public/app/containers/Teams/TeamSettings.tsx | 69 ++++++++ public/app/core/angular_wrappers.ts | 2 - public/app/core/components/Forms/Forms.tsx | 21 +++ .../Permissions/AddPermissions.jest.tsx | 48 +++--- .../components/Permissions/AddPermissions.tsx | 61 +++---- .../Permissions/DashboardPermissions.tsx | 7 +- .../DisabledPermissionsListItem.tsx | 2 +- .../Permissions/PermissionsListItem.tsx | 2 +- .../components/Picker/DescriptionPicker.tsx | 10 +- .../components/Picker/TeamPicker.jest.tsx | 24 +-- .../app/core/components/Picker/TeamPicker.tsx | 38 ++--- .../components/Picker/UserPicker.jest.tsx | 21 +-- .../app/core/components/Picker/UserPicker.tsx | 57 ++++--- .../app/core/components/Picker/withPicker.tsx | 34 ---- public/app/core/components/grafana_app.ts | 4 +- public/app/core/components/team_picker.ts | 64 ------- public/app/core/components/user_picker.ts | 71 -------- public/app/core/core.ts | 4 - public/app/core/services/backend_srv.ts | 14 ++ public/app/features/org/all.ts | 2 - .../features/org/partials/team_details.html | 105 ------------ public/app/features/org/partials/teams.html | 68 -------- .../org/specs/team_details_ctrl.jest.ts | 42 ----- public/app/features/org/team_details_ctrl.ts | 108 ------------ public/app/features/org/teams_ctrl.ts | 66 -------- public/app/routes/routes.ts | 20 ++- public/app/stores/NavStore/NavItem.ts | 3 +- public/app/stores/NavStore/NavStore.ts | 40 +++++ public/app/stores/RootStore/RootStore.ts | 4 + public/app/stores/TeamsStore/TeamsStore.ts | 156 ++++++++++++++++++ public/sass/components/_gf-form.scss | 4 +- public/test/jest-shim.ts | 13 +- 47 files changed, 1015 insertions(+), 757 deletions(-) create mode 100644 public/app/containers/Teams/TeamGroupSync.tsx create mode 100644 public/app/containers/Teams/TeamList.tsx create mode 100644 public/app/containers/Teams/TeamMembers.tsx create mode 100644 public/app/containers/Teams/TeamPages.tsx create mode 100644 public/app/containers/Teams/TeamSettings.tsx create mode 100644 public/app/core/components/Forms/Forms.tsx delete mode 100644 public/app/core/components/Picker/withPicker.tsx delete mode 100644 public/app/core/components/team_picker.ts delete mode 100644 public/app/core/components/user_picker.ts delete mode 100644 public/app/features/org/partials/team_details.html delete mode 100755 public/app/features/org/partials/teams.html delete mode 100644 public/app/features/org/specs/team_details_ctrl.jest.ts delete mode 100644 public/app/features/org/team_details_ctrl.ts delete mode 100644 public/app/features/org/teams_ctrl.ts create mode 100644 public/app/stores/TeamsStore/TeamsStore.ts diff --git a/pkg/api/alerting_test.go b/pkg/api/alerting_test.go index 9eba0e0d5b6..331beeef5e4 100644 --- a/pkg/api/alerting_test.go +++ b/pkg/api/alerting_test.go @@ -31,7 +31,7 @@ func TestAlertingApiEndpoint(t *testing.T) { }) bus.AddHandler("test", func(query *m.GetTeamsByUserQuery) error { - query.Result = []*m.Team{} + query.Result = []*m.TeamDTO{} return nil }) diff --git a/pkg/api/annotations_test.go b/pkg/api/annotations_test.go index 6590eb19ff2..08f3018c694 100644 --- a/pkg/api/annotations_test.go +++ b/pkg/api/annotations_test.go @@ -119,7 +119,7 @@ func TestAnnotationsApiEndpoint(t 
*testing.T) { }) bus.AddHandler("test", func(query *m.GetTeamsByUserQuery) error { - query.Result = []*m.Team{} + query.Result = []*m.TeamDTO{} return nil }) diff --git a/pkg/api/dashboard_snapshot_test.go b/pkg/api/dashboard_snapshot_test.go index 5e7637a24e1..e58f2c4712d 100644 --- a/pkg/api/dashboard_snapshot_test.go +++ b/pkg/api/dashboard_snapshot_test.go @@ -39,7 +39,7 @@ func TestDashboardSnapshotApiEndpoint(t *testing.T) { return nil }) - teamResp := []*m.Team{} + teamResp := []*m.TeamDTO{} bus.AddHandler("test", func(query *m.GetTeamsByUserQuery) error { query.Result = teamResp return nil diff --git a/pkg/api/dashboard_test.go b/pkg/api/dashboard_test.go index 50a2e314f5c..283a9b5f12c 100644 --- a/pkg/api/dashboard_test.go +++ b/pkg/api/dashboard_test.go @@ -61,7 +61,7 @@ func TestDashboardApiEndpoint(t *testing.T) { }) bus.AddHandler("test", func(query *m.GetTeamsByUserQuery) error { - query.Result = []*m.Team{} + query.Result = []*m.TeamDTO{} return nil }) @@ -230,7 +230,7 @@ func TestDashboardApiEndpoint(t *testing.T) { }) bus.AddHandler("test", func(query *m.GetTeamsByUserQuery) error { - query.Result = []*m.Team{} + query.Result = []*m.TeamDTO{} return nil }) diff --git a/pkg/api/team.go b/pkg/api/team.go index 9919305881b..ebb426c4c82 100644 --- a/pkg/api/team.go +++ b/pkg/api/team.go @@ -93,5 +93,6 @@ func GetTeamByID(c *m.ReqContext) Response { return Error(500, "Failed to get Team", err) } + query.Result.AvatarUrl = dtos.GetGravatarUrlWithDefault(query.Result.Email, query.Result.Name) return JSON(200, &query.Result) } diff --git a/pkg/api/team_test.go b/pkg/api/team_test.go index 0bf06d723c8..a1984288870 100644 --- a/pkg/api/team_test.go +++ b/pkg/api/team_test.go @@ -13,7 +13,7 @@ import ( func TestTeamApiEndpoint(t *testing.T) { Convey("Given two teams", t, func() { mockResult := models.SearchTeamQueryResult{ - Teams: []*models.SearchTeamDto{ + Teams: []*models.TeamDTO{ {Name: "team1"}, {Name: "team2"}, }, diff --git a/pkg/models/team.go b/pkg/models/team.go index 9c679a13394..61285db3a5f 100644 --- a/pkg/models/team.go +++ b/pkg/models/team.go @@ -49,13 +49,13 @@ type DeleteTeamCommand struct { type GetTeamByIdQuery struct { OrgId int64 Id int64 - Result *Team + Result *TeamDTO } type GetTeamsByUserQuery struct { OrgId int64 - UserId int64 `json:"userId"` - Result []*Team `json:"teams"` + UserId int64 `json:"userId"` + Result []*TeamDTO `json:"teams"` } type SearchTeamsQuery struct { @@ -68,7 +68,7 @@ type SearchTeamsQuery struct { Result SearchTeamQueryResult } -type SearchTeamDto struct { +type TeamDTO struct { Id int64 `json:"id"` OrgId int64 `json:"orgId"` Name string `json:"name"` @@ -78,8 +78,8 @@ type SearchTeamDto struct { } type SearchTeamQueryResult struct { - TotalCount int64 `json:"totalCount"` - Teams []*SearchTeamDto `json:"teams"` - Page int `json:"page"` - PerPage int `json:"perPage"` + TotalCount int64 `json:"totalCount"` + Teams []*TeamDTO `json:"teams"` + Page int `json:"page"` + PerPage int `json:"perPage"` } diff --git a/pkg/services/guardian/guardian.go b/pkg/services/guardian/guardian.go index cfd8f5c3a6e..7506338c5f0 100644 --- a/pkg/services/guardian/guardian.go +++ b/pkg/services/guardian/guardian.go @@ -30,7 +30,7 @@ type dashboardGuardianImpl struct { dashId int64 orgId int64 acl []*m.DashboardAclInfoDTO - groups []*m.Team + teams []*m.TeamDTO log log.Logger } @@ -186,15 +186,15 @@ func (g *dashboardGuardianImpl) GetAcl() ([]*m.DashboardAclInfoDTO, error) { return g.acl, nil } -func (g *dashboardGuardianImpl) getTeams() ([]*m.Team, error) { 
- if g.groups != nil { - return g.groups, nil +func (g *dashboardGuardianImpl) getTeams() ([]*m.TeamDTO, error) { + if g.teams != nil { + return g.teams, nil } query := m.GetTeamsByUserQuery{OrgId: g.orgId, UserId: g.user.UserId} err := bus.Dispatch(&query) - g.groups = query.Result + g.teams = query.Result return query.Result, err } diff --git a/pkg/services/guardian/guardian_util_test.go b/pkg/services/guardian/guardian_util_test.go index 3d839e71b74..d85548ecb8c 100644 --- a/pkg/services/guardian/guardian_util_test.go +++ b/pkg/services/guardian/guardian_util_test.go @@ -19,7 +19,7 @@ type scenarioContext struct { givenUser *m.SignedInUser givenDashboardID int64 givenPermissions []*m.DashboardAclInfoDTO - givenTeams []*m.Team + givenTeams []*m.TeamDTO updatePermissions []*m.DashboardAcl expectedFlags permissionFlags callerFile string @@ -84,11 +84,11 @@ func permissionScenario(desc string, dashboardID int64, sc *scenarioContext, per return nil }) - teams := []*m.Team{} + teams := []*m.TeamDTO{} for _, p := range permissions { if p.TeamId > 0 { - teams = append(teams, &m.Team{Id: p.TeamId}) + teams = append(teams, &m.TeamDTO{Id: p.TeamId}) } } diff --git a/pkg/services/sqlstore/team.go b/pkg/services/sqlstore/team.go index 9378ca37f60..72955df9a6a 100644 --- a/pkg/services/sqlstore/team.go +++ b/pkg/services/sqlstore/team.go @@ -22,6 +22,16 @@ func init() { bus.AddHandler("sql", GetTeamMembers) } +func getTeamSelectSqlBase() string { + return `SELECT + team.id as id, + team.org_id, + team.name as name, + team.email as email, + (SELECT COUNT(*) from team_member where team_member.team_id = team.id) as member_count + FROM team as team ` +} + func CreateTeam(cmd *m.CreateTeamCommand) error { return inTransaction(func(sess *DBSession) error { @@ -130,21 +140,15 @@ func isTeamNameTaken(orgId int64, name string, existingId int64, sess *DBSession func SearchTeams(query *m.SearchTeamsQuery) error { query.Result = m.SearchTeamQueryResult{ - Teams: make([]*m.SearchTeamDto, 0), + Teams: make([]*m.TeamDTO, 0), } queryWithWildcards := "%" + query.Query + "%" var sql bytes.Buffer params := make([]interface{}, 0) - sql.WriteString(`select - team.id as id, - team.org_id, - team.name as name, - team.email as email, - (select count(*) from team_member where team_member.team_id = team.id) as member_count - from team as team - where team.org_id = ?`) + sql.WriteString(getTeamSelectSqlBase()) + sql.WriteString(` WHERE team.org_id = ?`) params = append(params, query.OrgId) @@ -186,8 +190,14 @@ func SearchTeams(query *m.SearchTeamsQuery) error { } func GetTeamById(query *m.GetTeamByIdQuery) error { - var team m.Team - exists, err := x.Where("org_id=? and id=?", query.OrgId, query.Id).Get(&team) + var sql bytes.Buffer + + sql.WriteString(getTeamSelectSqlBase()) + sql.WriteString(` WHERE team.org_id = ? and team.id = ?`) + + var team m.TeamDTO + exists, err := x.Sql(sql.String(), query.OrgId, query.Id).Get(&team) + if err != nil { return err } @@ -202,13 +212,15 @@ func GetTeamById(query *m.GetTeamByIdQuery) error { // GetTeamsByUser is used by the Guardian when checking a users' permissions func GetTeamsByUser(query *m.GetTeamsByUserQuery) error { - query.Result = make([]*m.Team, 0) + query.Result = make([]*m.TeamDTO, 0) - sess := x.Table("team") - sess.Join("INNER", "team_member", "team.id=team_member.team_id") - sess.Where("team.org_id=? 
and team_member.user_id=?", query.OrgId, query.UserId) + var sql bytes.Buffer - err := sess.Find(&query.Result) + sql.WriteString(getTeamSelectSqlBase()) + sql.WriteString(` INNER JOIN team_member on team.id = team_member.team_id`) + sql.WriteString(` WHERE team.org_id = ? and team_member.user_id = ?`) + + err := x.Sql(sql.String(), query.OrgId, query.UserId).Find(&query.Result) return err } diff --git a/public/app/containers/ManageDashboards/FolderPermissions.tsx b/public/app/containers/ManageDashboards/FolderPermissions.tsx index abbde63a179..aac5d32750a 100644 --- a/public/app/containers/ManageDashboards/FolderPermissions.tsx +++ b/public/app/containers/ManageDashboards/FolderPermissions.tsx @@ -54,7 +54,7 @@ export class FolderPermissions extends Component {
-

Folder Permissions

+

Folder Permissions

@@ -68,7 +68,7 @@ export class FolderPermissions extends Component {
- +
diff --git a/public/app/containers/Teams/TeamGroupSync.tsx b/public/app/containers/Teams/TeamGroupSync.tsx new file mode 100644 index 00000000000..323dceae0d8 --- /dev/null +++ b/public/app/containers/Teams/TeamGroupSync.tsx @@ -0,0 +1,149 @@ +import React from 'react'; +import { hot } from 'react-hot-loader'; +import { observer } from 'mobx-react'; +import { ITeam, ITeamGroup } from 'app/stores/TeamsStore/TeamsStore'; +import SlideDown from 'app/core/components/Animations/SlideDown'; +import Tooltip from 'app/core/components/Tooltip/Tooltip'; + +interface Props { + team: ITeam; +} + +interface State { + isAdding: boolean; + newGroupId?: string; +} + +const headerTooltip = `Sync LDAP or OAuth groups with your Grafana teams.`; + +@observer +export class TeamGroupSync extends React.Component { + constructor(props) { + super(props); + this.state = { isAdding: false, newGroupId: '' }; + } + + componentDidMount() { + this.props.team.loadGroups(); + } + + renderGroup(group: ITeamGroup) { + return ( + + {group.groupId} + + this.onRemoveGroup(group)}> + + + + + ); + } + + onToggleAdding = () => { + this.setState({ isAdding: !this.state.isAdding }); + }; + + onNewGroupIdChanged = evt => { + this.setState({ newGroupId: evt.target.value }); + }; + + onAddGroup = () => { + this.props.team.addGroup(this.state.newGroupId); + this.setState({ isAdding: false, newGroupId: '' }); + }; + + onRemoveGroup = (group: ITeamGroup) => { + this.props.team.removeGroup(group.groupId); + }; + + isNewGroupValid() { + return this.state.newGroupId.length > 1; + } + + render() { + const { isAdding, newGroupId } = this.state; + const groups = this.props.team.groups.values(); + + return ( +
+
+

External group sync

+ + + +
+ {groups.length > 0 && ( + + )} +
+ + +
+ +
Add External Group
+
+
+ +
+ +
+ +
+
+
+
+ + {groups.length === 0 && + !isAdding && ( +
+
There are no external groups to sync with
+ +
+ {headerTooltip} + + Learn more + +
+
+ )} + + {groups.length > 0 && ( +
+ + + + + + + {groups.map(group => this.renderGroup(group))} +
External Group ID +
+
+ )} +
+ ); + } +} + +export default hot(module)(TeamGroupSync); diff --git a/public/app/containers/Teams/TeamList.tsx b/public/app/containers/Teams/TeamList.tsx new file mode 100644 index 00000000000..4429764b1cc --- /dev/null +++ b/public/app/containers/Teams/TeamList.tsx @@ -0,0 +1,125 @@ +import React from 'react'; +import { hot } from 'react-hot-loader'; +import { inject, observer } from 'mobx-react'; +import PageHeader from 'app/core/components/PageHeader/PageHeader'; +import { NavStore } from 'app/stores/NavStore/NavStore'; +import { TeamsStore, ITeam } from 'app/stores/TeamsStore/TeamsStore'; +import { BackendSrv } from 'app/core/services/backend_srv'; +import appEvents from 'app/core/app_events'; + +interface Props { + nav: typeof NavStore.Type; + teams: typeof TeamsStore.Type; + backendSrv: BackendSrv; +} + +@inject('nav', 'teams') +@observer +export class TeamList extends React.Component { + constructor(props) { + super(props); + + this.props.nav.load('cfg', 'teams'); + this.fetchTeams(); + } + + fetchTeams() { + this.props.teams.loadTeams(); + } + + deleteTeam(team: ITeam) { + appEvents.emit('confirm-modal', { + title: 'Delete', + text: 'Are you sure you want to delete Team ' + team.name + '?', + yesText: 'Delete', + icon: 'fa-warning', + onConfirm: () => { + this.deleteTeamConfirmed(team); + }, + }); + } + + deleteTeamConfirmed(team) { + this.props.backendSrv.delete('/api/teams/' + team.id).then(this.fetchTeams.bind(this)); + } + + onSearchQueryChange = evt => { + this.props.teams.setSearchQuery(evt.target.value); + }; + + renderTeamMember(team: ITeam): JSX.Element { + let teamUrl = `org/teams/edit/${team.id}`; + + return ( + + + + + + + + {team.name} + + + {team.email} + + + {team.memberCount} + + + this.deleteTeam(team)} className="btn btn-danger btn-small"> + + + + + ); + } + + render() { + const { nav, teams } = this.props; + return ( +
+ +
+
+
+ +
+ + + +
+ + + + + + + + + {teams.filteredTeams.map(team => this.renderTeamMember(team))} +
+ NameEmailMembers +
+
+
+
+ ); + } +} + +export default hot(module)(TeamList); diff --git a/public/app/containers/Teams/TeamMembers.tsx b/public/app/containers/Teams/TeamMembers.tsx new file mode 100644 index 00000000000..0d0762469a0 --- /dev/null +++ b/public/app/containers/Teams/TeamMembers.tsx @@ -0,0 +1,144 @@ +import React from 'react'; +import { hot } from 'react-hot-loader'; +import { observer } from 'mobx-react'; +import { ITeam, ITeamMember } from 'app/stores/TeamsStore/TeamsStore'; +import appEvents from 'app/core/app_events'; +import SlideDown from 'app/core/components/Animations/SlideDown'; +import { UserPicker, User } from 'app/core/components/Picker/UserPicker'; + +interface Props { + team: ITeam; +} + +interface State { + isAdding: boolean; + newTeamMember?: User; +} + +@observer +export class TeamMembers extends React.Component { + constructor(props) { + super(props); + this.state = { isAdding: false, newTeamMember: null }; + } + + componentDidMount() { + this.props.team.loadMembers(); + } + + onSearchQueryChange = evt => { + this.props.team.setSearchQuery(evt.target.value); + }; + + removeMember(member: ITeamMember) { + appEvents.emit('confirm-modal', { + title: 'Remove Member', + text: 'Are you sure you want to remove ' + member.login + ' from this group?', + yesText: 'Remove', + icon: 'fa-warning', + onConfirm: () => { + this.removeMemberConfirmed(member); + }, + }); + } + + removeMemberConfirmed(member: ITeamMember) { + this.props.team.removeMember(member); + } + + renderMember(member: ITeamMember) { + return ( + + + + + {member.login} + {member.email} + + this.removeMember(member)} className="btn btn-danger btn-mini"> + + + + + ); + } + + onToggleAdding = () => { + this.setState({ isAdding: !this.state.isAdding }); + }; + + onUserSelected = (user: User) => { + this.setState({ newTeamMember: user }); + }; + + onAddUserToTeam = async () => { + await this.props.team.addMember(this.state.newTeamMember.id); + await this.props.team.loadMembers(); + this.setState({ newTeamMember: null }); + }; + + render() { + const { newTeamMember, isAdding } = this.state; + const members = this.props.team.members.values(); + const newTeamMemberValue = newTeamMember && newTeamMember.id.toString(); + + return ( +
+
+
+ +
+ +
+ + +
+ + +
+ +
Add Team Member
+
+ + + {this.state.newTeamMember && ( + + )} +
+
+
+ +
+ + + + + + + + {members.map(member => this.renderMember(member))} +
+ NameEmail +
+
+
+ ); + } +} + +export default hot(module)(TeamMembers); diff --git a/public/app/containers/Teams/TeamPages.tsx b/public/app/containers/Teams/TeamPages.tsx new file mode 100644 index 00000000000..500a7cbe5e8 --- /dev/null +++ b/public/app/containers/Teams/TeamPages.tsx @@ -0,0 +1,77 @@ +import React from 'react'; +import _ from 'lodash'; +import { hot } from 'react-hot-loader'; +import { inject, observer } from 'mobx-react'; +import config from 'app/core/config'; +import PageHeader from 'app/core/components/PageHeader/PageHeader'; +import { NavStore } from 'app/stores/NavStore/NavStore'; +import { TeamsStore, ITeam } from 'app/stores/TeamsStore/TeamsStore'; +import { ViewStore } from 'app/stores/ViewStore/ViewStore'; +import TeamMembers from './TeamMembers'; +import TeamSettings from './TeamSettings'; +import TeamGroupSync from './TeamGroupSync'; + +interface Props { + nav: typeof NavStore.Type; + teams: typeof TeamsStore.Type; + view: typeof ViewStore.Type; +} + +@inject('nav', 'teams', 'view') +@observer +export class TeamPages extends React.Component { + isSyncEnabled: boolean; + currentPage: string; + + constructor(props) { + super(props); + + this.isSyncEnabled = config.buildInfo.isEnterprise; + this.currentPage = this.getCurrentPage(); + + this.loadTeam(); + } + + async loadTeam() { + const { teams, nav, view } = this.props; + + await teams.loadById(view.routeParams.get('id')); + + nav.initTeamPage(this.getCurrentTeam(), this.currentPage, this.isSyncEnabled); + } + + getCurrentTeam(): ITeam { + const { teams, view } = this.props; + return teams.map.get(view.routeParams.get('id')); + } + + getCurrentPage() { + const pages = ['members', 'settings', 'groupsync']; + const currentPage = this.props.view.routeParams.get('page'); + return _.includes(pages, currentPage) ? currentPage : pages[0]; + } + + render() { + const { nav } = this.props; + const currentTeam = this.getCurrentTeam(); + + if (!nav.main) { + return null; + } + + return ( +
+ + {currentTeam && ( +
+ {this.currentPage === 'members' && } + {this.currentPage === 'settings' && } + {this.currentPage === 'groupsync' && this.isSyncEnabled && } +
+ )} +
+ ); + } +} + +export default hot(module)(TeamPages); diff --git a/public/app/containers/Teams/TeamSettings.tsx b/public/app/containers/Teams/TeamSettings.tsx new file mode 100644 index 00000000000..142088a5d1e --- /dev/null +++ b/public/app/containers/Teams/TeamSettings.tsx @@ -0,0 +1,69 @@ +import React from 'react'; +import { hot } from 'react-hot-loader'; +import { observer } from 'mobx-react'; +import { ITeam } from 'app/stores/TeamsStore/TeamsStore'; +import { Label } from 'app/core/components/Forms/Forms'; + +interface Props { + team: ITeam; +} + +@observer +export class TeamSettings extends React.Component { + constructor(props) { + super(props); + } + + onChangeName = evt => { + this.props.team.setName(evt.target.value); + }; + + onChangeEmail = evt => { + this.props.team.setEmail(evt.target.value); + }; + + onUpdate = evt => { + evt.preventDefault(); + this.props.team.update(); + }; + + render() { + return ( +
+

Team Settings

+
+
+ + +
+
+ + +
+ +
+ +
+
+
+ ); + } +} + +export default hot(module)(TeamSettings); diff --git a/public/app/core/angular_wrappers.ts b/public/app/core/angular_wrappers.ts index ace0eb00b07..a4439509f8e 100644 --- a/public/app/core/angular_wrappers.ts +++ b/public/app/core/angular_wrappers.ts @@ -5,7 +5,6 @@ import EmptyListCTA from './components/EmptyListCTA/EmptyListCTA'; import LoginBackground from './components/Login/LoginBackground'; import { SearchResult } from './components/search/SearchResult'; import { TagFilter } from './components/TagFilter/TagFilter'; -import UserPicker from './components/Picker/UserPicker'; import DashboardPermissions from './components/Permissions/DashboardPermissions'; export function registerAngularDirectives() { @@ -19,6 +18,5 @@ export function registerAngularDirectives() { ['onSelect', { watchDepth: 'reference' }], ['tagOptions', { watchDepth: 'reference' }], ]); - react2AngularDirective('selectUserPicker', UserPicker, ['backendSrv', 'handlePicked']); react2AngularDirective('dashboardPermissions', DashboardPermissions, ['backendSrv', 'dashboardId', 'folder']); } diff --git a/public/app/core/components/Forms/Forms.tsx b/public/app/core/components/Forms/Forms.tsx new file mode 100644 index 00000000000..4b74d48ba08 --- /dev/null +++ b/public/app/core/components/Forms/Forms.tsx @@ -0,0 +1,21 @@ +import React, { SFC, ReactNode } from 'react'; +import Tooltip from '../Tooltip/Tooltip'; + +interface Props { + tooltip?: string; + for?: string; + children: ReactNode; +} + +export const Label: SFC = props => { + return ( + + {props.children} + {props.tooltip && ( + + + + )} + + ); +}; diff --git a/public/app/core/components/Permissions/AddPermissions.jest.tsx b/public/app/core/components/Permissions/AddPermissions.jest.tsx index fe97c4c7e62..513a22ddea4 100644 --- a/public/app/core/components/Permissions/AddPermissions.jest.tsx +++ b/public/app/core/components/Permissions/AddPermissions.jest.tsx @@ -1,32 +1,32 @@ -import React from 'react'; +import React from 'react'; +import { shallow } from 'enzyme'; import AddPermissions from './AddPermissions'; import { RootStore } from 'app/stores/RootStore/RootStore'; -import { backendSrv } from 'test/mocks/common'; -import { shallow } from 'enzyme'; +import { getBackendSrv } from 'app/core/services/backend_srv'; + +jest.mock('app/core/services/backend_srv', () => ({ + getBackendSrv: () => { + return { + get: () => { + return Promise.resolve([ + { id: 2, dashboardId: 1, role: 'Viewer', permission: 1, permissionName: 'View' }, + { id: 3, dashboardId: 1, role: 'Editor', permission: 1, permissionName: 'Edit' }, + ]); + }, + post: jest.fn(() => Promise.resolve({})), + }; + }, +})); describe('AddPermissions', () => { let wrapper; let store; let instance; + let backendSrv: any = getBackendSrv(); beforeAll(() => { - backendSrv.get.mockReturnValue( - Promise.resolve([ - { id: 2, dashboardId: 1, role: 'Viewer', permission: 1, permissionName: 'View' }, - { id: 3, dashboardId: 1, role: 'Editor', permission: 1, permissionName: 'Edit' }, - ]) - ); - - backendSrv.post = jest.fn(() => Promise.resolve({})); - - store = RootStore.create( - {}, - { - backendSrv: backendSrv, - } - ); - - wrapper = shallow(); + store = RootStore.create({}, { backendSrv: backendSrv }); + wrapper = shallow(); instance = wrapper.instance(); return store.permissions.load(1, true, false); }); @@ -43,8 +43,8 @@ describe('AddPermissions', () => { login: 'user2', }; - instance.typeChanged(evt); - instance.userPicked(userItem); + instance.onTypeChanged(evt); + instance.onUserSelected(userItem); 
wrapper.update(); @@ -70,8 +70,8 @@ describe('AddPermissions', () => { name: 'ug1', }; - instance.typeChanged(evt); - instance.teamPicked(teamItem); + instance.onTypeChanged(evt); + instance.onTeamSelected(teamItem); wrapper.update(); diff --git a/public/app/core/components/Permissions/AddPermissions.tsx b/public/app/core/components/Permissions/AddPermissions.tsx index 4dcd07ffb48..289e27aa731 100644 --- a/public/app/core/components/Permissions/AddPermissions.tsx +++ b/public/app/core/components/Permissions/AddPermissions.tsx @@ -1,24 +1,19 @@ -import React, { Component } from 'react'; +import React, { Component } from 'react'; import { observer } from 'mobx-react'; import { aclTypes } from 'app/stores/PermissionsStore/PermissionsStore'; -import UserPicker, { User } from 'app/core/components/Picker/UserPicker'; -import TeamPicker, { Team } from 'app/core/components/Picker/TeamPicker'; +import { UserPicker, User } from 'app/core/components/Picker/UserPicker'; +import { TeamPicker, Team } from 'app/core/components/Picker/TeamPicker'; import DescriptionPicker, { OptionWithDescription } from 'app/core/components/Picker/DescriptionPicker'; import { permissionOptions } from 'app/stores/PermissionsStore/PermissionsStore'; -export interface IProps { +export interface Props { permissions: any; - backendSrv: any; } + @observer -class AddPermissions extends Component { +class AddPermissions extends Component { constructor(props) { super(props); - this.userPicked = this.userPicked.bind(this); - this.teamPicked = this.teamPicked.bind(this); - this.permissionPicked = this.permissionPicked.bind(this); - this.typeChanged = this.typeChanged.bind(this); - this.handleSubmit = this.handleSubmit.bind(this); } componentWillMount() { @@ -26,49 +21,49 @@ class AddPermissions extends Component { permissions.resetNewType(); } - typeChanged(evt) { + onTypeChanged = evt => { const { value } = evt.target; const { permissions } = this.props; permissions.setNewType(value); - } + }; - userPicked(user: User) { + onUserSelected = (user: User) => { const { permissions } = this.props; if (!user) { permissions.newItem.setUser(null, null); return; } return permissions.newItem.setUser(user.id, user.login, user.avatarUrl); - } + }; - teamPicked(team: Team) { + onTeamSelected = (team: Team) => { const { permissions } = this.props; if (!team) { permissions.newItem.setTeam(null, null); return; } return permissions.newItem.setTeam(team.id, team.name, team.avatarUrl); - } + }; - permissionPicked(permission: OptionWithDescription) { + onPermissionChanged = (permission: OptionWithDescription) => { const { permissions } = this.props; return permissions.newItem.setPermission(permission.value); - } + }; resetNewType() { const { permissions } = this.props; return permissions.resetNewType(); } - handleSubmit(evt) { + onSubmit = evt => { evt.preventDefault(); const { permissions } = this.props; permissions.addStoreItem(); - } + }; render() { - const { permissions, backendSrv } = this.props; + const { permissions } = this.props; const newItem = permissions.newItem; const pickerClassName = 'width-20'; @@ -79,12 +74,12 @@ class AddPermissions extends Component { -
-
Add Permission For
+ +
Add Permission For
- {aclTypes.map((option, idx) => { return (
- + {
{}} + onSelected={() => {}} value={item.permission} disabled={true} className={'gf-form-input--form-dropdown-right'} diff --git a/public/app/core/components/Permissions/PermissionsListItem.tsx b/public/app/core/components/Permissions/PermissionsListItem.tsx index b0158525d52..a17aa8c04df 100644 --- a/public/app/core/components/Permissions/PermissionsListItem.tsx +++ b/public/app/core/components/Permissions/PermissionsListItem.tsx @@ -68,7 +68,7 @@ export default observer(({ item, removeItem, permissionChanged, itemIndex, folde
void; + onSelected: (permission) => void; value: number; disabled: boolean; className?: string; @@ -16,14 +16,14 @@ export interface OptionWithDescription { description: string; } -class DescriptionPicker extends Component { +class DescriptionPicker extends Component { constructor(props) { super(props); this.state = {}; } render() { - const { optionsWithDesc, handlePicked, value, disabled, className } = this.props; + const { optionsWithDesc, onSelected, value, disabled, className } = this.props; return (
@@ -34,7 +34,7 @@ class DescriptionPicker extends Component { clearable={false} labelKey="label" options={optionsWithDesc} - onChange={handlePicked} + onChange={onSelected} className={`width-7 gf-form-input gf-form-input--form-dropdown ${className || ''}`} optionComponent={DescriptionOption} placeholder="Choose" diff --git a/public/app/core/components/Picker/TeamPicker.jest.tsx b/public/app/core/components/Picker/TeamPicker.jest.tsx index 20b7620e0ac..3db9f7bb4eb 100644 --- a/public/app/core/components/Picker/TeamPicker.jest.tsx +++ b/public/app/core/components/Picker/TeamPicker.jest.tsx @@ -1,19 +1,23 @@ -import React from 'react'; +import React from 'react'; import renderer from 'react-test-renderer'; -import TeamPicker from './TeamPicker'; +import { TeamPicker } from './TeamPicker'; -const model = { - backendSrv: { - get: () => { - return new Promise((resolve, reject) => {}); - }, +jest.mock('app/core/services/backend_srv', () => ({ + getBackendSrv: () => { + return { + get: () => { + return Promise.resolve([]); + }, + }; }, - handlePicked: () => {}, -}; +})); describe('TeamPicker', () => { it('renders correctly', () => { - const tree = renderer.create().toJSON(); + const props = { + onSelected: () => {}, + }; + const tree = renderer.create().toJSON(); expect(tree).toMatchSnapshot(); }); }); diff --git a/public/app/core/components/Picker/TeamPicker.tsx b/public/app/core/components/Picker/TeamPicker.tsx index 2dfff1850dd..04f108ff8da 100644 --- a/public/app/core/components/Picker/TeamPicker.tsx +++ b/public/app/core/components/Picker/TeamPicker.tsx @@ -1,18 +1,19 @@ -import React, { Component } from 'react'; +import React, { Component } from 'react'; import Select from 'react-select'; import PickerOption from './PickerOption'; -import withPicker from './withPicker'; import { debounce } from 'lodash'; +import { getBackendSrv } from 'app/core/services/backend_srv'; -export interface IProps { - backendSrv: any; - isLoading: boolean; - toggleLoading: any; - handlePicked: (user) => void; +export interface Props { + onSelected: (team: Team) => void; value?: string; className?: string; } +export interface State { + isLoading; +} + export interface Team { id: number; label: string; @@ -20,13 +21,12 @@ export interface Team { avatarUrl: string; } -class TeamPicker extends Component { +export class TeamPicker extends Component { debouncedSearch: any; - backendSrv: any; constructor(props) { super(props); - this.state = {}; + this.state = { isLoading: false }; this.search = this.search.bind(this); this.debouncedSearch = debounce(this.search, 300, { @@ -36,9 +36,9 @@ class TeamPicker extends Component { } search(query?: string) { - const { toggleLoading, backendSrv } = this.props; + const backendSrv = getBackendSrv(); + this.setState({ isLoading: true }); - toggleLoading(true); return backendSrv.get(`/api/teams/search?perpage=10&page=1&query=${query}`).then(result => { const teams = result.teams.map(team => { return { @@ -49,18 +49,18 @@ class TeamPicker extends Component { }; }); - toggleLoading(false); + this.setState({ isLoading: false }); return { options: teams }; }); } render() { - const AsyncComponent = this.state.creatable ? Select.AsyncCreatable : Select.Async; - const { isLoading, handlePicked, value, className } = this.props; + const { onSelected, value, className } = this.props; + const { isLoading } = this.state; return (
- { loadOptions={this.debouncedSearch} loadingPlaceholder="Loading..." noResultsText="No teams found" - onChange={handlePicked} + onChange={onSelected} className={`gf-form-input gf-form-input--form-dropdown ${className || ''}`} optionComponent={PickerOption} - placeholder="Choose" + placeholder="Select a team" value={value} autosize={true} /> @@ -80,5 +80,3 @@ class TeamPicker extends Component { ); } } - -export default withPicker(TeamPicker); diff --git a/public/app/core/components/Picker/UserPicker.jest.tsx b/public/app/core/components/Picker/UserPicker.jest.tsx index 756fa2d9801..054ca643700 100644 --- a/public/app/core/components/Picker/UserPicker.jest.tsx +++ b/public/app/core/components/Picker/UserPicker.jest.tsx @@ -1,19 +1,20 @@ -import React from 'react'; +import React from 'react'; import renderer from 'react-test-renderer'; -import UserPicker from './UserPicker'; +import { UserPicker } from './UserPicker'; -const model = { - backendSrv: { - get: () => { - return new Promise((resolve, reject) => {}); - }, +jest.mock('app/core/services/backend_srv', () => ({ + getBackendSrv: () => { + return { + get: () => { + return Promise.resolve([]); + }, + }; }, - handlePicked: () => {}, -}; +})); describe('UserPicker', () => { it('renders correctly', () => { - const tree = renderer.create().toJSON(); + const tree = renderer.create( {}} />).toJSON(); expect(tree).toMatchSnapshot(); }); }); diff --git a/public/app/core/components/Picker/UserPicker.tsx b/public/app/core/components/Picker/UserPicker.tsx index 77bf6c1fe15..e50513c44e1 100644 --- a/public/app/core/components/Picker/UserPicker.tsx +++ b/public/app/core/components/Picker/UserPicker.tsx @@ -1,18 +1,19 @@ import React, { Component } from 'react'; import Select from 'react-select'; import PickerOption from './PickerOption'; -import withPicker from './withPicker'; import { debounce } from 'lodash'; +import { getBackendSrv } from 'app/core/services/backend_srv'; -export interface IProps { - backendSrv: any; - isLoading: boolean; - toggleLoading: any; - handlePicked: (user) => void; +export interface Props { + onSelected: (user: User) => void; value?: string; className?: string; } +export interface State { + isLoading: boolean; +} + export interface User { id: number; label: string; @@ -20,13 +21,12 @@ export interface User { login: string; } -class UserPicker extends Component { +export class UserPicker extends Component { debouncedSearch: any; - backendSrv: any; constructor(props) { super(props); - this.state = {}; + this.state = { isLoading: false }; this.search = this.search.bind(this); this.debouncedSearch = debounce(this.search, 300, { @@ -36,29 +36,34 @@ class UserPicker extends Component { } search(query?: string) { - const { toggleLoading, backendSrv } = this.props; + const backendSrv = getBackendSrv(); - toggleLoading(true); - return backendSrv.get(`/api/org/users?query=${query}&limit=10`).then(result => { - const users = result.map(user => { + this.setState({ isLoading: true }); + + return backendSrv + .get(`/api/org/users?query=${query}&limit=10`) + .then(result => { return { - id: user.userId, - label: `${user.login} - ${user.email}`, - avatarUrl: user.avatarUrl, - login: user.login, + options: result.map(user => ({ + id: user.userId, + label: `${user.login} - ${user.email}`, + avatarUrl: user.avatarUrl, + login: user.login, + })), }; + }) + .finally(() => { + this.setState({ isLoading: false }); }); - toggleLoading(false); - return { options: users }; - }); } render() { - const AsyncComponent = this.state.creatable ? 
Select.AsyncCreatable : Select.Async; - const { isLoading, handlePicked, value, className } = this.props; + const { value, className } = this.props; + const { isLoading } = this.state; + return (
- { loadOptions={this.debouncedSearch} loadingPlaceholder="Loading..." noResultsText="No users found" - onChange={handlePicked} + onChange={this.props.onSelected} className={`gf-form-input gf-form-input--form-dropdown ${className || ''}`} optionComponent={PickerOption} - placeholder="Choose" + placeholder="Select user" value={value} autosize={true} /> @@ -78,5 +83,3 @@ class UserPicker extends Component { ); } } - -export default withPicker(UserPicker); diff --git a/public/app/core/components/Picker/withPicker.tsx b/public/app/core/components/Picker/withPicker.tsx deleted file mode 100644 index 838ef927c30..00000000000 --- a/public/app/core/components/Picker/withPicker.tsx +++ /dev/null @@ -1,34 +0,0 @@ -import React, { Component } from 'react'; - -export interface IProps { - backendSrv: any; - handlePicked: (data) => void; - value?: string; - className?: string; -} - -export default function withPicker(WrappedComponent) { - return class WithPicker extends Component { - constructor(props) { - super(props); - this.toggleLoading = this.toggleLoading.bind(this); - - this.state = { - isLoading: false, - }; - } - - toggleLoading(isLoading) { - this.setState(prevState => { - return { - ...prevState, - isLoading: isLoading, - }; - }); - } - - render() { - return ; - } - }; -} diff --git a/public/app/core/components/grafana_app.ts b/public/app/core/components/grafana_app.ts index fd2e32db3a7..bd6b6975006 100644 --- a/public/app/core/components/grafana_app.ts +++ b/public/app/core/components/grafana_app.ts @@ -8,7 +8,7 @@ import appEvents from 'app/core/app_events'; import Drop from 'tether-drop'; import { createStore } from 'app/stores/store'; import colors from 'app/core/utils/colors'; -import { BackendSrv } from 'app/core/services/backend_srv'; +import { BackendSrv, setBackendSrv } from 'app/core/services/backend_srv'; import { DatasourceSrv } from 'app/features/plugins/datasource_srv'; export class GrafanaCtrl { @@ -24,6 +24,8 @@ export class GrafanaCtrl { backendSrv: BackendSrv, datasourceSrv: DatasourceSrv ) { + // sets singleston instances for angular services so react components can access them + setBackendSrv(backendSrv); createStore({ backendSrv, datasourceSrv }); $scope.init = function() { diff --git a/public/app/core/components/team_picker.ts b/public/app/core/components/team_picker.ts deleted file mode 100644 index 228767a76c4..00000000000 --- a/public/app/core/components/team_picker.ts +++ /dev/null @@ -1,64 +0,0 @@ -import coreModule from 'app/core/core_module'; -import _ from 'lodash'; - -const template = ` - -`; -export class TeamPickerCtrl { - group: any; - teamPicked: any; - debouncedSearchGroups: any; - - /** @ngInject */ - constructor(private backendSrv) { - this.debouncedSearchGroups = _.debounce(this.searchGroups, 500, { - leading: true, - trailing: false, - }); - this.reset(); - } - - reset() { - this.group = { text: 'Choose', value: null }; - } - - searchGroups(query: string) { - return Promise.resolve( - this.backendSrv.get('/api/teams/search?perpage=10&page=1&query=' + query).then(result => { - return _.map(result.teams, ug => { - return { text: ug.name, value: ug }; - }); - }) - ); - } - - onChange(option) { - this.teamPicked({ $group: option.value }); - } -} - -export function teamPicker() { - return { - restrict: 'E', - template: template, - controller: TeamPickerCtrl, - bindToController: true, - controllerAs: 'ctrl', - scope: { - teamPicked: '&', - }, - link: function(scope, elem, attrs, ctrl) { - scope.$on('team-picker-reset', () => { - ctrl.reset(); - }); - }, - }; 
-} - -coreModule.directive('teamPicker', teamPicker); diff --git a/public/app/core/components/user_picker.ts b/public/app/core/components/user_picker.ts deleted file mode 100644 index 606ded09885..00000000000 --- a/public/app/core/components/user_picker.ts +++ /dev/null @@ -1,71 +0,0 @@ -import coreModule from 'app/core/core_module'; -import _ from 'lodash'; - -const template = ` - -`; -export class UserPickerCtrl { - user: any; - debouncedSearchUsers: any; - userPicked: any; - - /** @ngInject */ - constructor(private backendSrv) { - this.reset(); - this.debouncedSearchUsers = _.debounce(this.searchUsers, 500, { - leading: true, - trailing: false, - }); - } - - searchUsers(query: string) { - return Promise.resolve( - this.backendSrv.get('/api/users/search?perpage=10&page=1&query=' + query).then(result => { - return _.map(result.users, user => { - return { text: user.login + ' - ' + user.email, value: user }; - }); - }) - ); - } - - onChange(option) { - this.userPicked({ $user: option.value }); - } - - reset() { - this.user = { text: 'Choose', value: null }; - } -} - -export interface User { - id: number; - name: string; - login: string; - email: string; -} - -export function userPicker() { - return { - restrict: 'E', - template: template, - controller: UserPickerCtrl, - bindToController: true, - controllerAs: 'ctrl', - scope: { - userPicked: '&', - }, - link: function(scope, elem, attrs, ctrl) { - scope.$on('user-picker-reset', () => { - ctrl.reset(); - }); - }, - }; -} - -coreModule.directive('userPicker', userPicker); diff --git a/public/app/core/core.ts b/public/app/core/core.ts index fb7021fe883..d6088283f3b 100644 --- a/public/app/core/core.ts +++ b/public/app/core/core.ts @@ -44,8 +44,6 @@ import { KeybindingSrv } from './services/keybindingSrv'; import { helpModal } from './components/help/help'; import { JsonExplorer } from './components/json_explorer/json_explorer'; import { NavModelSrv, NavModel } from './nav_model_srv'; -import { userPicker } from './components/user_picker'; -import { teamPicker } from './components/team_picker'; import { geminiScrollbar } from './components/scroll/scroll'; import { pageScrollbar } from './components/scroll/page_scroll'; import { gfPageDirective } from './components/gf_page'; @@ -83,8 +81,6 @@ export { JsonExplorer, NavModelSrv, NavModel, - userPicker, - teamPicker, geminiScrollbar, pageScrollbar, gfPageDirective, diff --git a/public/app/core/services/backend_srv.ts b/public/app/core/services/backend_srv.ts index d582b6a3b18..1aeeedef4dd 100644 --- a/public/app/core/services/backend_srv.ts +++ b/public/app/core/services/backend_srv.ts @@ -368,3 +368,17 @@ export class BackendSrv { } coreModule.service('backendSrv', BackendSrv); + +// +// Code below is to expore the service to react components +// + +let singletonInstance: BackendSrv; + +export function setBackendSrv(instance: BackendSrv) { + singletonInstance = instance; +} + +export function getBackendSrv(): BackendSrv { + return singletonInstance; +} diff --git a/public/app/features/org/all.ts b/public/app/features/org/all.ts index 97e01c53fe3..8872450e3ab 100644 --- a/public/app/features/org/all.ts +++ b/public/app/features/org/all.ts @@ -5,8 +5,6 @@ import './select_org_ctrl'; import './change_password_ctrl'; import './new_org_ctrl'; import './user_invite_ctrl'; -import './teams_ctrl'; -import './team_details_ctrl'; import './create_team_ctrl'; import './org_api_keys_ctrl'; import './org_details_ctrl'; diff --git a/public/app/features/org/partials/team_details.html 
b/public/app/features/org/partials/team_details.html deleted file mode 100644 index 3ce851d5546..00000000000 --- a/public/app/features/org/partials/team_details.html +++ /dev/null @@ -1,105 +0,0 @@ - - -
[Deleted Angular template markup, garbled in this extract. The team_details.html partial rendered: a "Team Details" form with a Name field and an optional Email field ("This is optional and is primarily used for allowing custom team avatars"); a "Team Members" section with an add-member picker and a Username/Email table ({{member.login}}, {{member.email}}) plus the empty state "This team has no members yet."; and a "Mappings to external groups" section with an add-group input and a Group table ({{group.groupId}}) plus the empty state "This team has no associated groups yet.".]
-
- - This team has no associated groups yet. - -
- -
diff --git a/public/app/features/org/partials/teams.html b/public/app/features/org/partials/teams.html deleted file mode 100755 index e15a15cf573..00000000000 --- a/public/app/features/org/partials/teams.html +++ /dev/null @@ -1,68 +0,0 @@ - - -
[Deleted Angular template markup, garbled in this extract. The teams.html partial rendered the team list page: a toolbar with an "Add Team" button, a table with Name, Email and Members columns, simple pagination, and the empty state "No Teams found.".]
diff --git a/public/app/features/org/specs/team_details_ctrl.jest.ts b/public/app/features/org/specs/team_details_ctrl.jest.ts deleted file mode 100644 index c636de7ec56..00000000000 --- a/public/app/features/org/specs/team_details_ctrl.jest.ts +++ /dev/null @@ -1,42 +0,0 @@ -import '../team_details_ctrl'; -import TeamDetailsCtrl from '../team_details_ctrl'; - -describe('TeamDetailsCtrl', () => { - var backendSrv = { - searchUsers: jest.fn(() => Promise.resolve([])), - get: jest.fn(() => Promise.resolve([])), - post: jest.fn(() => Promise.resolve([])), - }; - - //Team id - var routeParams = { - id: 1, - }; - - var navModelSrv = { - getNav: jest.fn(), - }; - - var teamDetailsCtrl = new TeamDetailsCtrl({ $broadcast: jest.fn() }, backendSrv, routeParams, navModelSrv); - - describe('when user is chosen to be added to team', () => { - beforeEach(() => { - teamDetailsCtrl = new TeamDetailsCtrl({ $broadcast: jest.fn() }, backendSrv, routeParams, navModelSrv); - const userItem = { - id: 2, - login: 'user2', - }; - teamDetailsCtrl.userPicked(userItem); - }); - - it('should parse the result and save to db', () => { - expect(backendSrv.post.mock.calls[0][0]).toBe('/api/teams/1/members'); - expect(backendSrv.post.mock.calls[0][1].userId).toBe(2); - }); - - it('should refresh the list after saving.', () => { - expect(backendSrv.get.mock.calls[0][0]).toBe('/api/teams/1'); - expect(backendSrv.get.mock.calls[1][0]).toBe('/api/teams/1/members'); - }); - }); -}); diff --git a/public/app/features/org/team_details_ctrl.ts b/public/app/features/org/team_details_ctrl.ts deleted file mode 100644 index 6e0fddafa9d..00000000000 --- a/public/app/features/org/team_details_ctrl.ts +++ /dev/null @@ -1,108 +0,0 @@ -import coreModule from 'app/core/core_module'; -import config from 'app/core/config'; - -export default class TeamDetailsCtrl { - team: Team; - teamMembers: User[] = []; - navModel: any; - teamGroups: TeamGroup[] = []; - newGroupId: string; - isMappingsEnabled: boolean; - - /** @ngInject **/ - constructor(private $scope, private backendSrv, private $routeParams, navModelSrv) { - this.navModel = navModelSrv.getNav('cfg', 'teams', 0); - this.userPicked = this.userPicked.bind(this); - this.get = this.get.bind(this); - this.newGroupId = ''; - this.isMappingsEnabled = config.buildInfo.isEnterprise; - this.get(); - } - - get() { - if (this.$routeParams && this.$routeParams.id) { - this.backendSrv.get(`/api/teams/${this.$routeParams.id}`).then(result => { - this.team = result; - }); - - this.backendSrv.get(`/api/teams/${this.$routeParams.id}/members`).then(result => { - this.teamMembers = result; - }); - - if (this.isMappingsEnabled) { - this.backendSrv.get(`/api/teams/${this.$routeParams.id}/groups`).then(result => { - this.teamGroups = result; - }); - } - } - } - - removeTeamMember(teamMember: TeamMember) { - this.$scope.appEvent('confirm-modal', { - title: 'Remove Member', - text: 'Are you sure you want to remove ' + teamMember.login + ' from this group?', - yesText: 'Remove', - icon: 'fa-warning', - onConfirm: () => { - this.removeMemberConfirmed(teamMember); - }, - }); - } - - removeMemberConfirmed(teamMember: TeamMember) { - this.backendSrv.delete(`/api/teams/${this.$routeParams.id}/members/${teamMember.userId}`).then(this.get); - } - - update() { - if (!this.$scope.teamDetailsForm.$valid) { - return; - } - - this.backendSrv.put('/api/teams/' + this.team.id, { - name: this.team.name, - email: this.team.email, - }); - } - - userPicked(user) { - this.backendSrv.post(`/api/teams/${this.$routeParams.id}/members`, 
{ userId: user.id }).then(() => { - this.$scope.$broadcast('user-picker-reset'); - this.get(); - }); - } - - addGroup() { - this.backendSrv.post(`/api/teams/${this.$routeParams.id}/groups`, { groupId: this.newGroupId }).then(() => { - this.get(); - }); - } - - removeGroup(group: TeamGroup) { - this.backendSrv.delete(`/api/teams/${this.$routeParams.id}/groups/${group.groupId}`).then(this.get); - } -} - -export interface TeamGroup { - groupId: string; -} - -export interface Team { - id: number; - name: string; - email: string; -} - -export interface User { - id: number; - name: string; - login: string; - email: string; -} - -export interface TeamMember { - userId: number; - name: string; - login: string; -} - -coreModule.controller('TeamDetailsCtrl', TeamDetailsCtrl); diff --git a/public/app/features/org/teams_ctrl.ts b/public/app/features/org/teams_ctrl.ts deleted file mode 100644 index 29317e73d3b..00000000000 --- a/public/app/features/org/teams_ctrl.ts +++ /dev/null @@ -1,66 +0,0 @@ -import coreModule from 'app/core/core_module'; -import appEvents from 'app/core/app_events'; - -export class TeamsCtrl { - teams: any; - pages = []; - perPage = 50; - page = 1; - totalPages: number; - showPaging = false; - query: any = ''; - navModel: any; - - /** @ngInject */ - constructor(private backendSrv, navModelSrv) { - this.navModel = navModelSrv.getNav('cfg', 'teams', 0); - this.get(); - } - - get() { - this.backendSrv - .get(`/api/teams/search?perpage=${this.perPage}&page=${this.page}&query=${this.query}`) - .then(result => { - this.teams = result.teams; - this.page = result.page; - this.perPage = result.perPage; - this.totalPages = Math.ceil(result.totalCount / result.perPage); - this.showPaging = this.totalPages > 1; - this.pages = []; - - for (var i = 1; i < this.totalPages + 1; i++) { - this.pages.push({ page: i, current: i === this.page }); - } - }); - } - - navigateToPage(page) { - this.page = page.page; - this.get(); - } - - deleteTeam(team) { - appEvents.emit('confirm-modal', { - title: 'Delete', - text: 'Are you sure you want to delete Team ' + team.name + '?', - yesText: 'Delete', - icon: 'fa-warning', - onConfirm: () => { - this.deleteTeamConfirmed(team); - }, - }); - } - - deleteTeamConfirmed(team) { - this.backendSrv.delete('/api/teams/' + team.id).then(this.get.bind(this)); - } - - openTeamModal() { - appEvents.emit('show-modal', { - templateHtml: '', - modalClass: 'modal--narrow', - }); - } -} - -coreModule.controller('TeamsCtrl', TeamsCtrl); diff --git a/public/app/routes/routes.ts b/public/app/routes/routes.ts index 568b3438b38..cd1aed549e0 100644 --- a/public/app/routes/routes.ts +++ b/public/app/routes/routes.ts @@ -5,6 +5,8 @@ import ServerStats from 'app/containers/ServerStats/ServerStats'; import AlertRuleList from 'app/containers/AlertRuleList/AlertRuleList'; import FolderSettings from 'app/containers/ManageDashboards/FolderSettings'; import FolderPermissions from 'app/containers/ManageDashboards/FolderPermissions'; +import TeamPages from 'app/containers/Teams/TeamPages'; +import TeamList from 'app/containers/Teams/TeamList'; /** @ngInject **/ export function setupAngularRoutes($routeProvider, $locationProvider) { @@ -140,19 +142,23 @@ export function setupAngularRoutes($routeProvider, $locationProvider) { controller: 'OrgApiKeysCtrl', }) .when('/org/teams', { - templateUrl: 'public/app/features/org/partials/teams.html', - controller: 'TeamsCtrl', - controllerAs: 'ctrl', + template: '', + resolve: { + roles: () => ['Editor', 'Admin'], + component: () => TeamList, + }, }) 
.when('/org/teams/new', { templateUrl: 'public/app/features/org/partials/create_team.html', controller: 'CreateTeamCtrl', controllerAs: 'ctrl', }) - .when('/org/teams/edit/:id', { - templateUrl: 'public/app/features/org/partials/team_details.html', - controller: 'TeamDetailsCtrl', - controllerAs: 'ctrl', + .when('/org/teams/edit/:id/:page?', { + template: '', + resolve: { + roles: () => ['Admin'], + component: () => TeamPages, + }, }) .when('/profile', { templateUrl: 'public/app/features/org/partials/profile.html', diff --git a/public/app/stores/NavStore/NavItem.ts b/public/app/stores/NavStore/NavItem.ts index 4521d4291aa..3e8a2a837b3 100644 --- a/public/app/stores/NavStore/NavItem.ts +++ b/public/app/stores/NavStore/NavItem.ts @@ -1,4 +1,4 @@ -import { types } from 'mobx-state-tree'; +import { types } from 'mobx-state-tree'; export const NavItem = types.model('NavItem', { id: types.identifier(types.string), @@ -8,6 +8,7 @@ export const NavItem = types.model('NavItem', { icon: types.optional(types.string, ''), img: types.optional(types.string, ''), active: types.optional(types.boolean, false), + hideFromTabs: types.optional(types.boolean, false), breadcrumbs: types.optional(types.array(types.late(() => Breadcrumb)), []), children: types.optional(types.array(types.late(() => NavItem)), []), }); diff --git a/public/app/stores/NavStore/NavStore.ts b/public/app/stores/NavStore/NavStore.ts index 86348c00487..c69c32befa8 100644 --- a/public/app/stores/NavStore/NavStore.ts +++ b/public/app/stores/NavStore/NavStore.ts @@ -1,6 +1,7 @@ import _ from 'lodash'; import { types, getEnv } from 'mobx-state-tree'; import { NavItem } from './NavItem'; +import { ITeam } from '../TeamsStore/TeamsStore'; export const NavStore = types .model('NavStore', { @@ -115,4 +116,43 @@ export const NavStore = types self.main = NavItem.create(main); }, + + initTeamPage(team: ITeam, tab: string, isSyncEnabled: boolean) { + let main = { + img: team.avatarUrl, + id: 'team-' + team.id, + subTitle: 'Manage members & settings', + url: '', + text: team.name, + breadcrumbs: [{ title: 'Teams', url: 'org/teams' }], + children: [ + { + active: tab === 'members', + icon: 'gicon gicon-team', + id: 'team-members', + text: 'Members', + url: `org/teams/edit/${team.id}/members`, + }, + { + active: tab === 'settings', + icon: 'fa fa-fw fa-sliders', + id: 'team-settings', + text: 'Settings', + url: `org/teams/edit/${team.id}/settings`, + }, + ], + }; + + if (isSyncEnabled) { + main.children.splice(1, 0, { + active: tab === 'groupsync', + icon: 'fa fa-fw fa-refresh', + id: 'team-settings', + text: 'External group sync', + url: `org/teams/edit/${team.id}/groupsync`, + }); + } + + self.main = NavItem.create(main); + }, })); diff --git a/public/app/stores/RootStore/RootStore.ts b/public/app/stores/RootStore/RootStore.ts index c3bfe75d59c..8a915d20ef1 100644 --- a/public/app/stores/RootStore/RootStore.ts +++ b/public/app/stores/RootStore/RootStore.ts @@ -6,6 +6,7 @@ import { AlertListStore } from './../AlertListStore/AlertListStore'; import { ViewStore } from './../ViewStore/ViewStore'; import { FolderStore } from './../FolderStore/FolderStore'; import { PermissionsStore } from './../PermissionsStore/PermissionsStore'; +import { TeamsStore } from './../TeamsStore/TeamsStore'; export const RootStore = types.model({ search: types.optional(SearchStore, { @@ -28,6 +29,9 @@ export const RootStore = types.model({ routeParams: {}, }), folder: types.optional(FolderStore, {}), + teams: types.optional(TeamsStore, { + map: {}, + }), }); type IRootStoreType 
= typeof RootStore.Type; diff --git a/public/app/stores/TeamsStore/TeamsStore.ts b/public/app/stores/TeamsStore/TeamsStore.ts new file mode 100644 index 00000000000..01cdca895d4 --- /dev/null +++ b/public/app/stores/TeamsStore/TeamsStore.ts @@ -0,0 +1,156 @@ +import { types, getEnv, flow } from 'mobx-state-tree'; + +export const TeamMember = types.model('TeamMember', { + userId: types.identifier(types.number), + teamId: types.number, + avatarUrl: types.string, + email: types.string, + login: types.string, +}); + +type TeamMemberType = typeof TeamMember.Type; +export interface ITeamMember extends TeamMemberType {} + +export const TeamGroup = types.model('TeamGroup', { + groupId: types.identifier(types.string), + teamId: types.number, +}); + +type TeamGroupType = typeof TeamGroup.Type; +export interface ITeamGroup extends TeamGroupType {} + +export const Team = types + .model('Team', { + id: types.identifier(types.number), + name: types.string, + avatarUrl: types.string, + email: types.string, + memberCount: types.number, + search: types.optional(types.string, ''), + members: types.optional(types.map(TeamMember), {}), + groups: types.optional(types.map(TeamGroup), {}), + }) + .views(self => ({ + get filteredMembers() { + let members = this.members.values(); + let regex = new RegExp(self.search, 'i'); + return members.filter(member => { + return regex.test(member.login) || regex.test(member.email); + }); + }, + })) + .actions(self => ({ + setName(name: string) { + self.name = name; + }, + + setEmail(email: string) { + self.email = email; + }, + + setSearchQuery(query: string) { + self.search = query; + }, + + update: flow(function* load() { + const backendSrv = getEnv(self).backendSrv; + + yield backendSrv.put(`/api/teams/${self.id}`, { + name: self.name, + email: self.email, + }); + }), + + loadMembers: flow(function* load() { + const backendSrv = getEnv(self).backendSrv; + const rsp = yield backendSrv.get(`/api/teams/${self.id}/members`); + self.members.clear(); + + for (let member of rsp) { + self.members.set(member.userId.toString(), TeamMember.create(member)); + } + }), + + removeMember: flow(function* load(member: ITeamMember) { + const backendSrv = getEnv(self).backendSrv; + yield backendSrv.delete(`/api/teams/${self.id}/members/${member.userId}`); + // remove from store map + self.members.delete(member.userId.toString()); + }), + + addMember: flow(function* load(userId: number) { + const backendSrv = getEnv(self).backendSrv; + yield backendSrv.post(`/api/teams/${self.id}/members`, { userId: userId }); + }), + + loadGroups: flow(function* load() { + const backendSrv = getEnv(self).backendSrv; + const rsp = yield backendSrv.get(`/api/teams/${self.id}/groups`); + self.groups.clear(); + + for (let group of rsp) { + self.groups.set(group.groupId, TeamGroup.create(group)); + } + }), + + addGroup: flow(function* load(groupId: string) { + const backendSrv = getEnv(self).backendSrv; + yield backendSrv.post(`/api/teams/${self.id}/groups`, { groupId: groupId }); + self.groups.set( + groupId, + TeamGroup.create({ + teamId: self.id, + groupId: groupId, + }) + ); + }), + + removeGroup: flow(function* load(groupId: string) { + const backendSrv = getEnv(self).backendSrv; + yield backendSrv.delete(`/api/teams/${self.id}/groups/${groupId}`); + self.groups.delete(groupId); + }), + })); + +type TeamType = typeof Team.Type; +export interface ITeam extends TeamType {} + +export const TeamsStore = types + .model('TeamsStore', { + map: types.map(Team), + search: types.optional(types.string, ''), + }) + 
.views(self => ({ + get filteredTeams() { + let teams = this.map.values(); + let regex = new RegExp(self.search, 'i'); + return teams.filter(team => { + return regex.test(team.name); + }); + }, + })) + .actions(self => ({ + loadTeams: flow(function* load() { + const backendSrv = getEnv(self).backendSrv; + const rsp = yield backendSrv.get('/api/teams/search/', { perpage: 50, page: 1 }); + self.map.clear(); + + for (let team of rsp.teams) { + self.map.set(team.id.toString(), Team.create(team)); + } + }), + + setSearchQuery(query: string) { + self.search = query; + }, + + loadById: flow(function* load(id: string) { + if (self.map.has(id)) { + return; + } + + const backendSrv = getEnv(self).backendSrv; + const team = yield backendSrv.get(`/api/teams/${id}`); + self.map.set(id, Team.create(team)); + }), + })); diff --git a/public/sass/components/_gf-form.scss b/public/sass/components/_gf-form.scss index 756d88ee935..0de386f3f68 100644 --- a/public/sass/components/_gf-form.scss +++ b/public/sass/components/_gf-form.scss @@ -403,9 +403,9 @@ select.gf-form-input ~ .gf-form-help-icon { .cta-form { position: relative; - padding: 1rem; + padding: 1.5rem; background-color: $empty-list-cta-bg; - margin-bottom: 1rem; + margin-bottom: 2rem; border-top: 3px solid $green; } diff --git a/public/test/jest-shim.ts b/public/test/jest-shim.ts index 80c4bb3d21b..dbf9ac4be50 100644 --- a/public/test/jest-shim.ts +++ b/public/test/jest-shim.ts @@ -1,6 +1,17 @@ declare var global: NodeJS.Global; -(global).requestAnimationFrame = (callback) => { +(global).requestAnimationFrame = callback => { setTimeout(callback, 0); }; +(Promise.prototype).finally = function(onFinally) { + return this.then( + /* onFulfilled */ + res => Promise.resolve(onFinally()).then(() => res), + /* onRejected */ + err => + Promise.resolve(onFinally()).then(() => { + throw err; + }) + ); +}; From 9b50c9038b7697abc4e72191c18326af976f2dc8 Mon Sep 17 00:00:00 2001 From: Mitsuhiro Tanda Date: Thu, 12 Jul 2018 03:23:38 +0900 Subject: [PATCH 017/105] skip backend request if extended statistics is invalid. (#12495) * check extended statistics pattern * check extended statistics pattern * Revert "check extended statistics pattern" This reverts commit 52c7b1a972636d5f5729e64ae5e00e6fae329257. 
* add test * fix test --- .../datasource/cloudwatch/datasource.ts | 8 ++++++++ .../cloudwatch/specs/datasource.jest.ts | 20 +++++++++++++++++++ 2 files changed, 28 insertions(+) diff --git a/public/app/plugins/datasource/cloudwatch/datasource.ts b/public/app/plugins/datasource/cloudwatch/datasource.ts index 391f65bd7ae..00ce1bfa287 100644 --- a/public/app/plugins/datasource/cloudwatch/datasource.ts +++ b/public/app/plugins/datasource/cloudwatch/datasource.ts @@ -39,6 +39,14 @@ export default class CloudWatchDatasource { item.dimensions = this.convertDimensionFormat(item.dimensions, options.scopedVars); item.period = String(this.getPeriod(item, options)); // use string format for period in graph query, and alerting + // valid ExtendedStatistics is like p90.00, check the pattern + let hasInvalidStatistics = item.statistics.some(s => { + return s.indexOf('p') === 0 && !/p\d{2}\.\d{2}/.test(s); + }); + if (hasInvalidStatistics) { + throw { message: 'Invalid extended statistics' }; + } + return _.extend( { refId: item.refId, diff --git a/public/app/plugins/datasource/cloudwatch/specs/datasource.jest.ts b/public/app/plugins/datasource/cloudwatch/specs/datasource.jest.ts index 2dc6e57b1aa..a8968008661 100644 --- a/public/app/plugins/datasource/cloudwatch/specs/datasource.jest.ts +++ b/public/app/plugins/datasource/cloudwatch/specs/datasource.jest.ts @@ -121,6 +121,26 @@ describe('CloudWatchDatasource', function() { }); }); + it('should cancel query for invalid extended statistics', function () { + var query = { + range: { from: 'now-1h', to: 'now' }, + rangeRaw: { from: 1483228800, to: 1483232400 }, + targets: [ + { + region: 'us-east-1', + namespace: 'AWS/EC2', + metricName: 'CPUUtilization', + dimensions: { + InstanceId: 'i-12345678', + }, + statistics: ['pNN.NN'], + period: '60s', + }, + ], + }; + expect(ctx.ds.query.bind(ctx.ds, query)).toThrow(/Invalid extended statistics/); + }); + it('should return series list', function(done) { ctx.ds.query(query).then(function(result) { expect(result.data[0].target).toBe(response.results.A.series[0].name); From 81e62e105143f9493169d86a20bc2dd0766dab38 Mon Sep 17 00:00:00 2001 From: David Kaltschmidt Date: Thu, 12 Jul 2018 13:16:41 +0200 Subject: [PATCH 018/105] Fix freezing browser when loading plugin - broken since 4d2dd2209 - `*` was previously working as a path matcher, but freezes browser when used with new cache-busting plugin loader - changed matcher to be `/*` --- public/app/features/plugins/plugin_loader.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/public/app/features/plugins/plugin_loader.ts b/public/app/features/plugins/plugin_loader.ts index 20023e27b5c..641b5100703 100644 --- a/public/app/features/plugins/plugin_loader.ts +++ b/public/app/features/plugins/plugin_loader.ts @@ -56,7 +56,7 @@ System.config({ css: 'vendor/plugin-css/css.js', }, meta: { - '*': { + '/*': { esModule: true, authorization: true, loader: 'plugin-loader', From 7361d352bf0fa503cc9775d5ceba39a2b6775e9b Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Thu, 12 Jul 2018 15:38:41 +0200 Subject: [PATCH 019/105] Add comments --- public/app/core/components/scroll/page_scroll.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/public/app/core/components/scroll/page_scroll.ts b/public/app/core/components/scroll/page_scroll.ts index 0cb36eba914..b6603f06175 100644 --- a/public/app/core/components/scroll/page_scroll.ts +++ b/public/app/core/components/scroll/page_scroll.ts @@ -29,10 +29,12 @@ export function pageScrollbar() { 
scope.$on('$routeChangeSuccess', () => { lastPos = 0; elem[0].scrollTop = 0; + // Focus page to enable scrolling by keyboard elem[0].focus({ preventScroll: true }); }); elem[0].tabIndex = -1; + // Focus page to enable scrolling by keyboard elem[0].focus({ preventScroll: true }); }, }; From 756c08e713ad2d1be7aad681aee6db7c85d8791f Mon Sep 17 00:00:00 2001 From: Shane Date: Fri, 13 Jul 2018 02:56:37 -0400 Subject: [PATCH 020/105] changed you to your (#12590) --- docs/sources/reference/templating.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/sources/reference/templating.md b/docs/sources/reference/templating.md index 8341b9770bd..efe9db61e3d 100644 --- a/docs/sources/reference/templating.md +++ b/docs/sources/reference/templating.md @@ -11,7 +11,7 @@ weight = 1 # Variables Variables allows for more interactive and dynamic dashboards. Instead of hard-coding things like server, application -and sensor name in you metric queries you can use variables in their place. Variables are shown as dropdown select boxes at the top of +and sensor name in your metric queries you can use variables in their place. Variables are shown as dropdown select boxes at the top of the dashboard. These dropdowns make it easy to change the data being displayed in your dashboard. {{< docs-imagebox img="/img/docs/v50/variables_dashboard.png" >}} From d06b26de262c1dccb9976d506fdc8e6f39b16118 Mon Sep 17 00:00:00 2001 From: David Kaltschmidt Date: Fri, 13 Jul 2018 09:09:36 +0200 Subject: [PATCH 021/105] Explore Datasource selector Adds a datasource selector to the Explore UI. Only datasource plugins that have `explore: true` in their `plugin.json` can be selected. - adds datasource selector (based on react-select) to explore UI - adds getExploreSources to datasource service - new `explore` flag in datasource plugins model - Prometheus plugin enabled explore --- pkg/plugins/datasource_plugin.go | 1 + public/app/containers/Explore/Explore.tsx | 92 +++++++++++++++---- public/app/features/plugins/datasource_srv.ts | 33 ++++--- .../plugins/specs/datasource_srv.jest.ts | 30 +++++- .../plugins/datasource/prometheus/plugin.json | 30 ++++-- public/sass/pages/_explore.scss | 4 + 6 files changed, 148 insertions(+), 42 deletions(-) diff --git a/pkg/plugins/datasource_plugin.go b/pkg/plugins/datasource_plugin.go index 2fec6acbf54..cef35a2e7d9 100644 --- a/pkg/plugins/datasource_plugin.go +++ b/pkg/plugins/datasource_plugin.go @@ -22,6 +22,7 @@ type DataSourcePlugin struct { Annotations bool `json:"annotations"` Metrics bool `json:"metrics"` Alerting bool `json:"alerting"` + Explore bool `json:"explore"` QueryOptions map[string]bool `json:"queryOptions,omitempty"` BuiltIn bool `json:"builtIn,omitempty"` Mixed bool `json:"mixed,omitempty"` diff --git a/public/app/containers/Explore/Explore.tsx b/public/app/containers/Explore/Explore.tsx index deebe84f2c8..90bf0941572 100644 --- a/public/app/containers/Explore/Explore.tsx +++ b/public/app/containers/Explore/Explore.tsx @@ -1,16 +1,17 @@ import React from 'react'; import { hot } from 'react-hot-loader'; +import Select from 'react-select'; + import colors from 'app/core/utils/colors'; import TimeSeries from 'app/core/time_series2'; +import { decodePathComponent } from 'app/core/utils/location_util'; import ElapsedTime from './ElapsedTime'; import QueryRows from './QueryRows'; import Graph from './Graph'; import Table from './Table'; import TimePicker, { DEFAULT_RANGE } from './TimePicker'; -import { DatasourceSrv } from 'app/features/plugins/datasource_srv'; import 
{ buildQueryOptions, ensureQueries, generateQueryKey, hasQuery } from './utils/query'; -import { decodePathComponent } from 'app/core/utils/location_util'; function makeTimeSeriesList(dataList, options) { return dataList.map((seriesData, index) => { @@ -46,7 +47,8 @@ function parseInitialState(initial) { interface IExploreState { datasource: any; datasourceError: any; - datasourceLoading: any; + datasourceLoading: boolean | null; + datasourceMissing: boolean; graphResult: any; latency: number; loading: any; @@ -61,15 +63,14 @@ interface IExploreState { // @observer export class Explore extends React.Component { - datasourceSrv: DatasourceSrv; - constructor(props) { super(props); const { range, queries } = parseInitialState(props.routeParams.initial); this.state = { datasource: null, datasourceError: null, - datasourceLoading: true, + datasourceLoading: null, + datasourceMissing: false, graphResult: null, latency: 0, loading: false, @@ -85,19 +86,43 @@ export class Explore extends React.Component { } async componentDidMount() { - const datasource = await this.props.datasourceSrv.get(); - const testResult = await datasource.testDatasource(); - if (testResult.status === 'success') { - this.setState({ datasource, datasourceError: null, datasourceLoading: false }, () => this.handleSubmit()); + const { datasourceSrv } = this.props; + if (!datasourceSrv) { + throw new Error('No datasource service passed as props.'); + } + const datasources = datasourceSrv.getExploreSources(); + if (datasources.length > 0) { + this.setState({ datasourceLoading: true }); + // Try default datasource, otherwise get first + let datasource = await datasourceSrv.get(); + if (!datasource.meta.explore) { + datasource = await datasourceSrv.get(datasources[0].name); + } + this.setDatasource(datasource); } else { - this.setState({ datasource: null, datasourceError: testResult.message, datasourceLoading: false }); + this.setState({ datasourceMissing: true }); } } componentDidCatch(error) { + this.setState({ datasourceError: error }); console.error(error); } + async setDatasource(datasource) { + try { + const testResult = await datasource.testDatasource(); + if (testResult.status === 'success') { + this.setState({ datasource, datasourceError: null, datasourceLoading: false }, () => this.handleSubmit()); + } else { + this.setState({ datasource: datasource, datasourceError: testResult.message, datasourceLoading: false }); + } + } catch (error) { + const message = (error && error.statusText) || error; + this.setState({ datasource: datasource, datasourceError: message, datasourceLoading: false }); + } + } + handleAddQueryRow = index => { const { queries } = this.state; const nextQueries = [ @@ -108,6 +133,18 @@ export class Explore extends React.Component { this.setState({ queries: nextQueries }); }; + handleChangeDatasource = async option => { + this.setState({ + datasource: null, + datasourceError: null, + datasourceLoading: true, + graphResult: null, + tableResult: null, + }); + const datasource = await this.props.datasourceSrv.get(option.value); + this.setDatasource(datasource); + }; + handleChangeQuery = (query, index) => { const { queries } = this.state; const nextQuery = { @@ -226,11 +263,12 @@ export class Explore extends React.Component { }; render() { - const { position, split } = this.props; + const { datasourceSrv, position, split } = this.props; const { datasource, datasourceError, datasourceLoading, + datasourceMissing, graphResult, latency, loading, @@ -247,6 +285,12 @@ export class Explore extends React.Component { 
const graphButtonActive = showingBoth || showingGraph ? 'active' : ''; const tableButtonActive = showingBoth || showingTable ? 'active' : ''; const exploreClass = split ? 'explore explore-split' : 'explore'; + const datasources = datasourceSrv.getExploreSources().map(ds => ({ + value: ds.name, + label: ds.name, + })); + const selectedDatasource = datasource ? datasource.name : undefined; + return (
@@ -264,6 +308,18 @@ export class Explore extends React.Component {
)} + {!datasourceMissing ? ( +
+ + +
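
The datasource_srv.ts hunk that adds getExploreSources() is not shown in this extract, so the sketch below only illustrates the idea described in the commit message: expose the configured datasources whose plugin.json sets `explore: true`, in the shape Explore.tsx consumes (objects with a `name` and `meta`). The DataSourceSettings interface and the sorting are assumptions, not the actual implementation.

// Hypothetical sketch of a getExploreSources() helper; the shapes here are assumed,
// not taken from the real datasource_srv.ts change.
interface DataSourceSettings {
  name: string;
  meta: { explore?: boolean };
}

export function getExploreSources(datasources: {
  [name: string]: DataSourceSettings;
}): DataSourceSettings[] {
  return Object.keys(datasources)
    .map(name => datasources[name])
    // keep only plugins that opt in to Explore via `"explore": true` in plugin.json
    .filter(ds => Boolean(ds.meta && ds.meta.explore))
    // stable, user-friendly ordering for the selector dropdown
    .sort((a, b) => a.name.localeCompare(b.name));
}

Explore.tsx then maps the returned datasources to { value: ds.name, label: ds.name } options for the react-select dropdown, as shown in the hunk above.
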
From ae935bf08b14c1457b4f96580048003c494b8063 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Mon, 23 Jul 2018 11:06:30 +0200 Subject: [PATCH 054/105] Add jest test file --- .../panel/graph/specs/graph_ctrl.jest.ts | 81 +++++++++++++++++++ 1 file changed, 81 insertions(+) create mode 100644 public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts diff --git a/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts b/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts new file mode 100644 index 00000000000..bd5a69f28dd --- /dev/null +++ b/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts @@ -0,0 +1,81 @@ +// import { describe, beforeEach, it, expect, angularMocks } from '../../../../../test/lib/common'; + +import moment from 'moment'; +import { GraphCtrl } from '../module'; + +describe('GraphCtrl', function() { + let ctx = {}; + + beforeEach(() => { + ctx.ctrl = new GraphCtrl({}, {}, {}); + }); + + // beforeEach(angularMocks.module('grafana.services')); + // beforeEach(angularMocks.module('grafana.controllers')); + // beforeEach( + // angularMocks.module(function($compileProvider) { + // $compileProvider.preAssignBindingsEnabled(true); + // }) + // ); + + // beforeEach(ctx.providePhase()); + // beforeEach(ctx.createPanelController(GraphCtrl)); + beforeEach(() => { + ctx.ctrl.annotationsPromise = Promise.resolve({}); + ctx.ctrl.updateTimeRange(); + }); + + describe('when time series are outside range', function() { + beforeEach(function() { + var data = [ + { + target: 'test.cpu1', + datapoints: [[45, 1234567890], [60, 1234567899]], + }, + ]; + + ctx.ctrl.range = { from: moment().valueOf(), to: moment().valueOf() }; + ctx.ctrl.onDataReceived(data); + }); + + it('should set datapointsOutside', function() { + expect(ctx.ctrl.dataWarning.title).toBe('Data points outside time range'); + }); + }); + + describe('when time series are inside range', function() { + beforeEach(function() { + var range = { + from: moment() + .subtract(1, 'days') + .valueOf(), + to: moment().valueOf(), + }; + + var data = [ + { + target: 'test.cpu1', + datapoints: [[45, range.from + 1000], [60, range.from + 10000]], + }, + ]; + + ctx.ctrl.range = range; + ctx.ctrl.onDataReceived(data); + }); + + it('should set datapointsOutside', function() { + expect(ctx.ctrl.dataWarning).toBe(null); + }); + }); + + describe('datapointsCount given 2 series', function() { + beforeEach(function() { + var data = [{ target: 'test.cpu1', datapoints: [] }, { target: 'test.cpu2', datapoints: [] }]; + ctx.ctrl.onDataReceived(data); + }); + + it('should set datapointsCount warning', function() { + expect(ctx.ctrl.dataWarning.title).toBe('No data points'); + }); + }); +}); From ee2eda615e4eac174907752911f486ebc5310ef9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20W=C4=99grzynek?= Date: Mon, 23 Jul 2018 12:07:54 +0200 Subject: [PATCH 055/105] Update kbn.ts --- public/app/core/utils/kbn.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/public/app/core/utils/kbn.ts b/public/app/core/utils/kbn.ts index 463025567cd..4fc4829811f 100644 --- a/public/app/core/utils/kbn.ts +++ b/public/app/core/utils/kbn.ts @@ -449,6 +449,7 @@ kbn.valueFormats.currencyNOK = kbn.formatBuilders.currency('kr'); kbn.valueFormats.currencySEK = kbn.formatBuilders.currency('kr'); kbn.valueFormats.currencyCZK = kbn.formatBuilders.currency('czk'); kbn.valueFormats.currencyCHF = kbn.formatBuilders.currency('CHF'); +kbn.valueFormats.currencyPLN = kbn.formatBuilders.currency('zł'); // Data (Binary) kbn.valueFormats.bits = 
kbn.formatBuilders.binarySIPrefix('b'); @@ -880,6 +881,7 @@ kbn.getUnitFormats = function() { { text: 'Swedish Krona (kr)', value: 'currencySEK' }, { text: 'Czech koruna (czk)', value: 'currencyCZK' }, { text: 'Swiss franc (CHF)', value: 'currencyCHF' }, + { text: 'Polish Złoty (PLN)', value: 'currencyPLN' }, ], }, { From 0fa98a812bac189c107a17ba7c1cb15050800fda Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Mon, 23 Jul 2018 13:13:18 +0200 Subject: [PATCH 056/105] changelog: add notes about closing #12691 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index e53b3a904a3..5cf8602824b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -18,6 +18,7 @@ * **MySQL/MSSQL**: Use datetime format instead of epoch for $__timeFilter, $__timeFrom and $__timeTo macros [#11618](https://github.com/grafana/grafana/issues/11618) [#11619](https://github.com/grafana/grafana/issues/11619), thx [@AustinWinstanley](https://github.com/AustinWinstanley) * **Github OAuth**: Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber) * **Alerting**: Fix diff and percent_diff reducers [#11563](https://github.com/grafana/grafana/issues/11563), thx [@jessetane](https://github.com/jessetane) +* **Units**: Polish złoty currency [#12691](https://github.com/grafana/grafana/pull/12691), thx [@mwegrzynek](https://github.com/mwegrzynek) # 5.2.2 (unreleased) From ed8568f0dffcad022309e48e4b837ecd0414b69d Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Mon, 23 Jul 2018 13:38:16 +0200 Subject: [PATCH 057/105] Add graph_ctrl jest --- .../panel/graph/specs/graph_ctrl.jest.ts | 42 ++++++---- .../panel/graph/specs/graph_ctrl_specs.ts | 78 ------------------- 2 files changed, 29 insertions(+), 91 deletions(-) delete mode 100644 public/app/plugins/panel/graph/specs/graph_ctrl_specs.ts diff --git a/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts b/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts index bd5a69f28dd..a778697527f 100644 --- a/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts +++ b/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts @@ -1,25 +1,41 @@ -// import { describe, beforeEach, it, expect, angularMocks } from '../../../../../test/lib/common'; - import moment from 'moment'; import { GraphCtrl } from '../module'; +jest.mock('../graph', () => ({})); + describe('GraphCtrl', function() { + let injector = { + get: () => { + return { + timeRange: () => { + return { + from: '', + to: '', + }; + }, + }; + }, + }; + + let scope = { + $on: function() {}, + }; + + GraphCtrl.prototype.panel = { + events: { + on: function() {}, + }, + gridPos: { + w: 100, + }, + }; + let ctx = {}; beforeEach(() => { - ctx.ctrl = new GraphCtrl({}, {}, {}); + ctx.ctrl = new GraphCtrl(scope, injector, {}); }); - // beforeEach(angularMocks.module('grafana.services')); - // beforeEach(angularMocks.module('grafana.controllers')); - // beforeEach( - // angularMocks.module(function($compileProvider) { - // $compileProvider.preAssignBindingsEnabled(true); - // }) - // ); - - // beforeEach(ctx.providePhase()); - // beforeEach(ctx.createPanelController(GraphCtrl)); beforeEach(() => { ctx.ctrl.annotationsPromise = Promise.resolve({}); ctx.ctrl.updateTimeRange(); diff --git a/public/app/plugins/panel/graph/specs/graph_ctrl_specs.ts b/public/app/plugins/panel/graph/specs/graph_ctrl_specs.ts deleted file mode 100644 index d5cefb345cf..00000000000 --- 
a/public/app/plugins/panel/graph/specs/graph_ctrl_specs.ts +++ /dev/null @@ -1,78 +0,0 @@ -import { describe, beforeEach, it, expect, angularMocks } from '../../../../../test/lib/common'; - -import moment from 'moment'; -import { GraphCtrl } from '../module'; -import helpers from '../../../../../test/specs/helpers'; - -describe('GraphCtrl', function() { - var ctx = new helpers.ControllerTestContext(); - - beforeEach(angularMocks.module('grafana.services')); - beforeEach(angularMocks.module('grafana.controllers')); - beforeEach( - angularMocks.module(function($compileProvider) { - $compileProvider.preAssignBindingsEnabled(true); - }) - ); - - beforeEach(ctx.providePhase()); - beforeEach(ctx.createPanelController(GraphCtrl)); - beforeEach(() => { - ctx.ctrl.annotationsPromise = Promise.resolve({}); - ctx.ctrl.updateTimeRange(); - }); - - describe('when time series are outside range', function() { - beforeEach(function() { - var data = [ - { - target: 'test.cpu1', - datapoints: [[45, 1234567890], [60, 1234567899]], - }, - ]; - - ctx.ctrl.range = { from: moment().valueOf(), to: moment().valueOf() }; - ctx.ctrl.onDataReceived(data); - }); - - it('should set datapointsOutside', function() { - expect(ctx.ctrl.dataWarning.title).to.be('Data points outside time range'); - }); - }); - - describe('when time series are inside range', function() { - beforeEach(function() { - var range = { - from: moment() - .subtract(1, 'days') - .valueOf(), - to: moment().valueOf(), - }; - - var data = [ - { - target: 'test.cpu1', - datapoints: [[45, range.from + 1000], [60, range.from + 10000]], - }, - ]; - - ctx.ctrl.range = range; - ctx.ctrl.onDataReceived(data); - }); - - it('should set datapointsOutside', function() { - expect(ctx.ctrl.dataWarning).to.be(null); - }); - }); - - describe('datapointsCount given 2 series', function() { - beforeEach(function() { - var data = [{ target: 'test.cpu1', datapoints: [] }, { target: 'test.cpu2', datapoints: [] }]; - ctx.ctrl.onDataReceived(data); - }); - - it('should set datapointsCount warning', function() { - expect(ctx.ctrl.dataWarning.title).to.be('No data points'); - }); - }); -}); From 529883b61d43fefac03b578e1fe86b4259e9c2de Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Mon, 23 Jul 2018 13:39:32 +0200 Subject: [PATCH 058/105] Change to arrow functions --- public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts b/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts index a778697527f..788ca1840ba 100644 --- a/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts +++ b/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts @@ -18,12 +18,12 @@ describe('GraphCtrl', function() { }; let scope = { - $on: function() {}, + $on: () => {}, }; GraphCtrl.prototype.panel = { events: { - on: function() {}, + on: () => {}, }, gridPos: { w: 100, From 46e31621b071e36f658788c5b8f9c9ab11ca1aab Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Mon, 23 Jul 2018 14:28:17 +0200 Subject: [PATCH 059/105] Add jest file --- .../influxdb/specs/query_ctrl.jest.ts | 211 ++++++++++++++++++ 1 file changed, 211 insertions(+) create mode 100644 public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts new file mode 100644 index 00000000000..e4dd5b226f4 --- /dev/null +++ 
b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts @@ -0,0 +1,211 @@ +import '../query_ctrl'; +import 'app/core/services/segment_srv'; +// import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; +// import helpers from 'test/specs/helpers'; +import { InfluxQueryCtrl } from '../query_ctrl'; + +describe('InfluxDBQueryCtrl', function() { + let uiSegmentSrv = { + newPlusButton: () => {}, + }; + + let ctx = { + dataSource: { + metricFindQuery: jest.fn(() => Promise.resolve([])), + }, + }; + + InfluxQueryCtrl.prototype.panelCtrl = { + panel: { + targets: [{}], + }, + }; + + // beforeEach(angularMocks.module('grafana.core')); + // beforeEach(angularMocks.module('grafana.controllers')); + // beforeEach(angularMocks.module('grafana.services')); + // beforeEach( + // angularMocks.module(function($compileProvider) { + // $compileProvider.preAssignBindingsEnabled(true); + // }) + // ); + // beforeEach(ctx.providePhase()); + + // beforeEach( + // angularMocks.inject(($rootScope, $controller, $q) => { + // ctx.$q = $q; + // ctx.scope = $rootScope.$new(); + // ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([])); + // ctx.target = { target: {} }; + // ctx.panelCtrl = { + // panel: { + // targets: [ctx.target], + // }, + // }; + // ctx.panelCtrl.refresh = sinon.spy(); + // ctx.ctrl = $controller( + // InfluxQueryCtrl, + // { $scope: ctx.scope }, + // { + // panelCtrl: ctx.panelCtrl, + // target: ctx.target, + // datasource: ctx.datasource, + // } + // ); + // }) + // ); + + beforeEach(() => { + ctx.ctrl = new InfluxQueryCtrl({}, {}, {}, {}, uiSegmentSrv); + }); + + describe('init', function() { + it('should init tagSegments', function() { + expect(ctx.ctrl.tagSegments.length).toBe(1); + }); + + it('should init measurementSegment', function() { + expect(ctx.ctrl.measurementSegment.value).toBe('select measurement'); + }); + }); + + describe('when first tag segment is updated', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + }); + + it('should update tag key', function() { + expect(ctx.ctrl.target.tags[0].key).toBe('asd'); + expect(ctx.ctrl.tagSegments[0].type).toBe('key'); + }); + + it('should add tagSegments', function() { + expect(ctx.ctrl.tagSegments.length).toBe(3); + }); + }); + + describe('when last tag value segment is updated', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + }); + + it('should update tag value', function() { + expect(ctx.ctrl.target.tags[0].value).toBe('server1'); + }); + + it('should set tag operator', function() { + expect(ctx.ctrl.target.tags[0].operator).toBe('='); + }); + + it('should add plus button for another filter', function() { + expect(ctx.ctrl.tagSegments[3].fake).toBe(true); + }); + }); + + describe('when last tag value segment is updated to regex', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: '/server.*/', type: 'value' }, 2); + }); + + it('should update operator', function() { + expect(ctx.ctrl.tagSegments[1].value).toBe('=~'); + expect(ctx.ctrl.target.tags[0].operator).toBe('=~'); + }); + }); + + describe('when second tag key is added', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 
'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + }); + + it('should update tag key', function() { + expect(ctx.ctrl.target.tags[1].key).toBe('key2'); + }); + + it('should add AND segment', function() { + expect(ctx.ctrl.tagSegments[3].value).toBe('AND'); + }); + }); + + describe('when condition is changed', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + ctx.ctrl.tagSegmentUpdated({ value: 'OR', type: 'condition' }, 3); + }); + + it('should update tag condition', function() { + expect(ctx.ctrl.target.tags[1].condition).toBe('OR'); + }); + + it('should update AND segment', function() { + expect(ctx.ctrl.tagSegments[3].value).toBe('OR'); + expect(ctx.ctrl.tagSegments.length).toBe(7); + }); + }); + + describe('when deleting first tag filter after value is selected', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 0); + }); + + it('should remove tags', function() { + expect(ctx.ctrl.target.tags.length).toBe(0); + }); + + it('should remove all segment after 2 and replace with plus button', function() { + expect(ctx.ctrl.tagSegments.length).toBe(1); + expect(ctx.ctrl.tagSegments[0].type).toBe('plus-button'); + }); + }); + + describe('when deleting second tag value before second tag value is complete', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); + }); + + it('should remove all segment after 2 and replace with plus button', function() { + expect(ctx.ctrl.tagSegments.length).toBe(4); + expect(ctx.ctrl.tagSegments[3].type).toBe('plus-button'); + }); + }); + + describe('when deleting second tag value before second tag value is complete', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); + }); + + it('should remove all segment after 2 and replace with plus button', function() { + expect(ctx.ctrl.tagSegments.length).toBe(4); + expect(ctx.ctrl.tagSegments[3].type).toBe('plus-button'); + }); + }); + + describe('when deleting second tag value after second tag filter is complete', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + ctx.ctrl.tagSegmentUpdated({ value: 'value', type: 'value' }, 6); + ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); + }); + + it('should remove all segment after 2 and replace with plus button', function() { + expect(ctx.ctrl.tagSegments.length).toBe(4); + expect(ctx.ctrl.tagSegments[3].type).toBe('plus-button'); + }); + }); +}); From 6b6a23ff6a24c62955b48c9794c0b99023ceb608 Mon Sep 17 
00:00:00 2001 From: Tobias Skarhed Date: Thu, 12 Jul 2018 15:32:32 +0200 Subject: [PATCH 060/105] Add support for interval in query variable Add range to scopedVars Add basic tests and extract function for range vars Add support for range query variable in createQuery Template vars squash --- .../datasource/prometheus/datasource.ts | 20 ++++++++- .../prometheus/specs/datasource.jest.ts | 43 ++++++++++++++++++- 2 files changed, 60 insertions(+), 3 deletions(-) diff --git a/public/app/plugins/datasource/prometheus/datasource.ts b/public/app/plugins/datasource/prometheus/datasource.ts index 69ce6f440c5..75a946d6f36 100644 --- a/public/app/plugins/datasource/prometheus/datasource.ts +++ b/public/app/plugins/datasource/prometheus/datasource.ts @@ -196,13 +196,14 @@ export class PrometheusDatasource { var intervalFactor = target.intervalFactor || 1; // Adjust the interval to take into account any specified minimum and interval factor plus Prometheus limits var adjustedInterval = this.adjustInterval(interval, minInterval, range, intervalFactor); - var scopedVars = options.scopedVars; + var scopedVars = { ...options.scopedVars, ...this.getRangeScopedVars() }; // If the interval was adjusted, make a shallow copy of scopedVars with updated interval vars if (interval !== adjustedInterval) { interval = adjustedInterval; scopedVars = Object.assign({}, options.scopedVars, { __interval: { text: interval + 's', value: interval + 's' }, __interval_ms: { text: interval * 1000, value: interval * 1000 }, + ...this.getRangeScopedVars(), }); } query.step = interval; @@ -285,11 +286,26 @@ export class PrometheusDatasource { return this.$q.when([]); } - let interpolated = this.templateSrv.replace(query, {}, this.interpolateQueryExpr); + let scopedVars = { + __interval: { text: this.interval, value: this.interval }, + __interval_ms: { text: kbn.interval_to_ms(this.interval), value: kbn.interval_to_ms(this.interval) }, + ...this.getRangeScopedVars(), + }; + let interpolated = this.templateSrv.replace(query, scopedVars, this.interpolateQueryExpr); var metricFindQuery = new PrometheusMetricFindQuery(this, interpolated, this.timeSrv); return metricFindQuery.process(); } + getRangeScopedVars() { + let range = this.timeSrv.timeRange(); + let msRange = range.to.diff(range.from); + let regularRange = kbn.secondsToHms(msRange / 1000); + return { + __range_ms: { text: msRange, value: msRange }, + __range: { text: regularRange, value: regularRange }, + }; + } + annotationQuery(options) { var annotation = options.annotation; var expr = annotation.expr || ''; diff --git a/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts b/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts index 15798a33cd2..b8b2b50f590 100644 --- a/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts +++ b/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts @@ -2,6 +2,7 @@ import _ from 'lodash'; import moment from 'moment'; import q from 'q'; import { alignRange, PrometheusDatasource, prometheusSpecialRegexEscape, prometheusRegularEscape } from '../datasource'; +jest.mock('../metric_find_query'); describe('PrometheusDatasource', () => { let ctx: any = {}; @@ -18,7 +19,14 @@ describe('PrometheusDatasource', () => { ctx.templateSrvMock = { replace: a => a, }; - ctx.timeSrvMock = {}; + ctx.timeSrvMock = { + timeRange: () => { + return { + from: moment(1531468681), + to: moment(1531489712), + }; + }, + }; beforeEach(() => { ctx.ds = new PrometheusDatasource(instanceSettings, q, ctx.backendSrvMock, 
ctx.templateSrvMock, ctx.timeSrvMock); @@ -204,4 +212,37 @@ describe('PrometheusDatasource', () => { expect(prometheusSpecialRegexEscape('+looking$glass?')).toEqual('\\\\+looking\\\\$glass\\\\?'); }); }); + + describe('metricFindQuery', () => { + beforeEach(() => { + let query = 'query_result(topk(5,rate(http_request_duration_microseconds_count[$__interval])))'; + ctx.templateSrvMock.replace = jest.fn(); + ctx.timeSrvMock.timeRange = () => { + return { + from: moment(1531468681), + to: moment(1531489712), + }; + }; + ctx.ds = new PrometheusDatasource(instanceSettings, q, ctx.backendSrvMock, ctx.templateSrvMock, ctx.timeSrvMock); + ctx.ds.metricFindQuery(query); + }); + + it('should call templateSrv.replace with scopedVars', () => { + expect(ctx.templateSrvMock.replace.mock.calls[0][1]).toBeDefined(); + }); + + it('should have the correct range and range_ms', () => { + let range = ctx.templateSrvMock.replace.mock.calls[0][1].__range; + let rangeMs = ctx.templateSrvMock.replace.mock.calls[0][1].__range_ms; + expect(range).toEqual({ text: '21s', value: '21s' }); + expect(rangeMs).toEqual({ text: 21031, value: 21031 }); + }); + + it('should pass the default interval value', () => { + let interval = ctx.templateSrvMock.replace.mock.calls[0][1].__interval; + let intervalMs = ctx.templateSrvMock.replace.mock.calls[0][1].__interval_ms; + expect(interval).toEqual({ text: '15s', value: '15s' }); + expect(intervalMs).toEqual({ text: 15000, value: 15000 }); + }); + }); }); From bb0af52d34b201a960d3ace19a54e1b44be8748b Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Mon, 23 Jul 2018 14:54:58 +0200 Subject: [PATCH 061/105] Figuring out why it doesn't initialize --- .../app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts index e4dd5b226f4..c3b8d3ae20d 100644 --- a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts +++ b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts @@ -16,8 +16,9 @@ describe('InfluxDBQueryCtrl', function() { }; InfluxQueryCtrl.prototype.panelCtrl = { + target: { target: {} }, panel: { - targets: [{}], + targets: [this.target], }, }; From 76bc02b3fae41bae9b5a3643a503566332d4c267 Mon Sep 17 00:00:00 2001 From: David Date: Mon, 23 Jul 2018 14:58:11 +0200 Subject: [PATCH 062/105] Update CHANGELOG.md Added #12597 --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5cf8602824b..58570c89c18 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,7 @@ * **Table**: Make table sorting stable when null values exist [#12362](https://github.com/grafana/grafana/pull/12362), thx [@bz2](https://github.com/bz2) * **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379) * **Prometheus**: Heatmap - fix unhandled error when some points are missing [#12484](https://github.com/grafana/grafana/issues/12484) +* **Prometheus**: Add $interval, $interval_ms, $range, and $range_ms support for dashboard and template queries [#12597](https://github.com/grafana/grafana/issues/12597) * **Variables**: Skip unneeded extra query request when de-selecting variable values used for repeated panels [#8186](https://github.com/grafana/grafana/issues/8186), thx [@mtanda](https://github.com/mtanda) * **Postgres/MySQL/MSSQL**: Use floor rounding in $__timeGroup macro 
function [#12460](https://github.com/grafana/grafana/issues/12460), thx [@svenklemm](https://github.com/svenklemm) * **MySQL/MSSQL**: Use datetime format instead of epoch for $__timeFilter, $__timeFrom and $__timeTo macros [#11618](https://github.com/grafana/grafana/issues/11618) [#11619](https://github.com/grafana/grafana/issues/11619), thx [@AustinWinstanley](https://github.com/AustinWinstanley) From 816ee82d2695157cbd969f43623ae686b683f08d Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Mon, 23 Jul 2018 15:25:59 +0200 Subject: [PATCH 063/105] Add docs about global variables in query template variables --- docs/sources/features/datasources/prometheus.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/sources/features/datasources/prometheus.md b/docs/sources/features/datasources/prometheus.md index 4ff0baee108..190220fb0f1 100644 --- a/docs/sources/features/datasources/prometheus.md +++ b/docs/sources/features/datasources/prometheus.md @@ -75,6 +75,9 @@ Name | Description For details of *metric names*, *label names* and *label values* are please refer to the [Prometheus documentation](http://prometheus.io/docs/concepts/data_model/#metric-names-and-labels). + +It is possible to use some global template variables in Prometheus query template variables; `$__interval`, `$__interval_ms`, `$__range` and `$__range_ms`, where `$__range` is the dashboard's current time range and `$__range_ms` is the current range in milliseconds. + ### Using variables in queries There are two syntaxes: From 70575c8f7816f90b074d7f65226b70e334786958 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Mon, 23 Jul 2018 15:34:03 +0200 Subject: [PATCH 064/105] Add templating docs for --- docs/sources/reference/templating.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/sources/reference/templating.md b/docs/sources/reference/templating.md index efe9db61e3d..08a142d3636 100644 --- a/docs/sources/reference/templating.md +++ b/docs/sources/reference/templating.md @@ -273,6 +273,9 @@ The `$__timeFilter` is used in the MySQL data source. This variable is only available in the Singlestat panel and can be used in the prefix or suffix fields on the Options tab. The variable will be replaced with the series name or alias. +### The $__range Variable +Currently only supported for Prometheus data sources. This variable represents the range for the current dashboard. It is calculated by `to - from`. It has a millisecond representation called `$__range_ms`. + ## Repeating Panels Template variables can be very useful to dynamically change your queries across a whole dashboard. 
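The `$__range` and `$__range_ms` variables documented above are derived from the dashboard's time range by the `getRangeScopedVars()` method added to the Prometheus datasource earlier in this series. A minimal self-contained sketch of that derivation is below; `formatSeconds` and the `ScopedVar` shape are simplified stand-ins for Grafana's `kbn.secondsToHms()` and scoped-variable objects, not the actual implementation, and the timestamps are simply the ones used in the accompanying Jest test.

```ts
// Illustrative sketch only: derive __range / __range_ms scoped variables from
// a dashboard time range, mirroring getRangeScopedVars() in the patch above.
interface ScopedVar {
  text: string | number;
  value: string | number;
}

// Simplified stand-in for kbn.secondsToHms(), which renders friendlier units.
function formatSeconds(seconds: number): string {
  return Math.round(seconds) + 's';
}

function rangeScopedVars(fromMs: number, toMs: number): Record<string, ScopedVar> {
  const msRange = toMs - fromMs; // range is calculated as to - from
  const regularRange = formatSeconds(msRange / 1000);
  return {
    __range_ms: { text: msRange, value: msRange },
    __range: { text: regularRange, value: regularRange },
  };
}

// With the timestamps from datasource.jest.ts above (a 21031 ms range),
// a template-variable query that references $__range would have it replaced
// by '21s' and $__range_ms by 21031 before being sent to Prometheus.
console.log(rangeScopedVars(1531468681, 1531489712));
```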
If you want From 47bec0fd91f42cb28b87c0130088ed667149cb70 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Mon, 23 Jul 2018 15:42:47 +0200 Subject: [PATCH 065/105] Fix requested changes --- .../panel/graph/specs/graph_ctrl.jest.ts | 23 ++++++++----------- 1 file changed, 10 insertions(+), 13 deletions(-) diff --git a/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts b/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts index 788ca1840ba..3ebcf6cdf31 100644 --- a/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts +++ b/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts @@ -3,7 +3,7 @@ import { GraphCtrl } from '../module'; jest.mock('../graph', () => ({})); -describe('GraphCtrl', function() { +describe('GraphCtrl', () => { let injector = { get: () => { return { @@ -34,15 +34,12 @@ describe('GraphCtrl', function() { beforeEach(() => { ctx.ctrl = new GraphCtrl(scope, injector, {}); - }); - - beforeEach(() => { ctx.ctrl.annotationsPromise = Promise.resolve({}); ctx.ctrl.updateTimeRange(); }); - describe('when time series are outside range', function() { - beforeEach(function() { + describe('when time series are outside range', () => { + beforeEach(() => { var data = [ { target: 'test.cpu1', @@ -54,13 +51,13 @@ describe('GraphCtrl', function() { ctx.ctrl.onDataReceived(data); }); - it('should set datapointsOutside', function() { + it('should set datapointsOutside', () => { expect(ctx.ctrl.dataWarning.title).toBe('Data points outside time range'); }); }); - describe('when time series are inside range', function() { - beforeEach(function() { + describe('when time series are inside range', () => { + beforeEach(() => { var range = { from: moment() .subtract(1, 'days') @@ -79,18 +76,18 @@ describe('GraphCtrl', function() { ctx.ctrl.onDataReceived(data); }); - it('should set datapointsOutside', function() { + it('should set datapointsOutside', () => { expect(ctx.ctrl.dataWarning).toBe(null); }); }); - describe('datapointsCount given 2 series', function() { - beforeEach(function() { + describe('datapointsCount given 2 series', () => { + beforeEach(() => { var data = [{ target: 'test.cpu1', datapoints: [] }, { target: 'test.cpu2', datapoints: [] }]; ctx.ctrl.onDataReceived(data); }); - it('should set datapointsCount warning', function() { + it('should set datapointsCount warning', () => { expect(ctx.ctrl.dataWarning.title).toBe('No data points'); }); }); From 6b071054a31cbf55eb7e62499b91ece784e4432f Mon Sep 17 00:00:00 2001 From: srid12 Date: Mon, 23 Jul 2018 19:53:26 +0530 Subject: [PATCH 066/105] changing callback fn into arrow functions for correct usage of this (#12673) --- public/app/plugins/datasource/opentsdb/datasource.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/public/app/plugins/datasource/opentsdb/datasource.ts b/public/app/plugins/datasource/opentsdb/datasource.ts index 39ad6c64e11..07ec4a794ec 100644 --- a/public/app/plugins/datasource/opentsdb/datasource.ts +++ b/public/app/plugins/datasource/opentsdb/datasource.ts @@ -480,17 +480,17 @@ export default class OpenTsDatasource { mapMetricsToTargets(metrics, options, tsdbVersion) { var interpolatedTagValue, arrTagV; - return _.map(metrics, function(metricData) { + return _.map(metrics, metricData => { if (tsdbVersion === 3) { return metricData.query.index; } else { - return _.findIndex(options.targets, function(target) { + return _.findIndex(options.targets, target => { if (target.filters && target.filters.length > 0) { return target.metric === metricData.metric; } else { return ( 
target.metric === metricData.metric && - _.every(target.tags, function(tagV, tagK) { + _.every(target.tags, (tagV, tagK) => { interpolatedTagValue = this.templateSrv.replace(tagV, options.scopedVars, 'pipe'); arrTagV = interpolatedTagValue.split('|'); return _.includes(arrTagV, metricData.tags[tagK]) || interpolatedTagValue === '*'; From d9bf89438325c01a0fe5f3205b4cefff25930c40 Mon Sep 17 00:00:00 2001 From: Mitsuhiro Tanda Date: Tue, 24 Jul 2018 16:58:48 +0900 Subject: [PATCH 067/105] return 400 if user input error --- pkg/api/metrics.go | 2 +- pkg/tsdb/cloudwatch/cloudwatch.go | 21 +++++++++++++++++---- 2 files changed, 18 insertions(+), 5 deletions(-) diff --git a/pkg/api/metrics.go b/pkg/api/metrics.go index c1b8ffe595e..00ad25ab8c2 100644 --- a/pkg/api/metrics.go +++ b/pkg/api/metrics.go @@ -52,7 +52,7 @@ func QueryMetrics(c *m.ReqContext, reqDto dtos.MetricRequest) Response { if res.Error != nil { res.ErrorString = res.Error.Error() resp.Message = res.ErrorString - statusCode = 500 + statusCode = 400 } } diff --git a/pkg/tsdb/cloudwatch/cloudwatch.go b/pkg/tsdb/cloudwatch/cloudwatch.go index 38fbac3aa29..4af73fc2ba9 100644 --- a/pkg/tsdb/cloudwatch/cloudwatch.go +++ b/pkg/tsdb/cloudwatch/cloudwatch.go @@ -17,6 +17,7 @@ import ( "golang.org/x/sync/errgroup" "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/aws/awserr" "github.com/aws/aws-sdk-go/aws/request" "github.com/aws/aws-sdk-go/service/cloudwatch" "github.com/aws/aws-sdk-go/service/ec2/ec2iface" @@ -100,7 +101,10 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo query, err := parseQuery(queryContext.Queries[i].Model) if err != nil { - return nil, err + result.Results[query.RefId] = &tsdb.QueryResult{ + Error: err, + } + return result, nil } query.RefId = queryContext.Queries[i].RefId @@ -113,15 +117,21 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo } if query.Id == "" && query.Expression != "" { - return nil, fmt.Errorf("Invalid query: id should be set if using expression") + result.Results[query.RefId] = &tsdb.QueryResult{ + Error: fmt.Errorf("Invalid query: id should be set if using expression"), + } + return result, nil } eg.Go(func() error { queryRes, err := e.executeQuery(ectx, query, queryContext) - if err != nil { + if ae, ok := err.(awserr.Error); ok && ae.Code() == "500" { return err } result.Results[queryRes.RefId] = queryRes + if err != nil { + result.Results[queryRes.RefId].Error = err + } return nil }) } @@ -131,11 +141,14 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo q := getMetricDataQuery eg.Go(func() error { queryResponses, err := e.executeGetMetricDataQuery(ectx, region, q, queryContext) - if err != nil { + if ae, ok := err.(awserr.Error); ok && ae.Code() == "500" { return err } for _, queryRes := range queryResponses { result.Results[queryRes.RefId] = queryRes + if err != nil { + result.Results[queryRes.RefId].Error = err + } } return nil }) From 59c17053990203e6f303b5dfbdb3aa4b20611e75 Mon Sep 17 00:00:00 2001 From: Leonard Gram Date: Tue, 24 Jul 2018 10:34:11 +0200 Subject: [PATCH 068/105] docs: mentation that config changes requires restart. 
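The OpenTSDB change in PATCH 066 above replaces plain `function` callbacks with arrow functions so that `this.templateSrv` resolves correctly inside the lodash iterators. The sketch below distills why: a `function` callback gets its own `this` (undefined inside a module), while an arrow function closes over the enclosing method's `this`. The `TagMatcher` class is a hypothetical stand-in, not the real datasource.

```ts
// Minimal sketch, not Grafana code: why the callbacks were converted to arrows.
class TagMatcher {
  constructor(private templateSrv: { replace: (v: string) => string }) {}

  // Pre-patch style: a plain `function` callback has its own `this`, which is
  // undefined here, so accessing this.templateSrv fails at runtime.
  matchesWithFunction(tags: string[]): boolean {
    return tags.every(function(this: any, tag: string) {
      return this.templateSrv.replace(tag) === '*';
    });
  }

  // Post-patch style: an arrow function has no `this` of its own and captures
  // the method's `this`, so templateSrv is reachable as intended.
  matchesWithArrow(tags: string[]): boolean {
    return tags.every(tag => this.templateSrv.replace(tag) === '*');
  }
}

const matcher = new TagMatcher({ replace: () => '*' });
console.log(matcher.matchesWithArrow(['host'])); // true
// matcher.matchesWithFunction(['host']) would throw a TypeError, because
// `this` is undefined inside the function callback.
```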
--- docs/sources/installation/configuration.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/sources/installation/configuration.md b/docs/sources/installation/configuration.md index e3db7a1d60b..2a799b044b3 100644 --- a/docs/sources/installation/configuration.md +++ b/docs/sources/installation/configuration.md @@ -15,6 +15,8 @@ weight = 1 The Grafana back-end has a number of configuration options that can be specified in a `.ini` configuration file or specified using environment variables. +> **Note.** Grafana needs to be restarted for any configuration changes to take effect. + ## Comments In .ini Files Semicolons (the `;` char) are the standard way to comment out lines in a `.ini` file. From 93e73919e814b6d583aa1f3666c22cf922faaa55 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 24 Jul 2018 11:03:46 +0200 Subject: [PATCH 069/105] fix code style --- pkg/tsdb/postgres/postgres.go | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/pkg/tsdb/postgres/postgres.go b/pkg/tsdb/postgres/postgres.go index 5ca333fe633..f19e4fb54f4 100644 --- a/pkg/tsdb/postgres/postgres.go +++ b/pkg/tsdb/postgres/postgres.go @@ -53,10 +53,12 @@ func generateConnectionString(datasource *models.DataSource) string { } sslmode := datasource.JsonData.Get("sslmode").MustString("verify-full") - u := &url.URL{Scheme: "postgres", - User: url.UserPassword(datasource.User, password), - Host: datasource.Url, Path: datasource.Database, - RawQuery: "sslmode=" + url.QueryEscape(sslmode)} + u := &url.URL{ + Scheme: "postgres", + User: url.UserPassword(datasource.User, password), + Host: datasource.Url, Path: datasource.Database, + RawQuery: "sslmode=" + url.QueryEscape(sslmode), + } return u.String() } From 35efb7c225ae35758ab1826e7ad0012f5ddf46a8 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 24 Jul 2018 11:26:09 +0200 Subject: [PATCH 070/105] changelog: add notes about closing #12644 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 58570c89c18..160aab9b91a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -17,6 +17,7 @@ * **Variables**: Skip unneeded extra query request when de-selecting variable values used for repeated panels [#8186](https://github.com/grafana/grafana/issues/8186), thx [@mtanda](https://github.com/mtanda) * **Postgres/MySQL/MSSQL**: Use floor rounding in $__timeGroup macro function [#12460](https://github.com/grafana/grafana/issues/12460), thx [@svenklemm](https://github.com/svenklemm) * **MySQL/MSSQL**: Use datetime format instead of epoch for $__timeFilter, $__timeFrom and $__timeTo macros [#11618](https://github.com/grafana/grafana/issues/11618) [#11619](https://github.com/grafana/grafana/issues/11619), thx [@AustinWinstanley](https://github.com/AustinWinstanley) +* **Postgres**: Escape ssl mode parameter in connectionstring [#12644](https://github.com/grafana/grafana/issues/12644), thx [@yogyrahmawan](https://github.com/yogyrahmawan) * **Github OAuth**: Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber) * **Alerting**: Fix diff and percent_diff reducers [#11563](https://github.com/grafana/grafana/issues/11563), thx [@jessetane](https://github.com/jessetane) * **Units**: Polish złoty currency [#12691](https://github.com/grafana/grafana/pull/12691), thx [@mwegrzynek](https://github.com/mwegrzynek) From 81c32780b905fa92ab874e4fac86395f0155f14a Mon Sep 17 
00:00:00 2001 From: Tobias Skarhed Date: Tue, 24 Jul 2018 11:27:53 +0200 Subject: [PATCH 071/105] Pass more tests --- .../plugins/datasource/influxdb/query_ctrl.ts | 1 - .../influxdb/specs/query_ctrl.jest.ts | 110 ++++++++++-------- 2 files changed, 60 insertions(+), 51 deletions(-) diff --git a/public/app/plugins/datasource/influxdb/query_ctrl.ts b/public/app/plugins/datasource/influxdb/query_ctrl.ts index ce669c9f458..2be1ecc7bff 100644 --- a/public/app/plugins/datasource/influxdb/query_ctrl.ts +++ b/public/app/plugins/datasource/influxdb/query_ctrl.ts @@ -22,7 +22,6 @@ export class InfluxQueryCtrl extends QueryCtrl { /** @ngInject **/ constructor($scope, $injector, private templateSrv, private $q, private uiSegmentSrv) { super($scope, $injector); - this.target = this.target; this.queryModel = new InfluxQuery(this.target, templateSrv, this.panel.scopedVars); this.queryBuilder = new InfluxQueryBuilder(this.target, this.datasource.database); diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts index c3b8d3ae20d..139efbc3afa 100644 --- a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts +++ b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts @@ -4,29 +4,28 @@ import 'app/core/services/segment_srv'; // import helpers from 'test/specs/helpers'; import { InfluxQueryCtrl } from '../query_ctrl'; -describe('InfluxDBQueryCtrl', function() { +describe('InfluxDBQueryCtrl', () => { let uiSegmentSrv = { newPlusButton: () => {}, + newKey: key => key, + newKeyValue: key => key, + newSegment: seg => seg, + newSelectMeasurement: () => { + return { value: 'select measurement' }; + }, + newOperator: op => op, + newFake: () => {}, }; let ctx = { - dataSource: { - metricFindQuery: jest.fn(() => Promise.resolve([])), - }, - }; - - InfluxQueryCtrl.prototype.panelCtrl = { - target: { target: {} }, - panel: { - targets: [this.target], - }, + dataSource: {}, }; // beforeEach(angularMocks.module('grafana.core')); // beforeEach(angularMocks.module('grafana.controllers')); // beforeEach(angularMocks.module('grafana.services')); // beforeEach( - // angularMocks.module(function($compileProvider) { + // angularMocks.module(($ =>compileProvider) { // $compileProvider.preAssignBindingsEnabled(true); // }) // ); @@ -56,147 +55,158 @@ describe('InfluxDBQueryCtrl', function() { // }) // ); - beforeEach(() => { - ctx.ctrl = new InfluxQueryCtrl({}, {}, {}, {}, uiSegmentSrv); + beforeEach(async () => { + InfluxQueryCtrl.prototype.datasource = { + metricFindQuery: jest.fn(() => Promise.resolve([])), + }; + InfluxQueryCtrl.prototype.panelCtrl = { + panel: { + targets: [InfluxQueryCtrl.target], + }, + }; + + InfluxQueryCtrl.prototype.target = { target: {} }; + console.log('creating new instance'); + ctx.ctrl = await new InfluxQueryCtrl({}, {}, {}, {}, uiSegmentSrv); }); - describe('init', function() { - it('should init tagSegments', function() { + describe('init', () => { + it('should init tagSegments', () => { expect(ctx.ctrl.tagSegments.length).toBe(1); }); - it('should init measurementSegment', function() { + it('should init measurementSegment', () => { expect(ctx.ctrl.measurementSegment.value).toBe('select measurement'); }); }); - describe('when first tag segment is updated', function() { - beforeEach(function() { + describe('when first tag segment is updated', () => { + beforeEach(() => { ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); }); - it('should update tag key', function() { + 
it('should update tag key', () => { expect(ctx.ctrl.target.tags[0].key).toBe('asd'); expect(ctx.ctrl.tagSegments[0].type).toBe('key'); }); - it('should add tagSegments', function() { + it('should add tagSegments', () => { expect(ctx.ctrl.tagSegments.length).toBe(3); }); }); - describe('when last tag value segment is updated', function() { - beforeEach(function() { + describe('when last tag value segment is updated', () => { + beforeEach(() => { ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); }); - it('should update tag value', function() { + it('should update tag value', () => { expect(ctx.ctrl.target.tags[0].value).toBe('server1'); }); - it('should set tag operator', function() { + it('should set tag operator', () => { expect(ctx.ctrl.target.tags[0].operator).toBe('='); }); - it('should add plus button for another filter', function() { + it('should add plus button for another filter', () => { expect(ctx.ctrl.tagSegments[3].fake).toBe(true); }); }); - describe('when last tag value segment is updated to regex', function() { - beforeEach(function() { + describe('when last tag value segment is updated to regex', () => { + beforeEach(() => { ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); ctx.ctrl.tagSegmentUpdated({ value: '/server.*/', type: 'value' }, 2); }); - it('should update operator', function() { + it('should update operator', () => { expect(ctx.ctrl.tagSegments[1].value).toBe('=~'); expect(ctx.ctrl.target.tags[0].operator).toBe('=~'); }); }); - describe('when second tag key is added', function() { - beforeEach(function() { + describe('when second tag key is added', () => { + beforeEach(() => { ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); }); - it('should update tag key', function() { + it('should update tag key', () => { expect(ctx.ctrl.target.tags[1].key).toBe('key2'); }); - it('should add AND segment', function() { + it('should add AND segment', () => { expect(ctx.ctrl.tagSegments[3].value).toBe('AND'); }); }); - describe('when condition is changed', function() { - beforeEach(function() { + describe('when condition is changed', () => { + beforeEach(() => { ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); ctx.ctrl.tagSegmentUpdated({ value: 'OR', type: 'condition' }, 3); }); - it('should update tag condition', function() { + it('should update tag condition', () => { expect(ctx.ctrl.target.tags[1].condition).toBe('OR'); }); - it('should update AND segment', function() { + it('should update AND segment', () => { expect(ctx.ctrl.tagSegments[3].value).toBe('OR'); expect(ctx.ctrl.tagSegments.length).toBe(7); }); }); - describe('when deleting first tag filter after value is selected', function() { - beforeEach(function() { + describe('when deleting first tag filter after value is selected', () => { + beforeEach(() => { ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 0); }); - it('should remove tags', function() { + it('should remove tags', () => { expect(ctx.ctrl.target.tags.length).toBe(0); }); - it('should 
remove all segment after 2 and replace with plus button', function() { + it('should remove all segment after 2 and replace with plus button', () => { expect(ctx.ctrl.tagSegments.length).toBe(1); expect(ctx.ctrl.tagSegments[0].type).toBe('plus-button'); }); }); - describe('when deleting second tag value before second tag value is complete', function() { - beforeEach(function() { + describe('when deleting second tag value before second tag value is complete', () => { + beforeEach(() => { ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); }); - it('should remove all segment after 2 and replace with plus button', function() { + it('should remove all segment after 2 and replace with plus button', () => { expect(ctx.ctrl.tagSegments.length).toBe(4); expect(ctx.ctrl.tagSegments[3].type).toBe('plus-button'); }); }); - describe('when deleting second tag value before second tag value is complete', function() { - beforeEach(function() { + describe('when deleting second tag value before second tag value is complete', () => { + beforeEach(() => { ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); }); - it('should remove all segment after 2 and replace with plus button', function() { + it('should remove all segment after 2 and replace with plus button', () => { expect(ctx.ctrl.tagSegments.length).toBe(4); expect(ctx.ctrl.tagSegments[3].type).toBe('plus-button'); }); }); - describe('when deleting second tag value after second tag filter is complete', function() { - beforeEach(function() { + describe('when deleting second tag value after second tag filter is complete', () => { + beforeEach(() => { ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); @@ -204,7 +214,7 @@ describe('InfluxDBQueryCtrl', function() { ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); }); - it('should remove all segment after 2 and replace with plus button', function() { + it('should remove all segment after 2 and replace with plus button', () => { expect(ctx.ctrl.tagSegments.length).toBe(4); expect(ctx.ctrl.tagSegments[3].type).toBe('plus-button'); }); From 987a16086bbafeccf3c07a5099e5b3ddf914102b Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Tue, 24 Jul 2018 14:34:37 +0200 Subject: [PATCH 072/105] Karma to Jest --- .../influxdb/specs/query_ctrl.jest.ts | 70 ++++--------------- 1 file changed, 14 insertions(+), 56 deletions(-) diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts index 139efbc3afa..6b929432dfa 100644 --- a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts +++ b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts @@ -1,73 +1,31 @@ import '../query_ctrl'; -import 'app/core/services/segment_srv'; +import { uiSegmentSrv } from 'app/core/services/segment_srv'; // import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; // import helpers from 'test/specs/helpers'; 
import { InfluxQueryCtrl } from '../query_ctrl'; describe('InfluxDBQueryCtrl', () => { - let uiSegmentSrv = { - newPlusButton: () => {}, - newKey: key => key, - newKeyValue: key => key, - newSegment: seg => seg, - newSelectMeasurement: () => { - return { value: 'select measurement' }; - }, - newOperator: op => op, - newFake: () => {}, - }; + let ctx = {}; - let ctx = { - dataSource: {}, - }; - - // beforeEach(angularMocks.module('grafana.core')); - // beforeEach(angularMocks.module('grafana.controllers')); - // beforeEach(angularMocks.module('grafana.services')); - // beforeEach( - // angularMocks.module(($ =>compileProvider) { - // $compileProvider.preAssignBindingsEnabled(true); - // }) - // ); - // beforeEach(ctx.providePhase()); - - // beforeEach( - // angularMocks.inject(($rootScope, $controller, $q) => { - // ctx.$q = $q; - // ctx.scope = $rootScope.$new(); - // ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([])); - // ctx.target = { target: {} }; - // ctx.panelCtrl = { - // panel: { - // targets: [ctx.target], - // }, - // }; - // ctx.panelCtrl.refresh = sinon.spy(); - // ctx.ctrl = $controller( - // InfluxQueryCtrl, - // { $scope: ctx.scope }, - // { - // panelCtrl: ctx.panelCtrl, - // target: ctx.target, - // datasource: ctx.datasource, - // } - // ); - // }) - // ); - - beforeEach(async () => { + beforeEach(() => { InfluxQueryCtrl.prototype.datasource = { - metricFindQuery: jest.fn(() => Promise.resolve([])), + metricFindQuery: () => Promise.resolve([]), }; + InfluxQueryCtrl.prototype.target = { target: {} }; InfluxQueryCtrl.prototype.panelCtrl = { panel: { - targets: [InfluxQueryCtrl.target], + targets: [InfluxQueryCtrl.prototype.target], }, + refresh: () => {}, }; - InfluxQueryCtrl.prototype.target = { target: {} }; - console.log('creating new instance'); - ctx.ctrl = await new InfluxQueryCtrl({}, {}, {}, {}, uiSegmentSrv); + ctx.ctrl = new InfluxQueryCtrl( + {}, + {}, + {}, + {}, + new uiSegmentSrv({ trustAsHtml: html => html }, { highlightVariablesAsHtml: () => {} }) + ); }); describe('init', () => { From 48ae9ec77ebbc5e3b1546a795af1f8fded555ff4 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Tue, 24 Jul 2018 14:35:37 +0200 Subject: [PATCH 073/105] Remove comments and Karm test --- .../influxdb/specs/query_ctrl.jest.ts | 2 - .../influxdb/specs/query_ctrl_specs.ts | 193 ------------------ 2 files changed, 195 deletions(-) delete mode 100644 public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts index 6b929432dfa..4e3fc47a5fd 100644 --- a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts +++ b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts @@ -1,7 +1,5 @@ import '../query_ctrl'; import { uiSegmentSrv } from 'app/core/services/segment_srv'; -// import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; -// import helpers from 'test/specs/helpers'; import { InfluxQueryCtrl } from '../query_ctrl'; describe('InfluxDBQueryCtrl', () => { diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts deleted file mode 100644 index 4daa48d6b9d..00000000000 --- a/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts +++ /dev/null @@ -1,193 +0,0 @@ -import '../query_ctrl'; -import 'app/core/services/segment_srv'; -import { describe, beforeEach, it, sinon, expect, 
angularMocks } from 'test/lib/common'; -import helpers from 'test/specs/helpers'; -import { InfluxQueryCtrl } from '../query_ctrl'; - -describe('InfluxDBQueryCtrl', function() { - var ctx = new helpers.ControllerTestContext(); - - beforeEach(angularMocks.module('grafana.core')); - beforeEach(angularMocks.module('grafana.controllers')); - beforeEach(angularMocks.module('grafana.services')); - beforeEach( - angularMocks.module(function($compileProvider) { - $compileProvider.preAssignBindingsEnabled(true); - }) - ); - beforeEach(ctx.providePhase()); - - beforeEach( - angularMocks.inject(($rootScope, $controller, $q) => { - ctx.$q = $q; - ctx.scope = $rootScope.$new(); - ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([])); - ctx.target = { target: {} }; - ctx.panelCtrl = { - panel: { - targets: [ctx.target], - }, - }; - ctx.panelCtrl.refresh = sinon.spy(); - ctx.ctrl = $controller( - InfluxQueryCtrl, - { $scope: ctx.scope }, - { - panelCtrl: ctx.panelCtrl, - target: ctx.target, - datasource: ctx.datasource, - } - ); - }) - ); - - describe('init', function() { - it('should init tagSegments', function() { - expect(ctx.ctrl.tagSegments.length).to.be(1); - }); - - it('should init measurementSegment', function() { - expect(ctx.ctrl.measurementSegment.value).to.be('select measurement'); - }); - }); - - describe('when first tag segment is updated', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - }); - - it('should update tag key', function() { - expect(ctx.ctrl.target.tags[0].key).to.be('asd'); - expect(ctx.ctrl.tagSegments[0].type).to.be('key'); - }); - - it('should add tagSegments', function() { - expect(ctx.ctrl.tagSegments.length).to.be(3); - }); - }); - - describe('when last tag value segment is updated', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - }); - - it('should update tag value', function() { - expect(ctx.ctrl.target.tags[0].value).to.be('server1'); - }); - - it('should set tag operator', function() { - expect(ctx.ctrl.target.tags[0].operator).to.be('='); - }); - - it('should add plus button for another filter', function() { - expect(ctx.ctrl.tagSegments[3].fake).to.be(true); - }); - }); - - describe('when last tag value segment is updated to regex', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: '/server.*/', type: 'value' }, 2); - }); - - it('should update operator', function() { - expect(ctx.ctrl.tagSegments[1].value).to.be('=~'); - expect(ctx.ctrl.target.tags[0].operator).to.be('=~'); - }); - }); - - describe('when second tag key is added', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - }); - - it('should update tag key', function() { - expect(ctx.ctrl.target.tags[1].key).to.be('key2'); - }); - - it('should add AND segment', function() { - expect(ctx.ctrl.tagSegments[3].value).to.be('AND'); - }); - }); - - describe('when condition is changed', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 
'key2', type: 'plus-button' }, 3); - ctx.ctrl.tagSegmentUpdated({ value: 'OR', type: 'condition' }, 3); - }); - - it('should update tag condition', function() { - expect(ctx.ctrl.target.tags[1].condition).to.be('OR'); - }); - - it('should update AND segment', function() { - expect(ctx.ctrl.tagSegments[3].value).to.be('OR'); - expect(ctx.ctrl.tagSegments.length).to.be(7); - }); - }); - - describe('when deleting first tag filter after value is selected', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 0); - }); - - it('should remove tags', function() { - expect(ctx.ctrl.target.tags.length).to.be(0); - }); - - it('should remove all segment after 2 and replace with plus button', function() { - expect(ctx.ctrl.tagSegments.length).to.be(1); - expect(ctx.ctrl.tagSegments[0].type).to.be('plus-button'); - }); - }); - - describe('when deleting second tag value before second tag value is complete', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); - }); - - it('should remove all segment after 2 and replace with plus button', function() { - expect(ctx.ctrl.tagSegments.length).to.be(4); - expect(ctx.ctrl.tagSegments[3].type).to.be('plus-button'); - }); - }); - - describe('when deleting second tag value before second tag value is complete', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); - }); - - it('should remove all segment after 2 and replace with plus button', function() { - expect(ctx.ctrl.tagSegments.length).to.be(4); - expect(ctx.ctrl.tagSegments[3].type).to.be('plus-button'); - }); - }); - - describe('when deleting second tag value after second tag filter is complete', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - ctx.ctrl.tagSegmentUpdated({ value: 'value', type: 'value' }, 6); - ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); - }); - - it('should remove all segment after 2 and replace with plus button', function() { - expect(ctx.ctrl.tagSegments.length).to.be(4); - expect(ctx.ctrl.tagSegments[3].type).to.be('plus-button'); - }); - }); -}); From c0f9c06f2163dc57424257b204e6c6c449aa0212 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Fri, 29 Jun 2018 13:37:21 +0200 Subject: [PATCH 074/105] Karma to Jest: completer --- .../{completer_specs.ts => completer.jest.ts} | 70 +++++++++---------- 1 file changed, 34 insertions(+), 36 deletions(-) rename public/app/plugins/datasource/prometheus/specs/{completer_specs.ts => completer.jest.ts} (79%) diff --git a/public/app/plugins/datasource/prometheus/specs/completer_specs.ts b/public/app/plugins/datasource/prometheus/specs/completer.jest.ts similarity index 79% rename from 
public/app/plugins/datasource/prometheus/specs/completer_specs.ts rename to public/app/plugins/datasource/prometheus/specs/completer.jest.ts index 84694834089..cb8dd8e5bd6 100644 --- a/public/app/plugins/datasource/prometheus/specs/completer_specs.ts +++ b/public/app/plugins/datasource/prometheus/specs/completer.jest.ts @@ -1,47 +1,45 @@ -import { describe, it, sinon, expect } from 'test/lib/common'; -import helpers from 'test/specs/helpers'; +//import { describe, it, sinon, expect } from 'test/lib/common'; +//import helpers from 'test/specs/helpers'; import { PromCompleter } from '../completer'; import { PrometheusDatasource } from '../datasource'; +import { BackendSrv } from 'app/core/services/backend_srv'; +jest.mock('../datasource'); +jest.mock('app/core/services/backend_srv'); describe('Prometheus editor completer', function() { - var ctx = new helpers.ServiceTestContext(); - beforeEach(ctx.providePhase(['templateSrv'])); + //beforeEach(ctx.providePhase(['templateSrv'])); function getSessionStub(data) { return { - getTokenAt: sinon.stub().returns(data.currentToken), - getTokens: sinon.stub().returns(data.tokens), - getLine: sinon.stub().returns(data.line), + getTokenAt:jest.fn(()=> (data.currentToken)), + getTokens:jest.fn(()=> (data.tokens)), + getLine:jest.fn(()=> (data.line)), }; } let editor = {}; - let datasourceStub = { - performInstantQuery: sinon - .stub() - .withArgs({ expr: '{__name__="node_cpu"' }) - .returns( - Promise.resolve({ - data: { + + let backendSrv = {} + let datasourceStub = new PrometheusDatasource({},{},backendSrv,{},{}); + + datasourceStub.performInstantQuery = jest.fn(() => Promise.resolve({ data: { - result: [ - { - metric: { - job: 'node', - instance: 'localhost:9100', + data: { + result: [ + { + metric: { + job: 'node', + instance: 'localhost:9100', + }, }, - }, - ], + ], + }, }, - }, - }) - ), - performSuggestQuery: sinon - .stub() - .withArgs('node', true) - .returns(Promise.resolve(['node_cpu'])), - }; + }) + ); + datasourceStub.performSuggestQuery = jest.fn(() => Promise.resolve(['node_cpu'])); + let templateSrv = { variables: [ @@ -62,9 +60,9 @@ describe('Prometheus editor completer', function() { }); return completer.getCompletions(editor, session, { row: 0, column: 10 }, '[', (s, res) => { - expect(res[0].caption).to.eql('$__interval'); - expect(res[0].value).to.eql('[$__interval'); - expect(res[0].meta).to.eql('range vector'); + expect(res[0].caption).toEqual('$__interval'); + expect(res[0].value).toEqual('[$__interval'); + expect(res[0].meta).toEqual('range vector'); }); }); }); @@ -93,7 +91,7 @@ describe('Prometheus editor completer', function() { }); return completer.getCompletions(editor, session, { row: 0, column: 10 }, 'j', (s, res) => { - expect(res[0].meta).to.eql('label name'); + expect(res[0].meta).toEqual('label name'); }); }); }); @@ -125,7 +123,7 @@ describe('Prometheus editor completer', function() { }); return completer.getCompletions(editor, session, { row: 0, column: 23 }, 'j', (s, res) => { - expect(res[0].meta).to.eql('label name'); + expect(res[0].meta).toEqual('label name'); }); }); }); @@ -156,7 +154,7 @@ describe('Prometheus editor completer', function() { }); return completer.getCompletions(editor, session, { row: 0, column: 15 }, 'n', (s, res) => { - expect(res[0].meta).to.eql('label value'); + expect(res[0].meta).toEqual('label value'); }); }); }); @@ -192,7 +190,7 @@ describe('Prometheus editor completer', function() { }); return completer.getCompletions(editor, session, { row: 0, column: 23 }, 'm', (s, res) => { - 
expect(res[0].meta).to.eql('label name'); + expect(res[0].meta).toEqual('label name'); }); }); }); From 49a8c2e0c138118f4e1bc3bfa37446eba596b98c Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Fri, 29 Jun 2018 13:44:11 +0200 Subject: [PATCH 075/105] Make beautiful --- .../prometheus/specs/completer.jest.ts | 38 +++++++++---------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/public/app/plugins/datasource/prometheus/specs/completer.jest.ts b/public/app/plugins/datasource/prometheus/specs/completer.jest.ts index cb8dd8e5bd6..b401cb9bf65 100644 --- a/public/app/plugins/datasource/prometheus/specs/completer.jest.ts +++ b/public/app/plugins/datasource/prometheus/specs/completer.jest.ts @@ -12,35 +12,35 @@ describe('Prometheus editor completer', function() { function getSessionStub(data) { return { - getTokenAt:jest.fn(()=> (data.currentToken)), - getTokens:jest.fn(()=> (data.tokens)), - getLine:jest.fn(()=> (data.line)), + getTokenAt: jest.fn(() => data.currentToken), + getTokens: jest.fn(() => data.tokens), + getLine: jest.fn(() => data.line), }; } let editor = {}; - let backendSrv = {} - let datasourceStub = new PrometheusDatasource({},{},backendSrv,{},{}); + let backendSrv = {}; + let datasourceStub = new PrometheusDatasource({}, {}, backendSrv, {}, {}); - datasourceStub.performInstantQuery = jest.fn(() => Promise.resolve({ - data: { - data: { - result: [ - { - metric: { - job: 'node', - instance: 'localhost:9100', - }, - }, - ], + datasourceStub.performInstantQuery = jest.fn(() => + Promise.resolve({ + data: { + data: { + result: [ + { + metric: { + job: 'node', + instance: 'localhost:9100', }, }, - }) - ); + ], + }, + }, + }) + ); datasourceStub.performSuggestQuery = jest.fn(() => Promise.resolve(['node_cpu'])); - let templateSrv = { variables: [ { From d2f81d52d4b121cbc0bc6c39527900a4c5cf2042 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Mon, 2 Jul 2018 09:43:34 +0200 Subject: [PATCH 076/105] Karma to Jest: begin influx query_ctrl --- .../influxdb/specs/query_ctrl.jest.ts | 222 ++++++++++++++++++ .../influxdb/specs/query_ctrl_specs.ts | 193 --------------- 2 files changed, 222 insertions(+), 193 deletions(-) create mode 100644 public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts delete mode 100644 public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts new file mode 100644 index 00000000000..dd6c9b4fa18 --- /dev/null +++ b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts @@ -0,0 +1,222 @@ +import '../query_ctrl'; +import 'app/core/services/segment_srv'; +import { uiSegmentSrv } from 'app/core/services/segment_srv'; +//import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; +//import helpers from 'test/specs/helpers'; +import { InfluxQueryCtrl } from '../query_ctrl'; + +describe('InfluxDBQueryCtrl', () => { + //var ctx = new helpers.ControllerTestContext(); + + // beforeEach(angularMocks.module('grafana.core')); + // beforeEach(angularMocks.module('grafana.controllers')); + // beforeEach(angularMocks.module('grafana.services')); + // beforeEach( + // angularMocks.module(($ =>compileProvider) { + // $compileProvider.preAssignBindingsEnabled(true); + // }) + // ); + // beforeEach(ctx.providePhase()); + + // beforeEach( + // angularMocks.inject(($rootScope, $controller, $q) => { + // ctx.$q = $q; + // ctx.scope = $rootScope.$new(); + // 
ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([])); + // ctx.target = { target: {} }; + // ctx.panelCtrl = { + // panel: { + // targets: [ctx.target], + // }, + // }; + // ctx.panelCtrl.refresh = sinon.spy(); + // influxQueryCtrl = $controller( + // InfluxQueryCtrl, + // { $scope: ctx.scope }, + // { + // panelCtrl: ctx.panelCtrl, + // target: ctx.target, + // datasource: ctx.datasource, + // } + // ); + // }) + // ); + + InfluxQueryCtrl.prototype.target = { target: {} }; + InfluxQueryCtrl.prototype.panelCtrl = { + refresh: jest.fn(), + panel: { + targets: InfluxQueryCtrl.prototype.target, + }, + }; + InfluxQueryCtrl.prototype.datasource = { + metricFindQuery: jest.fn(() => Promise.resolve([])), + }; + + // let uiSegmentSrv = { + // newPlusButton: jest.fn(), + // newSegment: jest.fn(), + // newSelectMeasurement: jest.fn() + // }; + let influxQueryCtrl; + + beforeEach(() => { + influxQueryCtrl = new InfluxQueryCtrl( + {}, + {}, + {}, + {}, + new uiSegmentSrv({ trustAsHtml: jest.fn() }, { highlightVariablesAsHtml: jest.fn() }) + ); + }); + describe('init', () => { + it('should init tagSegments', () => { + expect(influxQueryCtrl.tagSegments.length).toBe(1); + }); + + it('should init measurementSegment', () => { + expect(influxQueryCtrl.measurementSegment.value).toBe('select measurement'); + }); + }); + + describe('when first tag segment is updated', () => { + beforeEach(() => { + influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + }); + + it('should update tag key', () => { + expect(influxQueryCtrl.target.tags[0].key).toBe('asd'); + expect(influxQueryCtrl.tagSegments[0].type).toBe('key'); + }); + + it('should add tagSegments', () => { + console.log(influxQueryCtrl.tagSegments); + expect(influxQueryCtrl.tagSegments.length).toBe(3); + }); + }); + + describe('when last tag value segment is updated', () => { + beforeEach(() => { + influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + }); + + it('should update tag value', () => { + expect(influxQueryCtrl.target.tags[0].value).toBe('server1'); + }); + + it('should set tag operator', () => { + expect(influxQueryCtrl.target.tags[0].operator).toBe('='); + }); + + it('should add plus button for another filter', () => { + expect(influxQueryCtrl.tagSegments[3].fake).toBe(true); + }); + }); + + describe('when last tag value segment is updated to regex', () => { + beforeEach(() => { + influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + influxQueryCtrl.tagSegmentUpdated({ value: '/server.*/', type: 'value' }, 2); + }); + + it('should update operator', () => { + expect(influxQueryCtrl.tagSegments[1].value).toBe('=~'); + expect(influxQueryCtrl.target.tags[0].operator).toBe('=~'); + }); + }); + + describe('when second tag key is added', () => { + beforeEach(() => { + influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + influxQueryCtrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + }); + + it('should update tag key', () => { + expect(influxQueryCtrl.target.tags[1].key).toBe('key2'); + }); + + it('should add AND segment', () => { + expect(influxQueryCtrl.tagSegments[3].value).toBe('AND'); + }); + }); + + describe('when condition is changed', () => { + beforeEach(() => { + influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + 
influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + influxQueryCtrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + influxQueryCtrl.tagSegmentUpdated({ value: 'OR', type: 'condition' }, 3); + }); + + it('should update tag condition', () => { + expect(influxQueryCtrl.target.tags[1].condition).toBe('OR'); + }); + + it('should update AND segment', () => { + expect(influxQueryCtrl.tagSegments[3].value).toBe('OR'); + expect(influxQueryCtrl.tagSegments.length).toBe(7); + }); + }); + + describe('when deleting first tag filter after value is selected', () => { + beforeEach(() => { + influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + influxQueryCtrl.tagSegmentUpdated(influxQueryCtrl.removeTagFilterSegment, 0); + }); + + it('should remove tags', () => { + expect(influxQueryCtrl.target.tags.length).toBe(0); + }); + + it('should remove all segment after 2 and replace with plus button', () => { + expect(influxQueryCtrl.tagSegments.length).toBe(1); + expect(influxQueryCtrl.tagSegments[0].type).toBe('plus-button'); + }); + }); + + describe('when deleting second tag value before second tag value is complete', () => { + beforeEach(() => { + influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + influxQueryCtrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + influxQueryCtrl.tagSegmentUpdated(influxQueryCtrl.removeTagFilterSegment, 4); + }); + + it('should remove all segment after 2 and replace with plus button', () => { + expect(influxQueryCtrl.tagSegments.length).toBe(4); + expect(influxQueryCtrl.tagSegments[3].type).toBe('plus-button'); + }); + }); + + describe('when deleting second tag value before second tag value is complete', () => { + beforeEach(() => { + influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + influxQueryCtrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + influxQueryCtrl.tagSegmentUpdated(influxQueryCtrl.removeTagFilterSegment, 4); + }); + + it('should remove all segment after 2 and replace with plus button', () => { + expect(influxQueryCtrl.tagSegments.length).toBe(4); + expect(influxQueryCtrl.tagSegments[3].type).toBe('plus-button'); + }); + }); + + describe('when deleting second tag value after second tag filter is complete', () => { + beforeEach(() => { + influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + influxQueryCtrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + influxQueryCtrl.tagSegmentUpdated({ value: 'value', type: 'value' }, 6); + influxQueryCtrl.tagSegmentUpdated(influxQueryCtrl.removeTagFilterSegment, 4); + }); + + it('should remove all segment after 2 and replace with plus button', () => { + expect(influxQueryCtrl.tagSegments.length).toBe(4); + expect(influxQueryCtrl.tagSegments[3].type).toBe('plus-button'); + }); + }); +}); diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts deleted file mode 100644 index 4daa48d6b9d..00000000000 --- a/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts +++ /dev/null @@ -1,193 +0,0 @@ -import '../query_ctrl'; -import 
'app/core/services/segment_srv'; -import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; -import helpers from 'test/specs/helpers'; -import { InfluxQueryCtrl } from '../query_ctrl'; - -describe('InfluxDBQueryCtrl', function() { - var ctx = new helpers.ControllerTestContext(); - - beforeEach(angularMocks.module('grafana.core')); - beforeEach(angularMocks.module('grafana.controllers')); - beforeEach(angularMocks.module('grafana.services')); - beforeEach( - angularMocks.module(function($compileProvider) { - $compileProvider.preAssignBindingsEnabled(true); - }) - ); - beforeEach(ctx.providePhase()); - - beforeEach( - angularMocks.inject(($rootScope, $controller, $q) => { - ctx.$q = $q; - ctx.scope = $rootScope.$new(); - ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([])); - ctx.target = { target: {} }; - ctx.panelCtrl = { - panel: { - targets: [ctx.target], - }, - }; - ctx.panelCtrl.refresh = sinon.spy(); - ctx.ctrl = $controller( - InfluxQueryCtrl, - { $scope: ctx.scope }, - { - panelCtrl: ctx.panelCtrl, - target: ctx.target, - datasource: ctx.datasource, - } - ); - }) - ); - - describe('init', function() { - it('should init tagSegments', function() { - expect(ctx.ctrl.tagSegments.length).to.be(1); - }); - - it('should init measurementSegment', function() { - expect(ctx.ctrl.measurementSegment.value).to.be('select measurement'); - }); - }); - - describe('when first tag segment is updated', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - }); - - it('should update tag key', function() { - expect(ctx.ctrl.target.tags[0].key).to.be('asd'); - expect(ctx.ctrl.tagSegments[0].type).to.be('key'); - }); - - it('should add tagSegments', function() { - expect(ctx.ctrl.tagSegments.length).to.be(3); - }); - }); - - describe('when last tag value segment is updated', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - }); - - it('should update tag value', function() { - expect(ctx.ctrl.target.tags[0].value).to.be('server1'); - }); - - it('should set tag operator', function() { - expect(ctx.ctrl.target.tags[0].operator).to.be('='); - }); - - it('should add plus button for another filter', function() { - expect(ctx.ctrl.tagSegments[3].fake).to.be(true); - }); - }); - - describe('when last tag value segment is updated to regex', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: '/server.*/', type: 'value' }, 2); - }); - - it('should update operator', function() { - expect(ctx.ctrl.tagSegments[1].value).to.be('=~'); - expect(ctx.ctrl.target.tags[0].operator).to.be('=~'); - }); - }); - - describe('when second tag key is added', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - }); - - it('should update tag key', function() { - expect(ctx.ctrl.target.tags[1].key).to.be('key2'); - }); - - it('should add AND segment', function() { - expect(ctx.ctrl.tagSegments[3].value).to.be('AND'); - }); - }); - - describe('when condition is changed', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - 
ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - ctx.ctrl.tagSegmentUpdated({ value: 'OR', type: 'condition' }, 3); - }); - - it('should update tag condition', function() { - expect(ctx.ctrl.target.tags[1].condition).to.be('OR'); - }); - - it('should update AND segment', function() { - expect(ctx.ctrl.tagSegments[3].value).to.be('OR'); - expect(ctx.ctrl.tagSegments.length).to.be(7); - }); - }); - - describe('when deleting first tag filter after value is selected', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 0); - }); - - it('should remove tags', function() { - expect(ctx.ctrl.target.tags.length).to.be(0); - }); - - it('should remove all segment after 2 and replace with plus button', function() { - expect(ctx.ctrl.tagSegments.length).to.be(1); - expect(ctx.ctrl.tagSegments[0].type).to.be('plus-button'); - }); - }); - - describe('when deleting second tag value before second tag value is complete', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); - }); - - it('should remove all segment after 2 and replace with plus button', function() { - expect(ctx.ctrl.tagSegments.length).to.be(4); - expect(ctx.ctrl.tagSegments[3].type).to.be('plus-button'); - }); - }); - - describe('when deleting second tag value before second tag value is complete', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); - }); - - it('should remove all segment after 2 and replace with plus button', function() { - expect(ctx.ctrl.tagSegments.length).to.be(4); - expect(ctx.ctrl.tagSegments[3].type).to.be('plus-button'); - }); - }); - - describe('when deleting second tag value after second tag filter is complete', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - ctx.ctrl.tagSegmentUpdated({ value: 'value', type: 'value' }, 6); - ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); - }); - - it('should remove all segment after 2 and replace with plus button', function() { - expect(ctx.ctrl.tagSegments.length).to.be(4); - expect(ctx.ctrl.tagSegments[3].type).to.be('plus-button'); - }); - }); -}); From d6381bed7cebe7c0270bf0ddacc8333e17fb9658 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Mon, 2 Jul 2018 14:34:58 +0200 Subject: [PATCH 077/105] Test fail depending on test order --- .../plugins/datasource/influxdb/query_ctrl.ts | 2 +- .../influxdb/specs/query_ctrl.jest.ts | 4 +- .../influxdb/specs/query_ctrl_specs.ts | 195 ++++++++++++++++++ 3 files changed, 198 insertions(+), 3 deletions(-) create mode 100644 public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts diff --git 
a/public/app/plugins/datasource/influxdb/query_ctrl.ts b/public/app/plugins/datasource/influxdb/query_ctrl.ts index ce669c9f458..17449711143 100644 --- a/public/app/plugins/datasource/influxdb/query_ctrl.ts +++ b/public/app/plugins/datasource/influxdb/query_ctrl.ts @@ -338,7 +338,7 @@ export class InfluxQueryCtrl extends QueryCtrl { this.tagSegments.push(this.uiSegmentSrv.newPlusButton()); } } - + console.log(this.tagSegments); this.rebuildTargetTagConditions(); } diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts index dd6c9b4fa18..0c1ed3ed6b2 100644 --- a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts +++ b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts @@ -46,7 +46,7 @@ describe('InfluxDBQueryCtrl', () => { InfluxQueryCtrl.prototype.panelCtrl = { refresh: jest.fn(), panel: { - targets: InfluxQueryCtrl.prototype.target, + targets: [InfluxQueryCtrl.prototype.target], }, }; InfluxQueryCtrl.prototype.datasource = { @@ -69,6 +69,7 @@ describe('InfluxDBQueryCtrl', () => { new uiSegmentSrv({ trustAsHtml: jest.fn() }, { highlightVariablesAsHtml: jest.fn() }) ); }); + describe('init', () => { it('should init tagSegments', () => { expect(influxQueryCtrl.tagSegments.length).toBe(1); @@ -90,7 +91,6 @@ describe('InfluxDBQueryCtrl', () => { }); it('should add tagSegments', () => { - console.log(influxQueryCtrl.tagSegments); expect(influxQueryCtrl.tagSegments.length).toBe(3); }); }); diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts new file mode 100644 index 00000000000..151dd7ab0c6 --- /dev/null +++ b/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts @@ -0,0 +1,195 @@ +import '../query_ctrl'; +import 'app/core/services/segment_srv'; +import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; +import helpers from 'test/specs/helpers'; +import { InfluxQueryCtrl } from '../query_ctrl'; + +describe('InfluxDBQueryCtrl', function() { + var ctx = new helpers.ControllerTestContext(); + + beforeEach(angularMocks.module('grafana.core')); + beforeEach(angularMocks.module('grafana.controllers')); + beforeEach(angularMocks.module('grafana.services')); + beforeEach( + angularMocks.module(function($compileProvider) { + $compileProvider.preAssignBindingsEnabled(true); + }) + ); + beforeEach(ctx.providePhase()); + + beforeEach( + angularMocks.inject(($rootScope, $controller, $q) => { + ctx.$q = $q; + ctx.scope = $rootScope.$new(); + ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([])); + ctx.target = { target: {} }; + ctx.panelCtrl = { + panel: { + targets: [ctx.target], + }, + }; + ctx.panelCtrl.refresh = sinon.spy(); + ctx.ctrl = $controller( + InfluxQueryCtrl, + { $scope: ctx.scope }, + { + panelCtrl: ctx.panelCtrl, + target: ctx.target, + datasource: ctx.datasource, + } + ); + }) + ); + + describe('init', function() { + it('should init tagSegments', function() { + expect(ctx.ctrl.tagSegments.length).to.be(1); + }); + + it('should init measurementSegment', function() { + expect(ctx.ctrl.measurementSegment.value).to.be('select measurement'); + }); + }); + + describe('when first tag segment is updated', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + }); + + it('should update tag key', function() { + console.log(ctx.ctrl.target.tags); + 
expect(ctx.ctrl.target.tags[0].key).to.be('asd'); + expect(ctx.ctrl.tagSegments[0].type).to.be('key'); + }); + + it('should add tagSegments', function() { + console.log(ctx.ctrl.tagSegments); + expect(ctx.ctrl.tagSegments.length).to.be(3); + }); + }); + + describe('when last tag value segment is updated', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + }); + + it('should update tag value', function() { + expect(ctx.ctrl.target.tags[0].value).to.be('server1'); + }); + + it('should set tag operator', function() { + expect(ctx.ctrl.target.tags[0].operator).to.be('='); + }); + + it('should add plus button for another filter', function() { + expect(ctx.ctrl.tagSegments[3].fake).to.be(true); + }); + }); + + describe('when last tag value segment is updated to regex', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: '/server.*/', type: 'value' }, 2); + }); + + it('should update operator', function() { + expect(ctx.ctrl.tagSegments[1].value).to.be('=~'); + expect(ctx.ctrl.target.tags[0].operator).to.be('=~'); + }); + }); + + describe('when second tag key is added', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + }); + + it('should update tag key', function() { + expect(ctx.ctrl.target.tags[1].key).to.be('key2'); + }); + + it('should add AND segment', function() { + expect(ctx.ctrl.tagSegments[3].value).to.be('AND'); + }); + }); + + describe('when condition is changed', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + ctx.ctrl.tagSegmentUpdated({ value: 'OR', type: 'condition' }, 3); + }); + + it('should update tag condition', function() { + expect(ctx.ctrl.target.tags[1].condition).to.be('OR'); + }); + + it('should update AND segment', function() { + expect(ctx.ctrl.tagSegments[3].value).to.be('OR'); + expect(ctx.ctrl.tagSegments.length).to.be(7); + }); + }); + + describe('when deleting first tag filter after value is selected', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 0); + }); + + it('should remove tags', function() { + expect(ctx.ctrl.target.tags.length).to.be(0); + }); + + it('should remove all segment after 2 and replace with plus button', function() { + expect(ctx.ctrl.tagSegments.length).to.be(1); + expect(ctx.ctrl.tagSegments[0].type).to.be('plus-button'); + }); + }); + + describe('when deleting second tag value before second tag value is complete', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); + }); + + it('should remove all segment after 2 and replace with plus button', function() { + 
expect(ctx.ctrl.tagSegments.length).to.be(4); + expect(ctx.ctrl.tagSegments[3].type).to.be('plus-button'); + }); + }); + + describe('when deleting second tag value before second tag value is complete', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); + }); + + it('should remove all segment after 2 and replace with plus button', function() { + expect(ctx.ctrl.tagSegments.length).to.be(4); + expect(ctx.ctrl.tagSegments[3].type).to.be('plus-button'); + }); + }); + + describe('when deleting second tag value after second tag filter is complete', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + ctx.ctrl.tagSegmentUpdated({ value: 'value', type: 'value' }, 6); + ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); + }); + + it('should remove all segment after 2 and replace with plus button', function() { + expect(ctx.ctrl.tagSegments.length).to.be(4); + expect(ctx.ctrl.tagSegments[3].type).to.be('plus-button'); + }); + }); +}); From 51caf470f50c07fdb7f6d47d7fe022f2ebfc1ac5 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Tue, 24 Jul 2018 14:55:54 +0200 Subject: [PATCH 078/105] Remove influx qeury_ctrl jest, as it is already completed --- .../influxdb/specs/query_ctrl.jest.ts | 222 ------------------ .../prometheus/specs/completer.jest.ts | 3 - 2 files changed, 225 deletions(-) delete mode 100644 public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts deleted file mode 100644 index 0c1ed3ed6b2..00000000000 --- a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts +++ /dev/null @@ -1,222 +0,0 @@ -import '../query_ctrl'; -import 'app/core/services/segment_srv'; -import { uiSegmentSrv } from 'app/core/services/segment_srv'; -//import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; -//import helpers from 'test/specs/helpers'; -import { InfluxQueryCtrl } from '../query_ctrl'; - -describe('InfluxDBQueryCtrl', () => { - //var ctx = new helpers.ControllerTestContext(); - - // beforeEach(angularMocks.module('grafana.core')); - // beforeEach(angularMocks.module('grafana.controllers')); - // beforeEach(angularMocks.module('grafana.services')); - // beforeEach( - // angularMocks.module(($ =>compileProvider) { - // $compileProvider.preAssignBindingsEnabled(true); - // }) - // ); - // beforeEach(ctx.providePhase()); - - // beforeEach( - // angularMocks.inject(($rootScope, $controller, $q) => { - // ctx.$q = $q; - // ctx.scope = $rootScope.$new(); - // ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([])); - // ctx.target = { target: {} }; - // ctx.panelCtrl = { - // panel: { - // targets: [ctx.target], - // }, - // }; - // ctx.panelCtrl.refresh = sinon.spy(); - // influxQueryCtrl = $controller( - // InfluxQueryCtrl, - // { $scope: ctx.scope }, - // { - // panelCtrl: ctx.panelCtrl, - // target: ctx.target, - // datasource: ctx.datasource, - // } - // ); - // }) - // ); - - InfluxQueryCtrl.prototype.target = { target: {} 
}; - InfluxQueryCtrl.prototype.panelCtrl = { - refresh: jest.fn(), - panel: { - targets: [InfluxQueryCtrl.prototype.target], - }, - }; - InfluxQueryCtrl.prototype.datasource = { - metricFindQuery: jest.fn(() => Promise.resolve([])), - }; - - // let uiSegmentSrv = { - // newPlusButton: jest.fn(), - // newSegment: jest.fn(), - // newSelectMeasurement: jest.fn() - // }; - let influxQueryCtrl; - - beforeEach(() => { - influxQueryCtrl = new InfluxQueryCtrl( - {}, - {}, - {}, - {}, - new uiSegmentSrv({ trustAsHtml: jest.fn() }, { highlightVariablesAsHtml: jest.fn() }) - ); - }); - - describe('init', () => { - it('should init tagSegments', () => { - expect(influxQueryCtrl.tagSegments.length).toBe(1); - }); - - it('should init measurementSegment', () => { - expect(influxQueryCtrl.measurementSegment.value).toBe('select measurement'); - }); - }); - - describe('when first tag segment is updated', () => { - beforeEach(() => { - influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - }); - - it('should update tag key', () => { - expect(influxQueryCtrl.target.tags[0].key).toBe('asd'); - expect(influxQueryCtrl.tagSegments[0].type).toBe('key'); - }); - - it('should add tagSegments', () => { - expect(influxQueryCtrl.tagSegments.length).toBe(3); - }); - }); - - describe('when last tag value segment is updated', () => { - beforeEach(() => { - influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - }); - - it('should update tag value', () => { - expect(influxQueryCtrl.target.tags[0].value).toBe('server1'); - }); - - it('should set tag operator', () => { - expect(influxQueryCtrl.target.tags[0].operator).toBe('='); - }); - - it('should add plus button for another filter', () => { - expect(influxQueryCtrl.tagSegments[3].fake).toBe(true); - }); - }); - - describe('when last tag value segment is updated to regex', () => { - beforeEach(() => { - influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - influxQueryCtrl.tagSegmentUpdated({ value: '/server.*/', type: 'value' }, 2); - }); - - it('should update operator', () => { - expect(influxQueryCtrl.tagSegments[1].value).toBe('=~'); - expect(influxQueryCtrl.target.tags[0].operator).toBe('=~'); - }); - }); - - describe('when second tag key is added', () => { - beforeEach(() => { - influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - influxQueryCtrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - }); - - it('should update tag key', () => { - expect(influxQueryCtrl.target.tags[1].key).toBe('key2'); - }); - - it('should add AND segment', () => { - expect(influxQueryCtrl.tagSegments[3].value).toBe('AND'); - }); - }); - - describe('when condition is changed', () => { - beforeEach(() => { - influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - influxQueryCtrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - influxQueryCtrl.tagSegmentUpdated({ value: 'OR', type: 'condition' }, 3); - }); - - it('should update tag condition', () => { - expect(influxQueryCtrl.target.tags[1].condition).toBe('OR'); - }); - - it('should update AND segment', () => { - expect(influxQueryCtrl.tagSegments[3].value).toBe('OR'); - expect(influxQueryCtrl.tagSegments.length).toBe(7); - }); - }); - - describe('when deleting first 
tag filter after value is selected', () => { - beforeEach(() => { - influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - influxQueryCtrl.tagSegmentUpdated(influxQueryCtrl.removeTagFilterSegment, 0); - }); - - it('should remove tags', () => { - expect(influxQueryCtrl.target.tags.length).toBe(0); - }); - - it('should remove all segment after 2 and replace with plus button', () => { - expect(influxQueryCtrl.tagSegments.length).toBe(1); - expect(influxQueryCtrl.tagSegments[0].type).toBe('plus-button'); - }); - }); - - describe('when deleting second tag value before second tag value is complete', () => { - beforeEach(() => { - influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - influxQueryCtrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - influxQueryCtrl.tagSegmentUpdated(influxQueryCtrl.removeTagFilterSegment, 4); - }); - - it('should remove all segment after 2 and replace with plus button', () => { - expect(influxQueryCtrl.tagSegments.length).toBe(4); - expect(influxQueryCtrl.tagSegments[3].type).toBe('plus-button'); - }); - }); - - describe('when deleting second tag value before second tag value is complete', () => { - beforeEach(() => { - influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - influxQueryCtrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - influxQueryCtrl.tagSegmentUpdated(influxQueryCtrl.removeTagFilterSegment, 4); - }); - - it('should remove all segment after 2 and replace with plus button', () => { - expect(influxQueryCtrl.tagSegments.length).toBe(4); - expect(influxQueryCtrl.tagSegments[3].type).toBe('plus-button'); - }); - }); - - describe('when deleting second tag value after second tag filter is complete', () => { - beforeEach(() => { - influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - influxQueryCtrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - influxQueryCtrl.tagSegmentUpdated({ value: 'value', type: 'value' }, 6); - influxQueryCtrl.tagSegmentUpdated(influxQueryCtrl.removeTagFilterSegment, 4); - }); - - it('should remove all segment after 2 and replace with plus button', () => { - expect(influxQueryCtrl.tagSegments.length).toBe(4); - expect(influxQueryCtrl.tagSegments[3].type).toBe('plus-button'); - }); - }); -}); diff --git a/public/app/plugins/datasource/prometheus/specs/completer.jest.ts b/public/app/plugins/datasource/prometheus/specs/completer.jest.ts index b401cb9bf65..fbe2dce0ce5 100644 --- a/public/app/plugins/datasource/prometheus/specs/completer.jest.ts +++ b/public/app/plugins/datasource/prometheus/specs/completer.jest.ts @@ -1,6 +1,3 @@ -//import { describe, it, sinon, expect } from 'test/lib/common'; -//import helpers from 'test/specs/helpers'; - import { PromCompleter } from '../completer'; import { PrometheusDatasource } from '../datasource'; import { BackendSrv } from 'app/core/services/backend_srv'; From b81621b6f5019e12893fdddde32b7850aabbad61 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 24 Jul 2018 15:24:44 +0200 Subject: [PATCH 079/105] changelog: add notes about closing #12636 #9827 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md 
b/CHANGELOG.md index 160aab9b91a..4917c5998d0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -28,6 +28,7 @@ * **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379) * **Dashboard**: Dashboard links not updated when changing variables [#12506](https://github.com/grafana/grafana/issues/12506) +* **Postgres/MySQL/MSSQL**: Fix connection leak [#12636](https://github.com/grafana/grafana/issues/12636) [#9827](https://github.com/grafana/grafana/issues/9827) # 5.2.1 (2018-06-29) From 3dab4e1b52c1a4e7712abd5c20da14a4736b8ca4 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 24 Jul 2018 15:27:13 +0200 Subject: [PATCH 080/105] changelog: add notes about closing #12589 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4917c5998d0..826507e1bd6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -29,6 +29,7 @@ * **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379) * **Dashboard**: Dashboard links not updated when changing variables [#12506](https://github.com/grafana/grafana/issues/12506) * **Postgres/MySQL/MSSQL**: Fix connection leak [#12636](https://github.com/grafana/grafana/issues/12636) [#9827](https://github.com/grafana/grafana/issues/9827) +* **Dashboard**: Remove unwanted scrollbars in embedded panels [#12589](https://github.com/grafana/grafana/issues/12589) # 5.2.1 (2018-06-29) From 25c8233523d317a378f628258b86d88686b1a744 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Wed, 4 Jul 2018 09:22:39 +0200 Subject: [PATCH 081/105] Begin conversion --- ...query_ctrl_specs.ts => query_ctrl.jest.ts} | 89 +++++++++++-------- 1 file changed, 50 insertions(+), 39 deletions(-) rename public/app/plugins/datasource/graphite/specs/{query_ctrl_specs.ts => query_ctrl.jest.ts} (84%) diff --git a/public/app/plugins/datasource/graphite/specs/query_ctrl_specs.ts b/public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts similarity index 84% rename from public/app/plugins/datasource/graphite/specs/query_ctrl_specs.ts rename to public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts index b4f7718930f..776dec0a1a7 100644 --- a/public/app/plugins/datasource/graphite/specs/query_ctrl_specs.ts +++ b/public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts @@ -6,48 +6,59 @@ import helpers from 'test/specs/helpers'; import { GraphiteQueryCtrl } from '../query_ctrl'; describe('GraphiteQueryCtrl', function() { - var ctx = new helpers.ControllerTestContext(); + + let datasource = { + metricFindQuery: jest.fn(() => Promise.resolve([])), + getFuncDefs: jest.fn(() => Promise.resolve(gfunc.getFuncDefs('1.0'))), + getFuncDef: gfunc.getFuncDef, + waitForFuncDefsLoaded: jest.fn(() => Promise.resolve(null)), + createFuncInstance: gfunc.createFuncInstance, + + }; + let ctx = { - beforeEach(angularMocks.module('grafana.core')); - beforeEach(angularMocks.module('grafana.controllers')); - beforeEach(angularMocks.module('grafana.services')); - beforeEach( - angularMocks.module(function($compileProvider) { - $compileProvider.preAssignBindingsEnabled(true); - }) - ); + }; - beforeEach(ctx.providePhase()); - beforeEach( - angularMocks.inject(($rootScope, $controller, $q) => { - ctx.$q = $q; - ctx.scope = $rootScope.$new(); - ctx.target = { target: 'aliasByNode(scaleToSeconds(test.prod.*,1),2)' }; - ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([])); - 
ctx.datasource.getFuncDefs = sinon.stub().returns(ctx.$q.when(gfunc.getFuncDefs('1.0'))); - ctx.datasource.getFuncDef = gfunc.getFuncDef; - ctx.datasource.waitForFuncDefsLoaded = sinon.stub().returns(ctx.$q.when(null)); - ctx.datasource.createFuncInstance = gfunc.createFuncInstance; - ctx.panelCtrl = { panel: {} }; - ctx.panelCtrl = { - panel: { - targets: [ctx.target], - }, - }; - ctx.panelCtrl.refresh = sinon.spy(); + // beforeEach(angularMocks.module('grafana.core')); + // beforeEach(angularMocks.module('grafana.controllers')); + // beforeEach(angularMocks.module('grafana.services')); + // beforeEach( + // angularMocks.module(function($compileProvider) { + // $compileProvider.preAssignBindingsEnabled(true); + // }) + // ); - ctx.ctrl = $controller( - GraphiteQueryCtrl, - { $scope: ctx.scope }, - { - panelCtrl: ctx.panelCtrl, - datasource: ctx.datasource, - target: ctx.target, - } - ); - ctx.scope.$digest(); - }) - ); + //beforeEach(ctx.providePhase()); + // beforeEach( + // angularMocks.inject(($rootScope, $controller, $q) => { + // ctx.$q = $q; + // ctx.scope = $rootScope.$new(); + // ctx.target = { target: 'aliasByNode(scaleToSeconds(test.prod.*,1),2)' }; + // ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([])); + // ctx.datasource.getFuncDefs = sinon.stub().returns(ctx.$q.when(gfunc.getFuncDefs('1.0'))); + // ctx.datasource.getFuncDef = gfunc.getFuncDef; + // ctx.datasource.waitForFuncDefsLoaded = sinon.stub().returns(ctx.$q.when(null)); + // ctx.datasource.createFuncInstance = gfunc.createFuncInstance; + // ctx.panelCtrl = { panel: {} }; + // ctx.panelCtrl = { + // panel: { + // targets: [ctx.target], + // }, + // }; + // ctx.panelCtrl.refresh = sinon.spy(); + + // ctx.ctrl = $controller( + // GraphiteQueryCtrl, + // { $scope: ctx.scope }, + // { + // panelCtrl: ctx.panelCtrl, + // datasource: ctx.datasource, + // target: ctx.target, + // } + // ); + // ctx.scope.$digest(); + // }) + // ); describe('init', function() { it('should validate metric key exists', function() { From b58a7642dc6b3be313a30be95b455fd6141f8da9 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Tue, 24 Jul 2018 15:39:56 +0200 Subject: [PATCH 082/105] Karma to Jest --- .../graphite/specs/query_ctrl.jest.ts | 271 ++++++++++-------- 1 file changed, 145 insertions(+), 126 deletions(-) diff --git a/public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts b/public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts index 776dec0a1a7..58cefeef6f6 100644 --- a/public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts +++ b/public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts @@ -1,22 +1,27 @@ -import 'app/core/services/segment_srv'; -import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; +import { uiSegmentSrv } from 'app/core/services/segment_srv'; +// import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; import gfunc from '../gfunc'; -import helpers from 'test/specs/helpers'; +// import helpers from 'test/specs/helpers'; import { GraphiteQueryCtrl } from '../query_ctrl'; -describe('GraphiteQueryCtrl', function() { - - let datasource = { - metricFindQuery: jest.fn(() => Promise.resolve([])), - getFuncDefs: jest.fn(() => Promise.resolve(gfunc.getFuncDefs('1.0'))), - getFuncDef: gfunc.getFuncDef, - waitForFuncDefsLoaded: jest.fn(() => Promise.resolve(null)), - createFuncInstance: gfunc.createFuncInstance, - +describe('GraphiteQueryCtrl', () => { + let ctx = { + datasource: { + metricFindQuery: jest.fn(() 
=> Promise.resolve([])), + getFuncDefs: jest.fn(() => Promise.resolve(gfunc.getFuncDefs('1.0'))), + getFuncDef: gfunc.getFuncDef, + waitForFuncDefsLoaded: jest.fn(() => Promise.resolve(null)), + createFuncInstance: gfunc.createFuncInstance, + }, + target: { target: 'aliasByNode(scaleToSeconds(test.prod.*,1),2)' }, + panelCtrl: { + refresh: jest.fn(), + }, }; - let ctx = { + ctx.panelCtrl.panel = { + targets: [ctx.target], }; // beforeEach(angularMocks.module('grafana.core')); @@ -60,156 +65,170 @@ describe('GraphiteQueryCtrl', function() { // }) // ); - describe('init', function() { - it('should validate metric key exists', function() { - expect(ctx.datasource.metricFindQuery.getCall(0).args[0]).to.be('test.prod.*'); + beforeEach(() => { + GraphiteQueryCtrl.prototype.target = ctx.target; + GraphiteQueryCtrl.prototype.datasource = ctx.datasource; + + GraphiteQueryCtrl.prototype.panelCtrl = ctx.panelCtrl; + + ctx.ctrl = new GraphiteQueryCtrl( + {}, + {}, + new uiSegmentSrv({ trustAsHtml: html => html }, { highlightVariablesAsHtml: () => {} }), + {}, + {} + ); + }); + + describe('init', () => { + it('should validate metric key exists', () => { + expect(ctx.datasource.metricFindQuery.mock.calls[0][0]).toBe('test.prod.*'); }); - it('should delete last segment if no metrics are found', function() { - expect(ctx.ctrl.segments[2].value).to.be('select metric'); + it('should delete last segment if no metrics are found', () => { + expect(ctx.ctrl.segments[2].value).toBe('select metric'); }); - it('should parse expression and build function model', function() { - expect(ctx.ctrl.queryModel.functions.length).to.be(2); + it('should parse expression and build function model', () => { + expect(ctx.ctrl.queryModel.functions.length).toBe(2); }); }); - describe('when adding function', function() { - beforeEach(function() { + describe('when adding function', () => { + beforeEach(() => { ctx.ctrl.target.target = 'test.prod.*.count'; - ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); ctx.ctrl.parseTarget(); ctx.ctrl.addFunction(gfunc.getFuncDef('aliasByNode')); }); - it('should add function with correct node number', function() { - expect(ctx.ctrl.queryModel.functions[0].params[0]).to.be(2); + it('should add function with correct node number', () => { + expect(ctx.ctrl.queryModel.functions[0].params[0]).toBe(2); }); - it('should update target', function() { - expect(ctx.ctrl.target.target).to.be('aliasByNode(test.prod.*.count, 2)'); + it('should update target', () => { + expect(ctx.ctrl.target.target).toBe('aliasByNode(test.prod.*.count, 2)'); }); - it('should call refresh', function() { - expect(ctx.panelCtrl.refresh.called).to.be(true); + it('should call refresh', () => { + expect(ctx.panelCtrl.refresh).toHaveBeenCalled(); }); }); - describe('when adding function before any metric segment', function() { - beforeEach(function() { + describe('when adding function before any metric segment', () => { + beforeEach(() => { ctx.ctrl.target.target = ''; - ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([{ expandable: true }])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: true }]); ctx.ctrl.parseTarget(); ctx.ctrl.addFunction(gfunc.getFuncDef('asPercent')); }); - it('should add function and remove select metric link', function() { - expect(ctx.ctrl.segments.length).to.be(0); + it('should add function and remove select metric link', () => { + 
expect(ctx.ctrl.segments.length).toBe(0); }); }); - describe('when initializing target without metric expression and only function', function() { - beforeEach(function() { + describe('when initializing target without metric expression and only function', () => { + beforeEach(() => { ctx.ctrl.target.target = 'asPercent(#A, #B)'; - ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([]); ctx.ctrl.parseTarget(); - ctx.scope.$digest(); }); - it('should not add select metric segment', function() { - expect(ctx.ctrl.segments.length).to.be(1); + it('should not add select metric segment', () => { + expect(ctx.ctrl.segments.length).toBe(1); }); - it('should add second series ref as param', function() { - expect(ctx.ctrl.queryModel.functions[0].params.length).to.be(1); + it('should add second series ref as param', () => { + expect(ctx.ctrl.queryModel.functions[0].params.length).toBe(1); }); }); - describe('when initializing a target with single param func using variable', function() { - beforeEach(function() { + describe('when initializing a target with single param func using variable', () => { + beforeEach(() => { ctx.ctrl.target.target = 'movingAverage(prod.count, $var)'; - ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([]); ctx.ctrl.parseTarget(); }); - it('should add 2 segments', function() { - expect(ctx.ctrl.segments.length).to.be(2); + it('should add 2 segments', () => { + expect(ctx.ctrl.segments.length).toBe(2); }); - it('should add function param', function() { - expect(ctx.ctrl.queryModel.functions[0].params.length).to.be(1); + it('should add function param', () => { + expect(ctx.ctrl.queryModel.functions[0].params.length).toBe(1); }); }); - describe('when initializing target without metric expression and function with series-ref', function() { - beforeEach(function() { + describe('when initializing target without metric expression and function with series-ref', () => { + beforeEach(() => { ctx.ctrl.target.target = 'asPercent(metric.node.count, #A)'; - ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([]); ctx.ctrl.parseTarget(); }); - it('should add segments', function() { - expect(ctx.ctrl.segments.length).to.be(3); + it('should add segments', () => { + expect(ctx.ctrl.segments.length).toBe(3); }); - it('should have correct func params', function() { - expect(ctx.ctrl.queryModel.functions[0].params.length).to.be(1); + it('should have correct func params', () => { + expect(ctx.ctrl.queryModel.functions[0].params.length).toBe(1); }); }); - describe('when getting altSegments and metricFindQuery returns empty array', function() { - beforeEach(function() { + describe('when getting altSegments and metricFindQuery returns empty array', () => { + beforeEach(() => { ctx.ctrl.target.target = 'test.count'; - ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([]); ctx.ctrl.parseTarget(); ctx.ctrl.getAltSegments(1).then(function(results) { ctx.altSegments = results; }); - ctx.scope.$digest(); }); - it('should have no segments', function() { - expect(ctx.altSegments.length).to.be(0); + it('should have no segments', () => { + expect(ctx.altSegments.length).toBe(0); }); }); - describe('targetChanged', function() { - beforeEach(function() { - ctx.ctrl.datasource.metricFindQuery = 
sinon.stub().returns(ctx.$q.when([{ expandable: false }])); + describe('targetChanged', () => { + beforeEach(() => { + ctx.ctrl.target.target = 'aliasByNode(scaleToSeconds(test.prod.*, 1), 2)'; + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); ctx.ctrl.parseTarget(); ctx.ctrl.target.target = ''; ctx.ctrl.targetChanged(); }); - it('should rebuld target after expression model', function() { - expect(ctx.ctrl.target.target).to.be('aliasByNode(scaleToSeconds(test.prod.*, 1), 2)'); + it('should rebuild target after expression model', () => { + expect(ctx.ctrl.target.target).toBe('aliasByNode(scaleToSeconds(test.prod.*, 1), 2)'); }); - it('should call panelCtrl.refresh', function() { - expect(ctx.panelCtrl.refresh.called).to.be(true); + it('should call panelCtrl.refresh', () => { + expect(ctx.panelCtrl.refresh).toHaveBeenCalled(); }); }); - describe('when updating targets with nested query', function() { - beforeEach(function() { + describe('when updating targets with nested query', () => { + beforeEach(() => { ctx.ctrl.target.target = 'scaleToSeconds(#A, 60)'; - ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); ctx.ctrl.parseTarget(); }); - it('should add function params', function() { - expect(ctx.ctrl.queryModel.segments.length).to.be(1); - expect(ctx.ctrl.queryModel.segments[0].value).to.be('#A'); + it('should add function params', () => { + expect(ctx.ctrl.queryModel.segments.length).toBe(1); + expect(ctx.ctrl.queryModel.segments[0].value).toBe('#A'); - expect(ctx.ctrl.queryModel.functions[0].params.length).to.be(1); - expect(ctx.ctrl.queryModel.functions[0].params[0]).to.be(60); + expect(ctx.ctrl.queryModel.functions[0].params.length).toBe(1); + expect(ctx.ctrl.queryModel.functions[0].params[0]).toBe(60); }); - it('target should remain the same', function() { - expect(ctx.ctrl.target.target).to.be('scaleToSeconds(#A, 60)'); + it('target should remain the same', () => { + expect(ctx.ctrl.target.target).toBe('scaleToSeconds(#A, 60)'); }); - it('targetFull should include nested queries', function() { + it('targetFull should include nested queries', () => { ctx.ctrl.panelCtrl.panel.targets = [ { target: 'nested.query.count', @@ -219,17 +238,17 @@ describe('GraphiteQueryCtrl', function() { ctx.ctrl.updateModelTarget(); - expect(ctx.ctrl.target.target).to.be('scaleToSeconds(#A, 60)'); + expect(ctx.ctrl.target.target).toBe('scaleToSeconds(#A, 60)'); - expect(ctx.ctrl.target.targetFull).to.be('scaleToSeconds(nested.query.count, 60)'); + expect(ctx.ctrl.target.targetFull).toBe('scaleToSeconds(nested.query.count, 60)'); }); }); - describe('when updating target used in other query', function() { - beforeEach(function() { + describe('when updating target used in other query', () => { + beforeEach(() => { ctx.ctrl.target.target = 'metrics.a.count'; ctx.ctrl.target.refId = 'A'; - ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); ctx.ctrl.parseTarget(); ctx.ctrl.panelCtrl.panel.targets = [ctx.ctrl.target, { target: 'sumSeries(#A)', refId: 'B' }]; @@ -237,113 +256,113 @@ describe('GraphiteQueryCtrl', function() { ctx.ctrl.updateModelTarget(); }); - it('targetFull of other query should update', function() { - expect(ctx.ctrl.panel.targets[1].targetFull).to.be('sumSeries(metrics.a.count)'); + 
it('targetFull of other query should update', () => { + expect(ctx.ctrl.panel.targets[1].targetFull).toBe('sumSeries(metrics.a.count)'); }); }); - describe('when adding seriesByTag function', function() { - beforeEach(function() { + describe('when adding seriesByTag function', () => { + beforeEach(() => { ctx.ctrl.target.target = ''; - ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); ctx.ctrl.parseTarget(); ctx.ctrl.addFunction(gfunc.getFuncDef('seriesByTag')); }); - it('should update functions', function() { - expect(ctx.ctrl.queryModel.getSeriesByTagFuncIndex()).to.be(0); + it('should update functions', () => { + expect(ctx.ctrl.queryModel.getSeriesByTagFuncIndex()).toBe(0); }); - it('should update seriesByTagUsed flag', function() { - expect(ctx.ctrl.queryModel.seriesByTagUsed).to.be(true); + it('should update seriesByTagUsed flag', () => { + expect(ctx.ctrl.queryModel.seriesByTagUsed).toBe(true); }); - it('should update target', function() { - expect(ctx.ctrl.target.target).to.be('seriesByTag()'); + it('should update target', () => { + expect(ctx.ctrl.target.target).toBe('seriesByTag()'); }); - it('should call refresh', function() { - expect(ctx.panelCtrl.refresh.called).to.be(true); + it('should call refresh', () => { + expect(ctx.panelCtrl.refresh).toHaveBeenCalled(); }); }); - describe('when parsing seriesByTag function', function() { - beforeEach(function() { + describe('when parsing seriesByTag function', () => { + beforeEach(() => { ctx.ctrl.target.target = "seriesByTag('tag1=value1', 'tag2!=~value2')"; - ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); ctx.ctrl.parseTarget(); }); - it('should add tags', function() { + it('should add tags', () => { const expected = [ { key: 'tag1', operator: '=', value: 'value1' }, { key: 'tag2', operator: '!=~', value: 'value2' }, ]; - expect(ctx.ctrl.queryModel.tags).to.eql(expected); + expect(ctx.ctrl.queryModel.tags).toEqual(expected); }); - it('should add plus button', function() { - expect(ctx.ctrl.addTagSegments.length).to.be(1); + it('should add plus button', () => { + expect(ctx.ctrl.addTagSegments.length).toBe(1); }); }); - describe('when tag added', function() { - beforeEach(function() { + describe('when tag added', () => { + beforeEach(() => { ctx.ctrl.target.target = 'seriesByTag()'; - ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); ctx.ctrl.parseTarget(); ctx.ctrl.addNewTag({ value: 'tag1' }); }); - it('should update tags with default value', function() { + it('should update tags with default value', () => { const expected = [{ key: 'tag1', operator: '=', value: '' }]; - expect(ctx.ctrl.queryModel.tags).to.eql(expected); + expect(ctx.ctrl.queryModel.tags).toEqual(expected); }); - it('should update target', function() { + it('should update target', () => { const expected = "seriesByTag('tag1=')"; - expect(ctx.ctrl.target.target).to.eql(expected); + expect(ctx.ctrl.target.target).toEqual(expected); }); }); - describe('when tag changed', function() { - beforeEach(function() { + describe('when tag changed', () => { + beforeEach(() => { ctx.ctrl.target.target = "seriesByTag('tag1=value1', 'tag2!=~value2')"; - 
ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); ctx.ctrl.parseTarget(); ctx.ctrl.tagChanged({ key: 'tag1', operator: '=', value: 'new_value' }, 0); }); - it('should update tags', function() { + it('should update tags', () => { const expected = [ { key: 'tag1', operator: '=', value: 'new_value' }, { key: 'tag2', operator: '!=~', value: 'value2' }, ]; - expect(ctx.ctrl.queryModel.tags).to.eql(expected); + expect(ctx.ctrl.queryModel.tags).toEqual(expected); }); - it('should update target', function() { + it('should update target', () => { const expected = "seriesByTag('tag1=new_value', 'tag2!=~value2')"; - expect(ctx.ctrl.target.target).to.eql(expected); + expect(ctx.ctrl.target.target).toEqual(expected); }); }); - describe('when tag removed', function() { - beforeEach(function() { + describe('when tag removed', () => { + beforeEach(() => { ctx.ctrl.target.target = "seriesByTag('tag1=value1', 'tag2!=~value2')"; - ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); ctx.ctrl.parseTarget(); ctx.ctrl.removeTag(0); }); - it('should update tags', function() { + it('should update tags', () => { const expected = [{ key: 'tag2', operator: '!=~', value: 'value2' }]; - expect(ctx.ctrl.queryModel.tags).to.eql(expected); + expect(ctx.ctrl.queryModel.tags).toEqual(expected); }); - it('should update target', function() { + it('should update target', () => { const expected = "seriesByTag('tag2!=~value2')"; - expect(ctx.ctrl.target.target).to.eql(expected); + expect(ctx.ctrl.target.target).toEqual(expected); }); }); }); From 1c691ac855142222dc4549a613d52a1171487e1d Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 24 Jul 2018 15:51:34 +0200 Subject: [PATCH 083/105] changelog: add notes about closing #12533 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 826507e1bd6..0f3fb6b9d01 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -30,6 +30,7 @@ * **Dashboard**: Dashboard links not updated when changing variables [#12506](https://github.com/grafana/grafana/issues/12506) * **Postgres/MySQL/MSSQL**: Fix connection leak [#12636](https://github.com/grafana/grafana/issues/12636) [#9827](https://github.com/grafana/grafana/issues/9827) * **Dashboard**: Remove unwanted scrollbars in embedded panels [#12589](https://github.com/grafana/grafana/issues/12589) +* **Prometheus**: Prevent error using $__interval_ms in query [#12533](https://github.com/grafana/grafana/pull/12533) # 5.2.1 (2018-06-29) From a63fca03b87193c87d6154628254998a06cf434d Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 24 Jul 2018 15:57:07 +0200 Subject: [PATCH 084/105] changelog: add notes about closing #12551 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0f3fb6b9d01..6a7d2db1c14 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -29,6 +29,7 @@ * **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379) * **Dashboard**: Dashboard links not updated when changing variables [#12506](https://github.com/grafana/grafana/issues/12506) * **Postgres/MySQL/MSSQL**: Fix connection leak [#12636](https://github.com/grafana/grafana/issues/12636) 
[#9827](https://github.com/grafana/grafana/issues/9827) +* **Plugins**: Fix loading of external plugins [#12551](https://github.com/grafana/grafana/issues/12551) * **Dashboard**: Remove unwanted scrollbars in embedded panels [#12589](https://github.com/grafana/grafana/issues/12589) * **Prometheus**: Prevent error using $__interval_ms in query [#12533](https://github.com/grafana/grafana/pull/12533) From 5de8b6c2f01cdfa0505f93e6469a38702fdd66fa Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 24 Jul 2018 16:45:36 +0200 Subject: [PATCH 085/105] changelog: add notes about closing #12489 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6a7d2db1c14..aa794b92164 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -21,6 +21,7 @@ * **Github OAuth**: Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber) * **Alerting**: Fix diff and percent_diff reducers [#11563](https://github.com/grafana/grafana/issues/11563), thx [@jessetane](https://github.com/jessetane) * **Units**: Polish złoty currency [#12691](https://github.com/grafana/grafana/pull/12691), thx [@mwegrzynek](https://github.com/mwegrzynek) +* **Cloudwatch**: Improved error handling [#12489](https://github.com/grafana/grafana/issues/12489), thx [@mtanda](https://github.com/mtanda) # 5.2.2 (unreleased) From 27c081349fb11f1ad8d304873aa9cc92a45a2027 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Tue, 24 Jul 2018 17:03:58 +0200 Subject: [PATCH 086/105] Remove old influx stuff --- public/app/plugins/datasource/influxdb/query_ctrl.ts | 2 +- .../app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/public/app/plugins/datasource/influxdb/query_ctrl.ts b/public/app/plugins/datasource/influxdb/query_ctrl.ts index 17449711143..ce669c9f458 100644 --- a/public/app/plugins/datasource/influxdb/query_ctrl.ts +++ b/public/app/plugins/datasource/influxdb/query_ctrl.ts @@ -338,7 +338,7 @@ export class InfluxQueryCtrl extends QueryCtrl { this.tagSegments.push(this.uiSegmentSrv.newPlusButton()); } } - console.log(this.tagSegments); + this.rebuildTargetTagConditions(); } diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts index 151dd7ab0c6..4daa48d6b9d 100644 --- a/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts +++ b/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts @@ -57,13 +57,11 @@ describe('InfluxDBQueryCtrl', function() { }); it('should update tag key', function() { - console.log(ctx.ctrl.target.tags); expect(ctx.ctrl.target.tags[0].key).to.be('asd'); expect(ctx.ctrl.tagSegments[0].type).to.be('key'); }); it('should add tagSegments', function() { - console.log(ctx.ctrl.tagSegments); expect(ctx.ctrl.tagSegments.length).to.be(3); }); }); From d8d748d2aa9987e93e6b8988b66d2d217be98ac0 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 24 Jul 2018 17:40:00 +0200 Subject: [PATCH 087/105] remove unneeded comment --- .../app/plugins/datasource/prometheus/specs/completer.jest.ts | 2 -- 1 file changed, 2 deletions(-) diff --git a/public/app/plugins/datasource/prometheus/specs/completer.jest.ts b/public/app/plugins/datasource/prometheus/specs/completer.jest.ts index fbe2dce0ce5..b29e4d27233 100644 --- a/public/app/plugins/datasource/prometheus/specs/completer.jest.ts 
+++ b/public/app/plugins/datasource/prometheus/specs/completer.jest.ts @@ -5,8 +5,6 @@ jest.mock('../datasource'); jest.mock('app/core/services/backend_srv'); describe('Prometheus editor completer', function() { - //beforeEach(ctx.providePhase(['templateSrv'])); - function getSessionStub(data) { return { getTokenAt: jest.fn(() => data.currentToken), From ce9b25a5ac66f0f6a8b9a2f1c91b14c184ed9143 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Tue, 24 Jul 2018 18:30:29 +0200 Subject: [PATCH 088/105] Remove comments --- .../graphite/specs/query_ctrl.jest.ts | 44 ------------------- 1 file changed, 44 deletions(-) diff --git a/public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts b/public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts index 58cefeef6f6..b38ad56427b 100644 --- a/public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts +++ b/public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts @@ -1,8 +1,5 @@ import { uiSegmentSrv } from 'app/core/services/segment_srv'; -// import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; - import gfunc from '../gfunc'; -// import helpers from 'test/specs/helpers'; import { GraphiteQueryCtrl } from '../query_ctrl'; describe('GraphiteQueryCtrl', () => { @@ -24,47 +21,6 @@ describe('GraphiteQueryCtrl', () => { targets: [ctx.target], }; - // beforeEach(angularMocks.module('grafana.core')); - // beforeEach(angularMocks.module('grafana.controllers')); - // beforeEach(angularMocks.module('grafana.services')); - // beforeEach( - // angularMocks.module(function($compileProvider) { - // $compileProvider.preAssignBindingsEnabled(true); - // }) - // ); - - //beforeEach(ctx.providePhase()); - // beforeEach( - // angularMocks.inject(($rootScope, $controller, $q) => { - // ctx.$q = $q; - // ctx.scope = $rootScope.$new(); - // ctx.target = { target: 'aliasByNode(scaleToSeconds(test.prod.*,1),2)' }; - // ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([])); - // ctx.datasource.getFuncDefs = sinon.stub().returns(ctx.$q.when(gfunc.getFuncDefs('1.0'))); - // ctx.datasource.getFuncDef = gfunc.getFuncDef; - // ctx.datasource.waitForFuncDefsLoaded = sinon.stub().returns(ctx.$q.when(null)); - // ctx.datasource.createFuncInstance = gfunc.createFuncInstance; - // ctx.panelCtrl = { panel: {} }; - // ctx.panelCtrl = { - // panel: { - // targets: [ctx.target], - // }, - // }; - // ctx.panelCtrl.refresh = sinon.spy(); - - // ctx.ctrl = $controller( - // GraphiteQueryCtrl, - // { $scope: ctx.scope }, - // { - // panelCtrl: ctx.panelCtrl, - // datasource: ctx.datasource, - // target: ctx.target, - // } - // ); - // ctx.scope.$digest(); - // }) - // ); - beforeEach(() => { GraphiteQueryCtrl.prototype.target = ctx.target; GraphiteQueryCtrl.prototype.datasource = ctx.datasource; From 1dd9646a502c8f0749ed1752b25f39111677effb Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 24 Jul 2018 19:05:09 +0200 Subject: [PATCH 089/105] fix failing test due to time diff issues --- pkg/services/sqlstore/dashboard_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/services/sqlstore/dashboard_test.go b/pkg/services/sqlstore/dashboard_test.go index 0ca1c5d67e4..8ff78c4a0ff 100644 --- a/pkg/services/sqlstore/dashboard_test.go +++ b/pkg/services/sqlstore/dashboard_test.go @@ -181,7 +181,7 @@ func TestDashboardDataAccess(t *testing.T) { So(err, ShouldBeNil) So(query.Result.FolderId, ShouldEqual, 0) So(query.Result.CreatedBy, ShouldEqual, savedDash.CreatedBy) - 
So(query.Result.Created, ShouldEqual, savedDash.Created.Truncate(time.Second)) + So(query.Result.Created, ShouldHappenWithin, 3*time.Second, savedDash.Created) So(query.Result.UpdatedBy, ShouldEqual, 100) So(query.Result.Updated.IsZero(), ShouldBeFalse) }) From 582652145fa825cfce0a85b827d70f09b2cda45e Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 24 Jul 2018 19:21:23 +0200 Subject: [PATCH 090/105] minor fixes --- docs/sources/features/datasources/prometheus.md | 6 +++++- docs/sources/reference/templating.md | 3 +++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/docs/sources/features/datasources/prometheus.md b/docs/sources/features/datasources/prometheus.md index 190220fb0f1..0ed9e108df6 100644 --- a/docs/sources/features/datasources/prometheus.md +++ b/docs/sources/features/datasources/prometheus.md @@ -76,7 +76,11 @@ Name | Description For details of *metric names*, *label names* and *label values* are please refer to the [Prometheus documentation](http://prometheus.io/docs/concepts/data_model/#metric-names-and-labels). -It is possible to use some global template variables in Prometheus query template variables; `$__interval`, `$__interval_ms`, `$__range` and `$__range_ms`, where `$__range` is the dashboard's current time range and `$__range_ms` is the current range in milliseconds. +#### Using interval and range variables + +> Support for `$__range` and `$__range_ms` only available from Grafana v5.3 + +It's possible to use some global template variables in Prometheus query template variables; `$__interval`, `$__interval_ms`, `$__range` and `$__range_ms`, where `$__range` is the dashboard's current time range and `$__range_ms` is the current range in milliseconds. ### Using variables in queries diff --git a/docs/sources/reference/templating.md b/docs/sources/reference/templating.md index 08a142d3636..ce1a1299d26 100644 --- a/docs/sources/reference/templating.md +++ b/docs/sources/reference/templating.md @@ -274,6 +274,9 @@ The `$__timeFilter` is used in the MySQL data source. This variable is only available in the Singlestat panel and can be used in the prefix or suffix fields on the Options tab. The variable will be replaced with the series name or alias. ### The $__range Variable + +> Only available in Grafana v5.3+ + Currently only supported for Prometheus data sources. This variable represents the range for the current dashboard. It is calculated by `to - from`. It has a millisecond representation called `$__range_ms`. 
## Repeating Panels From 055d208a326f08cc4ad69324f9c4c1722b35e59e Mon Sep 17 00:00:00 2001 From: Mitsuhiro Tanda Date: Wed, 25 Jul 2018 11:27:43 +0900 Subject: [PATCH 091/105] fix invalid reference --- pkg/tsdb/cloudwatch/cloudwatch.go | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/pkg/tsdb/cloudwatch/cloudwatch.go b/pkg/tsdb/cloudwatch/cloudwatch.go index 4af73fc2ba9..92352a51315 100644 --- a/pkg/tsdb/cloudwatch/cloudwatch.go +++ b/pkg/tsdb/cloudwatch/cloudwatch.go @@ -99,14 +99,15 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo continue } + RefId := queryContext.Queries[i].RefId query, err := parseQuery(queryContext.Queries[i].Model) if err != nil { - result.Results[query.RefId] = &tsdb.QueryResult{ + result.Results[RefId] = &tsdb.QueryResult{ Error: err, } return result, nil } - query.RefId = queryContext.Queries[i].RefId + query.RefId = RefId if query.Id != "" { if _, ok := getMetricDataQueries[query.Region]; !ok { From df62282c115cea465577b5f1c02077b87166255e Mon Sep 17 00:00:00 2001 From: Patrick O'Carroll Date: Wed, 25 Jul 2018 11:27:43 +0200 Subject: [PATCH 092/105] fix for typeahead background, increased lighten --- public/sass/_variables.light.scss | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/public/sass/_variables.light.scss b/public/sass/_variables.light.scss index b6e9e7db979..b6248da6a00 100644 --- a/public/sass/_variables.light.scss +++ b/public/sass/_variables.light.scss @@ -218,7 +218,7 @@ $search-filter-box-bg: $gray-7; // Typeahead $typeahead-shadow: 0 5px 10px 0 $gray-5; -$typeahead-selected-bg: lighten($blue, 25%); +$typeahead-selected-bg: lighten($blue, 57%); $typeahead-selected-color: $blue; // Dropdowns From 5fbd8ada3c55cfe8eecc57d894b6a445b76e00c9 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Wed, 25 Jul 2018 11:54:51 +0200 Subject: [PATCH 093/105] changelog: add notes about closing #12668 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index aa794b92164..27651b2216f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -33,6 +33,7 @@ * **Plugins**: Fix loading of external plugins [#12551](https://github.com/grafana/grafana/issues/12551) * **Dashboard**: Remove unwanted scrollbars in embedded panels [#12589](https://github.com/grafana/grafana/issues/12589) * **Prometheus**: Prevent error using $__interval_ms in query [#12533](https://github.com/grafana/grafana/pull/12533) +* **Table**: Adjust header contrast for the light theme [#12668](https://github.com/grafana/grafana/issues/12668) # 5.2.1 (2018-06-29) From 45762d04e392be18658df8a0ecd081a03bb09b5f Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Wed, 25 Jul 2018 11:55:34 +0200 Subject: [PATCH 094/105] changelog: update [skip ci] --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 27651b2216f..0f813272e60 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -23,7 +23,7 @@ * **Units**: Polish złoty currency [#12691](https://github.com/grafana/grafana/pull/12691), thx [@mwegrzynek](https://github.com/mwegrzynek) * **Cloudwatch**: Improved error handling [#12489](https://github.com/grafana/grafana/issues/12489), thx [@mtanda](https://github.com/mtanda) -# 5.2.2 (unreleased) +# 5.2.2 (2018-07-25) ### Minor From 9c40028d58431fcab8c3d7dddb44b2593a0c7130 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Wed, 25 Jul 2018 13:22:55 +0200 Subject: [PATCH 095/105] changelog: add notes about closing #12668 [skip ci] 
--- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0f813272e60..990421d30d3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -22,6 +22,7 @@ * **Alerting**: Fix diff and percent_diff reducers [#11563](https://github.com/grafana/grafana/issues/11563), thx [@jessetane](https://github.com/jessetane) * **Units**: Polish złoty currency [#12691](https://github.com/grafana/grafana/pull/12691), thx [@mwegrzynek](https://github.com/mwegrzynek) * **Cloudwatch**: Improved error handling [#12489](https://github.com/grafana/grafana/issues/12489), thx [@mtanda](https://github.com/mtanda) +* **Table**: Adjust header contrast for the light theme [#12668](https://github.com/grafana/grafana/issues/12668) # 5.2.2 (2018-07-25) @@ -33,7 +34,6 @@ * **Plugins**: Fix loading of external plugins [#12551](https://github.com/grafana/grafana/issues/12551) * **Dashboard**: Remove unwanted scrollbars in embedded panels [#12589](https://github.com/grafana/grafana/issues/12589) * **Prometheus**: Prevent error using $__interval_ms in query [#12533](https://github.com/grafana/grafana/pull/12533) -* **Table**: Adjust header contrast for the light theme [#12668](https://github.com/grafana/grafana/issues/12668) # 5.2.1 (2018-06-29) From 7e773e2d5e35045f87be875fa81ac2c930d1257f Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Wed, 25 Jul 2018 14:14:25 +0200 Subject: [PATCH 096/105] changelog: add notes about closing #12533 [skip ci] --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 990421d30d3..6409f094f65 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -33,7 +33,7 @@ * **Postgres/MySQL/MSSQL**: Fix connection leak [#12636](https://github.com/grafana/grafana/issues/12636) [#9827](https://github.com/grafana/grafana/issues/9827) * **Plugins**: Fix loading of external plugins [#12551](https://github.com/grafana/grafana/issues/12551) * **Dashboard**: Remove unwanted scrollbars in embedded panels [#12589](https://github.com/grafana/grafana/issues/12589) -* **Prometheus**: Prevent error using $__interval_ms in query [#12533](https://github.com/grafana/grafana/pull/12533) +* **Prometheus**: Prevent error using $__interval_ms in query [#12533](https://github.com/grafana/grafana/pull/12533), thx [@mtanda](https://github.com/mtanda) # 5.2.1 (2018-06-29) From f3504612062f2bcf43a02c985942d5b70ca52439 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Wed, 25 Jul 2018 14:52:03 +0200 Subject: [PATCH 097/105] Start conversion --- .../specs/variable_srv_init.jest.ts | 238 ++++++++++++++++++ 1 file changed, 238 insertions(+) create mode 100644 public/app/features/templating/specs/variable_srv_init.jest.ts diff --git a/public/app/features/templating/specs/variable_srv_init.jest.ts b/public/app/features/templating/specs/variable_srv_init.jest.ts new file mode 100644 index 00000000000..218170ae454 --- /dev/null +++ b/public/app/features/templating/specs/variable_srv_init.jest.ts @@ -0,0 +1,238 @@ +//import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; + +import '../all'; + +import _ from 'lodash'; +// import helpers from 'test/specs/helpers'; +// import { Emitter } from 'app/core/core'; +import { VariableSrv } from '../variable_srv'; +import $q from 'q'; + +describe('VariableSrv init', function() { + let templateSrv = { + init: () => {}, + }; + let $injector = { + instantiate: (vars, model) => { + return new vars(model.model); + }, + }; + let $rootscope = { + $on: () => {}, + 
}; + + let ctx = { + datasourceSrv: {}, + $location: {}, + dashboard: {}, + }; + + // beforeEach(angularMocks.module('grafana.core')); + // beforeEach(angularMocks.module('grafana.controllers')); + // beforeEach(angularMocks.module('grafana.services')); + // beforeEach( + // angularMocks.module(function($compileProvider) { + // $compileProvider.preAssignBindingsEnabled(true); + // }) + // ); + + // beforeEach(ctx.providePhase(['datasourceSrv', 'timeSrv', 'templateSrv', '$location'])); + // beforeEach( + // angularMocks.inject(($rootScope, $q, $location, $injector) => { + // ctx.$q = $q; + // ctx.$rootScope = $rootScope; + // ctx.$location = $location; + // ctx.variableSrv = $injector.get('variableSrv'); + // ctx.$rootScope.$digest(); + // }) + // ); + + function describeInitScenario(desc, fn) { + describe(desc, function() { + // events: new Emitter(), + var scenario: any = { + urlParams: {}, + setup: setupFn => { + scenario.setupFn = setupFn; + }, + }; + + beforeEach(function() { + scenario.setupFn(); + ctx.variableSrv = new VariableSrv($rootscope, $q, {}, $injector, templateSrv); + ctx.variableSrv.datasource = {}; + ctx.variableSrv.datasource.metricFindQuery = jest.fn(() => Promise.resolve(scenario.queryResult)); + + ctx.variableSrv.datasourceSrv = { + get: () => Promise.resolve(ctx.datasource), + getMetricSources: () => Promise.resolve(scenario.metricSources), + }; + + ctx.variableSrv.$location.search = () => Promise.resolve(scenario.urlParams); + ctx.variableSrv.dashboard = { + templating: { list: scenario.variables }, + // events: new Emitter(), + }; + + ctx.variableSrv.init(ctx.variableSrv.dashboard); + // ctx.$rootScope.$digest(); + + scenario.variables = ctx.variableSrv.variables; + }); + + fn(scenario); + }); + } + + ['query', 'interval', 'custom', 'datasource'].forEach(type => { + describeInitScenario('when setting ' + type + ' variable via url', scenario => { + scenario.setup(() => { + scenario.variables = [ + { + name: 'apps', + type: type, + current: { text: 'test', value: 'test' }, + options: [{ text: 'test', value: 'test' }], + }, + ]; + scenario.urlParams['var-apps'] = 'new'; + scenario.metricSources = []; + }); + + it('should update current value', () => { + expect(scenario.variables[0].current.value).toBe('new'); + expect(scenario.variables[0].current.text).toBe('new'); + }); + }); + }); + + describe('given dependent variables', () => { + var variableList = [ + { + name: 'app', + type: 'query', + query: '', + current: { text: 'app1', value: 'app1' }, + options: [{ text: 'app1', value: 'app1' }], + }, + { + name: 'server', + type: 'query', + refresh: 1, + query: '$app.*', + current: { text: 'server1', value: 'server1' }, + options: [{ text: 'server1', value: 'server1' }], + }, + ]; + + describeInitScenario('when setting parent var from url', scenario => { + scenario.setup(() => { + scenario.variables = _.cloneDeep(variableList); + scenario.urlParams['var-app'] = 'google'; + scenario.queryResult = [{ text: 'google-server1' }, { text: 'google-server2' }]; + }); + + it('should update child variable', () => { + expect(scenario.variables[1].options.length).toBe(2); + expect(scenario.variables[1].current.text).toBe('google-server1'); + }); + + it('should only update it once', () => { + expect(ctx.variableSrv.datasource.metricFindQuery).toHaveBeenCalledTimes(1); + }); + }); + }); + + describeInitScenario('when datasource variable is initialized', scenario => { + scenario.setup(() => { + scenario.variables = [ + { + type: 'datasource', + query: 'graphite', + name: 'test', + 
current: { value: 'backend4_pee', text: 'backend4_pee' }, + regex: '/pee$/', + }, + ]; + scenario.metricSources = [ + { name: 'backend1', meta: { id: 'influx' } }, + { name: 'backend2_pee', meta: { id: 'graphite' } }, + { name: 'backend3', meta: { id: 'graphite' } }, + { name: 'backend4_pee', meta: { id: 'graphite' } }, + ]; + }); + + it('should update current value', function() { + var variable = ctx.variableSrv.variables[0]; + expect(variable.options.length).toBe(2); + }); + }); + + describeInitScenario('when template variable is present in url multiple times', scenario => { + scenario.setup(() => { + scenario.variables = [ + { + name: 'apps', + type: 'query', + multi: true, + current: { text: 'val1', value: 'val1' }, + options: [ + { text: 'val1', value: 'val1' }, + { text: 'val2', value: 'val2' }, + { text: 'val3', value: 'val3', selected: true }, + ], + }, + ]; + scenario.urlParams['var-apps'] = ['val2', 'val1']; + }); + + it('should update current value', function() { + var variable = ctx.variableSrv.variables[0]; + expect(variable.current.value.length).toBe(2); + expect(variable.current.value[0]).toBe('val2'); + expect(variable.current.value[1]).toBe('val1'); + expect(variable.current.text).toBe('val2 + val1'); + expect(variable.options[0].selected).toBe(true); + expect(variable.options[1].selected).toBe(true); + }); + + it('should set options that are not in value to selected false', function() { + var variable = ctx.variableSrv.variables[0]; + expect(variable.options[2].selected).toBe(false); + }); + }); + + describeInitScenario('when template variable is present in url multiple times using key/values', scenario => { + scenario.setup(() => { + scenario.variables = [ + { + name: 'apps', + type: 'query', + multi: true, + current: { text: 'Val1', value: 'val1' }, + options: [ + { text: 'Val1', value: 'val1' }, + { text: 'Val2', value: 'val2' }, + { text: 'Val3', value: 'val3', selected: true }, + ], + }, + ]; + scenario.urlParams['var-apps'] = ['val2', 'val1']; + }); + + it('should update current value', function() { + var variable = ctx.variableSrv.variables[0]; + expect(variable.current.value.length).toBe(2); + expect(variable.current.value[0]).toBe('val2'); + expect(variable.current.value[1]).toBe('val1'); + expect(variable.current.text).toBe('Val2 + Val1'); + expect(variable.options[0].selected).toBe(true); + expect(variable.options[1].selected).toBe(true); + }); + + it('should set options that are not in value to selected false', function() { + var variable = ctx.variableSrv.variables[0]; + expect(variable.options[2].selected).toBe(false); + }); + }); +}); From 7d51c1524007fc47dc225e1256535c1386c07aca Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Wed, 25 Jul 2018 16:15:03 +0200 Subject: [PATCH 098/105] Two passing tests --- .../specs/variable_srv_init.jest.ts | 57 ++++++++++++++----- .../app/features/templating/variable_srv.ts | 1 + 2 files changed, 43 insertions(+), 15 deletions(-) diff --git a/public/app/features/templating/specs/variable_srv_init.jest.ts b/public/app/features/templating/specs/variable_srv_init.jest.ts index 218170ae454..519adc0a350 100644 --- a/public/app/features/templating/specs/variable_srv_init.jest.ts +++ b/public/app/features/templating/specs/variable_srv_init.jest.ts @@ -7,16 +7,18 @@ import _ from 'lodash'; // import { Emitter } from 'app/core/core'; import { VariableSrv } from '../variable_srv'; import $q from 'q'; +// import { model } from 'mobx-state-tree/dist/internal'; describe('VariableSrv init', function() { let templateSrv = { - init: () 
=> {}, - }; - let $injector = { - instantiate: (vars, model) => { - return new vars(model.model); + init: vars => { + this.variables = vars; }, + variableInitialized: () => {}, + updateTemplateData: () => {}, + replace: str => str, }; + let $injector = {}; let $rootscope = { $on: () => {}, }; @@ -57,24 +59,35 @@ describe('VariableSrv init', function() { }, }; - beforeEach(function() { + beforeEach(async () => { scenario.setupFn(); - ctx.variableSrv = new VariableSrv($rootscope, $q, {}, $injector, templateSrv); - ctx.variableSrv.datasource = {}; - ctx.variableSrv.datasource.metricFindQuery = jest.fn(() => Promise.resolve(scenario.queryResult)); - - ctx.variableSrv.datasourceSrv = { - get: () => Promise.resolve(ctx.datasource), - getMetricSources: () => Promise.resolve(scenario.metricSources), + ctx = { + datasource: { + metricFindQuery: jest.fn(() => Promise.resolve(scenario.queryResult)), + }, + datasourceSrv: { + get: () => Promise.resolve(ctx.datasource), + getMetricSources: () => Promise.resolve(scenario.metricSources), + }, + templateSrv, }; + ctx.variableSrv = new VariableSrv($rootscope, $q, {}, $injector, templateSrv); + + $injector.instantiate = (variable, model) => { + return getVarMockConstructor(variable, model, ctx); + }; + + ctx.variableSrv.datasource = ctx.datasource; + ctx.variableSrv.datasourceSrv = ctx.datasourceSrv; + ctx.variableSrv.$location.search = () => Promise.resolve(scenario.urlParams); ctx.variableSrv.dashboard = { templating: { list: scenario.variables }, - // events: new Emitter(), + // events: new Emitter(), }; - ctx.variableSrv.init(ctx.variableSrv.dashboard); + await ctx.variableSrv.init(ctx.variableSrv.dashboard); // ctx.$rootScope.$digest(); scenario.variables = ctx.variableSrv.variables; @@ -236,3 +249,17 @@ describe('VariableSrv init', function() { }); }); }); + +function getVarMockConstructor(variable, model, ctx) { + console.log(model.model.type); + switch (model.model.type) { + case 'datasource': + return new variable(model.model, ctx.datasourceSrv, ctx.templateSrv, ctx.variableSrv); + case 'query': + return new variable(model.model, ctx.datasourceSrv, ctx.templateSrv, ctx.variableSrv); + case 'interval': + return new variable(model.model, {}, ctx.templateSrv, ctx.variableSrv); + default: + return new variable(model.model); + } +} diff --git a/public/app/features/templating/variable_srv.ts b/public/app/features/templating/variable_srv.ts index 8ad3c2845e2..9f6522c9b86 100644 --- a/public/app/features/templating/variable_srv.ts +++ b/public/app/features/templating/variable_srv.ts @@ -23,6 +23,7 @@ export class VariableSrv { // init variables for (let variable of this.variables) { + console.log(variable); variable.initLock = this.$q.defer(); } From 0f99e624b680b60e00ca05f408c5b85464d7cf81 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Wed, 25 Jul 2018 16:20:00 +0200 Subject: [PATCH 099/105] docs: using interval and range variables in prometheus Included example usages --- .../features/datasources/prometheus.md | 21 ++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/docs/sources/features/datasources/prometheus.md b/docs/sources/features/datasources/prometheus.md index 0ed9e108df6..3a04ef92e31 100644 --- a/docs/sources/features/datasources/prometheus.md +++ b/docs/sources/features/datasources/prometheus.md @@ -80,7 +80,26 @@ For details of *metric names*, *label names* and *label values* are please refer > Support for `$__range` and `$__range_ms` only available from Grafana v5.3 -It's possible to use some global 
template variables in Prometheus query template variables; `$__interval`, `$__interval_ms`, `$__range` and `$__range_ms`, where `$__range` is the dashboard's current time range and `$__range_ms` is the current range in milliseconds.
+It's possible to use some global built-in variables in query variables; `$__interval`, `$__interval_ms`, `$__range` and `$__range_ms`, see [Global built-in variables](/reference/templating/#global-built-in-variables) for more information. These can be convenient to use in conjunction with the `query_result` function when you need to filter variable queries since the
+`label_values` function doesn't support queries.
+
+Make sure to set the variable's `refresh` trigger to be `On Time Range Change` to get the correct instances when changing the time range on the dashboard.
+
+**Example usage:**
+
+Populate a variable with the busiest 5 request instances based on average QPS over the time range shown in the dashboard:
+
+```
+Query: query_result(topk(5, sum(rate(http_requests_total[$__range])) by (instance)))
+Regex: /"([^"]+)"/
+```
+
+Populate a variable with the instances having a certain state over the time range shown in the dashboard:
+
+```
+Query: query_result(max_over_time(<metric_name>[$__range]) != <state_value>)
+Regex:
+```
 
 ### Using variables in queries

From 84e431d377b51405f37b4bae8321454218bcc7c4 Mon Sep 17 00:00:00 2001
From: David Kaltschmidt
Date: Wed, 25 Jul 2018 16:16:33 +0200
Subject: [PATCH 100/105] Add tslib to TS compiler

- using tslib reduces bundle sizes
- add compiler option for easier default imports of CJS modules
- remove double entry of fork-ts-checker-plugin
- speed up hot reload by using experimental ts-loader API

---
 package.json | 16 ++++----
 scripts/webpack/webpack.hot.js | 10 ++++-
 tsconfig.json | 73 +++++++++++++++++++---------------
 yarn.lock | 8 +++-
 4 files changed, 65 insertions(+), 42 deletions(-)

diff --git a/package.json b/package.json
index c26438230cc..c0581c1de43 100644
--- a/package.json
+++ b/package.json
@@ -34,7 +34,7 @@
     "expose-loader": "^0.7.3",
     "extract-text-webpack-plugin": "^4.0.0-beta.0",
     "file-loader": "^1.1.11",
-    "fork-ts-checker-webpack-plugin": "^0.4.1",
+    "fork-ts-checker-webpack-plugin": "^0.4.2",
     "gaze": "^1.1.2",
     "glob": "~7.0.0",
     "grunt": "1.0.1",
@@ -71,12 +71,14 @@
     "karma-webpack": "^3.0.0",
     "lint-staged": "^6.0.0",
     "load-grunt-tasks": "3.5.2",
+    "mini-css-extract-plugin": "^0.4.0",
     "mobx-react-devtools": "^4.2.15",
     "mocha": "^4.0.1",
     "ng-annotate-loader": "^0.6.1",
     "ng-annotate-webpack-plugin": "^0.2.1-pre",
     "ngtemplate-loader": "^2.0.1",
     "npm": "^5.4.2",
+    "optimize-css-assets-webpack-plugin": "^4.0.2",
     "phantomjs-prebuilt": "^2.1.15",
     "postcss-browser-reporter": "^0.5.0",
     "postcss-loader": "^2.0.6",
@@ -90,15 +92,16 @@
     "style-loader": "^0.21.0",
     "systemjs": "0.20.19",
     "systemjs-plugin-css": "^0.1.36",
-    "ts-loader": "^4.3.0",
     "ts-jest": "^22.4.6",
+    "ts-loader": "^4.3.0",
+    "tslib": "^1.9.3",
     "tslint": "^5.8.0",
     "tslint-loader": "^3.5.3",
     "typescript": "^2.6.2",
+    "uglifyjs-webpack-plugin": "^1.2.7",
     "webpack": "^4.8.0",
     "webpack-bundle-analyzer": "^2.9.0",
     "webpack-cleanup-plugin": "^0.5.1",
-    "fork-ts-checker-webpack-plugin": "^0.4.2",
     "webpack-cli": "^2.1.4",
     "webpack-dev-server": "^3.1.0",
     "webpack-merge": "^4.1.0",
@@ -155,14 +158,12 @@
     "immutable": "^3.8.2",
     "jquery": "^3.2.1",
     "lodash": "^4.17.10",
-    "mini-css-extract-plugin": "^0.4.0",
     "mobx": "^3.4.1",
     "mobx-react": "^4.3.5",
     "mobx-state-tree": "^1.3.1",
     "moment": "^2.22.2",
     "mousetrap": "^1.6.0",
     "mousetrap-global-bind": "^1.1.0",
-
"optimize-css-assets-webpack-plugin": "^4.0.2", "prismjs": "^1.6.0", "prop-types": "^15.6.0", "react": "^16.2.0", @@ -181,10 +182,9 @@ "slate-react": "^0.12.4", "tether": "^1.4.0", "tether-drop": "https://github.com/torkelo/drop/tarball/master", - "tinycolor2": "^1.4.1", - "uglifyjs-webpack-plugin": "^1.2.7" + "tinycolor2": "^1.4.1" }, "resolutions": { "caniuse-db": "1.0.30000772" } -} +} \ No newline at end of file diff --git a/scripts/webpack/webpack.hot.js b/scripts/webpack/webpack.hot.js index 28c8cec504d..0305a6f465c 100644 --- a/scripts/webpack/webpack.hot.js +++ b/scripts/webpack/webpack.hot.js @@ -20,6 +20,7 @@ module.exports = merge(common, { path: path.resolve(__dirname, '../../public/build'), filename: '[name].[hash].js', publicPath: "/public/build/", + pathinfo: false, }, resolve: { @@ -37,6 +38,12 @@ module.exports = merge(common, { } }, + optimization: { + removeAvailableModules: false, + removeEmptyChunks: false, + splitChunks: false, + }, + module: { rules: [ { @@ -56,7 +63,8 @@ module.exports = merge(common, { { loader: 'ts-loader', options: { - transpileOnly: true + transpileOnly: true, + experimentalWatchApi: true }, }], }, diff --git a/tsconfig.json b/tsconfig.json index 3596930a62f..3ef1dd1b769 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -1,32 +1,43 @@ { - "compilerOptions": { - "moduleResolution": "node", - "outDir": "public/dist", - "target": "es5", - "lib": ["es6", "dom"], - "rootDir": "public/", - "jsx": "react", - "module": "esnext", - "declaration": false, - "allowSyntheticDefaultImports": true, - "inlineSourceMap": false, - "sourceMap": true, - "noEmitOnError": false, - "emitDecoratorMetadata": false, - "experimentalDecorators": true, - "noImplicitReturns": true, - "noImplicitThis": false, - "noImplicitUseStrict":false, - "noImplicitAny": false, - "noUnusedLocals": true, - "baseUrl": "public", - "paths": { - "app": ["app"] - } - }, - "include": [ - "public/app/**/*.ts", - "public/app/**/*.tsx", - "public/test/**/*.ts" - ] -} + "compilerOptions": { + "moduleResolution": "node", + "outDir": "public/dist", + "target": "es5", + "lib": [ + "es6", + "dom" + ], + "rootDir": "public/", + "jsx": "react", + "module": "esnext", + "declaration": false, + "allowSyntheticDefaultImports": true, + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "importHelpers": true, // importing helper functions from tslib + "noEmitHelpers": true, // disable emitting inline helper functions + "removeComments": false, // comments are needed by angular injections + "inlineSourceMap": false, + "sourceMap": true, + "noEmitOnError": false, + "emitDecoratorMetadata": false, + "experimentalDecorators": true, + "noImplicitReturns": true, + "noImplicitThis": false, + "noImplicitUseStrict": false, + "noImplicitAny": false, + "noUnusedLocals": true, + "baseUrl": "public", + "pretty": true, + "paths": { + "app": [ + "app" + ] + } + }, + "include": [ + "public/app/**/*.ts", + "public/app/**/*.tsx", + "public/test/**/*.ts" + ] +} \ No newline at end of file diff --git a/yarn.lock b/yarn.lock index 6772d7c14a4..6e737e33348 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3101,7 +3101,7 @@ d3-request@1.0.6: d3-dsv "1" xmlhttprequest "1" -d3-scale-chromatic@^1.1.1: +d3-scale-chromatic@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/d3-scale-chromatic/-/d3-scale-chromatic-1.3.0.tgz#7ee38ffcaa7ad55cfed83a6a668aac5570c653c4" dependencies: @@ -7974,7 +7974,7 @@ mocha@^4.0.1: mkdirp "0.5.1" supports-color "4.4.0" -moment@^2.18.1: +moment@^2.22.2: version "2.22.2" resolved 
"https://registry.yarnpkg.com/moment/-/moment-2.22.2.tgz#3c257f9839fc0e93ff53149632239eb90783ff66" @@ -12029,6 +12029,10 @@ tslib@^1.8.0, tslib@^1.8.1, tslib@^1.9.0: version "1.9.2" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.9.2.tgz#8be0cc9a1f6dc7727c38deb16c2ebd1a2892988e" +tslib@^1.9.3: + version "1.9.3" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.9.3.tgz#d7e4dd79245d85428c4d7e4822a79917954ca286" + tslint-loader@^3.5.3: version "3.6.0" resolved "https://registry.yarnpkg.com/tslint-loader/-/tslint-loader-3.6.0.tgz#12ed4d5ef57d68be25cd12692fb2108b66469d76" From 931b944cddb879dfbfb44c5da18bfda43d36a0e9 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Wed, 25 Jul 2018 17:38:45 +0200 Subject: [PATCH 101/105] Almost all tests passing --- .../specs/variable_srv_init.jest.ts | 42 +++++-------------- .../app/features/templating/variable_srv.ts | 1 - 2 files changed, 10 insertions(+), 33 deletions(-) diff --git a/public/app/features/templating/specs/variable_srv_init.jest.ts b/public/app/features/templating/specs/variable_srv_init.jest.ts index 519adc0a350..eba0ba8cfee 100644 --- a/public/app/features/templating/specs/variable_srv_init.jest.ts +++ b/public/app/features/templating/specs/variable_srv_init.jest.ts @@ -1,13 +1,9 @@ -//import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; - import '../all'; import _ from 'lodash'; -// import helpers from 'test/specs/helpers'; -// import { Emitter } from 'app/core/core'; import { VariableSrv } from '../variable_srv'; import $q from 'q'; -// import { model } from 'mobx-state-tree/dist/internal'; +// import { TemplateSrv } from '../template_srv'; describe('VariableSrv init', function() { let templateSrv = { @@ -16,8 +12,9 @@ describe('VariableSrv init', function() { }, variableInitialized: () => {}, updateTemplateData: () => {}, - replace: str => str, + replace: () => ' /pee$/', }; + // let templateSrv = new TemplateSrv(); let $injector = {}; let $rootscope = { $on: () => {}, @@ -29,29 +26,8 @@ describe('VariableSrv init', function() { dashboard: {}, }; - // beforeEach(angularMocks.module('grafana.core')); - // beforeEach(angularMocks.module('grafana.controllers')); - // beforeEach(angularMocks.module('grafana.services')); - // beforeEach( - // angularMocks.module(function($compileProvider) { - // $compileProvider.preAssignBindingsEnabled(true); - // }) - // ); - - // beforeEach(ctx.providePhase(['datasourceSrv', 'timeSrv', 'templateSrv', '$location'])); - // beforeEach( - // angularMocks.inject(($rootScope, $q, $location, $injector) => { - // ctx.$q = $q; - // ctx.$rootScope = $rootScope; - // ctx.$location = $location; - // ctx.variableSrv = $injector.get('variableSrv'); - // ctx.$rootScope.$digest(); - // }) - // ); - function describeInitScenario(desc, fn) { describe(desc, function() { - // events: new Emitter(), var scenario: any = { urlParams: {}, setup: setupFn => { @@ -81,14 +57,12 @@ describe('VariableSrv init', function() { ctx.variableSrv.datasource = ctx.datasource; ctx.variableSrv.datasourceSrv = ctx.datasourceSrv; - ctx.variableSrv.$location.search = () => Promise.resolve(scenario.urlParams); + ctx.variableSrv.$location.search = () => scenario.urlParams; ctx.variableSrv.dashboard = { templating: { list: scenario.variables }, - // events: new Emitter(), }; await ctx.variableSrv.init(ctx.variableSrv.dashboard); - // ctx.$rootScope.$digest(); scenario.variables = ctx.variableSrv.variables; }); @@ -113,6 +87,7 @@ describe('VariableSrv init', function() { }); it('should update current 
value', () => { + console.log(type); expect(scenario.variables[0].current.value).toBe('new'); expect(scenario.variables[0].current.text).toBe('new'); }); @@ -176,6 +151,7 @@ describe('VariableSrv init', function() { }); it('should update current value', function() { + console.log(ctx.variableSrv.variables[0].options); var variable = ctx.variableSrv.variables[0]; expect(variable.options.length).toBe(2); }); @@ -251,14 +227,16 @@ describe('VariableSrv init', function() { }); function getVarMockConstructor(variable, model, ctx) { - console.log(model.model.type); + // console.log(model.model.type); switch (model.model.type) { case 'datasource': - return new variable(model.model, ctx.datasourceSrv, ctx.templateSrv, ctx.variableSrv); + return new variable(model.model, ctx.datasourceSrv, ctx.variableSrv, ctx.templateSrv); case 'query': return new variable(model.model, ctx.datasourceSrv, ctx.templateSrv, ctx.variableSrv); case 'interval': return new variable(model.model, {}, ctx.templateSrv, ctx.variableSrv); + case 'custom': + return new variable(model.model, ctx.variableSrv); default: return new variable(model.model); } diff --git a/public/app/features/templating/variable_srv.ts b/public/app/features/templating/variable_srv.ts index 9f6522c9b86..8ad3c2845e2 100644 --- a/public/app/features/templating/variable_srv.ts +++ b/public/app/features/templating/variable_srv.ts @@ -23,7 +23,6 @@ export class VariableSrv { // init variables for (let variable of this.variables) { - console.log(variable); variable.initLock = this.$q.defer(); } From 35cc85bfcc46efdc79cf22b98741a6ea34b93d58 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Thu, 26 Jul 2018 09:36:46 +0200 Subject: [PATCH 102/105] All tests passing. Remove Karma test. --- .../specs/variable_srv_init.jest.ts | 31 ++- .../specs/variable_srv_init_specs.ts | 216 ------------------ 2 files changed, 13 insertions(+), 234 deletions(-) delete mode 100644 public/app/features/templating/specs/variable_srv_init_specs.ts diff --git a/public/app/features/templating/specs/variable_srv_init.jest.ts b/public/app/features/templating/specs/variable_srv_init.jest.ts index eba0ba8cfee..ea8689f528b 100644 --- a/public/app/features/templating/specs/variable_srv_init.jest.ts +++ b/public/app/features/templating/specs/variable_srv_init.jest.ts @@ -3,7 +3,6 @@ import '../all'; import _ from 'lodash'; import { VariableSrv } from '../variable_srv'; import $q from 'q'; -// import { TemplateSrv } from '../template_srv'; describe('VariableSrv init', function() { let templateSrv = { @@ -12,22 +11,21 @@ describe('VariableSrv init', function() { }, variableInitialized: () => {}, updateTemplateData: () => {}, - replace: () => ' /pee$/', + replace: str => + str.replace(this.regex, match => { + return match; + }), }; - // let templateSrv = new TemplateSrv(); + let $injector = {}; let $rootscope = { $on: () => {}, }; - let ctx = { - datasourceSrv: {}, - $location: {}, - dashboard: {}, - }; + let ctx = {}; function describeInitScenario(desc, fn) { - describe(desc, function() { + describe(desc, () => { var scenario: any = { urlParams: {}, setup: setupFn => { @@ -43,7 +41,7 @@ describe('VariableSrv init', function() { }, datasourceSrv: { get: () => Promise.resolve(ctx.datasource), - getMetricSources: () => Promise.resolve(scenario.metricSources), + getMetricSources: () => scenario.metricSources, }, templateSrv, }; @@ -87,7 +85,6 @@ describe('VariableSrv init', function() { }); it('should update current value', () => { - console.log(type); 
expect(scenario.variables[0].current.value).toBe('new'); expect(scenario.variables[0].current.text).toBe('new'); }); @@ -150,8 +147,7 @@ describe('VariableSrv init', function() { ]; }); - it('should update current value', function() { - console.log(ctx.variableSrv.variables[0].options); + it('should update current value', () => { var variable = ctx.variableSrv.variables[0]; expect(variable.options.length).toBe(2); }); @@ -175,7 +171,7 @@ describe('VariableSrv init', function() { scenario.urlParams['var-apps'] = ['val2', 'val1']; }); - it('should update current value', function() { + it('should update current value', () => { var variable = ctx.variableSrv.variables[0]; expect(variable.current.value.length).toBe(2); expect(variable.current.value[0]).toBe('val2'); @@ -185,7 +181,7 @@ describe('VariableSrv init', function() { expect(variable.options[1].selected).toBe(true); }); - it('should set options that are not in value to selected false', function() { + it('should set options that are not in value to selected false', () => { var variable = ctx.variableSrv.variables[0]; expect(variable.options[2].selected).toBe(false); }); @@ -209,7 +205,7 @@ describe('VariableSrv init', function() { scenario.urlParams['var-apps'] = ['val2', 'val1']; }); - it('should update current value', function() { + it('should update current value', () => { var variable = ctx.variableSrv.variables[0]; expect(variable.current.value.length).toBe(2); expect(variable.current.value[0]).toBe('val2'); @@ -219,7 +215,7 @@ describe('VariableSrv init', function() { expect(variable.options[1].selected).toBe(true); }); - it('should set options that are not in value to selected false', function() { + it('should set options that are not in value to selected false', () => { var variable = ctx.variableSrv.variables[0]; expect(variable.options[2].selected).toBe(false); }); @@ -227,7 +223,6 @@ describe('VariableSrv init', function() { }); function getVarMockConstructor(variable, model, ctx) { - // console.log(model.model.type); switch (model.model.type) { case 'datasource': return new variable(model.model, ctx.datasourceSrv, ctx.variableSrv, ctx.templateSrv); diff --git a/public/app/features/templating/specs/variable_srv_init_specs.ts b/public/app/features/templating/specs/variable_srv_init_specs.ts deleted file mode 100644 index 11639c6aa8f..00000000000 --- a/public/app/features/templating/specs/variable_srv_init_specs.ts +++ /dev/null @@ -1,216 +0,0 @@ -import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; - -import '../all'; - -import _ from 'lodash'; -import helpers from 'test/specs/helpers'; -import { Emitter } from 'app/core/core'; - -describe('VariableSrv init', function() { - var ctx = new helpers.ControllerTestContext(); - - beforeEach(angularMocks.module('grafana.core')); - beforeEach(angularMocks.module('grafana.controllers')); - beforeEach(angularMocks.module('grafana.services')); - beforeEach( - angularMocks.module(function($compileProvider) { - $compileProvider.preAssignBindingsEnabled(true); - }) - ); - - beforeEach(ctx.providePhase(['datasourceSrv', 'timeSrv', 'templateSrv', '$location'])); - beforeEach( - angularMocks.inject(($rootScope, $q, $location, $injector) => { - ctx.$q = $q; - ctx.$rootScope = $rootScope; - ctx.$location = $location; - ctx.variableSrv = $injector.get('variableSrv'); - ctx.$rootScope.$digest(); - }) - ); - - function describeInitScenario(desc, fn) { - describe(desc, function() { - var scenario: any = { - urlParams: {}, - setup: setupFn => { - scenario.setupFn = 
setupFn; - }, - }; - - beforeEach(function() { - scenario.setupFn(); - ctx.datasource = {}; - ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when(scenario.queryResult)); - - ctx.datasourceSrv.get = sinon.stub().returns(ctx.$q.when(ctx.datasource)); - ctx.datasourceSrv.getMetricSources = sinon.stub().returns(scenario.metricSources); - - ctx.$location.search = sinon.stub().returns(scenario.urlParams); - ctx.dashboard = { - templating: { list: scenario.variables }, - events: new Emitter(), - }; - - ctx.variableSrv.init(ctx.dashboard); - ctx.$rootScope.$digest(); - - scenario.variables = ctx.variableSrv.variables; - }); - - fn(scenario); - }); - } - - ['query', 'interval', 'custom', 'datasource'].forEach(type => { - describeInitScenario('when setting ' + type + ' variable via url', scenario => { - scenario.setup(() => { - scenario.variables = [ - { - name: 'apps', - type: type, - current: { text: 'test', value: 'test' }, - options: [{ text: 'test', value: 'test' }], - }, - ]; - scenario.urlParams['var-apps'] = 'new'; - scenario.metricSources = []; - }); - - it('should update current value', () => { - expect(scenario.variables[0].current.value).to.be('new'); - expect(scenario.variables[0].current.text).to.be('new'); - }); - }); - }); - - describe('given dependent variables', () => { - var variableList = [ - { - name: 'app', - type: 'query', - query: '', - current: { text: 'app1', value: 'app1' }, - options: [{ text: 'app1', value: 'app1' }], - }, - { - name: 'server', - type: 'query', - refresh: 1, - query: '$app.*', - current: { text: 'server1', value: 'server1' }, - options: [{ text: 'server1', value: 'server1' }], - }, - ]; - - describeInitScenario('when setting parent var from url', scenario => { - scenario.setup(() => { - scenario.variables = _.cloneDeep(variableList); - scenario.urlParams['var-app'] = 'google'; - scenario.queryResult = [{ text: 'google-server1' }, { text: 'google-server2' }]; - }); - - it('should update child variable', () => { - expect(scenario.variables[1].options.length).to.be(2); - expect(scenario.variables[1].current.text).to.be('google-server1'); - }); - - it('should only update it once', () => { - expect(ctx.datasource.metricFindQuery.callCount).to.be(1); - }); - }); - }); - - describeInitScenario('when datasource variable is initialized', scenario => { - scenario.setup(() => { - scenario.variables = [ - { - type: 'datasource', - query: 'graphite', - name: 'test', - current: { value: 'backend4_pee', text: 'backend4_pee' }, - regex: '/pee$/', - }, - ]; - scenario.metricSources = [ - { name: 'backend1', meta: { id: 'influx' } }, - { name: 'backend2_pee', meta: { id: 'graphite' } }, - { name: 'backend3', meta: { id: 'graphite' } }, - { name: 'backend4_pee', meta: { id: 'graphite' } }, - ]; - }); - - it('should update current value', function() { - var variable = ctx.variableSrv.variables[0]; - expect(variable.options.length).to.be(2); - }); - }); - - describeInitScenario('when template variable is present in url multiple times', scenario => { - scenario.setup(() => { - scenario.variables = [ - { - name: 'apps', - type: 'query', - multi: true, - current: { text: 'val1', value: 'val1' }, - options: [ - { text: 'val1', value: 'val1' }, - { text: 'val2', value: 'val2' }, - { text: 'val3', value: 'val3', selected: true }, - ], - }, - ]; - scenario.urlParams['var-apps'] = ['val2', 'val1']; - }); - - it('should update current value', function() { - var variable = ctx.variableSrv.variables[0]; - expect(variable.current.value.length).to.be(2); - 
expect(variable.current.value[0]).to.be('val2'); - expect(variable.current.value[1]).to.be('val1'); - expect(variable.current.text).to.be('val2 + val1'); - expect(variable.options[0].selected).to.be(true); - expect(variable.options[1].selected).to.be(true); - }); - - it('should set options that are not in value to selected false', function() { - var variable = ctx.variableSrv.variables[0]; - expect(variable.options[2].selected).to.be(false); - }); - }); - - describeInitScenario('when template variable is present in url multiple times using key/values', scenario => { - scenario.setup(() => { - scenario.variables = [ - { - name: 'apps', - type: 'query', - multi: true, - current: { text: 'Val1', value: 'val1' }, - options: [ - { text: 'Val1', value: 'val1' }, - { text: 'Val2', value: 'val2' }, - { text: 'Val3', value: 'val3', selected: true }, - ], - }, - ]; - scenario.urlParams['var-apps'] = ['val2', 'val1']; - }); - - it('should update current value', function() { - var variable = ctx.variableSrv.variables[0]; - expect(variable.current.value.length).to.be(2); - expect(variable.current.value[0]).to.be('val2'); - expect(variable.current.value[1]).to.be('val1'); - expect(variable.current.text).to.be('Val2 + Val1'); - expect(variable.options[0].selected).to.be(true); - expect(variable.options[1].selected).to.be(true); - }); - - it('should set options that are not in value to selected false', function() { - var variable = ctx.variableSrv.variables[0]; - expect(variable.options[2].selected).to.be(false); - }); - }); -}); From 7699451d9438546e6655975d53deb7bf6314562d Mon Sep 17 00:00:00 2001 From: David Date: Thu, 26 Jul 2018 14:04:12 +0200 Subject: [PATCH 103/105] Refactor Explore query field (#12643) * Refactor Explore query field - extract typeahead field that only contains logic for the typeahead mechanics - renamed QueryField to PromQueryField, a wrapper around TypeaheadField that deals with Prometheus-specific concepts - PromQueryField creates a promql typeahead by providing the handlers for producing suggestions, and for applying suggestions - The `refresher` promise is needed to trigger a render once an async action in the wrapper returns. This is prep work for a composable query field to be used by Explore, as well as editors in datasource plugins. 
* Added typeahead handling tests - extracted context-to-suggestion logic to make it testable - kept DOM-dependent parts in main onTypeahead funtion * simplified error handling in explore query field * Refactor query suggestions - use monaco's suggestion types (roughly), see https://github.com/Microsoft/monaco-editor/blob/f6fb545/monaco.d.ts#L4208 - suggest functions and metrics in empty field (ctrl+space) - copy and expand prometheus function docs from prometheus datasource (will be migrated back to the datasource in the future) * Added prop and state types, removed unused cwrp * Split up suggestion processing for code readability --- .../Explore/PromQueryField.jest.tsx | 125 ++++ .../app/containers/Explore/PromQueryField.tsx | 340 +++++++++++ public/app/containers/Explore/QueryField.tsx | 553 ++++++++---------- public/app/containers/Explore/QueryRows.tsx | 6 +- public/app/containers/Explore/Typeahead.tsx | 61 +- .../Explore/slate-plugins/prism/promql.ts | 417 +++++++++++-- public/sass/components/_slate_editor.scss | 1 + 7 files changed, 1100 insertions(+), 403 deletions(-) create mode 100644 public/app/containers/Explore/PromQueryField.jest.tsx create mode 100644 public/app/containers/Explore/PromQueryField.tsx diff --git a/public/app/containers/Explore/PromQueryField.jest.tsx b/public/app/containers/Explore/PromQueryField.jest.tsx new file mode 100644 index 00000000000..8d2903cb2c2 --- /dev/null +++ b/public/app/containers/Explore/PromQueryField.jest.tsx @@ -0,0 +1,125 @@ +import React from 'react'; +import Enzyme, { shallow } from 'enzyme'; +import Adapter from 'enzyme-adapter-react-16'; + +Enzyme.configure({ adapter: new Adapter() }); + +import PromQueryField from './PromQueryField'; + +describe('PromQueryField typeahead handling', () => { + const defaultProps = { + request: () => ({ data: { data: [] } }), + }; + + it('returns default suggestions on emtpty context', () => { + const instance = shallow().instance() as PromQueryField; + const result = instance.getTypeahead({ text: '', prefix: '', wrapperClasses: [] }); + expect(result.context).toBeUndefined(); + expect(result.refresher).toBeUndefined(); + expect(result.suggestions.length).toEqual(2); + }); + + describe('range suggestions', () => { + it('returns range suggestions in range context', () => { + const instance = shallow().instance() as PromQueryField; + const result = instance.getTypeahead({ text: '1', prefix: '1', wrapperClasses: ['context-range'] }); + expect(result.context).toBe('context-range'); + expect(result.refresher).toBeUndefined(); + expect(result.suggestions).toEqual([ + { + items: [{ label: '1m' }, { label: '5m' }, { label: '10m' }, { label: '30m' }, { label: '1h' }], + label: 'Range vector', + }, + ]); + }); + }); + + describe('metric suggestions', () => { + it('returns metrics suggestions by default', () => { + const instance = shallow( + + ).instance() as PromQueryField; + const result = instance.getTypeahead({ text: 'a', prefix: 'a', wrapperClasses: [] }); + expect(result.context).toBeUndefined(); + expect(result.refresher).toBeUndefined(); + expect(result.suggestions.length).toEqual(2); + }); + + it('returns default suggestions after a binary operator', () => { + const instance = shallow( + + ).instance() as PromQueryField; + const result = instance.getTypeahead({ text: '*', prefix: '', wrapperClasses: [] }); + expect(result.context).toBeUndefined(); + expect(result.refresher).toBeUndefined(); + expect(result.suggestions.length).toEqual(2); + }); + }); + + describe('label suggestions', () => { + it('returns 
default label suggestions on label context and no metric', () => { + const instance = shallow().instance() as PromQueryField; + const result = instance.getTypeahead({ text: 'j', prefix: 'j', wrapperClasses: ['context-labels'] }); + expect(result.context).toBe('context-labels'); + expect(result.suggestions).toEqual([{ items: [{ label: 'job' }, { label: 'instance' }], label: 'Labels' }]); + }); + + it('returns label suggestions on label context and metric', () => { + const instance = shallow( + + ).instance() as PromQueryField; + const result = instance.getTypeahead({ + text: 'job', + prefix: 'job', + wrapperClasses: ['context-labels'], + metric: 'foo', + }); + expect(result.context).toBe('context-labels'); + expect(result.suggestions).toEqual([{ items: [{ label: 'bar' }], label: 'Labels' }]); + }); + + it('returns a refresher on label context and unavailable metric', () => { + const instance = shallow( + + ).instance() as PromQueryField; + const result = instance.getTypeahead({ + text: 'job', + prefix: 'job', + wrapperClasses: ['context-labels'], + metric: 'xxx', + }); + expect(result.context).toBeUndefined(); + expect(result.refresher).toBeInstanceOf(Promise); + expect(result.suggestions).toEqual([]); + }); + + it('returns label values on label context when given a metric and a label key', () => { + const instance = shallow( + + ).instance() as PromQueryField; + const result = instance.getTypeahead({ + text: '=ba', + prefix: 'ba', + wrapperClasses: ['context-labels'], + metric: 'foo', + labelKey: 'bar', + }); + expect(result.context).toBe('context-label-values'); + expect(result.suggestions).toEqual([{ items: [{ label: 'baz' }], label: 'Label values' }]); + }); + + it('returns label suggestions on aggregation context and metric', () => { + const instance = shallow( + + ).instance() as PromQueryField; + const result = instance.getTypeahead({ + text: 'job', + prefix: 'job', + wrapperClasses: ['context-aggregation'], + metric: 'foo', + }); + expect(result.context).toBe('context-aggregation'); + expect(result.suggestions).toEqual([{ items: [{ label: 'bar' }], label: 'Labels' }]); + }); + }); +}); diff --git a/public/app/containers/Explore/PromQueryField.tsx b/public/app/containers/Explore/PromQueryField.tsx new file mode 100644 index 00000000000..eb8fc25c67f --- /dev/null +++ b/public/app/containers/Explore/PromQueryField.tsx @@ -0,0 +1,340 @@ +import _ from 'lodash'; +import React from 'react'; + +// dom also includes Element polyfills +import { getNextCharacter, getPreviousCousin } from './utils/dom'; +import PluginPrism, { setPrismTokens } from './slate-plugins/prism/index'; +import PrismPromql, { FUNCTIONS } from './slate-plugins/prism/promql'; +import RunnerPlugin from './slate-plugins/runner'; +import { processLabels, RATE_RANGES, cleanText } from './utils/prometheus'; + +import TypeaheadField, { + Suggestion, + SuggestionGroup, + TypeaheadInput, + TypeaheadFieldState, + TypeaheadOutput, +} from './QueryField'; + +const EMPTY_METRIC = ''; +const METRIC_MARK = 'metric'; +const PRISM_LANGUAGE = 'promql'; + +export const wrapLabel = label => ({ label }); +export const setFunctionMove = (suggestion: Suggestion): Suggestion => { + suggestion.move = -1; + return suggestion; +}; + +export function willApplySuggestion( + suggestion: string, + { typeaheadContext, typeaheadText }: TypeaheadFieldState +): string { + // Modify suggestion based on context + switch (typeaheadContext) { + case 'context-labels': { + const nextChar = getNextCharacter(); + if (!nextChar || nextChar === '}' || nextChar === 
',') { + suggestion += '='; + } + break; + } + + case 'context-label-values': { + // Always add quotes and remove existing ones instead + if (!(typeaheadText.startsWith('="') || typeaheadText.startsWith('"'))) { + suggestion = `"${suggestion}`; + } + if (getNextCharacter() !== '"') { + suggestion = `${suggestion}"`; + } + break; + } + + default: + } + return suggestion; +} + +interface PromQueryFieldProps { + initialQuery?: string | null; + labelKeys?: { [index: string]: string[] }; // metric -> [labelKey,...] + labelValues?: { [index: string]: { [index: string]: string[] } }; // metric -> labelKey -> [labelValue,...] + metrics?: string[]; + onPressEnter?: () => void; + onQueryChange?: (value: string) => void; + portalPrefix?: string; + request?: (url: string) => any; +} + +interface PromQueryFieldState { + labelKeys: { [index: string]: string[] }; // metric -> [labelKey,...] + labelValues: { [index: string]: { [index: string]: string[] } }; // metric -> labelKey -> [labelValue,...] + metrics: string[]; +} + +interface PromTypeaheadInput { + text: string; + prefix: string; + wrapperClasses: string[]; + metric?: string; + labelKey?: string; +} + +class PromQueryField extends React.Component { + plugins: any[]; + + constructor(props, context) { + super(props, context); + + this.plugins = [ + RunnerPlugin({ handler: props.onPressEnter }), + PluginPrism({ definition: PrismPromql, language: PRISM_LANGUAGE }), + ]; + + this.state = { + labelKeys: props.labelKeys || {}, + labelValues: props.labelValues || {}, + metrics: props.metrics || [], + }; + } + + componentDidMount() { + this.fetchMetricNames(); + } + + onChangeQuery = value => { + // Send text change to parent + const { onQueryChange } = this.props; + if (onQueryChange) { + onQueryChange(value); + } + }; + + onReceiveMetrics = () => { + if (!this.state.metrics) { + return; + } + setPrismTokens(PRISM_LANGUAGE, METRIC_MARK, this.state.metrics); + }; + + onTypeahead = (typeahead: TypeaheadInput): TypeaheadOutput => { + const { editorNode, prefix, text, wrapperNode } = typeahead; + + // Get DOM-dependent context + const wrapperClasses = Array.from(wrapperNode.classList); + // Take first metric as lucky guess + const metricNode = editorNode.querySelector(`.${METRIC_MARK}`); + const metric = metricNode && metricNode.textContent; + const labelKeyNode = getPreviousCousin(wrapperNode, '.attr-name'); + const labelKey = labelKeyNode && labelKeyNode.textContent; + + const result = this.getTypeahead({ text, prefix, wrapperClasses, metric, labelKey }); + + console.log('handleTypeahead', wrapperClasses, text, prefix, result.context); + + return result; + }; + + // Keep this DOM-free for testing + getTypeahead({ prefix, wrapperClasses, metric, text }: PromTypeaheadInput): TypeaheadOutput { + // Determine candidates by CSS context + if (_.includes(wrapperClasses, 'context-range')) { + // Suggestions for metric[|] + return this.getRangeTypeahead(); + } else if (_.includes(wrapperClasses, 'context-labels')) { + // Suggestions for metric{|} and metric{foo=|}, as well as metric-independent label queries like {|} + return this.getLabelTypeahead.apply(this, arguments); + } else if (metric && _.includes(wrapperClasses, 'context-aggregation')) { + return this.getAggregationTypeahead.apply(this, arguments); + } else if ( + // Non-empty but not inside known token unless it's a metric + (prefix && !_.includes(wrapperClasses, 'token')) || + prefix === metric || + (prefix === '' && !text.match(/^[)\s]+$/)) || // Empty context or after ')' + text.match(/[+\-*/^%]/) // 
After binary operator + ) { + return this.getEmptyTypeahead(); + } + + return { + suggestions: [], + }; + } + + getEmptyTypeahead(): TypeaheadOutput { + const suggestions: SuggestionGroup[] = []; + suggestions.push({ + prefixMatch: true, + label: 'Functions', + items: FUNCTIONS.map(setFunctionMove), + }); + + if (this.state.metrics) { + suggestions.push({ + label: 'Metrics', + items: this.state.metrics.map(wrapLabel), + }); + } + return { suggestions }; + } + + getRangeTypeahead(): TypeaheadOutput { + return { + context: 'context-range', + suggestions: [ + { + label: 'Range vector', + items: [...RATE_RANGES].map(wrapLabel), + }, + ], + }; + } + + getAggregationTypeahead({ metric }: PromTypeaheadInput): TypeaheadOutput { + let refresher: Promise = null; + const suggestions: SuggestionGroup[] = []; + const labelKeys = this.state.labelKeys[metric]; + if (labelKeys) { + suggestions.push({ label: 'Labels', items: labelKeys.map(wrapLabel) }); + } else { + refresher = this.fetchMetricLabels(metric); + } + + return { + refresher, + suggestions, + context: 'context-aggregation', + }; + } + + getLabelTypeahead({ metric, text, wrapperClasses, labelKey }: PromTypeaheadInput): TypeaheadOutput { + let context: string; + let refresher: Promise = null; + const suggestions: SuggestionGroup[] = []; + if (metric) { + const labelKeys = this.state.labelKeys[metric]; + if (labelKeys) { + if ((text && text.startsWith('=')) || _.includes(wrapperClasses, 'attr-value')) { + // Label values + if (labelKey) { + const labelValues = this.state.labelValues[metric][labelKey]; + context = 'context-label-values'; + suggestions.push({ + label: 'Label values', + items: labelValues.map(wrapLabel), + }); + } + } else { + // Label keys + context = 'context-labels'; + suggestions.push({ label: 'Labels', items: labelKeys.map(wrapLabel) }); + } + } else { + refresher = this.fetchMetricLabels(metric); + } + } else { + // Metric-independent label queries + const defaultKeys = ['job', 'instance']; + // Munge all keys that we have seen together + const labelKeys = Object.keys(this.state.labelKeys).reduce((acc, metric) => { + return acc.concat(this.state.labelKeys[metric].filter(key => acc.indexOf(key) === -1)); + }, defaultKeys); + if ((text && text.startsWith('=')) || _.includes(wrapperClasses, 'attr-value')) { + // Label values + if (labelKey) { + if (this.state.labelValues[EMPTY_METRIC]) { + const labelValues = this.state.labelValues[EMPTY_METRIC][labelKey]; + context = 'context-label-values'; + suggestions.push({ + label: 'Label values', + items: labelValues.map(wrapLabel), + }); + } else { + // Can only query label values for now (API to query keys is under development) + refresher = this.fetchLabelValues(labelKey); + } + } + } else { + // Label keys + context = 'context-labels'; + suggestions.push({ label: 'Labels', items: labelKeys.map(wrapLabel) }); + } + } + return { context, refresher, suggestions }; + } + + request = url => { + if (this.props.request) { + return this.props.request(url); + } + return fetch(url); + }; + + async fetchLabelValues(key) { + const url = `/api/v1/label/${key}/values`; + try { + const res = await this.request(url); + const body = await (res.data || res.json()); + const pairs = this.state.labelValues[EMPTY_METRIC]; + const values = { + ...pairs, + [key]: body.data, + }; + const labelValues = { + ...this.state.labelValues, + [EMPTY_METRIC]: values, + }; + this.setState({ labelValues }); + } catch (e) { + console.error(e); + } + } + + async fetchMetricLabels(name) { + const url = 
`/api/v1/series?match[]=${name}`; + try { + const res = await this.request(url); + const body = await (res.data || res.json()); + const { keys, values } = processLabels(body.data); + const labelKeys = { + ...this.state.labelKeys, + [name]: keys, + }; + const labelValues = { + ...this.state.labelValues, + [name]: values, + }; + this.setState({ labelKeys, labelValues }); + } catch (e) { + console.error(e); + } + } + + async fetchMetricNames() { + const url = '/api/v1/label/__name__/values'; + try { + const res = await this.request(url); + const body = await (res.data || res.json()); + this.setState({ metrics: body.data }, this.onReceiveMetrics); + } catch (error) { + console.error(error); + } + } + + render() { + return ( + + ); + } +} + +export default PromQueryField; diff --git a/public/app/containers/Explore/QueryField.tsx b/public/app/containers/Explore/QueryField.tsx index 41f6d53541c..60caddcad31 100644 --- a/public/app/containers/Explore/QueryField.tsx +++ b/public/app/containers/Explore/QueryField.tsx @@ -1,106 +1,163 @@ +import _ from 'lodash'; import React from 'react'; import ReactDOM from 'react-dom'; -import { Value } from 'slate'; +import { Block, Change, Document, Text, Value } from 'slate'; import { Editor } from 'slate-react'; import Plain from 'slate-plain-serializer'; -// dom also includes Element polyfills -import { getNextCharacter, getPreviousCousin } from './utils/dom'; import BracesPlugin from './slate-plugins/braces'; import ClearPlugin from './slate-plugins/clear'; import NewlinePlugin from './slate-plugins/newline'; -import PluginPrism, { setPrismTokens } from './slate-plugins/prism/index'; -import RunnerPlugin from './slate-plugins/runner'; -import debounce from './utils/debounce'; -import { processLabels, RATE_RANGES, cleanText } from './utils/prometheus'; import Typeahead from './Typeahead'; -const EMPTY_METRIC = ''; -const METRIC_MARK = 'metric'; export const TYPEAHEAD_DEBOUNCE = 300; -function flattenSuggestions(s) { +function flattenSuggestions(s: any[]): any[] { return s ? s.reduce((acc, g) => acc.concat(g.items), []) : []; } -export const getInitialValue = query => - Value.fromJSON({ - document: { - nodes: [ - { - object: 'block', - type: 'paragraph', - nodes: [ - { - object: 'text', - leaves: [ - { - text: query, - }, - ], - }, - ], - }, - ], - }, +export const makeFragment = (text: string): Document => { + const lines = text.split('\n').map(line => + Block.create({ + type: 'paragraph', + nodes: [Text.create(line)], + }) + ); + + const fragment = Document.create({ + nodes: lines, }); + return fragment; +}; -class Portal extends React.Component { - node: any; +export const getInitialValue = (value: string): Value => Value.create({ document: makeFragment(value) }); - constructor(props) { - super(props); - const { index = 0, prefix = 'query' } = props; - this.node = document.createElement('div'); - this.node.classList.add(`slate-typeahead`, `slate-typeahead-${prefix}-${index}`); - document.body.appendChild(this.node); - } - - componentWillUnmount() { - document.body.removeChild(this.node); - } - - render() { - return ReactDOM.createPortal(this.props.children, this.node); - } +export interface Suggestion { + /** + * The label of this completion item. By default + * this is also the text that is inserted when selecting + * this completion. + */ + label: string; + /** + * The kind of this completion item. Based on the kind + * an icon is chosen by the editor. 
+ */ + kind?: string; + /** + * A human-readable string with additional information + * about this item, like type or symbol information. + */ + detail?: string; + /** + * A human-readable string, can be Markdown, that represents a doc-comment. + */ + documentation?: string; + /** + * A string that should be used when comparing this item + * with other items. When `falsy` the `label` is used. + */ + sortText?: string; + /** + * A string that should be used when filtering a set of + * completion items. When `falsy` the `label` is used. + */ + filterText?: string; + /** + * A string or snippet that should be inserted in a document when selecting + * this completion. When `falsy` the `label` is used. + */ + insertText?: string; + /** + * Delete number of characters before the caret position, + * by default the letters from the beginning of the word. + */ + deleteBackwards?: number; + /** + * Number of steps to move after the insertion, can be negative. + */ + move?: number; } -class QueryField extends React.Component { - menuEl: any; - plugins: any; +export interface SuggestionGroup { + /** + * Label that will be displayed for all entries of this group. + */ + label: string; + /** + * List of suggestions of this group. + */ + items: Suggestion[]; + /** + * If true, match only by prefix (and not mid-word). + */ + prefixMatch?: boolean; + /** + * If true, do not filter items in this group based on the search. + */ + skipFilter?: boolean; +} + +interface TypeaheadFieldProps { + additionalPlugins?: any[]; + cleanText?: (text: string) => string; + initialValue: string | null; + onBlur?: () => void; + onFocus?: () => void; + onTypeahead?: (typeahead: TypeaheadInput) => TypeaheadOutput; + onValueChanged?: (value: Value) => void; + onWillApplySuggestion?: (suggestion: string, state: TypeaheadFieldState) => string; + placeholder?: string; + portalPrefix?: string; +} + +export interface TypeaheadFieldState { + suggestions: SuggestionGroup[]; + typeaheadContext: string | null; + typeaheadIndex: number; + typeaheadPrefix: string; + typeaheadText: string; + value: Value; +} + +export interface TypeaheadInput { + editorNode: Element; + prefix: string; + selection?: Selection; + text: string; + wrapperNode: Element; +} + +export interface TypeaheadOutput { + context?: string; + refresher?: Promise<{}>; + suggestions: SuggestionGroup[]; +} + +class QueryField extends React.Component { + menuEl: HTMLElement | null; + plugins: any[]; resetTimer: any; constructor(props, context) { super(props, context); - const { prismDefinition = {}, prismLanguage = 'promql' } = props; - - this.plugins = [ - BracesPlugin(), - ClearPlugin(), - RunnerPlugin({ handler: props.onPressEnter }), - NewlinePlugin(), - PluginPrism({ definition: prismDefinition, language: prismLanguage }), - ]; + // Base plugins + this.plugins = [BracesPlugin(), ClearPlugin(), NewlinePlugin(), ...props.additionalPlugins]; this.state = { - labelKeys: {}, - labelValues: {}, - metrics: props.metrics || [], suggestions: [], + typeaheadContext: null, typeaheadIndex: 0, typeaheadPrefix: '', - value: getInitialValue(props.initialQuery || ''), + typeaheadText: '', + value: getInitialValue(props.initialValue || ''), }; } componentDidMount() { this.updateMenu(); - - if (this.props.metrics === undefined) { - this.fetchMetricNames(); - } } componentWillUnmount() { @@ -112,12 +169,9 @@ class QueryField extends React.Component { } componentWillReceiveProps(nextProps) { - if (nextProps.metrics && nextProps.metrics !== this.props.metrics) { - this.setState({ metrics: 
nextProps.metrics }, this.onMetricsReceived); - } - // initialQuery is null in case the user typed - if (nextProps.initialQuery !== null && nextProps.initialQuery !== this.props.initialQuery) { - this.setState({ value: getInitialValue(nextProps.initialQuery) }); + // initialValue is null in case the user typed + if (nextProps.initialValue !== null && nextProps.initialValue !== this.props.initialValue) { + this.setState({ value: getInitialValue(nextProps.initialValue) }); } } @@ -125,48 +179,28 @@ class QueryField extends React.Component { const changed = value.document !== this.state.value.document; this.setState({ value }, () => { if (changed) { - this.handleChangeQuery(); + this.handleChangeValue(); } }); - window.requestAnimationFrame(this.handleTypeahead); - }; - - onMetricsReceived = () => { - if (!this.state.metrics) { - return; + if (changed) { + window.requestAnimationFrame(this.handleTypeahead); } - setPrismTokens(this.props.prismLanguage, METRIC_MARK, this.state.metrics); - - // Trigger re-render - window.requestAnimationFrame(() => { - // Bogus edit to trigger highlighting - const change = this.state.value - .change() - .insertText(' ') - .deleteBackward(1); - this.onChange(change); - }); }; - request = url => { - if (this.props.request) { - return this.props.request(url); - } - return fetch(url); - }; - - handleChangeQuery = () => { + handleChangeValue = () => { // Send text change to parent - const { onQueryChange } = this.props; - if (onQueryChange) { - onQueryChange(Plain.serialize(this.state.value)); + const { onValueChanged } = this.props; + if (onValueChanged) { + onValueChanged(Plain.serialize(this.state.value)); } }; - handleTypeahead = debounce(() => { + handleTypeahead = _.debounce(async () => { const selection = window.getSelection(); - if (selection.anchorNode) { + const { cleanText, onTypeahead } = this.props; + + if (onTypeahead && selection.anchorNode) { const wrapperNode = selection.anchorNode.parentElement; const editorNode = wrapperNode.closest('.slate-query-field'); if (!editorNode || this.state.value.isBlurred) { @@ -175,164 +209,96 @@ class QueryField extends React.Component { } const range = selection.getRangeAt(0); - const text = selection.anchorNode.textContent; const offset = range.startOffset; - const prefix = cleanText(text.substr(0, offset)); - - // Determine candidates by context - const suggestionGroups = []; - const wrapperClasses = wrapperNode.classList; - let typeaheadContext = null; - - // Take first metric as lucky guess - const metricNode = editorNode.querySelector(`.${METRIC_MARK}`); - - if (wrapperClasses.contains('context-range')) { - // Rate ranges - typeaheadContext = 'context-range'; - suggestionGroups.push({ - label: 'Range vector', - items: [...RATE_RANGES], - }); - } else if (wrapperClasses.contains('context-labels') && metricNode) { - const metric = metricNode.textContent; - const labelKeys = this.state.labelKeys[metric]; - if (labelKeys) { - if ((text && text.startsWith('=')) || wrapperClasses.contains('attr-value')) { - // Label values - const labelKeyNode = getPreviousCousin(wrapperNode, '.attr-name'); - if (labelKeyNode) { - const labelKey = labelKeyNode.textContent; - const labelValues = this.state.labelValues[metric][labelKey]; - typeaheadContext = 'context-label-values'; - suggestionGroups.push({ - label: 'Label values', - items: labelValues, - }); - } - } else { - // Label keys - typeaheadContext = 'context-labels'; - suggestionGroups.push({ label: 'Labels', items: labelKeys }); - } - } else { - 
this.fetchMetricLabels(metric); - } - } else if (wrapperClasses.contains('context-labels') && !metricNode) { - // Empty name queries - const defaultKeys = ['job', 'instance']; - // Munge all keys that we have seen together - const labelKeys = Object.keys(this.state.labelKeys).reduce((acc, metric) => { - return acc.concat(this.state.labelKeys[metric].filter(key => acc.indexOf(key) === -1)); - }, defaultKeys); - if ((text && text.startsWith('=')) || wrapperClasses.contains('attr-value')) { - // Label values - const labelKeyNode = getPreviousCousin(wrapperNode, '.attr-name'); - if (labelKeyNode) { - const labelKey = labelKeyNode.textContent; - if (this.state.labelValues[EMPTY_METRIC]) { - const labelValues = this.state.labelValues[EMPTY_METRIC][labelKey]; - typeaheadContext = 'context-label-values'; - suggestionGroups.push({ - label: 'Label values', - items: labelValues, - }); - } else { - // Can only query label values for now (API to query keys is under development) - this.fetchLabelValues(labelKey); - } - } - } else { - // Label keys - typeaheadContext = 'context-labels'; - suggestionGroups.push({ label: 'Labels', items: labelKeys }); - } - } else if (metricNode && wrapperClasses.contains('context-aggregation')) { - typeaheadContext = 'context-aggregation'; - const metric = metricNode.textContent; - const labelKeys = this.state.labelKeys[metric]; - if (labelKeys) { - suggestionGroups.push({ label: 'Labels', items: labelKeys }); - } else { - this.fetchMetricLabels(metric); - } - } else if ( - (this.state.metrics && ((prefix && !wrapperClasses.contains('token')) || text.match(/[+\-*/^%]/))) || - wrapperClasses.contains('context-function') - ) { - // Need prefix for metrics - typeaheadContext = 'context-metrics'; - suggestionGroups.push({ - label: 'Metrics', - items: this.state.metrics, - }); + const text = selection.anchorNode.textContent; + let prefix = text.substr(0, offset); + if (cleanText) { + prefix = cleanText(prefix); } - let results = 0; - const filteredSuggestions = suggestionGroups.map(group => { - if (group.items) { - group.items = group.items.filter(c => c.length !== prefix.length && c.indexOf(prefix) > -1); - results += group.items.length; + const { suggestions, context, refresher } = onTypeahead({ + editorNode, + prefix, + selection, + text, + wrapperNode, + }); + + const filteredSuggestions = suggestions + .map(group => { + if (group.items) { + if (prefix) { + // Filter groups based on prefix + if (!group.skipFilter) { + group.items = group.items.filter(c => (c.filterText || c.label).length >= prefix.length); + if (group.prefixMatch) { + group.items = group.items.filter(c => (c.filterText || c.label).indexOf(prefix) === 0); + } else { + group.items = group.items.filter(c => (c.filterText || c.label).indexOf(prefix) > -1); + } + } + // Filter out the already typed value (prefix) unless it inserts custom text + group.items = group.items.filter(c => c.insertText || (c.filterText || c.label) !== prefix); + } + + group.items = _.sortBy(group.items, item => item.sortText || item.label); + } + return group; + }) + .filter(group => group.items && group.items.length > 0); // Filter out empty groups + + this.setState( + { + suggestions: filteredSuggestions, + typeaheadPrefix: prefix, + typeaheadContext: context, + typeaheadText: text, + }, + () => { + if (refresher) { + refresher.then(this.handleTypeahead).catch(e => console.error(e)); + } } - return group; - }); - - console.log('handleTypeahead', selection.anchorNode, wrapperClasses, text, offset, prefix, typeaheadContext); - - 
this.setState({ - typeaheadPrefix: prefix, - typeaheadContext, - typeaheadText: text, - suggestions: results > 0 ? filteredSuggestions : [], - }); + ); } }, TYPEAHEAD_DEBOUNCE); - applyTypeahead(change, suggestion) { - const { typeaheadPrefix, typeaheadContext, typeaheadText } = this.state; + applyTypeahead(change: Change, suggestion: Suggestion): Change { + const { cleanText, onWillApplySuggestion } = this.props; + const { typeaheadPrefix, typeaheadText } = this.state; + let suggestionText = suggestion.insertText || suggestion.label; + const move = suggestion.move || 0; - // Modify suggestion based on context - switch (typeaheadContext) { - case 'context-labels': { - const nextChar = getNextCharacter(); - if (!nextChar || nextChar === '}' || nextChar === ',') { - suggestion += '='; - } - break; - } - - case 'context-label-values': { - // Always add quotes and remove existing ones instead - if (!(typeaheadText.startsWith('="') || typeaheadText.startsWith('"'))) { - suggestion = `"${suggestion}`; - } - if (getNextCharacter() !== '"') { - suggestion = `${suggestion}"`; - } - break; - } - - default: + if (onWillApplySuggestion) { + suggestionText = onWillApplySuggestion(suggestionText, { ...this.state }); } this.resetTypeahead(); // Remove the current, incomplete text and replace it with the selected suggestion - let backward = typeaheadPrefix.length; - const text = cleanText(typeaheadText); + const backward = suggestion.deleteBackwards || typeaheadPrefix.length; + const text = cleanText ? cleanText(typeaheadText) : typeaheadText; const suffixLength = text.length - typeaheadPrefix.length; const offset = typeaheadText.indexOf(typeaheadPrefix); - const midWord = typeaheadPrefix && ((suffixLength > 0 && offset > -1) || suggestion === typeaheadText); + const midWord = typeaheadPrefix && ((suffixLength > 0 && offset > -1) || suggestionText === typeaheadText); const forward = midWord ? 
suffixLength + offset : 0; - return ( - change - // TODO this line breaks if cursor was moved left and length is longer than whole prefix + // If new-lines, apply suggestion as block + if (suggestionText.match(/\n/)) { + const fragment = makeFragment(suggestionText); + return change .deleteBackward(backward) .deleteForward(forward) - .insertText(suggestion) - .focus() - ); + .insertFragment(fragment) + .focus(); + } + + return change + .deleteBackward(backward) + .deleteForward(forward) + .insertText(suggestionText) + .move(move) + .focus(); } onKeyDown = (event, change) => { @@ -413,74 +379,6 @@ class QueryField extends React.Component { }); }; - async fetchLabelValues(key) { - const url = `/api/v1/label/${key}/values`; - try { - const res = await this.request(url); - console.log(res); - const body = await (res.data || res.json()); - const pairs = this.state.labelValues[EMPTY_METRIC]; - const values = { - ...pairs, - [key]: body.data, - }; - // const labelKeys = { - // ...this.state.labelKeys, - // [EMPTY_METRIC]: keys, - // }; - const labelValues = { - ...this.state.labelValues, - [EMPTY_METRIC]: values, - }; - this.setState({ labelValues }, this.handleTypeahead); - } catch (e) { - if (this.props.onRequestError) { - this.props.onRequestError(e); - } else { - console.error(e); - } - } - } - - async fetchMetricLabels(name) { - const url = `/api/v1/series?match[]=${name}`; - try { - const res = await this.request(url); - const body = await (res.data || res.json()); - const { keys, values } = processLabels(body.data); - const labelKeys = { - ...this.state.labelKeys, - [name]: keys, - }; - const labelValues = { - ...this.state.labelValues, - [name]: values, - }; - this.setState({ labelKeys, labelValues }, this.handleTypeahead); - } catch (e) { - if (this.props.onRequestError) { - this.props.onRequestError(e); - } else { - console.error(e); - } - } - } - - async fetchMetricNames() { - const url = '/api/v1/label/__name__/values'; - try { - const res = await this.request(url); - const body = await (res.data || res.json()); - this.setState({ metrics: body.data }, this.onMetricsReceived); - } catch (error) { - if (this.props.onRequestError) { - this.props.onRequestError(error); - } else { - console.error(error); - } - } - } - handleBlur = () => { const { onBlur } = this.props; // If we dont wait here, menu clicks wont work because the menu @@ -498,7 +396,7 @@ class QueryField extends React.Component { } }; - handleClickMenu = item => { + onClickMenu = (item: Suggestion) => { // Manually triggering change const change = this.applyTypeahead(this.state.value.change(), item); this.onChange(change); @@ -531,7 +429,7 @@ class QueryField extends React.Component { // Write DOM requestAnimationFrame(() => { - menu.style.opacity = 1; + menu.style.opacity = '1'; menu.style.top = `${rect.top + scrollY + rect.height + 4}px`; menu.style.left = `${rect.left + scrollX - 2}px`; }); @@ -554,17 +452,16 @@ class QueryField extends React.Component { let selectedIndex = Math.max(this.state.typeaheadIndex, 0); const flattenedSuggestions = flattenSuggestions(suggestions); selectedIndex = selectedIndex % flattenedSuggestions.length || 0; - const selectedKeys = (flattenedSuggestions.length > 0 ? [flattenedSuggestions[selectedIndex]] : []).map( - i => (typeof i === 'object' ? i.text : i) - ); + const selectedItem: Suggestion | null = + flattenedSuggestions.length > 0 ? 
flattenedSuggestions[selectedIndex] : null; // Create typeahead in DOM root so we can later position it absolutely return ( @@ -591,4 +488,24 @@ class QueryField extends React.Component { } } +class Portal extends React.Component<{ index?: number; prefix: string }, {}> { + node: HTMLElement; + + constructor(props) { + super(props); + const { index = 0, prefix = 'query' } = props; + this.node = document.createElement('div'); + this.node.classList.add(`slate-typeahead`, `slate-typeahead-${prefix}-${index}`); + document.body.appendChild(this.node); + } + + componentWillUnmount() { + document.body.removeChild(this.node); + } + + render() { + return ReactDOM.createPortal(this.props.children, this.node); + } +} + export default QueryField; diff --git a/public/app/containers/Explore/QueryRows.tsx b/public/app/containers/Explore/QueryRows.tsx index a968e1e2c64..3aaa006d6df 100644 --- a/public/app/containers/Explore/QueryRows.tsx +++ b/public/app/containers/Explore/QueryRows.tsx @@ -1,7 +1,6 @@ import React, { PureComponent } from 'react'; -import promql from './slate-plugins/prism/promql'; -import QueryField from './QueryField'; +import QueryField from './PromQueryField'; class QueryRow extends PureComponent { constructor(props) { @@ -62,9 +61,6 @@ class QueryRow extends PureComponent { portalPrefix="explore" onPressEnter={this.handlePressEnter} onQueryChange={this.handleChangeQuery} - placeholder="Enter a PromQL query" - prismLanguage="promql" - prismDefinition={promql} request={request} />
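
[Editor's illustration, not part of the patch: a minimal, self-contained TypeScript sketch of the Suggestion/SuggestionGroup shapes introduced in QueryField.tsx above and of how the prefix filtering in handleTypeahead roughly behaves. The function filterByPrefix and the sample data are illustrative assumptions, not Grafana APIs.]

// Standalone sketch: feed suggestion groups to a typeahead and narrow them by prefix.
interface Suggestion {
  label: string;
  insertText?: string;
  filterText?: string;
  sortText?: string;
}

interface SuggestionGroup {
  label: string;
  items: Suggestion[];
  prefixMatch?: boolean; // match only at the start of the word
  skipFilter?: boolean;  // never filter this group by the typed prefix
}

function filterByPrefix(groups: SuggestionGroup[], prefix: string): SuggestionGroup[] {
  return groups
    .map(group => {
      let items = group.items;
      if (prefix && !group.skipFilter) {
        // Match on filterText when present, otherwise on label (mirrors handleTypeahead)
        items = items.filter(s => {
          const text = s.filterText || s.label;
          return group.prefixMatch ? text.indexOf(prefix) === 0 : text.indexOf(prefix) > -1;
        });
        // Drop the already-typed value itself unless the item inserts custom text
        items = items.filter(s => s.insertText || (s.filterText || s.label) !== prefix);
      }
      // Sort by sortText when provided, otherwise by label
      const sorted = [...items].sort((a, b) => (a.sortText || a.label).localeCompare(b.sortText || b.label));
      return { ...group, items: sorted };
    })
    .filter(group => group.items.length > 0); // drop empty groups entirely

// Example: metric suggestions narrowed by the prefix "node_"
const groups: SuggestionGroup[] = [
  { label: 'Metrics', prefixMatch: true, items: [{ label: 'node_cpu' }, { label: 'node_load1' }, { label: 'up' }] },
  { label: 'Functions', items: [{ label: 'rate', insertText: 'rate()' }] },
];
console.log(JSON.stringify(filterByPrefix(groups, 'node_'), null, 2));
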
diff --git a/public/app/containers/Explore/Typeahead.tsx b/public/app/containers/Explore/Typeahead.tsx index 44fce7f8c7e..9924488035c 100644 --- a/public/app/containers/Explore/Typeahead.tsx +++ b/public/app/containers/Explore/Typeahead.tsx @@ -1,17 +1,26 @@ import React from 'react'; -function scrollIntoView(el) { +import { Suggestion, SuggestionGroup } from './QueryField'; + +function scrollIntoView(el: HTMLElement) { if (!el || !el.offsetParent) { return; } - const container = el.offsetParent; + const container = el.offsetParent as HTMLElement; if (el.offsetTop > container.scrollTop + container.offsetHeight || el.offsetTop < container.scrollTop) { container.scrollTop = el.offsetTop - container.offsetTop; } } -class TypeaheadItem extends React.PureComponent { - el: any; +interface TypeaheadItemProps { + isSelected: boolean; + item: Suggestion; + onClickItem: (Suggestion) => void; +} + +class TypeaheadItem extends React.PureComponent { + el: HTMLElement; + componentDidUpdate(prevProps) { if (this.props.isSelected && !prevProps.isSelected) { scrollIntoView(this.el); @@ -22,20 +31,30 @@ class TypeaheadItem extends React.PureComponent { this.el = el; }; + onClick = () => { + this.props.onClickItem(this.props.item); + }; + render() { - const { hint, isSelected, label, onClickItem } = this.props; + const { isSelected, item } = this.props; const className = isSelected ? 'typeahead-item typeahead-item__selected' : 'typeahead-item'; - const onClick = () => onClickItem(label); return ( -
<li ref={this.getRef} className={className} onClick={onClick}> - {label} - {hint && isSelected ? <div className="typeahead-item-hint">{hint}</div> : null} +
<li ref={this.getRef} className={className} onClick={this.onClick}> + {item.detail || item.label} + {item.documentation && isSelected ? <div className="typeahead-item-hint">{item.documentation}</div> : null}
</li> ); } } -class TypeaheadGroup extends React.PureComponent { +interface TypeaheadGroupProps { + items: Suggestion[]; + label: string; + onClickItem: (Suggestion) => void; + selected: Suggestion; +} + +class TypeaheadGroup extends React.PureComponent { render() { const { items, label, selected, onClickItem } = this.props; return ( @@ -43,16 +62,8 @@ class TypeaheadGroup extends React.PureComponent {
    {label}
      {items.map(item => { - const text = typeof item === 'object' ? item.text : item; - const label = typeof item === 'object' ? item.display || item.text : item; return ( - -1} - hint={item.hint} - label={label} - /> + ); })}
    @@ -61,13 +72,19 @@ class TypeaheadGroup extends React.PureComponent { } } -class Typeahead extends React.PureComponent { +interface TypeaheadProps { + groupedItems: SuggestionGroup[]; + menuRef: any; + selectedItem: Suggestion | null; + onClickItem: (Suggestion) => void; +} +class Typeahead extends React.PureComponent { render() { - const { groupedItems, menuRef, selectedItems, onClickItem } = this.props; + const { groupedItems, menuRef, selectedItem, onClickItem } = this.props; return (
      {groupedItems.map(g => ( - + ))}
    ); diff --git a/public/app/containers/Explore/slate-plugins/prism/promql.ts b/public/app/containers/Explore/slate-plugins/prism/promql.ts index 0f0be18cb6f..a17c5fbc4f6 100644 --- a/public/app/containers/Explore/slate-plugins/prism/promql.ts +++ b/public/app/containers/Explore/slate-plugins/prism/promql.ts @@ -1,67 +1,368 @@ +/* tslint:disable max-line-length */ + export const OPERATORS = ['by', 'group_left', 'group_right', 'ignoring', 'on', 'offset', 'without']; const AGGREGATION_OPERATORS = [ - 'sum', - 'min', - 'max', - 'avg', - 'stddev', - 'stdvar', - 'count', - 'count_values', - 'bottomk', - 'topk', - 'quantile', + { + label: 'sum', + insertText: 'sum()', + documentation: 'Calculate sum over dimensions', + }, + { + label: 'min', + insertText: 'min()', + documentation: 'Select minimum over dimensions', + }, + { + label: 'max', + insertText: 'max()', + documentation: 'Select maximum over dimensions', + }, + { + label: 'avg', + insertText: 'avg()', + documentation: 'Calculate the average over dimensions', + }, + { + label: 'stddev', + insertText: 'stddev()', + documentation: 'Calculate population standard deviation over dimensions', + }, + { + label: 'stdvar', + insertText: 'stdvar()', + documentation: 'Calculate population standard variance over dimensions', + }, + { + label: 'count', + insertText: 'count()', + documentation: 'Count number of elements in the vector', + }, + { + label: 'count_values', + insertText: 'count_values()', + documentation: 'Count number of elements with the same value', + }, + { + label: 'bottomk', + insertText: 'bottomk()', + documentation: 'Smallest k elements by sample value', + }, + { + label: 'topk', + insertText: 'topk()', + documentation: 'Largest k elements by sample value', + }, + { + label: 'quantile', + insertText: 'quantile()', + documentation: 'Calculate φ-quantile (0 ≤ φ ≤ 1) over dimensions', + }, ]; export const FUNCTIONS = [ ...AGGREGATION_OPERATORS, - 'abs', - 'absent', - 'ceil', - 'changes', - 'clamp_max', - 'clamp_min', - 'count_scalar', - 'day_of_month', - 'day_of_week', - 'days_in_month', - 'delta', - 'deriv', - 'drop_common_labels', - 'exp', - 'floor', - 'histogram_quantile', - 'holt_winters', - 'hour', - 'idelta', - 'increase', - 'irate', - 'label_replace', - 'ln', - 'log2', - 'log10', - 'minute', - 'month', - 'predict_linear', - 'rate', - 'resets', - 'round', - 'scalar', - 'sort', - 'sort_desc', - 'sqrt', - 'time', - 'vector', - 'year', - 'avg_over_time', - 'min_over_time', - 'max_over_time', - 'sum_over_time', - 'count_over_time', - 'quantile_over_time', - 'stddev_over_time', - 'stdvar_over_time', + { + insertText: 'abs()', + label: 'abs', + detail: 'abs(v instant-vector)', + documentation: 'Returns the input vector with all sample values converted to their absolute value.', + }, + { + insertText: 'absent()', + label: 'absent', + detail: 'absent(v instant-vector)', + documentation: + 'Returns an empty vector if the vector passed to it has any elements and a 1-element vector with the value 1 if the vector passed to it has no elements. 
This is useful for alerting on when no time series exist for a given metric name and label combination.', + }, + { + insertText: 'ceil()', + label: 'ceil', + detail: 'ceil(v instant-vector)', + documentation: 'Rounds the sample values of all elements in `v` up to the nearest integer.', + }, + { + insertText: 'changes()', + label: 'changes', + detail: 'changes(v range-vector)', + documentation: + 'For each input time series, `changes(v range-vector)` returns the number of times its value has changed within the provided time range as an instant vector.', + }, + { + insertText: 'clamp_max()', + label: 'clamp_max', + detail: 'clamp_max(v instant-vector, max scalar)', + documentation: 'Clamps the sample values of all elements in `v` to have an upper limit of `max`.', + }, + { + insertText: 'clamp_min()', + label: 'clamp_min', + detail: 'clamp_min(v instant-vector, min scalar)', + documentation: 'Clamps the sample values of all elements in `v` to have a lower limit of `min`.', + }, + { + insertText: 'count_scalar()', + label: 'count_scalar', + detail: 'count_scalar(v instant-vector)', + documentation: + 'Returns the number of elements in a time series vector as a scalar. This is in contrast to the `count()` aggregation operator, which always returns a vector (an empty one if the input vector is empty) and allows grouping by labels via a `by` clause.', + }, + { + insertText: 'day_of_month()', + label: 'day_of_month', + detail: 'day_of_month(v=vector(time()) instant-vector)', + documentation: 'Returns the day of the month for each of the given times in UTC. Returned values are from 1 to 31.', + }, + { + insertText: 'day_of_week()', + label: 'day_of_week', + detail: 'day_of_week(v=vector(time()) instant-vector)', + documentation: + 'Returns the day of the week for each of the given times in UTC. Returned values are from 0 to 6, where 0 means Sunday etc.', + }, + { + insertText: 'days_in_month()', + label: 'days_in_month', + detail: 'days_in_month(v=vector(time()) instant-vector)', + documentation: + 'Returns number of days in the month for each of the given times in UTC. Returned values are from 28 to 31.', + }, + { + insertText: 'delta()', + label: 'delta', + detail: 'delta(v range-vector)', + documentation: + 'Calculates the difference between the first and last value of each time series element in a range vector `v`, returning an instant vector with the given deltas and equivalent labels. 
The delta is extrapolated to cover the full time range as specified in the range vector selector, so that it is possible to get a non-integer result even if the sample values are all integers.', + }, + { + insertText: 'deriv()', + label: 'deriv', + detail: 'deriv(v range-vector)', + documentation: + 'Calculates the per-second derivative of the time series in a range vector `v`, using simple linear regression.', + }, + { + insertText: 'drop_common_labels()', + label: 'drop_common_labels', + detail: 'drop_common_labels(instant-vector)', + documentation: 'Drops all labels that have the same name and value across all series in the input vector.', + }, + { + insertText: 'exp()', + label: 'exp', + detail: 'exp(v instant-vector)', + documentation: + 'Calculates the exponential function for all elements in `v`.\nSpecial cases are:\n* `Exp(+Inf) = +Inf` \n* `Exp(NaN) = NaN`', + }, + { + insertText: 'floor()', + label: 'floor', + detail: 'floor(v instant-vector)', + documentation: 'Rounds the sample values of all elements in `v` down to the nearest integer.', + }, + { + insertText: 'histogram_quantile()', + label: 'histogram_quantile', + detail: 'histogram_quantile(φ float, b instant-vector)', + documentation: + 'Calculates the φ-quantile (0 ≤ φ ≤ 1) from the buckets `b` of a histogram. The samples in `b` are the counts of observations in each bucket. Each sample must have a label `le` where the label value denotes the inclusive upper bound of the bucket. (Samples without such a label are silently ignored.) The histogram metric type automatically provides time series with the `_bucket` suffix and the appropriate labels.', + }, + { + insertText: 'holt_winters()', + label: 'holt_winters', + detail: 'holt_winters(v range-vector, sf scalar, tf scalar)', + documentation: + 'Produces a smoothed value for time series based on the range in `v`. The lower the smoothing factor `sf`, the more importance is given to old data. The higher the trend factor `tf`, the more trends in the data is considered. Both `sf` and `tf` must be between 0 and 1.', + }, + { + insertText: 'hour()', + label: 'hour', + detail: 'hour(v=vector(time()) instant-vector)', + documentation: 'Returns the hour of the day for each of the given times in UTC. Returned values are from 0 to 23.', + }, + { + insertText: 'idelta()', + label: 'idelta', + detail: 'idelta(v range-vector)', + documentation: + 'Calculates the difference between the last two samples in the range vector `v`, returning an instant vector with the given deltas and equivalent labels.', + }, + { + insertText: 'increase()', + label: 'increase', + detail: 'increase(v range-vector)', + documentation: + 'Calculates the increase in the time series in the range vector. Breaks in monotonicity (such as counter resets due to target restarts) are automatically adjusted for. The increase is extrapolated to cover the full time range as specified in the range vector selector, so that it is possible to get a non-integer result even if a counter increases only by integer increments.', + }, + { + insertText: 'irate()', + label: 'irate', + detail: 'irate(v range-vector)', + documentation: + 'Calculates the per-second instant rate of increase of the time series in the range vector. This is based on the last two data points. 
Breaks in monotonicity (such as counter resets due to target restarts) are automatically adjusted for.', + }, + { + insertText: 'label_replace()', + label: 'label_replace', + detail: 'label_replace(v instant-vector, dst_label string, replacement string, src_label string, regex string)', + documentation: + "For each timeseries in `v`, `label_replace(v instant-vector, dst_label string, replacement string, src_label string, regex string)` matches the regular expression `regex` against the label `src_label`. If it matches, then the timeseries is returned with the label `dst_label` replaced by the expansion of `replacement`. `$1` is replaced with the first matching subgroup, `$2` with the second etc. If the regular expression doesn't match then the timeseries is returned unchanged.", + }, + { + insertText: 'ln()', + label: 'ln', + detail: 'ln(v instant-vector)', + documentation: + 'calculates the natural logarithm for all elements in `v`.\nSpecial cases are:\n * `ln(+Inf) = +Inf`\n * `ln(0) = -Inf`\n * `ln(x < 0) = NaN`\n * `ln(NaN) = NaN`', + }, + { + insertText: 'log2()', + label: 'log2', + detail: 'log2(v instant-vector)', + documentation: + 'Calculates the binary logarithm for all elements in `v`. The special cases are equivalent to those in `ln`.', + }, + { + insertText: 'log10()', + label: 'log10', + detail: 'log10(v instant-vector)', + documentation: + 'Calculates the decimal logarithm for all elements in `v`. The special cases are equivalent to those in `ln`.', + }, + { + insertText: 'minute()', + label: 'minute', + detail: 'minute(v=vector(time()) instant-vector)', + documentation: + 'Returns the minute of the hour for each of the given times in UTC. Returned values are from 0 to 59.', + }, + { + insertText: 'month()', + label: 'month', + detail: 'month(v=vector(time()) instant-vector)', + documentation: + 'Returns the month of the year for each of the given times in UTC. Returned values are from 1 to 12, where 1 means January etc.', + }, + { + insertText: 'predict_linear()', + label: 'predict_linear', + detail: 'predict_linear(v range-vector, t scalar)', + documentation: + 'Predicts the value of time series `t` seconds from now, based on the range vector `v`, using simple linear regression.', + }, + { + insertText: 'rate()', + label: 'rate', + detail: 'rate(v range-vector)', + documentation: + "Calculates the per-second average rate of increase of the time series in the range vector. Breaks in monotonicity (such as counter resets due to target restarts) are automatically adjusted for. Also, the calculation extrapolates to the ends of the time range, allowing for missed scrapes or imperfect alignment of scrape cycles with the range's time period.", + }, + { + insertText: 'resets()', + label: 'resets', + detail: 'resets(v range-vector)', + documentation: + 'For each input time series, `resets(v range-vector)` returns the number of counter resets within the provided time range as an instant vector. Any decrease in the value between two consecutive samples is interpreted as a counter reset.', + }, + { + insertText: 'round()', + label: 'round', + detail: 'round(v instant-vector, to_nearest=1 scalar)', + documentation: + 'Rounds the sample values of all elements in `v` to the nearest integer. Ties are resolved by rounding up. The optional `to_nearest` argument allows specifying the nearest multiple to which the sample values should be rounded. 
This multiple may also be a fraction.', + }, + { + insertText: 'scalar()', + label: 'scalar', + detail: 'scalar(v instant-vector)', + documentation: + 'Given a single-element input vector, `scalar(v instant-vector)` returns the sample value of that single element as a scalar. If the input vector does not have exactly one element, `scalar` will return `NaN`.', + }, + { + insertText: 'sort()', + label: 'sort', + detail: 'sort(v instant-vector)', + documentation: 'Returns vector elements sorted by their sample values, in ascending order.', + }, + { + insertText: 'sort_desc()', + label: 'sort_desc', + detail: 'sort_desc(v instant-vector)', + documentation: 'Returns vector elements sorted by their sample values, in descending order.', + }, + { + insertText: 'sqrt()', + label: 'sqrt', + detail: 'sqrt(v instant-vector)', + documentation: 'Calculates the square root of all elements in `v`.', + }, + { + insertText: 'time()', + label: 'time', + detail: 'time()', + documentation: + 'Returns the number of seconds since January 1, 1970 UTC. Note that this does not actually return the current time, but the time at which the expression is to be evaluated.', + }, + { + insertText: 'vector()', + label: 'vector', + detail: 'vector(s scalar)', + documentation: 'Returns the scalar `s` as a vector with no labels.', + }, + { + insertText: 'year()', + label: 'year', + detail: 'year(v=vector(time()) instant-vector)', + documentation: 'Returns the year for each of the given times in UTC.', + }, + { + insertText: 'avg_over_time()', + label: 'avg_over_time', + detail: 'avg_over_time(range-vector)', + documentation: 'The average value of all points in the specified interval.', + }, + { + insertText: 'min_over_time()', + label: 'min_over_time', + detail: 'min_over_time(range-vector)', + documentation: 'The minimum value of all points in the specified interval.', + }, + { + insertText: 'max_over_time()', + label: 'max_over_time', + detail: 'max_over_time(range-vector)', + documentation: 'The maximum value of all points in the specified interval.', + }, + { + insertText: 'sum_over_time()', + label: 'sum_over_time', + detail: 'sum_over_time(range-vector)', + documentation: 'The sum of all values in the specified interval.', + }, + { + insertText: 'count_over_time()', + label: 'count_over_time', + detail: 'count_over_time(range-vector)', + documentation: 'The count of all values in the specified interval.', + }, + { + insertText: 'quantile_over_time()', + label: 'quantile_over_time', + detail: 'quantile_over_time(scalar, range-vector)', + documentation: 'The φ-quantile (0 ≤ φ ≤ 1) of the values in the specified interval.', + }, + { + insertText: 'stddev_over_time()', + label: 'stddev_over_time', + detail: 'stddev_over_time(range-vector)', + documentation: 'The population standard deviation of the values in the specified interval.', + }, + { + insertText: 'stdvar_over_time()', + label: 'stdvar_over_time', + detail: 'stdvar_over_time(range-vector)', + documentation: 'The population standard variance of the values in the specified interval.', + }, ]; const tokenizer = { @@ -93,7 +394,7 @@ const tokenizer = { }, }, }, - function: new RegExp(`\\b(?:${FUNCTIONS.join('|')})(?=\\s*\\()`, 'i'), + function: new RegExp(`\\b(?:${FUNCTIONS.map(f => f.label).join('|')})(?=\\s*\\()`, 'i'), 'context-range': [ { pattern: /\[[^\]]*(?=])/, // [1m] diff --git a/public/sass/components/_slate_editor.scss b/public/sass/components/_slate_editor.scss index 119c468292a..10b2238f4b8 100644 --- a/public/sass/components/_slate_editor.scss +++ 
b/public/sass/components/_slate_editor.scss @@ -71,6 +71,7 @@ .typeahead-item-hint { font-size: $font-size-xs; color: $text-color; + white-space: normal; } } } From ab8fa0de7443136afeab82fcf8713fddbdc23a48 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Thu, 26 Jul 2018 21:39:02 +0200 Subject: [PATCH 104/105] elasticsearch: support reversed index patterns Now both [index-]pattern and pattern[-index] are supported --- .../elasticsearch/client/index_pattern.go | 35 ++++++++++++++----- .../client/index_pattern_test.go | 27 +++++++++++++- 2 files changed, 53 insertions(+), 9 deletions(-) diff --git a/pkg/tsdb/elasticsearch/client/index_pattern.go b/pkg/tsdb/elasticsearch/client/index_pattern.go index 8391e902ea4..952b5c4f806 100644 --- a/pkg/tsdb/elasticsearch/client/index_pattern.go +++ b/pkg/tsdb/elasticsearch/client/index_pattern.go @@ -248,13 +248,28 @@ var datePatternReplacements = map[string]string{ func formatDate(t time.Time, pattern string) string { var datePattern string - parts := strings.Split(strings.TrimLeft(pattern, "["), "]") - base := parts[0] - if len(parts) == 2 { - datePattern = parts[1] - } else { - datePattern = base - base = "" + base := "" + ltr := false + + if strings.HasPrefix(pattern, "[") { + parts := strings.Split(strings.TrimLeft(pattern, "["), "]") + base = parts[0] + if len(parts) == 2 { + datePattern = parts[1] + } else { + datePattern = base + base = "" + } + ltr = true + } else if strings.HasSuffix(pattern, "]") { + parts := strings.Split(strings.TrimRight(pattern, "]"), "[") + datePattern = parts[0] + if len(parts) == 2 { + base = parts[1] + } else { + base = "" + } + ltr = false } formatted := t.Format(patternToLayout(datePattern)) @@ -293,7 +308,11 @@ func formatDate(t time.Time, pattern string) string { formatted = strings.Replace(formatted, "", fmt.Sprintf("%d", t.Hour()), -1) } - return base + formatted + if ltr { + return base + formatted + } + + return formatted + base } func patternToLayout(pattern string) string { diff --git a/pkg/tsdb/elasticsearch/client/index_pattern_test.go b/pkg/tsdb/elasticsearch/client/index_pattern_test.go index 3bd823d8c87..ca20b39d532 100644 --- a/pkg/tsdb/elasticsearch/client/index_pattern_test.go +++ b/pkg/tsdb/elasticsearch/client/index_pattern_test.go @@ -28,29 +28,54 @@ func TestIndexPattern(t *testing.T) { to := fmt.Sprintf("%d", time.Date(2018, 5, 15, 17, 55, 0, 0, time.UTC).UnixNano()/int64(time.Millisecond)) indexPatternScenario(intervalHourly, "[data-]YYYY.MM.DD.HH", tsdb.NewTimeRange(from, to), func(indices []string) { - //So(indices, ShouldHaveLength, 1) + So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "data-2018.05.15.17") }) + indexPatternScenario(intervalHourly, "YYYY.MM.DD.HH[-data]", tsdb.NewTimeRange(from, to), func(indices []string) { + So(indices, ShouldHaveLength, 1) + So(indices[0], ShouldEqual, "2018.05.15.17-data") + }) + indexPatternScenario(intervalDaily, "[data-]YYYY.MM.DD", tsdb.NewTimeRange(from, to), func(indices []string) { So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "data-2018.05.15") }) + indexPatternScenario(intervalDaily, "YYYY.MM.DD[-data]", tsdb.NewTimeRange(from, to), func(indices []string) { + So(indices, ShouldHaveLength, 1) + So(indices[0], ShouldEqual, "2018.05.15-data") + }) + indexPatternScenario(intervalWeekly, "[data-]GGGG.WW", tsdb.NewTimeRange(from, to), func(indices []string) { So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "data-2018.20") }) + indexPatternScenario(intervalWeekly, "GGGG.WW[-data]", tsdb.NewTimeRange(from, 
to), func(indices []string) { + So(indices, ShouldHaveLength, 1) + So(indices[0], ShouldEqual, "2018.20-data") + }) + indexPatternScenario(intervalMonthly, "[data-]YYYY.MM", tsdb.NewTimeRange(from, to), func(indices []string) { So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "data-2018.05") }) + indexPatternScenario(intervalMonthly, "YYYY.MM[-data]", tsdb.NewTimeRange(from, to), func(indices []string) { + So(indices, ShouldHaveLength, 1) + So(indices[0], ShouldEqual, "2018.05-data") + }) + indexPatternScenario(intervalYearly, "[data-]YYYY", tsdb.NewTimeRange(from, to), func(indices []string) { So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "data-2018") }) + + indexPatternScenario(intervalYearly, "YYYY[-data]", tsdb.NewTimeRange(from, to), func(indices []string) { + So(indices, ShouldHaveLength, 1) + So(indices[0], ShouldEqual, "2018-data") + }) }) Convey("Hourly interval", t, func() { From 48e5e65c73eea000bf2b702b8743de0146e29f86 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Fri, 27 Jul 2018 10:33:06 +0200 Subject: [PATCH 105/105] changelog: add notes about closing #12731 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6409f094f65..ad1b63234e9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -23,6 +23,7 @@ * **Units**: Polish złoty currency [#12691](https://github.com/grafana/grafana/pull/12691), thx [@mwegrzynek](https://github.com/mwegrzynek) * **Cloudwatch**: Improved error handling [#12489](https://github.com/grafana/grafana/issues/12489), thx [@mtanda](https://github.com/mtanda) * **Table**: Adjust header contrast for the light theme [#12668](https://github.com/grafana/grafana/issues/12668) +* **Elasticsearch**: For alerting/backend, support having index name to the right of pattern in index pattern [#12731](https://github.com/grafana/grafana/issues/12731) # 5.2.2 (2018-07-25)
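
[Editor's illustration, not part of the patch: a stripped-down, hypothetical Go sketch of what the reversed index-pattern support means in practice, i.e. that both "[data-]YYYY.MM.DD" and "YYYY.MM.DD[-data]" now resolve to concrete index names. Only the YYYY/MM/DD/HH tokens are handled here; the real formatDate in pkg/tsdb/elasticsearch covers many more Moment-style tokens and interval stepping.]

package main

import (
	"fmt"
	"strings"
	"time"
)

// patternToLayout converts a small subset of Moment-style date tokens to a Go layout.
func patternToLayout(datePattern string) string {
	r := strings.NewReplacer("YYYY", "2006", "MM", "01", "DD", "02", "HH", "15")
	return r.Replace(datePattern)
}

// expandIndex resolves a single index name for time t, with the static part either
// to the left ("[data-]...") or to the right ("...[-data]") of the date pattern.
func expandIndex(t time.Time, pattern string) string {
	if strings.HasPrefix(pattern, "[") {
		parts := strings.SplitN(strings.TrimPrefix(pattern, "["), "]", 2)
		return parts[0] + t.Format(patternToLayout(parts[1]))
	}
	if strings.HasSuffix(pattern, "]") {
		parts := strings.SplitN(strings.TrimSuffix(pattern, "]"), "[", 2)
		return t.Format(patternToLayout(parts[0])) + parts[1]
	}
	return t.Format(patternToLayout(pattern))
}

func main() {
	ts := time.Date(2018, 5, 15, 17, 55, 0, 0, time.UTC)
	fmt.Println(expandIndex(ts, "[data-]YYYY.MM.DD")) // data-2018.05.15
	fmt.Println(expandIndex(ts, "YYYY.MM.DD[-data]")) // 2018.05.15-data
}
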