mirror of
https://github.com/grafana/grafana.git
synced 2025-02-25 18:55:37 -06:00
Chore: remove file export (use inspector/toCSV()) (#23508)
* remove file export... use CSV download
This commit is contained in:
parent
8ed6ab2bc9
commit
db166c4c41
@ -1,145 +0,0 @@
|
||||
import * as fileExport from '../utils/file_export';
|
||||
import { beforeEach, expect } from 'test/lib/common';
|
||||
import { TableData } from '@grafana/data';
|
||||
|
||||
describe('file_export', () => {
|
||||
const ctx: any = {};
|
||||
|
||||
beforeEach(() => {
|
||||
ctx.seriesList = [
|
||||
{
|
||||
alias: 'series_1',
|
||||
datapoints: [
|
||||
[1, 1500026100000],
|
||||
[2, 1500026200000],
|
||||
[null, 1500026300000],
|
||||
[null, 1500026400000],
|
||||
[null, 1500026500000],
|
||||
[6, 1500026600000],
|
||||
],
|
||||
},
|
||||
{
|
||||
alias: 'series_2',
|
||||
datapoints: [
|
||||
[11, 1500026100000],
|
||||
[12, 1500026200000],
|
||||
[13, 1500026300000],
|
||||
[15, 1500026500000],
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
ctx.timeFormat = 'X'; // Unix timestamp (seconds)
|
||||
});
|
||||
|
||||
describe('when exporting series as rows', () => {
|
||||
it('should export points in proper order', () => {
|
||||
const text = fileExport.convertSeriesListToCsv(ctx.seriesList, { dateTimeFormat: ctx.timeFormat });
|
||||
const expectedText =
|
||||
'"Series";"Time";"Value"\r\n' +
|
||||
'"series_1";"1500026100";1\r\n' +
|
||||
'"series_1";"1500026200";2\r\n' +
|
||||
'"series_1";"1500026300";null\r\n' +
|
||||
'"series_1";"1500026400";null\r\n' +
|
||||
'"series_1";"1500026500";null\r\n' +
|
||||
'"series_1";"1500026600";6\r\n' +
|
||||
'"series_2";"1500026100";11\r\n' +
|
||||
'"series_2";"1500026200";12\r\n' +
|
||||
'"series_2";"1500026300";13\r\n' +
|
||||
'"series_2";"1500026500";15';
|
||||
|
||||
expect(text).toBe(expectedText);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when exporting series as columns', () => {
|
||||
it('should export points in proper order', () => {
|
||||
const text = fileExport.convertSeriesListToCsvColumns(ctx.seriesList, { dateTimeFormat: ctx.timeFormat });
|
||||
const expectedText =
|
||||
'"Time";"series_1";"series_2"\r\n' +
|
||||
'"1500026100";1;11\r\n' +
|
||||
'"1500026200";2;12\r\n' +
|
||||
'"1500026300";null;13\r\n' +
|
||||
'"1500026400";null;null\r\n' +
|
||||
'"1500026500";null;15\r\n' +
|
||||
'"1500026600";6;null';
|
||||
|
||||
expect(text).toBe(expectedText);
|
||||
});
|
||||
|
||||
it('should not modify series.datapoints', () => {
|
||||
const expectedSeries1DataPoints = ctx.seriesList[0].datapoints.slice();
|
||||
const expectedSeries2DataPoints = ctx.seriesList[1].datapoints.slice();
|
||||
|
||||
fileExport.convertSeriesListToCsvColumns(ctx.seriesList, { dateTimeFormat: ctx.timeFormat });
|
||||
|
||||
expect(expectedSeries1DataPoints).toEqual(ctx.seriesList[0].datapoints);
|
||||
expect(expectedSeries2DataPoints).toEqual(ctx.seriesList[1].datapoints);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when exporting table data to csv', () => {
|
||||
it('should properly escape special characters and quote all string values', () => {
|
||||
const inputTable: any = {
|
||||
columns: [
|
||||
{ title: 'integer_value' },
|
||||
{ text: 'string_value' },
|
||||
{ title: 'float_value' },
|
||||
{ text: 'boolean_value' },
|
||||
],
|
||||
rows: [
|
||||
[123, 'some_string', 1.234, true],
|
||||
[1000, 'some_string', 1.234567891, true],
|
||||
[0o765, 'some string with " in the middle', 1e-2, false],
|
||||
[0o765, 'some string with "" in the middle', 1e-2, false],
|
||||
[0o765, 'some string with """ in the middle', 1e-2, false],
|
||||
[0o765, '"some string with " at the beginning', 1e-2, false],
|
||||
[0o765, 'some string with " at the end"', 1e-2, false],
|
||||
[0x123, 'some string with \n in the middle', 10.01, false],
|
||||
[0b1011, 'some string with ; in the middle', -12.34, true],
|
||||
[123, 'some string with ;; in the middle', -12.34, true],
|
||||
[1234, '=a bogus formula ', '-and another', '+another', '@ref'],
|
||||
],
|
||||
};
|
||||
|
||||
const returnedText = fileExport.convertTableDataToCsv(inputTable, false);
|
||||
|
||||
const expectedText =
|
||||
'"integer_value";"string_value";"float_value";"boolean_value"\r\n' +
|
||||
'123;"some_string";1.234;true\r\n' +
|
||||
'1000;"some_string";1.234567891;true\r\n' +
|
||||
'501;"some string with "" in the middle";0.01;false\r\n' +
|
||||
'501;"some string with """" in the middle";0.01;false\r\n' +
|
||||
'501;"some string with """""" in the middle";0.01;false\r\n' +
|
||||
'501;"""some string with "" at the beginning";0.01;false\r\n' +
|
||||
'501;"some string with "" at the end""";0.01;false\r\n' +
|
||||
'291;"some string with \n in the middle";10.01;false\r\n' +
|
||||
'11;"some string with ; in the middle";-12.34;true\r\n' +
|
||||
'123;"some string with ;; in the middle";-12.34;true\r\n' +
|
||||
'1234;"\'=a bogus formula";"\'-and another";"\'+another";"\'@ref"';
|
||||
|
||||
expect(returnedText).toBe(expectedText);
|
||||
});
|
||||
|
||||
it('should decode HTML encoded characters', () => {
|
||||
const inputTable: TableData = {
|
||||
columns: [{ text: 'string_value' }],
|
||||
rows: [
|
||||
['"&ä'],
|
||||
['<strong>"some html"</strong>'],
|
||||
['<a href="http://something/index.html">some text</a>'],
|
||||
],
|
||||
};
|
||||
|
||||
const returnedText = fileExport.convertTableDataToCsv(inputTable, false);
|
||||
|
||||
const expectedText =
|
||||
'"string_value"\r\n' +
|
||||
'"""&ä"\r\n' +
|
||||
'"<strong>""some html""</strong>"\r\n' +
|
||||
'"<a href=""http://something/index.html"">some text</a>"';
|
||||
|
||||
expect(returnedText).toBe(expectedText);
|
||||
});
|
||||
});
|
||||
});
|
@ -1,199 +0,0 @@
|
||||
import { isBoolean, isNumber, sortedUniq, sortedIndexOf, unescape as htmlUnescaped } from 'lodash';
|
||||
import { saveAs } from 'file-saver';
|
||||
import { isNullOrUndefined } from 'util';
|
||||
import { dateTime, TimeZone, TableData } from '@grafana/data';
|
||||
|
||||
const DEFAULT_DATETIME_FORMAT = 'YYYY-MM-DDTHH:mm:ssZ';
|
||||
const POINT_TIME_INDEX = 1;
|
||||
const POINT_VALUE_INDEX = 0;
|
||||
|
||||
const END_COLUMN = ';';
|
||||
const END_ROW = '\r\n';
|
||||
const QUOTE = '"';
|
||||
const EXPORT_FILENAME = 'grafana_data_export.csv';
|
||||
|
||||
interface SeriesListToCsvColumnsOptions {
|
||||
dateTimeFormat: string;
|
||||
excel: boolean;
|
||||
timezone: TimeZone;
|
||||
}
|
||||
|
||||
type SeriesList = Array<{
|
||||
datapoints: any;
|
||||
alias: any;
|
||||
}>;
|
||||
|
||||
const defaultOptions: SeriesListToCsvColumnsOptions = {
|
||||
dateTimeFormat: DEFAULT_DATETIME_FORMAT,
|
||||
excel: false,
|
||||
timezone: '',
|
||||
};
|
||||
|
||||
function csvEscaped(text: string) {
|
||||
if (!text) {
|
||||
return text;
|
||||
}
|
||||
|
||||
return text
|
||||
.split(QUOTE)
|
||||
.join(QUOTE + QUOTE)
|
||||
.replace(/^([-+=@])/, "'$1")
|
||||
.replace(/\s+$/, '');
|
||||
}
|
||||
|
||||
const domParser = new DOMParser();
|
||||
function htmlDecoded(text: string) {
|
||||
if (!text) {
|
||||
return text;
|
||||
}
|
||||
|
||||
const regexp = /&[^;]+;/g;
|
||||
function htmlDecoded(value: string) {
|
||||
const parsedDom = domParser.parseFromString(value, 'text/html');
|
||||
return parsedDom.body.textContent;
|
||||
}
|
||||
return text.replace(regexp, htmlDecoded).replace(regexp, htmlDecoded);
|
||||
}
|
||||
|
||||
function formatSpecialHeader(useExcelHeader: boolean) {
|
||||
return useExcelHeader ? `sep=${END_COLUMN}${END_ROW}` : '';
|
||||
}
|
||||
|
||||
function formatRow(row: any[], addEndRowDelimiter = true) {
|
||||
let text = '';
|
||||
for (let i = 0; i < row.length; i += 1) {
|
||||
if (isBoolean(row[i]) || isNumber(row[i]) || isNullOrUndefined(row[i])) {
|
||||
text += row[i];
|
||||
} else {
|
||||
text += `${QUOTE}${csvEscaped(htmlUnescaped(htmlDecoded(row[i])))}${QUOTE}`;
|
||||
}
|
||||
|
||||
if (i < row.length - 1) {
|
||||
text += END_COLUMN;
|
||||
}
|
||||
}
|
||||
return addEndRowDelimiter ? text + END_ROW : text;
|
||||
}
|
||||
|
||||
export function convertSeriesListToCsv(seriesList: SeriesList, options: Partial<SeriesListToCsvColumnsOptions>) {
|
||||
const { dateTimeFormat, excel, timezone } = { ...defaultOptions, ...options };
|
||||
let text = formatSpecialHeader(excel) + formatRow(['Series', 'Time', 'Value']);
|
||||
for (let seriesIndex = 0; seriesIndex < seriesList.length; seriesIndex += 1) {
|
||||
for (let i = 0; i < seriesList[seriesIndex].datapoints.length; i += 1) {
|
||||
text += formatRow(
|
||||
[
|
||||
seriesList[seriesIndex].alias,
|
||||
timezone === 'utc'
|
||||
? dateTime(seriesList[seriesIndex].datapoints[i][POINT_TIME_INDEX])
|
||||
.utc()
|
||||
.format(dateTimeFormat)
|
||||
: dateTime(seriesList[seriesIndex].datapoints[i][POINT_TIME_INDEX]).format(dateTimeFormat),
|
||||
seriesList[seriesIndex].datapoints[i][POINT_VALUE_INDEX],
|
||||
],
|
||||
i < seriesList[seriesIndex].datapoints.length - 1 || seriesIndex < seriesList.length - 1
|
||||
);
|
||||
}
|
||||
}
|
||||
return text;
|
||||
}
|
||||
|
||||
export function exportSeriesListToCsv(seriesList: SeriesList, options: Partial<SeriesListToCsvColumnsOptions>) {
|
||||
const text = convertSeriesListToCsv(seriesList, options);
|
||||
saveSaveBlob(text, EXPORT_FILENAME);
|
||||
}
|
||||
|
||||
export function convertSeriesListToCsvColumns(seriesList: SeriesList, options: Partial<SeriesListToCsvColumnsOptions>) {
|
||||
const { dateTimeFormat, excel, timezone } = { ...defaultOptions, ...options };
|
||||
// add header
|
||||
let text =
|
||||
formatSpecialHeader(excel) +
|
||||
formatRow(
|
||||
['Time'].concat(
|
||||
seriesList.map(val => {
|
||||
return val.alias;
|
||||
})
|
||||
)
|
||||
);
|
||||
// process data
|
||||
const extendedDatapointsList = mergeSeriesByTime(seriesList);
|
||||
|
||||
// make text
|
||||
for (let i = 0; i < extendedDatapointsList[0].length; i += 1) {
|
||||
const timestamp =
|
||||
timezone === 'utc'
|
||||
? dateTime(extendedDatapointsList[0][i][POINT_TIME_INDEX])
|
||||
.utc()
|
||||
.format(dateTimeFormat)
|
||||
: dateTime(extendedDatapointsList[0][i][POINT_TIME_INDEX]).format(dateTimeFormat);
|
||||
|
||||
text += formatRow(
|
||||
[timestamp].concat(
|
||||
extendedDatapointsList.map(datapoints => {
|
||||
return datapoints[i][POINT_VALUE_INDEX];
|
||||
})
|
||||
),
|
||||
i < extendedDatapointsList[0].length - 1
|
||||
);
|
||||
}
|
||||
|
||||
return text;
|
||||
}
|
||||
|
||||
/**
|
||||
* Collect all unique timestamps from series list and use it to fill
|
||||
* missing points by null.
|
||||
*/
|
||||
function mergeSeriesByTime(seriesList: SeriesList) {
|
||||
let timestamps = [];
|
||||
for (let i = 0; i < seriesList.length; i++) {
|
||||
const seriesPoints = seriesList[i].datapoints;
|
||||
for (let j = 0; j < seriesPoints.length; j++) {
|
||||
timestamps.push(seriesPoints[j][POINT_TIME_INDEX]);
|
||||
}
|
||||
}
|
||||
timestamps = sortedUniq(timestamps.sort());
|
||||
|
||||
const result = [];
|
||||
for (let i = 0; i < seriesList.length; i++) {
|
||||
const seriesPoints = seriesList[i].datapoints;
|
||||
const seriesTimestamps = seriesPoints.map((p: any) => p[POINT_TIME_INDEX]);
|
||||
const extendedDatapoints = [];
|
||||
for (let j = 0; j < timestamps.length; j++) {
|
||||
const timestamp = timestamps[j];
|
||||
const pointIndex = sortedIndexOf(seriesTimestamps, timestamp);
|
||||
if (pointIndex !== -1) {
|
||||
extendedDatapoints.push(seriesPoints[pointIndex]);
|
||||
} else {
|
||||
extendedDatapoints.push([null, timestamp]);
|
||||
}
|
||||
}
|
||||
result.push(extendedDatapoints);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
export function exportSeriesListToCsvColumns(seriesList: SeriesList, options: Partial<SeriesListToCsvColumnsOptions>) {
|
||||
const text = convertSeriesListToCsvColumns(seriesList, options);
|
||||
saveSaveBlob(text, EXPORT_FILENAME);
|
||||
}
|
||||
|
||||
export function convertTableDataToCsv(table: TableData, excel = false) {
|
||||
let text = formatSpecialHeader(excel);
|
||||
// add headline
|
||||
text += formatRow(table.columns.map((val: any) => val.title || val.text));
|
||||
// process data
|
||||
for (let i = 0; i < table.rows.length; i += 1) {
|
||||
text += formatRow(table.rows[i], i < table.rows.length - 1);
|
||||
}
|
||||
return text;
|
||||
}
|
||||
|
||||
export function exportTableDataToCsv(table: TableData, excel = false) {
|
||||
const text = convertTableDataToCsv(table, excel);
|
||||
saveSaveBlob(text, EXPORT_FILENAME);
|
||||
}
|
||||
|
||||
export function saveSaveBlob(payload: any, fname: string) {
|
||||
const blob = new Blob([payload], { type: 'text/csv;charset=utf-8;header=present;' });
|
||||
saveAs(blob, fname);
|
||||
}
|
@ -32,7 +32,6 @@ import {
|
||||
DataSourceJsonData,
|
||||
DataQuery,
|
||||
} from '@grafana/data';
|
||||
import * as fileExport from 'app/core/utils/file_export';
|
||||
import * as flatten from 'app/core/utils/flatten';
|
||||
import * as ticks from 'app/core/utils/ticks';
|
||||
import { BackendSrv, getBackendSrv } from 'app/core/services/backend_srv';
|
||||
@ -131,7 +130,6 @@ exposeToPlugin('app/core/services/backend_srv', {
|
||||
|
||||
exposeToPlugin('app/plugins/sdk', sdk);
|
||||
exposeToPlugin('app/core/utils/datemath', dateMath);
|
||||
exposeToPlugin('app/core/utils/file_export', fileExport);
|
||||
exposeToPlugin('app/core/utils/flatten', flatten);
|
||||
exposeToPlugin('app/core/utils/kbn', kbn);
|
||||
exposeToPlugin('app/core/utils/ticks', ticks);
|
||||
|
Loading…
Reference in New Issue
Block a user