mirror of
https://github.com/grafana/grafana.git
synced 2025-02-25 18:55:37 -06:00
Logs: Add Download logs
button to log-browser (#55163)
* move download functions to utils * add downloads button * removed unused export annotations * changed button to `ToolbarButton` * added tests and restructured download methods * pin `blob-polyfill` * add missing import * changed type
This commit is contained in:
parent
8b77ee2734
commit
6bc09a6390
@ -4526,9 +4526,6 @@ exports[`better eslint`] = {
|
||||
[0, 0, 0, "Do not use any type assertions.", "2"],
|
||||
[0, 0, 0, "Do not use any type assertions.", "3"]
|
||||
],
|
||||
"public/app/features/inspector/InspectDataTab.tsx:5381": [
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "0"]
|
||||
],
|
||||
"public/app/features/inspector/InspectErrorTab.tsx:5381": [
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "0"]
|
||||
],
|
||||
|
@ -173,6 +173,7 @@
|
||||
"babel-loader": "8.2.5",
|
||||
"babel-plugin-angularjs-annotate": "0.10.0",
|
||||
"babel-plugin-macros": "3.1.0",
|
||||
"blob-polyfill": "7.0.20220408",
|
||||
"copy-webpack-plugin": "9.0.1",
|
||||
"css-loader": "6.7.1",
|
||||
"css-minimizer-webpack-plugin": "4.1.0",
|
||||
|
@ -1,13 +1,23 @@
|
||||
import { css } from '@emotion/css';
|
||||
import React from 'react';
|
||||
|
||||
import { LogsDedupStrategy, LogsMetaItem, LogsMetaKind, LogRowModel } from '@grafana/data';
|
||||
import { Button, Tooltip } from '@grafana/ui';
|
||||
import { Button, ToolbarButton, Tooltip, useStyles2 } from '@grafana/ui';
|
||||
|
||||
import { downloadLogsModelAsTxt } from '../inspector/utils/download';
|
||||
import { LogLabels } from '../logs/components/LogLabels';
|
||||
import { MAX_CHARACTERS } from '../logs/components/LogRowMessage';
|
||||
|
||||
import { MetaInfoText, MetaItemProps } from './MetaInfoText';
|
||||
|
||||
const getStyles = () => ({
|
||||
metaContainer: css`
|
||||
flex: 1;
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
`,
|
||||
});
|
||||
|
||||
export type Props = {
|
||||
meta: LogsMetaItem[];
|
||||
dedupStrategy: LogsDedupStrategy;
|
||||
@ -32,6 +42,12 @@ export const LogsMetaRow = React.memo(
|
||||
onEscapeNewlines,
|
||||
logRows,
|
||||
}: Props) => {
|
||||
const style = useStyles2(getStyles);
|
||||
|
||||
const downloadLogs = () => {
|
||||
downloadLogsModelAsTxt({ meta, rows: logRows }, 'Explore');
|
||||
};
|
||||
|
||||
const logsMetaItem: Array<LogsMetaItem | MetaItemProps> = [...meta];
|
||||
|
||||
// Add deduplication info
|
||||
@ -85,18 +101,22 @@ export const LogsMetaRow = React.memo(
|
||||
),
|
||||
});
|
||||
}
|
||||
|
||||
return (
|
||||
<>
|
||||
{logsMetaItem && (
|
||||
<MetaInfoText
|
||||
metaItems={logsMetaItem.map((item) => {
|
||||
return {
|
||||
label: item.label,
|
||||
value: 'kind' in item ? renderMetaItem(item.value, item.kind) : item.value,
|
||||
};
|
||||
})}
|
||||
/>
|
||||
<div className={style.metaContainer}>
|
||||
<MetaInfoText
|
||||
metaItems={logsMetaItem.map((item) => {
|
||||
return {
|
||||
label: item.label,
|
||||
value: 'kind' in item ? renderMetaItem(item.value, item.kind) : item.value,
|
||||
};
|
||||
})}
|
||||
/>
|
||||
<ToolbarButton onClick={downloadLogs} variant="default" icon="download-alt">
|
||||
Download logs
|
||||
</ToolbarButton>
|
||||
</div>
|
||||
)}
|
||||
</>
|
||||
);
|
||||
|
@ -1,23 +1,19 @@
|
||||
import { css } from '@emotion/css';
|
||||
import { Trans, t } from '@lingui/macro';
|
||||
import { saveAs } from 'file-saver';
|
||||
import { t, Trans } from '@lingui/macro';
|
||||
import React, { PureComponent } from 'react';
|
||||
import AutoSizer from 'react-virtualized-auto-sizer';
|
||||
|
||||
import {
|
||||
applyFieldOverrides,
|
||||
applyRawFieldOverrides,
|
||||
CoreApp,
|
||||
CSVConfig,
|
||||
DataFrame,
|
||||
DataTransformerID,
|
||||
dateTimeFormat,
|
||||
dateTimeFormatISO,
|
||||
MutableDataFrame,
|
||||
SelectableValue,
|
||||
toCSV,
|
||||
transformDataFrame,
|
||||
TimeZone,
|
||||
CoreApp,
|
||||
transformDataFrame,
|
||||
} from '@grafana/data';
|
||||
import { selectors } from '@grafana/e2e-selectors';
|
||||
import { reportInteraction } from '@grafana/runtime';
|
||||
@ -32,6 +28,7 @@ import { transformToZipkin } from 'app/plugins/datasource/zipkin/utils/transform
|
||||
|
||||
import { InspectDataOptions } from './InspectDataOptions';
|
||||
import { getPanelInspectorStyles } from './styles';
|
||||
import { downloadAsJson, downloadDataFrameAsCsv, downloadLogsModelAsTxt } from './utils/download';
|
||||
|
||||
interface Props {
|
||||
isLoading: boolean;
|
||||
@ -99,43 +96,19 @@ export class InspectDataTab extends PureComponent<Props, State> {
|
||||
const { panel } = this.props;
|
||||
const { transformId } = this.state;
|
||||
|
||||
const dataFrameCsv = toCSV([dataFrame], csvConfig);
|
||||
|
||||
const blob = new Blob([String.fromCharCode(0xfeff), dataFrameCsv], {
|
||||
type: 'text/csv;charset=utf-8',
|
||||
});
|
||||
const displayTitle = panel ? panel.getDisplayTitle() : 'Explore';
|
||||
const transformation = transformId !== DataTransformerID.noop ? '-as-' + transformId.toLocaleLowerCase() : '';
|
||||
const fileName = `${displayTitle}-data${transformation}-${dateTimeFormat(new Date())}.csv`;
|
||||
saveAs(blob, fileName);
|
||||
downloadDataFrameAsCsv(dataFrame, panel ? panel.getDisplayTitle() : 'Explore', csvConfig, transformId);
|
||||
};
|
||||
|
||||
exportLogsAsTxt = () => {
|
||||
const { data, panel, app } = this.props;
|
||||
|
||||
reportInteraction('grafana_logs_download_logs_clicked', {
|
||||
app,
|
||||
format: 'logs',
|
||||
});
|
||||
|
||||
const logsModel = dataFrameToLogsModel(data || [], undefined);
|
||||
let textToDownload = '';
|
||||
|
||||
logsModel.meta?.forEach((metaItem) => {
|
||||
const string = `${metaItem.label}: ${JSON.stringify(metaItem.value)}\n`;
|
||||
textToDownload = textToDownload + string;
|
||||
});
|
||||
textToDownload = textToDownload + '\n\n';
|
||||
|
||||
logsModel.rows.forEach((row) => {
|
||||
const newRow = dateTimeFormatISO(row.timeEpochMs) + '\t' + row.entry + '\n';
|
||||
textToDownload = textToDownload + newRow;
|
||||
});
|
||||
|
||||
const blob = new Blob([textToDownload], {
|
||||
type: 'text/plain;charset=utf-8',
|
||||
});
|
||||
const displayTitle = panel ? panel.getDisplayTitle() : 'Explore';
|
||||
const fileName = `${displayTitle}-logs-${dateTimeFormat(new Date())}.txt`;
|
||||
saveAs(blob, fileName);
|
||||
downloadLogsModelAsTxt(logsModel, panel ? panel.getDisplayTitle() : 'Explore');
|
||||
};
|
||||
|
||||
exportTracesAsJson = () => {
|
||||
@ -153,45 +126,31 @@ export class InspectDataTab extends PureComponent<Props, State> {
|
||||
switch (df.meta?.custom?.traceFormat) {
|
||||
case 'jaeger': {
|
||||
let res = transformToJaeger(new MutableDataFrame(df));
|
||||
this.saveTraceJson(res, panel);
|
||||
downloadAsJson(res, (panel ? panel.getDisplayTitle() : 'Explore') + '-traces');
|
||||
break;
|
||||
}
|
||||
case 'zipkin': {
|
||||
let res = transformToZipkin(new MutableDataFrame(df));
|
||||
this.saveTraceJson(res, panel);
|
||||
downloadAsJson(res, (panel ? panel.getDisplayTitle() : 'Explore') + '-traces');
|
||||
break;
|
||||
}
|
||||
case 'otlp':
|
||||
default: {
|
||||
let res = transformToOTLP(new MutableDataFrame(df));
|
||||
this.saveTraceJson(res, panel);
|
||||
downloadAsJson(res, (panel ? panel.getDisplayTitle() : 'Explore') + '-traces');
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
saveTraceJson = (json: any, panel?: PanelModel) => {
|
||||
const blob = new Blob([JSON.stringify(json)], {
|
||||
type: 'application/json',
|
||||
});
|
||||
const displayTitle = panel ? panel.getDisplayTitle() : 'Explore';
|
||||
const fileName = `${displayTitle}-traces-${dateTimeFormat(new Date())}.json`;
|
||||
saveAs(blob, fileName);
|
||||
};
|
||||
|
||||
exportServiceGraph = () => {
|
||||
const { data, panel } = this.props;
|
||||
if (!data) {
|
||||
return;
|
||||
}
|
||||
|
||||
const blob = new Blob([JSON.stringify(data)], {
|
||||
type: 'application/json',
|
||||
});
|
||||
const displayTitle = panel ? panel.getDisplayTitle() : 'Explore';
|
||||
const fileName = `${displayTitle}-service-graph-${dateTimeFormat(new Date())}.json`;
|
||||
saveAs(blob, fileName);
|
||||
downloadAsJson(data, panel ? panel.getDisplayTitle() : 'Explore');
|
||||
};
|
||||
|
||||
onDataFrameChange = (item: SelectableValue<DataTransformerID | number>) => {
|
||||
|
114
public/app/features/inspector/utils/download.test.ts
Normal file
114
public/app/features/inspector/utils/download.test.ts
Normal file
@ -0,0 +1,114 @@
|
||||
import saveAs from 'file-saver';
|
||||
|
||||
import {
|
||||
dataFrameFromJSON,
|
||||
DataFrameJSON,
|
||||
dateTimeFormat,
|
||||
dateTimeFormatISO,
|
||||
FieldType,
|
||||
LogRowModel,
|
||||
LogsMetaKind,
|
||||
} from '@grafana/data';
|
||||
|
||||
import { downloadAsJson, downloadDataFrameAsCsv, downloadLogsModelAsTxt } from './download';
|
||||
|
||||
jest.mock('file-saver', () => jest.fn());
|
||||
|
||||
describe('inspector download', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
jest.useFakeTimers({ now: new Date(1400000000000) });
|
||||
});
|
||||
afterEach(() => {
|
||||
jest.useRealTimers();
|
||||
});
|
||||
|
||||
describe('downloadDataFrameAsCsv', () => {
|
||||
const json: DataFrameJSON = {
|
||||
schema: {
|
||||
fields: [
|
||||
{ name: 'time', type: FieldType.time },
|
||||
{ name: 'name', type: FieldType.string },
|
||||
{ name: 'value', type: FieldType.number },
|
||||
],
|
||||
},
|
||||
data: {
|
||||
values: [[100], ['a'], [1]],
|
||||
},
|
||||
};
|
||||
|
||||
it.each([[dataFrameFromJSON(json), 'test', '"time","name","value"\r\n100,a,1\r\n\r\n']])(
|
||||
'should, when logsModel is %s and title is %s, resolve in %s',
|
||||
async (dataFrame, title, expected) => {
|
||||
downloadDataFrameAsCsv(dataFrame, title);
|
||||
const call = (saveAs as unknown as jest.Mock).mock.calls[0];
|
||||
const blob = call[0];
|
||||
const filename = call[1];
|
||||
const text = await blob.text();
|
||||
|
||||
expect(text).toEqual(expected);
|
||||
expect(filename).toEqual(`${title}-data-${dateTimeFormat(1400000000000)}.csv`);
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
describe('downloadAsJson', () => {
|
||||
it.each([
|
||||
['foo', 'test', '"foo"'],
|
||||
[1, 'test', '1'],
|
||||
[{ foo: 'bar' }, 'test', '{"foo":"bar"}'],
|
||||
])('should, when logsModel is %s and title is %s, resolve in %s', async (logsModel, title, expected) => {
|
||||
downloadAsJson(logsModel, title);
|
||||
const call = (saveAs as unknown as jest.Mock).mock.calls[0];
|
||||
const blob = call[0];
|
||||
const filename = call[1];
|
||||
const text = await blob.text();
|
||||
|
||||
expect(text).toEqual(expected);
|
||||
expect(filename).toEqual(`${title}-${dateTimeFormat(1400000000000)}.json`);
|
||||
});
|
||||
});
|
||||
|
||||
describe('downloadLogsModelAsTxt', () => {
|
||||
it.each([
|
||||
[{ meta: [], rows: [] }, 'test', '\n\n'],
|
||||
[
|
||||
{ meta: [{ label: 'testLabel', value: 'testValue', kind: LogsMetaKind.String }], rows: [] },
|
||||
'test',
|
||||
'testLabel: "testValue"\n\n\n',
|
||||
],
|
||||
[{ meta: [{ label: 'testLabel', value: 1, kind: LogsMetaKind.Number }], rows: [] }, 'test', 'testLabel: 1\n\n\n'],
|
||||
[
|
||||
{
|
||||
meta: [
|
||||
{ label: 'testLabel', value: 1, kind: LogsMetaKind.String },
|
||||
{ label: 'secondTestLabel', value: 2, kind: LogsMetaKind.String },
|
||||
],
|
||||
rows: [],
|
||||
},
|
||||
'test',
|
||||
'testLabel: 1\nsecondTestLabel: 2\n\n\n',
|
||||
],
|
||||
[
|
||||
{
|
||||
meta: [
|
||||
{ label: 'testLabel', value: 1, kind: LogsMetaKind.String },
|
||||
{ label: 'secondTestLabel', value: 2, kind: LogsMetaKind.String },
|
||||
],
|
||||
rows: [{ timeEpochMs: 100, entry: 'testEntry' } as unknown as LogRowModel],
|
||||
},
|
||||
'test',
|
||||
`testLabel: 1\nsecondTestLabel: 2\n\n\n${dateTimeFormatISO(100)}\ttestEntry\n`,
|
||||
],
|
||||
])('should, when logsModel is %s and title is %s, resolve in %s', async (logsModel, title, expected) => {
|
||||
downloadLogsModelAsTxt(logsModel, title);
|
||||
const call = (saveAs as unknown as jest.Mock).mock.calls[0];
|
||||
const blob = call[0];
|
||||
const filename = call[1];
|
||||
const text = await blob.text();
|
||||
|
||||
expect(text).toEqual(expected);
|
||||
expect(filename).toEqual(`${title}-logs-${dateTimeFormat(1400000000000)}.txt`);
|
||||
});
|
||||
});
|
||||
});
|
79
public/app/features/inspector/utils/download.ts
Normal file
79
public/app/features/inspector/utils/download.ts
Normal file
@ -0,0 +1,79 @@
|
||||
import saveAs from 'file-saver';
|
||||
|
||||
import {
|
||||
CSVConfig,
|
||||
DataFrame,
|
||||
DataTransformerID,
|
||||
dateTimeFormat,
|
||||
dateTimeFormatISO,
|
||||
LogsModel,
|
||||
toCSV,
|
||||
} from '@grafana/data';
|
||||
|
||||
/**
|
||||
* Downloads a DataFrame as a TXT file.
|
||||
*
|
||||
* @param {(Pick<LogsModel, 'meta' | 'rows'>)} logsModel
|
||||
* @param {string} title
|
||||
*/
|
||||
export function downloadLogsModelAsTxt(logsModel: Pick<LogsModel, 'meta' | 'rows'>, title: string) {
|
||||
let textToDownload = '';
|
||||
|
||||
logsModel.meta?.forEach((metaItem) => {
|
||||
const string = `${metaItem.label}: ${JSON.stringify(metaItem.value)}\n`;
|
||||
textToDownload = textToDownload + string;
|
||||
});
|
||||
textToDownload = textToDownload + '\n\n';
|
||||
|
||||
logsModel.rows.forEach((row) => {
|
||||
const newRow = dateTimeFormatISO(row.timeEpochMs) + '\t' + row.entry + '\n';
|
||||
textToDownload = textToDownload + newRow;
|
||||
});
|
||||
|
||||
const blob = new Blob([textToDownload], {
|
||||
type: 'text/plain;charset=utf-8',
|
||||
});
|
||||
|
||||
const fileName = `${title}-logs-${dateTimeFormat(new Date())}.txt`;
|
||||
saveAs(blob, fileName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Exports a DataFrame as a CSV file.
|
||||
*
|
||||
* @param {DataFrame} dataFrame
|
||||
* @param {string} title
|
||||
* @param {CSVConfig} [csvConfig]
|
||||
* @param {DataTransformerID} [transformId=DataTransformerID.noop]
|
||||
*/
|
||||
export function downloadDataFrameAsCsv(
|
||||
dataFrame: DataFrame,
|
||||
title: string,
|
||||
csvConfig?: CSVConfig,
|
||||
transformId: DataTransformerID = DataTransformerID.noop
|
||||
) {
|
||||
const dataFrameCsv = toCSV([dataFrame], csvConfig);
|
||||
|
||||
const blob = new Blob([String.fromCharCode(0xfeff), dataFrameCsv], {
|
||||
type: 'text/csv;charset=utf-8',
|
||||
});
|
||||
|
||||
const transformation = transformId !== DataTransformerID.noop ? '-as-' + transformId.toLocaleLowerCase() : '';
|
||||
const fileName = `${title}-data${transformation}-${dateTimeFormat(new Date())}.csv`;
|
||||
saveAs(blob, fileName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Downloads any object as JSON file.
|
||||
*
|
||||
* @param {unknown} json
|
||||
* @param {string} title
|
||||
*/
|
||||
export function downloadAsJson(json: unknown, title: string) {
|
||||
const blob = new Blob([JSON.stringify(json)], {
|
||||
type: 'application/json',
|
||||
});
|
||||
|
||||
const fileName = `${title}-${dateTimeFormat(new Date())}.json`;
|
||||
saveAs(blob, fileName);
|
||||
}
|
@ -7,6 +7,7 @@ import angular from 'angular';
|
||||
import { configure } from 'enzyme';
|
||||
|
||||
import { EventBusSrv } from '@grafana/data';
|
||||
import 'blob-polyfill';
|
||||
import 'mutationobserver-shim';
|
||||
import './mocks/workers';
|
||||
|
||||
|
@ -15413,6 +15413,13 @@ __metadata:
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"blob-polyfill@npm:7.0.20220408":
|
||||
version: 7.0.20220408
|
||||
resolution: "blob-polyfill@npm:7.0.20220408"
|
||||
checksum: bbd062e904d851b0e0a733265150d0c189a575f80a98256515d265e265ada6f92a787d43fedaa791c00910c174131f9d7168e8c158e3334c965d045358c177da
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"blob-util@npm:^2.0.2":
|
||||
version: 2.0.2
|
||||
resolution: "blob-util@npm:2.0.2"
|
||||
@ -22544,6 +22551,7 @@ __metadata:
|
||||
babel-plugin-angularjs-annotate: 0.10.0
|
||||
babel-plugin-macros: 3.1.0
|
||||
baron: 3.0.3
|
||||
blob-polyfill: 7.0.20220408
|
||||
brace: 0.11.1
|
||||
calculate-size: 1.1.1
|
||||
centrifuge: 3.0.1
|
||||
|
Loading…
Reference in New Issue
Block a user