InfluxDB: Fix backend mode table result with aliases (#69943)
Co-authored-by: ludovio <ludovic.viaud@gmail.com>
parent 4217c8057b
commit 80c432e524
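In brief, this change refactors the backend InfluxQL response parser (transformRows is split into newFrameWithTimeField and newFrameWithoutTimeField helpers) and teaches the frontend's getSelectedParams to prefer a field's alias over its aggregation type when naming table columns; new tests cover aliased table results and duplicate, un-aliased field selects.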
@@ -70,7 +70,7 @@ func transformRows(rows []Row, query Query) data.Frames {
 	}
 
 	frames := make([]*data.Frame, 0, len(rows)+cols)
 
-	// frameName is pre-allocated so we can reuse it, saving memory.
+	// frameName is pre-allocated. So we can reuse it, saving memory.
 	// It's sized for a reasonably-large name, but will grow if needed.
 	frameName := make([]byte, 0, 128)
@@ -87,6 +87,81 @@ func transformRows(rows []Row, query Query) data.Frames {
 		}
 
 		if !hasTimeCol {
+			newFrame := newFrameWithoutTimeField(row, retentionPolicyQuery, tagValuesQuery)
+			frames = append(frames, newFrame)
+		} else {
+			for colIndex, column := range row.Columns {
+				if column == "time" {
+					continue
+				}
+				newFrame := newFrameWithTimeField(row, column, colIndex, query, frameName)
+				frames = append(frames, newFrame)
+			}
+		}
+	}
+
+	return frames
+}
+
+func newFrameWithTimeField(row Row, column string, colIndex int, query Query, frameName []byte) *data.Frame {
+	var timeArray []time.Time
+	var floatArray []*float64
+	var stringArray []*string
+	var boolArray []*bool
+	valType := typeof(row.Values, colIndex)
+
+	for _, valuePair := range row.Values {
+		timestamp, timestampErr := parseTimestamp(valuePair[0])
+		// we only add this row if the timestamp is valid
+		if timestampErr != nil {
+			continue
+		}
+
+		timeArray = append(timeArray, timestamp)
+		switch valType {
+		case "string":
+			value, ok := valuePair[colIndex].(string)
+			if ok {
+				stringArray = append(stringArray, &value)
+			} else {
+				stringArray = append(stringArray, nil)
+			}
+		case "json.Number":
+			value := parseNumber(valuePair[colIndex])
+			floatArray = append(floatArray, value)
+		case "bool":
+			value, ok := valuePair[colIndex].(bool)
+			if ok {
+				boolArray = append(boolArray, &value)
+			} else {
+				boolArray = append(boolArray, nil)
+			}
+		case "null":
+			floatArray = append(floatArray, nil)
+		}
+	}
+
+	timeField := data.NewField("Time", nil, timeArray)
+
+	var valueField *data.Field
+
+	switch valType {
+	case "string":
+		valueField = data.NewField("Value", row.Tags, stringArray)
+	case "json.Number":
+		valueField = data.NewField("Value", row.Tags, floatArray)
+	case "bool":
+		valueField = data.NewField("Value", row.Tags, boolArray)
+	case "null":
+		valueField = data.NewField("Value", row.Tags, floatArray)
+	}
+
+	name := string(formatFrameName(row, column, query, frameName[:]))
+	valueField.SetConfig(&data.FieldConfig{DisplayNameFromDS: name})
+	return newDataFrame(name, query.RawQuery, timeField, valueField)
+}
+
+func newFrameWithoutTimeField(row Row, retentionPolicyQuery bool, tagValuesQuery bool) *data.Frame {
 	var values []string
 
 	if retentionPolicyQuery {
@@ -123,75 +198,7 @@ func transformRows(rows []Row, query Query) data.Frames {
 	}
 
 	field := data.NewField("Value", nil, values)
-			frames = append(frames, data.NewFrame(row.Name, field))
-		} else {
-			for colIndex, column := range row.Columns {
-				if column == "time" {
-					continue
-				}
-
-				var timeArray []time.Time
-				var floatArray []*float64
-				var stringArray []*string
-				var boolArray []*bool
-				valType := typeof(row.Values, colIndex)
-
-				for _, valuePair := range row.Values {
-					timestamp, timestampErr := parseTimestamp(valuePair[0])
-					// we only add this row if the timestamp is valid
-					if timestampErr == nil {
-						timeArray = append(timeArray, timestamp)
-						switch valType {
-						case "string":
-							{
-								value, chk := valuePair[colIndex].(string)
-								if chk {
-									stringArray = append(stringArray, &value)
-								} else {
-									stringArray = append(stringArray, nil)
-								}
-							}
-						case "json.Number":
-							value := parseNumber(valuePair[colIndex])
-							floatArray = append(floatArray, value)
-						case "bool":
-							value, chk := valuePair[colIndex].(bool)
-							if chk {
-								boolArray = append(boolArray, &value)
-							} else {
-								boolArray = append(boolArray, nil)
-							}
-						case "null":
-							floatArray = append(floatArray, nil)
-						}
-					}
-				}
-
-				name := string(formatFrameName(row, column, query, frameName[:]))
-
-				timeField := data.NewField("Time", nil, timeArray)
-				if valType == "string" {
-					valueField := data.NewField("Value", row.Tags, stringArray)
-					valueField.SetConfig(&data.FieldConfig{DisplayNameFromDS: name})
-					frames = append(frames, newDataFrame(name, query.RawQuery, timeField, valueField))
-				} else if valType == "json.Number" {
-					valueField := data.NewField("Value", row.Tags, floatArray)
-					valueField.SetConfig(&data.FieldConfig{DisplayNameFromDS: name})
-					frames = append(frames, newDataFrame(name, query.RawQuery, timeField, valueField))
-				} else if valType == "bool" {
-					valueField := data.NewField("Value", row.Tags, boolArray)
-					valueField.SetConfig(&data.FieldConfig{DisplayNameFromDS: name})
-					frames = append(frames, newDataFrame(name, query.RawQuery, timeField, valueField))
-				} else if valType == "null" {
-					valueField := data.NewField("Value", row.Tags, floatArray)
-					valueField.SetConfig(&data.FieldConfig{DisplayNameFromDS: name})
-					frames = append(frames, newDataFrame(name, query.RawQuery, timeField, valueField))
-				}
-			}
-		}
-	}
-
-	return frames
+	return data.NewFrame(row.Name, field)
 }
 
 func newDataFrame(name string, queryString string, timeField *data.Field, valueField *data.Field) *data.Frame {
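Taken together, the three hunks above move the per-row conversion out of transformRows: newFrameWithTimeField builds a Time field plus one typed Value field chosen by valType, and newFrameWithoutTimeField covers retention-policy and tag-value results, replacing the inline per-column loop and its four near-identical valueField branches.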
@@ -1,7 +1,7 @@
 import { size } from 'lodash';
 import { of } from 'rxjs';
 
-import { AnnotationEvent, DataQueryRequest, dateTime, FieldType, MutableDataFrame } from '@grafana/data';
+import { AnnotationEvent, DataFrame, DataQueryRequest, dateTime, FieldType, MutableDataFrame } from '@grafana/data';
 import { FetchResponse } from '@grafana/runtime';
 import config from 'app/core/config';
 import { backendSrv } from 'app/core/services/backend_srv'; // will use the version in __mocks__
@@ -296,6 +296,27 @@ describe('influxdb response parser', () => {
     });
   });
 
+  describe('table with aliases', () => {
+    it('should parse the table with alias', () => {
+      const table = parser.getTable(mockDataFramesWithAlias, mockQuery, { preferredVisualisationType: 'table' });
+      expect(table.columns.length).toBe(4);
+      expect(table.columns[0].text).toBe('Time');
+      expect(table.columns[1].text).toBe('geohash');
+      expect(table.columns[2].text).toBe('ALIAS1');
+      expect(table.columns[3].text).toBe('ALIAS2');
+    });
+
+    it('should parse the table when there is no alias and two field selects', () => {
+      const table = parser.getTable(mockDataframesWithTwoFieldSelect, mockQueryWithTwoFieldSelect, {
+        preferredVisualisationType: 'table',
+      });
+      expect(table.columns.length).toBe(3);
+      expect(table.columns[0].text).toBe('Time');
+      expect(table.columns[1].text).toBe('mean');
+      expect(table.columns[2].text).toBe('mean_1');
+    });
+  });
+
   describe('When issuing annotationQuery', () => {
     const ctx = {
       ds: getMockDS(getMockDSInstanceSettings()),
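The column expectations above follow from how the parser lays out a table: one Time column, one column per groupBy tag, and one column per selected field. A minimal sketch of that layout, with selectedParams and tagKeys hard-coded to what mockQuery (defined further down) should produce rather than computed:

const selectedParams = ['ALIAS1', 'ALIAS2']; // expected getSelectedParams(mockQuery) result
const tagKeys = ['geohash']; // from mockQuery's groupBy tag
const tableColumns = ['Time', ...tagKeys, ...selectedParams];
console.log(tableColumns); // ['Time', 'geohash', 'ALIAS1', 'ALIAS2'] — as asserted above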
@@ -330,7 +351,359 @@ describe('influxdb response parser', () => {
     let response: AnnotationEvent[];
 
     beforeEach(async () => {
-      const mockResponse: FetchResponse = {
+      fetchMock.mockImplementation(() => {
+        return of(annotationMockResponse);
+      });
+
+      config.featureToggles.influxdbBackendMigration = true;
+      response = await ctx.ds.annotationEvents(queryOptions, annotation);
+    });
+
+    it('should return annotation list', () => {
+      expect(response.length).toBe(2);
+      expect(response[0].time).toBe(1645208701000);
+      expect(response[0].title).toBe('Station softwareupdated[447]: Adding client 1');
+      expect(response[0].text).toBe('text 1');
+      expect(response[0].tags?.[0]).toBe('cbfa07e0e3bb 1');
+      expect(response[0].tags?.[1]).toBe('/var/log/host/install.log 1');
+      expect(response[1].time).toBe(1645208702000);
+      expect(response[1].title).toBe('Station softwareupdated[447]: Adding client 2');
+      expect(response[1].text).toBe('text 2');
+      expect(response[1].tags?.[0]).toBe('cbfa07e0e3bb 2');
+      expect(response[1].tags?.[1]).toBe('/var/log/host/install.log 2');
+    });
+  });
+});
+
+const mockQuery: InfluxQuery = {
+  datasource: {
+    type: 'influxdb',
+    uid: '12345',
+  },
+  groupBy: [
+    {
+      params: ['$__interval'],
+      type: 'time',
+    },
+    {
+      type: 'tag',
+      params: ['geohash::tag'],
+    },
+    {
+      params: ['null'],
+      type: 'fill',
+    },
+  ],
+  measurement: 'cpu',
+  orderByTime: 'ASC',
+  policy: 'bar',
+  refId: 'A',
+  resultFormat: 'table',
+  select: [
+    [
+      {
+        type: 'field',
+        params: ['value'],
+      },
+      {
+        type: 'mean',
+        params: [],
+      },
+      {
+        type: 'alias',
+        params: ['ALIAS1'],
+      },
+    ],
+    [
+      {
+        type: 'field',
+        params: ['value'],
+      },
+      {
+        type: 'mean',
+        params: [],
+      },
+      {
+        type: 'alias',
+        params: ['ALIAS2'],
+      },
+    ],
+  ],
+  tags: [],
+};
+
+const mockDataFramesWithAlias: DataFrame[] = [
+  {
+    name: 'cpu.ALIAS1 { geohash: tz6h548nc111 }',
+    refId: 'A',
+    meta: {
+      executedQueryString:
+        'SELECT mean("value") AS "ALIAS1", mean("value") AS "ALIAS2" FROM "bar"."cpu" WHERE time >= 1686582333244ms and time <= 1686583233244ms GROUP BY time(500ms), "geohash"::tag fill(null) ORDER BY time ASC',
+    },
+    fields: [
+      {
+        name: 'Time',
+        type: FieldType.time,
+        config: {},
+        values: [1686582333000, 1686582333500, 1686582334000],
+      },
+      {
+        name: 'Value',
+        type: FieldType.number,
+        labels: {
+          geohash: 'tz6h548nc111',
+        },
+        config: {
+          displayNameFromDS: 'cpu.ALIAS1 { geohash: tz6h548nc111 }',
+        },
+        values: [null, 111.98024577663908, null],
+      },
+    ],
+    length: 1801,
+  },
+  {
+    name: 'cpu.ALIAS2 { geohash: tz6h548nc111 }',
+    refId: 'A',
+    meta: {
+      executedQueryString:
+        'SELECT mean("value") AS "ALIAS1", mean("value") AS "ALIAS2" FROM "bar"."cpu" WHERE time >= 1686582333244ms and time <= 1686583233244ms GROUP BY time(500ms), "geohash"::tag fill(null) ORDER BY time ASC',
+    },
+    fields: [
+      {
+        name: 'Time',
+        type: FieldType.time,
+        config: {},
+        values: [1686582333000, 1686582333500, 1686582334000],
+      },
+      {
+        name: 'Value',
+        type: FieldType.number,
+        labels: {
+          geohash: 'tz6h548nc111',
+        },
+        config: {
+          displayNameFromDS: 'cpu.ALIAS2 { geohash: tz6h548nc111 }',
+        },
+        values: [null, 111.98024577663908, null],
+      },
+    ],
+    length: 1801,
+  },
+  {
+    name: 'cpu.ALIAS1 { geohash: wj7c61wnv111 }',
+    refId: 'A',
+    meta: {
+      executedQueryString:
+        'SELECT mean("value") AS "ALIAS1", mean("value") AS "ALIAS2" FROM "bar"."cpu" WHERE time >= 1686582333244ms and time <= 1686583233244ms GROUP BY time(500ms), "geohash"::tag fill(null) ORDER BY time ASC',
+    },
+    fields: [
+      {
+        name: 'Time',
+        type: FieldType.time,
+        config: {},
+        values: [1686582333000, 1686582333500, 1686582334000],
+      },
+      {
+        name: 'Value',
+        type: FieldType.number,
+        labels: {
+          geohash: 'wj7c61wnv111',
+        },
+        config: {
+          displayNameFromDS: 'cpu.ALIAS1 { geohash: wj7c61wnv111 }',
+        },
+        values: [null, 112.97136059147347, null],
+      },
+    ],
+    length: 1801,
+  },
+  {
+    name: 'cpu.ALIAS2 { geohash: wj7c61wnv111 }',
+    refId: 'A',
+    meta: {
+      executedQueryString:
+        'SELECT mean("value") AS "ALIAS1", mean("value") AS "ALIAS2" FROM "bar"."cpu" WHERE time >= 1686582333244ms and time <= 1686583233244ms GROUP BY time(500ms), "geohash"::tag fill(null) ORDER BY time ASC',
+    },
+    fields: [
+      {
+        name: 'Time',
+        type: FieldType.time,
+        config: {},
+        values: [1686582333000, 1686582333500, 1686582334000],
+      },
+      {
+        name: 'Value',
+        type: FieldType.number,
+        labels: {
+          geohash: 'wj7c61wnv111',
+        },
+        config: {
+          displayNameFromDS: 'cpu.ALIAS2 { geohash: wj7c61wnv111 }',
+        },
+        values: [null, 112.97136059147347, null],
+      },
+    ],
+    length: 1801,
+  },
+  {
+    name: 'cpu.ALIAS1 { geohash: wr50zpuhj111 }',
+    refId: 'A',
+    meta: {
+      executedQueryString:
+        'SELECT mean("value") AS "ALIAS1", mean("value") AS "ALIAS2" FROM "bar"."cpu" WHERE time >= 1686582333244ms and time <= 1686583233244ms GROUP BY time(500ms), "geohash"::tag fill(null) ORDER BY time ASC',
+    },
+    fields: [
+      {
+        name: 'Time',
+        type: FieldType.time,
+        config: {},
+        values: [1686582333000, 1686582333500, 1686582334000],
+      },
+      {
+        name: 'Value',
+        type: FieldType.number,
+        labels: {
+          geohash: 'wr50zpuhj111',
+        },
+        config: {
+          displayNameFromDS: 'cpu.ALIAS1 { geohash: wr50zpuhj111 }',
+        },
+        values: [null, 112.27638560052755, null],
+      },
+    ],
+    length: 1801,
+  },
+  {
+    name: 'cpu.ALIAS2 { geohash: wr50zpuhj111 }',
+    refId: 'A',
+    meta: {
+      executedQueryString:
+        'SELECT mean("value") AS "ALIAS1", mean("value") AS "ALIAS2" FROM "bar"."cpu" WHERE time >= 1686582333244ms and time <= 1686583233244ms GROUP BY time(500ms), "geohash"::tag fill(null) ORDER BY time ASC',
+    },
+    fields: [
+      {
+        name: 'Time',
+        type: FieldType.time,
+        config: {},
+        values: [1686582333000, 1686582333500, 1686582334000],
+      },
+      {
+        name: 'Value',
+        type: FieldType.number,
+        labels: {
+          geohash: 'wr50zpuhj111',
+        },
+        config: {
+          displayNameFromDS: 'cpu.ALIAS2 { geohash: wr50zpuhj111 }',
+        },
+        values: [null, 112.27638560052755, null],
+      },
+    ],
+    length: 1801,
+  },
+];
+
+const mockDataframesWithTwoFieldSelect: DataFrame[] = [
+  {
+    name: 'cpu.mean',
+    refId: 'A',
+    meta: {
+      typeVersion: [0, 0],
+      executedQueryString:
+        'SELECT mean("value"), mean("value") FROM "bar"."cpu" WHERE time >= 1686585763070ms and time <= 1686585793070ms GROUP BY time(10ms) fill(null) ORDER BY time ASC',
+    },
+    fields: [
+      {
+        name: 'Time',
+        type: FieldType.time,
+        config: {},
+        values: [1686585763070, 1686585763080, 1686585763090],
+      },
+      {
+        name: 'Value',
+        type: FieldType.number,
+        config: {
+          displayNameFromDS: 'cpu.mean',
+        },
+        values: [null, 87.42703187930438, null],
+      },
+    ],
+    length: 3,
+  },
+  {
+    name: 'cpu.mean_1',
+    refId: 'A',
+    meta: {
+      typeVersion: [0, 0],
+      executedQueryString:
+        'SELECT mean("value"), mean("value") FROM "bar"."cpu" WHERE time >= 1686585763070ms and time <= 1686585793070ms GROUP BY time(10ms) fill(null) ORDER BY time ASC',
+    },
+    fields: [
+      {
+        name: 'Time',
+        type: FieldType.time,
+        config: {},
+        values: [1686585763070, 1686585763080, 1686585763090],
+      },
+      {
+        name: 'Value',
+        type: FieldType.number,
+        config: {
+          displayNameFromDS: 'cpu.mean_1',
+        },
+        values: [87.3, 87.4, 87.5],
+      },
+    ],
+    length: 3,
+  },
+];
+
+const mockQueryWithTwoFieldSelect: InfluxQuery = {
+  datasource: {
+    type: 'influxdb',
+    uid: '1234',
+  },
+  groupBy: [
+    {
+      params: ['$__interval'],
+      type: 'time',
+    },
+    {
+      params: ['null'],
+      type: 'fill',
+    },
+  ],
+  measurement: 'cpu',
+  orderByTime: 'ASC',
+  policy: 'bar',
+  refId: 'A',
+  resultFormat: 'table',
+  select: [
+    [
+      {
+        type: 'field',
+        params: ['value'],
+      },
+      {
+        type: 'mean',
+        params: [],
+      },
+    ],
+    [
+      {
+        type: 'field',
+        params: ['value'],
+      },
+      {
+        type: 'mean',
+        params: [],
+      },
+    ],
+  ],
+  tags: [],
+};
+
+const annotationMockResponse: FetchResponse = {
   config: { url: '' },
   headers: new Headers(),
   ok: false,
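The frame names and displayNameFromDS values in the mocks above all follow the same pattern, measurement.field { tag: value }. A hypothetical TypeScript rendering of that pattern, inferred from the mock data rather than ported from the Go formatFrameName:

const frameDisplayName = (measurement: string, field: string, tags: Record<string, string>): string =>
  `${measurement}.${field} { ${Object.entries(tags)
    .map(([key, value]) => `${key}: ${value}`)
    .join(', ')} }`;

console.log(frameDisplayName('cpu', 'ALIAS1', { geohash: 'tz6h548nc111' }));
// 'cpu.ALIAS1 { geohash: tz6h548nc111 }' — matches the first mock frame above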
@@ -381,10 +754,7 @@ describe('influxdb response parser', () => {
       data: {
         values: [
           [1645208701000, 1645208702000],
-          [
-            'Station softwareupdated[447]: Adding client 1',
-            'Station softwareupdated[447]: Adding client 2',
-          ],
+          ['Station softwareupdated[447]: Adding client 1', 'Station softwareupdated[447]: Adding client 2'],
         ],
       },
     },
@@ -435,27 +805,3 @@ describe('influxdb response parser', () => {
     },
   },
 };
-
-      fetchMock.mockImplementation(() => {
-        return of(mockResponse);
-      });
-
-      config.featureToggles.influxdbBackendMigration = true;
-      response = await ctx.ds.annotationEvents(queryOptions, annotation);
-    });
-
-    it('should return annotation list', () => {
-      expect(response.length).toBe(2);
-      expect(response[0].time).toBe(1645208701000);
-      expect(response[0].title).toBe('Station softwareupdated[447]: Adding client 1');
-      expect(response[0].text).toBe('text 1');
-      expect(response[0].tags?.[0]).toBe('cbfa07e0e3bb 1');
-      expect(response[0].tags?.[1]).toBe('/var/log/host/install.log 1');
-      expect(response[1].time).toBe(1645208702000);
-      expect(response[1].title).toBe('Station softwareupdated[447]: Adding client 2');
-      expect(response[1].text).toBe('text 2');
-      expect(response[1].tags?.[0]).toBe('cbfa07e0e3bb 2');
-      expect(response[1].tags?.[1]).toBe('/var/log/host/install.log 2');
-    });
-  });
-});
@@ -258,7 +258,12 @@ export function getSelectedParams(target: InfluxQuery): string[] {
   target.select?.forEach((select) => {
     const selector = select.filter((x) => x.type !== 'field');
     if (selector.length > 0) {
+      const aliasIfExist = selector.find((s) => s.type === 'alias');
+      if (aliasIfExist) {
+        allParams.push(aliasIfExist.params?.[0].toString() ?? '');
+      } else {
       allParams.push(selector[0].type);
+      }
     } else {
       if (select[0] && select[0].params && select[0].params[0]) {
        allParams.push(select[0].params[0].toString());
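This last hunk is the heart of the alias fix: when a select clause carries an alias selector, its parameter now names the table column; only otherwise does the aggregation type (e.g. 'mean') apply. A self-contained sketch of the same logic, using a simplified SelectPart shape assumed from the mock queries above (grafana's actual InfluxQuery types differ):

type SelectPart = { type: string; params?: Array<string | number> };

function selectedParams(select: SelectPart[][]): string[] {
  const allParams: string[] = [];
  for (const parts of select) {
    const selector = parts.filter((x) => x.type !== 'field');
    if (selector.length > 0) {
      // An explicit alias wins; otherwise fall back to the first
      // non-field selector's type, e.g. 'mean'.
      const alias = selector.find((s) => s.type === 'alias');
      allParams.push(alias ? alias.params?.[0].toString() ?? '' : selector[0].type);
    } else {
      const first = parts[0];
      if (first && first.params && first.params[0] !== undefined) {
        allParams.push(first.params[0].toString());
      }
    }
  }
  return allParams;
}

// mockQuery's two aliased clauses yield ['ALIAS1', 'ALIAS2'];
// mockQueryWithTwoFieldSelect's yield ['mean', 'mean'], which the parser
// presumably disambiguates to 'mean' and 'mean_1' (per the table test above).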