TestData: Support for csv files & csv content (#34674)

* initial implementation of csv support for test data source

* CSV file & content scenarios working

* Removing categorical data

* fixing handler names

* Update pkg/tsdb/testdatasource/csv_data.go

Co-authored-by: Arve Knudsen <arve.knudsen@gmail.com>

* Update pkg/tsdb/testdatasource/csv_data.go

Co-authored-by: Arve Knudsen <arve.knudsen@gmail.com>

* Update pkg/tsdb/testdatasource/csv_data.go

Co-authored-by: Arve Knudsen <arve.knudsen@gmail.com>

* Update pkg/tsdb/testdatasource/csv_data.go

Co-authored-by: Arve Knudsen <arve.knudsen@gmail.com>

* Update pkg/tsdb/testdatasource/csv_data.go

Co-authored-by: Arve Knudsen <arve.knudsen@gmail.com>

* Update pkg/tsdb/testdatasource/csv_data.go

Co-authored-by: Arve Knudsen <arve.knudsen@gmail.com>

* Update pkg/tsdb/testdatasource/csv_data.go

Co-authored-by: Arve Knudsen <arve.knudsen@gmail.com>

* Fixed lint issues

* updated so it uses the same parsing

* more CSV tests

* lint fixes

* more lint

* lint

* support time field

* migrate manual entry to csv

* more test output

* more test output

* missing file

Co-authored-by: Arve Knudsen <arve.knudsen@gmail.com>
Co-authored-by: Ryan McKinley <ryantxu@gmail.com>
This commit is contained in:
Torkel Ödegaard
2021-05-26 10:42:42 +02:00
committed by GitHub
parent b4ce068f0e
commit 987bffe482
24 changed files with 883 additions and 699 deletions

View File

@@ -0,0 +1,276 @@
package testdatasource
import (
"context"
"encoding/csv"
"errors"
"fmt"
"io"
"os"
"path/filepath"
"regexp"
"strconv"
"strings"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/components/simplejson"
)
// handleCsvContentScenario serves the "csv_content" scenario: each query's
// inline "csvContent" payload is parsed into a data frame named after the
// optional "alias" field, falling back to the query RefID.
func (p *testDataPlugin) handleCsvContentScenario(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
	response := backend.NewQueryDataResponse()

	for _, query := range req.Queries {
		model, err := simplejson.NewJson(query.JSON)
		if err != nil {
			return nil, fmt.Errorf("failed to parse query json: %v", err)
		}

		content := model.Get("csvContent").MustString()
		name := model.Get("alias").MustString(query.RefID)

		frame, err := p.loadCsvContent(strings.NewReader(content), name)
		if err != nil {
			return nil, err
		}

		queryResponse := response.Responses[query.RefID]
		queryResponse.Frames = append(queryResponse.Frames, frame)
		response.Responses[query.RefID] = queryResponse
	}

	return response, nil
}
// handleCsvFileScenario serves the "csv_file" scenario: each query names a CSV
// file (via "csvFileName") that is loaded from the static testdata directory
// and returned as a data frame. Queries without a file name are skipped rather
// than treated as errors.
func (p *testDataPlugin) handleCsvFileScenario(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
	resp := backend.NewQueryDataResponse()

	for _, q := range req.Queries {
		model, err := simplejson.NewJson(q.JSON)
		if err != nil {
			// Same message format as handleCsvContentScenario (the original
			// was missing the colon before the wrapped error).
			return nil, fmt.Errorf("failed to parse query json: %v", err)
		}

		fileName := model.Get("csvFileName").MustString()
		if fileName == "" {
			continue
		}

		frame, err := p.loadCsvFile(fileName)
		if err != nil {
			return nil, err
		}

		respD := resp.Responses[q.RefID]
		respD.Frames = append(respD.Frames, frame)
		resp.Responses[q.RefID] = respD
	}

	return resp, nil
}
// csvFileNameRegex accepts only simple file names such as "foo_bar.csv".
// Anchored with ^…$ so that path separators and traversal sequences
// ("../x.csv") are rejected outright — the previous unanchored pattern
// matched any substring, so "../population_by_state.csv" slipped past the
// check and was only stopped by the subsequent os.Open failure. Compiled
// once at package scope instead of on every call.
var csvFileNameRegex = regexp.MustCompile(`^\w+\.csv$`)

// loadCsvFile loads a CSV file from the plugin's static testdata directory
// and converts it to a data frame named after the file.
func (p *testDataPlugin) loadCsvFile(fileName string) (*data.Frame, error) {
	if !csvFileNameRegex.MatchString(fileName) {
		return nil, fmt.Errorf("invalid csv file name: %q", fileName)
	}

	filePath := filepath.Join(p.Cfg.StaticRootPath, "testdata", fileName)
	// Can ignore gosec G304 here, because we check the file pattern above
	// nolint:gosec
	fileReader, err := os.Open(filePath)
	if err != nil {
		return nil, fmt.Errorf("failed to open file: %v", err)
	}

	defer func() {
		if err := fileReader.Close(); err != nil {
			p.logger.Warn("Failed to close file", "err", err, "path", fileName)
		}
	}()

	return p.loadCsvContent(fileReader, fileName)
}
// loadCsvContent reads CSV data from ioReader and converts it to a data frame
// with the given name. The first record is the header: each column becomes a
// field whose type is inferred from its values (bool, int64, float64, or
// string, all nullable). Columns whose name contains "time" are additionally
// converted to a time field when their values can be interpreted as epoch
// milliseconds or RFC 3339 strings (see toTimeField).
func (p *testDataPlugin) loadCsvContent(ioReader io.Reader, name string) (*data.Frame, error) {
	reader := csv.NewReader(ioReader)

	// Read the header record; its cells become the field names.
	headerFields, err := reader.Read()
	if err != nil {
		return nil, fmt.Errorf("failed to read header line: %v", err)
	}

	fieldNames := make([]string, 0, len(headerFields))
	fieldRawValues := make([][]string, 0, len(headerFields))
	for _, fieldName := range headerFields {
		fieldNames = append(fieldNames, strings.Trim(fieldName, " "))
		fieldRawValues = append(fieldRawValues, []string{})
	}

	// Collect the remaining records column-by-column.
	for {
		lineValues, err := reader.Read()
		if errors.Is(err, io.EOF) {
			break // reached end of the file
		} else if err != nil {
			return nil, fmt.Errorf("failed to read line: %v", err)
		}

		for fieldIndex, value := range lineValues {
			fieldRawValues[fieldIndex] = append(fieldRawValues[fieldIndex], strings.Trim(value, " "))
		}
	}

	fields := []*data.Field{}
	longest := 0

	for fieldIndex, rawValues := range fieldRawValues {
		fieldName := fieldNames[fieldIndex]
		field, err := csvValuesToField(rawValues)
		if err != nil {
			// Columns that cannot be converted are dropped silently,
			// matching the original behavior.
			continue
		}

		// Check if the values are actually a time field.
		if strings.Contains(strings.ToLower(fieldName), "time") {
			if timeField := toTimeField(field); timeField != nil {
				field = timeField
			}
		}

		field.Name = fieldName
		fields = append(fields, field)
		if field.Len() > longest {
			longest = field.Len()
		}
	}

	// Pad shorter fields so every field has the same length. The original
	// computed field.Len()-longest, which can never be positive (longest is
	// the maximum), so no padding ever happened.
	for _, field := range fields {
		if delta := longest - field.Len(); delta > 0 {
			field.Extend(delta)
		}
	}

	return data.NewFrame(name, fields...), nil
}
// csvLineToField converts one comma-separated line into a typed field.
// All spaces are stripped before the values are parsed.
func csvLineToField(stringInput string) (*data.Field, error) {
	compact := strings.ReplaceAll(stringInput, " ", "")
	return csvValuesToField(strings.Split(compact, ","))
}
// csvValuesToField converts a column of raw CSV strings into a typed,
// nullable field. The type is inferred from the values, trying in order:
// bool (when the first value looks boolean), int64, float64, and finally
// string. Empty values and the literal "null" (any case) become nulls in
// every branch.
func csvValuesToField(parts []string) (*data.Field, error) {
	if len(parts) < 1 {
		return nil, fmt.Errorf("csv must have at least one value")
	}

	first := strings.ToUpper(parts[0])
	if first == "T" || first == "F" || first == "TRUE" || first == "FALSE" {
		field := data.NewFieldFromFieldType(data.FieldTypeNullableBool, len(parts))
		for idx, strVal := range parts {
			strVal = strings.ToUpper(strVal)
			if strVal == "NULL" || strVal == "" {
				continue
			}
			field.SetConcrete(idx, strVal == "T" || strVal == "TRUE")
		}
		return field, nil
	}

	// isNull reports whether a raw value should become a null entry.
	// The bool branch above already accepted "null" in any case; the
	// original numeric/string branches only matched lowercase "null" —
	// this makes all branches agree.
	isNull := func(s string) bool {
		return s == "" || strings.EqualFold(s, "null")
	}

	// Try parsing values as integers.
	ok := false
	field := data.NewFieldFromFieldType(data.FieldTypeNullableInt64, len(parts))
	for idx, strVal := range parts {
		if isNull(strVal) {
			continue
		}
		val, err := strconv.ParseInt(strVal, 10, 64)
		if err != nil {
			ok = false
			break
		}
		field.SetConcrete(idx, val)
		ok = true
	}
	if ok {
		return field, nil
	}

	// Maybe floats.
	field = data.NewFieldFromFieldType(data.FieldTypeNullableFloat64, len(parts))
	for idx, strVal := range parts {
		if isNull(strVal) {
			continue
		}
		val, err := strconv.ParseFloat(strVal, 64)
		if err != nil {
			ok = false
			break
		}
		field.SetConcrete(idx, val)
		ok = true
	}
	if ok {
		return field, nil
	}

	// Fall back to strings; empty/null entries are stored as nulls.
	field = data.NewFieldFromFieldType(data.FieldTypeNullableString, len(parts))
	for idx, strVal := range parts {
		if isNull(strVal) {
			continue
		}
		field.SetConcrete(idx, strVal)
	}
	return field, nil
}
// toTimeField attempts to convert the values of field into a nullable time
// field, preserving the original name, labels, and config.
//
// Numeric values are interpreted as Unix epoch milliseconds; string values
// must parse as RFC 3339 timestamps. It returns nil when not a single value
// could be converted, or when the field type is neither numeric nor string —
// callers treat nil as "keep the original field".
func toTimeField(field *data.Field) *data.Field {
	found := false
	count := field.Len()
	timeField := data.NewFieldFromFieldType(data.FieldTypeNullableTime, count)
	timeField.Config = field.Config
	timeField.Name = field.Name
	timeField.Labels = field.Labels

	ft := field.Type()
	if ft.Numeric() {
		for i := 0; i < count; i++ {
			// NOTE(review): entries where FloatAt errors (presumably null
			// entries — confirm against the SDK) are left null in the
			// converted field.
			v, err := field.FloatAt(i)
			if err == nil {
				// Epoch millis -> nanoseconds -> UTC time.
				t := time.Unix(0, int64(v)*int64(time.Millisecond))
				timeField.SetConcrete(i, t.UTC())
				found = true
			}
		}
		if !found {
			return nil
		}
		return timeField
	}

	if ft == data.FieldTypeNullableString || ft == data.FieldTypeString {
		for i := 0; i < count; i++ {
			v, ok := field.ConcreteAt(i)
			if ok && v != nil {
				// Unparsable strings simply stay null in the result.
				t, err := time.Parse(time.RFC3339, v.(string))
				if err == nil {
					timeField.SetConcrete(i, t.UTC())
					found = true
				}
			}
		}
		if !found {
			return nil
		}
		return timeField
	}

	// Any other field type (bool, already-time, ...) is not converted.
	return nil
}

View File

@@ -0,0 +1,107 @@
package testdatasource
import (
"os"
"path/filepath"
"testing"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana-plugin-sdk-go/experimental"
"github.com/grafana/grafana/pkg/setting"
"github.com/stretchr/testify/require"
)
// TestCSVFileScenario exercises loadCsvFile/loadCsvContent against the CSV
// fixtures shipped in the static testdata directory, comparing each resulting
// frame with its golden file.
func TestCSVFileScenario(t *testing.T) {
	cfg := setting.NewCfg()
	cfg.DataPath = t.TempDir()
	cfg.StaticRootPath = "../../../public"

	p := &testDataPlugin{
		Cfg: cfg,
	}

	t.Run("loadCsvFile", func(t *testing.T) {
		for _, name := range []string{"population_by_state.csv", "city_stats.csv"} {
			t.Run("Should load file and convert to DataFrame", func(t *testing.T) {
				frame, err := p.loadCsvFile(name)
				require.NoError(t, err)
				require.NotNil(t, frame)

				dr := &backend.DataResponse{Frames: data.Frames{frame}}
				goldenPath := filepath.Join("testdata", name+".golden.txt")
				require.NoError(t, experimental.CheckGoldenDataResponse(goldenPath, dr, true))
			})
		}

		for _, name := range []string{"simple", "mixed"} {
			t.Run("Should load CSV Text: "+name, func(t *testing.T) {
				filePath := filepath.Join("testdata", name+".csv")
				// Can ignore gosec G304 here, because this is a constant defined above
				// nolint:gosec
				fileReader, err := os.Open(filePath)
				require.NoError(t, err)
				defer func() {
					_ = fileReader.Close()
				}()

				frame, err := p.loadCsvContent(fileReader, name)
				require.NoError(t, err)
				require.NotNil(t, frame)

				dr := &backend.DataResponse{Frames: data.Frames{frame}}
				goldenPath := filepath.Join("testdata", name+".golden.txt")
				require.NoError(t, experimental.CheckGoldenDataResponse(goldenPath, dr, true))
			})
		}

		t.Run("Should not allow non file name chars", func(t *testing.T) {
			_, err := p.loadCsvFile("../population_by_state.csv")
			require.Error(t, err)
		})
	})
}
func TestReadCSV(t *testing.T) {
fBool, err := csvLineToField("T, F,F,T ,")
require.NoError(t, err)
fBool2, err := csvLineToField("true,false,T,F,F")
require.NoError(t, err)
fNum, err := csvLineToField("1,null,,4,5")
require.NoError(t, err)
fStr, err := csvLineToField("a,b,,,c")
require.NoError(t, err)
frame := data.NewFrame("", fBool, fBool2, fNum, fStr)
frameToJSON, err := data.FrameToJSON(frame)
require.NoError(t, err)
out := frameToJSON.Bytes(data.IncludeAll)
require.JSONEq(t, `{"schema":{
"fields":[
{"type":"boolean","typeInfo":{"frame":"bool","nullable":true}},
{"type":"boolean","typeInfo":{"frame":"bool","nullable":true}},
{"type":"number","typeInfo":{"frame":"int64","nullable":true}},
{"type":"string","typeInfo":{"frame":"string","nullable":true}}
]},"data":{
"values":[
[true,false,false,true,null],
[true,false,true,false,false],
[1,null,null,4,5],
["a","b",null,null,"c"]
]}}`, string(out))
}

View File

@@ -27,7 +27,6 @@ const (
noDataPointsQuery queryType = "no_data_points"
datapointsOutsideRangeQuery queryType = "datapoints_outside_range"
csvMetricValuesQuery queryType = "csv_metric_values"
manualEntryQuery queryType = "manual_entry"
predictablePulseQuery queryType = "predictable_pulse"
predictableCSVWaveQuery queryType = "predictable_csv_wave"
streamingClientQuery queryType = "streaming_client"
@@ -39,7 +38,8 @@ const (
serverError500Query queryType = "server_error_500"
logsQuery queryType = "logs"
nodeGraphQuery queryType = "node_graph"
categoricalDataQuery queryType = "categorical_data"
csvFileQueryType queryType = "csv_file"
csvContentQueryType queryType = "csv_content"
)
type queryType string
@@ -117,12 +117,6 @@ Timestamps will line up evenly on timeStepSeconds (For example, 60 seconds means
handler: p.handleDatapointsOutsideRangeScenario,
})
p.registerScenario(&Scenario{
ID: string(manualEntryQuery),
Name: "Manual Entry",
handler: p.handleManualEntryScenario,
})
p.registerScenario(&Scenario{
ID: string(csvMetricValuesQuery),
Name: "CSV Metric Values",
@@ -190,9 +184,15 @@ Timestamps will line up evenly on timeStepSeconds (For example, 60 seconds means
})
p.registerScenario(&Scenario{
ID: string(categoricalDataQuery),
Name: "Categorical Data",
handler: p.handleCategoricalDataScenario,
ID: string(csvFileQueryType),
Name: "CSV File",
handler: p.handleCsvFileScenario,
})
p.registerScenario(&Scenario{
ID: string(csvContentQueryType),
Name: "CSV Content",
handler: p.handleCsvContentScenario,
})
p.queryMux.HandleFunc("", p.handleFallbackScenario)
@@ -286,97 +286,6 @@ func (p *testDataPlugin) handleDatapointsOutsideRangeScenario(ctx context.Contex
return resp, nil
}
// handleManualEntryScenario serves the "manual_entry" scenario. Each query
// carries a "points" array of [value, timestampMillis] pairs that is turned
// into a time/value frame: null values become nulls in the value field, and
// points whose timestamp cannot be parsed are skipped entirely.
func (p *testDataPlugin) handleManualEntryScenario(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
	resp := backend.NewQueryDataResponse()
	for _, q := range req.Queries {
		model, err := simplejson.NewJson(q.JSON)
		if err != nil {
			return nil, fmt.Errorf("error reading query")
		}
		points := model.Get("points").MustArray()
		frame := newSeriesForQuery(q, model, 0)
		timeField := data.NewFieldFromFieldType(data.FieldTypeTime, 0)
		valueField := data.NewFieldFromFieldType(data.FieldTypeNullableFloat64, 0)
		timeField.Name = data.TimeSeriesTimeFieldName
		valueField.Name = data.TimeSeriesValueFieldName
		for _, val := range points {
			// Each point is [value|null, epochMillis]; numbers arrive as
			// json.Number via simplejson. NOTE(review): a point with fewer
			// than two entries would panic on the index below — confirm the
			// frontend always sends pairs.
			pointValues := val.([]interface{})
			var value *float64
			if pointValues[0] != nil {
				// Unparsable values are left nil (null in the field).
				if valueFloat, err := strconv.ParseFloat(string(pointValues[0].(json.Number)), 64); err == nil {
					value = &valueFloat
				}
			}
			timeInt, err := strconv.ParseInt(string(pointValues[1].(json.Number)), 10, 64)
			if err != nil {
				continue
			}
			// Split epoch millis into whole seconds plus remainder nanos.
			t := time.Unix(timeInt/int64(1e+3), (timeInt%int64(1e+3))*int64(1e+6))
			timeField.Append(t)
			valueField.Append(value)
		}
		frame.Fields = data.Fields{timeField, valueField}
		respD := resp.Responses[q.RefID]
		respD.Frames = append(respD.Frames, frame)
		resp.Responses[q.RefID] = respD
	}
	return resp, nil
}
// csvToFieldValues converts a comma-separated line into a typed, nullable
// field. All spaces are stripped before parsing. Type inference: when the
// first value looks boolean (T/F/true/false, any case) a bool field is
// built; otherwise, when the first value does not parse as a float, a
// string field; otherwise a float64 field.
func csvToFieldValues(stringInput string) (*data.Field, error) {
	parts := strings.Split(strings.ReplaceAll(stringInput, " ", ""), ",")
	if len(parts) < 1 {
		// NOTE(review): unreachable — strings.Split always returns at
		// least one element.
		return nil, fmt.Errorf("csv must have at least one value")
	}
	first := strings.ToUpper(parts[0])
	if first == "T" || first == "F" || first == "TRUE" || first == "FALSE" {
		field := data.NewFieldFromFieldType(data.FieldTypeNullableBool, len(parts))
		for idx, strVal := range parts {
			strVal = strings.ToUpper(strVal)
			if strVal == "NULL" || strVal == "" {
				continue // leave the entry null
			}
			field.SetConcrete(idx, strVal == "T" || strVal == "TRUE")
		}
		return field, nil
	}
	// If we can not parse the first value as a number, assume strings
	_, err := strconv.ParseFloat(first, 64)
	if err != nil {
		field := data.NewFieldFromFieldType(data.FieldTypeNullableString, len(parts))
		for idx, strVal := range parts {
			if strVal == "null" || strVal == "" {
				continue // leave the entry null
			}
			field.SetConcrete(idx, strVal)
		}
		return field, nil
	}
	// Set any valid numbers; anything unparsable stays null.
	field := data.NewFieldFromFieldType(data.FieldTypeNullableFloat64, len(parts))
	for idx, strVal := range parts {
		if val, err := strconv.ParseFloat(strVal, 64); err == nil {
			field.SetConcrete(idx, val)
		}
	}
	return field, nil
}
func (p *testDataPlugin) handleCSVMetricValuesScenario(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
resp := backend.NewQueryDataResponse()
@@ -388,7 +297,7 @@ func (p *testDataPlugin) handleCSVMetricValuesScenario(ctx context.Context, req
stringInput := model.Get("stringInput").MustString()
valueField, err := csvToFieldValues(stringInput)
valueField, err := csvLineToField(stringInput)
if err != nil {
return nil, err
}
@@ -695,27 +604,6 @@ func (p *testDataPlugin) handleLogsScenario(ctx context.Context, req *backend.Qu
return resp, nil
}
// handleCategoricalDataScenario serves the "categorical_data" scenario: for
// each query it returns one frame with a row of random readings per entry in
// houseLocations.
func (p *testDataPlugin) handleCategoricalDataScenario(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
	resp := backend.NewQueryDataResponse()
	for _, q := range req.Queries {
		frame := data.NewFrame(q.RefID,
			data.NewField("location", nil, []string{}),
			data.NewField("temperature", nil, []int64{}),
			data.NewField("humidity", nil, []int64{}),
			data.NewField("pressure", nil, []int64{}),
		)
		for i := 0; i < len(houseLocations); i++ {
			// temperature in [-40, 39], humidity in [0, 99],
			// pressure in [900, 1019].
			frame.AppendRow(houseLocations[i], rand.Int63n(40+40)-40, rand.Int63n(100), rand.Int63n(1020-900)+900)
		}
		respD := resp.Responses[q.RefID]
		respD.Frames = append(respD.Frames, frame)
		resp.Responses[q.RefID] = respD
	}
	return resp, nil
}
func randomWalk(query backend.DataQuery, model *simplejson.Json, index int) *data.Frame {
timeWalkerMs := query.TimeRange.From.UnixNano() / int64(time.Millisecond)
to := query.TimeRange.To.UnixNano() / int64(time.Millisecond)

View File

@@ -185,57 +185,6 @@ func TestTestdataScenarios(t *testing.T) {
require.True(t, maxNil)
})
})
t.Run("manual entry ", func(t *testing.T) {
t.Run("should support nulls and return all data", func(t *testing.T) {
timeRange := plugins.DataTimeRange{From: "5m", To: "now", Now: time.Now()}
query := backend.DataQuery{
RefID: "A",
TimeRange: backend.TimeRange{
From: timeRange.MustGetFrom(),
To: timeRange.MustGetTo(),
},
JSON: []byte(`{ "points": [
[
4, 1616557148000
],
[
null, 1616558756000
],
[
4, 1616561658000
]] }`),
}
req := &backend.QueryDataRequest{
PluginContext: backend.PluginContext{},
Queries: []backend.DataQuery{query},
}
resp, err := p.handleManualEntryScenario(context.Background(), req)
require.NoError(t, err)
require.NotNil(t, resp)
dResp, exists := resp.Responses[query.RefID]
require.True(t, exists)
require.NoError(t, dResp.Error)
require.Len(t, dResp.Frames, 1)
frame := dResp.Frames[0]
require.Len(t, frame.Fields, 2)
require.Equal(t, "Time", frame.Fields[0].Name)
require.Equal(t, "Value", frame.Fields[1].Name)
require.Equal(t, 3, frame.Rows())
vals := frame.Fields[1]
v, _ := vals.ConcreteAt(0)
require.Equal(t, float64(4), v)
require.Nil(t, vals.At(1))
v, _ = vals.ConcreteAt(2)
require.Equal(t, float64(4), v)
})
})
}
func TestParseLabels(t *testing.T) {
@@ -263,38 +212,3 @@ func TestParseLabels(t *testing.T) {
assert.Equal(t, expectedTags, parseLabels(model), fmt.Sprintf("Actual tags in test case %d doesn't match expected tags", i+1))
}
}
// TestReadCSV verifies csvToFieldValues type inference — bool, float64, and
// string columns — via the frame JSON encoding.
func TestReadCSV(t *testing.T) {
	fBool, err := csvToFieldValues("T, F,F,T ,")
	require.NoError(t, err)
	fBool2, err := csvToFieldValues("true,false,T,F,F")
	require.NoError(t, err)
	fNum, err := csvToFieldValues("1,2,,4,5")
	require.NoError(t, err)
	fStr, err := csvToFieldValues("a,b,,,c")
	require.NoError(t, err)
	frame := data.NewFrame("", fBool, fBool2, fNum, fStr)
	frameToJSON, err := data.FrameToJSON(frame)
	require.NoError(t, err)
	out := frameToJSON.Bytes(data.IncludeAll)
	// require.Equal(t, "", string(out))
	require.JSONEq(t, `{"schema":{
"fields":[
{"type":"boolean","typeInfo":{"frame":"bool","nullable":true}},
{"type":"boolean","typeInfo":{"frame":"bool","nullable":true}},
{"type":"number","typeInfo":{"frame":"float64","nullable":true}},
{"type":"string","typeInfo":{"frame":"string","nullable":true}}
]},"data":{
"values":[
[true,false,false,true,null],
[true,false,true,false,false],
[1,2,null,4,5],
["a","b",null,null,"c"]
]}}`, string(out))
}

View File

@@ -11,6 +11,7 @@ import (
"github.com/grafana/grafana/pkg/plugins/backendplugin"
"github.com/grafana/grafana/pkg/plugins/backendplugin/coreplugin"
"github.com/grafana/grafana/pkg/registry"
"github.com/grafana/grafana/pkg/setting"
)
func init() {
@@ -19,6 +20,7 @@ func init() {
type testDataPlugin struct {
BackendPluginManager backendplugin.Manager `inject:""`
Cfg *setting.Cfg `inject:""`
logger log.Logger
scenarios map[string]*Scenario
queryMux *datasource.QueryTypeMux

View File

@@ -0,0 +1,17 @@
🌟 This was machine generated. Do not edit. 🌟
Frame[0]
Name: city_stats.csv
Dimensions: 2 Fields by 2 Rows
+-----------------+------------------+
| Name: City | Name: Population |
| Labels: | Labels: |
| Type: []*string | Type: []*int64 |
+-----------------+------------------+
| Stockholm | 1000000 |
| New York | 13333300 |
+-----------------+------------------+
====== TEST DATA RESPONSE (arrow base64) ======
FRAME=QVJST1cxAAD/////gAEAABAAAAAAAAoADgAMAAsABAAKAAAAFAAAAAAAAAEDAAoADAAAAAgABAAKAAAACAAAAFwAAAACAAAAKAAAAAQAAAAE////CAAAAAwAAAAAAAAAAAAAAAUAAAByZWZJZAAAACT///8IAAAAGAAAAA4AAABjaXR5X3N0YXRzLmNzdgAABAAAAG5hbWUAAAAAAgAAAIwAAAAEAAAAjv///xQAAABAAAAASAAAAAAAAgFMAAAAAQAAAAQAAAB8////CAAAABQAAAAKAAAAUG9wdWxhdGlvbgAABAAAAG5hbWUAAAAAAAAAAAgADAAIAAcACAAAAAAAAAFAAAAACgAAAFBvcHVsYXRpb24AAAAAEgAYABQAEwASAAwAAAAIAAQAEgAAABQAAABEAAAASAAAAAAABQFEAAAAAQAAAAwAAAAIAAwACAAEAAgAAAAIAAAAEAAAAAQAAABDaXR5AAAAAAQAAABuYW1lAAAAAAAAAAAEAAQABAAAAAQAAABDaXR5AAAAAP/////IAAAAFAAAAAAAAAAMABYAFAATAAwABAAMAAAAOAAAAAAAAAAUAAAAAAAAAwMACgAYAAwACAAEAAoAAAAUAAAAaAAAAAIAAAAAAAAAAAAAAAUAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAABAAAAAAAAAAGAAAAAAAAAAoAAAAAAAAAAAAAAAAAAAAKAAAAAAAAAAQAAAAAAAAAAAAAAACAAAAAgAAAAAAAAAAAAAAAAAAAAIAAAAAAAAAAAAAAAAAAAAAAAAACQAAABEAAAAAAAAAU3RvY2tob2xtTmV3IFlvcmsAAAAAAAAAQEIPAAAAAAA0c8sAAAAAABAAAAAMABQAEgAMAAgABAAMAAAAEAAAACwAAAA8AAAAAAADAAEAAACQAQAAAAAAANAAAAAAAAAAOAAAAAAAAAAAAAAAAAAAAAAAAAAAAAoADAAAAAgABAAKAAAACAAAAFwAAAACAAAAKAAAAAQAAAAE////CAAAAAwAAAAAAAAAAAAAAAUAAAByZWZJZAAAACT///8IAAAAGAAAAA4AAABjaXR5X3N0YXRzLmNzdgAABAAAAG5hbWUAAAAAAgAAAIwAAAAEAAAAjv///xQAAABAAAAASAAAAAAAAgFMAAAAAQAAAAQAAAB8////CAAAABQAAAAKAAAAUG9wdWxhdGlvbgAABAAAAG5hbWUAAAAAAAAAAAgADAAIAAcACAAAAAAAAAFAAAAACgAAAFBvcHVsYXRpb24AAAAAEgAYABQAEwASAAwAAAAIAAQAEgAAABQAAABEAAAASAAAAAAABQFEAAAAAQAAAAwAAAAIAAwACAAEAAgAAAAIAAAAEAAAAAQAAABDaXR5AAAAAAQAAABuYW1lAAAAAAAAAAAEAAQABAAAAAQAAABDaXR5AAAAALABAABBUlJPVzE=

View File

@@ -0,0 +1,3 @@
Field1,Field2,Field3,123
True,Hello,6,
False,6,World,6
1 Field1 Field2 Field3 123
2 True Hello 6
3 False 6 World 6

View File

@@ -0,0 +1,17 @@
🌟 This was machine generated. Do not edit. 🌟
Frame[0]
Name: mixed
Dimensions: 4 Fields by 2 Rows
+---------------+-----------------+-----------------+----------------+
| Name: Field1 | Name: Field2 | Name: Field3 | Name: 123 |
| Labels: | Labels: | Labels: | Labels: |
| Type: []*bool | Type: []*string | Type: []*string | Type: []*int64 |
+---------------+-----------------+-----------------+----------------+
| true | Hello | 6 | null |
| false | 6 | World | 6 |
+---------------+-----------------+-----------------+----------------+
====== TEST DATA RESPONSE (arrow base64) ======
FRAME=QVJST1cxAAD/////IAIAABAAAAAAAAoADgAMAAsABAAKAAAAFAAAAAAAAAEDAAoADAAAAAgABAAKAAAACAAAAFQAAAACAAAAKAAAAAQAAABk/v//CAAAAAwAAAAAAAAAAAAAAAUAAAByZWZJZAAAAIT+//8IAAAAEAAAAAUAAABtaXhlZAAAAAQAAABuYW1lAAAAAAQAAAA0AQAAxAAAAGgAAAAEAAAA7v7//xQAAAA4AAAAQAAAAAAAAgFEAAAAAQAAAAQAAADc/v//CAAAAAwAAAADAAAAMTIzAAQAAABuYW1lAAAAAAAAAAAIAAwACAAHAAgAAAAAAAABQAAAAAMAAAAxMjMATv///xQAAAA8AAAAPAAAAAAABQE4AAAAAQAAAAQAAAA8////CAAAABAAAAAGAAAARmllbGQzAAAEAAAAbmFtZQAAAAAAAAAANP///wYAAABGaWVsZDMAAKb///8UAAAAPAAAADwAAAAAAAUBOAAAAAEAAAAEAAAAlP///wgAAAAQAAAABgAAAEZpZWxkMgAABAAAAG5hbWUAAAAAAAAAAIz///8GAAAARmllbGQyAAAAABIAGAAUABMAEgAMAAAACAAEABIAAAAUAAAARAAAAEgAAAAAAAYBRAAAAAEAAAAMAAAACAAMAAgABAAIAAAACAAAABAAAAAGAAAARmllbGQxAAAEAAAAbmFtZQAAAAAAAAAABAAEAAQAAAAGAAAARmllbGQxAAD/////OAEAABQAAAAAAAAADAAWABQAEwAMAAQADAAAAFAAAAAAAAAAFAAAAAAAAAMDAAoAGAAMAAgABAAKAAAAFAAAALgAAAACAAAAAAAAAAAAAAAKAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAAAAAAAAAAIAAAAAAAAAAAAAAAAAAAACAAAAAAAAAAQAAAAAAAAABgAAAAAAAAACAAAAAAAAAAgAAAAAAAAAAAAAAAAAAAAIAAAAAAAAAAQAAAAAAAAADAAAAAAAAAACAAAAAAAAAA4AAAAAAAAAAgAAAAAAAAAQAAAAAAAAAAQAAAAAAAAAAAAAAAEAAAAAgAAAAAAAAAAAAAAAAAAAAIAAAAAAAAAAAAAAAAAAAACAAAAAAAAAAAAAAAAAAAAAgAAAAAAAAABAAAAAAAAAAEAAAAAAAAAAAAAAAUAAAAGAAAAAAAAAEhlbGxvNgAAAAAAAAEAAAAGAAAAAAAAADZXb3JsZAAAAgAAAAAAAAAAAAAAAAAAAAYAAAAAAAAAEAAAAAwAFAASAAwACAAEAAwAAAAQAAAALAAAADwAAAAAAAMAAQAAADACAAAAAAAAQAEAAAAAAABQAAAAAAAAAAAAAAAAAAAAAAAAAAAACgAMAAAACAAEAAoAAAAIAAAAVAAAAAIAAAAoAAAABAAAAGT+//8IAAAADAAAAAAAAAAAAAAABQAAAHJlZklkAAAAhP7//wgAAAAQAAAABQAAAG1peGVkAAAABAAAAG5hbWUAAAAABAAAADQBAADEAAAAaAAAAAQAAADu/v//FAAAADgAAABAAAAAAAACAUQAAAABAAAABAAAANz+//8IAAAADAAAAAMAAAAxMjMABAAAAG5hbWUAAAAAAAAAAAgADAAIAAcACAAAAAAAAAFAAAAAAwAAADEyMwBO////FAAAADwAAAA8AAAAAAAFATgAAAABAAAABAAAADz///8IAAAAEAAAAAYAAABGaWVsZDMAAAQAAABuYW1lAAAAAAAAAAA0////BgAAAEZpZWxkMwAApv///xQAAAA8AAAAPAAAAAAABQE4AAAAAQAAAAQAAACU////CAAAABAAAAAGAAAARmllbGQyAAAEAAAAbmFtZQAAAAAAAAAAjP///wYAAABGaWVsZDIAAAAAEgAYABQAEwASAAwAAAAIAAQAEgAAABQAAABEAAAASAAAAAAABgFEAAAAAQAAAAwAAAAIAAwACAAEAAgAAA
AIAAAAEAAAAAYAAABGaWVsZDEAAAQAAABuYW1lAAAAAAAAAAAEAAQABAAAAAYAAABGaWVsZDEAAFACAABBUlJPVzE=

View File

@@ -0,0 +1,18 @@
🌟 This was machine generated. Do not edit. 🌟
Frame[0]
Name: population_by_state.csv
Dimensions: 4 Fields by 3 Rows
+-----------------+----------------+----------------+----------------+
| Name: State | Name: 2020 | Name: 2000 | Name: 1980 |
| Labels: | Labels: | Labels: | Labels: |
| Type: []*string | Type: []*int64 | Type: []*int64 | Type: []*int64 |
+-----------------+----------------+----------------+----------------+
| California | 39368078 | 33987977 | 23800800 |
| Texas | 29360759 | 20944499 | 14338208 |
| Florida | 21733312 | 16047515 | 9839835 |
+-----------------+----------------+----------------+----------------+
====== TEST DATA RESPONSE (arrow base64) ======
FRAME=QVJST1cxAAD/////SAIAABAAAAAAAAoADgAMAAsABAAKAAAAFAAAAAAAAAEDAAoADAAAAAgABAAKAAAACAAAAGQAAAACAAAAKAAAAAQAAABA/v//CAAAAAwAAAAAAAAAAAAAAAUAAAByZWZJZAAAAGD+//8IAAAAIAAAABcAAABwb3B1bGF0aW9uX2J5X3N0YXRlLmNzdgAEAAAAbmFtZQAAAAAEAAAASAEAAMwAAABoAAAABAAAANr+//8UAAAAPAAAADwAAAAAAAIBQAAAAAEAAAAEAAAAyP7//wgAAAAQAAAABAAAADE5ODAAAAAABAAAAG5hbWUAAAAAAAAAAED///8AAAABQAAAAAQAAAAxOTgwAAAAADr///8UAAAAPAAAADwAAAAAAAIBQAAAAAEAAAAEAAAAKP///wgAAAAQAAAABAAAADIwMDAAAAAABAAAAG5hbWUAAAAAAAAAAKD///8AAAABQAAAAAQAAAAyMDAwAAAAAJr///8UAAAAPAAAAEQAAAAAAAIBSAAAAAEAAAAEAAAAiP///wgAAAAQAAAABAAAADIwMjAAAAAABAAAAG5hbWUAAAAAAAAAAAgADAAIAAcACAAAAAAAAAFAAAAABAAAADIwMjAAABIAGAAUABMAEgAMAAAACAAEABIAAAAUAAAARAAAAEgAAAAAAAUBRAAAAAEAAAAMAAAACAAMAAgABAAIAAAACAAAABAAAAAFAAAAU3RhdGUAAAAEAAAAbmFtZQAAAAAAAAAABAAEAAQAAAAFAAAAU3RhdGUAAAAAAAAA/////ygBAAAUAAAAAAAAAAwAFgAUABMADAAEAAwAAABwAAAAAAAAABQAAAAAAAADAwAKABgADAAIAAQACgAAABQAAACoAAAAAwAAAAAAAAAAAAAACQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAEAAAAAAAAAAYAAAAAAAAACgAAAAAAAAAAAAAAAAAAAAoAAAAAAAAABgAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAGAAAAAAAAABYAAAAAAAAAAAAAAAAAAAAWAAAAAAAAAAYAAAAAAAAAAAAAAAEAAAAAwAAAAAAAAAAAAAAAAAAAAMAAAAAAAAAAAAAAAAAAAADAAAAAAAAAAAAAAAAAAAAAwAAAAAAAAAAAAAAAAAAAAAAAAAKAAAADwAAABYAAABDYWxpZm9ybmlhVGV4YXNGbG9yaWRhAACOtVgCAAAAAHcCwAEAAAAAwJ9LAQAAAACJnQYCAAAAAHOWPwEAAAAAm930AAAAAADgK2sBAAAAAKDI2gAAAAAA2ySWAAAAAAAQAAAADAAUABIADAAIAAQADAAAABAAAAAsAAAAOAAAAAAAAwABAAAAWAIAAAAAAAAwAQAAAAAAAHAAAAAAAAAAAAAAAAAAAAAAAAoADAAAAAgABAAKAAAACAAAAGQAAAACAAAAKAAAAAQAAABA/v//CAAAAAwAAAAAAAAAAAAAAAUAAAByZWZJZAAAAGD+//8IAAAAIAAAABcAAABwb3B1bGF0aW9uX2J5X3N0YXRlLmNzdgAEAAAAbmFtZQAAAAAEAAAASAEAAMwAAABoAAAABAAAANr+//8UAAAAPAAAADwAAAAAAAIBQAAAAAEAAAAEAAAAyP7//wgAAAAQAAAABAAAADE5ODAAAAAABAAAAG5hbWUAAAAAAAAAAED///8AAAABQAAAAAQAAAAxOTgwAAAAADr///8UAAAAPAAAADwAAAAAAAIBQAAAAAEAAAAEAAAAKP///wgAAAAQAAAABAAAADIwMDAAAAAABAAAAG5hbWUAAAAAAAAAAKD///8AAAABQAAAAAQAAAAyMDAwAAAAAJr///8UAAAAPAAAAEQAAAAAAAIBSAAAAAEAAAAEAAAAiP///wgAAAAQAAAABAAAADIwMjAAAAAABAAAAG5hbWUAAAAAAAAAAAgADA
AIAAcACAAAAAAAAAFAAAAABAAAADIwMjAAABIAGAAUABMAEgAMAAAACAAEABIAAAAUAAAARAAAAEgAAAAAAAUBRAAAAAEAAAAMAAAACAAMAAgABAAIAAAACAAAABAAAAAFAAAAU3RhdGUAAAAEAAAAbmFtZQAAAAAAAAAABAAEAAQAAAAFAAAAU3RhdGUAAABwAgAAQVJST1cx

View File

@@ -0,0 +1,3 @@
Field1,Field2,Field3,Float,Time
A,5,6,6.7,1621987000000
B,6,7,8.9,1621988000000
1 Field1 Field2 Field3 Float Time
2 A 5 6 6.7 1621987000000
3 B 6 7 8.9 1621988000000

View File

@@ -0,0 +1,17 @@
🌟 This was machine generated. Do not edit. 🌟
Frame[0]
Name: simple
Dimensions: 5 Fields by 2 Rows
+-----------------+----------------+----------------+------------------+-------------------------------+
| Name: Field1 | Name: Field2 | Name: Field3 | Name: Float | Name: Time |
| Labels: | Labels: | Labels: | Labels: | Labels: |
| Type: []*string | Type: []*int64 | Type: []*int64 | Type: []*float64 | Type: []*time.Time |
+-----------------+----------------+----------------+------------------+-------------------------------+
| A | 5 | 6 | 6.7 | 2021-05-25 23:56:40 +0000 UTC |
| B | 6 | 7 | 8.9 | 2021-05-26 00:13:20 +0000 UTC |
+-----------------+----------------+----------------+------------------+-------------------------------+
====== TEST DATA RESPONSE (arrow base64) ======
FRAME=QVJST1cxAAD/////oAIAABAAAAAAAAoADgAMAAsABAAKAAAAFAAAAAAAAAEDAAoADAAAAAgABAAKAAAACAAAAFQAAAACAAAAKAAAAAQAAADo/f//CAAAAAwAAAAAAAAAAAAAAAUAAAByZWZJZAAAAAj+//8IAAAAEAAAAAYAAABzaW1wbGUAAAQAAABuYW1lAAAAAAUAAACwAQAAMAEAAMwAAABkAAAABAAAAHb+//8UAAAAPAAAADwAAAAAAAoBPAAAAAEAAAAEAAAAZP7//wgAAAAQAAAABAAAAFRpbWUAAAAABAAAAG5hbWUAAAAAAAAAAKL///8AAAMABAAAAFRpbWUAAAAA0v7//xQAAAA8AAAARAAAAAAAAwFEAAAAAQAAAAQAAADA/v//CAAAABAAAAAFAAAARmxvYXQAAAAEAAAAbmFtZQAAAAAAAAAAAAAGAAgABgAGAAAAAAACAAUAAABGbG9hdAAAADb///8UAAAAPAAAADwAAAAAAAIBQAAAAAEAAAAEAAAAJP///wgAAAAQAAAABgAAAEZpZWxkMwAABAAAAG5hbWUAAAAAAAAAAKD///8AAAABQAAAAAYAAABGaWVsZDMAAJb///8UAAAAPAAAAEQAAAAAAAIBSAAAAAEAAAAEAAAAhP///wgAAAAQAAAABgAAAEZpZWxkMgAABAAAAG5hbWUAAAAAAAAAAAgADAAIAAcACAAAAAAAAAFAAAAABgAAAEZpZWxkMgAAAAASABgAFAATABIADAAAAAgABAASAAAAFAAAAEQAAABIAAAAAAAFAUQAAAABAAAADAAAAAgADAAIAAQACAAAAAgAAAAQAAAABgAAAEZpZWxkMQAABAAAAG5hbWUAAAAAAAAAAAQABAAEAAAABgAAAEZpZWxkMQAAAAAAAP////9YAQAAFAAAAAAAAAAMABYAFAATAAwABAAMAAAAWAAAAAAAAAAUAAAAAAAAAwMACgAYAAwACAAEAAoAAAAUAAAAyAAAAAIAAAAAAAAAAAAAAAsAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAABAAAAAAAAAACAAAAAAAAAAYAAAAAAAAAAAAAAAAAAAAGAAAAAAAAAAQAAAAAAAAACgAAAAAAAAAAAAAAAAAAAAoAAAAAAAAABAAAAAAAAAAOAAAAAAAAAAAAAAAAAAAADgAAAAAAAAAEAAAAAAAAABIAAAAAAAAAAAAAAAAAAAASAAAAAAAAAAQAAAAAAAAAAAAAAAFAAAAAgAAAAAAAAAAAAAAAAAAAAIAAAAAAAAAAAAAAAAAAAACAAAAAAAAAAAAAAAAAAAAAgAAAAAAAAAAAAAAAAAAAAIAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAIAAAAAAAAAQUIAAAAAAAAFAAAAAAAAAAYAAAAAAAAABgAAAAAAAAAHAAAAAAAAAM3MzMzMzBpAzczMzMzMIUAAME01lXSCFgBA8gl+dYIWEAAAAAwAFAASAAwACAAEAAwAAAAQAAAALAAAADgAAAAAAAMAAQAAALACAAAAAAAAYAEAAAAAAABYAAAAAAAAAAAAAAAAAAAAAAAKAAwAAAAIAAQACgAAAAgAAABUAAAAAgAAACgAAAAEAAAA6P3//wgAAAAMAAAAAAAAAAAAAAAFAAAAcmVmSWQAAAAI/v//CAAAABAAAAAGAAAAc2ltcGxlAAAEAAAAbmFtZQAAAAAFAAAAsAEAADABAADMAAAAZAAAAAQAAAB2/v//FAAAADwAAAA8AAAAAAAKATwAAAABAAAABAAAAGT+//8IAAAAEAAAAAQAAABUaW1lAAAAAAQAAABuYW1lAAAAAAAAAACi////AAADAAQAAABUaW1lAAAAANL+//8UAAAAPAAAAEQAAAAAAAMBRAAAAAEAAAAEAAAAwP7//wgAAAAQAAAABQAAAEZsb2F0AAAABAAAAG5hbWUAAAAAAAAAAAAABg
AIAAYABgAAAAAAAgAFAAAARmxvYXQAAAA2////FAAAADwAAAA8AAAAAAACAUAAAAABAAAABAAAACT///8IAAAAEAAAAAYAAABGaWVsZDMAAAQAAABuYW1lAAAAAAAAAACg////AAAAAUAAAAAGAAAARmllbGQzAACW////FAAAADwAAABEAAAAAAACAUgAAAABAAAABAAAAIT///8IAAAAEAAAAAYAAABGaWVsZDIAAAQAAABuYW1lAAAAAAAAAAAIAAwACAAHAAgAAAAAAAABQAAAAAYAAABGaWVsZDIAAAAAEgAYABQAEwASAAwAAAAIAAQAEgAAABQAAABEAAAASAAAAAAABQFEAAAAAQAAAAwAAAAIAAwACAAEAAgAAAAIAAAAEAAAAAYAAABGaWVsZDEAAAQAAABuYW1lAAAAAAAAAAAEAAQABAAAAAYAAABGaWVsZDEAAMgCAABBUlJPVzE=