Chore: Fix various spelling errors in back-end code (#25241)

* Chore: Fix various spelling errors in back-end code
Co-authored-by: Sofia Papagiannaki <papagian@users.noreply.github.com>
Co-authored-by: Josh Soref <jsoref@users.noreply.github.com>
Arve Knudsen
2020-06-01 17:11:25 +02:00
committed by GitHub
parent 13787294c6
commit 07582a8e85
60 changed files with 174 additions and 168 deletions

View File

@@ -91,7 +91,7 @@ func TestInfluxdbQueryParser(t *testing.T) {
}
]
],
"alias": "serie alias",
"alias": "series alias",
"tags": [
{
"key": "datacenter",
@@ -118,7 +118,7 @@ func TestInfluxdbQueryParser(t *testing.T) {
So(len(res.Tags), ShouldEqual, 2)
So(res.Tz, ShouldEqual, "Europe/Paris")
So(res.Interval, ShouldEqual, time.Second*20)
- So(res.Alias, ShouldEqual, "serie alias")
+ So(res.Alias, ShouldEqual, "series alias")
})
Convey("can part raw query json model", func() {
@@ -140,7 +140,7 @@ func TestInfluxdbQueryParser(t *testing.T) {
],
"interval": ">10s",
"policy": "default",
"query": "RawDummieQuery",
"query": "RawDummyQuery",
"rawQuery": true,
"refId": "A",
"resultFormat": "time_series",
@@ -171,7 +171,7 @@ func TestInfluxdbQueryParser(t *testing.T) {
res, err := parser.Parse(modelJson, dsInfo)
So(err, ShouldBeNil)
- So(res.RawQuery, ShouldEqual, "RawDummieQuery")
+ So(res.RawQuery, ShouldEqual, "RawDummyQuery")
So(len(res.GroupBy), ShouldEqual, 2)
So(len(res.Selects), ShouldEqual, 1)
So(len(res.Tags), ShouldEqual, 0)
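
For reference, these tests use GoConvey's Convey/So assertion style. Below is a minimal, self-contained sketch of that pattern checking the corrected "series alias" and "RawDummyQuery" strings; the queryModel struct and test name are placeholders, not Grafana's actual parser code.

```go
// Minimal sketch of the GoConvey assertion style used in the test above.
// queryModel is a placeholder, not Grafana's real InfluxDB query model.
package influxdb_test

import (
	"encoding/json"
	"testing"

	. "github.com/smartystreets/goconvey/convey"
)

type queryModel struct {
	Alias string `json:"alias"`
	Query string `json:"query"`
}

func TestAliasSpelling(t *testing.T) {
	Convey("parses the corrected alias and query strings", t, func() {
		raw := []byte(`{"alias": "series alias", "query": "RawDummyQuery"}`)

		var m queryModel
		err := json.Unmarshal(raw, &m)

		So(err, ShouldBeNil)
		So(m.Alias, ShouldEqual, "series alias")
		So(m.Query, ShouldEqual, "RawDummyQuery")
	})
}
```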

View File

@@ -51,7 +51,7 @@ func (rp *ResponseParser) transformRows(rows []Row, queryResult *tsdb.QueryResul
}
}
result = append(result, &tsdb.TimeSeries{
- Name: rp.formatSerieName(row, column, query),
+ Name: rp.formatSeriesName(row, column, query),
Points: points,
Tags: row.Tags,
})
@@ -61,9 +61,9 @@ func (rp *ResponseParser) transformRows(rows []Row, queryResult *tsdb.QueryResul
return result
}
- func (rp *ResponseParser) formatSerieName(row Row, column string, query *Query) string {
+ func (rp *ResponseParser) formatSeriesName(row Row, column string, query *Query) string {
if query.Alias == "" {
- return rp.buildSerieNameFromQuery(row, column)
+ return rp.buildSeriesNameFromQuery(row, column)
}
nameSegment := strings.Split(row.Name, ".")
@@ -102,7 +102,7 @@ func (rp *ResponseParser) formatSerieName(row Row, column string, query *Query)
return string(result)
}
- func (rp *ResponseParser) buildSerieNameFromQuery(row Row, column string) string {
+ func (rp *ResponseParser) buildSeriesNameFromQuery(row Row, column string) string {
var tags []string
for k, v := range row.Tags {
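
The renamed helpers change spelling only, not behavior. As a standalone illustration of what a buildSeriesNameFromQuery-style function produces (matching the "cpu.mean { datacenter: America }" expectation in the tests below), here is a hedged sketch; the function name and the sorting of tag keys are assumptions, not Grafana's exact implementation.

```go
// Standalone sketch: format a series name from row name, column, and tags
// in the "name.column { key: value }" shape the response parser tests expect.
// Sorting the tag keys is an assumption made here for deterministic output.
package main

import (
	"fmt"
	"sort"
	"strings"
)

func buildSeriesName(name, column string, tags map[string]string) string {
	keys := make([]string, 0, len(tags))
	for k := range tags {
		keys = append(keys, k)
	}
	sort.Strings(keys)

	pairs := make([]string, 0, len(keys))
	for _, k := range keys {
		pairs = append(pairs, fmt.Sprintf("%s: %s", k, tags[k]))
	}

	if len(pairs) == 0 {
		return fmt.Sprintf("%s.%s", name, column)
	}
	return fmt.Sprintf("%s.%s { %s }", name, column, strings.Join(pairs, ", "))
}

func main() {
	fmt.Println(buildSeriesName("cpu", "mean", map[string]string{"datacenter": "America"}))
	// Output: cpu.mean { datacenter: America }
}
```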

View File

@@ -61,7 +61,7 @@ func TestInfluxdbResponseParser(t *testing.T) {
So(result.Series[0].Points[2][0].Valid, ShouldBeFalse)
})
Convey("can format serie names", func() {
Convey("can format series names", func() {
So(result.Series[0].Name, ShouldEqual, "cpu.mean { datacenter: America }")
So(result.Series[1].Name, ShouldEqual, "cpu.sum { datacenter: America }")
})
@@ -92,10 +92,10 @@ func TestInfluxdbResponseParser(t *testing.T) {
Convey("$ alias", func() {
Convey("simple alias", func() {
- query := &Query{Alias: "serie alias"}
+ query := &Query{Alias: "series alias"}
result := parser.Parse(response, query)
- So(result.Series[0].Name, ShouldEqual, "serie alias")
+ So(result.Series[0].Name, ShouldEqual, "series alias")
})
Convey("measurement alias", func() {
@@ -137,10 +137,10 @@ func TestInfluxdbResponseParser(t *testing.T) {
Convey("[[]] alias", func() {
Convey("simple alias", func() {
- query := &Query{Alias: "serie alias"}
+ query := &Query{Alias: "series alias"}
result := parser.Parse(response, query)
- So(result.Series[0].Name, ShouldEqual, "serie alias")
+ So(result.Series[0].Name, ShouldEqual, "series alias")
})
Convey("measurement alias", func() {

View File

@@ -23,7 +23,7 @@ import (
// Use the docker/blocks/mssql_tests/docker-compose.yaml to spin up a
// preconfigured MSSQL server suitable for running these tests.
// There is also a datasource and dashboard provisioned by devenv scripts that you can
- // use to verify that the generated data are vizualized as expected, see
+ // use to verify that the generated data are visualized as expected, see
// devenv/README.md for setup instructions.
// If needed, change the variable below to the IP address of the database.
var serverIP = "localhost"

View File

@@ -26,7 +26,7 @@ import (
// Use the docker/blocks/mysql_tests/docker-compose.yaml to spin up a
// preconfigured MySQL server suitable for running these tests.
// There is also a datasource and dashboard provisioned by devenv scripts that you can
- // use to verify that the generated data are vizualized as expected, see
+ // use to verify that the generated data are visualized as expected, see
// devenv/README.md for setup instructions.
func TestMySQL(t *testing.T) {
// change to true to run the MySQL tests
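
The MSSQL, MySQL, and Postgres integration tests touched by this commit are all gated behind the "change to true to run" toggle the comments describe. A hedged sketch of that gating pattern follows; the constant name and package are placeholders, not the actual wiring in these files.

```go
// Sketch of the "change to true to run" gate described in the comments above.
// runIntegrationTests and the package name are placeholders for illustration.
package mysql_test

import "testing"

const runIntegrationTests = false // flip to true once the docker-compose MySQL is up

func TestMySQLIntegration(t *testing.T) {
	if !runIntegrationTests {
		t.Skip("integration tests disabled; set runIntegrationTests to true")
	}
	// ... assertions against the provisioned database would go here ...
}
```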

View File

@@ -27,7 +27,7 @@ import (
// Use the docker/blocks/postgres_tests/docker-compose.yaml to spin up a
// preconfigured Postgres server suitable for running these tests.
// There is also a datasource and dashboard provisioned by devenv scripts that you can
- // use to verify that the generated data are vizualized as expected, see
+ // use to verify that the generated data are visualized as expected, see
// devenv/README.md for setup instructions.
func TestPostgres(t *testing.T) {
// change to true to run the PostgreSQL tests

View File

@@ -31,7 +31,7 @@ func TestPrometheus(t *testing.T) {
So(formatLegend(metric, query), ShouldEqual, "legend backend mobile ")
})
Convey("build full serie name", func() {
Convey("build full series name", func() {
metric := map[p.LabelName]p.LabelValue{
p.LabelName(p.MetricNameLabel): p.LabelValue("http_request_total"),
p.LabelName("app"): p.LabelValue("backend"),

View File

@@ -47,7 +47,7 @@ func init() {
factor := 2
for i := 0; i < 10; i++ {
timeWalkerMs := context.TimeRange.GetFromAsMsEpoch()
- serie := &tsdb.TimeSeries{Name: strconv.Itoa(start)}
+ ts := &tsdb.TimeSeries{Name: strconv.Itoa(start)}
start *= factor
points := make(tsdb.TimeSeriesPoints, 0)
@@ -57,8 +57,8 @@ func init() {
timeWalkerMs += query.IntervalMs * 50
}
- serie.Points = points
- series = append(series, serie)
+ ts.Points = points
+ series = append(series, ts)
}
queryRes := tsdb.NewQueryResult()
@@ -77,7 +77,7 @@ func init() {
var series []*tsdb.TimeSeries
for i := 0; i < 10; i++ {
timeWalkerMs := context.TimeRange.GetFromAsMsEpoch()
- serie := &tsdb.TimeSeries{Name: strconv.Itoa(i * 10)}
+ ts := &tsdb.TimeSeries{Name: strconv.Itoa(i * 10)}
points := make(tsdb.TimeSeriesPoints, 0)
for j := int64(0); j < 100 && timeWalkerMs < to; j++ {
@@ -86,8 +86,8 @@ func init() {
timeWalkerMs += query.IntervalMs * 50
}
- serie.Points = points
- series = append(series, serie)
+ ts.Points = points
+ series = append(series, ts)
}
queryRes := tsdb.NewQueryResult()
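
The renamed ts loop variable builds synthetic time series for the test-data scenarios. Below is a self-contained sketch of the same loop shape, using local stand-in types instead of Grafana's tsdb package; the point values are made up for illustration.

```go
// Standalone sketch of the exponential test-data loop above, with local
// stand-ins for tsdb.TimeSeries / tsdb.TimeSeriesPoints so it runs on its own.
package main

import (
	"fmt"
	"strconv"
	"time"
)

type timeSeries struct {
	Name   string
	Points [][2]float64 // [value, timestamp in ms]
}

func main() {
	from := time.Now().Add(-time.Hour).UnixMilli()
	to := time.Now().UnixMilli()
	intervalMs := int64(10_000)

	var series []*timeSeries
	start, factor := 1, 2
	for i := 0; i < 10; i++ {
		timeWalkerMs := from
		ts := &timeSeries{Name: strconv.Itoa(start)}
		start *= factor

		for j := int64(0); j < 100 && timeWalkerMs < to; j++ {
			// Value chosen arbitrarily for the sketch; the real scenario
			// generates its own values.
			ts.Points = append(ts.Points, [2]float64{float64(start), float64(timeWalkerMs)})
			timeWalkerMs += intervalMs * 50
		}
		series = append(series, ts)
	}
	fmt.Printf("generated %d series\n", len(series))
}
```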