Fixing typos (#70487)
parent 1441c90178
commit 95b1f3c875
@@ -40,7 +40,7 @@ Project maintainers who do not follow or enforce the Code of Conduct in good fai
 
 ## Attribution
 
-This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]
+This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [https://contributor-covenant.org/version/1/4/code-of-conduct][version]
 
-[homepage]: http://contributor-covenant.org
-[version]: http://contributor-covenant.org/version/1/4/
+[homepage]: https://contributor-covenant.org
+[version]: https://www.contributor-covenant.org/version/1/4/code-of-conduct/
@@ -66,7 +66,7 @@ groups:
 # <string, required> which query should be used for the condition
 condition: A
 # <list, required> list of query objects that should be executed on each
-# evaluation - should be obtained trough the API
+# evaluation - should be obtained through the API
 data:
 - refId: A
 datasourceUid: '__expr__'
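The comment fixed above says the `data` query objects should be obtained through the API rather than written by hand. A minimal sketch of fetching an existing rule so its `data` and `condition` can be reused in file provisioning; the endpoint path, the token handling, and the helper names are assumptions for illustration, not taken from this diff.

```go
// Sketch: fetch a provisioned alert rule so its "data" query objects can be
// copied into file provisioning. The endpoint path and surrounding harness are
// assumptions based on the Grafana alerting provisioning API, not this diff.
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

func fetchRuleData(baseURL, ruleUID, token string) (json.RawMessage, error) {
	req, err := http.NewRequest("GET", baseURL+"/api/v1/provisioning/alert-rules/"+ruleUID, nil)
	if err != nil {
		return nil, err
	}
	req.Header.Set("Authorization", "Bearer "+token)

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	// Decode only the fields shown in the provisioning example above:
	// "condition" names the refId used as the alert condition, "data" holds
	// the query objects to reuse.
	var rule struct {
		Condition string          `json:"condition"`
		Data      json.RawMessage `json:"data"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&rule); err != nil {
		return nil, err
	}
	fmt.Println("condition refId:", rule.Condition)
	return rule.Data, nil
}

func main() {
	if _, err := fetchRuleData("http://localhost:3000", "my-rule-uid", "glsa_example_token"); err != nil {
		fmt.Println("error:", err)
	}
}
```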
@@ -109,7 +109,7 @@ You can run a backend plugin and attach a debugger to it, which allows you to se
 ```
 1. Install your plugin into your local Grafana instance.
 
-Now that your plugin is ready to run, follow the instructions bellow for your IDE of choice.
+Now that your plugin is ready to run, follow the instructions below for your IDE of choice.
 
 ### Visual Studio Code
 
@@ -76,8 +76,8 @@ export enum HealthStatus {
 enum PluginRequestHeaders {
 PluginID = 'X-Plugin-Id', // can be used for routing
 DatasourceUID = 'X-Datasource-Uid', // can be used for routing/ load balancing
-DashboardUID = 'X-Dashboard-Uid', // mainly useful for debuging slow queries
-PanelID = 'X-Panel-Id', // mainly useful for debuging slow queries
+DashboardUID = 'X-Dashboard-Uid', // mainly useful for debugging slow queries
+PanelID = 'X-Panel-Id', // mainly useful for debugging slow queries
 QueryGroupID = 'X-Query-Group-Id', // mainly useful to find related queries with query splitting
 FromExpression = 'X-Grafana-From-Expr', // used by datasources to identify expression queries
 }
@@ -104,7 +104,7 @@ export const BarGaugeCell = (props: TableCellProps) => {
 };
 
 /**
-* Getting gauge values to align is very tricky without looking at all values and passing them trough display processor. For very large tables that
+* Getting gauge values to align is very tricky without looking at all values and passing them through display processor. For very large tables that
 * could pretty expensive. So this is kind of a compromise. We look at the first 1000 rows and cache the longest value.
 * If we have a cached value we just check if the current value is longer and update the alignmentFactor. This can obviously still lead to
 * unaligned gauges but it should a lot less common.
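The comment corrected above describes a compromise for aligning gauges: scan only the first 1000 rows for the longest rendered value, cache it as the alignment factor, and widen the factor whenever a later value turns out to be longer. A minimal Go sketch of that caching strategy; the types and the displayText helper are invented stand-ins (the real code is TypeScript in Grafana's table panel).

```go
// Sketch of the alignment-factor compromise described above: scan a bounded
// prefix of rows, cache the longest rendered value, and grow the cached factor
// when a later value is longer. Types and displayText are illustrative only.
package main

import "fmt"

const maxRowsToScan = 1000

type gaugeColumn struct {
	values          []float64
	alignmentFactor int // cached length of the longest rendered value seen so far
}

func displayText(v float64) string {
	// Stand-in for the display processor that formats values for rendering.
	return fmt.Sprintf("%.2f", v)
}

// alignment returns the cached factor, computing it from the first
// maxRowsToScan rows on first use and widening it if this row's value is longer.
func (c *gaugeColumn) alignment(row int) int {
	if c.alignmentFactor == 0 {
		limit := len(c.values)
		if limit > maxRowsToScan {
			limit = maxRowsToScan
		}
		for i := 0; i < limit; i++ {
			if l := len(displayText(c.values[i])); l > c.alignmentFactor {
				c.alignmentFactor = l
			}
		}
	}
	// Rows beyond the scanned prefix can still widen the factor, which is why
	// gauges can occasionally end up unaligned until the cache catches up.
	if l := len(displayText(c.values[row])); l > c.alignmentFactor {
		c.alignmentFactor = l
	}
	return c.alignmentFactor
}

func main() {
	col := &gaugeColumn{values: []float64{1, 22.5, 12345.678}}
	fmt.Println(col.alignment(2)) // length of the longest rendered value
}
```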
@@ -248,7 +248,7 @@ func TestHTTPServer_GetFrontendSettings_apps(t *testing.T) {
 },
 },
 {
-desc: "enalbed app with preload",
+desc: "enabled app with preload",
 pluginStore: func() plugins.Store {
 return &plugins.FakePluginStore{
 PluginList: []plugins.PluginDTO{
@@ -117,11 +117,11 @@ func FetchNpmPackages(ctx context.Context, tag, bucketName string) error {
 // Latest and next is 9.1.6.
 // 9.2.0-beta1 is released, the latest should stay on 9.1.6, next should point to 9.2.0-beta1
 // No move of dist-tags
-// 9.1.7 is relased, the latest should point to 9.1.7, next should stay to 9.2.0-beta1
+// 9.1.7 is released, the latest should point to 9.1.7, next should stay to 9.2.0-beta1
 // No move of dist-tags
 // Next week 9.2.0-beta2 is released, the latest should point to 9.1.7, next should point to 9.2.0-beta2
 // No move of dist-tags
-// In two weeks 9.2.0 stable is relased, the latest and next should point to 9.2.0.
+// In two weeks 9.2.0 stable is released, the latest and next should point to 9.2.0.
 // The next dist-tag is moved to point to 9.2.0.
 //
 // 3. Releasing an older stable than the current stable
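The comments above spell out when the npm dist-tags move: prereleases only ever advance `next`, a newer stable advances `latest` (and pulls `next` up to it unless `next` is already ahead), and an older stable moves nothing. A small sketch of that decision using golang.org/x/mod/semver; the helper names are invented for illustration and the real release tooling may implement this differently.

```go
// Sketch of the dist-tag rules described in the comments above: prereleases
// advance only "next"; a newer stable advances "latest" and drags "next" along
// unless "next" is already ahead; an older stable moves nothing.
package main

import (
	"fmt"

	"golang.org/x/mod/semver"
)

// nextTags returns the dist-tags after releasing version v. Versions use the
// "vX.Y.Z[-pre]" form required by golang.org/x/mod/semver.
func nextTags(latest, next, v string) (string, string) {
	if semver.Prerelease(v) != "" {
		// Prerelease: latest stays, next moves forward if v is newer.
		if semver.Compare(v, next) > 0 {
			next = v
		}
		return latest, next
	}
	// Stable release older than (or equal to) the current latest: no move.
	if semver.Compare(v, latest) <= 0 {
		return latest, next
	}
	// Newer stable: latest moves to v; next follows unless it is already ahead.
	latest = v
	if semver.Compare(next, latest) <= 0 {
		next = latest
	}
	return latest, next
}

func main() {
	latest, next := "v9.1.6", "v9.1.6"
	for _, v := range []string{"v9.2.0-beta1", "v9.1.7", "v9.2.0-beta2", "v9.2.0"} {
		latest, next = nextTags(latest, next, v)
		fmt.Printf("after %-12s latest=%s next=%s\n", v, latest, next)
	}
}
```

Running the sketch reproduces the sequence in the comments: 9.2.0-beta1 moves only next, 9.1.7 moves only latest, 9.2.0-beta2 moves only next, and 9.2.0 stable moves both.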
@@ -110,7 +110,7 @@ func TestTracingConfig(t *testing.T) {
 ExpectedAttrs: []attribute.KeyValue{},
 },
 {
-Name: "legacy env variables are supproted",
+Name: "legacy env variables are supported",
 Cfg: `[tracing.jaeger]`,
 Env: map[string]string{
 "JAEGER_AGENT_HOST": "example.com",
@@ -137,7 +137,7 @@ func TestTracingConfig(t *testing.T) {
 },
 } {
 t.Run(test.Name, func(t *testing.T) {
-// export envioronment variables
+// export environment variables
 if test.Env != nil {
 for k, v := range test.Env {
 t.Setenv(k, v)
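The loop above sets each test case's environment variables with t.Setenv before exercising the tracing config. A self-contained sketch of that table-driven pattern; readJaegerHost is a hypothetical stand-in for the real config reader, and t.Setenv restores each variable when the subtest ends, which is why it is safe to set legacy variables like JAEGER_AGENT_HOST per case.

```go
// Sketch of the per-subtest environment setup used above. readJaegerHost is a
// hypothetical stand-in for the real tracing config code.
package tracing

import (
	"os"
	"testing"
)

func readJaegerHost() string {
	// Legacy env variables are supported: fall back to JAEGER_AGENT_HOST.
	return os.Getenv("JAEGER_AGENT_HOST")
}

func TestLegacyEnvVariables(t *testing.T) {
	tests := []struct {
		Name string
		Env  map[string]string
		Want string
	}{
		{Name: "legacy env variables are supported", Env: map[string]string{"JAEGER_AGENT_HOST": "example.com"}, Want: "example.com"},
		{Name: "another host", Env: map[string]string{"JAEGER_AGENT_HOST": "collector.internal"}, Want: "collector.internal"},
	}
	for _, test := range tests {
		t.Run(test.Name, func(t *testing.T) {
			// export environment variables for this subtest only; t.Setenv
			// restores the previous value automatically on cleanup.
			for k, v := range test.Env {
				t.Setenv(k, v)
			}
			if got := readJaegerHost(); got != test.Want {
				t.Fatalf("got %q, want %q", got, test.Want)
			}
		})
	}
}
```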
@@ -31,7 +31,7 @@ type CachedResourceDataResponse struct {
 Response *backend.CallResourceResponse
 // A function that should be used to cache a CallResourceResponse for a given resource request.
 // It can be set to nil by the method implementation (if there is an error, for example), so it should be checked before being called.
-// Because plugins can send multiple responses asyncronously, the implementation should be able to handle multiple calls to this function for one request.
+// Because plugins can send multiple responses asynchronously, the implementation should be able to handle multiple calls to this function for one request.
 UpdateCacheFn CacheResourceResponseFn
 }
 
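The corrected comment documents two properties of UpdateCacheFn: it may be nil, and it may be invoked more than once because a plugin can stream several responses for one resource call. A sketch of a caller honoring both, using simplified local stand-in types (the real CacheResourceResponseFn and backend.CallResourceResponse definitions are not shown in this diff).

```go
// Sketch of a caller honoring the documented contract: check UpdateCacheFn for
// nil before every call, and tolerate being invoked once per streamed response.
// The types below are simplified stand-ins, not the real Grafana definitions.
package main

import "fmt"

type callResourceResponse struct {
	Status int
	Body   []byte
}

// cacheResourceResponseFn caches one response chunk for the original request.
type cacheResourceResponseFn func(resp *callResourceResponse)

type cachedResourceDataResponse struct {
	Response      *callResourceResponse
	UpdateCacheFn cacheResourceResponseFn // may be nil, e.g. when the lookup errored
}

// handleStreamedResponses forwards every streamed chunk to the cache update
// function, guarding against a nil function.
func handleStreamedResponses(cached cachedResourceDataResponse, chunks <-chan *callResourceResponse) {
	for chunk := range chunks {
		// UpdateCacheFn may be nil, so it must be checked before every call.
		if cached.UpdateCacheFn == nil {
			continue
		}
		// A plugin can send multiple responses asynchronously, so this function
		// may legitimately run more than once for a single request.
		cached.UpdateCacheFn(chunk)
	}
}

func main() {
	chunks := make(chan *callResourceResponse, 2)
	chunks <- &callResourceResponse{Status: 200, Body: []byte("part 1")}
	chunks <- &callResourceResponse{Status: 200, Body: []byte("part 2")}
	close(chunks)

	cached := cachedResourceDataResponse{
		UpdateCacheFn: func(resp *callResourceResponse) { fmt.Printf("caching %d bytes\n", len(resp.Body)) },
	}
	handleStreamedResponses(cached, chunks)
}
```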
@@ -128,7 +128,7 @@ func (fm *FeatureManager) IsEnabled(flag string) bool {
 return fm.enabled[flag]
 }
 
-// GetEnabled returns a map contaning only the features that are enabled
+// GetEnabled returns a map containing only the features that are enabled
 func (fm *FeatureManager) GetEnabled(ctx context.Context) map[string]bool {
 enabled := make(map[string]bool, len(fm.enabled))
 for key, val := range fm.enabled {
@@ -128,7 +128,7 @@ type DeleteFolderCommand struct {
 }
 
 // GetFolderQuery is used for all folder Get requests. Only one of UID, ID, or
-// Title should be set; if multilpe fields are set by the caller the dashboard
+// Title should be set; if multiple fields are set by the caller the dashboard
 // service will select the field with the most specificity, in order: ID, UID,
 // Title.
 type GetFolderQuery struct {
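The doc comment above says that when a caller sets more than one identifying field, the service resolves the query by the most specific one, in the order ID, then UID, then Title. A small sketch of that selection rule with simplified local types; the real GetFolderQuery has more fields than shown here.

```go
// Sketch of the lookup-precedence rule in the GetFolderQuery comment: if
// several identifying fields are set, ID wins over UID, and UID over Title.
// The query struct is simplified for illustration.
package main

import "fmt"

type getFolderQuery struct {
	ID    *int64
	UID   *string
	Title *string
}

// lookupField reports which field would be used to resolve the query,
// following the documented specificity order: ID, UID, Title.
func lookupField(q getFolderQuery) string {
	switch {
	case q.ID != nil:
		return "ID"
	case q.UID != nil:
		return "UID"
	case q.Title != nil:
		return "Title"
	default:
		return "none"
	}
}

func main() {
	id, uid := int64(42), "f0ld3r"
	// Both ID and UID are set; the more specific ID is used.
	fmt.Println(lookupField(getFolderQuery{ID: &id, UID: &uid}))
}
```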
@@ -99,7 +99,7 @@ func TestProvisioningApi(t *testing.T) {
 })
 })
 
-t.Run("when an unspecified error occurrs", func(t *testing.T) {
+t.Run("when an unspecified error occurs", func(t *testing.T) {
 t.Run("GET returns 500", func(t *testing.T) {
 sut := createProvisioningSrvSut(t)
 sut.policies = &fakeFailingNotificationPolicyService{}
@@ -334,7 +334,7 @@ func TestEvaluatorTest(t *testing.T) {
 })
 })
 
-t.Run("when evalution fails", func(t *testing.T) {
+t.Run("when evaluation fails", func(t *testing.T) {
 expectedError := errors.New("test-error")
 evaluator.evalCallback = func(now time.Time) (eval.Results, error) {
 return nil, expectedError
@@ -354,7 +354,7 @@ func (p *redisPeer) WaitReady(ctx context.Context) error {
 }
 }
 
-// Settle is mostly copied from uptream.
+// Settle is mostly copied from upstream.
 // Ref: https://github.com/prometheus/alertmanager/blob/2888649b473970400c0bd375fdd563486dc80481/cluster/cluster.go#L674-L712
 func (p *redisPeer) Settle(ctx context.Context, interval time.Duration) {
 const NumOkayRequired = 3
@@ -26,8 +26,8 @@ import (
 const (
 HeaderPluginID = "X-Plugin-Id" // can be used for routing
 HeaderDatasourceUID = "X-Datasource-Uid" // can be used for routing/ load balancing
-HeaderDashboardUID = "X-Dashboard-Uid" // mainly useful for debuging slow queries
-HeaderPanelID = "X-Panel-Id" // mainly useful for debuging slow queries
+HeaderDashboardUID = "X-Dashboard-Uid" // mainly useful for debugging slow queries
+HeaderPanelID = "X-Panel-Id" // mainly useful for debugging slow queries
 HeaderQueryGroupID = "X-Query-Group-Id" // mainly useful for finding related queries with query chunking
 HeaderFromExpression = "X-Grafana-From-Expr" // used by datasources to identify expression queries
 )
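The TypeScript enum earlier in the diff and these Go constants describe the same forwarded headers: plugin and datasource IDs for routing, dashboard and panel IDs mainly for debugging slow queries, and a query group ID for correlating related queries. A hedged sketch of server-side code that logs those headers on slow requests; the middleware shape is illustrative and not Grafana's actual code.

```go
// Sketch: read the forwarded headers named above from an incoming request and
// attach them to a log line for slow requests, which is the "debugging slow
// queries" use case the comments mention. Illustrative middleware only.
package main

import (
	"log"
	"net/http"
	"time"
)

const (
	headerDashboardUID = "X-Dashboard-Uid"
	headerPanelID      = "X-Panel-Id"
	headerQueryGroupID = "X-Query-Group-Id"
)

// slowQueryLogger logs any request slower than threshold together with the
// dashboard/panel/query-group headers so the query can be traced back.
func slowQueryLogger(threshold time.Duration, next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		start := time.Now()
		next.ServeHTTP(w, r)
		if elapsed := time.Since(start); elapsed > threshold {
			log.Printf("slow query: %s dashboard=%s panel=%s queryGroup=%s took=%s",
				r.URL.Path,
				r.Header.Get(headerDashboardUID),
				r.Header.Get(headerPanelID),
				r.Header.Get(headerQueryGroupID),
				elapsed)
		}
	})
}

func main() {
	handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusOK)
	})
	log.Fatal(http.ListenAndServe(":8080", slowQueryLogger(500*time.Millisecond, handler)))
}
```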
@@ -1438,7 +1438,7 @@ type EntitySearchResult struct {
 
 // Entity identifier
 GRN *GRN `protobuf:"bytes,1,opt,name=GRN,proto3" json:"GRN,omitempty"`
-// The current veresion of this entity
+// The current version of this entity
 Version string `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"`
 // Content Length
 Size int64 `protobuf:"varint,3,opt,name=size,proto3" json:"size,omitempty"`
@@ -311,7 +311,7 @@ message EntitySearchResult {
 // Entity identifier
 GRN GRN = 1;
 
-// The current veresion of this entity
+// The current version of this entity
 string version = 2;
 
 // Content Length
@@ -306,13 +306,13 @@ func parseNumber(value interface{}) *float64 {
 
 number, ok := value.(json.Number)
 if !ok {
-// in the current inmplementation, errors become nils
+// in the current implementation, errors become nils
 return nil
 }
 
 fvalue, err := number.Float64()
 if err != nil {
-// in the current inmplementation, errors become nils
+// in the current implementation, errors become nils
 return nil
 }
 
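parseNumber converts an interface{} that should hold a json.Number into a *float64, and the corrected comments note that both failure paths (wrong type, unparsable number) become nil. A short sketch showing where json.Number values come from (a decoder with UseNumber) and the same errors-become-nils behavior; only the surrounding harness is invented.

```go
// Sketch: json.Number values arrive from a decoder configured with UseNumber;
// a parseNumber-style helper then turns them into *float64, returning nil on
// any failure (the "errors become nils" behavior the comments describe).
package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

func parseNumber(value interface{}) *float64 {
	number, ok := value.(json.Number)
	if !ok {
		// in the current implementation, errors become nils
		return nil
	}
	fvalue, err := number.Float64()
	if err != nil {
		// in the current implementation, errors become nils
		return nil
	}
	return &fvalue
}

func main() {
	dec := json.NewDecoder(strings.NewReader(`{"took": 12.5, "status": "ok"}`))
	dec.UseNumber() // numbers decode as json.Number instead of float64

	var doc map[string]interface{}
	if err := dec.Decode(&doc); err != nil {
		panic(err)
	}
	if v := parseNumber(doc["took"]); v != nil {
		fmt.Println(*v) // 12.5
	}
	fmt.Println(parseNumber(doc["status"]) == nil) // true: not a json.Number
}
```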
@@ -48,7 +48,7 @@ func (e *StandardDeviationSampler) updateAggregations(val float64) {
 e.m2 += delta * delta2
 }
 
-// standardDeviation calculates the amount of varation in the data
+// standardDeviation calculates the amount of variation in the data
 // https://en.wikipedia.org/wiki/Standard_deviation
 func (e *StandardDeviationSampler) standardDeviation() float64 {
 if e.count < 2 {
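The fields touched by updateAggregations (delta, delta2, m2) and the count < 2 guard are consistent with Welford's online algorithm, where the sample variance is m2 / (count - 1) and the standard deviation its square root. A self-contained sketch of that algorithm under this assumption; it is an illustration of the technique, not code copied from the sampler in the diff.

```go
// Sketch of Welford's online algorithm, which the sampler's aggregates appear
// to follow: update running mean and m2 per observation, then derive the
// sample standard deviation.
package main

import (
	"fmt"
	"math"
)

type welford struct {
	count int
	mean  float64
	m2    float64
}

// update folds one observation into the running aggregates.
func (w *welford) update(val float64) {
	w.count++
	delta := val - w.mean
	w.mean += delta / float64(w.count)
	delta2 := val - w.mean
	w.m2 += delta * delta2
}

// standardDeviation returns the sample standard deviation, the amount of
// variation in the data; with fewer than two samples it is undefined, so 0.
func (w *welford) standardDeviation() float64 {
	if w.count < 2 {
		return 0
	}
	return math.Sqrt(w.m2 / float64(w.count-1))
}

func main() {
	var w welford
	for _, v := range []float64{2, 4, 4, 4, 5, 5, 7, 9} {
		w.update(v)
	}
	fmt.Println(w.standardDeviation()) // ≈ 2.138 for this data set
}
```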
@@ -422,7 +422,7 @@ export function logSeriesToLogsModel(logSeries: DataFrame[], queries: DataQuery[
 const hasUnescapedContent = !!message.match(/\\n|\\t|\\r/);
 
 // Data sources that set up searchWords on backend use meta.custom.searchWords
-// Data sources that set up searchWords trough frontend can use meta.searchWords
+// Data sources that set up searchWords through frontend can use meta.searchWords
 const searchWords = series.meta?.custom?.searchWords ?? series.meta?.searchWords ?? [];
 const entry = hasAnsi ? ansicolor.strip(message) : message;
 
@@ -487,7 +487,7 @@ describe('CloudWatchMetricsQueryRunner', () => {
 });
 });
 
-it('should generate the correct query in the case of one multilple template variables', async () => {
+it('should generate the correct query in the case of one multiple template variables', async () => {
 const { runner, fetchMock, request } = setupMockedMetricsQueryRunner({ variables: [var1, var2, var3, var4] });
 const queries: CloudWatchMetricsQuery[] = [
 {
@@ -524,7 +524,7 @@ describe('CloudWatchMetricsQueryRunner', () => {
 });
 });
 
-it('should generate the correct query in the case of multilple multi template variables', async () => {
+it('should generate the correct query in the case of multiple multi template variables', async () => {
 const { runner, fetchMock, request } = setupMockedMetricsQueryRunner({ variables: [var1, var2, var3, var4] });
 const queries: CloudWatchMetricsQuery[] = [
 {
@@ -553,7 +553,7 @@ describe('CloudWatchMetricsQueryRunner', () => {
 });
 });
 
-it('should generate the correct query for multilple template variables, lack scopedVars', async () => {
+it('should generate the correct query for multiple template variables, lack scopedVars', async () => {
 const { runner, fetchMock, request } = setupMockedMetricsQueryRunner({ variables: [var1, var2, var3, var4] });
 const queries: CloudWatchMetricsQuery[] = [
 {
@@ -729,7 +729,7 @@ export class ElasticDatasource
 const url = this.getMultiSearchUrl();
 
 const termsObservable = config.featureToggles.enableElasticsearchBackendQuerying
-? // TODO: This is run trough resource call, but maybe should run trough query
+? // TODO: This is run through resource call, but maybe should run through query
 from(this.postResourceRequest(url, esQuery))
 : this.legacyQueryRunner.request('POST', url, esQuery);
 
@@ -508,7 +508,7 @@ export class PrometheusDatasource
 trackQuery(response, request, startTime);
 })
 );
-// Run queries trough browser/proxy
+// Run queries through browser/proxy
 } else {
 const start = getPrometheusTime(request.range.from, false);
 const end = getPrometheusTime(request.range.to, true);
@@ -1233,7 +1233,7 @@ export class PrometheusDatasource
 return finalQuery;
 }
 
-// Used when running queries trough backend
+// Used when running queries through backend
 filterQuery(query: PromQuery): boolean {
 if (query.hide || !query.expr) {
 return false;
@@ -1241,7 +1241,7 @@ export class PrometheusDatasource
 return true;
 }
 
-// Used when running queries trough backend
+// Used when running queries through backend
 applyTemplateVariables(target: PromQuery, scopedVars: ScopedVars): Record<string, any> {
 const variables = cloneDeep(scopedVars);
 
@@ -1359,7 +1359,7 @@ def publish_grafanacom_step(edition, ver_mode):
 ver_mode: if ver_mode == 'main', pass the DRONE_BUILD_NUMBER environment
 variable as the value for the --build-id option.
 TODO: is this actually used by the grafanacom subcommand? I think it might
-just use the environment varaiable directly.
+just use the environment variable directly.
 
 Returns:
 Drone step.
@@ -33,7 +33,7 @@ def pipeline(
 edition: used to differentiate the pipeline for enterprise builds.
 trigger: a Drone trigger for the pipeline.
 steps: the Drone steps for the pipeline.
-services: auxilliary services used during the pipeline.
+services: auxiliary services used during the pipeline.
 Defaults to [].
 platform: abstracts platform specific configuration primarily for different Drone behavior on Windows.
 Defaults to 'linux'.