diff --git a/.betterer.results b/.betterer.results index 408d11a39d1..6df3eefe0a5 100644 --- a/.betterer.results +++ b/.betterer.results @@ -3337,11 +3337,9 @@ exports[`better eslint`] = { [0, 0, 0, "Unexpected any. Specify a different type.", "1"] ], "public/app/features/alerting/unified/components/receivers/form/ChannelOptions.tsx:5381": [ - [0, 0, 0, "Do not use any type assertions.", "0"], - [0, 0, 0, "Unexpected any. Specify a different type.", "1"], - [0, 0, 0, "Unexpected any. Specify a different type.", "2"], - [0, 0, 0, "Do not use any type assertions.", "3"], - [0, 0, 0, "Unexpected any. Specify a different type.", "4"] + [0, 0, 0, "Unexpected any. Specify a different type.", "0"], + [0, 0, 0, "Do not use any type assertions.", "1"], + [0, 0, 0, "Unexpected any. Specify a different type.", "2"] ], "public/app/features/alerting/unified/components/receivers/form/ReceiverForm.tsx:5381": [ [0, 0, 0, "Do not use any type assertions.", "0"], @@ -4528,9 +4526,6 @@ exports[`better eslint`] = { [0, 0, 0, "Do not use any type assertions.", "2"], [0, 0, 0, "Do not use any type assertions.", "3"] ], - "public/app/features/inspector/InspectDataTab.tsx:5381": [ - [0, 0, 0, "Unexpected any. Specify a different type.", "0"] - ], "public/app/features/inspector/InspectErrorTab.tsx:5381": [ [0, 0, 0, "Unexpected any. Specify a different type.", "0"] ], @@ -4651,6 +4646,13 @@ exports[`better eslint`] = { "public/app/features/logs/components/logParser.ts:5381": [ [0, 0, 0, "Do not use any type assertions.", "0"] ], + "public/app/features/logs/utils.ts:5381": [ + [0, 0, 0, "Do not use any type assertions.", "0"], + [0, 0, 0, "Unexpected any. Specify a different type.", "1"], + [0, 0, 0, "Do not use any type assertions.", "2"], + [0, 0, 0, "Unexpected any. Specify a different type.", "3"], + [0, 0, 0, "Do not use any type assertions.", "4"] + ], "public/app/features/manage-dashboards/DashboardImportPage.tsx:5381": [ [0, 0, 0, "Unexpected any. Specify a different type.", "0"], [0, 0, 0, "Unexpected any. 
Specify a different type.", "1"] diff --git a/.eslintrc b/.eslintrc index d57ff41338a..3d3cae6a0fd 100644 --- a/.eslintrc +++ b/.eslintrc @@ -113,7 +113,7 @@ } ], "jsx-a11y/no-noninteractive-element-to-interactive-role": "off", - "jsx-a11y/no-noninteractive-tabindex": "off", + "jsx-a11y/no-noninteractive-tabindex": "error", "jsx-a11y/no-redundant-roles": "error", "jsx-a11y/no-static-element-interactions": "off", "jsx-a11y/role-has-required-aria-props": "error", diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 1d68a983622..58222f07297 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -16,9 +16,8 @@ jobs: steps: - uses: actions/checkout@v3 - run: git clone --single-branch --no-tags --depth 1 -b master https://grafanabot:${{ secrets.GH_BOT_ACCESS_TOKEN }}@github.com/grafana/website-sync ./.github/actions/website-sync - - name: generate-packages-docs - uses: actions/setup-node@v3.4.0 - id: generate-docs + - name: setup node + uses: actions/setup-node@v3.4.0 with: node-version: '16' - name: Get yarn cache directory path diff --git a/.gitignore b/.gitignore index 865bc7dd70b..cf9fe79b44d 100644 --- a/.gitignore +++ b/.gitignore @@ -130,6 +130,7 @@ pkg/cmd/grafana-server/__debug_bin /packages/**/.rpt2_cache /packages/**/tsdoc-metadata.json /packages/**/package.tgz +/packages/grafana-toolkit/sass ## CI places the packages in a different location /npm-artifacts/*.tgz diff --git a/contribute/engineering/backend/instrumentation.md b/contribute/engineering/backend/instrumentation.md index b8b77b473dd..c6957d6c969 100644 --- a/contribute/engineering/backend/instrumentation.md +++ b/contribute/engineering/backend/instrumentation.md @@ -55,6 +55,8 @@ When to use which log level? Use a contextual logger to include additional key/value pairs attached to `context.Context`, e.g. `traceID`, to allow correlating logs with traces and/or correlate logs with a common identifier. +You must [Enable tracing in Grafana](#2-enable-tracing-in-grafana) to get a traceID + Example: ```go @@ -241,36 +243,38 @@ Be **careful** to not expose any sensitive information in span names, attribute ### How to collect, visualize and query traces (and correlate logs with traces) locally -1. Start Jaeger +#### 1. Start Jaeger - ```bash - make devenv sources=jaeger - ``` +```bash +make devenv sources=jaeger +``` -2. Enable tracing in Grafana +#### 2. Enable tracing in Grafana - opentelemetry tracing (recommended): +To enable tracing in Grafana, you must set the address in your config.ini file - ```ini - [tracing.opentelemetry.jaeger] - address = http://localhost:14268/api/traces - ``` +opentelemetry tracing (recommended): - opentracing tracing (deprecated/not recommended): +```ini +[tracing.opentelemetry.jaeger] +address = http://localhost:14268/api/traces +``` - ```ini - [tracing.jaeger] - address = localhost:6831 - ``` +opentracing tracing (deprecated/not recommended): -3. Search/browse collected logs and traces in Grafana Explore +```ini +[tracing.jaeger] +address = localhost:6831 +``` - You need provisioned gdev-jaeger and gdev-loki datasources, see [developer dashboard and data sources](https://github.com/grafana/grafana/tree/main/devenv#developer-dashboards-and-data-sources) for setup instructions. +#### 3. Search/browse collected logs and traces in Grafana Explore - Open Grafana explore and select gdev-loki datasource and use the query `{filename="/var/log/grafana/grafana.log"} | logfmt`. 
+You need the provisioned gdev-jaeger and gdev-loki data sources; see [developer dashboard and data sources](https://github.com/grafana/grafana/tree/main/devenv#developer-dashboards-and-data-sources) for setup instructions. - You can then inspect any log message that includes a `traceID` and from there click on `gdev-jaeger` to split view and inspect the trace in question. +Open Grafana Explore, select the gdev-loki data source, and use the query `{filename="/var/log/grafana/grafana.log"} | logfmt`. -4. Search/browse collected traces in Jaeger UI +You can then inspect any log message that includes a `traceID` and from there click on `gdev-jaeger` to split view and inspect the trace in question. - You can open http://localhost:16686 to use the Jaeger UI for browsing and searching traces. +#### 4. Search/browse collected traces in Jaeger UI + +You can open http://localhost:16686 to use the Jaeger UI for browsing and searching traces. diff --git a/docs/sources/alerting/fundamentals/_index.md b/docs/sources/alerting/fundamentals/_index.md index 77ae41cb259..56e87c448fa 100644 --- a/docs/sources/alerting/fundamentals/_index.md +++ b/docs/sources/alerting/fundamentals/_index.md @@ -9,7 +9,7 @@ weight: 105 # Explore Grafana Alerting -Whether you're starting or expanding your implementation of Grafana Alerting, learn more about the key concepts and available features that help you create, manage, and take action on your alerts and improve your team’s ability to resolve issues quickly. +Learn about the key concepts and features that help you create, manage, and take action on your alerts and improve your team's ability to resolve issues quickly. - [Data sources](https://grafana.com/docs/grafana/latest/alerting/fundamentals/data-source-alerting/) - [Alert rules](https://grafana.com/docs/grafana/latest/alerting/fundamentals/alert-rules/) diff --git a/docs/sources/alerting/set-up/provision-alerting-resources/terraform-provisioning/index.md b/docs/sources/alerting/set-up/provision-alerting-resources/terraform-provisioning/index.md index 4401baedfeb..a448cd1fb26 100644 --- a/docs/sources/alerting/set-up/provision-alerting-resources/terraform-provisioning/index.md +++ b/docs/sources/alerting/set-up/provision-alerting-resources/terraform-provisioning/index.md @@ -96,19 +96,17 @@ EOT } ``` -1. Enter text for your notification in the text field. +2. Enter text for your notification in the text field. The `text` field supports [Go-style templating](https://pkg.go.dev/text/template). This enables you to manage your Grafana Alerting message templates directly in Terraform. -1. Run the command ‘terraform apply’. +3. Run the command `terraform apply`. -1. Go to the Grafana UI and check the details of your contact point. - -**Note:** +4. Go to the Grafana UI and check the details of your contact point. You cannot edit resources provisioned via Terraform from the UI. This ensures that your alerting stack always stays in sync with your code. -1. Click **Test** to verify that the contact point works correctly. +5. Click **Test** to verify that the contact point works correctly. **Note:** @@ -172,17 +170,17 @@ contact_point = grafana_contact_point.my_slack_contact_point.name } -1. In the mute_timings field, link a mute timing to your notification policy. +2. In the `mute_timings` field, link a mute timing to your notification policy. -1. Run the command ‘terraform apply’. +3. Run the command `terraform apply`. -1. Go to the Grafana UI and check the details of your notification policy. +4.
Go to the Grafana UI and check the details of your notification policy. **Note:** You cannot edit resources provisioned from Terraform from the UI. This ensures that your alerting stack always stays in sync with your code. -1. Click **Test** to verify that the notification point is working correctly. +5. Click **Test** to verify that the notification policy is working correctly. ## Provision mute timings @@ -209,16 +207,16 @@ name = "My Mute Timing" } -1. Run the command ‘terraform apply’. -1. Go to the Grafana UI and check the details of your mute timing. -1. Reference your newly created mute timing in a notification policy using the `mute_timings` field. +2. Run the command `terraform apply`. +3. Go to the Grafana UI and check the details of your mute timing. +4. Reference your newly created mute timing in a notification policy using the `mute_timings` field. This will apply your mute timing to some or all of your notifications. **Note:** You cannot edit resources provisioned from Terraform from the UI. This ensures that your alerting stack always stays in sync with your code. -1. Click **Test** to verify that the mute timing is working correctly. +5. Click **Test** to verify that the mute timing is working correctly. ## Provision alert rules @@ -243,11 +241,11 @@ resource "grafana_folder" "rule_folder" { } ``` -1. Define an alert rule. +2. Define an alert rule. For more information on alert rules, refer to [how to create Grafana-managed alerts](https://grafana.com/blog/2022/08/01/grafana-alerting-video-how-to-create-alerts-in-grafana-9/). -1. Create a rule group containing one or more rules. +3. Create a rule group containing one or more rules. In this example, the `grafana_rule_group` resource group is used. @@ -314,7 +312,7 @@ EOT } ``` -1. Go to the Grafana UI and check your alert rule. +4. Go to the Grafana UI and check your alert rule. You can see whether or not the alert rule is firing. You can also see a visualization of each of the alert rule’s query stages diff --git a/docs/sources/dashboards/dashboard-public.md b/docs/sources/dashboards/dashboard-public.md index d39db17bf7a..b26b2e19df4 100644 --- a/docs/sources/dashboards/dashboard-public.md +++ b/docs/sources/dashboards/dashboard-public.md @@ -59,5 +59,6 @@ publicDashboards = true - Exemplars will be omitted from the panel. - Annotations will not be displayed in public dashboards. - Grafana Live and real-time event streams are not supported. +- Library panels are currently not supported, but support is planned for the future. We are excited to share this enhancement with you and we’d love your feedback! Please check out the [GitHub](https://github.com/grafana/grafana/discussions/49253) discussion and join the conversation. diff --git a/docs/sources/developers/http_api/reporting.md b/docs/sources/developers/http_api/reporting.md index 12ffa51ed33..19fcb80d471 100644 --- a/docs/sources/developers/http_api/reporting.md +++ b/docs/sources/developers/http_api/reporting.md @@ -15,16 +15,416 @@ title: Reporting API This API allows you to interact programmatically with the [Reporting]({{< relref "../../dashboards/create-reports/" >}}) feature. +> The Reporting API is not yet stable; it is still in active development and may change without prior notice. > Reporting is only available in Grafana Enterprise. Read more about [Grafana Enterprise]({{< relref "../../enterprise/" >}}). > If you are running Grafana Enterprise, for some endpoints you'll need to have specific permissions.
Refer to [Role-based access control permissions]({{< relref "../../administration/roles-and-permissions/access-control/custom-role-actions-scopes/" >}}) for more information. +## List all reports + +`GET /api/reports` + +#### Required permissions + +See note in the [introduction]({{< ref "#reporting-api" >}}) for an explanation. + +| Action | Scope | +| ------------ | --------------------------- | +| reports:read | reports:\*
reports:id:\* | + +### Example request + +```http +GET /api/reports HTTP/1.1 +Accept: application/json +Content-Type: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk +``` + +### Example response + +```http +HTTP/1.1 200 OK +Content-Type: application/json +Content-Length: 1840 + +[ + { + "id": 2, + "userId": 1, + "orgId": 1, + "name": "Report 2", + "recipients": "example-report@grafana.com", + "replyTo": "", + "message": "Hi, \nPlease find attached a PDF status report. If you have any questions, feel free to contact me!\nBest,", + "schedule": { + "startDate": "2022-10-02T00:00:00+02:00", + "endDate": null, + "frequency": "once", + "intervalFrequency": "", + "intervalAmount": 0, + "workdaysOnly": false, + "dayOfMonth": "2", + "timeZone": "Europe/Warsaw" + }, + "options": { + "orientation": "landscape", + "layout": "grid" + }, + "enableDashboardUrl": true, + "state": "scheduled", + "dashboards": [ + { + "dashboard": { + "id": 463, + "uid": "7MeksYbmk", + "name": "Alerting with TestData" + }, + "reportVariables": { + "namefilter": "TestData" + } + } + ], + "formats": [ + "pdf", + "csv" + ], + "created": "2022-09-19T11:44:42+02:00", + "updated": "2022-09-19T11:44:42+02:00" + } +] +``` + +### Status Codes + +- **200** – OK +- **401** - Authentication failed, refer to [Authentication API]({{< relref "auth/" >}}). +- **500** – Unexpected error or server misconfiguration. Refer to server logs for more details. + +## Get a report + +`GET /api/reports/:id` + +#### Required permissions + +See note in the [introduction]({{< ref "#reporting-api" >}}) for an explanation. + +| Action | Scope | +| ------------ | ---------------------------------------------------------- | +| reports:read | reports:\*<br>
reports:id:\*
reports:id:1 (single report) | + +### Example request + +```http +GET /api/reports/2 HTTP/1.1 +Accept: application/json +Content-Type: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk +``` + +### Example response + +```http +HTTP/1.1 200 OK +Content-Type: application/json +Content-Length: 940 + +{ + "id": 2, + "userId": 1, + "orgId": 1, + "name": "Report 2", + "recipients": "example-report@grafana.com", + "replyTo": "", + "message": "Hi, \nPlease find attached a PDF status report. If you have any questions, feel free to contact me!\nBest,", + "schedule": { + "startDate": "2022-10-02T00:00:00+02:00", + "endDate": null, + "frequency": "once", + "intervalFrequency": "", + "intervalAmount": 0, + "workdaysOnly": false, + "dayOfMonth": "2", + "timeZone": "Europe/Warsaw" + }, + "options": { + "orientation": "landscape", + "layout": "grid" + }, + "enableDashboardUrl": true, + "state": "scheduled", + "dashboards": [ + { + "dashboard": { + "id": 463, + "uid": "7MeksYbmk", + "name": "Alerting with TestData" + }, + "timeRange": { + "from": "", + "to": "" + }, + "reportVariables": { + "namefilter": "TestData" + } + } + ], + "formats": [ + "pdf", + "csv" + ], + "created": "2022-09-12T11:44:42+02:00", + "updated": "2022-09-12T11:44:42+02:00" +} +``` + +### Status Codes + +- **200** – OK +- **400** – Bad request (invalid report ID). +- **401** - Authentication failed, refer to [Authentication API]({{< relref "auth/" >}}). +- **403** – Forbidden (access denied to a report or a dashboard used in the report). +- **404** – Not found (such report does not exist). +- **500** – Unexpected error or server misconfiguration. Refer to server logs for more details. + +## Create a report + +`POST /api/reports` + +#### Required permissions + +See note in the [introduction]({{< ref "#reporting-api" >}}) for an explanation. + +| Action | Scope | +| -------------- | ----- | +| reports:create | n/a | + +### Example request + +```http +POST /api/reports HTTP/1.1 +Accept: application/json +Content-Type: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk + +{ + "name": "Report 4", + "recipients": "example-report@grafana.com", + "replyTo": "", + "message": "Hello, please find the report attached", + "schedule": { + "startDate": "2022-10-02T10:00:00+02:00", + "endDate": "2022-11-02T20:00:00+02:00", + "frequency": "daily", + "intervalFrequency": "", + "intervalAmount": 0, + "workdaysOnly": true, + "timeZone": "Europe/Warsaw" + }, + "options": { + "orientation": "landscape", + "layout": "grid" + }, + "enableDashboardUrl": true, + "dashboards": [ + { + "dashboard": { + "uid": "7MeksYbmk" + }, + "timeRange": { + "from": "2022-08-08T15:00:00+02:00", + "to": "2022-09-02T17:00:00+02:00" + }, + "reportVariables": { + "variable1": "Value1" + } + } + ], + "formats": [ + "pdf", + "csv" + ] +} +``` + +#### Config JSON Body Schema + +| Field name | Data type | Description | +| ------------------ | --------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| name | string | Name of the report that is used as an email subject.
| +| recipients | string | Comma-separated list of emails to send the report to. | +| replyTo | string | Comma-separated list of emails used in a reply-to field of the report email. | +| message | string | Text message used for the body of the report email. | +| startDate | string | Report distribution starts from this date. | +| endDate | string | Report distribution ends on this date. | +| frequency | string | Specifies how often the report should be sent. Can be `once`, `hourly`, `daily`, `weekly`, `monthly`, `last` or `custom`.<br>

`last` - schedules the report for the last day of the month.<br>

`custom` - schedules the report to be sent on a custom interval.
It requires `intervalFrequency` and `intervalAmount` to be specified: for example, every 2 weeks, where 2 is an `intervalAmount` and `weeks` is an `intervalFrequency`. | +| intervalFrequency | string | The type of the `custom` interval: `hours`, `days`, `weeks`, `months`. | +| intervalAmount | number | `custom` interval amount. | +| workdaysOnly | bool | Send the report only on Monday-Friday. Applicable to `hourly` and `daily` types of schedule. | +| timeZone | string | Time zone used to schedule report execution. | +| orientation | string | Can be `portrait` or `landscape`. | +| layout | string | Can be `grid` or `simple`. | +| enableDashboardUrl | bool | Adds a dashboard URL to the bottom of the report email. | +| formats | []string | Specifies what kind of attachment to generate for the report - `csv`, `pdf`, `image`.<br>
`pdf` is the default.<br>
`csv` attaches a CSV file for each table panel.
`image` embeds an image of a dashboard into the email's body. | +| dashboards | []object | Dashboards to generate a report for.
See "Report Dashboard Schema" section below. | + +#### Report Dashboard Schema + +| Field name | Data type | Description | +| ------------------------------ | --------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| dashboard.uid | string | Dashboard [UID](../dashboard#identifier-id-vs-unique-identifier-uid). | +| timeRange.from | string | Dashboard time range from. | +| timeRange.to | string | Dashboard time range to. | +| reportVariables. | string | Key-value pairs containing the template variables for this report, in JSON format. If empty, the template variables from the report's dashboard will be used. | + +### Example response + +```http +HTTP/1.1 200 OK +Content-Type: application/json +Content-Length: 35 + +{ + "id": 4, + "message": "Report created" +} +``` + +### Status Codes + +- **200** – OK +- **400** – Bad request (invalid json, missing or invalid fields values, etc.). +- **403** - Forbidden (access denied to a report or a dashboard used in the report). +- **500** - Unexpected error or server misconfiguration. Refer to server logs for more details + +## Update a report + +`PUT /api/reports/:id` + +#### Required permissions + +See note in the [introduction]({{< ref "#reporting-api" >}}) for an explanation. + +| Action | Scope | +| ------------- | --------------------------------------------------------- | +| reports:write | reports:\*
reports:id:\*
reports:1 (single report) | + +### Example request + +See [JSON body schema]({{< ref "#config-json-body-schema" >}}) for field descriptions. + +```http +PUT /api/reports/2 HTTP/1.1 +Accept: application/json +Content-Type: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk + +{ + "name": "Updated Report", + "recipients": "example-report@grafana.com", + "replyTo": "", + "message": "Hello, please find the report attached", + "schedule": { + "frequency": "hourly", + "timeZone": "Africa/Cairo", + "workdaysOnly": true, + "startDate": "2022-10-10T10:00:00+02:00", + "endDate": "2022-11-20T19:00:00+02:00" + }, + "options": { + "orientation": "landscape", + "layout": "grid" + }, + "enableDashboardUrl": true, + "state": "scheduled", + "dashboards": [ + { + "dashboard": { + "id": 463, + "uid": "7MeksYbmk", + "name": "Alerting with TestData" + }, + "timeRange": { + "from": "2022-08-08T15:00:00+02:00", + "to": "2022-09-02T17:00:00+02:00" + }, + "reportVariables": { + "variable1": "Value1" + } + } + ], + "formats": [ + "pdf", + "csv" + ] +} +``` + +### Example response + +```http +HTTP/1.1 200 OK +Content-Type: application/json +Content-Length: 28 + +{ + "message": "Report updated" +} +``` + +### Status Codes + +- **200** – OK +- **400** – Bad request (invalid json, missing or invalid field values, etc.). +- **401** - Authentication failed, refer to [Authentication API]({{< relref "auth/" >}}). +- **403** – Forbidden (access denied to a report or a dashboard used in the report). +- **404** – Not found (such report does not exist). +- **500** – Unexpected error or server misconfiguration. Refer to server logs for more details. + +## Delete a report + +`DELETE /api/reports/:id` + +#### Required permissions + +See note in the [introduction]({{< ref "#reporting-api" >}}) for an explanation. + +| Action | Scope | +| -------------- | --------------------------------------------------------- | +| reports:delete | reports:\*<br>
reports:id:\*
reports:1 (single report) | + +### Example request + +```http +DELETE /api/reports/6 HTTP/1.1 +Accept: application/json +Content-Type: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk +``` + +### Example response + +```http +HTTP/1.1 200 OK +Content-Type: application/json +Content-Length: 39 + +{ + "message": "Report config was removed" +} +``` + +### Status Codes + +- **200** – OK +- **400** – Bad request (invalid report ID). +- **401** - Authentication failed, refer to [Authentication API]({{< relref "auth/" >}}). +- **404** - Not found (report with this ID does not exist). +- **500** - Unexpected error or server misconfiguration. Refer to server logs for more details. + +## Send a report -> Only available in Grafana Enterprise v7.0+. - -> This API endpoint is experimental and may be deprecated in a future release. On deprecation, a migration strategy will be provided and the endpoint will remain functional until the next major release of Grafana. - `POST /api/reports/email` Generate and send a report. This API waits for the report to be generated before returning. We recommend that you set the client's timeout to at least 60 seconds. @@ -51,13 +451,13 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk } ``` -### JSON Body Schema +#### JSON Body Schema -| Field name | Data type | Description | -| ------------------- | --------- | -------------------------------------------------------------------------------------------------------------------------------------------------------- | -| id | string | ID of the report to send. It is the same as in the URL when editing a report, not to be confused with the ID of the dashboard. Required. | -| emails | string | Comma-separated list of emails to which to send the report to. Overrides the emails from the report. Required if **useEmailsFromReport** is not present. | -| useEmailsFromReport | boolean | Send the report to the emails specified in the report. Required if **emails** is not present. | +| Field name | Data type | Description | +| ------------------- | --------- | ------------------------------------------------------------------------------------------------------------------------------------------------------ | +| id | string | ID of the report to send. It is the same as in the URL when editing a report, not to be confused with the ID of the dashboard. Required. | +| emails | string | Comma-separated list of emails to send the report to. Overrides the emails from the report. Required if `useEmailsFromReport` is not present. | +| useEmailsFromReport | boolean | Send the report to the emails specified in the report. Required if `emails` is not present. | ### Example response @@ -71,11 +471,205 @@ Content-Length: 29 ### Status Codes -| Code | Description | -| ---- | ----------------------------------------------------------------------------------- | -| 200 | Report was sent. | -| 400 | Bad request (invalid json, missing content-type, missing or invalid fields, etc.). | -| 401 | Authentication failed, refer to [Authentication API]({{< relref "auth/" >}}). | -| 403 | User is authenticated but is not authorized to generate the report. | -| 404 | Report not found. | -| 500 | Unexpected error or server misconfiguration. Refer to server logs for more details. | +- **200** – Report was sent. +- **400** – Bad request (invalid json, missing content-type, missing or invalid fields, etc.). +- **401** - Authentication failed, refer to [Authentication API]({{< relref "auth/" >}}).
+- **403** - Forbidden (access denied to a report or a dashboard used in the report). +- **404** - Report not found. +- **500** - Unexpected error or server misconfiguration. Refer to server logs for more details. + +## Get reports branding settings + +`GET /api/reports/settings` + +Returns reports branding settings that are global and used across all reports. + +#### Required permissions + +See note in the [introduction]({{< ref "#reporting-api" >}}) for an explanation. + +| Action | Scope | +| --------------------- | ----- | +| reports.settings:read | n/a | + +### Example request + +```http +GET /api/reports/settings HTTP/1.1 +Accept: application/json +Content-Type: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk +``` + +### Example response + +```http +HTTP/1.1 200 OK +Content-Type: application/json +Content-Length: 181 + +{ + "id": 1, + "userId": 1, + "orgId": 1, + "branding": { + "reportLogoUrl": "", + "emailLogoUrl": "", + "emailFooterMode": "sent-by", + "emailFooterText": "Grafana Labs", + "emailFooterLink": "https://grafana.com/" + } +} +``` + +### Status Codes + +- **200** – OK +- **401** - Authentication failed, refer to [Authentication API]({{< relref "auth/" >}}). +- **500** - Unexpected error or server misconfiguration. Refer to server logs for more details. + +## Save reports branding settings + +`POST /api/reports/settings` + +Creates settings if they don't exist; otherwise, updates them. These settings are global and used across all reports. + +#### Required permissions + +See note in the [introduction]({{< ref "#reporting-api" >}}) for an explanation. + +| Action | Scope | +| ---------------------- | ----- | +| reports.settings:write | n/a | + +### Example request + +```http +POST /api/reports/settings HTTP/1.1 +Accept: application/json +Content-Type: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk + +{ + "branding": { + "reportLogoUrl": "https://grafana.com/reportLogo.jpg", + "emailLogoUrl": "https://grafana.com/emailLogo.jpg", + "emailFooterMode": "sent-by", + "emailFooterText": "Grafana Labs", + "emailFooterLink": "https://grafana.com/" + } +} +``` + +#### JSON Body Schema + +| Field name | Data type | Description | +| ------------------------ | --------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| branding.reportLogoUrl | string | URL of an image used as a logo on every page of the report. | +| branding.emailLogoUrl | string | URL of an image used as a logo in the email. | +| branding.emailFooterMode | string | Can be `sent-by` or `none`.<br>
`sent-by` adds a "Sent by `branding.emailFooterText`" footer link to the email. Requires specifying values in the `branding.emailFooterText` and `branding.emailFooterLink` fields.
`none` suppresses adding a "Sent by" footer link to the email. | +| branding.emailFooterText | string | Text of the link added to the email "Sent by" footer. | +| branding.emailFooterLink | string | URL of the link added to the email "Sent by" footer. | + +### Example response + +```http +HTTP/1.1 200 OK +Content-Type: application/json +Content-Length: 35 + +{ + "message": "Report settings saved" +} +``` + +### Status Codes + +- **200** – OK +- **400** – Bad request (invalid json, missing or invalid field values, etc.). +- **401** - Authentication failed, refer to [Authentication API]({{< relref "auth/" >}}). +- **500** - Unexpected error or server misconfiguration. Refer to server logs for more details. + +## Send a test email + +`POST /api/reports/test-email` + +Sends a test email with a report without persisting it in the database. + +#### Required permissions + +See note in the [introduction]({{< ref "#reporting-api" >}}) for an explanation. + +| Action | Scope | +| ------------ | ----- | +| reports:send | n/a | + +### Example request + +See [JSON body schema]({{< ref "#config-json-body-schema" >}}) for field descriptions. + +```http +POST /api/reports/test-email HTTP/1.1 +Accept: application/json +Content-Type: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk + +{ + "name": "Report 4", + "recipients": "example-report@grafana.com", + "replyTo": "", + "message": "Hello, please find the report attached", + "schedule": { + "startDate": "2022-10-02T10:00:00+02:00", + "endDate": "2022-11-02T20:00:00+02:00", + "frequency": "daily", + "intervalFrequency": "", + "intervalAmount": 0, + "workdaysOnly": true, + "timeZone": "Europe/Warsaw" + }, + "options": { + "orientation": "landscape", + "layout": "grid" + }, + "enableDashboardUrl": true, + "dashboards": [ + { + "dashboard": { + "uid": "7MeksYbmk" + }, + "timeRange": { + "from": "2022-08-08T15:00:00+02:00", + "to": "2022-09-02T17:00:00+02:00" + }, + "reportVariables": { + "variable1": "Value1" + } + } + ], + "formats": [ + "pdf", + "csv" + ] +} +``` + +### Example response + +```http +HTTP/1.1 200 OK +Content-Type: application/json +Content-Length: 29 + +{ + "message": "Test email sent" +} +``` + +### Status Codes + +- **200** – OK +- **400** – Bad request (invalid json, missing or invalid field values, etc.). +- **401** - Authentication failed, refer to [Authentication API]({{< relref "auth/" >}}). +- **403** - Forbidden (access denied to a report or a dashboard used in the report). +- **500** - Unexpected error or server misconfiguration.
Refer to server logs for more details diff --git a/docs/sources/developers/plugins/migration-guide.md b/docs/sources/developers/plugins/migration-guide.md index 8b525640f69..8269d182272 100644 --- a/docs/sources/developers/plugins/migration-guide.md +++ b/docs/sources/developers/plugins/migration-guide.md @@ -17,6 +17,8 @@ This guide helps you identify the steps required to update a plugin from the Gra - [Plugin migration guide](#plugin-migration-guide) - [Introduction](#introduction) - [Table of contents](#table-of-contents) + - [From version 9.1.x to 9.2.x](#from-version-91x-to-92x) + - [NavModelItem requires a valid icon name](#navmodelitem-requires-a-valid-icon-name) - [From version 8.x to 9.x](#from-version-8x-to-9x) - [9.0 breaking changes](#90-breaking-changes) - [theme.visualization.getColorByName replaces getColorForTheme](#themevisualizationgetcolorbyname-replaces-getcolorfortheme) @@ -60,6 +62,32 @@ This guide helps you identify the steps required to update a plugin from the Gra - [Migrate to data frames](#migrate-to-data-frames) - [Troubleshoot plugin migration](#troubleshoot-plugin-migration) +## From version 9.1.x to 9.2.x + +### NavModelItem requires a valid icon name + +The typings of the `NavModelItem` have improved to only allow a valid `IconName` for the icon property. You can find the complete list of valid icons [here](https://github.com/grafana/grafana/blob/v9.2.0-beta1/packages/grafana-data/src/types/icon.ts). The icons specified in the list will work for older versions of Grafana 9. + +Example: + +```ts +// before +const model: NavModelItem = { + id: 'settings', + text: 'Settings', + icon: 'fa fa-cog', + url: `${baseUrl}/settings`, +}; + +// after +const model: NavModelItem = { + id: 'settings', + text: 'Settings', + icon: 'cog', + url: `${baseUrl}/settings`, +}; +``` + ## From version 8.x to 9.x ### 9.0 breaking changes diff --git a/docs/sources/panels/configure-standard-options/index.md b/docs/sources/panels/configure-standard-options/index.md index 587d297362b..ca7da3050dc 100644 --- a/docs/sources/panels/configure-standard-options/index.md +++ b/docs/sources/panels/configure-standard-options/index.md @@ -54,6 +54,7 @@ You can use the unit dropdown to also specify custom units, custom prefix or suf To select a custom unit enter the unit and select the last `Custom: xxx` option in the dropdown. - `suffix:` for custom unit that should go after value. +- `prefix:` for custom unit that should go before value. - `time:` For custom date time formats type for example `time:YYYY-MM-DD`. See [formats](https://momentjs.com/docs/#/displaying/) for the format syntax and options. - `si:` for custom SI units. For example: `si: mF`. This one is a bit more advanced as you can specify both a unit and the source data scale. So if your source data is represented as milli (thousands of) something prefix the unit with that diff --git a/docs/sources/panels/query-a-data-source/use-expressions-to-manipulate-data/about-expressions.md b/docs/sources/panels/query-a-data-source/use-expressions-to-manipulate-data/about-expressions.md index 4e760a47c3f..5b4535fe089 100644 --- a/docs/sources/panels/query-a-data-source/use-expressions-to-manipulate-data/about-expressions.md +++ b/docs/sources/panels/query-a-data-source/use-expressions-to-manipulate-data/about-expressions.md @@ -114,7 +114,7 @@ is_nan takes a number or a series and returns `1` for `NaN` values and `0` for o ##### is_null -is_nan takes a number or a series and returns `1` for `null` values and `0` for other values. 
For example `is_null($A)`. +is_null takes a number or a series and returns `1` for `null` values and `0` for other values. For example `is_null($A)`. ##### is_number diff --git a/e2e/various-suite/loki-query-builder.spec.ts b/e2e/various-suite/loki-query-builder.spec.ts index 32ee27e0d5e..6557a993c4c 100644 --- a/e2e/various-suite/loki-query-builder.spec.ts +++ b/e2e/various-suite/loki-query-builder.spec.ts @@ -14,6 +14,8 @@ const addDataSource = () => { }); }; +const finalQuery = 'rate({instance=~"instance1|instance2"} | logfmt | __error__=`` [$__interval]'; + describe('Loki query builder', () => { beforeEach(() => { e2e.flows.login('admin', 'admin'); @@ -37,8 +39,6 @@ describe('Loki query builder', () => { req.reply({ status: 'success', data: [{ instance: 'instance1' }] }); }); - const finalQuery = 'rate({instance=~"instance1|instance2"} | logfmt | __error__=`` [$__interval]'; - // Go to Explore and choose Loki data source e2e.pages.Explore.visit(); e2e.components.DataSourcePicker.container().should('be.visible').click(); @@ -72,13 +72,21 @@ describe('Loki query builder', () => { e2e().contains(MISSING_LABEL_FILTER_ERROR_MESSAGE).should('not.exist'); e2e().contains(finalQuery).should('be.visible'); - // Switch to code editor and check if query was parsed - for (const word of finalQuery.split(' ')) { - e2e().contains(word).should('be.visible'); - } + // Toggle raw query + e2e().contains('label', 'Raw query').click(); + e2e().contains('Raw query').should('have.length', 1); - // Switch to explain mode and check if query is visible + // Change to code editor + e2e().contains('label', 'Code').click(); + // We need to check each part of the query individually because the final query is split into separate DOM elements, so e2e().contains(finalQuery).should('be.visible') does not detect the query.
+ e2e().contains('rate').should('be.visible'); + e2e().contains('instance1|instance2').should('be.visible'); + e2e().contains('logfmt').should('be.visible'); + e2e().contains('__error__').should('be.visible'); + e2e().contains('$__interval').should('be.visible'); + + // Checks the explain mode toggle e2e().contains('label', 'Explain').click(); - e2e().contains(finalQuery).should('be.visible'); + e2e().contains('Fetch all log lines matching label filters.').should('be.visible'); }); }); diff --git a/go.mod b/go.mod index 92fd45c35d6..46db1203e84 100644 --- a/go.mod +++ b/go.mod @@ -55,7 +55,7 @@ require ( github.com/gorilla/websocket v1.5.0 github.com/gosimple/slug v1.12.0 github.com/grafana/cuetsy v0.1.1 - github.com/grafana/grafana-aws-sdk v0.10.8 + github.com/grafana/grafana-aws-sdk v0.11.0 github.com/grafana/grafana-azure-sdk-go v1.3.0 github.com/grafana/grafana-plugin-sdk-go v0.139.0 github.com/grafana/thema v0.0.0-20220817114012-ebeee841c104 @@ -106,7 +106,7 @@ require ( go.opentelemetry.io/otel/trace v1.6.3 golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d golang.org/x/exp v0.0.0-20220613132600-b0d781184e0d - golang.org/x/net v0.0.0-20220722155237-a158d28d115b // indirect + golang.org/x/net v0.0.0-20220909164309-bea034e7d591 // indirect golang.org/x/oauth2 v0.0.0-20220608161450-d0670ef3b1eb golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4 golang.org/x/time v0.0.0-20220609170525-579cf78fd858 @@ -231,7 +231,7 @@ require ( go.opencensus.io v0.23.0 // indirect go.uber.org/atomic v1.9.0 go.uber.org/goleak v1.1.12 // indirect - golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f // indirect + golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10 // indirect golang.org/x/text v0.3.7 golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f // indirect google.golang.org/appengine v1.6.7 // indirect @@ -254,6 +254,7 @@ require ( github.com/google/go-github/v45 v45.2.0 github.com/grafana/dskit v0.0.0-20211011144203-3a88ec0b675f github.com/jmoiron/sqlx v1.3.5 + github.com/matryer/is v1.4.0 github.com/urfave/cli v1.22.5 go.etcd.io/etcd/api/v3 v3.5.4 go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.31.0 diff --git a/go.sum b/go.sum index 272fd7e2903..519b3771fb6 100644 --- a/go.sum +++ b/go.sum @@ -1374,8 +1374,8 @@ github.com/grafana/dskit v0.0.0-20211011144203-3a88ec0b675f h1:FvvSVEbnGeM2bUivG github.com/grafana/dskit v0.0.0-20211011144203-3a88ec0b675f/go.mod h1:uPG2nyK4CtgNDmWv7qyzYcdI+S90kHHRWvHnBtEMBXM= github.com/grafana/go-mssqldb v0.0.0-20210326084033-d0ce3c521036 h1:GplhUk6Xes5JIhUUrggPcPBhOn+eT8+WsHiebvq7GgA= github.com/grafana/go-mssqldb v0.0.0-20210326084033-d0ce3c521036/go.mod h1:xbL0rPBG9cCiLr28tMa8zpbdarY27NDyej4t/EjAShU= -github.com/grafana/grafana-aws-sdk v0.10.8 h1:6MGlWlQD4E0aI+Vp4Cfgzsj9V3U7kSQ1wCye9D1NMoU= -github.com/grafana/grafana-aws-sdk v0.10.8/go.mod h1:5Iw3xY7iXJfNaYHrRHMXa/kaB2lWoyntg71PPLGvSs8= +github.com/grafana/grafana-aws-sdk v0.11.0 h1:ncPD/UN0wNcKq3kEU90RdvrnK/6R4VW2Lo5dPcGk9t0= +github.com/grafana/grafana-aws-sdk v0.11.0/go.mod h1:5Iw3xY7iXJfNaYHrRHMXa/kaB2lWoyntg71PPLGvSs8= github.com/grafana/grafana-azure-sdk-go v1.3.0 h1:zboQpq/ljBjqHo/6UQNZAUwqGTtnEGRYSEnqIQvLuAo= github.com/grafana/grafana-azure-sdk-go v1.3.0/go.mod h1:rgrnK9m6CgKlgx4rH3FFP/6dTdyRO6LYC2mVZov35yo= github.com/grafana/grafana-google-sdk-go v0.0.0-20211104130251-b190293eaf58 h1:2ud7NNM7LrGPO4x0NFR8qLq68CqI4SmB7I2yRN2w9oE= @@ -2881,8 +2881,8 @@ golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su golang.org/x/net 
v0.0.0-20220325170049-de3da57026de/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220401154927-543a649e0bdd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220418201149-a630d4f3e7a2/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= -golang.org/x/net v0.0.0-20220722155237-a158d28d115b h1:PxfKdU9lEEDYjdIzOtC4qFWgkU2rGHdKlKowJSMN9h0= -golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.0.0-20220909164309-bea034e7d591 h1:D0B/7al0LLrVC8aWF4+oxpv/m8bc7ViFfVS8/gXGdqI= +golang.org/x/net v0.0.0-20220909164309-bea034e7d591/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20181106182150-f42d05182288/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20181203162652-d668ce993890/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= @@ -3097,8 +3097,8 @@ golang.org/x/sys v0.0.0-20220330033206-e17cdc41300f/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220422013727-9388b58f7150/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f h1:v4INt8xihDGvnrfjMDVXGxw9wrfxYyCjk0KbXjhR55s= -golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10 h1:WIoqL4EROvwiPdUtaip4VcDdpZ4kha7wBWZrbVKCIZg= +golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210220032956-6a3ed077a48d/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= diff --git a/package.json b/package.json index 2df90d27fa5..f9e8e785985 100644 --- a/package.json +++ b/package.json @@ -173,6 +173,7 @@ "babel-loader": "8.2.5", "babel-plugin-angularjs-annotate": "0.10.0", "babel-plugin-macros": "3.1.0", + "blob-polyfill": "7.0.20220408", "copy-webpack-plugin": "9.0.1", "css-loader": "6.7.1", "css-minimizer-webpack-plugin": "4.1.0", @@ -257,7 +258,7 @@ "@grafana/e2e-selectors": "workspace:*", "@grafana/experimental": "^0.0.2-canary.36", "@grafana/google-sdk": "0.0.3", - "@grafana/lezer-logql": "0.1.0", + "@grafana/lezer-logql": "0.1.1", "@grafana/monaco-logql": "^0.0.6", "@grafana/runtime": "workspace:*", "@grafana/schema": "workspace:*", @@ -354,7 +355,7 @@ "rc-drawer": "4.4.3", "rc-slider": "9.7.5", "rc-time-picker": "3.7.3", - "rc-tree": "5.6.6", + "rc-tree": "5.7.0", "re-resizable": "6.9.9", "react": "17.0.2", "react-awesome-query-builder": "5.3.1", @@ -408,7 +409,7 @@ "resolutions": { "underscore": "1.13.4", "@types/slate": "0.47.9", - "@rushstack/node-core-library": "3.52.0", + "@rushstack/node-core-library": "3.53.0", "@rushstack/rig-package": "0.3.13", "@rushstack/ts-command-line": "4.12.1", "@storybook/react/webpack": "5.74.0", diff --git a/packages/grafana-data/package.json b/packages/grafana-data/package.json index e2d293a0fde..4e197b6fe70 100644 --- 
a/packages/grafana-data/package.json +++ b/packages/grafana-data/package.json @@ -23,7 +23,8 @@ }, "files": [ "dist", - "CHANGELOG.md", + "./README.md", + "./CHANGELOG.md", "LICENSE_APACHE2" ], "scripts": { diff --git a/packages/grafana-data/src/utils/logs.ts b/packages/grafana-data/src/utils/logs.ts index 35bd1430d7e..93993a0178e 100644 --- a/packages/grafana-data/src/utils/logs.ts +++ b/packages/grafana-data/src/utils/logs.ts @@ -16,6 +16,7 @@ const LOGFMT_REGEXP = /(?:^|\s)([\w\(\)\[\]\{\}]+)=(""|(?:".*?[^\\]"|[^"\s]\S*)) * * Example: `getLogLevel('WARN 1999-12-31 this is great') // LogLevel.warn` */ +/** @deprecated will be removed in the next major version */ export function getLogLevel(line: string): LogLevel { if (!line) { return LogLevel.unknown; @@ -37,6 +38,7 @@ export function getLogLevel(line: string): LogLevel { return level; } +/** @deprecated will be removed in the next major version */ export function getLogLevelFromKey(key: string | number): LogLevel { const level = (LogLevel as any)[key.toString().toLowerCase()]; if (level) { @@ -46,6 +48,7 @@ export function getLogLevelFromKey(key: string | number): LogLevel { return LogLevel.unknown; } +/** @deprecated will be removed in the next major version */ export function addLogLevelToSeries(series: DataFrame, lineIndex: number): DataFrame { const levels = new ArrayVector(); const lines = series.fields[lineIndex]; @@ -68,6 +71,7 @@ export function addLogLevelToSeries(series: DataFrame, lineIndex: number): DataF }; } +/** @deprecated will be removed in the next major version */ export const LogsParsers: { [name: string]: LogsParser } = { JSON: { buildMatcher: (label) => new RegExp(`(?:{|,)\\s*"${label}"\\s*:\\s*"?([\\d\\.]+|[^"]*)"?`), @@ -109,6 +113,7 @@ export const LogsParsers: { [name: string]: LogsParser } = { }, }; +/** @deprecated will be removed in the next major version */ export function calculateFieldStats(rows: LogRowModel[], extractor: RegExp): LogLabelStatsModel[] { // Consider only rows that satisfy the matcher const rowsWithField = rows.filter((row) => extractor.test(row.entry)); @@ -124,6 +129,7 @@ export function calculateFieldStats(rows: LogRowModel[], extractor: RegExp): Log return getSortedCounts(countsByValue, rowCount); } +/** @deprecated will be removed in the next major version */ export function calculateLogsLabelStats(rows: LogRowModel[], label: string): LogLabelStatsModel[] { // Consider only rows that have the given label const rowsWithLabel = rows.filter((row) => row.labels[label] !== undefined); @@ -134,6 +140,7 @@ export function calculateLogsLabelStats(rows: LogRowModel[], label: string): Log return getSortedCounts(countsByValue, rowCount); } +/** @deprecated will be removed in the next major version */ export function calculateStats(values: unknown[]): LogLabelStatsModel[] { const nonEmptyValues = values.filter((value) => value !== undefined && value !== null); const countsByValue = countBy(nonEmptyValues); @@ -148,6 +155,7 @@ const getSortedCounts = (countsByValue: { [value: string]: number }, rowCount: n .value(); }; +/** @deprecated will be removed in the next major version */ export function getParser(line: string): LogsParser | undefined { let parser; try { @@ -163,6 +171,7 @@ export function getParser(line: string): LogsParser | undefined { return parser; } +/** @deprecated will be removed in the next major version */ export const sortInAscendingOrder = (a: LogRowModel, b: LogRowModel) => { // compare milliseconds if (a.timeEpochMs < b.timeEpochMs) { @@ -185,6 +194,7 @@ export const 
sortInAscendingOrder = (a: LogRowModel, b: LogRowModel) => { return 0; }; +/** @deprecated will be removed in the next major version */ export const sortInDescendingOrder = (a: LogRowModel, b: LogRowModel) => { // compare milliseconds if (a.timeEpochMs > b.timeEpochMs) { @@ -207,15 +217,18 @@ export const sortInDescendingOrder = (a: LogRowModel, b: LogRowModel) => { return 0; }; +/** @deprecated will be removed in the next major version */ export const sortLogsResult = (logsResult: LogsModel | null, sortOrder: LogsSortOrder): LogsModel => { const rows = logsResult ? sortLogRows(logsResult.rows, sortOrder) : []; return logsResult ? { ...logsResult, rows } : { hasUniqueLabels: false, rows }; }; +/** @deprecated will be removed in the next major version */ export const sortLogRows = (logRows: LogRowModel[], sortOrder: LogsSortOrder) => sortOrder === LogsSortOrder.Ascending ? logRows.sort(sortInAscendingOrder) : logRows.sort(sortInDescendingOrder); // Currently supports only error condition in Loki logs +/** @deprecated will be removed in the next major version */ export const checkLogsError = (logRow: LogRowModel): { hasError: boolean; errorMessage?: string } => { if (logRow.labels.__error__) { return { @@ -228,5 +241,6 @@ export const checkLogsError = (logRow: LogRowModel): { hasError: boolean; errorM }; }; +/** @deprecated will be removed in the next major version */ export const escapeUnescapedString = (string: string) => string.replace(/\\r\\n|\\n|\\t|\\r/g, (match: string) => (match.slice(1) === 't' ? '\t' : '\n')); diff --git a/packages/grafana-e2e-selectors/package.json b/packages/grafana-e2e-selectors/package.json index 9c71a7bbc92..45b1de30667 100644 --- a/packages/grafana-e2e-selectors/package.json +++ b/packages/grafana-e2e-selectors/package.json @@ -26,7 +26,8 @@ }, "files": [ "dist", - "CHANGELOG.md", + "./README.md", + "./CHANGELOG.md", "LICENSE_APACHE2" ], "scripts": { diff --git a/packages/grafana-e2e/package.json b/packages/grafana-e2e/package.json index e94f37a8481..bdbfa91a4e4 100644 --- a/packages/grafana-e2e/package.json +++ b/packages/grafana-e2e/package.json @@ -30,7 +30,8 @@ "dist", "cli.js", "cypress.json", - "CHANGELOG.md", + "./README.md", + "./CHANGELOG.md", "LICENSE_APACHE2" ], "scripts": { diff --git a/packages/grafana-runtime/package.json b/packages/grafana-runtime/package.json index ba5bee319c2..73f557c6965 100644 --- a/packages/grafana-runtime/package.json +++ b/packages/grafana-runtime/package.json @@ -24,7 +24,8 @@ }, "files": [ "dist", - "CHANGELOG.md", + "./README.md", + "./CHANGELOG.md", "LICENSE_APACHE2" ], "scripts": { diff --git a/packages/grafana-schema/package.json b/packages/grafana-schema/package.json index 66c9ac3206f..fe87ef1f45c 100644 --- a/packages/grafana-schema/package.json +++ b/packages/grafana-schema/package.json @@ -23,7 +23,8 @@ }, "files": [ "dist", - "CHANGELOG.md", + "./README.md", + "./CHANGELOG.md", "LICENSE_APACHE2" ], "scripts": { diff --git a/packages/grafana-schema/src/index.gen.ts b/packages/grafana-schema/src/index.gen.ts index a34df96996a..4ec9a38b62f 100644 --- a/packages/grafana-schema/src/index.gen.ts +++ b/packages/grafana-schema/src/index.gen.ts @@ -74,3 +74,9 @@ export { defaultFieldConfigSource, defaultFieldConfig } from './veneer/dashboard.types'; + +// Raw generated types from playlist entity type. +export type { Playlist } from './raw/playlist/x/playlist.gen'; + +// Raw generated default consts from playlist entity type. 
+export { defaultPlaylist } from './raw/playlist/x/playlist.gen'; diff --git a/packages/grafana-schema/src/raw/playlist/x/playlist.gen.ts b/packages/grafana-schema/src/raw/playlist/x/playlist.gen.ts new file mode 100644 index 00000000000..bfd1c192b1b --- /dev/null +++ b/packages/grafana-schema/src/raw/playlist/x/playlist.gen.ts @@ -0,0 +1,69 @@ +// This file is autogenerated. DO NOT EDIT. +// +// Generated by pkg/framework/coremodel/gen.go +// +// Derived from the Thema lineage declared in pkg/coremodel/playlist/coremodel.cue +// +// Run `make gen-cue` from repository root to regenerate. + +export interface Playlist { + /** + * Unique playlist identifier for internal use, set by Grafana. + */ + id: number; + /** + * Interval sets the time between switching views in a playlist. + * FIXME: Is this based on a standardized format or what options are available? Can datemath be used? + */ + interval: string; + /** + * The ordered list of items that the playlist will iterate over. + */ + items?: Array<{ + /** + * FIXME: The prefixDropper removes playlist from playlist_id, that doesn't work for us since it'll mean we'll have Id twice. + * ID of the playlist item for internal use by Grafana. Deprecated. + */ + id: number; + /** + * PlaylistID for the playlist containing the item. Deprecated. + */ + playlistid: number; + /** + * Type of the item. + */ + type: ('dashboard_by_uid' | 'dashboard_by_id' | 'dashboard_by_tag'); + /** + * Value depends on type and describes the playlist item. + * + * - dashboard_by_id: The value is an internal numerical identifier set by Grafana. This + * is not portable as the numerical identifier is non-deterministic between different instances. + * Will be replaced by dashboard_by_uid in the future. + * - dashboard_by_tag: The value is a tag which is set on any number of dashboards. All + * dashboards behind the tag will be added to the playlist. + */ + value: string; + /** + * Title is the human-readable identifier for the playlist item. + */ + title: string; + /** + * Order is the position in the list for the item. Deprecated. + */ + order: number; + }>; + /** + * Name of the playlist. + */ + name: string; + /** + * Unique playlist identifier. Generated on creation, either by the + * creator of the playlist of by the application. 
+ */ + uid: string; +} + +export const defaultPlaylist: Partial = { + interval: '5m', + items: [], +}; diff --git a/packages/grafana-toolkit/package.json b/packages/grafana-toolkit/package.json index f75a0e503a7..b67d92b072b 100644 --- a/packages/grafana-toolkit/package.json +++ b/packages/grafana-toolkit/package.json @@ -19,25 +19,26 @@ "grafana-toolkit": "./bin/grafana-toolkit.js" }, "publishConfig": { - "bin": { - "grafana-toolkit": "./dist/bin/grafana-toolkit.js" - }, "access": "public" }, "files": [ - "dist", - "README.md", - "CHANGELOG.md" + "config", + "src", + "sass", + "./README.md", + "./CHANGELOG.md", + "LICENSE_APACHE2" ], "scripts": { "build": "grafana-toolkit toolkit:build", - "clean": "rimraf ./dist ./compiled ./package.tgz", - "precommit": "npm run lint & npm run typecheck", + "clean": "rimraf ./dist ./compiled ./sass ./package.tgz", + "prepack": "mv ./src ./src_bak && cp -r ./dist/src ./src", + "postpack": "rimraf ./src && mv ./src_bak ./src", "typecheck": "tsc --noEmit" }, "main": "src/index.ts", "dependencies": { - "@babel/core": "^7.18.9", + "@babel/core": "7.18.9", "@babel/plugin-proposal-class-properties": "7.18.6", "@babel/plugin-proposal-nullish-coalescing-operator": "7.18.6", "@babel/plugin-proposal-object-rest-spread": "7.18.9", @@ -46,11 +47,11 @@ "@babel/plugin-transform-react-constant-elements": "7.18.9", "@babel/plugin-transform-runtime": "7.18.10", "@babel/plugin-transform-typescript": "7.19.0", - "@babel/preset-env": "^7.18.9", - "@babel/preset-react": "^7.18.6", - "@babel/preset-typescript": "^7.18.6", + "@babel/preset-env": "7.18.9", + "@babel/preset-react": "7.18.6", + "@babel/preset-typescript": "7.18.6", "@grafana/data": "9.3.0-pre", - "@grafana/eslint-config": "^4.0.0", + "@grafana/eslint-config": "5.0.0", "@grafana/tsconfig": "^1.2.0-rc1", "@grafana/ui": "9.3.0-pre", "@jest/core": "27.5.1", diff --git a/packages/grafana-toolkit/src/cli/tasks/toolkit.build.ts b/packages/grafana-toolkit/src/cli/tasks/toolkit.build.ts index ae80a8a6645..2711425b2a4 100644 --- a/packages/grafana-toolkit/src/cli/tasks/toolkit.build.ts +++ b/packages/grafana-toolkit/src/cli/tasks/toolkit.build.ts @@ -24,8 +24,6 @@ const compile = () => const copyFiles = () => { const files = [ - 'config/circleci/config.yml', - 'bin/grafana-toolkit.js', 'src/config/prettier.plugin.config.json', 'src/config/prettier.plugin.rc.js', 'src/config/tsconfig.plugin.json', @@ -60,12 +58,16 @@ const copyFiles = () => { const copySassFiles = () => { const files = ['_variables.generated.scss', '_variables.dark.generated.scss', '_variables.light.generated.scss']; + const exportDir = `${cwd}/sass`; return useSpinner(`Copy scss files ${files.join(', ')} files`, async () => { const sassDir = path.resolve(cwd, '../../public/sass/'); + if (!fs.existsSync(exportDir)) { + fs.mkdirSync(exportDir); + } const promises = files.map((file) => { return new Promise((resolve, reject) => { const name = file.replace('.generated', ''); - fs.copyFile(`${sassDir}/${file}`, `${distDir}/sass/${name}`, (err) => { + fs.copyFile(`${sassDir}/${file}`, `${exportDir}/${name}`, (err) => { if (err) { reject(err); return; @@ -89,8 +91,6 @@ const toolkitBuildTaskRunner: TaskRunner = async () => { await clean(); await compile(); - fs.mkdirSync('./dist/bin'); - fs.mkdirSync('./dist/sass'); await copyFiles(); await copySassFiles(); }; diff --git a/packages/grafana-ui/package.json b/packages/grafana-ui/package.json index a79f602719d..29dd855df5a 100644 --- a/packages/grafana-ui/package.json +++ b/packages/grafana-ui/package.json @@ -26,7 
+26,8 @@ }, "files": [ "dist", - "CHANGELOG.md", + "./README.md", + "./CHANGELOG.md", "LICENSE_APACHE2" ], "scripts": { @@ -79,7 +80,7 @@ "rc-slider": "9.7.5", "rc-time-picker": "^3.7.3", "react-beautiful-dnd": "13.1.0", - "react-calendar": "3.7.0", + "react-calendar": "3.9.0", "react-colorful": "5.5.1", "react-custom-scrollbars-2": "4.5.0", "react-dropzone": "14.2.2", diff --git a/packages/grafana-ui/src/components/ConfirmModal/ConfirmModal.tsx b/packages/grafana-ui/src/components/ConfirmModal/ConfirmModal.tsx index 8139228a81c..0251fbcfd0c 100644 --- a/packages/grafana-ui/src/components/ConfirmModal/ConfirmModal.tsx +++ b/packages/grafana-ui/src/components/ConfirmModal/ConfirmModal.tsx @@ -1,4 +1,4 @@ -import { css } from '@emotion/css'; +import { css, cx } from '@emotion/css'; import React, { useEffect, useRef, useState } from 'react'; import { GrafanaTheme2 } from '@grafana/data'; @@ -25,6 +25,8 @@ export interface ConfirmModalProps { dismissText?: string; /** Icon for the modal header */ icon?: IconName; + /** Additional styling for modal container */ + modalClass?: string; /** Text user needs to fill in before confirming */ confirmationText?: string; /** Text for alternative button */ @@ -46,6 +48,7 @@ export const ConfirmModal = ({ confirmationText, dismissText = 'Cancel', alternativeText, + modalClass, icon = 'exclamation-triangle', onConfirm, onDismiss, @@ -66,7 +69,7 @@ export const ConfirmModal = ({ }, [isOpen]); return ( [JSX lines garbled in extraction. Recoverable from the surrounding hunk: the returned Modal element's className changes so that it combines styles.modal with the new modalClass prop (cx is imported above for this); the modal body rendering {body} and, when present, {description} is unchanged context.] diff --git a/packages/grafana-ui/src/components/DataLinks/DataLinksContextMenu.tsx b/packages/grafana-ui/src/components/DataLinks/DataLinksContextMenu.tsx index 1bb1236eee8..2823c7c77c2 100644 --- a/packages/grafana-ui/src/components/DataLinks/DataLinksContextMenu.tsx +++ b/packages/grafana-ui/src/components/DataLinks/DataLinksContextMenu.tsx @@ -9,7 +9,7 @@ import { WithContextMenu } from '../ContextMenu/WithContextMenu'; import { MenuGroup, MenuItemsGroup } from '../Menu/MenuGroup'; import { MenuItem } from '../Menu/MenuItem'; -interface DataLinksContextMenuProps { +export interface DataLinksContextMenuProps { children: (props: DataLinksContextMenuApi) => JSX.Element; links: () => LinkModel[]; style?: CSSProperties; diff --git a/packages/grafana-ui/src/components/DateTimePickers/RelativeTimeRangePicker/RelativeTimeRangePicker.tsx b/packages/grafana-ui/src/components/DateTimePickers/RelativeTimeRangePicker/RelativeTimeRangePicker.tsx index 37bfb64fbcf..37af429b70d 100644 --- a/packages/grafana-ui/src/components/DateTimePickers/RelativeTimeRangePicker/RelativeTimeRangePicker.tsx +++ b/packages/grafana-ui/src/components/DateTimePickers/RelativeTimeRangePicker/RelativeTimeRangePicker.tsx @@ -63,7 +63,7 @@ export function RelativeTimeRangePicker(props: RelativeTimeRangePickerProps) { }; const onOpen = useCallback( - (event: FormEvent) => { + (event: FormEvent) => { [the generic type argument on FormEvent was stripped in extraction; the removed and added lines differ only in that argument] event.stopPropagation(); event.preventDefault(); setIsOpen(!isOpen); @@ -94,7 +94,7 @@ export function RelativeTimeRangePicker(props: RelativeTimeRangePickerProps) { return ( [JSX lines lost in extraction: a wrapping element in the picker is replaced, and the picker content is now rendered only when isOpen.]
diff --git a/packages/grafana-ui/src/components/DateTimePickers/TimeRangeInput.tsx b/packages/grafana-ui/src/components/DateTimePickers/TimeRangeInput.tsx index ec327943755..54e9125ff28 100644 --- a/packages/grafana-ui/src/components/DateTimePickers/TimeRangeInput.tsx +++ b/packages/grafana-ui/src/components/DateTimePickers/TimeRangeInput.tsx @@ -51,7 +51,7 @@ export const TimeRangeInput: FC = ({ const theme = useTheme2(); const styles = getStyles(theme, disabled); - const onOpen = (event: FormEvent) => { + const onOpen = (event: FormEvent) => { event.stopPropagation(); event.preventDefault(); if (disabled) { @@ -78,12 +78,7 @@ export const TimeRangeInput: FC = ({ return (
[JSX lines lost in extraction: a wrapping element is replaced and the input's picker content is now rendered only when isOpen; the FormEvent generic in the hunk above was likewise stripped, so its removed and added lines differ only in that type argument.] [diff header lost in extraction; the following styles hunk belongs to the Drawer component, packages/grafana-ui/src/components/Drawer/Drawer.tsx] { .drawer-open .drawer-content-wrapper { box-shadow: ${theme.shadows.z3}; } + z-index: ${theme.zIndex.dropdown}; + + ${theme.breakpoints.down('sm')} { + .drawer-content-wrapper { + width: 100% !important; + } + } `, header: css` background-color: ${theme.colors.background.canvas}; diff --git a/packages/grafana-ui/src/components/PageLayout/PageToolbar.tsx b/packages/grafana-ui/src/components/PageLayout/PageToolbar.tsx index 38e6b93e340..8b65ddc728e 100644 --- a/packages/grafana-ui/src/components/PageLayout/PageToolbar.tsx +++ b/packages/grafana-ui/src/components/PageLayout/PageToolbar.tsx @@ -168,7 +168,7 @@ const getStyles = (theme: GrafanaTheme2) => { `, pageIcon: css` display: none; - ${theme.breakpoints.up('md')} { + ${theme.breakpoints.up('sm')} { display: flex; padding-right: ${theme.spacing(1)}; align-items: center; diff --git a/packages/grafana-ui/src/components/VizLayout/VizLayout.tsx b/packages/grafana-ui/src/components/VizLayout/VizLayout.tsx index bae52e9001f..55faf5b1f75 100644 --- a/packages/grafana-ui/src/components/VizLayout/VizLayout.tsx +++ b/packages/grafana-ui/src/components/VizLayout/VizLayout.tsx @@ -40,9 +40,13 @@ export const VizLayout: VizLayoutComponentType = ({ width, height, legend, child if (!legend) { return (
[JSX lines garbled in extraction. Recoverable: in the no-legend branch, the old wrapping div around {children(width, height)} is replaced by a fragment whose inner div gains tabIndex={0}, preceded by the comments {/* tabIndex={0} is needed for keyboard accessibility in the plot area */} and {/* eslint-disable-next-line jsx-a11y/no-noninteractive-tabindex */}; a second hunk (@@ -88,6 +92,8 @@) adds the same two comments and tabIndex={0} to the div wrapping {size && children(size.width, size.height)}.]
diff --git a/packages/grafana-ui/src/components/index.ts b/packages/grafana-ui/src/components/index.ts index 5e5f3db4781..5db63c342c7 100644 --- a/packages/grafana-ui/src/components/index.ts +++ b/packages/grafana-ui/src/components/index.ts @@ -156,7 +156,11 @@ export { MenuItem, type MenuItemProps } from './Menu/MenuItem'; export { WithContextMenu } from './ContextMenu/WithContextMenu'; export { DataLinksInlineEditor } from './DataLinks/DataLinksInlineEditor/DataLinksInlineEditor'; export { DataLinkInput } from './DataLinks/DataLinkInput'; -export { DataLinksContextMenu } from './DataLinks/DataLinksContextMenu'; +export { + DataLinksContextMenu, + type DataLinksContextMenuProps, + type DataLinksContextMenuApi, +} from './DataLinks/DataLinksContextMenu'; export { SeriesIcon } from './VizLegend/SeriesIcon'; export { InfoBox } from './InfoBox/InfoBox'; export { FeatureBadge, FeatureInfoBox } from './InfoBox/FeatureInfoBox'; diff --git a/pkg/api/playlist.go b/pkg/api/playlist.go index 281f93af860..3909929a7ca 100644 --- a/pkg/api/playlist.go +++ b/pkg/api/playlist.go @@ -6,6 +6,7 @@ import ( "github.com/grafana/grafana/pkg/api/dtos" "github.com/grafana/grafana/pkg/api/response" + cmplaylist "github.com/grafana/grafana/pkg/coremodel/playlist" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/playlist" "github.com/grafana/grafana/pkg/web" @@ -83,13 +84,13 @@ func (hs *HTTPServer) GetPlaylist(c *models.ReqContext) response.Response { playlistDTOs, _ := hs.LoadPlaylistItemDTOs(c.Req.Context(), uid, c.OrgID) dto := &playlist.PlaylistDTO{ - Id: p.Id, - UID: p.UID, - Name: p.Name, - Interval: p.Interval, - OrgId: p.OrgId, - Items: playlistDTOs, + OrgId: p.OrgId, } + dto.Id = p.Id + dto.Uid = p.UID + dto.Name = p.Name + dto.Interval = p.Interval + dto.Items = &playlistDTOs return response.JSON(http.StatusOK, dto) } @@ -106,8 +107,8 @@ func (hs *HTTPServer) LoadPlaylistItemDTOs(ctx context.Context, uid string, orgI for _, item := range playlistitems { playlistDTOs = append(playlistDTOs, playlist.PlaylistItemDTO{ Id: item.Id, - PlaylistId: item.PlaylistId, - Type: item.Type, + Playlistid: item.PlaylistId, + Type: cmplaylist.PlaylistItemType(item.Type), Value: item.Value, Order: item.Order, Title: item.Title, @@ -244,7 +245,7 @@ func (hs *HTTPServer) UpdatePlaylist(c *models.ReqContext) response.Response { return response.Error(500, "Failed to save playlist", err) } - p.Items = playlistDTOs + p.Items = &playlistDTOs return response.JSON(http.StatusOK, p) } diff --git a/pkg/build/cmd/artifacts.go b/pkg/build/cmd/artifacts.go new file mode 100644 index 00000000000..53807878518 --- /dev/null +++ b/pkg/build/cmd/artifacts.go @@ -0,0 +1,22 @@ +package main + +import ( + "github.com/grafana/grafana/pkg/build/config" +) + +const ReleaseFolder = "release" +const EnterpriseSfx = "-enterprise" +const CacheSettings = "Cache-Control:public, max-age=" + +type PublishConfig struct { + config.Config + + Edition config.Edition + ReleaseMode config.ReleaseMode + GrafanaAPIKey string + WhatsNewURL string + ReleaseNotesURL string + DryRun bool + TTL string + SimulateRelease bool +} diff --git a/pkg/build/cmd/deb.go b/pkg/build/cmd/deb.go new file mode 100644 index 00000000000..8ec651a91ec --- /dev/null +++ b/pkg/build/cmd/deb.go @@ -0,0 +1,243 @@ +package main + +import ( + "fmt" + "log" + "os" + "os/exec" + "path" + "path/filepath" + "strings" + + "github.com/grafana/grafana/pkg/build/config" + "github.com/grafana/grafana/pkg/build/packaging" + "github.com/grafana/grafana/pkg/infra/fs" + 
"github.com/urfave/cli/v2" +) + +func writeAptlyConf(dbDir, repoDir string) error { + aptlyConf := fmt.Sprintf(`{ + "rootDir": "%s", + "downloadConcurrency": 4, + "downloadSpeedLimit": 0, + "architectures": [], + "dependencyFollowSuggests": false, + "dependencyFollowRecommends": false, + "dependencyFollowAllVariants": false, + "dependencyFollowSource": false, + "dependencyVerboseResolve": false, + "gpgDisableSign": false, + "gpgDisableVerify": false, + "gpgProvider": "gpg2", + "downloadSourcePackages": false, + "skipLegacyPool": true, + "ppaDistributorID": "ubuntu", + "ppaCodename": "", + "skipContentsPublishing": false, + "FileSystemPublishEndpoints": { + "repo": { + "rootDir": "%s", + "linkMethod": "copy" + } + }, + "S3PublishEndpoints": {}, + "SwiftPublishEndpoints": {} +} +`, dbDir, repoDir) + home, err := os.UserHomeDir() + if err != nil { + return err + } + return os.WriteFile(filepath.Join(home, ".aptly.conf"), []byte(aptlyConf), 0600) +} + +// downloadDebs downloads Deb packages. +func downloadDebs(cfg PublishConfig, workDir string) error { + if cfg.Bucket == "" { + panic("cfg.Bucket has to be set") + } + if !strings.HasSuffix(workDir, string(filepath.Separator)) { + workDir += string(filepath.Separator) + } + + var version string + if cfg.ReleaseMode.Mode == config.TagMode { + if cfg.ReleaseMode.IsBeta { + version = strings.ReplaceAll(cfg.Version, "-", "~") + } else { + version = cfg.Version + } + } + if version == "" { + panic(fmt.Sprintf("Unrecognized version mode %s", cfg.ReleaseMode.Mode)) + } + + var sfx string + switch cfg.Edition { + case config.EditionOSS: + case config.EditionEnterprise: + sfx = EnterpriseSfx + default: + return fmt.Errorf("unrecognized edition %q", cfg.Edition) + } + + u := fmt.Sprintf("gs://%s/%s/%s/grafana%s_%s_*.deb*", cfg.Bucket, + strings.ToLower(string(cfg.Edition)), ReleaseFolder, sfx, version) + log.Printf("Downloading Deb packages %q...\n", u) + args := []string{ + "-m", + "cp", + u, + workDir, + } + //nolint:gosec + cmd := exec.Command("gsutil", args...) + if output, err := cmd.CombinedOutput(); err != nil { + return fmt.Errorf("failed to download Deb packages %q: %w\n%s", u, err, output) + } + + return nil +} + +// updateDebRepo updates the Debian repository with the new release. 
+func updateDebRepo(cfg PublishConfig, workDir string) error { + if cfg.ReleaseMode.Mode != config.TagMode { + panic(fmt.Sprintf("Unsupported version mode: %s", cfg.ReleaseMode.Mode)) + } + + if cfg.ReleaseMode.IsTest { + if cfg.Config.DebDBBucket == packaging.DefaultDebDBBucket { + return fmt.Errorf("in test-release mode, the default Deb DB bucket shouldn't be used") + } + if cfg.Config.DebRepoBucket == packaging.DefaultDebRepoBucket { + return fmt.Errorf("in test-release mode, the default Deb repo bucket shouldn't be used") + } + } + + if err := downloadDebs(cfg, workDir); err != nil { + return err + } + + repoName := "grafana" + if cfg.ReleaseMode.IsBeta { + repoName = "beta" + } + + repoRoot := path.Join(os.TempDir(), "deb-repo") + defer func() { + if err := os.RemoveAll(repoRoot); err != nil { + log.Printf("Failed to remove temporary directory %q: %s\n", repoRoot, err.Error()) + } + }() + + dbDir := filepath.Join(repoRoot, "db") + repoDir := filepath.Join(repoRoot, "repo") + tmpDir := filepath.Join(repoRoot, "tmp") + for _, dpath := range []string{dbDir, repoDir, tmpDir} { + if err := os.MkdirAll(dpath, 0750); err != nil { + return err + } + } + + if err := writeAptlyConf(dbDir, repoDir); err != nil { + return err + } + + // Download the Debian repo database + u := fmt.Sprintf("gs://%s/%s", cfg.DebDBBucket, strings.ToLower(string(cfg.Edition))) + log.Printf("Downloading Debian repo database from %s...\n", u) + //nolint:gosec + cmd := exec.Command("gsutil", "-m", "rsync", "-r", "-d", u, dbDir) + if output, err := cmd.CombinedOutput(); err != nil { + return fmt.Errorf("failed to download Debian repo database: %w\n%s", err, output) + } + + if err := addPkgsToRepo(cfg, workDir, tmpDir, repoName); err != nil { + return err + } + + log.Println("Updating local Debian package repository...") + // Update published local repository. This assumes that there exists already a local, published repo. 
+ for _, tp := range []string{"stable", "beta"} { + passArg := fmt.Sprintf("-passphrase-file=%s", cfg.GPGPassPath) + //nolint:gosec + cmd := exec.Command("aptly", "publish", "update", "-batch", passArg, "-force-overwrite", tp, + "filesystem:repo:grafana") + if output, err := cmd.CombinedOutput(); err != nil { + return cli.NewExitError(fmt.Sprintf("failed to update Debian %q repository: %s", tp, output), 1) + } + } + + // Update database in GCS + u = fmt.Sprintf("gs://%s/%s", cfg.DebDBBucket, strings.ToLower(string(cfg.Edition))) + if cfg.DryRun { + log.Printf("Simulating upload of Debian repo database to GCS (%s)\n", u) + } else { + log.Printf("Uploading Debian repo database to GCS (%s)...\n", u) + //nolint:gosec + cmd = exec.Command("gsutil", "-m", "rsync", "-r", "-d", dbDir, u) + if output, err := cmd.CombinedOutput(); err != nil { + return cli.NewExitError(fmt.Sprintf("failed to upload Debian repo database to GCS: %s", output), 1) + } + } + + // Update metadata and binaries in repository bucket + u = fmt.Sprintf("gs://%s/%s/deb", cfg.DebRepoBucket, strings.ToLower(string(cfg.Edition))) + grafDir := filepath.Join(repoDir, "grafana") + if cfg.DryRun { + log.Printf("Simulating upload of Debian repo resources to GCS (%s)\n", u) + } else { + log.Printf("Uploading Debian repo resources to GCS (%s)...\n", u) + //nolint:gosec + cmd = exec.Command("gsutil", "-m", "rsync", "-r", "-d", grafDir, u) + if output, err := cmd.CombinedOutput(); err != nil { + return cli.NewExitError(fmt.Sprintf("failed to upload Debian repo resources to GCS: %s", output), 1) + } + allRepoResources := fmt.Sprintf("%s/**/*", u) + log.Printf("Setting cache ttl for Debian repo resources on GCS (%s)...\n", allRepoResources) + //nolint:gosec + cmd = exec.Command("gsutil", "-m", "setmeta", "-h", CacheSettings+cfg.TTL, allRepoResources) + if output, err := cmd.CombinedOutput(); err != nil { + return cli.NewExitError(fmt.Sprintf("failed to set cache ttl for Debian repo resources on GCS: %s", output), 1) + } + } + + return nil +} + +func addPkgsToRepo(cfg PublishConfig, workDir, tmpDir, repoName string) error { + var sfx string + switch cfg.Edition { + case config.EditionOSS: + case config.EditionEnterprise: + sfx = EnterpriseSfx + default: + return fmt.Errorf("unsupported edition %q", cfg.Edition) + } + + log.Printf("Adding packages to Debian %q repo...\n", repoName) + // TODO: Be more specific about filename pattern + debs, err := filepath.Glob(filepath.Join(workDir, fmt.Sprintf("grafana%s*.deb", sfx))) + if err != nil { + return err + } + for _, deb := range debs { + basename := filepath.Base(deb) + if strings.Contains(basename, "latest") { + continue + } + + tgt := filepath.Join(tmpDir, basename) + if err := fs.CopyFile(deb, tgt); err != nil { + return err + } + } + // XXX: Adds too many packages in enterprise (Arve: What does this mean exactly?) 
+ //nolint:gosec + cmd := exec.Command("aptly", "repo", "add", "-force-replace", repoName, tmpDir) + if output, err := cmd.CombinedOutput(); err != nil { + return cli.NewExitError(fmt.Sprintf("failed to add packages to local Debian repository: %s", output), 1) + } + + return nil +} diff --git a/pkg/build/cmd/flags.go b/pkg/build/cmd/flags.go index 3ae24027d76..7aa6d98d026 100644 --- a/pkg/build/cmd/flags.go +++ b/pkg/build/cmd/flags.go @@ -37,4 +37,13 @@ var ( Name: "sign", Usage: "Enable plug-in signing (you must set GRAFANA_API_KEY)", } + dryRunFlag = cli.BoolFlag{ + Name: "dry-run", + Usage: "Only simulate actions", + } + gcpKeyFlag = cli.StringFlag{ + Name: "gcp-key", + Usage: "Google Cloud Platform key file", + Required: true, + } ) diff --git a/pkg/build/cmd/main.go b/pkg/build/cmd/main.go index ccf894fc8f0..66c5e801d07 100644 --- a/pkg/build/cmd/main.go +++ b/pkg/build/cmd/main.go @@ -6,6 +6,7 @@ import ( "strings" "github.com/grafana/grafana/pkg/build/docker" + "github.com/grafana/grafana/pkg/build/packaging" "github.com/urfave/cli/v2" ) @@ -169,6 +170,54 @@ func main() { }, }, }, + { + Name: "publish", + Usage: "Publish packages to grafana.com and repositories", + Subcommands: cli.Commands{ + { + Name: "packages", + Usage: "publish Grafana packages", + ArgsUsage: "[version]", + Action: PublishPackages, + Flags: []cli.Flag{ + &jobsFlag, + &editionFlag, + &buildIDFlag, + &dryRunFlag, + &gcpKeyFlag, + &cli.StringFlag{ + Name: "packages-bucket", + Value: "grafana-downloads", + Usage: "Google Cloud Storage bucket for Grafana packages", + }, + &cli.StringFlag{ + Name: "deb-db-bucket", + Value: packaging.DefaultDebDBBucket, + Usage: "Google Cloud Storage Debian database bucket", + }, + &cli.StringFlag{ + Name: "deb-repo-bucket", + Value: packaging.DefaultDebRepoBucket, + Usage: "Google Cloud Storage Debian repo bucket", + }, + &cli.StringFlag{ + Name: "rpm-repo-bucket", + Value: packaging.DefaultRPMRepoBucket, + Usage: "Google Cloud Storage RPM repo bucket", + }, + &cli.StringFlag{ + Name: "ttl", + Value: packaging.DefaultTTLSeconds, + Usage: "Cache time to live for uploaded packages", + }, + &cli.BoolFlag{ + Name: "simulate-release", + Usage: "Only simulate creating release at grafana.com", + }, + }, + }, + }, + }, } if err := app.Run(os.Args); err != nil { diff --git a/pkg/build/cmd/publishpackages.go b/pkg/build/cmd/publishpackages.go new file mode 100644 index 00000000000..b640688f261 --- /dev/null +++ b/pkg/build/cmd/publishpackages.go @@ -0,0 +1,112 @@ +package main + +import ( + "fmt" + "log" + "os" + "strings" + + "github.com/grafana/grafana/pkg/build/config" + "github.com/grafana/grafana/pkg/build/gcloud" + "github.com/grafana/grafana/pkg/build/gpg" + "github.com/urfave/cli/v2" +) + +// PublishPackages implements the sub-command "publish packages".
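The flag definitions registered above follow the usual urfave/cli/v2 pattern: a flag declared on the `packages` subcommand is read back by name from the cli.Context inside the Action, which is how PublishPackages below consumes them. A reduced, self-contained sketch of that plumbing (names and defaults here are placeholders, not Grafana's real wiring):

```go
package main

import (
	"fmt"
	"log"
	"os"

	"github.com/urfave/cli/v2"
)

func main() {
	app := &cli.App{
		Commands: []*cli.Command{{
			Name: "publish",
			Subcommands: []*cli.Command{{
				Name:      "packages",
				ArgsUsage: "[version]",
				Flags: []cli.Flag{
					&cli.BoolFlag{Name: "dry-run", Usage: "Only simulate actions"},
					&cli.StringFlag{Name: "deb-db-bucket", Value: "example-deb-db"},
				},
				Action: func(c *cli.Context) error {
					// c.Bool / c.String look flags up by name, exactly as
					// PublishPackages does with its cli.Context.
					fmt.Println("dry-run:", c.Bool("dry-run"), "bucket:", c.String("deb-db-bucket"))
					return nil
				},
			}},
		}},
	}
	if err := app.Run(os.Args); err != nil {
		log.Fatal(err)
	}
}
```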
+func PublishPackages(c *cli.Context) error { + if err := gcloud.ActivateServiceAccount(); err != nil { + return fmt.Errorf("couldn't activate service account, err: %w", err) + } + + metadata, err := GenerateMetadata(c) + if err != nil { + return err + } + + releaseMode, err := metadata.GetReleaseMode() + if err != nil { + return err + } + + dryRun := c.Bool("dry-run") + simulateRelease := c.Bool("simulate-release") + // Test release mode and dryRun imply simulateRelease + if releaseMode.IsTest || dryRun { + simulateRelease = true + } + + grafanaAPIKey := strings.TrimSpace(os.Getenv("GRAFANA_COM_API_KEY")) + if grafanaAPIKey == "" { + return cli.NewExitError("the environment variable GRAFANA_COM_API_KEY must be set", 1) + } + + edition := config.Edition(c.String("edition")) + + // TODO: Verify config values + cfg := PublishConfig{ + Config: config.Config{ + Version: metadata.GrafanaVersion, + Bucket: c.String("packages-bucket"), + DebDBBucket: c.String("deb-db-bucket"), + DebRepoBucket: c.String("deb-repo-bucket"), + RPMRepoBucket: c.String("rpm-repo-bucket"), + }, + Edition: edition, + ReleaseMode: releaseMode, + GrafanaAPIKey: grafanaAPIKey, + DryRun: dryRun, + TTL: c.String("ttl"), + SimulateRelease: simulateRelease, + } + if err := gpg.LoadGPGKeys(&cfg.Config); err != nil { + return err + } + defer gpg.RemoveGPGFiles(cfg.Config) + + // Only update package manager repos for releases. + // In test release mode, the operator should configure different GCS buckets for the package repos, + // so this should be safe. + if cfg.ReleaseMode.Mode == config.TagMode { + workDir := os.TempDir() + defer func() { + if err := os.RemoveAll(workDir); err != nil { + log.Printf("Failed to remove temporary directory %q: %s\n", workDir, err.Error()) + } + }() + if err := updatePkgRepos(cfg, workDir); err != nil { + return err + } + } + + log.Println("Successfully published packages!") + return nil +} + +// updatePkgRepos updates package manager repositories. +func updatePkgRepos(cfg PublishConfig, workDir string) error { + if err := gpg.Import(cfg.Config); err != nil { + return err + } + + // If updating the Deb repo fails, still continue with the RPM repo, so we don't have to retry + // both by hand + debErr := updateDebRepo(cfg, workDir) + if debErr != nil { + log.Printf("Updating Deb repo failed: %s\n", debErr) + } + rpmErr := updateRPMRepo(cfg, workDir) + if rpmErr != nil { + log.Printf("Updating RPM repo failed: %s\n", rpmErr) + } + + if debErr != nil { + return debErr + } + if rpmErr != nil { + return rpmErr + } + + log.Println("Updated Deb and RPM repos successfully!") + + return nil +} diff --git a/pkg/build/cmd/rpm.go b/pkg/build/cmd/rpm.go new file mode 100644 index 00000000000..ef5725c959c --- /dev/null +++ b/pkg/build/cmd/rpm.go @@ -0,0 +1,365 @@ +package main + +import ( + "bytes" + "crypto" + "fmt" + "log" + "os" + "os/exec" + "path" + "path/filepath" + "strings" + + "github.com/grafana/grafana/pkg/build/config" + "github.com/grafana/grafana/pkg/build/packaging" + "github.com/grafana/grafana/pkg/infra/fs" + "golang.org/x/crypto/openpgp" + "golang.org/x/crypto/openpgp/armor" + "golang.org/x/crypto/openpgp/packet" +) + +// updateRPMRepo updates the RPM repository with the new release.
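The three booleans in PublishPackages above interact in one direction only: test-release mode or --dry-run force simulate-release on, while --simulate-release by itself leaves dry-run off, so repo uploads still run. A compact restatement of that rule (a sketch over the same three inputs, not code from this patch):

```go
package main

import "fmt"

// effectiveSimulate mirrors the implication in PublishPackages:
// test-release mode or a dry run always implies simulate-release.
func effectiveSimulate(isTest, dryRun, simulateFlag bool) bool {
	return simulateFlag || isTest || dryRun
}

func main() {
	for _, c := range []struct{ isTest, dryRun, flag bool }{
		{false, false, false}, // real release: nothing simulated
		{true, false, false},  // test mode: simulate forced on
		{false, true, false},  // dry run: simulate forced on
		{false, false, true},  // explicit flag only: repos still updated
	} {
		fmt.Printf("isTest=%v dryRun=%v flag=%v -> simulate=%v\n",
			c.isTest, c.dryRun, c.flag, effectiveSimulate(c.isTest, c.dryRun, c.flag))
	}
}
```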
+func updateRPMRepo(cfg PublishConfig, workDir string) error { + if cfg.ReleaseMode.Mode != config.TagMode { + panic(fmt.Sprintf("Unsupported version mode %s", cfg.ReleaseMode.Mode)) + } + + if cfg.ReleaseMode.IsTest && cfg.Config.RPMRepoBucket == packaging.DefaultRPMRepoBucket { + return fmt.Errorf("in test-release mode, the default RPM repo bucket shouldn't be used") + } + + if err := downloadRPMs(cfg, workDir); err != nil { + return err + } + + repoRoot := path.Join(os.TempDir(), "rpm-repo") + defer func() { + if err := os.RemoveAll(repoRoot); err != nil { + log.Printf("Failed to remove %q: %s\n", repoRoot, err.Error()) + } + }() + + repoName := "rpm" + if cfg.ReleaseMode.IsBeta { + repoName = "rpm-beta" + } + folderURI := fmt.Sprintf("gs://%s/%s/%s", cfg.RPMRepoBucket, strings.ToLower(string(cfg.Edition)), repoName) + + // Download the RPM database + log.Printf("Downloading RPM database from GCS (%s)...\n", folderURI) + //nolint:gosec + cmd := exec.Command("gsutil", "-m", "rsync", "-r", "-d", folderURI, repoRoot) + if output, err := cmd.CombinedOutput(); err != nil { + return fmt.Errorf("failed to download RPM database from GCS: %w\n%s", err, output) + } + + // Add the new release to the repo + var sfx string + switch cfg.Edition { + case config.EditionOSS: + case config.EditionEnterprise: + sfx = EnterpriseSfx + default: + return fmt.Errorf("unsupported edition %q", cfg.Edition) + } + allRPMs, err := filepath.Glob(filepath.Join(workDir, fmt.Sprintf("grafana%s-*.rpm", sfx))) + if err != nil { + return fmt.Errorf("failed to list RPMs in %q: %w", workDir, err) + } + rpms := []string{} + for _, rpm := range allRPMs { + if strings.Contains(rpm, "-latest") { + continue + } + + rpms = append(rpms, rpm) + } + // XXX: What does the following comment mean? + // adds to many files for enterprise + for _, rpm := range rpms { + if err := fs.CopyFile(rpm, filepath.Join(repoRoot, filepath.Base(rpm))); err != nil { + return err + } + } + + //nolint:gosec + cmd = exec.Command("createrepo", repoRoot) + if output, err := cmd.CombinedOutput(); err != nil { + return fmt.Errorf("failed to create repo at %q: %w\n%s", repoRoot, err, output) + } + + if err := signRPMRepo(repoRoot, cfg); err != nil { + return err + } + + // Update the repo in GCS + + // Sync packages first to avoid cache misses + if cfg.DryRun { + log.Printf("Simulating upload of RPMs to GCS (%s)\n", folderURI) + } else { + log.Printf("Uploading RPMs to GCS (%s)...\n", folderURI) + args := []string{"-m", "cp"} + args = append(args, rpms...) + args = append(args, folderURI) + //nolint:gosec + cmd = exec.Command("gsutil", args...) 
+ if output, err := cmd.CombinedOutput(); err != nil { + return fmt.Errorf("failed to upload RPMs to GCS: %w\n%s", err, output) + } + } + + if cfg.DryRun { + log.Printf("Simulating upload of RPM repo metadata to GCS (%s)\n", folderURI) + } else { + log.Printf("Uploading RPM repo metadata to GCS (%s)...\n", folderURI) + //nolint:gosec + cmd = exec.Command("gsutil", "-m", "rsync", "-r", "-d", repoRoot, folderURI) + if output, err := cmd.CombinedOutput(); err != nil { + return fmt.Errorf("failed to upload RPM repo metadata to GCS: %w\n%s", err, output) + } + allRepoResources := fmt.Sprintf("%s/**/*", folderURI) + log.Printf("Setting cache ttl for RPM repo resources on GCS (%s)...\n", allRepoResources) + //nolint:gosec + cmd = exec.Command("gsutil", "-m", "setmeta", "-h", CacheSettings+cfg.TTL, allRepoResources) + if output, err := cmd.CombinedOutput(); err != nil { + return fmt.Errorf("failed to set cache ttl for RPM repo resources on GCS: %w\n%s", err, output) + } + } + + return nil +} + +// downloadRPMs downloads RPM packages. +func downloadRPMs(cfg PublishConfig, workDir string) error { + if !strings.HasSuffix(workDir, string(filepath.Separator)) { + workDir += string(filepath.Separator) + } + var version string + if cfg.ReleaseMode.Mode == config.TagMode { + if cfg.ReleaseMode.IsBeta { + version = strings.ReplaceAll(cfg.Version, "-", "~") + } else { + version = cfg.Version + } + } + if version == "" { + panic(fmt.Sprintf("Unrecognized version mode %s", cfg.ReleaseMode.Mode)) + } + + var sfx string + switch cfg.Edition { + case config.EditionOSS: + case config.EditionEnterprise: + sfx = EnterpriseSfx + default: + return fmt.Errorf("unrecognized edition %q", cfg.Edition) + } + + u := fmt.Sprintf("gs://%s/%s/%s/grafana%s-%s-*.*.rpm*", cfg.Bucket, + strings.ToLower(string(cfg.Edition)), ReleaseFolder, sfx, version) + log.Printf("Downloading RPM packages %q...\n", u) + args := []string{ + "-m", + "cp", + u, + workDir, + } + //nolint:gosec + cmd := exec.Command("gsutil", args...) 
+ if output, err := cmd.CombinedOutput(); err != nil { + return fmt.Errorf("failed to download RPM packages %q: %w\n%s", u, err, output) + } + + return nil +} + +func getPublicKey(cfg PublishConfig) (*packet.PublicKey, error) { + f, err := os.Open(cfg.GPGPublicKey) + if err != nil { + return nil, fmt.Errorf("failed to open %q: %w", cfg.GPGPublicKey, err) + } + defer func(f *os.File) { + err := f.Close() + if err != nil { + return + } + }(f) + + block, err := armor.Decode(f) + if err != nil { + return nil, err + } + + if block.Type != openpgp.PublicKeyType { + return nil, fmt.Errorf("invalid public key block type: %q", block.Type) + } + + packetReader := packet.NewReader(block.Body) + pkt, err := packetReader.Next() + if err != nil { + return nil, err + } + + key, ok := pkt.(*packet.PublicKey) + if !ok { + return nil, fmt.Errorf("got non-public key from packet reader: %T", pkt) + } + + return key, nil +} + +func getPrivateKey(cfg PublishConfig) (*packet.PrivateKey, error) { + f, err := os.Open(cfg.GPGPrivateKey) + if err != nil { + return nil, fmt.Errorf("failed to open %q: %w", cfg.GPGPrivateKey, err) + } + defer func(f *os.File) { + err := f.Close() + if err != nil { + return + } + }(f) + + passphraseB, err := os.ReadFile(cfg.GPGPassPath) + if err != nil { + return nil, fmt.Errorf("failed to read %q: %w", cfg.GPGPassPath, err) + } + passphraseB = bytes.TrimSuffix(passphraseB, []byte("\n")) + + block, err := armor.Decode(f) + if err != nil { + return nil, err + } + + if block.Type != openpgp.PrivateKeyType { + return nil, fmt.Errorf("invalid private key block type: %q", block.Type) + } + + packetReader := packet.NewReader(block.Body) + pkt, err := packetReader.Next() + if err != nil { + return nil, err + } + + key, ok := pkt.(*packet.PrivateKey) + if !ok { + return nil, fmt.Errorf("got non-private key from packet reader: %T", pkt) + } + + if err := key.Decrypt(passphraseB); err != nil { + return nil, fmt.Errorf("failed to decrypt private key: %w", err) + } + return key, nil +} + +// signRPMRepo signs an RPM repository using PGP. +// The signature gets written to the file repodata/repomd.xml.asc.
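A natural smoke test for the signing step below is to verify repodata/repomd.xml.asc against the public key. A sketch using the same golang.org/x/crypto/openpgp package (file paths are placeholders):

```go
package main

import (
	"log"
	"os"

	"golang.org/x/crypto/openpgp"
)

func main() {
	pub, err := os.Open("pub.key") // armored public key, placeholder path
	if err != nil {
		log.Fatal(err)
	}
	defer pub.Close()
	keyring, err := openpgp.ReadArmoredKeyRing(pub)
	if err != nil {
		log.Fatal(err)
	}

	signed, err := os.Open("repodata/repomd.xml")
	if err != nil {
		log.Fatal(err)
	}
	defer signed.Close()
	sig, err := os.Open("repodata/repomd.xml.asc")
	if err != nil {
		log.Fatal(err)
	}
	defer sig.Close()

	// CheckArmoredDetachedSignature returns the signing entity on success.
	if _, err := openpgp.CheckArmoredDetachedSignature(keyring, signed, sig); err != nil {
		log.Fatalf("signature verification failed: %v", err)
	}
	log.Println("repomd.xml signature OK")
}
```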
+func signRPMRepo(repoRoot string, cfg PublishConfig) error { + if cfg.GPGPublicKey == "" || cfg.GPGPrivateKey == "" { + return fmt.Errorf("private or public key is empty") + } + + log.Printf("Signing RPM repo") + + pubKey, err := getPublicKey(cfg) + if err != nil { + return err + } + + privKey, err := getPrivateKey(cfg) + if err != nil { + return err + } + + pcfg := packet.Config{ + DefaultHash: crypto.SHA256, + DefaultCipher: packet.CipherAES256, + DefaultCompressionAlgo: packet.CompressionZLIB, + CompressionConfig: &packet.CompressionConfig{ + Level: 9, + }, + RSABits: 4096, + } + currentTime := pcfg.Now() + uid := packet.NewUserId("", "", "") + + isPrimaryID := false + keyLifetimeSecs := uint32(86400 * 365) + signer := openpgp.Entity{ + PrimaryKey: pubKey, + PrivateKey: privKey, + Identities: map[string]*openpgp.Identity{ + uid.Id: { + Name: uid.Name, + UserId: uid, + SelfSignature: &packet.Signature{ + CreationTime: currentTime, + SigType: packet.SigTypePositiveCert, + PubKeyAlgo: packet.PubKeyAlgoRSA, + Hash: pcfg.Hash(), + IsPrimaryId: &isPrimaryID, + FlagsValid: true, + FlagSign: true, + FlagCertify: true, + IssuerKeyId: &pubKey.KeyId, + }, + }, + }, + Subkeys: []openpgp.Subkey{ + { + PublicKey: pubKey, + PrivateKey: privKey, + Sig: &packet.Signature{ + CreationTime: currentTime, + SigType: packet.SigTypeSubkeyBinding, + PubKeyAlgo: packet.PubKeyAlgoRSA, + Hash: pcfg.Hash(), + PreferredHash: []uint8{8}, // SHA-256 + FlagsValid: true, + FlagEncryptStorage: true, + FlagEncryptCommunications: true, + IssuerKeyId: &pubKey.KeyId, + KeyLifetimeSecs: &keyLifetimeSecs, + }, + }, + }, + } + + // Ignore gosec G304 as this function is only used in the build process. + //nolint:gosec + freader, err := os.Open(filepath.Join(repoRoot, "repodata", "repomd.xml")) + if err != nil { + return err + } + defer func(freader *os.File) { + err := freader.Close() + if err != nil { + return + } + }(freader) + + // Ignore gosec G304 as this function is only used in the build process. 
+ //nolint:gosec + sigwriter, err := os.Create(filepath.Join(repoRoot, "repodata", "repomd.xml.asc")) + if err != nil { + return err + } + defer func(sigwriter *os.File) { + err := sigwriter.Close() + if err != nil { + return + } + }(sigwriter) + + if err := openpgp.ArmoredDetachSignText(sigwriter, &signer, freader, nil); err != nil { + return fmt.Errorf("failed to write PGP signature: %w", err) + } + + if err := sigwriter.Close(); err != nil { + return fmt.Errorf("failed to write PGP signature: %w", err) + } + + return nil +} diff --git a/pkg/build/cmd/rpm_test.go b/pkg/build/cmd/rpm_test.go new file mode 100644 index 00000000000..60d8ac5f50a --- /dev/null +++ b/pkg/build/cmd/rpm_test.go @@ -0,0 +1,146 @@ +package main + +import ( + "os" + "path/filepath" + "testing" + + "github.com/grafana/grafana/pkg/build/config" + "github.com/stretchr/testify/require" +) + +const pubKey = `-----BEGIN PGP PUBLIC KEY BLOCK----- +Version: OpenPGP.js v4.10.10 +Comment: https://openpgpjs.org + +xsBNBGM1b9wBCADZM49X7vwOS93KbgA6yhpwrYf8ZlzksGcDaYgp1IzvqHbs +xeU1mmBYVH/bSKRDG0tt3Qdky4Nvl4Oqd+g0e2ZGjmlEy9zUiPTTK/BtXT+5 +s8oqih2NIAkyF91BNZABAgvh/vJdYImhYeUQBDqMJgqZ/Y/Ha31N7rSW+jUt +LHspbN0ztJYjuEd/bg2NKH7Gs/AyNvX9IQTC4k7iRRafx7q/PBCVtsk+NCwz +BEkL93xpAdcdYiMNrRP2eIHQjBmNZ/oUCkcDsLCBvcSq6P2lGpNnpPzVoTJf +v2qrWkVn5txJJsOkmBGpEDbECPunVilrWO6RPomP0yYkr6NE4XeCJ3QhABEB +AAHNGWR1bW15IDxkdW1teUBob3RtYWlsLmNvbT7CwI0EEAEIACAFAmM1b9wG +CwkHCAMCBBUICgIEFgIBAAIZAQIbAwIeAQAhCRAoJ1i5w6kkAxYhBCQv+iwt +IFn7vj9PLygnWLnDqSQDPxkH/0Ju2Cah+bOxl09uv2Ft2BVlQh0u+wJyRVgs +KxTxldAXFZwMrN4wK/GUoGWDiy2tzNtoVE6GpxWUj+LvSGFaVLNVjW+Le77I +BP/sl1wKHJbQhseKc7Mz5Zj3i0F1FPM+rLik7tNk6kiEBqYVyyXahyT98Hu1 +1OKEV+8NiRG47iNgd/dpgEdVSS4DN/dL6m5q+CVy9YnlR+wXxF/2xcMmWBzR +V2cPVw0JzunpUV8lDDQ/n1sPw61D3oL1aH0bkn8aA8pEceKOVIYOaja7LkLX +uSlROlALA/M2fuubradW9I3FcrJNn+/xA52el2l/Hn/Syf9GQV/Ll/R+qKIo +Z57xWd7OwE0EYzVv3AEIAJl/PNYOF2prNKY58BfZ74XurDb9mNlZ1wsIqrOu +J/euzHEnzkCAjMUuXV7wcugjQlmpcZn6Y0QmQ2uX7SwPCMovDvngbXeAfbdd +6FUKecQ0sG54Plm8HSMNdjetdUVl7ACxjJO8Rdc/Asx7ua7gMm42CVfqMj4L +qN5foUBlaKJ1iGKUpQ+673UQWMYeOBuu9G8awbSzGaphN97CIX7xEMGzGeff +yHLHK+MsfX935uDgDwJQzxJKEugIJDMKgWOLgVz1jRCsJKHlywHTWpVuMiKY +Wnuq4tDNLBUQtaRL7uclG7Wejw/XNN0uD/zNHPgF5rmlYHVhrtDbBCP2XqTn +WU8AEQEAAcLAdgQYAQgACQUCYzVv3AIbDAAhCRAoJ1i5w6kkAxYhBCQv+iwt +IFn7vj9PLygnWLnDqSQDFqYH/AkdNaPUQlE7RQBigNRGOFBuqjhbLsV/rZf+ +/4K6wDHojM606lgLm57T4NUXnk53VIF3KO8+v8N11mCtPb+zBngfvVU14COC +HEDNdOK19TlR+tH25cftfUiF+OJsgMQysErGuFEtwLE6TNzpQIcnw7SbjxMr +EGacF9xCBKexB6MlR3GwJ2LBUJm3Lq/fvqImztoTlKDsrpk4JOH5FfYG+G2f +1zU73fVsCCElX4qA/49rRQf0RNfhjRjmHULP8hSvCXUEhfiBggEgxof/vKlC +qauHC55luuIeabju8HaXTjpz019cq+3IUgewX/ky0PhQXEW9SoODKabPY2yS +yUbHFm4= +=OCSx +-----END PGP PUBLIC KEY BLOCK----- +` + +const privKey = `-----BEGIN PGP PRIVATE KEY BLOCK----- +Version: OpenPGP.js v4.10.10 +Comment: https://openpgpjs.org + +xcMGBGM1b9wBCADZM49X7vwOS93KbgA6yhpwrYf8ZlzksGcDaYgp1IzvqHbs +xeU1mmBYVH/bSKRDG0tt3Qdky4Nvl4Oqd+g0e2ZGjmlEy9zUiPTTK/BtXT+5 +s8oqih2NIAkyF91BNZABAgvh/vJdYImhYeUQBDqMJgqZ/Y/Ha31N7rSW+jUt +LHspbN0ztJYjuEd/bg2NKH7Gs/AyNvX9IQTC4k7iRRafx7q/PBCVtsk+NCwz +BEkL93xpAdcdYiMNrRP2eIHQjBmNZ/oUCkcDsLCBvcSq6P2lGpNnpPzVoTJf +v2qrWkVn5txJJsOkmBGpEDbECPunVilrWO6RPomP0yYkr6NE4XeCJ3QhABEB +AAH+CQMIuDEg1p2Y6zbg0EQ3JvsP7VQBGsuXg9khTjktoxhwici/d+rcIW7Q +SuKWJGqs83LTeeGmS+9etNtf3LqRdPnI7f0qbT47mAqvp2gn7Rvbrabk+5Jj +AQS/DDLlWNiWsPrMBMZ7TZpiQ+g7gnIZaV10taFupYJr69AjtED+NPu8LOvZ +2ItK9xBqOwl5mkNe7ps/uTT6jwYSWxeObp4ymnLDLONY3eHuaYP9QB/NSlw0 +80Wo5qBPljlU8JdbEoLFU4gY6wkEbLa/DVbEVXSHfWVtr8jZbzHW39TBxpG2 
+Dxk52EVyu8Gf9XIQN2ZjDP3CzBGmlxJjLxLUD4GmRSPaDGK7LCN9ZztaXy3Y +WtF6RJfNzEoDdCaV0kkM3AskQDsQ+CWsDVsbbQyDtfncVG6cDzqmoDrBCSq1 +Bsoz07k2hj9VP0aP2xU78qcpJWO2rmhAHy9W2NqjXSBJriy1JXrK5o2/lUUr +94R8NLvqeVbInUw/zovVctaujHIBhNKL9wn2T0LWrA2OEJUz0HWo6ZQSaNzl +Obtz0M8gCj/4sDYjRAiDk50FzOcZp8ijYQFVypQTVzHki5T/JfvBnMpo+4Uc +93QB1woyiZuJCIj7DpY3MkZ5fTDtgJPa+0k8r+lPnAmE6auGUaH7JRKhbBu0 +8faDwaiSv3kD3EEDffoWX/axLLYta9jTDnitTXbf1jY03pdJeiU/ZX0BQTZi +pehZ/6yi/qXM/F8HDVEWriSLqVsMLrXXeFIojAc3fJ/QPpAZSx6E/Fe2xh8c +yURov5krU1zNJDwqC3SjHsHQ/UlLtamDDmmuXX+xb1CwIDd6WksGsCbe/LoN +TxViV4hOjIeh5TwRP5jQaqsVKCT8fzoDrRXy76taT+Zaaen+J6rC51HQwyEq +Qgf1e7WodzN3r10UV6/L/wNkfdWJgf5MzRlkdW1teSA8ZHVtbXlAaG90bWFp +bC5jb20+wsCNBBABCAAgBQJjNW/cBgsJBwgDAgQVCAoCBBYCAQACGQECGwMC +HgEAIQkQKCdYucOpJAMWIQQkL/osLSBZ+74/Ty8oJ1i5w6kkAz8ZB/9Cbtgm +ofmzsZdPbr9hbdgVZUIdLvsCckVYLCsU8ZXQFxWcDKzeMCvxlKBlg4strczb +aFROhqcVlI/i70hhWlSzVY1vi3u+yAT/7JdcChyW0IbHinOzM+WY94tBdRTz +Pqy4pO7TZOpIhAamFcsl2ock/fB7tdTihFfvDYkRuO4jYHf3aYBHVUkuAzf3 +S+puavglcvWJ5UfsF8Rf9sXDJlgc0VdnD1cNCc7p6VFfJQw0P59bD8OtQ96C +9Wh9G5J/GgPKRHHijlSGDmo2uy5C17kpUTpQCwPzNn7rm62nVvSNxXKyTZ/v +8QOdnpdpfx5/0sn/RkFfy5f0fqiiKGee8Vnex8MGBGM1b9wBCACZfzzWDhdq +azSmOfAX2e+F7qw2/ZjZWdcLCKqzrif3rsxxJ85AgIzFLl1e8HLoI0JZqXGZ ++mNEJkNrl+0sDwjKLw754G13gH23XehVCnnENLBueD5ZvB0jDXY3rXVFZewA +sYyTvEXXPwLMe7mu4DJuNglX6jI+C6jeX6FAZWiidYhilKUPuu91EFjGHjgb +rvRvGsG0sxmqYTfewiF+8RDBsxnn38hyxyvjLH1/d+bg4A8CUM8SShLoCCQz +CoFji4Fc9Y0QrCSh5csB01qVbjIimFp7quLQzSwVELWkS+7nJRu1no8P1zTd +Lg/8zRz4Bea5pWB1Ya7Q2wQj9l6k51lPABEBAAH+CQMIwr3YSD15lYrgItoy +MDsrWqMMHJsSxusbQiK0KLgjFBuDuTolsu9zqQCHEm2dxChqT+yQ6AeeynRD +pDMVkHEvhShvGUhB6Bu5wClHj8+xFpyprShE/KbEuppNdfIRgWVYc7UX+TYz +6BymqhzKyIw2Q33ocrXgTRZ02HM7urKVvAhsJCEff0paByOzCspiv/TPRihi +7GAZY0wFLDPe9qr+07ExT2ndMDX8Xb1mlg8IeaSWUaNilm7M8oW3xnUBnXeD +XglTkObGeRVXAINim9uL4soT3lamN4QwgBus9WzFqOOCMk11fjatY8kY1zX9 +epO27igGtMwTFl11XcQLlFyvlgPBeWtFam7RiDPa3VF0XubmBYZBmqWpccWs +xl0xHCtUK7Pd0O4kSqxsL9cB0MX9iR1yPkM8wA++Mp6pEfNcXUrGIdlie0H5 +aCq8rguYG5VuFosSUatdCbpRVGBxGnhxHes0mNTPgwAoAVNYBWXH5iq5HxKy +i3Zy5V7ZKSyDrfg/0AajtDW5h3g+wglUI9UCdT4tNLFwYbhHqGH2xdBztYI0 +iSJ7COLmo26smkA8UXxsrlw8PWPzpbhQOG06EbMjncJimJDMI1YDC6ag7M5l +OcG9uXZQ22ipAz5CSPtyL0/0WAp4yyn1VQRBK42n/y9ld+dMbuq6majazb15 +6sEgHUKERcwGs0Ftfj5Zamwhm7ZoIe26XEqvcshpQpv1Q9hktluVeSbiVaBe +Nl8zUZHlo/0zUc5j7G5Up58t+ChSsyOFJGM7CGkKHHawBZYCs0EcpsdAPr3T +1C8A0Wt9POTETYM4pZFOoLds6VTolZZcxeBN5YPoN2kbwFpOgPJN09Zz8z8S +4psQRV4KQ92XDPZ/6q2BH5i2+F2ZwUsvCR4DwgzbVGZSRV6mM7lkjZSmnWfC +AE7DUl7XwsB2BBgBCAAJBQJjNW/cAhsMACEJECgnWLnDqSQDFiEEJC/6LC0g +Wfu+P08vKCdYucOpJAMWpgf8CR01o9RCUTtFAGKA1EY4UG6qOFsuxX+tl/7/ +grrAMeiMzrTqWAubntPg1ReeTndUgXco7z6/w3XWYK09v7MGeB+9VTXgI4Ic +QM104rX1OVH60fblx+19SIX44myAxDKwSsa4US3AsTpM3OlAhyfDtJuPEysQ +ZpwX3EIEp7EHoyVHcbAnYsFQmbcur9++oibO2hOUoOyumTgk4fkV9gb4bZ/X +NTvd9WwIISVfioD/j2tFB/RE1+GNGOYdQs/yFK8JdQSF+IGCASDGh/+8qUKp +q4cLnmW64h5puO7wdpdOOnPTX1yr7chSB7Bf+TLQ+FBcRb1Kg4Mpps9jbJLJ +RscWbg== +=KJNy +-----END PGP PRIVATE KEY BLOCK----- +` + +// Dummy GPG keys, used only for testing +// nolint:gosec +const passPhrase = `MkDgjkrgdGxt` + +func TestSignRPMRepo(t *testing.T) { + repoDir := t.TempDir() + workDir := t.TempDir() + pubKeyPath := filepath.Join(workDir, "pub.key") + err := os.WriteFile(pubKeyPath, []byte(pubKey), 0600) + require.NoError(t, err) + privKeyPath := filepath.Join(workDir, "priv.key") + err = os.WriteFile(privKeyPath, []byte(privKey), 0600) + require.NoError(t, err) + passPhrasePath := filepath.Join(workDir, "passphrase") + err = os.WriteFile(passPhrasePath, []byte(passPhrase), 0600) + 
require.NoError(t, err) + err = os.Mkdir(filepath.Join(repoDir, "repodata"), 0700) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(repoDir, "repodata", "repomd.xml"), []byte(""), 0600) + require.NoError(t, err) + + cfg := PublishConfig{ + Config: config.Config{ + GPGPrivateKey: privKeyPath, + GPGPublicKey: pubKeyPath, + GPGPassPath: passPhrasePath, + }, + } + + err = signRPMRepo(repoDir, cfg) + require.NoError(t, err) +} diff --git a/pkg/cmd/grafana-cli/commands/commands.go b/pkg/cmd/grafana-cli/commands/commands.go index 2765940a67b..6436b0ce4d2 100644 --- a/pkg/cmd/grafana-cli/commands/commands.go +++ b/pkg/cmd/grafana-cli/commands/commands.go @@ -208,9 +208,19 @@ var adminCommands = []*cli.Command{ }, { Name: "generate-file", - Usage: "creates a conflict users file.. Safe to execute multiple times.", + Usage: "creates a conflict users file. Safe to execute multiple times.", Action: runGenerateConflictUsersFile(), }, + { + Name: "validate-file", + Usage: "validates the conflict users file. Safe to execute multiple times.", + Action: runValidateConflictUsersFile(), + }, + { + Name: "ingest-file", + Usage: "ingests the conflict users file", + Action: runIngestConflictUsersFile(), + }, }, }, }, diff --git a/pkg/cmd/grafana-cli/commands/conflict_user_command.go b/pkg/cmd/grafana-cli/commands/conflict_user_command.go index 53cf732fd20..d4f5f7acbd6 100644 --- a/pkg/cmd/grafana-cli/commands/conflict_user_command.go +++ b/pkg/cmd/grafana-cli/commands/conflict_user_command.go @@ -1,8 +1,13 @@ package commands import ( + "context" + "errors" "fmt" "os" + "path/filepath" + "regexp" + "strconv" "strings" "github.com/fatih/color" @@ -10,19 +15,49 @@ import ( "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger" "github.com/grafana/grafana/pkg/cmd/grafana-cli/utils" "github.com/grafana/grafana/pkg/infra/tracing" + "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/sqlstore" "github.com/grafana/grafana/pkg/services/sqlstore/db" "github.com/grafana/grafana/pkg/services/sqlstore/migrations" + "github.com/grafana/grafana/pkg/services/user" + "github.com/grafana/grafana/pkg/services/user/userimpl" + "github.com/grafana/grafana/pkg/setting" "github.com/urfave/cli/v2" ) -func getSqlStore(context *cli.Context) (*sqlstore.SQLStore, error) { - cmd := &utils.ContextCommandLine{Context: context} - cfg, err := initCfg(cmd) - cfg.Logger = nil +func initConflictCfg(cmd *utils.ContextCommandLine) (*setting.Cfg, error) { + configOptions := strings.Split(cmd.String("configOverrides"), " ") + configOptions = append(configOptions, cmd.Args().Slice()...) 
+ cfg, err := setting.NewCfgFromArgs(setting.CommandLineArgs{ + Config: cmd.ConfigFile(), + HomePath: cmd.HomePath(), + Args: append(configOptions, "cfg:log.level=error"), // trailing arguments take precedence over the options string + }) + if err != nil { + return nil, err + } + return cfg, nil +} + +func initializeConflictResolver(cmd *utils.ContextCommandLine, f Formatter, ctx *cli.Context) (*ConflictResolver, error) { + cfg, err := initConflictCfg(cmd) if err != nil { return nil, fmt.Errorf("%v: %w", "failed to load configuration", err) } + s, err := getSqlStore(cfg) + if err != nil { + return nil, fmt.Errorf("%v: %w", "failed to get sql store", err) + } + conflicts, err := GetUsersWithConflictingEmailsOrLogins(ctx, s) + if err != nil { + return nil, fmt.Errorf("%v: %w", "failed to get users with conflicting logins", err) + } + resolver := ConflictResolver{Users: conflicts} + resolver.BuildConflictBlocks(conflicts, f) + return &resolver, nil +} + +func getSqlStore(cfg *setting.Cfg) (*sqlstore.SQLStore, error) { tracer, err := tracing.ProvideService(cfg) if err != nil { return nil, fmt.Errorf("%v: %w", "failed to initialize tracer service", err) } @@ -33,28 +68,21 @@ func getSqlStore(context *cli.Context) (*sqlstore.SQLStore, error) { func runListConflictUsers() func(context *cli.Context) error { return func(context *cli.Context) error { - s, err := getSqlStore(context) + cmd := &utils.ContextCommandLine{Context: context} + whiteBold := color.New(color.FgWhite).Add(color.Bold) + r, err := initializeConflictResolver(cmd, whiteBold.Sprintf, context) if err != nil { - return fmt.Errorf("%v: %w", "failed to get to sql", err) + return fmt.Errorf("%v: %w", "failed to initialize conflict resolver", err) } - conflicts, err := GetUsersWithConflictingEmailsOrLogins(context, s) - if err != nil { - return fmt.Errorf("%v: %w", "failed to get users with conflicting logins", err) - } - if len(conflicts) < 1 { + if len(r.Users) < 1 { logger.Info(color.GreenString("No Conflicting users found.\n\n")) return nil } - whiteBold := color.New(color.FgWhite).Add(color.Bold) - resolver := ConflictResolver{Users: conflicts} - resolver.BuildConflictBlocks(whiteBold.Sprintf) - logger.Infof("\n\nShowing Conflicts\n\n") - logger.Infof(resolver.ToStringPresentation()) + logger.Infof("\n\nShowing conflicts\n\n") + logger.Infof(r.ToStringPresentation()) logger.Infof("\n") - // TODO: remove line when finished - // this is only for debugging - if len(resolver.DiscardedBlocks) != 0 { - resolver.logDiscardedUsers() + if len(r.DiscardedBlocks) != 0 { + r.logDiscardedUsers() } return nil } @@ -62,74 +90,328 @@ func runGenerateConflictUsersFile() func(context *cli.Context) error { return func(context *cli.Context) error { - s, err := getSqlStore(context) + cmd := &utils.ContextCommandLine{Context: context} + r, err := initializeConflictResolver(cmd, fmt.Sprintf, context) if err != nil { - return fmt.Errorf("%v: %w", "failed to get to sql", err) + return fmt.Errorf("%v: %w", "failed to initialize conflict resolver", err) } - conflicts, err := GetUsersWithConflictingEmailsOrLogins(context, s) - if err != nil { - return fmt.Errorf("%v: %w", "failed to get users with conflicting logins", err) - } - if len(conflicts) < 1 { + if len(r.Users) < 1 { logger.Info(color.GreenString("No Conflicting users found.\n\n")) return nil } - resolver := ConflictResolver{Users: conflicts} - resolver.BuildConflictBlocks(fmt.Sprintf) - tmpFile, err := generateConflictUsersFile(&resolver) +
tmpFile, err := generateConflictUsersFile(r) if err != nil { return fmt.Errorf("generating file returned an error: %w", err) } logger.Infof("\n\ngenerated file\n") logger.Infof("%s\n\n", tmpFile.Name()) logger.Infof("once the file is edited and conflicts are resolved, you can either validate or ingest the file\n\n") if len(resolver.DiscardedBlocks) != 0 { resolver.logDiscardedUsers() - if len(resolver.DiscardedBlocks) != 0 { - resolver.logDiscardedUsers() + if len(r.DiscardedBlocks) != 0 { + r.logDiscardedUsers() } return nil } } +func runValidateConflictUsersFile() func(context *cli.Context) error { + return func(context *cli.Context) error { + cmd := &utils.ContextCommandLine{Context: context} + r, err := initializeConflictResolver(cmd, fmt.Sprintf, context) + if err != nil { + return fmt.Errorf("%v: %w", "failed to initialize conflict resolver", err) + } + + // read in the file to validate + arg := cmd.Args().First() + if arg == "" { + return errors.New("please specify an absolute path to the file to read from") + } + b, err := os.ReadFile(filepath.Clean(arg)) + if err != nil { + return fmt.Errorf("could not read file: %w", err) + } + validErr := getValidConflictUsers(r, b) + if validErr != nil { + return fmt.Errorf("could not validate file: %w", validErr) + } + logger.Info("File validation complete without errors.\n\n The file can now be used with the `ingest-file` command.\n\n") + return nil + } +} + +func runIngestConflictUsersFile() func(context *cli.Context) error { + return func(context *cli.Context) error { + cmd := &utils.ContextCommandLine{Context: context} + r, err := initializeConflictResolver(cmd, fmt.Sprintf, context) + if err != nil { + return fmt.Errorf("%v: %w", "failed to initialize conflict resolver", err) + } + + // read in the file to ingest + arg := cmd.Args().First() + if arg == "" { + return errors.New("please specify an absolute path to the file to read from") + } + b, err := os.ReadFile(filepath.Clean(arg)) + if err != nil { + return fmt.Errorf("could not read file: %w", err) + } + validErr := getValidConflictUsers(r, b) + if validErr != nil { + return fmt.Errorf("could not validate file: %w", validErr) + } + // should we rebuild blocks here? + // kind of a weird thing maybe? + if len(r.ValidUsers) == 0 { + return fmt.Errorf("no users") + } + r.showChanges() + if !confirm("\n\nWe encourage users to create a db backup before running this command. \n Proceed with operation?") { + return fmt.Errorf("user cancelled") + } + err = r.MergeConflictingUsers(context.Context) + if err != nil { + return fmt.Errorf("not able to merge: %w", err) + } + logger.Info("\n\nconflicts resolved.\n") + return nil + } +} + +func getDocumentationForFile() string { + return `# Conflicts File +# This file is generated by the grafana-cli command ` + color.CyanString("grafana-cli admin user-manager conflicts generate-file") + `. +# +# Commands: +# +, keep = keep user +# -, delete = delete user +# +# The fields conflict_email and conflict_login +# indicate that we see a conflict in email and/or login with another user. +# Both these fields can be true. +# +# There needs to be exactly one picked user per conflict block. +# +# The lines can be re-ordered. +# +# If you want to postpone a specific block, +# delete all lines regarding that conflict block.
+# +` +} + func generateConflictUsersFile(r *ConflictResolver) (*os.File, error) { tmpFile, err := os.CreateTemp(os.TempDir(), "conflicting_user_*.diff") if err != nil { return nil, err } + if _, err := tmpFile.Write([]byte(getDocumentationForFile())); err != nil { + return nil, err + } if _, err := tmpFile.Write([]byte(r.ToStringPresentation())); err != nil { return nil, err } return tmpFile, nil } +func getValidConflictUsers(r *ConflictResolver, b []byte) error { + newConflicts := make(ConflictingUsers, 0) + // need to verify that id or email exists + previouslySeenIds := map[string]bool{} + previouslySeenEmails := map[string]bool{} + for _, users := range r.Blocks { + for _, u := range users { + previouslySeenIds[strings.ToLower(u.ID)] = true + previouslySeenEmails[strings.ToLower(u.Email)] = true + } + } + + // tested in https://regex101.com/r/una3zC/1 + diffPattern := `^[+-]` + // compiled once here, since it is used for every row in the loop below + matchingExpression, err := regexp.Compile(diffPattern) + if err != nil { + return fmt.Errorf("unable to compile regex %s: %w", diffPattern, err) + } + for _, row := range strings.Split(string(b), "\n") { + if row == "" { + // end of file + break + } + // if the row starts with a #, it is a comment + if row[0] == '#' { + // comment + continue + } + entryRow := matchingExpression.Match([]byte(row)) + if !entryRow { + // block row + // conflict: hej + continue + } + + newUser := &ConflictingUser{} + err := newUser.Marshal(row) + if err != nil { + return fmt.Errorf("could not parse the content of the file: %w", err) + } + if !previouslySeenEmails[strings.ToLower(newUser.Email)] { + return fmt.Errorf("invalid email: %s, email not seen in previous conflicts", newUser.Email) + } + // valid entry + newConflicts = append(newConflicts, *newUser) + } + r.ValidUsers = newConflicts + r.BuildConflictBlocks(newConflicts, fmt.Sprintf) + return nil +} + +func (r *ConflictResolver) MergeConflictingUsers(ctx context.Context) error { + for block, users := range r.Blocks { + if len(users) < 2 { + return fmt.Errorf("not enough users to perform merge, found %d for id %s, should be at least 2", len(users), block) + } + var intoUser user.User + var intoUserId int64 + var fromUserIds []int64 + + // creating a session for each block of users + // we want to rollback in case something happens during update / delete + if err := r.Store.WithDbSession(ctx, func(sess *sqlstore.DBSession) error { + err := sess.Begin() + if err != nil { + return fmt.Errorf("could not open a db session: %w", err) + } + for _, u := range users { + if u.Direction == "+" { + id, err := strconv.ParseInt(u.ID, 10, 64) + if err != nil { + return fmt.Errorf("could not convert id in +") + } + intoUserId = id + } else if u.Direction == "-" { + id, err := strconv.ParseInt(u.ID, 10, 64) + if err != nil { + return fmt.Errorf("could not convert id in -") + } + fromUserIds = append(fromUserIds, id) + } + } + if _, err := sess.ID(intoUserId).Where(sqlstore.NotServiceAccountFilter(r.Store)).Get(&intoUser); err != nil { + return fmt.Errorf("could not find intoUser: %w", err) + } + + for _, fromUserId := range fromUserIds { + var fromUser user.User + exists, err := sess.ID(fromUserId).Where(sqlstore.NotServiceAccountFilter(r.Store)).Get(&fromUser) + if err != nil { + return fmt.Errorf("could not find fromUser: %w", err) + } + if !exists { + fmt.Printf("user with id %d does not exist, skipping\n", fromUserId) + continue + } + // delete the user + delErr := r.Store.DeleteUserInSession(ctx, sess, &models.DeleteUserCommand{UserId: fromUserId}) + if delErr != nil { + return fmt.Errorf("error during deletion of user: %w", delErr) + } + } + commitErr := sess.Commit() + if commitErr != nil { + return fmt.Errorf("could not commit operation for user identification %s: %w", block, commitErr) + } + userStore := userimpl.ProvideStore(r.Store, setting.NewCfg()) + updateMainCommand := &user.UpdateUserCommand{ + UserID: intoUser.ID, + Login: strings.ToLower(intoUser.Login), + Email: strings.ToLower(intoUser.Email), + } + updateErr := userStore.Update(ctx, updateMainCommand) + if updateErr != nil { + return fmt.Errorf("could not update user: %w", updateErr) + } + + return nil + }); err != nil { + return err + } + } + return nil +} + +/* +hej@test.com+hej@test.com +all of the permissions, roles and ownership will be transferred to the user. ++ id: 1, email: hej@test.com, login: hej@test.com +these user(s) will be deleted and their permissions transferred. +- id: 2, email: HEJ@TEST.COM, login: HEJ@TEST.COM +- id: 3, email: hej@TEST.com, login: hej@TEST.com +*/ +func (r *ConflictResolver) showChanges() { + if len(r.ValidUsers) == 0 { + fmt.Println("no changes will take place as we have no valid users.") + return + } + + var b strings.Builder + for block, users := range r.Blocks { + if _, ok := r.DiscardedBlocks[block]; ok { + // skip block + continue + } + + // looping since these can be out of order (meaning the + and -) + var mainUser ConflictingUser + for _, u := range users { + if u.Direction == "+" { + mainUser = u + break + } + } + b.WriteString("Keep the following user.\n") + b.WriteString(fmt.Sprintf("%s\n", block)) + b.WriteString(fmt.Sprintf("id: %s, email: %s, login: %s\n", mainUser.ID, mainUser.Email, mainUser.Login)) + b.WriteString("\n\n") + b.WriteString("The following user(s) will be deleted.\n") + for _, user := range users { + if user.ID == mainUser.ID { + continue + } + // mergeable users + b.WriteString(fmt.Sprintf("id: %s, email: %s, login: %s\n", user.ID, user.Email, user.Login)) + } + b.WriteString("\n\n") + } + logger.Info("\n\nChanges that will take place\n\n") + logger.Infof(b.String()) +} + // Formatter make it possible for us to write to terminal and to a file // with different formats depending on the usecase type Formatter func(format string, a ...interface{}) string -func BoldFormatter(format string, a ...interface{}) string { - white := color.New(color.FgWhite) - whiteBold := white.Add(color.Bold) - return whiteBold.Sprintf(format, a...)
-} - func shouldDiscardBlock(seenUsersInBlock map[string]string, block string, user ConflictingUser) bool { // loop through users to see if we should skip this block // we have some more tricky scenarios where we have more than two users that can have conflicts with each other // our approach is to discard any user we have already seen in another block - if _, ok := seenUsersInBlock[user.Id]; ok { + if _, ok := seenUsersInBlock[user.ID]; ok { // we have seen the user in different block than the current block - if seenUsersInBlock[user.Id] != block { + if seenUsersInBlock[user.ID] != block { return true } } - seenUsersInBlock[user.Id] = block + seenUsersInBlock[user.ID] = block return false } -func (r *ConflictResolver) BuildConflictBlocks(f Formatter) { +// BuildConflictBlocks builds blocks of users where each block is a unique email/login +// NOTE: currently this function assumes that the users are in order of grouping already +func (r *ConflictResolver) BuildConflictBlocks(users ConflictingUsers, f Formatter) { discardedBlocks := make(map[string]bool) seenUsersToBlock := make(map[string]string) blocks := make(map[string]ConflictingUsers) - for _, user := range r.Users { + for _, user := range users { // conflict blocks are how we identify conflicts in the user base. var conflictBlock string if user.ConflictEmail != "" { @@ -165,7 +447,7 @@ func (r *ConflictResolver) BuildConflictBlocks(f Formatter) { func contains(cu ConflictingUsers, target ConflictingUser) bool { for _, u := range cu { - if u.Id == target.Id { + if u.ID == target.ID { return true } } @@ -176,7 +458,7 @@ func (r *ConflictResolver) logDiscardedUsers() { keys := make([]string, 0, len(r.DiscardedBlocks)) for block := range r.DiscardedBlocks { for _, u := range r.Blocks[block] { - keys = append(keys, u.Id) + keys = append(keys, u.ID) } } warn := color.YellowString("Note: We discarded some conflicts that have multiple conflicting types involved.") @@ -208,7 +490,7 @@ func (r *ConflictResolver) ToStringPresentation() string { - id: 3, email: hej@TEST.com, login: hej@TEST.com */ startOfBlock := make(map[string]bool) - fileString := "" + var b strings.Builder for block, users := range r.Blocks { if _, ok := r.DiscardedBlocks[block]; ok { // skip block @@ -216,76 +498,105 @@ } for _, user := range users { if !startOfBlock[block] { - fileString += fmt.Sprintf("%s\n", block) + b.WriteString(fmt.Sprintf("%s\n", block)) startOfBlock[block] = true - fileString += fmt.Sprintf("+ id: %s, email: %s, login: %s\n", user.Id, user.Email, user.Login) + b.WriteString(fmt.Sprintf("+ id: %s, email: %s, login: %s, last_seen_at: %s, auth_module: %s, conflict_email: %s, conflict_login: %s\n", + user.ID, + user.Email, + user.Login, + user.LastSeenAt, + user.AuthModule, + user.ConflictEmail, + user.ConflictLogin, + )) continue } - // mergable users - fileString += fmt.Sprintf("- id: %s, email: %s, login: %s\n", user.Id, user.Email, user.Login) + // mergeable users + b.WriteString(fmt.Sprintf("- id: %s, email: %s, login: %s, last_seen_at: %s, auth_module: %s, conflict_email: %s, conflict_login: %s\n", + user.ID, + user.Email, + user.Login, + user.LastSeenAt, + user.AuthModule, + user.ConflictEmail, + user.ConflictLogin, + )) } } - return fileString + return b.String() } type ConflictResolver struct { + Store *sqlstore.SQLStore + Config *setting.Cfg Users ConflictingUsers + ValidUsers ConflictingUsers Blocks map[string]ConflictingUsers DiscardedBlocks map[string]bool } type ConflictingUser struct { -
// IDENTIFIER
- // TODO: should have conflict block in sql for performance and stability
- Direction string `xorm:"direction"`
- // FIXME: refactor change to correct type int64
- Id string `xorm:"id"`
- Email string `xorm:"email"`
- Login string `xorm:"login"`
- // FIXME: refactor change to correct type <>
- LastSeenAt string `xorm:"last_seen_at"`
- AuthModule string `xorm:"auth_module"`
- // currently not really used for anything
+ // direction is the +/- which indicates if we should keep or delete the user
+ Direction string `xorm:"direction"`
+ ID string `xorm:"id"`
+ Email string `xorm:"email"`
+ Login string `xorm:"login"`
+ LastSeenAt string `xorm:"last_seen_at"`
+ AuthModule string `xorm:"auth_module"`
 ConflictEmail string `xorm:"conflict_email"`
 ConflictLogin string `xorm:"conflict_login"`
 }

-// always better to have a slice of the object
-// not a pointer for slice
-type ConflictingUsers []*ConflictingUser
+type ConflictingUsers []ConflictingUser

 func (c *ConflictingUser) Marshal(filerow string) error {
- // +/- id: 1, email: hej,
+ // example view of the file to ingest
+ // +/- id: 1, email: hej, auth_module: LDAP
 trimmedSpaces := strings.ReplaceAll(filerow, " ", "")
 if trimmedSpaces[0] == '+' {
 c.Direction = "+"
 } else if trimmedSpaces[0] == '-' {
 c.Direction = "-"
 } else {
- return fmt.Errorf("unable to get which operation the user would receive")
+ return fmt.Errorf("unable to get which operation was chosen")
 }
 trimmed := strings.TrimLeft(trimmedSpaces, "+-")
 values := strings.Split(trimmed, ",")
- if len(values) != 5 {
- // fmt errror
- return fmt.Errorf("expected 5 values in entryrow")
+
+ // rows now carry 7 fields (id, email, login, last_seen_at, auth_module,
+ // conflict_email, conflict_login), all of which are indexed below
+ if len(values) < 7 {
+ return fmt.Errorf("expected at least 7 values in entry row")
 }
+ // expected fields
 id := strings.Split(values[0], ":")
 email := strings.Split(values[1], ":")
 login := strings.Split(values[2], ":")
+ c.ID = id[1]
+ c.Email = email[1]
+ c.Login = login[1]
+
+ // TrimPrefix rather than Split, since the timestamp value itself contains colons, e.g. 2012-07-26T16:08:11Z
 lastSeenAt := strings.TrimPrefix(values[3], "last_seen_at:")
 authModule := strings.Split(values[4], ":")
- // optional field
 if len(authModule) < 2 {
 c.AuthModule = ""
 } else {
 c.AuthModule = authModule[1]
 }
 c.LastSeenAt = lastSeenAt
+
+ // which conflict fields are set
+ conflictEmail := strings.Split(values[5], ":")
+ conflictLogin := strings.Split(values[6], ":")
+ if len(conflictEmail) < 2 {
+ c.ConflictEmail = ""
+ } else {
+ c.ConflictEmail = conflictEmail[1]
+ }
+ if len(conflictLogin) < 2 {
+ c.ConflictLogin = ""
+ } else {
+ c.ConflictLogin = conflictLogin[1]
+ }
 return nil
 }
@@ -306,6 +617,7 @@ func GetUsersWithConflictingEmailsOrLogins(ctx *cli.Context, s *sqlstore.SQLStor
 // sorts the users by their user identification and ids
 func conflictingUserEntriesSQL(s *sqlstore.SQLStore) string {
 userDialect := db.DB.GetDialect(s).Quote("user")
+
 sqlQuery := `
 SELECT DISTINCT
 u1.id,
@@ -314,12 +626,12 @@ func conflictingUserEntriesSQL(s *sqlstore.SQLStore) string {
 u1.last_seen_at,
 user_auth.auth_module,
 ( SELECT
- 'conflict_email'
+ 'true'
 FROM ` + userDialect + `
 WHERE (LOWER(u1.email) = LOWER(u2.email)) AND(u1.email != u2.email)) AS conflict_email,
 ( SELECT
- 'conflict_login'
+ 'true'
 FROM ` + userDialect + `
 WHERE (LOWER(u1.login) = LOWER(u2.login)
 AND(u1.login != u2.login))) AS conflict_login
@@ -337,3 +649,21 @@ func notServiceAccount(ss *sqlstore.SQLStore) string {
 return fmt.Sprintf("is_service_account = %s", ss.Dialect.BooleanStr(false))
 }
+
+// confirm function asks for user input
+// returns bool
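+// accepts "y" or "yes" (case-insensitive) as confirmation; any other
+// input, or a failure to read input, is treated as a "no"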
+func confirm(confirmPrompt string) bool { + var input string + logger.Infof("%s? [y|n]: ", confirmPrompt) + + _, err := fmt.Scanln(&input) + if err != nil { + logger.Infof("could not parse input from user for confirmation") + return false + } + input = strings.ToLower(input) + if input == "y" || input == "yes" { + return true + } + return false +} diff --git a/pkg/cmd/grafana-cli/commands/conflict_user_command_test.go b/pkg/cmd/grafana-cli/commands/conflict_user_command_test.go index d59501717b2..6e1883d9da5 100644 --- a/pkg/cmd/grafana-cli/commands/conflict_user_command_test.go +++ b/pkg/cmd/grafana-cli/commands/conflict_user_command_test.go @@ -3,14 +3,250 @@ package commands import ( "context" "fmt" + "os" + "sort" + "testing" + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/team/teamimpl" + "github.com/grafana/grafana/pkg/setting" + "github.com/grafana/grafana/pkg/services/sqlstore" "github.com/grafana/grafana/pkg/services/user" "github.com/stretchr/testify/require" "github.com/urfave/cli/v2" ) +// "Skipping conflicting users test for mysql as it does make unique constraint case insensitive by default +const ignoredDatabase = "mysql" + +func TestBuildConflictBlock(t *testing.T) { + type testBuildConflictBlock struct { + desc string + users []user.User + expectedBlock string + wantDiscardedBlock string + wantConflictUser *ConflictingUser + wantedNumberOfUsers int + } + testOrgID := 1 + testCases := []testBuildConflictBlock{ + { + desc: "should get one block with only 3 users", + users: []user.User{ + { + Email: "ldap-editor", + Login: "ldap-editor", + OrgID: int64(testOrgID), + }, + { + Email: "LDAP-EDITOR", + Login: "LDAP-EDITOR", + OrgID: int64(testOrgID), + }, + { + Email: "overlapping conflict", + Login: "LDAP-editor", + OrgID: int64(testOrgID), + }, + { + Email: "OVERLAPPING conflict", + Login: "no conflict", + OrgID: int64(testOrgID), + }, + }, + expectedBlock: "conflict: ldap-editor", + wantDiscardedBlock: "conflict: overlapping conflict", + wantedNumberOfUsers: 3, + }, + { + desc: "should get conflict_email true and conflict_login empty string", + users: []user.User{ + { + Email: "conflict@email", + Login: "login", + OrgID: int64(testOrgID), + }, + { + Email: "conflict@EMAIL", + Login: "plainlogin", + OrgID: int64(testOrgID), + }, + }, + expectedBlock: "conflict: conflict@email", + wantedNumberOfUsers: 2, + wantConflictUser: &ConflictingUser{ConflictEmail: "true", ConflictLogin: ""}, + }, + { + desc: "should get conflict_email empty string and conflict_login true", + users: []user.User{ + { + Email: "regular@email", + Login: "CONFLICTLOGIN", + OrgID: int64(testOrgID), + }, + { + Email: "regular-no-conflict@email", + Login: "conflictlogin", + OrgID: int64(testOrgID), + }, + }, + expectedBlock: "conflict: conflictlogin", + wantedNumberOfUsers: 2, + wantConflictUser: &ConflictingUser{ConflictEmail: "", ConflictLogin: "true"}, + }, + } + for _, tc := range testCases { + t.Run(tc.desc, func(t *testing.T) { + // Restore after destructive operation + sqlStore := sqlstore.InitTestDB(t) + + if sqlStore.GetDialect().DriverName() != ignoredDatabase { + for _, u := range tc.users { + cmd := user.CreateUserCommand{ + Email: u.Email, + Name: u.Name, + Login: u.Login, + OrgID: int64(testOrgID), + } + _, err := sqlStore.CreateUser(context.Background(), cmd) + require.NoError(t, err) + } + m, err := GetUsersWithConflictingEmailsOrLogins(&cli.Context{Context: context.Background()}, sqlStore) + require.NoError(t, err) + r := ConflictResolver{Store: sqlStore} + 
r.BuildConflictBlocks(m, fmt.Sprintf) + require.Equal(t, tc.wantedNumberOfUsers, len(r.Blocks[tc.expectedBlock])) + if tc.wantDiscardedBlock != "" { + require.Equal(t, true, r.DiscardedBlocks[tc.wantDiscardedBlock]) + } + if tc.wantConflictUser != nil { + for _, u := range m { + require.Equal(t, tc.wantConflictUser.ConflictEmail, u.ConflictEmail) + require.Equal(t, tc.wantConflictUser.ConflictLogin, u.ConflictLogin) + } + } + } + }) + } +} + +func TestBuildConflictBlockFromFileRepresentation(t *testing.T) { + type testBuildConflictBlock struct { + desc string + users []user.User + fileString string + expectedBlocks []string + expectedIdsInBlocks map[string][]string + } + testOrgID := 1 + testCases := []testBuildConflictBlock{ + { + desc: "should be able to parse the fileString containing the conflicts", + users: []user.User{ + { + Email: "test", + Login: "test", + OrgID: int64(testOrgID), + }, + { + Email: "TEST", + Login: "TEST", + OrgID: int64(testOrgID), + }, + { + Email: "test2", + Login: "test2", + OrgID: int64(testOrgID), + }, + { + Email: "TEST2", + Login: "TEST2", + OrgID: int64(testOrgID), + }, + { + Email: "Test2", + Login: "Test2", + OrgID: int64(testOrgID), + }, + }, + fileString: `conflict: test +- id: 2, email: test, login: test, last_seen_at: 2012-09-19T08:31:20Z, auth_module: , conflict_email: true, conflict_login: true ++ id: 3, email: TEST, login: TEST, last_seen_at: 2012-09-19T08:31:29Z, auth_module: , conflict_email: true, conflict_login: true +conflict: test2 +- id: 4, email: test2, login: test2, last_seen_at: 2012-09-19T08:31:41Z, auth_module: , conflict_email: true, conflict_login: true ++ id: 5, email: TEST2, login: TEST2, last_seen_at: 2012-09-19T08:31:51Z, auth_module: , conflict_email: true, conflict_login: true +- id: 6, email: Test2, login: Test2, last_seen_at: 2012-09-19T08:32:03Z, auth_module: , conflict_email: true, conflict_login: true`, + expectedBlocks: []string{"conflict: test", "conflict: test2"}, + expectedIdsInBlocks: map[string][]string{"conflict: test": {"2", "3"}, "conflict: test2": {"4", "5", "6"}}, + }, + { + desc: "should be able to parse the fileString containing the conflicts 123", + users: []user.User{ + { + Email: "saml-misi@example.org", + Login: "saml-misi", + OrgID: int64(testOrgID), + }, + { + Email: "saml-misi@example", + Login: "saml-Misi", + OrgID: int64(testOrgID), + }, + }, + fileString: `conflict: saml-misi ++ id: 5, email: saml-misi@example.org, login: saml-misi, last_seen_at: 2022-09-22T12:00:49Z, auth_module: auth.saml, conflict_email: , conflict_login: true +- id: 15, email: saml-misi@example, login: saml-Misi, last_seen_at: 2012-09-26T11:31:32Z, auth_module: , conflict_email: , conflict_login: true`, + expectedBlocks: []string{"conflict: saml-misi"}, + expectedIdsInBlocks: map[string][]string{"conflict: saml-misi": {"5", "15"}}, + }, + } + for _, tc := range testCases { + t.Run(tc.desc, func(t *testing.T) { + // Restore after destructive operation + sqlStore := sqlstore.InitTestDB(t) + + if sqlStore.GetDialect().DriverName() != ignoredDatabase { + for _, u := range tc.users { + cmd := user.CreateUserCommand{ + Email: u.Email, + Name: u.Name, + Login: u.Login, + OrgID: int64(testOrgID), + } + _, err := sqlStore.CreateUser(context.Background(), cmd) + require.NoError(t, err) + } + + conflicts, err := GetUsersWithConflictingEmailsOrLogins(&cli.Context{Context: context.Background()}, sqlStore) + r := ConflictResolver{Users: conflicts, Store: sqlStore} + r.BuildConflictBlocks(conflicts, fmt.Sprintf) + require.NoError(t, err) + 
validErr := getValidConflictUsers(&r, []byte(tc.fileString)) + require.NoError(t, validErr) + + // test starts here + keys := make([]string, 0) + for k := range r.Blocks { + keys = append(keys, k) + } + sort.Strings(keys) + require.Equal(t, tc.expectedBlocks, keys) + + // we want to validate the ids in the blocks + for _, block := range tc.expectedBlocks { + // checking for parsing of ids + conflictIds := []string{} + for _, u := range r.Blocks[block] { + conflictIds = append(conflictIds, u.ID) + } + require.Equal(t, tc.expectedIdsInBlocks[block], conflictIds) + } + } + }) + } +} func TestGetConflictingUsers(t *testing.T) { type testListConflictingUsers struct { desc string @@ -52,9 +288,6 @@ func TestGetConflictingUsers(t *testing.T) { }, want: 2, }, - // TODO: - // refactor the sql to get 3 users from this test - // if this is changed, one needs to correct the filerepresentation { desc: "should be 5 conflicting users, each conflict gets 2 users", users: []user.User{ @@ -151,8 +384,7 @@ func TestGetConflictingUsers(t *testing.T) { t.Run(tc.desc, func(t *testing.T) { // Restore after destructive operation sqlStore := sqlstore.InitTestDB(t) - // "Skipping conflicting users test for mysql as it does make unique constraint case insensitive by default - if sqlStore.GetDialect().DriverName() != "mysql" { + if sqlStore.GetDialect().DriverName() != ignoredDatabase { for _, u := range tc.users { cmd := user.CreateUserCommand{ Email: u.Email, @@ -175,82 +407,16 @@ func TestGetConflictingUsers(t *testing.T) { } } -func TestBuildConflictBlock(t *testing.T) { - type testBuildConflictBlock struct { - desc string - users []user.User - expectedBlock string - wantDiscardedBlock string - wantedNumberOfUsers int - } - testOrgID := 1 - testCases := []testBuildConflictBlock{ - { - desc: "should get one block with only 3 users", - users: []user.User{ - { - Email: "ldap-editor", - Login: "ldap-editor", - OrgID: int64(testOrgID), - }, - { - Email: "LDAP-EDITOR", - Login: "LDAP-EDITOR", - OrgID: int64(testOrgID), - }, - { - Email: "overlapping conflict", - Login: "LDAP-editor", - OrgID: int64(testOrgID), - }, - { - Email: "OVERLAPPING conflict", - Login: "no conflict", - OrgID: int64(testOrgID), - }, - }, - expectedBlock: "conflict: ldap-editor", - wantDiscardedBlock: "conflict: overlapping conflict", - wantedNumberOfUsers: 3, - }, - } - for _, tc := range testCases { - t.Run(tc.desc, func(t *testing.T) { - // Restore after destructive operation - sqlStore := sqlstore.InitTestDB(t) - - // "Skipping conflicting users test for mysql as it does make unique constraint case insensitive by default - if sqlStore.GetDialect().DriverName() != "mysql" { - for _, u := range tc.users { - cmd := user.CreateUserCommand{ - Email: u.Email, - Name: u.Name, - Login: u.Login, - OrgID: int64(testOrgID), - } - _, err := sqlStore.CreateUser(context.Background(), cmd) - require.NoError(t, err) - } - m, err := GetUsersWithConflictingEmailsOrLogins(&cli.Context{Context: context.Background()}, sqlStore) - require.NoError(t, err) - r := ConflictResolver{Users: m} - r.BuildConflictBlocks(fmt.Sprintf) - require.Equal(t, tc.wantedNumberOfUsers, len(r.Blocks[tc.expectedBlock])) - require.Equal(t, true, r.DiscardedBlocks[tc.wantDiscardedBlock]) - } - }) - } -} - func TestGenerateConflictingUsersFile(t *testing.T) { - type testListConflictingUsers struct { - desc string - users []user.User - wantDiscardedBlock string - want string + type testGenerateConflictUsers struct { + desc string + users []user.User + expectedDiscardedBlock string + 
expectedBlocks []string + expectedEmailInBlocks map[string][]string } testOrgID := 1 - testCases := []testListConflictingUsers{ + testCases := []testGenerateConflictUsers{ { desc: "should get conflicting users", users: []user.User{ @@ -290,10 +456,17 @@ func TestGenerateConflictingUsersFile(t *testing.T) { OrgID: int64(testOrgID), }, }, - wantDiscardedBlock: "conflict: user2", + expectedBlocks: []string{"conflict: ldap-admin", "conflict: user_duplicate_test_login", "conflict: oauth-admin@example.org", "conflict: user2"}, + expectedEmailInBlocks: map[string][]string{ + "conflict: ldap-admin": {"ldap-admin", "xo"}, + "conflict: user_duplicate_test_login": {"user1", "user2"}, + "conflict: oauth-admin@example.org": {"oauth-admin@EXAMPLE.ORG", "oauth-admin@example.org"}, + "conflict: user2": {"USER2", "user2"}, + }, + expectedDiscardedBlock: "conflict: user2", }, { - desc: "should get one block with only 3 users", + desc: "should get only one block with 3 users", users: []user.User{ { Email: "ldap-editor", @@ -311,19 +484,15 @@ func TestGenerateConflictingUsersFile(t *testing.T) { OrgID: int64(testOrgID), }, }, - want: `conflict: ldap-editor -+ id: 1, email: ldap-editor, login: ldap-editor -- id: 2, email: LDAP-EDITOR, login: LDAP-EDITOR -- id: 3, email: No confli, login: LDAP-editor -`, + expectedBlocks: []string{"conflict: ldap-editor"}, + expectedEmailInBlocks: map[string][]string{"conflict: ldap-editor": {"ldap-editor", "LDAP-EDITOR", "No confli"}}, }, } for _, tc := range testCases { t.Run(tc.desc, func(t *testing.T) { // Restore after destructive operation sqlStore := sqlstore.InitTestDB(t) - // "Skipping conflicting users test for mysql as it does make unique constraint case insensitive by default - if sqlStore.GetDialect().DriverName() != "mysql" { + if sqlStore.GetDialect().DriverName() != ignoredDatabase { for _, u := range tc.users { cmd := user.CreateUserCommand{ Email: u.Email, @@ -336,49 +505,280 @@ func TestGenerateConflictingUsersFile(t *testing.T) { } m, err := GetUsersWithConflictingEmailsOrLogins(&cli.Context{Context: context.Background()}, sqlStore) require.NoError(t, err) - r := ConflictResolver{Users: m} - r.BuildConflictBlocks(fmt.Sprintf) - if tc.wantDiscardedBlock != "" { - require.Equal(t, true, r.DiscardedBlocks[tc.wantDiscardedBlock]) + r := ConflictResolver{Store: sqlStore} + r.BuildConflictBlocks(m, fmt.Sprintf) + if tc.expectedDiscardedBlock != "" { + require.Equal(t, true, r.DiscardedBlocks[tc.expectedDiscardedBlock]) } - if tc.want != "" { - fileString := r.ToStringPresentation() - require.Equal(t, tc.want, fileString) + + // test starts here + keys := make([]string, 0) + for k := range r.Blocks { + keys = append(keys, k) + } + expectedBlocks := tc.expectedBlocks + sort.Strings(keys) + sort.Strings(expectedBlocks) + require.Equal(t, expectedBlocks, keys) + + // we want to validate the ids in the blocks + for _, block := range tc.expectedBlocks { + // checking for parsing of ids + conflictEmails := []string{} + for _, u := range r.Blocks[block] { + conflictEmails = append(conflictEmails, u.Email) + } + expectedEmailsInBlock := tc.expectedEmailInBlocks[block] + sort.Strings(conflictEmails) + sort.Strings(expectedEmailsInBlock) + require.Equal(t, expectedEmailsInBlock, conflictEmails) } } }) } } +func TestRunValidateConflictUserFile(t *testing.T) { + t.Run("should validate file thats gets created", func(t *testing.T) { + // Restore after destructive operation + sqlStore := sqlstore.InitTestDB(t) + const testOrgID int64 = 1 + if sqlStore.GetDialect().DriverName() != 
ignoredDatabase { + // add additional user with conflicting login where DOMAIN is upper case + dupUserLogincmd := user.CreateUserCommand{ + Email: "userduplicatetest1@test.com", + Login: "user_duplicate_test_1_login", + OrgID: testOrgID, + } + _, err := sqlStore.CreateUser(context.Background(), dupUserLogincmd) + require.NoError(t, err) + dupUserEmailcmd := user.CreateUserCommand{ + Email: "USERDUPLICATETEST1@TEST.COM", + Login: "USER_DUPLICATE_TEST_1_LOGIN", + OrgID: testOrgID, + } + _, err = sqlStore.CreateUser(context.Background(), dupUserEmailcmd) + require.NoError(t, err) + + // get users + conflictUsers, err := GetUsersWithConflictingEmailsOrLogins(&cli.Context{Context: context.Background()}, sqlStore) + require.NoError(t, err) + r := ConflictResolver{Store: sqlStore} + r.BuildConflictBlocks(conflictUsers, fmt.Sprintf) + tmpFile, err := generateConflictUsersFile(&r) + require.NoError(t, err) + + b, err := os.ReadFile(tmpFile.Name()) + require.NoError(t, err) + + validErr := getValidConflictUsers(&r, b) + require.NoError(t, validErr) + require.Equal(t, 2, len(r.ValidUsers)) + } + }) +} + +func TestMergeUser(t *testing.T) { + t.Run("should be able to merge user", func(t *testing.T) { + // Restore after destructive operation + sqlStore := sqlstore.InitTestDB(t) + teamSvc := teamimpl.ProvideService(sqlStore, setting.NewCfg()) + team1, err := teamSvc.CreateTeam("team1 name", "", 1) + require.Nil(t, err) + const testOrgID int64 = 1 + + if sqlStore.GetDialect().DriverName() != ignoredDatabase { + // add additional user with conflicting login where DOMAIN is upper case + + // the order of adding the conflict matters + dupUserLogincmd := user.CreateUserCommand{ + Email: "userduplicatetest1@test.com", + Name: "user name 1", + Login: "user_duplicate_test_1_login", + OrgID: testOrgID, + } + _, err := sqlStore.CreateUser(context.Background(), dupUserLogincmd) + require.NoError(t, err) + dupUserEmailcmd := user.CreateUserCommand{ + Email: "USERDUPLICATETEST1@TEST.COM", + Name: "user name 1", + Login: "USER_DUPLICATE_TEST_1_LOGIN", + OrgID: testOrgID, + } + userWithUpperCase, err := sqlStore.CreateUser(context.Background(), dupUserEmailcmd) + require.NoError(t, err) + // this is the user we want to update to another team + err = teamSvc.AddTeamMember(userWithUpperCase.ID, testOrgID, team1.Id, false, 0) + require.NoError(t, err) + + // get users + conflictUsers, err := GetUsersWithConflictingEmailsOrLogins(&cli.Context{Context: context.Background()}, sqlStore) + require.NoError(t, err) + r := ConflictResolver{Store: sqlStore} + r.BuildConflictBlocks(conflictUsers, fmt.Sprintf) + tmpFile, err := generateConflictUsersFile(&r) + require.NoError(t, err) + // validation to get newConflicts + // edited file + b, err := os.ReadFile(tmpFile.Name()) + require.NoError(t, err) + validErr := getValidConflictUsers(&r, b) + require.NoError(t, validErr) + require.Equal(t, 2, len(r.ValidUsers)) + + // test starts here + err = r.MergeConflictingUsers(context.Background()) + require.NoError(t, err) + + // user with uppercaseemail should not exist + query := &models.GetUserByIdQuery{Id: userWithUpperCase.ID} + err = sqlStore.GetUserById(context.Background(), query) + require.Error(t, user.ErrUserNotFound, err) + } + }) +} + +func TestMergeUserFromNewFileInput(t *testing.T) { + t.Run("should be able to merge users after choosing a different user to keep", func(t *testing.T) { + // Restore after destructive operation + sqlStore := sqlstore.InitTestDB(t) + + type testBuildConflictBlock struct { + desc string + users 
[]user.User + fileString string + expectedBlocks []string + expectedIdsInBlocks map[string][]string + } + testOrgID := 1 + m := make(map[string][]string) + conflict1 := "conflict: test" + conflict2 := "conflict: test2" + m[conflict1] = []string{"2", "3"} + m[conflict2] = []string{"4", "5", "6"} + testCases := []testBuildConflictBlock{ + { + desc: "should be able to parse the fileString containing the conflicts", + users: []user.User{ + { + Email: "TEST", + Login: "TEST", + OrgID: int64(testOrgID), + }, + { + Email: "test", + Login: "test", + OrgID: int64(testOrgID), + }, + { + Email: "test2", + Login: "test2", + OrgID: int64(testOrgID), + }, + { + Email: "TEST2", + Login: "TEST2", + OrgID: int64(testOrgID), + }, + { + Email: "Test2", + Login: "Test2", + OrgID: int64(testOrgID), + }, + }, + fileString: `conflict: test +- id: 1, email: test, login: test, last_seen_at: 2012-09-19T08:31:20Z, auth_module:, conflict_email: true, conflict_login: true ++ id: 2, email: TEST, login: TEST, last_seen_at: 2012-09-19T08:31:29Z, auth_module:, conflict_email: true, conflict_login: true +conflict: test2 +- id: 3, email: test2, login: test2, last_seen_at: 2012-09-19T08:31:41Z, auth_module: , conflict_email: true, conflict_login: true ++ id: 4, email: TEST2, login: TEST2, last_seen_at: 2012-09-19T08:31:51Z, auth_module: , conflict_email: true, conflict_login: true +- id: 5, email: Test2, login: Test2, last_seen_at: 2012-09-19T08:32:03Z, auth_module: , conflict_email: true, conflict_login: true`, + expectedBlocks: []string{"conflict: test", "conflict: test2"}, + expectedIdsInBlocks: m, + }, + } + for _, tc := range testCases { + if sqlStore.GetDialect().DriverName() != ignoredDatabase { + for _, u := range tc.users { + cmd := user.CreateUserCommand{ + Email: u.Email, + Name: u.Name, + Login: u.Login, + OrgID: int64(testOrgID), + } + _, err := sqlStore.CreateUser(context.Background(), cmd) + require.NoError(t, err) + } + // add additional user with conflicting login where DOMAIN is upper case + conflictUsers, err := GetUsersWithConflictingEmailsOrLogins(&cli.Context{Context: context.Background()}, sqlStore) + require.NoError(t, err) + r := ConflictResolver{Store: sqlStore} + r.BuildConflictBlocks(conflictUsers, fmt.Sprintf) + require.NoError(t, err) + // validation to get newConflicts + // edited file + // b, err := os.ReadFile(tmpFile.Name()) + // mocked file input + b := tc.fileString + require.NoError(t, err) + validErr := getValidConflictUsers(&r, []byte(b)) + require.NoError(t, validErr) + + // test starts here + err = r.MergeConflictingUsers(context.Background()) + require.NoError(t, err) + } + } + }) +} + func TestMarshalConflictUser(t *testing.T) { - // TODO: add more testcases testCases := []struct { name string inputRow string expectedUser ConflictingUser - }{{ - name: "should be able to marshal expected input row", - inputRow: "+ id: 4, email: userduplicatetest1@test.com, login: userduplicatetest1@test.com, last_seen_at: 2012-07-26T16:08:11Z, auth_module:", - expectedUser: ConflictingUser{ - Direction: "+", - Id: "4", - Email: "userduplicatetest1@test.com", - Login: "userduplicatetest1@test.com", - LastSeenAt: "2012-07-26T16:08:11Z", - AuthModule: "", + }{ + { + name: "should be able to marshal expected input row", + inputRow: "+ id: 4, email: userduplicatetest1@test.com, login: userduplicatetest1, last_seen_at: 2012-07-26T16:08:11Z, auth_module: auth.saml, conflict_email: true, conflict_login: ", + expectedUser: ConflictingUser{ + Direction: "+", + ID: "4", + Email: "userduplicatetest1@test.com", + 
Login: "userduplicatetest1", + LastSeenAt: "2012-07-26T16:08:11Z", + AuthModule: "auth.saml", + ConflictEmail: "true", + ConflictLogin: "", + }, }, - }} - + { + name: "should be able to marshal expected input row", + inputRow: "+ id: 1, email: userduplicatetest1@test.com, login: user_duplicate_test_1_login, last_seen_at: 2012-07-26T16:08:11Z, auth_module: , conflict_email: , conflict_login: true", + expectedUser: ConflictingUser{ + Direction: "+", + ID: "1", + Email: "userduplicatetest1@test.com", + Login: "user_duplicate_test_1_login", + LastSeenAt: "2012-07-26T16:08:11Z", + AuthModule: "", + ConflictEmail: "", + ConflictLogin: "true", + }, + }, + } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { user := ConflictingUser{} err := user.Marshal(tc.inputRow) require.NoError(t, err) require.Equal(t, tc.expectedUser.Direction, user.Direction) - require.Equal(t, tc.expectedUser.Id, user.Id) + require.Equal(t, tc.expectedUser.ID, user.ID) require.Equal(t, tc.expectedUser.Email, user.Email) require.Equal(t, tc.expectedUser.Login, user.Login) require.Equal(t, tc.expectedUser.LastSeenAt, user.LastSeenAt) + require.Equal(t, tc.expectedUser.ConflictEmail, user.ConflictEmail) + require.Equal(t, tc.expectedUser.ConflictLogin, user.ConflictLogin) }) } } diff --git a/pkg/cmd/grafana-cli/commands/datamigrations/encrypt_datasource_passwords_test.go b/pkg/cmd/grafana-cli/commands/datamigrations/encrypt_datasource_passwords_test.go index 98b99de6424..114f5291111 100644 --- a/pkg/cmd/grafana-cli/commands/datamigrations/encrypt_datasource_passwords_test.go +++ b/pkg/cmd/grafana-cli/commands/datamigrations/encrypt_datasource_passwords_test.go @@ -17,10 +17,15 @@ import ( func TestPasswordMigrationCommand(t *testing.T) { // setup datasources with password, basic_auth and none - sqlstore := sqlstore.InitTestDB(t) - session := sqlstore.NewSession(context.Background()) - defer session.Close() + store := sqlstore.InitTestDB(t) + err := store.WithDbSession(context.Background(), func(sess *sqlstore.DBSession) error { + passwordMigration(t, sess, store) + return nil + }) + require.NoError(t, err) +} +func passwordMigration(t *testing.T, session *sqlstore.DBSession, sqlstore *sqlstore.SQLStore) { ds := []*datasources.DataSource{ {Type: "influxdb", Name: "influxdb", Password: "foobar", Uid: "influx"}, {Type: "graphite", Name: "graphite", BasicAuthPassword: "foobar", Uid: "graphite"}, diff --git a/pkg/codegen/astmanip_test.go b/pkg/codegen/astmanip_test.go new file mode 100644 index 00000000000..0f44a112f39 --- /dev/null +++ b/pkg/codegen/astmanip_test.go @@ -0,0 +1,263 @@ +package codegen + +import ( + "bytes" + "go/format" + "go/parser" + "go/token" + "testing" + + "github.com/matryer/is" + "golang.org/x/tools/go/ast/astutil" +) + +func TestPrefixDropper(t *testing.T) { + tt := map[string]struct { + in, out string + skip bool + }{ + "basic": { + in: `package foo + +type Foo struct { + Id int64 + Ref FooThing +} + +type FooThing struct { + Id int64 +}`, + out: `package foo + +type Model struct { + Id int64 + Ref Thing +} + +type Thing struct { + Id int64 +} +`, + }, + "pointer": { + in: `package foo + +type Foo struct { + Id int64 + Ref *FooThing +} + +type FooThing struct { + Id int64 +}`, + out: `package foo + +type Model struct { + Id int64 + Ref *Thing +} + +type Thing struct { + Id int64 +} +`, + }, + "sliceref": { + in: `package foo + +type Foo struct { + Id int64 + Ref []FooThing + PRef []*FooThing + SPRef *[]FooThing +} + +type FooThing struct { + Id int64 +}`, + out: `package foo + +type Model 
struct { + Id int64 + Ref []Thing + PRef []*Thing + SPRef *[]Thing +} + +type Thing struct { + Id int64 +} +`, + }, + "mapref": { + in: `package foo + +type Foo struct { + Id int64 + KeyRef map[FooThing]string + ValRef map[string]FooThing + BothRef map[FooThing]FooThing +} + +type FooThing struct { + Id int64 +}`, + out: `package foo + +type Model struct { + Id int64 + KeyRef map[Thing]string + ValRef map[string]Thing + BothRef map[Thing]Thing +} + +type Thing struct { + Id int64 +} +`, + }, + "pmapref": { + in: `package foo + +type Foo struct { + Id int64 + KeyRef map[*FooThing]string + ValRef map[string]*FooThing + BothRef map[*FooThing]*FooThing + PKeyRef *map[*FooThing]string +} + +type FooThing struct { + Id int64 +}`, + out: `package foo + +type Model struct { + Id int64 + KeyRef map[*Thing]string + ValRef map[string]*Thing + BothRef map[*Thing]*Thing + PKeyRef *map[*Thing]string +} + +type Thing struct { + Id int64 +} +`, + }, + "ignore-fieldname": { + in: `package foo + +type Foo struct { + Id int64 + FooRef []string +}`, + out: `package foo + +type Model struct { + Id int64 + FooRef []string +} +`, + }, + "const": { + in: `package foo + +const one FooThing = "boop" + +const ( + two FooThing = "boop" + three FooThing = "boop" +) + +type FooThing string +`, + out: `package foo + +const one Thing = "boop" + +const ( + two Thing = "boop" + three Thing = "boop" +) + +type Thing string +`, + }, + "var": { + in: `package foo + +var one FooThing = "boop" + +var ( + two FooThing = "boop" + three FooThing = "boop" +) + +type FooThing string +`, + out: `package foo + +var one Thing = "boop" + +var ( + two Thing = "boop" + three Thing = "boop" +) + +type Thing string +`, + }, + "varp": { + in: `package foo + +var one *FooThing = "boop" + +var ( + two []FooThing = []FooThing{"boop"} + three map[FooThing]string = map[FooThing]string{ "beep": "boop" } +) + +type FooThing string +`, + out: `package foo + +var one *Thing = "boop" + +var ( + two []Thing = []Thing{"boop"} + three map[Thing]string = map[Thing]string{ "beep": "boop" } +) + +type Thing string +`, + // Skip this one for now - there's currently no codegen that constructs instances + // of objects, only types, so we shouldn't encounter this case. 
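+			// (if such codegen is added, composite literal types like []FooThing{...}
+			// would also need handling in prefixDropper)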
+ skip: true, + }, + } + + for name, it := range tt { + item := it + t.Run(name, func(t *testing.T) { + if item.skip { + t.Skip() + } + is := is.New(t) + fset := token.NewFileSet() + inf, err := parser.ParseFile(fset, "input.go", item.in, parser.ParseComments) + if err != nil { + t.Fatal(err) + } + + drop := makePrefixDropper("Foo", "Model") + astutil.Apply(inf, drop, nil) + buf := new(bytes.Buffer) + err = format.Node(buf, fset, inf) + if err != nil { + t.Fatal(err) + } + is.Equal(item.out, buf.String()) + }) + } +} diff --git a/pkg/codegen/coremodel.go b/pkg/codegen/coremodel.go index 07676ef1a24..d70ae77d33e 100644 --- a/pkg/codegen/coremodel.go +++ b/pkg/codegen/coremodel.go @@ -21,6 +21,7 @@ import ( "github.com/grafana/grafana/pkg/cuectx" "github.com/grafana/thema" "github.com/grafana/thema/encoding/openapi" + "golang.org/x/tools/go/ast/astutil" ) // CoremodelDeclaration contains the results of statically analyzing a Grafana @@ -279,30 +280,77 @@ type prefixDropper struct { rxpsuff *regexp.Regexp } -func makePrefixDropper(str, base string) prefixDropper { - return prefixDropper{ +func makePrefixDropper(str, base string) astutil.ApplyFunc { + return (&prefixDropper{ str: str, base: base, rxpsuff: regexp.MustCompile(fmt.Sprintf(`%s([a-zA-Z_]*)`, str)), rxp: regexp.MustCompile(fmt.Sprintf(`%s([\s.,;-])`, str)), - } + }).applyfunc } -func (d prefixDropper) Visit(n ast.Node) ast.Visitor { +func depoint(e ast.Expr) ast.Expr { + if star, is := e.(*ast.StarExpr); is { + return star.X + } + return e +} + +func (d prefixDropper) applyfunc(c *astutil.Cursor) bool { + n := c.Node() + + // fmt.Printf("%T %s\n", c.Node(), ast.Print(nil, c.Node())) switch x := n.(type) { - case *ast.Ident: - if x.Name != d.str { - x.Name = strings.TrimPrefix(x.Name, d.str) - } else { - x.Name = d.base + case *ast.ValueSpec: + // fmt.Printf("%T %s\n", c.Node(), ast.Print(nil, c.Node())) + d.handleExpr(x.Type) + for _, id := range x.Names { + d.do(id) } + case *ast.TypeSpec: + // Always do typespecs + d.do(x.Name) + case *ast.Field: + // Don't rename struct fields. We just want to rename type declarations, and + // field value specifications that reference those types. 
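+ // handling only x.Type keeps the field names themselves (e.g. FooRef)
+ // untouched; see the "ignore-fieldname" test case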
+ d.handleExpr(x.Type)
+ // return false
+
 case *ast.CommentGroup:
 for _, c := range x.List {
 c.Text = d.rxp.ReplaceAllString(c.Text, d.base+"$1")
 c.Text = d.rxpsuff.ReplaceAllString(c.Text, "$1")
 }
 }
- return d
+ return true
+}
+
+func (d prefixDropper) handleExpr(e ast.Expr) {
+ // Deref a StarExpr, if there is one
+ expr := depoint(e)
+ switch x := expr.(type) {
+ case *ast.Ident:
+ d.do(x)
+ case *ast.ArrayType:
+ if id, is := depoint(x.Elt).(*ast.Ident); is {
+ d.do(id)
+ }
+ case *ast.MapType:
+ if id, is := depoint(x.Key).(*ast.Ident); is {
+ d.do(id)
+ }
+ if id, is := depoint(x.Value).(*ast.Ident); is {
+ d.do(id)
+ }
+ }
+}
+
+func (d prefixDropper) do(n *ast.Ident) {
+ if n.Name != d.str {
+ n.Name = strings.TrimPrefix(n.Name, d.str)
+ } else {
+ n.Name = d.base
+ }
 }

 // GenerateCoremodelRegistry produces Go files that define a registry with
diff --git a/pkg/codegen/util_go.go b/pkg/codegen/util_go.go
index 525b764298c..9033e29b968 100644
--- a/pkg/codegen/util_go.go
+++ b/pkg/codegen/util_go.go
@@ -3,7 +3,6 @@ package codegen
 import (
 "bytes"
 "fmt"
- "go/ast"
 "go/format"
 "go/parser"
 "go/token"
@@ -11,12 +10,13 @@ import (
 "path/filepath"
 "strings"

+ "golang.org/x/tools/go/ast/astutil"
 "golang.org/x/tools/imports"
 )

 type genGoFile struct {
 path string
- walker ast.Visitor
+ walker astutil.ApplyFunc
 in []byte
 }

@@ -30,7 +30,7 @@ func postprocessGoFile(cfg genGoFile) ([]byte, error) {
 }

 if cfg.walker != nil {
- ast.Walk(cfg.walker, gf)
+ astutil.Apply(gf, cfg.walker, nil)

 err = format.Node(buf, fset, gf)
 if err != nil {
diff --git a/pkg/coremodel/playlist/coremodel.cue b/pkg/coremodel/playlist/coremodel.cue
new file mode 100644
index 00000000000..15a1a3c2422
--- /dev/null
+++ b/pkg/coremodel/playlist/coremodel.cue
@@ -0,0 +1,54 @@
+package playlist
+
+import (
+ "github.com/grafana/thema"
+)
+
+thema.#Lineage
+name: "playlist"
+seqs: [
+ {
+ schemas: [
+ {//0.0
+ // Unique playlist identifier for internal use, set by Grafana.
+ id: int64 @grafana(decisionNeeded)
+ // Unique playlist identifier. Generated on creation, either by the
+ // creator of the playlist or by the application.
+ uid: string
+ // Name of the playlist.
+ name: string
+ // Interval sets the time between switching views in a playlist.
+ // FIXME: Is this based on a standardized format or what options are available? Can datemath be used?
+ interval: string | *"5m"
+ // The ordered list of items that the playlist will iterate over.
+ items?: [...#PlaylistItem]
+
+ ///////////////////////////////////////
+ // Definitions (referenced above) are declared below
+
+ #PlaylistItem: {
+ // FIXME: The prefixDropper removes playlist from playlist_id, that doesn't work for us since it'll mean we'll have Id twice.
+ // ID of the playlist item for internal use by Grafana. Deprecated.
+ id: int64 @grafana(decisionNeeded)
+ // PlaylistID for the playlist containing the item. Deprecated.
+ playlistid: int64 @grafana(decisionNeeded)
+
+ // Type of the item.
+ type: "dashboard_by_uid" | "dashboard_by_id" | "dashboard_by_tag"
+ // Value depends on type and describes the playlist item.
+ //
+ // - dashboard_by_id: The value is an internal numerical identifier set by Grafana. This
+ // is not portable as the numerical identifier is non-deterministic between different instances.
+ // Will be replaced by dashboard_by_uid in the future.
+ // - dashboard_by_tag: The value is a tag which is set on any number of dashboards. All
+ // dashboards behind the tag will be added to the playlist.
+ value: string
+ // Title is the human-readable identifier for the playlist item.
+ title: string @grafana(decisionNeeded)
+ // Order is the position in the list for the item. Deprecated.
+ order: int64 | *0 @grafana(decisionNeeded)
+ }
+ }
+ ]
+ }
+]
diff --git a/pkg/coremodel/playlist/playlist_gen.go b/pkg/coremodel/playlist/playlist_gen.go
new file mode 100644
index 00000000000..d885aef5c8b
--- /dev/null
+++ b/pkg/coremodel/playlist/playlist_gen.go
@@ -0,0 +1,147 @@
+// This file is autogenerated. DO NOT EDIT.
+//
+// Generated by pkg/framework/coremodel/gen.go
+//
+// Derived from the Thema lineage declared in pkg/coremodel/playlist/coremodel.cue
+//
+// Run `make gen-cue` from repository root to regenerate.
+
+package playlist
+
+import (
+ "embed"
+ "path/filepath"
+
+ "github.com/grafana/grafana/pkg/cuectx"
+ "github.com/grafana/grafana/pkg/framework/coremodel"
+ "github.com/grafana/thema"
+)
+
+// Defines values for PlaylistItemType.
+const (
+ PlaylistItemTypeDashboardById PlaylistItemType = "dashboard_by_id"
+
+ PlaylistItemTypeDashboardByTag PlaylistItemType = "dashboard_by_tag"
+
+ PlaylistItemTypeDashboardByUid PlaylistItemType = "dashboard_by_uid"
+)
+
+// Model is the Go representation of a playlist.
+//
+// THIS TYPE IS INTENDED FOR INTERNAL USE BY THE GRAFANA BACKEND, AND IS SUBJECT TO BREAKING CHANGES.
+// Equivalent Go types at stable import paths are provided in https://github.com/grafana/grok.
+type Model struct {
+ // Unique playlist identifier for internal use, set by Grafana.
+ Id int64 `json:"id"`
+
+ // Interval sets the time between switching views in a playlist.
+ // FIXME: Is this based on a standardized format or what options are available? Can datemath be used?
+ Interval string `json:"interval"`
+
+ // The ordered list of items that the playlist will iterate over.
+ Items *[]PlaylistItem `json:"items,omitempty"`
+
+ // Name of the playlist.
+ Name string `json:"name"`
+
+ // Unique playlist identifier. Generated on creation, either by the
+ // creator of the playlist or by the application.
+ Uid string `json:"uid"`
+}
+
+// PlaylistItem is the Go representation of a playlist.Item.
+//
+// THIS TYPE IS INTENDED FOR INTERNAL USE BY THE GRAFANA BACKEND, AND IS SUBJECT TO BREAKING CHANGES.
+// Equivalent Go types at stable import paths are provided in https://github.com/grafana/grok.
+type PlaylistItem struct {
+ // FIXME: The prefixDropper removes playlist from playlist_id, that doesn't work for us since it'll mean we'll have Id twice.
+ // ID of the playlist item for internal use by Grafana. Deprecated.
+ Id int64 `json:"id"`
+
+ // Order is the position in the list for the item. Deprecated.
+ Order int `json:"order"`
+
+ // ID for the playlist containing the item. Deprecated.
+ Playlistid int64 `json:"playlistid"`
+
+ // Title is the human-readable identifier for the playlist item.
+ Title string `json:"title"`
+
+ // Type of the item.
+ Type PlaylistItemType `json:"type"`
+
+ // Value depends on type and describes the playlist item.
+ //
+ // - dashboard_by_id: The value is an internal numerical identifier set by Grafana. This
+ // is not portable as the numerical identifier is non-deterministic between different instances.
+ // Will be replaced by dashboard_by_uid in the future.
+ // - dashboard_by_tag: The value is a tag which is set on any number of dashboards. All
+ // dashboards behind the tag will be added to the playlist.
+ Value string `json:"value"`
+}
+
+// Type of the item.
+// +// THIS TYPE IS INTENDED FOR INTERNAL USE BY THE GRAFANA BACKEND, AND IS SUBJECT TO BREAKING CHANGES. +// Equivalent Go types at stable import paths are provided in https://github.com/grafana/grok. +type PlaylistItemType string + +//go:embed coremodel.cue +var cueFS embed.FS + +// The current version of the coremodel schema, as declared in coremodel.cue. +// This version determines what schema version is returned from [Coremodel.CurrentSchema], +// and which schema version is used for code generation within the grafana/grafana repository. +// +// The code generator ensures that this is always the latest Thema schema version. +var currentVersion = thema.SV(0, 0) + +// Lineage returns the Thema lineage representing a Grafana playlist. +// +// The lineage is the canonical specification of the current playlist schema, +// all prior schema versions, and the mappings that allow migration between +// schema versions. +func Lineage(lib thema.Library, opts ...thema.BindOption) (thema.Lineage, error) { + return cuectx.LoadGrafanaInstancesWithThema(filepath.Join("pkg", "coremodel", "playlist"), cueFS, lib, opts...) +} + +var _ thema.LineageFactory = Lineage +var _ coremodel.Interface = &Coremodel{} + +// Coremodel contains the foundational schema declaration for playlists. +// It implements coremodel.Interface. +type Coremodel struct { + lin thema.Lineage +} + +// Lineage returns the canonical playlist Lineage. +func (c *Coremodel) Lineage() thema.Lineage { + return c.lin +} + +// CurrentSchema returns the current (latest) playlist Thema schema. +func (c *Coremodel) CurrentSchema() thema.Schema { + return thema.SchemaP(c.lin, currentVersion) +} + +// GoType returns a pointer to an empty Go struct that corresponds to +// the current Thema schema. +func (c *Coremodel) GoType() interface{} { + return &Model{} +} + +// New returns a new instance of the playlist coremodel. +// +// Note that this function does not cache, and initially loading a Thema lineage +// can be expensive. As such, the Grafana backend should prefer to access this +// coremodel through a registry (pkg/framework/coremodel/registry), which does cache. +func New(lib thema.Library) (*Coremodel, error) { + lin, err := Lineage(lib) + if err != nil { + return nil, err + } + + return &Coremodel{ + lin: lin, + }, nil +} diff --git a/pkg/framework/coremodel/registry/registry_gen.go b/pkg/framework/coremodel/registry/registry_gen.go index c198ef8647f..74ac7b9d88d 100644 --- a/pkg/framework/coremodel/registry/registry_gen.go +++ b/pkg/framework/coremodel/registry/registry_gen.go @@ -10,6 +10,7 @@ import ( "fmt" "github.com/grafana/grafana/pkg/coremodel/dashboard" + "github.com/grafana/grafana/pkg/coremodel/playlist" "github.com/grafana/grafana/pkg/coremodel/pluginmeta" "github.com/grafana/grafana/pkg/framework/coremodel" "github.com/grafana/thema" @@ -27,12 +28,14 @@ import ( type Base struct { all []coremodel.Interface dashboard *dashboard.Coremodel + playlist *playlist.Coremodel pluginmeta *pluginmeta.Coremodel } // type guards var ( _ coremodel.Interface = &dashboard.Coremodel{} + _ coremodel.Interface = &playlist.Coremodel{} _ coremodel.Interface = &pluginmeta.Coremodel{} ) @@ -42,6 +45,12 @@ func (b *Base) Dashboard() *dashboard.Coremodel { return b.dashboard } +// Playlist returns the playlist coremodel. The return value is guaranteed to +// implement coremodel.Interface. +func (b *Base) Playlist() *playlist.Coremodel { + return b.playlist +} + // Pluginmeta returns the pluginmeta coremodel. 
The return value is guaranteed to // implement coremodel.Interface. func (b *Base) Pluginmeta() *pluginmeta.Coremodel { @@ -58,6 +67,12 @@ func doProvideBase(lib thema.Library) *Base { } reg.all = append(reg.all, reg.dashboard) + reg.playlist, err = playlist.New(lib) + if err != nil { + panic(fmt.Sprintf("error while initializing playlist coremodel: %s", err)) + } + reg.all = append(reg.all, reg.playlist) + reg.pluginmeta, err = pluginmeta.New(lib) if err != nil { panic(fmt.Sprintf("error while initializing pluginmeta coremodel: %s", err)) diff --git a/pkg/infra/remotecache/database_storage.go b/pkg/infra/remotecache/database_storage.go index d9ff8702b01..ddba108693a 100644 --- a/pkg/infra/remotecache/database_storage.go +++ b/pkg/infra/remotecache/database_storage.go @@ -54,36 +54,38 @@ func (dc *databaseCache) internalRunGC() { func (dc *databaseCache) Get(ctx context.Context, key string) (interface{}, error) { cacheHit := CacheData{} - session := dc.SQLStore.NewSession(ctx) - defer session.Close() - - exist, err := session.Where("cache_key= ?", key).Get(&cacheHit) - - if err != nil { - return nil, err - } - - if !exist { - return nil, ErrCacheItemNotFound - } - - if cacheHit.Expires > 0 { - existedButExpired := getTime().Unix()-cacheHit.CreatedAt >= cacheHit.Expires - if existedButExpired { - err = dc.Delete(ctx, key) // ignore this error since we will return `ErrCacheItemNotFound` anyway - if err != nil { - dc.log.Debug("Deletion of expired key failed: %v", err) - } - return nil, ErrCacheItemNotFound - } - } item := &cachedItem{} - if err = decodeGob(cacheHit.Data, item); err != nil { - return nil, err - } + err := dc.SQLStore.WithDbSession(ctx, func(session *sqlstore.DBSession) error { + exist, err := session.Where("cache_key= ?", key).Get(&cacheHit) - return item.Val, nil + if err != nil { + return err + } + + if !exist { + return ErrCacheItemNotFound + } + + if cacheHit.Expires > 0 { + existedButExpired := getTime().Unix()-cacheHit.CreatedAt >= cacheHit.Expires + if existedButExpired { + err = dc.Delete(ctx, key) // ignore this error since we will return `ErrCacheItemNotFound` anyway + if err != nil { + dc.log.Debug("Deletion of expired key failed: %v", err) + } + return ErrCacheItemNotFound + } + } + + if err = decodeGob(cacheHit.Data, item); err != nil { + return err + } + + return nil + }) + + return item.Val, err } func (dc *databaseCache) Set(ctx context.Context, key string, value interface{}, expire time.Duration) error { @@ -93,34 +95,33 @@ func (dc *databaseCache) Set(ctx context.Context, key string, value interface{}, return err } - session := dc.SQLStore.NewSession(context.Background()) - defer session.Close() + return dc.SQLStore.WithDbSession(ctx, func(session *sqlstore.DBSession) error { + var expiresInSeconds int64 + if expire != 0 { + expiresInSeconds = int64(expire) / int64(time.Second) + } - var expiresInSeconds int64 - if expire != 0 { - expiresInSeconds = int64(expire) / int64(time.Second) - } - - // attempt to insert the key - sql := `INSERT INTO cache_data (cache_key,data,created_at,expires) VALUES(?,?,?,?)` - _, err = session.Exec(sql, key, data, getTime().Unix(), expiresInSeconds) - if err != nil { - // attempt to update if a unique constrain violation or a deadlock (for MySQL) occurs - // if the update fails propagate the error - // which eventually will result in a key that is not finally set - // but since it's a cache does not harm a lot - if dc.SQLStore.Dialect.IsUniqueConstraintViolation(err) || dc.SQLStore.Dialect.IsDeadlock(err) { - sql := `UPDATE 
cache_data SET data=?, created_at=?, expires=? WHERE cache_key=?`
- _, err = session.Exec(sql, data, getTime().Unix(), expiresInSeconds, key)
- if err != nil && dc.SQLStore.Dialect.IsDeadlock(err) {
- // most probably somebody else is upserting the key
- // so it is safe enough not to propagate this error
- return nil
+ // attempt to insert the key
+ sql := `INSERT INTO cache_data (cache_key,data,created_at,expires) VALUES(?,?,?,?)`
+ _, err := session.Exec(sql, key, data, getTime().Unix(), expiresInSeconds)
+ if err != nil {
+ // attempt to update if a unique constraint violation or a deadlock (for MySQL) occurs
+ // if the update fails propagate the error
+ // which eventually will result in a key that is not finally set
+ // but since it's a cache this does not do much harm
+ if dc.SQLStore.Dialect.IsUniqueConstraintViolation(err) || dc.SQLStore.Dialect.IsDeadlock(err) {
+ sql := `UPDATE cache_data SET data=?, created_at=?, expires=? WHERE cache_key=?`
+ _, err = session.Exec(sql, data, getTime().Unix(), expiresInSeconds, key)
+ if err != nil && dc.SQLStore.Dialect.IsDeadlock(err) {
+ // most probably somebody else is upserting the key
+ // so it is safe enough not to propagate this error
+ return nil
+ }
+ }
- }
- return err
+ return err
+ })
 }

 func (dc *databaseCache) Delete(ctx context.Context, key string) error {
diff --git a/pkg/infra/tracing/opentelemetry_tracing.go b/pkg/infra/tracing/opentelemetry_tracing.go
index b51dddac107..1774e33458f 100644
--- a/pkg/infra/tracing/opentelemetry_tracing.go
+++ b/pkg/infra/tracing/opentelemetry_tracing.go
@@ -8,8 +8,6 @@ import (
 "time"

 "github.com/go-kit/log/level"
- "github.com/grafana/grafana/pkg/infra/log"
- "github.com/grafana/grafana/pkg/setting"
 "go.etcd.io/etcd/api/v3/version"
 jaegerpropagator "go.opentelemetry.io/contrib/propagators/jaeger"
 "go.opentelemetry.io/otel"
@@ -23,6 +21,9 @@ import (
 tracesdk "go.opentelemetry.io/otel/sdk/trace"
 semconv "go.opentelemetry.io/otel/semconv/v1.4.0"
 trace "go.opentelemetry.io/otel/trace"
+
+ "github.com/grafana/grafana/pkg/infra/log"
+ "github.com/grafana/grafana/pkg/setting"
 )

 const (
@@ -34,21 +35,6 @@ const (
 w3cPropagator string = "w3c"
 )

-type Tracer interface {
- Run(context.Context) error
- Start(ctx context.Context, spanName string, opts ...trace.SpanStartOption) (context.Context, Span)
- Inject(context.Context, http.Header, Span)
-}
-
-type Span interface {
- End()
- SetAttributes(key string, value interface{}, kv attribute.KeyValue)
- SetName(name string)
- SetStatus(code codes.Code, description string)
- RecordError(err error, options ...trace.EventOption)
- AddEvents(keys []string, values []EventValue)
-}
-
 type Opentelemetry struct {
 enabled string
 address string
@@ -307,9 +293,7 @@ func (s OpentelemetrySpan) SetStatus(code codes.Code, description string) {
 }

 func (s OpentelemetrySpan) RecordError(err error, options ...trace.EventOption) {
- for _, o := range options {
- s.span.RecordError(err, o)
- }
+ s.span.RecordError(err, options...)
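+ // passing the options variadically records a single error event that
+ // carries all options, instead of one event per option as before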
}

 func (s OpentelemetrySpan) AddEvents(keys []string, values []EventValue) {
diff --git a/pkg/infra/tracing/tracing.go b/pkg/infra/tracing/tracing.go
index e125f0e501b..82c53eff6a1 100644
--- a/pkg/infra/tracing/tracing.go
+++ b/pkg/infra/tracing/tracing.go
@@ -8,9 +8,6 @@ import (
 "os"
 "strings"

- "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger"
- "github.com/grafana/grafana/pkg/infra/log"
- "github.com/grafana/grafana/pkg/setting"
 opentracing "github.com/opentracing/opentracing-go"
 "github.com/opentracing/opentracing-go/ext"
 ol "github.com/opentracing/opentracing-go/log"
@@ -20,6 +17,10 @@ import (
 "go.opentelemetry.io/otel/attribute"
 "go.opentelemetry.io/otel/codes"
 trace "go.opentelemetry.io/otel/trace"
+
+ "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger"
+ "github.com/grafana/grafana/pkg/infra/log"
+ "github.com/grafana/grafana/pkg/setting"
 )

 const (
@@ -27,6 +28,56 @@ const (
 envJaegerAgentPort = "JAEGER_AGENT_PORT"
 )

+// Tracer defines the service used to create new spans.
+type Tracer interface {
+ // Run implements registry.BackgroundService.
+ Run(context.Context) error
+ // Start creates a new [Span] and places trace metadata on the
+ // [context.Context] passed to the method.
+ // Choose a low cardinality spanName and use [Span.SetAttributes]
+ // or [Span.AddEvents] for high cardinality data.
+ Start(ctx context.Context, spanName string, opts ...trace.SpanStartOption) (context.Context, Span)
+ // Inject adds identifying information for the span to the
+ // headers defined in [http.Header] map (this mutates http.Header).
+ //
+ // Implementation quirk: Where OpenTelemetry is used, the [Span] is
+ // picked up from [context.Context] and for OpenTracing the
+ // information passed as [Span] is preferred.
+ // Both the context and span must be derived from the same call to
+ // [Tracer.Start].
+ Inject(context.Context, http.Header, Span)
+}
+
+// Span defines a time range for an operation. This is equivalent to a
+// single line in a flame graph.
+type Span interface {
+ // End finalizes the Span and adds its end timestamp.
+ // Any further operations on the Span are not permitted after
+ // End has been called.
+ End()
+ // SetAttributes adds additional data to a span.
+ // SetAttributes takes the key value pair twice: as [string] and [any],
+ // which is used for OpenTracing, and as [attribute.KeyValue], which is
+ // used for OpenTelemetry.
+ SetAttributes(key string, value interface{}, kv attribute.KeyValue)
+ // SetName renames the span.
+ SetName(name string)
+ // SetStatus can be used to indicate whether the span was
+ // successfully or unsuccessfully executed.
+ //
+ // Only useful for OpenTelemetry.
+ SetStatus(code codes.Code, description string)
+ // RecordError adds an error to the span.
+ //
+ // Only useful for OpenTelemetry.
+ RecordError(err error, options ...trace.EventOption)
+ // AddEvents adds additional data with a temporal dimension to the
+ // span.
+ //
+ // Panics if the length of keys is shorter than the length of values.
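+ // Keys and values are consumed pairwise by index.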
+ AddEvents(keys []string, values []EventValue) +} + func ProvideService(cfg *setting.Cfg) (Tracer, error) { ts, ots, err := parseSettings(cfg) if err != nil { @@ -239,7 +290,9 @@ func (s OpentracingSpan) SetName(name string) { } func (s OpentracingSpan) SetStatus(code codes.Code, description string) { - ext.Error.Set(s.span, true) + if code == codes.Error { + ext.Error.Set(s.span, true) + } } func (s OpentracingSpan) RecordError(err error, options ...trace.EventOption) { diff --git a/pkg/server/backgroundsvcs/background_services.go b/pkg/server/backgroundsvcs/background_services.go index 5862715661e..03aa3837eef 100644 --- a/pkg/server/backgroundsvcs/background_services.go +++ b/pkg/server/backgroundsvcs/background_services.go @@ -51,6 +51,7 @@ func ProvideBackgroundServiceRegistry( _ serviceaccounts.Service, _ *guardian.Provider, _ *plugindashboardsservice.DashboardUpdater, _ *sanitizer.Provider, _ *grpcserver.HealthService, + _ *grpcserver.ReflectionService, ) *BackgroundServiceRegistry { return NewBackgroundServiceRegistry( httpServer, diff --git a/pkg/server/test_env.go b/pkg/server/test_env.go index 5256cab34cb..2bc16f2aab7 100644 --- a/pkg/server/test_env.go +++ b/pkg/server/test_env.go @@ -1,16 +1,18 @@ package server import ( + "github.com/grafana/grafana/pkg/services/grpcserver" "github.com/grafana/grafana/pkg/services/notifications" "github.com/grafana/grafana/pkg/services/sqlstore" ) -func ProvideTestEnv(server *Server, store *sqlstore.SQLStore, ns *notifications.NotificationServiceMock) (*TestEnv, error) { - return &TestEnv{server, store, ns}, nil +func ProvideTestEnv(server *Server, store *sqlstore.SQLStore, ns *notifications.NotificationServiceMock, grpcServer grpcserver.Provider) (*TestEnv, error) { + return &TestEnv{server, store, ns, grpcServer}, nil } type TestEnv struct { Server *Server SQLStore *sqlstore.SQLStore NotificationService *notifications.NotificationServiceMock + GRPCServer grpcserver.Provider } diff --git a/pkg/server/wire.go b/pkg/server/wire.go index 36b61905181..109d1619b9a 100644 --- a/pkg/server/wire.go +++ b/pkg/server/wire.go @@ -344,6 +344,7 @@ var wireBasicSet = wire.NewSet( orgimpl.ProvideService, grpcserver.ProvideService, grpcserver.ProvideHealthService, + grpcserver.ProvideReflectionService, teamimpl.ProvideService, tempuserimpl.ProvideService, loginattemptimpl.ProvideService, diff --git a/pkg/services/accesscontrol/database/database.go b/pkg/services/accesscontrol/database/database.go index 7856232ed33..dcf967751ad 100644 --- a/pkg/services/accesscontrol/database/database.go +++ b/pkg/services/accesscontrol/database/database.go @@ -49,7 +49,9 @@ func (s *AccessControlStore) GetUserPermissions(ctx context.Context, query acces params = append(params, a) } } - + q += ` + ORDER BY permission.scope + ` if err := sess.SQL(q, params...).Find(&result); err != nil { return err } diff --git a/pkg/services/accesscontrol/filter_bench_test.go b/pkg/services/accesscontrol/filter_bench_test.go index 7cd5b788d63..1e4e930c07e 100644 --- a/pkg/services/accesscontrol/filter_bench_test.go +++ b/pkg/services/accesscontrol/filter_bench_test.go @@ -43,10 +43,10 @@ func benchmarkFilter(b *testing.B, numDs, numPermissions int) { require.NoError(b, err) var datasources []datasources.DataSource - sess := store.NewSession(context.Background()) - err = sess.SQL(baseSql+acFilter.Where, acFilter.Args...).Find(&datasources) + err = store.WithDbSession(context.Background(), func(sess *sqlstore.DBSession) error { + return sess.SQL(baseSql+acFilter.Where, 
acFilter.Args...).Find(&datasources) + }) require.NoError(b, err) - sess.Close() require.Len(b, datasources, numPermissions) } } diff --git a/pkg/services/accesscontrol/filter_test.go b/pkg/services/accesscontrol/filter_test.go index cb343a8ae5d..a7bb1d059b1 100644 --- a/pkg/services/accesscontrol/filter_test.go +++ b/pkg/services/accesscontrol/filter_test.go @@ -168,40 +168,41 @@ func TestFilter_Datasources(t *testing.T) { t.Run(tt.desc, func(t *testing.T) { store := sqlstore.InitTestDB(t) - sess := store.NewSession(context.Background()) - defer sess.Close() - - // seed 10 data sources - for i := 1; i <= 10; i++ { - dsStore := dsService.CreateStore(store, log.New("accesscontrol.test")) - err := dsStore.AddDataSource(context.Background(), &datasources.AddDataSourceCommand{Name: fmt.Sprintf("ds:%d", i), Uid: fmt.Sprintf("uid%d", i)}) - require.NoError(t, err) - } - - baseSql := `SELECT data_source.* FROM data_source WHERE` - acFilter, err := accesscontrol.Filter( - &user.SignedInUser{ - OrgID: 1, - Permissions: map[int64]map[string][]string{1: tt.permissions}, - }, - tt.sqlID, - tt.prefix, - tt.actions..., - ) - - if !tt.expectErr { - require.NoError(t, err) - var datasources []datasources.DataSource - err = sess.SQL(baseSql+acFilter.Where, acFilter.Args...).Find(&datasources) - require.NoError(t, err) - - assert.Len(t, datasources, len(tt.expectedDataSources)) - for i, ds := range datasources { - assert.Equal(t, tt.expectedDataSources[i], ds.Name) + err := store.WithDbSession(context.Background(), func(sess *sqlstore.DBSession) error { + // seed 10 data sources + for i := 1; i <= 10; i++ { + dsStore := dsService.CreateStore(store, log.New("accesscontrol.test")) + err := dsStore.AddDataSource(context.Background(), &datasources.AddDataSourceCommand{Name: fmt.Sprintf("ds:%d", i), Uid: fmt.Sprintf("uid%d", i)}) + require.NoError(t, err) } - } else { - require.Error(t, err) - } + + baseSql := `SELECT data_source.* FROM data_source WHERE` + acFilter, err := accesscontrol.Filter( + &user.SignedInUser{ + OrgID: 1, + Permissions: map[int64]map[string][]string{1: tt.permissions}, + }, + tt.sqlID, + tt.prefix, + tt.actions..., + ) + + if !tt.expectErr { + require.NoError(t, err) + var datasources []datasources.DataSource + err = sess.SQL(baseSql+acFilter.Where, acFilter.Args...).Find(&datasources) + require.NoError(t, err) + + assert.Len(t, datasources, len(tt.expectedDataSources)) + for i, ds := range datasources { + assert.Equal(t, tt.expectedDataSources[i], ds.Name) + } + } else { + require.Error(t, err) + } + return nil + }) + require.NoError(t, err) }) } } diff --git a/pkg/services/alerting/store_notification.go b/pkg/services/alerting/store_notification.go index 9423108896a..9d9cca60db1 100644 --- a/pkg/services/alerting/store_notification.go +++ b/pkg/services/alerting/store_notification.go @@ -58,7 +58,9 @@ func (ss *sqlStore) DeleteAlertNotification(ctx context.Context, cmd *models.Del func (ss *sqlStore) DeleteAlertNotificationWithUid(ctx context.Context, cmd *models.DeleteAlertNotificationWithUidCommand) error { existingNotification := &models.GetAlertNotificationsWithUidQuery{OrgId: cmd.OrgId, Uid: cmd.Uid} - if err := getAlertNotificationWithUidInternal(ctx, existingNotification, ss.db.NewSession(ctx)); err != nil { + if err := ss.db.WithDbSession(ctx, func(sess *sqlstore.DBSession) error { + return getAlertNotificationWithUidInternal(ctx, existingNotification, sess) + }); err != nil { return err } @@ -79,7 +81,9 @@ func (ss *sqlStore) DeleteAlertNotificationWithUid(ctx context.Context, 
cmd *mod } func (ss *sqlStore) GetAlertNotifications(ctx context.Context, query *models.GetAlertNotificationsQuery) error { - return getAlertNotificationInternal(ctx, query, ss.db.NewSession(ctx)) + return ss.db.WithDbSession(ctx, func(sess *sqlstore.DBSession) error { + return getAlertNotificationInternal(ctx, query, sess) + }) } func (ss *sqlStore) GetAlertNotificationUidWithId(ctx context.Context, query *models.GetAlertNotificationUidQuery) error { @@ -90,8 +94,9 @@ func (ss *sqlStore) GetAlertNotificationUidWithId(ctx context.Context, query *mo return nil } - err := getAlertNotificationUidInternal(ctx, query, ss.db.NewSession(ctx)) - if err != nil { + if err := ss.db.WithDbSession(ctx, func(sess *sqlstore.DBSession) error { + return getAlertNotificationUidInternal(ctx, query, sess) + }); err != nil { return err } @@ -105,7 +110,9 @@ func newAlertNotificationUidCacheKey(orgID, notificationId int64) string { } func (ss *sqlStore) GetAlertNotificationsWithUid(ctx context.Context, query *models.GetAlertNotificationsWithUidQuery) error { - return getAlertNotificationWithUidInternal(ctx, query, ss.db.NewSession(ctx)) + return ss.db.WithDbSession(ctx, func(sess *sqlstore.DBSession) error { + return getAlertNotificationWithUidInternal(ctx, query, sess) + }) } func (ss *sqlStore) GetAllAlertNotifications(ctx context.Context, query *models.GetAllAlertNotificationsQuery) error { @@ -444,7 +451,9 @@ func (ss *sqlStore) UpdateAlertNotification(ctx context.Context, cmd *models.Upd func (ss *sqlStore) UpdateAlertNotificationWithUid(ctx context.Context, cmd *models.UpdateAlertNotificationWithUidCommand) error { getAlertNotificationWithUidQuery := &models.GetAlertNotificationsWithUidQuery{OrgId: cmd.OrgId, Uid: cmd.Uid} - if err := getAlertNotificationWithUidInternal(ctx, getAlertNotificationWithUidQuery, ss.db.NewSession(ctx)); err != nil { + if err := ss.db.WithDbSession(ctx, func(sess *sqlstore.DBSession) error { + return getAlertNotificationWithUidInternal(ctx, getAlertNotificationWithUidQuery, sess) + }); err != nil { return err } diff --git a/pkg/services/annotations/annotationsimpl/cleanup_test.go b/pkg/services/annotations/annotationsimpl/cleanup_test.go index 844d7855157..baaaeac5a7c 100644 --- a/pkg/services/annotations/annotationsimpl/cleanup_test.go +++ b/pkg/services/annotations/annotationsimpl/cleanup_test.go @@ -132,57 +132,62 @@ func TestOldAnnotationsAreDeletedFirst(t *testing.T) { Created: time.Now().AddDate(-10, 0, -10).UnixNano() / int64(time.Millisecond), } - session := fakeSQL.NewSession(context.Background()) - defer session.Close() + err := fakeSQL.WithDbSession(context.Background(), func(sess *sqlstore.DBSession) error { + _, err := sess.Insert(a) + require.NoError(t, err, "cannot insert annotation") + _, err = sess.Insert(a) + require.NoError(t, err, "cannot insert annotation") - _, err := session.Insert(a) - require.NoError(t, err, "cannot insert annotation") - _, err = session.Insert(a) - require.NoError(t, err, "cannot insert annotation") + a.AlertId = 20 + _, err = sess.Insert(a) + require.NoError(t, err, "cannot insert annotation") - a.AlertId = 20 - _, err = session.Insert(a) - require.NoError(t, err, "cannot insert annotation") + // run the cleanup task to keep one annotation.
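The store_notification.go and cleanup_test.go hunks above all apply one refactor: call sites stop creating a `DBSession` by hand with `NewSession` (and having to remember `defer sess.Close()`) and instead pass a callback to `WithDbSession`, which owns the session lifecycle. A minimal, self-contained sketch of that inversion, using hypothetical stand-in types rather than the real `sqlstore` package:

```go
package main

import (
	"context"
	"fmt"
)

// Session and Store are hypothetical stand-ins for sqlstore.DBSession and
// SQLStore, reduced to the lifecycle behavior this refactor relies on.
type Session struct{ open bool }

type Store struct{}

// WithDbSession owns the session lifecycle: callers supply only a callback,
// so a session can no longer be leaked by an early return.
func (s *Store) WithDbSession(ctx context.Context, fn func(*Session) error) error {
	sess := &Session{open: true}
	defer func() { sess.open = false }() // "closed" even when fn returns an error
	return fn(sess)
}

func main() {
	store := &Store{}
	err := store.WithDbSession(context.Background(), func(sess *Session) error {
		fmt.Println("session open:", sess.open) // queries would run here
		return nil
	})
	fmt.Println("err:", err)
}
```

The payoff shows in the tests above: an early assertion failure can no longer leak an open session. The diff continues with the cleanup assertions.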
+ cfg := setting.NewCfg() + cfg.AnnotationCleanupJobBatchSize = 1 + cleaner := &xormRepositoryImpl{cfg: cfg, log: log.New("test-logger"), db: fakeSQL} + _, err = cleaner.CleanAnnotations(context.Background(), setting.AnnotationCleanupSettings{MaxCount: 1}, alertAnnotationType) + require.NoError(t, err) - // run the clean up task to keep one annotation. - cfg := setting.NewCfg() - cfg.AnnotationCleanupJobBatchSize = 1 - cleaner := &xormRepositoryImpl{cfg: cfg, log: log.New("test-logger"), db: fakeSQL} - _, err = cleaner.CleanAnnotations(context.Background(), setting.AnnotationCleanupSettings{MaxCount: 1}, alertAnnotationType) + // assert that the last annotations were kept + countNew, err := sess.Where("alert_id = 20").Count(&annotations.Item{}) + require.NoError(t, err) + require.Equal(t, int64(1), countNew, "the last annotations should be kept") + + countOld, err := sess.Where("alert_id = 10").Count(&annotations.Item{}) + require.NoError(t, err) + require.Equal(t, int64(0), countOld, "the first two annotations should have been deleted") + + return nil + }) require.NoError(t, err) - - // assert that the last annotations were kept - countNew, err := session.Where("alert_id = 20").Count(&annotations.Item{}) - require.NoError(t, err) - require.Equal(t, int64(1), countNew, "the last annotations should be kept") - - countOld, err := session.Where("alert_id = 10").Count(&annotations.Item{}) - require.NoError(t, err) - require.Equal(t, int64(0), countOld, "the two first annotations should have been deleted") } func assertAnnotationCount(t *testing.T, fakeSQL *sqlstore.SQLStore, sql string, expectedCount int64) { t.Helper() - session := fakeSQL.NewSession(context.Background()) - defer session.Close() - count, err := session.Where(sql).Count(&annotations.Item{}) + err := fakeSQL.WithDbSession(context.Background(), func(sess *sqlstore.DBSession) error { + count, err := sess.Where(sql).Count(&annotations.Item{}) + require.NoError(t, err) + require.Equal(t, expectedCount, count) + return nil + }) require.NoError(t, err) - require.Equal(t, expectedCount, count) } func assertAnnotationTagCount(t *testing.T, fakeSQL *sqlstore.SQLStore, expectedCount int64) { t.Helper() - session := fakeSQL.NewSession(context.Background()) - defer session.Close() - - count, err := session.SQL("select count(*) from annotation_tag").Count() + err := fakeSQL.WithDbSession(context.Background(), func(sess *sqlstore.DBSession) error { + count, err := sess.SQL("select count(*) from annotation_tag").Count() + require.NoError(t, err) + require.Equal(t, expectedCount, count) + return nil + }) require.NoError(t, err) - require.Equal(t, expectedCount, count) } -func createTestAnnotations(t *testing.T, sqlstore *sqlstore.SQLStore, expectedCount int, oldAnnotations int) { +func createTestAnnotations(t *testing.T, store *sqlstore.SQLStore, expectedCount int, oldAnnotations int) { t.Helper() cutoffDate := time.Now() @@ -216,16 +221,19 @@ func createTestAnnotations(t *testing.T, sqlstore *sqlstore.SQLStore, expectedCo a.Created = cutoffDate.AddDate(-10, 0, -10).UnixNano() / int64(time.Millisecond) } - _, err := sqlstore.NewSession(context.Background()).Insert(a) - require.NoError(t, err, "should be able to save annotation", err) + err := store.WithDbSession(context.Background(), func(sess *sqlstore.DBSession) error { + _, err := sess.Insert(a) + require.NoError(t, err, "should be able to save annotation", err) - // mimick the SQL annotation Save logic by writing records to the annotation_tag table - // we need to ensure they get deleted
when we clean up annotations - sess := sqlstore.NewSession(context.Background()) - for tagID := range []int{1, 2} { - _, err = sess.Exec("INSERT INTO annotation_tag (annotation_id, tag_id) VALUES(?,?)", a.Id, tagID) - require.NoError(t, err, "should be able to save annotation tag ID", err) - } + // mimic the SQL annotation Save logic by writing records to the annotation_tag table + // we need to ensure they get deleted when we clean up annotations + for tagID := range []int{1, 2} { + _, err = sess.Exec("INSERT INTO annotation_tag (annotation_id, tag_id) VALUES(?,?)", a.Id, tagID) + require.NoError(t, err, "should be able to save annotation tag ID", err) + } + return err + }) + require.NoError(t, err) } } diff --git a/pkg/services/auth/auth_token_test.go b/pkg/services/auth/auth_token_test.go index 938aca66bae..2f9a46bc142 100644 --- a/pkg/services/auth/auth_token_test.go +++ b/pkg/services/auth/auth_token_test.go @@ -566,40 +566,54 @@ type testContext struct { } func (c *testContext) getAuthTokenByID(id int64) (*userAuthToken, error) { - sess := c.sqlstore.NewSession(context.Background()) - var t userAuthToken - found, err := sess.ID(id).Get(&t) - if err != nil || !found { - return nil, err - } + var res *userAuthToken + err := c.sqlstore.WithDbSession(context.Background(), func(sess *sqlstore.DBSession) error { + var t userAuthToken + found, err := sess.ID(id).Get(&t) + if err != nil || !found { + return err + } - return &t, nil + res = &t + return nil + }) + + return res, err } func (c *testContext) markAuthTokenAsSeen(id int64) (bool, error) { - sess := c.sqlstore.NewSession(context.Background()) - res, err := sess.Exec("UPDATE user_auth_token SET auth_token_seen = ? WHERE id = ?", c.sqlstore.Dialect.BooleanStr(true), id) - if err != nil { - return false, err - } + hasRowsAffected := false + err := c.sqlstore.WithDbSession(context.Background(), func(sess *sqlstore.DBSession) error { + res, err := sess.Exec("UPDATE user_auth_token SET auth_token_seen = ? WHERE id = ?", c.sqlstore.Dialect.BooleanStr(true), id) + if err != nil { + return err + } - rowsAffected, err := res.RowsAffected() - if err != nil { - return false, err - } - return rowsAffected == 1, nil + rowsAffected, err := res.RowsAffected() + if err != nil { + return err + } + hasRowsAffected = rowsAffected == 1 + return nil + }) + return hasRowsAffected, err } func (c *testContext) updateRotatedAt(id, rotatedAt int64) (bool, error) { - sess := c.sqlstore.NewSession(context.Background()) - res, err := sess.Exec("UPDATE user_auth_token SET rotated_at = ? WHERE id = ?", rotatedAt, id) - if err != nil { - return false, err - } + hasRowsAffected := false + err := c.sqlstore.WithDbSession(context.Background(), func(sess *sqlstore.DBSession) error { + res, err := sess.Exec("UPDATE user_auth_token SET rotated_at = ?
WHERE id = ?", rotatedAt, id) + if err != nil { + return err + } - rowsAffected, err := res.RowsAffected() - if err != nil { - return false, err - } - return rowsAffected == 1, nil + rowsAffected, err := res.RowsAffected() + if err != nil { + return err + } + + hasRowsAffected = rowsAffected == 1 + return nil + }) + return hasRowsAffected, err } diff --git a/pkg/services/auth/token_cleanup_test.go b/pkg/services/auth/token_cleanup_test.go index f611701ff18..dad055ef743 100644 --- a/pkg/services/auth/token_cleanup_test.go +++ b/pkg/services/auth/token_cleanup_test.go @@ -6,6 +6,7 @@ import ( "testing" "time" + "github.com/grafana/grafana/pkg/services/sqlstore" "github.com/stretchr/testify/require" ) @@ -21,8 +22,12 @@ func TestUserAuthTokenCleanup(t *testing.T) { insertToken := func(ctx *testContext, token string, prev string, createdAt, rotatedAt int64) { ut := userAuthToken{AuthToken: token, PrevAuthToken: prev, CreatedAt: createdAt, RotatedAt: rotatedAt, UserAgent: "", ClientIp: ""} - _, err := ctx.sqlstore.NewSession(context.Background()).Insert(&ut) - require.Nil(t, err) + err := ctx.sqlstore.WithDbSession(context.Background(), func(sess *sqlstore.DBSession) error { + _, err := sess.Insert(&ut) + require.Nil(t, err) + return nil + }) + require.NoError(t, err) } now := time.Date(2018, 12, 13, 13, 45, 0, 0, time.UTC) diff --git a/pkg/services/dashboards/database/database.go b/pkg/services/dashboards/database/database.go index 56ed0f0b458..f66e8bd96e0 100644 --- a/pkg/services/dashboards/database/database.go +++ b/pkg/services/dashboards/database/database.go @@ -574,7 +574,7 @@ func GetAlertsByDashboardId2(dashboardId int64, sess *sqlstore.DBSession) ([]*mo } func (d *DashboardStore) updateAlerts(ctx context.Context, existingAlerts []*models.Alert, alerts []*models.Alert, log log.Logger) error { - return d.sqlStore.WithTransactionalDbSession(ctx, func(sess *sqlstore.DBSession) error { + return d.sqlStore.WithDbSession(ctx, func(sess *sqlstore.DBSession) error { for _, alert := range alerts { update := false var alertToUpdate *models.Alert diff --git a/pkg/services/grpcserver/reflection.go b/pkg/services/grpcserver/reflection.go new file mode 100644 index 00000000000..3a4ecd09fe4 --- /dev/null +++ b/pkg/services/grpcserver/reflection.go @@ -0,0 +1,35 @@ +package grpcserver + +import ( + "context" + + "github.com/grafana/grafana/pkg/setting" + + "google.golang.org/grpc/reflection" + "google.golang.org/grpc/reflection/grpc_reflection_v1alpha" +) + +// ReflectionService implements the gRPC Server Reflection Protocol: +// https://github.com/grpc/grpc/blob/master/doc/server-reflection.md +type ReflectionService struct { + cfg *setting.Cfg + reflectionServer *reflectionServer +} + +type reflectionServer struct { + grpc_reflection_v1alpha.ServerReflectionServer +} + +// AuthFuncOverride no auth for reflection service. 
+func (s *reflectionServer) AuthFuncOverride(ctx context.Context, _ string) (context.Context, error) { + return ctx, nil +} + +func ProvideReflectionService(cfg *setting.Cfg, grpcServerProvider Provider) (*ReflectionService, error) { + re := &reflectionServer{reflection.NewServer(reflection.ServerOptions{Services: grpcServerProvider.GetServer()})} + grpc_reflection_v1alpha.RegisterServerReflectionServer(grpcServerProvider.GetServer(), re) + return &ReflectionService{ + cfg: cfg, + reflectionServer: re, + }, nil +} diff --git a/pkg/services/grpcserver/service.go b/pkg/services/grpcserver/service.go index e207bd3fa4f..9dac0ced378 100644 --- a/pkg/services/grpcserver/service.go +++ b/pkg/services/grpcserver/service.go @@ -16,18 +16,19 @@ import ( "google.golang.org/grpc" "google.golang.org/grpc/credentials" - "google.golang.org/grpc/reflection" ) type Provider interface { registry.BackgroundService GetServer() *grpc.Server + GetAddress() string } type GPRCServerService struct { - cfg *setting.Cfg - logger log.Logger - server *grpc.Server + cfg *setting.Cfg + logger log.Logger + server *grpc.Server + address string } func ProvideService(cfg *setting.Cfg, apiKey apikey.Service, userService user.Service) (Provider, error) { @@ -51,9 +52,7 @@ func ProvideService(cfg *setting.Cfg, apiKey apikey.Service, userService user.Se opts = append(opts, grpc.Creds(credentials.NewTLS(cfg.GRPCServerTLSConfig))) } - grpcServer := grpc.NewServer(opts...) - reflection.Register(grpcServer) - s.server = grpcServer + s.server = grpc.NewServer(opts...) return s, nil } @@ -65,6 +64,8 @@ func (s *GPRCServerService) Run(ctx context.Context) error { return fmt.Errorf("GRPC server: failed to listen: %w", err) } + s.address = listener.Addr().String() + serveErr := make(chan error, 1) go func() { s.logger.Info("GRPC server: starting") @@ -96,3 +97,7 @@ func (s *GPRCServerService) IsDisabled() bool { func (s *GPRCServerService) GetServer() *grpc.Server { return s.server } + +func (s *GPRCServerService) GetAddress() string { + return s.address +} diff --git a/pkg/services/navtree/navtreeimpl/applinks.go b/pkg/services/navtree/navtreeimpl/applinks.go index d6a8978f759..88b52e62b5b 100644 --- a/pkg/services/navtree/navtreeimpl/applinks.go +++ b/pkg/services/navtree/navtreeimpl/applinks.go @@ -85,7 +85,7 @@ func (s *ServiceImpl) processAppPlugin(plugin plugins.PluginDTO, c *models.ReqCo SortWeight: navtree.WeightPlugin, } - if s.features.IsEnabled(featuremgmt.FlagTopnav) { + if topNavEnabled { appLink.Url = s.cfg.AppSubURL + "/a/" + plugin.ID } else { appLink.Url = path.Join(s.cfg.AppSubURL, plugin.DefaultNavURL) diff --git a/pkg/services/ngalert/api/api_ruler.go b/pkg/services/ngalert/api/api_ruler.go index 83b87cec1ef..b9efa4c36b8 100644 --- a/pkg/services/ngalert/api/api_ruler.go +++ b/pkg/services/ngalert/api/api_ruler.go @@ -359,11 +359,11 @@ func (srv RulerSrv) updateAlertRulesInGroup(c *models.ReqContext, groupKey ngmod logger.Debug("updating database with the authorized changes", "add", len(finalChanges.New), "update", len(finalChanges.New), "delete", len(finalChanges.Delete)) if len(finalChanges.Update) > 0 || len(finalChanges.New) > 0 { - updates := make([]store.UpdateRule, 0, len(finalChanges.Update)) + updates := make([]ngmodels.UpdateRule, 0, len(finalChanges.Update)) inserts := make([]ngmodels.AlertRule, 0, len(finalChanges.New)) for _, update := range finalChanges.Update { logger.Debug("updating rule", "rule_uid", update.New.UID, "diff", update.Diff.String()) - updates = append(updates, store.UpdateRule{ + updates 
= append(updates, ngmodels.UpdateRule{ Existing: update.Existing, New: *update.New, }) diff --git a/pkg/services/ngalert/api/persist.go b/pkg/services/ngalert/api/persist.go index e143d705195..bb8f59c7412 100644 --- a/pkg/services/ngalert/api/persist.go +++ b/pkg/services/ngalert/api/persist.go @@ -5,7 +5,6 @@ import ( "github.com/grafana/grafana/pkg/models" ngmodels "github.com/grafana/grafana/pkg/services/ngalert/models" - "github.com/grafana/grafana/pkg/services/ngalert/store" "github.com/grafana/grafana/pkg/services/user" ) @@ -19,7 +18,7 @@ type RuleStore interface { // InsertAlertRules will insert all alert rules passed into the function // and return the map of uuid to id. InsertAlertRules(ctx context.Context, rule []ngmodels.AlertRule) (map[string]int64, error) - UpdateAlertRules(ctx context.Context, rule []store.UpdateRule) error + UpdateAlertRules(ctx context.Context, rule []ngmodels.UpdateRule) error DeleteAlertRulesByUID(ctx context.Context, orgID int64, ruleUID ...string) error // IncreaseVersionForAllRulesInNamespace Increases version for all rules that have specified namespace. Returns all rules that belong to the namespace diff --git a/pkg/services/ngalert/models/alert_rule.go b/pkg/services/ngalert/models/alert_rule.go index d1be2d9aee0..8d55f7e3828 100644 --- a/pkg/services/ngalert/models/alert_rule.go +++ b/pkg/services/ngalert/models/alert_rule.go @@ -379,6 +379,11 @@ type ListOrgRuleGroupsQuery struct { Result [][]string } +type UpdateRule struct { + Existing *AlertRule + New AlertRule +} + // Condition contains backend expressions and queries and the RefID // of the query or expression that will be evaluated. type Condition struct { diff --git a/pkg/services/ngalert/provisioning/alert_rules.go b/pkg/services/ngalert/provisioning/alert_rules.go index 1f41c974eb3..429d76526c0 100644 --- a/pkg/services/ngalert/provisioning/alert_rules.go +++ b/pkg/services/ngalert/provisioning/alert_rules.go @@ -143,14 +143,14 @@ func (service *AlertRuleService) UpdateRuleGroup(ctx context.Context, orgID int6 if err != nil { return fmt.Errorf("failed to list alert rules: %w", err) } - updateRules := make([]store.UpdateRule, 0, len(query.Result)) + updateRules := make([]models.UpdateRule, 0, len(query.Result)) for _, rule := range query.Result { if rule.IntervalSeconds == intervalSeconds { continue } newRule := *rule newRule.IntervalSeconds = intervalSeconds - updateRules = append(updateRules, store.UpdateRule{ + updateRules = append(updateRules, models.UpdateRule{ Existing: rule, New: newRule, }) @@ -216,7 +216,7 @@ func (service *AlertRuleService) ReplaceRuleGroup(ctx context.Context, orgID int } } - updates := make([]store.UpdateRule, 0, len(delta.Update)) + updates := make([]models.UpdateRule, 0, len(delta.Update)) for _, update := range delta.Update { // check that provenance is not changed in a invalid way storedProvenance, err := service.provenanceStore.GetProvenance(ctx, update.New, orgID) @@ -226,7 +226,7 @@ func (service *AlertRuleService) ReplaceRuleGroup(ctx context.Context, orgID int if storedProvenance != provenance && storedProvenance != models.ProvenanceNone { return fmt.Errorf("cannot update with provided provenance '%s', needs '%s'", provenance, storedProvenance) } - updates = append(updates, store.UpdateRule{ + updates = append(updates, models.UpdateRule{ Existing: update.Existing, New: *update.New, }) @@ -281,7 +281,7 @@ func (service *AlertRuleService) UpdateAlertRule(ctx context.Context, rule model return models.AlertRule{}, err } err = 
service.xact.InTransaction(ctx, func(ctx context.Context) error { - err := service.ruleStore.UpdateAlertRules(ctx, []store.UpdateRule{ + err := service.ruleStore.UpdateAlertRules(ctx, []models.UpdateRule{ { Existing: &storedRule, New: rule, diff --git a/pkg/services/ngalert/provisioning/persist.go b/pkg/services/ngalert/provisioning/persist.go index 30fda7cd7fb..97d406a8214 100644 --- a/pkg/services/ngalert/provisioning/persist.go +++ b/pkg/services/ngalert/provisioning/persist.go @@ -7,7 +7,6 @@ import ( "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions" "github.com/grafana/grafana/pkg/services/ngalert/models" - "github.com/grafana/grafana/pkg/services/ngalert/store" "github.com/grafana/grafana/pkg/services/quota" ) @@ -40,7 +39,7 @@ type RuleStore interface { ListAlertRules(ctx context.Context, query *models.ListAlertRulesQuery) error GetRuleGroupInterval(ctx context.Context, orgID int64, namespaceUID string, ruleGroup string) (int64, error) InsertAlertRules(ctx context.Context, rule []models.AlertRule) (map[string]int64, error) - UpdateAlertRules(ctx context.Context, rule []store.UpdateRule) error + UpdateAlertRules(ctx context.Context, rule []models.UpdateRule) error DeleteAlertRulesByUID(ctx context.Context, orgID int64, ruleUID ...string) error GetAlertRulesGroupByRuleUID(ctx context.Context, query *models.GetAlertRulesGroupByRuleUIDQuery) error } diff --git a/pkg/services/ngalert/store/alert_rule.go b/pkg/services/ngalert/store/alert_rule.go index 1743e0eb56b..b41fe02a66e 100644 --- a/pkg/services/ngalert/store/alert_rule.go +++ b/pkg/services/ngalert/store/alert_rule.go @@ -8,7 +8,6 @@ import ( "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/guardian" - apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions" ngmodels "github.com/grafana/grafana/pkg/services/ngalert/models" "github.com/grafana/grafana/pkg/services/sqlstore" "github.com/grafana/grafana/pkg/services/sqlstore/searchstore" @@ -22,17 +21,6 @@ const AlertRuleMaxTitleLength = 190 // AlertRuleMaxRuleGroupNameLength is the maximum length of the alert rule group name const AlertRuleMaxRuleGroupNameLength = 190 -type UpdateRuleGroupCmd struct { - OrgID int64 - NamespaceUID string - RuleGroupConfig apimodels.PostableRuleGroupConfig -} - -type UpdateRule struct { - Existing *ngmodels.AlertRule - New ngmodels.AlertRule -} - var ( ErrAlertRuleGroupNotFound = errors.New("rulegroup not found") ErrOptimisticLock = errors.New("version conflict while updating a record in the database with optimistic locking") @@ -185,7 +173,7 @@ func (st DBstore) InsertAlertRules(ctx context.Context, rules []ngmodels.AlertRu } // UpdateAlertRules is a handler for updating alert rules. 
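The persist.go, alert_rule.go, and provisioning hunks above move `UpdateRule` out of the `store` package into `ngmodels`, so the `RuleStore` interfaces declared by the API and provisioning layers no longer need to import the store implementation. A toy illustration of the type pair, with stand-in definitions rather than the real `ngmodels` package:

```go
package main

import "fmt"

// Stand-ins for ngmodels.AlertRule and ngmodels.UpdateRule. Keeping the
// pair in the models package lets API, provisioning, and store code agree
// on what an update is without the API importing the store.
type AlertRule struct {
	UID   string
	Title string
}

type UpdateRule struct {
	Existing *AlertRule // the rule as currently persisted
	New      AlertRule  // the desired state
}

func main() {
	existing := &AlertRule{UID: "abc123", Title: "High CPU"}
	updated := *existing
	updated.Title = "High CPU (prod)"
	fmt.Printf("%+v\n", UpdateRule{Existing: existing, New: updated})
}
```

The store-side `UpdateAlertRules` signature change follows below.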
-func (st DBstore) UpdateAlertRules(ctx context.Context, rules []UpdateRule) error { +func (st DBstore) UpdateAlertRules(ctx context.Context, rules []ngmodels.UpdateRule) error { return st.SQLStore.WithTransactionalDbSession(ctx, func(sess *sqlstore.DBSession) error { ruleVersions := make([]ngmodels.AlertRuleVersion, 0, len(rules)) for _, r := range rules { diff --git a/pkg/services/ngalert/store/alert_rule_test.go b/pkg/services/ngalert/store/alert_rule_test.go index 0232c28d61b..41a48df7021 100644 --- a/pkg/services/ngalert/store/alert_rule_test.go +++ b/pkg/services/ngalert/store/alert_rule_test.go @@ -52,7 +52,7 @@ func TestUpdateAlertRules(t *testing.T) { rule := createRule(t) newRule := models.CopyRule(rule) newRule.Title = util.GenerateShortUID() - err := store.UpdateAlertRules(context.Background(), []UpdateRule{{ + err := store.UpdateAlertRules(context.Background(), []models.UpdateRule{{ Existing: rule, New: *newRule, }, @@ -77,7 +77,7 @@ func TestUpdateAlertRules(t *testing.T) { newRule := models.CopyRule(rule) newRule.Title = util.GenerateShortUID() - err := store.UpdateAlertRules(context.Background(), []UpdateRule{{ + err := store.UpdateAlertRules(context.Background(), []models.UpdateRule{{ Existing: rule, New: *newRule, }, diff --git a/pkg/services/ngalert/store/image_test.go b/pkg/services/ngalert/store/image_test.go index 2680ba67950..58510f9d4ab 100644 --- a/pkg/services/ngalert/store/image_test.go +++ b/pkg/services/ngalert/store/image_test.go @@ -11,6 +11,7 @@ import ( "github.com/grafana/grafana/pkg/services/ngalert/models" "github.com/grafana/grafana/pkg/services/ngalert/store" "github.com/grafana/grafana/pkg/services/ngalert/tests" + "github.com/grafana/grafana/pkg/services/sqlstore" ) func TestIntegrationSaveAndGetImage(t *testing.T) { @@ -168,30 +169,32 @@ func TestIntegrationDeleteExpiredImages(t *testing.T) { image2 := models.Image{URL: "https://example.com/example.png"} require.NoError(t, dbstore.SaveImage(ctx, &image2)) - s := dbstore.SQLStore.NewSession(ctx) - t.Cleanup(s.Close) + err := dbstore.SQLStore.WithDbSession(ctx, func(sess *sqlstore.DBSession) error { + // should return both images + var result1, result2 models.Image + ok, err := sess.Where("token = ?", image1.Token).Get(&result1) + require.NoError(t, err) + assert.True(t, ok) + ok, err = sess.Where("token = ?", image2.Token).Get(&result2) + require.NoError(t, err) + assert.True(t, ok) - // should return both images - var result1, result2 models.Image - ok, err := s.Where("token = ?", image1.Token).Get(&result1) - require.NoError(t, err) - assert.True(t, ok) - ok, err = s.Where("token = ?", image2.Token).Get(&result2) - require.NoError(t, err) - assert.True(t, ok) + // should delete expired image + image1.ExpiresAt = time.Now().Add(-time.Second) + require.NoError(t, dbstore.SaveImage(ctx, &image1)) + n, err := dbstore.DeleteExpiredImages(ctx) + require.NoError(t, err) + assert.Equal(t, int64(1), n) - // should delete expired image - image1.ExpiresAt = time.Now().Add(-time.Second) - require.NoError(t, dbstore.SaveImage(ctx, &image1)) - n, err := dbstore.DeleteExpiredImages(ctx) - require.NoError(t, err) - assert.Equal(t, int64(1), n) + // should return just the second image + ok, err = sess.Where("token = ?", image1.Token).Get(&result1) + require.NoError(t, err) + assert.False(t, ok) + ok, err = sess.Where("token = ?", image2.Token).Get(&result2) + require.NoError(t, err) + assert.True(t, ok) - // should return just the second image - ok, err = s.Where("token = ?", image1.Token).Get(&result1) + return 
nil + }) require.NoError(t, err) - assert.False(t, ok) - ok, err = s.Where("token = ?", image2.Token).Get(&result2) - require.NoError(t, err) - assert.True(t, ok) } diff --git a/pkg/services/ngalert/store/testing.go b/pkg/services/ngalert/store/testing.go index 2b57b5d8dcc..5f6d2c3dd82 100644 --- a/pkg/services/ngalert/store/testing.go +++ b/pkg/services/ngalert/store/testing.go @@ -320,7 +320,7 @@ func (f *FakeRuleStore) GetNamespaceByUID(_ context.Context, uid string, orgID i return nil, fmt.Errorf("not found") } -func (f *FakeRuleStore) UpdateAlertRules(_ context.Context, q []UpdateRule) error { +func (f *FakeRuleStore) UpdateAlertRules(_ context.Context, q []models.UpdateRule) error { f.mtx.Lock() defer f.mtx.Unlock() f.RecordedOps = append(f.RecordedOps, q) diff --git a/pkg/services/playlist/model.go b/pkg/services/playlist/model.go index d65e47479a7..40e7305dd93 100644 --- a/pkg/services/playlist/model.go +++ b/pkg/services/playlist/model.go @@ -2,6 +2,8 @@ package playlist import ( "errors" + + "github.com/grafana/grafana/pkg/coremodel/playlist" ) // Typed errors @@ -21,22 +23,11 @@ type Playlist struct { } type PlaylistDTO struct { - Id int64 `json:"id"` - UID string `json:"uid"` - Name string `json:"name"` - Interval string `json:"interval"` - OrgId int64 `json:"-"` - Items []PlaylistItemDTO `json:"items"` + playlist.Model + OrgId int64 `json:"-"` } -type PlaylistItemDTO struct { - Id int64 `json:"id"` - PlaylistId int64 `json:"playlistid"` - Type string `json:"type"` - Title string `json:"title"` - Value string `json:"value"` - Order int `json:"order"` -} +type PlaylistItemDTO = playlist.PlaylistItem type PlaylistItem struct { Id int64 `db:"id"` diff --git a/pkg/services/playlist/playlistimpl/sqlx_store.go b/pkg/services/playlist/playlistimpl/sqlx_store.go index 230efb54bd6..34a4d1fe882 100644 --- a/pkg/services/playlist/playlistimpl/sqlx_store.go +++ b/pkg/services/playlist/playlistimpl/sqlx_store.go @@ -42,7 +42,7 @@ func (s *sqlxStore) Insert(ctx context.Context, cmd *playlist.CreatePlaylistComm for _, item := range cmd.Items { playlistItems = append(playlistItems, playlist.PlaylistItem{ PlaylistId: p.Id, - Type: item.Type, + Type: string(item.Type), Value: item.Value, Order: item.Order, Title: item.Title, @@ -94,7 +94,7 @@ func (s *sqlxStore) Update(ctx context.Context, cmd *playlist.UpdatePlaylistComm for index, item := range cmd.Items { playlistItems = append(playlistItems, playlist.PlaylistItem{ PlaylistId: p.Id, - Type: item.Type, + Type: string(item.Type), Value: item.Value, Order: index + 1, Title: item.Title, diff --git a/pkg/services/playlist/playlistimpl/xorm_store.go b/pkg/services/playlist/playlistimpl/xorm_store.go index 1a51bd25b51..bb75c1b9900 100644 --- a/pkg/services/playlist/playlistimpl/xorm_store.go +++ b/pkg/services/playlist/playlistimpl/xorm_store.go @@ -38,7 +38,7 @@ func (s *sqlStore) Insert(ctx context.Context, cmd *playlist.CreatePlaylistComma for _, item := range cmd.Items { playlistItems = append(playlistItems, playlist.PlaylistItem{ PlaylistId: p.Id, - Type: item.Type, + Type: string(item.Type), Value: item.Value, Order: item.Order, Title: item.Title, @@ -70,13 +70,12 @@ func (s *sqlStore) Update(ctx context.Context, cmd *playlist.UpdatePlaylistComma p.Id = existingPlaylist.Id dto = playlist.PlaylistDTO{ - - Id: p.Id, - UID: p.UID, - OrgId: p.OrgId, - Name: p.Name, - Interval: p.Interval, + OrgId: p.OrgId, } + dto.Id = p.Id + dto.Uid = p.UID + dto.Name = p.Name + dto.Interval = p.Interval _, err = sess.Where("id=?", p.Id).Cols("name", 
"interval").Update(&p) if err != nil { @@ -95,7 +94,7 @@ func (s *sqlStore) Update(ctx context.Context, cmd *playlist.UpdatePlaylistComma for index, item := range cmd.Items { playlistItems = append(playlistItems, models.PlaylistItem{ PlaylistId: p.Id, - Type: item.Type, + Type: string(item.Type), Value: item.Value, Order: index + 1, Title: item.Title, diff --git a/pkg/services/publicdashboards/service/service.go b/pkg/services/publicdashboards/service/service.go index 377489f1247..848a69d6654 100644 --- a/pkg/services/publicdashboards/service/service.go +++ b/pkg/services/publicdashboards/service/service.go @@ -96,16 +96,12 @@ func (pd *PublicDashboardServiceImpl) GetPublicDashboardConfig(ctx context.Conte // SavePublicDashboardConfig is a helper method to persist the sharing config // to the database. It handles validations for sharing config and persistence func (pd *PublicDashboardServiceImpl) SavePublicDashboardConfig(ctx context.Context, u *user.SignedInUser, dto *SavePublicDashboardConfigDTO) (*PublicDashboard, error) { + // validate if the dashboard exists dashboard, err := pd.GetDashboard(ctx, dto.DashboardUid) if err != nil { return nil, err } - err = validation.ValidateSavePublicDashboard(dto, dashboard) - if err != nil { - return nil, err - } - // set default value for time settings if dto.PublicDashboard.TimeSettings == nil { dto.PublicDashboard.TimeSettings = &TimeSettings{} @@ -120,6 +116,10 @@ func (pd *PublicDashboardServiceImpl) SavePublicDashboardConfig(ctx context.Cont // save changes var pubdashUid string if existingPubdash == nil { + err = validation.ValidateSavePublicDashboard(dto, dashboard) + if err != nil { + return nil, err + } pubdashUid, err = pd.savePublicDashboardConfig(ctx, dto) } else { pubdashUid, err = pd.updatePublicDashboardConfig(ctx, dto) diff --git a/pkg/services/secrets/database/database.go b/pkg/services/secrets/database/database.go index 7a88a9f4862..348122bd049 100644 --- a/pkg/services/secrets/database/database.go +++ b/pkg/services/secrets/database/database.go @@ -126,7 +126,9 @@ func (ss *SecretsStoreImpl) ReEncryptDataKeys( currProvider secrets.ProviderID, ) error { keys := make([]*secrets.DataKey, 0) - if err := ss.sqlStore.NewSession(ctx).Table(dataKeysTable).Find(&keys); err != nil { + if err := ss.sqlStore.WithDbSession(ctx, func(sess *sqlstore.DBSession) error { + return sess.Table(dataKeysTable).Find(&keys) + }); err != nil { return err } diff --git a/pkg/services/secrets/migrator/migrator.go b/pkg/services/secrets/migrator/migrator.go index 07f29e84d8c..a87838b92b9 100644 --- a/pkg/services/secrets/migrator/migrator.go +++ b/pkg/services/secrets/migrator/migrator.go @@ -104,8 +104,10 @@ func (m *SecretsMigrator) RollBackSecrets(ctx context.Context) (bool, error) { return false, nil } - _, sqlErr := m.sqlStore.NewSession(ctx).Exec("DELETE FROM data_keys") - if sqlErr != nil { + if sqlErr := m.sqlStore.WithDbSession(ctx, func(sess *sqlstore.DBSession) error { + _, err := sess.Exec("DELETE FROM data_keys") + return err + }); sqlErr != nil { logger.Warn("Error while cleaning up data keys table...", "error", sqlErr) return false, nil } diff --git a/pkg/services/secrets/migrator/reencrypt.go b/pkg/services/secrets/migrator/reencrypt.go index 80a8adb6cde..10c2ecca596 100644 --- a/pkg/services/secrets/migrator/reencrypt.go +++ b/pkg/services/secrets/migrator/reencrypt.go @@ -18,7 +18,9 @@ func (s simpleSecret) reencrypt(ctx context.Context, secretsSrv *manager.Secrets Secret []byte } - if err := 
sqlStore.NewSession(ctx).Table(s.tableName).Select(fmt.Sprintf("id, %s as secret", s.columnName)).Find(&rows); err != nil { + if err := sqlStore.WithDbSession(ctx, func(sess *sqlstore.DBSession) error { + return sess.Table(s.tableName).Select(fmt.Sprintf("id, %s as secret", s.columnName)).Find(&rows) + }); err != nil { logger.Warn("Could not find any secret to re-encrypt", "table", s.tableName) return false } @@ -72,7 +74,9 @@ func (s b64Secret) reencrypt(ctx context.Context, secretsSrv *manager.SecretsSer Secret string } - if err := sqlStore.NewSession(ctx).Table(s.tableName).Select(fmt.Sprintf("id, %s as secret", s.columnName)).Find(&rows); err != nil { + if err := sqlStore.WithDbSession(ctx, func(sess *sqlstore.DBSession) error { + return sess.Table(s.tableName).Select(fmt.Sprintf("id, %s as secret", s.columnName)).Find(&rows) + }); err != nil { logger.Warn("Could not find any secret to re-encrypt", "table", s.tableName) return false } @@ -140,7 +144,9 @@ func (s jsonSecret) reencrypt(ctx context.Context, secretsSrv *manager.SecretsSe SecureJsonData map[string][]byte } - if err := sqlStore.NewSession(ctx).Table(s.tableName).Cols("id", "secure_json_data").Find(&rows); err != nil { + if err := sqlStore.WithDbSession(ctx, func(sess *sqlstore.DBSession) error { + return sess.Table(s.tableName).Cols("id", "secure_json_data").Find(&rows) + }); err != nil { logger.Warn("Could not find any secret to re-encrypt", "table", s.tableName) return false } @@ -199,7 +205,9 @@ func (s alertingSecret) reencrypt(ctx context.Context, secretsSrv *manager.Secre } selectSQL := "SELECT id, alertmanager_configuration FROM alert_configuration" - if err := sqlStore.NewSession(ctx).SQL(selectSQL).Find(&results); err != nil { + if err := sqlStore.WithDbSession(ctx, func(sess *sqlstore.DBSession) error { + return sess.SQL(selectSQL).Find(&results) + }); err != nil { logger.Warn("Could not find any alert_configuration secret to re-encrypt") return false } diff --git a/pkg/services/secrets/migrator/rollback.go b/pkg/services/secrets/migrator/rollback.go index 7668ae8e801..2fa396b5d83 100644 --- a/pkg/services/secrets/migrator/rollback.go +++ b/pkg/services/secrets/migrator/rollback.go @@ -24,7 +24,9 @@ func (s simpleSecret) rollback( Secret []byte } - if err := sqlStore.NewSession(ctx).Table(s.tableName).Select(fmt.Sprintf("id, %s as secret", s.columnName)).Find(&rows); err != nil { + if err := sqlStore.WithDbSession(ctx, func(sess *sqlstore.DBSession) error { + return sess.Table(s.tableName).Select(fmt.Sprintf("id, %s as secret", s.columnName)).Find(&rows) + }); err != nil { logger.Warn("Could not find any secret to roll back", "table", s.tableName) return true } @@ -82,7 +84,9 @@ func (s b64Secret) rollback( Secret string } - if err := sqlStore.NewSession(ctx).Table(s.tableName).Select(fmt.Sprintf("id, %s as secret", s.columnName)).Find(&rows); err != nil { + if err := sqlStore.WithDbSession(ctx, func(sess *sqlstore.DBSession) error { + return sess.Table(s.tableName).Select(fmt.Sprintf("id, %s as secret", s.columnName)).Find(&rows) + }); err != nil { logger.Warn("Could not find any secret to roll back", "table", s.tableName) return true } @@ -154,7 +158,9 @@ func (s jsonSecret) rollback( SecureJsonData map[string][]byte } - if err := sqlStore.NewSession(ctx).Table(s.tableName).Cols("id", "secure_json_data").Find(&rows); err != nil { + if err := sqlStore.WithDbSession(ctx, func(sess *sqlstore.DBSession) error { + return sess.Table(s.tableName).Cols("id", "secure_json_data").Find(&rows) + }); err != nil { 
logger.Warn("Could not find any secret to roll back", "table", s.tableName) return true } @@ -217,7 +223,9 @@ func (s alertingSecret) rollback( } selectSQL := "SELECT id, alertmanager_configuration FROM alert_configuration" - if err := sqlStore.NewSession(ctx).SQL(selectSQL).Find(&results); err != nil { + if err := sqlStore.WithDbSession(ctx, func(sess *sqlstore.DBSession) error { + return sess.SQL(selectSQL).Find(&results) + }); err != nil { logger.Warn("Could not find any alert_configuration secret to roll back") return true } diff --git a/pkg/services/serviceaccounts/tests/common.go b/pkg/services/serviceaccounts/tests/common.go index 788d6675629..a82dbcb0016 100644 --- a/pkg/services/serviceaccounts/tests/common.go +++ b/pkg/services/serviceaccounts/tests/common.go @@ -20,14 +20,16 @@ type TestUser struct { Role string Login string IsServiceAccount bool + OrgID int64 } type TestApiKey struct { - Name string - Role org.RoleType - OrgId int64 - Key string - IsExpired bool + Name string + Role org.RoleType + OrgId int64 + Key string + IsExpired bool + ServiceAccountID *int64 } func SetupUserServiceAccount(t *testing.T, sqlStore *sqlstore.SQLStore, testUser TestUser) *user.User { @@ -41,6 +43,7 @@ func SetupUserServiceAccount(t *testing.T, sqlStore *sqlstore.SQLStore, testUser IsServiceAccount: testUser.IsServiceAccount, DefaultOrgRole: role, Name: testUser.Name, + OrgID: testUser.OrgID, }) require.NoError(t, err) return u1 @@ -53,9 +56,10 @@ func SetupApiKey(t *testing.T, sqlStore *sqlstore.SQLStore, testKey TestApiKey) } addKeyCmd := &apikey.AddCommand{ - Name: testKey.Name, - Role: role, - OrgId: testKey.OrgId, + Name: testKey.Name, + Role: role, + OrgId: testKey.OrgId, + ServiceAccountID: testKey.ServiceAccountID, } if testKey.Key != "" { diff --git a/pkg/services/sqlstore/db/db.go b/pkg/services/sqlstore/db/db.go index 8af1321bd00..85100f924a4 100644 --- a/pkg/services/sqlstore/db/db.go +++ b/pkg/services/sqlstore/db/db.go @@ -12,7 +12,7 @@ import ( type DB interface { WithTransactionalDbSession(ctx context.Context, callback sqlstore.DBTransactionFunc) error WithDbSession(ctx context.Context, callback sqlstore.DBTransactionFunc) error - NewSession(ctx context.Context) *sqlstore.DBSession + WithNewDbSession(ctx context.Context, callback sqlstore.DBTransactionFunc) error GetDialect() migrator.Dialect GetDBType() core.DbType GetSqlxSession() *session.SessionDB diff --git a/pkg/services/sqlstore/db/dbtest/dbtest.go b/pkg/services/sqlstore/db/dbtest/dbtest.go index c8013cb4f49..d069a096d30 100644 --- a/pkg/services/sqlstore/db/dbtest/dbtest.go +++ b/pkg/services/sqlstore/db/dbtest/dbtest.go @@ -21,3 +21,7 @@ func (f *FakeDB) WithTransactionalDbSession(ctx context.Context, callback sqlsto func (f *FakeDB) WithDbSession(ctx context.Context, callback sqlstore.DBTransactionFunc) error { return f.ExpectedError } + +func (f *FakeDB) WithNewDbSession(ctx context.Context, callback sqlstore.DBTransactionFunc) error { + return f.ExpectedError +} diff --git a/pkg/services/sqlstore/mockstore/mockstore.go b/pkg/services/sqlstore/mockstore/mockstore.go index 48941e14e13..c493bfc8046 100644 --- a/pkg/services/sqlstore/mockstore/mockstore.go +++ b/pkg/services/sqlstore/mockstore/mockstore.go @@ -222,6 +222,10 @@ func (m *SQLStoreMock) WithDbSession(ctx context.Context, callback sqlstore.DBTr return m.ExpectedError } +func (m *SQLStoreMock) WithNewDbSession(ctx context.Context, callback sqlstore.DBTransactionFunc) error { + return m.ExpectedError +} + func (m *SQLStoreMock) GetOrgQuotaByTarget(ctx 
context.Context, query *models.GetOrgQuotaByTargetQuery) error { return m.ExpectedError } diff --git a/pkg/services/sqlstore/org_users.go b/pkg/services/sqlstore/org_users.go index 1e154b07610..34de541d591 100644 --- a/pkg/services/sqlstore/org_users.go +++ b/pkg/services/sqlstore/org_users.go @@ -14,7 +14,7 @@ func (ss *SQLStore) AddOrgUser(ctx context.Context, cmd *models.AddOrgUserComman var usr user.User session := sess.ID(cmd.UserId) if !cmd.AllowAddingServiceAccount { - session = session.Where(notServiceAccountFilter(ss)) + session = session.Where(NotServiceAccountFilter(ss)) } if exists, err := session.Get(&usr); err != nil { diff --git a/pkg/services/sqlstore/session.go b/pkg/services/sqlstore/session.go index 9289acb66ac..b8a3b9ccc56 100644 --- a/pkg/services/sqlstore/session.go +++ b/pkg/services/sqlstore/session.go @@ -27,13 +27,6 @@ func (sess *DBSession) PublishAfterCommit(msg interface{}) { sess.events = append(sess.events, msg) } -// NewSession returns a new DBSession -func (ss *SQLStore) NewSession(ctx context.Context) *DBSession { - sess := &DBSession{Session: ss.engine.NewSession()} - sess.Session = sess.Session.Context(ctx) - return sess -} - func startSessionOrUseExisting(ctx context.Context, engine *xorm.Engine, beginTran bool) (*DBSession, bool, error) { value := ctx.Value(ContextSessionKey{}) var sess *DBSession @@ -55,14 +48,24 @@ func startSessionOrUseExisting(ctx context.Context, engine *xorm.Engine, beginTr } newSess.Session = newSess.Session.Context(ctx) + return newSess, true, nil } -// WithDbSession calls the callback with a session. +// WithDbSession calls the callback with the session in the context (if one exists). +// Otherwise it creates a new one that is closed upon completion. +// A session is stored in the context if sqlstore.InTransaction() has previously been called with the same context (and it has not been committed or rolled back yet). func (ss *SQLStore) WithDbSession(ctx context.Context, callback DBTransactionFunc) error { return withDbSession(ctx, ss.engine, callback) } +// WithNewDbSession calls the callback with a new session that is closed upon completion.
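The session.go comments above distinguish the two entry points: `WithDbSession` reuses a session already stored in the context by `InTransaction`, while the new `WithNewDbSession` always opens a fresh session outside any ambient transaction. A self-contained sketch of the difference, with hypothetical stand-in types (the real logic lives in session.go and transactions.go):

```go
package main

import (
	"context"
	"fmt"
)

type dbSession struct{ id int }

type sessionKey struct{}

// withDbSession reuses a session stored in the context (as InTransaction
// stores one); otherwise it opens a fresh session, like withNewDbSession.
// Hypothetical reductions of the SQLStore methods documented above.
func withDbSession(ctx context.Context, fn func(*dbSession) error) error {
	if sess, ok := ctx.Value(sessionKey{}).(*dbSession); ok {
		return fn(sess) // participate in the ambient transaction
	}
	return fn(&dbSession{id: 2}) // no transaction: behave like a new session
}

func withNewDbSession(_ context.Context, fn func(*dbSession) error) error {
	return fn(&dbSession{id: 3}) // always independent of any transaction
}

func main() {
	txCtx := context.WithValue(context.Background(), sessionKey{}, &dbSession{id: 1})
	_ = withDbSession(txCtx, func(s *dbSession) error {
		fmt.Println("WithDbSession saw session", s.id) // 1: reused from the context
		return nil
	})
	_ = withNewDbSession(txCtx, func(s *dbSession) error {
		fmt.Println("WithNewDbSession saw session", s.id) // 3: brand new
		return nil
	})
}
```

The `WithNewDbSession` implementation follows below.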
+func (ss *SQLStore) WithNewDbSession(ctx context.Context, callback DBTransactionFunc) error { + sess := &DBSession{Session: ss.engine.NewSession(), transactionOpen: false} + defer sess.Close() + return callback(sess) +} + func withDbSession(ctx context.Context, engine *xorm.Engine, callback DBTransactionFunc) error { sess, isNew, err := startSessionOrUseExisting(ctx, engine, false) if err != nil { diff --git a/pkg/services/sqlstore/store.go b/pkg/services/sqlstore/store.go index 23412cf1d25..fab61c5fbd9 100644 --- a/pkg/services/sqlstore/store.go +++ b/pkg/services/sqlstore/store.go @@ -30,8 +30,8 @@ type Store interface { GetSignedInUser(ctx context.Context, query *models.GetSignedInUserQuery) error UpdateUserPermissions(userID int64, isAdmin bool) error SetUserHelpFlag(ctx context.Context, cmd *models.SetUserHelpFlagCommand) error - NewSession(ctx context.Context) *DBSession WithDbSession(ctx context.Context, callback DBTransactionFunc) error + WithNewDbSession(ctx context.Context, callback DBTransactionFunc) error GetOrgQuotaByTarget(ctx context.Context, query *models.GetOrgQuotaByTargetQuery) error GetOrgQuotas(ctx context.Context, query *models.GetOrgQuotasQuery) error UpdateOrgQuota(ctx context.Context, cmd *models.UpdateOrgQuotaCmd) error diff --git a/pkg/services/sqlstore/transactions.go b/pkg/services/sqlstore/transactions.go index baf3efd2c94..15a3f2e0ccd 100644 --- a/pkg/services/sqlstore/transactions.go +++ b/pkg/services/sqlstore/transactions.go @@ -20,6 +20,8 @@ func (ss *SQLStore) WithTransactionalDbSession(ctx context.Context, callback DBT return inTransactionWithRetryCtx(ctx, ss.engine, ss.bus, callback, 0) } +// InTransaction starts a transaction and calls fn with a context that carries +// the session, so nested WithDbSession calls reuse it. func (ss *SQLStore) InTransaction(ctx context.Context, fn func(ctx context.Context) error) error { return ss.inTransactionWithRetry(ctx, fn, 0) } diff --git a/pkg/services/sqlstore/user.go b/pkg/services/sqlstore/user.go index eacaa039dc3..71090e630ee 100644 --- a/pkg/services/sqlstore/user.go +++ b/pkg/services/sqlstore/user.go @@ -169,7 +169,7 @@ func (ss *SQLStore) CreateUser(ctx context.Context, cmd user.CreateUserCommand) return &user, createErr } -func notServiceAccountFilter(ss *SQLStore) string { +func NotServiceAccountFilter(ss *SQLStore) string { return fmt.Sprintf("%s.is_service_account = %s", ss.Dialect.Quote("user"), ss.Dialect.BooleanStr(false)) @@ -180,7 +180,7 @@ func (ss *SQLStore) GetUserById(ctx context.Context, query *models.GetUserByIdQu usr := new(user.User) has, err := sess.ID(query.Id). - Where(notServiceAccountFilter(ss)). + Where(NotServiceAccountFilter(ss)).
Get(usr) if err != nil { @@ -201,67 +201,6 @@ func (ss *SQLStore) GetUserById(ctx context.Context, query *models.GetUserByIdQu }) } -func (ss *SQLStore) UpdateUser(ctx context.Context, cmd *models.UpdateUserCommand) error { - if ss.Cfg.CaseInsensitiveLogin { - cmd.Login = strings.ToLower(cmd.Login) - cmd.Email = strings.ToLower(cmd.Email) - } - - return ss.WithTransactionalDbSession(ctx, func(sess *DBSession) error { - user := user.User{ - Name: cmd.Name, - Email: cmd.Email, - Login: cmd.Login, - Theme: cmd.Theme, - Updated: TimeNow(), - } - - if _, err := sess.ID(cmd.UserId).Where(notServiceAccountFilter(ss)).Update(&user); err != nil { - return err - } - - if ss.Cfg.CaseInsensitiveLogin { - if err := ss.userCaseInsensitiveLoginConflict(ctx, sess, user.Login, user.Email); err != nil { - return err - } - } - - sess.publishAfterCommit(&events.UserUpdated{ - Timestamp: user.Created, - Id: user.ID, - Name: user.Name, - Login: user.Login, - Email: user.Email, - }) - - return nil - }) -} - -func (ss *SQLStore) ChangeUserPassword(ctx context.Context, cmd *models.ChangeUserPasswordCommand) error { - return ss.WithTransactionalDbSession(ctx, func(sess *DBSession) error { - user := user.User{ - Password: cmd.NewPassword, - Updated: TimeNow(), - } - - _, err := sess.ID(cmd.UserId).Where(notServiceAccountFilter(ss)).Update(&user) - return err - }) -} - -func (ss *SQLStore) UpdateUserLastSeenAt(ctx context.Context, cmd *models.UpdateUserLastSeenAtCommand) error { - return ss.WithTransactionalDbSession(ctx, func(sess *DBSession) error { - user := user.User{ - ID: cmd.UserId, - LastSeenAt: TimeNow(), - } - - _, err := sess.ID(cmd.UserId).Update(&user) - return err - }) -} - func (ss *SQLStore) SetUsingOrg(ctx context.Context, cmd *models.SetUsingOrgCommand) error { getOrgsForUserCmd := &models.GetUserOrgListQuery{UserId: cmd.UserId} if err := ss.GetUserOrgList(ctx, getOrgsForUserCmd); err != nil { @@ -296,7 +235,7 @@ func setUsingOrgInTransaction(sess *DBSession, userID int64, orgID int64) error func (ss *SQLStore) GetUserProfile(ctx context.Context, query *models.GetUserProfileQuery) error { return ss.WithDbSession(ctx, func(sess *DBSession) error { var usr user.User - has, err := sess.ID(query.UserId).Where(notServiceAccountFilter(ss)).Get(&usr) + has, err := sess.ID(query.UserId).Where(NotServiceAccountFilter(ss)).Get(&usr) if err != nil { return err @@ -349,7 +288,7 @@ func (ss *SQLStore) GetUserOrgList(ctx context.Context, query *models.GetUserOrg sess.Join("INNER", "org", "org_user.org_id=org.id") sess.Join("INNER", ss.Dialect.Quote("user"), fmt.Sprintf("org_user.user_id=%s.id", ss.Dialect.Quote("user"))) sess.Where("org_user.user_id=?", query.UserId) - sess.Where(notServiceAccountFilter(ss)) + sess.Where(NotServiceAccountFilter(ss)) sess.Cols("org.name", "org_user.role", "org_user.org_id") sess.OrderBy("org.name") err := sess.Find(&query.Result) @@ -642,7 +581,7 @@ func (ss *SQLStore) DisableUser(ctx context.Context, cmd *models.DisableUserComm usr := user.User{} sess := dbSess.Table("user") - if has, err := sess.ID(cmd.UserId).Where(notServiceAccountFilter(ss)).Get(&usr); err != nil { + if has, err := sess.ID(cmd.UserId).Where(NotServiceAccountFilter(ss)).Get(&usr); err != nil { return err } else if !has { return user.ErrUserNotFound @@ -672,7 +611,7 @@ func (ss *SQLStore) BatchDisableUsers(ctx context.Context, cmd *models.BatchDisa disableParams = append(disableParams, v) } - _, err := sess.Where(notServiceAccountFilter(ss)).Exec(disableParams...) 
+ _, err := sess.Where(NotServiceAccountFilter(ss)).Exec(disableParams...) return err }) } @@ -683,10 +622,14 @@ func (ss *SQLStore) DeleteUser(ctx context.Context, cmd *models.DeleteUserComman }) } +func (ss *SQLStore) DeleteUserInSession(ctx context.Context, sess *DBSession, cmd *models.DeleteUserCommand) error { + return deleteUserInTransaction(ss, sess, cmd) +} + func deleteUserInTransaction(ss *SQLStore, sess *DBSession, cmd *models.DeleteUserCommand) error { // Check if user exists usr := user.User{ID: cmd.UserId} - has, err := sess.Where(notServiceAccountFilter(ss)).Get(&usr) + has, err := sess.Where(NotServiceAccountFilter(ss)).Get(&usr) if err != nil { return err } @@ -762,23 +705,20 @@ func UserDeletions() []string { func (ss *SQLStore) UpdateUserPermissions(userID int64, isAdmin bool) error { return ss.WithTransactionalDbSession(context.Background(), func(sess *DBSession) error { var user user.User - if _, err := sess.ID(userID).Where(notServiceAccountFilter(ss)).Get(&user); err != nil { + if _, err := sess.ID(userID).Where(NotServiceAccountFilter(ss)).Get(&user); err != nil { return err } user.IsAdmin = isAdmin sess.UseBool("is_admin") - _, err := sess.ID(user.ID).Update(&user) if err != nil { return err } - // validate that after update there is at least one server admin if err := validateOneAdminLeft(sess); err != nil { return err } - return nil }) } diff --git a/pkg/services/sqlstore/user_test.go b/pkg/services/sqlstore/user_test.go index 51e1d56993c..2094a8263c5 100644 --- a/pkg/services/sqlstore/user_test.go +++ b/pkg/services/sqlstore/user_test.go @@ -12,73 +12,6 @@ import ( "github.com/stretchr/testify/require" ) -func TestIntegrationUserUpdate(t *testing.T) { - if testing.Short() { - t.Skip("skipping integration test") - } - - ss := InitTestDB(t) - - users := createFiveTestUsers(t, ss, func(i int) *user.CreateUserCommand { - return &user.CreateUserCommand{ - Email: fmt.Sprint("USER", i, "@test.com"), - Name: fmt.Sprint("USER", i), - Login: fmt.Sprint("loginUSER", i), - IsDisabled: false, - } - }) - - ss.Cfg.CaseInsensitiveLogin = true - - t.Run("Testing DB - update generates duplicate user", func(t *testing.T) { - err := ss.UpdateUser(context.Background(), &models.UpdateUserCommand{ - Login: "loginuser2", - UserId: users[0].ID, - }) - - require.Error(t, err) - }) - - t.Run("Testing DB - update lowercases existing user", func(t *testing.T) { - err := ss.UpdateUser(context.Background(), &models.UpdateUserCommand{ - Login: "loginUSER0", - Email: "USER0@test.com", - UserId: users[0].ID, - }) - require.NoError(t, err) - - query := models.GetUserByIdQuery{Id: users[0].ID} - err = ss.GetUserById(context.Background(), &query) - require.NoError(t, err) - - require.Equal(t, "loginuser0", query.Result.Login) - require.Equal(t, "user0@test.com", query.Result.Email) - }) - - t.Run("Testing DB - no user info provided", func(t *testing.T) { - err := ss.UpdateUser(context.Background(), &models.UpdateUserCommand{ - Login: "", - Email: "", - Name: "Change Name", - UserId: users[3].ID, - }) - require.NoError(t, err) - - query := models.GetUserByIdQuery{Id: users[3].ID} - err = ss.GetUserById(context.Background(), &query) - require.NoError(t, err) - - // Changed - require.Equal(t, "Change Name", query.Result.Name) - - // Unchanged - require.Equal(t, "loginUSER3", query.Result.Login) - require.Equal(t, "USER3@test.com", query.Result.Email) - }) - - ss.Cfg.CaseInsensitiveLogin = false -} - func TestIntegrationUserDataAccess(t *testing.T) { if testing.Short() { t.Skip("skipping integration 
test") diff --git a/pkg/services/store/object/generate.sh b/pkg/services/store/object/generate.sh new file mode 100755 index 00000000000..dac1819eed3 --- /dev/null +++ b/pkg/services/store/object/generate.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +# To compile all protobuf files in this repository, run +# "mage protobuf" at the top-level. + +set -eu + +#DST_DIR=../genproto/entity +DST_DIR=./ + +SOURCE="${BASH_SOURCE[0]}" +while [ -h "$SOURCE" ] ; do SOURCE="$(readlink "$SOURCE")"; done +DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" + +cd "$DIR" + +protoc -I ./ \ + --go_out=${DST_DIR} \ + --go-grpc_out=${DST_DIR} --go-grpc_opt=require_unimplemented_servers=false \ + object.proto + \ No newline at end of file diff --git a/pkg/services/store/object/object.go b/pkg/services/store/object/object.go deleted file mode 100644 index d54ff367bee..00000000000 --- a/pkg/services/store/object/object.go +++ /dev/null @@ -1,64 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.28.1 -// protoc v3.21.5 -// source: object.proto - -package object - -// Will be replaced with something from the SDK -type UserInfo struct { - Id int64 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` // internal grafana ID - Login string `protobuf:"bytes,2,opt,name=login,proto3" json:"login,omitempty"` // string ID? -} - -// The canonical object/document data -- this represents the raw bytes and storage level metadata -type RawObject struct { - // Unique ID - UID string `protobuf:"bytes,1,opt,name=UID,proto3" json:"UID,omitempty"` - // Identify the object kind. This kind will be used to apply a schema to the body and - // will trigger additional indexing behavior. - Kind string `protobuf:"bytes,2,opt,name=kind,proto3" json:"kind,omitempty"` - // Time in epoch milliseconds that the object was modified - Modified int64 `protobuf:"varint,3,opt,name=modified,proto3" json:"modified,omitempty"` - // Who modified the object - ModifiedBy *UserInfo `protobuf:"bytes,4,opt,name=modified_by,json=modifiedBy,proto3" json:"modified_by,omitempty"` - // Content Length - Size int64 `protobuf:"varint,5,opt,name=size,proto3" json:"size,omitempty"` - // MD5 digest of the body - ETag string `protobuf:"bytes,6,opt,name=ETag,proto3" json:"ETag,omitempty"` - // Raw bytes of the storage object. The kind will determine what is a valid payload - Body []byte `protobuf:"bytes,7,opt,name=body,proto3" json:"body,omitempty"` - // The version will change when the object is saved. 
It is not necessarily sortable - // - // NOTE: currently managed by the dashboard+dashboard_version tables - Version string `protobuf:"bytes,8,opt,name=version,proto3" json:"version,omitempty"` - // optional "save" or "commit" message - // - // NOTE: currently managed by the dashboard_version table, and will be returned from a "history" command - Comment string `protobuf:"bytes,9,opt,name=comment,proto3" json:"comment,omitempty"` - // Location (path/repo/etc) that defines the canonocal form - // - // NOTE: currently managed by the dashboard_provisioning table - SyncSrc string `protobuf:"bytes,10,opt,name=sync_src,json=syncSrc,proto3" json:"sync_src,omitempty"` - // Time in epoch milliseconds that the object was last synced with an external system (provisioning/git) - // - // NOTE: currently managed by the dashboard_provisioning table - SyncTime int64 `protobuf:"varint,11,opt,name=sync_time,json=syncTime,proto3" json:"sync_time,omitempty"` -} - -// Searchable fields extracted from the object -type ObjectErrorInfo struct { - Code int64 `protobuf:"varint,1,opt,name=code,proto3" json:"code,omitempty"` // TODO... registry somewhere... should be limited to most severe issues - Message string `protobuf:"bytes,2,opt,name=message,proto3" json:"message,omitempty"` - Details string `protobuf:"bytes,3,opt,name=details,proto3" json:"details,omitempty"` -} - -type ExternalReference struct { - // datasource, panel - Kind string `protobuf:"bytes,1,opt,name=kind,proto3" json:"kind,omitempty"` - // prometheus / heatmap - Type string `protobuf:"bytes,2,opt,name=type,proto3" json:"type,omitempty"` - // Unique ID for this object - UID string `protobuf:"bytes,3,opt,name=UID,proto3" json:"UID,omitempty"` -} diff --git a/pkg/services/store/object/object.pb.go b/pkg/services/store/object/object.pb.go new file mode 100644 index 00000000000..93d87d3a839 --- /dev/null +++ b/pkg/services/store/object/object.pb.go @@ -0,0 +1,1552 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.1 +// protoc v3.21.5 +// source: object.proto + +package object + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +// Will be replaced with something from the SDK +type UserInfo struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // internal grafana user ID + Id int64 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + // login name + Login string `protobuf:"bytes,2,opt,name=login,proto3" json:"login,omitempty"` // string ID? 
+} + +func (x *UserInfo) Reset() { + *x = UserInfo{} + if protoimpl.UnsafeEnabled { + mi := &file_object_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *UserInfo) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UserInfo) ProtoMessage() {} + +func (x *UserInfo) ProtoReflect() protoreflect.Message { + mi := &file_object_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UserInfo.ProtoReflect.Descriptor instead. +func (*UserInfo) Descriptor() ([]byte, []int) { + return file_object_proto_rawDescGZIP(), []int{0} +} + +func (x *UserInfo) GetId() int64 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *UserInfo) GetLogin() string { + if x != nil { + return x.Login + } + return "" +} + +// The canonical object/document data -- this represents the raw bytes and storage level metadata +type RawObject struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Unique ID + UID string `protobuf:"bytes,1,opt,name=UID,proto3" json:"UID,omitempty"` + // Identify the object kind. This kind will be used to apply a schema to the body and + // will trigger additional indexing behavior. + Kind string `protobuf:"bytes,2,opt,name=kind,proto3" json:"kind,omitempty"` + // Time in epoch milliseconds that the object was created + Created int64 `protobuf:"varint,3,opt,name=created,proto3" json:"created,omitempty"` + // Time in epoch milliseconds that the object was modified + Modified int64 `protobuf:"varint,4,opt,name=modified,proto3" json:"modified,omitempty"` + // Who created the object + CreatedBy *UserInfo `protobuf:"bytes,5,opt,name=created_by,json=createdBy,proto3" json:"created_by,omitempty"` + // Who modified the object + ModifiedBy *UserInfo `protobuf:"bytes,6,opt,name=modified_by,json=modifiedBy,proto3" json:"modified_by,omitempty"` + // Content Length + Size int64 `protobuf:"varint,7,opt,name=size,proto3" json:"size,omitempty"` + // MD5 digest of the body + ETag string `protobuf:"bytes,8,opt,name=ETag,proto3" json:"ETag,omitempty"` + // Raw bytes of the storage object. The kind will determine what is a valid payload + Body []byte `protobuf:"bytes,9,opt,name=body,proto3" json:"body,omitempty"` + // The version will change when the object is saved. 
It is not necessarily sortable + // + // NOTE: currently managed by the dashboard+dashboard_version tables + Version string `protobuf:"bytes,10,opt,name=version,proto3" json:"version,omitempty"` + // optional "save" or "commit" message + // + // NOTE: currently managed by the dashboard_version table, and will be returned from a "history" command + Comment string `protobuf:"bytes,11,opt,name=comment,proto3" json:"comment,omitempty"` + // Location (path/repo/etc) that defines the canonical form + // + // NOTE: currently managed by the dashboard_provisioning table + SyncSrc string `protobuf:"bytes,12,opt,name=sync_src,json=syncSrc,proto3" json:"sync_src,omitempty"` + // Time in epoch milliseconds that the object was last synced with an external system (provisioning/git) + // + // NOTE: currently managed by the dashboard_provisioning table + SyncTime int64 `protobuf:"varint,13,opt,name=sync_time,json=syncTime,proto3" json:"sync_time,omitempty"` +} + +func (x *RawObject) Reset() { + *x = RawObject{} + if protoimpl.UnsafeEnabled { + mi := &file_object_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RawObject) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RawObject) ProtoMessage() {} + +func (x *RawObject) ProtoReflect() protoreflect.Message { + mi := &file_object_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RawObject.ProtoReflect.Descriptor instead. +func (*RawObject) Descriptor() ([]byte, []int) { + return file_object_proto_rawDescGZIP(), []int{1} +} + +func (x *RawObject) GetUID() string { + if x != nil { + return x.UID + } + return "" +} + +func (x *RawObject) GetKind() string { + if x != nil { + return x.Kind + } + return "" +} + +func (x *RawObject) GetCreated() int64 { + if x != nil { + return x.Created + } + return 0 +} + +func (x *RawObject) GetModified() int64 { + if x != nil { + return x.Modified + } + return 0 +} + +func (x *RawObject) GetCreatedBy() *UserInfo { + if x != nil { + return x.CreatedBy + } + return nil +} + +func (x *RawObject) GetModifiedBy() *UserInfo { + if x != nil { + return x.ModifiedBy + } + return nil +} + +func (x *RawObject) GetSize() int64 { + if x != nil { + return x.Size + } + return 0 +} + +func (x *RawObject) GetETag() string { + if x != nil { + return x.ETag + } + return "" +} + +func (x *RawObject) GetBody() []byte { + if x != nil { + return x.Body + } + return nil +} + +func (x *RawObject) GetVersion() string { + if x != nil { + return x.Version + } + return "" +} + +func (x *RawObject) GetComment() string { + if x != nil { + return x.Comment + } + return "" +} + +func (x *RawObject) GetSyncSrc() string { + if x != nil { + return x.SyncSrc + } + return "" +} + +func (x *RawObject) GetSyncTime() int64 { + if x != nil { + return x.SyncTime + } + return 0 +} + +// Report error while working with objects +// NOTE: real systems at scale will contain errors. +type ObjectErrorInfo struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Match an error code registry?
+ Code int64 `protobuf:"varint,1,opt,name=code,proto3" json:"code,omitempty"` + // Simple error display + Message string `protobuf:"bytes,2,opt,name=message,proto3" json:"message,omitempty"` + // Details encoded in JSON + DetailsJson []byte `protobuf:"bytes,3,opt,name=details_json,json=detailsJson,proto3" json:"details_json,omitempty"` +} + +func (x *ObjectErrorInfo) Reset() { + *x = ObjectErrorInfo{} + if protoimpl.UnsafeEnabled { + mi := &file_object_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ObjectErrorInfo) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ObjectErrorInfo) ProtoMessage() {} + +func (x *ObjectErrorInfo) ProtoReflect() protoreflect.Message { + mi := &file_object_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ObjectErrorInfo.ProtoReflect.Descriptor instead. +func (*ObjectErrorInfo) Descriptor() ([]byte, []int) { + return file_object_proto_rawDescGZIP(), []int{2} +} + +func (x *ObjectErrorInfo) GetCode() int64 { + if x != nil { + return x.Code + } + return 0 +} + +func (x *ObjectErrorInfo) GetMessage() string { + if x != nil { + return x.Message + } + return "" +} + +func (x *ObjectErrorInfo) GetDetailsJson() []byte { + if x != nil { + return x.DetailsJson + } + return nil +} + +type ReadObjectRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Unique ID (Kind is also required) NOTE: UID+kind will likely be replaced with GRN that encodes both + UID string `protobuf:"bytes,1,opt,name=UID,proto3" json:"UID,omitempty"` + // Object kind (UID is also required) NOTE: UID+kind will likely be replaced with GRN that encodes both + Kind string `protobuf:"bytes,2,opt,name=kind,proto3" json:"kind,omitempty"` + // Fetch an explicit version + Version string `protobuf:"bytes,3,opt,name=version,proto3" json:"version,omitempty"` + // Include the full body bytes + WithBody bool `protobuf:"varint,4,opt,name=with_body,json=withBody,proto3" json:"with_body,omitempty"` + // Include derived summary metadata + WithSummary bool `protobuf:"varint,5,opt,name=with_summary,json=withSummary,proto3" json:"with_summary,omitempty"` +} + +func (x *ReadObjectRequest) Reset() { + *x = ReadObjectRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_object_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ReadObjectRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ReadObjectRequest) ProtoMessage() {} + +func (x *ReadObjectRequest) ProtoReflect() protoreflect.Message { + mi := &file_object_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ReadObjectRequest.ProtoReflect.Descriptor instead. 
+func (*ReadObjectRequest) Descriptor() ([]byte, []int) { + return file_object_proto_rawDescGZIP(), []int{3} +} + +func (x *ReadObjectRequest) GetUID() string { + if x != nil { + return x.UID + } + return "" +} + +func (x *ReadObjectRequest) GetKind() string { + if x != nil { + return x.Kind + } + return "" +} + +func (x *ReadObjectRequest) GetVersion() string { + if x != nil { + return x.Version + } + return "" +} + +func (x *ReadObjectRequest) GetWithBody() bool { + if x != nil { + return x.WithBody + } + return false +} + +func (x *ReadObjectRequest) GetWithSummary() bool { + if x != nil { + return x.WithSummary + } + return false +} + +type ReadObjectResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Object details with the body removed + Object *RawObject `protobuf:"bytes,1,opt,name=object,proto3" json:"object,omitempty"` + // Object summary as JSON + SummaryJson []byte `protobuf:"bytes,2,opt,name=summary_json,json=summaryJson,proto3" json:"summary_json,omitempty"` +} + +func (x *ReadObjectResponse) Reset() { + *x = ReadObjectResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_object_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ReadObjectResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ReadObjectResponse) ProtoMessage() {} + +func (x *ReadObjectResponse) ProtoReflect() protoreflect.Message { + mi := &file_object_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ReadObjectResponse.ProtoReflect.Descriptor instead. +func (*ReadObjectResponse) Descriptor() ([]byte, []int) { + return file_object_proto_rawDescGZIP(), []int{4} +} + +func (x *ReadObjectResponse) GetObject() *RawObject { + if x != nil { + return x.Object + } + return nil +} + +func (x *ReadObjectResponse) GetSummaryJson() []byte { + if x != nil { + return x.SummaryJson + } + return nil +} + +type BatchReadObjectRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Batch []*ReadObjectRequest `protobuf:"bytes,3,rep,name=batch,proto3" json:"batch,omitempty"` +} + +func (x *BatchReadObjectRequest) Reset() { + *x = BatchReadObjectRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_object_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *BatchReadObjectRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchReadObjectRequest) ProtoMessage() {} + +func (x *BatchReadObjectRequest) ProtoReflect() protoreflect.Message { + mi := &file_object_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchReadObjectRequest.ProtoReflect.Descriptor instead. 
+func (*BatchReadObjectRequest) Descriptor() ([]byte, []int) { + return file_object_proto_rawDescGZIP(), []int{5} +} + +func (x *BatchReadObjectRequest) GetBatch() []*ReadObjectRequest { + if x != nil { + return x.Batch + } + return nil +} + +type BatchReadObjectResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Results []*ReadObjectResponse `protobuf:"bytes,1,rep,name=results,proto3" json:"results,omitempty"` +} + +func (x *BatchReadObjectResponse) Reset() { + *x = BatchReadObjectResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_object_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *BatchReadObjectResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BatchReadObjectResponse) ProtoMessage() {} + +func (x *BatchReadObjectResponse) ProtoReflect() protoreflect.Message { + mi := &file_object_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BatchReadObjectResponse.ProtoReflect.Descriptor instead. +func (*BatchReadObjectResponse) Descriptor() ([]byte, []int) { + return file_object_proto_rawDescGZIP(), []int{6} +} + +func (x *BatchReadObjectResponse) GetResults() []*ReadObjectResponse { + if x != nil { + return x.Results + } + return nil +} + +type WriteObjectRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Unique ID (Kind is also required) NOTE: UID+kind will likely be replaced with GRN that encodes both + UID string `protobuf:"bytes,1,opt,name=UID,proto3" json:"UID,omitempty"` + // Object kind (UID is also required) NOTE: UID+kind will likely be replaced with GRN that encodes both + Kind string `protobuf:"bytes,2,opt,name=kind,proto3" json:"kind,omitempty"` + // The raw object body + Body []byte `protobuf:"bytes,3,opt,name=body,proto3" json:"body,omitempty"` + // Message that can be seen when exploring object history + Comment string `protobuf:"bytes,4,opt,name=comment,proto3" json:"comment,omitempty"` + // Used for optimistic locking. If missing, the previous version will be replaced regardless + PreviousVersion string `protobuf:"bytes,6,opt,name=previous_version,json=previousVersion,proto3" json:"previous_version,omitempty"` +} + +func (x *WriteObjectRequest) Reset() { + *x = WriteObjectRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_object_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *WriteObjectRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*WriteObjectRequest) ProtoMessage() {} + +func (x *WriteObjectRequest) ProtoReflect() protoreflect.Message { + mi := &file_object_proto_msgTypes[7] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use WriteObjectRequest.ProtoReflect.Descriptor instead. 
+func (*WriteObjectRequest) Descriptor() ([]byte, []int) { + return file_object_proto_rawDescGZIP(), []int{7} +} + +func (x *WriteObjectRequest) GetUID() string { + if x != nil { + return x.UID + } + return "" +} + +func (x *WriteObjectRequest) GetKind() string { + if x != nil { + return x.Kind + } + return "" +} + +func (x *WriteObjectRequest) GetBody() []byte { + if x != nil { + return x.Body + } + return nil +} + +func (x *WriteObjectRequest) GetComment() string { + if x != nil { + return x.Comment + } + return "" +} + +func (x *WriteObjectRequest) GetPreviousVersion() string { + if x != nil { + return x.PreviousVersion + } + return "" +} + +type WriteObjectResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Error info -- if exists, the save did not happen + Error *ObjectErrorInfo `protobuf:"bytes,1,opt,name=error,proto3" json:"error,omitempty"` + // Object details with the body removed + Object *RawObject `protobuf:"bytes,2,opt,name=object,proto3" json:"object,omitempty"` + // Object summary as JSON + SummaryJson []byte `protobuf:"bytes,3,opt,name=summary_json,json=summaryJson,proto3" json:"summary_json,omitempty"` +} + +func (x *WriteObjectResponse) Reset() { + *x = WriteObjectResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_object_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *WriteObjectResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*WriteObjectResponse) ProtoMessage() {} + +func (x *WriteObjectResponse) ProtoReflect() protoreflect.Message { + mi := &file_object_proto_msgTypes[8] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use WriteObjectResponse.ProtoReflect.Descriptor instead. +func (*WriteObjectResponse) Descriptor() ([]byte, []int) { + return file_object_proto_rawDescGZIP(), []int{8} +} + +func (x *WriteObjectResponse) GetError() *ObjectErrorInfo { + if x != nil { + return x.Error + } + return nil +} + +func (x *WriteObjectResponse) GetObject() *RawObject { + if x != nil { + return x.Object + } + return nil +} + +func (x *WriteObjectResponse) GetSummaryJson() []byte { + if x != nil { + return x.SummaryJson + } + return nil +} + +type DeleteObjectRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Unique ID (Kind is also required) NOTE: UID+kind will likely be replaced with GRN that encodes both + UID string `protobuf:"bytes,1,opt,name=UID,proto3" json:"UID,omitempty"` + // Object kind (UID is also required) NOTE: UID+kind will likely be replaced with GRN that encodes both + Kind string `protobuf:"bytes,2,opt,name=kind,proto3" json:"kind,omitempty"` + // Used for optimistic locking. 
If missing, the previous version will be replaced regardless + PreviousVersion string `protobuf:"bytes,3,opt,name=previous_version,json=previousVersion,proto3" json:"previous_version,omitempty"` +} + +func (x *DeleteObjectRequest) Reset() { + *x = DeleteObjectRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_object_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DeleteObjectRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteObjectRequest) ProtoMessage() {} + +func (x *DeleteObjectRequest) ProtoReflect() protoreflect.Message { + mi := &file_object_proto_msgTypes[9] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteObjectRequest.ProtoReflect.Descriptor instead. +func (*DeleteObjectRequest) Descriptor() ([]byte, []int) { + return file_object_proto_rawDescGZIP(), []int{9} +} + +func (x *DeleteObjectRequest) GetUID() string { + if x != nil { + return x.UID + } + return "" +} + +func (x *DeleteObjectRequest) GetKind() string { + if x != nil { + return x.Kind + } + return "" +} + +func (x *DeleteObjectRequest) GetPreviousVersion() string { + if x != nil { + return x.PreviousVersion + } + return "" +} + +type DeleteObjectResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + OK bool `protobuf:"varint,1,opt,name=OK,proto3" json:"OK,omitempty"` +} + +func (x *DeleteObjectResponse) Reset() { + *x = DeleteObjectResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_object_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DeleteObjectResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteObjectResponse) ProtoMessage() {} + +func (x *DeleteObjectResponse) ProtoReflect() protoreflect.Message { + mi := &file_object_proto_msgTypes[10] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteObjectResponse.ProtoReflect.Descriptor instead. 
+func (*DeleteObjectResponse) Descriptor() ([]byte, []int) { + return file_object_proto_rawDescGZIP(), []int{10} +} + +func (x *DeleteObjectResponse) GetOK() bool { + if x != nil { + return x.OK + } + return false +} + +type ObjectHistoryRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Unique ID (Kind is also required) NOTE: UID+kind will likely be replaced with GRN that encodes both + UID string `protobuf:"bytes,1,opt,name=UID,proto3" json:"UID,omitempty"` + // Object kind (UID is also required) NOTE: UID+kind will likely be replaced with GRN that encodes both + Kind string `protobuf:"bytes,2,opt,name=kind,proto3" json:"kind,omitempty"` + // Maximum number of items to return + Limit int64 `protobuf:"varint,3,opt,name=limit,proto3" json:"limit,omitempty"` + // Starting from the requested page + NextPageToken string `protobuf:"bytes,5,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` +} + +func (x *ObjectHistoryRequest) Reset() { + *x = ObjectHistoryRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_object_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ObjectHistoryRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ObjectHistoryRequest) ProtoMessage() {} + +func (x *ObjectHistoryRequest) ProtoReflect() protoreflect.Message { + mi := &file_object_proto_msgTypes[11] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ObjectHistoryRequest.ProtoReflect.Descriptor instead. +func (*ObjectHistoryRequest) Descriptor() ([]byte, []int) { + return file_object_proto_rawDescGZIP(), []int{11} +} + +func (x *ObjectHistoryRequest) GetUID() string { + if x != nil { + return x.UID + } + return "" +} + +func (x *ObjectHistoryRequest) GetKind() string { + if x != nil { + return x.Kind + } + return "" +} + +func (x *ObjectHistoryRequest) GetLimit() int64 { + if x != nil { + return x.Limit + } + return 0 +} + +func (x *ObjectHistoryRequest) GetNextPageToken() string { + if x != nil { + return x.NextPageToken + } + return "" +} + +type ObjectHistoryResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Object metadata without the raw bytes + Object []*RawObject `protobuf:"bytes,1,rep,name=object,proto3" json:"object,omitempty"` + // More results exist... 
pass this in the next request + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` +} + +func (x *ObjectHistoryResponse) Reset() { + *x = ObjectHistoryResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_object_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ObjectHistoryResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ObjectHistoryResponse) ProtoMessage() {} + +func (x *ObjectHistoryResponse) ProtoReflect() protoreflect.Message { + mi := &file_object_proto_msgTypes[12] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ObjectHistoryResponse.ProtoReflect.Descriptor instead. +func (*ObjectHistoryResponse) Descriptor() ([]byte, []int) { + return file_object_proto_rawDescGZIP(), []int{12} +} + +func (x *ObjectHistoryResponse) GetObject() []*RawObject { + if x != nil { + return x.Object + } + return nil +} + +func (x *ObjectHistoryResponse) GetNextPageToken() string { + if x != nil { + return x.NextPageToken + } + return "" +} + +type ObjectSearchRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Starting from the requested page (other query parameters must match!) + NextPageToken string `protobuf:"bytes,1,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + // Maximum number of items to return + Limit int64 `protobuf:"varint,2,opt,name=limit,proto3" json:"limit,omitempty"` + // Free text query string -- mileage may vary :) + Query string `protobuf:"bytes,3,opt,name=query,proto3" json:"query,omitempty"` + // limit to a specific kind (empty is all) + Kind []string `protobuf:"bytes,4,rep,name=kind,proto3" json:"kind,omitempty"` + // Limit results to items in a specific folder + Folder string `protobuf:"bytes,5,opt,name=folder,proto3" json:"folder,omitempty"` + // Must match all labels + Labels map[string]string `protobuf:"bytes,6,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Sorting instructions `field ASC/DESC` + Sort []string `protobuf:"bytes,7,rep,name=sort,proto3" json:"sort,omitempty"` + // TODO, limit the set of fields we actually want returned + // Only supported in the QueryResponse flavor? 
+ Fields []string `protobuf:"bytes,8,rep,name=fields,proto3" json:"fields,omitempty"` + // Return the full body in each payload + WithBody bool `protobuf:"varint,9,opt,name=with_body,json=withBody,proto3" json:"with_body,omitempty"` +} + +func (x *ObjectSearchRequest) Reset() { + *x = ObjectSearchRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_object_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ObjectSearchRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ObjectSearchRequest) ProtoMessage() {} + +func (x *ObjectSearchRequest) ProtoReflect() protoreflect.Message { + mi := &file_object_proto_msgTypes[13] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ObjectSearchRequest.ProtoReflect.Descriptor instead. +func (*ObjectSearchRequest) Descriptor() ([]byte, []int) { + return file_object_proto_rawDescGZIP(), []int{13} +} + +func (x *ObjectSearchRequest) GetNextPageToken() string { + if x != nil { + return x.NextPageToken + } + return "" +} + +func (x *ObjectSearchRequest) GetLimit() int64 { + if x != nil { + return x.Limit + } + return 0 +} + +func (x *ObjectSearchRequest) GetQuery() string { + if x != nil { + return x.Query + } + return "" +} + +func (x *ObjectSearchRequest) GetKind() []string { + if x != nil { + return x.Kind + } + return nil +} + +func (x *ObjectSearchRequest) GetFolder() string { + if x != nil { + return x.Folder + } + return "" +} + +func (x *ObjectSearchRequest) GetLabels() map[string]string { + if x != nil { + return x.Labels + } + return nil +} + +func (x *ObjectSearchRequest) GetSort() []string { + if x != nil { + return x.Sort + } + return nil +} + +func (x *ObjectSearchRequest) GetFields() []string { + if x != nil { + return x.Fields + } + return nil +} + +func (x *ObjectSearchRequest) GetWithBody() bool { + if x != nil { + return x.WithBody + } + return false +} + +type ObjectSearchResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Results []*RawObject `protobuf:"bytes,1,rep,name=results,proto3" json:"results,omitempty"` + // More results exist... pass this in the next request + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` +} + +func (x *ObjectSearchResponse) Reset() { + *x = ObjectSearchResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_object_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ObjectSearchResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ObjectSearchResponse) ProtoMessage() {} + +func (x *ObjectSearchResponse) ProtoReflect() protoreflect.Message { + mi := &file_object_proto_msgTypes[14] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ObjectSearchResponse.ProtoReflect.Descriptor instead. 
+func (*ObjectSearchResponse) Descriptor() ([]byte, []int) { + return file_object_proto_rawDescGZIP(), []int{14} +} + +func (x *ObjectSearchResponse) GetResults() []*RawObject { + if x != nil { + return x.Results + } + return nil +} + +func (x *ObjectSearchResponse) GetNextPageToken() string { + if x != nil { + return x.NextPageToken + } + return "" +} + +var File_object_proto protoreflect.FileDescriptor + +var file_object_proto_rawDesc = []byte{ + 0x0a, 0x0c, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x06, + 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x22, 0x30, 0x0a, 0x08, 0x55, 0x73, 0x65, 0x72, 0x49, 0x6e, + 0x66, 0x6f, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x02, + 0x69, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x6f, 0x67, 0x69, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x05, 0x6c, 0x6f, 0x67, 0x69, 0x6e, 0x22, 0xf3, 0x02, 0x0a, 0x09, 0x52, 0x61, 0x77, + 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x55, 0x49, 0x44, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x03, 0x55, 0x49, 0x44, 0x12, 0x12, 0x0a, 0x04, 0x6b, 0x69, 0x6e, 0x64, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x12, 0x18, 0x0a, 0x07, + 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x07, 0x63, + 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x12, 0x1a, 0x0a, 0x08, 0x6d, 0x6f, 0x64, 0x69, 0x66, 0x69, + 0x65, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x6d, 0x6f, 0x64, 0x69, 0x66, 0x69, + 0x65, 0x64, 0x12, 0x2f, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x62, 0x79, + 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, + 0x55, 0x73, 0x65, 0x72, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, + 0x64, 0x42, 0x79, 0x12, 0x31, 0x0a, 0x0b, 0x6d, 0x6f, 0x64, 0x69, 0x66, 0x69, 0x65, 0x64, 0x5f, + 0x62, 0x79, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x6f, 0x62, 0x6a, 0x65, 0x63, + 0x74, 0x2e, 0x55, 0x73, 0x65, 0x72, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x0a, 0x6d, 0x6f, 0x64, 0x69, + 0x66, 0x69, 0x65, 0x64, 0x42, 0x79, 0x12, 0x12, 0x0a, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x07, + 0x20, 0x01, 0x28, 0x03, 0x52, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x45, 0x54, + 0x61, 0x67, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x45, 0x54, 0x61, 0x67, 0x12, 0x12, + 0x0a, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x62, 0x6f, + 0x64, 0x79, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x0a, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x18, 0x0a, 0x07, + 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x63, + 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x19, 0x0a, 0x08, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x73, + 0x72, 0x63, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x79, 0x6e, 0x63, 0x53, 0x72, + 0x63, 0x12, 0x1b, 0x0a, 0x09, 0x73, 0x79, 0x6e, 0x63, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x0d, + 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x73, 0x79, 0x6e, 0x63, 0x54, 0x69, 0x6d, 0x65, 0x22, 0x62, + 0x0a, 0x0f, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x49, 0x6e, 0x66, + 0x6f, 0x12, 0x12, 0x0a, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, + 0x04, 0x63, 0x6f, 0x64, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 
0x73, 0x73, 0x61, 0x67, 0x65, 0x12, + 0x21, 0x0a, 0x0c, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x5f, 0x6a, 0x73, 0x6f, 0x6e, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0b, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x4a, 0x73, + 0x6f, 0x6e, 0x22, 0x93, 0x01, 0x0a, 0x11, 0x52, 0x65, 0x61, 0x64, 0x4f, 0x62, 0x6a, 0x65, 0x63, + 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x55, 0x49, 0x44, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x55, 0x49, 0x44, 0x12, 0x12, 0x0a, 0x04, 0x6b, 0x69, + 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x12, 0x18, + 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x1b, 0x0a, 0x09, 0x77, 0x69, 0x74, 0x68, + 0x5f, 0x62, 0x6f, 0x64, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x77, 0x69, 0x74, + 0x68, 0x42, 0x6f, 0x64, 0x79, 0x12, 0x21, 0x0a, 0x0c, 0x77, 0x69, 0x74, 0x68, 0x5f, 0x73, 0x75, + 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0b, 0x77, 0x69, 0x74, + 0x68, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x22, 0x62, 0x0a, 0x12, 0x52, 0x65, 0x61, 0x64, + 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x29, + 0x0a, 0x06, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, + 0x2e, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x61, 0x77, 0x4f, 0x62, 0x6a, 0x65, 0x63, + 0x74, 0x52, 0x06, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x73, 0x75, 0x6d, + 0x6d, 0x61, 0x72, 0x79, 0x5f, 0x6a, 0x73, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, + 0x0b, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x4a, 0x73, 0x6f, 0x6e, 0x22, 0x49, 0x0a, 0x16, + 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x61, 0x64, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2f, 0x0a, 0x05, 0x62, 0x61, 0x74, 0x63, 0x68, 0x18, + 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x52, + 0x65, 0x61, 0x64, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x52, 0x05, 0x62, 0x61, 0x74, 0x63, 0x68, 0x22, 0x4f, 0x0a, 0x17, 0x42, 0x61, 0x74, 0x63, 0x68, + 0x52, 0x65, 0x61, 0x64, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x12, 0x34, 0x0a, 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x18, 0x01, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x61, + 0x64, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x52, + 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x22, 0x93, 0x01, 0x0a, 0x12, 0x57, 0x72, 0x69, + 0x74, 0x65, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x10, 0x0a, 0x03, 0x55, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x55, 0x49, + 0x44, 0x12, 0x12, 0x0a, 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x0c, 0x52, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6f, 0x6d, + 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x63, 0x6f, 0x6d, 0x6d, + 0x65, 0x6e, 0x74, 0x12, 0x29, 0x0a, 0x10, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x5f, + 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 
0x0f, 0x70, + 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x92, + 0x01, 0x0a, 0x13, 0x57, 0x72, 0x69, 0x74, 0x65, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2d, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x4f, + 0x62, 0x6a, 0x65, 0x63, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x05, + 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x29, 0x0a, 0x06, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x52, + 0x61, 0x77, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x06, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, + 0x12, 0x21, 0x0a, 0x0c, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x5f, 0x6a, 0x73, 0x6f, 0x6e, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0b, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x4a, + 0x73, 0x6f, 0x6e, 0x22, 0x66, 0x0a, 0x13, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x4f, 0x62, 0x6a, + 0x65, 0x63, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x55, 0x49, + 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x55, 0x49, 0x44, 0x12, 0x12, 0x0a, 0x04, + 0x6b, 0x69, 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6b, 0x69, 0x6e, 0x64, + 0x12, 0x29, 0x0a, 0x10, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x5f, 0x76, 0x65, 0x72, + 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x70, 0x72, 0x65, 0x76, + 0x69, 0x6f, 0x75, 0x73, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x26, 0x0a, 0x14, 0x44, + 0x65, 0x6c, 0x65, 0x74, 0x65, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x4f, 0x4b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, + 0x02, 0x4f, 0x4b, 0x22, 0x7a, 0x0a, 0x14, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x48, 0x69, 0x73, + 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x55, + 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x55, 0x49, 0x44, 0x12, 0x12, 0x0a, + 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6b, 0x69, 0x6e, + 0x64, 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, + 0x52, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x12, 0x26, 0x0a, 0x0f, 0x6e, 0x65, 0x78, 0x74, 0x5f, + 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x0d, 0x6e, 0x65, 0x78, 0x74, 0x50, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x22, + 0x6a, 0x0a, 0x15, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x29, 0x0a, 0x06, 0x6f, 0x62, 0x6a, 0x65, + 0x63, 0x74, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x6f, 0x62, 0x6a, 0x65, 0x63, + 0x74, 0x2e, 0x52, 0x61, 0x77, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x06, 0x6f, 0x62, 0x6a, + 0x65, 0x63, 0x74, 0x12, 0x26, 0x0a, 0x0f, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x70, 0x61, 0x67, 0x65, + 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x6e, 0x65, + 0x78, 0x74, 0x50, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x22, 0xda, 0x02, 0x0a, 0x13, + 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x53, 0x65, 0x61, 0x72, 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x12, 0x26, 0x0a, 0x0f, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x70, 0x61, 0x67, 0x65, + 0x5f, 0x74, 
0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x6e, 0x65, + 0x78, 0x74, 0x50, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x14, 0x0a, 0x05, 0x6c, + 0x69, 0x6d, 0x69, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x6c, 0x69, 0x6d, 0x69, + 0x74, 0x12, 0x14, 0x0a, 0x05, 0x71, 0x75, 0x65, 0x72, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x05, 0x71, 0x75, 0x65, 0x72, 0x79, 0x12, 0x12, 0x0a, 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x18, + 0x04, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x66, + 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x66, 0x6f, 0x6c, + 0x64, 0x65, 0x72, 0x12, 0x3f, 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x06, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x4f, 0x62, 0x6a, + 0x65, 0x63, 0x74, 0x53, 0x65, 0x61, 0x72, 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x6c, 0x61, + 0x62, 0x65, 0x6c, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x73, 0x6f, 0x72, 0x74, 0x18, 0x07, 0x20, 0x03, + 0x28, 0x09, 0x52, 0x04, 0x73, 0x6f, 0x72, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x66, 0x69, 0x65, 0x6c, + 0x64, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x09, 0x52, 0x06, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, + 0x12, 0x1b, 0x0a, 0x09, 0x77, 0x69, 0x74, 0x68, 0x5f, 0x62, 0x6f, 0x64, 0x79, 0x18, 0x09, 0x20, + 0x01, 0x28, 0x08, 0x52, 0x08, 0x77, 0x69, 0x74, 0x68, 0x42, 0x6f, 0x64, 0x79, 0x1a, 0x39, 0x0a, + 0x0b, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, + 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, + 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x6b, 0x0a, 0x14, 0x4f, 0x62, 0x6a, 0x65, + 0x63, 0x74, 0x53, 0x65, 0x61, 0x72, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x12, 0x2b, 0x0a, 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x11, 0x2e, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x61, 0x77, 0x4f, 0x62, + 0x6a, 0x65, 0x63, 0x74, 0x52, 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x12, 0x26, 0x0a, + 0x0f, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x6e, 0x65, 0x78, 0x74, 0x50, 0x61, 0x67, 0x65, + 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x32, 0xae, 0x03, 0x0a, 0x0b, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, + 0x53, 0x74, 0x6f, 0x72, 0x65, 0x12, 0x3d, 0x0a, 0x04, 0x52, 0x65, 0x61, 0x64, 0x12, 0x19, 0x2e, + 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x4f, 0x62, 0x6a, 0x65, 0x63, + 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1a, 0x2e, 0x6f, 0x62, 0x6a, 0x65, 0x63, + 0x74, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4c, 0x0a, 0x09, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x61, + 0x64, 0x12, 0x1e, 0x2e, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x42, 0x61, 0x74, 0x63, 0x68, + 0x52, 0x65, 0x61, 0x64, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x1f, 0x2e, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x42, 0x61, 0x74, 0x63, 0x68, + 0x52, 0x65, 0x61, 0x64, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x12, 0x40, 0x0a, 0x05, 
0x57, 0x72, 0x69, 0x74, 0x65, 0x12, 0x1a, 0x2e, 0x6f, 0x62, + 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, + 0x2e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x43, 0x0a, 0x06, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x1b, + 0x2e, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x4f, 0x62, + 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x6f, 0x62, + 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x4f, 0x62, 0x6a, 0x65, 0x63, + 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x46, 0x0a, 0x07, 0x48, 0x69, 0x73, + 0x74, 0x6f, 0x72, 0x79, 0x12, 0x1c, 0x2e, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x4f, 0x62, + 0x6a, 0x65, 0x63, 0x74, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x4f, 0x62, 0x6a, 0x65, + 0x63, 0x74, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x12, 0x43, 0x0a, 0x06, 0x53, 0x65, 0x61, 0x72, 0x63, 0x68, 0x12, 0x1b, 0x2e, 0x6f, 0x62, + 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x53, 0x65, 0x61, 0x72, 0x63, + 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x6f, 0x62, 0x6a, 0x65, 0x63, + 0x74, 0x2e, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x53, 0x65, 0x61, 0x72, 0x63, 0x68, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x0b, 0x5a, 0x09, 0x2e, 0x2f, 0x3b, 0x6f, 0x62, 0x6a, + 0x65, 0x63, 0x74, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_object_proto_rawDescOnce sync.Once + file_object_proto_rawDescData = file_object_proto_rawDesc +) + +func file_object_proto_rawDescGZIP() []byte { + file_object_proto_rawDescOnce.Do(func() { + file_object_proto_rawDescData = protoimpl.X.CompressGZIP(file_object_proto_rawDescData) + }) + return file_object_proto_rawDescData +} + +var file_object_proto_msgTypes = make([]protoimpl.MessageInfo, 16) +var file_object_proto_goTypes = []interface{}{ + (*UserInfo)(nil), // 0: object.UserInfo + (*RawObject)(nil), // 1: object.RawObject + (*ObjectErrorInfo)(nil), // 2: object.ObjectErrorInfo + (*ReadObjectRequest)(nil), // 3: object.ReadObjectRequest + (*ReadObjectResponse)(nil), // 4: object.ReadObjectResponse + (*BatchReadObjectRequest)(nil), // 5: object.BatchReadObjectRequest + (*BatchReadObjectResponse)(nil), // 6: object.BatchReadObjectResponse + (*WriteObjectRequest)(nil), // 7: object.WriteObjectRequest + (*WriteObjectResponse)(nil), // 8: object.WriteObjectResponse + (*DeleteObjectRequest)(nil), // 9: object.DeleteObjectRequest + (*DeleteObjectResponse)(nil), // 10: object.DeleteObjectResponse + (*ObjectHistoryRequest)(nil), // 11: object.ObjectHistoryRequest + (*ObjectHistoryResponse)(nil), // 12: object.ObjectHistoryResponse + (*ObjectSearchRequest)(nil), // 13: object.ObjectSearchRequest + (*ObjectSearchResponse)(nil), // 14: object.ObjectSearchResponse + nil, // 15: object.ObjectSearchRequest.LabelsEntry +} +var file_object_proto_depIdxs = []int32{ + 0, // 0: object.RawObject.created_by:type_name -> object.UserInfo + 0, // 1: object.RawObject.modified_by:type_name -> object.UserInfo + 1, // 2: object.ReadObjectResponse.object:type_name -> object.RawObject + 3, // 3: 
object.BatchReadObjectRequest.batch:type_name -> object.ReadObjectRequest + 4, // 4: object.BatchReadObjectResponse.results:type_name -> object.ReadObjectResponse + 2, // 5: object.WriteObjectResponse.error:type_name -> object.ObjectErrorInfo + 1, // 6: object.WriteObjectResponse.object:type_name -> object.RawObject + 1, // 7: object.ObjectHistoryResponse.object:type_name -> object.RawObject + 15, // 8: object.ObjectSearchRequest.labels:type_name -> object.ObjectSearchRequest.LabelsEntry + 1, // 9: object.ObjectSearchResponse.results:type_name -> object.RawObject + 3, // 10: object.ObjectStore.Read:input_type -> object.ReadObjectRequest + 5, // 11: object.ObjectStore.BatchRead:input_type -> object.BatchReadObjectRequest + 7, // 12: object.ObjectStore.Write:input_type -> object.WriteObjectRequest + 9, // 13: object.ObjectStore.Delete:input_type -> object.DeleteObjectRequest + 11, // 14: object.ObjectStore.History:input_type -> object.ObjectHistoryRequest + 13, // 15: object.ObjectStore.Search:input_type -> object.ObjectSearchRequest + 4, // 16: object.ObjectStore.Read:output_type -> object.ReadObjectResponse + 6, // 17: object.ObjectStore.BatchRead:output_type -> object.BatchReadObjectResponse + 8, // 18: object.ObjectStore.Write:output_type -> object.WriteObjectResponse + 10, // 19: object.ObjectStore.Delete:output_type -> object.DeleteObjectResponse + 12, // 20: object.ObjectStore.History:output_type -> object.ObjectHistoryResponse + 14, // 21: object.ObjectStore.Search:output_type -> object.ObjectSearchResponse + 16, // [16:22] is the sub-list for method output_type + 10, // [10:16] is the sub-list for method input_type + 10, // [10:10] is the sub-list for extension type_name + 10, // [10:10] is the sub-list for extension extendee + 0, // [0:10] is the sub-list for field type_name +} + +func init() { file_object_proto_init() } +func file_object_proto_init() { + if File_object_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_object_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*UserInfo); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_object_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*RawObject); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_object_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ObjectErrorInfo); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_object_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ReadObjectRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_object_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ReadObjectResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_object_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*BatchReadObjectRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_object_proto_msgTypes[6].Exporter = func(v interface{}, i int) 
interface{} { + switch v := v.(*BatchReadObjectResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_object_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*WriteObjectRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_object_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*WriteObjectResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_object_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DeleteObjectRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_object_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DeleteObjectResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_object_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ObjectHistoryRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_object_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ObjectHistoryResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_object_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ObjectSearchRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_object_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ObjectSearchResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_object_proto_rawDesc, + NumEnums: 0, + NumMessages: 16, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_object_proto_goTypes, + DependencyIndexes: file_object_proto_depIdxs, + MessageInfos: file_object_proto_msgTypes, + }.Build() + File_object_proto = out.File + file_object_proto_rawDesc = nil + file_object_proto_goTypes = nil + file_object_proto_depIdxs = nil +} diff --git a/pkg/services/store/object/object.proto b/pkg/services/store/object/object.proto new file mode 100644 index 00000000000..b9eedebb787 --- /dev/null +++ b/pkg/services/store/object/object.proto @@ -0,0 +1,257 @@ +syntax = "proto3"; +package object; + +option go_package = "./;object"; + +// Will be replaced with something from the SDK +message UserInfo { + // internal grafana user ID + int64 id = 1; + + // login name + string login = 2; // string ID? +} + +// The canonical object/document data -- this represents the raw bytes and storage level metadata +message RawObject { + // Unique ID + string UID = 1; + + // Identify the object kind. This kind will be used to apply a schema to the body and + // will trigger additional indexing behavior. 
+ string kind = 2; + + // Time in epoch milliseconds that the object was created + int64 created = 3; + + // Time in epoch milliseconds that the object was modified + int64 modified = 4; + + // Who created the object + UserInfo created_by = 5; + + // Who modified the object + UserInfo modified_by = 6; + + // Content Length + int64 size = 7; + + // MD5 digest of the body + string ETag = 8; + + // Raw bytes of the storage object. The kind will determine what is a valid payload + bytes body = 9; + + // The version will change when the object is saved. It is not necessarily sortable + // + // NOTE: currently managed by the dashboard+dashboard_version tables + string version = 10; + + // optional "save" or "commit" message + // + // NOTE: currently managed by the dashboard_version table, and will be returned from a "history" command + string comment = 11; + + // Location (path/repo/etc) that defines the canonical form + // + // NOTE: currently managed by the dashboard_provisioning table + string sync_src = 12; + + // Time in epoch milliseconds that the object was last synced with an external system (provisioning/git) + // + // NOTE: currently managed by the dashboard_provisioning table + int64 sync_time = 13; +} + +// Report error while working with objects +// NOTE: real systems at scale will contain errors. +message ObjectErrorInfo { + // Match an error code registry? + int64 code = 1; + + // Simple error display + string message = 2; + + // Details encoded in JSON + bytes details_json = 3; +} + +//----------------------------------------------- +// Read request/response +//----------------------------------------------- + +message ReadObjectRequest { + // Unique ID (Kind is also required) NOTE: UID+kind will likely be replaced with GRN that encodes both + string UID = 1; + + // Object kind (UID is also required) NOTE: UID+kind will likely be replaced with GRN that encodes both + string kind = 2; + + // Fetch an explicit version + string version = 3; + + // Include the full body bytes + bool with_body = 4; + + // Include derived summary metadata + bool with_summary = 5; +} + +message ReadObjectResponse { + // Object details with the body removed + RawObject object = 1; + + // Object summary as JSON + bytes summary_json = 2; +} + +//------------------------------------------------------ +// Make many read requests at once (by Kind+ID+version) +//------------------------------------------------------ + +message BatchReadObjectRequest { + repeated ReadObjectRequest batch = 3; +} + +message BatchReadObjectResponse { + repeated ReadObjectResponse results = 1; +} + +//----------------------------------------------- +// Write request/response +//----------------------------------------------- + +message WriteObjectRequest { + // Unique ID (Kind is also required) NOTE: UID+kind will likely be replaced with GRN that encodes both + string UID = 1; + + // Object kind (UID is also required) NOTE: UID+kind will likely be replaced with GRN that encodes both + string kind = 2; + + // The raw object body + bytes body = 3; + + // Message that can be seen when exploring object history + string comment = 4; + + // Used for optimistic locking.
+message WriteObjectResponse {
+  // Error info -- if exists, the save did not happen
+  ObjectErrorInfo error = 1;
+
+  // Object details with the body removed
+  RawObject object = 2;
+
+  // Object summary as JSON
+  bytes summary_json = 3;
+}
+
+//-----------------------------------------------
+// Delete request/response
+//-----------------------------------------------
+
+message DeleteObjectRequest {
+  // Unique ID (Kind is also required) NOTE: UID+kind will likely be replaced with GRN that encodes both
+  string UID = 1;
+
+  // Object kind (UID is also required) NOTE: UID+kind will likely be replaced with GRN that encodes both
+  string kind = 2;
+
+  // Used for optimistic locking. If missing, the previous version will be replaced regardless
+  string previous_version = 3;
+}
+
+message DeleteObjectResponse {
+  bool OK = 1;
+}
+
+//-----------------------------------------------
+// History request/response
+//-----------------------------------------------
+
+message ObjectHistoryRequest {
+  // Unique ID (Kind is also required) NOTE: UID+kind will likely be replaced with GRN that encodes both
+  string UID = 1;
+
+  // Object kind (UID is also required) NOTE: UID+kind will likely be replaced with GRN that encodes both
+  string kind = 2;
+
+  // Maximum number of items to return
+  int64 limit = 3;
+
+  // Starting from the requested page
+  string next_page_token = 5;
+}
+
+message ObjectHistoryResponse {
+  // Object metadata without the raw bytes
+  repeated RawObject object = 1;
+
+  // More results exist... pass this in the next request
+  string next_page_token = 2;
+}
+
+
+//-----------------------------------------------
+// List request/response
+//-----------------------------------------------
+
+message ObjectSearchRequest {
+  // Starting from the requested page (other query parameters must match!)
+  string next_page_token = 1;
+
+  // Maximum number of items to return
+  int64 limit = 2;
+
+  // Free text query string -- mileage may vary :)
+  string query = 3;
+
+  // limit to a specific kind (empty is all)
+  repeated string kind = 4;
+
+  // Limit results to items in a specific folder
+  string folder = 5;
+
+  // Must match all labels
+  map<string,string> labels = 6;
+
+  // Sorting instructions `field ASC/DESC`
+  repeated string sort = 7;
+
+  // TODO, limit the set of fields we actually want returned
+  // Only supported in the QueryResponse flavor?
+  repeated string fields = 8;
+
+  // Return the full body in each payload
+  bool with_body = 9;
+}
+
+message ObjectSearchResponse {
+  repeated RawObject results = 1;
+
+  // More results exist...
pass this in the next request + string next_page_token = 2; +} + + +//----------------------------------------------- +// Storage interface +//----------------------------------------------- + +// This assumes a future grpc interface where the user info is passed in context, not in each message body +// for now it will only work with an admin API key +service ObjectStore { + rpc Read(ReadObjectRequest) returns (ReadObjectResponse); + rpc BatchRead(BatchReadObjectRequest) returns (BatchReadObjectResponse); + rpc Write(WriteObjectRequest) returns (WriteObjectResponse); + rpc Delete(DeleteObjectRequest) returns (DeleteObjectResponse); + rpc History(ObjectHistoryRequest) returns (ObjectHistoryResponse); + rpc Search(ObjectSearchRequest) returns (ObjectSearchResponse); + +// Ideally an additional search endpoint with more flexibility to limit what you actually care about +// https://github.com/grafana/grafana-plugin-sdk-go/blob/main/proto/backend.proto#L129 +// rpc SearchEX(ObjectSearchRequest) returns (DataResponse); +} diff --git a/pkg/services/store/object/object_grpc.pb.go b/pkg/services/store/object/object_grpc.pb.go new file mode 100644 index 00000000000..1a4887e7353 --- /dev/null +++ b/pkg/services/store/object/object_grpc.pb.go @@ -0,0 +1,283 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.2.0 +// - protoc v3.21.5 +// source: object.proto + +package object + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.32.0 or later. +const _ = grpc.SupportPackageIsVersion7 + +// ObjectStoreClient is the client API for ObjectStore service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type ObjectStoreClient interface { + Read(ctx context.Context, in *ReadObjectRequest, opts ...grpc.CallOption) (*ReadObjectResponse, error) + BatchRead(ctx context.Context, in *BatchReadObjectRequest, opts ...grpc.CallOption) (*BatchReadObjectResponse, error) + Write(ctx context.Context, in *WriteObjectRequest, opts ...grpc.CallOption) (*WriteObjectResponse, error) + Delete(ctx context.Context, in *DeleteObjectRequest, opts ...grpc.CallOption) (*DeleteObjectResponse, error) + History(ctx context.Context, in *ObjectHistoryRequest, opts ...grpc.CallOption) (*ObjectHistoryResponse, error) + Search(ctx context.Context, in *ObjectSearchRequest, opts ...grpc.CallOption) (*ObjectSearchResponse, error) +} + +type objectStoreClient struct { + cc grpc.ClientConnInterface +} + +func NewObjectStoreClient(cc grpc.ClientConnInterface) ObjectStoreClient { + return &objectStoreClient{cc} +} + +func (c *objectStoreClient) Read(ctx context.Context, in *ReadObjectRequest, opts ...grpc.CallOption) (*ReadObjectResponse, error) { + out := new(ReadObjectResponse) + err := c.cc.Invoke(ctx, "/object.ObjectStore/Read", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *objectStoreClient) BatchRead(ctx context.Context, in *BatchReadObjectRequest, opts ...grpc.CallOption) (*BatchReadObjectResponse, error) { + out := new(BatchReadObjectResponse) + err := c.cc.Invoke(ctx, "/object.ObjectStore/BatchRead", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *objectStoreClient) Write(ctx context.Context, in *WriteObjectRequest, opts ...grpc.CallOption) (*WriteObjectResponse, error) { + out := new(WriteObjectResponse) + err := c.cc.Invoke(ctx, "/object.ObjectStore/Write", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *objectStoreClient) Delete(ctx context.Context, in *DeleteObjectRequest, opts ...grpc.CallOption) (*DeleteObjectResponse, error) { + out := new(DeleteObjectResponse) + err := c.cc.Invoke(ctx, "/object.ObjectStore/Delete", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *objectStoreClient) History(ctx context.Context, in *ObjectHistoryRequest, opts ...grpc.CallOption) (*ObjectHistoryResponse, error) { + out := new(ObjectHistoryResponse) + err := c.cc.Invoke(ctx, "/object.ObjectStore/History", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *objectStoreClient) Search(ctx context.Context, in *ObjectSearchRequest, opts ...grpc.CallOption) (*ObjectSearchResponse, error) { + out := new(ObjectSearchResponse) + err := c.cc.Invoke(ctx, "/object.ObjectStore/Search", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ObjectStoreServer is the server API for ObjectStore service. +// All implementations should embed UnimplementedObjectStoreServer +// for forward compatibility +type ObjectStoreServer interface { + Read(context.Context, *ReadObjectRequest) (*ReadObjectResponse, error) + BatchRead(context.Context, *BatchReadObjectRequest) (*BatchReadObjectResponse, error) + Write(context.Context, *WriteObjectRequest) (*WriteObjectResponse, error) + Delete(context.Context, *DeleteObjectRequest) (*DeleteObjectResponse, error) + History(context.Context, *ObjectHistoryRequest) (*ObjectHistoryResponse, error) + Search(context.Context, *ObjectSearchRequest) (*ObjectSearchResponse, error) +} + +// UnimplementedObjectStoreServer should be embedded to have forward compatible implementations. +type UnimplementedObjectStoreServer struct { +} + +func (UnimplementedObjectStoreServer) Read(context.Context, *ReadObjectRequest) (*ReadObjectResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Read not implemented") +} +func (UnimplementedObjectStoreServer) BatchRead(context.Context, *BatchReadObjectRequest) (*BatchReadObjectResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method BatchRead not implemented") +} +func (UnimplementedObjectStoreServer) Write(context.Context, *WriteObjectRequest) (*WriteObjectResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Write not implemented") +} +func (UnimplementedObjectStoreServer) Delete(context.Context, *DeleteObjectRequest) (*DeleteObjectResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Delete not implemented") +} +func (UnimplementedObjectStoreServer) History(context.Context, *ObjectHistoryRequest) (*ObjectHistoryResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method History not implemented") +} +func (UnimplementedObjectStoreServer) Search(context.Context, *ObjectSearchRequest) (*ObjectSearchResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Search not implemented") +} + +// UnsafeObjectStoreServer may be embedded to opt out of forward compatibility for this service. 
+// Use of this interface is not recommended, as added methods to ObjectStoreServer will +// result in compilation errors. +type UnsafeObjectStoreServer interface { + mustEmbedUnimplementedObjectStoreServer() +} + +func RegisterObjectStoreServer(s grpc.ServiceRegistrar, srv ObjectStoreServer) { + s.RegisterService(&ObjectStore_ServiceDesc, srv) +} + +func _ObjectStore_Read_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ReadObjectRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ObjectStoreServer).Read(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/object.ObjectStore/Read", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ObjectStoreServer).Read(ctx, req.(*ReadObjectRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ObjectStore_BatchRead_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchReadObjectRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ObjectStoreServer).BatchRead(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/object.ObjectStore/BatchRead", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ObjectStoreServer).BatchRead(ctx, req.(*BatchReadObjectRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ObjectStore_Write_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(WriteObjectRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ObjectStoreServer).Write(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/object.ObjectStore/Write", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ObjectStoreServer).Write(ctx, req.(*WriteObjectRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ObjectStore_Delete_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteObjectRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ObjectStoreServer).Delete(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/object.ObjectStore/Delete", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ObjectStoreServer).Delete(ctx, req.(*DeleteObjectRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ObjectStore_History_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ObjectHistoryRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ObjectStoreServer).History(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/object.ObjectStore/History", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ObjectStoreServer).History(ctx, req.(*ObjectHistoryRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func 
_ObjectStore_Search_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+	in := new(ObjectSearchRequest)
+	if err := dec(in); err != nil {
+		return nil, err
+	}
+	if interceptor == nil {
+		return srv.(ObjectStoreServer).Search(ctx, in)
+	}
+	info := &grpc.UnaryServerInfo{
+		Server:     srv,
+		FullMethod: "/object.ObjectStore/Search",
+	}
+	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+		return srv.(ObjectStoreServer).Search(ctx, req.(*ObjectSearchRequest))
+	}
+	return interceptor(ctx, in, info, handler)
+}
+
+// ObjectStore_ServiceDesc is the grpc.ServiceDesc for ObjectStore service.
+// It's only intended for direct use with grpc.RegisterService,
+// and not to be introspected or modified (even as a copy)
+var ObjectStore_ServiceDesc = grpc.ServiceDesc{
+	ServiceName: "object.ObjectStore",
+	HandlerType: (*ObjectStoreServer)(nil),
+	Methods: []grpc.MethodDesc{
+		{
+			MethodName: "Read",
+			Handler:    _ObjectStore_Read_Handler,
+		},
+		{
+			MethodName: "BatchRead",
+			Handler:    _ObjectStore_BatchRead_Handler,
+		},
+		{
+			MethodName: "Write",
+			Handler:    _ObjectStore_Write_Handler,
+		},
+		{
+			MethodName: "Delete",
+			Handler:    _ObjectStore_Delete_Handler,
+		},
+		{
+			MethodName: "History",
+			Handler:    _ObjectStore_History_Handler,
+		},
+		{
+			MethodName: "Search",
+			Handler:    _ObjectStore_Search_Handler,
+		},
+	},
+	Streams:  []grpc.StreamDesc{},
+	Metadata: "object.proto",
+}
diff --git a/pkg/services/store/object/summary.go b/pkg/services/store/object/summary.go
index 895e469237d..d19d3a6448c 100644
--- a/pkg/services/store/object/summary.go
+++ b/pkg/services/store/object/summary.go
@@ -24,6 +24,20 @@ type ObjectSummary struct {
 	_ interface{}
 }
 
+// Reference to another object outside itself
+// This message is derived from the object body and can be used to search for references.
+// This does not represent a method to declare a reference to another object.
+type ExternalReference struct {
+	// datasource (instance), dashboard (instance),
+	Kind string `json:"kind,omitempty"`
+
+	// prometheus / heatmap, heatmap|prometheus
+	Type string `json:"type,omitempty"` // flavor
+
+	// Unique ID for this object
+	UID string `json:"UID,omitempty"`
+}
+
 // ObjectSummaryBuilder will read an object and create the summary.
 // This should not include values that depend on system state, only the raw object
 type ObjectSummaryBuilder = func(obj *RawObject) (ObjectSummary, error)
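`ExternalReference` rows are meant to be derived from the object body alone, which is the same contract `ObjectSummaryBuilder` states. A sketch of a builder for a hypothetical `playlist` kind; `ObjectSummary`'s fields are elided in this diff, so the `Name` and `References` fields used here are assumptions for illustration:

```go
package object

import "encoding/json"

// playlistSummaryBuilder is illustrative only. It derives everything from
// the raw body (no system state); Name and References on ObjectSummary are
// assumed fields.
func playlistSummaryBuilder(obj *RawObject) (ObjectSummary, error) {
	summary := ObjectSummary{}

	var playlist struct {
		Name  string   `json:"name"`
		Items []string `json:"items"` // assumed shape: dashboard UIDs
	}
	if err := json.Unmarshal(obj.Body, &playlist); err != nil {
		return summary, err
	}

	summary.Name = playlist.Name
	// Every referenced dashboard becomes a searchable ExternalReference.
	for _, uid := range playlist.Items {
		summary.References = append(summary.References, ExternalReference{
			Kind: "dashboard",
			UID:  uid,
		})
	}
	return summary, nil
}
```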
diff --git a/pkg/services/user/userimpl/store.go b/pkg/services/user/userimpl/store.go
index e986352254b..f2ed0c5c8dd 100644
--- a/pkg/services/user/userimpl/store.go
+++ b/pkg/services/user/userimpl/store.go
@@ -4,6 +4,7 @@ import (
 	"context"
 	"fmt"
 	"strings"
+	"time"
 
 	"github.com/grafana/grafana/pkg/events"
 	"github.com/grafana/grafana/pkg/infra/log"
@@ -23,6 +24,9 @@ type store interface {
 	CaseInsensitiveLoginConflict(context.Context, string, string) error
 	GetByLogin(context.Context, *user.GetUserByLoginQuery) (*user.User, error)
 	GetByEmail(context.Context, *user.GetUserByEmailQuery) (*user.User, error)
+	Update(context.Context, *user.UpdateUserCommand) error
+	ChangePassword(context.Context, *user.ChangeUserPasswordCommand) error
+	UpdateLastSeenAt(context.Context, *user.UpdateUserLastSeenAtCommand) error
 }
 
 type sqlStore struct {
@@ -246,3 +250,64 @@
 
 	return nil
 }
+
+func (ss *sqlStore) Update(ctx context.Context, cmd *user.UpdateUserCommand) error {
+	if ss.cfg.CaseInsensitiveLogin {
+		cmd.Login = strings.ToLower(cmd.Login)
+		cmd.Email = strings.ToLower(cmd.Email)
+	}
+
+	return ss.db.WithTransactionalDbSession(ctx, func(sess *sqlstore.DBSession) error {
+		user := user.User{
+			Name:    cmd.Name,
+			Email:   cmd.Email,
+			Login:   cmd.Login,
+			Theme:   cmd.Theme,
+			Updated: time.Now(),
+		}
+
+		if _, err := sess.ID(cmd.UserID).Where(ss.notServiceAccountFilter()).Update(&user); err != nil {
+			return err
+		}
+
+		if ss.cfg.CaseInsensitiveLogin {
+			if err := ss.userCaseInsensitiveLoginConflict(ctx, sess, user.Login, user.Email); err != nil {
+				return err
+			}
+		}
+
+		sess.PublishAfterCommit(&events.UserUpdated{
+			Timestamp: user.Updated,
+			Id:        cmd.UserID,
+			Name:      user.Name,
+			Login:     user.Login,
+			Email:     user.Email,
+		})
+
+		return nil
+	})
+}
+
+func (ss *sqlStore) ChangePassword(ctx context.Context, cmd *user.ChangeUserPasswordCommand) error {
+	return ss.db.WithTransactionalDbSession(ctx, func(sess *sqlstore.DBSession) error {
+		user := user.User{
+			Password: cmd.NewPassword,
+			Updated:  time.Now(),
+		}
+
+		_, err := sess.ID(cmd.UserID).Where(ss.notServiceAccountFilter()).Update(&user)
+		return err
+	})
+}
+
+func (ss *sqlStore) UpdateLastSeenAt(ctx context.Context, cmd *user.UpdateUserLastSeenAtCommand) error {
+	return ss.db.WithTransactionalDbSession(ctx, func(sess *sqlstore.DBSession) error {
+		user := user.User{
+			ID:         cmd.UserID,
+			LastSeenAt: time.Now(),
+		}
+
+		_, err := sess.ID(cmd.UserID).Update(&user)
+		return err
+	})
+}
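Note that xorm's `Update` skips zero-value struct fields, so the `Update` above supports partial commands: an empty `Login`/`Email` leaves the stored values untouched, which the "no user info provided" test below relies on. A hedged caller sketch (the helper name and IDs are illustrative):

```go
package userimpl

import (
	"context"

	"github.com/grafana/grafana/pkg/services/user"
)

// renameDisplayName is a hypothetical caller. Only Name is non-zero, so
// xorm updates just the name (plus Updated); login and email are kept.
func renameDisplayName(ctx context.Context, s store, userID int64, name string) error {
	err := s.Update(ctx, &user.UpdateUserCommand{
		UserID: userID,
		Name:   name,
	})
	if err != nil {
		// With cfg.CaseInsensitiveLogin set, a lowercased login/email that
		// collides with another account also surfaces as an error here.
		return err
	}
	return nil
}
```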
diff --git a/pkg/services/user/userimpl/store_test.go b/pkg/services/user/userimpl/store_test.go
index 9e865038c2b..bf1c8cef735 100644
--- a/pkg/services/user/userimpl/store_test.go
+++ b/pkg/services/user/userimpl/store_test.go
@@ -2,6 +2,7 @@ package userimpl
 
 import (
 	"context"
+	"fmt"
 	"testing"
 	"time"
 
@@ -194,4 +195,97 @@ func TestIntegrationUserDataAccess(t *testing.T) {
 
 		ss.Cfg.CaseInsensitiveLogin = false
 	})
+
+	t.Run("Change user password", func(t *testing.T) {
+		err := userStore.ChangePassword(context.Background(), &user.ChangeUserPasswordCommand{})
+		require.NoError(t, err)
+	})
+
+	t.Run("update last seen at", func(t *testing.T) {
+		err := userStore.UpdateLastSeenAt(context.Background(), &user.UpdateUserLastSeenAtCommand{})
+		require.NoError(t, err)
+	})
+}
+
+func TestIntegrationUserUpdate(t *testing.T) {
+	if testing.Short() {
+		t.Skip("skipping integration test")
+	}
+
+	ss := sqlstore.InitTestDB(t)
+	userStore := ProvideStore(ss, setting.NewCfg())
+
+	users := createFiveTestUsers(t, ss, func(i int) *user.CreateUserCommand {
+		return &user.CreateUserCommand{
+			Email:      fmt.Sprint("USER", i, "@test.com"),
+			Name:       fmt.Sprint("USER", i),
+			Login:      fmt.Sprint("loginUSER", i),
+			IsDisabled: false,
+		}
+	})
+
+	userStore.cfg.CaseInsensitiveLogin = true
+
+	t.Run("Testing DB - update generates duplicate user", func(t *testing.T) {
+		err := userStore.Update(context.Background(), &user.UpdateUserCommand{
+			Login:  "loginuser2",
+			UserID: users[0].ID,
+		})
+
+		require.Error(t, err)
+	})
+
+	t.Run("Testing DB - update lowercases existing user", func(t *testing.T) {
+		err := userStore.Update(context.Background(), &user.UpdateUserCommand{
+			Login:  "loginUSER0",
+			Email:  "USER0@test.com",
+			UserID: users[0].ID,
+		})
+		require.NoError(t, err)
+
+		result, err := userStore.GetByID(context.Background(), users[0].ID)
+		require.NoError(t, err)
+
+		require.Equal(t, "loginuser0", result.Login)
+		require.Equal(t, "user0@test.com", result.Email)
+	})
+
+	t.Run("Testing DB - no user info provided", func(t *testing.T) {
+		err := userStore.Update(context.Background(), &user.UpdateUserCommand{
+			Login:  "",
+			Email:  "",
+			Name:   "Change Name",
+			UserID: users[3].ID,
+		})
+		require.NoError(t, err)
+
+		result, err := userStore.GetByID(context.Background(), users[3].ID)
+		require.NoError(t, err)
+
+		// Changed
+		require.Equal(t, "Change Name", result.Name)
+
+		// Unchanged
+		require.Equal(t, "loginUSER3", result.Login)
+		require.Equal(t, "USER3@test.com", result.Email)
+	})
+
+	userStore.cfg.CaseInsensitiveLogin = false
+}
+
+func createFiveTestUsers(t *testing.T, sqlStore *sqlstore.SQLStore, fn func(i int) *user.CreateUserCommand) []user.User {
+	t.Helper()
+
+	users := []user.User{}
+	for i := 0; i < 5; i++ {
+		cmd := fn(i)
+
+		user, err := sqlStore.CreateUser(context.Background(), *cmd)
+		require.Nil(t, err)
+		users = append(users, *user)
+	}
+
+	return users
+}
diff --git a/pkg/services/user/userimpl/user.go b/pkg/services/user/userimpl/user.go
index e713e1be78b..1e1a9df869e 100644
--- a/pkg/services/user/userimpl/user.go
+++ b/pkg/services/user/userimpl/user.go
@@ -160,34 +160,16 @@ func (s *Service) GetByEmail(ctx context.Context, query *user.GetUserByEmailQuer
 	return s.store.GetByEmail(ctx, query)
 }
 
-// TODO: remove wrapper around sqlstore
 func (s *Service) Update(ctx context.Context, cmd *user.UpdateUserCommand) error {
-	q := &models.UpdateUserCommand{
-		Name:   cmd.Name,
-		Email:  cmd.Email,
-		Login:  cmd.Login,
-		Theme:  cmd.Theme,
-		UserId: cmd.UserID,
-	}
-	return s.sqlStore.UpdateUser(ctx, q)
+	return s.store.Update(ctx, cmd)
 }
 
-// TODO: remove wrapper around sqlstore
 func (s *Service) ChangePassword(ctx context.Context, cmd *user.ChangeUserPasswordCommand) error {
-	q := &models.ChangeUserPasswordCommand{
-		UserId:      cmd.UserID,
-		NewPassword: cmd.NewPassword,
-		OldPassword: cmd.OldPassword,
-	}
-	return s.sqlStore.ChangeUserPassword(ctx, q)
+	return s.store.ChangePassword(ctx, cmd)
 }
 
-// TODO: remove wrapper around sqlstore
 func (s *Service) UpdateLastSeenAt(ctx context.Context, cmd *user.UpdateUserLastSeenAtCommand) error {
-	q := &models.UpdateUserLastSeenAtCommand{
-		UserId: cmd.UserID,
-	}
-	return s.sqlStore.UpdateUserLastSeenAt(ctx, q)
+	return s.store.UpdateLastSeenAt(ctx, cmd)
 }
 
 // TODO: remove wrapper around sqlstore
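With the service methods reduced to one-line delegation, unit tests no longer need a database; any `store` implementation will do. A sketch against the `FakeUserStore` extended below (constructing `Service` with a literal `store` field is an assumption about its shape):

```go
package userimpl

import (
	"context"
	"testing"

	"github.com/stretchr/testify/require"

	"github.com/grafana/grafana/pkg/services/user"
)

func TestUpdateDelegatesToStore(t *testing.T) {
	fake := &FakeUserStore{ExpectedError: nil}
	svc := Service{store: fake} // assumed: Service carries its store like this

	err := svc.Update(context.Background(), &user.UpdateUserCommand{UserID: 1, Name: "n"})
	require.NoError(t, err)
}
```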
diff --git a/pkg/services/user/userimpl/user_test.go b/pkg/services/user/userimpl/user_test.go
index d69329a0d64..4bf87408b3e 100644
--- a/pkg/services/user/userimpl/user_test.go
+++ b/pkg/services/user/userimpl/user_test.go
@@ -129,3 +129,15 @@ func (f *FakeUserStore) GetByLogin(ctx context.Context, query *user.GetUserByLog
 func (f *FakeUserStore) GetByEmail(ctx context.Context, query *user.GetUserByEmailQuery) (*user.User, error) {
 	return f.ExpectedUser, f.ExpectedError
 }
+
+func (f *FakeUserStore) Update(ctx context.Context, cmd *user.UpdateUserCommand) error {
+	return f.ExpectedError
+}
+
+func (f *FakeUserStore) ChangePassword(ctx context.Context, cmd *user.ChangeUserPasswordCommand) error {
+	return f.ExpectedError
+}
+
+func (f *FakeUserStore) UpdateLastSeenAt(ctx context.Context, cmd *user.UpdateUserLastSeenAtCommand) error {
+	return f.ExpectedError
+}
diff --git a/pkg/tests/testinfra/testinfra.go b/pkg/tests/testinfra/testinfra.go
index 6bf071fd0db..e05e39908ea 100644
--- a/pkg/tests/testinfra/testinfra.go
+++ b/pkg/tests/testinfra/testinfra.go
@@ -311,6 +311,12 @@ func CreateGrafDir(t *testing.T, opts ...GrafanaOpts) (string, string) {
 			_, err = logSection.NewKey("enabled", "false")
 			require.NoError(t, err)
 		}
+		if o.GRPCServerAddress != "" {
+			grpcSection, err := getOrCreateSection("grpc_server")
+			require.NoError(t, err)
+			_, err = grpcSection.NewKey("address", o.GRPCServerAddress)
+			require.NoError(t, err)
+		}
 	}
 
 	cfgPath := filepath.Join(cfgDir, "test.ini")
@@ -341,4 +347,5 @@ type GrafanaOpts struct {
 	EnableUnifiedAlerting        bool
 	UnifiedAlertingDisabledOrgs  []int64
 	EnableLog                    bool
+	GRPCServerAddress            string
 }
diff --git a/pkg/tsdb/tempo/trace_transform.go b/pkg/tsdb/tempo/trace_transform.go
index 1b390833d6f..46ba1b2c077 100644
--- a/pkg/tsdb/tempo/trace_transform.go
+++ b/pkg/tsdb/tempo/trace_transform.go
@@ -111,7 +111,7 @@ func resourceSpansToRows(rs pdata.ResourceSpans) ([][]interface{}, error) {
 
 func spanToSpanRow(span pdata.Span, libraryTags pdata.InstrumentationLibrary, resource pdata.Resource) ([]interface{}, error) {
 	// If the id representation changed from hexstring to something else we need to change the transformBase64IDToHexString in the frontend code
 	traceID := span.TraceID().HexString()
-	traceID = strings.TrimLeft(traceID, "0")
+	traceID = strings.TrimPrefix(traceID, strings.Repeat("0", 16))
 
 	spanID := span.SpanID().HexString()
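The one-line `trace_transform.go` change is subtle: `strings.TrimLeft(traceID, "0")` strips every leading zero, corrupting 64-bit trace IDs that were zero-padded to 128 bits, while `TrimPrefix` with a 16-zero prefix removes only the padded high half. A self-contained demonstration of the difference:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// A 64-bit trace ID zero-padded to 128 bits (32 hex chars).
	padded := "0000000000000000" + "0001020304050607"

	// Old behavior: every leading zero goes, including the ID's own.
	fmt.Println(strings.TrimLeft(padded, "0")) // 1020304050607 (corrupted)

	// New behavior: drop exactly the 16-zero padding, keep the ID intact.
	fmt.Println(strings.TrimPrefix(padded, strings.Repeat("0", 16))) // 0001020304050607

	// A true 128-bit ID (non-zero high half) is left alone.
	full := "00010203040506070001020304050607"
	fmt.Println(strings.TrimPrefix(full, strings.Repeat("0", 16))) // unchanged
}
```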
diff --git a/pkg/tsdb/tempo/trace_transform_test.go b/pkg/tsdb/tempo/trace_transform_test.go
index ae153162a39..5f378c6bce6 100644
--- a/pkg/tsdb/tempo/trace_transform_test.go
+++ b/pkg/tsdb/tempo/trace_transform_test.go
@@ -8,6 +8,7 @@ import (
 	"github.com/grafana/grafana-plugin-sdk-go/data"
 	"github.com/stretchr/testify/require"
 	otlp "go.opentelemetry.io/collector/model/otlp"
+	"go.opentelemetry.io/collector/model/pdata"
 )
 
 func TestTraceToFrame(t *testing.T) {
@@ -51,6 +52,43 @@ func TestTraceToFrame(t *testing.T) {
 		require.Equal(t, json.RawMessage("[{\"timestamp\":1616072924072.856,\"fields\":[{\"value\":1,\"key\":\"chunks requested\"}]},{\"timestamp\":1616072924072.9448,\"fields\":[{\"value\":1,\"key\":\"chunks fetched\"}]}]"), span["logs"])
 		require.Equal(t, json.RawMessage("[{\"value\":0,\"key\":\"status.code\"}]"), span["tags"])
 	})
+
+	t.Run("should transform correct traceID", func(t *testing.T) {
+		proto, err := os.ReadFile("testData/tempo_proto_response")
+		require.NoError(t, err)
+
+		otTrace, err := otlp.NewProtobufTracesUnmarshaler().UnmarshalTraces(proto)
+		require.NoError(t, err)
+
+		var index int
+		otTrace.ResourceSpans().RemoveIf(func(rsp pdata.ResourceSpans) bool {
+			rsp.InstrumentationLibrarySpans().RemoveIf(func(sp pdata.InstrumentationLibrarySpans) bool {
+				sp.Spans().RemoveIf(func(span pdata.Span) bool {
+					if index == 0 {
+						span.SetTraceID(pdata.NewTraceID([16]byte{0, 1, 2, 3, 4, 5, 6, 7, 0, 1, 2, 3, 4, 5, 6, 7}))
+					}
+					if index == 1 {
+						span.SetTraceID(pdata.NewTraceID([16]byte{0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 7}))
+					}
+					index++
+					return false
+				})
+				return false
+			})
+			return false
+		})
+		frame, err := TraceToFrame(otTrace)
+		require.NoError(t, err)
+		bFrame := &BetterFrame{frame}
+
+		traceID128Bit := bFrame.GetRow(0)
+		require.NotNil(t, traceID128Bit)
+		require.Equal(t, "00010203040506070001020304050607", traceID128Bit["traceID"])
+
+		traceID64Bit := bFrame.GetRow(1)
+		require.NotNil(t, traceID64Bit)
+		require.Equal(t, "0001020304050607", traceID64Bit["traceID"])
+	})
 }
 
 type Row map[string]interface{}
diff --git a/public/api-merged.json b/public/api-merged.json
index 64fff7bf107..1444b725093 100644
--- a/public/api-merged.json
+++ b/public/api-merged.json
@@ -8524,7 +8524,7 @@
         "tags": [
           "service_accounts"
         ],
-        "summary": "Create service account",
+        "summary": "# Create service account",
         "operationId": "createServiceAccount",
         "parameters": [
           {
@@ -8560,7 +8560,7 @@
         "tags": [
           "service_accounts"
         ],
-        "summary": "Search service accounts with paging",
+        "summary": "# Search service accounts with paging",
        "operationId": "searchOrgServiceAccountsWithPaging",
         "parameters": [
           {
@@ -8616,7 +8616,7 @@
         "tags": [
           "service_accounts"
         ],
-        "summary": "Get single serviceaccount by Id",
+        "summary": "# Get single serviceaccount by Id",
         "operationId": "retrieveServiceAccount",
         "parameters": [
           {
@@ -8653,7 +8653,7 @@
         "tags": [
           "service_accounts"
         ],
-        "summary": "Delete service account",
+        "summary": "# Delete service account",
         "operationId": "deleteServiceAccount",
         "parameters": [
           {
@@ -8687,7 +8687,7 @@
         "tags": [
           "service_accounts"
         ],
-        "summary": "Update service account",
+        "summary": "# Update service account",
         "operationId": "updateServiceAccount",
         "parameters": [
           {
@@ -8733,7 +8733,7 @@
         "tags": [
           "service_accounts"
         ],
-        "summary": "Get service account tokens",
+        "summary": "# Get service account tokens",
         "operationId": "listTokens",
         "parameters": [
           {
@@ -8767,7 +8767,7 @@
         "tags": [
           "service_accounts"
         ],
-        "summary": "CreateNewToken adds a token to a service account",
+        "summary": "# CreateNewToken adds a token to a service account",
         "operationId": "createToken",
         "parameters": [
           {
@@ -8816,7 +8816,7 @@
         "tags": [
           "service_accounts"
         ],
-        "summary": "DeleteToken deletes service account tokens",
+        "summary": "# DeleteToken deletes service account tokens",
         "operationId": "deleteToken",
         "parameters": [
           {
@@ -11430,8 +11430,7 @@
       }
     },
     "CorrelationConfigTarget": {
-      "description": "CorrelationConfigTarget is the target data query specific to target data source (Correlation.TargetUID)",
-      "type": "object"
+      "description": "CorrelationConfigTarget is the target data query specific to target data source (Correlation.TargetUID)"
     },
     "CreateAlertNotificationCommand": {
       "type": "object",
@@ -11659,7 +11658,7 @@
         "items": {
           "type": "array",
           "items": {
-            "$ref": "#/definitions/PlaylistItemDTO"
+            "$ref": "#/definitions/PlaylistItem"
           }
         },
         "name": {
@@ -14908,22 +14907,27 @@
       "type": "object",
       "properties": {
         "id": {
+          "description": "Unique playlist identifier for internal use, set by Grafana.",
           "type": "integer",
           "format": "int64"
         },
        "interval": {
+          "description": "Interval sets the time between switching views in a playlist.\nFIXME: Is this based on a standardized format or what options are available? Can datemath be used?",
          "type": "string"
        },
        "items": {
+          "description": "The ordered list of items that the playlist will iterate over.",
          "type": "array",
          "items": {
-            "$ref": "#/definitions/PlaylistItemDTO"
+            "$ref": "#/definitions/PlaylistItem"
          }
        },
        "name": {
+          "description": "Name of the playlist.",
          "type": "string"
        },
        "uid": {
+          "description": "Unique playlist identifier. Generated on creation, either by the\ncreator of the playlist or by the application.",
          "type": "string"
        }
      }
@@ -14959,32 +14963,44 @@
        "$ref": "#/definitions/PlaylistDashboard"
      }
    },
-    "PlaylistItemDTO": {
+    "PlaylistItem": {
+      "description": "THIS TYPE IS INTENDED FOR INTERNAL USE BY THE GRAFANA BACKEND, AND IS SUBJECT TO BREAKING CHANGES.\nEquivalent Go types at stable import paths are provided in https://github.com/grafana/grok.",
      "type": "object",
+      "title": "PlaylistItem is the Go representation of a playlist.Item.",
      "properties": {
        "id": {
+          "description": "FIXME: The prefixDropper removes playlist from playlist_id, that doesn't work for us since it'll mean we'll have Id twice.\nID of the playlist item for internal use by Grafana. Deprecated.",
          "type": "integer",
          "format": "int64"
        },
        "order": {
+          "description": "Order is the position in the list for the item. Deprecated.",
          "type": "integer",
          "format": "int64"
        },
        "playlistid": {
+          "description": "ID for the playlist containing the item. Deprecated.",
          "type": "integer",
          "format": "int64"
        },
        "title": {
+          "description": "Title is the human-readable identifier for the playlist item.",
          "type": "string"
        },
        "type": {
-          "type": "string"
+          "$ref": "#/definitions/PlaylistItemType"
        },
        "value": {
+          "description": "Value depends on type and describes the playlist item.\n\ndashboard_by_id: The value is an internal numerical identifier set by Grafana. This\nis not portable as the numerical identifier is non-deterministic between different instances.\nWill be replaced by dashboard_by_uid in the future.\ndashboard_by_tag: The value is a tag which is set on any number of dashboards. All\ndashboards behind the tag will be added to the playlist.",
          "type": "string"
        }
      }
    },
+    "PlaylistItemType": {
+      "description": "THIS TYPE IS INTENDED FOR INTERNAL USE BY THE GRAFANA BACKEND, AND IS SUBJECT TO BREAKING CHANGES.\nEquivalent Go types at stable import paths are provided in https://github.com/grafana/grok.",
+      "type": "string",
+      "title": "Type of the item."
+ }, "Playlists": { "type": "array", "items": { @@ -17756,7 +17772,7 @@ "items": { "type": "array", "items": { - "$ref": "#/definitions/PlaylistItemDTO" + "$ref": "#/definitions/PlaylistItem" } }, "name": { @@ -19467,7 +19483,7 @@ "schema": { "type": "array", "items": { - "$ref": "#/definitions/PlaylistItemDTO" + "$ref": "#/definitions/PlaylistItem" } } }, @@ -20126,4 +20142,4 @@ "name": "service_accounts" } ] -} +} \ No newline at end of file diff --git a/public/api-spec.json b/public/api-spec.json index 0e05977143c..3ea011c7b6f 100644 --- a/public/api-spec.json +++ b/public/api-spec.json @@ -7877,7 +7877,7 @@ "tags": [ "service_accounts" ], - "summary": "Create service account", + "summary": "# Create service account", "operationId": "createServiceAccount", "parameters": [ { @@ -7913,7 +7913,7 @@ "tags": [ "service_accounts" ], - "summary": "Search service accounts with paging", + "summary": "# Search service accounts with paging", "operationId": "searchOrgServiceAccountsWithPaging", "parameters": [ { @@ -7969,7 +7969,7 @@ "tags": [ "service_accounts" ], - "summary": "Get single serviceaccount by Id", + "summary": "# Get single serviceaccount by Id", "operationId": "retrieveServiceAccount", "parameters": [ { @@ -8006,7 +8006,7 @@ "tags": [ "service_accounts" ], - "summary": "Delete service account", + "summary": "# Delete service account", "operationId": "deleteServiceAccount", "parameters": [ { @@ -8040,7 +8040,7 @@ "tags": [ "service_accounts" ], - "summary": "Update service account", + "summary": "# Update service account", "operationId": "updateServiceAccount", "parameters": [ { @@ -8086,7 +8086,7 @@ "tags": [ "service_accounts" ], - "summary": "Get service account tokens", + "summary": "# Get service account tokens", "operationId": "listTokens", "parameters": [ { @@ -8120,7 +8120,7 @@ "tags": [ "service_accounts" ], - "summary": "CreateNewToken adds a token to a service account", + "summary": "# CreateNewToken adds a token to a service account", "operationId": "createToken", "parameters": [ { @@ -8169,7 +8169,7 @@ "tags": [ "service_accounts" ], - "summary": "DeleteToken deletes service account tokens", + "summary": "# DeleteToken deletes service account tokens", "operationId": "deleteToken", "parameters": [ { @@ -10450,8 +10450,7 @@ } }, "CorrelationConfigTarget": { - "description": "CorrelationConfigTarget is the target data query specific to target data source (Correlation.TargetUID)", - "type": "object" + "description": "CorrelationConfigTarget is the target data query specific to target data source (Correlation.TargetUID)" }, "CreateAlertNotificationCommand": { "type": "object", @@ -10679,7 +10678,7 @@ "items": { "type": "array", "items": { - "$ref": "#/definitions/PlaylistItemDTO" + "$ref": "#/definitions/PlaylistItem" } }, "name": { @@ -12825,22 +12824,27 @@ "type": "object", "properties": { "id": { + "description": "Unique playlist identifier for internal use, set by Grafana.", "type": "integer", "format": "int64" }, "interval": { + "description": "Interval sets the time between switching views in a playlist.\nFIXME: Is this based on a standardized format or what options are available? Can datemath be used?", "type": "string" }, "items": { + "description": "The ordered list of items that the playlist will iterate over.", "type": "array", "items": { - "$ref": "#/definitions/PlaylistItemDTO" + "$ref": "#/definitions/PlaylistItem" } }, "name": { + "description": "Name of the playlist.", "type": "string" }, "uid": { + "description": "Unique playlist identifier. 
Generated on creation, either by the\ncreator of the playlist or by the application.",
          "type": "string"
        }
      }
@@ -12876,32 +12880,44 @@
        "$ref": "#/definitions/PlaylistDashboard"
      }
    },
-    "PlaylistItemDTO": {
+    "PlaylistItem": {
+      "description": "THIS TYPE IS INTENDED FOR INTERNAL USE BY THE GRAFANA BACKEND, AND IS SUBJECT TO BREAKING CHANGES.\nEquivalent Go types at stable import paths are provided in https://github.com/grafana/grok.",
      "type": "object",
+      "title": "PlaylistItem is the Go representation of a playlist.Item.",
      "properties": {
        "id": {
+          "description": "FIXME: The prefixDropper removes playlist from playlist_id, that doesn't work for us since it'll mean we'll have Id twice.\nID of the playlist item for internal use by Grafana. Deprecated.",
          "type": "integer",
          "format": "int64"
        },
        "order": {
+          "description": "Order is the position in the list for the item. Deprecated.",
          "type": "integer",
          "format": "int64"
        },
        "playlistid": {
+          "description": "ID for the playlist containing the item. Deprecated.",
          "type": "integer",
          "format": "int64"
        },
        "title": {
+          "description": "Title is the human-readable identifier for the playlist item.",
          "type": "string"
        },
        "type": {
-          "type": "string"
+          "$ref": "#/definitions/PlaylistItemType"
        },
        "value": {
+          "description": "Value depends on type and describes the playlist item.\n\ndashboard_by_id: The value is an internal numerical identifier set by Grafana. This\nis not portable as the numerical identifier is non-deterministic between different instances.\nWill be replaced by dashboard_by_uid in the future.\ndashboard_by_tag: The value is a tag which is set on any number of dashboards. All\ndashboards behind the tag will be added to the playlist.",
          "type": "string"
        }
      }
    },
+    "PlaylistItemType": {
+      "description": "THIS TYPE IS INTENDED FOR INTERNAL USE BY THE GRAFANA BACKEND, AND IS SUBJECT TO BREAKING CHANGES.\nEquivalent Go types at stable import paths are provided in https://github.com/grafana/grok.",
+      "type": "string",
+      "title": "Type of the item."
+    },
     "Playlists": {
       "type": "array",
       "items": {
@@ -14436,7 +14452,7 @@
         "items": {
           "type": "array",
           "items": {
-            "$ref": "#/definitions/PlaylistItemDTO"
+            "$ref": "#/definitions/PlaylistItem"
           }
         },
         "name": {
@@ -15492,7 +15508,7 @@
             "schema": {
               "type": "array",
               "items": {
-                "$ref": "#/definitions/PlaylistItemDTO"
+                "$ref": "#/definitions/PlaylistItem"
               }
             }
           },
@@ -16151,4 +16167,4 @@
     "name": "service_accounts"
   }
 ]
-}
+}
\ No newline at end of file
diff --git a/public/app/core/components/AppChrome/News/NewsContainer.test.tsx b/public/app/core/components/AppChrome/News/NewsContainer.test.tsx
new file mode 100644
index 00000000000..6036d11d9f8
--- /dev/null
+++ b/public/app/core/components/AppChrome/News/NewsContainer.test.tsx
@@ -0,0 +1,28 @@
+import { render, screen } from '@testing-library/react';
+import userEvent from '@testing-library/user-event';
+import fs from 'fs';
+import React from 'react';
+
+import { NewsContainer } from './NewsContainer';
+
+const setup = () => {
+  const { container } = render(<NewsContainer />);
+
+  return { container };
+};
+
+describe('News', () => {
+  const result = fs.readFileSync(`${__dirname}/fixtures/news.xml`, 'utf8');
+  beforeEach(() => {
+    jest.resetAllMocks();
+    window.fetch = jest.fn().mockResolvedValue({ text: () => result });
+  });
+
+  it('should render the drawer when the drawer button is clicked', async () => {
+    setup();
+
+    await userEvent.click(screen.getByRole('button'));
+    expect(screen.getByRole('article')).toBeInTheDocument();
+    expect(screen.getByRole('link')).toHaveAttribute('href', 'https://www.example.net/2022/02/10/something-fake/');
+  });
+});
diff --git a/public/app/core/components/AppChrome/News/NewsContainer.tsx b/public/app/core/components/AppChrome/News/NewsContainer.tsx
new file mode 100644
index 00000000000..7ff107020a7
--- /dev/null
+++ b/public/app/core/components/AppChrome/News/NewsContainer.tsx
@@ -0,0 +1,32 @@
+import React from 'react';
+import { useToggle } from 'react-use';
+
+import { Drawer, Icon } from '@grafana/ui';
+import { DEFAULT_FEED_URL } from 'app/plugins/panel/news/constants';
+
+import { NewsWrapper } from './NewsWrapper';
+
+interface NewsContainerProps {
+  buttonCss?: string;
+}
+
+export function NewsContainer({ buttonCss }: NewsContainerProps) {
+  const [showNewsDrawer, onToggleShowNewsDrawer] = useToggle(false);
+
+  const onChildClick = () => {
+    onToggleShowNewsDrawer(true);
+  };
+
+  return (
+    <>
+      <button className={buttonCss} onClick={onChildClick}>
+        <Icon name="rss" size="lg" />
+      </button>
+      {showNewsDrawer && (
+        <Drawer title="Latest from the blog" scrollableContent onClose={onToggleShowNewsDrawer}>
+          <NewsWrapper feedUrl={DEFAULT_FEED_URL} />
+        </Drawer>
+      )}
+    </>
+  );
+}
diff --git a/public/app/core/components/AppChrome/News/NewsWrapper.tsx b/public/app/core/components/AppChrome/News/NewsWrapper.tsx
new file mode 100644
index 00000000000..cff91cbffe2
--- /dev/null
+++ b/public/app/core/components/AppChrome/News/NewsWrapper.tsx
@@ -0,0 +1,56 @@
+import { css } from '@emotion/css';
+import React, { useEffect } from 'react';
+import AutoSizer from 'react-virtualized-auto-sizer';
+
+import { GrafanaTheme2 } from '@grafana/data';
+import { LoadingPlaceholder, useStyles2 } from '@grafana/ui';
+import { News } from 'app/plugins/panel/news/component/News';
+import { useNewsFeed } from 'app/plugins/panel/news/useNewsFeed';
+
+interface NewsWrapperProps {
+  feedUrl: string;
+}
+export function NewsWrapper({ feedUrl }: NewsWrapperProps) {
+  const styles = useStyles2(getStyles);
+  const { state, getNews } = useNewsFeed(feedUrl);
+  useEffect(() => {
+    getNews();
+  }, [getNews]);
+
+  if (state.loading || state.error) {
+    return (
+      <div className={styles.innerWrapper}>
+        {state.loading && <LoadingPlaceholder text="Loading..." />}
+        {state.error && state.error.message}
+      </div>
+    );
+  }
+
+  if (!state.value) {
+    return null;
+  }
+
+  return (
+    <AutoSizer disableHeight>
+      {({ width }) => (
+        <div>
+          {state.value.map((_, index) => (
+            <News key={index} index={index} width={width} showImage data={state.value} />
+          ))}
+        </div>
+      )}
+    </AutoSizer>
+  );
+}
+
+const getStyles = (theme: GrafanaTheme2) => {
+  return {
+    innerWrapper: css`
+      width: 100%;
+      height: 100%;
+      display: flex;
+      align-items: center;
+      justify-content: center;
+    `,
+  };
+};
diff --git a/public/app/core/components/AppChrome/News/fixtures/news.xml b/public/app/core/components/AppChrome/News/fixtures/news.xml
new file mode 100644
index 00000000000..872da91715e
--- /dev/null
+++ b/public/app/core/components/AppChrome/News/fixtures/news.xml
@@ -0,0 +1,21 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<rss version="2.0" xmlns:dc="http://purl.org/dc/elements/1.1/">
+  <channel>
+    <title>RSS Feed Example</title>
+    <link>https://www.example.net</link>
+    <description>A small description of this feed</description>
+    <language>en-US</language>
+
+    <item>
+      <title>A fake item</title>
+      <link>https://www.example.net/2022/02/10/something-fake/</link>
+
+      <dc:creator>Bill Test</dc:creator>
+      <pubDate>Thu, 10 Feb 2022 16:00:17 +0000</pubDate>
+      <category>Fake</category>
+
+      <description>A description of a fake blog post</description>
+    </item>
+
+  </channel>
+</rss>
diff --git a/public/app/core/components/AppChrome/TopSearchBar.tsx b/public/app/core/components/AppChrome/TopSearchBar.tsx
index 94a505f1a78..432e7a5d792 100644
--- a/public/app/core/components/AppChrome/TopSearchBar.tsx
+++ b/public/app/core/components/AppChrome/TopSearchBar.tsx
@@ -6,6 +6,7 @@ import { Dropdown, Icon, Tooltip, useStyles2 } from '@grafana/ui';
 import { contextSrv } from 'app/core/core';
 import { useSelector } from 'app/types';
 
+import { NewsContainer } from './News/NewsContainer';
 import { TopNavBarMenu } from './TopBar/TopNavBarMenu';
 import { TopSearchBarInput } from './TopSearchBarInput';
 import { TOP_BAR_LEVEL_HEIGHT } from './types';
@@ -36,11 +37,7 @@ export function TopSearchBar() {
       )}
-
-
-
+      <NewsContainer />
       {signInNode && (
diff --git a/public/app/core/components/AppChrome/constants.ts b/public/app/core/components/AppChrome/constants.ts
new file mode 100644
index 00000000000..92ca1590811
--- /dev/null
+++ b/public/app/core/components/AppChrome/constants.ts
@@ -0,0 +1 @@
+export const NEWS_FEED = 'https://grafana.com/blog/news.xml';
diff --git a/public/app/core/components/AppChrome/types.ts b/public/app/core/components/AppChrome/types.ts
index 654334fdea6..d307c26d351 100644
--- a/public/app/core/components/AppChrome/types.ts
+++ b/public/app/core/components/AppChrome/types.ts
@@ -1,5 +1,4 @@
 import { NavModelItem } from '@grafana/data';
-
 export const TOP_BAR_LEVEL_HEIGHT = 40;
 
 export interface ToolbarUpdateProps {
diff --git a/public/app/core/components/NavBar/NavBarItemMenu.tsx b/public/app/core/components/NavBar/NavBarItemMenu.tsx
index 7eccd658ec2..fffbc7f3fcd 100644
--- a/public/app/core/components/NavBar/NavBarItemMenu.tsx
+++ b/public/app/core/components/NavBar/NavBarItemMenu.tsx
@@ -85,7 +85,7 @@ export function NavBarItemMenu(props: NavBarItemMenuProps): ReactElement | null
   const menu = [headerComponent, contentComponent];
 
   return (
-