From 3af7eeb331b8bb62918d114dda5a9d39a0469914 Mon Sep 17 00:00:00 2001
From: kay delaney <45561153+kaydelaney@users.noreply.github.com>
Date: Wed, 26 Jun 2019 19:06:32 +0100
Subject: [PATCH] Docs: Adds section on Querying Logs for Elasticsearch
 (#17730)

Closes #17713
---
 docs/sources/administration/provisioning.md   |  2 +
 .../features/datasources/elasticsearch.md     | 53 +++++++++++++++++++
 2 files changed, 55 insertions(+)

diff --git a/docs/sources/administration/provisioning.md b/docs/sources/administration/provisioning.md
index df1c909dc0c..1299b757752 100644
--- a/docs/sources/administration/provisioning.md
+++ b/docs/sources/administration/provisioning.md
@@ -149,6 +149,8 @@ Since not all datasources have the same configuration settings we only have the
 | esVersion | number | Elasticsearch | Elasticsearch version as a number (2/5/56/60/70) |
 | timeField | string | Elasticsearch | Which field that should be used as timestamp |
 | interval | string | Elasticsearch | Index date time format. nil(No Pattern), 'Hourly', 'Daily', 'Weekly', 'Monthly' or 'Yearly' |
+| logMessageField | string | Elasticsearch | Which field should be used as the log message |
+| logLevelField | string | Elasticsearch | Which field should be used to indicate the priority of the log message |
 | authType | string | Cloudwatch | Auth provider. keys/credentials/arn |
 | assumeRoleArn | string | Cloudwatch | ARN of Assume Role |
 | defaultRegion | string | Cloudwatch | AWS region |
diff --git a/docs/sources/features/datasources/elasticsearch.md b/docs/sources/features/datasources/elasticsearch.md
index 9d4d65e3699..8c07a187a5e 100644
--- a/docs/sources/features/datasources/elasticsearch.md
+++ b/docs/sources/features/datasources/elasticsearch.md
@@ -79,6 +79,18 @@ Identifier | Description
 `s`   | second
 `ms`  | millisecond
 
+### Logs (BETA)
+
+> Only available in Grafana v6.3+.
+
+There are two optional settings, `Message field name` and `Level field name`, that can be configured on the data source settings page. They determine
+which fields are used for the log message and log level when visualizing logs in [Explore](/features/explore).
+
+For example, if you're using a default setup of Filebeat for shipping logs to Elasticsearch, the following configuration should work:
+
+- **Message field name:** message
+- **Level field name:** fields.level
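+
+To illustrate, a log document indexed by such a Filebeat setup could look roughly like the sketch below (the values are hypothetical and the exact structure depends on your Filebeat version and configuration; only the `message` and `fields.level` paths matter here):
+
+```json
+{
+  "@timestamp": "2019-06-26T18:06:32.000Z",
+  "message": "connection to backend timed out",
+  "fields": {
+    "level": "error"
+  }
+}
+```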
+
 ## Metric Query editor
 
 ![Elasticsearch Query Editor](/img/docs/elasticsearch/query_editor.png)
@@ -162,6 +174,28 @@ Time | The name of the time field, needs to be date field.
 Text | Event description field.
 Tags | Optional field name to use for event tags (can be an array or a CSV string).
 
+## Querying Logs (BETA)
+
+> Only available in Grafana v6.3+.
+
+Querying and displaying log data from Elasticsearch is available via [Explore](/features/explore).
+
+![Elasticsearch Explore Logs](/img/docs/v63/elasticsearch_explore_logs.png)
+
+Select the Elasticsearch data source, change to Logs using the Metrics/Logs switcher, and then optionally enter a Lucene query into the query field to filter the log messages.
+
+Finally, press the `Enter` key or the `Run Query` button to display your logs.
+
+### Log Queries
+
+Once the result is returned, the log panel shows a list of log rows and a bar chart where the x-axis shows the time and the y-axis shows the frequency/count.
+
+Note that the fields used for the log message and level are determined by the [optional data source configuration](#logs-beta).
+
+### Filter Log Messages
+
+Optionally, enter a Lucene query into the query field to filter the log messages. For example, with a default Filebeat setup you should be able to use `fields.level:error` to only show error log messages.
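+
+As a sketch, filters on these fields can also be combined with Lucene operators (the values below are hypothetical examples):
+
+```
+fields.level:error
+fields.level:(error OR warning) AND message:"timeout"
+```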
+
 ## Configure the Datasource with Provisioning
 
 It's now possible to configure datasources using config files with Grafana's provisioning system. You can read more about how it works and all the settings you can set for datasources on the [provisioning docs page](/administration/provisioning/#datasources)
@@ -181,3 +215,22 @@ datasources:
       interval: Daily
       timeField: "@timestamp"
 ```
+
+or, for logs:
+
+```yaml
+apiVersion: 1
+
+datasources:
+  - name: elasticsearch-v7-filebeat
+    type: elasticsearch
+    access: proxy
+    database: "[filebeat-]YYYY.MM.DD"
+    url: http://localhost:9200
+    jsonData:
+      interval: Daily
+      timeField: "@timestamp"
+      esVersion: 70
+      logMessageField: message
+      logLevelField: fields.level
+```