Merge branch 'master' of github.com:grafana/grafana into preferences

Torkel Ödegaard 2016-03-30 13:35:17 -07:00
commit 6ed9164c9d
205 changed files with 7158 additions and 2402 deletions


@ -14,6 +14,7 @@
 * **Plugin API**: Both datasource and panel plugin api (and plugin.json schema) have been updated, requiring an update to plugins. See [plugin api](https://github.com/grafana/grafana/blob/master/public/app/plugins/plugin_api.md) for more info.
 * **InfluxDB 0.8.x** The data source for the old version of influxdb (0.8.x) is no longer included in default builds, but can easily be installed via improved plugin system, closes [#3523](https://github.com/grafana/grafana/issues/3523)
 * **KairosDB** The data source is no longer included in default builds, but can easily be installed via improved plugin system, closes [#3524](https://github.com/grafana/grafana/issues/3524)
+* **Templating**: Templating value formats (glob/regex/pipe etc) are now handled automatically and not specified by the user, which makes variable values reusable in many contexts. It can in some edge cases break existing dashboards that have template variables that do not reload on dashboard load. To fix any issue, go into the template variable options and update the variable (so its values are reloaded).
 ### Enhancements
 * **LDAP**: Support for nested LDAP Groups, closes [#4401](https://github.com/grafana/grafana/issues/4401), [#3808](https://github.com/grafana/grafana/issues/3808)

Godeps/Godeps.json (generated)

@ -1,6 +1,7 @@
 {
 	"ImportPath": "github.com/grafana/grafana",
 	"GoVersion": "go1.5.1",
+	"GodepVersion": "v60",
 	"Packages": [
 		"./pkg/..."
 	],
@ -159,8 +160,8 @@
 	},
 	{
 		"ImportPath": "github.com/go-ldap/ldap",
-		"Comment": "v1-19-g83e6542",
-		"Rev": "83e65426fd1c06626e88aa8a085e5bfed0208e29"
+		"Comment": "v2.2.1",
+		"Rev": "07a7330929b9ee80495c88a4439657d89c7dbd87"
 	},
 	{
 		"ImportPath": "github.com/go-macaron/binding",
@ -209,6 +210,10 @@
 		"Comment": "v0.4.4-44-gf561133",
 		"Rev": "f56113384f2c63dfe4cd8e768e349f1c35122b58"
 	},
+	{
+		"ImportPath": "github.com/gorilla/websocket",
+		"Rev": "c45a635370221f34fea2d5163fd156fcb4e38e8a"
+	},
 	{
 		"ImportPath": "github.com/gosimple/slug",
 		"Rev": "8d258463b4459f161f51d6a357edacd3eef9d663"


@ -2,10 +2,13 @@ language: go
 go:
  - 1.2
  - 1.3
+ - 1.4
+ - 1.5
  - tip
+go_import_path: gopkg.in/ldap.v2
 install:
  - go get gopkg.in/asn1-ber.v1
- - go get gopkg.in/ldap.v1
+ - go get gopkg.in/ldap.v2
  - go get code.google.com/p/go.tools/cmd/cover || go get golang.org/x/tools/cmd/cover
  - go build -v ./...
 script:


@ -1,8 +1,20 @@
-[![GoDoc](https://godoc.org/gopkg.in/ldap.v1?status.svg)](https://godoc.org/gopkg.in/ldap.v1) [![Build Status](https://travis-ci.org/go-ldap/ldap.svg)](https://travis-ci.org/go-ldap/ldap)
+[![GoDoc](https://godoc.org/gopkg.in/ldap.v2?status.svg)](https://godoc.org/gopkg.in/ldap.v2)
+[![Build Status](https://travis-ci.org/go-ldap/ldap.svg)](https://travis-ci.org/go-ldap/ldap)
 # Basic LDAP v3 functionality for the GO programming language.
-## Required Librarys:
+## Install
+For the latest version use:
+    go get gopkg.in/ldap.v2
+Import the latest version with:
+    import "gopkg.in/ldap.v2"
+## Required Libraries:
 - gopkg.in/asn1-ber.v1
@ -14,6 +26,9 @@
 - Compiling string filters to LDAP filters
 - Paging Search Results
 - Modify Requests / Responses
+- Add Requests / Responses
+- Delete Requests / Responses
+- Better Unicode support
 ## Examples:
@ -26,23 +41,15 @@
 ## TODO:
-- Add Requests / Responses
-- Delete Requests / Responses
-- Modify DN Requests / Responses
-- Compare Requests / Responses
-- Implement Tests / Benchmarks
+- [x] Add Requests / Responses
+- [x] Delete Requests / Responses
+- [x] Modify DN Requests / Responses
+- [ ] Compare Requests / Responses
+- [ ] Implement Tests / Benchmarks
 ---
-This feature is disabled at the moment, because in some cases the "Search Request Done" packet will be handled before the last "Search Request Entry":
-- Mulitple internal goroutines to handle network traffic
-  Makes library goroutine safe
-  Can perform multiple search requests at the same time and return
-  the results to the proper goroutine. All requests are blocking requests,
-  so the goroutine does not need special handling
----
 The Go gopher was designed by Renee French. (http://reneefrench.blogspot.com/)
 The design is licensed under the Creative Commons 3.0 Attributions license.
 Read this article for more details: http://blog.golang.org/gopher
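
As a point of reference (not part of the diff), a minimal sketch of how the new v2 import path is used; the host, bind DN, password, base DN and filter below are placeholders, and the request shape follows the tests further down in this changeset.

```go
package main

import (
	"fmt"
	"log"

	"gopkg.in/ldap.v2"
)

func main() {
	// Placeholder host and port.
	l, err := ldap.Dial("tcp", "ldap.example.com:389")
	if err != nil {
		log.Fatal(err)
	}
	defer l.Close()

	// Placeholder bind DN and password.
	if err := l.Bind("cn=admin,dc=example,dc=com", "password"); err != nil {
		log.Fatal(err)
	}

	// Search under a placeholder base DN; same request shape as the tests below.
	searchRequest := ldap.NewSearchRequest(
		"dc=example,dc=com",
		ldap.ScopeWholeSubtree, ldap.DerefAlways, 0, 0, false,
		"(objectClass=organizationalPerson)",
		[]string{"dn", "cn"},
		nil)

	sr, err := l.Search(searchRequest)
	if err != nil {
		log.Fatal(err)
	}
	for _, entry := range sr.Entries {
		fmt.Println(entry.DN, entry.GetAttributeValue("cn"))
	}
}
```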

Godeps/_workspace/src/github.com/go-ldap/ldap/add.go (generated, vendored, new file)

@ -0,0 +1,104 @@
//
// https://tools.ietf.org/html/rfc4511
//
// AddRequest ::= [APPLICATION 8] SEQUENCE {
// entry LDAPDN,
// attributes AttributeList }
//
// AttributeList ::= SEQUENCE OF attribute Attribute
package ldap
import (
"errors"
"log"
"gopkg.in/asn1-ber.v1"
)
type Attribute struct {
attrType string
attrVals []string
}
func (a *Attribute) encode() *ber.Packet {
seq := ber.Encode(ber.ClassUniversal, ber.TypeConstructed, ber.TagSequence, nil, "Attribute")
seq.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, a.attrType, "Type"))
set := ber.Encode(ber.ClassUniversal, ber.TypeConstructed, ber.TagSet, nil, "AttributeValue")
for _, value := range a.attrVals {
set.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, value, "Vals"))
}
seq.AppendChild(set)
return seq
}
type AddRequest struct {
dn string
attributes []Attribute
}
func (a AddRequest) encode() *ber.Packet {
request := ber.Encode(ber.ClassApplication, ber.TypeConstructed, ApplicationAddRequest, nil, "Add Request")
request.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, a.dn, "DN"))
attributes := ber.Encode(ber.ClassUniversal, ber.TypeConstructed, ber.TagSequence, nil, "Attributes")
for _, attribute := range a.attributes {
attributes.AppendChild(attribute.encode())
}
request.AppendChild(attributes)
return request
}
func (a *AddRequest) Attribute(attrType string, attrVals []string) {
a.attributes = append(a.attributes, Attribute{attrType: attrType, attrVals: attrVals})
}
func NewAddRequest(dn string) *AddRequest {
return &AddRequest{
dn: dn,
}
}
func (l *Conn) Add(addRequest *AddRequest) error {
messageID := l.nextMessageID()
packet := ber.Encode(ber.ClassUniversal, ber.TypeConstructed, ber.TagSequence, nil, "LDAP Request")
packet.AppendChild(ber.NewInteger(ber.ClassUniversal, ber.TypePrimitive, ber.TagInteger, messageID, "MessageID"))
packet.AppendChild(addRequest.encode())
l.Debug.PrintPacket(packet)
channel, err := l.sendMessage(packet)
if err != nil {
return err
}
if channel == nil {
return NewError(ErrorNetwork, errors.New("ldap: could not send message"))
}
defer l.finishMessage(messageID)
l.Debug.Printf("%d: waiting for response", messageID)
packet = <-channel
l.Debug.Printf("%d: got response %p", messageID, packet)
if packet == nil {
return NewError(ErrorNetwork, errors.New("ldap: could not retrieve message"))
}
if l.Debug {
if err := addLDAPDescriptions(packet); err != nil {
return err
}
ber.PrintPacket(packet)
}
if packet.Children[1].Tag == ApplicationAddResponse {
resultCode, resultDescription := getLDAPResultCode(packet)
if resultCode != 0 {
return NewError(resultCode, errors.New(resultDescription))
}
} else {
log.Printf("Unexpected Response: %d", packet.Children[1].Tag)
}
l.Debug.Printf("%d: returning", messageID)
return nil
}
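
A usage sketch for the new Add API (not part of the diff); `l` is assumed to be an already bound *ldap.Conn from gopkg.in/ldap.v2, and the DN and attribute values are placeholders.

```go
// Hypothetical usage of the new Add request; l is an existing, bound *ldap.Conn.
addReq := ldap.NewAddRequest("cn=jdoe,ou=people,dc=example,dc=com") // placeholder DN
addReq.Attribute("objectClass", []string{"inetOrgPerson"})
addReq.Attribute("cn", []string{"jdoe"})
addReq.Attribute("sn", []string{"Doe"})
if err := l.Add(addReq); err != nil {
	log.Printf("add failed: %v", err)
}
```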


@ -0,0 +1,23 @@
package ldap
import "crypto/tls"
// Client knows how to interact with an LDAP server
type Client interface {
Start()
StartTLS(config *tls.Config) error
Close()
Bind(username, password string) error
SimpleBind(simpleBindRequest *SimpleBindRequest) (*SimpleBindResult, error)
Add(addRequest *AddRequest) error
Del(delRequest *DelRequest) error
Modify(modifyRequest *ModifyRequest) error
Compare(dn, attribute, value string) (bool, error)
PasswordModify(passwordModifyRequest *PasswordModifyRequest) (*PasswordModifyResult, error)
Search(searchRequest *SearchRequest) (*SearchResult, error)
SearchWithPaging(searchRequest *SearchRequest, pagingSize uint32) (*SearchResult, error)
}
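
One thing the Client interface enables is writing helpers that can be unit-tested with a fake implementation instead of a live *ldap.Conn; a sketch, with the helper name and filter chosen for illustration only:

```go
// findUser is an illustrative helper written against the Client interface, so a
// test can substitute a fake Client for a live *ldap.Conn.
func findUser(c ldap.Client, baseDN, uid string) (*ldap.SearchResult, error) {
	req := ldap.NewSearchRequest(
		baseDN,
		ldap.ScopeWholeSubtree, ldap.DerefAlways, 0, 0, false,
		fmt.Sprintf("(uid=%s)", ldap.EscapeFilter(uid)), // EscapeFilter avoids filter injection
		[]string{"dn", "cn"},
		nil)
	return c.Search(req)
}
```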


@ -8,11 +8,12 @@ import (
"crypto/tls" "crypto/tls"
"errors" "errors"
"fmt" "fmt"
"gopkg.in/asn1-ber.v1"
"log" "log"
"net" "net"
"sync" "sync"
"time" "time"
"gopkg.in/asn1-ber.v1"
) )
const ( const (
@ -53,6 +54,8 @@ type Conn struct {
messageMutex sync.Mutex messageMutex sync.Mutex
} }
var _ Client = &Conn{}
// DefaultTimeout is a package-level variable that sets the timeout value // DefaultTimeout is a package-level variable that sets the timeout value
// used for the Dial and DialTLS methods. // used for the Dial and DialTLS methods.
// //
@ -176,7 +179,7 @@ func (l *Conn) StartTLS(config *tls.Config) error {
ber.PrintPacket(packet) ber.PrintPacket(packet)
} }
if packet.Children[1].Children[0].Value.(int64) == 0 { if resultCode, message := getLDAPResultCode(packet); resultCode == LDAPResultSuccess {
conn := tls.Client(l.conn, config) conn := tls.Client(l.conn, config)
if err := conn.Handshake(); err != nil { if err := conn.Handshake(); err != nil {
@ -186,6 +189,8 @@ func (l *Conn) StartTLS(config *tls.Config) error {
l.isTLS = true l.isTLS = true
l.conn = conn l.conn = conn
} else {
return NewError(resultCode, fmt.Errorf("ldap: cannot StartTLS (%s)", message))
} }
go l.reader() go l.reader()
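
The practical effect of the StartTLS change is that a non-success result code now surfaces as an error instead of being ignored; a hedged sketch of the call site (host name is a placeholder; imports of crypto/tls, log and gopkg.in/ldap.v2 are assumed):

```go
l, err := ldap.Dial("tcp", "ldap.example.com:389") // placeholder host
if err != nil {
	log.Fatal(err)
}
defer l.Close()

// With this change, a refused StartTLS returns an error such as
// "ldap: cannot StartTLS (<server diagnostic>)" instead of silently continuing in the clear.
if err := l.StartTLS(&tls.Config{ServerName: "ldap.example.com"}); err != nil {
	log.Fatal(err)
}
```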


@ -16,11 +16,13 @@ const (
 	ControlTypeBeheraPasswordPolicy = "1.3.6.1.4.1.42.2.27.8.5.1"
 	ControlTypeVChuPasswordMustChange = "2.16.840.1.113730.3.4.4"
 	ControlTypeVChuPasswordWarning = "2.16.840.1.113730.3.4.5"
+	ControlTypeManageDsaIT = "2.16.840.1.113730.3.4.2"
 )
 var ControlTypeMap = map[string]string{
 	ControlTypePaging: "Paging",
 	ControlTypeBeheraPasswordPolicy: "Password Policy - Behera Draft",
+	ControlTypeManageDsaIT: "Manage DSA IT",
 }
 type Control interface {
@ -165,6 +167,36 @@ func (c *ControlVChuPasswordWarning) String() string {
 		c.Expire)
 }
type ControlManageDsaIT struct {
Criticality bool
}
func (c *ControlManageDsaIT) GetControlType() string {
return ControlTypeManageDsaIT
}
func (c *ControlManageDsaIT) Encode() *ber.Packet {
//FIXME
packet := ber.Encode(ber.ClassUniversal, ber.TypeConstructed, ber.TagSequence, nil, "Control")
packet.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, ControlTypeManageDsaIT, "Control Type ("+ControlTypeMap[ControlTypeManageDsaIT]+")"))
if c.Criticality {
packet.AppendChild(ber.NewBoolean(ber.ClassUniversal, ber.TypePrimitive, ber.TagBoolean, c.Criticality, "Criticality"))
}
return packet
}
func (c *ControlManageDsaIT) String() string {
return fmt.Sprintf(
"Control Type: %s (%q) Criticality: %t",
ControlTypeMap[ControlTypeManageDsaIT],
ControlTypeManageDsaIT,
c.Criticality)
}
func NewControlManageDsaIT(Criticality bool) *ControlManageDsaIT {
return &ControlManageDsaIT{Criticality: Criticality}
}
 func FindControl(controls []Control, controlType string) Control {
 	for _, c := range controls {
 		if c.GetControlType() == controlType {
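
A sketch of attaching the new control to a search request; per RFC 3296, ManageDsaIT asks the server to return referral entries as ordinary entries instead of chasing them. The DN and filter are placeholders and `l` is assumed to be a bound *ldap.Conn.

```go
// Placeholder base DN and filter; criticality true asks the server to fail if
// it does not support the control.
searchRequest := ldap.NewSearchRequest(
	"dc=example,dc=com",
	ldap.ScopeWholeSubtree, ldap.DerefAlways, 0, 0, false,
	"(objectClass=referral)",
	[]string{"ref"},
	[]ldap.Control{ldap.NewControlManageDsaIT(true)})

sr, err := l.Search(searchRequest)
if err != nil {
	log.Fatal(err)
}
fmt.Println(len(sr.Entries), "referral objects")
```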

Godeps/_workspace/src/github.com/go-ldap/ldap/del.go (generated, vendored, new file)

@ -0,0 +1,79 @@
//
// https://tools.ietf.org/html/rfc4511
//
// DelRequest ::= [APPLICATION 10] LDAPDN
package ldap
import (
"errors"
"log"
"gopkg.in/asn1-ber.v1"
)
type DelRequest struct {
DN string
Controls []Control
}
func (d DelRequest) encode() *ber.Packet {
request := ber.Encode(ber.ClassApplication, ber.TypePrimitive, ApplicationDelRequest, d.DN, "Del Request")
request.Data.Write([]byte(d.DN))
return request
}
func NewDelRequest(DN string,
Controls []Control) *DelRequest {
return &DelRequest{
DN: DN,
Controls: Controls,
}
}
func (l *Conn) Del(delRequest *DelRequest) error {
messageID := l.nextMessageID()
packet := ber.Encode(ber.ClassUniversal, ber.TypeConstructed, ber.TagSequence, nil, "LDAP Request")
packet.AppendChild(ber.NewInteger(ber.ClassUniversal, ber.TypePrimitive, ber.TagInteger, messageID, "MessageID"))
packet.AppendChild(delRequest.encode())
if delRequest.Controls != nil {
packet.AppendChild(encodeControls(delRequest.Controls))
}
l.Debug.PrintPacket(packet)
channel, err := l.sendMessage(packet)
if err != nil {
return err
}
if channel == nil {
return NewError(ErrorNetwork, errors.New("ldap: could not send message"))
}
defer l.finishMessage(messageID)
l.Debug.Printf("%d: waiting for response", messageID)
packet = <-channel
l.Debug.Printf("%d: got response %p", messageID, packet)
if packet == nil {
return NewError(ErrorNetwork, errors.New("ldap: could not retrieve message"))
}
if l.Debug {
if err := addLDAPDescriptions(packet); err != nil {
return err
}
ber.PrintPacket(packet)
}
if packet.Children[1].Tag == ApplicationDelResponse {
resultCode, resultDescription := getLDAPResultCode(packet)
if resultCode != 0 {
return NewError(resultCode, errors.New(resultDescription))
}
} else {
log.Printf("Unexpected Response: %d", packet.Children[1].Tag)
}
l.Debug.Printf("%d: returning", messageID)
return nil
}
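
A usage sketch for the new Del API (not part of the diff); the DN is a placeholder, nil means no request controls, and `l` is assumed to be a bound *ldap.Conn.

```go
// Hypothetical usage of the new Del request; nil means no controls are attached.
delReq := ldap.NewDelRequest("cn=jdoe,ou=people,dc=example,dc=com", nil) // placeholder DN
if err := l.Del(delReq); err != nil {
	log.Printf("delete failed: %v", err)
}
```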


@ -47,10 +47,10 @@ package ldap
 import (
 	"bytes"
+	enchex "encoding/hex"
 	"errors"
 	"fmt"
 	"strings"
-	enchex "encoding/hex"
 	ber "gopkg.in/asn1-ber.v1"
 )


@ -1,38 +1,40 @@
-package ldap
+package ldap_test
 import (
 	"reflect"
 	"testing"
+	"gopkg.in/ldap.v2"
 )
 func TestSuccessfulDNParsing(t *testing.T) {
testcases := map[string]DN { testcases := map[string]ldap.DN{
"": DN{[]*RelativeDN{}}, "": ldap.DN{[]*ldap.RelativeDN{}},
"cn=Jim\\2C \\22Hasse Hö\\22 Hansson!,dc=dummy,dc=com": DN{[]*RelativeDN{ "cn=Jim\\2C \\22Hasse Hö\\22 Hansson!,dc=dummy,dc=com": ldap.DN{[]*ldap.RelativeDN{
&RelativeDN{[]*AttributeTypeAndValue{&AttributeTypeAndValue{"cn", "Jim, \"Hasse Hö\" Hansson!"},}}, &ldap.RelativeDN{[]*ldap.AttributeTypeAndValue{&ldap.AttributeTypeAndValue{"cn", "Jim, \"Hasse Hö\" Hansson!"}}},
&RelativeDN{[]*AttributeTypeAndValue{&AttributeTypeAndValue{"dc", "dummy"},}}, &ldap.RelativeDN{[]*ldap.AttributeTypeAndValue{&ldap.AttributeTypeAndValue{"dc", "dummy"}}},
&RelativeDN{[]*AttributeTypeAndValue{&AttributeTypeAndValue{"dc", "com"}, }},}}, &ldap.RelativeDN{[]*ldap.AttributeTypeAndValue{&ldap.AttributeTypeAndValue{"dc", "com"}}}}},
"UID=jsmith,DC=example,DC=net": DN{[]*RelativeDN{ "UID=jsmith,DC=example,DC=net": ldap.DN{[]*ldap.RelativeDN{
&RelativeDN{[]*AttributeTypeAndValue{&AttributeTypeAndValue{"UID", "jsmith"},}}, &ldap.RelativeDN{[]*ldap.AttributeTypeAndValue{&ldap.AttributeTypeAndValue{"UID", "jsmith"}}},
&RelativeDN{[]*AttributeTypeAndValue{&AttributeTypeAndValue{"DC", "example"},}}, &ldap.RelativeDN{[]*ldap.AttributeTypeAndValue{&ldap.AttributeTypeAndValue{"DC", "example"}}},
&RelativeDN{[]*AttributeTypeAndValue{&AttributeTypeAndValue{"DC", "net"}, }},}}, &ldap.RelativeDN{[]*ldap.AttributeTypeAndValue{&ldap.AttributeTypeAndValue{"DC", "net"}}}}},
"OU=Sales+CN=J. Smith,DC=example,DC=net": DN{[]*RelativeDN{ "OU=Sales+CN=J. Smith,DC=example,DC=net": ldap.DN{[]*ldap.RelativeDN{
&RelativeDN{[]*AttributeTypeAndValue{ &ldap.RelativeDN{[]*ldap.AttributeTypeAndValue{
&AttributeTypeAndValue{"OU", "Sales"}, &ldap.AttributeTypeAndValue{"OU", "Sales"},
&AttributeTypeAndValue{"CN", "J. Smith"},}}, &ldap.AttributeTypeAndValue{"CN", "J. Smith"}}},
&RelativeDN{[]*AttributeTypeAndValue{&AttributeTypeAndValue{"DC", "example"},}}, &ldap.RelativeDN{[]*ldap.AttributeTypeAndValue{&ldap.AttributeTypeAndValue{"DC", "example"}}},
&RelativeDN{[]*AttributeTypeAndValue{&AttributeTypeAndValue{"DC", "net"}, }},}}, &ldap.RelativeDN{[]*ldap.AttributeTypeAndValue{&ldap.AttributeTypeAndValue{"DC", "net"}}}}},
"1.3.6.1.4.1.1466.0=#04024869": DN{[]*RelativeDN{ "1.3.6.1.4.1.1466.0=#04024869": ldap.DN{[]*ldap.RelativeDN{
&RelativeDN{[]*AttributeTypeAndValue{&AttributeTypeAndValue{"1.3.6.1.4.1.1466.0", "Hi"},}},}}, &ldap.RelativeDN{[]*ldap.AttributeTypeAndValue{&ldap.AttributeTypeAndValue{"1.3.6.1.4.1.1466.0", "Hi"}}}}},
"1.3.6.1.4.1.1466.0=#04024869,DC=net": DN{[]*RelativeDN{ "1.3.6.1.4.1.1466.0=#04024869,DC=net": ldap.DN{[]*ldap.RelativeDN{
&RelativeDN{[]*AttributeTypeAndValue{&AttributeTypeAndValue{"1.3.6.1.4.1.1466.0", "Hi"},}}, &ldap.RelativeDN{[]*ldap.AttributeTypeAndValue{&ldap.AttributeTypeAndValue{"1.3.6.1.4.1.1466.0", "Hi"}}},
&RelativeDN{[]*AttributeTypeAndValue{&AttributeTypeAndValue{"DC", "net"}, }},}}, &ldap.RelativeDN{[]*ldap.AttributeTypeAndValue{&ldap.AttributeTypeAndValue{"DC", "net"}}}}},
"CN=Lu\\C4\\8Di\\C4\\87": DN{[]*RelativeDN{ "CN=Lu\\C4\\8Di\\C4\\87": ldap.DN{[]*ldap.RelativeDN{
&RelativeDN{[]*AttributeTypeAndValue{&AttributeTypeAndValue{"CN", "Lučić"},}},}}, &ldap.RelativeDN{[]*ldap.AttributeTypeAndValue{&ldap.AttributeTypeAndValue{"CN", "Lučić"}}}}},
} }
for test, answer := range testcases { for test, answer := range testcases {
dn, err := ParseDN(test) dn, err := ldap.ParseDN(test)
if err != nil { if err != nil {
t.Errorf(err.Error()) t.Errorf(err.Error())
continue continue
@ -58,7 +60,7 @@ func TestErrorDNParsing(t *testing.T) {
 	}
 	for test, answer := range testcases {
-		_, err := ParseDN(test)
+		_, err := ldap.ParseDN(test)
 		if err == nil {
 			t.Errorf("Expected %s to fail parsing but succeeded\n", test)
 		} else if err.Error() != answer {
@ -66,5 +68,3 @@ func TestErrorDNParsing(t *testing.T) {
} }
} }
} }

Godeps/_workspace/src/github.com/go-ldap/ldap/error.go (generated, vendored, new file)

@ -0,0 +1,137 @@
package ldap
import (
"fmt"
"gopkg.in/asn1-ber.v1"
)
// LDAP Result Codes
const (
LDAPResultSuccess = 0
LDAPResultOperationsError = 1
LDAPResultProtocolError = 2
LDAPResultTimeLimitExceeded = 3
LDAPResultSizeLimitExceeded = 4
LDAPResultCompareFalse = 5
LDAPResultCompareTrue = 6
LDAPResultAuthMethodNotSupported = 7
LDAPResultStrongAuthRequired = 8
LDAPResultReferral = 10
LDAPResultAdminLimitExceeded = 11
LDAPResultUnavailableCriticalExtension = 12
LDAPResultConfidentialityRequired = 13
LDAPResultSaslBindInProgress = 14
LDAPResultNoSuchAttribute = 16
LDAPResultUndefinedAttributeType = 17
LDAPResultInappropriateMatching = 18
LDAPResultConstraintViolation = 19
LDAPResultAttributeOrValueExists = 20
LDAPResultInvalidAttributeSyntax = 21
LDAPResultNoSuchObject = 32
LDAPResultAliasProblem = 33
LDAPResultInvalidDNSyntax = 34
LDAPResultAliasDereferencingProblem = 36
LDAPResultInappropriateAuthentication = 48
LDAPResultInvalidCredentials = 49
LDAPResultInsufficientAccessRights = 50
LDAPResultBusy = 51
LDAPResultUnavailable = 52
LDAPResultUnwillingToPerform = 53
LDAPResultLoopDetect = 54
LDAPResultNamingViolation = 64
LDAPResultObjectClassViolation = 65
LDAPResultNotAllowedOnNonLeaf = 66
LDAPResultNotAllowedOnRDN = 67
LDAPResultEntryAlreadyExists = 68
LDAPResultObjectClassModsProhibited = 69
LDAPResultAffectsMultipleDSAs = 71
LDAPResultOther = 80
ErrorNetwork = 200
ErrorFilterCompile = 201
ErrorFilterDecompile = 202
ErrorDebugging = 203
ErrorUnexpectedMessage = 204
ErrorUnexpectedResponse = 205
)
var LDAPResultCodeMap = map[uint8]string{
LDAPResultSuccess: "Success",
LDAPResultOperationsError: "Operations Error",
LDAPResultProtocolError: "Protocol Error",
LDAPResultTimeLimitExceeded: "Time Limit Exceeded",
LDAPResultSizeLimitExceeded: "Size Limit Exceeded",
LDAPResultCompareFalse: "Compare False",
LDAPResultCompareTrue: "Compare True",
LDAPResultAuthMethodNotSupported: "Auth Method Not Supported",
LDAPResultStrongAuthRequired: "Strong Auth Required",
LDAPResultReferral: "Referral",
LDAPResultAdminLimitExceeded: "Admin Limit Exceeded",
LDAPResultUnavailableCriticalExtension: "Unavailable Critical Extension",
LDAPResultConfidentialityRequired: "Confidentiality Required",
LDAPResultSaslBindInProgress: "Sasl Bind In Progress",
LDAPResultNoSuchAttribute: "No Such Attribute",
LDAPResultUndefinedAttributeType: "Undefined Attribute Type",
LDAPResultInappropriateMatching: "Inappropriate Matching",
LDAPResultConstraintViolation: "Constraint Violation",
LDAPResultAttributeOrValueExists: "Attribute Or Value Exists",
LDAPResultInvalidAttributeSyntax: "Invalid Attribute Syntax",
LDAPResultNoSuchObject: "No Such Object",
LDAPResultAliasProblem: "Alias Problem",
LDAPResultInvalidDNSyntax: "Invalid DN Syntax",
LDAPResultAliasDereferencingProblem: "Alias Dereferencing Problem",
LDAPResultInappropriateAuthentication: "Inappropriate Authentication",
LDAPResultInvalidCredentials: "Invalid Credentials",
LDAPResultInsufficientAccessRights: "Insufficient Access Rights",
LDAPResultBusy: "Busy",
LDAPResultUnavailable: "Unavailable",
LDAPResultUnwillingToPerform: "Unwilling To Perform",
LDAPResultLoopDetect: "Loop Detect",
LDAPResultNamingViolation: "Naming Violation",
LDAPResultObjectClassViolation: "Object Class Violation",
LDAPResultNotAllowedOnNonLeaf: "Not Allowed On Non Leaf",
LDAPResultNotAllowedOnRDN: "Not Allowed On RDN",
LDAPResultEntryAlreadyExists: "Entry Already Exists",
LDAPResultObjectClassModsProhibited: "Object Class Mods Prohibited",
LDAPResultAffectsMultipleDSAs: "Affects Multiple DSAs",
LDAPResultOther: "Other",
}
func getLDAPResultCode(packet *ber.Packet) (code uint8, description string) {
if len(packet.Children) >= 2 {
response := packet.Children[1]
if response.ClassType == ber.ClassApplication && response.TagType == ber.TypeConstructed && len(response.Children) >= 3 {
// Children[1].Children[2] is the diagnosticMessage which is guaranteed to exist as seen here: https://tools.ietf.org/html/rfc4511#section-4.1.9
return uint8(response.Children[0].Value.(int64)), response.Children[2].Value.(string)
}
}
return ErrorNetwork, "Invalid packet format"
}
type Error struct {
Err error
ResultCode uint8
}
func (e *Error) Error() string {
return fmt.Sprintf("LDAP Result Code %d %q: %s", e.ResultCode, LDAPResultCodeMap[e.ResultCode], e.Err.Error())
}
func NewError(resultCode uint8, err error) error {
return &Error{ResultCode: resultCode, Err: err}
}
func IsErrorWithCode(err error, desiredResultCode uint8) bool {
if err == nil {
return false
}
serverError, ok := err.(*Error)
if !ok {
return false
}
return serverError.ResultCode == desiredResultCode
}
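
IsErrorWithCode lets callers branch on a specific LDAP result code; a sketch, assuming a bound *ldap.Conn named `l` and a placeholder DN:

```go
err := l.Del(ldap.NewDelRequest("cn=jdoe,ou=people,dc=example,dc=com", nil)) // placeholder DN
if ldap.IsErrorWithCode(err, ldap.LDAPResultNoSuchObject) {
	// The entry was already gone; treat the delete as a success.
} else if err != nil {
	log.Fatal(err)
}
```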


@ -5,10 +5,10 @@ import (
"fmt" "fmt"
"log" "log"
"github.com/go-ldap/ldap" "gopkg.in/ldap.v2"
) )
// ExampleConn_Bind demonstrats how to bind a connection to an ldap user // ExampleConn_Bind demonstrates how to bind a connection to an ldap user
// allowing access to restricted attrabutes that user has access to // allowing access to restricted attrabutes that user has access to
func ExampleConn_Bind() { func ExampleConn_Bind() {
l, err := ldap.Dial("tcp", fmt.Sprintf("%s:%d", "ldap.example.com", 389)) l, err := ldap.Dial("tcp", fmt.Sprintf("%s:%d", "ldap.example.com", 389))


@ -5,9 +5,12 @@
 package ldap
 import (
+	"bytes"
+	hexpac "encoding/hex"
 	"errors"
 	"fmt"
 	"strings"
+	"unicode/utf8"
 	"gopkg.in/asn1-ber.v1"
 )
@ -50,6 +53,20 @@ var FilterSubstringsMap = map[uint64]string{
 	FilterSubstringsFinal: "Substrings Final",
 }
const (
MatchingRuleAssertionMatchingRule = 1
MatchingRuleAssertionType = 2
MatchingRuleAssertionMatchValue = 3
MatchingRuleAssertionDNAttributes = 4
)
var MatchingRuleAssertionMap = map[uint64]string{
MatchingRuleAssertionMatchingRule: "Matching Rule Assertion Matching Rule",
MatchingRuleAssertionType: "Matching Rule Assertion Type",
MatchingRuleAssertionMatchValue: "Matching Rule Assertion Match Value",
MatchingRuleAssertionDNAttributes: "Matching Rule Assertion DN Attributes",
}
 func CompileFilter(filter string) (*ber.Packet, error) {
 	if len(filter) == 0 || filter[0] != '(' {
 		return nil, NewError(ErrorFilterCompile, errors.New("ldap: filter does not start with an '('"))
@ -108,7 +125,7 @@ func DecompileFilter(packet *ber.Packet) (ret string, err error) {
 			if i == 0 && child.Tag != FilterSubstringsInitial {
 				ret += "*"
 			}
-			ret += ber.DecodeString(child.Data.Bytes())
+			ret += EscapeFilter(ber.DecodeString(child.Data.Bytes()))
 			if child.Tag != FilterSubstringsFinal {
 				ret += "*"
 			}
@ -116,22 +133,53 @@ func DecompileFilter(packet *ber.Packet) (ret string, err error) {
 	case FilterEqualityMatch:
 		ret += ber.DecodeString(packet.Children[0].Data.Bytes())
 		ret += "="
-		ret += ber.DecodeString(packet.Children[1].Data.Bytes())
+		ret += EscapeFilter(ber.DecodeString(packet.Children[1].Data.Bytes()))
 	case FilterGreaterOrEqual:
 		ret += ber.DecodeString(packet.Children[0].Data.Bytes())
 		ret += ">="
-		ret += ber.DecodeString(packet.Children[1].Data.Bytes())
+		ret += EscapeFilter(ber.DecodeString(packet.Children[1].Data.Bytes()))
 	case FilterLessOrEqual:
 		ret += ber.DecodeString(packet.Children[0].Data.Bytes())
 		ret += "<="
-		ret += ber.DecodeString(packet.Children[1].Data.Bytes())
+		ret += EscapeFilter(ber.DecodeString(packet.Children[1].Data.Bytes()))
 	case FilterPresent:
 		ret += ber.DecodeString(packet.Data.Bytes())
 		ret += "=*"
 	case FilterApproxMatch:
 		ret += ber.DecodeString(packet.Children[0].Data.Bytes())
 		ret += "~="
-		ret += ber.DecodeString(packet.Children[1].Data.Bytes())
+		ret += EscapeFilter(ber.DecodeString(packet.Children[1].Data.Bytes()))
case FilterExtensibleMatch:
attr := ""
dnAttributes := false
matchingRule := ""
value := ""
for _, child := range packet.Children {
switch child.Tag {
case MatchingRuleAssertionMatchingRule:
matchingRule = ber.DecodeString(child.Data.Bytes())
case MatchingRuleAssertionType:
attr = ber.DecodeString(child.Data.Bytes())
case MatchingRuleAssertionMatchValue:
value = ber.DecodeString(child.Data.Bytes())
case MatchingRuleAssertionDNAttributes:
dnAttributes = child.Value.(bool)
}
}
if len(attr) > 0 {
ret += attr
}
if dnAttributes {
ret += ":dn"
}
if len(matchingRule) > 0 {
ret += ":"
ret += matchingRule
}
ret += ":="
ret += EscapeFilter(value)
 	}
 	ret += ")"
@ -155,58 +203,143 @@ func compileFilterSet(filter string, pos int, parent *ber.Packet) (int, error) {
} }
func compileFilter(filter string, pos int) (*ber.Packet, int, error) { func compileFilter(filter string, pos int) (*ber.Packet, int, error) {
var packet *ber.Packet var (
var err error packet *ber.Packet
err error
)
defer func() { defer func() {
if r := recover(); r != nil { if r := recover(); r != nil {
err = NewError(ErrorFilterCompile, errors.New("ldap: error compiling filter")) err = NewError(ErrorFilterCompile, errors.New("ldap: error compiling filter"))
} }
}() }()
newPos := pos newPos := pos
switch filter[pos] {
currentRune, currentWidth := utf8.DecodeRuneInString(filter[newPos:])
switch currentRune {
case utf8.RuneError:
return nil, 0, NewError(ErrorFilterCompile, fmt.Errorf("ldap: error reading rune at position %d", newPos))
case '(': case '(':
packet, newPos, err = compileFilter(filter, pos+1) packet, newPos, err = compileFilter(filter, pos+currentWidth)
newPos++ newPos++
return packet, newPos, err return packet, newPos, err
case '&': case '&':
packet = ber.Encode(ber.ClassContext, ber.TypeConstructed, FilterAnd, nil, FilterMap[FilterAnd]) packet = ber.Encode(ber.ClassContext, ber.TypeConstructed, FilterAnd, nil, FilterMap[FilterAnd])
newPos, err = compileFilterSet(filter, pos+1, packet) newPos, err = compileFilterSet(filter, pos+currentWidth, packet)
return packet, newPos, err return packet, newPos, err
case '|': case '|':
packet = ber.Encode(ber.ClassContext, ber.TypeConstructed, FilterOr, nil, FilterMap[FilterOr]) packet = ber.Encode(ber.ClassContext, ber.TypeConstructed, FilterOr, nil, FilterMap[FilterOr])
newPos, err = compileFilterSet(filter, pos+1, packet) newPos, err = compileFilterSet(filter, pos+currentWidth, packet)
return packet, newPos, err return packet, newPos, err
case '!': case '!':
packet = ber.Encode(ber.ClassContext, ber.TypeConstructed, FilterNot, nil, FilterMap[FilterNot]) packet = ber.Encode(ber.ClassContext, ber.TypeConstructed, FilterNot, nil, FilterMap[FilterNot])
var child *ber.Packet var child *ber.Packet
child, newPos, err = compileFilter(filter, pos+1) child, newPos, err = compileFilter(filter, pos+currentWidth)
packet.AppendChild(child) packet.AppendChild(child)
return packet, newPos, err return packet, newPos, err
default: default:
READING_ATTR := 0
READING_EXTENSIBLE_MATCHING_RULE := 1
READING_CONDITION := 2
state := READING_ATTR
attribute := "" attribute := ""
extensibleDNAttributes := false
extensibleMatchingRule := ""
condition := "" condition := ""
for newPos < len(filter) && filter[newPos] != ')' {
for newPos < len(filter) {
remainingFilter := filter[newPos:]
currentRune, currentWidth = utf8.DecodeRuneInString(remainingFilter)
if currentRune == ')' {
break
}
if currentRune == utf8.RuneError {
return packet, newPos, NewError(ErrorFilterCompile, fmt.Errorf("ldap: error reading rune at position %d", newPos))
}
switch state {
case READING_ATTR:
switch { switch {
case packet != nil: // Extensible rule, with only DN-matching
condition += fmt.Sprintf("%c", filter[newPos]) case currentRune == ':' && strings.HasPrefix(remainingFilter, ":dn:="):
case filter[newPos] == '=': packet = ber.Encode(ber.ClassContext, ber.TypeConstructed, FilterExtensibleMatch, nil, FilterMap[FilterExtensibleMatch])
extensibleDNAttributes = true
state = READING_CONDITION
newPos += 5
// Extensible rule, with DN-matching and a matching OID
case currentRune == ':' && strings.HasPrefix(remainingFilter, ":dn:"):
packet = ber.Encode(ber.ClassContext, ber.TypeConstructed, FilterExtensibleMatch, nil, FilterMap[FilterExtensibleMatch])
extensibleDNAttributes = true
state = READING_EXTENSIBLE_MATCHING_RULE
newPos += 4
// Extensible rule, with attr only
case currentRune == ':' && strings.HasPrefix(remainingFilter, ":="):
packet = ber.Encode(ber.ClassContext, ber.TypeConstructed, FilterExtensibleMatch, nil, FilterMap[FilterExtensibleMatch])
state = READING_CONDITION
newPos += 2
// Extensible rule, with no DN attribute matching
case currentRune == ':':
packet = ber.Encode(ber.ClassContext, ber.TypeConstructed, FilterExtensibleMatch, nil, FilterMap[FilterExtensibleMatch])
state = READING_EXTENSIBLE_MATCHING_RULE
newPos += 1
// Equality condition
case currentRune == '=':
packet = ber.Encode(ber.ClassContext, ber.TypeConstructed, FilterEqualityMatch, nil, FilterMap[FilterEqualityMatch]) packet = ber.Encode(ber.ClassContext, ber.TypeConstructed, FilterEqualityMatch, nil, FilterMap[FilterEqualityMatch])
case filter[newPos] == '>' && filter[newPos+1] == '=': state = READING_CONDITION
newPos += 1
// Greater-than or equal
case currentRune == '>' && strings.HasPrefix(remainingFilter, ">="):
packet = ber.Encode(ber.ClassContext, ber.TypeConstructed, FilterGreaterOrEqual, nil, FilterMap[FilterGreaterOrEqual]) packet = ber.Encode(ber.ClassContext, ber.TypeConstructed, FilterGreaterOrEqual, nil, FilterMap[FilterGreaterOrEqual])
newPos++ state = READING_CONDITION
case filter[newPos] == '<' && filter[newPos+1] == '=': newPos += 2
// Less-than or equal
case currentRune == '<' && strings.HasPrefix(remainingFilter, "<="):
packet = ber.Encode(ber.ClassContext, ber.TypeConstructed, FilterLessOrEqual, nil, FilterMap[FilterLessOrEqual]) packet = ber.Encode(ber.ClassContext, ber.TypeConstructed, FilterLessOrEqual, nil, FilterMap[FilterLessOrEqual])
newPos++ state = READING_CONDITION
case filter[newPos] == '~' && filter[newPos+1] == '=': newPos += 2
packet = ber.Encode(ber.ClassContext, ber.TypeConstructed, FilterApproxMatch, nil, FilterMap[FilterLessOrEqual])
newPos++ // Approx
case packet == nil: case currentRune == '~' && strings.HasPrefix(remainingFilter, "~="):
attribute += fmt.Sprintf("%c", filter[newPos]) packet = ber.Encode(ber.ClassContext, ber.TypeConstructed, FilterApproxMatch, nil, FilterMap[FilterApproxMatch])
state = READING_CONDITION
newPos += 2
// Still reading the attribute name
default:
attribute += fmt.Sprintf("%c", currentRune)
newPos += currentWidth
} }
newPos++
case READING_EXTENSIBLE_MATCHING_RULE:
switch {
// Matching rule OID is done
case currentRune == ':' && strings.HasPrefix(remainingFilter, ":="):
state = READING_CONDITION
newPos += 2
// Still reading the matching rule oid
default:
extensibleMatchingRule += fmt.Sprintf("%c", currentRune)
newPos += currentWidth
} }
case READING_CONDITION:
// append to the condition
condition += fmt.Sprintf("%c", currentRune)
newPos += currentWidth
}
}
if newPos == len(filter) { if newPos == len(filter) {
err = NewError(ErrorFilterCompile, errors.New("ldap: unexpected end of filter")) err = NewError(ErrorFilterCompile, errors.New("ldap: unexpected end of filter"))
return packet, newPos, err return packet, newPos, err
@ -217,6 +350,36 @@ func compileFilter(filter string, pos int) (*ber.Packet, int, error) {
} }
switch { switch {
case packet.Tag == FilterExtensibleMatch:
// MatchingRuleAssertion ::= SEQUENCE {
// matchingRule [1] MatchingRuleID OPTIONAL,
// type [2] AttributeDescription OPTIONAL,
// matchValue [3] AssertionValue,
// dnAttributes [4] BOOLEAN DEFAULT FALSE
// }
// Include the matching rule oid, if specified
if len(extensibleMatchingRule) > 0 {
packet.AppendChild(ber.NewString(ber.ClassContext, ber.TypePrimitive, MatchingRuleAssertionMatchingRule, extensibleMatchingRule, MatchingRuleAssertionMap[MatchingRuleAssertionMatchingRule]))
}
// Include the attribute, if specified
if len(attribute) > 0 {
packet.AppendChild(ber.NewString(ber.ClassContext, ber.TypePrimitive, MatchingRuleAssertionType, attribute, MatchingRuleAssertionMap[MatchingRuleAssertionType]))
}
// Add the value (only required child)
encodedString, err := escapedStringToEncodedBytes(condition)
if err != nil {
return packet, newPos, err
}
packet.AppendChild(ber.NewString(ber.ClassContext, ber.TypePrimitive, MatchingRuleAssertionMatchValue, encodedString, MatchingRuleAssertionMap[MatchingRuleAssertionMatchValue]))
// Defaults to false, so only include in the sequence if true
if extensibleDNAttributes {
packet.AppendChild(ber.NewBoolean(ber.ClassContext, ber.TypePrimitive, MatchingRuleAssertionDNAttributes, extensibleDNAttributes, MatchingRuleAssertionMap[MatchingRuleAssertionDNAttributes]))
}
case packet.Tag == FilterEqualityMatch && condition == "*": case packet.Tag == FilterEqualityMatch && condition == "*":
packet = ber.NewString(ber.ClassContext, ber.TypePrimitive, FilterPresent, attribute, FilterMap[FilterPresent]) packet = ber.NewString(ber.ClassContext, ber.TypePrimitive, FilterPresent, attribute, FilterMap[FilterPresent])
case packet.Tag == FilterEqualityMatch && strings.Contains(condition, "*"): case packet.Tag == FilterEqualityMatch && strings.Contains(condition, "*"):
@ -238,15 +401,56 @@ func compileFilter(filter string, pos int) (*ber.Packet, int, error) {
default: default:
tag = FilterSubstringsAny tag = FilterSubstringsAny
} }
seq.AppendChild(ber.NewString(ber.ClassContext, ber.TypePrimitive, tag, part, FilterSubstringsMap[uint64(tag)])) encodedString, err := escapedStringToEncodedBytes(part)
if err != nil {
return packet, newPos, err
}
seq.AppendChild(ber.NewString(ber.ClassContext, ber.TypePrimitive, tag, encodedString, FilterSubstringsMap[uint64(tag)]))
} }
packet.AppendChild(seq) packet.AppendChild(seq)
default: default:
encodedString, err := escapedStringToEncodedBytes(condition)
if err != nil {
return packet, newPos, err
}
packet.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, attribute, "Attribute")) packet.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, attribute, "Attribute"))
packet.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, condition, "Condition")) packet.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, encodedString, "Condition"))
} }
newPos++ newPos += currentWidth
return packet, newPos, err return packet, newPos, err
} }
} }
// Convert from "ABC\xx\xx\xx" form to literal bytes for transport
func escapedStringToEncodedBytes(escapedString string) (string, error) {
var buffer bytes.Buffer
i := 0
for i < len(escapedString) {
currentRune, currentWidth := utf8.DecodeRuneInString(escapedString[i:])
if currentRune == utf8.RuneError {
return "", NewError(ErrorFilterCompile, fmt.Errorf("ldap: error reading rune at position %d", i))
}
// Check for escaped hex characters and convert them to their literal value for transport.
if currentRune == '\\' {
// http://tools.ietf.org/search/rfc4515
// \ (%x5C) is not a valid character unless it is followed by two HEX characters due to not
// being a member of UTF1SUBSET.
if i+2 > len(escapedString) {
return "", NewError(ErrorFilterCompile, errors.New("ldap: missing characters for escape in filter"))
}
if escByte, decodeErr := hexpac.DecodeString(escapedString[i+1 : i+3]); decodeErr != nil {
return "", NewError(ErrorFilterCompile, errors.New("ldap: invalid characters for escape in filter"))
} else {
buffer.WriteByte(escByte[0])
i += 2 // +1 from end of loop, so 3 total for \xx.
}
} else {
buffer.WriteRune(currentRune)
}
i += currentWidth
}
return buffer.String(), nil
}
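
The net effect of the new escaping logic is that CompileFilter and DecompileFilter round-trip non-ASCII and escaped values; a sketch that mirrors the expectations added to filter_test.go below (imports of fmt, log and gopkg.in/ldap.v2 are assumed):

```go
packet, err := ldap.CompileFilter(`(&(objectclass=inetorgperson)(cn=中文))`)
if err != nil {
	log.Fatal(err)
}
out, err := ldap.DecompileFilter(packet)
if err != nil {
	log.Fatal(err)
}
// Prints (&(objectclass=inetorgperson)(cn=\e4\b8\ad\e6\96\87)): the UTF-8 value
// comes back escaped per RFC 4515, matching the new test expectations.
fmt.Println(out)
```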


@ -1,50 +1,216 @@
-package ldap
+package ldap_test
 import (
+	"strings"
 	"testing"
 	"gopkg.in/asn1-ber.v1"
+	"gopkg.in/ldap.v2"
 )
 type compileTest struct {
 	filterStr string
-	filterType int
+	expectedFilter string
+	expectedType int
+	expectedErr string
 }
 var testFilters = []compileTest{
-	compileTest{filterStr: "(&(sn=Miller)(givenName=Bob))", filterType: FilterAnd},
-	compileTest{filterStr: "(|(sn=Miller)(givenName=Bob))", filterType: FilterOr},
-	compileTest{filterStr: "(!(sn=Miller))", filterType: FilterNot},
-	compileTest{filterStr: "(sn=Miller)", filterType: FilterEqualityMatch},
-	compileTest{filterStr: "(sn=Mill*)", filterType: FilterSubstrings},
-	compileTest{filterStr: "(sn=*Mill)", filterType: FilterSubstrings},
-	compileTest{filterStr: "(sn=*Mill*)", filterType: FilterSubstrings},
-	compileTest{filterStr: "(sn=*i*le*)", filterType: FilterSubstrings},
-	compileTest{filterStr: "(sn=Mi*l*r)", filterType: FilterSubstrings},
-	compileTest{filterStr: "(sn=Mi*le*)", filterType: FilterSubstrings},
-	compileTest{filterStr: "(sn=*i*ler)", filterType: FilterSubstrings},
-	compileTest{filterStr: "(sn>=Miller)", filterType: FilterGreaterOrEqual},
-	compileTest{filterStr: "(sn<=Miller)", filterType: FilterLessOrEqual},
-	compileTest{filterStr: "(sn=*)", filterType: FilterPresent},
-	compileTest{filterStr: "(sn~=Miller)", filterType: FilterApproxMatch},
+	compileTest{
+		filterStr: "(&(sn=Miller)(givenName=Bob))",
+		expectedFilter: "(&(sn=Miller)(givenName=Bob))",
+		expectedType: ldap.FilterAnd,
+	},
+	compileTest{
+		filterStr: "(|(sn=Miller)(givenName=Bob))",
+		expectedFilter: "(|(sn=Miller)(givenName=Bob))",
+		expectedType: ldap.FilterOr,
+	},
+	compileTest{
+		filterStr: "(!(sn=Miller))",
+		expectedFilter: "(!(sn=Miller))",
+		expectedType: ldap.FilterNot,
+	},
compileTest{
filterStr: "(sn=Miller)",
expectedFilter: "(sn=Miller)",
expectedType: ldap.FilterEqualityMatch,
},
compileTest{
filterStr: "(sn=Mill*)",
expectedFilter: "(sn=Mill*)",
expectedType: ldap.FilterSubstrings,
},
compileTest{
filterStr: "(sn=*Mill)",
expectedFilter: "(sn=*Mill)",
expectedType: ldap.FilterSubstrings,
},
compileTest{
filterStr: "(sn=*Mill*)",
expectedFilter: "(sn=*Mill*)",
expectedType: ldap.FilterSubstrings,
},
compileTest{
filterStr: "(sn=*i*le*)",
expectedFilter: "(sn=*i*le*)",
expectedType: ldap.FilterSubstrings,
},
compileTest{
filterStr: "(sn=Mi*l*r)",
expectedFilter: "(sn=Mi*l*r)",
expectedType: ldap.FilterSubstrings,
},
// substring filters escape properly
compileTest{
filterStr: `(sn=Mi*함*r)`,
expectedFilter: `(sn=Mi*\ed\95\a8*r)`,
expectedType: ldap.FilterSubstrings,
},
// already escaped substring filters don't get double-escaped
compileTest{
filterStr: `(sn=Mi*\ed\95\a8*r)`,
expectedFilter: `(sn=Mi*\ed\95\a8*r)`,
expectedType: ldap.FilterSubstrings,
},
compileTest{
filterStr: "(sn=Mi*le*)",
expectedFilter: "(sn=Mi*le*)",
expectedType: ldap.FilterSubstrings,
},
compileTest{
filterStr: "(sn=*i*ler)",
expectedFilter: "(sn=*i*ler)",
expectedType: ldap.FilterSubstrings,
},
compileTest{
filterStr: "(sn>=Miller)",
expectedFilter: "(sn>=Miller)",
expectedType: ldap.FilterGreaterOrEqual,
},
compileTest{
filterStr: "(sn<=Miller)",
expectedFilter: "(sn<=Miller)",
expectedType: ldap.FilterLessOrEqual,
},
compileTest{
filterStr: "(sn=*)",
expectedFilter: "(sn=*)",
expectedType: ldap.FilterPresent,
},
compileTest{
filterStr: "(sn~=Miller)",
expectedFilter: "(sn~=Miller)",
expectedType: ldap.FilterApproxMatch,
},
compileTest{
filterStr: `(objectGUID='\fc\fe\a3\ab\f9\90N\aaGm\d5I~\d12)`,
expectedFilter: `(objectGUID='\fc\fe\a3\ab\f9\90N\aaGm\d5I~\d12)`,
expectedType: ldap.FilterEqualityMatch,
},
compileTest{
filterStr: `(objectGUID=абвгдеёжзийклмнопрстуфхцчшщъыьэюя)`,
expectedFilter: `(objectGUID=\d0\b0\d0\b1\d0\b2\d0\b3\d0\b4\d0\b5\d1\91\d0\b6\d0\b7\d0\b8\d0\b9\d0\ba\d0\bb\d0\bc\d0\bd\d0\be\d0\bf\d1\80\d1\81\d1\82\d1\83\d1\84\d1\85\d1\86\d1\87\d1\88\d1\89\d1\8a\d1\8b\d1\8c\d1\8d\d1\8e\d1\8f)`,
expectedType: ldap.FilterEqualityMatch,
},
compileTest{
filterStr: `(objectGUID=함수목록)`,
expectedFilter: `(objectGUID=\ed\95\a8\ec\88\98\eb\aa\a9\eb\a1\9d)`,
expectedType: ldap.FilterEqualityMatch,
},
compileTest{
filterStr: `(objectGUID=`,
expectedFilter: ``,
expectedType: 0,
expectedErr: "unexpected end of filter",
},
compileTest{
filterStr: `(objectGUID=함수목록`,
expectedFilter: ``,
expectedType: 0,
expectedErr: "unexpected end of filter",
},
compileTest{
filterStr: `(&(objectclass=inetorgperson)(cn=中文))`,
expectedFilter: `(&(objectclass=inetorgperson)(cn=\e4\b8\ad\e6\96\87))`,
expectedType: 0,
},
// attr extension
compileTest{
filterStr: `(memberOf:=foo)`,
expectedFilter: `(memberOf:=foo)`,
expectedType: ldap.FilterExtensibleMatch,
},
// attr+named matching rule extension
compileTest{
filterStr: `(memberOf:test:=foo)`,
expectedFilter: `(memberOf:test:=foo)`,
expectedType: ldap.FilterExtensibleMatch,
},
// attr+oid matching rule extension
compileTest{
filterStr: `(cn:1.2.3.4.5:=Fred Flintstone)`,
expectedFilter: `(cn:1.2.3.4.5:=Fred Flintstone)`,
expectedType: ldap.FilterExtensibleMatch,
},
// attr+dn+oid matching rule extension
compileTest{
filterStr: `(sn:dn:2.4.6.8.10:=Barney Rubble)`,
expectedFilter: `(sn:dn:2.4.6.8.10:=Barney Rubble)`,
expectedType: ldap.FilterExtensibleMatch,
},
// attr+dn extension
compileTest{
filterStr: `(o:dn:=Ace Industry)`,
expectedFilter: `(o:dn:=Ace Industry)`,
expectedType: ldap.FilterExtensibleMatch,
},
// dn extension
compileTest{
filterStr: `(:dn:2.4.6.8.10:=Dino)`,
expectedFilter: `(:dn:2.4.6.8.10:=Dino)`,
expectedType: ldap.FilterExtensibleMatch,
},
compileTest{
filterStr: `(memberOf:1.2.840.113556.1.4.1941:=CN=User1,OU=blah,DC=mydomain,DC=net)`,
expectedFilter: `(memberOf:1.2.840.113556.1.4.1941:=CN=User1,OU=blah,DC=mydomain,DC=net)`,
expectedType: ldap.FilterExtensibleMatch,
},
// compileTest{ filterStr: "()", filterType: FilterExtensibleMatch }, // compileTest{ filterStr: "()", filterType: FilterExtensibleMatch },
} }
var testInvalidFilters = []string{
`(objectGUID=\zz)`,
`(objectGUID=\a)`,
}
func TestFilter(t *testing.T) { func TestFilter(t *testing.T) {
// Test Compiler and Decompiler // Test Compiler and Decompiler
for _, i := range testFilters { for _, i := range testFilters {
filter, err := CompileFilter(i.filterStr) filter, err := ldap.CompileFilter(i.filterStr)
if err != nil { if err != nil {
t.Errorf("Problem compiling %s - %s", i.filterStr, err.Error()) if i.expectedErr == "" || !strings.Contains(err.Error(), i.expectedErr) {
} else if filter.Tag != ber.Tag(i.filterType) { t.Errorf("Problem compiling '%s' - '%v' (expected error to contain '%v')", i.filterStr, err, i.expectedErr)
t.Errorf("%q Expected %q got %q", i.filterStr, FilterMap[uint64(i.filterType)], FilterMap[uint64(filter.Tag)])
} else {
o, err := DecompileFilter(filter)
if err != nil {
t.Errorf("Problem compiling %s - %s", i.filterStr, err.Error())
} else if i.filterStr != o {
t.Errorf("%q expected, got %q", i.filterStr, o)
} }
} else if filter.Tag != ber.Tag(i.expectedType) {
t.Errorf("%q Expected %q got %q", i.filterStr, ldap.FilterMap[uint64(i.expectedType)], ldap.FilterMap[uint64(filter.Tag)])
} else {
o, err := ldap.DecompileFilter(filter)
if err != nil {
t.Errorf("Problem compiling %s - %s", i.filterStr, err.Error())
} else if i.expectedFilter != o {
t.Errorf("%q expected, got %q", i.expectedFilter, o)
}
}
}
}
func TestInvalidFilter(t *testing.T) {
for _, filterStr := range testInvalidFilters {
if _, err := ldap.CompileFilter(filterStr); err == nil {
t.Errorf("Problem compiling %s - expected err", filterStr)
} }
} }
} }
@ -61,7 +227,7 @@ func BenchmarkFilterCompile(b *testing.B) {
maxIdx := len(filters) maxIdx := len(filters)
b.StartTimer() b.StartTimer()
for i := 0; i < b.N; i++ { for i := 0; i < b.N; i++ {
CompileFilter(filters[i%maxIdx]) ldap.CompileFilter(filters[i%maxIdx])
} }
} }
@ -71,12 +237,12 @@ func BenchmarkFilterDecompile(b *testing.B) {
// Test Compiler and Decompiler // Test Compiler and Decompiler
for idx, i := range testFilters { for idx, i := range testFilters {
filters[idx], _ = CompileFilter(i.filterStr) filters[idx], _ = ldap.CompileFilter(i.filterStr)
} }
maxIdx := len(filters) maxIdx := len(filters)
b.StartTimer() b.StartTimer()
for i := 0; i < b.N; i++ { for i := 0; i < b.N; i++ {
DecompileFilter(filters[i%maxIdx]) ldap.DecompileFilter(filters[i%maxIdx])
} }
} }


@ -6,7 +6,6 @@ package ldap
 import (
 	"errors"
-	"fmt"
 	"io/ioutil"
 	"os"
@ -60,98 +59,6 @@ var ApplicationMap = map[uint8]string{
 	ApplicationExtendedResponse: "Extended Response",
 }
// LDAP Result Codes
const (
LDAPResultSuccess = 0
LDAPResultOperationsError = 1
LDAPResultProtocolError = 2
LDAPResultTimeLimitExceeded = 3
LDAPResultSizeLimitExceeded = 4
LDAPResultCompareFalse = 5
LDAPResultCompareTrue = 6
LDAPResultAuthMethodNotSupported = 7
LDAPResultStrongAuthRequired = 8
LDAPResultReferral = 10
LDAPResultAdminLimitExceeded = 11
LDAPResultUnavailableCriticalExtension = 12
LDAPResultConfidentialityRequired = 13
LDAPResultSaslBindInProgress = 14
LDAPResultNoSuchAttribute = 16
LDAPResultUndefinedAttributeType = 17
LDAPResultInappropriateMatching = 18
LDAPResultConstraintViolation = 19
LDAPResultAttributeOrValueExists = 20
LDAPResultInvalidAttributeSyntax = 21
LDAPResultNoSuchObject = 32
LDAPResultAliasProblem = 33
LDAPResultInvalidDNSyntax = 34
LDAPResultAliasDereferencingProblem = 36
LDAPResultInappropriateAuthentication = 48
LDAPResultInvalidCredentials = 49
LDAPResultInsufficientAccessRights = 50
LDAPResultBusy = 51
LDAPResultUnavailable = 52
LDAPResultUnwillingToPerform = 53
LDAPResultLoopDetect = 54
LDAPResultNamingViolation = 64
LDAPResultObjectClassViolation = 65
LDAPResultNotAllowedOnNonLeaf = 66
LDAPResultNotAllowedOnRDN = 67
LDAPResultEntryAlreadyExists = 68
LDAPResultObjectClassModsProhibited = 69
LDAPResultAffectsMultipleDSAs = 71
LDAPResultOther = 80
ErrorNetwork = 200
ErrorFilterCompile = 201
ErrorFilterDecompile = 202
ErrorDebugging = 203
ErrorUnexpectedMessage = 204
ErrorUnexpectedResponse = 205
)
var LDAPResultCodeMap = map[uint8]string{
LDAPResultSuccess: "Success",
LDAPResultOperationsError: "Operations Error",
LDAPResultProtocolError: "Protocol Error",
LDAPResultTimeLimitExceeded: "Time Limit Exceeded",
LDAPResultSizeLimitExceeded: "Size Limit Exceeded",
LDAPResultCompareFalse: "Compare False",
LDAPResultCompareTrue: "Compare True",
LDAPResultAuthMethodNotSupported: "Auth Method Not Supported",
LDAPResultStrongAuthRequired: "Strong Auth Required",
LDAPResultReferral: "Referral",
LDAPResultAdminLimitExceeded: "Admin Limit Exceeded",
LDAPResultUnavailableCriticalExtension: "Unavailable Critical Extension",
LDAPResultConfidentialityRequired: "Confidentiality Required",
LDAPResultSaslBindInProgress: "Sasl Bind In Progress",
LDAPResultNoSuchAttribute: "No Such Attribute",
LDAPResultUndefinedAttributeType: "Undefined Attribute Type",
LDAPResultInappropriateMatching: "Inappropriate Matching",
LDAPResultConstraintViolation: "Constraint Violation",
LDAPResultAttributeOrValueExists: "Attribute Or Value Exists",
LDAPResultInvalidAttributeSyntax: "Invalid Attribute Syntax",
LDAPResultNoSuchObject: "No Such Object",
LDAPResultAliasProblem: "Alias Problem",
LDAPResultInvalidDNSyntax: "Invalid DN Syntax",
LDAPResultAliasDereferencingProblem: "Alias Dereferencing Problem",
LDAPResultInappropriateAuthentication: "Inappropriate Authentication",
LDAPResultInvalidCredentials: "Invalid Credentials",
LDAPResultInsufficientAccessRights: "Insufficient Access Rights",
LDAPResultBusy: "Busy",
LDAPResultUnavailable: "Unavailable",
LDAPResultUnwillingToPerform: "Unwilling To Perform",
LDAPResultLoopDetect: "Loop Detect",
LDAPResultNamingViolation: "Naming Violation",
LDAPResultObjectClassViolation: "Object Class Violation",
LDAPResultNotAllowedOnNonLeaf: "Not Allowed On Non Leaf",
LDAPResultNotAllowedOnRDN: "Not Allowed On RDN",
LDAPResultEntryAlreadyExists: "Entry Already Exists",
LDAPResultObjectClassModsProhibited: "Object Class Mods Prohibited",
LDAPResultAffectsMultipleDSAs: "Affects Multiple DSAs",
LDAPResultOther: "Other",
}
 // Ldap Behera Password Policy Draft 10 (https://tools.ietf.org/html/draft-behera-ldap-password-policy-10)
 const (
 	BeheraPasswordExpired = 0
@ -318,8 +225,8 @@ func addRequestDescriptions(packet *ber.Packet) {
 }
 func addDefaultLDAPResponseDescriptions(packet *ber.Packet) {
-	resultCode := packet.Children[1].Children[0].Value.(int64)
-	packet.Children[1].Children[0].Description = "Result Code (" + LDAPResultCodeMap[uint8(resultCode)] + ")"
+	resultCode, _ := getLDAPResultCode(packet)
+	packet.Children[1].Children[0].Description = "Result Code (" + LDAPResultCodeMap[resultCode] + ")"
 	packet.Children[1].Children[1].Description = "Matched DN"
 	packet.Children[1].Children[2].Description = "Error Message"
 	if len(packet.Children[1].Children) > 3 {
@ -343,30 +250,6 @@ func DebugBinaryFile(fileName string) error {
 	return nil
 }
type Error struct {
Err error
ResultCode uint8
}
func (e *Error) Error() string {
return fmt.Sprintf("LDAP Result Code %d %q: %s", e.ResultCode, LDAPResultCodeMap[e.ResultCode], e.Err.Error())
}
func NewError(resultCode uint8, err error) error {
return &Error{ResultCode: resultCode, Err: err}
}
func getLDAPResultCode(packet *ber.Packet) (code uint8, description string) {
if len(packet.Children) >= 2 {
response := packet.Children[1]
if response.ClassType == ber.ClassApplication && response.TagType == ber.TypeConstructed && len(response.Children) >= 3 {
return uint8(response.Children[0].Value.(int64)), response.Children[2].Value.(string)
}
}
return ErrorNetwork, "Invalid packet format"
}
 var hex = "0123456789abcdef"
 func mustEscape(c byte) bool {


@ -1,9 +1,11 @@
-package ldap
+package ldap_test
 import (
 	"crypto/tls"
 	"fmt"
 	"testing"
+	"gopkg.in/ldap.v2"
 )
 var ldapServer = "ldap.itd.umich.edu"
@ -21,7 +23,7 @@ var attributes = []string{
func TestDial(t *testing.T) { func TestDial(t *testing.T) {
fmt.Printf("TestDial: starting...\n") fmt.Printf("TestDial: starting...\n")
l, err := Dial("tcp", fmt.Sprintf("%s:%d", ldapServer, ldapPort)) l, err := ldap.Dial("tcp", fmt.Sprintf("%s:%d", ldapServer, ldapPort))
if err != nil { if err != nil {
t.Errorf(err.Error()) t.Errorf(err.Error())
return return
@ -32,7 +34,7 @@ func TestDial(t *testing.T) {
func TestDialTLS(t *testing.T) { func TestDialTLS(t *testing.T) {
fmt.Printf("TestDialTLS: starting...\n") fmt.Printf("TestDialTLS: starting...\n")
l, err := DialTLS("tcp", fmt.Sprintf("%s:%d", ldapServer, ldapTLSPort), &tls.Config{InsecureSkipVerify: true}) l, err := ldap.DialTLS("tcp", fmt.Sprintf("%s:%d", ldapServer, ldapTLSPort), &tls.Config{InsecureSkipVerify: true})
if err != nil { if err != nil {
t.Errorf(err.Error()) t.Errorf(err.Error())
return return
@ -43,7 +45,7 @@ func TestDialTLS(t *testing.T) {
func TestStartTLS(t *testing.T) { func TestStartTLS(t *testing.T) {
fmt.Printf("TestStartTLS: starting...\n") fmt.Printf("TestStartTLS: starting...\n")
l, err := Dial("tcp", fmt.Sprintf("%s:%d", ldapServer, ldapPort)) l, err := ldap.Dial("tcp", fmt.Sprintf("%s:%d", ldapServer, ldapPort))
if err != nil { if err != nil {
t.Errorf(err.Error()) t.Errorf(err.Error())
return return
@ -58,16 +60,16 @@ func TestStartTLS(t *testing.T) {
func TestSearch(t *testing.T) { func TestSearch(t *testing.T) {
fmt.Printf("TestSearch: starting...\n") fmt.Printf("TestSearch: starting...\n")
l, err := Dial("tcp", fmt.Sprintf("%s:%d", ldapServer, ldapPort)) l, err := ldap.Dial("tcp", fmt.Sprintf("%s:%d", ldapServer, ldapPort))
if err != nil { if err != nil {
t.Errorf(err.Error()) t.Errorf(err.Error())
return return
} }
defer l.Close() defer l.Close()
searchRequest := NewSearchRequest( searchRequest := ldap.NewSearchRequest(
baseDN, baseDN,
ScopeWholeSubtree, DerefAlways, 0, 0, false, ldap.ScopeWholeSubtree, ldap.DerefAlways, 0, 0, false,
filter[0], filter[0],
attributes, attributes,
nil) nil)
@ -83,16 +85,16 @@ func TestSearch(t *testing.T) {
func TestSearchStartTLS(t *testing.T) {
fmt.Printf("TestSearchStartTLS: starting...\n")
l, err := ldap.Dial("tcp", fmt.Sprintf("%s:%d", ldapServer, ldapPort))
if err != nil {
t.Errorf(err.Error())
return
}
defer l.Close()
searchRequest := ldap.NewSearchRequest(
baseDN,
ldap.ScopeWholeSubtree, ldap.DerefAlways, 0, 0, false,
filter[0],
attributes,
nil)
@ -123,7 +125,7 @@ func TestSearchStartTLS(t *testing.T) {
func TestSearchWithPaging(t *testing.T) {
fmt.Printf("TestSearchWithPaging: starting...\n")
l, err := ldap.Dial("tcp", fmt.Sprintf("%s:%d", ldapServer, ldapPort))
if err != nil {
t.Errorf(err.Error())
return
@ -136,9 +138,9 @@ func TestSearchWithPaging(t *testing.T) {
return
}
searchRequest := ldap.NewSearchRequest(
baseDN,
ldap.ScopeWholeSubtree, ldap.DerefAlways, 0, 0, false,
filter[2],
attributes,
nil)
@ -149,12 +151,38 @@ func TestSearchWithPaging(t *testing.T) {
}
fmt.Printf("TestSearchWithPaging: %s -> num of entries = %d\n", searchRequest.Filter, len(sr.Entries))
searchRequest = ldap.NewSearchRequest(
baseDN,
ldap.ScopeWholeSubtree, ldap.DerefAlways, 0, 0, false,
filter[2],
attributes,
[]ldap.Control{ldap.NewControlPaging(5)})
sr, err = l.SearchWithPaging(searchRequest, 5)
if err != nil {
t.Errorf(err.Error())
return
}
fmt.Printf("TestSearchWithPaging: %s -> num of entries = %d\n", searchRequest.Filter, len(sr.Entries))
searchRequest = ldap.NewSearchRequest(
baseDN,
ldap.ScopeWholeSubtree, ldap.DerefAlways, 0, 0, false,
filter[2],
attributes,
[]ldap.Control{ldap.NewControlPaging(500)})
sr, err = l.SearchWithPaging(searchRequest, 5)
if err == nil {
t.Errorf("expected an error when paging size in control in search request doesn't match size given in call, got none")
return
}
}
func searchGoroutine(t *testing.T, l *ldap.Conn, results chan *ldap.SearchResult, i int) {
searchRequest := ldap.NewSearchRequest(
baseDN,
ldap.ScopeWholeSubtree, ldap.DerefAlways, 0, 0, false,
filter[i],
attributes,
nil)
@ -169,17 +197,17 @@ func searchGoroutine(t *testing.T, l *Conn, results chan *SearchResult, i int) {
func testMultiGoroutineSearch(t *testing.T, TLS bool, startTLS bool) {
fmt.Printf("TestMultiGoroutineSearch: starting...\n")
var l *ldap.Conn
var err error
if TLS {
l, err = ldap.DialTLS("tcp", fmt.Sprintf("%s:%d", ldapServer, ldapTLSPort), &tls.Config{InsecureSkipVerify: true})
if err != nil {
t.Errorf(err.Error())
return
}
defer l.Close()
} else {
l, err = ldap.Dial("tcp", fmt.Sprintf("%s:%d", ldapServer, ldapPort))
if err != nil {
t.Errorf(err.Error())
return
@ -195,9 +223,9 @@ func testMultiGoroutineSearch(t *testing.T, TLS bool, startTLS bool) {
}
}
results := make([]chan *ldap.SearchResult, len(filter))
for i := range filter {
results[i] = make(chan *ldap.SearchResult)
go searchGoroutine(t, l, results[i], i)
}
for i := range filter {
@ -217,17 +245,17 @@ func TestMultiGoroutineSearch(t *testing.T) {
}
func TestEscapeFilter(t *testing.T) {
if got, want := ldap.EscapeFilter("a\x00b(c)d*e\\f"), `a\00b\28c\29d\2ae\5cf`; got != want {
t.Errorf("Got %s, expected %s", want, got)
}
if got, want := ldap.EscapeFilter("Lučić"), `Lu\c4\8di\c4\87`; got != want {
t.Errorf("Got %s, expected %s", want, got)
}
}
func TestCompare(t *testing.T) {
fmt.Printf("TestCompare: starting...\n")
l, err := ldap.Dial("tcp", fmt.Sprintf("%s:%d", ldapServer, ldapPort))
if err != nil {
t.Fatal(err.Error())
}
@ -243,5 +271,5 @@ func TestCompare(t *testing.T) {
return
}
fmt.Printf("TestCompare: -> %v\n", sr)
}

View File

@ -62,6 +62,7 @@ package ldap
import (
"errors"
"fmt"
"sort"
"strings"
"gopkg.in/asn1-ber.v1"
@ -93,6 +94,26 @@ var DerefMap = map[int]string{
DerefAlways: "DerefAlways",
}
// NewEntry returns an Entry object with the specified distinguished name and attribute key-value pairs.
// The map of attributes is accessed in alphabetical order of the keys in order to ensure that, for the
// same input map of attributes, the output entry will contain the same order of attributes
func NewEntry(dn string, attributes map[string][]string) *Entry {
var attributeNames []string
for attributeName := range attributes {
attributeNames = append(attributeNames, attributeName)
}
sort.Strings(attributeNames)
var encodedAttributes []*EntryAttribute
for _, attributeName := range attributeNames {
encodedAttributes = append(encodedAttributes, NewEntryAttribute(attributeName, attributes[attributeName]))
}
return &Entry{
DN: dn,
Attributes: encodedAttributes,
}
}
type Entry struct {
DN string
Attributes []*EntryAttribute
@ -146,6 +167,19 @@ func (e *Entry) PrettyPrint(indent int) {
}
}
// NewEntryAttribute returns a new EntryAttribute with the desired key-value pair
func NewEntryAttribute(name string, values []string) *EntryAttribute {
var bytes [][]byte
for _, value := range values {
bytes = append(bytes, []byte(value))
}
return &EntryAttribute{
Name: name,
Values: values,
ByteValues: bytes,
}
}
type EntryAttribute struct {
Name string
Values []string
@ -234,13 +268,32 @@ func NewSearchRequest(
}
}
// SearchWithPaging accepts a search request and desired page size in order to execute LDAP queries to fulfill the
// search request. All paged LDAP query responses will be buffered and the final result will be returned atomically.
// The following four cases are possible given the arguments:
// - given SearchRequest missing a control of type ControlTypePaging: we will add one with the desired paging size
// - given SearchRequest contains a control of type ControlTypePaging that isn't actually a ControlPaging: fail without issuing any queries
// - given SearchRequest contains a control of type ControlTypePaging with pagingSize equal to the size requested: no change to the search request
// - given SearchRequest contains a control of type ControlTypePaging with pagingSize not equal to the size requested: fail without issuing any queries
// A requested pagingSize of 0 is interpreted as no limit by LDAP servers.
func (l *Conn) SearchWithPaging(searchRequest *SearchRequest, pagingSize uint32) (*SearchResult, error) {
var pagingControl *ControlPaging
control := FindControl(searchRequest.Controls, ControlTypePaging)
if control == nil {
pagingControl = NewControlPaging(pagingSize)
searchRequest.Controls = append(searchRequest.Controls, pagingControl)
} else {
castControl, ok := control.(*ControlPaging)
if !ok {
return nil, fmt.Errorf("Expected paging control to be of type *ControlPaging, got %v", control)
}
if castControl.PagingSize != pagingSize {
return nil, fmt.Errorf("Paging size given in search request (%d) conflicts with size given in search call (%d)", castControl.PagingSize, pagingSize)
}
pagingControl = castControl
}
searchResult := new(SearchResult)
for {
result, err := l.Search(searchRequest)
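For context, a minimal hypothetical usage sketch of the paged search API above (not part of this commit); the host, base DN and filter are placeholders:

package main

import (
	"fmt"
	"log"

	"gopkg.in/ldap.v2"
)

func main() {
	// Placeholder server address.
	l, err := ldap.Dial("tcp", "ldap.example.com:389")
	if err != nil {
		log.Fatal(err)
	}
	defer l.Close()

	req := ldap.NewSearchRequest(
		"dc=example,dc=com", // placeholder base DN
		ldap.ScopeWholeSubtree, ldap.DerefAlways, 0, 0, false,
		"(objectClass=person)", // placeholder filter
		[]string{"cn"},
		nil)

	// SearchWithPaging attaches a paging control (50 entries per page here)
	// and keeps issuing queries until the server reports no more pages.
	sr, err := l.SearchWithPaging(req, 50)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("entries:", len(sr.Entries))
}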

View File

@ -0,0 +1,31 @@
package ldap
import (
"reflect"
"testing"
)
// TestNewEntry tests that repeated calls to NewEntry return the same value with the same input
func TestNewEntry(t *testing.T) {
dn := "testDN"
attributes := map[string][]string{
"alpha": {"value"},
"beta": {"value"},
"gamma": {"value"},
"delta": {"value"},
"epsilon": {"value"},
}
expectedEntry := NewEntry(dn, attributes)
iteration := 0
for {
if iteration == 100 {
break
}
testEntry := NewEntry(dn, attributes)
if !reflect.DeepEqual(expectedEntry, testEntry) {
t.Fatalf("consecutive calls to NewEntry did not yield the same result:\n\texpected:\n\t%s\n\tgot:\n\t%s\n", expectedEntry, testEntry)
}
iteration = iteration + 1
}
}

View File

@ -0,0 +1,22 @@
# Compiled Object files, Static and Dynamic libs (Shared Objects)
*.o
*.a
*.so
# Folders
_obj
_test
# Architecture specific extensions/prefixes
*.[568vq]
[568vq].out
*.cgo1.go
*.cgo2.c
_cgo_defun.c
_cgo_gotypes.go
_cgo_export.*
_testmain.go
*.exe

View File

@ -0,0 +1,20 @@
language: go
sudo: false
matrix:
include:
- go: 1.2
- go: 1.3
- go: 1.4
- go: 1.5
- go: 1.6
- go: tip
install:
- go get golang.org/x/tools/cmd/vet
script:
- go get -t -v ./...
- diff -u <(echo -n) <(gofmt -d .)
- go vet .
- go test -v -race ./...

View File

@ -0,0 +1,8 @@
# This is the official list of Gorilla WebSocket authors for copyright
# purposes.
#
# Please keep the list sorted.
Gary Burd <gary@beagledreams.com>
Joachim Bauch <mail@joachim-bauch.de>

View File

@ -0,0 +1,22 @@
Copyright (c) 2013 The Gorilla WebSocket Authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@ -0,0 +1,61 @@
# Gorilla WebSocket
Gorilla WebSocket is a [Go](http://golang.org/) implementation of the
[WebSocket](http://www.rfc-editor.org/rfc/rfc6455.txt) protocol.
### Documentation
* [API Reference](http://godoc.org/github.com/gorilla/websocket)
* [Chat example](https://github.com/gorilla/websocket/tree/master/examples/chat)
* [Command example](https://github.com/gorilla/websocket/tree/master/examples/command)
* [Client and server example](https://github.com/gorilla/websocket/tree/master/examples/echo)
* [File watch example](https://github.com/gorilla/websocket/tree/master/examples/filewatch)
### Status
The Gorilla WebSocket package provides a complete and tested implementation of
the [WebSocket](http://www.rfc-editor.org/rfc/rfc6455.txt) protocol. The
package API is stable.
### Installation
go get github.com/gorilla/websocket
### Protocol Compliance
The Gorilla WebSocket package passes the server tests in the [Autobahn Test
Suite](http://autobahn.ws/testsuite) using the application in the [examples/autobahn
subdirectory](https://github.com/gorilla/websocket/tree/master/examples/autobahn).
### Gorilla WebSocket compared with other packages
<table>
<tr>
<th></th>
<th><a href="http://godoc.org/github.com/gorilla/websocket">github.com/gorilla</a></th>
<th><a href="http://godoc.org/golang.org/x/net/websocket">golang.org/x/net</a></th>
</tr>
<tr><td colspan="3"><a href="http://tools.ietf.org/html/rfc6455">RFC 6455</a> Features</td></tr>
<tr><td>Passes <a href="http://autobahn.ws/testsuite/">Autobahn Test Suite</a></td><td><a href="https://github.com/gorilla/websocket/tree/master/examples/autobahn">Yes</a></td><td>No</td></tr>
<tr><td>Receive <a href="https://tools.ietf.org/html/rfc6455#section-5.4">fragmented</a> message<td>Yes</td><td><a href="https://code.google.com/p/go/issues/detail?id=7632">No</a>, see note 1</td></tr>
<tr><td>Send <a href="https://tools.ietf.org/html/rfc6455#section-5.5.1">close</a> message</td><td><a href="http://godoc.org/github.com/gorilla/websocket#hdr-Control_Messages">Yes</a></td><td><a href="https://code.google.com/p/go/issues/detail?id=4588">No</a></td></tr>
<tr><td>Send <a href="https://tools.ietf.org/html/rfc6455#section-5.5.2">pings</a> and receive <a href="https://tools.ietf.org/html/rfc6455#section-5.5.3">pongs</a></td><td><a href="http://godoc.org/github.com/gorilla/websocket#hdr-Control_Messages">Yes</a></td><td>No</td></tr>
<tr><td>Get the <a href="https://tools.ietf.org/html/rfc6455#section-5.6">type</a> of a received data message</td><td>Yes</td><td>Yes, see note 2</td></tr>
<tr><td colspan="3">Other Features</tr></td>
<tr><td>Limit size of received message</td><td><a href="http://godoc.org/github.com/gorilla/websocket#Conn.SetReadLimit">Yes</a></td><td><a href="https://code.google.com/p/go/issues/detail?id=5082">No</a></td></tr>
<tr><td>Read message using io.Reader</td><td><a href="http://godoc.org/github.com/gorilla/websocket#Conn.NextReader">Yes</a></td><td>No, see note 3</td></tr>
<tr><td>Write message using io.WriteCloser</td><td><a href="http://godoc.org/github.com/gorilla/websocket#Conn.NextWriter">Yes</a></td><td>No, see note 3</td></tr>
</table>
Notes:
1. Large messages are fragmented in [Chrome's new WebSocket implementation](http://www.ietf.org/mail-archive/web/hybi/current/msg10503.html).
2. The application can get the type of a received data message by implementing
a [Codec marshal](http://godoc.org/golang.org/x/net/websocket#Codec.Marshal)
function.
3. The go.net io.Reader and io.Writer operate across WebSocket frame boundaries.
Read returns when the input buffer is full or a frame boundary is
encountered. Each call to Write sends a single frame message. The Gorilla
io.Reader and io.WriteCloser operate on a single WebSocket message.
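For orientation only (not part of the vendored README), a minimal echo server along the lines described above might look like the following sketch; the route and listen address are made up:

package main

import (
	"log"
	"net/http"

	"github.com/gorilla/websocket"
)

var upgrader = websocket.Upgrader{ReadBufferSize: 1024, WriteBufferSize: 1024}

// echo upgrades the HTTP connection and sends every message straight back.
func echo(w http.ResponseWriter, r *http.Request) {
	conn, err := upgrader.Upgrade(w, r, nil)
	if err != nil {
		log.Println(err)
		return
	}
	defer conn.Close()
	for {
		mt, msg, err := conn.ReadMessage()
		if err != nil {
			return
		}
		if err := conn.WriteMessage(mt, msg); err != nil {
			return
		}
	}
}

func main() {
	http.HandleFunc("/echo", echo) // placeholder route
	log.Fatal(http.ListenAndServe(":8080", nil))
}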

View File

@ -0,0 +1,350 @@
// Copyright 2013 The Gorilla WebSocket Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package websocket
import (
"bufio"
"bytes"
"crypto/tls"
"encoding/base64"
"errors"
"io"
"io/ioutil"
"net"
"net/http"
"net/url"
"strings"
"time"
)
// ErrBadHandshake is returned when the server response to opening handshake is
// invalid.
var ErrBadHandshake = errors.New("websocket: bad handshake")
// NewClient creates a new client connection using the given net connection.
// The URL u specifies the host and request URI. Use requestHeader to specify
// the origin (Origin), subprotocols (Sec-WebSocket-Protocol) and cookies
// (Cookie). Use the response.Header to get the selected subprotocol
// (Sec-WebSocket-Protocol) and cookies (Set-Cookie).
//
// If the WebSocket handshake fails, ErrBadHandshake is returned along with a
// non-nil *http.Response so that callers can handle redirects, authentication,
// etc.
//
// Deprecated: Use Dialer instead.
func NewClient(netConn net.Conn, u *url.URL, requestHeader http.Header, readBufSize, writeBufSize int) (c *Conn, response *http.Response, err error) {
d := Dialer{
ReadBufferSize: readBufSize,
WriteBufferSize: writeBufSize,
NetDial: func(net, addr string) (net.Conn, error) {
return netConn, nil
},
}
return d.Dial(u.String(), requestHeader)
}
// A Dialer contains options for connecting to WebSocket server.
type Dialer struct {
// NetDial specifies the dial function for creating TCP connections. If
// NetDial is nil, net.Dial is used.
NetDial func(network, addr string) (net.Conn, error)
// Proxy specifies a function to return a proxy for a given
// Request. If the function returns a non-nil error, the
// request is aborted with the provided error.
// If Proxy is nil or returns a nil *URL, no proxy is used.
Proxy func(*http.Request) (*url.URL, error)
// TLSClientConfig specifies the TLS configuration to use with tls.Client.
// If nil, the default configuration is used.
TLSClientConfig *tls.Config
// HandshakeTimeout specifies the duration for the handshake to complete.
HandshakeTimeout time.Duration
// Input and output buffer sizes. If the buffer size is zero, then a
// default value of 4096 is used.
ReadBufferSize, WriteBufferSize int
// Subprotocols specifies the client's requested subprotocols.
Subprotocols []string
}
var errMalformedURL = errors.New("malformed ws or wss URL")
// parseURL parses the URL.
//
// This function is a replacement for the standard library url.Parse function.
// In Go 1.4 and earlier, url.Parse loses information from the path.
func parseURL(s string) (*url.URL, error) {
// From the RFC:
//
// ws-URI = "ws:" "//" host [ ":" port ] path [ "?" query ]
// wss-URI = "wss:" "//" host [ ":" port ] path [ "?" query ]
var u url.URL
switch {
case strings.HasPrefix(s, "ws://"):
u.Scheme = "ws"
s = s[len("ws://"):]
case strings.HasPrefix(s, "wss://"):
u.Scheme = "wss"
s = s[len("wss://"):]
default:
return nil, errMalformedURL
}
if i := strings.Index(s, "?"); i >= 0 {
u.RawQuery = s[i+1:]
s = s[:i]
}
if i := strings.Index(s, "/"); i >= 0 {
u.Opaque = s[i:]
s = s[:i]
} else {
u.Opaque = "/"
}
u.Host = s
if strings.Contains(u.Host, "@") {
// Don't bother parsing user information because user information is
// not allowed in websocket URIs.
return nil, errMalformedURL
}
return &u, nil
}
func hostPortNoPort(u *url.URL) (hostPort, hostNoPort string) {
hostPort = u.Host
hostNoPort = u.Host
if i := strings.LastIndex(u.Host, ":"); i > strings.LastIndex(u.Host, "]") {
hostNoPort = hostNoPort[:i]
} else {
switch u.Scheme {
case "wss":
hostPort += ":443"
case "https":
hostPort += ":443"
default:
hostPort += ":80"
}
}
return hostPort, hostNoPort
}
// DefaultDialer is a dialer with all fields set to the default zero values.
var DefaultDialer = &Dialer{
Proxy: http.ProxyFromEnvironment,
}
// Dial creates a new client connection. Use requestHeader to specify the
// origin (Origin), subprotocols (Sec-WebSocket-Protocol) and cookies (Cookie).
// Use the response.Header to get the selected subprotocol
// (Sec-WebSocket-Protocol) and cookies (Set-Cookie).
//
// If the WebSocket handshake fails, ErrBadHandshake is returned along with a
// non-nil *http.Response so that callers can handle redirects, authentication,
// etcetera. The response body may not contain the entire response and does not
// need to be closed by the application.
func (d *Dialer) Dial(urlStr string, requestHeader http.Header) (*Conn, *http.Response, error) {
if d == nil {
d = &Dialer{
Proxy: http.ProxyFromEnvironment,
}
}
challengeKey, err := generateChallengeKey()
if err != nil {
return nil, nil, err
}
u, err := parseURL(urlStr)
if err != nil {
return nil, nil, err
}
switch u.Scheme {
case "ws":
u.Scheme = "http"
case "wss":
u.Scheme = "https"
default:
return nil, nil, errMalformedURL
}
if u.User != nil {
// User name and password are not allowed in websocket URIs.
return nil, nil, errMalformedURL
}
req := &http.Request{
Method: "GET",
URL: u,
Proto: "HTTP/1.1",
ProtoMajor: 1,
ProtoMinor: 1,
Header: make(http.Header),
Host: u.Host,
}
// Set the request headers using the capitalization for names and values in
// RFC examples. Although the capitalization shouldn't matter, there are
// servers that depend on it. The Header.Set method is not used because the
// method canonicalizes the header names.
req.Header["Upgrade"] = []string{"websocket"}
req.Header["Connection"] = []string{"Upgrade"}
req.Header["Sec-WebSocket-Key"] = []string{challengeKey}
req.Header["Sec-WebSocket-Version"] = []string{"13"}
if len(d.Subprotocols) > 0 {
req.Header["Sec-WebSocket-Protocol"] = []string{strings.Join(d.Subprotocols, ", ")}
}
for k, vs := range requestHeader {
switch {
case k == "Host":
if len(vs) > 0 {
req.Host = vs[0]
}
case k == "Upgrade" ||
k == "Connection" ||
k == "Sec-Websocket-Key" ||
k == "Sec-Websocket-Version" ||
(k == "Sec-Websocket-Protocol" && len(d.Subprotocols) > 0):
return nil, nil, errors.New("websocket: duplicate header not allowed: " + k)
default:
req.Header[k] = vs
}
}
hostPort, hostNoPort := hostPortNoPort(u)
var proxyURL *url.URL
// Check whether a proxy function has been configured.
if d.Proxy != nil {
proxyURL, err = d.Proxy(req)
}
if err != nil {
return nil, nil, err
}
var targetHostPort string
if proxyURL != nil {
targetHostPort, _ = hostPortNoPort(proxyURL)
} else {
targetHostPort = hostPort
}
var deadline time.Time
if d.HandshakeTimeout != 0 {
deadline = time.Now().Add(d.HandshakeTimeout)
}
netDial := d.NetDial
if netDial == nil {
netDialer := &net.Dialer{Deadline: deadline}
netDial = netDialer.Dial
}
netConn, err := netDial("tcp", targetHostPort)
if err != nil {
return nil, nil, err
}
defer func() {
if netConn != nil {
netConn.Close()
}
}()
if err := netConn.SetDeadline(deadline); err != nil {
return nil, nil, err
}
if proxyURL != nil {
connectHeader := make(http.Header)
if user := proxyURL.User; user != nil {
proxyUser := user.Username()
if proxyPassword, passwordSet := user.Password(); passwordSet {
credential := base64.StdEncoding.EncodeToString([]byte(proxyUser + ":" + proxyPassword))
connectHeader.Set("Proxy-Authorization", "Basic "+credential)
}
}
connectReq := &http.Request{
Method: "CONNECT",
URL: &url.URL{Opaque: hostPort},
Host: hostPort,
Header: connectHeader,
}
connectReq.Write(netConn)
// Read response.
// Okay to use and discard buffered reader here, because
// TLS server will not speak until spoken to.
br := bufio.NewReader(netConn)
resp, err := http.ReadResponse(br, connectReq)
if err != nil {
return nil, nil, err
}
if resp.StatusCode != 200 {
f := strings.SplitN(resp.Status, " ", 2)
return nil, nil, errors.New(f[1])
}
}
if u.Scheme == "https" {
cfg := d.TLSClientConfig
if cfg == nil {
cfg = &tls.Config{ServerName: hostNoPort}
} else if cfg.ServerName == "" {
shallowCopy := *cfg
cfg = &shallowCopy
cfg.ServerName = hostNoPort
}
tlsConn := tls.Client(netConn, cfg)
netConn = tlsConn
if err := tlsConn.Handshake(); err != nil {
return nil, nil, err
}
if !cfg.InsecureSkipVerify {
if err := tlsConn.VerifyHostname(cfg.ServerName); err != nil {
return nil, nil, err
}
}
}
conn := newConn(netConn, false, d.ReadBufferSize, d.WriteBufferSize)
if err := req.Write(netConn); err != nil {
return nil, nil, err
}
resp, err := http.ReadResponse(conn.br, req)
if err != nil {
return nil, nil, err
}
if resp.StatusCode != 101 ||
!strings.EqualFold(resp.Header.Get("Upgrade"), "websocket") ||
!strings.EqualFold(resp.Header.Get("Connection"), "upgrade") ||
resp.Header.Get("Sec-Websocket-Accept") != computeAcceptKey(challengeKey) {
// Before closing the network connection on return from this
// function, slurp up some of the response to aid application
// debugging.
buf := make([]byte, 1024)
n, _ := io.ReadFull(resp.Body, buf)
resp.Body = ioutil.NopCloser(bytes.NewReader(buf[:n]))
return nil, resp, ErrBadHandshake
}
resp.Body = ioutil.NopCloser(bytes.NewReader([]byte{}))
conn.subprotocol = resp.Header.Get("Sec-Websocket-Protocol")
netConn.SetDeadline(time.Time{})
netConn = nil // to avoid close in defer.
return conn, resp, nil
}
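To complement the handshake code above, a hypothetical client-side snippet using Dialer could look like this; the wss URL is a placeholder:

package main

import (
	"log"
	"net/http"
	"time"

	"github.com/gorilla/websocket"
)

func main() {
	d := &websocket.Dialer{
		Proxy:            http.ProxyFromEnvironment,
		HandshakeTimeout: 10 * time.Second,
	}
	// requestHeader (nil here) can carry Origin, cookies and subprotocols.
	conn, resp, err := d.Dial("wss://example.com/echo", nil)
	if err != nil {
		// On a failed handshake resp may still be non-nil and useful for debugging.
		log.Fatal("dial: ", err, " ", resp)
	}
	defer conn.Close()
	if err := conn.WriteMessage(websocket.TextMessage, []byte("hello")); err != nil {
		log.Fatal(err)
	}
}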

View File

@ -0,0 +1,915 @@
// Copyright 2013 The Gorilla WebSocket Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package websocket
import (
"bufio"
"encoding/binary"
"errors"
"io"
"io/ioutil"
"math/rand"
"net"
"strconv"
"time"
)
const (
maxFrameHeaderSize = 2 + 8 + 4 // Fixed header + length + mask
maxControlFramePayloadSize = 125
finalBit = 1 << 7
maskBit = 1 << 7
writeWait = time.Second
defaultReadBufferSize = 4096
defaultWriteBufferSize = 4096
continuationFrame = 0
noFrame = -1
)
// Close codes defined in RFC 6455, section 11.7.
const (
CloseNormalClosure = 1000
CloseGoingAway = 1001
CloseProtocolError = 1002
CloseUnsupportedData = 1003
CloseNoStatusReceived = 1005
CloseAbnormalClosure = 1006
CloseInvalidFramePayloadData = 1007
ClosePolicyViolation = 1008
CloseMessageTooBig = 1009
CloseMandatoryExtension = 1010
CloseInternalServerErr = 1011
CloseTLSHandshake = 1015
)
// The message types are defined in RFC 6455, section 11.8.
const (
// TextMessage denotes a text data message. The text message payload is
// interpreted as UTF-8 encoded text data.
TextMessage = 1
// BinaryMessage denotes a binary data message.
BinaryMessage = 2
// CloseMessage denotes a close control message. The optional message
// payload contains a numeric code and text. Use the FormatCloseMessage
// function to format a close message payload.
CloseMessage = 8
// PingMessage denotes a ping control message. The optional message payload
// is UTF-8 encoded text.
PingMessage = 9
// PongMessage denotes a pong control message. The optional message payload
// is UTF-8 encoded text.
PongMessage = 10
)
// ErrCloseSent is returned when the application writes a message to the
// connection after sending a close message.
var ErrCloseSent = errors.New("websocket: close sent")
// ErrReadLimit is returned when reading a message that is larger than the
// read limit set for the connection.
var ErrReadLimit = errors.New("websocket: read limit exceeded")
// netError satisfies the net Error interface.
type netError struct {
msg string
temporary bool
timeout bool
}
func (e *netError) Error() string { return e.msg }
func (e *netError) Temporary() bool { return e.temporary }
func (e *netError) Timeout() bool { return e.timeout }
// CloseError represents close frame.
type CloseError struct {
// Code is defined in RFC 6455, section 11.7.
Code int
// Text is the optional text payload.
Text string
}
func (e *CloseError) Error() string {
s := []byte("websocket: close ")
s = strconv.AppendInt(s, int64(e.Code), 10)
switch e.Code {
case CloseNormalClosure:
s = append(s, " (normal)"...)
case CloseGoingAway:
s = append(s, " (going away)"...)
case CloseProtocolError:
s = append(s, " (protocol error)"...)
case CloseUnsupportedData:
s = append(s, " (unsupported data)"...)
case CloseNoStatusReceived:
s = append(s, " (no status)"...)
case CloseAbnormalClosure:
s = append(s, " (abnormal closure)"...)
case CloseInvalidFramePayloadData:
s = append(s, " (invalid payload data)"...)
case ClosePolicyViolation:
s = append(s, " (policy violation)"...)
case CloseMessageTooBig:
s = append(s, " (message too big)"...)
case CloseMandatoryExtension:
s = append(s, " (mandatory extension missing)"...)
case CloseInternalServerErr:
s = append(s, " (internal server error)"...)
case CloseTLSHandshake:
s = append(s, " (TLS handshake error)"...)
}
if e.Text != "" {
s = append(s, ": "...)
s = append(s, e.Text...)
}
return string(s)
}
// IsCloseError returns boolean indicating whether the error is a *CloseError
// with one of the specified codes.
func IsCloseError(err error, codes ...int) bool {
if e, ok := err.(*CloseError); ok {
for _, code := range codes {
if e.Code == code {
return true
}
}
}
return false
}
// IsUnexpectedCloseError returns boolean indicating whether the error is a
// *CloseError with a code not in the list of expected codes.
func IsUnexpectedCloseError(err error, expectedCodes ...int) bool {
if e, ok := err.(*CloseError); ok {
for _, code := range expectedCodes {
if e.Code == code {
return false
}
}
return true
}
return false
}
var (
errWriteTimeout = &netError{msg: "websocket: write timeout", timeout: true, temporary: true}
errUnexpectedEOF = &CloseError{Code: CloseAbnormalClosure, Text: io.ErrUnexpectedEOF.Error()}
errBadWriteOpCode = errors.New("websocket: bad write message type")
errWriteClosed = errors.New("websocket: write closed")
errInvalidControlFrame = errors.New("websocket: invalid control frame")
)
func hideTempErr(err error) error {
if e, ok := err.(net.Error); ok && e.Temporary() {
err = &netError{msg: e.Error(), timeout: e.Timeout()}
}
return err
}
func isControl(frameType int) bool {
return frameType == CloseMessage || frameType == PingMessage || frameType == PongMessage
}
func isData(frameType int) bool {
return frameType == TextMessage || frameType == BinaryMessage
}
func maskBytes(key [4]byte, pos int, b []byte) int {
for i := range b {
b[i] ^= key[pos&3]
pos++
}
return pos & 3
}
func newMaskKey() [4]byte {
n := rand.Uint32()
return [4]byte{byte(n), byte(n >> 8), byte(n >> 16), byte(n >> 24)}
}
// Conn represents a WebSocket connection.
type Conn struct {
conn net.Conn
isServer bool
subprotocol string
// Write fields
mu chan bool // used as mutex to protect write to conn and closeSent
closeSent bool // true if close message was sent
// Message writer fields.
writeErr error
writeBuf []byte // frame is constructed in this buffer.
writePos int // end of data in writeBuf.
writeFrameType int // type of the current frame.
writeSeq int // incremented to invalidate message writers.
writeDeadline time.Time
isWriting bool // for best-effort concurrent write detection
// Read fields
readErr error
br *bufio.Reader
readRemaining int64 // bytes remaining in current frame.
readFinal bool // true if the final frame of the current message has been read.
readSeq int // incremented to invalidate message readers.
readLength int64 // Message size.
readLimit int64 // Maximum message size.
readMaskPos int
readMaskKey [4]byte
handlePong func(string) error
handlePing func(string) error
readErrCount int
}
func newConn(conn net.Conn, isServer bool, readBufferSize, writeBufferSize int) *Conn {
mu := make(chan bool, 1)
mu <- true
if readBufferSize == 0 {
readBufferSize = defaultReadBufferSize
}
if writeBufferSize == 0 {
writeBufferSize = defaultWriteBufferSize
}
c := &Conn{
isServer: isServer,
br: bufio.NewReaderSize(conn, readBufferSize),
conn: conn,
mu: mu,
readFinal: true,
writeBuf: make([]byte, writeBufferSize+maxFrameHeaderSize),
writeFrameType: noFrame,
writePos: maxFrameHeaderSize,
}
c.SetPingHandler(nil)
c.SetPongHandler(nil)
return c
}
// Subprotocol returns the negotiated protocol for the connection.
func (c *Conn) Subprotocol() string {
return c.subprotocol
}
// Close closes the underlying network connection without sending or waiting for a close frame.
func (c *Conn) Close() error {
return c.conn.Close()
}
// LocalAddr returns the local network address.
func (c *Conn) LocalAddr() net.Addr {
return c.conn.LocalAddr()
}
// RemoteAddr returns the remote network address.
func (c *Conn) RemoteAddr() net.Addr {
return c.conn.RemoteAddr()
}
// Write methods
func (c *Conn) write(frameType int, deadline time.Time, bufs ...[]byte) error {
<-c.mu
defer func() { c.mu <- true }()
if c.closeSent {
return ErrCloseSent
} else if frameType == CloseMessage {
c.closeSent = true
}
c.conn.SetWriteDeadline(deadline)
for _, buf := range bufs {
if len(buf) > 0 {
n, err := c.conn.Write(buf)
if n != len(buf) {
// Close on partial write.
c.conn.Close()
}
if err != nil {
return err
}
}
}
return nil
}
// WriteControl writes a control message with the given deadline. The allowed
// message types are CloseMessage, PingMessage and PongMessage.
func (c *Conn) WriteControl(messageType int, data []byte, deadline time.Time) error {
if !isControl(messageType) {
return errBadWriteOpCode
}
if len(data) > maxControlFramePayloadSize {
return errInvalidControlFrame
}
b0 := byte(messageType) | finalBit
b1 := byte(len(data))
if !c.isServer {
b1 |= maskBit
}
buf := make([]byte, 0, maxFrameHeaderSize+maxControlFramePayloadSize)
buf = append(buf, b0, b1)
if c.isServer {
buf = append(buf, data...)
} else {
key := newMaskKey()
buf = append(buf, key[:]...)
buf = append(buf, data...)
maskBytes(key, 0, buf[6:])
}
d := time.Hour * 1000
if !deadline.IsZero() {
d = deadline.Sub(time.Now())
if d < 0 {
return errWriteTimeout
}
}
timer := time.NewTimer(d)
select {
case <-c.mu:
timer.Stop()
case <-timer.C:
return errWriteTimeout
}
defer func() { c.mu <- true }()
if c.closeSent {
return ErrCloseSent
} else if messageType == CloseMessage {
c.closeSent = true
}
c.conn.SetWriteDeadline(deadline)
n, err := c.conn.Write(buf)
if n != 0 && n != len(buf) {
c.conn.Close()
}
return hideTempErr(err)
}
// NextWriter returns a writer for the next message to send. The writer's
// Close method flushes the complete message to the network.
//
// There can be at most one open writer on a connection. NextWriter closes the
// previous writer if the application has not already done so.
func (c *Conn) NextWriter(messageType int) (io.WriteCloser, error) {
if c.writeErr != nil {
return nil, c.writeErr
}
if c.writeFrameType != noFrame {
if err := c.flushFrame(true, nil); err != nil {
return nil, err
}
}
if !isControl(messageType) && !isData(messageType) {
return nil, errBadWriteOpCode
}
c.writeFrameType = messageType
return messageWriter{c, c.writeSeq}, nil
}
func (c *Conn) flushFrame(final bool, extra []byte) error {
length := c.writePos - maxFrameHeaderSize + len(extra)
// Check for invalid control frames.
if isControl(c.writeFrameType) &&
(!final || length > maxControlFramePayloadSize) {
c.writeSeq++
c.writeFrameType = noFrame
c.writePos = maxFrameHeaderSize
return errInvalidControlFrame
}
b0 := byte(c.writeFrameType)
if final {
b0 |= finalBit
}
b1 := byte(0)
if !c.isServer {
b1 |= maskBit
}
// Assume that the frame starts at beginning of c.writeBuf.
framePos := 0
if c.isServer {
// Adjust up if mask not included in the header.
framePos = 4
}
switch {
case length >= 65536:
c.writeBuf[framePos] = b0
c.writeBuf[framePos+1] = b1 | 127
binary.BigEndian.PutUint64(c.writeBuf[framePos+2:], uint64(length))
case length > 125:
framePos += 6
c.writeBuf[framePos] = b0
c.writeBuf[framePos+1] = b1 | 126
binary.BigEndian.PutUint16(c.writeBuf[framePos+2:], uint16(length))
default:
framePos += 8
c.writeBuf[framePos] = b0
c.writeBuf[framePos+1] = b1 | byte(length)
}
if !c.isServer {
key := newMaskKey()
copy(c.writeBuf[maxFrameHeaderSize-4:], key[:])
maskBytes(key, 0, c.writeBuf[maxFrameHeaderSize:c.writePos])
if len(extra) > 0 {
c.writeErr = errors.New("websocket: internal error, extra used in client mode")
return c.writeErr
}
}
// Write the buffers to the connection with best-effort detection of
// concurrent writes. See the concurrency section in the package
// documentation for more info.
if c.isWriting {
panic("concurrent write to websocket connection")
}
c.isWriting = true
c.writeErr = c.write(c.writeFrameType, c.writeDeadline, c.writeBuf[framePos:c.writePos], extra)
if !c.isWriting {
panic("concurrent write to websocket connection")
}
c.isWriting = false
// Setup for next frame.
c.writePos = maxFrameHeaderSize
c.writeFrameType = continuationFrame
if final {
c.writeSeq++
c.writeFrameType = noFrame
}
return c.writeErr
}
type messageWriter struct {
c *Conn
seq int
}
func (w messageWriter) err() error {
c := w.c
if c.writeSeq != w.seq {
return errWriteClosed
}
if c.writeErr != nil {
return c.writeErr
}
return nil
}
func (w messageWriter) ncopy(max int) (int, error) {
n := len(w.c.writeBuf) - w.c.writePos
if n <= 0 {
if err := w.c.flushFrame(false, nil); err != nil {
return 0, err
}
n = len(w.c.writeBuf) - w.c.writePos
}
if n > max {
n = max
}
return n, nil
}
func (w messageWriter) write(final bool, p []byte) (int, error) {
if err := w.err(); err != nil {
return 0, err
}
if len(p) > 2*len(w.c.writeBuf) && w.c.isServer {
// Don't buffer large messages.
err := w.c.flushFrame(final, p)
if err != nil {
return 0, err
}
return len(p), nil
}
nn := len(p)
for len(p) > 0 {
n, err := w.ncopy(len(p))
if err != nil {
return 0, err
}
copy(w.c.writeBuf[w.c.writePos:], p[:n])
w.c.writePos += n
p = p[n:]
}
return nn, nil
}
func (w messageWriter) Write(p []byte) (int, error) {
return w.write(false, p)
}
func (w messageWriter) WriteString(p string) (int, error) {
if err := w.err(); err != nil {
return 0, err
}
nn := len(p)
for len(p) > 0 {
n, err := w.ncopy(len(p))
if err != nil {
return 0, err
}
copy(w.c.writeBuf[w.c.writePos:], p[:n])
w.c.writePos += n
p = p[n:]
}
return nn, nil
}
func (w messageWriter) ReadFrom(r io.Reader) (nn int64, err error) {
if err := w.err(); err != nil {
return 0, err
}
for {
if w.c.writePos == len(w.c.writeBuf) {
err = w.c.flushFrame(false, nil)
if err != nil {
break
}
}
var n int
n, err = r.Read(w.c.writeBuf[w.c.writePos:])
w.c.writePos += n
nn += int64(n)
if err != nil {
if err == io.EOF {
err = nil
}
break
}
}
return nn, err
}
func (w messageWriter) Close() error {
if err := w.err(); err != nil {
return err
}
return w.c.flushFrame(true, nil)
}
// WriteMessage is a helper method for getting a writer using NextWriter,
// writing the message and closing the writer.
func (c *Conn) WriteMessage(messageType int, data []byte) error {
wr, err := c.NextWriter(messageType)
if err != nil {
return err
}
w := wr.(messageWriter)
if _, err := w.write(true, data); err != nil {
return err
}
if c.writeSeq == w.seq {
if err := c.flushFrame(true, nil); err != nil {
return err
}
}
return nil
}
// SetWriteDeadline sets the write deadline on the underlying network
// connection. After a write has timed out, the websocket state is corrupt and
// all future writes will return an error. A zero value for t means writes will
// not time out.
func (c *Conn) SetWriteDeadline(t time.Time) error {
c.writeDeadline = t
return nil
}
// Read methods
// readFull is like io.ReadFull except that io.EOF is never returned.
func (c *Conn) readFull(p []byte) (err error) {
var n int
for n < len(p) && err == nil {
var nn int
nn, err = c.br.Read(p[n:])
n += nn
}
if n == len(p) {
err = nil
} else if err == io.EOF {
err = errUnexpectedEOF
}
return
}
func (c *Conn) advanceFrame() (int, error) {
// 1. Skip remainder of previous frame.
if c.readRemaining > 0 {
if _, err := io.CopyN(ioutil.Discard, c.br, c.readRemaining); err != nil {
return noFrame, err
}
}
// 2. Read and parse first two bytes of frame header.
var b [8]byte
if err := c.readFull(b[:2]); err != nil {
return noFrame, err
}
final := b[0]&finalBit != 0
frameType := int(b[0] & 0xf)
reserved := int((b[0] >> 4) & 0x7)
mask := b[1]&maskBit != 0
c.readRemaining = int64(b[1] & 0x7f)
if reserved != 0 {
return noFrame, c.handleProtocolError("unexpected reserved bits " + strconv.Itoa(reserved))
}
switch frameType {
case CloseMessage, PingMessage, PongMessage:
if c.readRemaining > maxControlFramePayloadSize {
return noFrame, c.handleProtocolError("control frame length > 125")
}
if !final {
return noFrame, c.handleProtocolError("control frame not final")
}
case TextMessage, BinaryMessage:
if !c.readFinal {
return noFrame, c.handleProtocolError("message start before final message frame")
}
c.readFinal = final
case continuationFrame:
if c.readFinal {
return noFrame, c.handleProtocolError("continuation after final message frame")
}
c.readFinal = final
default:
return noFrame, c.handleProtocolError("unknown opcode " + strconv.Itoa(frameType))
}
// 3. Read and parse frame length.
switch c.readRemaining {
case 126:
if err := c.readFull(b[:2]); err != nil {
return noFrame, err
}
c.readRemaining = int64(binary.BigEndian.Uint16(b[:2]))
case 127:
if err := c.readFull(b[:8]); err != nil {
return noFrame, err
}
c.readRemaining = int64(binary.BigEndian.Uint64(b[:8]))
}
// 4. Handle frame masking.
if mask != c.isServer {
return noFrame, c.handleProtocolError("incorrect mask flag")
}
if mask {
c.readMaskPos = 0
if err := c.readFull(c.readMaskKey[:]); err != nil {
return noFrame, err
}
}
// 5. For text and binary messages, enforce read limit and return.
if frameType == continuationFrame || frameType == TextMessage || frameType == BinaryMessage {
c.readLength += c.readRemaining
if c.readLimit > 0 && c.readLength > c.readLimit {
c.WriteControl(CloseMessage, FormatCloseMessage(CloseMessageTooBig, ""), time.Now().Add(writeWait))
return noFrame, ErrReadLimit
}
return frameType, nil
}
// 6. Read control frame payload.
var payload []byte
if c.readRemaining > 0 {
payload = make([]byte, c.readRemaining)
c.readRemaining = 0
if err := c.readFull(payload); err != nil {
return noFrame, err
}
if c.isServer {
maskBytes(c.readMaskKey, 0, payload)
}
}
// 7. Process control frame payload.
switch frameType {
case PongMessage:
if err := c.handlePong(string(payload)); err != nil {
return noFrame, err
}
case PingMessage:
if err := c.handlePing(string(payload)); err != nil {
return noFrame, err
}
case CloseMessage:
echoMessage := []byte{}
closeCode := CloseNoStatusReceived
closeText := ""
if len(payload) >= 2 {
echoMessage = payload[:2]
closeCode = int(binary.BigEndian.Uint16(payload))
closeText = string(payload[2:])
}
c.WriteControl(CloseMessage, echoMessage, time.Now().Add(writeWait))
return noFrame, &CloseError{Code: closeCode, Text: closeText}
}
return frameType, nil
}
func (c *Conn) handleProtocolError(message string) error {
c.WriteControl(CloseMessage, FormatCloseMessage(CloseProtocolError, message), time.Now().Add(writeWait))
return errors.New("websocket: " + message)
}
// NextReader returns the next data message received from the peer. The
// returned messageType is either TextMessage or BinaryMessage.
//
// There can be at most one open reader on a connection. NextReader discards
// the previous message if the application has not already consumed it.
//
// Applications must break out of the application's read loop when this method
// returns a non-nil error value. Errors returned from this method are
// permanent. Once this method returns a non-nil error, all subsequent calls to
// this method return the same error.
func (c *Conn) NextReader() (messageType int, r io.Reader, err error) {
c.readSeq++
c.readLength = 0
for c.readErr == nil {
frameType, err := c.advanceFrame()
if err != nil {
c.readErr = hideTempErr(err)
break
}
if frameType == TextMessage || frameType == BinaryMessage {
return frameType, messageReader{c, c.readSeq}, nil
}
}
// Applications that do not handle the error returned from this method can spin
// in a tight loop on connection failure. To help application developers detect
// this error, panic on repeated reads to the failed connection.
c.readErrCount++
if c.readErrCount >= 1000 {
panic("repeated read on failed websocket connection")
}
return noFrame, nil, c.readErr
}
type messageReader struct {
c *Conn
seq int
}
func (r messageReader) Read(b []byte) (int, error) {
if r.seq != r.c.readSeq {
return 0, io.EOF
}
for r.c.readErr == nil {
if r.c.readRemaining > 0 {
if int64(len(b)) > r.c.readRemaining {
b = b[:r.c.readRemaining]
}
n, err := r.c.br.Read(b)
r.c.readErr = hideTempErr(err)
if r.c.isServer {
r.c.readMaskPos = maskBytes(r.c.readMaskKey, r.c.readMaskPos, b[:n])
}
r.c.readRemaining -= int64(n)
return n, r.c.readErr
}
if r.c.readFinal {
r.c.readSeq++
return 0, io.EOF
}
frameType, err := r.c.advanceFrame()
switch {
case err != nil:
r.c.readErr = hideTempErr(err)
case frameType == TextMessage || frameType == BinaryMessage:
r.c.readErr = errors.New("websocket: internal error, unexpected text or binary in Reader")
}
}
err := r.c.readErr
if err == io.EOF && r.seq == r.c.readSeq {
err = errUnexpectedEOF
}
return 0, err
}
// ReadMessage is a helper method for getting a reader using NextReader and
// reading from that reader to a buffer.
func (c *Conn) ReadMessage() (messageType int, p []byte, err error) {
var r io.Reader
messageType, r, err = c.NextReader()
if err != nil {
return messageType, nil, err
}
p, err = ioutil.ReadAll(r)
return messageType, p, err
}
// SetReadDeadline sets the read deadline on the underlying network connection.
// After a read has timed out, the websocket connection state is corrupt and
// all future reads will return an error. A zero value for t means reads will
// not time out.
func (c *Conn) SetReadDeadline(t time.Time) error {
return c.conn.SetReadDeadline(t)
}
// SetReadLimit sets the maximum size for a message read from the peer. If a
// message exceeds the limit, the connection sends a close frame to the peer
// and returns ErrReadLimit to the application.
func (c *Conn) SetReadLimit(limit int64) {
c.readLimit = limit
}
// SetPingHandler sets the handler for ping messages received from the peer.
// The appData argument to h is the PING frame application data. The default
// ping handler sends a pong to the peer.
func (c *Conn) SetPingHandler(h func(appData string) error) {
if h == nil {
h = func(message string) error {
err := c.WriteControl(PongMessage, []byte(message), time.Now().Add(writeWait))
if err == ErrCloseSent {
return nil
} else if e, ok := err.(net.Error); ok && e.Temporary() {
return nil
}
return err
}
}
c.handlePing = h
}
// SetPongHandler sets the handler for pong messages received from the peer.
// The appData argument to h is the PONG frame application data. The default
// pong handler does nothing.
func (c *Conn) SetPongHandler(h func(appData string) error) {
if h == nil {
h = func(string) error { return nil }
}
c.handlePong = h
}
// UnderlyingConn returns the internal net.Conn. This can be used to make
// further modifications to connection-specific flags.
func (c *Conn) UnderlyingConn() net.Conn {
return c.conn
}
// FormatCloseMessage formats closeCode and text as a WebSocket close message.
func FormatCloseMessage(closeCode int, text string) []byte {
buf := make([]byte, 2+len(text))
binary.BigEndian.PutUint16(buf, uint16(closeCode))
copy(buf[2:], text)
return buf
}
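As an illustration of how FormatCloseMessage and WriteControl above combine, here is a hedged sketch of a graceful-shutdown helper; the helper name and package are invented for the example:

package wsutil // illustrative helper package, not part of this commit

import (
	"time"

	"github.com/gorilla/websocket"
)

// CloseGracefully sends a normal-closure close frame and then closes the
// underlying connection. ErrCloseSent only means a close frame was already sent.
func CloseGracefully(c *websocket.Conn) error {
	msg := websocket.FormatCloseMessage(websocket.CloseNormalClosure, "")
	err := c.WriteControl(websocket.CloseMessage, msg, time.Now().Add(time.Second))
	if err != nil && err != websocket.ErrCloseSent {
		return err
	}
	return c.Close()
}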

View File

@ -0,0 +1,148 @@
// Copyright 2013 The Gorilla WebSocket Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package websocket implements the WebSocket protocol defined in RFC 6455.
//
// Overview
//
// The Conn type represents a WebSocket connection. A server application uses
// the Upgrade function from an Upgrader object with an HTTP request handler
// to get a pointer to a Conn:
//
// var upgrader = websocket.Upgrader{
// ReadBufferSize: 1024,
// WriteBufferSize: 1024,
// }
//
// func handler(w http.ResponseWriter, r *http.Request) {
// conn, err := upgrader.Upgrade(w, r, nil)
// if err != nil {
// log.Println(err)
// return
// }
// ... Use conn to send and receive messages.
// }
//
// Call the connection's WriteMessage and ReadMessage methods to send and
// receive messages as a slice of bytes. This snippet of code shows how to echo
// messages using these methods:
//
// for {
// messageType, p, err := conn.ReadMessage()
// if err != nil {
// return
// }
// if err = conn.WriteMessage(messageType, p); err != nil {
// return err
// }
// }
//
// In the above snippet, p is a []byte and messageType is an int with value
// websocket.BinaryMessage or websocket.TextMessage.
//
// An application can also send and receive messages using the io.WriteCloser
// and io.Reader interfaces. To send a message, call the connection NextWriter
// method to get an io.WriteCloser, write the message to the writer and close
// the writer when done. To receive a message, call the connection NextReader
// method to get an io.Reader and read until io.EOF is returned. This snippet
// shows how to echo messages using the NextWriter and NextReader methods:
//
// for {
// messageType, r, err := conn.NextReader()
// if err != nil {
// return
// }
// w, err := conn.NextWriter(messageType)
// if err != nil {
// return err
// }
// if _, err := io.Copy(w, r); err != nil {
// return err
// }
// if err := w.Close(); err != nil {
// return err
// }
// }
//
// Data Messages
//
// The WebSocket protocol distinguishes between text and binary data messages.
// Text messages are interpreted as UTF-8 encoded text. The interpretation of
// binary messages is left to the application.
//
// This package uses the TextMessage and BinaryMessage integer constants to
// identify the two data message types. The ReadMessage and NextReader methods
// return the type of the received message. The messageType argument to the
// WriteMessage and NextWriter methods specifies the type of a sent message.
//
// It is the application's responsibility to ensure that text messages are
// valid UTF-8 encoded text.
//
// Control Messages
//
// The WebSocket protocol defines three types of control messages: close, ping
// and pong. Call the connection WriteControl, WriteMessage or NextWriter
// methods to send a control message to the peer.
//
// Connections handle received ping and pong messages by invoking callback
// functions set with SetPingHandler and SetPongHandler methods. The default
// ping handler sends a pong to the client. The callback functions can be
// invoked from the NextReader, ReadMessage or the message Read method.
//
// Connections handle received close messages by sending a close message to the
// peer and returning a *CloseError from the NextReader, ReadMessage or the
// message Read method.
//
// The application must read the connection to process ping and close messages
// sent from the peer. If the application is not otherwise interested in
// messages from the peer, then the application should start a goroutine to
// read and discard messages from the peer. A simple example is:
//
// func readLoop(c *websocket.Conn) {
// for {
// if _, _, err := c.NextReader(); err != nil {
// c.Close()
// break
// }
// }
// }
//
// Concurrency
//
// Connections support one concurrent reader and one concurrent writer.
//
// Applications are responsible for ensuring that no more than one goroutine
// calls the write methods (NextWriter, SetWriteDeadline, WriteMessage,
// WriteJSON) concurrently and that no more than one goroutine calls the read
// methods (NextReader, SetReadDeadline, ReadMessage, ReadJSON, SetPongHandler,
// SetPingHandler) concurrently.
//
// The Close and WriteControl methods can be called concurrently with all other
// methods.
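//
// One common application-level pattern (a sketch, not something this package
// requires) is to confine all writes to a single goroutine fed through a
// channel, so WriteMessage is never called concurrently; conn below stands
// for an established *Conn:
//
//	send := make(chan []byte, 16)
//	go func() {
//		for msg := range send {
//			if err := conn.WriteMessage(websocket.TextMessage, msg); err != nil {
//				return
//			}
//		}
//	}()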
//
// Origin Considerations
//
// Web browsers allow Javascript applications to open a WebSocket connection to
// any host. It's up to the server to enforce an origin policy using the Origin
// request header sent by the browser.
//
// The Upgrader calls the function specified in the CheckOrigin field to check
// the origin. If the CheckOrigin function returns false, then the Upgrade
// method fails the WebSocket handshake with HTTP status 403.
//
// If the CheckOrigin field is nil, then the Upgrader uses a safe default: fail
// the handshake if the Origin request header is present and not equal to the
// Host request header.
//
// An application can allow connections from any origin by specifying a
// function that always returns true:
//
// var upgrader = websocket.Upgrader{
// CheckOrigin: func(r *http.Request) bool { return true },
// }
//
// The deprecated Upgrade function does not enforce an origin policy. It's the
// application's responsibility to check the Origin header before calling
// Upgrade.
package websocket

View File

@ -0,0 +1,55 @@
// Copyright 2013 The Gorilla WebSocket Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package websocket
import (
"encoding/json"
"io"
)
// WriteJSON is deprecated, use c.WriteJSON instead.
func WriteJSON(c *Conn, v interface{}) error {
return c.WriteJSON(v)
}
// WriteJSON writes the JSON encoding of v to the connection.
//
// See the documentation for encoding/json Marshal for details about the
// conversion of Go values to JSON.
func (c *Conn) WriteJSON(v interface{}) error {
w, err := c.NextWriter(TextMessage)
if err != nil {
return err
}
err1 := json.NewEncoder(w).Encode(v)
err2 := w.Close()
if err1 != nil {
return err1
}
return err2
}
// ReadJSON is deprecated, use c.ReadJSON instead.
func ReadJSON(c *Conn, v interface{}) error {
return c.ReadJSON(v)
}
// ReadJSON reads the next JSON-encoded message from the connection and stores
// it in the value pointed to by v.
//
// See the documentation for the encoding/json Unmarshal function for details
// about the conversion of JSON to a Go value.
func (c *Conn) ReadJSON(v interface{}) error {
_, r, err := c.NextReader()
if err != nil {
return err
}
err = json.NewDecoder(r).Decode(v)
if err == io.EOF {
// One value is expected in the message.
err = io.ErrUnexpectedEOF
}
return err
}
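A small, hypothetical illustration of the ReadJSON/WriteJSON helpers above from application code; the type and function names are invented for the example:

package wsjsonexample // illustrative only, not part of this commit

import "github.com/gorilla/websocket"

type event struct {
	Name  string `json:"name"`
	Count int    `json:"count"`
}

// Relay reads one JSON message, increments its counter and writes it back,
// using the ReadJSON/WriteJSON helpers defined in this file.
func Relay(c *websocket.Conn) error {
	var ev event
	if err := c.ReadJSON(&ev); err != nil {
		return err
	}
	ev.Count++
	return c.WriteJSON(ev)
}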

View File

@ -0,0 +1,253 @@
// Copyright 2013 The Gorilla WebSocket Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package websocket
import (
"bufio"
"errors"
"net"
"net/http"
"net/url"
"strings"
"time"
)
// HandshakeError describes an error with the handshake from the peer.
type HandshakeError struct {
message string
}
func (e HandshakeError) Error() string { return e.message }
// Upgrader specifies parameters for upgrading an HTTP connection to a
// WebSocket connection.
type Upgrader struct {
// HandshakeTimeout specifies the duration for the handshake to complete.
HandshakeTimeout time.Duration
// ReadBufferSize and WriteBufferSize specify I/O buffer sizes. If a buffer
// size is zero, then a default value of 4096 is used. The I/O buffer sizes
// do not limit the size of the messages that can be sent or received.
ReadBufferSize, WriteBufferSize int
// Subprotocols specifies the server's supported protocols in order of
// preference. If this field is set, then the Upgrade method negotiates a
// subprotocol by selecting the first match in this list with a protocol
// requested by the client.
Subprotocols []string
// Error specifies the function for generating HTTP error responses. If Error
// is nil, then http.Error is used to generate the HTTP response.
Error func(w http.ResponseWriter, r *http.Request, status int, reason error)
// CheckOrigin returns true if the request Origin header is acceptable. If
// CheckOrigin is nil, the host in the Origin header must not be set or
// must match the host of the request.
CheckOrigin func(r *http.Request) bool
}
func (u *Upgrader) returnError(w http.ResponseWriter, r *http.Request, status int, reason string) (*Conn, error) {
err := HandshakeError{reason}
if u.Error != nil {
u.Error(w, r, status, err)
} else {
http.Error(w, http.StatusText(status), status)
}
return nil, err
}
// checkSameOrigin returns true if the origin is not set or is equal to the request host.
func checkSameOrigin(r *http.Request) bool {
origin := r.Header["Origin"]
if len(origin) == 0 {
return true
}
u, err := url.Parse(origin[0])
if err != nil {
return false
}
return u.Host == r.Host
}
func (u *Upgrader) selectSubprotocol(r *http.Request, responseHeader http.Header) string {
if u.Subprotocols != nil {
clientProtocols := Subprotocols(r)
for _, serverProtocol := range u.Subprotocols {
for _, clientProtocol := range clientProtocols {
if clientProtocol == serverProtocol {
return clientProtocol
}
}
}
} else if responseHeader != nil {
return responseHeader.Get("Sec-Websocket-Protocol")
}
return ""
}
// Upgrade upgrades the HTTP server connection to the WebSocket protocol.
//
// The responseHeader is included in the response to the client's upgrade
// request. Use the responseHeader to specify cookies (Set-Cookie) and the
// application negotiated subprotocol (Sec-Websocket-Protocol).
//
// If the upgrade fails, then Upgrade replies to the client with an HTTP error
// response.
func (u *Upgrader) Upgrade(w http.ResponseWriter, r *http.Request, responseHeader http.Header) (*Conn, error) {
if r.Method != "GET" {
return u.returnError(w, r, http.StatusMethodNotAllowed, "websocket: method not GET")
}
if values := r.Header["Sec-Websocket-Version"]; len(values) == 0 || values[0] != "13" {
return u.returnError(w, r, http.StatusBadRequest, "websocket: version != 13")
}
if !tokenListContainsValue(r.Header, "Connection", "upgrade") {
return u.returnError(w, r, http.StatusBadRequest, "websocket: could not find connection header with token 'upgrade'")
}
if !tokenListContainsValue(r.Header, "Upgrade", "websocket") {
return u.returnError(w, r, http.StatusBadRequest, "websocket: could not find upgrade header with token 'websocket'")
}
checkOrigin := u.CheckOrigin
if checkOrigin == nil {
checkOrigin = checkSameOrigin
}
if !checkOrigin(r) {
return u.returnError(w, r, http.StatusForbidden, "websocket: origin not allowed")
}
challengeKey := r.Header.Get("Sec-Websocket-Key")
if challengeKey == "" {
return u.returnError(w, r, http.StatusBadRequest, "websocket: key missing or blank")
}
subprotocol := u.selectSubprotocol(r, responseHeader)
var (
netConn net.Conn
br *bufio.Reader
err error
)
h, ok := w.(http.Hijacker)
if !ok {
return u.returnError(w, r, http.StatusInternalServerError, "websocket: response does not implement http.Hijacker")
}
var rw *bufio.ReadWriter
netConn, rw, err = h.Hijack()
if err != nil {
return u.returnError(w, r, http.StatusInternalServerError, err.Error())
}
br = rw.Reader
if br.Buffered() > 0 {
netConn.Close()
return nil, errors.New("websocket: client sent data before handshake is complete")
}
c := newConn(netConn, true, u.ReadBufferSize, u.WriteBufferSize)
c.subprotocol = subprotocol
p := c.writeBuf[:0]
p = append(p, "HTTP/1.1 101 Switching Protocols\r\nUpgrade: websocket\r\nConnection: Upgrade\r\nSec-WebSocket-Accept: "...)
p = append(p, computeAcceptKey(challengeKey)...)
p = append(p, "\r\n"...)
if c.subprotocol != "" {
p = append(p, "Sec-Websocket-Protocol: "...)
p = append(p, c.subprotocol...)
p = append(p, "\r\n"...)
}
for k, vs := range responseHeader {
if k == "Sec-Websocket-Protocol" {
continue
}
for _, v := range vs {
p = append(p, k...)
p = append(p, ": "...)
for i := 0; i < len(v); i++ {
b := v[i]
if b <= 31 {
// prevent response splitting.
b = ' '
}
p = append(p, b)
}
p = append(p, "\r\n"...)
}
}
p = append(p, "\r\n"...)
// Clear deadlines set by HTTP server.
netConn.SetDeadline(time.Time{})
if u.HandshakeTimeout > 0 {
netConn.SetWriteDeadline(time.Now().Add(u.HandshakeTimeout))
}
if _, err = netConn.Write(p); err != nil {
netConn.Close()
return nil, err
}
if u.HandshakeTimeout > 0 {
netConn.SetWriteDeadline(time.Time{})
}
return c, nil
}
// Upgrade upgrades the HTTP server connection to the WebSocket protocol.
//
// This function is deprecated, use websocket.Upgrader instead.
//
// The application is responsible for checking the request origin before
// calling Upgrade. An example implementation of the same origin policy is:
//
// if req.Header.Get("Origin") != "http://"+req.Host {
// http.Error(w, "Origin not allowed", 403)
// return
// }
//
// If the endpoint supports subprotocols, then the application is responsible
// for negotiating the protocol used on the connection. Use the Subprotocols()
// function to get the subprotocols requested by the client. Use the
// Sec-Websocket-Protocol response header to specify the subprotocol selected
// by the application.
//
// The responseHeader is included in the response to the client's upgrade
// request. Use the responseHeader to specify cookies (Set-Cookie) and the
// negotiated subprotocol (Sec-Websocket-Protocol).
//
// The connection buffers IO to the underlying network connection. The
// readBufSize and writeBufSize parameters specify the size of the buffers to
// use. Messages can be larger than the buffers.
//
// If the request is not a valid WebSocket handshake, then Upgrade returns an
// error of type HandshakeError. Applications should handle this error by
// replying to the client with an HTTP error response.
func Upgrade(w http.ResponseWriter, r *http.Request, responseHeader http.Header, readBufSize, writeBufSize int) (*Conn, error) {
u := Upgrader{ReadBufferSize: readBufSize, WriteBufferSize: writeBufSize}
u.Error = func(w http.ResponseWriter, r *http.Request, status int, reason error) {
// don't return errors to maintain backwards compatibility
}
u.CheckOrigin = func(r *http.Request) bool {
// allow all connections by default
return true
}
return u.Upgrade(w, r, responseHeader)
}
// Subprotocols returns the subprotocols requested by the client in the
// Sec-Websocket-Protocol header.
func Subprotocols(r *http.Request) []string {
h := strings.TrimSpace(r.Header.Get("Sec-Websocket-Protocol"))
if h == "" {
return nil
}
protocols := strings.Split(h, ",")
for i := range protocols {
protocols[i] = strings.TrimSpace(protocols[i])
}
return protocols
}
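As a rough illustration of how the Upgrader above is typically wired into an HTTP handler (the endpoint, buffer sizes and handler name here are illustrative and not part of this vendored file), a minimal echo server could look like this:

```go
package main

import (
	"log"
	"net/http"

	"github.com/gorilla/websocket"
)

// upgrader relies on the defaults documented above; a real server would often
// set CheckOrigin explicitly instead of using the same-origin default.
var upgrader = websocket.Upgrader{ReadBufferSize: 1024, WriteBufferSize: 1024}

// echoHandler upgrades the request to a WebSocket and echoes messages back.
func echoHandler(w http.ResponseWriter, r *http.Request) {
	conn, err := upgrader.Upgrade(w, r, nil)
	if err != nil {
		// Upgrade has already replied to the client with an HTTP error.
		log.Println("upgrade failed:", err)
		return
	}
	defer conn.Close()
	for {
		mt, msg, err := conn.ReadMessage()
		if err != nil {
			return
		}
		if err := conn.WriteMessage(mt, msg); err != nil {
			return
		}
	}
}

func main() {
	http.HandleFunc("/echo", echoHandler)
	log.Fatal(http.ListenAndServe(":8080", nil))
}
```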

View File

@ -0,0 +1,44 @@
// Copyright 2013 The Gorilla WebSocket Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package websocket
import (
"crypto/rand"
"crypto/sha1"
"encoding/base64"
"io"
"net/http"
"strings"
)
// tokenListContainsValue returns true if the 1#token header with the given
// name contains token.
func tokenListContainsValue(header http.Header, name string, value string) bool {
for _, v := range header[name] {
for _, s := range strings.Split(v, ",") {
if strings.EqualFold(value, strings.TrimSpace(s)) {
return true
}
}
}
return false
}
var keyGUID = []byte("258EAFA5-E914-47DA-95CA-C5AB0DC85B11")
func computeAcceptKey(challengeKey string) string {
h := sha1.New()
h.Write([]byte(challengeKey))
h.Write(keyGUID)
return base64.StdEncoding.EncodeToString(h.Sum(nil))
}
func generateChallengeKey() (string, error) {
p := make([]byte, 16)
if _, err := io.ReadFull(rand.Reader, p); err != nil {
return "", err
}
return base64.StdEncoding.EncodeToString(p), nil
}
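The helpers above implement the standard RFC 6455 accept-key computation, so they can be sanity-checked against the sample handshake from the RFC: the client key `dGhlIHNhbXBsZSBub25jZQ==` must produce the accept value `s3pPLMBiTxaQ9kYGzzhZRbK+xOo=`. A small standalone check (re-implementing the same hash rather than calling the unexported helper) looks like:

```go
package main

import (
	"crypto/sha1"
	"encoding/base64"
	"fmt"
)

func main() {
	// Sample client key and the fixed GUID from RFC 6455, section 1.3.
	challengeKey := "dGhlIHNhbXBsZSBub25jZQ=="
	keyGUID := "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"

	h := sha1.New()
	h.Write([]byte(challengeKey))
	h.Write([]byte(keyGUID))
	accept := base64.StdEncoding.EncodeToString(h.Sum(nil))

	// Prints: s3pPLMBiTxaQ9kYGzzhZRbK+xOo=
	fmt.Println(accept)
}
```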

View File

@ -18,6 +18,7 @@ module.exports = function (grunt) {
} }
config.pkg.version = grunt.option('pkgVer') || config.pkg.version; config.pkg.version = grunt.option('pkgVer') || config.pkg.version;
console.log('Version', config.pkg.version);
// load plugins // load plugins
require('load-grunt-tasks')(grunt); require('load-grunt-tasks')(grunt);

View File

@ -14,10 +14,10 @@
], ],
"dependencies": { "dependencies": {
"jquery": "~2.1.4", "jquery": "~2.1.4",
"angular": "~1.5.1", "angular": "~1.5.3",
"angular-route": "~1.5.1", "angular-route": "~1.5.3",
"angular-mocks": "~1.5.1", "angular-mocks": "~1.5.3",
"angular-sanitize": "~1.5.1", "angular-sanitize": "~1.5.3",
"angular-bindonce": "~0.3.3" "angular-bindonce": "~0.3.3"
} }
} }

View File

@ -73,8 +73,7 @@ func main() {
grunt("test") grunt("test")
case "package": case "package":
//verifyGitRepoIsClean() grunt("release", fmt.Sprintf("--pkgVer=%v-%v", linuxPackageVersion, linuxPackageIteration))
grunt("release")
createLinuxPackages() createLinuxPackages()
case "pkg-rpm": case "pkg-rpm":
@ -100,12 +99,12 @@ func main() {
func makeLatestDistCopies() { func makeLatestDistCopies() {
rpmIteration := "-1" rpmIteration := "-1"
if linuxPackageIteration != "" { if linuxPackageIteration != "" {
rpmIteration = "-" + linuxPackageIteration rpmIteration = linuxPackageIteration
} }
runError("cp", "dist/grafana_"+version+"_amd64.deb", "dist/grafana_latest_amd64.deb") runError("cp", fmt.Sprintf("dist/grafana_%v-%v_amd64.deb", linuxPackageVersion, linuxPackageIteration), "dist/grafana_latest_amd64.deb")
runError("cp", "dist/grafana-"+linuxPackageVersion+rpmIteration+".x86_64.rpm", "dist/grafana-latest-1.x86_64.rpm") runError("cp", fmt.Sprintf("dist/grafana-%v-%v.x86_64.rpm", linuxPackageVersion, rpmIteration), "dist/grafana-latest-1.x86_64.rpm")
runError("cp", "dist/grafana-"+version+".linux-x64.tar.gz", "dist/grafana-latest.linux-x64.tar.gz") runError("cp", fmt.Sprintf("dist/grafana-%v-%v.linux-x64.tar.gz", linuxPackageVersion, linuxPackageIteration), "dist/grafana-latest.linux-x64.tar.gz")
} }
func readVersionFromPackageJson() { func readVersionFromPackageJson() {
@ -133,6 +132,11 @@ func readVersionFromPackageJson() {
if len(parts) > 1 { if len(parts) > 1 {
linuxPackageVersion = parts[0] linuxPackageVersion = parts[0]
linuxPackageIteration = parts[1] linuxPackageIteration = parts[1]
if linuxPackageIteration != "" {
// add timestamp to iteration
linuxPackageIteration = fmt.Sprintf("%s%v", linuxPackageIteration, time.Now().Unix())
}
log.Println(fmt.Sprintf("Iteration %v", linuxPackageIteration))
} }
} }

View File

@ -28,8 +28,31 @@ search_base_dns = ["dc=grafana,dc=org"]
# This is done by enabling group_search_filter below. You must also set member_of= "cn" # This is done by enabling group_search_filter below. You must also set member_of= "cn"
# in [servers.attributes] below. # in [servers.attributes] below.
# Users with nested/recursive group membership and an LDAP server that supports LDAP_MATCHING_RULE_IN_CHAIN
# can set group_search_filter, group_search_filter_user_attribute, group_search_base_dns and member_of
# below in such a way that the user's recursive group membership is considered.
#
# Nested Groups + Active Directory (AD) Example:
#
# AD groups store the Distinguished Names (DNs) of members, so your filter must
# recursively search your groups for the authenticating user's DN. For example:
#
# group_search_filter = "(member:1.2.840.113556.1.4.1941:=%s)"
# group_search_filter_user_attribute = "distinguishedName"
# group_search_base_dns = ["ou=groups,dc=grafana,dc=org"]
#
# [servers.attributes]
# ...
# member_of = "distinguishedName"
## Group search filter, to retrieve the groups of which the user is a member (only set if memberOf attribute is not available) ## Group search filter, to retrieve the groups of which the user is a member (only set if memberOf attribute is not available)
# group_search_filter = "(&(objectClass=posixGroup)(memberUid=%s))" # group_search_filter = "(&(objectClass=posixGroup)(memberUid=%s))"
## Group search filter user attribute defines what user attribute gets substituted for %s in group_search_filter.
## Defaults to the value of username in [servers.attributes]
## Valid options are any of your values in [servers.attributes]
## If you are using nested groups you probably want to set this and member_of in
## [servers.attributes] to "distinguishedName"
# group_search_filter_user_attribute = "distinguishedName"
## An array of the base DNs to search through for groups. Typically uses ou=groups ## An array of the base DNs to search through for groups. Typically uses ou=groups
# group_search_base_dns = ["ou=groups,dc=grafana,dc=org"] # group_search_base_dns = ["ou=groups,dc=grafana,dc=org"]

View File

@ -1 +1 @@
3.0.0 4.0.0

View File

@ -86,12 +86,12 @@ pages:
- ['http_api/snapshot.md', 'API', 'Snapshot API'] - ['http_api/snapshot.md', 'API', 'Snapshot API']
- ['http_api/other.md', 'API', 'Other API'] - ['http_api/other.md', 'API', 'Other API']
- ['plugins/overview.md', 'Plugins', 'Overview'] - ['plugins/index.md', 'Plugins', 'Overview']
- ['plugins/installation.md', 'Plugins', 'Installation'] - ['plugins/installation.md', 'Plugins', 'Installation']
- ['plugins/datasources.md', 'Plugins', 'Datasource plugins'] - ['plugins/development.md', 'Plugins', 'Development']
- ['plugins/panels.md', 'Plugins', 'Panel plugins'] - ['plugins/apps.md', 'Plugins', 'Apps']
- ['plugins/development.md', 'Plugins', 'Plugin development'] - ['plugins/datasources.md', 'Plugins', 'Datasources']
- ['plugins/plugin.json.md', 'Plugins', 'Plugin json'] - ['plugins/panels.md', 'Plugins', 'Panels']
- ['tutorials/index.md', 'Tutorials', 'Tutorials'] - ['tutorials/index.md', 'Tutorials', 'Tutorials']
- ['tutorials/hubot_howto.md', 'Tutorials', 'How To integrate Hubot and Grafana'] - ['tutorials/hubot_howto.md', 'Tutorials', 'How To integrate Hubot and Grafana']

View File

@ -69,6 +69,20 @@ Name | Description
For details about the metrics CloudWatch provides, please refer to the [CloudWatch documentation](https://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/CW_Support_For_AWS.html). For details about the metrics CloudWatch provides, please refer to the [CloudWatch documentation](https://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/CW_Support_For_AWS.html).
## Example templated Queries
Example dimension queries which will return a list of resources for individual AWS services:
Service | Query
------- | -----
ELB | `dimension_values(us-east-1,AWS/ELB,RequestCount,LoadBalancerName)`
ElastiCache | `dimension_values(us-east-1,AWS/ElastiCache,CPUUtilization,CacheClusterId)`
RedShift | `dimension_values(us-east-1,AWS/Redshift,CPUUtilization,ClusterIdentifier)`
RDS | `dimension_values(us-east-1,AWS/RDS,CPUUtilization,DBInstanceIdentifier)`
S3 | `dimension_values(us-east-1,AWS/S3,BucketSizeBytes,BucketName)`
## ec2_instance_attribute JSON filters
The `ec2_instance_attribute` query take `filters` in JSON format. The `ec2_instance_attribute` query take `filters` in JSON format.
You can specify [pre-defined filters of ec2:DescribeInstances](http://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeInstances.html). You can specify [pre-defined filters of ec2:DescribeInstances](http://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeInstances.html).
Specify like `{ filter_name1: [ filter_value1 ], filter_name2: [ filter_value2 ] }` Specify like `{ filter_name1: [ filter_value1 ], filter_name2: [ filter_value2 ] }`

View File

@ -1,46 +0,0 @@
---
page_title: App plugin
page_description: App plugin for Grafana
page_keywords: grafana, plugins, documentation
---
> Our goal is not to have a very extensive documentation but rather have actual code that people can look at. An example implementation of an app can be found in this [example app repo](https://github.com/grafana/example-app)
# Apps
App plugins is a new kind of grafana plugin that can bundle datasource and panel plugins within one package. It also enable the plugin author to create custom pages within grafana. The custom pages enables the plugin author to include things like documentation, sign up forms or controlling other services using HTTP requests.
Datasource and panel plugins will show up like normal plugins. The app pages will be available in the main menu.
<img class="no-shadow" src="/img/v3/app-in-main-menu.png">
## Enabling app plugins
After installing an app it have to be enabled before it show up as an datasource or panel. You can do that on the app page in the config tab.
## README.md
The readme file in the mounted folder will show up in the overview tab on the app page.
## Module exports
```javascript
export {
ExampleAppConfigCtrl as ConfigCtrl,
StreamPageCtrl,
LogsPageCtrl
};
```
The only required export is the ConfigCtrl. Both StreamPageCtrl and LogsPageCtrl are custom pages defined in plugin.json
## Custom pages
Custom pages are defined in the plugin.json like this.
```json
"pages": [
{ "name": "Live stream", "component": "StreamPageCtrl", "role": "Editor"},
{ "name": "Log view", "component": "LogsPageCtrl", "role": "Viewer"}
]
```
The component field have to match one of the components exported in the module.js in the root of the plugin.
## Bundled plugins
When Grafana starts it will scan all directories within an app plugin and load folders containing a plugin.json as an plugin.

View File

@ -0,0 +1,24 @@
---
page_title: App plugin
page_description: App plugin for Grafana
page_keywords: grafana, plugins, documentation
---
# Apps
App plugins are a new kind of Grafana plugin that can bundle datasource and panel plugins within one package. They also enable the plugin author to create custom pages within Grafana, which can include things like documentation, sign-up forms, or controls for other services using HTTP requests.
Datasource and panel plugins will show up like normal plugins. The app pages will be available in the main menu.
<img class="no-shadow" src="/img/v3/app-in-main-menu.png">
## Enabling app plugins
After installing an app, it has to be enabled before its datasources and panels show up. You can do that on the app page in the config tab.
### Develop your own App
> Our goal is not to have very extensive documentation but rather to have actual
> code that people can look at. An example implementation of an app can be found
> in this [example app repo](https://github.com/grafana/example-app)

View File

@ -4,11 +4,18 @@ page_description: Datasource plugins for Grafana
page_keywords: grafana, plugins, documentation page_keywords: grafana, plugins, documentation
--- ---
> Our goal is not to have a very extensive documentation but rather have actual code that people can look at. An example implementation of a datasource can be found in this [example datasource repo](https://github.com/grafana/simple-json-datasource)
# Datasources # Datasources
Datasource plugins enables people to develop plugins for any database that communicates over http. Its up to the plugin to transform the data into time series data so that any grafana panel can then show it. Datasource plugins enable people to develop plugins for any database that
communicates over HTTP. It's up to the plugin to transform the data into
time series data so that any Grafana panel can then show it.
## Datasource development
> Our goal is not to have very extensive documentation but rather to have actual
> code that people can look at. An example implementation of a datasource can be
> found in this [example datasource repo](https://github.com/grafana/simple-json-datasource)
To interact with the rest of grafana the plugins module file can export 5 different components. To interact with the rest of grafana the plugins module file can export 5 different components.
@ -19,11 +26,14 @@ To interact with the rest of grafana the plugins module file can export 5 differ
- AnnotationsQueryCtrl - AnnotationsQueryCtrl
## Plugin json ## Plugin json
There are two datasource specific settings for the plugin.json There are two datasource specific settings for the plugin.json
```javascript ```javascript
"metrics": true, "metrics": true,
"annotations": false, "annotations": false,
``` ```
These settings indicates what kind of data the plugin can deliver. At least one of them have to be true These settings indicates what kind of data the plugin can deliver. At least one of them have to be true
## Datasource ## Datasource

View File

@ -1,28 +0,0 @@
---
page_title: Plugin development
page_description: Plugin development for Grafana
page_keywords: grafana, plugins, documentation, development
---
# Plugin development
From grafana 3.0 it's very easy to develop your own plugins and share them with other grafana users.
## What languages?
Since everything turns into javascript its up to you to choose which language you want. That said its proberbly a good idea to choose es6 or typescript since we use es6 classes in Grafana.
##Buildscript
You can use any buildsystem you like that support systemjs. All the built content should endup in a folder named dist and commited to the repository.
##Loading plugins
The easiset way to try your plugin with grafana is to [setup grafana for development](https://github.com/grafana/grafana/blob/master/DEVELOPMENT.md) and place your plugin in the /data/plugins folder in grafana. When grafana starts it will scan that folder for folders that contains a plugin.json file and mount them as plugins. If your plugin folder contains a folder named dist it will mount that folder instead of the plugin base folder.
## Examples / boilerplate
We currently have three different examples that you can fork to get started developing your grafana plugin.
- [simple-json-datasource](https://github.com/grafana/simple-json-datasource) (small datasource plugin for quering json data from backends)
- [panel-boilderplate-es5](https://github.com/grafana/grafana/tree/master/examples/panel-boilerplate-es5)
- [example-app](https://github.com/grafana/example-app)

View File

@ -0,0 +1,52 @@
---
page_title: Plugin development guide
page_description: Plugin development for Grafana
page_keywords: grafana, plugins, documentation, development
---
# Plugin development
From grafana 3.0 it's very easy to develop your own plugins and share them with other grafana users.
## Short version
1. [Setup grafana](https://github.com/grafana/grafana/blob/master/DEVELOPMENT.md)
2. Clone an example plugin into ```/var/lib/grafana/plugins``` or `data/plugins` (relative to the grafana git repo if you're running a development version from the source dir)
3. Code away!
## What languages?
Since everything turns into javascript it's up to you to choose which language you want. That said, it's probably a good idea to choose es6 or typescript since we use es6 classes in Grafana, so it's easier to get inspiration from the Grafana repo if you choose one of those languages.
## Buildscript
You can use any build system you like that supports systemjs. All the built content should end up in a folder named ```dist``` and be committed to the repository. By committing the dist folder the person who installs your plugin does not have to run any build script.
All our example plugins have build scripts configured.
## module.(js|ts)
This is the entry point for every plugin. This is the place where you should export
your plugin implementation. Depending on what kind of plugin you are developing you
will be expected to export different things. You can find what's expected for [datasource](./datasources.md), [panels](./panels.md)
and [apps](./apps.md) plugins in the documentation.
## Start developing your plugin
There are three ways that you can start developing a Grafana plugin.
1. Set up a Grafana development environment [(described here)](https://github.com/grafana/grafana/blob/master/DEVELOPMENT.md) and place your plugin in the ```data/plugins``` folder.
2. Install Grafana and place your plugin in the plugins directory which is set in your [config file](../installation/configuration.md). By default this is `/var/lib/grafana/plugins` on Linux systems.
3. Place your plugin directory anywhere you like and point to it in grafana.ini.
We encourage people to set up the full Grafana environment so that you can get inspiration from the rest of the Grafana code base.
When Grafana starts it will scan the plugin folders and mount every folder that contains a plugin.json file unless
the folder contains a subfolder named dist. In that case grafana will mount the dist folder instead.
This makes it possible to have both built and src content in the same plugin git repo.
## Examples
We currently have three different examples that you can fork/download to get started developing your grafana plugin.
- [simple-json-datasource](https://github.com/grafana/simple-json-datasource) (small datasource plugin for querying json data from backends)
- [piechart-panel](https://github.com/grafana/piechart-panel)
- [example-app](https://github.com/grafana/example-app)

View File

@ -0,0 +1,21 @@
---
page_title: Plugin overview
page_description: Plugins for Grafana
page_keywords: grafana, plugins, documentation
---
# Plugins
From Grafana 3.0, not only datasource plugins are supported but also panel plugins and apps.
Having panels as plugins makes it easy to create and add any kind of panel, to show your data
or improve your favorite dashboards. Apps are something new in Grafana that enable
bundling of datasources, panels, dashboards and Grafana pages into a cohesive experience.
Grafana already has a strong community of contributors and plugin developers.
By making it easier to develop and install plugins we hope that the community
can grow even stronger and develop new plugins that we would never have thought of.
You can discover available plugins on [Grafana.net](http://grafana.net).

View File

@ -4,43 +4,41 @@ page_description: Plugin installation for Grafana
page_keywords: grafana, plugins, documentation page_keywords: grafana, plugins, documentation
--- ---
# Plugins # Installing plugins
## Installing plugins
The easiest way to install plugins is by using the CLI tool grafana-cli which is bundled with grafana. After installing or modifying plugins, grafana-server needs to be restarted before the changes take effect. The easiest way to install plugins is by using the CLI tool grafana-cli which is bundled with grafana. After installing or modifying plugins, grafana-server needs to be restarted before the changes take effect.
### Grafana plugin directory ### Grafana plugin directory
On Linux systems the grafana-cli will assume that the grafana plugin directory is "/var/lib/grafana/plugins". It's possible to override the directory which grafana-cli will operate on by specifing the --path flag. On Windows systems this parameter have to be specified for every call. On Linux systems the grafana-cli will assume that the grafana plugin directory is `/var/lib/grafana/plugins`. It's possible to override the directory which grafana-cli will operate on by specifying the --path flag. On Windows systems this parameter has to be specified for every call.
### Grafana-cli commands ### Grafana-cli commands
List available plugins List available plugins
``` ```
grafana-cli list-remote grafana-cli plugins list-remote
``` ```
Install a plugin type Install a plugin type
``` ```
grafana-cli install <plugin-id> grafana-cli plugins install <plugin-id>
``` ```
List installed plugins List installed plugins
``` ```
grafana-cli ls grafana-cli plugins ls
``` ```
Upgrade all installed plugins Upgrade all installed plugins
``` ```
grafana-cli upgrade-all grafana-cli plugins upgrade-all
``` ```
Upgrade one plugin Upgrade one plugin
``` ```
grafana-cli upgrade <plugin-id> grafana-cli plugins upgrade <plugin-id>
``` ```
Remove one plugin Remove one plugin
``` ```
grafana-cli remove <plugin-id> grafana-cli plugins remove <plugin-id>
``` ```

View File

@ -1,12 +0,0 @@
---
page_title: Plugin overview
page_description: Plugins for Grafana
page_keywords: grafana, plugins, documentation
---
# Plugins
From Grafana 3.0 not only datasource plugins are supported but also panel plugins and apps. Having panels as plugins make it easy to create and add any kind of panel, to show your data or improve your favorite dashboards. Apps is something new in Grafana that enables bundling of datasources, panels that belongs together.
Grafana already have a strong community of contributors and plugin developers. By making it easier to develop and install plugins we hope that the community can grow even stronger and develop new plugins that we would never think about.

View File

@ -4,26 +4,15 @@ page_description: Panel plugins for Grafana
page_keywords: grafana, plugins, documentation page_keywords: grafana, plugins, documentation
--- ---
> Our goal is not to have a very extensive documentation but rather have actual code that people can look at. An example implementation of a datasource can be found in the grafana repo under /examples/panel-boilerplate-es5
# Panels # Panels
To interact with the rest of grafana the panel plugin need to export a class in the module.js. Panels are the main building block of dashboards.
This class have to inherit from sdk.PanelCtrl or sdk.MetricsPanelCtrl and be exported as PanelCtrl.
```javascript ## Panel development
return {
PanelCtrl: BoilerPlatePanelCtrl
};
```
This class will be instantiated once for every panel of its kind in a dashboard and treated as an AngularJs controller. Examples
## MetricsPanelCtrl or PanelCtrl - [clock-panel](https://github.com/grafana/clock-panel)
- [singlestat-panel](https://github.com/grafana/grafana/blob/master/public/app/plugins/panel/singlestat/module.ts)
MetricsPanelCtrl inherits from PanelCtrl and adds some common features for datasource usage. So if your Panel will be working with a datasource you should inherit from MetricsPanelCtrl. If don't need to access any datasource then you should inherit from PanelCtrl instead.
## Implementing a MetricsPanelCtrl
If you choose to inherit from MetricsPanelCtrl you should implement a function called refreshData that will take a datasource as in parameter when its time to get new data. Its recommended that the refreshData function calls the issueQueries in the base class but its not mandatory. An examples of such implementation can be found in our [example panel](https://github.com/grafana/grafana/blob/master/examples/panel-boilerplate-es5/module.js#L27-L38)

View File

@ -1,3 +1,4 @@
<li><a class='version' href='/v3.0'>Version v3.0</a></li>
<li><a class='version' href='/v2.6'>Version v2.6</a></li> <li><a class='version' href='/v2.6'>Version v2.6</a></li>
<li><a class='version' href='/v2.5'>Version v2.5</a></li> <li><a class='version' href='/v2.5'>Version v2.5</a></li>
<li><a class='version' href='/v2.1'>Version v2.1</a></li> <li><a class='version' href='/v2.1'>Version v2.1</a></li>

View File

@ -8,8 +8,6 @@ module.exports = function(config) {
// list of files / patterns to load in the browser // list of files / patterns to load in the browser
files: [ files: [
'vendor/npm/es5-shim/es5-shim.js',
'vendor/npm/es5-shim/es5-sham.js',
'vendor/npm/es6-shim/es6-shim.js', 'vendor/npm/es6-shim/es6-shim.js',
'vendor/npm/es6-promise/dist/es6-promise.js', 'vendor/npm/es6-promise/dist/es6-promise.js',
'vendor/npm/systemjs/dist/system.src.js', 'vendor/npm/systemjs/dist/system.src.js',

View File

@ -4,16 +4,17 @@
"company": "Coding Instinct AB" "company": "Coding Instinct AB"
}, },
"name": "grafana", "name": "grafana",
"version": "3.0.0-pre1", "version": "3.0.0-pre",
"repository": { "repository": {
"type": "git", "type": "git",
"url": "http://github.com/grafana/grafana.git" "url": "http://github.com/grafana/grafana.git"
}, },
"devDependencies": { "devDependencies": {
"angular2": "2.0.0-beta.0", "angular2": "2.0.0-beta.12",
"zone.js": "^0.6.6",
"autoprefixer": "^6.3.3", "autoprefixer": "^6.3.3",
"es6-promise": "^3.0.2", "es6-promise": "^3.0.2",
"es6-shim": "^0.33.3", "es6-shim": "^0.35.0",
"expect.js": "~0.2.0", "expect.js": "~0.2.0",
"glob": "~3.2.7", "glob": "~3.2.7",
"grunt": "~0.4.0", "grunt": "~0.4.0",
@ -37,7 +38,7 @@
"grunt-postcss": "^0.8.0", "grunt-postcss": "^0.8.0",
"grunt-sass": "^1.1.0", "grunt-sass": "^1.1.0",
"grunt-string-replace": "~1.2.1", "grunt-string-replace": "~1.2.1",
"grunt-systemjs-builder": "^0.2.5", "grunt-systemjs-builder": "^0.2.6",
"grunt-tslint": "^3.0.2", "grunt-tslint": "^3.0.2",
"grunt-typescript": "^0.8.0", "grunt-typescript": "^0.8.0",
"grunt-usemin": "3.0.0", "grunt-usemin": "3.0.0",
@ -48,15 +49,14 @@
"karma-coveralls": "1.1.2", "karma-coveralls": "1.1.2",
"karma-expect": "~1.1.0", "karma-expect": "~1.1.0",
"karma-mocha": "~0.2.1", "karma-mocha": "~0.2.1",
"karma-phantomjs-launcher": "0.2.1", "karma-phantomjs-launcher": "1.0.0",
"load-grunt-tasks": "3.4.0", "load-grunt-tasks": "3.4.0",
"mocha": "2.3.4", "mocha": "2.3.4",
"phantomjs": "~2.1.3", "phantomjs-prebuilt": "^2.1.3",
"reflect-metadata": "0.1.2", "reflect-metadata": "0.1.2",
"rxjs": "5.0.0-beta.0", "rxjs": "5.0.0-beta.2",
"sass-lint": "^1.5.0", "sass-lint": "^1.5.0",
"systemjs": "0.19.20", "systemjs": "0.19.24"
"zone.js": "0.5.10"
}, },
"engines": { "engines": {
"node": "0.4.x", "node": "0.4.x",
@ -68,7 +68,6 @@
}, },
"license": "Apache-2.0", "license": "Apache-2.0",
"dependencies": { "dependencies": {
"es5-shim": "^4.4.1",
"grunt-jscs": "~1.5.x", "grunt-jscs": "~1.5.x",
"grunt-sass-lint": "^0.1.0", "grunt-sass-lint": "^0.1.0",
"grunt-sync": "^0.4.1", "grunt-sync": "^0.4.1",
@ -76,7 +75,7 @@
"lodash": "^2.4.1", "lodash": "^2.4.1",
"remarkable": "^1.6.2", "remarkable": "^1.6.2",
"sinon": "1.16.1", "sinon": "1.16.1",
"systemjs-builder": "^0.15.7", "systemjs-builder": "^0.15.13",
"tether": "^1.2.0", "tether": "^1.2.0",
"tether-drop": "^1.4.2", "tether-drop": "^1.4.2",
"tslint": "^3.4.0", "tslint": "^3.4.0",

View File

@ -1,17 +1,22 @@
#! /usr/bin/env bash #! /usr/bin/env bash
version=2.6.0 deb_ver=3.0.0-pre1459365183
rpm_ver=3.0.0-pre1459365183
wget https://grafanarel.s3.amazonaws.com/builds/grafana_${version}_amd64.deb #rpm_ver=3.0.0-1
package_cloud push grafana/stable/debian/jessie grafana_${version}_amd64.deb # wget https://grafanarel.s3.amazonaws.com/builds/grafana_${deb_ver}_amd64.deb
package_cloud push grafana/stable/debian/wheezy grafana_${version}_amd64.deb
package_cloud push grafana/testing/debian/jessie grafana_${version}_amd64.deb
package_cloud push grafana/testing/debian/wheezy grafana_${version}_amd64.deb
wget https://grafanarel.s3.amazonaws.com/builds/grafana-${version}-1.x86_64.rpm # package_cloud push grafana/stable/debian/jessie grafana_${deb_ver}_amd64.deb
# package_cloud push grafana/stable/debian/wheezy grafana_${deb_ver}_amd64.deb
package_cloud push grafana/testing/el/6 grafana-${version}-1.x86_64.rpm package_cloud push grafana/testing/debian/jessie grafana_${deb_ver}_amd64.deb
package_cloud push grafana/testing/el/7 grafana-${version}-1.x86_64.rpm package_cloud push grafana/testing/debian/wheezy grafana_${deb_ver}_amd64.deb
package_cloud push grafana/stable/el/7 grafana-${version}-1.x86_64.rpm
package_cloud push grafana/stable/el/6 grafana-${version}-1.x86_64.rpm wget https://grafanarel.s3.amazonaws.com/builds/grafana-${rpm_ver}.x86_64.rpm
package_cloud push grafana/testing/el/6 grafana-${rpm_ver}.x86_64.rpm
package_cloud push grafana/testing/el/7 grafana-${rpm_ver}.x86_64.rpm
# package_cloud push grafana/stable/el/7 grafana-${version}-1.x86_64.rpm
# package_cloud push grafana/stable/el/6 grafana-${version}-1.x86_64.rpm

View File

@ -4,6 +4,7 @@ import (
"github.com/go-macaron/binding" "github.com/go-macaron/binding"
"github.com/grafana/grafana/pkg/api/avatar" "github.com/grafana/grafana/pkg/api/avatar"
"github.com/grafana/grafana/pkg/api/dtos" "github.com/grafana/grafana/pkg/api/dtos"
"github.com/grafana/grafana/pkg/api/live"
"github.com/grafana/grafana/pkg/middleware" "github.com/grafana/grafana/pkg/middleware"
m "github.com/grafana/grafana/pkg/models" m "github.com/grafana/grafana/pkg/models"
"gopkg.in/macaron.v1" "gopkg.in/macaron.v1"
@ -35,6 +36,7 @@ func Register(r *macaron.Macaron) {
r.Get("/org/users/", reqSignedIn, Index) r.Get("/org/users/", reqSignedIn, Index)
r.Get("/org/apikeys/", reqSignedIn, Index) r.Get("/org/apikeys/", reqSignedIn, Index)
r.Get("/dashboard/import/", reqSignedIn, Index) r.Get("/dashboard/import/", reqSignedIn, Index)
r.Get("/admin", reqGrafanaAdmin, Index)
r.Get("/admin/settings", reqGrafanaAdmin, Index) r.Get("/admin/settings", reqGrafanaAdmin, Index)
r.Get("/admin/users", reqGrafanaAdmin, Index) r.Get("/admin/users", reqGrafanaAdmin, Index)
r.Get("/admin/users/create", reqGrafanaAdmin, Index) r.Get("/admin/users/create", reqGrafanaAdmin, Index)
@ -43,6 +45,8 @@ func Register(r *macaron.Macaron) {
r.Get("/admin/orgs/edit/:id", reqGrafanaAdmin, Index) r.Get("/admin/orgs/edit/:id", reqGrafanaAdmin, Index)
r.Get("/admin/stats", reqGrafanaAdmin, Index) r.Get("/admin/stats", reqGrafanaAdmin, Index)
r.Get("/styleguide", reqSignedIn, Index)
r.Get("/plugins", reqSignedIn, Index) r.Get("/plugins", reqSignedIn, Index)
r.Get("/plugins/:id/edit", reqSignedIn, Index) r.Get("/plugins/:id/edit", reqSignedIn, Index)
r.Get("/plugins/:id/page/:page", reqSignedIn, Index) r.Get("/plugins/:id/page/:page", reqSignedIn, Index)
@ -243,7 +247,13 @@ func Register(r *macaron.Macaron) {
avt := avatar.CacheServer() avt := avatar.CacheServer()
r.Get("/avatar/:hash", avt.ServeHTTP) r.Get("/avatar/:hash", avt.ServeHTTP)
// Websocket
liveConn := live.New()
r.Any("/ws", liveConn.Serve)
// streams
r.Post("/api/streams/push", reqSignedIn, bind(dtos.StreamMessage{}), liveConn.PushToStream)
InitAppPluginRoutes(r) InitAppPluginRoutes(r)
r.NotFound(NotFoundHandler)
} }

View File

@ -61,7 +61,7 @@ func NewReverseProxy(ds *m.DataSource, proxyPath string, targetUrl *url.URL) *ht
req.Header.Del("Set-Cookie") req.Header.Del("Set-Cookie")
} }
return &httputil.ReverseProxy{Director: director} return &httputil.ReverseProxy{Director: director, FlushInterval: time.Millisecond * 200}
} }
func getDatasource(id int64, orgId int64) (*m.DataSource, error) { func getDatasource(id int64, orgId int64) (*m.DataSource, error) {

View File

@ -40,7 +40,7 @@ func GetDataSources(c *middleware.Context) {
if plugin, exists := plugins.DataSources[ds.Type]; exists { if plugin, exists := plugins.DataSources[ds.Type]; exists {
dsItem.TypeLogoUrl = plugin.Info.Logos.Small dsItem.TypeLogoUrl = plugin.Info.Logos.Small
} else { } else {
dsItem.TypeLogoUrl = "public/img/icn-datasources.svg" dsItem.TypeLogoUrl = "public/img/icn-datasource.svg"
} }
result = append(result, dsItem) result = append(result, dsItem)

View File

@ -11,10 +11,10 @@ type PluginSetting struct {
Module string `json:"module"` Module string `json:"module"`
BaseUrl string `json:"baseUrl"` BaseUrl string `json:"baseUrl"`
Info *plugins.PluginInfo `json:"info"` Info *plugins.PluginInfo `json:"info"`
Pages []*plugins.AppPluginPage `json:"pages"`
Includes []*plugins.PluginInclude `json:"includes"` Includes []*plugins.PluginInclude `json:"includes"`
Dependencies *plugins.PluginDependencies `json:"dependencies"` Dependencies *plugins.PluginDependencies `json:"dependencies"`
JsonData map[string]interface{} `json:"jsonData"` JsonData map[string]interface{} `json:"jsonData"`
DefaultNavUrl string `json:"defaultNavUrl"`
} }
type PluginListItem struct { type PluginListItem struct {

13
pkg/api/dtos/stream.go Normal file
View File

@ -0,0 +1,13 @@
package dtos
import "encoding/json"
type StreamMessage struct {
Stream string `json:"stream"`
Series []StreamMessageSeries `json:"series"`
}
type StreamMessageSeries struct {
Name string `json:"name"`
Datapoints [][]json.Number `json:"datapoints"`
}

View File

@ -56,8 +56,8 @@ func setIndexViewData(c *middleware.Context) (*dtos.IndexViewData, error) {
if c.OrgRole == m.ROLE_ADMIN || c.OrgRole == m.ROLE_EDITOR { if c.OrgRole == m.ROLE_ADMIN || c.OrgRole == m.ROLE_EDITOR {
dashboardChildNavs = append(dashboardChildNavs, &dtos.NavLink{Divider: true}) dashboardChildNavs = append(dashboardChildNavs, &dtos.NavLink{Divider: true})
dashboardChildNavs = append(dashboardChildNavs, &dtos.NavLink{Text: "New", Url: setting.AppSubUrl + "/dashboard/new"}) dashboardChildNavs = append(dashboardChildNavs, &dtos.NavLink{Text: "New", Icon: "fa fa-plus", Url: setting.AppSubUrl + "/dashboard/new"})
dashboardChildNavs = append(dashboardChildNavs, &dtos.NavLink{Text: "Import", Url: setting.AppSubUrl + "/import/dashboard"}) dashboardChildNavs = append(dashboardChildNavs, &dtos.NavLink{Text: "Import", Icon: "fa fa-download", Url: setting.AppSubUrl + "/import/dashboard"})
} }
data.MainNavLinks = append(data.MainNavLinks, &dtos.NavLink{ data.MainNavLinks = append(data.MainNavLinks, &dtos.NavLink{
@ -88,22 +88,35 @@ func setIndexViewData(c *middleware.Context) (*dtos.IndexViewData, error) {
for _, plugin := range enabledPlugins.Apps { for _, plugin := range enabledPlugins.Apps {
if plugin.Pinned { if plugin.Pinned {
pageLink := &dtos.NavLink{ appLink := &dtos.NavLink{
Text: plugin.Name, Text: plugin.Name,
Url: setting.AppSubUrl + "/plugins/" + plugin.Id + "/edit", Url: plugin.DefaultNavUrl,
Img: plugin.Info.Logos.Small, Img: plugin.Info.Logos.Small,
} }
for _, page := range plugin.Pages { for _, include := range plugin.Includes {
if !page.SuppressNav { if include.Type == "page" && include.AddToNav {
pageLink.Children = append(pageLink.Children, &dtos.NavLink{ link := &dtos.NavLink{
Url: setting.AppSubUrl + "/plugins/" + plugin.Id + "/page/" + page.Slug, Url: setting.AppSubUrl + "/plugins/" + plugin.Id + "/page/" + include.Slug,
Text: page.Name, Text: include.Name,
}) }
appLink.Children = append(appLink.Children, link)
}
if include.Type == "dashboard" && include.AddToNav {
link := &dtos.NavLink{
Url: setting.AppSubUrl + "/dashboard/db/" + include.Slug,
Text: include.Name,
}
appLink.Children = append(appLink.Children, link)
} }
} }
data.MainNavLinks = append(data.MainNavLinks, pageLink) if c.OrgRole == m.ROLE_ADMIN {
appLink.Children = append(appLink.Children, &dtos.NavLink{Divider: true})
appLink.Children = append(appLink.Children, &dtos.NavLink{Text: "Plugin Config", Icon: "fa fa-cog", Url: setting.AppSubUrl + "/plugins/" + plugin.Id + "/edit"})
}
data.MainNavLinks = append(data.MainNavLinks, appLink)
} }
} }
@ -113,10 +126,10 @@ func setIndexViewData(c *middleware.Context) (*dtos.IndexViewData, error) {
Icon: "fa fa-fw fa-cogs", Icon: "fa fa-fw fa-cogs",
Url: setting.AppSubUrl + "/admin", Url: setting.AppSubUrl + "/admin",
Children: []*dtos.NavLink{ Children: []*dtos.NavLink{
{Text: "Global Users", Icon: "fa fa-fw fa-cogs", Url: setting.AppSubUrl + "/admin/users"}, {Text: "Global Users", Url: setting.AppSubUrl + "/admin/users"},
{Text: "Global Orgs", Icon: "fa fa-fw fa-cogs", Url: setting.AppSubUrl + "/admin/orgs"}, {Text: "Global Orgs", Url: setting.AppSubUrl + "/admin/orgs"},
{Text: "Server Settings", Icon: "fa fa-fw fa-cogs", Url: setting.AppSubUrl + "/admin/settings"}, {Text: "Server Settings", Url: setting.AppSubUrl + "/admin/settings"},
{Text: "Server Stats", Icon: "fa-fw fa-cogs", Url: setting.AppSubUrl + "/admin/stats"}, {Text: "Server Stats", Url: setting.AppSubUrl + "/admin/stats"},
}, },
}) })
} }

118
pkg/api/live/conn.go Normal file
View File

@ -0,0 +1,118 @@
package live
import (
"net/http"
"time"
"github.com/gorilla/websocket"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/log"
)
const (
// Time allowed to write a message to the peer.
writeWait = 10 * time.Second
// Time allowed to read the next pong message from the peer.
pongWait = 60 * time.Second
// Send pings to peer with this period. Must be less than pongWait.
pingPeriod = (pongWait * 9) / 10
// Maximum message size allowed from peer.
maxMessageSize = 512
)
var upgrader = websocket.Upgrader{
ReadBufferSize: 1024,
WriteBufferSize: 1024,
CheckOrigin: func(r *http.Request) bool {
return true
},
}
type connection struct {
ws *websocket.Conn
send chan []byte
}
func newConnection(ws *websocket.Conn) *connection {
return &connection{
send: make(chan []byte, 256),
ws: ws,
}
}
func (c *connection) readPump() {
defer func() {
h.unregister <- c
c.ws.Close()
}()
c.ws.SetReadLimit(maxMessageSize)
c.ws.SetReadDeadline(time.Now().Add(pongWait))
c.ws.SetPongHandler(func(string) error { c.ws.SetReadDeadline(time.Now().Add(pongWait)); return nil })
for {
_, message, err := c.ws.ReadMessage()
if err != nil {
if websocket.IsUnexpectedCloseError(err, websocket.CloseGoingAway) {
log.Info("error: %v", err)
}
break
}
c.handleMessage(message)
}
}
func (c *connection) handleMessage(message []byte) {
json, err := simplejson.NewJson(message)
if err != nil {
log.Error(3, "Unreadable message on websocket channel:", err)
return
}
msgType := json.Get("action").MustString()
streamName := json.Get("stream").MustString()
if len(streamName) == 0 {
log.Error(3, "Not allowed to subscribe to empty stream name")
return
}
switch msgType {
case "subscribe":
h.subChannel <- &streamSubscription{name: streamName, conn: c}
case "unsubscribe":
h.subChannel <- &streamSubscription{name: streamName, conn: c, remove: true}
}
}
func (c *connection) write(mt int, payload []byte) error {
c.ws.SetWriteDeadline(time.Now().Add(writeWait))
return c.ws.WriteMessage(mt, payload)
}
// writePump pumps messages from the hub to the websocket connection.
func (c *connection) writePump() {
ticker := time.NewTicker(pingPeriod)
defer func() {
ticker.Stop()
c.ws.Close()
}()
for {
select {
case message, ok := <-c.send:
if !ok {
c.write(websocket.CloseMessage, []byte{})
return
}
if err := c.write(websocket.TextMessage, message); err != nil {
return
}
case <-ticker.C:
if err := c.write(websocket.PingMessage, []byte{}); err != nil {
return
}
}
}
}
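Based on handleMessage above, a client subscribes by sending a JSON object with an `action` of `subscribe` or `unsubscribe` and a `stream` name. A minimal sketch of a Go client doing this (the dial URL and stream name are made-up examples, not values defined in this commit):

```go
package main

import (
	"log"

	"github.com/gorilla/websocket"
)

func main() {
	// Connect to the /ws endpoint registered in api.Register (URL is illustrative).
	conn, _, err := websocket.DefaultDialer.Dial("ws://localhost:3000/ws", nil)
	if err != nil {
		log.Fatal("dial:", err)
	}
	defer conn.Close()

	// handleMessage reads the "action" and "stream" fields from this payload.
	sub := map[string]string{"action": "subscribe", "stream": "my-metrics"}
	if err := conn.WriteJSON(sub); err != nil {
		log.Fatal("subscribe:", err)
	}

	// Messages pushed to the stream are fanned out by the hub and arrive here.
	for {
		_, msg, err := conn.ReadMessage()
		if err != nil {
			return
		}
		log.Printf("stream message: %s", msg)
	}
}
```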

94
pkg/api/live/hub.go Normal file
View File

@ -0,0 +1,94 @@
package live
import (
"github.com/grafana/grafana/pkg/api/dtos"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/log"
)
type hub struct {
connections map[*connection]bool
streams map[string]map[*connection]bool
register chan *connection
unregister chan *connection
streamChannel chan *dtos.StreamMessage
subChannel chan *streamSubscription
}
type streamSubscription struct {
conn *connection
name string
remove bool
}
var h = hub{
connections: make(map[*connection]bool),
streams: make(map[string]map[*connection]bool),
register: make(chan *connection),
unregister: make(chan *connection),
streamChannel: make(chan *dtos.StreamMessage),
subChannel: make(chan *streamSubscription),
}
func (h *hub) removeConnection() {
}
func (h *hub) run() {
for {
select {
case c := <-h.register:
h.connections[c] = true
log.Info("Live: New connection (Total count: %v)", len(h.connections))
case c := <-h.unregister:
if _, ok := h.connections[c]; ok {
log.Info("Live: Closing Connection (Total count: %v)", len(h.connections))
delete(h.connections, c)
close(c.send)
}
// handle stream subscriptions
case sub := <-h.subChannel:
log.Info("Live: Subscribing to: %v, remove: %v", sub.name, sub.remove)
subscribers, exists := h.streams[sub.name]
// handle unsubscribe
if exists && sub.remove {
delete(subscribers, sub.conn)
continue
}
if !exists {
subscribers = make(map[*connection]bool)
h.streams[sub.name] = subscribers
}
subscribers[sub.conn] = true
// handle stream messages
case message := <-h.streamChannel:
subscribers, exists := h.streams[message.Stream]
if !exists || len(subscribers) == 0 {
log.Info("Live: Message to stream without subscribers: %v", message.Stream)
continue
}
messageBytes, _ := simplejson.NewFromAny(message).Encode()
for sub := range subscribers {
// check if channel is open
if _, ok := h.connections[sub]; !ok {
delete(subscribers, sub)
continue
}
select {
case sub.send <- messageBytes:
default:
close(sub.send)
delete(h.connections, sub)
delete(subscribers, sub)
}
}
}
}
}

36
pkg/api/live/live.go Normal file
View File

@ -0,0 +1,36 @@
package live
import (
"net/http"
"github.com/grafana/grafana/pkg/api/dtos"
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/middleware"
)
type LiveConn struct {
}
func New() *LiveConn {
go h.run()
return &LiveConn{}
}
func (lc *LiveConn) Serve(w http.ResponseWriter, r *http.Request) {
log.Info("Live: Upgrading to WebSocket")
ws, err := upgrader.Upgrade(w, r, nil)
if err != nil {
log.Error(3, "Live: Failed to upgrade connection to WebSocket", err)
return
}
c := newConnection(ws)
h.register <- c
go c.writePump()
c.readPump()
}
func (lc *LiveConn) PushToStream(c *middleware.Context, message dtos.StreamMessage) {
h.streamChannel <- &message
c.JsonOK("Message recevived")
}
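PushToStream is bound to `POST /api/streams/push` in api.Register and takes a dtos.StreamMessage body. A rough sketch of pushing one datapoint to subscribers of a stream follows; the host, stream and series names, and the value/timestamp ordering inside `datapoints` are assumptions, and the route sits behind reqSignedIn, so a real call needs credentials that this sketch omits:

```go
package main

import (
	"bytes"
	"fmt"
	"log"
	"net/http"
)

func main() {
	// Matches the dtos.StreamMessage shape: a stream name plus series with datapoints.
	// The [value, timestamp] ordering is an assumption, not spelled out in this commit.
	body := []byte(`{
		"stream": "my-metrics",
		"series": [
			{"name": "cpu", "datapoints": [[42.5, 1459365183000]]}
		]
	}`)

	resp, err := http.Post("http://localhost:3000/api/streams/push",
		"application/json", bytes.NewReader(body))
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status)
}
```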

View File

@ -80,10 +80,7 @@ func GetPluginSettingById(c *middleware.Context) Response {
Includes: def.Includes, Includes: def.Includes,
BaseUrl: def.BaseUrl, BaseUrl: def.BaseUrl,
Module: def.Module, Module: def.Module,
} DefaultNavUrl: def.DefaultNavUrl,
if app, exists := plugins.Apps[pluginId]; exists {
dto.Pages = app.Pages
} }
query := m.GetPluginSettingByIdQuery{PluginId: pluginId, OrgId: c.OrgId} query := m.GetPluginSettingByIdQuery{PluginId: pluginId, OrgId: c.OrgId}

View File

@ -1,10 +1,11 @@
package api package api
import ( import (
"strconv"
"github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/middleware" "github.com/grafana/grafana/pkg/middleware"
"github.com/grafana/grafana/pkg/services/search" "github.com/grafana/grafana/pkg/services/search"
"strconv"
) )
func Search(c *middleware.Context) { func Search(c *middleware.Context) {

View File

@ -22,7 +22,7 @@ func runCommand(command func(commandLine CommandLine) error) func(context *cli.C
} }
} }
var Commands = []cli.Command{ var pluginCommands = []cli.Command{
{ {
Name: "install", Name: "install",
Usage: "install <plugin name>", Usage: "install <plugin name>",
@ -49,3 +49,11 @@ var Commands = []cli.Command{
Action: runCommand(removeCommand), Action: runCommand(removeCommand),
}, },
} }
var Commands = []cli.Command{
{
Name: "plugins",
Usage: "Manage plugins for grafana",
Subcommands: pluginCommands,
},
}

View File

@ -5,10 +5,6 @@ import (
"bytes" "bytes"
"errors" "errors"
"fmt" "fmt"
"github.com/fatih/color"
"github.com/grafana/grafana/pkg/cmd/grafana-cli/log"
m "github.com/grafana/grafana/pkg/cmd/grafana-cli/models"
s "github.com/grafana/grafana/pkg/cmd/grafana-cli/services"
"io" "io"
"io/ioutil" "io/ioutil"
"net/http" "net/http"
@ -16,6 +12,11 @@ import (
"path" "path"
"regexp" "regexp"
"strings" "strings"
"github.com/fatih/color"
"github.com/grafana/grafana/pkg/cmd/grafana-cli/log"
m "github.com/grafana/grafana/pkg/cmd/grafana-cli/models"
s "github.com/grafana/grafana/pkg/cmd/grafana-cli/services"
) )
func validateInput(c CommandLine, pluginFolder string) error { func validateInput(c CommandLine, pluginFolder string) error {
@ -24,17 +25,16 @@ func validateInput(c CommandLine, pluginFolder string) error {
return errors.New("please specify plugin to install") return errors.New("please specify plugin to install")
} }
pluginDir := c.GlobalString("path") pluginsDir := c.GlobalString("pluginsDir")
if pluginDir == "" { if pluginsDir == "" {
return errors.New("missing path flag") return errors.New("missing pluginsDir flag")
} }
fileInfo, err := os.Stat(pluginDir) fileInfo, err := os.Stat(pluginsDir)
if err != nil { if err != nil {
if err = os.MkdirAll(pluginDir, os.ModePerm); err != nil { if err = os.MkdirAll(pluginsDir, os.ModePerm); err != nil {
return errors.New("path is not a directory") return errors.New(fmt.Sprintf("pluginsDir (%s) is not a directory", pluginsDir))
} }
return nil return nil
} }
@ -46,7 +46,7 @@ func validateInput(c CommandLine, pluginFolder string) error {
} }
func installCommand(c CommandLine) error { func installCommand(c CommandLine) error {
pluginFolder := c.GlobalString("path") pluginFolder := c.GlobalString("pluginsDir")
if err := validateInput(c, pluginFolder); err != nil { if err := validateInput(c, pluginFolder); err != nil {
return err return err
} }
@ -59,7 +59,7 @@ func installCommand(c CommandLine) error {
func InstallPlugin(pluginName, version string, c CommandLine) error { func InstallPlugin(pluginName, version string, c CommandLine) error {
plugin, err := s.GetPlugin(pluginName, c.GlobalString("repo")) plugin, err := s.GetPlugin(pluginName, c.GlobalString("repo"))
pluginFolder := c.GlobalString("path") pluginFolder := c.GlobalString("pluginsDir")
if err != nil { if err != nil {
return err return err
} }

View File

@ -3,6 +3,7 @@ package commands
import ( import (
"errors" "errors"
"fmt" "fmt"
"github.com/fatih/color" "github.com/fatih/color"
"github.com/grafana/grafana/pkg/cmd/grafana-cli/log" "github.com/grafana/grafana/pkg/cmd/grafana-cli/log"
m "github.com/grafana/grafana/pkg/cmd/grafana-cli/models" m "github.com/grafana/grafana/pkg/cmd/grafana-cli/models"
@ -31,7 +32,7 @@ var validateLsCommand = func(pluginDir string) error {
} }
func lsCommand(c CommandLine) error { func lsCommand(c CommandLine) error {
pluginDir := c.GlobalString("path") pluginDir := c.GlobalString("pluginsDir")
if err := validateLsCommand(pluginDir); err != nil { if err := validateLsCommand(pluginDir); err != nil {
return err return err
} }

View File

@ -2,6 +2,7 @@ package commands
import ( import (
"errors" "errors"
"github.com/grafana/grafana/pkg/cmd/grafana-cli/log" "github.com/grafana/grafana/pkg/cmd/grafana-cli/log"
m "github.com/grafana/grafana/pkg/cmd/grafana-cli/models" m "github.com/grafana/grafana/pkg/cmd/grafana-cli/models"
services "github.com/grafana/grafana/pkg/cmd/grafana-cli/services" services "github.com/grafana/grafana/pkg/cmd/grafana-cli/services"
@ -11,7 +12,7 @@ var getPluginss func(path string) []m.InstalledPlugin = services.GetLocalPlugins
var removePlugin func(pluginPath, id string) error = services.RemoveInstalledPlugin var removePlugin func(pluginPath, id string) error = services.RemoveInstalledPlugin
func removeCommand(c CommandLine) error { func removeCommand(c CommandLine) error {
pluginPath := c.GlobalString("path") pluginPath := c.GlobalString("pluginsDir")
localPlugins := getPluginss(pluginPath) localPlugins := getPluginss(pluginPath)
log.Info("remove!\n") log.Info("remove!\n")

View File

@ -28,9 +28,9 @@ func ShouldUpgrade(installed string, remote m.Plugin) bool {
} }
func upgradeAllCommand(c CommandLine) error { func upgradeAllCommand(c CommandLine) error {
pluginDir := c.GlobalString("path") pluginsDir := c.GlobalString("pluginsDir")
localPlugins := s.GetLocalPlugins(pluginDir) localPlugins := s.GetLocalPlugins(pluginsDir)
remotePlugins, err := s.ListAllPlugins(c.GlobalString("repo")) remotePlugins, err := s.ListAllPlugins(c.GlobalString("repo"))
@ -53,7 +53,7 @@ func upgradeAllCommand(c CommandLine) error {
for _, p := range pluginsToUpgrade { for _, p := range pluginsToUpgrade {
log.Infof("Upgrading %v \n", p.Id) log.Infof("Upgrading %v \n", p.Id)
s.RemoveInstalledPlugin(pluginDir, p.Id) s.RemoveInstalledPlugin(pluginsDir, p.Id)
InstallPlugin(p.Id, "", c) InstallPlugin(p.Id, "", c)
} }

View File

@ -5,10 +5,10 @@ import (
) )
func upgradeCommand(c CommandLine) error { func upgradeCommand(c CommandLine) error {
pluginDir := c.GlobalString("path") pluginsDir := c.GlobalString("pluginsDir")
pluginName := c.Args().First() pluginName := c.Args().First()
localPlugin, err := s.ReadPlugin(pluginDir, pluginName) localPlugin, err := s.ReadPlugin(pluginsDir, pluginName)
if err != nil { if err != nil {
return err return err
@ -23,7 +23,7 @@ func upgradeCommand(c CommandLine) error {
for _, v := range remotePlugins.Plugins { for _, v := range remotePlugins.Plugins {
if localPlugin.Id == v.Id { if localPlugin.Id == v.Id {
if ShouldUpgrade(localPlugin.Info.Version, v) { if ShouldUpgrade(localPlugin.Info.Version, v) {
s.RemoveInstalledPlugin(pluginDir, pluginName) s.RemoveInstalledPlugin(pluginsDir, pluginName)
return InstallPlugin(localPlugin.Id, "", c) return InstallPlugin(localPlugin.Id, "", c)
} }
} }

View File

@ -2,20 +2,17 @@ package main
import ( import (
"fmt" "fmt"
"os"
"runtime"
"github.com/codegangsta/cli" "github.com/codegangsta/cli"
"github.com/grafana/grafana/pkg/cmd/grafana-cli/commands" "github.com/grafana/grafana/pkg/cmd/grafana-cli/commands"
"github.com/grafana/grafana/pkg/cmd/grafana-cli/log" "github.com/grafana/grafana/pkg/cmd/grafana-cli/log"
"os"
"runtime"
) )
var version = "master" var version = "master"
func getGrafanaPluginPath() string { func getGrafanaPluginDir() string {
if os.Getenv("GF_PLUGIN_DIR") != "" {
return os.Getenv("GF_PLUGIN_DIR")
}
os := runtime.GOOS os := runtime.GOOS
if os == "windows" { if os == "windows" {
return "C:\\opt\\grafana\\plugins" return "C:\\opt\\grafana\\plugins"
@ -29,19 +26,22 @@ func main() {
app := cli.NewApp() app := cli.NewApp()
app.Name = "Grafana cli" app.Name = "Grafana cli"
app.Author = "raintank" app.Usage = ""
app.Author = "Grafana Project"
app.Email = "https://github.com/grafana/grafana" app.Email = "https://github.com/grafana/grafana"
app.Version = version app.Version = version
app.Flags = []cli.Flag{ app.Flags = []cli.Flag{
cli.StringFlag{ cli.StringFlag{
Name: "path", Name: "pluginsDir",
Usage: "path to the grafana installation", Usage: "path to the grafana plugin directory",
Value: getGrafanaPluginPath(), Value: getGrafanaPluginDir(),
EnvVar: "GF_PLUGIN_DIR",
}, },
cli.StringFlag{ cli.StringFlag{
Name: "repo", Name: "repo",
Usage: "url to the plugin repository", Usage: "url to the plugin repository",
Value: "https://grafana-net.raintank.io/api/plugins", Value: "https://grafana.net/api/plugins",
EnvVar: "GF_PLUGIN_REPO",
}, },
cli.BoolFlag{ cli.BoolFlag{
Name: "debug, d", Name: "debug, d",

View File

@ -4,24 +4,27 @@ import (
"encoding/json" "encoding/json"
"errors" "errors"
"fmt" "fmt"
"path"
"github.com/franela/goreq" "github.com/franela/goreq"
"github.com/grafana/grafana/pkg/cmd/grafana-cli/log" "github.com/grafana/grafana/pkg/cmd/grafana-cli/log"
m "github.com/grafana/grafana/pkg/cmd/grafana-cli/models" m "github.com/grafana/grafana/pkg/cmd/grafana-cli/models"
"path"
) )
var IoHelper m.IoUtil = IoUtilImp{} var IoHelper m.IoUtil = IoUtilImp{}
func ListAllPlugins(repoUrl string) (m.PluginRepo, error) { func ListAllPlugins(repoUrl string) (m.PluginRepo, error) {
fullUrl := repoUrl + "/repo" fullUrl := repoUrl + "/repo"
res, _ := goreq.Request{Uri: fullUrl, MaxRedirects: 3}.Do() res, err := goreq.Request{Uri: fullUrl, MaxRedirects: 3}.Do()
if err != nil {
return m.PluginRepo{}, err
}
if res.StatusCode != 200 { if res.StatusCode != 200 {
return m.PluginRepo{}, fmt.Errorf("Could not access %s statuscode %v", fullUrl, res.StatusCode) return m.PluginRepo{}, fmt.Errorf("Could not access %s statuscode %v", fullUrl, res.StatusCode)
} }
var resp m.PluginRepo var resp m.PluginRepo
err := res.Body.FromJsonTo(&resp) err = res.Body.FromJsonTo(&resp)
if err != nil { if err != nil {
return m.PluginRepo{}, errors.New("Could not load plugin data") return m.PluginRepo{}, errors.New("Could not load plugin data")
} }
@ -66,9 +69,7 @@ func RemoveInstalledPlugin(pluginPath, id string) error {
} }
func GetPlugin(pluginId, repoUrl string) (m.Plugin, error) { func GetPlugin(pluginId, repoUrl string) (m.Plugin, error) {
resp, err := ListAllPlugins(repoUrl) resp, _ := ListAllPlugins(repoUrl)
if err != nil {
}
for _, i := range resp.Plugins { for _, i := range resp.Plugins {
if i.Id == pluginId { if i.Id == pluginId {

View File

@ -318,7 +318,12 @@ func (a *ldapAuther) searchForUser(username string) (*ldapUserInfo, error) {
// If we are using a POSIX LDAP schema it won't support memberOf, so we manually search the groups // If we are using a POSIX LDAP schema it won't support memberOf, so we manually search the groups
var groupSearchResult *ldap.SearchResult var groupSearchResult *ldap.SearchResult
for _, groupSearchBase := range a.server.GroupSearchBaseDNs { for _, groupSearchBase := range a.server.GroupSearchBaseDNs {
filter := strings.Replace(a.server.GroupSearchFilter, "%s", username, -1) var filter_replace string
filter_replace = getLdapAttr(a.server.GroupSearchFilterUserAttribute, searchResult)
if a.server.GroupSearchFilterUserAttribute == "" {
filter_replace = getLdapAttr(a.server.Attr.Username, searchResult)
}
filter := strings.Replace(a.server.GroupSearchFilter, "%s", filter_replace, -1)
if ldapCfg.VerboseLogging { if ldapCfg.VerboseLogging {
log.Info("LDAP: Searching for user's groups: %s", filter) log.Info("LDAP: Searching for user's groups: %s", filter)

View File

@ -28,6 +28,7 @@ type LdapServerConf struct {
SearchBaseDNs []string `toml:"search_base_dns"` SearchBaseDNs []string `toml:"search_base_dns"`
GroupSearchFilter string `toml:"group_search_filter"` GroupSearchFilter string `toml:"group_search_filter"`
GroupSearchFilterUserAttribute string `toml:"group_search_filter_user_attribute"`
GroupSearchBaseDNs []string `toml:"group_search_base_dns"` GroupSearchBaseDNs []string `toml:"group_search_base_dns"`
LdapGroups []*LdapGroupToOrgRole `toml:"group_mappings"` LdapGroups []*LdapGroupToOrgRole `toml:"group_mappings"`

View File

@ -6,16 +6,9 @@ import (
"github.com/gosimple/slug" "github.com/gosimple/slug"
"github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/setting"
) )
type AppPluginPage struct {
Name string `json:"name"`
Slug string `json:"slug"`
Component string `json:"component"`
Role models.RoleType `json:"role"`
SuppressNav bool `json:"suppressNav"`
}
type AppPluginCss struct { type AppPluginCss struct {
Light string `json:"light"` Light string `json:"light"`
Dark string `json:"dark"` Dark string `json:"dark"`
@ -23,7 +16,6 @@ type AppPluginCss struct {
type AppPlugin struct { type AppPlugin struct {
FrontendPluginBase FrontendPluginBase
Pages []*AppPluginPage `json:"pages"`
Routes []*AppPluginRoute `json:"routes"` Routes []*AppPluginRoute `json:"routes"`
FoundChildPlugins []*PluginInclude `json:"-"` FoundChildPlugins []*PluginInclude `json:"-"`
@ -84,10 +76,18 @@ func (app *AppPlugin) initApp() {
} }
} }
app.DefaultNavUrl = setting.AppSubUrl + "/plugins/" + app.Id + "/edit"
// slugify pages // slugify pages
for _, page := range app.Pages { for _, include := range app.Includes {
if page.Slug == "" { if include.Slug == "" {
page.Slug = slug.Make(page.Name) include.Slug = slug.Make(include.Name)
}
if include.Type == "page" && include.DefaultNav {
app.DefaultNavUrl = setting.AppSubUrl + "/plugins/" + app.Id + "/page/" + include.Slug
}
if include.Type == "dashboard" && include.DefaultNav {
app.DefaultNavUrl = setting.AppSubUrl + "/dashboard/db/" + include.Slug
} }
} }
} }

View File

@ -7,6 +7,7 @@ import (
"strings" "strings"
"github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/setting"
) )
@ -41,6 +42,7 @@ type PluginBase struct {
IncludedInAppId string `json:"-"` IncludedInAppId string `json:"-"`
PluginDir string `json:"-"` PluginDir string `json:"-"`
DefaultNavUrl string `json:"-"`
// cache for readme file contents // cache for readme file contents
Readme []byte `json:"-"` Readme []byte `json:"-"`
@ -77,7 +79,13 @@ type PluginInclude struct {
Name string `json:"name"` Name string `json:"name"`
Path string `json:"path"` Path string `json:"path"`
Type string `json:"type"` Type string `json:"type"`
Id string `json:"id"` Component string `json:"component"`
Role models.RoleType `json:"role"`
AddToNav bool `json:"addToNav"`
DefaultNav bool `json:"defaultNav"`
Slug string `json:"slug"`
Id string `json:"-"`
} }
type PluginDependencyItem struct { type PluginDependencyItem struct {

View File

@ -0,0 +1,6 @@
///<reference path="../headers/common.d.ts" />
import {Emitter} from './utils/emitter';
var appEvents = new Emitter();
export default appEvents;

View File

@ -18,7 +18,7 @@ export function infoPopover() {
var offset = attrs.offset || '0 -10px'; var offset = attrs.offset || '0 -10px';
var position = attrs.position || 'right middle'; var position = attrs.position || 'right middle';
var classes = 'drop-help'; var classes = 'drop-help drop-hide-out-of-bounds';
if (attrs.wide) { if (attrs.wide) {
classes += ' drop-wide'; classes += ' drop-wide';
} }
@ -40,14 +40,6 @@ export function infoPopover() {
} }
}); });
// inputElem.on('focus.popover', function() {
// drop.open();
// });
//
// inputElem.on('blur.popover', function() {
// close();
// });
scope.$on('$destroy', function() { scope.$on('$destroy', function() {
drop.destroy(); drop.destroy();
}); });

View File

@ -8,9 +8,10 @@
<i class="fa fa-chevron-left"></i> <i class="fa fa-chevron-left"></i>
</a> </a>
<a href="{{::ctrl.titleUrl}}" class="navbar-page-btn" ng-show="ctrl.title"> <a href="{{ctrl.titleUrl}}" class="navbar-page-btn" ng-show="ctrl.title">
<i class="{{::ctrl.icon}}"></i> <i class="{{ctrl.icon}}" ng-show="ctrl.icon"></i>
{{::ctrl.title}} <img ng-src="{{ctrl.iconUrl}}" ng-show="ctrl.iconUrl">
{{ctrl.title}}
</a> </a>
<div ng-transclude></div> <div ng-transclude></div>

View File

@ -22,6 +22,7 @@ export function navbarDirective() {
scope: { scope: {
title: "@", title: "@",
titleUrl: "@", titleUrl: "@",
iconUrl: "@",
}, },
link: function(scope, elem, attrs, ctrl) { link: function(scope, elem, attrs, ctrl) {
ctrl.icon = attrs.icon; ctrl.icon = attrs.icon;

View File

@ -40,7 +40,10 @@
</a> </a>
<ul class="dropdown-menu" role="menu" ng-if="::item.children"> <ul class="dropdown-menu" role="menu" ng-if="::item.children">
<li ng-repeat="child in ::item.children" ng-class="{divider: child.divider}"> <li ng-repeat="child in ::item.children" ng-class="{divider: child.divider}">
<a href="{{::child.url}}">{{::child.text}}</a> <a href="{{::child.url}}">
<i class="{{::child.icon}}" ng-show="::child.icon"></i>
{{::child.text}}
</a>
</li> </li>
</ul> </ul>
</li> </li>

View File

@ -86,7 +86,7 @@ export class SideMenuCtrl {
switchOrg(orgId) { switchOrg(orgId) {
this.backendSrv.post('/api/user/using/' + orgId).then(() => { this.backendSrv.post('/api/user/using/' + orgId).then(() => {
window.location.href = window.location.href; window.location.href = `${config.appSubUrl}/`;
}); });
}; };
} }

View File

@ -0,0 +1,74 @@
///<reference path="../../headers/common.d.ts" />
import config from 'app/core/config';
import _ from 'lodash';
import $ from 'jquery';
import coreModule from 'app/core/core_module';
import Drop from 'tether-drop';
var template = `
<label for="check-{{ctrl.id}}" class="gf-form-label {{ctrl.labelClass}} pointer">{{ctrl.label}}</label>
<div class="gf-form-switch {{ctrl.switchClass}}" ng-if="ctrl.show">
<input id="check-{{ctrl.id}}" type="checkbox" ng-model="ctrl.checked" ng-change="ctrl.internalOnChange()">
<label for="check-{{ctrl.id}}" data-on="Yes" data-off="No"></label>
</div>
`;
export class SwitchCtrl {
onChange: any;
checked: any;
show: any;
id: any;
/** @ngInject */
constructor($scope) {
this.show = true;
this.id = $scope.$id;
}
internalOnChange() {
return new Promise(resolve => {
setTimeout(() => {
this.onChange();
resolve();
});
});
}
}
export function switchDirective() {
return {
restrict: 'E',
controller: SwitchCtrl,
controllerAs: 'ctrl',
bindToController: true,
scope: {
checked: "=",
label: "@",
labelClass: "@",
tooltip: "@",
switchClass: "@",
onChange: "&",
},
template: template,
link: (scope, elem) => {
if (scope.ctrl.tooltip) {
var drop = new Drop({
target: elem[0],
content: scope.ctrl.tooltip,
position: "right middle",
classes: 'drop-help',
openOn: 'hover',
hoverOpenDelay: 400,
});
scope.$on('$destroy', function() {
drop.destroy();
});
}
}
};
}
coreModule.directive('gfFormSwitch', switchDirective);

View File

@ -28,12 +28,17 @@ import {infoPopover} from './components/info_popover';
import {colorPicker} from './components/colorpicker'; import {colorPicker} from './components/colorpicker';
import {navbarDirective} from './components/navbar/navbar'; import {navbarDirective} from './components/navbar/navbar';
import {arrayJoin} from './directives/array_join'; import {arrayJoin} from './directives/array_join';
import {liveSrv} from './live/live_srv';
import {Emitter} from './utils/emitter';
import {layoutSelector} from './components/layout_selector/layout_selector'; import {layoutSelector} from './components/layout_selector/layout_selector';
import {switchDirective} from './components/switch';
import 'app/core/controllers/all'; import 'app/core/controllers/all';
import 'app/core/services/all'; import 'app/core/services/all';
import 'app/core/routes/routes'; import 'app/core/routes/routes';
import './filters/filters'; import './filters/filters';
import coreModule from './core_module'; import coreModule from './core_module';
import appEvents from './app_events';
export { export {
arrayJoin, arrayJoin,
@ -43,6 +48,10 @@ export {
navbarDirective, navbarDirective,
searchDirective, searchDirective,
colorPicker, colorPicker,
liveSrv,
layoutSelector, layoutSelector,
infoPopover switchDirective,
infoPopover,
Emitter,
appEvents,
}; };

View File

@ -119,4 +119,118 @@ function (_, $, coreModule) {
} }
}; };
}); });
coreModule.default.directive('dropdownTypeahead2', function($compile) {
var inputTemplate = '<input type="text"'+
' class="gf-form-input"' +
' spellcheck="false" style="display:none"></input>';
var buttonTemplate = '<a class="gf-form-input dropdown-toggle"' +
' tabindex="1" gf-dropdown="menuItems" data-toggle="dropdown"' +
' data-placement="top"><i class="fa fa-plus"></i></a>';
return {
scope: {
menuItems: "=dropdownTypeahead2",
dropdownTypeaheadOnSelect: "&dropdownTypeaheadOnSelect",
model: '=ngModel'
},
link: function($scope, elem, attrs) {
var $input = $(inputTemplate);
var $button = $(buttonTemplate);
$input.appendTo(elem);
$button.appendTo(elem);
if (attrs.linkText) {
$button.html(attrs.linkText);
}
if (attrs.ngModel) {
$scope.$watch('model', function(newValue) {
_.each($scope.menuItems, function(item) {
_.each(item.submenu, function(subItem) {
if (subItem.value === newValue) {
$button.html(subItem.text);
}
});
});
});
}
var typeaheadValues = _.reduce($scope.menuItems, function(memo, value, index) {
if (!value.submenu) {
value.click = 'menuItemSelected(' + index + ')';
memo.push(value.text);
} else {
_.each(value.submenu, function(item, subIndex) {
item.click = 'menuItemSelected(' + index + ',' + subIndex + ')';
memo.push(value.text + ' ' + item.text);
});
}
return memo;
}, []);
$scope.menuItemSelected = function(index, subIndex) {
var menuItem = $scope.menuItems[index];
var payload = {$item: menuItem};
if (menuItem.submenu && subIndex !== void 0) {
payload.$subItem = menuItem.submenu[subIndex];
}
$scope.dropdownTypeaheadOnSelect(payload);
};
$input.attr('data-provide', 'typeahead');
$input.typeahead({
source: typeaheadValues,
minLength: 1,
items: 10,
updater: function (value) {
var result = {};
_.each($scope.menuItems, function(menuItem) {
_.each(menuItem.submenu, function(submenuItem) {
if (value === (menuItem.text + ' ' + submenuItem.text)) {
result.$subItem = submenuItem;
result.$item = menuItem;
}
});
});
if (result.$item) {
$scope.$apply(function() {
$scope.dropdownTypeaheadOnSelect(result);
});
}
$input.trigger('blur');
return '';
}
});
$button.click(function() {
$button.hide();
$input.show();
$input.focus();
});
$input.keyup(function() {
elem.toggleClass('open', $input.val() === '');
});
$input.blur(function() {
$input.hide();
$input.val('');
$button.show();
$button.focus();
// clicking the function dropdown menu won't
// work if you remove class at once
setTimeout(function() {
elem.removeClass('open');
}, 200);
});
$compile(elem.contents())($scope);
}
};
});
}); });
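For illustration only (not part of this diff; all names below are invented), a minimal TypeScript sketch of the menu-item shape the new dropdownTypeahead2 directive expects and of the payload its on-select callback receives:

// Hypothetical controller, assuming it is bound as ctrl in a template that uses
//   dropdown-typeahead2="ctrl.menuItems"
//   dropdown-typeahead-on-select="ctrl.onSelect($item, $subItem)"
export class ExampleOptionsCtrl {
  // items with a submenu are expanded into "<item> <subitem>" typeahead entries,
  // items without one are selectable directly
  menuItems = [
    {text: 'Fields', submenu: [
      {text: 'mean', value: 'mean'},
      {text: 'max', value: 'max'},
    ]},
    {text: 'Custom'},
  ];

  onSelect($item, $subItem) {
    // $subItem is only set when a submenu entry was picked
    console.log('selected', $item.text, $subItem && $subItem.value);
  }
}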

View File

@ -146,10 +146,14 @@ function pluginDirectiveLoader($compile, datasourceSrv, $rootScope, $q, $http, $
}; };
}); });
} }
// ConfigCtrl // Datasource ConfigCtrl
case 'datasource-config-ctrl': { case 'datasource-config-ctrl': {
var dsMeta = scope.ctrl.datasourceMeta; var dsMeta = scope.ctrl.datasourceMeta;
return System.import(dsMeta.module).then(function(dsModule) { return System.import(dsMeta.module).then(function(dsModule): any {
if (!dsModule.ConfigCtrl) {
return {notFound: true};
}
return { return {
baseUrl: dsMeta.baseUrl, baseUrl: dsMeta.baseUrl,
name: 'ds-config-' + dsMeta.id, name: 'ds-config-' + dsMeta.id,

View File

@ -0,0 +1,136 @@
///<reference path="../../headers/common.d.ts" />
import _ from 'lodash';
import config from 'app/core/config';
import coreModule from 'app/core/core_module';
import {Observable} from 'vendor/npm/rxjs/Observable';
export class LiveSrv {
conn: any;
observers: any;
initPromise: any;
constructor() {
this.observers = {};
}
getWebSocketUrl() {
var l = window.location;
return ((l.protocol === "https:") ? "wss://" : "ws://") + l.host + config.appSubUrl + '/ws';
}
getConnection() {
if (this.initPromise) {
return this.initPromise;
}
if (this.conn && this.conn.readyState === 1) {
return Promise.resolve(this.conn);
}
this.initPromise = new Promise((resolve, reject) => {
console.log('Live: connecting...');
this.conn = new WebSocket(this.getWebSocketUrl());
this.conn.onclose = (evt) => {
console.log("Live: websocket onclose", evt);
reject({message: 'Connection closed'});
this.initPromise = null;
setTimeout(this.reconnect.bind(this), 2000);
};
this.conn.onmessage = (evt) => {
this.handleMessage(evt.data);
};
this.conn.onerror = (evt) => {
this.initPromise = null;
reject({message: 'Connection error'});
console.log("Live: websocket error", evt);
};
this.conn.onopen = (evt) => {
console.log('opened');
this.initPromise = null;
resolve(this.conn);
};
});
return this.initPromise;
}
handleMessage(message) {
message = JSON.parse(message);
if (!message.stream) {
console.log("Error: stream message without stream!", message);
return;
}
var observer = this.observers[message.stream];
if (!observer) {
this.removeObserver(message.stream, null);
return;
}
observer.next(message);
}
reconnect() {
// no need to reconnect if no one cares
if (_.keys(this.observers).length === 0) {
return;
}
console.log('LiveSrv: Reconnecting');
this.getConnection().then(conn => {
_.each(this.observers, (value, key) => {
this.send({action: 'subscribe', stream: key});
});
});
}
send(data) {
this.conn.send(JSON.stringify(data));
}
addObserver(stream, observer) {
this.observers[stream] = observer;
this.getConnection().then(conn => {
this.send({action: 'subscribe', stream: stream});
});
}
removeObserver(stream, observer) {
console.log('unsubscribe', stream);
delete this.observers[stream];
this.getConnection().then(conn => {
this.send({action: 'unsubscribe', stream: stream});
});
}
subscribe(streamName) {
console.log('LiveSrv.subscribe: ' + streamName);
return Observable.create(observer => {
this.addObserver(streamName, observer);
return () => {
this.removeObserver(streamName, observer);
};
});
// return this.init().then(() => {
// this.send({action: 'subscribe', stream: name});
// });
}
}
var instance = new LiveSrv();
export {instance as liveSrv};
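A minimal consumer sketch (not part of this commit; the stream name is invented) showing how LiveSrv is meant to be used: subscribe() returns an Observable, and tearing down the subscription removes the observer and sends an unsubscribe action over the websocket:

import {liveSrv} from 'app/core/core';

// each message handleMessage() routes for this stream arrives in next()
var subscription = liveSrv.subscribe('example-stream').subscribe({
  next: (message) => console.log('stream message', message),
  error: (err) => console.log('stream error', err),
});

// later, e.g. on scope $destroy
subscription.unsubscribe();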

View File

@ -1,91 +0,0 @@
define([
'angular',
'lodash',
'../core_module',
],
function (angular, _, coreModule) {
'use strict';
coreModule.default.service('alertSrv', function($timeout, $sce, $rootScope, $modal, $q) {
var self = this;
this.init = function() {
$rootScope.onAppEvent('alert-error', function(e, alert) {
self.set(alert[0], alert[1], 'error');
}, $rootScope);
$rootScope.onAppEvent('alert-warning', function(e, alert) {
self.set(alert[0], alert[1], 'warning', 5000);
}, $rootScope);
$rootScope.onAppEvent('alert-success', function(e, alert) {
self.set(alert[0], alert[1], 'success', 3000);
}, $rootScope);
$rootScope.onAppEvent('confirm-modal', this.showConfirmModal, $rootScope);
};
// List of all alert objects
this.list = [];
this.set = function(title,text,severity,timeout) {
var newAlert = {
title: title || '',
text: text || '',
severity: severity || 'info',
};
var newAlertJson = angular.toJson(newAlert);
// remove same alert if it already exists
_.remove(self.list, function(value) {
return angular.toJson(value) === newAlertJson;
});
self.list.push(newAlert);
if (timeout > 0) {
$timeout(function() {
self.list = _.without(self.list,newAlert);
}, timeout);
}
if (!$rootScope.$$phase) {
$rootScope.$digest();
}
return(newAlert);
};
this.clear = function(alert) {
self.list = _.without(self.list,alert);
};
this.clearAll = function() {
self.list = [];
};
this.showConfirmModal = function(e, payload) {
var scope = $rootScope.$new();
scope.title = payload.title;
scope.text = payload.text;
scope.text2 = payload.text2;
scope.onConfirm = payload.onConfirm;
scope.icon = payload.icon || "fa-check";
scope.yesText = payload.yesText || "Yes";
scope.noText = payload.noText || "Cancel";
var confirmModal = $modal({
template: 'public/app/partials/confirm_modal.html',
persist: false,
modalClass: 'confirm-modal',
show: false,
scope: scope,
keyboard: false
});
$q.when(confirmModal).then(function(modalEl) {
modalEl.modal('show');
});
};
});
});

View File

@ -0,0 +1,99 @@
///<reference path="../../headers/common.d.ts" />
import angular from 'angular';
import _ from 'lodash';
import $ from 'jquery';
import coreModule from 'app/core/core_module';
import appEvents from 'app/core/app_events';
export class AlertSrv {
list: any[];
/** @ngInject */
constructor(private $timeout, private $sce, private $rootScope, private $modal) {
this.list = [];
}
init() {
this.$rootScope.onAppEvent('alert-error', (e, alert) => {
this.set(alert[0], alert[1], 'error', 0);
}, this.$rootScope);
this.$rootScope.onAppEvent('alert-warning', (e, alert) => {
this.set(alert[0], alert[1], 'warning', 5000);
}, this.$rootScope);
this.$rootScope.onAppEvent('alert-success', (e, alert) => {
this.set(alert[0], alert[1], 'success', 3000);
}, this.$rootScope);
appEvents.on('confirm-modal', this.showConfirmModal.bind(this));
this.$rootScope.onAppEvent('confirm-modal', (e, data) => {
this.showConfirmModal(data);
}, this.$rootScope);
}
set(title, text, severity, timeout) {
var newAlert = {
title: title || '',
text: text || '',
severity: severity || 'info',
};
var newAlertJson = angular.toJson(newAlert);
// remove same alert if it already exists
_.remove(this.list, function(value) {
return angular.toJson(value) === newAlertJson;
});
this.list.push(newAlert);
if (timeout > 0) {
this.$timeout(() => {
this.list = _.without(this.list, newAlert);
}, timeout);
}
if (!this.$rootScope.$$phase) {
this.$rootScope.$digest();
}
return(newAlert);
}
clear(alert) {
this.list = _.without(this.list, alert);
}
clearAll() {
this.list = [];
}
showConfirmModal(payload) {
var scope = this.$rootScope.$new();
scope.title = payload.title;
scope.text = payload.text;
scope.text2 = payload.text2;
scope.onConfirm = payload.onConfirm;
scope.icon = payload.icon || "fa-check";
scope.yesText = payload.yesText || "Yes";
scope.noText = payload.noText || "Cancel";
var confirmModal = this.$modal({
template: 'public/app/partials/confirm_modal.html',
persist: false,
modalClass: 'confirm-modal',
show: false,
scope: scope,
keyboard: false
});
confirmModal.then(function(modalEl) {
modalEl.modal('show');
});
}
}
coreModule.service('alertSrv', AlertSrv);
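As a usage sketch (not in this diff; the payload values are invented), any code can now open the confirm modal through the new appEvents emitter instead of a $rootScope broadcast, since AlertSrv registers a 'confirm-modal' handler on it:

import appEvents from 'app/core/app_events';

appEvents.emit('confirm-modal', {
  title: 'Delete',
  text: 'Are you sure you want to delete this row?',
  icon: 'fa-trash',
  yesText: 'Delete',
  onConfirm: () => {
    // perform the deletion here
  },
});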

View File

@ -105,6 +105,13 @@ function (angular, _, coreModule, config) {
}); });
} }
//populate error obj on Internal Error
if (_.isString(err.data) && err.status === 500) {
err.data = {
error: err.statusText
};
}
// for Prometheus // for Prometheus
if (!err.data.message && _.isString(err.data.error)) { if (!err.data.message && _.isString(err.data.error)) {
err.data.message = err.data.error; err.data.message = err.data.error;

View File

@ -0,0 +1,56 @@
///<reference path="../../headers/common.d.ts" />
import {Subject} from 'vendor/npm/rxjs/Subject';
var hasOwnProp = {}.hasOwnProperty;
function createName(name) {
return '$' + name;
}
export class Emitter {
subjects: any;
constructor() {
this.subjects = {};
}
emit(name, data?) {
var fnName = createName(name);
this.subjects[fnName] || (this.subjects[fnName] = new Subject());
this.subjects[fnName].next(data);
}
on(name, handler, scope?) {
var fnName = createName(name);
this.subjects[fnName] || (this.subjects[fnName] = new Subject());
var subscription = this.subjects[fnName].subscribe(handler);
if (scope) {
scope.$on('$destroy', function() {
subscription.unsubscribe();
});
}
return subscription;
};
off(name, handler) {
var fnName = createName(name);
if (this.subjects[fnName]) {
this.subjects[fnName].dispose();
delete this.subjects[fnName];
}
}
dispose() {
var subjects = this.subjects;
for (var prop in subjects) {
if (hasOwnProp.call(subjects, prop)) {
subjects[prop].dispose();
}
}
this.subjects = {};
}
}
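A short usage sketch of the new Emitter wrapper (not part of this commit; the event name and payload are invented):

import {Emitter} from 'app/core/core';

var events = new Emitter();

// on() lazily creates an Rx Subject per event name and returns the subscription;
// passing an angular scope as the third argument auto-unsubscribes on $destroy
var sub = events.on('render', (payload) => console.log('render', payload));

events.emit('render', {fullscreen: false});

sub.unsubscribe();   // drop a single handler
events.dispose();    // or tear down every subject on the emitter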

View File

@ -19,7 +19,8 @@ class AdminSettingsCtrl {
class AdminHomeCtrl { class AdminHomeCtrl {
/** @ngInject **/ /** @ngInject **/
constructor() {} constructor() {
}
} }
export class AdminStatsCtrl { export class AdminStatsCtrl {

View File

@ -212,7 +212,7 @@ function (angular, $, _, moment) {
var i, j, k; var i, j, k;
var oldVersion = this.schemaVersion; var oldVersion = this.schemaVersion;
var panelUpgrades = []; var panelUpgrades = [];
this.schemaVersion = 11; this.schemaVersion = 12;
if (oldVersion === this.schemaVersion) { if (oldVersion === this.schemaVersion) {
return; return;
@ -401,11 +401,61 @@ function (angular, $, _, moment) {
}); });
} }
if (oldVersion < 11) { if (oldVersion < 12) {
// update template variables // update template variables
_.each(this.templating.list, function(templateVariable) { _.each(this.templating.list, function(templateVariable) {
if (templateVariable.refresh) { templateVariable.refresh = 1; } if (templateVariable.refresh) { templateVariable.refresh = 1; }
if (!templateVariable.refresh) { templateVariable.refresh = 0; } if (!templateVariable.refresh) { templateVariable.refresh = 0; }
if (templateVariable.hideVariable) {
templateVariable.hide = 2;
} else if (templateVariable.hideLabel) {
templateVariable.hide = 1;
} else {
templateVariable.hide = 0;
}
});
}
if (oldVersion < 12) {
// update graph yaxes changes
panelUpgrades.push(function(panel) {
if (panel.type !== 'graph') { return; }
if (!panel.yaxes) {
panel.yaxes = [
{
show: panel['y-axis'],
min: panel.grid.leftMin,
max: panel.grid.leftMax,
logBase: panel.grid.leftLogBase,
format: panel.y_formats[0],
label: panel.leftYAxisLabel,
},
{
show: panel['y-axis'],
min: panel.grid.rightMin,
max: panel.grid.rightMax,
logBase: panel.grid.rightLogBase,
format: panel.y_formats[1],
label: panel.rightYAxisLabel,
}
];
panel.xaxis = {
show: panel['x-axis'],
};
delete panel.grid.leftMin;
delete panel.grid.leftMax;
delete panel.grid.leftLogBase;
delete panel.grid.rightMin;
delete panel.grid.rightMax;
delete panel.grid.rightLogBase;
delete panel.y_formats;
delete panel.leftYAxisLabel;
delete panel.rightYAxisLabel;
delete panel['y-axis'];
delete panel['x-axis'];
}
}); });
} }
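To make the v12 upgrade above concrete, an illustrative before/after of a graph panel (values invented): the flat y_formats, grid min/max/logBase and axis label fields are folded into the new yaxes array and xaxis object.

// before (schemaVersion < 12)
var before: any = {
  type: 'graph',
  'y-axis': true,
  'x-axis': true,
  y_formats: ['short', 'bytes'],
  leftYAxisLabel: 'requests',
  grid: {leftMin: 0, leftMax: null, leftLogBase: 1, rightMin: null, rightMax: null, rightLogBase: 1},
};

// after the upgrade has run
var after: any = {
  type: 'graph',
  yaxes: [
    {show: true, min: 0, max: null, logBase: 1, format: 'short', label: 'requests'},
    {show: true, min: null, max: null, logBase: 1, format: 'bytes', label: undefined},
  ],
  xaxis: {show: true},
  grid: {},
};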

View File

@ -19,10 +19,11 @@
<div class="tabbed-view-body"> <div class="tabbed-view-body">
<div ng-if="editor.index == 0"> <div ng-if="editor.index == 0">
<div class="gf-form-group"> <div class="gf-form-group section">
<h5 class="section-heading">Details</h5>
<div class="gf-form"> <div class="gf-form">
<label class="gf-form-label width-7">Title</label> <label class="gf-form-label width-7">Title</label>
<input type="text" class="gf-form-input max-width-25" ng-model='dashboard.title'></input> <input type="text" class="gf-form-input width-25" ng-model='dashboard.title'></input>
</div> </div>
<div class="gf-form"> <div class="gf-form">
<label class="gf-form-label width-7">Tags<tip>Press enter to a add tag</tip></label> <label class="gf-form-label width-7">Tags<tip>Press enter to a add tag</tip></label>
@ -38,18 +39,27 @@
</div> </div>
</div> </div>
<h5 class="section-heading">On/Off Toggles</h5> <div class="section">
<h5 class="section-heading">Toggles</h5>
<div class="gf-form-group"> <div class="gf-form-group">
<div class="gf-form-inline"> <gf-form-switch class="gf-form"
<div class="gf-form"> label="Editable"
<editor-checkbox text="Editable" model="dashboard.editable"></editor-checkbox> tooltip="Uncheck, then save and reload to disable all dashboard editing"
</div> checked="dashboard.editable"
<div class="gf-form"> label-class="width-10">
<editor-checkbox text="Hide Controls (CTRL+H)" model="dashboard.hideControls"></editor-checkbox> </gf-form-switch>
</div> <gf-form-switch class="gf-form"
<div class="gf-form"> label="Hide Controls"
<editor-checkbox text="Shared Crosshair (CTRL+O)" model="dashboard.sharedCrosshair"></editor-checkbox> tooltip="Hide row controls. Shortcut: CTRL+H"
</div> checked="dashboard.hideControls"
label-class="width-10">
</gf-form-switch>
<gf-form-switch class="gf-form"
label="Shared Crosshair"
tooltip="Shared Crosshair line on all graphs. Shortcut: CTRL+O"
checked="dashboard.sharedCrosshair"
label-class="width-10">
</gf-form-switch>
</div> </div>
</div> </div>
</div> </div>
@ -62,19 +72,19 @@
<div class="gf-form"> <div class="gf-form">
<span class="gf-form-label">Title</span> <span class="gf-form-label">Title</span>
<input type="text" class="gf-form-input max-width-14" ng-model='row.title'></input> <input type="text" class="gf-form-input max-width-14" ng-model='row.title'></input>
<editor-checkbox text="Show title" model="row.showTitle"></editor-checkbox>
</div> </div>
<gf-form-switch class="gf-form" label="Show title" checked="row.showTitle" switch-class="max-width-6"></gf-form-switch>
<div class="gf-form"> <div class="gf-form">
<button class="btn btn-inverse btn-mini" style="margin-right: 5px;" ng-click="dashboard.rows = _.without(dashboard.rows,row)"> <button class="btn btn-inverse gf-form-btn" ng-click="_.move(dashboard.rows,$index,$index-1)">
<i class="fa fa-trash"></i>
</button>
<button class="btn btn-inverse btn-mini" ng-hide="$first" style="margin-right: 5px;" ng-click="_.move(dashboard.rows,$index,$index-1)">
<i ng-class="{'invisible': $first}" class="fa fa-arrow-up"></i> <i ng-class="{'invisible': $first}" class="fa fa-arrow-up"></i>
</button> </button>
<button class="btn btn-inverse btn-mini" ng-hide="$last" style="margin-right: 5px;" ng-click="_.move(dashboard.rows,$index,$index+1)"> <button class="btn btn-inverse gf-from-btn" ng-click="_.move(dashboard.rows,$index,$index+1)">
<i ng-class="{'invisible': $last}" class="fa fa-arrow-down"></i> <i ng-class="{'invisible': $last}" class="fa fa-arrow-down"></i>
</button> </button>
<button class="btn btn-inverse gf-form-btn" click="dashboard.rows = _.without(dashboard.rows,row)">
<i class="fa fa-trash"></i>
</button>
</div> </div>
</div> </div>
</div> </div>

View File

@ -30,11 +30,6 @@ function (angular, _, config) {
$scope.toggleRow = function(row) { $scope.toggleRow = function(row) {
row.collapse = row.collapse ? false : true; row.collapse = row.collapse ? false : true;
if (!row.collapse) {
$timeout(function() {
$scope.$broadcast('render');
});
}
}; };
$scope.addPanel = function(panel) { $scope.addPanel = function(panel) {

View File

@ -1,7 +1,7 @@
<div class="submenu-controls"> <div class="submenu-controls">
<ul ng-if="ctrl.dashboard.templating.list.length > 0"> <ul ng-if="ctrl.dashboard.templating.list.length > 0">
<li ng-repeat="variable in ctrl.variables" ng-show="!variable.hideVariable" class="submenu-item"> <li ng-repeat="variable in ctrl.variables" ng-hide="variable.hide === 2" class="submenu-item">
<span class="submenu-item-label template-variable " ng-show="!variable.hideLabel"> <span class="submenu-item-label template-variable " ng-hide="variable.hide === 1">
{{variable.label || variable.name}}: {{variable.label || variable.name}}:
</span> </span>
<value-select-dropdown variable="variable" on-updated="ctrl.variableUpdated(variable)" get-values-for-tag="ctrl.getValuesForTag(variable, tagKey)"></value-select-dropdown> <value-select-dropdown variable="variable" on-updated="ctrl.variableUpdated(variable)" get-values-for-tag="ctrl.getValuesForTag(variable, tagKey)"></value-select-dropdown>

View File

@ -139,7 +139,7 @@ function (angular, _, $) {
self.$scope.broadcastRefresh(); self.$scope.broadcastRefresh();
} }
else { else {
self.fullscreenPanel.$broadcast('render'); ctrl.render();
} }
delete self.fullscreenPanel; delete self.fullscreenPanel;
}); });
@ -159,7 +159,7 @@ function (angular, _, $) {
this.$scope.appEvent('panel-fullscreen-enter', {panelId: ctrl.panel.id}); this.$scope.appEvent('panel-fullscreen-enter', {panelId: ctrl.panel.id});
$timeout(function() { $timeout(function() {
panelScope.$broadcast('render'); ctrl.render();
}); });
}; };

View File

@ -1,21 +1,21 @@
<navbar title="Organization" icon="icon-gf icon-gf-users"> <navbar title="Organization" icon="icon-gf icon-gf-users">
</navbar> </navbar>
<div class="page-container"> <div class="page-container" ng-form="playlistEditForm">
<div class="page"> <div class="page-header">
<h1>New Organization</h1>
</div>
<h2 style="margin-top: 30px;">Add Organization</h2> <p class="playlist-description">Each organization contains their own dashboards, data sources and configuration, and cannot be shared between orgs. While users may belong to more than one, mutiple organization are most frequently used in multi-tenant deployments. </p>
<form name="form" class="gf-form-group"> <div class="gf-form-group">
<div class="gf-form"> <div class="gf-form">
<span class="gf-form-label width-10">Org. name</span> <span class="gf-form-label width-10">Org. name</span>
<input type="text" ng-model="newOrg.name" required class="gf-form-input" placeholder="organization name"> <input type="text" ng-model="newOrg.name" required class="gf-form-input max-width-21" placeholder="organization name">
</div> </div>
<br> <br>
<div class="gf-form-buttons-row"> <div class="gf-form-buttons-row">
<button class="btn btn-success pull-right" ng-click="createOrg()">Create</button> <button class="btn btn-success" ng-click="createOrg()">Create</button>
</div> </div>
</form>
</div> </div>
</div> </div>

View File

@ -3,7 +3,7 @@
<div class="page-container"> <div class="page-container">
<div class="page-header"> <div class="page-header">
<h1>Preferences</h1> <h1>Org Preferences</h1>
</div> </div>
<h3 class="page-heading">General</h3> <h3 class="page-heading">General</h3>

View File

@ -9,6 +9,8 @@ import {PanelCtrl} from './panel_ctrl';
import * as rangeUtil from 'app/core/utils/rangeutil'; import * as rangeUtil from 'app/core/utils/rangeutil';
import * as dateMath from 'app/core/utils/datemath'; import * as dateMath from 'app/core/utils/datemath';
import {Subject} from 'vendor/npm/rxjs/Subject';
class MetricsPanelCtrl extends PanelCtrl { class MetricsPanelCtrl extends PanelCtrl {
error: boolean; error: boolean;
loading: boolean; loading: boolean;
@ -26,6 +28,8 @@ class MetricsPanelCtrl extends PanelCtrl {
timeInfo: any; timeInfo: any;
skipDataOnInit: boolean; skipDataOnInit: boolean;
datasources: any[]; datasources: any[];
dataStream: any;
dataSubscription: any;
constructor($scope, $injector) { constructor($scope, $injector) {
super($scope, $injector); super($scope, $injector);
@ -40,35 +44,36 @@ class MetricsPanelCtrl extends PanelCtrl {
if (!this.panel.targets) { if (!this.panel.targets) {
this.panel.targets = [{}]; this.panel.targets = [{}];
} }
this.events.on('refresh', this.onMetricsPanelRefresh.bind(this));
this.events.on('init-edit-mode', this.onInitMetricsPanelEditMode.bind(this));
} }
initEditMode() { private onInitMetricsPanelEditMode() {
super.initEditMode();
this.addEditorTab('Metrics', 'public/app/partials/metrics.html'); this.addEditorTab('Metrics', 'public/app/partials/metrics.html');
this.addEditorTab('Time range', 'public/app/features/panel/partials/panelTime.html'); this.addEditorTab('Time range', 'public/app/features/panel/partials/panelTime.html');
this.datasources = this.datasourceSrv.getMetricSources(); this.datasources = this.datasourceSrv.getMetricSources();
} }
refreshData(data) { private onMetricsPanelRefresh() {
// null op
return this.$q.when(data);
}
loadSnapshot(data) {
// null op
return data;
}
refresh() {
// ignore fetching data if another panel is in fullscreen // ignore fetching data if another panel is in fullscreen
if (this.otherPanelInFullscreenMode()) { return; } if (this.otherPanelInFullscreenMode()) { return; }
// if we have snapshot data use that // if we have snapshot data use that
if (this.panel.snapshotData) { if (this.panel.snapshotData) {
if (this.loadSnapshot) {
this.updateTimeRange(); this.updateTimeRange();
this.loadSnapshot(this.panel.snapshotData); var data = this.panel.snapshotData;
// backward compatibility: older snapshots stored an object wrapper instead of a plain array
if (!_.isArray(data)) {
data = data.data;
} }
this.events.emit('data-snapshot-load', data);
return;
}
// ignore if we have a data stream
if (this.dataStream) {
return; return;
} }
@ -77,16 +82,15 @@ class MetricsPanelCtrl extends PanelCtrl {
this.loading = true; this.loading = true;
// load datasource service // load datasource service
this.datasourceSrv.get(this.panel.datasource).then(datasource => { this.datasourceSrv.get(this.panel.datasource)
this.datasource = datasource; .then(this.issueQueries.bind(this))
return this.refreshData(this.datasource); .then(this.handleQueryResult.bind(this))
}).then(() => { .catch(err => {
this.loading = false;
}).catch(err => {
console.log('Panel data error:', err);
this.loading = false; this.loading = false;
this.error = err.message || "Timeseries data request error"; this.error = err.message || "Timeseries data request error";
this.inspector = {error: err}; this.inspector = {error: err};
this.events.emit('data-error', err);
console.log('Panel data error:', err);
}); });
} }
@ -167,6 +171,7 @@ class MetricsPanelCtrl extends PanelCtrl {
} }
var metricsQuery = { var metricsQuery = {
panelId: this.panel.id,
range: this.range, range: this.range,
rangeRaw: this.rangeRaw, rangeRaw: this.rangeRaw,
interval: this.interval, interval: this.interval,
@ -178,19 +183,50 @@ class MetricsPanelCtrl extends PanelCtrl {
}; };
this.setTimeQueryStart(); this.setTimeQueryStart();
try { return datasource.query(metricsQuery);
return datasource.query(metricsQuery).then(results => { }
handleQueryResult(result) {
this.setTimeQueryEnd(); this.setTimeQueryEnd();
this.loading = false;
// check for if data source returns subject
if (result && result.subscribe) {
this.handleDataStream(result);
return;
}
if (this.dashboard.snapshot) { if (this.dashboard.snapshot) {
this.panel.snapshotData = results; this.panel.snapshotData = result.data;
} }
return results; return this.events.emit('data-received', result.data);
});
} catch (err) {
return this.$q.reject(err);
} }
handleDataStream(stream) {
// if we already have a connection
if (this.dataStream) {
console.log('two stream observables!');
return;
}
this.dataStream = stream;
this.dataSubscription = stream.subscribe({
next: (data) => {
console.log('dataSubject next!');
if (data.range) {
this.range = data.range;
}
this.events.emit('data-received', data.data);
},
error: (error) => {
this.events.emit('data-error', error);
console.log('panel: observer got error');
},
complete: () => {
console.log('panel: observer got complete');
}
});
} }
setDatasource(datasource) { setDatasource(datasource) {
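For context, a hypothetical streaming datasource sketch (not in this commit): because handleQueryResult() checks for a subscribe() method, a datasource can now return an Observable from query() and the panel will route it through handleDataStream().

import {Observable} from 'vendor/npm/rxjs/Observable';

export class StreamingDatasourceExample {
  query(options) {
    return Observable.create(observer => {
      // push a new point every second; the panel emits 'data-received' for each message
      var timer = setInterval(() => {
        observer.next({
          data: [{target: 'stream-series', datapoints: [[Math.random(), Date.now()]]}],
        });
      }, 1000);
      return () => clearInterval(timer);
    });
  }
}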

View File

@ -9,6 +9,8 @@ const TITLE_HEIGHT = 25;
const EMPTY_TITLE_HEIGHT = 9; const EMPTY_TITLE_HEIGHT = 9;
const PANEL_PADDING = 5; const PANEL_PADDING = 5;
import {Emitter} from 'app/core/core';
export class PanelCtrl { export class PanelCtrl {
panel: any; panel: any;
row: any; row: any;
@ -16,7 +18,6 @@ export class PanelCtrl {
editorTabIndex: number; editorTabIndex: number;
pluginName: string; pluginName: string;
pluginId: string; pluginId: string;
icon: string;
editorTabs: any; editorTabs: any;
$scope: any; $scope: any;
$injector: any; $injector: any;
@ -28,12 +29,14 @@ export class PanelCtrl {
editMode: any; editMode: any;
height: any; height: any;
containerHeight: any; containerHeight: any;
events: Emitter;
constructor($scope, $injector) { constructor($scope, $injector) {
this.$injector = $injector; this.$injector = $injector;
this.$scope = $scope; this.$scope = $scope;
this.$timeout = $injector.get('$timeout'); this.$timeout = $injector.get('$timeout');
this.editorTabIndex = 0; this.editorTabIndex = 0;
this.events = new Emitter();
var plugin = config.panels[this.panel.type]; var plugin = config.panels[this.panel.type];
if (plugin) { if (plugin) {
@ -56,7 +59,7 @@ export class PanelCtrl {
} }
refresh() { refresh() {
return; this.events.emit('refresh', null);
} }
publishAppEvent(evtName, evt) { publishAppEvent(evtName, evt) {
@ -85,6 +88,7 @@ export class PanelCtrl {
this.editorTabs = []; this.editorTabs = [];
this.addEditorTab('General', 'public/app/partials/panelgeneral.html'); this.addEditorTab('General', 'public/app/partials/panelgeneral.html');
this.editModeInitiated = true; this.editModeInitiated = true;
this.events.emit('init-edit-mode', null);
} }
addEditorTab(title, directiveFn, index?) { addEditorTab(title, directiveFn, index?) {
@ -114,7 +118,9 @@ export class PanelCtrl {
} }
getExtendedMenu() { getExtendedMenu() {
return [{text: 'Panel JSON', click: 'ctrl.editPanelJson(); dismiss();'}]; var actions = [{text: 'Panel JSON', click: 'ctrl.editPanelJson(); dismiss();'}];
this.events.emit('init-panel-actions', actions);
return actions;
} }
otherPanelInFullscreenMode() { otherPanelInFullscreenMode() {
@ -122,7 +128,6 @@ export class PanelCtrl {
} }
calculatePanelHeight() { calculatePanelHeight() {
if (this.fullscreen) { if (this.fullscreen) {
var docHeight = $(window).height(); var docHeight = $(window).height();
var editHeight = Math.floor(docHeight * 0.3); var editHeight = Math.floor(docHeight * 0.3);
@ -138,8 +143,13 @@ export class PanelCtrl {
this.height = this.containerHeight - (PANEL_PADDING + (this.panel.title ? TITLE_HEIGHT : EMPTY_TITLE_HEIGHT)); this.height = this.containerHeight - (PANEL_PADDING + (this.panel.title ? TITLE_HEIGHT : EMPTY_TITLE_HEIGHT));
} }
broadcastRender(arg1?, arg2?) { render(payload?) {
this.$scope.$broadcast('render', arg1, arg2); // ignore if other panel is in fullscreen mode
if (this.otherPanelInFullscreenMode()) {
return;
}
this.events.emit('render', payload);
} }
toggleEditorHelp(index) { toggleEditorHelp(index) {
@ -157,7 +167,7 @@ export class PanelCtrl {
updateColumnSpan(span) { updateColumnSpan(span) {
this.panel.span = Math.min(Math.max(Math.floor(this.panel.span + span), 1), 12); this.panel.span = Math.min(Math.max(Math.floor(this.panel.span + span), 1), 12);
this.$timeout(() => { this.$timeout(() => {
this.broadcastRender(); this.render();
}); });
} }
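A minimal sketch (the import path and editor-tab template below are assumptions, not from this diff) of how a panel controller hooks into the new Emitter-based lifecycle instead of $scope broadcasts:

import {PanelCtrl} from 'app/features/panel/panel_ctrl';

export class ExamplePanelCtrl extends PanelCtrl {
  constructor($scope, $injector) {
    super($scope, $injector);
    // replaces the old $scope.$on('render') / broadcastRender() wiring
    this.events.on('init-edit-mode', () => {
      this.addEditorTab('Options', 'public/app/plugins/panel/example/options.html');
    });
    this.events.on('refresh', () => this.render());
    this.events.on('render', () => {
      // draw the panel here
    });
  }
}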

View File

@ -121,7 +121,7 @@ module.directive('panelResizer', function($rootScope) {
} }
scope.$apply(function() { scope.$apply(function() {
scope.$broadcast('render'); ctrl.render();
}); });
} }

View File

@ -56,6 +56,7 @@ function (angular, $, _, Tether) {
template += '<a class="panel-menu-link" '; template += '<a class="panel-menu-link" ';
if (item.click) { template += ' ng-click="' + item.click + '"'; } if (item.click) { template += ' ng-click="' + item.click + '"'; }
if (item.href) { template += ' href="' + item.href + '"'; }
template += '>'; template += '>';
template += item.text + '</a>'; template += item.text + '</a>';
}); });

Some files were not shown because too many files have changed in this diff.