Merge pull request #2462 from mattermost/plt-2340

PLT-2340 Webpack optimizations and fixes
This commit is contained in:
Christopher Speller
2016-03-17 12:04:51 -04:00
11 changed files with 285 additions and 27 deletions

4
Godeps/Godeps.json generated
View File

@@ -5,6 +5,10 @@
"./..."
],
"Deps": [
{
"ImportPath": "github.com/NYTimes/gziphandler",
"Rev": "a88790d49798560db24af70fb6a10a66e2549a72"
},
{
"ImportPath": "github.com/alecthomas/log4go",
"Rev": "8e9057c3b25c409a34c0b9737cdc82cbcafeabce"

View File

@@ -0,0 +1,13 @@
Copyright (c) 2015 The New York Times Company
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this library except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@@ -0,0 +1,52 @@
Gzip Handler
============
This is a tiny Go package which wraps HTTP handlers to transparently gzip the
response body, for clients which support it. Although it's usually simpler to
leave that to a reverse proxy (like nginx or Varnish), this package is useful
when that's undesirable.
## Usage
Call `GzipHandler` with any handler (an object which implements the
`http.Handler` interface), and it'll return a new handler which gzips the
response. For example:
```go
package main
import (
"io"
"net/http"
"github.com/NYTimes/gziphandler"
)
func main() {
withoutGz := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "text/plain")
io.WriteString(w, "Hello, World")
})
withGz := gziphandler.GzipHandler(withoutGz)
http.Handle("/", withGz)
http.ListenAndServe("0.0.0.0:8000", nil)
}
```
## Documentation
The docs can be found at [godoc.org][docs], as usual.
## License
[Apache 2.0][license].
[docs]: https://godoc.org/github.com/nytimes/gziphandler
[license]: https://github.com/nytimes/gziphandler/blob/master/LICENSE.md

View File

@@ -0,0 +1,140 @@
package gziphandler
import (
"compress/gzip"
"fmt"
"net/http"
"strconv"
"strings"
"sync"
)
// HTTP header names used when negotiating and applying gzip compression.
const (
	vary            = "Vary"
	acceptEncoding  = "Accept-Encoding"
	contentEncoding = "Content-Encoding"
)

// codings maps a content-coding name (e.g. "gzip") to its quality value.
type codings map[string]float64

// The default qvalue to assign to an encoding if no explicit qvalue is set.
// This is actually kind of ambiguous in RFC 2616, so hopefully it's correct.
// The examples seem to indicate that it is.
const DEFAULT_QVALUE = 1.0

// gzipWriterPool recycles gzip.Writers between requests, avoiding a fresh
// allocation of the writer (and its internal buffers) for every response.
var gzipWriterPool = sync.Pool{
	New: func() interface{} { return gzip.NewWriter(nil) },
}
// GzipResponseWriter provides an http.ResponseWriter interface, which gzips
// bytes before writing them to the underlying response. This doesn't set the
// Content-Encoding header, nor close the writers, so don't forget to do that.
type GzipResponseWriter struct {
	gw *gzip.Writer // compresses into the embedded ResponseWriter below
	http.ResponseWriter
}
// Write compresses b through the wrapped gzip writer; the compressed bytes
// are emitted to the underlying http.ResponseWriter.
func (w GzipResponseWriter) Write(b []byte) (int, error) {
	n, err := w.gw.Write(b)
	return n, err
}
// Flush pushes any buffered compressed data out: first the *gzip.Writer is
// flushed, then the wrapped http.ResponseWriter (when it supports
// http.Flusher). This makes GzipResponseWriter itself an http.Flusher.
func (w GzipResponseWriter) Flush() {
	w.gw.Flush()
	if flusher, ok := w.ResponseWriter.(http.Flusher); ok {
		flusher.Flush()
	}
}
// GzipHandler wraps an HTTP handler so that responses are transparently
// gzip-compressed whenever the client advertises support for it via the
// Accept-Encoding header.
func GzipHandler(h http.Handler) http.Handler {
	fn := func(w http.ResponseWriter, r *http.Request) {
		w.Header().Add(vary, acceptEncoding)

		if !acceptsGzip(r) {
			h.ServeHTTP(w, r)
			return
		}

		// Borrow a pooled gzip writer and point it at the real response.
		// The writer is closed first (flushing the gzip trailer), then
		// returned to the pool, once the wrapped handler finishes.
		gzw := gzipWriterPool.Get().(*gzip.Writer)
		defer gzipWriterPool.Put(gzw)
		gzw.Reset(w)
		defer gzw.Close()

		w.Header().Set(contentEncoding, "gzip")
		h.ServeHTTP(GzipResponseWriter{gzw, w}, r)
	}
	return http.HandlerFunc(fn)
}
// acceptsGzip reports whether the given HTTP request indicates that it will
// accept a gzipped response, i.e. its Accept-Encoding header assigns "gzip"
// a positive quality value. Parse errors are deliberately ignored.
func acceptsGzip(r *http.Request) bool {
	encodings, _ := parseEncodings(r.Header.Get(acceptEncoding))
	return encodings["gzip"] > 0.0
}
// parseEncodings parses a comma-separated list of codings, per RFC 2616, as
// might appear in an Accept-Encoding header. It returns a map of
// content-codings to quality values, and an error aggregating every
// per-coding parse failure. It's probably safe to ignore those, because
// silently ignoring errors is how the internet works.
//
// See: http://tools.ietf.org/html/rfc2616#section-14.3
func parseEncodings(s string) (codings, error) {
	parsed := make(codings)
	var problems []string

	for _, raw := range strings.Split(s, ",") {
		coding, qvalue, err := parseCoding(raw)
		if err == nil {
			parsed[coding] = qvalue
			continue
		}
		problems = append(problems, err.Error())
	}

	// TODO (adammck): Use a proper multi-error struct, so the individual
	// errors can be extracted if anyone cares.
	if len(problems) == 0 {
		return parsed, nil
	}
	return parsed, fmt.Errorf("errors while parsing encodings: %s", strings.Join(problems, ", "))
}
// parseCoding parses a single coding (a content-coding with an optional
// qvalue), as might appear in an Accept-Encoding header. It attempts to
// forgive minor formatting errors.
//
// The returned qvalue defaults to DEFAULT_QVALUE and is clamped to [0, 1].
// An empty content-coding yields a non-nil err.
func parseCoding(s string) (coding string, qvalue float64, err error) {
	// Set the default exactly once, before the loop. The previous version
	// reset qvalue on every iteration, so any parameter that followed the
	// "q=..." part (e.g. "gzip;q=0.5;level=1") silently discarded the
	// parsed quality value, restoring the default of 1.0.
	qvalue = DEFAULT_QVALUE

	for n, part := range strings.Split(s, ";") {
		part = strings.TrimSpace(part)

		if n == 0 {
			// The first part is the content-coding itself.
			coding = strings.ToLower(part)
		} else if strings.HasPrefix(part, "q=") {
			qvalue, err = strconv.ParseFloat(strings.TrimPrefix(part, "q="), 64)

			// Clamp to the valid qvalue range per RFC 2616.
			if qvalue < 0.0 {
				qvalue = 0.0
			} else if qvalue > 1.0 {
				qvalue = 1.0
			}
		}
	}

	if coding == "" {
		err = fmt.Errorf("empty content-coding")
	}

	return
}

View File

@@ -136,6 +136,7 @@ package:
tar -C dist -czf $(DIST_PATH).tar.gz mattermost
build-client:
mkdir -p webapp/dist/files
cd webapp && make build
go-test:
@@ -196,15 +197,10 @@ clean: stop-docker
rm -Rf $(DIST_ROOT)
go clean $(GOFLAGS) -i ./...
rm -rf web/react/node_modules
rm -f web/static/js/bundle*.js
rm -f web/static/js/bundle*.js.map
rm -f web/static/js/libs*.js
rm -f web/static/css/styles.css
cd webapp && make clean
rm -rf api/data
rm -rf logs
rm -rf web/sass-files/.sass-cache
rm -rf Godeps/_workspace/pkg/
@@ -220,14 +216,17 @@ nuke: | clean clean-docker
touch $@
run: start-docker run-server run-client
run: | start-docker run-client run-server
run-server: .prepare-go
@echo Starting go web server
$(GO) run $(GOFLAGS) mattermost.go -config=config.json &
run-client: build-client
@echo Starting react processo
run-client:
@echo Starting client
mkdir -p webapp/dist/files
cd webapp && make run
@if [ "$(BUILD_ENTERPRISE)" = "true" ] && [ -d "$(ENTERPRISE_DIR)" ]; then \
cp ./config/config.json ./config/config.json.bak; \

View File

@@ -21,7 +21,8 @@
"SessionLengthSSOInDays": 30,
"SessionCacheInMinutes": 10,
"WebsocketSecurePort": 443,
"WebsocketPort": 80
"WebsocketPort": 80,
"WebserverMode": "regular"
},
"TeamSettings": {
"SiteName": "Mattermost",
@@ -139,4 +140,4 @@
"IdAttribute": null,
"QueryTimeout": 60
}
}
}

View File

@@ -21,6 +21,10 @@ const (
SERVICE_GITLAB = "gitlab"
SERVICE_GOOGLE = "google"
WEBSERVER_MODE_REGULAR = "regular"
WEBSERVER_MODE_GZIP = "gzip"
WEBSERVER_MODE_DISABLED = "disabled"
)
type ServiceSettings struct {
@@ -46,6 +50,7 @@ type ServiceSettings struct {
SessionCacheInMinutes *int
WebsocketSecurePort *int
WebsocketPort *int
WebserverMode *string
}
type SSOSettings struct {
@@ -383,6 +388,11 @@ func (o *Config) SetDefaults() {
o.ServiceSettings.AllowCorsFrom = new(string)
*o.ServiceSettings.AllowCorsFrom = ""
}
if o.ServiceSettings.WebserverMode == nil {
o.ServiceSettings.WebserverMode = new(string)
*o.ServiceSettings.WebserverMode = "regular"
}
}
func (o *Config) IsValid() *AppError {

View File

@@ -7,6 +7,8 @@ import (
"net/http"
"strings"
"github.com/NYTimes/gziphandler"
l4g "github.com/alecthomas/log4go"
"github.com/mattermost/platform/api"
"github.com/mattermost/platform/model"
@@ -23,11 +25,17 @@ func InitWeb() {
mainrouter := api.Srv.Router
staticDir := utils.FindDir(CLIENT_DIR)
l4g.Debug("Using client directory at %v", staticDir)
mainrouter.PathPrefix("/static/").Handler(http.StripPrefix("/static/", http.FileServer(http.Dir(staticDir))))
if *utils.Cfg.ServiceSettings.WebserverMode != "disabled" {
staticDir := utils.FindDir(CLIENT_DIR)
l4g.Debug("Using client directory at %v", staticDir)
if *utils.Cfg.ServiceSettings.WebserverMode == "gzip" {
mainrouter.PathPrefix("/static/").Handler(gziphandler.GzipHandler(http.StripPrefix("/static/", http.FileServer(http.Dir(staticDir)))))
} else {
mainrouter.PathPrefix("/static/").Handler(http.StripPrefix("/static/", http.FileServer(http.Dir(staticDir))))
}
mainrouter.Handle("/{anything:.*}", api.AppHandlerIndependent(root)).Methods("GET")
mainrouter.Handle("/{anything:.*}", api.AppHandlerIndependent(root)).Methods("GET")
}
}
var browsersNotSupported string = "MSIE/8;MSIE/9;MSIE/10;Internet Explorer/8;Internet Explorer/9;Internet Explorer/10;Safari/7;Safari/8"

View File

@@ -1,4 +1,4 @@
.PHONY: build test
.PHONY: build test run clean
test:
@echo Checking for style guide compliance
@@ -12,7 +12,20 @@ test:
touch $@
build: .npminstall
@echo Building mattermost web client
build: | .npminstall test
@echo Building mattermost Webapp
npm run build
run: .npminstall
@echo Running mattermost Webapp for development
npm run run
clean:
@echo Cleaning Webapp
rm -rf dist
rm -rf node_modules
rm .npminstall

View File

@@ -31,7 +31,7 @@
"babel-loader": "6.2.4",
"babel-plugin-transform-runtime": "6.6.0",
"babel-polyfill": "6.7.2",
"babel-preset-es2015": "6.6.0",
"babel-preset-es2015-webpack": "6.4.0",
"babel-preset-react": "6.5.0",
"babel-preset-stage-0": "6.5.0",
"eslint": "2.2.0",
@@ -39,6 +39,7 @@
"exports-loader": "0.6.3",
"extract-text-webpack-plugin": "1.0.1",
"file-loader": "0.8.5",
"url-loader": "0.5.7",
"html-loader": "0.4.3",
"copy-webpack-plugin": "1.1.1",
"css-loader": "0.23.1",
@@ -52,6 +53,7 @@
},
"scripts": {
"check": "eslint --ext \".jsx\" --ignore-pattern node_modules --quiet .",
"build": "webpack --progress"
"build": "webpack --optimize-dedupe",
"run": "webpack --progress"
}
}

View File

@@ -17,20 +17,21 @@ module.exports = {
loaders: [
{
test: /\.jsx?$/,
loader: 'babel-loader',
loader: 'babel',
exclude: /(node_modules|non_npm_dependencies)/,
query: {
presets: ['react', 'es2015', 'stage-0'],
plugins: ['transform-runtime']
presets: ['react', 'es2015-webpack', 'stage-0'],
plugins: ['transform-runtime'],
cacheDirectory: true
}
},
{
test: /\.json$/,
loader: 'json-loader'
loader: 'json'
},
{
test: /(node_modules|non_npm_dependencies)\/.+\.(js|jsx)$/,
loader: 'imports-loader',
loader: 'imports',
query: {
$: 'jquery',
jQuery: 'jquery'
@@ -46,7 +47,7 @@ module.exports = {
},
{
test: /\.(png|eot|tiff|svg|woff2|woff|ttf|gif)$/,
loader: 'file-loader',
loader: 'file',
query: {
name: 'files/[hash].[ext]'
}
@@ -67,7 +68,22 @@ module.exports = {
htmlExtract,
new CopyWebpackPlugin([
{from: 'images/emoji', to: 'emoji'}
])
]),
new webpack.optimize.UglifyJsPlugin({
'screw-ie8': true,
mangle: {
toplevel: false
},
compress: {
warnings: false
},
comments: false
}),
new webpack.optimize.AggressiveMergingPlugin(),
new webpack.LoaderOptionsPlugin({
minimize: true,
debug: false
})
],
resolve: {
alias: {