fix: dep ensure. now without gofmt on vendor directory

Marcus Efraimsson 2018-03-20 09:23:18 +01:00
parent 4033283254
commit b816f18b3d
17 changed files with 61 additions and 63 deletions
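
For context: every hunk below reverts a gofmt -s style simplification that had previously been applied to the vendored sources, so after this commit the files under vendor/ match their upstream form again. The sketch that follows is only an illustration of the rewrites involved (the names in it are made up and do not come from the vendored packages); the two spellings in each pair compile to the same thing, which is why the change is purely cosmetic.

package main

import "fmt"

type item struct{ key string }

func main() {
	m := map[string]int{"a": 1, "b": 2}

	// Upstream (verbose) spellings, restored in vendor/ by this commit:
	for _ = range m { // blank range value written out
	}
	for k, _ := range m { // trailing blank identifier written out
		fmt.Println(k)
	}
	a := []item{item{"x"}, item{"y"}} // element type repeated in the literal

	// The gofmt -s simplifications that had been applied before:
	for range m {
	}
	for k := range m {
		fmt.Println(k)
	}
	b := []item{{"x"}, {"y"}} // element type elided

	fmt.Println(len(a), len(b))
}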

View File

@@ -457,7 +457,7 @@ func (c *Client) GetMulti(keys []string) (map[string]*Item, error) {
}
var err error
- for range keyMap {
+ for _ = range keyMap {
if ge := <-ch; ge != nil {
err = ge
}

View File

@@ -582,7 +582,7 @@ type Rows struct {
func (rc *Rows) Close() error {
rc.cancel()
- for range rc.tokchan {
+ for _ = range rc.tokchan {
}
rc.tokchan = nil
return nil

View File

@@ -166,7 +166,7 @@ func writePrelogin(w *tdsBuffer, fields map[uint8][]byte) error {
w.BeginPacket(packPrelogin, false)
offset := uint16(5*len(fields) + 1)
keys := make(KeySlice, 0, len(fields))
- for k := range fields {
+ for k, _ := range fields {
keys = append(keys, k)
}
sort.Sort(keys)
@@ -1147,7 +1147,7 @@ func dialConnection(ctx context.Context, p connectParams) (conn net.Conn, err er
}
// Wait for either the *first* successful connection, or all the errors
wait_loop:
- for i := range ips {
+ for i, _ := range ips {
select {
case conn = <-connChan:
// Got a connection to use, close any others

View File

@@ -567,7 +567,7 @@ func (c *Client) Start() (addr net.Addr, err error) {
// so they don't block since it is an io.Pipe
defer func() {
go func() {
- for range linesCh {
+ for _ = range linesCh {
}
}()
}()

View File

@@ -75,7 +75,7 @@ func NewRPCClient(conn io.ReadWriteCloser, plugins map[string]Plugin) (*RPCClien
// Connect stdout, stderr streams
stdstream := make([]net.Conn, 2)
- for i := range stdstream {
+ for i, _ := range stdstream {
stdstream[i], err = mux.Open()
if err != nil {
mux.Close()

View File

@@ -78,7 +78,7 @@ func (s *RPCServer) ServeConn(conn io.ReadWriteCloser) {
// Connect the stdstreams (in, out, err)
stdstream := make([]net.Conn, 2)
- for i := range stdstream {
+ for i, _ := range stdstream {
stdstream[i], err = mux.Accept()
if err != nil {
mux.Close()

View File

@@ -85,7 +85,7 @@ func (dmp *DiffMatchPatch) diffMainRunes(text1, text2 []rune, checklines bool, d
// Restore the prefix and suffix.
if len(commonprefix) != 0 {
- diffs = append([]Diff{{DiffEqual, string(commonprefix)}}, diffs...)
+ diffs = append([]Diff{Diff{DiffEqual, string(commonprefix)}}, diffs...)
}
if len(commonsuffix) != 0 {
diffs = append(diffs, Diff{DiffEqual, string(commonsuffix)})
@@ -122,16 +122,16 @@ func (dmp *DiffMatchPatch) diffCompute(text1, text2 []rune, checklines bool, dea
}
// Shorter text is inside the longer text (speedup).
return []Diff{
- {op, string(longtext[:i])},
- {DiffEqual, string(shorttext)},
- {op, string(longtext[i+len(shorttext):])},
+ Diff{op, string(longtext[:i])},
+ Diff{DiffEqual, string(shorttext)},
+ Diff{op, string(longtext[i+len(shorttext):])},
}
} else if len(shorttext) == 1 {
// Single character string.
// After the previous speedup, the character can't be an equality.
return []Diff{
- {DiffDelete, string(text1)},
- {DiffInsert, string(text2)},
+ Diff{DiffDelete, string(text1)},
+ Diff{DiffInsert, string(text2)},
}
// Check to see if the problem can be split in two.
} else if hm := dmp.diffHalfMatch(text1, text2); hm != nil {
@@ -145,7 +145,7 @@ func (dmp *DiffMatchPatch) diffCompute(text1, text2 []rune, checklines bool, dea
diffsA := dmp.diffMainRunes(text1A, text2A, checklines, deadline)
diffsB := dmp.diffMainRunes(text1B, text2B, checklines, deadline)
// Merge the results.
- return append(diffsA, append([]Diff{{DiffEqual, string(midCommon)}}, diffsB...)...)
+ return append(diffsA, append([]Diff{Diff{DiffEqual, string(midCommon)}}, diffsB...)...)
} else if checklines && len(text1) > 100 && len(text2) > 100 {
return dmp.diffLineMode(text1, text2, deadline)
}
@@ -330,8 +330,8 @@ func (dmp *DiffMatchPatch) diffBisect(runes1, runes2 []rune, deadline time.Time)
}
// Diff took too long and hit the deadline or number of diffs equals number of characters, no commonality at all.
return []Diff{
- {DiffDelete, string(runes1)},
- {DiffInsert, string(runes2)},
+ Diff{DiffDelete, string(runes1)},
+ Diff{DiffInsert, string(runes2)},
}
}
@@ -673,7 +673,7 @@ func (dmp *DiffMatchPatch) DiffCleanupSemantic(diffs []Diff) []Diff {
insPoint := equalities.data
diffs = append(
diffs[:insPoint],
- append([]Diff{{DiffDelete, lastequality}}, diffs[insPoint:]...)...)
+ append([]Diff{Diff{DiffDelete, lastequality}}, diffs[insPoint:]...)...)
// Change second copy to insert.
diffs[insPoint+1].Type = DiffInsert
@@ -726,7 +726,7 @@ func (dmp *DiffMatchPatch) DiffCleanupSemantic(diffs []Diff) []Diff {
// Overlap found. Insert an equality and trim the surrounding edits.
diffs = append(
diffs[:pointer],
- append([]Diff{{DiffEqual, insertion[:overlapLength1]}}, diffs[pointer:]...)...)
+ append([]Diff{Diff{DiffEqual, insertion[:overlapLength1]}}, diffs[pointer:]...)...)
diffs[pointer-1].Text =
deletion[0 : len(deletion)-overlapLength1]
@@ -955,7 +955,7 @@ func (dmp *DiffMatchPatch) DiffCleanupEfficiency(diffs []Diff) []Diff {
// Duplicate record.
diffs = append(diffs[:insPoint],
- append([]Diff{{DiffDelete, lastequality}}, diffs[insPoint:]...)...)
+ append([]Diff{Diff{DiffDelete, lastequality}}, diffs[insPoint:]...)...)
// Change second copy to insert.
diffs[insPoint+1].Type = DiffInsert
@@ -1028,7 +1028,7 @@ func (dmp *DiffMatchPatch) DiffCleanupMerge(diffs []Diff) []Diff {
if x > 0 && diffs[x-1].Type == DiffEqual {
diffs[x-1].Text += string(textInsert[:commonlength])
} else {
- diffs = append([]Diff{{DiffEqual, string(textInsert[:commonlength])}}, diffs...)
+ diffs = append([]Diff{Diff{DiffEqual, string(textInsert[:commonlength])}}, diffs...)
pointer++
}
textInsert = textInsert[commonlength:]

View File

@@ -93,7 +93,7 @@ func (dmp *DiffMatchPatch) PatchAddContext(patch Patch, text string) Patch {
// Add the prefix.
prefix := text[max(0, patch.Start2-padding):patch.Start2]
if len(prefix) != 0 {
- patch.diffs = append([]Diff{{DiffEqual, prefix}}, patch.diffs...)
+ patch.diffs = append([]Diff{Diff{DiffEqual, prefix}}, patch.diffs...)
}
// Add the suffix.
suffix := text[patch.Start2+patch.Length1 : min(len(text), patch.Start2+patch.Length1+padding)]
@@ -336,7 +336,7 @@ func (dmp *DiffMatchPatch) PatchAddPadding(patches []Patch) string {
// Add some padding on start of first diff.
if len(patches[0].diffs) == 0 || patches[0].diffs[0].Type != DiffEqual {
// Add nullPadding equality.
- patches[0].diffs = append([]Diff{{DiffEqual, nullPadding}}, patches[0].diffs...)
+ patches[0].diffs = append([]Diff{Diff{DiffEqual, nullPadding}}, patches[0].diffs...)
patches[0].Start1 -= paddingLength // Should be 0.
patches[0].Start2 -= paddingLength // Should be 0.
patches[0].Length1 += paddingLength

View File

@@ -321,9 +321,7 @@ func (noCachedConnError) Error() string { return "http2: no cached c
// or its equivalent renamed type in net/http2's h2_bundle.go. Both types
// may coexist in the same running program.
func isNoCachedConnError(err error) bool {
- _, ok := err.(interface {
- IsHTTP2NoCachedConnError()
- })
+ _, ok := err.(interface{ IsHTTP2NoCachedConnError() })
return ok
}

View File

@@ -1050,7 +1050,7 @@ func (b *builder) writeRegion() {
m49Index := [9]int16{}
fromM49 := []uint16{}
m49 := []int{}
- for k := range fromM49map {
+ for k, _ := range fromM49map {
m49 = append(m49, int(k))
}
sort.Ints(m49)

View File

@@ -344,39 +344,39 @@ var (
// grandfatheredMap holds a mapping from legacy and grandfathered tags to
// their base language or index to more elaborate tag.
grandfatheredMap = map[[maxLen]byte]int16{
- {'a', 'r', 't', '-', 'l', 'o', 'j', 'b', 'a', 'n'}: _jbo, // art-lojban
- {'i', '-', 'a', 'm', 'i'}: _ami, // i-ami
- {'i', '-', 'b', 'n', 'n'}: _bnn, // i-bnn
- {'i', '-', 'h', 'a', 'k'}: _hak, // i-hak
- {'i', '-', 'k', 'l', 'i', 'n', 'g', 'o', 'n'}: _tlh, // i-klingon
- {'i', '-', 'l', 'u', 'x'}: _lb, // i-lux
- {'i', '-', 'n', 'a', 'v', 'a', 'j', 'o'}: _nv, // i-navajo
- {'i', '-', 'p', 'w', 'n'}: _pwn, // i-pwn
- {'i', '-', 't', 'a', 'o'}: _tao, // i-tao
- {'i', '-', 't', 'a', 'y'}: _tay, // i-tay
- {'i', '-', 't', 's', 'u'}: _tsu, // i-tsu
- {'n', 'o', '-', 'b', 'o', 'k'}: _nb, // no-bok
- {'n', 'o', '-', 'n', 'y', 'n'}: _nn, // no-nyn
- {'s', 'g', 'n', '-', 'b', 'e', '-', 'f', 'r'}: _sfb, // sgn-BE-FR
- {'s', 'g', 'n', '-', 'b', 'e', '-', 'n', 'l'}: _vgt, // sgn-BE-NL
- {'s', 'g', 'n', '-', 'c', 'h', '-', 'd', 'e'}: _sgg, // sgn-CH-DE
- {'z', 'h', '-', 'g', 'u', 'o', 'y', 'u'}: _cmn, // zh-guoyu
- {'z', 'h', '-', 'h', 'a', 'k', 'k', 'a'}: _hak, // zh-hakka
- {'z', 'h', '-', 'm', 'i', 'n', '-', 'n', 'a', 'n'}: _nan, // zh-min-nan
- {'z', 'h', '-', 'x', 'i', 'a', 'n', 'g'}: _hsn, // zh-xiang
+ [maxLen]byte{'a', 'r', 't', '-', 'l', 'o', 'j', 'b', 'a', 'n'}: _jbo, // art-lojban
+ [maxLen]byte{'i', '-', 'a', 'm', 'i'}: _ami, // i-ami
+ [maxLen]byte{'i', '-', 'b', 'n', 'n'}: _bnn, // i-bnn
+ [maxLen]byte{'i', '-', 'h', 'a', 'k'}: _hak, // i-hak
+ [maxLen]byte{'i', '-', 'k', 'l', 'i', 'n', 'g', 'o', 'n'}: _tlh, // i-klingon
+ [maxLen]byte{'i', '-', 'l', 'u', 'x'}: _lb, // i-lux
+ [maxLen]byte{'i', '-', 'n', 'a', 'v', 'a', 'j', 'o'}: _nv, // i-navajo
+ [maxLen]byte{'i', '-', 'p', 'w', 'n'}: _pwn, // i-pwn
+ [maxLen]byte{'i', '-', 't', 'a', 'o'}: _tao, // i-tao
+ [maxLen]byte{'i', '-', 't', 'a', 'y'}: _tay, // i-tay
+ [maxLen]byte{'i', '-', 't', 's', 'u'}: _tsu, // i-tsu
+ [maxLen]byte{'n', 'o', '-', 'b', 'o', 'k'}: _nb, // no-bok
+ [maxLen]byte{'n', 'o', '-', 'n', 'y', 'n'}: _nn, // no-nyn
+ [maxLen]byte{'s', 'g', 'n', '-', 'b', 'e', '-', 'f', 'r'}: _sfb, // sgn-BE-FR
+ [maxLen]byte{'s', 'g', 'n', '-', 'b', 'e', '-', 'n', 'l'}: _vgt, // sgn-BE-NL
+ [maxLen]byte{'s', 'g', 'n', '-', 'c', 'h', '-', 'd', 'e'}: _sgg, // sgn-CH-DE
+ [maxLen]byte{'z', 'h', '-', 'g', 'u', 'o', 'y', 'u'}: _cmn, // zh-guoyu
+ [maxLen]byte{'z', 'h', '-', 'h', 'a', 'k', 'k', 'a'}: _hak, // zh-hakka
+ [maxLen]byte{'z', 'h', '-', 'm', 'i', 'n', '-', 'n', 'a', 'n'}: _nan, // zh-min-nan
+ [maxLen]byte{'z', 'h', '-', 'x', 'i', 'a', 'n', 'g'}: _hsn, // zh-xiang
// Grandfathered tags with no modern replacement will be converted as
// follows:
- {'c', 'e', 'l', '-', 'g', 'a', 'u', 'l', 'i', 's', 'h'}: -1, // cel-gaulish
- {'e', 'n', '-', 'g', 'b', '-', 'o', 'e', 'd'}: -2, // en-GB-oed
- {'i', '-', 'd', 'e', 'f', 'a', 'u', 'l', 't'}: -3, // i-default
- {'i', '-', 'e', 'n', 'o', 'c', 'h', 'i', 'a', 'n'}: -4, // i-enochian
- {'i', '-', 'm', 'i', 'n', 'g', 'o'}: -5, // i-mingo
- {'z', 'h', '-', 'm', 'i', 'n'}: -6, // zh-min
+ [maxLen]byte{'c', 'e', 'l', '-', 'g', 'a', 'u', 'l', 'i', 's', 'h'}: -1, // cel-gaulish
+ [maxLen]byte{'e', 'n', '-', 'g', 'b', '-', 'o', 'e', 'd'}: -2, // en-GB-oed
+ [maxLen]byte{'i', '-', 'd', 'e', 'f', 'a', 'u', 'l', 't'}: -3, // i-default
+ [maxLen]byte{'i', '-', 'e', 'n', 'o', 'c', 'h', 'i', 'a', 'n'}: -4, // i-enochian
+ [maxLen]byte{'i', '-', 'm', 'i', 'n', 'g', 'o'}: -5, // i-mingo
+ [maxLen]byte{'z', 'h', '-', 'm', 'i', 'n'}: -6, // zh-min
// CLDR-specific tag.
- {'r', 'o', 'o', 't'}: 0, // root
- {'e', 'n', '-', 'u', 's', '-', 'p', 'o', 's', 'i', 'x'}: -7, // en_US_POSIX"
+ [maxLen]byte{'r', 'o', 'o', 't'}: 0, // root
+ [maxLen]byte{'e', 'n', '-', 'u', 's', '-', 'p', 'o', 's', 'i', 'x'}: -7, // en_US_POSIX"
}
altTagIndex = [...]uint8{0, 17, 31, 45, 61, 74, 86, 102}

View File

@@ -3348,9 +3348,9 @@ var regionToGroups = [358]uint8{
// Size: 18 bytes, 3 elements
var paradigmLocales = [3][3]uint16{
- 0: {0x139, 0x0, 0x7b},
- 1: {0x13e, 0x0, 0x1f},
- 2: {0x3c0, 0x41, 0xee},
+ 0: [3]uint16{0x139, 0x0, 0x7b},
+ 1: [3]uint16{0x13e, 0x0, 0x1f},
+ 2: [3]uint16{0x3c0, 0x41, 0xee},
}
type mutualIntelligibility struct {

View File

@@ -110,7 +110,7 @@ func (cldr *CLDR) Supplemental() *SupplementalData {
func (cldr *CLDR) Locales() []string {
loc := []string{"root"}
hasRoot := false
- for l := range cldr.locale {
+ for l, _ := range cldr.locale {
if l == "root" {
hasRoot = true
continue

View File

@@ -289,7 +289,7 @@ var distinguishing = map[string][]string{
"mzone": nil,
"from": nil,
"to": nil,
- "type": {
+ "type": []string{
"abbreviationFallback",
"default",
"mapping",
@@ -527,7 +527,7 @@ func (cldr *CLDR) inheritSlice(enc, v, parent reflect.Value) (res reflect.Value,
}
}
keys := make([]string, 0, len(index))
- for k := range index {
+ for k, _ := range index {
keys = append(keys, k)
}
sort.Strings(keys)

View File

@@ -83,7 +83,7 @@ func (s Slice) Group(fn func(e Elem) string) []Slice {
m[key] = append(m[key], vi)
}
keys := []string{}
- for k := range m {
+ for k, _ := range m {
keys = append(keys, k)
}
sort.Strings(keys)

View File

@@ -241,7 +241,7 @@ func compactCCC() {
m[c.ccc] = 0
}
cccs := []int{}
- for v := range m {
+ for v, _ := range m {
cccs = append(cccs, int(v))
}
sort.Ints(cccs)

View File

@@ -270,7 +270,7 @@ func (ctx *Context) SetParams(name, val string) {
// ReplaceAllParams replace all current params with given params
func (ctx *Context) ReplaceAllParams(params Params) {
- ctx.params = params
+ ctx.params = params;
}
// ParamsEscape returns escapred params result.