[dev.boringcrypto.go1.18] all: merge go1.18.7 into dev.boringcrypto.go1.18

Change-Id: Ic05b278d920e50a7e04a98c657f4fe1cf620deff
Author: Carlos Amedee
Date: 2022-10-04 20:05:08 +00:00
Commit: e3d1f0e7da
26 changed files with 564 additions and 71 deletions


@ -143,6 +143,10 @@ const (
blockSize = 512 // Size of each block in a tar stream
nameSize = 100 // Max length of the name field in USTAR format
prefixSize = 155 // Max length of the prefix field in USTAR format
// Max length of a special file (PAX header, GNU long name or link).
// This matches the limit used by libarchive.
maxSpecialFileSize = 1 << 20
)
// blockPadding computes the number of bytes needed to pad offset up to the


@ -103,7 +103,7 @@ func (tr *Reader) next() (*Header, error) {
continue // This is a meta header affecting the next header
case TypeGNULongName, TypeGNULongLink:
format.mayOnlyBe(FormatGNU)
realname, err := io.ReadAll(tr)
realname, err := readSpecialFile(tr)
if err != nil {
return nil, err
}
@ -293,7 +293,7 @@ func mergePAX(hdr *Header, paxHdrs map[string]string) (err error) {
// parsePAX parses PAX headers.
// If an extended header (type 'x') is invalid, ErrHeader is returned
func parsePAX(r io.Reader) (map[string]string, error) {
buf, err := io.ReadAll(r)
buf, err := readSpecialFile(r)
if err != nil {
return nil, err
}
@ -828,6 +828,16 @@ func tryReadFull(r io.Reader, b []byte) (n int, err error) {
return n, err
}
// readSpecialFile is like io.ReadAll except it returns
// ErrFieldTooLong if more than maxSpecialFileSize is read.
func readSpecialFile(r io.Reader) ([]byte, error) {
buf, err := io.ReadAll(io.LimitReader(r, maxSpecialFileSize+1))
if len(buf) > maxSpecialFileSize {
return nil, ErrFieldTooLong
}
return buf, err
}
// discard skips n bytes in r, reporting an error if unable to do so.
func discard(r io.Reader, n int64) error {
// If possible, Seek to the last byte before the end of the data section.
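
The new readSpecialFile helper is the core of the reader-side fix: PAX extended headers and GNU long name/link records were previously slurped with io.ReadAll, so a crafted archive could make the reader buffer an unbounded amount of metadata before any field validation ran. A minimal sketch of the same bounded-read pattern, using illustrative names (capBytes, errTooLong) rather than the package's internal maxSpecialFileSize and ErrFieldTooLong:

package main

import (
	"errors"
	"fmt"
	"io"
	"strings"
)

// capBytes stands in for maxSpecialFileSize (1 << 20 in the patch above) and
// errTooLong for ErrFieldTooLong; both names are illustrative only.
const capBytes = 1 << 20

var errTooLong = errors.New("header field too long")

// readCapped reads at most capBytes bytes. Reading through io.LimitReader with
// a budget of capBytes+1 is what lets it tell "exactly at the limit" apart from
// "over the limit" while buffering at most one byte past the cap.
func readCapped(r io.Reader) ([]byte, error) {
	buf, err := io.ReadAll(io.LimitReader(r, capBytes+1))
	if len(buf) > capBytes {
		return nil, errTooLong
	}
	return buf, err
}

func main() {
	_, err := readCapped(strings.NewReader(strings.Repeat("a", capBytes+1)))
	fmt.Println(err) // header field too long
}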


@ -6,6 +6,7 @@ package tar
import (
"bytes"
"compress/bzip2"
"crypto/md5"
"errors"
"fmt"
@ -243,6 +244,9 @@ func TestReader(t *testing.T) {
}, {
file: "testdata/pax-bad-hdr-file.tar",
err: ErrHeader,
}, {
file: "testdata/pax-bad-hdr-large.tar.bz2",
err: ErrFieldTooLong,
}, {
file: "testdata/pax-bad-mtime-file.tar",
err: ErrHeader,
@ -625,9 +629,14 @@ func TestReader(t *testing.T) {
}
defer f.Close()
var fr io.Reader = f
if strings.HasSuffix(v.file, ".bz2") {
fr = bzip2.NewReader(fr)
}
// Capture all headers and checksums.
var (
tr = NewReader(f)
tr = NewReader(fr)
hdrs []*Header
chksums []string
rdbuf = make([]byte, 8)

Binary file not shown (the new testdata archive exercised by the ErrFieldTooLong test case added above).


@ -199,6 +199,9 @@ func (tw *Writer) writePAXHeader(hdr *Header, paxHdrs map[string]string) error {
flag = TypeXHeader
}
data := buf.String()
if len(data) > maxSpecialFileSize {
return ErrFieldTooLong
}
if err := tw.writeRawFile(name, data, flag, FormatPAX); err != nil || isGlobal {
return err // Global headers return here
}
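
The writer enforces the same 1 MiB limit before emitting a PAX record, so an oversized field fails fast instead of producing an archive that the patched reader would reject. A small caller-level sketch, assuming the patched archive/tar (TestWriteLongHeader in the next file checks exactly this for each long field):

package main

import (
	"archive/tar"
	"fmt"
	"io"
	"strings"
)

func main() {
	w := tar.NewWriter(io.Discard)
	// A 1 MiB name can only be encoded as a PAX path record, and that record
	// would exceed the new size limit, so WriteHeader reports ErrFieldTooLong
	// up front rather than writing an oversized special file.
	err := w.WriteHeader(&tar.Header{Name: strings.Repeat("a", 1<<20)})
	fmt.Println(err == tar.ErrFieldTooLong) // true with the patched writer
}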


@ -1004,6 +1004,33 @@ func TestIssue12594(t *testing.T) {
}
}
func TestWriteLongHeader(t *testing.T) {
for _, test := range []struct {
name string
h *Header
}{{
name: "name too long",
h: &Header{Name: strings.Repeat("a", maxSpecialFileSize)},
}, {
name: "linkname too long",
h: &Header{Linkname: strings.Repeat("a", maxSpecialFileSize)},
}, {
name: "uname too long",
h: &Header{Uname: strings.Repeat("a", maxSpecialFileSize)},
}, {
name: "gname too long",
h: &Header{Gname: strings.Repeat("a", maxSpecialFileSize)},
}, {
name: "PAX header too long",
h: &Header{PAXRecords: map[string]string{"GOLANG.x": strings.Repeat("a", maxSpecialFileSize)}},
}} {
w := NewWriter(io.Discard)
if err := w.WriteHeader(test.h); err != ErrFieldTooLong {
t.Errorf("%v: w.WriteHeader() = %v, want ErrFieldTooLong", test.name, err)
}
}
}
// testNonEmptyWriter wraps an io.Writer and ensures that
// Write is never called with an empty buffer.
type testNonEmptyWriter struct{ io.Writer }


@ -771,18 +771,18 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
// CalleeEffects appends any side effects from evaluating callee to init.
func CalleeEffects(init *ir.Nodes, callee ir.Node) {
for {
init.Append(ir.TakeInit(callee)...)
switch callee.Op() {
case ir.ONAME, ir.OCLOSURE, ir.OMETHEXPR:
return // done
case ir.OCONVNOP:
conv := callee.(*ir.ConvExpr)
init.Append(ir.TakeInit(conv)...)
callee = conv.X
case ir.OINLCALL:
ic := callee.(*ir.InlinedCallExpr)
init.Append(ir.TakeInit(ic)...)
init.Append(ic.Body.Take()...)
callee = ic.SingleResult()


@ -2082,7 +2082,13 @@
// Inline small or disjoint runtime.memmove calls with constant length.
// See the comment in op Move in genericOps.go for discussion of the type.
//
// Note that we've lost any knowledge of the type and alignment requirements
// of the source and destination. We only know the size, and that the type
// contains no pointers.
// The type of the move is not necessarily v.Args[0].Type().Elem()!
// See issue 55122 for details.
//
// Because expand calls runs after prove, constants useful to this pattern may not appear.
// Both versions need to exist; the memory and register variants.
//
@ -2090,31 +2096,28 @@
(SelectN [0] call:(StaticCall {sym} s1:(Store _ (Const(64|32) [sz]) s2:(Store _ src s3:(Store {t} _ dst mem)))))
&& sz >= 0
&& isSameCall(sym, "runtime.memmove")
&& t.IsPtr() // avoids TUNSAFEPTR, see issue 30061
&& s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1
&& isInlinableMemmove(dst, src, int64(sz), config)
&& clobber(s1, s2, s3, call)
=> (Move {t.Elem()} [int64(sz)] dst src mem)
=> (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
// Match post-expansion calls, register version.
(SelectN [0] call:(StaticCall {sym} dst src (Const(64|32) [sz]) mem))
&& sz >= 0
&& call.Uses == 1 // this will exclude all calls with results
&& isSameCall(sym, "runtime.memmove")
&& dst.Type.IsPtr() // avoids TUNSAFEPTR, see issue 30061
&& isInlinableMemmove(dst, src, int64(sz), config)
&& clobber(call)
=> (Move {dst.Type.Elem()} [int64(sz)] dst src mem)
=> (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
// Match pre-expansion calls.
(SelectN [0] call:(StaticLECall {sym} dst src (Const(64|32) [sz]) mem))
&& sz >= 0
&& call.Uses == 1 // this will exclude all calls with results
&& isSameCall(sym, "runtime.memmove")
&& dst.Type.IsPtr() // avoids TUNSAFEPTR, see issue 30061
&& isInlinableMemmove(dst, src, int64(sz), config)
&& clobber(call)
=> (Move {dst.Type.Elem()} [int64(sz)] dst src mem)
=> (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
// De-virtualize late-expanded interface calls into late-expanded static calls.
// Note that (ITab (IMake)) doesn't get rewritten until after the first opt pass,
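
These rules previously typed the resulting Move with the destination pointer's element type; as the new comment explains, that element type is not necessarily the type of the memory being moved (issue 55122), so the Move is now typed as plain bytes, which is sound because an inlinable memmove is known to contain no pointers. The two "run" tests added at the end of this commit reproduce the original miscompile; a condensed sketch of the code shape involved (the full tests add allocation churn to make the misbehavior observable):

package main

//go:noinline
func zero(p interface{}) {
	// The constant-size assignments below are candidates for the
	// memmove-to-Move rewrite; issue 55122 showed that the element type of
	// the destination pointer cannot be trusted to describe the moved memory.
	switch e := p.(type) {
	case *[32]byte:
		*e = [32]byte{}
	case *[3]*byte:
		*e = [3]*byte{}
	}
}

func main() {
	var x [32]byte
	for i := range x {
		x[i] = 99
	}
	zero(&x)
	if x != ([32]byte{}) {
		panic("x was not zeroed")
	}
}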


@ -21053,8 +21053,8 @@ func rewriteValuegeneric_OpSelectN(v *Value) bool {
return true
}
// match: (SelectN [0] call:(StaticCall {sym} s1:(Store _ (Const64 [sz]) s2:(Store _ src s3:(Store {t} _ dst mem)))))
// cond: sz >= 0 && isSameCall(sym, "runtime.memmove") && t.IsPtr() && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3, call)
// result: (Move {t.Elem()} [int64(sz)] dst src mem)
// cond: sz >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3, call)
// result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
for {
if auxIntToInt64(v.AuxInt) != 0 {
break
@ -21084,21 +21084,20 @@ func rewriteValuegeneric_OpSelectN(v *Value) bool {
if s3.Op != OpStore {
break
}
t := auxToType(s3.Aux)
mem := s3.Args[2]
dst := s3.Args[1]
if !(sz >= 0 && isSameCall(sym, "runtime.memmove") && t.IsPtr() && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3, call)) {
if !(sz >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3, call)) {
break
}
v.reset(OpMove)
v.AuxInt = int64ToAuxInt(int64(sz))
v.Aux = typeToAux(t.Elem())
v.Aux = typeToAux(types.Types[types.TUINT8])
v.AddArg3(dst, src, mem)
return true
}
// match: (SelectN [0] call:(StaticCall {sym} s1:(Store _ (Const32 [sz]) s2:(Store _ src s3:(Store {t} _ dst mem)))))
// cond: sz >= 0 && isSameCall(sym, "runtime.memmove") && t.IsPtr() && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3, call)
// result: (Move {t.Elem()} [int64(sz)] dst src mem)
// cond: sz >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3, call)
// result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
for {
if auxIntToInt64(v.AuxInt) != 0 {
break
@ -21128,21 +21127,20 @@ func rewriteValuegeneric_OpSelectN(v *Value) bool {
if s3.Op != OpStore {
break
}
t := auxToType(s3.Aux)
mem := s3.Args[2]
dst := s3.Args[1]
if !(sz >= 0 && isSameCall(sym, "runtime.memmove") && t.IsPtr() && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3, call)) {
if !(sz >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3, call)) {
break
}
v.reset(OpMove)
v.AuxInt = int64ToAuxInt(int64(sz))
v.Aux = typeToAux(t.Elem())
v.Aux = typeToAux(types.Types[types.TUINT8])
v.AddArg3(dst, src, mem)
return true
}
// match: (SelectN [0] call:(StaticCall {sym} dst src (Const64 [sz]) mem))
// cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && dst.Type.IsPtr() && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
// result: (Move {dst.Type.Elem()} [int64(sz)] dst src mem)
// cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
// result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
for {
if auxIntToInt64(v.AuxInt) != 0 {
break
@ -21160,18 +21158,18 @@ func rewriteValuegeneric_OpSelectN(v *Value) bool {
break
}
sz := auxIntToInt64(call_2.AuxInt)
if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && dst.Type.IsPtr() && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
break
}
v.reset(OpMove)
v.AuxInt = int64ToAuxInt(int64(sz))
v.Aux = typeToAux(dst.Type.Elem())
v.Aux = typeToAux(types.Types[types.TUINT8])
v.AddArg3(dst, src, mem)
return true
}
// match: (SelectN [0] call:(StaticCall {sym} dst src (Const32 [sz]) mem))
// cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && dst.Type.IsPtr() && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
// result: (Move {dst.Type.Elem()} [int64(sz)] dst src mem)
// cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
// result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
for {
if auxIntToInt64(v.AuxInt) != 0 {
break
@ -21189,18 +21187,18 @@ func rewriteValuegeneric_OpSelectN(v *Value) bool {
break
}
sz := auxIntToInt32(call_2.AuxInt)
if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && dst.Type.IsPtr() && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
break
}
v.reset(OpMove)
v.AuxInt = int64ToAuxInt(int64(sz))
v.Aux = typeToAux(dst.Type.Elem())
v.Aux = typeToAux(types.Types[types.TUINT8])
v.AddArg3(dst, src, mem)
return true
}
// match: (SelectN [0] call:(StaticLECall {sym} dst src (Const64 [sz]) mem))
// cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && dst.Type.IsPtr() && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
// result: (Move {dst.Type.Elem()} [int64(sz)] dst src mem)
// cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
// result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
for {
if auxIntToInt64(v.AuxInt) != 0 {
break
@ -21218,18 +21216,18 @@ func rewriteValuegeneric_OpSelectN(v *Value) bool {
break
}
sz := auxIntToInt64(call_2.AuxInt)
if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && dst.Type.IsPtr() && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
break
}
v.reset(OpMove)
v.AuxInt = int64ToAuxInt(int64(sz))
v.Aux = typeToAux(dst.Type.Elem())
v.Aux = typeToAux(types.Types[types.TUINT8])
v.AddArg3(dst, src, mem)
return true
}
// match: (SelectN [0] call:(StaticLECall {sym} dst src (Const32 [sz]) mem))
// cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && dst.Type.IsPtr() && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
// result: (Move {dst.Type.Elem()} [int64(sz)] dst src mem)
// cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
// result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
for {
if auxIntToInt64(v.AuxInt) != 0 {
break
@ -21247,12 +21245,12 @@ func rewriteValuegeneric_OpSelectN(v *Value) bool {
break
}
sz := auxIntToInt32(call_2.AuxInt)
if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && dst.Type.IsPtr() && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
break
}
v.reset(OpMove)
v.AuxInt = int64ToAuxInt(int64(sz))
v.Aux = typeToAux(dst.Type.Elem())
v.Aux = typeToAux(types.Types[types.TUINT8])
v.AddArg3(dst, src, mem)
return true
}


@ -637,3 +637,40 @@ func TestIssue50646(t *testing.T) {
t.Errorf("comparable not assignable to any")
}
}
func TestIssue55030(t *testing.T) {
// makeSig makes the signature func(typ...)
makeSig := func(typ Type) {
par := NewVar(nopos, nil, "", typ)
params := NewTuple(par)
NewSignatureType(nil, nil, nil, params, nil, true)
}
// makeSig must not panic for the following (example) types:
// []int
makeSig(NewSlice(Typ[Int]))
// string
makeSig(Typ[String])
// P where P's core type is string
{
P := NewTypeName(nopos, nil, "P", nil) // [P string]
makeSig(NewTypeParam(P, NewInterfaceType(nil, []Type{Typ[String]})))
}
// P where P's core type is an (unnamed) slice
{
P := NewTypeName(nopos, nil, "P", nil) // [P []int]
makeSig(NewTypeParam(P, NewInterfaceType(nil, []Type{NewSlice(Typ[Int])})))
}
// P where P's core type is bytestring (i.e., string or []byte)
{
t1 := NewTerm(true, Typ[String]) // ~string
t2 := NewTerm(false, NewSlice(Typ[Byte])) // []byte
u := NewUnion([]*Term{t1, t2}) // ~string | []byte
P := NewTypeName(nopos, nil, "P", nil) // [P ~string | []byte]
makeSig(NewTypeParam(P, NewInterfaceType(nil, []Type{u})))
}
}


@ -4,7 +4,10 @@
package types2
import "cmd/compile/internal/syntax"
import (
"cmd/compile/internal/syntax"
"fmt"
)
// ----------------------------------------------------------------------------
// API
@ -28,16 +31,18 @@ type Signature struct {
// NewSignatureType creates a new function type for the given receiver,
// receiver type parameters, type parameters, parameters, and results. If
// variadic is set, params must hold at least one parameter and the last
// parameter must be of unnamed slice type. If recv is non-nil, typeParams must
// be empty. If recvTypeParams is non-empty, recv must be non-nil.
// parameter's core type must be of unnamed slice or bytestring type.
// If recv is non-nil, typeParams must be empty. If recvTypeParams is
// non-empty, recv must be non-nil.
func NewSignatureType(recv *Var, recvTypeParams, typeParams []*TypeParam, params, results *Tuple, variadic bool) *Signature {
if variadic {
n := params.Len()
if n == 0 {
panic("variadic function must have at least one parameter")
}
if _, ok := params.At(n - 1).typ.(*Slice); !ok {
panic("variadic parameter must be of unnamed slice type")
core := coreString(params.At(n - 1).typ)
if _, ok := core.(*Slice); !ok && !isString(core) {
panic(fmt.Sprintf("got %s, want variadic parameter with unnamed slice type or string as core type", core.String()))
}
}
sig := &Signature{recv: recv, params: params, results: results, variadic: variadic}
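
NewSignatureType used to panic unless the last parameter of a variadic signature had an unnamed slice type. With core types that is too strict: the checker also builds signatures whose variadic parameter has core type string or bytestring, which is what TestIssue55030 above constructs directly, and which arises for example from the built-in append's string-to-[]byte special form. A hedged source-level sketch of the kind of code that reaches this path, assuming that special form extended to a type parameter whose core type is bytestring:

package main

import "fmt"

// concat appends tail to b. B's type set contains only string and []byte
// types, so append's "append a string to a []byte" special form applies;
// checking this call builds a variadic signature whose last parameter has
// core type bytestring, the case NewSignatureType now accepts.
func concat[B ~string | ~[]byte](b []byte, tail B) []byte {
	return append(b, tail...)
}

func main() {
	fmt.Println(string(concat([]byte("go1."), "18.7")))
}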


@ -34,8 +34,6 @@ import (
// See issues 36852, 41409, and 43687.
// (Also see golang.org/issue/27348.)
func TestAllDependencies(t *testing.T) {
t.Skip("TODO(#53977): 1.18.5 contains unreleased changes from vendored modules")
goBin := testenv.GoToolPath(t)
// Ensure that all packages imported within GOROOT


@ -1325,7 +1325,6 @@ func (ctxt *Link) hostlink() {
if ctxt.HeadType == objabi.Hdarwin {
if machoPlatform == PLATFORM_MACOS && ctxt.IsAMD64() {
argv = append(argv, "-Wl,-no_pie")
argv = append(argv, "-Wl,-pagezero_size,4000000")
}
}
case BuildModePIE:
@ -1661,6 +1660,13 @@ func (ctxt *Link) hostlink() {
if len(out) > 0 {
// always print external output even if the command is successful, so that we don't
// swallow linker warnings (see https://golang.org/issue/17935).
if ctxt.IsDarwin() && ctxt.IsAMD64() {
const noPieWarning = "ld: warning: -no_pie is deprecated when targeting new OS versions\n"
if i := bytes.Index(out, []byte(noPieWarning)); i >= 0 {
// swallow -no_pie deprecation warning, issue 54482
out = append(out[:i], out[i+len(noPieWarning):]...)
}
}
ctxt.Logf("%s", out)
}


@ -4,7 +4,7 @@ go 1.18
require (
golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3
golang.org/x/net v0.0.0-20211209124913-491a49abca63
golang.org/x/net v0.0.0-20220907013725-0a43f88f7ef0
)
require (


@ -1,7 +1,7 @@
golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3 h1:0es+/5331RGQPcXlMfP+WrnIIS6dNnNRe0WB02W0F4M=
golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/net v0.0.0-20211209124913-491a49abca63 h1:iocB37TsdFuN6IBRZ+ry36wrkoV51/tl5vOWqkcPGvY=
golang.org/x/net v0.0.0-20211209124913-491a49abca63/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220907013725-0a43f88f7ef0 h1:XXaSUSplyi6wsRNJGB7vUBvDjbxc8UPYBsf9ukBQ3KA=
golang.org/x/net v0.0.0-20220907013725-0a43f88f7ef0/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5 h1:y/woIyUBFbpQGKS0u1aHF/40WUDnek3fPOyD08H5Vng=
golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/text v0.3.8-0.20211105212822-18b340fc7af2 h1:GLw7MR8AfAG2GmGcmVgObFOHXYypgGjnGno25RDwn3Y=


@ -664,3 +664,40 @@ func TestIssue50646(t *testing.T) {
t.Errorf("comparable not assignable to any")
}
}
func TestIssue55030(t *testing.T) {
// makeSig makes the signature func(typ...)
makeSig := func(typ Type) {
par := NewVar(token.NoPos, nil, "", typ)
params := NewTuple(par)
NewSignatureType(nil, nil, nil, params, nil, true)
}
// makeSig must not panic for the following (example) types:
// []int
makeSig(NewSlice(Typ[Int]))
// string
makeSig(Typ[String])
// P where P's core type is string
{
P := NewTypeName(token.NoPos, nil, "P", nil) // [P string]
makeSig(NewTypeParam(P, NewInterfaceType(nil, []Type{Typ[String]})))
}
// P where P's core type is an (unnamed) slice
{
P := NewTypeName(token.NoPos, nil, "P", nil) // [P []int]
makeSig(NewTypeParam(P, NewInterfaceType(nil, []Type{NewSlice(Typ[Int])})))
}
// P where P's core type is bytestring (i.e., string or []byte)
{
t1 := NewTerm(true, Typ[String]) // ~string
t2 := NewTerm(false, NewSlice(Typ[Byte])) // []byte
u := NewUnion([]*Term{t1, t2}) // ~string | []byte
P := NewTypeName(token.NoPos, nil, "P", nil) // [P ~string | []byte]
makeSig(NewTypeParam(P, NewInterfaceType(nil, []Type{u})))
}
}


@ -5,6 +5,7 @@
package types
import (
"fmt"
"go/ast"
"go/token"
)
@ -41,16 +42,18 @@ func NewSignature(recv *Var, params, results *Tuple, variadic bool) *Signature {
// NewSignatureType creates a new function type for the given receiver,
// receiver type parameters, type parameters, parameters, and results. If
// variadic is set, params must hold at least one parameter and the last
// parameter must be of unnamed slice type. If recv is non-nil, typeParams must
// be empty. If recvTypeParams is non-empty, recv must be non-nil.
// parameter's core type must be of unnamed slice or bytestring type.
// If recv is non-nil, typeParams must be empty. If recvTypeParams is
// non-empty, recv must be non-nil.
func NewSignatureType(recv *Var, recvTypeParams, typeParams []*TypeParam, params, results *Tuple, variadic bool) *Signature {
if variadic {
n := params.Len()
if n == 0 {
panic("variadic function must have at least one parameter")
}
if _, ok := params.At(n - 1).typ.(*Slice); !ok {
panic("variadic parameter must be of unnamed slice type")
core := coreString(params.At(n - 1).typ)
if _, ok := core.(*Slice); !ok && !isString(core) {
panic(fmt.Sprintf("got %s, want variadic parameter with unnamed slice type or string as core type", core.String()))
}
}
sig := &Signature{recv: recv, params: params, results: results, variadic: variadic}


@ -3384,11 +3384,10 @@ func (s http2SettingID) String() string {
// name (key). See httpguts.ValidHeaderName for the base rules.
//
// Further, http2 says:
//
// "Just as in HTTP/1.x, header field names are strings of ASCII
// characters that are compared in a case-insensitive
// fashion. However, header field names MUST be converted to
// lowercase prior to their encoding in HTTP/2. "
// "Just as in HTTP/1.x, header field names are strings of ASCII
// characters that are compared in a case-insensitive
// fashion. However, header field names MUST be converted to
// lowercase prior to their encoding in HTTP/2. "
func http2validWireHeaderFieldName(v string) bool {
if len(v) == 0 {
return false
@ -3579,8 +3578,8 @@ func (s *http2sorter) SortStrings(ss []string) {
// validPseudoPath reports whether v is a valid :path pseudo-header
// value. It must be either:
//
// *) a non-empty string starting with '/'
// *) the string '*', for OPTIONS requests.
// *) a non-empty string starting with '/'
// *) the string '*', for OPTIONS requests.
//
// For now this is only used a quick check for deciding when to clean
// up Opaque URLs before sending requests from the Transport.


@ -250,6 +250,9 @@ func (p *ReverseProxy) ServeHTTP(rw http.ResponseWriter, req *http.Request) {
}
p.Director(outreq)
if outreq.Form != nil {
outreq.URL.RawQuery = cleanQueryParams(outreq.URL.RawQuery)
}
outreq.Close = false
reqUpType := upgradeType(outreq.Header)
@ -629,3 +632,36 @@ func (c switchProtocolCopier) copyToBackend(errc chan<- error) {
_, err := io.Copy(c.backend, c.user)
errc <- err
}
func cleanQueryParams(s string) string {
reencode := func(s string) string {
v, _ := url.ParseQuery(s)
return v.Encode()
}
for i := 0; i < len(s); {
switch s[i] {
case ';':
return reencode(s)
case '%':
if i+2 >= len(s) || !ishex(s[i+1]) || !ishex(s[i+2]) {
return reencode(s)
}
i += 3
default:
i++
}
}
return s
}
func ishex(c byte) bool {
switch {
case '0' <= c && c <= '9':
return true
case 'a' <= c && c <= 'f':
return true
case 'A' <= c && c <= 'F':
return true
}
return false
}
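
This addresses query-parameter smuggling: when the Director (or anything else) has parsed the form, ReverseProxy now re-encodes the query so that only cleanly parsed parameters are forwarded to the backend. The re-encoding is just url.ParseQuery followed by Values.Encode, which drops semicolon-separated and badly escaped parameters; a tiny illustration that matches the cleanQuery expectations in the test file that follows:

package main

import (
	"fmt"
	"net/url"
)

func main() {
	// ParseQuery reports an error for the semicolon-separated piece and keeps
	// only the parameters it could parse; Encode rebuilds the query from the
	// survivors, which is the "clean" form the proxy forwards.
	v, err := url.ParseQuery("a=1&a=2;b=3")
	fmt.Println(v.Encode(), err != nil) // a=1 true
}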


@ -1537,3 +1537,77 @@ func TestJoinURLPath(t *testing.T) {
}
}
}
const (
testWantsCleanQuery = true
testWantsRawQuery = false
)
func TestReverseProxyQueryParameterSmugglingDirectorDoesNotParseForm(t *testing.T) {
testReverseProxyQueryParameterSmuggling(t, testWantsRawQuery, func(u *url.URL) *ReverseProxy {
proxyHandler := NewSingleHostReverseProxy(u)
oldDirector := proxyHandler.Director
proxyHandler.Director = func(r *http.Request) {
oldDirector(r)
}
return proxyHandler
})
}
func TestReverseProxyQueryParameterSmugglingDirectorParsesForm(t *testing.T) {
testReverseProxyQueryParameterSmuggling(t, testWantsCleanQuery, func(u *url.URL) *ReverseProxy {
proxyHandler := NewSingleHostReverseProxy(u)
oldDirector := proxyHandler.Director
proxyHandler.Director = func(r *http.Request) {
// Parsing the form causes ReverseProxy to remove unparsable
// query parameters before forwarding.
r.FormValue("a")
oldDirector(r)
}
return proxyHandler
})
}
func testReverseProxyQueryParameterSmuggling(t *testing.T, wantCleanQuery bool, newProxy func(*url.URL) *ReverseProxy) {
const content = "response_content"
backend := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Write([]byte(r.URL.RawQuery))
}))
defer backend.Close()
backendURL, err := url.Parse(backend.URL)
if err != nil {
t.Fatal(err)
}
proxyHandler := newProxy(backendURL)
frontend := httptest.NewServer(proxyHandler)
defer frontend.Close()
// Don't spam output with logs of queries containing semicolons.
backend.Config.ErrorLog = log.New(io.Discard, "", 0)
frontend.Config.ErrorLog = log.New(io.Discard, "", 0)
for _, test := range []struct {
rawQuery string
cleanQuery string
}{{
rawQuery: "a=1&a=2;b=3",
cleanQuery: "a=1",
}, {
rawQuery: "a=1&a=%zz&b=3",
cleanQuery: "a=1&b=3",
}} {
res, err := frontend.Client().Get(frontend.URL + "?" + test.rawQuery)
if err != nil {
t.Fatalf("Get: %v", err)
}
defer res.Body.Close()
body, _ := io.ReadAll(res.Body)
wantQuery := test.rawQuery
if wantCleanQuery {
wantQuery = test.cleanQuery
}
if got, want := string(body), wantQuery; got != want {
t.Errorf("proxy forwarded raw query %q as %q, want %q", test.rawQuery, got, want)
}
}
}


@ -90,15 +90,49 @@ const (
// until we've allocated at least maxHeight Regexp structures.
const maxHeight = 1000
// maxSize is the maximum size of a compiled regexp in Insts.
// It too is somewhat arbitrarily chosen, but the idea is to be large enough
// to allow significant regexps while at the same time small enough that
// the compiled form will not take up too much memory.
// 128 MB is enough for a 3.3 million Inst structures, which roughly
// corresponds to a 3.3 MB regexp.
const (
maxSize = 128 << 20 / instSize
instSize = 5 * 8 // byte, 2 uint32, slice is 5 64-bit words
)
// maxRunes is the maximum number of runes allowed in a regexp tree
// counting the runes in all the nodes.
// Ignoring character classes p.numRunes is always less than the length of the regexp.
// Character classes can make it much larger: each \pL adds 1292 runes.
// 128 MB is enough for 32M runes, which is over 26k \pL instances.
// Note that repetitions do not make copies of the rune slices,
// so \pL{1000} is only one rune slice, not 1000.
// We could keep a cache of character classes we've seen,
// so that all the \pL we see use the same rune list,
// but that doesn't remove the problem entirely:
// consider something like [\pL01234][\pL01235][\pL01236]...[\pL^&*()].
// And because the Rune slice is exposed directly in the Regexp,
// there is not an opportunity to change the representation to allow
// partial sharing between different character classes.
// So the limit is the best we can do.
const (
maxRunes = 128 << 20 / runeSize
runeSize = 4 // rune is int32
)
type parser struct {
flags Flags // parse mode flags
stack []*Regexp // stack of parsed expressions
free *Regexp
numCap int // number of capturing groups seen
wholeRegexp string
tmpClass []rune // temporary char class work space
numRegexp int // number of regexps allocated
height map[*Regexp]int // regexp height for height limit check
tmpClass []rune // temporary char class work space
numRegexp int // number of regexps allocated
numRunes int // number of runes in char classes
repeats int64 // product of all repetitions seen
height map[*Regexp]int // regexp height, for height limit check
size map[*Regexp]int64 // regexp compiled size, for size limit check
}
func (p *parser) newRegexp(op Op) *Regexp {
@ -122,6 +156,104 @@ func (p *parser) reuse(re *Regexp) {
p.free = re
}
func (p *parser) checkLimits(re *Regexp) {
if p.numRunes > maxRunes {
panic(ErrInternalError)
}
p.checkSize(re)
p.checkHeight(re)
}
func (p *parser) checkSize(re *Regexp) {
if p.size == nil {
// We haven't started tracking size yet.
// Do a relatively cheap check to see if we need to start.
// Maintain the product of all the repeats we've seen
// and don't track if the total number of regexp nodes
// we've seen times the repeat product is in budget.
if p.repeats == 0 {
p.repeats = 1
}
if re.Op == OpRepeat {
n := re.Max
if n == -1 {
n = re.Min
}
if n <= 0 {
n = 1
}
if int64(n) > maxSize/p.repeats {
p.repeats = maxSize
} else {
p.repeats *= int64(n)
}
}
if int64(p.numRegexp) < maxSize/p.repeats {
return
}
// We need to start tracking size.
// Make the map and belatedly populate it
// with info about everything we've constructed so far.
p.size = make(map[*Regexp]int64)
for _, re := range p.stack {
p.checkSize(re)
}
}
if p.calcSize(re, true) > maxSize {
panic(ErrInternalError)
}
}
func (p *parser) calcSize(re *Regexp, force bool) int64 {
if !force {
if size, ok := p.size[re]; ok {
return size
}
}
var size int64
switch re.Op {
case OpLiteral:
size = int64(len(re.Rune))
case OpCapture, OpStar:
// star can be 1+ or 2+; assume 2 pessimistically
size = 2 + p.calcSize(re.Sub[0], false)
case OpPlus, OpQuest:
size = 1 + p.calcSize(re.Sub[0], false)
case OpConcat:
for _, sub := range re.Sub {
size += p.calcSize(sub, false)
}
case OpAlternate:
for _, sub := range re.Sub {
size += p.calcSize(sub, false)
}
if len(re.Sub) > 1 {
size += int64(len(re.Sub)) - 1
}
case OpRepeat:
sub := p.calcSize(re.Sub[0], false)
if re.Max == -1 {
if re.Min == 0 {
size = 2 + sub // x*
} else {
size = 1 + int64(re.Min)*sub // xxx+
}
break
}
// x{2,5} = xx(x(x(x)?)?)?
size = int64(re.Max)*sub + int64(re.Max-re.Min)
}
if size < 1 {
size = 1
}
p.size[re] = size
return size
}
func (p *parser) checkHeight(re *Regexp) {
if p.numRegexp < maxHeight {
return
@ -158,6 +290,7 @@ func (p *parser) calcHeight(re *Regexp, force bool) int {
// push pushes the regexp re onto the parse stack and returns the regexp.
func (p *parser) push(re *Regexp) *Regexp {
p.numRunes += len(re.Rune)
if re.Op == OpCharClass && len(re.Rune) == 2 && re.Rune[0] == re.Rune[1] {
// Single rune.
if p.maybeConcat(re.Rune[0], p.flags&^FoldCase) {
@ -189,7 +322,7 @@ func (p *parser) push(re *Regexp) *Regexp {
}
p.stack = append(p.stack, re)
p.checkHeight(re)
p.checkLimits(re)
return re
}
@ -299,7 +432,7 @@ func (p *parser) repeat(op Op, min, max int, before, after, lastRepeat string) (
re.Sub = re.Sub0[:1]
re.Sub[0] = sub
p.stack[n-1] = re
p.checkHeight(re)
p.checkLimits(re)
if op == OpRepeat && (min >= 2 || max >= 2) && !repeatIsValid(re, 1000) {
return "", &Error{ErrInvalidRepeatSize, before[:len(before)-len(after)]}
@ -503,6 +636,7 @@ func (p *parser) factor(sub []*Regexp) []*Regexp {
for j := start; j < i; j++ {
sub[j] = p.removeLeadingString(sub[j], len(str))
p.checkLimits(sub[j])
}
suffix := p.collapse(sub[start:i], OpAlternate) // recurse
@ -560,6 +694,7 @@ func (p *parser) factor(sub []*Regexp) []*Regexp {
for j := start; j < i; j++ {
reuse := j != start // prefix came from sub[start]
sub[j] = p.removeLeadingRegexp(sub[j], reuse)
p.checkLimits(sub[j])
}
suffix := p.collapse(sub[start:i], OpAlternate) // recurse
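
Together with the existing height limit, the parser now tracks an estimate of the compiled program size (maxSize) and the total number of runes in character classes (maxRunes), so pathological patterns are rejected while parsing instead of ballooning memory later. Size tracking is lazy: checkSize only builds its per-node size map once the running product of repetition counts suggests the budget could be exceeded. From a caller's point of view the rejected patterns simply fail to compile; a small sketch using two patterns from the updated test:

package main

import (
	"fmt"
	"regexp"
	"strings"
)

func main() {
	for _, pat := range []string{
		strings.Repeat("(xx?){1000}", 1000), // too long: estimated compiled size over budget
		strings.Repeat(`\pL`, 27000),        // too many runes in character classes
	} {
		_, err := regexp.Compile(pat)
		fmt.Println(err != nil) // true: rejected at parse time under the new limits
	}
}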


@ -484,12 +484,15 @@ var invalidRegexps = []string{
`(?P<>a)`,
`[a-Z]`,
`(?i)[a-Z]`,
`a{100000}`,
`a{100000,}`,
"((((((((((x{2}){2}){2}){2}){2}){2}){2}){2}){2}){2})",
strings.Repeat("(", 1000) + strings.Repeat(")", 1000),
strings.Repeat("(?:", 1000) + strings.Repeat(")*", 1000),
`\Q\E*`,
`a{100000}`, // too much repetition
`a{100000,}`, // too much repetition
"((((((((((x{2}){2}){2}){2}){2}){2}){2}){2}){2}){2})", // too much repetition
strings.Repeat("(", 1000) + strings.Repeat(")", 1000), // too deep
strings.Repeat("(?:", 1000) + strings.Repeat(")*", 1000), // too deep
"(" + strings.Repeat("(xx?)", 1000) + "){1000}", // too long
strings.Repeat("(xx?){1000}", 1000), // too long
strings.Repeat(`\pL`, 27000), // too many runes
}
var onlyPerl = []string{


@ -9,7 +9,7 @@ golang.org/x/crypto/curve25519/internal/field
golang.org/x/crypto/hkdf
golang.org/x/crypto/internal/poly1305
golang.org/x/crypto/internal/subtle
# golang.org/x/net v0.0.0-20211209124913-491a49abca63
# golang.org/x/net v0.0.0-20220907013725-0a43f88f7ef0
## explicit; go 1.17
golang.org/x/net/dns/dnsmessage
golang.org/x/net/http/httpguts


@ -0,0 +1,42 @@
// run
// Copyright 2022 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
func main() {
for i := 0; i < 10000; i++ {
h(i)
sink = make([]byte, 1024) // generate some garbage
}
}
func h(iter int) {
var x [32]byte
for i := 0; i < 32; i++ {
x[i] = 99
}
g(&x)
if x == ([32]byte{}) {
return
}
for i := 0; i < 32; i++ {
println(x[i])
}
panic(iter)
}
//go:noinline
func g(x interface{}) {
switch e := x.(type) {
case *[32]byte:
var c [32]byte
*e = c
case *[]byte:
*e = nil
}
}
var sink []byte


@ -0,0 +1,43 @@
// run
// Copyright 2022 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
func main() {
for i := 0; i < 10000; i++ {
h(i)
sink = make([]byte, 1024) // generate some garbage
}
}
func h(iter int) {
var x [32]byte
for i := 0; i < 32; i++ {
x[i] = 99
}
g(&x)
if x == ([32]byte{}) {
return
}
for i := 0; i < 32; i++ {
println(x[i])
}
panic(iter)
}
//go:noinline
func g(x interface{}) {
switch e := x.(type) {
case *[32]byte:
var c [32]byte
*e = c
case *[3]*byte:
var c [3]*byte
*e = c
}
}
var sink []byte


@ -0,0 +1,21 @@
// compile -G=3
// Copyright 2022 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
type Set[T comparable] map[T]struct{}
func (s Set[T]) Add() Set[T] {
return s
}
func (s Set[T]) Copy() Set[T] {
return Set[T].Add(s)
}
func main() {
_ = Set[int]{42: {}}
}