cmd/compile: unify and improve struct comparisons
Partially fixes https://github.com/golang/go/issues/38674
The first commit contains the actual unification; the second cleans things up by moving the shared code into its own package for clarity.
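
For background, an illustration that is not part of this change: the generated code evaluates struct-equality conditions in field order with short-circuiting (see EqStruct below), and a comparison can panic at run time when it reaches an interface field holding an incomparable dynamic type. A minimal runnable sketch, with an invented type T, showing both behaviors:

package main

import "fmt"

// T mixes a plain memory field with an interface field. Comparing two
// T values compiles, but the interface field makes == able to panic.
type T struct {
	N int
	A any
}

func main() {
	p := T{N: 1, A: []int{1}} // []int is not a comparable type
	q := T{N: 2, A: []int{1}}

	// N differs, so == short-circuits before reaching A: no panic.
	fmt.Println(p == q) // false

	// With the leading field equal, == reaches A and panics with
	// "comparing uncomparable type []int".
	defer func() { fmt.Println("recovered:", recover()) }()
	p.N = 2
	fmt.Println(p == q)
}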
Change-Id: I85067f8b247df02f94684ec1297a1a42263bba0c
GitHub-Last-Rev: 370a4ecad3
GitHub-Pull-Request: golang/go#52315
Reviewed-on: https://go-review.googlesource.com/c/go/+/399542
Reviewed-by: Keith Randall <khr@golang.org>
Reviewed-by: Keith Randall <khr@google.com>
Reviewed-by: Ian Lance Taylor <iant@google.com>
parent 555bed939d
commit 3c29aca436

4 changed files with 303 additions and 269 deletions
src/cmd/compile/internal/compare/compare.go (new file, 272 lines)

@@ -0,0 +1,272 @@
// Copyright 2022 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package compare contains code for generating comparison
// routines for structs, strings and interfaces.
package compare

import (
	"cmd/compile/internal/base"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/typecheck"
	"cmd/compile/internal/types"
	"fmt"
	"math/bits"
	"sort"
)

// IsRegularMemory reports whether t can be compared/hashed as regular memory.
func IsRegularMemory(t *types.Type) bool {
	a, _ := types.AlgType(t)
	return a == types.AMEM
}

// Memrun finds runs of struct fields for which memory-only algs are appropriate.
// t is the parent struct type, and start is the field index at which to start the run.
// size is the length in bytes of the memory included in the run.
// next is the index just after the end of the memory run.
func Memrun(t *types.Type, start int) (size int64, next int) {
	next = start
	for {
		next++
		if next == t.NumFields() {
			break
		}
		// Stop run after a padded field.
		if types.IsPaddedField(t, next-1) {
			break
		}
		// Also, stop before a blank or non-memory field.
		if f := t.Field(next); f.Sym.IsBlank() || !IsRegularMemory(f.Type) {
			break
		}
		// For issue 46283, don't combine fields if the resulting load would
		// require a larger alignment than the component fields.
		if base.Ctxt.Arch.Alignment > 1 {
			align := t.Alignment()
			if off := t.Field(start).Offset; off&(align-1) != 0 {
				// Offset is less aligned than the containing type.
				// Use offset to determine alignment.
				align = 1 << uint(bits.TrailingZeros64(uint64(off)))
			}
			size := t.Field(next).End() - t.Field(start).Offset
			if size > align {
				break
			}
		}
	}
	return t.Field(next-1).End() - t.Field(start).Offset, next
}

// EqCanPanic reports whether == on type t could panic (has an interface somewhere).
// t must be comparable.
func EqCanPanic(t *types.Type) bool {
	switch t.Kind() {
	default:
		return false
	case types.TINTER:
		return true
	case types.TARRAY:
		return EqCanPanic(t.Elem())
	case types.TSTRUCT:
		for _, f := range t.FieldSlice() {
			if !f.Sym.IsBlank() && EqCanPanic(f.Type) {
				return true
			}
		}
		return false
	}
}

// EqStruct compares two structs np and nq for equality.
// It works by building a list of boolean conditions to satisfy.
// Conditions must be evaluated in the returned order and
// properly short-circuited by the caller.
func EqStruct(t *types.Type, np, nq ir.Node) []ir.Node {
	// The conditions are a list-of-lists. Conditions are reorderable
	// within each inner list. The outer lists must be evaluated in order.
	var conds [][]ir.Node
	conds = append(conds, []ir.Node{})
	and := func(n ir.Node) {
		i := len(conds) - 1
		conds[i] = append(conds[i], n)
	}

	// Walk the struct using memequal for runs of AMEM
	// and calling specific equality tests for the others.
	for i, fields := 0, t.FieldSlice(); i < len(fields); {
		f := fields[i]

		// Skip blank-named fields.
		if f.Sym.IsBlank() {
			i++
			continue
		}

		// Compare non-memory fields with field equality.
		if !IsRegularMemory(f.Type) {
			if EqCanPanic(f.Type) {
				// Enforce ordering by starting a new set of reorderable conditions.
				conds = append(conds, []ir.Node{})
			}
			p := ir.NewSelectorExpr(base.Pos, ir.OXDOT, np, f.Sym)
			q := ir.NewSelectorExpr(base.Pos, ir.OXDOT, nq, f.Sym)
			switch {
			case f.Type.IsString():
				eqlen, eqmem := EqString(p, q)
				and(eqlen)
				and(eqmem)
			default:
				and(ir.NewBinaryExpr(base.Pos, ir.OEQ, p, q))
			}
			if EqCanPanic(f.Type) {
				// Also enforce ordering after something that can panic.
				conds = append(conds, []ir.Node{})
			}
			i++
			continue
		}

		// Find maximal length run of memory-only fields.
		size, next := Memrun(t, i)

		// TODO(rsc): All the calls to newname are wrong for
		// cross-package unexported fields.
		if s := fields[i:next]; len(s) <= 2 {
			// Two or fewer fields: use plain field equality.
			for _, f := range s {
				and(eqfield(np, nq, ir.OEQ, f.Sym))
			}
		} else {
			// More than two fields: use memequal.
			cc := eqmem(np, nq, f.Sym, size)
			and(cc)
		}
		i = next
	}

	// Sort conditions to put runtime calls last.
	// Preserve the rest of the ordering.
	var flatConds []ir.Node
	for _, c := range conds {
		isCall := func(n ir.Node) bool {
			return n.Op() == ir.OCALL || n.Op() == ir.OCALLFUNC
		}
		sort.SliceStable(c, func(i, j int) bool {
			return !isCall(c[i]) && isCall(c[j])
		})
		flatConds = append(flatConds, c...)
	}
	return flatConds
}

// EqString returns the nodes
//
//	len(s) == len(t)
//
// and
//
//	memequal(s.ptr, t.ptr, len(s))
//
// which can be used to construct string equality comparison.
// eqlen must be evaluated before eqmem, and short-circuiting is required.
func EqString(s, t ir.Node) (eqlen *ir.BinaryExpr, eqmem *ir.CallExpr) {
	s = typecheck.Conv(s, types.Types[types.TSTRING])
	t = typecheck.Conv(t, types.Types[types.TSTRING])
	sptr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, s)
	tptr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, t)
	slen := typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OLEN, s), types.Types[types.TUINTPTR])
	tlen := typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OLEN, t), types.Types[types.TUINTPTR])

	fn := typecheck.LookupRuntime("memequal")
	fn = typecheck.SubstArgTypes(fn, types.Types[types.TUINT8], types.Types[types.TUINT8])
	call := typecheck.Call(base.Pos, fn, []ir.Node{sptr, tptr, ir.Copy(slen)}, false).(*ir.CallExpr)

	cmp := ir.NewBinaryExpr(base.Pos, ir.OEQ, slen, tlen)
	cmp = typecheck.Expr(cmp).(*ir.BinaryExpr)
	cmp.SetType(types.Types[types.TBOOL])
	return cmp, call
}

// EqInterface returns the nodes
//
//	s.tab == t.tab (or s.typ == t.typ, as appropriate)
//
// and
//
//	ifaceeq(s.tab, s.data, t.data) (or efaceeq(s.typ, s.data, t.data), as appropriate)
//
// which can be used to construct interface equality comparison.
// eqtab must be evaluated before eqdata, and short-circuiting is required.
func EqInterface(s, t ir.Node) (eqtab *ir.BinaryExpr, eqdata *ir.CallExpr) {
	if !types.Identical(s.Type(), t.Type()) {
		base.Fatalf("EqInterface %v %v", s.Type(), t.Type())
	}
	// func ifaceeq(tab *uintptr, x, y unsafe.Pointer) (ret bool)
	// func efaceeq(typ *uintptr, x, y unsafe.Pointer) (ret bool)
	var fn ir.Node
	if s.Type().IsEmptyInterface() {
		fn = typecheck.LookupRuntime("efaceeq")
	} else {
		fn = typecheck.LookupRuntime("ifaceeq")
	}

	stab := ir.NewUnaryExpr(base.Pos, ir.OITAB, s)
	ttab := ir.NewUnaryExpr(base.Pos, ir.OITAB, t)
	sdata := ir.NewUnaryExpr(base.Pos, ir.OIDATA, s)
	tdata := ir.NewUnaryExpr(base.Pos, ir.OIDATA, t)
	sdata.SetType(types.Types[types.TUNSAFEPTR])
	tdata.SetType(types.Types[types.TUNSAFEPTR])
	sdata.SetTypecheck(1)
	tdata.SetTypecheck(1)

	call := typecheck.Call(base.Pos, fn, []ir.Node{stab, sdata, tdata}, false).(*ir.CallExpr)

	cmp := ir.NewBinaryExpr(base.Pos, ir.OEQ, stab, ttab)
	cmp = typecheck.Expr(cmp).(*ir.BinaryExpr)
	cmp.SetType(types.Types[types.TBOOL])
	return cmp, call
}

// eqfield returns the node
//
//	p.field == q.field
func eqfield(p ir.Node, q ir.Node, op ir.Op, field *types.Sym) ir.Node {
	nx := ir.NewSelectorExpr(base.Pos, ir.OXDOT, p, field)
	ny := ir.NewSelectorExpr(base.Pos, ir.OXDOT, q, field)
	ne := ir.NewBinaryExpr(base.Pos, op, nx, ny)
	return ne
}

// eqmem returns the node
//
//	memequal(&p.field, &q.field [, size])
func eqmem(p ir.Node, q ir.Node, field *types.Sym, size int64) ir.Node {
	nx := typecheck.Expr(typecheck.NodAddr(ir.NewSelectorExpr(base.Pos, ir.OXDOT, p, field)))
	ny := typecheck.Expr(typecheck.NodAddr(ir.NewSelectorExpr(base.Pos, ir.OXDOT, q, field)))

	fn, needsize := eqmemfunc(size, nx.Type().Elem())
	call := ir.NewCallExpr(base.Pos, ir.OCALL, fn, nil)
	call.Args.Append(nx)
	call.Args.Append(ny)
	if needsize {
		call.Args.Append(ir.NewInt(size))
	}

	return call
}

func eqmemfunc(size int64, t *types.Type) (fn *ir.Name, needsize bool) {
	switch size {
	default:
		fn = typecheck.LookupRuntime("memequal")
		needsize = true
	case 1, 2, 4, 8, 16:
		buf := fmt.Sprintf("memequal%d", int(size)*8)
		fn = typecheck.LookupRuntime(buf)
	}

	fn = typecheck.SubstArgTypes(fn, t, t)
	return fn, needsize
}
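
To make the output shape concrete, here is a hand-written sketch (not actual compiler output; the type T and function eqT are invented) of roughly what EqStruct's conditions amount to for a small struct: one memequal-style check stands in for the run of plain fields found by Memrun, the string field contributes a length condition plus a content condition, and the cheap comparisons sort ahead of the runtime calls:

package main

import "fmt"

// T has a run of three memory-only fields followed by a string.
type T struct {
	a, b, c int32
	s       string
}

// eqT spells out conditions equivalent to what EqStruct generates:
// the cheap length test comes first (no runtime call), then the
// memory run, then the expensive string-content check last.
func eqT(p, q T) bool {
	return len(p.s) == len(q.s) &&
		p.a == q.a && p.b == q.b && p.c == q.c && // stands in for memequal(&p.a, &q.a, 12)
		p.s == q.s // stands in for memequal(p.s.ptr, q.s.ptr, len(p.s))
}

func main() {
	fmt.Println(eqT(T{1, 2, 3, "x"}, T{1, 2, 3, "x"})) // true
	fmt.Println(eqT(T{1, 2, 3, "y"}, T{1, 2, 3, "x"})) // false: same length, different bytes
}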

src/cmd/compile/internal/reflectdata/alg.go
@@ -6,10 +6,9 @@ package reflectdata

 import (
 	"fmt"
-	"math/bits"
-	"sort"

 	"cmd/compile/internal/base"
+	"cmd/compile/internal/compare"
 	"cmd/compile/internal/ir"
 	"cmd/compile/internal/objw"
 	"cmd/compile/internal/typecheck"
@@ -17,32 +16,6 @@ import (
 	"cmd/internal/obj"
 )

-// isRegularMemory reports whether t can be compared/hashed as regular memory.
-func isRegularMemory(t *types.Type) bool {
-	a, _ := types.AlgType(t)
-	return a == types.AMEM
-}
-
-// eqCanPanic reports whether == on type t could panic (has an interface somewhere).
-// t must be comparable.
-func eqCanPanic(t *types.Type) bool {
-	switch t.Kind() {
-	default:
-		return false
-	case types.TINTER:
-		return true
-	case types.TARRAY:
-		return eqCanPanic(t.Elem())
-	case types.TSTRUCT:
-		for _, f := range t.FieldSlice() {
-			if !f.Sym.IsBlank() && eqCanPanic(f.Type) {
-				return true
-			}
-		}
-		return false
-	}
-}
-
 // AlgType returns the fixed-width AMEMxx variants instead of the general
 // AMEM kind when possible.
 func AlgType(t *types.Type) types.AlgKind {
@@ -206,7 +179,7 @@ func genhash(t *types.Type) *obj.LSym {
 		}

 		// Hash non-memory fields with appropriate hash function.
-		if !isRegularMemory(f.Type) {
+		if !compare.IsRegularMemory(f.Type) {
 			hashel := hashfor(f.Type)
 			call := ir.NewCallExpr(base.Pos, ir.OCALL, hashel, nil)
 			nx := ir.NewSelectorExpr(base.Pos, ir.OXDOT, np, f.Sym) // TODO: fields from other packages?
@@ -219,7 +192,7 @@ func genhash(t *types.Type) *obj.LSym {
 		}

 		// Otherwise, hash a maximal length run of raw memory.
-		size, next := memrun(t, i)
+		size, next := compare.Memrun(t, i)

 		// h = hashel(&p.first, size, h)
 		hashel := hashmem(f.Type)
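
As an aside (a standalone illustration with an invented struct, not part of the diff): whether fields can be hashed or compared as one block depends on layout, and padding ends a run. The program below prints field offsets to show where a gap appears:

package main

import (
	"fmt"
	"unsafe"
)

// T is an invented struct for illustration. a, b, c sit at offsets
// 0, 4, 8 with no gaps, so they can be handled as one memory run;
// on a typical 64-bit layout d ends at offset 13 and s starts at 16,
// so d is a padded field and the run cannot continue past it.
type T struct {
	a, b, c int32
	d       bool
	s       string
}

func main() {
	var v T
	fmt.Println(unsafe.Offsetof(v.a), unsafe.Offsetof(v.b), unsafe.Offsetof(v.c),
		unsafe.Offsetof(v.d), unsafe.Offsetof(v.s)) // typically: 0 4 8 12 16
}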
@@ -510,12 +483,12 @@ func geneq(t *types.Type) *obj.LSym {
 		// Second, check that all the contents match (expensive).
 		checkAll(3, false, func(pi, qi ir.Node) ir.Node {
 			// Compare lengths.
-			eqlen, _ := EqString(pi, qi)
+			eqlen, _ := compare.EqString(pi, qi)
 			return eqlen
 		})
 		checkAll(1, true, func(pi, qi ir.Node) ir.Node {
 			// Compare contents.
-			_, eqmem := EqString(pi, qi)
+			_, eqmem := compare.EqString(pi, qi)
 			return eqmem
 		})
 	case types.TFLOAT32, types.TFLOAT64:
@@ -532,81 +505,7 @@ func geneq(t *types.Type) *obj.LSym {
 		}

 	case types.TSTRUCT:
-		// Build a list of conditions to satisfy.
-		// The conditions are a list-of-lists. Conditions are reorderable
-		// within each inner list. The outer lists must be evaluated in order.
-		var conds [][]ir.Node
-		conds = append(conds, []ir.Node{})
-		and := func(n ir.Node) {
-			i := len(conds) - 1
-			conds[i] = append(conds[i], n)
-		}
-
-		// Walk the struct using memequal for runs of AMEM
-		// and calling specific equality tests for the others.
-		for i, fields := 0, t.FieldSlice(); i < len(fields); {
-			f := fields[i]
-
-			// Skip blank-named fields.
-			if f.Sym.IsBlank() {
-				i++
-				continue
-			}
-
-			// Compare non-memory fields with field equality.
-			if !isRegularMemory(f.Type) {
-				if eqCanPanic(f.Type) {
-					// Enforce ordering by starting a new set of reorderable conditions.
-					conds = append(conds, []ir.Node{})
-				}
-				p := ir.NewSelectorExpr(base.Pos, ir.OXDOT, np, f.Sym)
-				q := ir.NewSelectorExpr(base.Pos, ir.OXDOT, nq, f.Sym)
-				switch {
-				case f.Type.IsString():
-					eqlen, eqmem := EqString(p, q)
-					and(eqlen)
-					and(eqmem)
-				default:
-					and(ir.NewBinaryExpr(base.Pos, ir.OEQ, p, q))
-				}
-				if eqCanPanic(f.Type) {
-					// Also enforce ordering after something that can panic.
-					conds = append(conds, []ir.Node{})
-				}
-				i++
-				continue
-			}
-
-			// Find maximal length run of memory-only fields.
-			size, next := memrun(t, i)
-
-			// TODO(rsc): All the calls to newname are wrong for
-			// cross-package unexported fields.
-			if s := fields[i:next]; len(s) <= 2 {
-				// Two or fewer fields: use plain field equality.
-				for _, f := range s {
-					and(eqfield(np, nq, f.Sym))
-				}
-			} else {
-				// More than two fields: use memequal.
-				and(eqmem(np, nq, f.Sym, size))
-			}
-			i = next
-		}
-
-		// Sort conditions to put runtime calls last.
-		// Preserve the rest of the ordering.
-		var flatConds []ir.Node
-		for _, c := range conds {
-			isCall := func(n ir.Node) bool {
-				return n.Op() == ir.OCALL || n.Op() == ir.OCALLFUNC
-			}
-			sort.SliceStable(c, func(i, j int) bool {
-				return !isCall(c[i]) && isCall(c[j])
-			})
-			flatConds = append(flatConds, c...)
-		}
-
+		flatConds := compare.EqStruct(t, np, nq)
 		if len(flatConds) == 0 {
 			fn.Body.Append(ir.NewAssignStmt(base.Pos, nr, ir.NewBool(true)))
 		} else {
@@ -631,7 +530,7 @@ func geneq(t *types.Type) *obj.LSym {
 	// return (or goto ret)
 	fn.Body.Append(ir.NewLabelStmt(base.Pos, neq))
 	fn.Body.Append(ir.NewAssignStmt(base.Pos, nr, ir.NewBool(false)))
-	if eqCanPanic(t) || anyCall(fn) {
+	if compare.EqCanPanic(t) || anyCall(fn) {
 		// Epilogue is large, so share it with the equal case.
 		fn.Body.Append(ir.NewBranchStmt(base.Pos, ir.OGOTO, ret))
 	} else {
@@ -680,153 +579,6 @@ func anyCall(fn *ir.Func) bool {
 	})
 }

-// eqfield returns the node
-//
-//	p.field == q.field
-func eqfield(p ir.Node, q ir.Node, field *types.Sym) ir.Node {
-	nx := ir.NewSelectorExpr(base.Pos, ir.OXDOT, p, field)
-	ny := ir.NewSelectorExpr(base.Pos, ir.OXDOT, q, field)
-	ne := ir.NewBinaryExpr(base.Pos, ir.OEQ, nx, ny)
-	return ne
-}
-
-// EqString returns the nodes
-//
-//	len(s) == len(t)
-//
-// and
-//
-//	memequal(s.ptr, t.ptr, len(s))
-//
-// which can be used to construct string equality comparison.
-// eqlen must be evaluated before eqmem, and shortcircuiting is required.
-func EqString(s, t ir.Node) (eqlen *ir.BinaryExpr, eqmem *ir.CallExpr) {
-	s = typecheck.Conv(s, types.Types[types.TSTRING])
-	t = typecheck.Conv(t, types.Types[types.TSTRING])
-	sptr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, s)
-	tptr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, t)
-	slen := typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OLEN, s), types.Types[types.TUINTPTR])
-	tlen := typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OLEN, t), types.Types[types.TUINTPTR])
-
-	fn := typecheck.LookupRuntime("memequal")
-	fn = typecheck.SubstArgTypes(fn, types.Types[types.TUINT8], types.Types[types.TUINT8])
-	call := typecheck.Call(base.Pos, fn, []ir.Node{sptr, tptr, ir.Copy(slen)}, false).(*ir.CallExpr)
-
-	cmp := ir.NewBinaryExpr(base.Pos, ir.OEQ, slen, tlen)
-	cmp = typecheck.Expr(cmp).(*ir.BinaryExpr)
-	cmp.SetType(types.Types[types.TBOOL])
-	return cmp, call
-}
-
-// EqInterface returns the nodes
-//
-//	s.tab == t.tab (or s.typ == t.typ, as appropriate)
-//
-// and
-//
-//	ifaceeq(s.tab, s.data, t.data) (or efaceeq(s.typ, s.data, t.data), as appropriate)
-//
-// which can be used to construct interface equality comparison.
-// eqtab must be evaluated before eqdata, and shortcircuiting is required.
-func EqInterface(s, t ir.Node) (eqtab *ir.BinaryExpr, eqdata *ir.CallExpr) {
-	if !types.Identical(s.Type(), t.Type()) {
-		base.Fatalf("EqInterface %v %v", s.Type(), t.Type())
-	}
-	// func ifaceeq(tab *uintptr, x, y unsafe.Pointer) (ret bool)
-	// func efaceeq(typ *uintptr, x, y unsafe.Pointer) (ret bool)
-	var fn ir.Node
-	if s.Type().IsEmptyInterface() {
-		fn = typecheck.LookupRuntime("efaceeq")
-	} else {
-		fn = typecheck.LookupRuntime("ifaceeq")
-	}
-
-	stab := ir.NewUnaryExpr(base.Pos, ir.OITAB, s)
-	ttab := ir.NewUnaryExpr(base.Pos, ir.OITAB, t)
-	sdata := ir.NewUnaryExpr(base.Pos, ir.OIDATA, s)
-	tdata := ir.NewUnaryExpr(base.Pos, ir.OIDATA, t)
-	sdata.SetType(types.Types[types.TUNSAFEPTR])
-	tdata.SetType(types.Types[types.TUNSAFEPTR])
-	sdata.SetTypecheck(1)
-	tdata.SetTypecheck(1)
-
-	call := typecheck.Call(base.Pos, fn, []ir.Node{stab, sdata, tdata}, false).(*ir.CallExpr)
-
-	cmp := ir.NewBinaryExpr(base.Pos, ir.OEQ, stab, ttab)
-	cmp = typecheck.Expr(cmp).(*ir.BinaryExpr)
-	cmp.SetType(types.Types[types.TBOOL])
-	return cmp, call
-}
-
-// eqmem returns the node
-//
-//	memequal(&p.field, &q.field [, size])
-func eqmem(p ir.Node, q ir.Node, field *types.Sym, size int64) ir.Node {
-	nx := typecheck.Expr(typecheck.NodAddr(ir.NewSelectorExpr(base.Pos, ir.OXDOT, p, field)))
-	ny := typecheck.Expr(typecheck.NodAddr(ir.NewSelectorExpr(base.Pos, ir.OXDOT, q, field)))
-
-	fn, needsize := eqmemfunc(size, nx.Type().Elem())
-	call := ir.NewCallExpr(base.Pos, ir.OCALL, fn, nil)
-	call.Args.Append(nx)
-	call.Args.Append(ny)
-	if needsize {
-		call.Args.Append(ir.NewInt(size))
-	}
-
-	return call
-}
-
-func eqmemfunc(size int64, t *types.Type) (fn *ir.Name, needsize bool) {
-	switch size {
-	default:
-		fn = typecheck.LookupRuntime("memequal")
-		needsize = true
-	case 1, 2, 4, 8, 16:
-		buf := fmt.Sprintf("memequal%d", int(size)*8)
-		fn = typecheck.LookupRuntime(buf)
-	}
-
-	fn = typecheck.SubstArgTypes(fn, t, t)
-	return fn, needsize
-}
-
-// memrun finds runs of struct fields for which memory-only algs are appropriate.
-// t is the parent struct type, and start is the field index at which to start the run.
-// size is the length in bytes of the memory included in the run.
-// next is the index just after the end of the memory run.
-func memrun(t *types.Type, start int) (size int64, next int) {
-	next = start
-	for {
-		next++
-		if next == t.NumFields() {
-			break
-		}
-		// Stop run after a padded field.
-		if types.IsPaddedField(t, next-1) {
-			break
-		}
-		// Also, stop before a blank or non-memory field.
-		if f := t.Field(next); f.Sym.IsBlank() || !isRegularMemory(f.Type) {
-			break
-		}
-		// For issue 46283, don't combine fields if the resulting load would
-		// require a larger alignment than the component fields.
-		if base.Ctxt.Arch.Alignment > 1 {
-			align := t.Alignment()
-			if off := t.Field(start).Offset; off&(align-1) != 0 {
-				// Offset is less aligned than the containing type.
-				// Use offset to determine alignment.
-				align = 1 << uint(bits.TrailingZeros64(uint64(off)))
-			}
-			size := t.Field(next).End() - t.Field(start).Offset
-			if size > align {
-				break
-			}
-		}
-	}
-	return t.Field(next-1).End() - t.Field(start).Offset, next
-}
-
 func hashmem(t *types.Type) ir.Node {
 	sym := ir.Pkgs.Runtime.Lookup("memhash")

src/cmd/compile/internal/reflectdata/reflect.go
@@ -14,6 +14,7 @@ import (

 	"cmd/compile/internal/base"
 	"cmd/compile/internal/bitvec"
+	"cmd/compile/internal/compare"
 	"cmd/compile/internal/escape"
 	"cmd/compile/internal/inline"
 	"cmd/compile/internal/ir"
@@ -728,7 +729,7 @@ func dcommontype(lsym *obj.LSym, t *types.Type) int {
 	if t.Sym() != nil && t.Sym().Name != "" {
 		tflag |= tflagNamed
 	}
-	if isRegularMemory(t) {
+	if compare.IsRegularMemory(t) {
 		tflag |= tflagRegularMemory
 	}

src/cmd/compile/internal/walk/compare.go
@@ -8,6 +8,7 @@ import (
 	"go/constant"

 	"cmd/compile/internal/base"
+	"cmd/compile/internal/compare"
 	"cmd/compile/internal/ir"
 	"cmd/compile/internal/reflectdata"
 	"cmd/compile/internal/ssagen"
@@ -178,7 +179,7 @@ func walkCompare(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
 		andor = ir.OOROR
 	}
 	var expr ir.Node
-	compare := func(el, er ir.Node) {
+	comp := func(el, er ir.Node) {
 		a := ir.NewBinaryExpr(base.Pos, n.Op(), el, er)
 		if expr == nil {
 			expr = a
@@ -186,18 +187,26 @@ func walkCompare(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
 			expr = ir.NewLogicalExpr(base.Pos, andor, expr, a)
 		}
 	}
+	and := func(cond ir.Node) {
+		if expr == nil {
+			expr = cond
+		} else {
+			expr = ir.NewLogicalExpr(base.Pos, andor, expr, cond)
+		}
+	}
 	cmpl = safeExpr(cmpl, init)
 	cmpr = safeExpr(cmpr, init)
 	if t.IsStruct() {
-		for _, f := range t.Fields().Slice() {
-			sym := f.Sym
-			if sym.IsBlank() {
-				continue
-			}
-			compare(
-				ir.NewSelectorExpr(base.Pos, ir.OXDOT, cmpl, sym),
-				ir.NewSelectorExpr(base.Pos, ir.OXDOT, cmpr, sym),
-			)
-		}
+		conds := compare.EqStruct(t, cmpl, cmpr)
+		if n.Op() == ir.OEQ {
+			for _, cond := range conds {
+				and(cond)
+			}
+		} else {
+			for _, cond := range conds {
+				notCond := ir.NewUnaryExpr(base.Pos, ir.ONOT, cond)
+				and(notCond)
+			}
+		}
 	} else {
 		step := int64(1)
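
A side note on the != branch above (illustration only): andor is ir.OOROR for !=, so negating each condition from EqStruct and joining with || is De Morgan's law, making the lowered p != q exactly !(p == q). A minimal check:

package main

import "fmt"

func main() {
	// walkCompare joins conditions with && for ==, and with || after
	// negating each condition for != — by De Morgan's law,
	// (!c1 || !c2) == !(c1 && c2) for all inputs.
	for _, c1 := range []bool{false, true} {
		for _, c2 := range []bool{false, true} {
			fmt.Println((!c1 || !c2) == !(c1 && c2)) // always true
		}
	}
}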
@@ -221,7 +230,7 @@ func walkCompare(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
 			step = 1
 		}
 		if step == 1 {
-			compare(
+			comp(
 				ir.NewIndexExpr(base.Pos, cmpl, ir.NewInt(i)),
 				ir.NewIndexExpr(base.Pos, cmpr, ir.NewInt(i)),
 			)
@@ -249,7 +258,7 @@ func walkCompare(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
 				rb = ir.NewBinaryExpr(base.Pos, ir.OLSH, rb, ir.NewInt(8*t.Elem().Size()*offset))
 				cmprw = ir.NewBinaryExpr(base.Pos, ir.OOR, cmprw, rb)
 			}
-			compare(cmplw, cmprw)
+			comp(cmplw, cmprw)
 			i += step
 			remains -= step * t.Elem().Size()
 		}
@@ -270,7 +279,7 @@ func walkCompareInterface(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
 func walkCompareInterface(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
 	n.Y = cheapExpr(n.Y, init)
 	n.X = cheapExpr(n.X, init)
-	eqtab, eqdata := reflectdata.EqInterface(n.X, n.Y)
+	eqtab, eqdata := compare.EqInterface(n.X, n.Y)
 	var cmp ir.Node
 	if n.Op() == ir.OEQ {
 		cmp = ir.NewLogicalExpr(base.Pos, ir.OANDAND, eqtab, eqdata)
@@ -384,7 +393,7 @@ func walkCompareString(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
 	// prepare for rewrite below
 	n.X = cheapExpr(n.X, init)
 	n.Y = cheapExpr(n.Y, init)
-	eqlen, eqmem := reflectdata.EqString(n.X, n.Y)
+	eqlen, eqmem := compare.EqString(n.X, n.Y)
 	// quick check of len before full compare for == or !=.
 	// memequal then tests equality up to length len.
 	if n.Op() == ir.OEQ {
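
To illustrate what this lowering amounts to, here is a hand-written stand-in (stringEq is an invented name, not the actual generated code):

package main

import "fmt"

// stringEq mirrors the shape walkCompareString produces for s == t:
// the cheap length test runs first and short-circuits, and only then
// are the bytes compared (the loop stands in for runtime.memequal).
func stringEq(s, t string) bool {
	if len(s) != len(t) {
		return false // fast path: different lengths can never be equal
	}
	for i := 0; i < len(s); i++ {
		if s[i] != t[i] {
			return false
		}
	}
	return true
}

func main() {
	fmt.Println(stringEq("go", "go"), stringEq("go", "gc"), stringEq("go", "golang"))
	// Output: true false false
}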