
[dev.typeparams] all: merge dev.regabi (07569da) into dev.typeparams

Conflicts:

- test/fixedbugs/issue27595.go
- test/fixedbugs/issue30087.go
- test/used.go

Merge List:

+ 2020-12-28 07569dac4e [dev.regabi] all: merge master (1d78139) into dev.regabi
+ 2020-12-28 76136be027 [dev.regabi] cmd/compile: check for recursive import in ImportBody
+ 2020-12-28 fda7ec3a3f [dev.regabi] cmd/compile: remove Name.IsDDD, etc
+ 2020-12-28 098a6490b9 [dev.regabi] cmd/compile: remove Declare in makepartialcall
+ 2020-12-28 137f0d2e06 [dev.regabi] cmd/compile: remove unnecessary Name.Sym call
+ 2020-12-28 3383b5c74a [dev.regabi] cmd/compile: flatten dependency graph [generated]
+ 2020-12-28 f8afb8216a [dev.regabi] cmd/compile: rename CommStmt and CaseStmt [generated]
+ 2020-12-28 5f3bd59a0d [dev.regabi] cmd/compile: remove some unneeded code in package ir
+ 2020-12-28 3bdafb0d82 [dev.regabi] cmd/compile: remove CommStmt.List
+ 2020-12-28 2ecf52b841 [dev.regabi] cmd/compile: separate CommStmt from CaseStmt
+ 2020-12-28 ed9772e130 [dev.regabi] cmd/compile: add explicit file name in types generation
+ 2020-12-28 a59d26603f [dev.regabi] cmd/compile: use []*CaseStmt in {Select,Switch}Stmt
+ 2020-12-28 fbc4458c06 [dev.regabi] cmd/compile: simplify some tree traversal code
+ 2020-12-28 6c67677541 [dev.regabi] cmd/compile: simplify FuncName and PkgFuncName
+ 2020-12-28 676d794b81 [dev.regabi] cmd/compile: remove refersToCommonName
+ 2020-12-28 c98548e110 [dev.regabi] cmd/compile: merge ascompatee, ascompatee1, and reorder3
+ 2020-12-28 4c215c4fa9 [dev.regabi] cmd/compile: simplify and optimize reorder3
+ 2020-12-28 e6c973198d [dev.regabi] cmd/compile: stop mangling SelectorExpr.Sel for ODOTMETH
+ 2020-12-28 135ce1c485 [dev.regabi] cmd/compile: desugar OMETHEXPR into ONAME during walk
+ 2020-12-28 0f732f8c91 [dev.regabi] cmd/compile: minor walkExpr cleanups
+ 2020-12-28 0de8eafd98 [dev.regabi] cmd/compile: remove SelectorExpr.Offset field
+ 2020-12-28 a4f335f420 [dev.regabi] cmd/compile: always use a Field for ODOTPTR expressions
+ 2020-12-26 1d78139128 runtime/cgo: fix Android build with NDK 22
+ 2020-12-25 2018b68a65 net/mail: don't use MDT in test
+ 2020-12-25 e4f293d853 [dev.regabi] cmd/compile: fix OCALLMETH desugaring
+ 2020-12-25 1d9a1f67d5 [dev.regabi] cmd/compile: don't emit reflect data for method types
+ 2020-12-25 396b6c2e7c [dev.regabi] cmd/compile: cleanup assignment typechecking
+ 2020-12-25 e24d2f3d05 [dev.regabi] cmd/compile: remove typ from RangeStmt
+ 2020-12-25 2785c691c2 [dev.regabi] cmd/compile: cleanup devirtualization docs
+ 2020-12-25 4b1d0fe66f [dev.regabi] cmd/compile: new devirtualization pkg [generated]
+ 2020-12-24 082cc8b7d9 [dev.regabi] cmd/compile: change ir.IsAssignable -> ir.IsAddressable
+ 2020-12-24 27b248b307 [dev.regabi] cmd/compile: separate range stmt Vars to Key, Value nodes
+ 2020-12-23 40818038bf [dev.regabi] cmd/compile: change CaseStmt.Vars to Var
+ 2020-12-23 b116404444 runtime: shift timeHistogram buckets and allow negative durations
+ 2020-12-23 8db7e2fecd runtime: fix allocs-by-size and frees-by-size buckets
+ 2020-12-23 fb96f07e1a runtime: fix nStackRoots comment about stack roots
+ 2020-12-23 d1502b3c72 lib/time, time/tzdata: update tzdata to 2020e
+ 2020-12-23 30c99cbb7a cmd/go: add the Retract field to 'go help mod edit' definition of the GoMod struct
+ 2020-12-23 49d0b239cb doc: fix a typo in contribute.html
+ 2020-12-23 9eeed291bc [dev.regabi] cmd/compile: eliminate usage of ir.Node in liveness
+ 2020-12-23 d1d64e4cea [dev.regabi] cmd/compile: split SliceExpr.List into separate fields
+ 2020-12-23 98a73030b0 cmd/go: in 'go get', promote named implicit dependencies to explicit
+ 2020-12-23 d19018e8f1 [dev.regabi] cmd/compile: split SliceHeaderExpr.LenCap into separate fields
+ 2020-12-23 53f082b0ee [dev.regabi] cmd/compile: cleanup export code further
+ 2020-12-23 31267f82e1 [dev.regabi] cmd/compile: simplify function/interface/struct typechecking
+ 2020-12-23 addade2cce [dev.regabi] cmd/compile: prefer types constructors over typecheck
+ 2020-12-23 18ebfb49e9 [dev.regabi] cmd/compile: cleanup noder
+ 2020-12-23 87a592b356 [dev.regabi] cmd/compile: cleanup import/export code
+ 2020-12-23 5898025026 [dev.regabi] cmd/compile: update mkbuiltin.go to use new type constructors
+ 2020-12-23 63c96c2ee7 [dev.regabi] cmd/compile: update mkbuiltin.go and re-enable TestBuiltin
+ 2020-12-23 fd6ba1c8a2 os/signal: fix a deadlock with syscall.AllThreadsSyscall() use
+ 2020-12-23 b0b0d98283 runtime: linux iscgo support for not blocking nptl signals
+ 2020-12-22 223331fc0c cmd/go/internal/modload: add hint for missing implicit dependency

Change-Id: Iecb8a7dfb401b6ab383e97101cd81bfc201683f6
Author: Matthew Dempsky 2020-12-28 00:39:13 -08:00
Commit: a800acaae1
90 changed files with 8552 additions and 8722 deletions


@ -1129,7 +1129,7 @@ sometimes required because the standard library code you're modifying
might require a newer version than the stable one you have installed).
<pre>
$ cd $GODIR/src/hash/sha1
$ cd $GODIR/src/crypto/sha1
$ [make changes...]
$ $GODIR/bin/go test .
</pre>


@ -8,8 +8,8 @@
# Consult https://www.iana.org/time-zones for the latest versions.
# Versions to use.
CODE=2020d
DATA=2020d
CODE=2020e
DATA=2020e
set -e
rm -rf work

Binary file not shown.


@ -0,0 +1,85 @@
// Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package devirtualize implements a simple "devirtualization"
// optimization pass, which replaces interface method calls with
// direct concrete-type method calls where possible.
package devirtualize
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
)
// Func devirtualizes calls within fn where possible.
func Func(fn *ir.Func) {
ir.CurFunc = fn
ir.VisitList(fn.Body, func(n ir.Node) {
if call, ok := n.(*ir.CallExpr); ok {
Call(call)
}
})
}
// Call devirtualizes the given call if possible.
func Call(call *ir.CallExpr) {
if call.Op() != ir.OCALLINTER {
return
}
sel := call.X.(*ir.SelectorExpr)
r := ir.StaticValue(sel.X)
if r.Op() != ir.OCONVIFACE {
return
}
recv := r.(*ir.ConvExpr)
typ := recv.X.Type()
if typ.IsInterface() {
return
}
dt := ir.NewTypeAssertExpr(sel.Pos(), sel.X, nil)
dt.SetType(typ)
x := typecheck.Callee(ir.NewSelectorExpr(sel.Pos(), ir.OXDOT, dt, sel.Sel))
switch x.Op() {
case ir.ODOTMETH:
x := x.(*ir.SelectorExpr)
if base.Flag.LowerM != 0 {
base.WarnfAt(call.Pos(), "devirtualizing %v to %v", sel, typ)
}
call.SetOp(ir.OCALLMETH)
call.X = x
case ir.ODOTINTER:
// Promoted method from embedded interface-typed field (#42279).
x := x.(*ir.SelectorExpr)
if base.Flag.LowerM != 0 {
base.WarnfAt(call.Pos(), "partially devirtualizing %v to %v", sel, typ)
}
call.SetOp(ir.OCALLINTER)
call.X = x
default:
// TODO(mdempsky): Turn back into Fatalf after more testing.
if base.Flag.LowerM != 0 {
base.WarnfAt(call.Pos(), "failed to devirtualize %v (%v)", x, x.Op())
}
return
}
// Duplicated logic from typecheck for function call return
// value types.
//
// Receiver parameter size may have changed; need to update
// call.Type to get correct stack offsets for result
// parameters.
types.CheckSize(x.Type())
switch ft := x.Type(); ft.NumResults() {
case 0:
case 1:
call.SetType(ft.Results().Field(0).Type)
default:
call.SetType(ft.Results())
}
}
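
For context, here is the source-level effect of this pass as a standalone sketch (hypothetical example program, not part of this CL): when the dynamic type behind an interface value is provably a single concrete type, the OCALLINTER call is rewritten into a direct call.

package main

type greeter interface{ Greet() string }

type english struct{}

func (english) Greet() string { return "hello" }

func main() {
	// g's only possible dynamic type is english, so the compiler can
	// replace the interface call g.Greet() with a direct call to
	// english.Greet; building with -gcflags=-m prints
	// "devirtualizing g.Greet to english".
	var g greeter = english{}
	println(g.Greet())
}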


@ -347,21 +347,19 @@ func (e *escape) stmt(n ir.Node) {
e.loopDepth--
case ir.ORANGE:
// for List = range Right { Nbody }
// for Key, Value = range X { Body }
n := n.(*ir.RangeStmt)
e.loopDepth++
ks := e.addrs(n.Vars)
e.addr(n.Key)
k := e.addr(n.Value)
e.block(n.Body)
e.loopDepth--
// Right is evaluated outside the loop.
k := e.discardHole()
if len(ks) >= 2 {
if n.X.Type().IsArray() {
k = ks[1].note(n, "range")
} else {
k = ks[1].deref(n, "range-deref")
}
// X is evaluated outside the loop.
if n.X.Type().IsArray() {
k = k.note(n, "range")
} else {
k = k.deref(n, "range-deref")
}
e.expr(e.later(k), n.X)
@ -371,9 +369,8 @@ func (e *escape) stmt(n ir.Node) {
var ks []hole
for _, cas := range n.Cases { // cases
cas := cas.(*ir.CaseStmt)
if typesw && n.Tag.(*ir.TypeSwitchGuard).Tag != nil {
cv := cas.Vars[0]
cv := cas.Var
k := e.dcl(cv) // type switch variables have no ODCL.
if cv.Type().HasPointers() {
ks = append(ks, k.dotType(cv.Type(), cas, "switch case"))
@ -393,7 +390,6 @@ func (e *escape) stmt(n ir.Node) {
case ir.OSELECT:
n := n.(*ir.SelectStmt)
for _, cas := range n.Cases {
cas := cas.(*ir.CaseStmt)
e.stmt(cas.Comm)
e.block(cas.Body)
}
@ -559,10 +555,9 @@ func (e *escape) exprSkipInit(k hole, n ir.Node) {
case ir.OSLICE, ir.OSLICEARR, ir.OSLICE3, ir.OSLICE3ARR, ir.OSLICESTR:
n := n.(*ir.SliceExpr)
e.expr(k.note(n, "slice"), n.X)
low, high, max := n.SliceBounds()
e.discard(low)
e.discard(high)
e.discard(max)
e.discard(n.Low)
e.discard(n.High)
e.discard(n.Max)
case ir.OCONV, ir.OCONVNOP:
n := n.(*ir.ConvExpr)
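
The Key and Value holes above determine how range variables escape. A standalone illustration (hypothetical program, not from this CL) where the range value is forced to the heap, visible with -gcflags=-m:

package main

var sink *int

func main() {
	a := [3]int{1, 2, 3}
	for _, v := range a {
		v := v    // give each iteration its own variable
		sink = &v // -m reports "moved to heap: v"
	}
	println(*sink)
}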


@ -10,6 +10,7 @@ import (
"bufio"
"bytes"
"cmd/compile/internal/base"
"cmd/compile/internal/devirtualize"
"cmd/compile/internal/dwarfgen"
"cmd/compile/internal/escape"
"cmd/compile/internal/inline"
@ -243,7 +244,7 @@ func Main(archInit func(*ssagen.ArchInfo)) {
// Devirtualize.
for _, n := range typecheck.Target.Decls {
if n.Op() == ir.ODCLFUNC {
inline.Devirtualize(n.(*ir.Func))
devirtualize.Func(n.(*ir.Func))
}
}
ir.CurFunc = nil


@ -324,19 +324,17 @@ func (v *hairyVisitor) doNode(n ir.Node) error {
if t == nil {
base.Fatalf("no function type for [%p] %+v\n", n.X, n.X)
}
if types.IsRuntimePkg(n.X.Sym().Pkg) {
fn := n.X.Sym().Name
if fn == "heapBits.nextArena" {
// Special case: explicitly allow
// mid-stack inlining of
// runtime.heapBits.next even though
// it calls slow-path
// runtime.heapBits.nextArena.
break
}
fn := ir.MethodExprName(n.X).Func
if types.IsRuntimePkg(fn.Sym().Pkg) && fn.Sym().Name == "heapBits.nextArena" {
// Special case: explicitly allow
// mid-stack inlining of
// runtime.heapBits.next even though
// it calls slow-path
// runtime.heapBits.nextArena.
break
}
if inlfn := ir.MethodExprName(n.X).Func; inlfn.Inl != nil {
v.budget -= inlfn.Inl.Cost
if fn.Inl != nil {
v.budget -= fn.Inl.Cost
break
}
// Call cost for non-leaf inlining.
@ -531,7 +529,7 @@ func inlnode(n ir.Node, maxCost int32, inlMap map[*ir.Func]bool, edit func(ir.No
// Prevent inlining some reflect.Value methods when using checkptr,
// even when package reflect was compiled without it (#35073).
n := n.(*ir.CallExpr)
if s := n.X.Sym(); base.Debug.Checkptr != 0 && types.IsReflectPkg(s.Pkg) && (s.Name == "Value.UnsafeAddr" || s.Name == "Value.Pointer") {
if s := ir.MethodExprName(n.X).Sym(); base.Debug.Checkptr != 0 && types.IsReflectPkg(s.Pkg) && (s.Name == "Value.UnsafeAddr" || s.Name == "Value.Pointer") {
return n
}
}
@ -1203,73 +1201,6 @@ func pruneUnusedAutos(ll []*ir.Name, vis *hairyVisitor) []*ir.Name {
return s
}
// Devirtualize replaces interface method calls within fn with direct
// concrete-type method calls where applicable.
func Devirtualize(fn *ir.Func) {
ir.CurFunc = fn
ir.VisitList(fn.Body, func(n ir.Node) {
if n.Op() == ir.OCALLINTER {
devirtualizeCall(n.(*ir.CallExpr))
}
})
}
func devirtualizeCall(call *ir.CallExpr) {
sel := call.X.(*ir.SelectorExpr)
r := ir.StaticValue(sel.X)
if r.Op() != ir.OCONVIFACE {
return
}
recv := r.(*ir.ConvExpr)
typ := recv.X.Type()
if typ.IsInterface() {
return
}
dt := ir.NewTypeAssertExpr(sel.Pos(), sel.X, nil)
dt.SetType(typ)
x := typecheck.Callee(ir.NewSelectorExpr(sel.Pos(), ir.OXDOT, dt, sel.Sel))
switch x.Op() {
case ir.ODOTMETH:
x := x.(*ir.SelectorExpr)
if base.Flag.LowerM != 0 {
base.WarnfAt(call.Pos(), "devirtualizing %v to %v", sel, typ)
}
call.SetOp(ir.OCALLMETH)
call.X = x
case ir.ODOTINTER:
// Promoted method from embedded interface-typed field (#42279).
x := x.(*ir.SelectorExpr)
if base.Flag.LowerM != 0 {
base.WarnfAt(call.Pos(), "partially devirtualizing %v to %v", sel, typ)
}
call.SetOp(ir.OCALLINTER)
call.X = x
default:
// TODO(mdempsky): Turn back into Fatalf after more testing.
if base.Flag.LowerM != 0 {
base.WarnfAt(call.Pos(), "failed to devirtualize %v (%v)", x, x.Op())
}
return
}
// Duplicated logic from typecheck for function call return
// value types.
//
// Receiver parameter size may have changed; need to update
// call.Type to get correct stack offsets for result
// parameters.
types.CheckSize(x.Type())
switch ft := x.Type(); ft.NumResults() {
case 0:
case 1:
call.SetType(ft.Results().Field(0).Type)
default:
call.SetType(ft.Results())
}
}
// numNonClosures returns the number of functions in list which are not closures.
func numNonClosures(list []*ir.Func) int {
count := 0


@ -572,14 +572,12 @@ type SelectorExpr struct {
miniExpr
X Node
Sel *types.Sym
Offset int64
Selection *types.Field
}
func NewSelectorExpr(pos src.XPos, op Op, x Node, sel *types.Sym) *SelectorExpr {
n := &SelectorExpr{X: x, Sel: sel}
n.pos = pos
n.Offset = types.BADWIDTH
n.SetOp(op)
return n
}
@ -596,6 +594,7 @@ func (n *SelectorExpr) SetOp(op Op) {
func (n *SelectorExpr) Sym() *types.Sym { return n.Sel }
func (n *SelectorExpr) Implicit() bool { return n.flags&miniExprImplicit != 0 }
func (n *SelectorExpr) SetImplicit(b bool) { n.flags.set(miniExprImplicit, b) }
func (n *SelectorExpr) Offset() int64 { return n.Selection.Offset }
// Before type-checking, bytes.Buffer is a SelectorExpr.
// After type-checking it becomes a Name.
@ -605,11 +604,13 @@ func (*SelectorExpr) CanBeNtype() {}
type SliceExpr struct {
miniExpr
X Node
List Nodes // TODO(rsc): Use separate Nodes
Low Node
High Node
Max Node
}
func NewSliceExpr(pos src.XPos, op Op, x Node) *SliceExpr {
n := &SliceExpr{X: x}
func NewSliceExpr(pos src.XPos, op Op, x, low, high, max Node) *SliceExpr {
n := &SliceExpr{X: x, Low: low, High: high, Max: max}
n.pos = pos
n.op = op
return n
@ -624,61 +625,6 @@ func (n *SliceExpr) SetOp(op Op) {
}
}
// SliceBounds returns n's slice bounds: low, high, and max in expr[low:high:max].
// n must be a slice expression. max is nil if n is a simple slice expression.
func (n *SliceExpr) SliceBounds() (low, high, max Node) {
if len(n.List) == 0 {
return nil, nil, nil
}
switch n.Op() {
case OSLICE, OSLICEARR, OSLICESTR:
s := n.List
return s[0], s[1], nil
case OSLICE3, OSLICE3ARR:
s := n.List
return s[0], s[1], s[2]
}
base.Fatalf("SliceBounds op %v: %v", n.Op(), n)
return nil, nil, nil
}
// SetSliceBounds sets n's slice bounds, where n is a slice expression.
// n must be a slice expression. If max is non-nil, n must be a full slice expression.
func (n *SliceExpr) SetSliceBounds(low, high, max Node) {
switch n.Op() {
case OSLICE, OSLICEARR, OSLICESTR:
if max != nil {
base.Fatalf("SetSliceBounds %v given three bounds", n.Op())
}
s := n.List
if s == nil {
if low == nil && high == nil {
return
}
n.List = []Node{low, high}
return
}
s[0] = low
s[1] = high
return
case OSLICE3, OSLICE3ARR:
s := n.List
if s == nil {
if low == nil && high == nil && max == nil {
return
}
n.List = []Node{low, high, max}
return
}
s[0] = low
s[1] = high
s[2] = max
return
}
base.Fatalf("SetSliceBounds op %v: %v", n.Op(), n)
}
// IsSlice3 reports whether o is a slice3 op (OSLICE3, OSLICE3ARR).
// o must be a slicing op.
func (o Op) IsSlice3() bool {
@ -695,16 +641,16 @@ func (o Op) IsSlice3() bool {
// A SliceHeader expression constructs a slice header from its parts.
type SliceHeaderExpr struct {
miniExpr
Ptr Node
LenCap Nodes // TODO(rsc): Split into two Node fields
Ptr Node
Len Node
Cap Node
}
func NewSliceHeaderExpr(pos src.XPos, typ *types.Type, ptr, len, cap Node) *SliceHeaderExpr {
n := &SliceHeaderExpr{Ptr: ptr}
n := &SliceHeaderExpr{Ptr: ptr, Len: len, Cap: cap}
n.pos = pos
n.op = OSLICEHEADER
n.typ = typ
n.LenCap = []Node{len, cap}
return n
}
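
For reference, the bounds of a Go slice expression map onto the new Low, High, and Max fields as follows (standalone example, not from this CL):

package main

import "fmt"

func main() {
	s := []int{0, 1, 2, 3, 4}
	a := s[1:3]   // OSLICE:  Low=1, High=3, Max=nil
	b := s[1:3:4] // OSLICE3: Low=1, High=3, Max=4 (Op().IsSlice3() is true)
	c := s[:3]    // an omitted bound is a nil Node: Low=nil here
	fmt.Println(a, b, c)
}
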
@ -829,12 +775,12 @@ func IsZero(n Node) bool {
}
// lvalue etc
func IsAssignable(n Node) bool {
func IsAddressable(n Node) bool {
switch n.Op() {
case OINDEX:
n := n.(*IndexExpr)
if n.X.Type() != nil && n.X.Type().IsArray() {
return IsAssignable(n.X)
return IsAddressable(n.X)
}
if n.X.Type() != nil && n.X.Type().IsString() {
return false
@ -845,7 +791,7 @@ func IsAssignable(n Node) bool {
case ODOT:
n := n.(*SelectorExpr)
return IsAssignable(n.X)
return IsAddressable(n.X)
case ONAME:
n := n.(*Name)
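
The rename reflects what the function actually computes: addressability, not assignability, which differ in Go. A standalone illustration (hypothetical program, not from this CL):

package main

func main() {
	m := map[string]int{}
	m["k"] = 1 // map elements are assignable...
	// _ = &m["k"] // ...but not addressable (compile error: cannot take the address)

	var a [3]int
	_ = &a[0] // array elements are addressable (and assignable)

	s := "go"
	_ = s
	// _ = &s[0] // string bytes are neither addressable nor assignable
}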


@ -444,12 +444,15 @@ func stmtFmt(n Node, s fmt.State) {
break
}
if len(n.Vars) == 0 {
fmt.Fprintf(s, "for range %v { %v }", n.X, n.Body)
break
fmt.Fprint(s, "for")
if n.Key != nil {
fmt.Fprintf(s, " %v", n.Key)
if n.Value != nil {
fmt.Fprintf(s, ", %v", n.Value)
}
fmt.Fprint(s, " =")
}
fmt.Fprintf(s, "for %.v = range %v { %v }", n.Vars, n.X, n.Body)
fmt.Fprintf(s, " range %v { %v }", n.X, n.Body)
case OSELECT:
n := n.(*SelectStmt)
@ -475,7 +478,7 @@ func stmtFmt(n Node, s fmt.State) {
fmt.Fprintf(s, " { %v }", n.Cases)
case OCASE:
n := n.(*CaseStmt)
n := n.(*CaseClause)
if len(n.List) != 0 {
fmt.Fprintf(s, "case %.v", n.List)
} else {
@ -753,7 +756,7 @@ func exprFmt(n Node, s fmt.State, prec int) {
fmt.Fprint(s, ".<nil>")
return
}
fmt.Fprintf(s, ".%s", types.SymMethodName(n.Method.Sym))
fmt.Fprintf(s, ".%s", n.Method.Sym.Name)
case OXDOT, ODOT, ODOTPTR, ODOTINTER, ODOTMETH:
n := n.(*SelectorExpr)
@ -762,7 +765,7 @@ func exprFmt(n Node, s fmt.State, prec int) {
fmt.Fprint(s, ".<nil>")
return
}
fmt.Fprintf(s, ".%s", types.SymMethodName(n.Sel))
fmt.Fprintf(s, ".%s", n.Sel.Name)
case ODOTTYPE, ODOTTYPE2:
n := n.(*TypeAssertExpr)
@ -782,28 +785,24 @@ func exprFmt(n Node, s fmt.State, prec int) {
n := n.(*SliceExpr)
exprFmt(n.X, s, nprec)
fmt.Fprint(s, "[")
low, high, max := n.SliceBounds()
if low != nil {
fmt.Fprint(s, low)
if n.Low != nil {
fmt.Fprint(s, n.Low)
}
fmt.Fprint(s, ":")
if high != nil {
fmt.Fprint(s, high)
if n.High != nil {
fmt.Fprint(s, n.High)
}
if n.Op().IsSlice3() {
fmt.Fprint(s, ":")
if max != nil {
fmt.Fprint(s, max)
if n.Max != nil {
fmt.Fprint(s, n.Max)
}
}
fmt.Fprint(s, "]")
case OSLICEHEADER:
n := n.(*SliceHeaderExpr)
if len(n.LenCap) != 2 {
base.Fatalf("bad OSLICEHEADER list length %d", len(n.LenCap))
}
fmt.Fprintf(s, "sliceheader{%v,%v,%v}", n.Ptr, n.LenCap[0], n.LenCap[1])
fmt.Fprintf(s, "sliceheader{%v,%v,%v}", n.Ptr, n.Len, n.Cap)
case OCOMPLEX, OCOPY:
n := n.(*BinaryExpr)


@ -206,50 +206,22 @@ func (f *Func) SetWBPos(pos src.XPos) {
}
// funcname returns the name (without the package) of the function n.
func FuncName(n Node) string {
var f *Func
switch n := n.(type) {
case *Func:
f = n
case *Name:
f = n.Func
case *CallPartExpr:
f = n.Func
case *ClosureExpr:
f = n.Func
}
func FuncName(f *Func) string {
if f == nil || f.Nname == nil {
return "<nil>"
}
return f.Nname.Sym().Name
return f.Sym().Name
}
// pkgFuncName returns the name of the function referenced by n, with package prepended.
// This differs from the compiler's internal convention where local functions lack a package
// because the ultimate consumer of this is a human looking at an IDE; package is only empty
// if the compilation package is actually the empty string.
func PkgFuncName(n Node) string {
var s *types.Sym
if n == nil {
func PkgFuncName(f *Func) string {
if f == nil || f.Nname == nil {
return "<nil>"
}
if n.Op() == ONAME {
s = n.Sym()
} else {
var f *Func
switch n := n.(type) {
case *CallPartExpr:
f = n.Func
case *ClosureExpr:
f = n.Func
case *Func:
f = n
}
if f == nil || f.Nname == nil {
return "<nil>"
}
s = f.Nname.Sym()
}
s := f.Sym()
pkg := s.Pkg
p := base.Ctxt.Pkgpath


@ -37,6 +37,8 @@ func main() {
nodeType := lookup("Node")
ntypeType := lookup("Ntype")
nodesType := lookup("Nodes")
slicePtrCaseClauseType := types.NewSlice(types.NewPointer(lookup("CaseClause")))
slicePtrCommClauseType := types.NewSlice(types.NewPointer(lookup("CommClause")))
ptrFieldType := types.NewPointer(lookup("Field"))
slicePtrFieldType := types.NewSlice(ptrFieldType)
ptrIdentType := types.NewPointer(lookup("Ident"))
@ -76,6 +78,10 @@ func main() {
switch {
case is(nodesType):
fmt.Fprintf(&buf, "c.%s = c.%s.Copy()\n", name, name)
case is(slicePtrCaseClauseType):
fmt.Fprintf(&buf, "c.%s = copyCases(c.%s)\n", name, name)
case is(slicePtrCommClauseType):
fmt.Fprintf(&buf, "c.%s = copyComms(c.%s)\n", name, name)
case is(ptrFieldType):
fmt.Fprintf(&buf, "if c.%s != nil { c.%s = c.%s.copy() }\n", name, name, name)
case is(slicePtrFieldType):
@ -94,6 +100,10 @@ func main() {
fmt.Fprintf(&buf, "err = maybeDo(n.%s, err, do)\n", name)
case is(nodesType):
fmt.Fprintf(&buf, "err = maybeDoList(n.%s, err, do)\n", name)
case is(slicePtrCaseClauseType):
fmt.Fprintf(&buf, "err = maybeDoCases(n.%s, err, do)\n", name)
case is(slicePtrCommClauseType):
fmt.Fprintf(&buf, "err = maybeDoComms(n.%s, err, do)\n", name)
case is(ptrFieldType):
fmt.Fprintf(&buf, "err = maybeDoField(n.%s, err, do)\n", name)
case is(slicePtrFieldType):
@ -113,6 +123,10 @@ func main() {
fmt.Fprintf(&buf, "n.%s = toNtype(maybeEdit(n.%s, edit))\n", name, name)
case is(nodesType):
fmt.Fprintf(&buf, "editList(n.%s, edit)\n", name)
case is(slicePtrCaseClauseType):
fmt.Fprintf(&buf, "editCases(n.%s, edit)\n", name)
case is(slicePtrCommClauseType):
fmt.Fprintf(&buf, "editComms(n.%s, edit)\n", name)
case is(ptrFieldType):
fmt.Fprintf(&buf, "editField(n.%s, edit)\n", name)
case is(slicePtrFieldType):
@ -155,10 +169,6 @@ func forNodeFields(typName string, typ *types.Struct, f func(name string, is fun
case "orig":
continue
}
switch typName + "." + v.Name() {
case "AddStringExpr.Alloc":
continue
}
f(v.Name(), func(t types.Type) bool { return types.Identical(t, v.Type()) })
}
}


@ -268,7 +268,6 @@ const (
nameInlLocal // PAUTO created by inliner, derived from callee local
nameOpenDeferSlot // if temporary var storing info for open-coded defers
nameLibfuzzerExtraCounter // if PEXTERN should be assigned to __libfuzzer_extra_counters section
nameIsDDD // is function argument a ...
nameAlias // is type name an alias
)
@ -286,7 +285,6 @@ func (n *Name) InlFormal() bool { return n.flags&nameInlFormal != 0
func (n *Name) InlLocal() bool { return n.flags&nameInlLocal != 0 }
func (n *Name) OpenDeferSlot() bool { return n.flags&nameOpenDeferSlot != 0 }
func (n *Name) LibfuzzerExtraCounter() bool { return n.flags&nameLibfuzzerExtraCounter != 0 }
func (n *Name) IsDDD() bool { return n.flags&nameIsDDD != 0 }
func (n *Name) SetCaptured(b bool) { n.flags.set(nameCaptured, b) }
func (n *Name) setReadonly(b bool) { n.flags.set(nameReadonly, b) }
@ -302,7 +300,6 @@ func (n *Name) SetInlFormal(b bool) { n.flags.set(nameInlFormal, b)
func (n *Name) SetInlLocal(b bool) { n.flags.set(nameInlLocal, b) }
func (n *Name) SetOpenDeferSlot(b bool) { n.flags.set(nameOpenDeferSlot, b) }
func (n *Name) SetLibfuzzerExtraCounter(b bool) { n.flags.set(nameLibfuzzerExtraCounter, b) }
func (n *Name) SetIsDDD(b bool) { n.flags.set(nameIsDDD, b) }
// MarkReadonly indicates that n is an ONAME with readonly contents.
func (n *Name) MarkReadonly() {


@ -226,29 +226,26 @@ func (n *CallPartExpr) editChildren(edit func(Node) Node) {
n.X = maybeEdit(n.X, edit)
}
func (n *CaseStmt) Format(s fmt.State, verb rune) { FmtNode(n, s, verb) }
func (n *CaseStmt) copy() Node {
func (n *CaseClause) Format(s fmt.State, verb rune) { FmtNode(n, s, verb) }
func (n *CaseClause) copy() Node {
c := *n
c.init = c.init.Copy()
c.Vars = c.Vars.Copy()
c.List = c.List.Copy()
c.Body = c.Body.Copy()
return &c
}
func (n *CaseStmt) doChildren(do func(Node) error) error {
func (n *CaseClause) doChildren(do func(Node) error) error {
var err error
err = maybeDoList(n.init, err, do)
err = maybeDoList(n.Vars, err, do)
err = maybeDo(n.Var, err, do)
err = maybeDoList(n.List, err, do)
err = maybeDo(n.Comm, err, do)
err = maybeDoList(n.Body, err, do)
return err
}
func (n *CaseStmt) editChildren(edit func(Node) Node) {
func (n *CaseClause) editChildren(edit func(Node) Node) {
editList(n.init, edit)
editList(n.Vars, edit)
n.Var = maybeEdit(n.Var, edit)
editList(n.List, edit)
n.Comm = maybeEdit(n.Comm, edit)
editList(n.Body, edit)
}
@ -296,6 +293,26 @@ func (n *ClosureReadExpr) editChildren(edit func(Node) Node) {
editList(n.init, edit)
}
func (n *CommClause) Format(s fmt.State, verb rune) { FmtNode(n, s, verb) }
func (n *CommClause) copy() Node {
c := *n
c.init = c.init.Copy()
c.Body = c.Body.Copy()
return &c
}
func (n *CommClause) doChildren(do func(Node) error) error {
var err error
err = maybeDoList(n.init, err, do)
err = maybeDo(n.Comm, err, do)
err = maybeDoList(n.Body, err, do)
return err
}
func (n *CommClause) editChildren(edit func(Node) Node) {
editList(n.init, edit)
n.Comm = maybeEdit(n.Comm, edit)
editList(n.Body, edit)
}
func (n *CompLitExpr) Format(s fmt.State, verb rune) { FmtNode(n, s, verb) }
func (n *CompLitExpr) copy() Node {
c := *n
@ -725,22 +742,23 @@ func (n *RangeStmt) Format(s fmt.State, verb rune) { FmtNode(n, s, verb) }
func (n *RangeStmt) copy() Node {
c := *n
c.init = c.init.Copy()
c.Vars = c.Vars.Copy()
c.Body = c.Body.Copy()
return &c
}
func (n *RangeStmt) doChildren(do func(Node) error) error {
var err error
err = maybeDoList(n.init, err, do)
err = maybeDoList(n.Vars, err, do)
err = maybeDo(n.X, err, do)
err = maybeDo(n.Key, err, do)
err = maybeDo(n.Value, err, do)
err = maybeDoList(n.Body, err, do)
return err
}
func (n *RangeStmt) editChildren(edit func(Node) Node) {
editList(n.init, edit)
editList(n.Vars, edit)
n.X = maybeEdit(n.X, edit)
n.Key = maybeEdit(n.Key, edit)
n.Value = maybeEdit(n.Value, edit)
editList(n.Body, edit)
}
@ -781,20 +799,20 @@ func (n *SelectStmt) Format(s fmt.State, verb rune) { FmtNode(n, s, verb) }
func (n *SelectStmt) copy() Node {
c := *n
c.init = c.init.Copy()
c.Cases = c.Cases.Copy()
c.Cases = copyComms(c.Cases)
c.Compiled = c.Compiled.Copy()
return &c
}
func (n *SelectStmt) doChildren(do func(Node) error) error {
var err error
err = maybeDoList(n.init, err, do)
err = maybeDoList(n.Cases, err, do)
err = maybeDoComms(n.Cases, err, do)
err = maybeDoList(n.Compiled, err, do)
return err
}
func (n *SelectStmt) editChildren(edit func(Node) Node) {
editList(n.init, edit)
editList(n.Cases, edit)
editComms(n.Cases, edit)
editList(n.Compiled, edit)
}
@ -838,40 +856,44 @@ func (n *SliceExpr) Format(s fmt.State, verb rune) { FmtNode(n, s, verb) }
func (n *SliceExpr) copy() Node {
c := *n
c.init = c.init.Copy()
c.List = c.List.Copy()
return &c
}
func (n *SliceExpr) doChildren(do func(Node) error) error {
var err error
err = maybeDoList(n.init, err, do)
err = maybeDo(n.X, err, do)
err = maybeDoList(n.List, err, do)
err = maybeDo(n.Low, err, do)
err = maybeDo(n.High, err, do)
err = maybeDo(n.Max, err, do)
return err
}
func (n *SliceExpr) editChildren(edit func(Node) Node) {
editList(n.init, edit)
n.X = maybeEdit(n.X, edit)
editList(n.List, edit)
n.Low = maybeEdit(n.Low, edit)
n.High = maybeEdit(n.High, edit)
n.Max = maybeEdit(n.Max, edit)
}
func (n *SliceHeaderExpr) Format(s fmt.State, verb rune) { FmtNode(n, s, verb) }
func (n *SliceHeaderExpr) copy() Node {
c := *n
c.init = c.init.Copy()
c.LenCap = c.LenCap.Copy()
return &c
}
func (n *SliceHeaderExpr) doChildren(do func(Node) error) error {
var err error
err = maybeDoList(n.init, err, do)
err = maybeDo(n.Ptr, err, do)
err = maybeDoList(n.LenCap, err, do)
err = maybeDo(n.Len, err, do)
err = maybeDo(n.Cap, err, do)
return err
}
func (n *SliceHeaderExpr) editChildren(edit func(Node) Node) {
editList(n.init, edit)
n.Ptr = maybeEdit(n.Ptr, edit)
editList(n.LenCap, edit)
n.Len = maybeEdit(n.Len, edit)
n.Cap = maybeEdit(n.Cap, edit)
}
func (n *SliceType) Format(s fmt.State, verb rune) { FmtNode(n, s, verb) }
@ -941,7 +963,7 @@ func (n *SwitchStmt) Format(s fmt.State, verb rune) { FmtNode(n, s, verb) }
func (n *SwitchStmt) copy() Node {
c := *n
c.init = c.init.Copy()
c.Cases = c.Cases.Copy()
c.Cases = copyCases(c.Cases)
c.Compiled = c.Compiled.Copy()
return &c
}
@ -949,14 +971,14 @@ func (n *SwitchStmt) doChildren(do func(Node) error) error {
var err error
err = maybeDoList(n.init, err, do)
err = maybeDo(n.Tag, err, do)
err = maybeDoList(n.Cases, err, do)
err = maybeDoCases(n.Cases, err, do)
err = maybeDoList(n.Compiled, err, do)
return err
}
func (n *SwitchStmt) editChildren(edit func(Node) Node) {
editList(n.init, edit)
n.Tag = maybeEdit(n.Tag, edit)
editList(n.Cases, edit)
editCases(n.Cases, edit)
editList(n.Compiled, edit)
}


@ -76,48 +76,27 @@ func (v *bottomUpVisitor) visit(n *Func) uint32 {
min := v.visitgen
v.stack = append(v.stack, n)
do := func(defn Node) {
if defn != nil {
if m := v.visit(defn.(*Func)); m < min {
min = m
}
}
}
Visit(n, func(n Node) {
switch n.Op() {
case ONAME:
n := n.(*Name)
if n.Class_ == PFUNC {
if n != nil && n.Name().Defn != nil {
if m := v.visit(n.Name().Defn.(*Func)); m < min {
min = m
}
}
if n := n.(*Name); n.Class_ == PFUNC {
do(n.Defn)
}
case OMETHEXPR:
n := n.(*MethodExpr)
fn := MethodExprName(n)
if fn != nil && fn.Defn != nil {
if m := v.visit(fn.Defn.(*Func)); m < min {
min = m
}
}
case ODOTMETH:
n := n.(*SelectorExpr)
fn := MethodExprName(n)
if fn != nil && fn.Op() == ONAME && fn.Class_ == PFUNC && fn.Defn != nil {
if m := v.visit(fn.Defn.(*Func)); m < min {
min = m
}
}
case OCALLPART:
n := n.(*CallPartExpr)
fn := AsNode(n.Method.Nname)
if fn != nil && fn.Op() == ONAME {
if fn := fn.(*Name); fn.Class_ == PFUNC && fn.Name().Defn != nil {
if m := v.visit(fn.Name().Defn.(*Func)); m < min {
min = m
}
}
case ODOTMETH, OCALLPART, OMETHEXPR:
if fn := MethodExprName(n); fn != nil {
do(fn.Defn)
}
case OCLOSURE:
n := n.(*ClosureExpr)
if m := v.visit(n.Func); m < min {
min = m
}
do(n.Func)
}
})


@ -173,24 +173,94 @@ func NewBranchStmt(pos src.XPos, op Op, label *types.Sym) *BranchStmt {
func (n *BranchStmt) Sym() *types.Sym { return n.Label }
// A CaseStmt is a case statement in a switch or select: case List: Body.
type CaseStmt struct {
// A CaseClause is a case statement in a switch or select: case List: Body.
type CaseClause struct {
miniStmt
Vars Nodes // declared variable for this case in type switch
Var Node // declared variable for this case in type switch
List Nodes // list of expressions for switch, early select
Comm Node // communication case (Exprs[0]) after select is type-checked
Body Nodes
}
func NewCaseStmt(pos src.XPos, list, body []Node) *CaseStmt {
n := &CaseStmt{}
func NewCaseStmt(pos src.XPos, list, body []Node) *CaseClause {
n := &CaseClause{List: list, Body: body}
n.pos = pos
n.op = OCASE
n.List.Set(list)
n.Body.Set(body)
return n
}
// TODO(mdempsky): Generate these with mknode.go.
func copyCases(list []*CaseClause) []*CaseClause {
if list == nil {
return nil
}
c := make([]*CaseClause, len(list))
copy(c, list)
return c
}
func maybeDoCases(list []*CaseClause, err error, do func(Node) error) error {
if err != nil {
return err
}
for _, x := range list {
if x != nil {
if err := do(x); err != nil {
return err
}
}
}
return nil
}
func editCases(list []*CaseClause, edit func(Node) Node) {
for i, x := range list {
if x != nil {
list[i] = edit(x).(*CaseClause)
}
}
}
type CommClause struct {
miniStmt
Comm Node // communication case
Body Nodes
}
func NewCommStmt(pos src.XPos, comm Node, body []Node) *CommClause {
n := &CommClause{Comm: comm, Body: body}
n.pos = pos
n.op = OCASE
return n
}
// TODO(mdempsky): Generate these with mknode.go.
func copyComms(list []*CommClause) []*CommClause {
if list == nil {
return nil
}
c := make([]*CommClause, len(list))
copy(c, list)
return c
}
func maybeDoComms(list []*CommClause, err error, do func(Node) error) error {
if err != nil {
return err
}
for _, x := range list {
if x != nil {
if err := do(x); err != nil {
return err
}
}
}
return nil
}
func editComms(list []*CommClause, edit func(Node) Node) {
for i, x := range list {
if x != nil {
list[i] = edit(x).(*CommClause)
}
}
}
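
The copy/maybeDo/edit helpers above are per-type copies of the same three patterns, which the TODOs propose generating with mknode.go. With type parameters, which the compiler could not yet use in 2020, each trio would collapse to one function; a hypothetical Go 1.18+ sketch of the copy helper:

func copyList[T any](list []*T) []*T {
	if list == nil {
		return nil
	}
	c := make([]*T, len(list))
	copy(c, list)
	return c
}
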
// A ForStmt is a non-range for loop: for Init; Cond; Post { Body }
// Op can be OFOR or OFORUNTIL (!Cond).
type ForStmt struct {
@ -203,11 +273,13 @@ type ForStmt struct {
HasBreak bool
}
func NewForStmt(pos src.XPos, init []Node, cond, post Node, body []Node) *ForStmt {
func NewForStmt(pos src.XPos, init Node, cond, post Node, body []Node) *ForStmt {
n := &ForStmt{Cond: cond, Post: post}
n.pos = pos
n.op = OFOR
n.init.Set(init)
if init != nil {
n.init = []Node{init}
}
n.Body.Set(body)
return n
}
@ -290,32 +362,27 @@ func NewLabelStmt(pos src.XPos, label *types.Sym) *LabelStmt {
func (n *LabelStmt) Sym() *types.Sym { return n.Label }
// A RangeStmt is a range loop: for Vars = range X { Stmts }
// Op can be OFOR or OFORUNTIL (!Cond).
// A RangeStmt is a range loop: for Key, Value = range X { Body }
type RangeStmt struct {
miniStmt
Label *types.Sym
Vars Nodes // TODO(rsc): Replace with Key, Value Node
Def bool
X Node
Key Node
Value Node
Body Nodes
HasBreak bool
typ *types.Type // TODO(rsc): Remove - use X.Type() instead
Prealloc *Name
}
func NewRangeStmt(pos src.XPos, vars []Node, x Node, body []Node) *RangeStmt {
n := &RangeStmt{X: x}
func NewRangeStmt(pos src.XPos, key, value, x Node, body []Node) *RangeStmt {
n := &RangeStmt{X: x, Key: key, Value: value}
n.pos = pos
n.op = ORANGE
n.Vars.Set(vars)
n.Body.Set(body)
return n
}
func (n *RangeStmt) Type() *types.Type { return n.typ }
func (n *RangeStmt) SetType(x *types.Type) { n.typ = x }
// A ReturnStmt is a return statement.
type ReturnStmt struct {
miniStmt
@ -339,18 +406,17 @@ func (n *ReturnStmt) SetOrig(x Node) { n.orig = x }
type SelectStmt struct {
miniStmt
Label *types.Sym
Cases Nodes
Cases []*CommClause
HasBreak bool
// TODO(rsc): Instead of recording here, replace with a block?
Compiled Nodes // compiled form, after walkswitch
}
func NewSelectStmt(pos src.XPos, cases []Node) *SelectStmt {
n := &SelectStmt{}
func NewSelectStmt(pos src.XPos, cases []*CommClause) *SelectStmt {
n := &SelectStmt{Cases: cases}
n.pos = pos
n.op = OSELECT
n.Cases.Set(cases)
return n
}
@ -372,7 +438,7 @@ func NewSendStmt(pos src.XPos, ch, value Node) *SendStmt {
type SwitchStmt struct {
miniStmt
Tag Node
Cases Nodes // list of *CaseStmt
Cases []*CaseClause
Label *types.Sym
HasBreak bool
@ -380,11 +446,10 @@ type SwitchStmt struct {
Compiled Nodes // compiled form, after walkswitch
}
func NewSwitchStmt(pos src.XPos, tag Node, cases []Node) *SwitchStmt {
n := &SwitchStmt{Tag: tag}
func NewSwitchStmt(pos src.XPos, tag Node, cases []*CaseClause) *SwitchStmt {
n := &SwitchStmt{Tag: tag, Cases: cases}
n.pos = pos
n.op = OSWITCH
n.Cases.Set(cases)
return n
}
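
To summarize the new typed-clause representation, here is how source constructs populate the fields above (standalone example, not from this CL):

package main

func main() {
	var v interface{} = 1
	ch := make(chan int, 1)
	m := map[string]int{"a": 1}

	switch x := v.(type) { // SwitchStmt.Cases holds []*CaseClause;
	case int: // this clause's Var is x and its List is [int]
		_ = x
	}

	select { // SelectStmt.Cases holds []*CommClause;
	case ch <- 1: // this clause's Comm is the send statement
	default:
	}

	for k, v := range m { // RangeStmt.Key is k, RangeStmt.Value is v
		_, _ = k, v
	}
}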


@ -115,14 +115,6 @@ func (n *StructType) SetOTYPE(t *types.Type) {
n.Fields = nil
}
func deepCopyFields(pos src.XPos, fields []*Field) []*Field {
var out []*Field
for _, f := range fields {
out = append(out, f.deepCopy(pos))
}
return out
}
// An InterfaceType represents an interface { ... } type syntax.
type InterfaceType struct {
miniType
@ -250,26 +242,6 @@ func editFields(list []*Field, edit func(Node) Node) {
}
}
func (f *Field) deepCopy(pos src.XPos) *Field {
if f == nil {
return nil
}
fpos := pos
if !pos.IsKnown() {
fpos = f.Pos
}
decl := f.Decl
if decl != nil {
decl = DeepCopy(pos, decl).(*Name)
}
ntype := f.Ntype
if ntype != nil {
ntype = DeepCopy(pos, ntype).(Ntype)
}
// No keyed literal here: if a new struct field is added, we want this to stop compiling.
return &Field{fpos, f.Sym, ntype, f.Type, f.Embedded, f.IsDDD, f.Note, decl}
}
// A SliceType represents a []Elem type syntax.
// If DDD is true, it's the ...Elem at the end of a function list.
type SliceType struct {


@ -217,10 +217,9 @@ func EditChildren(n Node, edit func(Node) Node) {
// Note that editList only calls edit on the nodes in the list, not their children.
// If x's children should be processed, edit(x) must call EditChildren(x, edit) itself.
func editList(list Nodes, edit func(Node) Node) {
s := list
for i, x := range list {
if x != nil {
s[i] = edit(x)
list[i] = edit(x)
}
}
}


@ -24,6 +24,7 @@ import (
"cmd/compile/internal/ir"
"cmd/compile/internal/objw"
"cmd/compile/internal/ssa"
"cmd/compile/internal/typebits"
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/objabi"
@ -179,11 +180,7 @@ type progeffectscache struct {
// nor do we care about non-local variables,
// nor do we care about empty structs (handled by the pointer check),
// nor do we care about the fake PAUTOHEAP variables.
func ShouldTrack(nn ir.Node) bool {
if nn.Op() != ir.ONAME {
return false
}
n := nn.(*ir.Name)
func ShouldTrack(n *ir.Name) bool {
return (n.Class_ == ir.PAUTO || n.Class_ == ir.PPARAM || n.Class_ == ir.PPARAMOUT) && n.Type().HasPointers()
}
@ -248,19 +245,17 @@ const (
// liveness effects v has on that variable.
// If v does not affect any tracked variables, it returns -1, 0.
func (lv *liveness) valueEffects(v *ssa.Value) (int32, liveEffect) {
n, e := affectedNode(v)
if e == 0 || n == nil || n.Op() != ir.ONAME { // cheapest checks first
n, e := affectedVar(v)
if e == 0 || n == nil { // cheapest checks first
return -1, 0
}
nn := n.(*ir.Name)
// AllocFrame has dropped unused variables from
// lv.fn.Func.Dcl, but they might still be referenced by
// OpVarFoo pseudo-ops. Ignore them to prevent "lost track of
// variable" ICEs (issue 19632).
switch v.Op {
case ssa.OpVarDef, ssa.OpVarKill, ssa.OpVarLive, ssa.OpKeepAlive:
if !nn.Name().Used() {
if !n.Name().Used() {
return -1, 0
}
}
@ -283,14 +278,14 @@ func (lv *liveness) valueEffects(v *ssa.Value) (int32, liveEffect) {
return -1, 0
}
if pos, ok := lv.idx[nn]; ok {
if pos, ok := lv.idx[n]; ok {
return pos, effect
}
return -1, 0
}
// affectedNode returns the *Node affected by v
func affectedNode(v *ssa.Value) (ir.Node, ssa.SymEffect) {
// affectedVar returns the *ir.Name node affected by v
func affectedVar(v *ssa.Value) (*ir.Name, ssa.SymEffect) {
// Special cases.
switch v.Op {
case ssa.OpLoadReg:
@ -381,82 +376,6 @@ func (lv *liveness) blockEffects(b *ssa.Block) *blockEffects {
return &lv.be[b.ID]
}
// NOTE: The bitmap for a specific type t could be cached in t after
// the first run and then simply copied into bv at the correct offset
// on future calls with the same type t.
func SetTypeBits(t *types.Type, off int64, bv bitvec.BitVec) {
if t.Align > 0 && off&int64(t.Align-1) != 0 {
base.Fatalf("onebitwalktype1: invalid initial alignment: type %v has alignment %d, but offset is %v", t, t.Align, off)
}
if !t.HasPointers() {
// Note: this case ensures that pointers to go:notinheap types
// are not considered pointers by garbage collection and stack copying.
return
}
switch t.Kind() {
case types.TPTR, types.TUNSAFEPTR, types.TFUNC, types.TCHAN, types.TMAP:
if off&int64(types.PtrSize-1) != 0 {
base.Fatalf("onebitwalktype1: invalid alignment, %v", t)
}
bv.Set(int32(off / int64(types.PtrSize))) // pointer
case types.TSTRING:
// struct { byte *str; intgo len; }
if off&int64(types.PtrSize-1) != 0 {
base.Fatalf("onebitwalktype1: invalid alignment, %v", t)
}
bv.Set(int32(off / int64(types.PtrSize))) //pointer in first slot
case types.TINTER:
// struct { Itab *tab; void *data; }
// or, when isnilinter(t)==true:
// struct { Type *type; void *data; }
if off&int64(types.PtrSize-1) != 0 {
base.Fatalf("onebitwalktype1: invalid alignment, %v", t)
}
// The first word of an interface is a pointer, but we don't
// treat it as such.
// 1. If it is a non-empty interface, the pointer points to an itab
// which is always in persistentalloc space.
// 2. If it is an empty interface, the pointer points to a _type.
// a. If it is a compile-time-allocated type, it points into
// the read-only data section.
// b. If it is a reflect-allocated type, it points into the Go heap.
// Reflect is responsible for keeping a reference to
// the underlying type so it won't be GCd.
// If we ever have a moving GC, we need to change this for 2b (as
// well as scan itabs to update their itab._type fields).
bv.Set(int32(off/int64(types.PtrSize) + 1)) // pointer in second slot
case types.TSLICE:
// struct { byte *array; uintgo len; uintgo cap; }
if off&int64(types.PtrSize-1) != 0 {
base.Fatalf("onebitwalktype1: invalid TARRAY alignment, %v", t)
}
bv.Set(int32(off / int64(types.PtrSize))) // pointer in first slot (BitsPointer)
case types.TARRAY:
elt := t.Elem()
if elt.Width == 0 {
// Short-circuit for #20739.
break
}
for i := int64(0); i < t.NumElem(); i++ {
SetTypeBits(elt, off, bv)
off += elt.Width
}
case types.TSTRUCT:
for _, f := range t.Fields().Slice() {
SetTypeBits(f.Type, off+f.Offset, bv)
}
default:
base.Fatalf("onebitwalktype1: unexpected type, %v", t)
}
}
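
The per-type pointer bitmap this function computes (now moving to the typebits package) can be mimicked in user code with reflect. A simplified standalone analogue (hypothetical helper; it ignores go:notinheap and alignment checks, and sets only the data word of interfaces, mirroring the comment above):

package main

import (
	"fmt"
	"reflect"
	"unsafe"
)

const wordSize = unsafe.Sizeof(uintptr(0))

// pointerWords reports, for each pointer-sized word of t, whether that
// word holds a pointer the garbage collector must scan.
func pointerWords(t reflect.Type) []bool {
	words := make([]bool, t.Size()/wordSize)
	var walk func(t reflect.Type, off uintptr)
	walk = func(t reflect.Type, off uintptr) {
		switch t.Kind() {
		case reflect.Ptr, reflect.UnsafePointer, reflect.Func, reflect.Chan, reflect.Map:
			words[off/wordSize] = true
		case reflect.String, reflect.Slice:
			words[off/wordSize] = true // data pointer in first word
		case reflect.Interface:
			words[off/wordSize+1] = true // data word only, as explained above
		case reflect.Array:
			for i := 0; i < t.Len(); i++ {
				walk(t.Elem(), off+uintptr(i)*t.Elem().Size())
			}
		case reflect.Struct:
			for i := 0; i < t.NumField(); i++ {
				f := t.Field(i)
				walk(f.Type, off+f.Offset)
			}
		}
	}
	walk(t, 0)
	return words
}

func main() {
	type T struct {
		a int
		p *int
		s string
	}
	fmt.Println(pointerWords(reflect.TypeOf(T{}))) // [false true true false] on 64-bit
}
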
// Generates live pointer value maps for arguments and local variables. The
// this argument and the in arguments are always assumed live. The vars
// argument is a slice of *Nodes.
@ -469,10 +388,10 @@ func (lv *liveness) pointerMap(liveout bitvec.BitVec, vars []*ir.Name, args, loc
node := vars[i]
switch node.Class_ {
case ir.PAUTO:
SetTypeBits(node.Type(), node.FrameOffset()+lv.stkptrsize, locals)
typebits.Set(node.Type(), node.FrameOffset()+lv.stkptrsize, locals)
case ir.PPARAM, ir.PPARAMOUT:
SetTypeBits(node.Type(), node.FrameOffset(), args)
typebits.Set(node.Type(), node.FrameOffset(), args)
}
}
}
@ -1315,15 +1234,15 @@ func WriteFuncMap(fn *ir.Func) {
off = objw.Uint32(lsym, off, uint32(bv.N))
if ir.IsMethod(fn) {
SetTypeBits(fn.Type().Recvs(), 0, bv)
typebits.Set(fn.Type().Recvs(), 0, bv)
}
if fn.Type().NumParams() > 0 {
SetTypeBits(fn.Type().Params(), 0, bv)
typebits.Set(fn.Type().Params(), 0, bv)
}
off = objw.BitVec(lsym, off, bv)
if fn.Type().NumResults() > 0 {
SetTypeBits(fn.Type().Results(), 0, bv)
typebits.Set(fn.Type().Results(), 0, bv)
off = objw.BitVec(lsym, off, bv)
}


@ -510,11 +510,7 @@ func (p *noder) importDecl(imp *syntax.ImportDecl) {
func (p *noder) varDecl(decl *syntax.VarDecl) []ir.Node {
names := p.declNames(ir.ONAME, decl.NameList)
typ := p.typeExprOrNil(decl.Type)
var exprs []ir.Node
if decl.Values != nil {
exprs = p.exprList(decl.Values)
}
exprs := p.exprList(decl.Values)
if pragma, ok := decl.Pragma.(*pragmas); ok {
if len(pragma.Embeds) > 0 {
@ -753,10 +749,14 @@ func (p *noder) param(param *syntax.Field, dddOk, final bool) *ir.Field {
}
func (p *noder) exprList(expr syntax.Expr) []ir.Node {
if list, ok := expr.(*syntax.ListExpr); ok {
return p.exprs(list.ElemList)
switch expr := expr.(type) {
case nil:
return nil
case *syntax.ListExpr:
return p.exprs(expr.ElemList)
default:
return []ir.Node{p.expr(expr)}
}
return []ir.Node{p.expr(expr)}
}
func (p *noder) exprs(exprs []syntax.Expr) []ir.Node {
@ -775,17 +775,14 @@ func (p *noder) expr(expr syntax.Expr) ir.Node {
case *syntax.Name:
return p.mkname(expr)
case *syntax.BasicLit:
n := ir.NewLiteral(p.basicLit(expr))
n := ir.NewBasicLit(p.pos(expr), p.basicLit(expr))
if expr.Kind == syntax.RuneLit {
n.SetType(types.UntypedRune)
}
n.SetDiag(expr.Bad) // avoid follow-on errors if there was a syntax error
return n
case *syntax.CompositeLit:
n := ir.NewCompLitExpr(p.pos(expr), ir.OCOMPLIT, nil, nil)
if expr.Type != nil {
n.Ntype = ir.Node(p.expr(expr.Type)).(ir.Ntype)
}
n := ir.NewCompLitExpr(p.pos(expr), ir.OCOMPLIT, p.typeExpr(expr.Type), nil)
l := p.exprs(expr.ElemList)
for i, e := range l {
l[i] = p.wrapname(expr.ElemList[i], e)
@ -818,17 +815,16 @@ func (p *noder) expr(expr syntax.Expr) ir.Node {
if expr.Full {
op = ir.OSLICE3
}
n := ir.NewSliceExpr(p.pos(expr), op, p.expr(expr.X))
x := p.expr(expr.X)
var index [3]ir.Node
for i, x := range &expr.Index {
if x != nil {
index[i] = p.expr(x)
for i, n := range &expr.Index {
if n != nil {
index[i] = p.expr(n)
}
}
n.SetSliceBounds(index[0], index[1], index[2])
return n
return ir.NewSliceExpr(p.pos(expr), op, x, index[0], index[1], index[2])
case *syntax.AssertExpr:
return ir.NewTypeAssertExpr(p.pos(expr), p.expr(expr.X), p.typeExpr(expr.Type).(ir.Ntype))
return ir.NewTypeAssertExpr(p.pos(expr), p.expr(expr.X), p.typeExpr(expr.Type))
case *syntax.Operation:
if expr.Op == syntax.Add && expr.Y != nil {
return p.sum(expr)
@ -852,8 +848,7 @@ func (p *noder) expr(expr syntax.Expr) ir.Node {
}
return ir.NewBinaryExpr(pos, op, x, y)
case *syntax.CallExpr:
n := ir.NewCallExpr(p.pos(expr), ir.OCALL, p.expr(expr.Fun), nil)
n.Args.Set(p.exprs(expr.ArgList))
n := ir.NewCallExpr(p.pos(expr), ir.OCALL, p.expr(expr.Fun), p.exprs(expr.ArgList))
n.IsDDD = expr.HasDots
return n
@ -1120,7 +1115,7 @@ func (p *noder) stmt(stmt syntax.Stmt) ir.Node {
func (p *noder) stmtFall(stmt syntax.Stmt, fallOK bool) ir.Node {
p.setlineno(stmt)
switch stmt := stmt.(type) {
case *syntax.EmptyStmt:
case nil, *syntax.EmptyStmt:
return nil
case *syntax.LabeledStmt:
return p.labeledStmt(stmt, fallOK)
@ -1193,12 +1188,7 @@ func (p *noder) stmtFall(stmt syntax.Stmt, fallOK bool) ir.Node {
}
return ir.NewGoDeferStmt(p.pos(stmt), op, p.expr(stmt.Call))
case *syntax.ReturnStmt:
var results []ir.Node
if stmt.Results != nil {
results = p.exprList(stmt.Results)
}
n := ir.NewReturnStmt(p.pos(stmt), nil)
n.Results.Set(results)
n := ir.NewReturnStmt(p.pos(stmt), p.exprList(stmt.Results))
if len(n.Results) == 0 && ir.CurFunc != nil {
for _, ln := range ir.CurFunc.Dcl {
if ln.Class_ == ir.PPARAM {
@ -1292,14 +1282,11 @@ func (p *noder) blockStmt(stmt *syntax.BlockStmt) []ir.Node {
func (p *noder) ifStmt(stmt *syntax.IfStmt) ir.Node {
p.openScope(stmt.Pos())
n := ir.NewIfStmt(p.pos(stmt), nil, nil, nil)
if stmt.Init != nil {
*n.PtrInit() = []ir.Node{p.stmt(stmt.Init)}
init := p.stmt(stmt.Init)
n := ir.NewIfStmt(p.pos(stmt), p.expr(stmt.Cond), p.blockStmt(stmt.Then), nil)
if init != nil {
*n.PtrInit() = []ir.Node{init}
}
if stmt.Cond != nil {
n.Cond = p.expr(stmt.Cond)
}
n.Body.Set(p.blockStmt(stmt.Then))
if stmt.Else != nil {
e := p.stmt(stmt.Else)
if e.Op() == ir.OBLOCK {
@ -1320,53 +1307,46 @@ func (p *noder) forStmt(stmt *syntax.ForStmt) ir.Node {
panic("unexpected RangeClause")
}
n := ir.NewRangeStmt(p.pos(r), nil, p.expr(r.X), nil)
n := ir.NewRangeStmt(p.pos(r), nil, nil, p.expr(r.X), nil)
if r.Lhs != nil {
n.Def = r.Def
n.Vars.Set(p.assignList(r.Lhs, n, n.Def))
lhs := p.assignList(r.Lhs, n, n.Def)
n.Key = lhs[0]
if len(lhs) > 1 {
n.Value = lhs[1]
}
}
n.Body.Set(p.blockStmt(stmt.Body))
p.closeAnotherScope()
return n
}
n := ir.NewForStmt(p.pos(stmt), nil, nil, nil, nil)
if stmt.Init != nil {
*n.PtrInit() = []ir.Node{p.stmt(stmt.Init)}
}
if stmt.Cond != nil {
n.Cond = p.expr(stmt.Cond)
}
if stmt.Post != nil {
n.Post = p.stmt(stmt.Post)
}
n.Body.Set(p.blockStmt(stmt.Body))
n := ir.NewForStmt(p.pos(stmt), p.stmt(stmt.Init), p.expr(stmt.Cond), p.stmt(stmt.Post), p.blockStmt(stmt.Body))
p.closeAnotherScope()
return n
}
func (p *noder) switchStmt(stmt *syntax.SwitchStmt) ir.Node {
p.openScope(stmt.Pos())
n := ir.NewSwitchStmt(p.pos(stmt), nil, nil)
if stmt.Init != nil {
*n.PtrInit() = []ir.Node{p.stmt(stmt.Init)}
}
if stmt.Tag != nil {
n.Tag = p.expr(stmt.Tag)
init := p.stmt(stmt.Init)
n := ir.NewSwitchStmt(p.pos(stmt), p.expr(stmt.Tag), nil)
if init != nil {
*n.PtrInit() = []ir.Node{init}
}
var tswitch *ir.TypeSwitchGuard
if l := n.Tag; l != nil && l.Op() == ir.OTYPESW {
tswitch = l.(*ir.TypeSwitchGuard)
}
n.Cases.Set(p.caseClauses(stmt.Body, tswitch, stmt.Rbrace))
n.Cases = p.caseClauses(stmt.Body, tswitch, stmt.Rbrace)
p.closeScope(stmt.Rbrace)
return n
}
func (p *noder) caseClauses(clauses []*syntax.CaseClause, tswitch *ir.TypeSwitchGuard, rbrace syntax.Pos) []ir.Node {
nodes := make([]ir.Node, 0, len(clauses))
func (p *noder) caseClauses(clauses []*syntax.CaseClause, tswitch *ir.TypeSwitchGuard, rbrace syntax.Pos) []*ir.CaseClause {
nodes := make([]*ir.CaseClause, 0, len(clauses))
for i, clause := range clauses {
p.setlineno(clause)
if i > 0 {
@ -1374,14 +1354,11 @@ func (p *noder) caseClauses(clauses []*syntax.CaseClause, tswitch *ir.TypeSwitch
}
p.openScope(clause.Pos())
n := ir.NewCaseStmt(p.pos(clause), nil, nil)
if clause.Cases != nil {
n.List.Set(p.exprList(clause.Cases))
}
n := ir.NewCaseStmt(p.pos(clause), p.exprList(clause.Cases), nil)
if tswitch != nil && tswitch.Tag != nil {
nn := typecheck.NewName(tswitch.Tag.Sym())
typecheck.Declare(nn, typecheck.DeclContext)
n.Vars = []ir.Node{nn}
n.Var = nn
// keep track of the instances for reporting unused
nn.Defn = tswitch
}
@ -1416,13 +1393,11 @@ func (p *noder) caseClauses(clauses []*syntax.CaseClause, tswitch *ir.TypeSwitch
}
func (p *noder) selectStmt(stmt *syntax.SelectStmt) ir.Node {
n := ir.NewSelectStmt(p.pos(stmt), nil)
n.Cases.Set(p.commClauses(stmt.Body, stmt.Rbrace))
return n
return ir.NewSelectStmt(p.pos(stmt), p.commClauses(stmt.Body, stmt.Rbrace))
}
func (p *noder) commClauses(clauses []*syntax.CommClause, rbrace syntax.Pos) []ir.Node {
nodes := make([]ir.Node, 0, len(clauses))
func (p *noder) commClauses(clauses []*syntax.CommClause, rbrace syntax.Pos) []*ir.CommClause {
nodes := make([]*ir.CommClause, len(clauses))
for i, clause := range clauses {
p.setlineno(clause)
if i > 0 {
@ -1430,12 +1405,7 @@ func (p *noder) commClauses(clauses []*syntax.CommClause, rbrace syntax.Pos) []i
}
p.openScope(clause.Pos())
n := ir.NewCaseStmt(p.pos(clause), nil, nil)
if clause.Comm != nil {
n.List = []ir.Node{p.stmt(clause.Comm)}
}
n.Body.Set(p.stmts(clause.Body))
nodes = append(nodes, n)
nodes[i] = ir.NewCommStmt(p.pos(clause), p.stmt(clause.Comm), p.stmts(clause.Body))
}
if len(clauses) > 0 {
p.closeScope(rbrace)
@ -2001,7 +1971,6 @@ func oldname(s *types.Sym) ir.Node {
c = typecheck.NewName(s)
c.Class_ = ir.PAUTOHEAP
c.SetIsClosureVar(true)
c.SetIsDDD(n.IsDDD())
c.Defn = n
// Link into list of active closure variables.


@ -33,7 +33,6 @@ package objw
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/ssa"
"cmd/internal/obj"
"cmd/internal/objabi"
"cmd/internal/src"
@ -173,7 +172,7 @@ func (pp *Progs) Prog(as obj.As) *obj.Prog {
p.Pos = pp.Pos
if pp.Pos.IsStmt() == src.PosIsStmt {
// Clear IsStmt for later Progs at this pos provided that as can be marked as a stmt
if ssa.LosesStmtMark(as) {
if LosesStmtMark(as) {
return p
}
pp.Pos = pp.Pos.WithNotStmt()
@ -216,3 +215,12 @@ func (pp *Progs) SetText(fn *ir.Func) {
ptxt.From.Name = obj.NAME_EXTERN
ptxt.From.Sym = fn.LSym
}
// LosesStmtMark reports whether a prog with op as loses its statement mark on the way to DWARF.
// The attributes from some opcodes are lost in translation.
// TODO: this is an artifact of how funcpctab combines information for instructions at a single PC.
// Should try to fix it there.
func LosesStmtMark(as obj.As) bool {
// is_stmt does not work for these; it DOES for ANOP even though that generates no code.
return as == obj.APCDATA || as == obj.AFUNCDATA
}


@ -289,10 +289,6 @@ func (d *initDeps) inspectList(l ir.Nodes) { ir.VisitList(l, d.cachedVisit()) }
// referenced by n, if any.
func (d *initDeps) visit(n ir.Node) {
switch n.Op() {
case ir.OMETHEXPR:
n := n.(*ir.MethodExpr)
d.foundDep(ir.MethodExprName(n))
case ir.ONAME:
n := n.(*ir.Name)
switch n.Class_ {
@ -304,7 +300,7 @@ func (d *initDeps) visit(n ir.Node) {
n := n.(*ir.ClosureExpr)
d.inspectList(n.Func.Body)
case ir.ODOTMETH, ir.OCALLPART:
case ir.ODOTMETH, ir.OCALLPART, ir.OMETHEXPR:
d.foundDep(ir.MethodExprName(n))
}
}


@ -289,11 +289,11 @@ func hashfor(t *types.Type) ir.Node {
n := typecheck.NewName(sym)
ir.MarkFunc(n)
n.SetType(typecheck.NewFuncType(nil, []*ir.Field{
ir.NewField(base.Pos, nil, nil, types.NewPtr(t)),
ir.NewField(base.Pos, nil, nil, types.Types[types.TUINTPTR]),
}, []*ir.Field{
ir.NewField(base.Pos, nil, nil, types.Types[types.TUINTPTR]),
n.SetType(types.NewSignature(types.NoPkg, nil, []*types.Field{
types.NewField(base.Pos, nil, types.NewPtr(t)),
types.NewField(base.Pos, nil, types.Types[types.TUINTPTR]),
}, []*types.Field{
types.NewField(base.Pos, nil, types.Types[types.TUINTPTR]),
}))
return n
}
@ -777,12 +777,12 @@ func hashmem(t *types.Type) ir.Node {
n := typecheck.NewName(sym)
ir.MarkFunc(n)
n.SetType(typecheck.NewFuncType(nil, []*ir.Field{
ir.NewField(base.Pos, nil, nil, types.NewPtr(t)),
ir.NewField(base.Pos, nil, nil, types.Types[types.TUINTPTR]),
ir.NewField(base.Pos, nil, nil, types.Types[types.TUINTPTR]),
}, []*ir.Field{
ir.NewField(base.Pos, nil, nil, types.Types[types.TUINTPTR]),
n.SetType(types.NewSignature(types.NoPkg, nil, []*types.Field{
types.NewField(base.Pos, nil, types.NewPtr(t)),
types.NewField(base.Pos, nil, types.Types[types.TUINTPTR]),
types.NewField(base.Pos, nil, types.Types[types.TUINTPTR]),
}, []*types.Field{
types.NewField(base.Pos, nil, types.Types[types.TUINTPTR]),
}))
return n
}


@ -16,8 +16,8 @@ import (
"cmd/compile/internal/escape"
"cmd/compile/internal/inline"
"cmd/compile/internal/ir"
"cmd/compile/internal/liveness"
"cmd/compile/internal/objw"
"cmd/compile/internal/typebits"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/gcprog"
@ -835,6 +835,9 @@ func TypeSym(t *types.Type) *types.Sym {
if t == nil || (t.IsPtr() && t.Elem() == nil) || t.IsUntyped() {
base.Fatalf("typenamesym %v", t)
}
if t.Kind() == types.TFUNC && t.Recv() != nil {
base.Fatalf("misuse of method type: %v", t)
}
s := types.TypeSym(t)
signatmu.Lock()
NeedRuntimeType(t)
@ -1419,7 +1422,11 @@ func WriteBasicTypes() {
// The latter is the type of an auto-generated wrapper.
WriteType(types.NewPtr(types.ErrorType))
WriteType(typecheck.NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, types.ErrorType)}, []*ir.Field{ir.NewField(base.Pos, nil, nil, types.Types[types.TSTRING])}))
WriteType(types.NewSignature(types.NoPkg, nil, []*types.Field{
types.NewField(base.Pos, nil, types.ErrorType),
}, []*types.Field{
types.NewField(base.Pos, nil, types.Types[types.TSTRING]),
}))
// add paths for runtime and main, which 6l imports implicitly.
dimportpath(ir.Pkgs.Runtime)
@ -1545,7 +1552,7 @@ func fillptrmask(t *types.Type, ptrmask []byte) {
}
vec := bitvec.New(8 * int32(len(ptrmask)))
liveness.SetTypeBits(t, 0, vec)
typebits.Set(t, 0, vec)
nptr := types.PtrDataSize(t) / int64(types.PtrSize)
for i := int64(0); i < nptr; i++ {
@ -1856,7 +1863,7 @@ func MarkUsedIfaceMethod(n *ir.CallExpr) {
r.Sym = tsym
// dot.Xoffset is the method index * Widthptr (the offset of code pointer
// in itab).
midx := dot.Offset / int64(types.PtrSize)
midx := dot.Offset() / int64(types.PtrSize)
r.Add = InterfaceMethodOffset(ityp, midx)
r.Type = objabi.R_USEIFACEMETHOD
}


@ -5,7 +5,6 @@
package ssa
import (
"cmd/internal/obj"
"cmd/internal/src"
"fmt"
"sort"
@ -23,15 +22,6 @@ func isPoorStatementOp(op Op) bool {
return false
}
// LosesStmtMark reports whether a prog with op as loses its statement mark on the way to DWARF.
// The attributes from some opcodes are lost in translation.
// TODO: this is an artifact of how funcpctab combines information for instructions at a single PC.
// Should try to fix it there.
func LosesStmtMark(as obj.As) bool {
// is_stmt does not work for these; it DOES for ANOP even though that generates no code.
return as == obj.APCDATA || as == obj.AFUNCDATA
}
// nextGoodStatementIndex returns an index at i or later that is believed
// to be a good place to start the statement for b. This decision is
// based on v's Op, the possibility of a better later operation, and


@ -214,10 +214,7 @@ func InitConfig() {
func getParam(n *ir.CallExpr, i int) *types.Field {
t := n.X.Type()
if n.Op() == ir.OCALLMETH {
if i == 0 {
return t.Recv()
}
return t.Params().Field(i - 1)
base.Fatalf("OCALLMETH missed by walkCall")
}
return t.Params().Field(i)
}
@ -1166,7 +1163,7 @@ func (s *state) stmt(n ir.Node) {
}
fallthrough
case ir.OCALLMETH, ir.OCALLINTER:
case ir.OCALLINTER:
n := n.(*ir.CallExpr)
s.callResult(n, callNormal)
if n.Op() == ir.OCALLFUNC && n.X.Op() == ir.ONAME && n.X.(*ir.Name).Class_ == ir.PFUNC {
@ -1367,7 +1364,7 @@ func (s *state) stmt(n ir.Node) {
// We're assigning a slicing operation back to its source.
// Don't write back fields we aren't changing. See issue #14855.
rhs := rhs.(*ir.SliceExpr)
i, j, k := rhs.SliceBounds()
i, j, k := rhs.Low, rhs.High, rhs.Max
if i != nil && (i.Op() == ir.OLITERAL && i.Val().Kind() == constant.Int && ir.Int64Val(i) == 0) {
// [0:...] is the same as [:...]
i = nil
@ -2111,10 +2108,6 @@ func (s *state) expr(n ir.Node) *ssa.Value {
n := n.(*ir.UnaryExpr)
aux := n.X.Sym().Linksym()
return s.entryNewValue1A(ssa.OpAddr, n.Type(), aux, s.sb)
case ir.OMETHEXPR:
n := n.(*ir.MethodExpr)
sym := staticdata.FuncSym(n.FuncName().Sym()).Linksym()
return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(n.Type()), sym, s.sb)
case ir.ONAME:
n := n.(*ir.Name)
if n.Class_ == ir.PFUNC {
@ -2736,7 +2729,7 @@ func (s *state) expr(n ir.Node) *ssa.Value {
// SSA, then load just the selected field. This
// prevents false memory dependencies in race/msan
// instrumentation.
if ir.IsAssignable(n) && !s.canSSA(n) {
if ir.IsAddressable(n) && !s.canSSA(n) {
p := s.addr(n)
return s.load(n.Type(), p)
}
@ -2746,7 +2739,7 @@ func (s *state) expr(n ir.Node) *ssa.Value {
case ir.ODOTPTR:
n := n.(*ir.SelectorExpr)
p := s.exprPtr(n.X, n.Bounded(), n.Pos())
p = s.newValue1I(ssa.OpOffPtr, types.NewPtr(n.Type()), n.Offset, p)
p = s.newValue1I(ssa.OpOffPtr, types.NewPtr(n.Type()), n.Offset(), p)
return s.load(n.Type(), p)
case ir.OINDEX:
@ -2844,23 +2837,22 @@ func (s *state) expr(n ir.Node) *ssa.Value {
case ir.OSLICEHEADER:
n := n.(*ir.SliceHeaderExpr)
p := s.expr(n.Ptr)
l := s.expr(n.LenCap[0])
c := s.expr(n.LenCap[1])
l := s.expr(n.Len)
c := s.expr(n.Cap)
return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
case ir.OSLICE, ir.OSLICEARR, ir.OSLICE3, ir.OSLICE3ARR:
n := n.(*ir.SliceExpr)
v := s.expr(n.X)
var i, j, k *ssa.Value
low, high, max := n.SliceBounds()
if low != nil {
i = s.expr(low)
if n.Low != nil {
i = s.expr(n.Low)
}
if high != nil {
j = s.expr(high)
if n.High != nil {
j = s.expr(n.High)
}
if max != nil {
k = s.expr(max)
if n.Max != nil {
k = s.expr(n.Max)
}
p, l, c := s.slice(v, i, j, k, n.Bounded())
return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
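
At the language level, the Low/High/Max bounds above are the i, j, k of v[i:j:k], and s.slice turns them into the ptr/len/cap fed to OpSliceMake. A quick reminder of the resulting len and cap:

package main

import "fmt"

func main() {
	v := []int{0, 1, 2, 3, 4}
	s := v[1:3:4]               // Low=1, High=3, Max=4
	fmt.Println(len(s), cap(s)) // 2 3: len = j-i, cap = k-i
}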
@ -2869,12 +2861,11 @@ func (s *state) expr(n ir.Node) *ssa.Value {
n := n.(*ir.SliceExpr)
v := s.expr(n.X)
var i, j *ssa.Value
low, high, _ := n.SliceBounds()
if low != nil {
i = s.expr(low)
if n.Low != nil {
i = s.expr(n.Low)
}
if high != nil {
j = s.expr(high)
if n.High != nil {
j = s.expr(n.High)
}
p, l, _ := s.slice(v, i, j, nil, n.Bounded())
return s.newValue2(ssa.OpStringMake, n.Type(), p, l)
@ -4398,16 +4389,7 @@ func (s *state) openDeferRecord(n *ir.CallExpr) {
opendefer.closure = closure
}
} else if n.Op() == ir.OCALLMETH {
if fn.Op() != ir.ODOTMETH {
base.Fatalf("OCALLMETH: n.Left not an ODOTMETH: %v", fn)
}
fn := fn.(*ir.SelectorExpr)
closureVal := s.getMethodClosure(fn)
// We must always store the function value in a stack slot for the
// runtime panic code to use. But in the defer exit code, we will
// call the method directly.
closure := s.openDeferSave(nil, fn.Type(), closureVal)
opendefer.closureNode = closure.Aux.(*ir.Name)
base.Fatalf("OCALLMETH missed by walkCall")
} else {
if fn.Op() != ir.ODOTINTER {
base.Fatalf("OCALLINTER: n.Left not an ODOTINTER: %v", fn.Op())
@ -4681,18 +4663,7 @@ func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool) *ssa.Val
s.maybeNilCheckClosure(closure, k)
}
case ir.OCALLMETH:
if fn.Op() != ir.ODOTMETH {
s.Fatalf("OCALLMETH: n.Left not an ODOTMETH: %v", fn)
}
fn := fn.(*ir.SelectorExpr)
testLateExpansion = k != callDeferStack && ssa.LateCallExpansionEnabledWithin(s.f)
if k == callNormal {
sym = fn.Sel
break
}
closure = s.getMethodClosure(fn)
// Note: receiver is already present in n.Rlist, so we don't
// want to set it here.
base.Fatalf("OCALLMETH missed by walkCall")
case ir.OCALLINTER:
if fn.Op() != ir.ODOTINTER {
s.Fatalf("OCALLINTER: n.Left not an ODOTINTER: %v", fn.Op())
@ -4757,9 +4728,7 @@ func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool) *ssa.Val
}
// Set receiver (for method calls).
if n.Op() == ir.OCALLMETH {
f := ft.Recv()
s.storeArgWithBase(args[0], f.Type, addr, off+f.Offset)
args = args[1:]
base.Fatalf("OCALLMETH missed by walkCall")
}
// Set other args.
for _, f := range ft.Params().Fields().Slice() {
@ -4827,11 +4796,7 @@ func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool) *ssa.Val
t := n.X.Type()
args := n.Rargs
if n.Op() == ir.OCALLMETH {
f := t.Recv()
ACArg, arg := s.putArg(args[0], f.Type, argStart+f.Offset, testLateExpansion)
ACArgs = append(ACArgs, ACArg)
callArgs = append(callArgs, arg)
args = args[1:]
base.Fatalf("OCALLMETH missed by walkCall")
}
for i, n := range args {
f := t.Params().Field(i)
@ -4949,29 +4914,13 @@ func (s *state) maybeNilCheckClosure(closure *ssa.Value, k callKind) {
}
}
// getMethodClosure returns a value representing the closure for a method call
func (s *state) getMethodClosure(fn *ir.SelectorExpr) *ssa.Value {
// Make a name n2 for the function.
// fn.Sym might be sync.(*Mutex).Unlock.
// Make a PFUNC node out of that, then evaluate it.
// We get back an SSA value representing &sync.(*Mutex).Unlock·f.
// We can then pass that to defer or go.
n2 := ir.NewNameAt(fn.Pos(), fn.Sel)
n2.Curfn = s.curfn
n2.Class_ = ir.PFUNC
// n2.Sym already existed, so it's already marked as a function.
n2.SetPos(fn.Pos())
n2.SetType(types.Types[types.TUINT8]) // fake type for a static closure. Could use runtime.funcval if we had it.
return s.expr(n2)
}
// getClosureAndRcvr returns values for the appropriate closure and receiver of an
// interface call
func (s *state) getClosureAndRcvr(fn *ir.SelectorExpr) (*ssa.Value, *ssa.Value) {
i := s.expr(fn.X)
itab := s.newValue1(ssa.OpITab, types.Types[types.TUINTPTR], i)
s.nilCheck(itab)
itabidx := fn.Offset + 2*int64(types.PtrSize) + 8 // offset of fun field in runtime.itab
itabidx := fn.Offset() + 2*int64(types.PtrSize) + 8 // offset of fun field in runtime.itab
closure := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.UintptrPtr, itabidx, itab)
rcvr := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, i)
return closure, rcvr
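
The 2*PtrSize+8 above is the byte offset of the fun array within runtime.itab: two pointer fields (inter, _type) followed by a uint32 hash padded to 8 bytes. A runnable check against a mirror of that layout (assuming a 64-bit target; the struct below is an illustration, not the runtime's declaration):

package main

import (
	"fmt"
	"unsafe"
)

type itab struct {
	inter unsafe.Pointer // *interfacetype
	_type unsafe.Pointer // *_type
	hash  uint32
	_     [4]byte
	fun   [1]uintptr // method code pointers start here
}

func main() {
	fmt.Println(unsafe.Offsetof(itab{}.fun)) // 24 == 2*8 + 8
}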
@ -5075,11 +5024,11 @@ func (s *state) addr(n ir.Node) *ssa.Value {
case ir.ODOT:
n := n.(*ir.SelectorExpr)
p := s.addr(n.X)
return s.newValue1I(ssa.OpOffPtr, t, n.Offset, p)
return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
case ir.ODOTPTR:
n := n.(*ir.SelectorExpr)
p := s.exprPtr(n.X, n.Bounded(), n.Pos())
return s.newValue1I(ssa.OpOffPtr, t, n.Offset, p)
return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
case ir.OCLOSUREREAD:
n := n.(*ir.ClosureReadExpr)
return s.newValue1I(ssa.OpOffPtr, t, n.Offset,
@ -5091,7 +5040,7 @@ func (s *state) addr(n ir.Node) *ssa.Value {
}
addr := s.addr(n.X)
return s.newValue1(ssa.OpCopy, t, addr) // ensure that addr has the right type
case ir.OCALLFUNC, ir.OCALLINTER, ir.OCALLMETH:
case ir.OCALLFUNC, ir.OCALLINTER:
n := n.(*ir.CallExpr)
return s.callAddr(n, callNormal)
case ir.ODOTTYPE:
@ -6328,7 +6277,7 @@ type State struct {
// Prog appends a new Prog.
func (s *State) Prog(as obj.As) *obj.Prog {
p := s.pp.Prog(as)
if ssa.LosesStmtMark(as) {
if objw.LosesStmtMark(as) {
return p
}
// Float a statement start to the beginning of any same-line run.
@ -7116,21 +7065,17 @@ func (s *State) UseArgs(n int64) {
// fieldIdx finds the index of the field referred to by the ODOT node n.
func fieldIdx(n *ir.SelectorExpr) int {
t := n.X.Type()
f := n.Sel
if !t.IsStruct() {
panic("ODOT's LHS is not a struct")
}
var i int
for _, t1 := range t.Fields().Slice() {
if t1.Sym != f {
i++
continue
for i, f := range t.Fields().Slice() {
if f.Sym == n.Sel {
if f.Offset != n.Offset() {
panic("field offset doesn't match")
}
return i
}
if t1.Offset != n.Offset {
panic("field offset doesn't match")
}
return i
}
panic(fmt.Sprintf("can't find field in expr %v\n", n))
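
fieldIdx walks the struct's fields in declaration order, so the returned index and the selector's offset must agree. The same correspondence is visible through reflect:

package main

import (
	"fmt"
	"reflect"
)

func main() {
	type S struct{ A, B, C int64 }
	f, _ := reflect.TypeOf(S{}).FieldByName("C")
	fmt.Println(f.Index[0], f.Offset) // 2 16: third field, byte offset 2*8 on 64-bit
}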


@ -469,7 +469,7 @@ func StaticLoc(n ir.Node) (name *ir.Name, offset int64, ok bool) {
if name, offset, ok = StaticLoc(n.X); !ok {
break
}
offset += n.Offset
offset += n.Offset()
return name, offset, true
case ir.OINDEX:


@ -0,0 +1,87 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package typebits
import (
"cmd/compile/internal/base"
"cmd/compile/internal/bitvec"
"cmd/compile/internal/types"
)
// NOTE: The bitmap for a specific type t could be cached in t after
// the first run and then simply copied into bv at the correct offset
// on future calls with the same type t.
func Set(t *types.Type, off int64, bv bitvec.BitVec) {
if t.Align > 0 && off&int64(t.Align-1) != 0 {
base.Fatalf("onebitwalktype1: invalid initial alignment: type %v has alignment %d, but offset is %v", t, t.Align, off)
}
if !t.HasPointers() {
// Note: this case ensures that pointers to go:notinheap types
// are not considered pointers by garbage collection and stack copying.
return
}
switch t.Kind() {
case types.TPTR, types.TUNSAFEPTR, types.TFUNC, types.TCHAN, types.TMAP:
if off&int64(types.PtrSize-1) != 0 {
base.Fatalf("onebitwalktype1: invalid alignment, %v", t)
}
bv.Set(int32(off / int64(types.PtrSize))) // pointer
case types.TSTRING:
// struct { byte *str; intgo len; }
if off&int64(types.PtrSize-1) != 0 {
base.Fatalf("onebitwalktype1: invalid alignment, %v", t)
}
bv.Set(int32(off / int64(types.PtrSize))) // pointer in first slot
case types.TINTER:
// struct { Itab *tab; void *data; }
// or, when isnilinter(t)==true:
// struct { Type *type; void *data; }
if off&int64(types.PtrSize-1) != 0 {
base.Fatalf("onebitwalktype1: invalid alignment, %v", t)
}
// The first word of an interface is a pointer, but we don't
// treat it as such.
// 1. If it is a non-empty interface, the pointer points to an itab
// which is always in persistentalloc space.
// 2. If it is an empty interface, the pointer points to a _type.
// a. If it is a compile-time-allocated type, it points into
// the read-only data section.
// b. If it is a reflect-allocated type, it points into the Go heap.
// Reflect is responsible for keeping a reference to
// the underlying type so it won't be GCd.
// If we ever have a moving GC, we need to change this for 2b (as
// well as scan itabs to update their itab._type fields).
bv.Set(int32(off/int64(types.PtrSize) + 1)) // pointer in second slot
case types.TSLICE:
// struct { byte *array; uintgo len; uintgo cap; }
if off&int64(types.PtrSize-1) != 0 {
base.Fatalf("onebitwalktype1: invalid TARRAY alignment, %v", t)
}
bv.Set(int32(off / int64(types.PtrSize))) // pointer in first slot (BitsPointer)
case types.TARRAY:
elt := t.Elem()
if elt.Width == 0 {
// Short-circuit for #20739.
break
}
for i := int64(0); i < t.NumElem(); i++ {
Set(elt, off, bv)
off += elt.Width
}
case types.TSTRUCT:
for _, f := range t.Fields().Slice() {
Set(f.Type, off+f.Offset, bv)
}
default:
base.Fatalf("onebitwalktype1: unexpected type, %v", t)
}
}
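
For intuition, here is a self-contained analogue of the walker using reflect: it marks, for each pointer-sized word of a value, whether the GC should treat that word as a pointer. (Illustrative only; the real code operates on compiler types and a compiler bitvector, and this version omits the alignment checks and the HasPointers short-circuit.)

package main

import (
	"fmt"
	"reflect"
	"unsafe"
)

const ptrSize = int64(unsafe.Sizeof(uintptr(0)))

func set(t reflect.Type, off int64, bv []bool) {
	switch t.Kind() {
	case reflect.Ptr, reflect.UnsafePointer, reflect.Func, reflect.Chan, reflect.Map:
		bv[off/ptrSize] = true // pointer
	case reflect.String:
		bv[off/ptrSize] = true // pointer in first slot
	case reflect.Interface:
		bv[off/ptrSize+1] = true // pointer in second slot
	case reflect.Slice:
		bv[off/ptrSize] = true // data pointer in first slot
	case reflect.Array:
		for i := 0; i < t.Len(); i++ {
			set(t.Elem(), off+int64(i)*int64(t.Elem().Size()), bv)
		}
	case reflect.Struct:
		for i := 0; i < t.NumField(); i++ {
			f := t.Field(i)
			set(f.Type, off+int64(f.Offset), bv)
		}
	}
}

func main() {
	type T struct {
		P *int
		N int
		S string
	}
	t := reflect.TypeOf(T{})
	bv := make([]bool, int64(t.Size())/ptrSize)
	set(t, 0, bv)
	fmt.Println(bv) // [true false true false]: P and S's data pointer
}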


@ -3,9 +3,8 @@
package typecheck
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
"cmd/internal/src"
)
var runtimeDecls = [...]struct {
@ -212,133 +211,133 @@ func runtimeTypes() []*types.Type {
typs[1] = types.NewPtr(typs[0])
typs[2] = types.Types[types.TANY]
typs[3] = types.NewPtr(typs[2])
typs[4] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3])})
typs[4] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[1])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[3])})
typs[5] = types.Types[types.TUINTPTR]
typs[6] = types.Types[types.TBOOL]
typs[7] = types.Types[types.TUNSAFEPTR]
typs[8] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[5]), ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[6])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7])})
typs[9] = NewFuncType(nil, nil, nil)
typs[8] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[5]), types.NewField(src.NoXPos, nil, typs[1]), types.NewField(src.NoXPos, nil, typs[6])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[7])})
typs[9] = types.NewSignature(types.NoPkg, nil, nil, nil)
typs[10] = types.Types[types.TINTER]
typs[11] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[10])}, nil)
typs[11] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[10])}, nil)
typs[12] = types.Types[types.TINT32]
typs[13] = types.NewPtr(typs[12])
typs[14] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[13])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[10])})
typs[14] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[13])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[10])})
typs[15] = types.Types[types.TINT]
typs[16] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[15])}, nil)
typs[16] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[15]), types.NewField(src.NoXPos, nil, typs[15])}, nil)
typs[17] = types.Types[types.TUINT]
typs[18] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[17]), ir.NewField(base.Pos, nil, nil, typs[15])}, nil)
typs[19] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])}, nil)
typs[18] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[17]), types.NewField(src.NoXPos, nil, typs[15])}, nil)
typs[19] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[6])}, nil)
typs[20] = types.Types[types.TFLOAT64]
typs[21] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])}, nil)
typs[21] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[20])}, nil)
typs[22] = types.Types[types.TINT64]
typs[23] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[22])}, nil)
typs[23] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[22])}, nil)
typs[24] = types.Types[types.TUINT64]
typs[25] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[24])}, nil)
typs[25] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[24])}, nil)
typs[26] = types.Types[types.TCOMPLEX128]
typs[27] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[26])}, nil)
typs[27] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[26])}, nil)
typs[28] = types.Types[types.TSTRING]
typs[29] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])}, nil)
typs[30] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[2])}, nil)
typs[31] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[5])}, nil)
typs[29] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[28])}, nil)
typs[30] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[2])}, nil)
typs[31] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[5])}, nil)
typs[32] = types.NewArray(typs[0], 32)
typs[33] = types.NewPtr(typs[32])
typs[34] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
typs[35] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
typs[36] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
typs[37] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
typs[34] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[33]), types.NewField(src.NoXPos, nil, typs[28]), types.NewField(src.NoXPos, nil, typs[28])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[28])})
typs[35] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[33]), types.NewField(src.NoXPos, nil, typs[28]), types.NewField(src.NoXPos, nil, typs[28]), types.NewField(src.NoXPos, nil, typs[28])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[28])})
typs[36] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[33]), types.NewField(src.NoXPos, nil, typs[28]), types.NewField(src.NoXPos, nil, typs[28]), types.NewField(src.NoXPos, nil, typs[28]), types.NewField(src.NoXPos, nil, typs[28])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[28])})
typs[37] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[33]), types.NewField(src.NoXPos, nil, typs[28]), types.NewField(src.NoXPos, nil, typs[28]), types.NewField(src.NoXPos, nil, typs[28]), types.NewField(src.NoXPos, nil, typs[28]), types.NewField(src.NoXPos, nil, typs[28])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[28])})
typs[38] = types.NewSlice(typs[28])
typs[39] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[38])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
typs[40] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[15])})
typs[39] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[33]), types.NewField(src.NoXPos, nil, typs[38])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[28])})
typs[40] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[28]), types.NewField(src.NoXPos, nil, typs[28])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[15])})
typs[41] = types.NewArray(typs[0], 4)
typs[42] = types.NewPtr(typs[41])
typs[43] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[42]), ir.NewField(base.Pos, nil, nil, typs[22])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
typs[44] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
typs[45] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
typs[43] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[42]), types.NewField(src.NoXPos, nil, typs[22])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[28])})
typs[44] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[33]), types.NewField(src.NoXPos, nil, typs[1]), types.NewField(src.NoXPos, nil, typs[15])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[28])})
typs[45] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[1]), types.NewField(src.NoXPos, nil, typs[15])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[28])})
typs[46] = types.RuneType
typs[47] = types.NewSlice(typs[46])
typs[48] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[47])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
typs[48] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[33]), types.NewField(src.NoXPos, nil, typs[47])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[28])})
typs[49] = types.NewSlice(typs[0])
typs[50] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[49])})
typs[50] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[33]), types.NewField(src.NoXPos, nil, typs[28])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[49])})
typs[51] = types.NewArray(typs[46], 32)
typs[52] = types.NewPtr(typs[51])
typs[53] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[52]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[47])})
typs[54] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[5])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[15])})
typs[55] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[46]), ir.NewField(base.Pos, nil, nil, typs[15])})
typs[56] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[15])})
typs[57] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[2])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[2])})
typs[58] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[2])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7])})
typs[59] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[2])})
typs[60] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[2])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[2]), ir.NewField(base.Pos, nil, nil, typs[6])})
typs[61] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[1])}, nil)
typs[62] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1])}, nil)
typs[53] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[52]), types.NewField(src.NoXPos, nil, typs[28])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[47])})
typs[54] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[3]), types.NewField(src.NoXPos, nil, typs[15]), types.NewField(src.NoXPos, nil, typs[3]), types.NewField(src.NoXPos, nil, typs[15]), types.NewField(src.NoXPos, nil, typs[5])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[15])})
typs[55] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[28]), types.NewField(src.NoXPos, nil, typs[15])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[46]), types.NewField(src.NoXPos, nil, typs[15])})
typs[56] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[28])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[15])})
typs[57] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[1]), types.NewField(src.NoXPos, nil, typs[2])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[2])})
typs[58] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[2])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[7])})
typs[59] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[1]), types.NewField(src.NoXPos, nil, typs[3])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[2])})
typs[60] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[1]), types.NewField(src.NoXPos, nil, typs[2])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[2]), types.NewField(src.NoXPos, nil, typs[6])})
typs[61] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[1]), types.NewField(src.NoXPos, nil, typs[1]), types.NewField(src.NoXPos, nil, typs[1])}, nil)
typs[62] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[1])}, nil)
typs[63] = types.NewPtr(typs[5])
typs[64] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[63]), ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[7])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
typs[64] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[63]), types.NewField(src.NoXPos, nil, typs[7]), types.NewField(src.NoXPos, nil, typs[7])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[6])})
typs[65] = types.Types[types.TUINT32]
typs[66] = NewFuncType(nil, nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[65])})
typs[66] = types.NewSignature(types.NoPkg, nil, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[65])})
typs[67] = types.NewMap(typs[2], typs[2])
typs[68] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[22]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[67])})
typs[69] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[67])})
typs[70] = NewFuncType(nil, nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[67])})
typs[71] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3])})
typs[72] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[2])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3])})
typs[73] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[1])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3])})
typs[74] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[6])})
typs[75] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[2])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[6])})
typs[76] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[1])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[6])})
typs[77] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[3])}, nil)
typs[78] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[2])}, nil)
typs[79] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3])}, nil)
typs[80] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67])}, nil)
typs[68] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[1]), types.NewField(src.NoXPos, nil, typs[22]), types.NewField(src.NoXPos, nil, typs[3])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[67])})
typs[69] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[1]), types.NewField(src.NoXPos, nil, typs[15]), types.NewField(src.NoXPos, nil, typs[3])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[67])})
typs[70] = types.NewSignature(types.NoPkg, nil, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[67])})
typs[71] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[1]), types.NewField(src.NoXPos, nil, typs[67]), types.NewField(src.NoXPos, nil, typs[3])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[3])})
typs[72] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[1]), types.NewField(src.NoXPos, nil, typs[67]), types.NewField(src.NoXPos, nil, typs[2])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[3])})
typs[73] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[1]), types.NewField(src.NoXPos, nil, typs[67]), types.NewField(src.NoXPos, nil, typs[3]), types.NewField(src.NoXPos, nil, typs[1])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[3])})
typs[74] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[1]), types.NewField(src.NoXPos, nil, typs[67]), types.NewField(src.NoXPos, nil, typs[3])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[3]), types.NewField(src.NoXPos, nil, typs[6])})
typs[75] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[1]), types.NewField(src.NoXPos, nil, typs[67]), types.NewField(src.NoXPos, nil, typs[2])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[3]), types.NewField(src.NoXPos, nil, typs[6])})
typs[76] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[1]), types.NewField(src.NoXPos, nil, typs[67]), types.NewField(src.NoXPos, nil, typs[3]), types.NewField(src.NoXPos, nil, typs[1])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[3]), types.NewField(src.NoXPos, nil, typs[6])})
typs[77] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[1]), types.NewField(src.NoXPos, nil, typs[67]), types.NewField(src.NoXPos, nil, typs[3])}, nil)
typs[78] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[1]), types.NewField(src.NoXPos, nil, typs[67]), types.NewField(src.NoXPos, nil, typs[2])}, nil)
typs[79] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[3])}, nil)
typs[80] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[1]), types.NewField(src.NoXPos, nil, typs[67])}, nil)
typs[81] = types.NewChan(typs[2], types.Cboth)
typs[82] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[22])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[81])})
typs[83] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[81])})
typs[82] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[1]), types.NewField(src.NoXPos, nil, typs[22])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[81])})
typs[83] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[1]), types.NewField(src.NoXPos, nil, typs[15])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[81])})
typs[84] = types.NewChan(typs[2], types.Crecv)
typs[85] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[84]), ir.NewField(base.Pos, nil, nil, typs[3])}, nil)
typs[86] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[84]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
typs[85] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[84]), types.NewField(src.NoXPos, nil, typs[3])}, nil)
typs[86] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[84]), types.NewField(src.NoXPos, nil, typs[3])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[6])})
typs[87] = types.NewChan(typs[2], types.Csend)
typs[88] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[87]), ir.NewField(base.Pos, nil, nil, typs[3])}, nil)
typs[88] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[87]), types.NewField(src.NoXPos, nil, typs[3])}, nil)
typs[89] = types.NewArray(typs[0], 3)
typs[90] = NewStructType([]*ir.Field{ir.NewField(base.Pos, Lookup("enabled"), nil, typs[6]), ir.NewField(base.Pos, Lookup("pad"), nil, typs[89]), ir.NewField(base.Pos, Lookup("needed"), nil, typs[6]), ir.NewField(base.Pos, Lookup("cgo"), nil, typs[6]), ir.NewField(base.Pos, Lookup("alignme"), nil, typs[24])})
typs[91] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[3])}, nil)
typs[92] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[3])}, nil)
typs[93] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[15])})
typs[94] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[87]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
typs[95] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[84])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
typs[90] = types.NewStruct(types.NoPkg, []*types.Field{types.NewField(src.NoXPos, Lookup("enabled"), typs[6]), types.NewField(src.NoXPos, Lookup("pad"), typs[89]), types.NewField(src.NoXPos, Lookup("needed"), typs[6]), types.NewField(src.NoXPos, Lookup("cgo"), typs[6]), types.NewField(src.NoXPos, Lookup("alignme"), typs[24])})
typs[91] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[1]), types.NewField(src.NoXPos, nil, typs[3]), types.NewField(src.NoXPos, nil, typs[3])}, nil)
typs[92] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[1]), types.NewField(src.NoXPos, nil, typs[3])}, nil)
typs[93] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[1]), types.NewField(src.NoXPos, nil, typs[3]), types.NewField(src.NoXPos, nil, typs[15]), types.NewField(src.NoXPos, nil, typs[3]), types.NewField(src.NoXPos, nil, typs[15])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[15])})
typs[94] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[87]), types.NewField(src.NoXPos, nil, typs[3])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[6])})
typs[95] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[3]), types.NewField(src.NoXPos, nil, typs[84])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[6])})
typs[96] = types.NewPtr(typs[6])
typs[97] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[96]), ir.NewField(base.Pos, nil, nil, typs[84])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
typs[98] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[63])}, nil)
typs[99] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[63]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[6])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[6])})
typs[100] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7])})
typs[101] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[22]), ir.NewField(base.Pos, nil, nil, typs[22])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7])})
typs[102] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[7])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7])})
typs[97] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[3]), types.NewField(src.NoXPos, nil, typs[96]), types.NewField(src.NoXPos, nil, typs[84])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[6])})
typs[98] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[63])}, nil)
typs[99] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[1]), types.NewField(src.NoXPos, nil, typs[1]), types.NewField(src.NoXPos, nil, typs[63]), types.NewField(src.NoXPos, nil, typs[15]), types.NewField(src.NoXPos, nil, typs[15]), types.NewField(src.NoXPos, nil, typs[6])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[15]), types.NewField(src.NoXPos, nil, typs[6])})
typs[100] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[1]), types.NewField(src.NoXPos, nil, typs[15]), types.NewField(src.NoXPos, nil, typs[15])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[7])})
typs[101] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[1]), types.NewField(src.NoXPos, nil, typs[22]), types.NewField(src.NoXPos, nil, typs[22])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[7])})
typs[102] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[1]), types.NewField(src.NoXPos, nil, typs[15]), types.NewField(src.NoXPos, nil, typs[15]), types.NewField(src.NoXPos, nil, typs[7])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[7])})
typs[103] = types.NewSlice(typs[2])
typs[104] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[103]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[103])})
typs[105] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[5])}, nil)
typs[106] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[5])}, nil)
typs[107] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[5])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
typs[108] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
typs[109] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[7])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
typs[110] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[5]), ir.NewField(base.Pos, nil, nil, typs[5])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[5])})
typs[111] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[5])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[5])})
typs[112] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[22]), ir.NewField(base.Pos, nil, nil, typs[22])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[22])})
typs[113] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[24]), ir.NewField(base.Pos, nil, nil, typs[24])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[24])})
typs[114] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[22])})
typs[115] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[24])})
typs[116] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[65])})
typs[117] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[22])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])})
typs[118] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[24])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])})
typs[119] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[65])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])})
typs[120] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[26]), ir.NewField(base.Pos, nil, nil, typs[26])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[26])})
typs[121] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[5]), ir.NewField(base.Pos, nil, nil, typs[5])}, nil)
typs[122] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[5]), ir.NewField(base.Pos, nil, nil, typs[5]), ir.NewField(base.Pos, nil, nil, typs[5])}, nil)
typs[123] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[5])}, nil)
typs[104] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[1]), types.NewField(src.NoXPos, nil, typs[103]), types.NewField(src.NoXPos, nil, typs[15])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[103])})
typs[105] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[3]), types.NewField(src.NoXPos, nil, typs[3]), types.NewField(src.NoXPos, nil, typs[5])}, nil)
typs[106] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[7]), types.NewField(src.NoXPos, nil, typs[5])}, nil)
typs[107] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[3]), types.NewField(src.NoXPos, nil, typs[3]), types.NewField(src.NoXPos, nil, typs[5])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[6])})
typs[108] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[3]), types.NewField(src.NoXPos, nil, typs[3])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[6])})
typs[109] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[7]), types.NewField(src.NoXPos, nil, typs[7])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[6])})
typs[110] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[7]), types.NewField(src.NoXPos, nil, typs[5]), types.NewField(src.NoXPos, nil, typs[5])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[5])})
typs[111] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[7]), types.NewField(src.NoXPos, nil, typs[5])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[5])})
typs[112] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[22]), types.NewField(src.NoXPos, nil, typs[22])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[22])})
typs[113] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[24]), types.NewField(src.NoXPos, nil, typs[24])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[24])})
typs[114] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[20])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[22])})
typs[115] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[20])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[24])})
typs[116] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[20])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[65])})
typs[117] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[22])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[20])})
typs[118] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[24])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[20])})
typs[119] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[65])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[20])})
typs[120] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[26]), types.NewField(src.NoXPos, nil, typs[26])}, []*types.Field{types.NewField(src.NoXPos, nil, typs[26])})
typs[121] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[5]), types.NewField(src.NoXPos, nil, typs[5])}, nil)
typs[122] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[5]), types.NewField(src.NoXPos, nil, typs[5]), types.NewField(src.NoXPos, nil, typs[5])}, nil)
typs[123] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[7]), types.NewField(src.NoXPos, nil, typs[1]), types.NewField(src.NoXPos, nil, typs[5])}, nil)
typs[124] = types.NewSlice(typs[7])
typs[125] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[124])}, nil)
typs[125] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[7]), types.NewField(src.NoXPos, nil, typs[124])}, nil)
typs[126] = types.Types[types.TUINT8]
typs[127] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[126]), ir.NewField(base.Pos, nil, nil, typs[126])}, nil)
typs[127] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[126]), types.NewField(src.NoXPos, nil, typs[126])}, nil)
typs[128] = types.Types[types.TUINT16]
typs[129] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[128]), ir.NewField(base.Pos, nil, nil, typs[128])}, nil)
typs[130] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[65]), ir.NewField(base.Pos, nil, nil, typs[65])}, nil)
typs[131] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[24]), ir.NewField(base.Pos, nil, nil, typs[24])}, nil)
typs[129] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[128]), types.NewField(src.NoXPos, nil, typs[128])}, nil)
typs[130] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[65]), types.NewField(src.NoXPos, nil, typs[65])}, nil)
typs[131] = types.NewSignature(types.NoPkg, nil, []*types.Field{types.NewField(src.NoXPos, nil, typs[24]), types.NewField(src.NoXPos, nil, typs[24])}, nil)
return typs[:]
}
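
The compiler-internal types.NewSignature and types.NewField used throughout this table aren't importable, but the standard library's go/types exposes constructors of the same shape. A sketch of the equivalent of typs[14] above (func(*int32) interface{}) under that public API:

package main

import (
	"fmt"
	"go/token"
	"go/types"
)

func main() {
	iface := types.NewInterfaceType(nil, nil)
	iface.Complete()
	param := types.NewVar(token.NoPos, nil, "", types.NewPointer(types.Typ[types.Int32]))
	result := types.NewVar(token.NoPos, nil, "", iface)
	sig := types.NewSignature(nil, types.NewTuple(param), types.NewTuple(result), false)
	fmt.Println(sig) // func(*int32) interface{}
}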


@ -13,7 +13,6 @@ import (
)
func TestBuiltin(t *testing.T) {
t.Skip("mkbuiltin needs fixing")
testenv.MustHaveGoRun(t)
t.Parallel()


@ -929,7 +929,7 @@ func evalunsafe(n ir.Node) int64 {
fallthrough
case ir.ODOT:
r := r.(*ir.SelectorExpr)
v += r.Offset
v += r.Offset()
next = r.X
default:
ir.Dump("unsafenmagic", tsel)


@ -281,72 +281,6 @@ func CheckFuncStack() {
}
}
// turn a parsed function declaration into a type
func NewFuncType(nrecv *ir.Field, nparams, nresults []*ir.Field) *types.Type {
funarg := func(n *ir.Field) *types.Field {
lno := base.Pos
base.Pos = n.Pos
if n.Ntype != nil {
n.Type = typecheckNtype(n.Ntype).Type()
n.Ntype = nil
}
f := types.NewField(n.Pos, n.Sym, n.Type)
f.SetIsDDD(n.IsDDD)
if n.Decl != nil {
n.Decl.SetType(f.Type)
f.Nname = n.Decl
}
base.Pos = lno
return f
}
funargs := func(nn []*ir.Field) []*types.Field {
res := make([]*types.Field, len(nn))
for i, n := range nn {
res[i] = funarg(n)
}
return res
}
var recv *types.Field
if nrecv != nil {
recv = funarg(nrecv)
}
t := types.NewSignature(types.LocalPkg, recv, funargs(nparams), funargs(nresults))
checkdupfields("argument", t.Recvs().FieldSlice(), t.Params().FieldSlice(), t.Results().FieldSlice())
return t
}
// convert a parsed id/type list into
// a type for struct/interface/arglist
func NewStructType(l []*ir.Field) *types.Type {
lno := base.Pos
fields := make([]*types.Field, len(l))
for i, n := range l {
base.Pos = n.Pos
if n.Ntype != nil {
n.Type = typecheckNtype(n.Ntype).Type()
n.Ntype = nil
}
f := types.NewField(n.Pos, n.Sym, n.Type)
if n.Embedded {
checkembeddedtype(n.Type)
f.Embedded = 1
}
f.Note = n.Note
fields[i] = f
}
checkdupfields("field", fields)
base.Pos = lno
return types.NewStruct(types.LocalPkg, fields)
}
// Add a method, declared as a function.
// - msym is the method symbol
// - t is function type (with receiver)
@ -513,7 +447,6 @@ func funcarg(n *ir.Field, ctxt ir.Class) {
name := ir.NewNameAt(n.Pos, n.Sym)
n.Decl = name
name.Ntype = n.Ntype
name.SetIsDDD(n.IsDDD)
Declare(name, ctxt)
vargen++
@ -527,7 +460,6 @@ func funcarg2(f *types.Field, ctxt ir.Class) {
n := ir.NewNameAt(f.Pos, f.Sym)
f.Nname = n
n.SetType(f.Type)
n.SetIsDDD(f.IsDDD())
Declare(n, ctxt)
}
@ -604,27 +536,6 @@ func initname(s string) bool {
return s == "init"
}
func tointerface(nmethods []*ir.Field) *types.Type {
if len(nmethods) == 0 {
return types.Types[types.TINTER]
}
lno := base.Pos
methods := make([]*types.Field, len(nmethods))
for i, n := range nmethods {
base.Pos = n.Pos
if n.Ntype != nil {
n.Type = typecheckNtype(n.Ntype).Type()
n.Ntype = nil
}
methods[i] = types.NewField(n.Pos, n.Sym, n.Type)
}
base.Pos = lno
return types.NewInterface(types.LocalPkg, methods)
}
var vargen int
func Temp(t *types.Type) *ir.Name {
@ -643,6 +554,9 @@ func TempAt(pos src.XPos, curfn *ir.Func, t *types.Type) *ir.Name {
if t == nil {
base.Fatalf("tempAt called with nil type")
}
if t.Kind() == types.TFUNC && t.Recv() != nil {
base.Fatalf("misuse of method type: %v", t)
}
s := &types.Sym{
Name: autotmpname(len(curfn.Dcl)),
@ -676,30 +590,26 @@ func autotmpname(n int) string {
// sig is a method type, with receiver.
// Return the corresponding function type, with the receiver inserted as the
// first argument (or omitted, when recv is nil).
func NewMethodType(f *types.Type, receiver *types.Type) *types.Type {
inLen := f.Params().Fields().Len()
if receiver != nil {
inLen++
}
in := make([]*ir.Field, 0, inLen)
if receiver != nil {
d := ir.NewField(base.Pos, nil, nil, receiver)
in = append(in, d)
func NewMethodType(sig *types.Type, recv *types.Type) *types.Type {
nrecvs := 0
if recv != nil {
nrecvs++
}
for _, t := range f.Params().Fields().Slice() {
d := ir.NewField(base.Pos, nil, nil, t.Type)
d.IsDDD = t.IsDDD()
in = append(in, d)
params := make([]*types.Field, nrecvs+sig.Params().Fields().Len())
if recv != nil {
params[0] = types.NewField(base.Pos, nil, recv)
}
for i, param := range sig.Params().Fields().Slice() {
d := types.NewField(base.Pos, nil, param.Type)
d.SetIsDDD(param.IsDDD())
params[nrecvs+i] = d
}
outLen := f.Results().Fields().Len()
out := make([]*ir.Field, 0, outLen)
for _, t := range f.Results().Fields().Slice() {
d := ir.NewField(base.Pos, nil, nil, t.Type)
out = append(out, d)
results := make([]*types.Field, sig.Results().Fields().Len())
for i, t := range sig.Results().Fields().Slice() {
results[i] = types.NewField(base.Pos, nil, t.Type)
}
return NewFuncType(nil, in, out)
return types.NewSignature(types.LocalPkg, nil, params, results)
}
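
The transformation is the same one the language performs for method expressions: the receiver becomes an ordinary first parameter. A user-level illustration:

package main

import (
	"fmt"
	"reflect"
)

type T struct{}

func (T) M(x int) string { return "" }

func main() {
	// T.M is M with its receiver moved to the front of the parameter
	// list — the same shape NewMethodType constructs.
	fmt.Println(reflect.TypeOf(T.M)) // func(main.T, int) string
}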


@ -571,7 +571,6 @@ func tcDot(n *ir.SelectorExpr, top int) ir.Node {
}
n.X = typecheck(n.X, ctxExpr|ctxType)
n.X = DefaultLit(n.X, nil)
t := n.X.Type()
@ -581,8 +580,6 @@ func tcDot(n *ir.SelectorExpr, top int) ir.Node {
return n
}
s := n.Sel
if n.X.Op() == ir.OTYPE {
return typecheckMethodExpr(n)
}
@ -629,7 +626,10 @@ func tcDot(n *ir.SelectorExpr, top int) ir.Node {
}
if (n.Op() == ir.ODOTINTER || n.Op() == ir.ODOTMETH) && top&ctxCallee == 0 {
return tcCallPart(n, s)
// Create top-level function.
fn := makepartialcall(n)
return ir.NewCallPartExpr(n.Pos(), n.X, n.Selection, fn)
}
return n
}
@ -831,24 +831,18 @@ func tcSPtr(n *ir.UnaryExpr) ir.Node {
// tcSlice typechecks an OSLICE or OSLICE3 node.
func tcSlice(n *ir.SliceExpr) ir.Node {
n.X = Expr(n.X)
low, high, max := n.SliceBounds()
n.X = DefaultLit(Expr(n.X), nil)
n.Low = indexlit(Expr(n.Low))
n.High = indexlit(Expr(n.High))
n.Max = indexlit(Expr(n.Max))
hasmax := n.Op().IsSlice3()
low = Expr(low)
high = Expr(high)
max = Expr(max)
n.X = DefaultLit(n.X, nil)
low = indexlit(low)
high = indexlit(high)
max = indexlit(max)
n.SetSliceBounds(low, high, max)
l := n.X
if l.Type() == nil {
n.SetType(nil)
return n
}
if l.Type().IsArray() {
if !ir.IsAssignable(n.X) {
if !ir.IsAddressable(n.X) {
base.Errorf("invalid operation %v (slice of unaddressable value)", n)
n.SetType(nil)
return n
@ -886,19 +880,19 @@ func tcSlice(n *ir.SliceExpr) ir.Node {
return n
}
if low != nil && !checksliceindex(l, low, tp) {
if n.Low != nil && !checksliceindex(l, n.Low, tp) {
n.SetType(nil)
return n
}
if high != nil && !checksliceindex(l, high, tp) {
if n.High != nil && !checksliceindex(l, n.High, tp) {
n.SetType(nil)
return n
}
if max != nil && !checksliceindex(l, max, tp) {
if n.Max != nil && !checksliceindex(l, n.Max, tp) {
n.SetType(nil)
return n
}
if !checksliceconst(low, high) || !checksliceconst(low, max) || !checksliceconst(high, max) {
if !checksliceconst(n.Low, n.High) || !checksliceconst(n.Low, n.Max) || !checksliceconst(n.High, n.Max) {
n.SetType(nil)
return n
}
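
The IsAddressable check above is what rejects slicing an array rvalue. At the source level (the commented line fails to compile with the error emitted above):

package main

func main() {
	a := [3]int{1, 2, 3}
	_ = a[1:] // ok: a is an addressable variable
	// _ = [3]int{1, 2, 3}[1:] // invalid operation (slice of unaddressable value)
}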
@ -924,30 +918,22 @@ func tcSliceHeader(n *ir.SliceHeaderExpr) ir.Node {
base.Fatalf("need unsafe.Pointer for OSLICEHEADER")
}
if x := len(n.LenCap); x != 2 {
base.Fatalf("expected 2 params (len, cap) for OSLICEHEADER, got %d", x)
}
n.Ptr = Expr(n.Ptr)
l := Expr(n.LenCap[0])
c := Expr(n.LenCap[1])
l = DefaultLit(l, types.Types[types.TINT])
c = DefaultLit(c, types.Types[types.TINT])
n.Len = DefaultLit(Expr(n.Len), types.Types[types.TINT])
n.Cap = DefaultLit(Expr(n.Cap), types.Types[types.TINT])
if ir.IsConst(l, constant.Int) && ir.Int64Val(l) < 0 {
if ir.IsConst(n.Len, constant.Int) && ir.Int64Val(n.Len) < 0 {
base.Fatalf("len for OSLICEHEADER must be non-negative")
}
if ir.IsConst(c, constant.Int) && ir.Int64Val(c) < 0 {
if ir.IsConst(n.Cap, constant.Int) && ir.Int64Val(n.Cap) < 0 {
base.Fatalf("cap for OSLICEHEADER must be non-negative")
}
if ir.IsConst(l, constant.Int) && ir.IsConst(c, constant.Int) && constant.Compare(l.Val(), token.GTR, c.Val()) {
if ir.IsConst(n.Len, constant.Int) && ir.IsConst(n.Cap, constant.Int) && constant.Compare(n.Len.Val(), token.GTR, n.Cap.Val()) {
base.Fatalf("len larger than cap for OSLICEHEADER")
}
n.LenCap[0] = l
n.LenCap[1] = c
return n
}


@ -73,17 +73,17 @@ func ClosureType(clo *ir.ClosureExpr) *types.Type {
// The information appears in the binary in the form of type descriptors;
// the struct is unnamed so that closures in multiple packages with the
// same struct type can share the descriptor.
fields := []*ir.Field{
ir.NewField(base.Pos, Lookup(".F"), nil, types.Types[types.TUINTPTR]),
fields := []*types.Field{
types.NewField(base.Pos, Lookup(".F"), types.Types[types.TUINTPTR]),
}
for _, v := range clo.Func.ClosureVars {
typ := v.Type()
if !v.Byval() {
typ = types.NewPtr(typ)
}
fields = append(fields, ir.NewField(base.Pos, v.Sym(), nil, typ))
fields = append(fields, types.NewField(base.Pos, v.Sym(), typ))
}
typ := NewStructType(fields)
typ := types.NewStruct(types.NoPkg, fields)
typ.SetNoalg(true)
return typ
}
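
As a concrete picture of the struct built above: for a closure that assigns one captured variable (forcing a by-reference capture) and only reads another (captured by value), the descriptor has the shape below. This is an illustrative mirror under a 64-bit assumption, not the compiler's emitted type:

package main

import (
	"fmt"
	"unsafe"
)

//	x := 0; n := 1
//	f := func() { x++; _ = n } // x by reference, n by value

type closure struct {
	F uintptr // ".F": the code pointer, always the first word
	x *int    // by-reference capture becomes a pointer field
	n int     // by-value capture is stored inline
}

func main() {
	fmt.Println(unsafe.Sizeof(closure{})) // 24: three words on a 64-bit target
}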
@ -92,9 +92,9 @@ func ClosureType(clo *ir.ClosureExpr) *types.Type {
// needed in the closure for n (n must be a OCALLPART node).
// The address of a variable of the returned type can be cast to a func.
func PartialCallType(n *ir.CallPartExpr) *types.Type {
t := NewStructType([]*ir.Field{
ir.NewField(base.Pos, Lookup("F"), nil, types.Types[types.TUINTPTR]),
ir.NewField(base.Pos, Lookup("R"), nil, n.X.Type()),
t := types.NewStruct(types.NoPkg, []*types.Field{
types.NewField(base.Pos, Lookup("F"), types.Types[types.TUINTPTR]),
types.NewField(base.Pos, Lookup("R"), n.X.Type()),
})
t.SetNoalg(true)
return t
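
PartialCallType is the analogous pair for a method value: the wrapper's code pointer plus the saved receiver. The observable behavior at the source level:

package main

import "fmt"

type T struct{ n int }

func (t T) M() int { return t.n }

func main() {
	m := T{n: 7}.M   // method value: the receiver is captured in the {F, R} pair
	fmt.Println(m()) // 7
}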
@ -249,7 +249,9 @@ var globClosgen int32
// makepartialcall returns a DCLFUNC node representing the wrapper function (*-fm) needed
// for partial calls.
func makepartialcall(dot *ir.SelectorExpr, t0 *types.Type, meth *types.Sym) *ir.Func {
func makepartialcall(dot *ir.SelectorExpr) *ir.Func {
t0 := dot.Type()
meth := dot.Sel
rcvrtype := dot.X.Type()
sym := ir.MethodSymSuffix(rcvrtype, meth, "-fm")
@ -263,11 +265,10 @@ func makepartialcall(dot *ir.SelectorExpr, t0 *types.Type, meth *types.Sym) *ir.
ir.CurFunc = nil
// Set line number equal to the line number where the method is declared.
var m *types.Field
if lookdot0(meth, rcvrtype, &m, false) == 1 && m.Pos.IsKnown() {
base.Pos = m.Pos
if pos := dot.Selection.Pos; pos.IsKnown() {
base.Pos = pos
}
// Note: !m.Pos.IsKnown() happens for method expressions where
// Note: !dot.Selection.Pos.IsKnown() happens for method expressions where
// the method is implicitly declared. The Error method of the
// built-in error type is one such method. We leave the line
// number at the use of the method expression in this
@ -280,18 +281,17 @@ func makepartialcall(dot *ir.SelectorExpr, t0 *types.Type, meth *types.Sym) *ir.
fn := DeclFunc(sym, tfn)
fn.SetDupok(true)
fn.SetNeedctxt(true)
fn.SetWrapper(true)
// Declare and initialize variable holding receiver.
cr := ir.NewClosureRead(rcvrtype, types.Rnd(int64(types.PtrSize), int64(rcvrtype.Align)))
ptr := NewName(Lookup(".this"))
Declare(ptr, ir.PAUTO)
ptr.SetUsed(true)
var ptr *ir.Name
var body []ir.Node
if rcvrtype.IsPtr() || rcvrtype.IsInterface() {
ptr.SetType(rcvrtype)
ptr = Temp(rcvrtype)
body = append(body, ir.NewAssignStmt(base.Pos, ptr, cr))
} else {
ptr.SetType(types.NewPtr(rcvrtype))
ptr = Temp(types.NewPtr(rcvrtype))
body = append(body, ir.NewAssignStmt(base.Pos, ptr, NodAddr(cr)))
}
@ -382,23 +382,6 @@ func tcClosure(clo *ir.ClosureExpr, top int) {
Target.Decls = append(Target.Decls, fn)
}
func tcCallPart(n ir.Node, sym *types.Sym) *ir.CallPartExpr {
switch n.Op() {
case ir.ODOTINTER, ir.ODOTMETH:
break
default:
base.Fatalf("invalid typecheckpartialcall")
}
dot := n.(*ir.SelectorExpr)
// Create top-level function.
fn := makepartialcall(dot, dot.Type(), sym)
fn.SetWrapper(true)
return ir.NewCallPartExpr(dot.Pos(), dot.X, dot.Selection, fn)
}
// type check function definition
// To be called by typecheck, not directly.
// (Call typecheckFunc instead.)
@ -542,9 +525,7 @@ func tcCall(n *ir.CallExpr, top int) ir.Node {
default:
n.SetOp(ir.OCALLFUNC)
if t.Kind() != types.TFUNC {
// TODO(mdempsky): Remove "o.Sym() != nil" once we stop
// using ir.Name for numeric literals.
if o := ir.Orig(l); o.Name() != nil && o.Sym() != nil && types.BuiltinPkg.Lookup(o.Sym().Name).Def != nil {
if o := ir.Orig(l); o.Name() != nil && types.BuiltinPkg.Lookup(o.Sym().Name).Def != nil {
// be more specific when the non-function
// name matches a predeclared function
base.Errorf("cannot call non-function %L, declared at %s",


@ -594,23 +594,15 @@ func (w *exportWriter) selector(s *types.Sym) {
base.Fatalf("missing currPkg")
}
// Method selectors are rewritten into method symbols (of the
// form T.M) during typechecking, but we want to write out
// just the bare method name.
name := s.Name
if i := strings.LastIndex(name, "."); i >= 0 {
name = name[i+1:]
} else {
pkg := w.currPkg
if types.IsExported(name) {
pkg = types.LocalPkg
}
if s.Pkg != pkg {
base.Fatalf("package mismatch in selector: %v in package %q, but want %q", s, s.Pkg.Path, pkg.Path)
}
pkg := w.currPkg
if types.IsExported(s.Name) {
pkg = types.LocalPkg
}
if s.Pkg != pkg {
base.Fatalf("package mismatch in selector: %v in package %q, but want %q", s, s.Pkg.Path, pkg.Path)
}
w.string(name)
w.string(s.Name)
}
func (w *exportWriter) typ(t *types.Type) {
@ -858,8 +850,6 @@ func intSize(typ *types.Type) (signed bool, maxBytes uint) {
// according to the maximum number of bytes needed to encode a value
// of type typ. As a special case, 8-bit types are always encoded as a
// single byte.
//
// TODO(mdempsky): Is this level of complexity really worthwhile?
func (w *exportWriter) mpint(x constant.Value, typ *types.Type) {
signed, maxBytes := intSize(typ)
@ -1145,7 +1135,7 @@ func (w *exportWriter) stmt(n ir.Node) {
n := n.(*ir.RangeStmt)
w.op(ir.ORANGE)
w.pos(n.Pos())
w.stmtList(n.Vars)
w.exprsOrNil(n.Key, n.Value)
w.expr(n.X)
w.stmtList(n.Body)
@ -1154,8 +1144,7 @@ func (w *exportWriter) stmt(n ir.Node) {
w.op(n.Op())
w.pos(n.Pos())
w.stmtList(n.Init())
w.exprsOrNil(nil, nil) // TODO(rsc): Delete (and fix importer).
w.caseList(n)
w.commList(n.Cases)
case ir.OSWITCH:
n := n.(*ir.SwitchStmt)
@ -1163,7 +1152,7 @@ func (w *exportWriter) stmt(n ir.Node) {
w.pos(n.Pos())
w.stmtList(n.Init())
w.exprsOrNil(n.Tag, nil)
w.caseList(n)
w.caseList(n.Cases, isNamedTypeSwitch(n.Tag))
// case OCASE:
// handled by caseList
@ -1187,39 +1176,32 @@ func (w *exportWriter) stmt(n ir.Node) {
}
}
func isNamedTypeSwitch(n ir.Node) bool {
if n.Op() != ir.OSWITCH {
return false
}
sw := n.(*ir.SwitchStmt)
if sw.Tag == nil || sw.Tag.Op() != ir.OTYPESW {
return false
}
guard := sw.Tag.(*ir.TypeSwitchGuard)
return guard.Tag != nil
func isNamedTypeSwitch(x ir.Node) bool {
guard, ok := x.(*ir.TypeSwitchGuard)
return ok && guard.Tag != nil
}
func (w *exportWriter) caseList(sw ir.Node) {
namedTypeSwitch := isNamedTypeSwitch(sw)
var cases []ir.Node
if sw.Op() == ir.OSWITCH {
cases = sw.(*ir.SwitchStmt).Cases
} else {
cases = sw.(*ir.SelectStmt).Cases
}
func (w *exportWriter) caseList(cases []*ir.CaseClause, namedTypeSwitch bool) {
w.uint64(uint64(len(cases)))
for _, cas := range cases {
cas := cas.(*ir.CaseStmt)
w.pos(cas.Pos())
w.stmtList(cas.List)
if namedTypeSwitch {
w.localName(cas.Vars[0].(*ir.Name))
w.localName(cas.Var.(*ir.Name))
}
w.stmtList(cas.Body)
}
}
func (w *exportWriter) commList(cases []*ir.CommClause) {
w.uint64(uint64(len(cases)))
for _, cas := range cases {
w.pos(cas.Pos())
w.node(cas.Comm)
w.stmtList(cas.Body)
}
}
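// Editor's note: with CaseStmt split into CaseClause (switch: List, Var,
// Body) and CommClause (select: Comm, Body), the exporter gains the two
// dedicated routines above instead of one caseList that switched on the
// parent statement's Op; the matching importReader.commList appears in a
// later hunk of this diff.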
func (w *exportWriter) exprList(list ir.Nodes) {
for _, n := range list {
w.expr(n)
@ -1313,7 +1295,7 @@ func (w *exportWriter) expr(n ir.Node) {
s = n.Tag.Sym()
}
w.localIdent(s, 0) // declared pseudo-variable, if any
w.exprsOrNil(n.X, nil)
w.expr(n.X)
// case OTARRAY, OTMAP, OTCHAN, OTSTRUCT, OTINTER, OTFUNC:
// should have been resolved by typechecking - handled by default case
@ -1348,7 +1330,8 @@ func (w *exportWriter) expr(n ir.Node) {
n := n.(*ir.KeyExpr)
w.op(ir.OKEY)
w.pos(n.Pos())
w.exprsOrNil(n.Key, n.Value)
w.expr(n.Key)
w.expr(n.Value)
// case OSTRUCTKEY:
// unreachable - handled in case OSTRUCTLIT by elemList
@ -1387,17 +1370,15 @@ func (w *exportWriter) expr(n ir.Node) {
w.op(ir.OSLICE)
w.pos(n.Pos())
w.expr(n.X)
low, high, _ := n.SliceBounds()
w.exprsOrNil(low, high)
w.exprsOrNil(n.Low, n.High)
case ir.OSLICE3, ir.OSLICE3ARR:
n := n.(*ir.SliceExpr)
w.op(ir.OSLICE3)
w.pos(n.Pos())
w.expr(n.X)
low, high, max := n.SliceBounds()
w.exprsOrNil(low, high)
w.expr(max)
w.exprsOrNil(n.Low, n.High)
w.expr(n.Max)
case ir.OCOPY, ir.OCOMPLEX:
// treated like other builtin calls (see e.g., OREAL)
@ -1412,8 +1393,8 @@ func (w *exportWriter) expr(n ir.Node) {
n := n.(*ir.ConvExpr)
w.op(ir.OCONV)
w.pos(n.Pos())
w.expr(n.X)
w.typ(n.Type())
w.expr(n.X)
case ir.OREAL, ir.OIMAG, ir.OCAP, ir.OCLOSE, ir.OLEN, ir.ONEW, ir.OPANIC:
n := n.(*ir.UnaryExpr)
@ -1544,6 +1525,7 @@ func (w *exportWriter) fieldList(list ir.Nodes) {
w.uint64(uint64(len(list)))
for _, n := range list {
n := n.(*ir.StructKeyExpr)
w.pos(n.Pos())
w.selector(n.Field)
w.expr(n.Value)
}


@ -71,7 +71,12 @@ func ImportBody(fn *ir.Func) {
base.Fatalf("missing import reader for %v", fn)
}
if inimport {
base.Fatalf("recursive inimport")
}
inimport = true
r.doInline(fn)
inimport = false
}
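// Editor's sketch (not part of the diff): the inimport guard in miniature —
// a package-level flag that turns unexpected reentrancy into a hard failure
// instead of unbounded recursion. Names are illustrative.
package main

import "fmt"

var inimport bool

func importBody(load func()) {
	if inimport {
		panic("recursive inimport") // the compiler uses base.Fatalf here
	}
	inimport = true
	load()
	inimport = false
}

func main() {
	importBody(func() { fmt.Println("reading inline function body") })
}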
func importReaderFor(sym *types.Sym, importers map[*types.Sym]iimporterAndOffset) *importReader {
@ -767,10 +772,10 @@ func (r *importReader) stmtList() []ir.Node {
return list
}
func (r *importReader) caseList(sw ir.Node) []ir.Node {
namedTypeSwitch := isNamedTypeSwitch(sw)
func (r *importReader) caseList(switchExpr ir.Node) []*ir.CaseClause {
namedTypeSwitch := isNamedTypeSwitch(switchExpr)
cases := make([]ir.Node, r.uint64())
cases := make([]*ir.CaseClause, r.uint64())
for i := range cases {
cas := ir.NewCaseStmt(r.pos(), nil, nil)
cas.List.Set(r.stmtList())
@ -780,8 +785,8 @@ func (r *importReader) caseList(sw ir.Node) []ir.Node {
// Sym for diagnostics anyway.
caseVar := ir.NewNameAt(cas.Pos(), r.ident())
Declare(caseVar, DeclContext)
cas.Vars = []ir.Node{caseVar}
caseVar.Defn = sw.(*ir.SwitchStmt).Tag
cas.Var = caseVar
caseVar.Defn = switchExpr
}
cas.Body.Set(r.stmtList())
cases[i] = cas
@ -789,6 +794,14 @@ func (r *importReader) caseList(sw ir.Node) []ir.Node {
return cases
}
func (r *importReader) commList() []*ir.CommClause {
cases := make([]*ir.CommClause, r.uint64())
for i := range cases {
cases[i] = ir.NewCommStmt(r.pos(), r.node(), r.stmtList())
}
return cases
}
func (r *importReader) exprList() []ir.Node {
var list []ir.Node
for {
@ -821,7 +834,7 @@ func (r *importReader) node() ir.Node {
pos := r.pos()
typ := r.typ()
n := npos(pos, NodNil())
n := ir.NewNilExpr(pos)
n.SetType(typ)
return n
@ -829,7 +842,7 @@ func (r *importReader) node() ir.Node {
pos := r.pos()
typ := r.typ()
n := npos(pos, ir.NewLiteral(r.value(typ)))
n := ir.NewBasicLit(pos, r.value(typ))
n.SetType(typ)
return n
@ -851,8 +864,7 @@ func (r *importReader) node() ir.Node {
if s := r.ident(); s != nil {
tag = ir.NewIdent(pos, s)
}
expr, _ := r.exprsOrNil()
return ir.NewTypeSwitchGuard(pos, tag, expr)
return ir.NewTypeSwitchGuard(pos, tag, r.expr())
// case OTARRAY, OTMAP, OTCHAN, OTSTRUCT, OTINTER, OTFUNC:
// unreachable - should have been resolved by typechecking
@ -864,26 +876,16 @@ func (r *importReader) node() ir.Node {
// unreachable - mapped to case OADDR below by exporter
case ir.OSTRUCTLIT:
// TODO(mdempsky): Export position information for OSTRUCTKEY nodes.
savedlineno := base.Pos
base.Pos = r.pos()
n := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(r.typ()).(ir.Ntype), nil)
n.List.Set(r.elemList()) // special handling of field names
base.Pos = savedlineno
return n
return ir.NewCompLitExpr(r.pos(), ir.OCOMPLIT, ir.TypeNode(r.typ()), r.fieldList())
// case OARRAYLIT, OSLICELIT, OMAPLIT:
// unreachable - mapped to case OCOMPLIT below by exporter
case ir.OCOMPLIT:
n := ir.NewCompLitExpr(r.pos(), ir.OCOMPLIT, ir.TypeNode(r.typ()).(ir.Ntype), nil)
n.List.Set(r.exprList())
return n
return ir.NewCompLitExpr(r.pos(), ir.OCOMPLIT, ir.TypeNode(r.typ()), r.exprList())
case ir.OKEY:
pos := r.pos()
left, right := r.exprsOrNil()
return ir.NewKeyExpr(pos, left, right)
return ir.NewKeyExpr(r.pos(), r.expr(), r.expr())
// case OSTRUCTKEY:
// unreachable - handled in case OSTRUCTLIT by elemList
@ -913,22 +915,19 @@ func (r *importReader) node() ir.Node {
return ir.NewIndexExpr(r.pos(), r.expr(), r.expr())
case ir.OSLICE, ir.OSLICE3:
n := ir.NewSliceExpr(r.pos(), op, r.expr())
pos, x := r.pos(), r.expr()
low, high := r.exprsOrNil()
var max ir.Node
if n.Op().IsSlice3() {
if op.IsSlice3() {
max = r.expr()
}
n.SetSliceBounds(low, high, max)
return n
return ir.NewSliceExpr(pos, op, x, low, high, max)
// case OCONV, OCONVIFACE, OCONVNOP, OBYTES2STR, ORUNES2STR, OSTR2BYTES, OSTR2RUNES, ORUNESTR:
// unreachable - mapped to OCONV case below by exporter
case ir.OCONV:
n := ir.NewConvExpr(r.pos(), ir.OCONV, nil, r.expr())
n.SetType(r.typ())
return n
return ir.NewConvExpr(r.pos(), ir.OCONV, r.typ(), r.expr())
case ir.OCOPY, ir.OCOMPLEX, ir.OREAL, ir.OIMAG, ir.OAPPEND, ir.OCAP, ir.OCLOSE, ir.ODELETE, ir.OLEN, ir.OMAKE, ir.ONEW, ir.OPANIC, ir.ORECOVER, ir.OPRINT, ir.OPRINTN:
n := builtinCall(r.pos(), op)
@ -942,10 +941,10 @@ func (r *importReader) node() ir.Node {
// unreachable - mapped to OCALL case below by exporter
case ir.OCALL:
n := ir.NewCallExpr(r.pos(), ir.OCALL, nil, nil)
n.PtrInit().Set(r.stmtList())
n.X = r.expr()
n.Args.Set(r.exprList())
pos := r.pos()
init := r.stmtList()
n := ir.NewCallExpr(pos, ir.OCALL, r.expr(), r.exprList())
n.PtrInit().Set(init)
n.IsDDD = r.bool()
return n
@ -979,7 +978,7 @@ func (r *importReader) node() ir.Node {
case ir.OADDSTR:
pos := r.pos()
list := r.exprList()
x := npos(pos, list[0])
x := list[0]
for _, y := range list[1:] {
x = ir.NewBinaryExpr(pos, ir.OADD, x, y)
}
@ -1006,9 +1005,7 @@ func (r *importReader) node() ir.Node {
return ir.NewAssignStmt(r.pos(), r.expr(), r.expr())
case ir.OASOP:
n := ir.NewAssignOpStmt(r.pos(), ir.OXXX, nil, nil)
n.AsOp = r.op()
n.X = r.expr()
n := ir.NewAssignOpStmt(r.pos(), r.op(), r.expr(), nil)
if !r.bool() {
n.Y = ir.NewInt(1)
n.IncDec = true
@ -1021,15 +1018,10 @@ func (r *importReader) node() ir.Node {
// unreachable - mapped to OAS2 case below by exporter
case ir.OAS2:
n := ir.NewAssignListStmt(r.pos(), ir.OAS2, nil, nil)
n.Lhs.Set(r.exprList())
n.Rhs.Set(r.exprList())
return n
return ir.NewAssignListStmt(r.pos(), ir.OAS2, r.exprList(), r.exprList())
case ir.ORETURN:
n := ir.NewReturnStmt(r.pos(), nil)
n.Results.Set(r.exprList())
return n
return ir.NewReturnStmt(r.pos(), r.exprList())
// case ORETJMP:
// unreachable - generated by compiler for trampoline routines (not exported)
@ -1038,57 +1030,50 @@ func (r *importReader) node() ir.Node {
return ir.NewGoDeferStmt(r.pos(), op, r.expr())
case ir.OIF:
n := ir.NewIfStmt(r.pos(), nil, nil, nil)
n.PtrInit().Set(r.stmtList())
n.Cond = r.expr()
n.Body.Set(r.stmtList())
n.Else.Set(r.stmtList())
pos, init := r.pos(), r.stmtList()
n := ir.NewIfStmt(pos, r.expr(), r.stmtList(), r.stmtList())
n.PtrInit().Set(init)
return n
case ir.OFOR:
n := ir.NewForStmt(r.pos(), nil, nil, nil, nil)
n.PtrInit().Set(r.stmtList())
left, right := r.exprsOrNil()
n.Cond = left
n.Post = right
n.Body.Set(r.stmtList())
pos, init := r.pos(), r.stmtList()
cond, post := r.exprsOrNil()
n := ir.NewForStmt(pos, nil, cond, post, r.stmtList())
n.PtrInit().Set(init)
return n
case ir.ORANGE:
n := ir.NewRangeStmt(r.pos(), nil, nil, nil)
n.Vars.Set(r.stmtList())
n.X = r.expr()
n.Body.Set(r.stmtList())
return n
pos := r.pos()
k, v := r.exprsOrNil()
return ir.NewRangeStmt(pos, k, v, r.expr(), r.stmtList())
case ir.OSELECT:
n := ir.NewSelectStmt(r.pos(), nil)
n.PtrInit().Set(r.stmtList())
r.exprsOrNil() // TODO(rsc): Delete (and fix exporter). These are always nil.
n.Cases.Set(r.caseList(n))
pos := r.pos()
init := r.stmtList()
n := ir.NewSelectStmt(pos, r.commList())
n.PtrInit().Set(init)
return n
case ir.OSWITCH:
n := ir.NewSwitchStmt(r.pos(), nil, nil)
n.PtrInit().Set(r.stmtList())
left, _ := r.exprsOrNil()
n.Tag = left
n.Cases.Set(r.caseList(n))
pos := r.pos()
init := r.stmtList()
x, _ := r.exprsOrNil()
n := ir.NewSwitchStmt(pos, x, r.caseList(x))
n.PtrInit().Set(init)
return n
// case OCASE:
// handled by caseList
case ir.OFALL:
n := ir.NewBranchStmt(r.pos(), ir.OFALL, nil)
return n
return ir.NewBranchStmt(r.pos(), ir.OFALL, nil)
// case OEMPTY:
// unreachable - not emitted by exporter
case ir.OBREAK, ir.OCONTINUE, ir.OGOTO:
var sym *types.Sym
pos := r.pos()
var sym *types.Sym
if label := r.string(); label != "" {
sym = Lookup(label)
}
@ -1111,12 +1096,10 @@ func (r *importReader) op() ir.Op {
return ir.Op(r.uint64())
}
func (r *importReader) elemList() []ir.Node {
c := r.uint64()
list := make([]ir.Node, c)
func (r *importReader) fieldList() []ir.Node {
list := make([]ir.Node, r.uint64())
for i := range list {
s := r.ident()
list[i] = ir.NewStructKeyExpr(base.Pos, s, r.expr())
list[i] = ir.NewStructKeyExpr(r.pos(), r.ident(), r.expr())
}
return list
}
@ -1135,8 +1118,3 @@ func (r *importReader) exprsOrNil() (a, b ir.Node) {
func builtinCall(pos src.XPos, op ir.Op) *ir.CallExpr {
return ir.NewCallExpr(pos, ir.OCALL, ir.NewIdent(base.Pos, types.BuiltinPkg.Lookup(ir.OpNames[op])), nil)
}
func npos(pos src.XPos, n ir.Node) ir.Node {
n.SetPos(pos)
return n
}


@ -36,8 +36,8 @@ func main() {
fmt.Fprintln(&b, "package typecheck")
fmt.Fprintln(&b)
fmt.Fprintln(&b, `import (`)
fmt.Fprintln(&b, ` "cmd/compile/internal/ir"`)
fmt.Fprintln(&b, ` "cmd/compile/internal/types"`)
fmt.Fprintln(&b, ` "cmd/internal/src"`)
fmt.Fprintln(&b, `)`)
mkbuiltin(&b, "runtime")
@ -169,7 +169,7 @@ func (i *typeInterner) mktype(t ast.Expr) string {
}
return fmt.Sprintf("types.NewChan(%s, %s)", i.subtype(t.Value), dir)
case *ast.FuncType:
return fmt.Sprintf("functype(nil, %s, %s)", i.fields(t.Params, false), i.fields(t.Results, false))
return fmt.Sprintf("types.NewSignature(types.NoPkg, nil, %s, %s)", i.fields(t.Params, false), i.fields(t.Results, false))
case *ast.InterfaceType:
if len(t.Methods.List) != 0 {
log.Fatal("non-empty interfaces unsupported")
@ -180,7 +180,7 @@ func (i *typeInterner) mktype(t ast.Expr) string {
case *ast.StarExpr:
return fmt.Sprintf("types.NewPtr(%s)", i.subtype(t.X))
case *ast.StructType:
return fmt.Sprintf("tostruct(%s)", i.fields(t.Fields, true))
return fmt.Sprintf("types.NewStruct(types.NoPkg, %s)", i.fields(t.Fields, true))
default:
log.Fatalf("unhandled type: %#v", t)
@ -196,18 +196,18 @@ func (i *typeInterner) fields(fl *ast.FieldList, keepNames bool) string {
for _, f := range fl.List {
typ := i.subtype(f.Type)
if len(f.Names) == 0 {
res = append(res, fmt.Sprintf("anonfield(%s)", typ))
res = append(res, fmt.Sprintf("types.NewField(src.NoXPos, nil, %s)", typ))
} else {
for _, name := range f.Names {
if keepNames {
res = append(res, fmt.Sprintf("namedfield(%q, %s)", name.Name, typ))
res = append(res, fmt.Sprintf("types.NewField(src.NoXPos, Lookup(%q), %s)", name.Name, typ))
} else {
res = append(res, fmt.Sprintf("anonfield(%s)", typ))
res = append(res, fmt.Sprintf("types.NewField(src.NoXPos, nil, %s)", typ))
}
}
}
}
return fmt.Sprintf("[]*ir.Field{%s}", strings.Join(res, ", "))
return fmt.Sprintf("[]*types.Field{%s}", strings.Join(res, ", "))
}
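// Editor's note: after this change mkbuiltin emits direct types-package
// constructors instead of the old anonfield/namedfield/functype/tostruct
// helpers. For a declaration like func(int), the generated expression now
// reads (illustrative):
//
//	types.NewSignature(types.NoPkg, nil, []*types.Field{
//		types.NewField(src.NoXPos, nil, types.Types[types.TINT]),
//	}, nil)
//
// which is why the generated file's imports swap cmd/compile/internal/ir
// for cmd/internal/src in the hunk above.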
func intconst(e ast.Expr) int64 {


@ -11,27 +11,29 @@ import (
"cmd/internal/src"
)
func RangeExprType(t *types.Type) *types.Type {
if t.IsPtr() && t.Elem().IsArray() {
return t.Elem()
}
return t
}
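// Editor's sketch (not part of the diff): why RangeExprType unwraps *[N]T.
// Per the spec, ranging over a pointer to an array behaves like ranging
// over the array itself, so index and element types come from [N]T.
package main

import "fmt"

func main() {
	arr := &[3]int{10, 20, 30}
	for i, v := range arr { // the [3]int view of a *[3]int operand
		fmt.Println(i, v)
	}
}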
func typecheckrangeExpr(n *ir.RangeStmt) {
n.X = Expr(n.X)
t := n.X.Type()
if t == nil {
if n.X.Type() == nil {
return
}
t := RangeExprType(n.X.Type())
// delicate little dance. see typecheckas2
ls := n.Vars
for i1, n1 := range ls {
if !ir.DeclaredBy(n1, n) {
ls[i1] = AssignExpr(ls[i1])
}
if n.Key != nil && !ir.DeclaredBy(n.Key, n) {
n.Key = AssignExpr(n.Key)
}
if n.Value != nil && !ir.DeclaredBy(n.Value, n) {
n.Value = AssignExpr(n.Value)
}
if t.IsPtr() && t.Elem().IsArray() {
t = t.Elem()
}
n.SetType(t)
var t1, t2 *types.Type
var tk, tv *types.Type
toomany := false
switch t.Kind() {
default:
@ -39,12 +41,12 @@ func typecheckrangeExpr(n *ir.RangeStmt) {
return
case types.TARRAY, types.TSLICE:
t1 = types.Types[types.TINT]
t2 = t.Elem()
tk = types.Types[types.TINT]
tv = t.Elem()
case types.TMAP:
t1 = t.Key()
t2 = t.Elem()
tk = t.Key()
tv = t.Elem()
case types.TCHAN:
if !t.ChanDir().CanRecv() {
@ -52,61 +54,35 @@ func typecheckrangeExpr(n *ir.RangeStmt) {
return
}
t1 = t.Elem()
t2 = nil
if len(n.Vars) == 2 {
tk = t.Elem()
tv = nil
if n.Value != nil {
toomany = true
}
case types.TSTRING:
t1 = types.Types[types.TINT]
t2 = types.RuneType
tk = types.Types[types.TINT]
tv = types.RuneType
}
if len(n.Vars) > 2 || toomany {
if toomany {
base.ErrorfAt(n.Pos(), "too many variables in range")
}
var v1, v2 ir.Node
if len(n.Vars) != 0 {
v1 = n.Vars[0]
}
if len(n.Vars) > 1 {
v2 = n.Vars[1]
}
// this is not only an optimization but also a requirement in the spec.
// "if the second iteration variable is the blank identifier, the range
// clause is equivalent to the same clause with only the first variable
// present."
if ir.IsBlank(v2) {
if v1 != nil {
n.Vars = []ir.Node{v1}
}
v2 = nil
}
if v1 != nil {
if ir.DeclaredBy(v1, n) {
v1.SetType(t1)
} else if v1.Type() != nil {
if op, why := assignop(t1, v1.Type()); op == ir.OXXX {
base.ErrorfAt(n.Pos(), "cannot assign type %v to %L in range%s", t1, v1, why)
do := func(nn ir.Node, t *types.Type) {
if nn != nil {
if ir.DeclaredBy(nn, n) {
nn.SetType(t)
} else if nn.Type() != nil {
if op, why := assignop(t, nn.Type()); op == ir.OXXX {
base.ErrorfAt(n.Pos(), "cannot assign type %v to %L in range%s", t, nn, why)
}
}
checkassign(n, nn)
}
checkassign(n, v1)
}
if v2 != nil {
if ir.DeclaredBy(v2, n) {
v2.SetType(t2)
} else if v2.Type() != nil {
if op, why := assignop(t2, v2.Type()); op == ir.OXXX {
base.ErrorfAt(n.Pos(), "cannot assign type %v to %L in range%s", t2, v2, why)
}
}
checkassign(n, v2)
}
do(n.Key, tk)
do(n.Value, tv)
}
// type check assignment.
@ -117,47 +93,16 @@ func tcAssign(n *ir.AssignStmt) {
defer tracePrint("typecheckas", n)(nil)
}
// delicate little dance.
// the definition of n may refer to this assignment
// as its definition, in which case it will call typecheckas.
// in that case, do not call typecheck back, or it will cycle.
// if the variable has a type (ntype) then typechecking
// will not look at defn, so it is okay (and desirable,
// so that the conversion below happens).
n.X = Resolve(n.X)
if !ir.DeclaredBy(n.X, n) || n.X.Name().Ntype != nil {
if n.Y == nil {
n.X = AssignExpr(n.X)
return
}
// Use ctxMultiOK so we can emit an "N variables but M values" error
// to be consistent with typecheckas2 (#26616).
n.Y = typecheck(n.Y, ctxExpr|ctxMultiOK)
checkassign(n, n.X)
if n.Y != nil && n.Y.Type() != nil {
if n.Y.Type().IsFuncArgStruct() {
base.Errorf("assignment mismatch: 1 variable but %v returns %d values", n.Y.(*ir.CallExpr).X, n.Y.Type().NumFields())
// Multi-value RHS isn't actually valid for OAS; nil out
// to indicate failed typechecking.
n.Y.SetType(nil)
} else if n.X.Type() != nil {
n.Y = AssignConv(n.Y, n.X.Type(), "assignment")
}
}
lhs, rhs := []ir.Node{n.X}, []ir.Node{n.Y}
assign(n, lhs, rhs)
n.X, n.Y = lhs[0], rhs[0]
if ir.DeclaredBy(n.X, n) && n.X.Name().Ntype == nil {
n.Y = DefaultLit(n.Y, nil)
n.X.SetType(n.Y.Type())
}
// second half of dance.
// now that right is done, typecheck the left
// just to get it over with. see dance above.
n.SetTypecheck(1)
if n.X.Typecheck() == 0 {
n.X = AssignExpr(n.X)
}
// TODO(mdempsky): This seems out of place.
if !ir.IsBlank(n.X) {
types.CheckSize(n.X.Type()) // ensure width is calculated for backend
}
@ -168,132 +113,118 @@ func tcAssignList(n *ir.AssignListStmt) {
defer tracePrint("typecheckas2", n)(nil)
}
ls := n.Lhs
for i1, n1 := range ls {
// delicate little dance.
n1 = Resolve(n1)
ls[i1] = n1
assign(n, n.Lhs, n.Rhs)
}
if !ir.DeclaredBy(n1, n) || n1.Name().Ntype != nil {
ls[i1] = AssignExpr(ls[i1])
func assign(stmt ir.Node, lhs, rhs []ir.Node) {
// delicate little dance.
// the definition of lhs may refer to this assignment
// as its definition, in which case it will call typecheckas.
// in that case, do not call typecheck back, or it will cycle.
// if the variable has a type (ntype) then typechecking
// will not look at defn, so it is okay (and desirable,
// so that the conversion below happens).
checkLHS := func(i int, typ *types.Type) {
lhs[i] = Resolve(lhs[i])
if n := lhs[i]; typ != nil && ir.DeclaredBy(n, stmt) && n.Name().Ntype == nil {
if typ.Kind() != types.TNIL {
n.SetType(defaultType(typ))
} else {
base.Errorf("use of untyped nil")
}
}
if lhs[i].Typecheck() == 0 {
lhs[i] = AssignExpr(lhs[i])
}
checkassign(stmt, lhs[i])
}
assignType := func(i int, typ *types.Type) {
checkLHS(i, typ)
if typ != nil {
checkassignto(typ, lhs[i])
}
}
cl := len(n.Lhs)
cr := len(n.Rhs)
if cl > 1 && cr == 1 {
n.Rhs[0] = typecheck(n.Rhs[0], ctxExpr|ctxMultiOK)
cr := len(rhs)
if len(rhs) == 1 {
rhs[0] = typecheck(rhs[0], ctxExpr|ctxMultiOK)
if rtyp := rhs[0].Type(); rtyp != nil && rtyp.IsFuncArgStruct() {
cr = rtyp.NumFields()
}
} else {
Exprs(n.Rhs)
}
checkassignlist(n, n.Lhs)
var l ir.Node
var r ir.Node
if cl == cr {
// easy
ls := n.Lhs
rs := n.Rhs
for il, nl := range ls {
nr := rs[il]
if nl.Type() != nil && nr.Type() != nil {
rs[il] = AssignConv(nr, nl.Type(), "assignment")
}
if ir.DeclaredBy(nl, n) && nl.Name().Ntype == nil {
rs[il] = DefaultLit(rs[il], nil)
nl.SetType(rs[il].Type())
}
}
goto out
}
l = n.Lhs[0]
r = n.Rhs[0]
// x,y,z = f()
if cr == 1 {
if r.Type() == nil {
goto out
}
switch r.Op() {
case ir.OCALLMETH, ir.OCALLINTER, ir.OCALLFUNC:
if !r.Type().IsFuncArgStruct() {
break
}
cr = r.Type().NumFields()
if cr != cl {
goto mismatch
}
r.(*ir.CallExpr).Use = ir.CallUseList
n.SetOp(ir.OAS2FUNC)
for i, l := range n.Lhs {
f := r.Type().Field(i)
if f.Type != nil && l.Type() != nil {
checkassignto(f.Type, l)
}
if ir.DeclaredBy(l, n) && l.Name().Ntype == nil {
l.SetType(f.Type)
}
}
goto out
}
Exprs(rhs)
}
// x, ok = y
if cl == 2 && cr == 1 {
if r.Type() == nil {
goto out
}
assignOK:
for len(lhs) == 2 && cr == 1 {
stmt := stmt.(*ir.AssignListStmt)
r := rhs[0]
switch r.Op() {
case ir.OINDEXMAP, ir.ORECV, ir.ODOTTYPE:
switch r.Op() {
case ir.OINDEXMAP:
n.SetOp(ir.OAS2MAPR)
case ir.ORECV:
n.SetOp(ir.OAS2RECV)
case ir.ODOTTYPE:
r := r.(*ir.TypeAssertExpr)
n.SetOp(ir.OAS2DOTTYPE)
r.SetOp(ir.ODOTTYPE2)
}
if l.Type() != nil {
checkassignto(r.Type(), l)
}
if ir.DeclaredBy(l, n) {
l.SetType(r.Type())
}
l := n.Lhs[1]
if l.Type() != nil && !l.Type().IsBoolean() {
checkassignto(types.Types[types.TBOOL], l)
}
if ir.DeclaredBy(l, n) && l.Name().Ntype == nil {
l.SetType(types.Types[types.TBOOL])
}
goto out
case ir.OINDEXMAP:
stmt.SetOp(ir.OAS2MAPR)
case ir.ORECV:
stmt.SetOp(ir.OAS2RECV)
case ir.ODOTTYPE:
r := r.(*ir.TypeAssertExpr)
stmt.SetOp(ir.OAS2DOTTYPE)
r.SetOp(ir.ODOTTYPE2)
default:
break assignOK
}
assignType(0, r.Type())
assignType(1, types.UntypedBool)
return
}
mismatch:
switch r.Op() {
default:
base.Errorf("assignment mismatch: %d variables but %d values", cl, cr)
case ir.OCALLFUNC, ir.OCALLMETH, ir.OCALLINTER:
r := r.(*ir.CallExpr)
base.Errorf("assignment mismatch: %d variables but %v returns %d values", cl, r.X, cr)
if len(lhs) != cr {
if r, ok := rhs[0].(*ir.CallExpr); ok && len(rhs) == 1 {
if r.Type() != nil {
base.ErrorfAt(stmt.Pos(), "assignment mismatch: %d variable%s but %v returns %d value%s", len(lhs), plural(len(lhs)), r.X, cr, plural(cr))
}
} else {
base.ErrorfAt(stmt.Pos(), "assignment mismatch: %d variable%s but %v value%s", len(lhs), plural(len(lhs)), len(rhs), plural(len(rhs)))
}
for i := range lhs {
checkLHS(i, nil)
}
return
}
// second half of dance
out:
n.SetTypecheck(1)
ls = n.Lhs
for i1, n1 := range ls {
if n1.Typecheck() == 0 {
ls[i1] = AssignExpr(ls[i1])
// x,y,z = f()
if cr > len(rhs) {
stmt := stmt.(*ir.AssignListStmt)
stmt.SetOp(ir.OAS2FUNC)
r := rhs[0].(*ir.CallExpr)
r.Use = ir.CallUseList
rtyp := r.Type()
for i := range lhs {
assignType(i, rtyp.Field(i).Type)
}
return
}
for i, r := range rhs {
checkLHS(i, r.Type())
if lhs[i].Type() != nil {
rhs[i] = AssignConv(r, lhs[i].Type(), "assignment")
}
}
}
func plural(n int) string {
if n == 1 {
return ""
}
return "s"
}
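// Editor's sketch (not part of the diff): what the plural helper buys in
// the new mismatch errors — "1 variable but 2 values" versus "2 variables
// but 1 value". Standalone and illustrative.
package main

import "fmt"

func plural(n int) string {
	if n == 1 {
		return ""
	}
	return "s"
}

func main() {
	fmt.Printf("assignment mismatch: %d variable%s but %d value%s\n", 1, plural(1), 2, plural(2))
	fmt.Printf("assignment mismatch: %d variable%s but %d value%s\n", 2, plural(2), 1, plural(1))
}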
// tcFor typechecks an OFOR node.
func tcFor(n *ir.ForStmt) ir.Node {
Stmts(n.Init())
@ -399,11 +330,11 @@ func tcRange(n *ir.RangeStmt) {
// second half of dance, the first half being typecheckrangeExpr
n.SetTypecheck(1)
ls := n.Vars
for i1, n1 := range ls {
if n1.Typecheck() == 0 {
ls[i1] = AssignExpr(ls[i1])
}
if n.Key != nil && n.Key.Typecheck() == 0 {
n.Key = AssignExpr(n.Key)
}
if n.Value != nil && n.Value.Typecheck() == 0 {
n.Value = AssignExpr(n.Value)
}
decldepth++
@ -429,31 +360,23 @@ func tcReturn(n *ir.ReturnStmt) ir.Node {
// select
func tcSelect(sel *ir.SelectStmt) {
var def ir.Node
var def *ir.CommClause
lno := ir.SetPos(sel)
Stmts(sel.Init())
for _, ncase := range sel.Cases {
ncase := ncase.(*ir.CaseStmt)
if len(ncase.List) == 0 {
if ncase.Comm == nil {
// default
if def != nil {
base.ErrorfAt(ncase.Pos(), "multiple defaults in select (first at %v)", ir.Line(def))
} else {
def = ncase
}
} else if len(ncase.List) > 1 {
base.ErrorfAt(ncase.Pos(), "select cases cannot be lists")
} else {
ncase.List[0] = Stmt(ncase.List[0])
n := ncase.List[0]
n := Stmt(ncase.Comm)
ncase.Comm = n
ncase.List.Set(nil)
oselrecv2 := func(dst, recv ir.Node, colas bool) {
n := ir.NewAssignListStmt(n.Pos(), ir.OSELRECV2, nil, nil)
n.Lhs = []ir.Node{dst, ir.BlankNode}
n.Rhs = []ir.Node{recv}
n.Def = colas
oselrecv2 := func(dst, recv ir.Node, def bool) {
n := ir.NewAssignListStmt(n.Pos(), ir.OSELRECV2, []ir.Node{dst, ir.BlankNode}, []ir.Node{recv})
n.Def = def
n.SetTypecheck(1)
ncase.Comm = n
}
@ -577,7 +500,6 @@ func tcSwitchExpr(n *ir.SwitchStmt) {
var defCase ir.Node
var cs constSet
for _, ncase := range n.Cases {
ncase := ncase.(*ir.CaseStmt)
ls := ncase.List
if len(ls) == 0 { // default:
if defCase != nil {
@ -646,7 +568,6 @@ func tcSwitchType(n *ir.SwitchStmt) {
var defCase, nilCase ir.Node
var ts typeSet
for _, ncase := range n.Cases {
ncase := ncase.(*ir.CaseStmt)
ls := ncase.List
if len(ls) == 0 { // default:
if defCase != nil {
@ -694,7 +615,7 @@ func tcSwitchType(n *ir.SwitchStmt) {
ts.add(ncase.Pos(), n1.Type())
}
if len(ncase.Vars) != 0 {
if ncase.Var != nil {
// Assign the clause variable's type.
vt := t
if len(ls) == 1 {
@ -707,7 +628,7 @@ func tcSwitchType(n *ir.SwitchStmt) {
}
}
nvar := ncase.Vars[0]
nvar := ncase.Var
nvar.SetType(vt)
if vt != nil {
nvar = AssignExpr(nvar)
@ -716,7 +637,7 @@ func tcSwitchType(n *ir.SwitchStmt) {
nvar.SetTypecheck(1)
nvar.SetWalkdef(1)
}
ncase.Vars[0] = nvar
ncase.Var = nvar
}
Stmts(ncase.Body)


@ -73,13 +73,42 @@ func tcChanType(n *ir.ChanType) ir.Node {
// tcFuncType typechecks an OTFUNC node.
func tcFuncType(n *ir.FuncType) ir.Node {
n.SetOTYPE(NewFuncType(n.Recv, n.Params, n.Results))
misc := func(f *types.Field, nf *ir.Field) {
f.SetIsDDD(nf.IsDDD)
if nf.Decl != nil {
nf.Decl.SetType(f.Type)
f.Nname = nf.Decl
}
}
lno := base.Pos
var recv *types.Field
if n.Recv != nil {
recv = tcField(n.Recv, misc)
}
t := types.NewSignature(types.LocalPkg, recv, tcFields(n.Params, misc), tcFields(n.Results, misc))
checkdupfields("argument", t.Recvs().FieldSlice(), t.Params().FieldSlice(), t.Results().FieldSlice())
base.Pos = lno
n.SetOTYPE(t)
return n
}
// tcInterfaceType typechecks an OTINTER node.
func tcInterfaceType(n *ir.InterfaceType) ir.Node {
n.SetOTYPE(tointerface(n.Methods))
if len(n.Methods) == 0 {
n.SetOTYPE(types.Types[types.TINTER])
return n
}
lno := base.Pos
methods := tcFields(n.Methods, nil)
base.Pos = lno
n.SetOTYPE(types.NewInterface(types.LocalPkg, methods))
return n
}
@ -117,6 +146,43 @@ func tcSliceType(n *ir.SliceType) ir.Node {
// tcStructType typechecks an OTSTRUCT node.
func tcStructType(n *ir.StructType) ir.Node {
n.SetOTYPE(NewStructType(n.Fields))
lno := base.Pos
fields := tcFields(n.Fields, func(f *types.Field, nf *ir.Field) {
if nf.Embedded {
checkembeddedtype(f.Type)
f.Embedded = 1
}
f.Note = nf.Note
})
checkdupfields("field", fields)
base.Pos = lno
n.SetOTYPE(types.NewStruct(types.LocalPkg, fields))
return n
}
// tcField typechecks a generic Field.
// misc can be provided to handle specialized typechecking.
func tcField(n *ir.Field, misc func(*types.Field, *ir.Field)) *types.Field {
base.Pos = n.Pos
if n.Ntype != nil {
n.Type = typecheckNtype(n.Ntype).Type()
n.Ntype = nil
}
f := types.NewField(n.Pos, n.Sym, n.Type)
if misc != nil {
misc(f, n)
}
return f
}
// tcFields typechecks a slice of generic Fields.
// misc can be provided to handle specialized typechecking.
func tcFields(l []*ir.Field, misc func(*types.Field, *ir.Field)) []*types.Field {
fields := make([]*types.Field, len(l))
for i, n := range l {
fields[i] = tcField(n, misc)
}
return fields
}
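// Editor's sketch (not part of the diff): the tcField/tcFields shape — one
// constructor with an optional per-field hook — in miniature. The hook is
// how tcFuncType sets IsDDD/Nname and tcStructType sets Embedded/Note
// without duplicating the common path. Names are illustrative.
package main

import "fmt"

type field struct {
	name string
	note string
}

func newField(name string, hook func(*field)) *field {
	f := &field{name: name}
	if hook != nil {
		hook(f) // specialized typechecking, if any
	}
	return f
}

func newFields(names []string, hook func(*field)) []*field {
	fields := make([]*field, len(names))
	for i, name := range names {
		fields[i] = newField(name, hook)
	}
	return fields
}

func main() {
	for _, f := range newFields([]string{"x", "y"}, func(f *field) { f.note = "tracked" }) {
		fmt.Println(f.name, f.note)
	}
}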


@ -251,13 +251,7 @@ func Resolve(n ir.Node) (res ir.Node) {
}
}
if inimport {
base.Fatalf("recursive inimport")
}
inimport = true
n = expandDecl(n)
inimport = false
return n
return expandDecl(n)
}
r := ir.AsNode(n.Sym().Def)
@ -1232,7 +1226,7 @@ func lookdot(n *ir.SelectorExpr, t *types.Type, dostrcmp int) *types.Field {
if f1.Offset == types.BADWIDTH {
base.Fatalf("lookdot badwidth %v %p", f1, f1)
}
n.Offset = f1.Offset
n.Selection = f1
n.SetType(f1.Type)
if t.IsInterface() {
if n.X.Type().IsPtr() {
@ -1243,7 +1237,6 @@ func lookdot(n *ir.SelectorExpr, t *types.Type, dostrcmp int) *types.Field {
n.SetOp(ir.ODOTINTER)
}
n.Selection = f1
return f1
}
@ -1298,11 +1291,9 @@ func lookdot(n *ir.SelectorExpr, t *types.Type, dostrcmp int) *types.Field {
return nil
}
n.Sel = ir.MethodSym(n.X.Type(), f2.Sym)
n.Offset = f2.Offset
n.Selection = f2
n.SetType(f2.Type)
n.SetOp(ir.ODOTMETH)
n.Selection = f2
return f2
}
@ -1638,7 +1629,7 @@ func nonexported(sym *types.Sym) bool {
}
func checklvalue(n ir.Node, verb string) {
if !ir.IsAssignable(n) {
if !ir.IsAddressable(n) {
base.Errorf("cannot %s %v", verb, n)
}
}
@ -1656,7 +1647,7 @@ func checkassign(stmt ir.Node, n ir.Node) {
}
}
if ir.IsAssignable(n) {
if ir.IsAddressable(n) {
return
}
if n.Op() == ir.OINDEXMAP {
@ -1690,6 +1681,11 @@ func checkassignlist(stmt ir.Node, l ir.Nodes) {
}
func checkassignto(src *types.Type, dst ir.Node) {
// TODO(mdempsky): Handle all untyped types correctly.
if src == types.UntypedBool && dst.Type().IsBoolean() {
return
}
if op, why := assignop(src, dst.Type()); op == ir.OXXX {
base.Errorf("cannot assign %v to %L in multiple assignment%s", src, dst, why)
return
@ -2101,7 +2097,6 @@ func isTermNode(n ir.Node) bool {
}
def := false
for _, cas := range n.Cases {
cas := cas.(*ir.CaseStmt)
if !isTermNodes(cas.Body) {
return false
}
@ -2117,7 +2112,6 @@ func isTermNode(n ir.Node) bool {
return false
}
for _, cas := range n.Cases {
cas := cas.(*ir.CaseStmt)
if !isTermNodes(cas.Body) {
return false
}
@ -2216,9 +2210,6 @@ func deadcodeslice(nn *ir.Nodes) {
case ir.OBLOCK:
n := n.(*ir.BlockStmt)
deadcodeslice(&n.List)
case ir.OCASE:
n := n.(*ir.CaseStmt)
deadcodeslice(&n.Body)
case ir.OFOR:
n := n.(*ir.ForStmt)
deadcodeslice(&n.Body)
@ -2231,10 +2222,14 @@ func deadcodeslice(nn *ir.Nodes) {
deadcodeslice(&n.Body)
case ir.OSELECT:
n := n.(*ir.SelectStmt)
deadcodeslice(&n.Cases)
for _, cas := range n.Cases {
deadcodeslice(&cas.Body)
}
case ir.OSWITCH:
n := n.(*ir.SwitchStmt)
deadcodeslice(&n.Cases)
for _, cas := range n.Cases {
deadcodeslice(&cas.Body)
}
}
if cut {


@ -10,7 +10,7 @@ import "cmd/compile/internal/base"
// hashing a Type.
type AlgKind int
//go:generate stringer -type AlgKind -trimprefix A
//go:generate stringer -type AlgKind -trimprefix A alg.go
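// (Editor's note: naming alg.go makes stringer parse only that file rather
// than the whole package; the generated header in the next hunk records the
// same command line.)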
const (
// These values are known by runtime.


@ -1,4 +1,4 @@
// Code generated by "stringer -type AlgKind -trimprefix A"; DO NOT EDIT.
// Code generated by "stringer -type AlgKind -trimprefix A alg.go"; DO NOT EDIT.
package types


@ -180,15 +180,6 @@ func symfmt(b *bytes.Buffer, s *Sym, verb rune, mode fmtMode) {
b.WriteString(s.Name)
}
func SymMethodName(s *Sym) string {
// Skip leading "type." in method name
name := s.Name
if i := strings.LastIndex(name, "."); i >= 0 {
name = name[i+1:]
}
return name
}
// Type
var BasicTypeNames = []string{
@ -595,7 +586,10 @@ func fldconv(b *bytes.Buffer, f *Field, verb rune, mode fmtMode, visited map[*Ty
if funarg != FunargNone {
name = fmt.Sprint(f.Nname)
} else if verb == 'L' {
name = SymMethodName(s)
name = s.Name
if name == ".F" {
name = "F" // Hack for toolstash -cmp.
}
if !IsExported(name) && mode != fmtTypeIDName {
name = sconv(s, 0, mode) // qualify non-exported names (used on structs, not on funarg)
}


@ -2,8 +2,6 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:generate go run mkbuiltin.go
package types
import (


@ -1,4 +1,4 @@
// Code generated by "stringer -type EType -trimprefix T"; DO NOT EDIT.
// Code generated by "stringer -type Kind -trimprefix T type.go"; DO NOT EDIT.
package types
@ -48,13 +48,13 @@ func _() {
_ = x[NTYPE-37]
}
const _EType_name = "xxxINT8UINT8INT16UINT16INT32UINT32INT64UINT64INTUINTUINTPTRCOMPLEX64COMPLEX128FLOAT32FLOAT64BOOLPTRFUNCSLICEARRAYSTRUCTCHANMAPINTERFORWANYSTRINGUNSAFEPTRIDEALNILBLANKFUNCARGSCHANARGSSSATUPLERESULTSNTYPE"
const _Kind_name = "xxxINT8UINT8INT16UINT16INT32UINT32INT64UINT64INTUINTUINTPTRCOMPLEX64COMPLEX128FLOAT32FLOAT64BOOLPTRFUNCSLICEARRAYSTRUCTCHANMAPINTERFORWANYSTRINGUNSAFEPTRIDEALNILBLANKFUNCARGSCHANARGSSSATUPLERESULTSNTYPE"
var _EType_index = [...]uint8{0, 3, 7, 12, 17, 23, 28, 34, 39, 45, 48, 52, 59, 68, 78, 85, 92, 96, 99, 103, 108, 113, 119, 123, 126, 131, 135, 138, 144, 153, 158, 161, 166, 174, 182, 185, 190, 197, 202}
var _Kind_index = [...]uint8{0, 3, 7, 12, 17, 23, 28, 34, 39, 45, 48, 52, 59, 68, 78, 85, 92, 96, 99, 103, 108, 113, 119, 123, 126, 131, 135, 138, 144, 153, 158, 161, 166, 174, 182, 185, 190, 197, 202}
func (i Kind) String() string {
if i >= Kind(len(_EType_index)-1) {
return "EType(" + strconv.FormatInt(int64(i), 10) + ")"
if i >= Kind(len(_Kind_index)-1) {
return "Kind(" + strconv.FormatInt(int64(i), 10) + ")"
}
return _EType_name[_EType_index[i]:_EType_index[i+1]]
return _Kind_name[_Kind_index[i]:_Kind_index[i+1]]
}


@ -33,9 +33,9 @@ type VarObject interface {
RecordFrameOffset(int64) // save frame offset
}
//go:generate stringer -type EType -trimprefix T
//go:generate stringer -type Kind -trimprefix T type.go
// EType describes a kind of type.
// Kind describes a kind of type.
type Kind uint8
const (


@ -297,54 +297,6 @@ func fncall(l ir.Node, rt *types.Type) bool {
return true
}
func ascompatee(op ir.Op, nl, nr []ir.Node, init *ir.Nodes) []ir.Node {
// check assign expression list to
// an expression list. called in
// expr-list = expr-list
// ensure order of evaluation for function calls
for i := range nl {
nl[i] = safeExpr(nl[i], init)
}
for i1 := range nr {
nr[i1] = safeExpr(nr[i1], init)
}
var nn []*ir.AssignStmt
i := 0
for ; i < len(nl); i++ {
if i >= len(nr) {
break
}
// Do not generate 'x = x' during return. See issue 4014.
if op == ir.ORETURN && ir.SameSafeExpr(nl[i], nr[i]) {
continue
}
nn = append(nn, ascompatee1(nl[i], nr[i], init))
}
// cannot happen: caller checked that lists had same length
if i < len(nl) || i < len(nr) {
var nln, nrn ir.Nodes
nln.Set(nl)
nrn.Set(nr)
base.Fatalf("error in shape across %+v %v %+v / %d %d [%s]", nln, op, nrn, len(nl), len(nr), ir.FuncName(ir.CurFunc))
}
return reorder3(nn)
}
func ascompatee1(l ir.Node, r ir.Node, init *ir.Nodes) *ir.AssignStmt {
// convas will turn map assigns into function calls,
// making it impossible for reorder3 to work.
n := ir.NewAssignStmt(base.Pos, l, r)
if l.Op() == ir.OINDEXMAP {
return n
}
return convas(n, init)
}
// check assign type list to
// an expression list. called in
// expr-list = func()
@ -387,39 +339,79 @@ func ascompatet(nl ir.Nodes, nr *types.Type) []ir.Node {
return append(nn, mm...)
}
// reorder3
// from ascompatee
// a,b = c,d
// simultaneous assignment. there cannot
// be later use of an earlier lvalue.
//
// function calls have been removed.
func reorder3(all []*ir.AssignStmt) []ir.Node {
// check assign expression list to
// an expression list. called in
// expr-list = expr-list
func ascompatee(op ir.Op, nl, nr []ir.Node, init *ir.Nodes) []ir.Node {
// cannot happen: should have been rejected during type checking
if len(nl) != len(nr) {
base.Fatalf("assignment operands mismatch: %+v / %+v", ir.Nodes(nl), ir.Nodes(nr))
}
// ensure order of evaluation for function calls
for i := range nl {
nl[i] = safeExpr(nl[i], init)
}
for i := range nr {
nr[i] = safeExpr(nr[i], init)
}
var assigned ir.NameSet
var memWrite bool
// affected reports whether expression n could be affected by
// the assignments applied so far.
affected := func(n ir.Node) bool {
return ir.Any(n, func(n ir.Node) bool {
if n.Op() == ir.ONAME && assigned.Has(n.(*ir.Name)) {
return true
}
if memWrite && readsMemory(n) {
return true
}
return false
})
}
// If a needed expression may be affected by an
// earlier assignment, make an early copy of that
// expression and use the copy instead.
var early []ir.Node
save := func(np *ir.Node) {
if n := *np; affected(n) {
tmp := ir.Node(typecheck.Temp(n.Type()))
as := typecheck.Stmt(ir.NewAssignStmt(base.Pos, tmp, n))
early = append(early, as)
*np = tmp
}
}
var mapinit ir.Nodes
for i, n := range all {
l := n.X
var late []ir.Node
for i, l := range nl {
r := nr[i]
// Do not generate 'x = x' during return. See issue 4014.
if op == ir.ORETURN && ir.SameSafeExpr(l, r) {
continue
}
as := ir.NewAssignStmt(base.Pos, l, r)
// Save subexpressions needed on left side.
// Drill through non-dereferences.
for {
switch ll := l; ll.Op() {
case ir.ODOT:
ll := ll.(*ir.SelectorExpr)
l = ll.X
continue
case ir.OPAREN:
ll := ll.(*ir.ParenExpr)
l = ll.X
continue
case ir.OINDEX:
ll := ll.(*ir.IndexExpr)
switch ll := l.(type) {
case *ir.IndexExpr:
if ll.X.Type().IsArray() {
ll.Index = reorder3save(ll.Index, all, i, &early)
save(&ll.Index)
l = ll.X
continue
}
case *ir.ParenExpr:
l = ll.X
continue
case *ir.SelectorExpr:
if ll.Op() == ir.ODOT {
l = ll.X
continue
}
@ -427,230 +419,82 @@ func reorder3(all []*ir.AssignStmt) []ir.Node {
break
}
var name *ir.Name
switch l.Op() {
default:
base.Fatalf("reorder3 unexpected lvalue %v", l.Op())
base.Fatalf("unexpected lvalue %v", l.Op())
case ir.ONAME:
break
name = l.(*ir.Name)
case ir.OINDEX, ir.OINDEXMAP:
l := l.(*ir.IndexExpr)
l.X = reorder3save(l.X, all, i, &early)
l.Index = reorder3save(l.Index, all, i, &early)
if l.Op() == ir.OINDEXMAP {
all[i] = convas(all[i], &mapinit)
}
save(&l.X)
save(&l.Index)
case ir.ODEREF:
l := l.(*ir.StarExpr)
l.X = reorder3save(l.X, all, i, &early)
save(&l.X)
case ir.ODOTPTR:
l := l.(*ir.SelectorExpr)
l.X = reorder3save(l.X, all, i, &early)
save(&l.X)
}
// Save expression on right side.
all[i].Y = reorder3save(all[i].Y, all, i, &early)
save(&as.Y)
late = append(late, convas(as, init))
if name == nil || name.Addrtaken() || name.Class_ == ir.PEXTERN || name.Class_ == ir.PAUTOHEAP {
memWrite = true
continue
}
if ir.IsBlank(name) {
// We can ignore assignments to blank.
continue
}
assigned.Add(name)
}
early = append(mapinit, early...)
for _, as := range all {
early = append(early, as)
}
return early
return append(early, late...)
}
// if the evaluation of *np would be affected by the
// assignments in all up to but not including the ith assignment,
// copy into a temporary during *early and
// replace *np with that temp.
// The result of reorder3save MUST be assigned back to n, e.g.
// n.Left = reorder3save(n.Left, all, i, early)
func reorder3save(n ir.Node, all []*ir.AssignStmt, i int, early *[]ir.Node) ir.Node {
if !aliased(n, all[:i]) {
return n
}
// readsMemory reports whether the evaluation n directly reads from
// memory that might be written to indirectly.
func readsMemory(n ir.Node) bool {
switch n.Op() {
case ir.ONAME:
n := n.(*ir.Name)
return n.Class_ == ir.PEXTERN || n.Class_ == ir.PAUTOHEAP || n.Addrtaken()
q := ir.Node(typecheck.Temp(n.Type()))
as := typecheck.Stmt(ir.NewAssignStmt(base.Pos, q, n))
*early = append(*early, as)
return q
}
// Is it possible that the computation of r might be
// affected by assignments in all?
func aliased(r ir.Node, all []*ir.AssignStmt) bool {
if r == nil {
case ir.OADD,
ir.OAND,
ir.OANDAND,
ir.OANDNOT,
ir.OBITNOT,
ir.OCONV,
ir.OCONVIFACE,
ir.OCONVNOP,
ir.ODIV,
ir.ODOT,
ir.ODOTTYPE,
ir.OLITERAL,
ir.OLSH,
ir.OMOD,
ir.OMUL,
ir.ONEG,
ir.ONIL,
ir.OOR,
ir.OOROR,
ir.OPAREN,
ir.OPLUS,
ir.ORSH,
ir.OSUB,
ir.OXOR:
return false
}
// Treat all fields of a struct as referring to the whole struct.
// We could do better but we would have to keep track of the fields.
for r.Op() == ir.ODOT {
r = r.(*ir.SelectorExpr).X
}
// Look for obvious aliasing: a variable being assigned
// during the all list and appearing in n.
// Also record whether there are any writes to addressable
// memory (either main memory or variables whose addresses
// have been taken).
memwrite := false
for _, as := range all {
// We can ignore assignments to blank.
if ir.IsBlank(as.X) {
continue
}
lv := ir.OuterValue(as.X)
if lv.Op() != ir.ONAME {
memwrite = true
continue
}
l := lv.(*ir.Name)
switch l.Class_ {
default:
base.Fatalf("unexpected class: %v, %v", l, l.Class_)
case ir.PAUTOHEAP, ir.PEXTERN:
memwrite = true
continue
case ir.PAUTO, ir.PPARAM, ir.PPARAMOUT:
if l.Name().Addrtaken() {
memwrite = true
continue
}
if refersToName(l, r) {
// Direct hit: l appears in r.
return true
}
}
}
// The variables being written do not appear in r.
// However, r might refer to computed addresses
// that are being written.
// If no computed addresses are affected by the writes, no aliasing.
if !memwrite {
return false
}
// If r does not refer to any variables whose addresses have been taken,
// then the only possible writes to r would be directly to the variables,
// and we checked those above, so no aliasing problems.
if !anyAddrTaken(r) {
return false
}
// Otherwise, both the writes and r refer to computed memory addresses.
// Assume that they might conflict.
// Be conservative.
return true
}
// anyAddrTaken reports whether the evaluation n,
// which appears on the left side of an assignment,
// may refer to variables whose addresses have been taken.
func anyAddrTaken(n ir.Node) bool {
return ir.Any(n, func(n ir.Node) bool {
switch n.Op() {
case ir.ONAME:
n := n.(*ir.Name)
return n.Class_ == ir.PEXTERN || n.Class_ == ir.PAUTOHEAP || n.Name().Addrtaken()
case ir.ODOT: // but not ODOTPTR - should have been handled in aliased.
base.Fatalf("anyAddrTaken unexpected ODOT")
case ir.OADD,
ir.OAND,
ir.OANDAND,
ir.OANDNOT,
ir.OBITNOT,
ir.OCONV,
ir.OCONVIFACE,
ir.OCONVNOP,
ir.ODIV,
ir.ODOTTYPE,
ir.OLITERAL,
ir.OLSH,
ir.OMOD,
ir.OMUL,
ir.ONEG,
ir.ONIL,
ir.OOR,
ir.OOROR,
ir.OPAREN,
ir.OPLUS,
ir.ORSH,
ir.OSUB,
ir.OXOR:
return false
}
// Be conservative.
return true
})
}
// refersToName reports whether r refers to name.
func refersToName(name *ir.Name, r ir.Node) bool {
return ir.Any(r, func(r ir.Node) bool {
return r.Op() == ir.ONAME && r == name
})
}
// refersToCommonName reports whether any name
// appears in common between l and r.
// This is called from sinit.go.
func refersToCommonName(l ir.Node, r ir.Node) bool {
if l == nil || r == nil {
return false
}
// This could be written elegantly as a Find nested inside a Find:
//
// found := ir.Find(l, func(l ir.Node) interface{} {
// if l.Op() == ir.ONAME {
// return ir.Find(r, func(r ir.Node) interface{} {
// if r.Op() == ir.ONAME && l.Name() == r.Name() {
// return r
// }
// return nil
// })
// }
// return nil
// })
// return found != nil
//
// But that would allocate a new closure for the inner Find
// for each name found on the left side.
// It may not matter at all, but the below way of writing it
// only allocates two closures, not O(|L|) closures.
var doL, doR func(ir.Node) error
var targetL *ir.Name
doR = func(r ir.Node) error {
if r.Op() == ir.ONAME && r.Name() == targetL {
return stop
}
return ir.DoChildren(r, doR)
}
doL = func(l ir.Node) error {
if l.Op() == ir.ONAME {
l := l.(*ir.Name)
targetL = l.Name()
if doR(r) == stop {
return stop
}
}
return ir.DoChildren(l, doL)
}
return doL(l) == stop
}
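// Editor's sketch (not part of the diff): the semantics the merged
// ascompatee must preserve. In a simultaneous assignment every RHS is read
// before any LHS is written, so save() above copies an operand to a
// temporary exactly when affected() reports that an earlier assignment
// could clobber it.
package main

import "fmt"

func main() {
	x, y := 1, 2
	// Roughly lowered as: tmp := x; x = y; y = tmp — the second pair's RHS
	// (x) is affected by the first assignment, so it is saved early.
	x, y = y, x
	fmt.Println(x, y) // 2 1
}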
// expand append(l1, l2...) to
// init {
// s := l1
@ -700,17 +544,15 @@ func appendSlice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
nodes.Append(nif)
// s = s[:n]
nt := ir.NewSliceExpr(base.Pos, ir.OSLICE, s)
nt.SetSliceBounds(nil, nn, nil)
nt := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, nil, nn, nil)
nt.SetBounded(true)
nodes.Append(ir.NewAssignStmt(base.Pos, s, nt))
var ncopy ir.Node
if elemtype.HasPointers() {
// copy(s[len(l1):], l2)
slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s)
slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, ir.NewUnaryExpr(base.Pos, ir.OLEN, l1), nil, nil)
slice.SetType(s.Type())
slice.SetSliceBounds(ir.NewUnaryExpr(base.Pos, ir.OLEN, l1), nil, nil)
ir.CurFunc.SetWBPos(n.Pos())
@ -724,9 +566,8 @@ func appendSlice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
// rely on runtime to instrument:
// copy(s[len(l1):], l2)
// l2 can be a slice or string.
slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s)
slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, ir.NewUnaryExpr(base.Pos, ir.OLEN, l1), nil, nil)
slice.SetType(s.Type())
slice.SetSliceBounds(ir.NewUnaryExpr(base.Pos, ir.OLEN, l1), nil, nil)
ptr1, len1 := backingArrayPtrLen(cheapExpr(slice, &nodes))
ptr2, len2 := backingArrayPtrLen(l2)
@ -870,8 +711,7 @@ func extendSlice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
nodes = append(nodes, nif)
// s = s[:n]
nt := ir.NewSliceExpr(base.Pos, ir.OSLICE, s)
nt.SetSliceBounds(nil, nn, nil)
nt := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, nil, nn, nil)
nt.SetBounded(true)
nodes = append(nodes, ir.NewAssignStmt(base.Pos, s, nt))


@ -95,8 +95,7 @@ func walkAppend(n *ir.CallExpr, init *ir.Nodes, dst ir.Node) ir.Node {
nn := typecheck.Temp(types.Types[types.TINT])
l = append(l, ir.NewAssignStmt(base.Pos, nn, ir.NewUnaryExpr(base.Pos, ir.OLEN, ns))) // n = len(s)
slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, ns) // ...s[:n+argc]
slice.SetSliceBounds(nil, ir.NewBinaryExpr(base.Pos, ir.OADD, nn, na), nil)
slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, ns, nil, ir.NewBinaryExpr(base.Pos, ir.OADD, nn, na), nil) // ...s[:n+argc]
slice.SetBounded(true)
l = append(l, ir.NewAssignStmt(base.Pos, ns, slice)) // s = s[:n+argc]
@ -407,9 +406,8 @@ func walkMakeSlice(n *ir.MakeExpr, init *ir.Nodes) ir.Node {
t = types.NewArray(t.Elem(), i) // [r]T
var_ := typecheck.Temp(t)
appendWalkStmt(init, ir.NewAssignStmt(base.Pos, var_, nil)) // zero temp
r := ir.NewSliceExpr(base.Pos, ir.OSLICE, var_) // arr[:l]
r.SetSliceBounds(nil, l, nil)
appendWalkStmt(init, ir.NewAssignStmt(base.Pos, var_, nil)) // zero temp
r := ir.NewSliceExpr(base.Pos, ir.OSLICE, var_, nil, l, nil) // arr[:l]
// The conv is necessary in case n.Type is named.
return walkExpr(typecheck.Expr(typecheck.Conv(r, n.Type())), init)
}
@ -438,7 +436,8 @@ func walkMakeSlice(n *ir.MakeExpr, init *ir.Nodes) ir.Node {
fn := typecheck.LookupRuntime(fnname)
m.Ptr = mkcall1(fn, types.Types[types.TUNSAFEPTR], init, reflectdata.TypePtr(t.Elem()), typecheck.Conv(len, argtype), typecheck.Conv(cap, argtype))
m.Ptr.MarkNonNil()
m.LenCap = []ir.Node{typecheck.Conv(len, types.Types[types.TINT]), typecheck.Conv(cap, types.Types[types.TINT])}
m.Len = typecheck.Conv(len, types.Types[types.TINT])
m.Cap = typecheck.Conv(cap, types.Types[types.TINT])
return walkExpr(typecheck.Expr(m), init)
}
@ -471,7 +470,8 @@ func walkMakeSliceCopy(n *ir.MakeExpr, init *ir.Nodes) ir.Node {
sh := ir.NewSliceHeaderExpr(base.Pos, nil, nil, nil, nil)
sh.Ptr = mkcall1(fn, types.Types[types.TUNSAFEPTR], init, size, typecheck.NodNil(), ir.NewBool(false))
sh.Ptr.MarkNonNil()
sh.LenCap = []ir.Node{length, length}
sh.Len = length
sh.Cap = length
sh.SetType(t)
s := typecheck.Temp(t)
@ -493,7 +493,8 @@ func walkMakeSliceCopy(n *ir.MakeExpr, init *ir.Nodes) ir.Node {
s := ir.NewSliceHeaderExpr(base.Pos, nil, nil, nil, nil)
s.Ptr = mkcall1(fn, types.Types[types.TUNSAFEPTR], init, reflectdata.TypePtr(t.Elem()), length, copylen, typecheck.Conv(copyptr, types.Types[types.TUNSAFEPTR]))
s.Ptr.MarkNonNil()
s.LenCap = []ir.Node{length, length}
s.Len = length
s.Cap = length
s.SetType(t)
return walkExpr(typecheck.Expr(s), init)
}


@ -155,7 +155,7 @@ func walkCompare(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
// Chose not to inline. Call equality function directly.
if !inline {
// eq algs take pointers; cmpl and cmpr must be addressable
if !ir.IsAssignable(cmpl) || !ir.IsAssignable(cmpr) {
if !ir.IsAddressable(cmpl) || !ir.IsAddressable(cmpr) {
base.Fatalf("arguments of comparison must be lvalues - %v %v", cmpl, cmpr)
}
@ -428,11 +428,11 @@ func eqFor(t *types.Type) (n ir.Node, needsize bool) {
sym := reflectdata.TypeSymPrefix(".eq", t)
n := typecheck.NewName(sym)
ir.MarkFunc(n)
n.SetType(typecheck.NewFuncType(nil, []*ir.Field{
ir.NewField(base.Pos, nil, nil, types.NewPtr(t)),
ir.NewField(base.Pos, nil, nil, types.NewPtr(t)),
}, []*ir.Field{
ir.NewField(base.Pos, nil, nil, types.Types[types.TBOOL]),
n.SetType(types.NewSignature(types.NoPkg, nil, []*types.Field{
types.NewField(base.Pos, nil, types.NewPtr(t)),
types.NewField(base.Pos, nil, types.NewPtr(t)),
}, []*types.Field{
types.NewField(base.Pos, nil, types.Types[types.TBOOL]),
}))
return n, false
}
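// Editor's sketch (not part of the diff): the shape eqFor now builds —
// func(*T, *T) bool — expressed with the standard library's go/types for
// illustration; the compiler-internal types.NewSignature used above differs
// in API but describes the same signature.
package main

import (
	"fmt"
	"go/token"
	"go/types"
)

func eqSigFor(t types.Type) *types.Signature {
	pt := types.NewPointer(t)
	params := types.NewTuple(
		types.NewVar(token.NoPos, nil, "p", pt),
		types.NewVar(token.NoPos, nil, "q", pt),
	)
	results := types.NewTuple(types.NewVar(token.NoPos, nil, "", types.Typ[types.Bool]))
	return types.NewSignatureType(nil, nil, nil, params, results, false)
}

func main() {
	fmt.Println(eqSigFor(types.Typ[types.Int64])) // func(p *int64, q *int64) bool
}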


@ -425,7 +425,7 @@ func slicelit(ctxt initContext, n *ir.CompLitExpr, var_ ir.Node, init *ir.Nodes)
}
// make slice out of heap (6)
a = ir.NewAssignStmt(base.Pos, var_, ir.NewSliceExpr(base.Pos, ir.OSLICE, vauto))
a = ir.NewAssignStmt(base.Pos, var_, ir.NewSliceExpr(base.Pos, ir.OSLICE, vauto, nil, nil, nil))
a = typecheck.Stmt(a)
a = orderStmtInPlace(a, map[string][]*ir.Name{})
@ -629,6 +629,7 @@ func oaslit(n *ir.AssignStmt, init *ir.Nodes) bool {
// not a special composite literal assignment
return false
}
x := n.X.(*ir.Name)
if !types.Identical(n.X.Type(), n.Y.Type()) {
// not a special composite literal assignment
return false
@ -640,7 +641,7 @@ func oaslit(n *ir.AssignStmt, init *ir.Nodes) bool {
return false
case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT, ir.OMAPLIT:
if refersToCommonName(n.X, n.Y) {
if ir.Any(n.Y, func(y ir.Node) bool { return ir.Uses(y, x) }) {
// not a special composite literal assignment
return false
}
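// Editor's sketch (not part of the diff): the ir.Any/ir.Uses idiom — "does
// expression e mention name x anywhere?" — restated with the standard
// library's go/ast for illustration; the compiler's ir walker plays the
// same role as ast.Inspect here.
package main

import (
	"fmt"
	"go/ast"
	"go/parser"
)

func uses(e ast.Expr, name string) bool {
	found := false
	ast.Inspect(e, func(n ast.Node) bool {
		if id, ok := n.(*ast.Ident); ok && id.Name == name {
			found = true
		}
		return !found // stop descending once found
	})
	return found
}

func main() {
	e, _ := parser.ParseExpr("s[i] + f(x)")
	fmt.Println(uses(e, "x"), uses(e, "y")) // true false
}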


@ -178,7 +178,7 @@ func walkConvInterface(n *ir.ConvExpr, init *ir.Nodes) ir.Node {
// with a non-interface, especially in a switch on interface value
// with non-interface cases, is not visible to order.stmt, so we
// have to fall back on allocating a temp here.
if !ir.IsAssignable(v) {
if !ir.IsAddressable(v) {
v = copyExpr(v, v.Type(), init)
}
v = typecheck.NodAddr(v)
@ -260,7 +260,7 @@ func walkStringToBytes(n *ir.ConvExpr, init *ir.Nodes) ir.Node {
}
// Slice the [n]byte to a []byte.
slice := ir.NewSliceExpr(n.Pos(), ir.OSLICEARR, p)
slice := ir.NewSliceExpr(n.Pos(), ir.OSLICEARR, p, nil, nil, nil)
slice.SetType(n.Type())
slice.SetTypecheck(1)
return walkExpr(slice, init)


@ -26,15 +26,6 @@ func walkExpr(n ir.Node, init *ir.Nodes) ir.Node {
return n
}
// Eagerly checkwidth all expressions for the back end.
if n.Type() != nil && !n.Type().WidthCalculated() {
switch n.Type().Kind() {
case types.TBLANK, types.TNIL, types.TIDEAL:
default:
types.CheckSize(n.Type())
}
}
if init == n.PtrInit() {
// not okay to use n->ninit when walking n,
// because we might replace n with some other node
@ -70,23 +61,14 @@ func walkExpr(n ir.Node, init *ir.Nodes) ir.Node {
n = walkExpr1(n, init)
// Expressions that are constant at run time but not
// considered const by the language spec are not turned into
// constants until walk. For example, if n is y%1 == 0, the
// walk of y%1 may have replaced it by 0.
// Check whether n with its updated args is itself now a constant.
t := n.Type()
n = typecheck.EvalConst(n)
if n.Type() != t {
base.Fatalf("evconst changed Type: %v had type %v, now %v", n, t, n.Type())
// Eagerly compute sizes of all expressions for the back end.
if typ := n.Type(); typ != nil && typ.Kind() != types.TBLANK && !typ.IsFuncArgStruct() {
types.CheckSize(typ)
}
if n.Op() == ir.OLITERAL {
n = typecheck.Expr(n)
if ir.IsConst(n, constant.String) {
// Emit string symbol now to avoid emitting
// any concurrently during the backend.
if v := n.Val(); v.Kind() == constant.String {
_ = staticdata.StringSym(n.Pos(), constant.StringVal(v))
}
_ = staticdata.StringSym(n.Pos(), constant.StringVal(n.Val()))
}
updateHasCall(n)
@ -106,7 +88,7 @@ func walkExpr1(n ir.Node, init *ir.Nodes) ir.Node {
base.Fatalf("walkexpr: switch 1 unknown op %+v", n.Op())
panic("unreachable")
case ir.ONONAME, ir.OGETG, ir.ONEWOBJ, ir.OMETHEXPR:
case ir.ONONAME, ir.OGETG, ir.ONEWOBJ:
return n
case ir.OTYPE, ir.ONAME, ir.OLITERAL, ir.ONIL, ir.ONAMEOFFSET:
@ -116,6 +98,11 @@ func walkExpr1(n ir.Node, init *ir.Nodes) ir.Node {
// stringsym for constant strings.
return n
case ir.OMETHEXPR:
// TODO(mdempsky): Do this right after type checking.
n := n.(*ir.MethodExpr)
return n.FuncName()
case ir.ONOT, ir.ONEG, ir.OPLUS, ir.OBITNOT, ir.OREAL, ir.OIMAG, ir.OSPTR, ir.OITAB, ir.OIDATA:
n := n.(*ir.UnaryExpr)
n.X = walkExpr(n.X, init)
@ -429,7 +416,7 @@ func safeExpr(n ir.Node, init *ir.Nodes) ir.Node {
}
// make a copy; must not be used as an lvalue
if ir.IsAssignable(n) {
if ir.IsAddressable(n) {
base.Fatalf("missing lvalue case in safeexpr: %v", n)
}
return cheapExpr(n, init)
@ -535,22 +522,28 @@ func walkCall1(n *ir.CallExpr, init *ir.Nodes) {
return // already walked
}
params := n.X.Type().Params()
// If this is a method call t.M(...),
// rewrite into a function call T.M(t, ...).
// TODO(mdempsky): Do this right after type checking.
if n.Op() == ir.OCALLMETH {
withRecv := make([]ir.Node, len(n.Args)+1)
dot := n.X.(*ir.SelectorExpr)
withRecv[0] = dot.X
copy(withRecv[1:], n.Args)
n.Args = withRecv
dot = ir.NewSelectorExpr(dot.Pos(), ir.OXDOT, ir.TypeNode(dot.X.Type()), dot.Selection.Sym)
n.SetOp(ir.OCALLFUNC)
n.X = typecheck.Expr(dot)
}
args := n.Args
params := n.X.Type().Params()
n.X = walkExpr(n.X, init)
walkExprList(args, init)
// If this is a method call, add the receiver at the beginning of the args.
if n.Op() == ir.OCALLMETH {
withRecv := make([]ir.Node, len(args)+1)
dot := n.X.(*ir.SelectorExpr)
withRecv[0] = dot.X
dot.X = nil
copy(withRecv[1:], args)
args = withRecv
}
// For any argument whose evaluation might require a function call,
// store that argument into a temporary variable,
// to prevent those calls from clobbering arguments already on the stack.
@ -559,16 +552,7 @@ func walkCall1(n *ir.CallExpr, init *ir.Nodes) {
for i, arg := range args {
updateHasCall(arg)
// Determine param type.
var t *types.Type
if n.Op() == ir.OCALLMETH {
if i == 0 {
t = n.X.Type().Recv().Type
} else {
t = params.Field(i - 1).Type
}
} else {
t = params.Field(i).Type
}
t := params.Field(i).Type
if base.Flag.Cfg.Instrumenting || fncall(arg, t) {
// make assignment of fncall to tempAt
tmp := typecheck.Temp(t)
@ -786,21 +770,19 @@ func walkSlice(n *ir.SliceExpr, init *ir.Nodes) ir.Node {
n.X = walkExpr(n.X, init)
}
low, high, max := n.SliceBounds()
low = walkExpr(low, init)
if low != nil && ir.IsZero(low) {
n.Low = walkExpr(n.Low, init)
if n.Low != nil && ir.IsZero(n.Low) {
// Reduce x[0:j] to x[:j] and x[0:j:k] to x[:j:k].
low = nil
n.Low = nil
}
high = walkExpr(high, init)
max = walkExpr(max, init)
n.SetSliceBounds(low, high, max)
n.High = walkExpr(n.High, init)
n.Max = walkExpr(n.Max, init)
if checkSlice {
n.X = walkCheckPtrAlignment(n.X.(*ir.ConvExpr), init, max)
n.X = walkCheckPtrAlignment(n.X.(*ir.ConvExpr), init, n.Max)
}
if n.Op().IsSlice3() {
if max != nil && max.Op() == ir.OCAP && ir.SameSafeExpr(n.X, max.(*ir.UnaryExpr).X) {
if n.Max != nil && n.Max.Op() == ir.OCAP && ir.SameSafeExpr(n.X, n.Max.(*ir.UnaryExpr).X) {
// Reduce x[i:j:cap(x)] to x[i:j].
if n.Op() == ir.OSLICE3 {
n.SetOp(ir.OSLICE)
@ -817,20 +799,18 @@ func walkSlice(n *ir.SliceExpr, init *ir.Nodes) ir.Node {
// walkSliceHeader walks an OSLICEHEADER node.
func walkSliceHeader(n *ir.SliceHeaderExpr, init *ir.Nodes) ir.Node {
n.Ptr = walkExpr(n.Ptr, init)
n.LenCap[0] = walkExpr(n.LenCap[0], init)
n.LenCap[1] = walkExpr(n.LenCap[1], init)
n.Len = walkExpr(n.Len, init)
n.Cap = walkExpr(n.Cap, init)
return n
}
// TODO(josharian): combine this with its caller and simplify
func reduceSlice(n *ir.SliceExpr) ir.Node {
low, high, max := n.SliceBounds()
if high != nil && high.Op() == ir.OLEN && ir.SameSafeExpr(n.X, high.(*ir.UnaryExpr).X) {
if n.High != nil && n.High.Op() == ir.OLEN && ir.SameSafeExpr(n.X, n.High.(*ir.UnaryExpr).X) {
// Reduce x[i:len(x)] to x[i:].
high = nil
n.High = nil
}
n.SetSliceBounds(low, high, max)
if (n.Op() == ir.OSLICE || n.Op() == ir.OSLICESTR) && low == nil && high == nil {
if (n.Op() == ir.OSLICE || n.Op() == ir.OSLICESTR) && n.Low == nil && n.High == nil {
// Reduce x[:] to x.
if base.Debug.Slice > 0 {
base.Warn("slice: omit slice operation")
@ -969,22 +949,13 @@ func usefield(n *ir.SelectorExpr) {
case ir.ODOT, ir.ODOTPTR:
break
}
if n.Sel == nil {
// No field name. This DOTPTR was built by the compiler for access
// to runtime data structures. Ignore.
return
}
t := n.X.Type()
if t.IsPtr() {
t = t.Elem()
}
field := n.Selection
if field == nil {
base.Fatalf("usefield %v %v without paramfld", n.X.Type(), n.Sel)
}
if field.Sym != n.Sel || field.Offset != n.Offset {
base.Fatalf("field inconsistency: %v,%v != %v,%v", field.Sym, field.Offset, n.Sel, n.Offset)
if field.Sym != n.Sel {
base.Fatalf("field inconsistency: %v != %v", field.Sym, n.Sel)
}
if !strings.Contains(field.Note, "go:\"track\"") {
return


@ -235,7 +235,7 @@ func (o *orderState) safeExpr(n ir.Node) ir.Node {
// because we emit explicit VARKILL instructions marking the end of those
// temporaries' lifetimes.
func isaddrokay(n ir.Node) bool {
return ir.IsAssignable(n) && (n.Op() != ir.ONAME || n.(*ir.Name).Class_ == ir.PEXTERN || ir.IsAutoTmp(n))
return ir.IsAddressable(n) && (n.Op() != ir.ONAME || n.(*ir.Name).Class_ == ir.PEXTERN || ir.IsAutoTmp(n))
}
// addrTemp ensures that n is okay to pass by address to runtime routines.
@ -843,12 +843,13 @@ func (o *orderState) stmt(n ir.Node) {
n.X = o.expr(n.X, nil)
orderBody := true
switch n.Type().Kind() {
xt := typecheck.RangeExprType(n.X.Type())
switch xt.Kind() {
default:
base.Fatalf("order.stmt range %v", n.Type())
case types.TARRAY, types.TSLICE:
if len(n.Vars) < 2 || ir.IsBlank(n.Vars[1]) {
if n.Value == nil || ir.IsBlank(n.Value) {
// for i := range x will only use x once, to compute len(x).
// No need to copy it.
break
@ -885,9 +886,10 @@ func (o *orderState) stmt(n ir.Node) {
// n.Prealloc is the temp for the iterator.
// hiter contains pointers and needs to be zeroed.
n.Prealloc = o.newTemp(reflectdata.MapIterType(n.Type()), true)
n.Prealloc = o.newTemp(reflectdata.MapIterType(xt), true)
}
o.exprListInPlace(n.Vars)
n.Key = o.exprInPlace(n.Key)
n.Value = o.exprInPlace(n.Value)
if orderBody {
orderBlock(&n.Body, o.free)
}
@ -912,7 +914,6 @@ func (o *orderState) stmt(n ir.Node) {
n := n.(*ir.SelectStmt)
t := o.markTemp()
for _, ncas := range n.Cases {
ncas := ncas.(*ir.CaseStmt)
r := ncas.Comm
ir.SetPos(ncas)
@ -994,7 +995,6 @@ func (o *orderState) stmt(n ir.Node) {
// Also insert any ninit queued during the previous loop.
// (The temporary cleaning must follow that ninit work.)
for _, cas := range n.Cases {
cas := cas.(*ir.CaseStmt)
orderBlock(&cas.Body, o.free)
cas.Body.Prepend(o.cleanTempNoPop(t)...)
@ -1034,13 +1034,12 @@ func (o *orderState) stmt(n ir.Node) {
n := n.(*ir.SwitchStmt)
if base.Debug.Libfuzzer != 0 && !hasDefaultCase(n) {
// Add empty "default:" case for instrumentation.
n.Cases.Append(ir.NewCaseStmt(base.Pos, nil, nil))
n.Cases = append(n.Cases, ir.NewCaseStmt(base.Pos, nil, nil))
}
t := o.markTemp()
n.Tag = o.expr(n.Tag, nil)
for _, ncas := range n.Cases {
ncas := ncas.(*ir.CaseStmt)
o.exprListInPlace(ncas.List)
orderBlock(&ncas.Body, o.free)
}
@ -1054,7 +1053,6 @@ func (o *orderState) stmt(n ir.Node) {
func hasDefaultCase(n *ir.SwitchStmt) bool {
for _, ncas := range n.Cases {
ncas := ncas.(*ir.CaseStmt)
if len(ncas.List) == 0 {
return true
}
@ -1296,14 +1294,9 @@ func (o *orderState) expr1(n, lhs ir.Node) ir.Node {
case ir.OSLICE, ir.OSLICEARR, ir.OSLICESTR, ir.OSLICE3, ir.OSLICE3ARR:
n := n.(*ir.SliceExpr)
n.X = o.expr(n.X, nil)
low, high, max := n.SliceBounds()
low = o.expr(low, nil)
low = o.cheapExpr(low)
high = o.expr(high, nil)
high = o.cheapExpr(high)
max = o.expr(max, nil)
max = o.cheapExpr(max)
n.SetSliceBounds(low, high, max)
n.Low = o.cheapExpr(o.expr(n.Low, nil))
n.High = o.cheapExpr(o.expr(n.High, nil))
n.Max = o.cheapExpr(o.expr(n.Max, nil))
if lhs == nil || lhs.Op() != ir.ONAME && !ir.SameSafeExpr(lhs, n.X) {
return o.copyExpr(n)
}


@ -56,20 +56,11 @@ func walkRange(nrange *ir.RangeStmt) ir.Node {
// hb: hidden bool
// a, v1, v2: not hidden aggregate, val 1, 2
t := nrange.Type()
a := nrange.X
t := typecheck.RangeExprType(a.Type())
lno := ir.SetPos(a)
var v1, v2 ir.Node
l := len(nrange.Vars)
if l > 0 {
v1 = nrange.Vars[0]
}
if l > 1 {
v2 = nrange.Vars[1]
}
v1, v2 := nrange.Key, nrange.Value
if ir.IsBlank(v2) {
v2 = nil
@ -121,7 +112,7 @@ func walkRange(nrange *ir.RangeStmt) ir.Node {
}
// for v1, v2 := range ha { body }
if cheapComputableIndex(nrange.Type().Elem().Width) {
if cheapComputableIndex(t.Elem().Width) {
// v1, v2 = hv1, ha[hv1]
tmp := ir.NewIndexExpr(base.Pos, ha, hv1)
tmp.SetBounded(true)
@ -150,7 +141,7 @@ func walkRange(nrange *ir.RangeStmt) ir.Node {
ifGuard.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, hv1, hn)
nfor.SetOp(ir.OFORUNTIL)
hp := typecheck.Temp(types.NewPtr(nrange.Type().Elem()))
hp := typecheck.Temp(types.NewPtr(t.Elem()))
tmp := ir.NewIndexExpr(base.Pos, ha, ir.NewInt(0))
tmp.SetBounded(true)
init = append(init, ir.NewAssignStmt(base.Pos, hp, typecheck.NodAddr(tmp)))
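As orientation for the hunks above: for slices and arrays, walkRange lowers the range loop onto hidden index temporaries. A rough source-level sketch of the lowering (hv1 and hn mirror the hidden variables named in the comments; the real transformation operates on IR, not source text):

package main

import "fmt"

func main() {
	ha := []string{"a", "b", "c"}
	// for v1, v2 := range ha { ... } becomes, roughly:
	hv1, hn := 0, len(ha)
	for ; hv1 < hn; hv1++ {
		v1, v2 := hv1, ha[hv1] // cheap index, so no pointer temp needed
		fmt.Println(v1, v2)
	}
}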
@ -343,15 +334,12 @@ func isMapClear(n *ir.RangeStmt) bool {
return false
}
if n.Op() != ir.ORANGE || n.Type().Kind() != types.TMAP || len(n.Vars) != 1 {
return false
}
k := n.Vars[0]
if k == nil || ir.IsBlank(k) {
t := n.X.Type()
if n.Op() != ir.ORANGE || t.Kind() != types.TMAP || n.Key == nil || n.Value != nil {
return false
}
k := n.Key
// Require k to be a new variable name.
if !ir.DeclaredBy(k, n) {
return false
@ -372,7 +360,7 @@ func isMapClear(n *ir.RangeStmt) bool {
}
// Keys where equality is not reflexive can not be deleted from maps.
if !types.IsReflexive(m.Type().Key()) {
if !types.IsReflexive(t.Key()) {
return false
}
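The idiom isMapClear matches, written out in plain Go; when recognized, the compiler replaces the whole loop with a single runtime map-clear call rather than deleting keys one at a time:

package main

import "fmt"

func main() {
	m := map[string]int{"a": 1, "b": 2}
	// k must be a fresh variable declared by the range statement,
	// the value must be absent, and delete's argument must be k.
	for k := range m {
		delete(m, k)
	}
	fmt.Println(len(m)) // 0
}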
@ -428,7 +416,7 @@ func arrayClear(loop *ir.RangeStmt, v1, v2, a ir.Node) ir.Node {
return nil
}
elemsize := loop.Type().Elem().Width
elemsize := typecheck.RangeExprType(loop.X.Type()).Elem().Width
if elemsize <= 0 || !ir.IsZero(stmt.Y) {
return nil
}
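arrayClear is the analogous recognition for slices and arrays: a loop storing the zero value into every element (the ir.IsZero(stmt.Y) check above) can be lowered to one bulk memory clear. In source form:

package main

import "fmt"

func main() {
	s := []int{1, 2, 3, 4}
	// This loop matches the arrayClear pattern.
	for i := range s {
		s[i] = 0
	}
	fmt.Println(s) // [0 0 0 0]
}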


@ -21,7 +21,7 @@ func walkSelect(sel *ir.SelectStmt) {
sel.PtrInit().Set(nil)
init = append(init, walkSelectCases(sel.Cases)...)
sel.Cases = ir.Nodes{}
sel.Cases = nil
sel.Compiled.Set(init)
walkStmtList(sel.Compiled)
@ -29,7 +29,7 @@ func walkSelect(sel *ir.SelectStmt) {
base.Pos = lno
}
func walkSelectCases(cases ir.Nodes) []ir.Node {
func walkSelectCases(cases []*ir.CommClause) []ir.Node {
ncas := len(cases)
sellineno := base.Pos
@ -40,7 +40,7 @@ func walkSelectCases(cases ir.Nodes) []ir.Node {
// optimization: one-case select: single op.
if ncas == 1 {
cas := cases[0].(*ir.CaseStmt)
cas := cases[0]
ir.SetPos(cas)
l := cas.Init()
if cas.Comm != nil { // not default:
@ -73,9 +73,8 @@ func walkSelectCases(cases ir.Nodes) []ir.Node {
// convert case value arguments to addresses.
// this rewrite is used by both the general code and the next optimization.
var dflt *ir.CaseStmt
var dflt *ir.CommClause
for _, cas := range cases {
cas := cas.(*ir.CaseStmt)
ir.SetPos(cas)
n := cas.Comm
if n == nil {
@ -99,9 +98,9 @@ func walkSelectCases(cases ir.Nodes) []ir.Node {
// optimization: two-case select but one is default: single non-blocking op.
if ncas == 2 && dflt != nil {
cas := cases[0].(*ir.CaseStmt)
cas := cases[0]
if cas == dflt {
cas = cases[1].(*ir.CaseStmt)
cas = cases[1]
}
n := cas.Comm
@ -147,7 +146,7 @@ func walkSelectCases(cases ir.Nodes) []ir.Node {
if dflt != nil {
ncas--
}
casorder := make([]*ir.CaseStmt, ncas)
casorder := make([]*ir.CommClause, ncas)
nsends, nrecvs := 0, 0
var init []ir.Node
@ -170,7 +169,6 @@ func walkSelectCases(cases ir.Nodes) []ir.Node {
// register cases
for _, cas := range cases {
cas := cas.(*ir.CaseStmt)
ir.SetPos(cas)
init = append(init, cas.Init()...)
@ -244,7 +242,7 @@ func walkSelectCases(cases ir.Nodes) []ir.Node {
}
// dispatch cases
dispatch := func(cond ir.Node, cas *ir.CaseStmt) {
dispatch := func(cond ir.Node, cas *ir.CommClause) {
cond = typecheck.Expr(cond)
cond = typecheck.DefaultLit(cond, nil)
@ -287,9 +285,9 @@ var scase *types.Type
// Keep in sync with src/runtime/select.go.
func scasetype() *types.Type {
if scase == nil {
scase = typecheck.NewStructType([]*ir.Field{
ir.NewField(base.Pos, typecheck.Lookup("c"), nil, types.Types[types.TUNSAFEPTR]),
ir.NewField(base.Pos, typecheck.Lookup("elem"), nil, types.Types[types.TUNSAFEPTR]),
scase = types.NewStruct(types.NoPkg, []*types.Field{
types.NewField(base.Pos, typecheck.Lookup("c"), types.Types[types.TUNSAFEPTR]),
types.NewField(base.Pos, typecheck.Lookup("elem"), types.Types[types.TUNSAFEPTR]),
})
scase.SetNoalg(true)
}
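For context on the two-case optimization above: a select with one communication case plus a default is compiled into a single non-blocking channel operation (runtime selectnbsend/selectnbrecv) instead of the full selectgo machinery. The construct in question, at the source level:

package main

import "fmt"

func main() {
	c := make(chan int, 1)
	select {
	case c <- 1:
		fmt.Println("sent")
	default:
		fmt.Println("would block")
	}
	fmt.Println(<-c) // 1
}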


@ -71,7 +71,6 @@ func walkSwitchExpr(sw *ir.SwitchStmt) {
var defaultGoto ir.Node
var body ir.Nodes
for _, ncase := range sw.Cases {
ncase := ncase.(*ir.CaseStmt)
label := typecheck.AutoLabel(".s")
jmp := ir.NewBranchStmt(ncase.Pos(), ir.OGOTO, label)
@ -96,7 +95,7 @@ func walkSwitchExpr(sw *ir.SwitchStmt) {
body.Append(br)
}
}
sw.Cases.Set(nil)
sw.Cases = nil
if defaultGoto == nil {
br := ir.NewBranchStmt(base.Pos, ir.OBREAK, nil)
@ -259,7 +258,6 @@ func allCaseExprsAreSideEffectFree(sw *ir.SwitchStmt) bool {
// enough.
for _, ncase := range sw.Cases {
ncase := ncase.(*ir.CaseStmt)
for _, v := range ncase.List {
if v.Op() != ir.OLITERAL {
return false
@ -318,26 +316,14 @@ func walkSwitchType(sw *ir.SwitchStmt) {
sw.Compiled.Append(ifNil)
// Load hash from type or itab.
dotHash := ir.NewSelectorExpr(base.Pos, ir.ODOTPTR, itab, nil)
dotHash.SetType(types.Types[types.TUINT32])
dotHash.SetTypecheck(1)
if s.facename.Type().IsEmptyInterface() {
dotHash.Offset = int64(2 * types.PtrSize) // offset of hash in runtime._type
} else {
dotHash.Offset = int64(2 * types.PtrSize) // offset of hash in runtime.itab
}
dotHash.SetBounded(true) // guaranteed not to fault
dotHash := typeHashFieldOf(base.Pos, itab)
s.hashname = copyExpr(dotHash, dotHash.Type(), &sw.Compiled)
br := ir.NewBranchStmt(base.Pos, ir.OBREAK, nil)
var defaultGoto, nilGoto ir.Node
var body ir.Nodes
for _, ncase := range sw.Cases {
ncase := ncase.(*ir.CaseStmt)
var caseVar ir.Node
if len(ncase.Vars) != 0 {
caseVar = ncase.Vars[0]
}
caseVar := ncase.Var
// For single-type cases with an interface type,
// we initialize the case variable as part of the type assertion.
@ -395,7 +381,7 @@ func walkSwitchType(sw *ir.SwitchStmt) {
body.Append(ncase.Body...)
body.Append(br)
}
sw.Cases.Set(nil)
sw.Cases = nil
if defaultGoto == nil {
defaultGoto = br
@ -412,6 +398,32 @@ func walkSwitchType(sw *ir.SwitchStmt) {
walkStmtList(sw.Compiled)
}
// typeHashFieldOf returns an expression to select the type hash field
// from an interface's descriptor word (whether a *runtime._type or
// *runtime.itab pointer).
func typeHashFieldOf(pos src.XPos, itab *ir.UnaryExpr) *ir.SelectorExpr {
if itab.Op() != ir.OITAB {
base.Fatalf("expected OITAB, got %v", itab.Op())
}
var hashField *types.Field
if itab.X.Type().IsEmptyInterface() {
// runtime._type's hash field
if rtypeHashField == nil {
rtypeHashField = runtimeField("hash", int64(2*types.PtrSize), types.Types[types.TUINT32])
}
hashField = rtypeHashField
} else {
// runtime.itab's hash field
if itabHashField == nil {
itabHashField = runtimeField("hash", int64(2*types.PtrSize), types.Types[types.TUINT32])
}
hashField = itabHashField
}
return boundedDotPtr(pos, itab, hashField)
}
var rtypeHashField, itabHashField *types.Field
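At the source level, this machinery serves ordinary type switches: each case gets its own case variable (ncase.Var above), and dispatch compares the interface's type hash as loaded by typeHashFieldOf. A small example of the construct being compiled:

package main

import "fmt"

func describe(i interface{}) string {
	switch v := i.(type) { // v is the per-case variable
	case int:
		return fmt.Sprintf("int %d", v)
	case string:
		return fmt.Sprintf("string %q", v)
	case nil:
		return "nil"
	default:
		return fmt.Sprintf("other %T", v)
	}
}

func main() {
	fmt.Println(describe(42), describe("hi"), describe(nil))
}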
// A typeSwitch walks a type switch.
type typeSwitch struct {
// Temporary variables (i.e., ONAMEs) used by type switch dispatch logic:


@ -539,12 +539,30 @@ func calcHasCall(n ir.Node) bool {
// itabType loads the _type field from a runtime.itab struct.
func itabType(itab ir.Node) ir.Node {
typ := ir.NewSelectorExpr(base.Pos, ir.ODOTPTR, itab, nil)
typ.SetType(types.NewPtr(types.Types[types.TUINT8]))
typ.SetTypecheck(1)
typ.Offset = int64(types.PtrSize) // offset of _type in runtime.itab
typ.SetBounded(true) // guaranteed not to fault
return typ
if itabTypeField == nil {
// runtime.itab's _type field
itabTypeField = runtimeField("_type", int64(types.PtrSize), types.NewPtr(types.Types[types.TUINT8]))
}
return boundedDotPtr(base.Pos, itab, itabTypeField)
}
var itabTypeField *types.Field
// boundedDotPtr returns a selector expression representing ptr.field
// and omits nil-pointer checks for ptr.
func boundedDotPtr(pos src.XPos, ptr ir.Node, field *types.Field) *ir.SelectorExpr {
sel := ir.NewSelectorExpr(pos, ir.ODOTPTR, ptr, field.Sym)
sel.Selection = field
sel.SetType(field.Type)
sel.SetTypecheck(1)
sel.SetBounded(true) // guaranteed not to fault
return sel
}
func runtimeField(name string, offset int64, typ *types.Type) *types.Field {
f := types.NewField(src.NoXPos, ir.Pkgs.Runtime.Lookup(name), typ)
f.Offset = offset
return f
}
// ifaceData loads the data field from an interface.


@ -1192,6 +1192,7 @@
// Require []Require
// Exclude []Module
// Replace []Replace
// Retract []Retract
// }
//
// type Require struct {


@ -95,6 +95,7 @@ writing it back to go.mod. The JSON output corresponds to these Go types:
Require []Require
Exclude []Module
Replace []Replace
Retract []Retract
}
type Require struct {
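For reference, the retract directive that the new Retract field reports, as it appears in a go.mod file (module path and rationale comments here are hypothetical):

module example.com/m

go 1.16

retract (
	v1.0.1 // published accidentally
	[v1.1.0, v1.1.5] // contains a severe bug
)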


@ -28,6 +28,11 @@ import (
//
var buildList []module.Version
// additionalExplicitRequirements is a list of module paths for which
// WriteGoMod should record explicit requirements, even if they would be
// selected without those requirements. Each path must also appear in buildList.
var additionalExplicitRequirements []string
// capVersionSlice returns s with its cap reduced to its length.
func capVersionSlice(s []module.Version) []module.Version {
return s[:len(s):len(s)]
@ -121,6 +126,12 @@ func EditBuildList(ctx context.Context, add, mustSelect []module.Version) error
if !inconsistent {
buildList = final
additionalExplicitRequirements = make([]string, 0, len(mustSelect))
for _, m := range mustSelect {
if m.Version != "none" {
additionalExplicitRequirements = append(additionalExplicitRequirements, m.Path)
}
}
return nil
}


@ -15,6 +15,7 @@ import (
"os"
"path"
"path/filepath"
"sort"
"strconv"
"strings"
"sync"
@ -27,6 +28,7 @@ import (
"cmd/go/internal/modfetch"
"cmd/go/internal/mvs"
"cmd/go/internal/search"
"cmd/go/internal/str"
"golang.org/x/mod/modfile"
"golang.org/x/mod/module"
@ -845,13 +847,15 @@ func AllowWriteGoMod() {
// MinReqs returns a Reqs with minimal additional dependencies of Target,
// as will be written to go.mod.
func MinReqs() mvs.Reqs {
var retain []string
retain := append([]string{}, additionalExplicitRequirements...)
for _, m := range buildList[1:] {
_, explicit := index.require[m]
if explicit || loaded.direct[m.Path] {
retain = append(retain, m.Path)
}
}
sort.Strings(retain)
str.Uniq(&retain)
min, err := mvs.Req(Target, retain, &mvsReqs{buildList: buildList})
if err != nil {
base.Fatalf("go: %v", err)


@ -863,12 +863,21 @@ func loadFromRoots(params loaderParams) *loader {
for _, pkg := range ld.pkgs {
if pkg.mod == Target {
for _, dep := range pkg.imports {
if dep.mod.Path != "" {
if dep.mod.Path != "" && dep.mod.Path != Target.Path && index != nil {
_, explicit := index.require[dep.mod]
if allowWriteGoMod && cfg.BuildMod == "readonly" && !explicit {
// TODO(#40775): attach error to package instead of using
// base.Errorf. Ideally, 'go list' should not fail because of this,
// but today, LoadPackages calls WriteGoMod unconditionally, which
// would fail with a less clear message.
base.Errorf("go: %[1]s: package %[2]s imported from implicitly required module; try 'go get -d %[1]s' to add missing requirements", pkg.path, dep.path)
}
ld.direct[dep.mod.Path] = true
}
}
}
}
base.ExitIfErrors()
// If we didn't scan all of the imports from the main module, or didn't use
// imports.AnyTags, then we didn't necessarily load every package that


@ -21,6 +21,7 @@ import (
"cmd/go/internal/imports"
"cmd/go/internal/modfetch"
"cmd/go/internal/search"
"cmd/go/internal/str"
"cmd/go/internal/trace"
"golang.org/x/mod/module"
@ -1005,13 +1006,8 @@ func (rr *replacementRepo) Versions(prefix string) ([]string, error) {
sort.Slice(versions, func(i, j int) bool {
return semver.Compare(versions[i], versions[j]) < 0
})
uniq := versions[:1]
for _, v := range versions {
if v != uniq[len(uniq)-1] {
uniq = append(uniq, v)
}
}
return uniq, nil
str.Uniq(&versions)
return versions, nil
}
func (rr *replacementRepo) Stat(rev string) (*modfetch.RevInfo, error) {


@ -96,6 +96,20 @@ func Contains(x []string, s string) bool {
return false
}
// Uniq removes consecutive duplicate strings from ss.
func Uniq(ss *[]string) {
if len(*ss) <= 1 {
return
}
uniq := (*ss)[:1]
for _, s := range *ss {
if s != uniq[len(uniq)-1] {
uniq = append(uniq, s)
}
}
*ss = uniq
}
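Uniq only collapses consecutive duplicates, which is why MinReqs above sorts with sort.Strings before calling it. A self-contained usage sketch (with a local copy of the helper so it runs standalone):

package main

import (
	"fmt"
	"sort"
)

// uniq mirrors str.Uniq: it removes consecutive duplicates in place.
func uniq(ss *[]string) {
	if len(*ss) <= 1 {
		return
	}
	out := (*ss)[:1]
	for _, s := range *ss {
		if s != out[len(out)-1] {
			out = append(out, s)
		}
	}
	*ss = out
}

func main() {
	paths := []string{"b", "a", "b", "a"}
	sort.Strings(paths) // unsorted input would keep non-adjacent duplicates
	uniq(&paths)
	fmt.Println(paths) // [a b]
}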
func isSpaceByte(c byte) bool {
return c == ' ' || c == '\t' || c == '\n' || c == '\r'
}


@ -0,0 +1,88 @@
cp go.mod.orig go.mod
# If we list a package in an implicit dependency imported from the main module,
# we should get an error because the dependency should have an explicit
# requirement.
go list -m indirect-with-pkg
stdout '^indirect-with-pkg v1.0.0 => ./indirect-with-pkg$'
! go list ./use-indirect
stderr '^go: m/use-indirect: package indirect-with-pkg imported from implicitly required module; try ''go get -d m/use-indirect'' to add missing requirements$'
# We can promote the implicit requirement by getting the importing package,
# as hinted.
go get -d m/use-indirect
cmp go.mod go.mod.use
cp go.mod.orig go.mod
# We can also promote implicit requirements using 'go get' on them, or their
# packages. This gives us "// indirect" requirements, since 'go get' doesn't
# know they're needed by the main module. See #43131 for the rationale.
go get -d indirect-with-pkg indirect-without-pkg
cmp go.mod go.mod.indirect
-- go.mod.orig --
module m
go 1.16
require direct v1.0.0
replace (
direct v1.0.0 => ./direct
indirect-with-pkg v1.0.0 => ./indirect-with-pkg
indirect-without-pkg v1.0.0 => ./indirect-without-pkg
)
-- go.mod.use --
module m
go 1.16
require (
direct v1.0.0
indirect-with-pkg v1.0.0
)
replace (
direct v1.0.0 => ./direct
indirect-with-pkg v1.0.0 => ./indirect-with-pkg
indirect-without-pkg v1.0.0 => ./indirect-without-pkg
)
-- go.mod.indirect --
module m
go 1.16
require (
direct v1.0.0
indirect-with-pkg v1.0.0 // indirect
indirect-without-pkg v1.0.0 // indirect
)
replace (
direct v1.0.0 => ./direct
indirect-with-pkg v1.0.0 => ./indirect-with-pkg
indirect-without-pkg v1.0.0 => ./indirect-without-pkg
)
-- use-indirect/use-indirect.go --
package use
import _ "indirect-with-pkg"
-- direct/go.mod --
module direct
go 1.16
require (
indirect-with-pkg v1.0.0
indirect-without-pkg v1.0.0
)
-- indirect-with-pkg/go.mod --
module indirect-with-pkg
go 1.16
-- indirect-with-pkg/p.go --
package p
-- indirect-without-pkg/go.mod --
module indirect-without-pkg
go 1.16


@ -107,8 +107,8 @@ func TestDateParsing(t *testing.T) {
time.Date(1997, 11, 20, 9, 55, 6, 0, time.FixedZone("", -6*60*60)),
},
{
"Thu, 20 Nov 1997 09:55:06 MDT (MDT)",
time.Date(1997, 11, 20, 9, 55, 6, 0, time.FixedZone("MDT", 0)),
"Thu, 20 Nov 1997 09:55:06 GMT (GMT)",
time.Date(1997, 11, 20, 9, 55, 6, 0, time.UTC),
},
{
"Fri, 21 Nov 1997 09:55:06 +1300 (TOT)",
@ -278,8 +278,8 @@ func TestDateParsingCFWS(t *testing.T) {
true,
},
{
"Fri, 21 Nov 1997 09:55:06 MDT (MDT)",
time.Date(1997, 11, 21, 9, 55, 6, 0, time.FixedZone("MDT", 0)),
"Fri, 21 Nov 1997 09:55:06 GMT (GMT)",
time.Date(1997, 11, 21, 9, 55, 6, 0, time.UTC),
true,
},
}


@ -0,0 +1,42 @@
// Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build linux
package signal
import (
"os"
"syscall"
"testing"
"time"
)
const prSetKeepCaps = 8
// This test validates that syscall.AllThreadsSyscall() can reliably
// reach all 'm' (threads) of the nocgo runtime even when one thread
// is blocked waiting to receive signals from the kernel. This monitors
// for a regression vs. the fix for #43149.
func TestAllThreadsSyscallSignals(t *testing.T) {
if _, _, err := syscall.AllThreadsSyscall(syscall.SYS_PRCTL, prSetKeepCaps, 0, 0); err == syscall.ENOTSUP {
t.Skip("AllThreadsSyscall disabled with cgo")
}
sig := make(chan os.Signal, 1)
Notify(sig, os.Interrupt)
for i := 0; i <= 100; i++ {
if _, _, errno := syscall.AllThreadsSyscall(syscall.SYS_PRCTL, prSetKeepCaps, uintptr(i&1), 0); errno != 0 {
t.Fatalf("[%d] failed to set KEEP_CAPS=%d: %v", i, i&1, errno)
}
}
select {
case <-time.After(10 * time.Millisecond):
case <-sig:
t.Fatal("unexpected signal")
}
Stop(sig)
}


@ -12,7 +12,7 @@ static void *threadentry(void*);
static void (*setg_gcc)(void*);
// This will be set in gcc_android.c for android-specific customization.
void (*x_cgo_inittls)(void **tlsg, void **tlsbase);
void (*x_cgo_inittls)(void **tlsg, void **tlsbase) __attribute__((common));
void
x_cgo_init(G *g, void (*setg)(void*), void **tlsg, void **tlsbase)


@ -14,7 +14,7 @@ static void* threadentry(void*);
static void (*setg_gcc)(void*);
// This will be set in gcc_android.c for android-specific customization.
void (*x_cgo_inittls)(void **tlsg, void **tlsbase);
void (*x_cgo_inittls)(void **tlsg, void **tlsbase) __attribute__((common));
void
x_cgo_init(G *g, void (*setg)(void*), void **tlsg, void **tlsbase)


@ -10,7 +10,7 @@
static void *threadentry(void*);
void (*x_cgo_inittls)(void **tlsg, void **tlsbase);
void (*x_cgo_inittls)(void **tlsg, void **tlsbase) __attribute__((common));
static void (*setg_gcc)(void*);
void


@ -12,7 +12,7 @@
static void *threadentry(void*);
void (*x_cgo_inittls)(void **tlsg, void **tlsbase);
void (*x_cgo_inittls)(void **tlsg, void **tlsbase) __attribute__((common));
static void (*setg_gcc)(void*);
void


@ -1201,12 +1201,12 @@ type TimeHistogram timeHistogram
// Counts returns the counts for the given bucket, subBucket indices.
// Returns true if the bucket was valid, otherwise returns the counts
// for the overflow bucket and false.
// for the underflow bucket and false.
func (th *TimeHistogram) Count(bucket, subBucket uint) (uint64, bool) {
t := (*timeHistogram)(th)
i := bucket*TimeHistNumSubBuckets + subBucket
if i >= uint(len(t.counts)) {
return t.overflow, false
return t.underflow, false
}
return t.counts[i], true
}


@ -69,17 +69,15 @@ const (
// for concurrent use. It is also safe to read all the values
// atomically.
type timeHistogram struct {
counts [timeHistNumSuperBuckets * timeHistNumSubBuckets]uint64
overflow uint64
counts [timeHistNumSuperBuckets * timeHistNumSubBuckets]uint64
underflow uint64
}
// record adds the given duration to the distribution.
//
// Although the duration is an int64 to facilitate ease-of-use
// with e.g. nanotime, the duration must be non-negative.
func (h *timeHistogram) record(duration int64) {
if duration < 0 {
throw("timeHistogram encountered negative duration")
atomic.Xadd64(&h.underflow, 1)
return
}
// The index of the exponential bucket is just the index
// of the highest set bit adjusted for how many bits we
@ -92,15 +90,17 @@ func (h *timeHistogram) record(duration int64) {
superBucket = uint(sys.Len64(uint64(duration))) - timeHistSubBucketBits
if superBucket*timeHistNumSubBuckets >= uint(len(h.counts)) {
// The bucket index we got is larger than what we support, so
// add into the special overflow bucket.
atomic.Xadd64(&h.overflow, 1)
return
// include this count in the highest bucket, which extends to
// infinity.
superBucket = timeHistNumSuperBuckets - 1
subBucket = timeHistNumSubBuckets - 1
} else {
// The linear subbucket index is just the timeHistSubBucketsBits
// bits after the top bit. To extract that value, shift down
// the duration such that we leave the top bit and the next bits
// intact, then extract the index.
subBucket = uint((duration >> (superBucket - 1)) % timeHistNumSubBuckets)
}
// The linear subbucket index is just the timeHistSubBucketsBits
// bits after the top bit. To extract that value, shift down
// the duration such that we leave the top bit and the next bits
// intact, then extract the index.
subBucket = uint((duration >> (superBucket - 1)) % timeHistNumSubBuckets)
} else {
subBucket = uint(duration)
}
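The bucket selection above can be read as: super-bucket = position of the highest set bit, sub-bucket = the next few bits after it, saturating into the last bucket instead of overflowing. A standalone sketch of that indexing (constants are illustrative stand-ins, not the runtime's actual values):

package main

import (
	"fmt"
	"math/bits"
)

const (
	subBucketBits   = 4
	numSubBuckets   = 1 << subBucketBits // 16
	numSuperBuckets = 10
)

func bucketOf(duration int64) (super, sub uint) {
	if duration < numSubBuckets {
		return 0, uint(duration) // small durations are stored exactly
	}
	super = uint(bits.Len64(uint64(duration))) - subBucketBits
	if super >= numSuperBuckets {
		// Saturate into the highest bucket, which now extends
		// to infinity, rather than using a separate overflow counter.
		return numSuperBuckets - 1, numSubBuckets - 1
	}
	sub = uint((duration >> (super - 1)) % numSubBuckets)
	return super, sub
}

func main() {
	for _, d := range []int64{3, 17, 100, 1 << 40} {
		s, b := bucketOf(d)
		fmt.Printf("%d -> bucket (%d, %d)\n", d, s, b)
	}
}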
@ -128,7 +128,7 @@ func timeHistogramMetricsBuckets() []float64 {
// index to combine it with the bucketMin.
subBucketShift := uint(0)
if i > 1 {
// The first two buckets are exact with respect to integers,
// The first two super buckets are exact with respect to integers,
// so we'll never have to shift the sub-bucket index. Thereafter,
// we shift up by 1 with each subsequent bucket.
subBucketShift = uint(i - 2)


@ -5,6 +5,7 @@
package runtime_test
import (
"math"
. "runtime"
"testing"
)
@ -32,8 +33,8 @@ func TestTimeHistogram(t *testing.T) {
h.Record(base + v)
}
}
// Hit the overflow bucket.
h.Record(int64(^uint64(0) >> 1))
// Hit the underflow bucket.
h.Record(int64(-1))
// Check to make sure there's exactly one count in each
// bucket.
@ -41,7 +42,7 @@ func TestTimeHistogram(t *testing.T) {
for j := uint(0); j < TimeHistNumSubBuckets; j++ {
c, ok := h.Count(i, j)
if !ok {
t.Errorf("hit overflow bucket unexpectedly: (%d, %d)", i, j)
t.Errorf("hit underflow bucket unexpectedly: (%d, %d)", i, j)
} else if c != 1 {
t.Errorf("bucket (%d, %d) has count that is not 1: %d", i, j, c)
}
@ -49,10 +50,21 @@ func TestTimeHistogram(t *testing.T) {
}
c, ok := h.Count(TimeHistNumSuperBuckets, 0)
if ok {
t.Errorf("expected to hit overflow bucket: (%d, %d)", TimeHistNumSuperBuckets, 0)
t.Errorf("expected to hit underflow bucket: (%d, %d)", TimeHistNumSuperBuckets, 0)
}
if c != 1 {
t.Errorf("overflow bucket has count that is not 1: %d", c)
t.Errorf("underflow bucket has count that is not 1: %d", c)
}
// Check overflow behavior.
// By hitting a high value, we should just be adding into the highest bucket.
h.Record(math.MaxInt64)
c, ok = h.Count(TimeHistNumSuperBuckets-1, TimeHistNumSubBuckets-1)
if !ok {
t.Error("hit underflow bucket in highest bucket unexpectedly")
} else if c != 2 {
t.Errorf("highest has count that is not 2: %d", c)
}
dummyTimeHistogram = TimeHistogram{}
}


@ -43,7 +43,18 @@ func initMetrics() {
}
sizeClassBuckets = make([]float64, _NumSizeClasses)
for i := range sizeClassBuckets {
sizeClassBuckets[i] = float64(class_to_size[i])
// Size classes have an inclusive upper-bound
// and exclusive lower bound (e.g. 48-byte size class is
// (32, 48]) whereas we want an inclusive lower-bound
// and exclusive upper-bound (e.g. 48-byte size class is
// [33, 49). We can achieve this by shifting all bucket
// boundaries up by 1.
//
// Also, a float64 can precisely represent integers with
// value up to 2^53 and size classes are relatively small
// (nowhere near 2^48 even) so this will give us exact
// boundaries.
sizeClassBuckets[i] = float64(class_to_size[i] + 1)
}
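A worked instance of the shift described in the comment (with made-up size classes, not the real class_to_size table):

package main

import "fmt"

func main() {
	classToSize := []uint32{0, 8, 16, 24, 32, 48}
	buckets := make([]float64, len(classToSize))
	for i := range buckets {
		// Shift each boundary up by 1: the 48-byte class then
		// covers [33, 49) instead of (32, 48].
		buckets[i] = float64(classToSize[i] + 1)
	}
	fmt.Println(buckets) // [1 9 17 25 33 49]
}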
timeHistBuckets = timeHistogramMetricsBuckets()
metrics = map[string]metricData{
@ -105,9 +116,9 @@ func initMetrics() {
"/gc/pauses:seconds": {
compute: func(_ *statAggregate, out *metricValue) {
hist := out.float64HistOrInit(timeHistBuckets)
hist.counts[len(hist.counts)-1] = atomic.Load64(&memstats.gcPauseDist.overflow)
hist.counts[0] = atomic.Load64(&memstats.gcPauseDist.underflow)
for i := range hist.buckets {
hist.counts[i] = atomic.Load64(&memstats.gcPauseDist.counts[i])
hist.counts[i+1] = atomic.Load64(&memstats.gcPauseDist.counts[i])
}
},
},
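Callers see this layout through the public runtime/metrics API. A minimal reader for the histogram populated above (only lengths are printed; per this change, Counts[0] is fed from the internal underflow counter):

package main

import (
	"fmt"
	"runtime/metrics"
)

func main() {
	sample := []metrics.Sample{{Name: "/gc/pauses:seconds"}}
	metrics.Read(sample)
	h := sample[0].Value.Float64Histogram()
	// len(Buckets) == len(Counts)+1: Counts[i] counts values in
	// the range [Buckets[i], Buckets[i+1]).
	fmt.Println(len(h.Counts), len(h.Buckets))
}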


@ -154,6 +154,12 @@ func TestReadMetricsConsistency(t *testing.T) {
if totalVirtual.got != totalVirtual.want {
t.Errorf(`"/memory/classes/total:bytes" does not match sum of /memory/classes/**: got %d, want %d`, totalVirtual.got, totalVirtual.want)
}
if objects.alloc.Counts[0] > 0 {
t.Error("found counts for objects of non-positive size in allocs-by-size")
}
if objects.free.Counts[0] > 0 {
t.Error("found counts for objects of non-positive size in frees-by-size")
}
if len(objects.alloc.Buckets) != len(objects.free.Buckets) {
t.Error("allocs-by-size and frees-by-size buckets don't match in length")
} else if len(objects.alloc.Counts) != len(objects.free.Counts) {


@ -101,8 +101,7 @@ func gcMarkRootPrepare() {
// Gs may be created after this point, but it's okay that we
// ignore them because they begin life without any roots, so
// there's nothing to scan, and any roots they create during
// the concurrent phase will be scanned during mark
// termination.
// the concurrent phase will be caught by the write barrier.
work.nStackRoots = int(atomic.Loaduintptr(&allglen))
work.markrootNext = 0


@ -72,7 +72,7 @@ func clearSignalHandlers() {
}
//go:nosplit
func sigblock() {
func sigblock(exiting bool) {
}
// Called to initialize a new m (including the bootstrap m).


@ -301,6 +301,24 @@ func getHugePageSize() uintptr {
func osinit() {
ncpu = getproccount()
physHugePageSize = getHugePageSize()
if iscgo {
// #42494 glibc and musl reserve some signals for
// internal use and require they not be blocked by
// the rest of a normal C runtime. When the go runtime
// blocks...unblocks signals, temporarily, the blocked
// interval of time is generally very short. As such,
// these expectations of *libc code are mostly met by
// the combined go+cgo system of threads. However,
// when go causes a thread to exit, via a return from
// mstart(), the combined runtime can deadlock if
// these signals are blocked. Thus, don't block these
// signals when exiting threads.
// - glibc: SIGCANCEL (32), SIGSETXID (33)
// - musl: SIGTIMER (32), SIGCANCEL (33), SIGSYNCCALL (34)
sigdelset(&sigsetAllExiting, 32)
sigdelset(&sigsetAllExiting, 33)
sigdelset(&sigsetAllExiting, 34)
}
osArchInit()
}


@ -195,7 +195,7 @@ func msigrestore(sigmask sigset) {
func clearSignalHandlers() {
}
func sigblock() {
func sigblock(exiting bool) {
}
// Called to initialize a new m (including the bootstrap m).


@ -886,7 +886,7 @@ func clearSignalHandlers() {
}
//go:nosplit
func sigblock() {
func sigblock(exiting bool) {
}
// Called to initialize a new m (including the bootstrap m).


@ -1313,7 +1313,7 @@ func mexit(osStack bool) {
throw("locked m0 woke up")
}
sigblock()
sigblock(true)
unminit()
// Free the gsignal stack.
@ -1515,6 +1515,7 @@ func syscall_runtime_doAllThreadsSyscall(fn func(bool) bool) {
if netpollinited() {
netpollBreak()
}
sigRecvPrepareForFixup()
_g_ := getg()
if raceenabled {
// For m's running without racectx, we loan out the
@ -1754,7 +1755,7 @@ func needm() {
// starting a new m to run Go code via newosproc.
var sigmask sigset
sigsave(&sigmask)
sigblock()
sigblock(false)
// Lock extra list, take head, unlock popped list.
// nilokay=false is safe here because of the invariant above,
@ -1903,7 +1904,7 @@ func dropm() {
// Setg(nil) clears g, which is the signal handler's cue not to run Go handlers.
// It's important not to try to handle a signal between those two steps.
sigmask := mp.sigmask
sigblock()
sigblock(false)
unminit()
mnext := lockextra(true)
@ -3776,7 +3777,7 @@ func beforefork() {
// group. See issue #18600.
gp.m.locks++
sigsave(&gp.m.sigmask)
sigblock()
sigblock(false)
// This function is called before fork in syscall package.
// Code between fork and exec must not allocate memory nor even try to grow stack.


@ -1042,15 +1042,26 @@ func msigrestore(sigmask sigset) {
sigprocmask(_SIG_SETMASK, &sigmask, nil)
}
// sigblock blocks all signals in the current thread's signal mask.
// sigsetAllExiting is used by sigblock(true) when a thread is
// exiting. sigset_all is defined in OS specific code, and per GOOS
// behavior may override this default for sigsetAllExiting: see
// osinit().
var sigsetAllExiting = sigset_all
// sigblock blocks signals in the current thread's signal mask.
// This is used to block signals while setting up and tearing down g
// when a non-Go thread calls a Go function.
// The OS-specific code is expected to define sigset_all.
// when a non-Go thread calls a Go function. When a thread is exiting
// we use the sigsetAllExiting value, otherwise the OS specific
// definition of sigset_all is used.
// This is nosplit and nowritebarrierrec because it is called by needm
// which may be called on a non-Go thread with no g available.
//go:nosplit
//go:nowritebarrierrec
func sigblock() {
func sigblock(exiting bool) {
if exiting {
sigprocmask(_SIG_SETMASK, &sigsetAllExiting, nil)
return
}
sigprocmask(_SIG_SETMASK, &sigset_all, nil)
}


@ -12,12 +12,16 @@
// sigsend is called by the signal handler to queue a new signal.
// signal_recv is called by the Go program to receive a newly queued signal.
// Synchronization between sigsend and signal_recv is based on the sig.state
// variable. It can be in 3 states: sigIdle, sigReceiving and sigSending.
// variable. It can be in 4 states: sigIdle, sigReceiving, sigSending and sigFixup.
// sigReceiving means that signal_recv is blocked on sig.Note and there are no
// new pending signals.
// sigSending means that sig.mask *may* contain new pending signals,
// signal_recv can't be blocked in this state.
// sigIdle means that there are no new pending signals and signal_recv is not blocked.
// sigFixup is a transient state that can only exist as a short
// transition from sigReceiving and then on to sigIdle: it is
// used to ensure the AllThreadsSyscall()'s mDoFixup() operation
// occurs on the sleeping m, waiting to receive a signal.
// Transitions between states are done atomically with CAS.
// When signal_recv is unblocked, it resets sig.Note and rechecks sig.mask.
// If several sigsends and signal_recv execute concurrently, it can lead to
@ -59,6 +63,7 @@ const (
sigIdle = iota
sigReceiving
sigSending
sigFixup
)
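The four states and their CAS transitions can be exercised in miniature with sync/atomic; this toy models only the fixup wake-up path described above (names mirror the runtime's, but this is an illustration, not the real implementation):

package main

import (
	"fmt"
	"sync/atomic"
)

const (
	sigIdle uint32 = iota
	sigReceiving
	sigSending
	sigFixup
)

func main() {
	state := sigIdle

	// signal_recv parks itself: sigIdle -> sigReceiving.
	atomic.CompareAndSwapUint32(&state, sigIdle, sigReceiving)
	// sigRecvPrepareForFixup wakes it: sigReceiving -> sigFixup.
	if atomic.CompareAndSwapUint32(&state, sigReceiving, sigFixup) {
		fmt.Println("woke sleeping receiver for fixup")
	}
	// The receiver notices sigFixup, performs the fixup, and
	// returns to sigIdle before sleeping again.
	if atomic.CompareAndSwapUint32(&state, sigFixup, sigIdle) {
		fmt.Println("fixup done, back to idle")
	}
}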
// sigsend delivers a signal from sighandler to the internal signal delivery queue.
@ -112,6 +117,9 @@ Send:
notewakeup(&sig.note)
break Send
}
case sigFixup:
// nothing to do - we need to wait for sigIdle.
osyield()
}
}
@ -119,6 +127,19 @@ Send:
return true
}
// sigRecvPrepareForFixup is used to temporarily wake up the
// signal_recv() running thread while it is blocked waiting for the
// arrival of a signal. If it causes the thread to wake up, the
// sig.state travels through this sequence: sigReceiving -> sigFixup
// -> sigIdle -> sigReceiving and resumes. (This is only called while
// GC is disabled.)
//go:nosplit
func sigRecvPrepareForFixup() {
if atomic.Cas(&sig.state, sigReceiving, sigFixup) {
notewakeup(&sig.note)
}
}
// Called to receive the next queued signal.
// Must only be called from a single goroutine at a time.
//go:linkname signal_recv os/signal.signal_recv
@ -146,7 +167,16 @@ func signal_recv() uint32 {
}
notetsleepg(&sig.note, -1)
noteclear(&sig.note)
break Receive
if !atomic.Cas(&sig.state, sigFixup, sigIdle) {
break Receive
}
// Getting here, the code will
// loop around again to sleep
// in state sigReceiving. This
// path is taken when
// sigRecvPrepareForFixup()
// has been called by another
// thread.
}
case sigSending:
if atomic.Cas(&sig.state, sigSending, sigIdle) {


@ -92,6 +92,13 @@ func sendNote(s *byte) bool {
return true
}
// sigRecvPrepareForFixup is a no-op on plan9. (This would only be
// called while GC is disabled.)
//
//go:nosplit
func sigRecvPrepareForFixup() {
}
// Called to receive the next queued signal.
// Must only be called from a single goroutine at a time.
//go:linkname signal_recv os/signal.signal_recv


@ -597,6 +597,14 @@ func compareStatus(filter, expect string) error {
return nil
}
// killAThread locks the goroutine to an OS thread and exits; this
// causes an OS thread to terminate.
func killAThread(c <-chan struct{}) {
runtime.LockOSThread()
<-c
return
}
// TestSetuidEtc performs tests on all of the wrapped system calls
// that mirror to the 9 glibc syscalls with POSIX semantics. The test
// here is considered authoritative and should compile and run
@ -647,6 +655,11 @@ func TestSetuidEtc(t *testing.T) {
}
for i, v := range vs {
// Generate some thread churn as we execute the tests.
c := make(chan struct{})
go killAThread(c)
close(c)
if err := v.fn(); err != nil {
t.Errorf("[%d] %q failed: %v", i, v.call, err)
continue

File diff suppressed because it is too large


@ -8,7 +8,7 @@ package main
var a = twoResults() // ERROR "assignment mismatch: 1 variable but twoResults returns 2 values|2\-valued"
var b, c, d = twoResults() // ERROR "assignment mismatch: 3 variables but twoResults returns 2 values|cannot initialize"
var e, f = oneResult() // ERROR "assignment mismatch: 2 variables but oneResult returns 1 values|cannot initialize"
var e, f = oneResult() // ERROR "assignment mismatch: 2 variables but oneResult returns 1 value|cannot initialize"
func twoResults() (int, int) {
return 1, 2


@ -7,9 +7,9 @@
package main
func main() {
var a, b = 1 // ERROR "assignment mismatch: 2 variables but 1 values|wrong number of initializations|cannot initialize"
_ = 1, 2 // ERROR "assignment mismatch: 1 variables but 2 values|number of variables does not match|cannot assign"
c, d := 1 // ERROR "assignment mismatch: 2 variables but 1 values|wrong number of initializations|cannot initialize"
var a, b = 1 // ERROR "assignment mismatch: 2 variables but 1 value|wrong number of initializations|cannot initialize"
_ = 1, 2 // ERROR "assignment mismatch: 1 variable but 2 values|number of variables does not match|cannot assign"
c, d := 1 // ERROR "assignment mismatch: 2 variables but 1 value|wrong number of initializations|cannot initialize"
e, f := 1, 2, 3 // ERROR "assignment mismatch: 2 variables but 3 values|wrong number of initializations|cannot initialize"
_, _, _, _ = c, d, e, f
}


@ -35,8 +35,8 @@ func main() {
_ = f.Exported
_ = f.exported // ERROR "f.exported undefined .type f1.Foo has no field or method exported, but does have Exported."
_ = f.Unexported // ERROR "f.Unexported undefined .type f1.Foo has no field or method Unexported."
_ = f.unexported // ERROR "f.unexported undefined .cannot refer to unexported field or method f1..\*Foo..unexported."
f.unexported = 10 // ERROR "f.unexported undefined .cannot refer to unexported field or method f1..\*Foo..unexported."
f.unexported() // ERROR "f.unexported undefined .cannot refer to unexported field or method f1..\*Foo..unexported."
_ = f.unexported // ERROR "f.unexported undefined .cannot refer to unexported field or method unexported."
f.unexported = 10 // ERROR "f.unexported undefined .cannot refer to unexported field or method unexported."
f.unexported() // ERROR "f.unexported undefined .cannot refer to unexported field or method unexported."
_ = f.hook // ERROR "f.hook undefined .cannot refer to unexported field or method hook."
}


@ -63,6 +63,7 @@ func _() {
_ = f1() // ok
_, _ = f2() // ok
_ = f2() // ERROR "assignment mismatch: 1 variable but f2 returns 2 values|cannot assign"
_ = f1(), 0 // ERROR "assignment mismatch: 1 variable but 2 values|cannot assign"
T.M0 // ERROR "T.M0 .* not used"
t.M0 // ERROR "t.M0 .* not used"
cap // ERROR "use of builtin cap not in function call|must be called"