diff --git a/src/cmd/compile/internal/escape/call.go b/src/cmd/compile/internal/escape/call.go index 62727a8ef8..46bfe65aff 100644 --- a/src/cmd/compile/internal/escape/call.go +++ b/src/cmd/compile/internal/escape/call.go @@ -68,7 +68,7 @@ func (e *escape) callCommon(ks []hole, call ir.Node, init *ir.Nodes, wrapper *ir fn = ir.MethodExprName(v) } case ir.OCALLMETH: - fn = ir.MethodExprName(call.X) + base.FatalfAt(call.Pos(), "OCALLMETH missed by typecheck") } fntype := call.X.Type() diff --git a/src/cmd/compile/internal/inline/inl.go b/src/cmd/compile/internal/inline/inl.go index 0620191bbf..3086d093c0 100644 --- a/src/cmd/compile/internal/inline/inl.go +++ b/src/cmd/compile/internal/inline/inl.go @@ -273,6 +273,19 @@ func (v *hairyVisitor) doNode(n ir.Node) bool { } } } + if n.X.Op() == ir.OMETHEXPR { + if meth := ir.MethodExprName(n.X); meth != nil { + fn := meth.Func + if fn != nil && types.IsRuntimePkg(fn.Sym().Pkg) && fn.Sym().Name == "heapBits.nextArena" { + // Special case: explicitly allow + // mid-stack inlining of + // runtime.heapBits.next even though + // it calls slow-path + // runtime.heapBits.nextArena. + break + } + } + } if ir.IsIntrinsicCall(n) { // Treat like any other node. @@ -287,28 +300,8 @@ func (v *hairyVisitor) doNode(n ir.Node) bool { // Call cost for non-leaf inlining. v.budget -= v.extraCallCost - // Call is okay if inlinable and we have the budget for the body. case ir.OCALLMETH: - n := n.(*ir.CallExpr) - t := n.X.Type() - if t == nil { - base.Fatalf("no function type for [%p] %+v\n", n.X, n.X) - } - fn := ir.MethodExprName(n.X).Func - if types.IsRuntimePkg(fn.Sym().Pkg) && fn.Sym().Name == "heapBits.nextArena" { - // Special case: explicitly allow - // mid-stack inlining of - // runtime.heapBits.next even though - // it calls slow-path - // runtime.heapBits.nextArena. - break - } - if fn.Inl != nil { - v.budget -= fn.Inl.Cost - break - } - // Call cost for non-leaf inlining. - v.budget -= v.extraCallCost + base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck") // Things that are too hairy, irrespective of the budget case ir.OCALL, ir.OCALLINTER: @@ -575,7 +568,9 @@ func inlnode(n ir.Node, maxCost int32, inlMap map[*ir.Func]bool, edit func(ir.No case ir.ODEFER, ir.OGO: n := n.(*ir.GoDeferStmt) switch call := n.Call; call.Op() { - case ir.OCALLFUNC, ir.OCALLMETH: + case ir.OCALLMETH: + base.FatalfAt(call.Pos(), "OCALLMETH missed by typecheck") + case ir.OCALLFUNC: call := call.(*ir.CallExpr) call.NoInline = true } @@ -585,11 +580,18 @@ func inlnode(n ir.Node, maxCost int32, inlMap map[*ir.Func]bool, edit func(ir.No case ir.OCLOSURE: return n case ir.OCALLMETH: - // Prevent inlining some reflect.Value methods when using checkptr, - // even when package reflect was compiled without it (#35073). + base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck") + case ir.OCALLFUNC: n := n.(*ir.CallExpr) - if s := ir.MethodExprName(n.X).Sym(); base.Debug.Checkptr != 0 && types.IsReflectPkg(s.Pkg) && (s.Name == "Value.UnsafeAddr" || s.Name == "Value.Pointer") { - return n + if n.X.Op() == ir.OMETHEXPR { + // Prevent inlining some reflect.Value methods when using checkptr, + // even when package reflect was compiled without it (#35073). 
+ if meth := ir.MethodExprName(n.X); meth != nil {
+ s := meth.Sym()
+ if base.Debug.Checkptr != 0 && types.IsReflectPkg(s.Pkg) && (s.Name == "Value.UnsafeAddr" || s.Name == "Value.Pointer") {
+ return n
+ }
+ }
 }
 }
 
@@ -611,7 +613,9 @@ func inlnode(n ir.Node, maxCost int32, inlMap map[*ir.Func]bool, edit func(ir.No
 // transmogrify this node itself unless inhibited by the
 // switch at the top of this function.
 switch n.Op() {
- case ir.OCALLFUNC, ir.OCALLMETH:
+ case ir.OCALLMETH:
+ base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
+ case ir.OCALLFUNC:
 n := n.(*ir.CallExpr)
 if n.NoInline {
 return n
@@ -631,19 +635,9 @@ func inlnode(n ir.Node, maxCost int32, inlMap map[*ir.Func]bool, edit func(ir.No
 if fn := inlCallee(call.X); fn != nil && fn.Inl != nil {
 n = mkinlcall(call, fn, maxCost, inlMap, edit)
 }
 
 case ir.OCALLMETH:
- call = n.(*ir.CallExpr)
- if base.Flag.LowerM > 3 {
- fmt.Printf("%v:call to meth %v\n", ir.Line(n), call.X.(*ir.SelectorExpr).Sel)
- }
-
- // typecheck should have resolved ODOTMETH->type, whose nname points to the actual function.
- if call.X.Type() == nil {
- base.Fatalf("no function type for [%p] %+v\n", call.X, call.X)
- }
-
- n = mkinlcall(call, ir.MethodExprName(call.X).Func, maxCost, inlMap, edit)
+ base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
 }
 
 base.Pos = lno
@@ -723,7 +717,7 @@ var SSADumpInline = func(*ir.Func) {}
 // instead.
 var NewInline = func(call *ir.CallExpr, fn *ir.Func, inlIndex int) *ir.InlinedCallExpr { return nil }
 
-// If n is a call node (OCALLFUNC or OCALLMETH), and fn is an ONAME node for a
+// If n is an OCALLFUNC node, and fn is an ONAME node for a
 // function with an inlinable body, return an OINLCALL node that can replace n.
 // The returned node's Ninit has the parameter assignments, the Nbody is the
 // inlined function body, and (List, Rlist) contain the (input, output)
@@ -906,11 +900,7 @@ func oldInline(call *ir.CallExpr, fn *ir.Func, inlIndex int) *ir.InlinedCallExpr
 as := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
 as.Def = true
 if call.Op() == ir.OCALLMETH {
- sel := call.X.(*ir.SelectorExpr)
- if sel.X == nil {
- base.Fatalf("method call without receiver: %+v", call)
- }
- as.Rhs.Append(sel.X)
+ base.FatalfAt(call.Pos(), "OCALLMETH missed by typecheck")
 }
 as.Rhs.Append(call.Args...)
diff --git a/src/cmd/compile/internal/noder/reader.go b/src/cmd/compile/internal/noder/reader.go
index 459e1d1703..e5ad3f4b8e 100644
--- a/src/cmd/compile/internal/noder/reader.go
+++ b/src/cmd/compile/internal/noder/reader.go
@@ -1807,8 +1807,7 @@ func InlineCall(call *ir.CallExpr, fn *ir.Func, inlIndex int) *ir.InlinedCallExp
 var args ir.Nodes
 if call.Op() == ir.OCALLMETH {
- assert(call.X.Op() == ir.ODOTMETH)
- args.Append(call.X.(*ir.SelectorExpr).X)
+ base.FatalfAt(call.Pos(), "OCALLMETH missed by typecheck")
 }
 args.Append(call.Args...)
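Reviewer note, not part of the patch: the invariant behind all the new "OCALLMETH missed by typecheck" checks is that typecheck.FixMethodCall now rewrites a static method call recv.M(args) into a call through the corresponding method expression, T.M(recv, args), so later phases only ever see OCALLFUNC (with an OMETHEXPR callee) where they used to see OCALLMETH. Below is a minimal Go-level sketch of that equivalence; Point and Add are invented names for illustration, not anything in this patch, and the transform.go hunk that follows additionally skips generic receivers.

package main

import "fmt"

type Point struct{ X, Y int }

func (p Point) Add(q Point) Point { return Point{p.X + q.X, p.Y + q.Y} }

func main() {
	p, q := Point{1, 2}, Point{3, 4}

	// What the source says: a method call. The front end initially
	// types this as OCALLMETH with an ODOTMETH callee.
	r1 := p.Add(q)

	// What typecheck.FixMethodCall leaves behind: an ordinary function
	// call through the method expression, with the receiver prepended
	// to the arguments (OCALLFUNC with an OMETHEXPR callee).
	r2 := Point.Add(p, q)

	fmt.Println(r1, r2) // {4 6} {4 6}
}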
diff --git a/src/cmd/compile/internal/noder/transform.go b/src/cmd/compile/internal/noder/transform.go index 946d335f07..30d6e34ae4 100644 --- a/src/cmd/compile/internal/noder/transform.go +++ b/src/cmd/compile/internal/noder/transform.go @@ -149,6 +149,9 @@ func transformCall(n *ir.CallExpr) { } typecheckaste(ir.OCALL, n.X, n.IsDDD, t.Params(), n.Args) + if l.Op() == ir.ODOTMETH && len(deref(n.X.Type().Recv().Type).RParams()) == 0 { + typecheck.FixMethodCall(n) + } if t.NumResults() == 1 { n.SetType(l.Type().Results().Field(0).Type) diff --git a/src/cmd/compile/internal/typecheck/func.go b/src/cmd/compile/internal/typecheck/func.go index 00770c87cf..20b991be56 100644 --- a/src/cmd/compile/internal/typecheck/func.go +++ b/src/cmd/compile/internal/typecheck/func.go @@ -516,6 +516,7 @@ func tcCall(n *ir.CallExpr, top int) ir.Node { } typecheckaste(ir.OCALL, n.X, n.IsDDD, t.Params(), n.Args, func() string { return fmt.Sprintf("argument to %v", n.X) }) + FixMethodCall(n) if t.NumResults() == 0 { return n } diff --git a/src/cmd/compile/internal/walk/convert.go b/src/cmd/compile/internal/walk/convert.go index 5297332f6b..e659ee59f1 100644 --- a/src/cmd/compile/internal/walk/convert.go +++ b/src/cmd/compile/internal/walk/convert.go @@ -462,7 +462,9 @@ func walkCheckPtrArithmetic(n *ir.ConvExpr, init *ir.Nodes) ir.Node { // TODO(mdempsky): Make stricter. We only need to exempt // reflect.Value.Pointer and reflect.Value.UnsafeAddr. switch n.X.Op() { - case ir.OCALLFUNC, ir.OCALLMETH, ir.OCALLINTER: + case ir.OCALLMETH: + base.FatalfAt(n.X.Pos(), "OCALLMETH missed by typecheck") + case ir.OCALLFUNC, ir.OCALLINTER: return n } diff --git a/src/cmd/compile/internal/walk/expr.go b/src/cmd/compile/internal/walk/expr.go index 86c6d44501..f7675c3b7d 100644 --- a/src/cmd/compile/internal/walk/expr.go +++ b/src/cmd/compile/internal/walk/expr.go @@ -167,7 +167,7 @@ func walkExpr1(n ir.Node, init *ir.Nodes) ir.Node { case ir.OCFUNC: return n - case ir.OCALLINTER, ir.OCALLFUNC, ir.OCALLMETH: + case ir.OCALLINTER, ir.OCALLFUNC: n := n.(*ir.CallExpr) return walkCall(n, init) @@ -487,9 +487,12 @@ func walkAddString(n *ir.AddStringExpr, init *ir.Nodes) ir.Node { return r1 } -// walkCall walks an OCALLFUNC, OCALLINTER, or OCALLMETH node. +// walkCall walks an OCALLFUNC or OCALLINTER node. func walkCall(n *ir.CallExpr, init *ir.Nodes) ir.Node { - if n.Op() == ir.OCALLINTER || n.Op() == ir.OCALLMETH || n.X.Op() == ir.OMETHEXPR { + if n.Op() == ir.OCALLMETH { + base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck") + } + if n.Op() == ir.OCALLINTER || n.X.Op() == ir.OMETHEXPR { // We expect both interface call reflect.Type.Method and concrete // call reflect.(*rtype).Method. usemethod(n) @@ -550,7 +553,7 @@ func walkCall1(n *ir.CallExpr, init *ir.Nodes) { n.SetWalked(true) if n.Op() == ir.OCALLMETH { - typecheck.FixMethodCall(n) + base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck") } args := n.Args diff --git a/src/cmd/compile/internal/walk/order.go b/src/cmd/compile/internal/walk/order.go index 9912feba63..16a124d2ff 100644 --- a/src/cmd/compile/internal/walk/order.go +++ b/src/cmd/compile/internal/walk/order.go @@ -506,15 +506,18 @@ func (o *orderState) init(n ir.Node) { } // call orders the call expression n. -// n.Op is OCALLMETH/OCALLFUNC/OCALLINTER or a builtin like OCOPY. +// n.Op is OCALLFUNC/OCALLINTER or a builtin like OCOPY. func (o *orderState) call(nn ir.Node) { if len(nn.Init()) > 0 { // Caller should have already called o.init(nn). 
base.Fatalf("%v with unexpected ninit", nn.Op()) } + if nn.Op() == ir.OCALLMETH { + base.FatalfAt(nn.Pos(), "OCALLMETH missed by typecheck") + } // Builtin functions. - if nn.Op() != ir.OCALLFUNC && nn.Op() != ir.OCALLMETH && nn.Op() != ir.OCALLINTER { + if nn.Op() != ir.OCALLFUNC && nn.Op() != ir.OCALLINTER { switch n := nn.(type) { default: base.Fatalf("unexpected call: %+v", n) @@ -707,7 +710,7 @@ func (o *orderState) stmt(n ir.Node) { o.out = append(o.out, n) // Special: handle call arguments. - case ir.OCALLFUNC, ir.OCALLINTER, ir.OCALLMETH: + case ir.OCALLFUNC, ir.OCALLINTER: n := n.(*ir.CallExpr) t := o.markTemp() o.call(n) @@ -1147,7 +1150,10 @@ func (o *orderState) expr1(n, lhs ir.Node) ir.Node { case ir.OCONVNOP: n := n.(*ir.ConvExpr) - if n.Type().IsKind(types.TUNSAFEPTR) && n.X.Type().IsKind(types.TUINTPTR) && (n.X.Op() == ir.OCALLFUNC || n.X.Op() == ir.OCALLINTER || n.X.Op() == ir.OCALLMETH) { + if n.X.Op() == ir.OCALLMETH { + base.FatalfAt(n.X.Pos(), "OCALLMETH missed by typecheck") + } + if n.Type().IsKind(types.TUNSAFEPTR) && n.X.Type().IsKind(types.TUINTPTR) && (n.X.Op() == ir.OCALLFUNC || n.X.Op() == ir.OCALLINTER) { call := n.X.(*ir.CallExpr) // When reordering unsafe.Pointer(f()) into a separate // statement, the conversion and function call must stay @@ -1200,9 +1206,12 @@ func (o *orderState) expr1(n, lhs ir.Node) ir.Node { o.out = append(o.out, nif) return r + case ir.OCALLMETH: + base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck") + panic("unreachable") + case ir.OCALLFUNC, ir.OCALLINTER, - ir.OCALLMETH, ir.OCAP, ir.OCOMPLEX, ir.OCOPY, diff --git a/src/cmd/compile/internal/walk/stmt.go b/src/cmd/compile/internal/walk/stmt.go index 0c216d2e8a..4581bca3df 100644 --- a/src/cmd/compile/internal/walk/stmt.go +++ b/src/cmd/compile/internal/walk/stmt.go @@ -40,7 +40,6 @@ func walkStmt(n ir.Node) ir.Node { ir.OAS2MAPR, ir.OCLOSE, ir.OCOPY, - ir.OCALLMETH, ir.OCALLINTER, ir.OCALL, ir.OCALLFUNC, diff --git a/src/cmd/compile/internal/walk/walk.go b/src/cmd/compile/internal/walk/walk.go index 6e992a91b8..f687127fee 100644 --- a/src/cmd/compile/internal/walk/walk.go +++ b/src/cmd/compile/internal/walk/walk.go @@ -308,7 +308,7 @@ func mayCall(n ir.Node) bool { default: base.FatalfAt(n.Pos(), "mayCall %+v", n) - case ir.OCALLFUNC, ir.OCALLMETH, ir.OCALLINTER, + case ir.OCALLFUNC, ir.OCALLINTER, ir.OUNSAFEADD, ir.OUNSAFESLICE: return true