Merge Call and DivergingCall terminators into CallKind

This merges the two separate call terminators, Call and DivergingCall, into a single Call terminator and distinguishes the cases with a CallKind sub-enum instead.

Somewhat unrelatedly, copying the return value into the destination for a certain kind of invoke
is also implemented here. See the associated comment in the code for the details that arise from
this implementation.
Simonas Kazlauskas 2015-12-22 01:46:56 +02:00
parent 50107034c0
commit d1c644c1e9
6 changed files with 203 additions and 185 deletions
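
For orientation (not part of the diff): a consumer that previously matched both Terminator::Call and Terminator::DivergingCall now matches a single Call and dispatches on its kind field. A minimal standalone sketch of that dispatch, with simplified stand-in types; the real successors() returns slices via slice::ref_slice rather than a Vec:

// Standalone sketch; BasicBlock and CallKind here are simplified stand-ins.
#[derive(Clone, Copy, Debug, PartialEq)]
struct BasicBlock(usize);

enum CallKind {
    Diverging,
    DivergingCleanup(BasicBlock),
    Converging { target: BasicBlock },
    ConvergingCleanup { targets: (BasicBlock, BasicBlock) },
}

impl CallKind {
    // Successor blocks of the call, mirroring the new CallKind::successors.
    fn successors(&self) -> Vec<BasicBlock> {
        match *self {
            CallKind::Diverging => vec![],
            CallKind::DivergingCleanup(b) => vec![b],
            CallKind::Converging { target } => vec![target],
            CallKind::ConvergingCleanup { targets } => vec![targets.0, targets.1],
        }
    }
}

fn main() {
    let kind = CallKind::ConvergingCleanup { targets: (BasicBlock(1), BasicBlock(2)) };
    assert_eq!(kind.successors(), vec![BasicBlock(1), BasicBlock(2)]);
}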

View file

@@ -250,51 +250,58 @@ pub enum Terminator<'tcx> {
func: Operand<'tcx>,
/// Arguments the function is called with
args: Vec<Operand<'tcx>>,
/// Location to write the return value into
destination: Lvalue<'tcx>,
targets: CallTargets,
/// The kind of call with associated information
kind: CallKind<'tcx>,
},
}
/// Block ends with a call of a diverging function.
DivergingCall {
/// The function that's being called
func: Operand<'tcx>,
/// Arguments the function is called with
args: Vec<Operand<'tcx>>,
/// Some, if there's any cleanup to be done when the function unwinds
cleanup: Option<BasicBlock>,
#[derive(Clone, RustcEncodable, RustcDecodable)]
pub enum CallKind<'tcx> {
/// Diverging function without associated cleanup
Diverging,
/// Diverging function with associated cleanup
DivergingCleanup(BasicBlock),
/// Converging function without associated cleanup
Converging {
/// Destination where the call result is written
destination: Lvalue<'tcx>,
/// Block to branch into on successful return
target: BasicBlock,
},
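/// Converging function with associated cleanup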
ConvergingCleanup {
/// Destination where the call result is written
destination: Lvalue<'tcx>,
/// First target is branched to on successful return.
/// Second block contains the cleanups to do on unwind.
targets: (BasicBlock, BasicBlock)
}
}
#[derive(Clone, Copy, RustcEncodable, RustcDecodable)]
pub enum CallTargets {
/// The only target that should be entered when the function returns normally.
Return(BasicBlock),
/// In addition to the normal-return block, the function has associated cleanup that should be
/// done when the function unwinds.
WithCleanup((BasicBlock, BasicBlock))
}
impl CallTargets {
pub fn new(ret: BasicBlock, cleanup: Option<BasicBlock>) -> CallTargets {
if let Some(c) = cleanup {
CallTargets::WithCleanup((ret, c))
} else {
CallTargets::Return(ret)
impl<'tcx> CallKind<'tcx> {
pub fn successors(&self) -> &[BasicBlock] {
match *self {
CallKind::Diverging => &[],
CallKind::DivergingCleanup(ref b) |
CallKind::Converging { target: ref b, .. } => slice::ref_slice(b),
CallKind::ConvergingCleanup { ref targets, .. } => targets.as_slice(),
}
}
pub fn as_slice(&self) -> &[BasicBlock] {
pub fn successors_mut(&mut self) -> &mut [BasicBlock] {
match *self {
CallTargets::Return(ref b) => slice::ref_slice(b),
CallTargets::WithCleanup(ref bs) => bs.as_slice()
CallKind::Diverging => &mut [],
CallKind::DivergingCleanup(ref mut b) |
CallKind::Converging { target: ref mut b, .. } => slice::mut_ref_slice(b),
CallKind::ConvergingCleanup { ref mut targets, .. } => targets.as_mut_slice(),
}
}
pub fn as_mut_slice(&mut self) -> &mut [BasicBlock] {
pub fn destination(&self) -> Option<Lvalue<'tcx>> {
match *self {
CallTargets::Return(ref mut b) => slice::mut_ref_slice(b),
CallTargets::WithCleanup(ref mut bs) => bs.as_mut_slice()
CallKind::Converging { ref destination, .. } |
CallKind::ConvergingCleanup { ref destination, .. } => Some(destination.clone()),
CallKind::Diverging |
CallKind::DivergingCleanup(_) => None
}
}
}
@@ -309,12 +316,7 @@ pub fn successors(&self) -> &[BasicBlock] {
SwitchInt { targets: ref b, .. } => b,
Resume => &[],
Return => &[],
Call { targets: ref b, .. } => b.as_slice(),
DivergingCall { cleanup: ref b, .. } => if let Some(b) = b.as_ref() {
slice::ref_slice(b)
} else {
&mut []
},
Call { ref kind, .. } => kind.successors(),
}
}
@@ -327,12 +329,7 @@ pub fn successors_mut(&mut self) -> &mut [BasicBlock] {
SwitchInt { targets: ref mut b, .. } => b,
Resume => &mut [],
Return => &mut [],
Call { targets: ref mut b, .. } => b.as_mut_slice(),
DivergingCall { cleanup: ref mut b, .. } => if let Some(b) = b.as_mut() {
slice::mut_ref_slice(b)
} else {
&mut []
},
Call { ref mut kind, .. } => kind.successors_mut(),
}
}
}
@@ -399,13 +396,18 @@ pub fn fmt_head<W: Write>(&self, fmt: &mut W) -> fmt::Result {
SwitchInt { discr: ref lv, .. } => write!(fmt, "switchInt({:?})", lv),
Return => write!(fmt, "return"),
Resume => write!(fmt, "resume"),
Call { .. } => {
// the author didn't bother rebasing this
unimplemented!()
},
DivergingCall { .. } => {
// the author didn't bother rebasing this
unimplemented!()
Call { ref kind, ref func, ref args } => {
if let Some(destination) = kind.destination() {
try!(write!(fmt, "{:?} = ", destination));
}
try!(write!(fmt, "{:?}(", func));
for (index, arg) in args.iter().enumerate() {
if index > 0 {
try!(write!(fmt, ", "));
}
try!(write!(fmt, "{:?}", arg));
}
write!(fmt, ")")
}
}
}
@@ -417,8 +419,6 @@ pub fn fmt_successor_labels(&self) -> Vec<Cow<'static, str>> {
Return | Resume => vec![],
Goto { .. } => vec!["".into_cow()],
If { .. } => vec!["true".into_cow(), "false".into_cow()],
Call { .. } => vec!["return".into_cow(), "unwind".into_cow()],
DivergingCall { .. } => vec!["unwind".into_cow()],
Switch { ref adt_def, .. } => {
adt_def.variants
.iter()
@@ -435,6 +435,16 @@ pub fn fmt_successor_labels(&self) -> Vec<Cow<'static, str>> {
.chain(iter::once(String::from("otherwise").into_cow()))
.collect()
}
Call { ref kind, .. } => match *kind {
CallKind::Diverging =>
vec![],
CallKind::DivergingCleanup(..) =>
vec!["unwind".into_cow()],
CallKind::Converging { .. } =>
vec!["return".into_cow()],
CallKind::ConvergingCleanup { .. } =>
vec!["return".into_cow(), "unwind".into_cow()],
},
}
}
}
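
With the merge, the new fmt_head arm above prints both shapes of call: dest = func(args) when the kind carries a destination, and plain func(args) for diverging calls. A standalone sketch of that formatting loop, with strings standing in for the real Lvalue/Operand types (the compiler code uses try! rather than ?):

use std::fmt::{self, Write};

// Standalone sketch of the call-printing shape; not the compiler's types.
fn fmt_call<W: Write>(fmt: &mut W, dest: Option<&str>, func: &str, args: &[&str]) -> fmt::Result {
    if let Some(dest) = dest {
        write!(fmt, "{} = ", dest)?;
    }
    write!(fmt, "{}(", func)?;
    for (index, arg) in args.iter().enumerate() {
        if index > 0 {
            write!(fmt, ", ")?;
        }
        write!(fmt, "{}", arg)?;
    }
    write!(fmt, ")")
}

fn main() {
    let mut head = String::new();
    fmt_call(&mut head, Some("tmp0"), "foo", &["a", "b"]).unwrap();
    assert_eq!(head, "tmp0 = foo(a, b)"); // a diverging call would print just "foo(a, b)"
}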

View file

@@ -136,23 +136,15 @@ fn super_terminator(&mut self, block: BasicBlock, terminator: &Terminator<'tcx>)
Terminator::Return => {
}
Terminator::Call { ref func, ref args, ref destination, ref targets } => {
self.visit_lvalue(destination, LvalueContext::Store);
Terminator::Call { ref func, ref args, ref kind } => {
if let Some(ref destination) = kind.destination() {
self.visit_lvalue(destination, LvalueContext::Store);
}
self.visit_operand(func);
for arg in args {
self.visit_operand(arg);
}
for &target in targets.as_slice() {
self.visit_branch(block, target);
}
}
Terminator::DivergingCall { ref func, ref args, ref cleanup } => {
self.visit_operand(func);
for arg in args {
self.visit_operand(arg);
}
for &target in cleanup.as_ref() {
for &target in kind.successors() {
self.visit_branch(block, target);
}
}
@@ -432,26 +424,15 @@ fn super_terminator(&mut self,
Terminator::Return => {
}
Terminator::Call { ref mut func,
ref mut args,
ref mut destination,
ref mut targets } => {
self.visit_lvalue(destination, LvalueContext::Store);
Terminator::Call { ref mut func, ref mut args, ref mut kind } => {
if let Some(ref mut destination) = kind.destination() {
self.visit_lvalue(destination, LvalueContext::Store);
}
self.visit_operand(func);
for arg in args {
self.visit_operand(arg);
}
for &target in targets.as_slice() {
self.visit_branch(block, target);
}
}
Terminator::DivergingCall { ref mut func, ref mut args, ref mut cleanup } => {
self.visit_operand(func);
for arg in args {
self.visit_operand(arg);
}
for &target in cleanup.as_ref() {
for &target in kind.successors() {
self.visit_branch(block, target);
}
}

View file

@@ -224,17 +224,22 @@ pub fn into_expr(&mut self,
let success = this.cfg.start_new_block();
let cleanup = this.diverge_cleanup();
let term = if diverges {
Terminator::DivergingCall { func: fun, args: args, cleanup: cleanup }
} else {
Terminator::Call {
func: fun,
args: args,
destination: destination.clone(),
targets: CallTargets::new(success, cleanup)
this.cfg.terminate(block, Terminator::Call {
func: fun,
args: args,
kind: match (cleanup, diverges) {
(None, true) => CallKind::Diverging,
(Some(c), true) => CallKind::DivergingCleanup(c),
(None, false) => CallKind::Converging {
destination: destination.clone(),
target: success
},
(Some(c), false) => CallKind::ConvergingCleanup {
destination: destination.clone(),
targets: (success, c)
}
}
};
this.cfg.terminate(block, term);
});
success.unit()
}
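
The (cleanup, diverges) match above replaces the old split between CallTargets::new(success, cleanup) and the separate DivergingCall terminator. The same mapping, written as a hypothetical free-standing helper over simplified stand-in types (a sketch for illustration, not code from the commit):

// Hypothetical helper over simplified stand-in types; not part of the commit.
#[derive(Clone, Copy, Debug)]
struct BasicBlock(usize);

#[derive(Clone, Debug)]
struct Lvalue(&'static str);

#[derive(Debug)]
enum CallKind {
    Diverging,
    DivergingCleanup(BasicBlock),
    Converging { destination: Lvalue, target: BasicBlock },
    ConvergingCleanup { destination: Lvalue, targets: (BasicBlock, BasicBlock) },
}

// `diverges`: the callee returns `!`; `cleanup`: the unwind block, if any.
fn call_kind(destination: Lvalue,
             success: BasicBlock,
             cleanup: Option<BasicBlock>,
             diverges: bool) -> CallKind {
    match (cleanup, diverges) {
        (None, true) => CallKind::Diverging,
        (Some(c), true) => CallKind::DivergingCleanup(c),
        (None, false) => CallKind::Converging { destination: destination, target: success },
        (Some(c), false) => CallKind::ConvergingCleanup { destination: destination,
                                                          targets: (success, c) },
    }
}

fn main() {
    println!("{:?}", call_kind(Lvalue("tmp0"), BasicBlock(1), Some(BasicBlock(2)), false));
}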

View file

@@ -302,7 +302,6 @@ pub fn panic_bound_check(&mut self,
index: Operand<'tcx>,
len: Operand<'tcx>,
span: Span) {
let cleanup = self.diverge_cleanup();
let func = self.lang_function(lang_items::PanicBoundsCheckFnLangItem);
let str_ty = self.hir.tcx().mk_static_str();
let tup_ty = self.hir.tcx().mk_tup(vec![str_ty, self.hir.tcx().types.u32]);
@@ -316,16 +315,19 @@ pub fn panic_bound_check(&mut self,
// FIXME: ReStatic might be wrong here?
self.cfg.push_assign(block, DUMMY_SP, &tuple_ref, // tuple_ref = &tuple;
Rvalue::Ref(*ref_region, BorrowKind::Unique, tuple));
self.cfg.terminate(block, Terminator::DivergingCall {
let cleanup = self.diverge_cleanup();
self.cfg.terminate(block, Terminator::Call {
func: func,
args: vec![Operand::Consume(tuple_ref), index, len],
cleanup: cleanup,
kind: match cleanup {
None => CallKind::Diverging,
Some(c) => CallKind::DivergingCleanup(c)
}
});
}
/// Create diverge cleanup and branch to it from `block`.
pub fn panic(&mut self, block: BasicBlock, msg: &'static str, span: Span) {
let cleanup = self.diverge_cleanup();
let func = self.lang_function(lang_items::PanicFnLangItem);
let str_ty = self.hir.tcx().mk_static_str();
@@ -348,11 +350,14 @@ pub fn panic(&mut self, block: BasicBlock, msg: &'static str, span: Span) {
// FIXME: ReStatic might be wrong here?
self.cfg.push_assign(block, DUMMY_SP, &tuple_ref, // tuple_ref = &tuple;
Rvalue::Ref(*ref_region, BorrowKind::Unique, tuple));
self.cfg.terminate(block, Terminator::DivergingCall {
let cleanup = self.diverge_cleanup();
self.cfg.terminate(block, Terminator::Call {
func: func,
args: vec![Operand::Consume(tuple_ref)],
cleanup: cleanup,
kind: match cleanup {
None => CallKind::Diverging,
Some(c) => CallKind::DivergingCleanup(c)
}
});
}

View file

@@ -93,14 +93,10 @@ fn erase_regions_terminator(&mut self,
self.erase_regions_lvalue(discr);
*switch_ty = self.tcx.erase_regions(switch_ty);
},
Terminator::Call { ref mut destination, ref mut func, ref mut args, .. } => {
self.erase_regions_lvalue(destination);
self.erase_regions_operand(func);
for arg in &mut *args {
self.erase_regions_operand(arg);
Terminator::Call { ref mut func, ref mut args, ref mut kind } => {
if let Some(ref mut destination) = kind.destination() {
self.erase_regions_lvalue(destination);
}
}
Terminator::DivergingCall { ref mut func, ref mut args, .. } => {
self.erase_regions_operand(func);
for arg in &mut *args {
self.erase_regions_operand(arg);

View file

@@ -94,82 +94,29 @@ pub fn trans_block(&mut self, bb: mir::BasicBlock) {
base::build_return_block(bcx.fcx, bcx, return_ty, DebugLoc::None);
}
mir::Terminator::Call { ref func, ref args, ref destination, ref targets } => {
// The location we'll write the result of the call into.
let call_dest = self.trans_lvalue(bcx, destination);
let ret_ty = call_dest.ty.to_ty(bcx.tcx());
// Create the callee. This will always be a fn
// ptr and hence a kind of scalar.
mir::Terminator::Call { ref func, ref args, ref kind } => {
// Create the callee. This will always be a fn ptr and hence a kind of scalar.
let callee = self.trans_operand(bcx, func);
let attrs = attributes::from_fn_type(bcx.ccx(), callee.ty);
let debugloc = DebugLoc::None;
// The arguments we'll be passing. Plus one to account for outptr, if used.
let mut llargs = Vec::with_capacity(args.len() + 1);
// Does the fn use an outptr? If so, we have an extra first argument.
let return_outptr = type_of::return_uses_outptr(bcx.ccx(), ret_ty);
// The arguments we'll be passing.
let mut llargs = if return_outptr {
let mut vec = Vec::with_capacity(args.len() + 1);
vec.push(call_dest.llval);
vec
// Prepare the return value destination
let (ret_dest_ty, must_copy_dest) = if let Some(ref d) = kind.destination() {
let dest = self.trans_lvalue(bcx, d);
let ret_ty = dest.ty.to_ty(bcx.tcx());
if type_of::return_uses_outptr(bcx.ccx(), ret_ty) {
llargs.push(dest.llval);
(Some((dest, ret_ty)), false)
} else {
(Some((dest, ret_ty)), !common::type_is_zero_size(bcx.ccx(), ret_ty))
}
} else {
Vec::with_capacity(args.len())
(None, false)
};
// Process the rest of the args.
for arg in args {
let arg_op = self.trans_operand(bcx, arg);
match arg_op.val {
Ref(llval) | Immediate(llval) => llargs.push(llval),
FatPtr(base, extra) => {
// The two words in a fat ptr are passed separately
llargs.push(base);
llargs.push(extra);
}
}
}
let debugloc = DebugLoc::None;
let attrs = attributes::from_fn_type(bcx.ccx(), callee.ty);
match (*targets, base::avoid_invoke(bcx)) {
(mir::CallTargets::WithCleanup((ret, cleanup)), false) => {
let cleanup = self.bcx(cleanup);
let landingpad = self.make_landing_pad(cleanup);
build::Invoke(bcx,
callee.immediate(),
&llargs[..],
self.llblock(ret),
landingpad.llbb,
Some(attrs),
debugloc);
if !return_outptr && !common::type_is_zero_size(bcx.ccx(), ret_ty) {
// FIXME: What do we do here?
unimplemented!()
}
},
(t, _) => {
let ret = match t {
mir::CallTargets::Return(ret) => ret,
mir::CallTargets::WithCleanup((ret, _)) => {
// make a landing pad regardless (so it sets the personality slot).
let block = self.unreachable_block();
self.make_landing_pad(block);
ret
}
};
let llret = build::Call(bcx,
callee.immediate(),
&llargs[..],
Some(attrs),
debugloc);
if !return_outptr && !common::type_is_zero_size(bcx.ccx(), ret_ty) {
base::store_ty(bcx, llret, call_dest.llval, ret_ty);
}
build::Br(bcx, self.llblock(ret), debugloc)
}
}
},
mir::Terminator::DivergingCall { ref func, ref args, ref cleanup } => {
let callee = self.trans_operand(bcx, func);
let mut llargs = Vec::with_capacity(args.len());
for arg in args {
match self.trans_operand(bcx, arg).val {
Ref(llval) | Immediate(llval) => llargs.push(llval),
@@ -179,23 +126,73 @@ pub fn trans_block(&mut self, bb: mir::BasicBlock) {
}
}
}
let debugloc = DebugLoc::None;
let attrs = attributes::from_fn_type(bcx.ccx(), callee.ty);
match (*cleanup, base::avoid_invoke(bcx)) {
(Some(cleanup), false) => {
// Many different ways to call a function are handled here.
match (base::avoid_invoke(bcx), kind) {
// The two cases below are the only ones to use LLVM's `invoke`.
(false, &mir::CallKind::DivergingCleanup(cleanup)) => {
let cleanup = self.bcx(cleanup);
let landingpad = self.make_landing_pad(cleanup);
let unreachable = self.unreachable_block();
build::Invoke(bcx,
callee.immediate(),
&llargs[..],
unreachable.llbb,
self.unreachable_block().llbb,
landingpad.llbb,
Some(attrs),
debugloc);
}
(t, _) => {
if t.is_some() {
},
(false, &mir::CallKind::ConvergingCleanup { ref targets, .. }) => {
let cleanup = self.bcx(targets.1);
let landingpad = self.make_landing_pad(cleanup);
let (target, postinvoke) = if must_copy_dest {
(bcx.fcx.new_block(false, "", None), Some(self.bcx(targets.0)))
} else {
(self.bcx(targets.0), None)
};
let invokeret = build::Invoke(bcx,
callee.immediate(),
&llargs[..],
target.llbb,
landingpad.llbb,
Some(attrs),
debugloc);
if let Some(postinvoketarget) = postinvoke {
// We translate the copy into a temporary block. The temporary block is
// necessary because the current block has already been terminated (by
// `invoke`) and we cannot really translate into the target block
// because:
// * The target block may have more than a single predecessor;
// * Some LLVM insns cannot have a preceding store insn (phi,
// cleanuppad), and adding/prepending the store now may render
// those other instructions invalid.
//
// NB: This approach may still break some LLVM code. For example, if the
// target block starts with a `phi` (which may only match on immediate
// predecessors), it cannot know about this temporary block, thus
// resulting in invalid code:
//
// this:
// …
// %0 = …
// %1 = invoke to label %temp …
// temp:
// store ty %1, ty* %dest
// br label %actualtargetblock
// actualtargetblock: ; preds: %temp, …
// phi … [%this, …], [%0, …] ; ERROR: phi may only match on
// ; immediate predecessors
let (ret_dest, ret_ty) = ret_dest_ty
.expect("return destination and type not set");
base::store_ty(target, invokeret, ret_dest.llval, ret_ty);
build::Br(target, postinvoketarget.llbb, debugloc);
}
},
// Everything else uses the regular `Call`, but we have to be careful to
// generate landing pads for later, even if we do not use them.
// FIXME: maybe just change Resume to not panic in that case?
(_, k@&mir::CallKind::DivergingCleanup(_)) |
(_, k@&mir::CallKind::Diverging) => {
if let mir::CallKind::DivergingCleanup(_) = *k {
// make a landing pad regardless, so it sets the personality slot.
let block = self.unreachable_block();
self.make_landing_pad(block);
@@ -203,6 +200,30 @@ pub fn trans_block(&mut self, bb: mir::BasicBlock) {
build::Call(bcx, callee.immediate(), &llargs[..], Some(attrs), debugloc);
build::Unreachable(bcx);
}
(_, k@&mir::CallKind::ConvergingCleanup { .. }) |
(_, k@&mir::CallKind::Converging { .. }) => {
let ret = match *k {
mir::CallKind::Converging { target, .. } => target,
mir::CallKind::ConvergingCleanup { targets, .. } => {
// make a landing pad regardless (so it sets the personality slot).
let block = self.unreachable_block();
self.make_landing_pad(block);
targets.0
},
_ => unreachable!()
};
let llret = build::Call(bcx,
callee.immediate(),
&llargs[..],
Some(attrs),
debugloc);
if must_copy_dest {
let (ret_dest, ret_ty) = ret_dest_ty
.expect("return destination and type not set");
base::store_ty(bcx, llret, ret_dest.llval, ret_ty);
}
build::Br(bcx, self.llblock(ret), debugloc)
}
}
}
}
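
The last two hunks boil down to a small decision: emit an LLVM invoke only when the call has a cleanup block and invokes are not being avoided, and copy the scalar return value into the destination afterwards only for converging calls whose result is neither returned through an out-pointer nor zero-sized. A hedged sketch of that decision (illustrative names, not the trans code itself):

// Hedged summary of the dispatch above; the block plumbing, landing pads and
// the out-pointer argument are left out. Names are illustrative only.
#[derive(Debug, PartialEq)]
enum Strategy {
    Invoke { copy_dest_after: bool }, // LLVM `invoke` with an unwind edge to a landing pad
    Call { copy_dest_after: bool },   // plain LLVM `call`
}

fn strategy(avoid_invoke: bool,
            has_cleanup: bool,
            converging: bool,
            must_copy_dest: bool) -> Strategy {
    // Only calls with a cleanup block are lowered to `invoke`, and only when
    // invokes are not being avoided; a converging call whose scalar result must
    // be written back copies it after the call/invoke returns.
    let copy = converging && must_copy_dest;
    if !avoid_invoke && has_cleanup {
        Strategy::Invoke { copy_dest_after: copy }
    } else {
        Strategy::Call { copy_dest_after: copy }
    }
}

fn main() {
    // A converging call with cleanup and a by-value return lowers to `invoke`,
    // followed by a store of the result in a fresh intermediate block.
    assert_eq!(strategy(false, true, true, true),
               Strategy::Invoke { copy_dest_after: true });
}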