%
% (c) The University of Glasgow 2006
% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
%
% Code generation for tail calls.
\begin{code}
module CgTailCall (
cgTailCall, performTailCall,
performReturn, performPrimReturn,
returnUnboxedTuple, ccallReturnUnboxedTuple,
pushUnboxedTuple,
tailCallPrimOp,
tailCallPrimCall,
pushReturnAddress
) where
#include "HsVersions.h"
import CgMonad
import CgBindery
import CgInfoTbls
import CgCallConv
import CgStackery
import CgHeapery
import CgUtils
import CgTicky
import ClosureInfo
import SMRep
import OldCmm
import OldCmmUtils
import CLabel
import Type
import Id
import StgSyn
import PrimOp
import Outputable
import StaticFlags
import Control.Monad
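
-----------------------------------------------------------------------------
-- Tail calls

-- The code we generate for a tail call:
--
--    * Put the arguments in the top locations of the stack.
--    * Make R1 (node) point to the function closure if necessary.
--    * Perform the jump.
--
-- Things to be careful about:
--
--    * Don't overwrite stack locations before you have finished with
--      them (remember you need the function and the as-yet-unmoved
--      arguments).
--    * Preferably, generate no code to replace x by x on the stack (a
--      common situation in tail-recursion).
--    * Adjust the stack high-water mark appropriately.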
cgTailCall :: Id -> [StgArg] -> Code
cgTailCall fun args
= do { fun_info <- getCgIdInfo fun
; if isUnLiftedType (idType fun)
then    -- Primitive return: an unlifted Id is a value to return,
        -- not a function to call, so there can be no arguments
ASSERT( null args )
do { fun_amode <- idInfoToAmode fun_info
; performPrimReturn (cgIdInfoArgRep fun_info) fun_amode }
else    -- Normal case: fun is boxed
do { arg_amodes <- getArgAmodes args
; performTailCall fun_info arg_amodes noStmts }
}
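
-- -----------------------------------------------------------------------------
-- The guts of a tail call
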
performTailCall
        :: CgIdInfo             -- The function
        -> [(CgRep,CmmExpr)]    -- Tail-call arguments
        -> CmmStmts             -- Pending simultaneous assignments
                                -- *** GUARANTEED to contain only stack assignments
        -> Code
performTailCall fun_info arg_amodes pending_assts
| Just join_sp <- maybeLetNoEscape fun_info
=
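           -- A let-no-escape is slightly different: we arrange the
           -- stack arguments into pointers and non-pointers, to make
           -- the heap check accurate.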
do { (final_sp, arg_assts) <- pushUnboxedTuple join_sp arg_amodes
; emitSimultaneously (pending_assts `plusStmts` arg_assts)
; let lbl = enterReturnPtLabel (idUnique (cgIdInfoId fun_info))
; doFinalJump final_sp True (jumpToLbl lbl) }
| otherwise
= do { fun_amode <- idInfoToAmode fun_info
; let assignSt = CmmAssign nodeReg fun_amode
node_asst = oneStmt assignSt
opt_node_asst | nodeMustPointToIt lf_info = node_asst
| otherwise = noStmts
; EndOfBlockInfo sp _ <- getEndOfBlockInfo
; dflags <- getDynFlags
; case (getCallMethod dflags fun_name fun_has_cafs lf_info (length arg_amodes)) of
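            -- Node must always point to things we enter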
EnterIt -> do
{ emitSimultaneously (node_asst `plusStmts` pending_assts)
; let target = entryCode (closureInfoPtr (CmmReg nodeReg))
enterClosure = stmtC (CmmJump target [])
jumpInstr = getEndOfBlockInfo >>=
maybeSwitchOnCons enterClosure
; doFinalJump sp False jumpInstr }
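            -- A function with no arguments: it is already in WHNF, so
            -- we can just return it.  As with any return, Node must
            -- point to it.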
ReturnIt -> do
{ emitSimultaneously (node_asst `plusStmts` pending_assts)
; doFinalJump sp False emitReturnInstr }
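            -- A real constructor: don't bother entering it, just do
            -- the right sort of return instead.  Again, Node must
            -- point to it.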
ReturnCon _ -> do
{ emitSimultaneously (node_asst `plusStmts` pending_assts)
; doFinalJump sp False emitReturnInstr }
JumpToIt lbl -> do
{ emitSimultaneously (opt_node_asst `plusStmts` pending_assts)
; doFinalJump sp False (jumpToLbl lbl) }
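            -- A slow function call, via the RTS apply routines.
            -- Node must definitely point to the thing.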
SlowCall -> do
{ when (not (null arg_amodes)) $ do
{ if (isKnownFun lf_info)
then tickyKnownCallTooFewArgs
else tickyUnknownCall
; tickySlowCallPat (map fst arg_amodes)
}
; let (apply_lbl, args, extra_args)
= constructSlowCall arg_amodes
; directCall sp apply_lbl args extra_args
(node_asst `plusStmts` pending_assts)
}
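            -- A direct function call (possibly with some left-over
            -- arguments to be passed on the stack).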
DirectEntry lbl arity -> do
{ if arity == length arg_amodes
then tickyKnownCallExact
else do tickyKnownCallExtraArgs
tickySlowCallPat (map fst (drop arity arg_amodes))
; let
(arity_args, extra_args) = splitAt arity arg_amodes
; directCall sp lbl arity_args extra_args
(opt_node_asst `plusStmts` pending_assts)
}
}
where
fun_id = cgIdInfoId fun_info
fun_name = idName fun_id
lf_info = cgIdInfoLF fun_info
fun_has_cafs = idCafInfo fun_id
untag_node = CmmAssign nodeReg (cmmUntag (CmmReg nodeReg))
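    -- If we are scrutinising a closure for a case expression and the
    -- pointer is tagged, we can jump straight to the alternatives
    -- instead of entering the closure.  We cannot take this shortcut
    -- when profiling: the closure must be entered to mark it as used
    -- for LDV profiling.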
maybeSwitchOnCons enterClosure eob
| EndOfBlockInfo _ (CaseAlts lbl _ _) <- eob,
not opt_SccProfilingOn
= do { is_constr <- newLabelC
; stmtC (CmmCondBranch (cmmIsTagged (CmmReg nodeReg))
is_constr)
; enterClosure
; labelC is_constr
; stmtC (CmmJump (entryCode $ CmmLit (CmmLabel lbl)) [])
}
| otherwise
= do { stmtC untag_node
; enterClosure
}
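
-- -----------------------------------------------------------------------------
-- directCall: assign as many arguments as possible to registers, push
-- the rest (plus any over-saturation arguments) onto the stack, and
-- jump to the function's entry code.
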
directCall :: VirtualSpOffset -> CLabel -> [(CgRep, CmmExpr)]
-> [(CgRep, CmmExpr)] -> CmmStmts
-> Code
directCall sp lbl args extra_args assts = do
let
(reg_arg_amodes, stk_args) = assignCallRegs args
slow_stk_args = slowArgs extra_args
reg_assts = assignToRegs reg_arg_amodes
(final_sp, stk_assts) <- mkStkAmodes sp (stk_args ++ slow_stk_args)
emitSimultaneously (reg_assts `plusStmts`
stk_assts `plusStmts`
assts)
doFinalJump final_sp False (jumpToLbl lbl)
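
-- -----------------------------------------------------------------------------
-- The final clean-up before we do a jump at the end of a basic block.
-- This code is shared by tail calls and returns.
--
-- Note that we do NOT push a return address for a let-no-escape: the
-- final tail call in the let-no-escape body does that instead.
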
doFinalJump :: VirtualSpOffset -> Bool -> Code -> Code
doFinalJump final_sp is_let_no_escape jump_code
= do {
adjustStackHW final_sp
; eob <- getEndOfBlockInfo
; whenC (not is_let_no_escape) (pushReturnAddress eob)
; adjustSpAndHp final_sp
; jump_code }
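
-- -----------------------------------------------------------------------------
-- A general return (just a special case of doFinalJump, above)
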
performReturn :: Code
-> Code
performReturn finish_code
= do { EndOfBlockInfo args_sp _sequel <- getEndOfBlockInfo
; doFinalJump args_sp False finish_code }
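
-- performPrimReturn: return a primitive value in its dedicated return
-- register, then execute the sequel.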
performPrimReturn :: CgRep -> CmmExpr
-> Code
performPrimReturn rep amode
= do { whenC (not (isVoidArg rep))
(stmtC (CmmAssign ret_reg amode))
; performReturn emitReturnInstr }
where
ret_reg = dataReturnConvPrim rep
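
-- -----------------------------------------------------------------------------
-- Returning unboxed tuples: the components go in registers or on the
-- stack as appropriate, and we jump to the sequel.
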
returnUnboxedTuple :: [(CgRep, CmmExpr)] -> Code
returnUnboxedTuple amodes
= do { (EndOfBlockInfo args_sp _sequel) <- getEndOfBlockInfo
; tickyUnboxedTupleReturn (length amodes)
; (final_sp, assts) <- pushUnboxedTuple args_sp amodes
; emitSimultaneously assts
; doFinalJump final_sp False emitReturnInstr }
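
-- pushUnboxedTuple: assign the components of an unboxed tuple to
-- return registers where possible; the rest go on the stack, pointers
-- before non-pointers.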
pushUnboxedTuple :: VirtualSpOffset             -- Sp at which to start pushing
                 -> [(CgRep, CmmExpr)]          -- amodes of the components
                 -> FCode (VirtualSpOffset,     -- final Sp
                           CmmStmts)            -- register and stack assignments
pushUnboxedTuple sp []
= return (sp, noStmts)
pushUnboxedTuple sp amodes
= do { let (reg_arg_amodes, stk_arg_amodes) = assignReturnRegs amodes
(ptr_args, nptr_args) = separateByPtrFollowness stk_arg_amodes
reg_arg_assts = assignToRegs reg_arg_amodes
; (ptr_sp, ptr_assts) <- mkStkAmodes sp ptr_args
; (final_sp, nptr_assts) <- mkStkAmodes ptr_sp nptr_args
; returnFC (final_sp,
reg_arg_assts `plusStmts`
ptr_assts `plusStmts` nptr_assts) }
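
-- -----------------------------------------------------------------------------
-- Returning unboxed tuples across a C call.  This is mainly to support
-- _ccall_GC_, where we want to do things in a slightly different order
-- to normal:
--
--              - push return address
--              - adjust stack pointer
--              - r = call(args...)
--              - assign regs for unboxed tuple (usually just R1 = r)
--              - return to continuation
--
-- The return address (i.e. stack frame) must be on the stack before
-- any pointer arguments, otherwise we'd run the risk of the GC not
-- finding the live pointers during the call.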
ccallReturnUnboxedTuple :: [(CgRep, CmmExpr)] -> Code -> Code
ccallReturnUnboxedTuple amodes before_jump
= do { eob@(EndOfBlockInfo args_sp _) <- getEndOfBlockInfo
; pushReturnAddress eob
; setEndOfBlockInfo (EndOfBlockInfo args_sp OnStack)
(do { adjustSpAndHp args_sp
; before_jump
; returnUnboxedTuple amodes })
}
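
-- -----------------------------------------------------------------------------
-- Calling an out-of-line primop or prim call
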
tailCallPrimOp :: PrimOp -> [StgArg] -> Code
tailCallPrimOp op
= tailCallPrim (mkRtsPrimOpLabel op)
tailCallPrimCall :: PrimCall -> [StgArg] -> Code
tailCallPrimCall primcall
= tailCallPrim (mkPrimCallLabel primcall)
tailCallPrim :: CLabel -> [StgArg] -> Code
tailCallPrim lbl args
= do {
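          -- A normal-looking tail call, except that *all* the
          -- arguments are passed in registers; hence the ASSERT below
          -- that no stack-resident arguments are left over.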
arg_amodes <- getArgAmodes args
; let (arg_regs, leftovers) = assignPrimOpCallRegs arg_amodes
jump_to_primop = jumpToLbl lbl
; ASSERT(null leftovers)
emitSimultaneously (assignToRegs arg_regs)
; EndOfBlockInfo args_sp _ <- getEndOfBlockInfo
; doFinalJump args_sp False jump_to_primop }
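
-- -----------------------------------------------------------------------------
-- Pushing a return address
--
-- If the sequel is a case continuation, store its return address at
-- the top of the stack; otherwise the return address is already in
-- place and there is nothing to do.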
pushReturnAddress :: EndOfBlockInfo -> Code
pushReturnAddress (EndOfBlockInfo args_sp (CaseAlts lbl _ _))
= do { sp_rel <- getSpRelOffset args_sp
; stmtC (CmmStore sp_rel (mkLblExpr lbl)) }
pushReturnAddress _ = nopC
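
-- jumpToLbl: tail-jump to a label, passing no arguments.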
jumpToLbl :: CLabel -> Code
jumpToLbl lbl = stmtC (CmmJump (CmmLit (CmmLabel lbl)) [])
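
-- assignToRegs: turn (expression, register) pairs into simultaneous
-- register assignments.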
assignToRegs :: [(CmmExpr, GlobalReg)] -> CmmStmts
assignToRegs reg_args
= mkStmts [ CmmAssign (CmmGlobal reg_id) expr
| (expr, reg_id) <- reg_args ]
\end{code}
%************************************************************************
%* *
\subsection[CgStackery-adjust]{Adjusting the stack pointers}
%* *
%************************************************************************
This function adjusts the stack and heap pointers just before a tail
call or return. The stack pointer is adjusted to its final position
(i.e. to point to the last argument for a tail call, or the activation
record for a return). The heap pointer may be moved backwards, in
cases where we overallocated at the beginning of the basic block (see
CgCase.lhs for discussion).
This function does {\em not} deal with high-water-mark adjustment.
That's done by the functions that allocate stack space.
\begin{code}
adjustSpAndHp :: VirtualSpOffset        -- New offset for the arg stack pointer
              -> Code
adjustSpAndHp newRealSp
= do {
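          -- Adjust the stack pointer; checkedAbsC generates no code
          -- when the real Sp is already at its new position.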
; new_sp <- getSpRelOffset newRealSp
; checkedAbsC (CmmAssign spReg new_sp)
; setRealSp newRealSp
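          -- Adjust the heap pointer: the virtual Hp may be less than
          -- the real Hp because the latter was advanced for the
          -- worst-case branch of the block, and we may now be in a
          -- better-case branch.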
; hp_usg <- getHpUsage
; let rHp = realHp hp_usg
vHp = virtHp hp_usg
; new_hp <- getHpRelOffset vHp
; checkedAbsC (CmmAssign hpReg new_hp)
        ; tickyAllocHeap (vHp - rHp)
; setRealHp vHp
}
\end{code}