{-# LANGUAGE DeriveFunctor, GeneralizedNewtypeDeriving, TupleSections, QuasiQuotes, ScopedTypeVariables #-} module CodeGen(codegen) where import Control.Monad import Control.Monad.Except import Control.Monad.State.Strict import Data.Char import Data.List import Data.Maybe import Data.Map.Strict ((!)) import qualified Data.Map.Strict as Map import Debug.Trace import AST import Defs import Intermediate import qualified LifetimeAnalysis as LA import qualified LifetimeAnalysisOld as LAO import RegAlloc import ReplaceRefs import Utils import X64 (Register(..), CondCode(..), XRef(..), Ins(..), xref) import qualified X64 as X64 import X64Optimiser data CGState = CGState { nextId :: Int, regsToRestore :: [Register], spillSize :: Size, x64Result :: X64.Asm } newtype CGMonad a = CGMonad { unCGMonad :: StateT CGState (Except String) a } deriving (Functor, Applicative, Monad, MonadState CGState, MonadError String) initState :: CGState initState = CGState {nextId = 1, regsToRestore = [], spillSize = 0, x64Result = X64.Asm []} execCGMonad :: CGMonad a -> Error X64.Asm execCGMonad = fmap x64Result . runExcept . flip execStateT initState . unCGMonad addIns :: X64.Ins -> CGMonad () addIns ins = modify $ \s -> let (X64.Asm funcs) = x64Result s (pre, (lab, inss)) = (init funcs, last funcs) in s {x64Result = X64.Asm $ pre ++ [(lab, inss ++ [ins])]} newLabel :: String -> CGMonad () newLabel lab = modify $ \s -> let (X64.Asm funcs) = x64Result s in s {x64Result = X64.Asm $ funcs ++ [(lab, [])]} -- genId :: CGMonad Int -- genId = state $ \s -> (nextId s, s {nextId = nextId s + 1}) setRegsToRestore :: [Register] -> CGMonad () setRegsToRestore regs = modify $ \s -> s {regsToRestore = regs} setSpillSize :: Size -> CGMonad () setSpillSize sz = modify $ \s -> s {spillSize = sz} codegen :: IRProgram -> Error String codegen (IRProgram vars funcs) = do x64 <- execCGMonad $ mapM_ codegenFunc funcs -- traceShowM x64 X64.verify x64 -- traceM $ X64.stringify x64 varcg <- liftM unlines $ mapM codegenVar vars x64opt <- x64Optimise x64 return $ "extern putc, putint, getc, exit, _builtin_malloc, _builtin_outofbounds\n" ++ "global main\ndefault rel\nsection .text\n" ++ X64.stringify x64opt ++ "\nsection .data\n" ++ varcg codegenVar :: DVar -> Error String codegenVar (DVar TInt n (ELit (LInt i) _)) = return $ n ++ ": dq " ++ show i codegenVar (DVar TChar n (ELit (LChar c) _)) = return $ n ++ ": db " ++ show (ord c) codegenVar (DVar (TArr TChar _) n (ELit (LStr s) _)) = return $ "$" ++ n ++ ":\n" ++ "\tdq " ++ show (length s + 1) ++ "\n" ++ "\tdb " ++ (intercalate ", " $ map show $ map ord s ++ [0]) codegenVar (DVar t@(TStruct _) n e@(ELit (LStruct _) _)) = (("$" ++ n ++ ":\n") ++) <$> genDataFor t e where genDataFor :: Type -> Expression -> Error String genDataFor TInt (ELit (LInt i) _) = return $ "dq " ++ show i genDataFor TChar (ELit (LChar c) _) = return $ "db " ++ show (ord c) genDataFor (TStruct ms') (ELit (LStruct exprtups') _) = liftM (intercalate "\n" . map ('\t' :) . concatMap lines) $ forM (zip ms' exprtups') $ \((typ, name), (name2, expr)) -> if name /= name2 then Left $ "Invalid struct literal member order somewhere " ++ "in a global variable declaration" else genDataFor typ expr genDataFor _ _ = Left "Unsupported expression in struct literal in global variable declaration" codegenVar _ = Left "Unsupported global variable declaration" type AllocMap = Map.Map Ref XRef codegenFunc :: IRFunc -> CGMonad () codegenFunc (IRFunc _ name al bbs sid) = do let temprefsperbb = collectTempRefs bbs alltemprefs = uniq $ sort $ map LA.unAccess $ concat $ concat $ map fst temprefsperbb lifespans = map (\r -> (findLifeSpan r, r)) alltemprefs where findLifeSpan ref = let la = LA.lifetimeAnalysis ref temprefsperbb lao = LAO.lifetimeAnalysis ref temprefsperbb in (if la == lao then id else traceShow (ref,bbs,la,lao)) $ fromJust $ findFirstLast id $ concat $ LA.lifetimeAnalysis ref temprefsperbb aliascandidates = findAliasCandidates bbs :: [(Ref, Ref)] gpRegs = [R8, R9, R10, R11, R12, R13, R14, R15, RDI, RSI] -- gpRegs = [R8] allocation = regalloc lifespans gpRegs aliascandidates :: Map.Map Ref (Allocation Register) spillrefs = map fst $ filter (isAllocMem . snd) $ Map.toList allocation (spilloffsets, spillsz) = initLast $ scanl (+) 0 $ map refSize spillrefs spilloffsetmap = Map.fromList $ zip spillrefs spilloffsets structrefs = filter isStructTemp $ findAllRefsBBList bbs structspace = sum $ map refSize structrefs usedregs = uniq $ sort $ catMaybes $ flip map (Map.toList allocation) $ \(_, a) -> case a of AllocReg reg -> Just reg AllocMem -> Nothing -- traceShowM temprefsperbb -- traceShowM lifespans -- traceM $ "ALLOCATION: " ++ show allocation let nsaves = length usedregs framesize' = 8 {- ret addr -} + 8 {- rbp -} + 8 * nsaves + fromIntegral structspace + fromIntegral spillsz alignoff = roundUp framesize' 16 - framesize' framesize = framesize' + alignoff allocationXref = flip Map.mapWithKey allocation $ \ref alloc -> case alloc of AllocReg reg -> XReg (fromIntegral $ refSize ref) reg AllocMem -> XMem (fromIntegral $ refSize ref) (Just RSP) (0, RAX) Nothing (fromIntegral $ spilloffsetmap ! ref) allocmap' = fst $ foldl arginserter (allocationXref, 0) al where arginserter (m, off) (t, n) = (Map.insert (Argument (sizeof t) n) (XMem (fromIntegral $ sizeof t) (Just RSP) (0, RAX) Nothing (fromIntegral framesize + off)) m, off + fromIntegral (sizeof t)) allocmap = fst $ foldl structinserter (allocmap', spillsz) structrefs where structinserter (m, off) temp@(StructTemp sz _) = (Map.insert temp (XMem (fromIntegral sz) (Just RSP) (0, RAX) Nothing (fromIntegral off)) m, off + sz) structinserter _ _ = undefined -- traceM $ "nsaves = " ++ show nsaves -- traceM $ "structspace = " ++ show structspace -- traceM $ "spillsz = " ++ show spillsz -- traceM $ "framesize' = " ++ show framesize' -- traceM $ "alignoff = " ++ show alignoff -- traceM $ "framesize = " ++ show framesize newLabel name addIns $ PUSH (xref $ XReg 8 RBP) addIns $ MOV (xref $ XReg 8 RBP) (xref $ XReg 8 RSP) forM_ usedregs $ \reg -> addIns $ PUSH (xref $ XReg 8 reg) let stackspill = spillsz + structspace + fromIntegral alignoff when (stackspill /= 0) $ addIns $ SUB (xref $ XReg 8 RSP) (xref $ XImm $ fromIntegral stackspill) setRegsToRestore usedregs setSpillSize stackspill let ([startbb], rest) = partition (\(BB i _ _) -> i == sid) bbs codegenBB allocmap startbb mapM_ (codegenBB allocmap) rest findAliasCandidates :: [BB] -> [(Ref, Ref)] findAliasCandidates = concatMap (\(BB _ inss _) -> concatMap goI inss) where goI :: IRIns -> [(Ref, Ref)] goI (IMov d s) = [(d, s)] goI (IAri at d s1 s2) | isCommutative at = [(d, s1), (d, s2)] | otherwise = [(d, s1)] goI _ = [] findFirstLast :: forall a. (a -> Bool) -> [a] -> Maybe (Int, Int) findFirstLast f l = go Nothing 0 l where go :: Maybe (Int, Int) -> Int -> [a] -> Maybe (Int, Int) go mr _ [] = mr go mr i (x:xs) | f x = go (note mr i) (i+1) xs | otherwise = go mr (i+1) xs note :: Maybe (Int, Int) -> Int -> Maybe (Int, Int) note Nothing i = Just (i, i) note (Just (a, _)) i = Just (a, i) isAllocMem :: Allocation a -> Bool isAllocMem AllocMem = True isAllocMem _ = False initLast :: [a] -> ([a], a) initLast [] = undefined initLast [x] = ([], x) initLast (x:xs) = let (acc, l) = initLast xs in (x : acc, l) codegenBB :: AllocMap -> BB -> CGMonad () codegenBB allocmap (BB bid inss term) = do newLabel $ ".bb" ++ show bid mapM_ (codegenIns allocmap) inss codegenTerm allocmap term mkxref :: Ref -> AllocMap -> XRef mkxref (Constant _ v) _ = XImm v mkxref (Global sz n) _ = XMem (fromIntegral sz) Nothing (0, RAX) (Just n) 0 mkxref r m = fromJust $ Map.lookup r m mkmov :: XRef -> XRef -> X64.Ins mkmov a@(XReg _ _) b@(XReg _ _) = MOV (xref a) (xref b) mkmov a@(XReg _ _) b@(XMem _ _ _ _ _) = MOV (xref a) (xref b) mkmov a@(XReg _ _) b@(XImm _) = MOVi (xref a) (xref b) mkmov a@(XMem _ _ _ _ _) b@(XReg _ _) = MOV (xref a) (xref b) mkmov a@(XMem _ _ _ _ _) b@(XImm v) | v < 2 ^ (32 :: Int) = MOV (xref a) (xref b) mkmov a b = CALL $ "Invalid mkmov: " ++ show a ++ "; " ++ show b -- mkmov a b = error $ "Invalid mkmov: " ++ show a ++ "; " ++ show b emitmemcpy :: XRef -> XRef -> CGMonad () emitmemcpy dst@(XMem sz _ _ _ _) src@(XMem sz2 _ _ _ _) | sz /= sz2 = error $ "Inconsistent sizes in emitmemcpy: " ++ show dst ++ "; " ++ show src | sz `elem` [1, 2, 4, 8] = do addIns $ mkmov (XReg sz RAX) src addIns $ mkmov dst (XReg sz RAX) | sz > 8 = do addIns $ mkmov (XReg 8 RAX) (X64.xrefSetSize 8 src) addIns $ mkmov (X64.xrefSetSize 8 dst) (XReg 8 RAX) emitmemcpy (X64.offsetXMem 8 $ X64.xrefSetSize (sz - 8) dst) (X64.offsetXMem 8 $ X64.xrefSetSize (sz - 8) src) | otherwise = error $ "Invalid size in emitmemcpy: " ++ show dst ++ "; " ++ show src emitmemcpy _ _ = undefined mkcmp :: XRef -> XRef -> X64.Ins mkcmp a b@(XImm _) = CMPi (xref a) (xref b) mkcmp a b = CMP (xref a) (xref b) codegenIns :: AllocMap -> IRIns -> CGMonad () codegenIns m (IMov d s) | dm == sm = return () | X64.isXMem dm && X64.isXMem sm = do emitmemcpy dm sm | otherwise = addIns $ mkmov dm sm where dm = mkxref d m sm = mkxref s m codegenIns m (ILea d n) | X64.isXMem dm = do addIns $ LEA (xref $ XReg (fromIntegral $ refSize d) RAX) (xref sm) addIns $ mkmov dm (XReg (fromIntegral $ refSize d) RAX) | otherwise = addIns $ LEA (xref dm) (xref sm) where dm = mkxref d m sm = mkxref (Global 8 n) m codegenIns m (IStore d s) = do sourcexref <- if X64.isXMem sm then do addIns $ mkmov (XReg sz RDX) sm return $ XReg sz RDX else return sm destxref <- case dm of XReg _ r -> return $ XMem sz (Just r) (0, RAX) Nothing 0 x@(XMem xsz _ _ _ _) -> do addIns $ mkmov (XReg xsz RAX) x return $ XMem sz (Just RAX) (0, RAX) Nothing 0 XImm _ -> throwError $ "IStore to [immediate] not expected" addIns $ mkmov destxref sourcexref where dm = mkxref d m sm = mkxref s m sz = fromIntegral $ refSize s codegenIns m (ILoad d s) = do sourcexref <- case sm of XReg _ r -> return $ XMem sz (Just r) (0, RAX) Nothing 0 x@(XMem xsz _ _ _ _) -> do addIns $ mkmov (XReg xsz RAX) x return $ XMem sz (Just RAX) (0, RAX) Nothing 0 XImm _ -> throwError $ "ILoad from [immediate] not expected" if X64.isXMem dm then do addIns $ mkmov (XReg sz RAX) sourcexref addIns $ mkmov dm (XReg sz RAX) else do addIns $ mkmov dm sourcexref where dm = mkxref d m sm = mkxref s m sz = fromIntegral $ refSize d codegenIns m (ISet d off s) | X64.isXMem sm = do addIns $ mkmov (XReg sz RAX) sm addIns $ mkmov dm (XReg sz RAX) | otherwise = do addIns $ mkmov dm sm where dm = X64.xrefSetSize sz $ X64.offsetXMem (fromIntegral off) $ mkxref d m sm = mkxref s m sz = fromIntegral $ refSize s codegenIns m (IGet d s off) | X64.isXMem dm = do addIns $ mkmov (XReg sz RAX) sm addIns $ mkmov dm (XReg sz RAX) | otherwise = do addIns $ mkmov dm sm where dm = mkxref d m sm = X64.xrefSetSize sz $ X64.offsetXMem (fromIntegral off) $ mkxref s m sz = fromIntegral $ refSize d codegenIns m (IAri AMul d s1 s2) | X64.isXImm s1m && X64.isXImm s2m = undefined | X64.isXImm s1m = codegenIns m (IAri AMul d s2 s1) | dm == s2m = if dm == s1m then if X64.isXMem dm then do addIns $ mkmov (XReg sz RAX) dm addIns $ IMUL (xref $ XReg sz RAX) (xref $ XReg sz RAX) addIns $ mkmov dm (XReg sz RAX) else addIns $ IMUL (xref dm) (xref dm) else codegenIns m (IAri AMul d s2 s1) | otherwise = do -- regmem dm, regmem s1m, regmemimm s2m if X64.isXImm s2m then if X64.isXMem dm then do -- mem dm, regmem s1m, imm s2m addIns $ IMUL3 (xref $ XReg sz RAX) (xref s1m) (xref s2m) addIns $ mkmov dm (XReg sz RAX) else do -- reg dm, regmem s1m, imm s2m addIns $ IMUL3 (xref dm) (xref s1m) (xref s2m) else if X64.isXMem dm then do -- mem dm, regmem s1m, regmem s2m addIns $ mkmov (XReg sz RAX) s1m addIns $ IMUL (xref $ XReg sz RAX) (xref s2m) addIns $ mkmov dm (XReg sz RAX) else if dm == s1m then do -- reg dm = reg s1m, regmem s2m addIns $ IMUL (xref dm) (xref s2m) else do -- reg dm, regmem s1m, regmem s2m addIns $ mkmov dm s1m addIns $ IMUL (xref dm) (xref s2m) where dm = mkxref d m s1m = mkxref s1 m s2m = mkxref s2 m sz = fromIntegral $ refSize d codegenIns m (IAri ADiv d s1 s2) = do let sz = fromIntegral $ refSize d addIns $ XOR (xref $ XReg 4 RDX) (xref $ XReg 4 RDX) addIns $ mkmov (XReg sz RAX) (mkxref s1 m) arg <- if X64.isXImm s2m then do addIns $ mkmov (XReg sz RBX) s2m return (XReg sz RBX) else return s2m addIns $ IDIVDA (xref arg) addIns $ mkmov (mkxref d m) (XReg sz RAX) where s2m = mkxref s2 m codegenIns m (IAri AMod d s1 s2) = do let sz = fromIntegral $ refSize d addIns $ XOR (xref $ XReg 4 RDX) (xref $ XReg 4 RDX) addIns $ mkmov (XReg sz RAX) (mkxref s1 m) arg <- if X64.isXImm s2m then do addIns $ mkmov (XReg sz RBX) s2m return (XReg sz RBX) else return s2m addIns $ IDIVDA (xref arg) addIns $ mkmov (mkxref d m) (XReg sz RDX) where s2m = mkxref s2 m codegenIns m (IAri at d s1 s2) = case arithTypeToCondCode at of Just cc -> do arg2 <- if X64.isXMem s1m && X64.isXMem s2m then do addIns $ mkmov (XReg (fromIntegral $ refSize s2) RAX) s2m return $ XReg (fromIntegral $ refSize s2) RAX else return s2m addIns $ mkcmp s1m arg2 addIns $ SETCC cc (xref $ X64.xrefSetSize 1 dm) addIns $ AND (xref $ X64.xrefSetSize 4 dm) (xref $ XImm 0xff) Nothing -> do (s1', s1m', s2', s2m') <- if dm == s2m then if dm == s1m then return (s1, s1m, s2, s2m) else if isCommutative at then return (s2, s2m, s1, s1m) else throwError "Noncommutative op with d==s2/=s1" else return (s1, s1m, s2, s2m) arg2 <- if X64.isXMem s1m' && X64.isXMem s2m' then do addIns $ mkmov (XReg (fromIntegral $ refSize s2') RAX) s2m' return $ XReg (fromIntegral $ refSize s2') RAX else return s2m' when (dm /= s1m') $ if X64.isXMem dm && X64.isXMem s1m' then do addIns $ mkmov (XReg (fromIntegral $ refSize s1') RDX) s1m' addIns $ mkmov dm (XReg (fromIntegral $ refSize s1') RDX) else do addIns $ mkmov dm s1m' addIns $ fromJust (arithTypeToIns at) dm arg2 where dm = mkxref d m s1m = mkxref s1 m s2m = mkxref s2 m codegenIns m (ICall n rs) = do let sizes = map (flip roundUp 8 . refSize) rs offsets = init $ scanl (+) 0 $ reverse sizes totalsize = sum sizes alignment = roundUp totalsize 16 - totalsize forM_ (zip rs offsets) $ \(r, off) -> let sz = fromIntegral $ refSize r src = mkxref r m dst = XMem sz (Just RSP) (0, RAX) Nothing (fromIntegral $ off - alignment - totalsize) in if X64.isXMem src then do -- traceM $ "call stuff with dst = " ++ show dst ++ ", src = " ++ show src emitmemcpy dst src else addIns $ mkmov dst src when (alignment /= 0) $ addIns $ SUB (xref $ XReg 8 RSP) (xref $ XImm $ fromIntegral alignment) when (length rs > 0) $ addIns $ SUB (xref $ XReg 8 RSP) (xref $ XImm $ fromIntegral totalsize) addIns $ CALL n when (length rs > 0) $ addIns $ ADD (xref $ XReg 8 RSP) (xref $ XImm $ fromIntegral totalsize) when (alignment /= 0) $ addIns $ ADD (xref $ XReg 8 RSP) (xref $ XImm $ fromIntegral alignment) codegenIns m (ICallr d n rs) = do codegenIns m (ICall n rs) addIns $ mkmov (mkxref d m) (XReg (fromIntegral $ refSize d) RAX) codegenIns m fullins@(IResize d s) = do let dsz = fromIntegral $ refSize d ssz = fromIntegral $ refSize s dm = mkxref d m sm = mkxref s m when (X64.isXImm sm) $ throwError $ "Resized value is an immediate in " ++ show fullins ++ "; (dm = " ++ show dm ++ "; sm = " ++ show sm ++ ")" case compare dsz ssz of EQ -> codegenIns m (IMov d s) GT -> if X64.isXMem dm then do addIns $ MOVSX (xref $ XReg dsz RAX) (xref sm) addIns $ mkmov dm (XReg dsz RAX) else do addIns $ MOVSX (xref dm) (xref sm) LT -> if X64.isXMem dm && X64.isXMem sm then do addIns $ mkmov (XReg dsz RAX) (X64.xrefSetSize dsz sm) addIns $ mkmov dm (XReg dsz RAX) else do addIns $ mkmov dm (X64.xrefSetSize dsz sm) codegenIns _ IDebugger = do addIns $ INT3 codegenIns _ INop = return () arithTypeToCondCode :: ArithType -> Maybe X64.CondCode arithTypeToCondCode AEq = Just CCE arithTypeToCondCode ANeq = Just CCNE arithTypeToCondCode AGt = Just CCG arithTypeToCondCode ALt = Just CCL arithTypeToCondCode AGeq = Just CCGE arithTypeToCondCode ALeq = Just CCLE arithTypeToCondCode _ = Nothing cmpTypeToCondCode :: CmpType -> X64.CondCode cmpTypeToCondCode CEq = CCE cmpTypeToCondCode CNeq = CCNE cmpTypeToCondCode CGt = CCG cmpTypeToCondCode CLt = CCL cmpTypeToCondCode CGeq = CCGE cmpTypeToCondCode CLeq = CCLE cmpTypeToCondCode CUGt = CCA cmpTypeToCondCode CULt = CCB cmpTypeToCondCode CUGeq = CCAE cmpTypeToCondCode CULeq = CCBE arithTypeToIns :: ArithType -> Maybe (XRef -> XRef -> X64.Ins) arithTypeToIns AAdd = Just $ \a b -> ADD (xref a) (xref b) arithTypeToIns ASub = Just $ \a b -> SUB (xref a) (xref b) arithTypeToIns AAnd = Just $ \a b -> AND (xref a) (xref b) arithTypeToIns AOr = Just $ \a b -> OR (xref a) (xref b) arithTypeToIns AXor = Just $ \a b -> XOR (xref a) (xref b) arithTypeToIns _ = Nothing codegenTerm :: AllocMap -> IRTerm -> CGMonad () codegenTerm m (IJcc ct a b t e) = do if X64.isXMem am && X64.isXMem bm then do addIns $ mkmov (XReg (fromIntegral $ refSize b) RAX) bm addIns $ mkcmp am (XReg (fromIntegral $ refSize b) RAX) else do addIns $ mkcmp am bm addIns $ JCC (cmpTypeToCondCode ct) (".bb" ++ show t) addIns $ JMP (".bb" ++ show e) where am = mkxref a m bm = mkxref b m codegenTerm _ (IJmp i) = addIns $ JMP (".bb" ++ show i) codegenTerm _ IRet = do spillsz <- gets spillSize when (spillsz /= 0) $ addIns $ ADD (xref $ XReg 8 RSP) (xref $ XImm $ fromIntegral spillsz) usedregs <- gets regsToRestore forM_ (reverse usedregs) $ \reg -> addIns $ POP (xref $ XReg 8 reg) addIns $ mkmov (XReg 8 RSP) (XReg 8 RBP) addIns $ POP (xref $ XReg 8 RBP) addIns RET codegenTerm m (IRetr r) = do addIns $ mkmov (XReg (fromIntegral $ refSize r) RAX) (mkxref r m) spillsz <- gets spillSize when (spillsz /= 0) $ addIns $ ADD (xref $ XReg 8 RSP) (xref $ XImm $ fromIntegral spillsz) usedregs <- gets regsToRestore forM_ (reverse usedregs) $ \reg -> addIns $ POP (xref $ XReg 8 reg) addIns $ mkmov (XReg 8 RSP) (XReg 8 RBP) addIns $ POP (xref $ XReg 8 RBP) addIns RET codegenTerm _ IUnreachable = return () codegenTerm _ ITermNone = undefined collectTempRefs :: [BB] -> [([[LA.Access Ref]], [Int])] collectTempRefs bbs = flip map bbs $ \(BB _ inss term) -> let refs = map (filter (isTemp . LA.unAccess)) $ concatMap listRefsIns inss ++ listRefsTerm term nexts = map (\i -> fromJust $ findIndex (\(BB j _ _) -> j == i) bbs) $ listNextIds term in (refs, nexts) where listRefsIns :: IRIns -> [[LA.Access Ref]] listRefsIns (IMov a b) = [[LA.Read b], [LA.Write a]] listRefsIns (ILea a _) = [[LA.Write a]] listRefsIns (IStore a b) = [[LA.Read a, LA.Read b]] listRefsIns (ILoad a b) = [[LA.Read b], [LA.Write a]] listRefsIns (ISet a _ b) = [[LA.Read b, LA.Write a]] listRefsIns (IGet a b _) = [[LA.Read b, LA.Write a]] listRefsIns (IAri at a b c) -- if not commutative, we don't want to have to xchg the operands | isCommutative at = [[LA.Read b, LA.Read c], [LA.Write a]] | otherwise = [[LA.Read b], [LA.Read c, LA.Write a]] listRefsIns (ICall _ l) = [map LA.Read l] listRefsIns (ICallr a _ l) = [LA.Write a : map LA.Read l] listRefsIns (IResize a b) = [[LA.Read b], [LA.Write a]] listRefsIns IDebugger = [[]] listRefsIns INop = [[]] listRefsTerm :: IRTerm -> [[LA.Access Ref]] listRefsTerm (IJcc _ a b _ _) = [[LA.Read a, LA.Read b]] listRefsTerm (IJmp _) = [[]] listRefsTerm IRet = [[]] listRefsTerm (IRetr a) = [[LA.Read a]] listRefsTerm IUnreachable = [] listRefsTerm ITermNone = undefined listNextIds :: IRTerm -> [Id] listNextIds (IJcc _ _ _ a b) = [a, b] listNextIds (IJmp a) = [a] listNextIds IRet = [] listNextIds (IRetr _) = [] listNextIds IUnreachable = [] listNextIds ITermNone = undefined isTemp :: Ref -> Bool isTemp (Temp _ _) = True isTemp _ = False