aboutsummaryrefslogtreecommitdiff
path: root/CodeGen.hs
diff options
context:
space:
mode:
Diffstat (limited to 'CodeGen.hs')
-rw-r--r--CodeGen.hs388
1 files changed, 388 insertions, 0 deletions
diff --git a/CodeGen.hs b/CodeGen.hs
new file mode 100644
index 0000000..d4c9439
--- /dev/null
+++ b/CodeGen.hs
@@ -0,0 +1,388 @@
+{-# LANGUAGE DeriveFunctor, GeneralizedNewtypeDeriving, TupleSections, QuasiQuotes, ScopedTypeVariables #-}
+
+module CodeGen(codegen) where
+
+import Control.Monad
+import Control.Monad.Except
+import Control.Monad.State.Strict
+import Data.List
+import Data.Maybe
+import Data.Map.Strict ((!))
+import qualified Data.Map.Strict as Map
+import Text.Heredoc
+import Debug.Trace
+
+import AST
+import Defs
+import Intermediate
+import qualified LifetimeAnalysis as LA
+import RegAlloc
+import Utils
+import X64 (Register(..), CondCode(..), XRef(..), Ins(..), xref)
+import qualified X64 as X64
+
+
-- | State threaded through code generation.
data CGState = CGState
    { nextId :: Int,                 -- ^ Next fresh numeric id (only consumer is the disabled 'genId')
      regsToRestore :: [Register],   -- ^ Registers pushed in the current function's prologue; the epilogue pops them in reverse
      spillSize :: Size,             -- ^ Bytes subtracted from RSP for spilled values in the current function
      x64Result :: X64.Asm }         -- ^ Assembly emitted so far, grouped as (label, instructions) pairs
+
-- | The code-generation monad: 'CGState' threaded through a 'StateT',
-- with failure reported as a 'String' via 'MonadError'.
newtype CGMonad a = CGMonad { unCGMonad :: StateT CGState (Except String) a }
  deriving (Functor, Applicative, Monad, MonadState CGState, MonadError String)
+
-- | Initial state: no code emitted, nothing to restore, no spill space;
-- fresh ids start at 1.
initState :: CGState
initState = CGState {nextId = 1, regsToRestore = [], spillSize = 0, x64Result = X64.Asm []}
+
-- | Run a code-generation action from the initial state and return only the
-- accumulated assembly (or the error message).  'Error' is the Either-style
-- result type from Defs — see 'codegenVar' returning Right/Left.
execCGMonad :: CGMonad a -> Error X64.Asm
execCGMonad = fmap x64Result . runExcept . flip execStateT initState . unCGMonad
+
-- | Append one instruction to the most recently started label.
-- Precondition: 'newLabel' has been called at least once in this run;
-- otherwise 'init'/'last' are applied to an empty list and crash.
addIns :: X64.Ins -> CGMonad ()
addIns ins = modify $ \s ->
    let (X64.Asm funcs) = x64Result s
        (pre, (lab, inss)) = (init funcs, last funcs)
    in s {x64Result = X64.Asm $ pre ++ [(lab, inss ++ [ins])]}
+
-- | Start a new, empty label; subsequent 'addIns' calls append to it.
newLabel :: String -> CGMonad ()
newLabel lab = modify appendEmptyLabel
  where
    appendEmptyLabel st =
        case x64Result st of
            X64.Asm blocks -> st {x64Result = X64.Asm (blocks ++ [(lab, [])])}
+
+-- genId :: CGMonad Int
+-- genId = state $ \s -> (nextId s, s {nextId = nextId s + 1})
+
-- | Record which registers the current function's epilogue must pop
-- (set once per function by 'codegenFunc', read by 'codegenTerm').
setRegsToRestore :: [Register] -> CGMonad ()
setRegsToRestore regs = modify $ \s -> s {regsToRestore = regs}
+
-- | Record the current function's spill-area size so 'codegenTerm' can
-- rewind RSP before returning.
setSpillSize :: Size -> CGMonad ()
setSpillSize sz = modify $ \s -> s {spillSize = sz}
+
+
-- | Generate a complete assembly source for an IR program: a fixed prologue
-- (read from prologue.asm at compile time via the heredoc quasiquoter),
-- the text section for all functions, and a data section for the globals.
-- An artificial @db 0@ is emitted when there are no globals so the data
-- section is never empty.
codegen :: IRProgram -> Error String
codegen (IRProgram vars funcs) = do
    x64 <- execCGMonad $ mapM_ codegenFunc funcs
    X64.verify x64
    varcg <- fmap unlines $ mapM codegenVar vars
    return $ [there|prologue.asm|] ++ "\n" ++ X64.stringify x64 ++
             "\nsection .data\n" ++ (if not (null vars) then varcg else "db 0 ; keep dyld happy\n")
+
+
-- | Emit a data-section definition for one global variable.  Only integer
-- globals initialised with an integer literal are supported; anything else
-- is rejected.
codegenVar :: DVar -> Error String
codegenVar (DVar TInt n (ELit (LInt i) (Just TInt))) = Right $ n ++ ": dq " ++ show i
codegenVar _ = Left "Unsupported global variable declaration"
+
+
-- | Maps IR references to their allocated x64 location (register or stack slot).
type AllocMap = Map.Map Ref XRef
+
-- | Generate code for one function: run lifetime analysis on the temporaries,
-- allocate registers (R8..R15; RAX/RBX/RDX stay free as scratch), lay out the
-- spill area and argument slots relative to RSP, emit the prologue, then the
-- start block followed by the remaining blocks.
--
-- Leftover 'traceShowM' debug output was removed; the commented trace of the
-- allocation is kept below for future debugging.
codegenFunc :: IRFunc -> CGMonad ()
codegenFunc (IRFunc _ name al bbs sid) = do
    let temprefsperbb = collectTempRefs bbs
        alltemprefs = uniq $ sort $ map LA.unAccess $ concat $ concat $ map fst temprefsperbb
        -- (first use, last use) per temporary, as indices into the flattened
        -- lifetime-analysis result; fromJust is safe because every ref in
        -- alltemprefs occurs at least once by construction.
        lifespans = map (\r -> (findLifeSpan r, r)) alltemprefs
          where findLifeSpan ref =
                    fromJust $ findFirstLast id $ concat $ LA.lifetimeAnalysis ref temprefsperbb

        -- Move pairs the allocator may coalesce into the same register.
        aliascandidates = findAliasCandidates bbs :: [(Ref, Ref)]

        gpRegs = [R8, R9, R10, R11, R12, R13, R14, R15]
        allocation = regalloc lifespans gpRegs aliascandidates :: Map.Map Ref (Allocation Register)

        -- Stack offsets for everything the allocator spilled to memory.
        spillrefs = map fst $ filter (isAllocMem . snd) $ Map.toList allocation
        (spilloffsets, spillsz) = initLast $ scanl (+) 0 $ map refSize spillrefs
        spilloffsetmap = Map.fromList $ zip spillrefs spilloffsets

        usedregs = uniq $ sort $ catMaybes $ flip map (Map.toList allocation) $ \(_, a) -> case a of
            AllocReg reg -> Just reg
            AllocMem -> Nothing

    -- traceM $ "ALLOCATION: " ++ show allocation

    let nsaves = length usedregs
        -- Concrete x64 operand per allocated reference; spilled values live
        -- at [RSP + offset].
        allocationXref = flip Map.mapWithKey allocation $ \ref alloc -> case alloc of
            AllocReg reg -> XReg (fromIntegral $ refSize ref) reg
            AllocMem -> XMem (fromIntegral $ refSize ref)
                             (Just RSP) (0, RAX) Nothing
                             (fromIntegral $ spilloffsetmap ! ref)
        -- Arguments sit above the spill area, the saved registers, the saved
        -- RBP and the return address.
        allocmap = foldl' inserter allocationXref (zip al [0::Int ..])
          where
            inserter m ((t, n), i) =
                let offset = fromIntegral spillsz + 8 * nsaves + 8 {- rbp -} + 8 {- ret addr -} + 8 * i
                in Map.insert (Argument (sizeof t) n)
                              (XMem (fromIntegral $ sizeof t)
                                    (Just RSP) (0, RAX) Nothing
                                    (fromIntegral offset))
                              m

    -- Prologue: frame pointer, callee bookkeeping, spill space.
    newLabel name
    addIns $ PUSH (xref $ XReg 8 RBP)
    addIns $ MOV (xref $ XReg 8 RBP) (xref $ XReg 8 RSP)
    forM_ usedregs $ \reg -> addIns $ PUSH (xref $ XReg 8 reg)
    when (spillsz /= 0) $ addIns $ SUB (xref $ XReg 8 RSP) (xref $ XImm $ fromIntegral spillsz)
    setRegsToRestore usedregs
    setSpillSize spillsz

    -- The entry block must be emitted first; the pattern is partial but the
    -- IR guarantees exactly one block carries the start id.
    let ([startbb], rest) = partition (\(BB i _ _) -> i == sid) bbs
    codegenBB allocmap startbb
    mapM_ (codegenBB allocmap) rest
+
-- | Collect the (destination, source) pairs of all plain moves in the
-- program; the register allocator tries to put both sides of such a pair in
-- the same register so the move becomes a no-op.
findAliasCandidates :: [BB] -> [(Ref, Ref)]
findAliasCandidates blocks =
    [pair | BB _ inss _ <- blocks, ins <- inss, pair <- movPair ins]
  where
    movPair :: IRIns -> [(Ref, Ref)]
    movPair (IMov d s) = [(d, s)]
    movPair _ = []
+
-- | Indices of the first and last elements satisfying the predicate, or
-- Nothing when no element matches.
findFirstLast :: forall a. (a -> Bool) -> [a] -> Maybe (Int, Int)
findFirstLast p xs =
    case [i | (i, x) <- zip [0 ..] xs, p x] of
        [] -> Nothing
        hits@(firstHit : _) -> Just (firstHit, last hits)  -- 'last' safe: hits non-empty here
+
-- | Whether the allocator decided to spill this value to memory.
isAllocMem :: Allocation a -> Bool
isAllocMem alloc =
    case alloc of
        AllocMem -> True
        _ -> False
+
-- | Split a non-empty list into its initial segment and its last element in
-- a single pass.  Calling this on an empty list is a programming error and
-- now aborts with a descriptive message instead of a bare 'undefined'.
initLast :: [a] -> ([a], a)
initLast [] = error "initLast: empty list"
initLast [x] = ([], x)
initLast (x:xs) = let (ini, lst) = initLast xs in (x : ini, lst)
+
-- | Emit one basic block: a local label derived from the block id, the code
-- for each instruction, then the terminator.
codegenBB :: AllocMap -> BB -> CGMonad ()
codegenBB allocs (BB num instructions terminator) = do
    newLabel (".bb" ++ show num)
    mapM_ (codegenIns allocs) instructions
    codegenTerm allocs terminator
+
-- | Translate an IR reference into its x64 operand: constants become
-- immediates, globals become absolute memory operands, everything else is
-- looked up in the allocation map.  A missing allocation is a compiler bug;
-- report which reference instead of crashing in 'fromJust'.
mkxref :: Ref -> AllocMap -> XRef
mkxref (Constant _ v) _ = XImm v
mkxref (Global sz n) _ = XMem (fromIntegral sz) Nothing (0, RAX) (Just n) 0
mkxref r m = fromMaybe (error $ "mkxref: no allocation for reference " ++ show r)
                       (Map.lookup r m)
+
-- | Build the mov variant appropriate for the operand kinds: MOVi64 for an
-- immediate into a register (may need the full 64-bit form), MOVi for an
-- immediate below 2^32 into memory, plain MOV otherwise.  Mem-to-mem moves,
-- immediate destinations, and >= 2^32 immediates into memory have no single
-- encoding and abort — callers are expected to bounce through a register.
mkmov :: XRef -> XRef -> X64.Ins
mkmov a@(XReg _ _) b@(XReg _ _) = MOV (xref a) (xref b)
mkmov a@(XReg _ _) b@(XMem _ _ _ _ _) = MOV (xref a) (xref b)
mkmov a@(XReg _ _) b@(XImm _) = MOVi64 (xref a) (xref b)
mkmov a@(XMem _ _ _ _ _) b@(XReg _ _) = MOV (xref a) (xref b)
mkmov a@(XMem _ _ _ _ _) b@(XImm v) | v < 2 ^ (32 :: Int) = MOVi (xref a) (xref b)
mkmov a b = error $ "Invalid mkmov: " ++ show a ++ "; " ++ show b
+
-- | Build a compare, choosing the immediate form when the second operand is
-- a constant.
mkcmp :: XRef -> XRef -> X64.Ins
mkcmp a b =
    case b of
        XImm _ -> CMPi (xref a) (xref b)
        _ -> CMP (xref a) (xref b)
+
-- | Emit x64 code for one IR instruction.  RAX and RBX are used freely as
-- scratch registers; the allocator only hands out R8..R15 (see
-- 'codegenFunc'), so this never clobbers an allocated value.
codegenIns :: AllocMap -> IRIns -> CGMonad ()
codegenIns m (IMov d s)
    | dm == sm = return ()  -- already in place: emit nothing
    | X64.isXMem dm && X64.isXMem sm = do
        -- no mem-to-mem mov on x64: bounce through RAX
        addIns $ mkmov (XReg (fromIntegral $ refSize s) RAX) sm
        addIns $ mkmov dm (XReg (fromIntegral $ refSize d) RAX)
    | otherwise = addIns $ mkmov dm sm
    where dm = mkxref d m
          sm = mkxref s m
codegenIns m (IStore d s) = do
    -- get the value into a register (RBX) if it lives in memory
    sourcexref <- if X64.isXMem sm
        then do
            addIns $ mkmov (XReg sz RBX) sm
            return $ XReg sz RBX
        else return sm
    -- d holds an address; build a memory operand that dereferences it,
    -- loading the address into RAX first if d itself was spilled
    destxref <- case dm of
        XReg _ r -> return $ XMem sz (Just r) (0, RAX) Nothing 0
        x@(XMem xsz _ _ _ _) -> do
            addIns $ mkmov (XReg xsz RAX) x
            return $ XMem sz (Just RAX) (0, RAX) Nothing 0
        XImm _ -> throwError $ "IStore to [immediate] not expected"
    addIns $ mkmov destxref sourcexref
    where dm = mkxref d m
          sm = mkxref s m
          sz = fromIntegral $ refSize s
codegenIns m (ILoad d s) = do
    -- s holds an address; dereference it (via RAX when s was spilled)
    sourcexref <- case sm of
        XReg _ r -> return $ XMem sz (Just r) (0, RAX) Nothing 0
        x@(XMem xsz _ _ _ _) -> do
            addIns $ mkmov (XReg xsz RAX) x
            return $ XMem sz (Just RAX) (0, RAX) Nothing 0
        XImm _ -> throwError $ "ILoad from [immediate] not expected"
    if X64.isXMem dm
        then do
            -- destination in memory: go through RAX
            addIns $ mkmov (XReg sz RAX) sourcexref
            addIns $ mkmov dm (XReg sz RAX)
        else do
            addIns $ mkmov dm sourcexref
    where dm = mkxref d m
          sm = mkxref s m
          sz = fromIntegral $ refSize d
codegenIns m (IAri AMul d s) = do
    -- one-operand IMUL: multiplies by RAX, result read back from RAX
    let sz = fromIntegral $ refSize d
    addIns $ mkmov (XReg sz RAX) (mkxref d m)
    addIns $ mkmov (XReg sz RBX) (mkxref s m)
    addIns $ IMULDA (xref $ XReg sz RBX)
    addIns $ mkmov (mkxref d m) (XReg sz RAX)
codegenIns m (IAri ADiv d s) = do
    -- NOTE(review): RDX is zeroed rather than sign-extended from RAX (no
    -- CQO), which looks wrong for negative dividends with a signed IDIV —
    -- confirm intended integer semantics.
    let sz = fromIntegral $ refSize d
    addIns $ XOR (xref $ XReg 4 RDX) (xref $ XReg 4 RDX)
    addIns $ mkmov (XReg sz RAX) (mkxref d m)
    addIns $ mkmov (XReg sz RBX) (mkxref s m)
    addIns $ IDIVDA (xref $ XReg sz RBX)
    addIns $ mkmov (mkxref d m) (XReg sz RAX)
codegenIns m (IAri AMod d s) = do
    -- same sequence as ADiv, but the remainder is taken from RDX
    let sz = fromIntegral $ refSize d
    addIns $ XOR (xref $ XReg 4 RDX) (xref $ XReg 4 RDX)
    addIns $ mkmov (XReg sz RAX) (mkxref d m)
    addIns $ mkmov (XReg sz RBX) (mkxref s m)
    addIns $ IDIVDA (xref $ XReg sz RBX)
    addIns $ mkmov (mkxref d m) (XReg sz RDX)
codegenIns m (IAri at d s) = case arithTypeToCondCode at of
    Just cc -> do
        -- comparison operator: CMP, zero the destination, then SETcc into
        -- its low byte
        arg2 <- if X64.isXMem dm && X64.isXMem sm
            then do
                addIns $ mkmov (XReg (fromIntegral $ refSize s) RAX) sm
                return $ XReg (fromIntegral $ refSize s) RAX
            else return sm
        addIns $ mkcmp dm arg2
        addIns $ MOVi (xref dm) (xref $ XImm 0)
        addIns $ SETCC cc (xref $ X64.xrefSetSize 1 dm)
    Nothing -> do
        -- plain two-operand ALU op (add/sub/and/or/xor); fromJust is safe
        -- because arithTypeToIns covers exactly the non-condition-code cases
        arg2 <- if X64.isXMem dm && X64.isXMem sm
            then do
                addIns $ mkmov (XReg (fromIntegral $ refSize s) RAX) sm
                return $ XReg (fromIntegral $ refSize s) RAX
            else return sm
        addIns $ fromJust (arithTypeToIns at) dm arg2
    where dm = mkxref d m
          sm = mkxref s m
codegenIns m (ICall n rs) = do
    -- write arguments below RSP (rightmost argument deepest), then move RSP
    -- past them, call, and pop them again
    forM_ (zip (reverse rs) [1::Int ..]) $ \(r, i) ->
        let sz = fromIntegral $ refSize r
            src = (mkxref r m)
            dst = (XMem sz (Just RSP) (0, RAX) Nothing (fromIntegral $ (-8) * i))
        in if X64.isXMem (mkxref r m)
            then do
                addIns $ mkmov (XReg sz RAX) src
                addIns $ mkmov dst (XReg sz RAX)
            else do
                addIns $ mkmov dst src
    when (length rs > 0) $ addIns $ SUB (xref $ XReg 8 RSP) (xref $ XImm (fromIntegral $ 8 * length rs))
    addIns $ CALL n
    when (length rs > 0) $ addIns $ ADD (xref $ XReg 8 RSP) (xref $ XImm (fromIntegral $ 8 * length rs))
codegenIns m (ICallr d n rs) = do
    -- a call with a result: emit the call, then capture RAX
    codegenIns m (ICall n rs)
    addIns $ mkmov (mkxref d m) (XReg (fromIntegral $ refSize d) RAX)
codegenIns m fullins@(IResize d s) = do
    let dsz = fromIntegral $ refSize d
        ssz = fromIntegral $ refSize s
        dm = mkxref d m
        sm = mkxref s m
    -- resizing an immediate should have been folded away earlier
    when (X64.isXImm sm) $
        throwError $ "Resized value is an immediate in " ++ show fullins ++
                     "; (dm = " ++ show dm ++ "; sm = " ++ show sm ++ ")"
    case compare dsz ssz of
        EQ -> codegenIns m (IMov d s)
        -- widening: sign-extend with MOVSX (through RAX when dest is memory)
        GT -> if X64.isXMem dm
            then do
                addIns $ MOVSX (xref $ XReg dsz RAX) (xref sm)
                addIns $ mkmov dm (XReg dsz RAX)
            else do
                addIns $ MOVSX (xref dm) (xref sm)
        -- narrowing: just reinterpret the low bytes of the source
        LT -> if X64.isXMem dm && X64.isXMem sm
            then do
                addIns $ mkmov (XReg dsz RAX) (X64.xrefSetSize dsz sm)
                addIns $ mkmov dm (XReg dsz RAX)
            else do
                addIns $ mkmov dm (X64.xrefSetSize dsz sm)
codegenIns _ INop = return ()
+
-- | The x64 condition code for a comparison-style arithmetic operator;
-- Nothing for the plain ALU operators (handled by 'arithTypeToIns').
arithTypeToCondCode :: ArithType -> Maybe X64.CondCode
arithTypeToCondCode at =
    case at of
        AEq -> Just CCE
        ANeq -> Just CCNE
        AGt -> Just CCG
        ALt -> Just CCL
        AGeq -> Just CCGE
        ALeq -> Just CCLE
        _ -> Nothing
+
-- | The x64 condition code for an IR comparison terminator (total: every
-- CmpType maps to a code).
cmpTypeToCondCode :: CmpType -> X64.CondCode
cmpTypeToCondCode ct =
    case ct of
        CEq -> CCE
        CNeq -> CCNE
        CGt -> CCG
        CLt -> CCL
        CGeq -> CCGE
        CLeq -> CCLE
+
-- | The two-operand instruction builder for a plain ALU operator; Nothing
-- for the comparison operators (handled by 'arithTypeToCondCode').
arithTypeToIns :: ArithType -> Maybe (XRef -> XRef -> X64.Ins)
arithTypeToIns at =
    case at of
        AAdd -> Just (\x y -> ADD (xref x) (xref y))
        ASub -> Just (\x y -> SUB (xref x) (xref y))
        AAnd -> Just (\x y -> AND (xref x) (xref y))
        AOr -> Just (\x y -> OR (xref x) (xref y))
        AXor -> Just (\x y -> XOR (xref x) (xref y))
        _ -> Nothing
+
-- | Emit code for a basic-block terminator.  The function epilogue that was
-- duplicated between 'IRet' and 'IRetr' is factored into 'emitEpilogue'.
codegenTerm :: AllocMap -> IRTerm -> CGMonad ()
codegenTerm m (IJcc ct a b t e) = do
    -- compare, conditionally jump to the then-block, otherwise fall through
    -- a JMP to the else-block
    addIns $ mkcmp (mkxref a m) (mkxref b m)
    addIns $ JCC (cmpTypeToCondCode ct) (".bb" ++ show t)
    addIns $ JMP (".bb" ++ show e)
codegenTerm _ (IJmp i) = addIns $ JMP (".bb" ++ show i)
codegenTerm _ IRet = emitEpilogue
codegenTerm m (IRetr r) = do
    -- return value goes in RAX, then the shared epilogue
    addIns $ mkmov (XReg (fromIntegral $ refSize r) RAX) (mkxref r m)
    emitEpilogue
codegenTerm _ ITermNone = error "codegenTerm: ITermNone should not reach codegen"

-- | Undo the prologue emitted by 'codegenFunc': release the spill area,
-- pop the saved registers in reverse order, restore RSP/RBP and return.
emitEpilogue :: CGMonad ()
emitEpilogue = do
    spillsz <- gets spillSize
    when (spillsz /= 0) $ addIns $ ADD (xref $ XReg 8 RSP) (xref $ XImm $ fromIntegral spillsz)
    usedregs <- gets regsToRestore
    forM_ (reverse usedregs) $ \reg -> addIns $ POP (xref $ XReg 8 reg)
    addIns $ mkmov (XReg 8 RSP) (XReg 8 RBP)
    addIns $ POP (xref $ XReg 8 RBP)
    addIns RET
+
+
-- | Build the input for lifetime analysis: for every basic block, the
-- per-slot lists of accesses to temporaries (non-temporaries are filtered
-- out), plus the positions (indices into 'bbs') of its successor blocks.
collectTempRefs :: [BB] -> [([[LA.Access Ref]], [Int])]
collectTempRefs bbs =
    flip map bbs $ \(BB _ inss term) ->
        let refs = map (filter (isTemp . LA.unAccess)) $ concatMap listRefsIns inss ++ listRefsTerm term
            -- fromJust: terminators only name block ids that exist in bbs
            nexts = map (\i -> fromJust $ findIndex (\(BB j _ _) -> j == i) bbs) $ listNextIds term
        in (refs, nexts)
  where
    -- Reads are placed in a slot before the write of the same instruction,
    -- so a value read and overwritten by one instruction gets the shorter
    -- lifetime.  NOTE(review): IAri records its destination only as a Write
    -- even though the x64 ALU ops read it too — confirm this is safe given
    -- the scratch-register scheme in codegenIns.
    listRefsIns :: IRIns -> [[LA.Access Ref]]
    listRefsIns (IMov a b) = [[LA.Read b], [LA.Write a]]
    listRefsIns (IStore a b) = [[LA.Read a, LA.Read b]]
    listRefsIns (ILoad a b) = [[LA.Read b], [LA.Write a]]
    listRefsIns (IAri _ a b) = [[LA.Write a, LA.Read b]]
    listRefsIns (ICall _ l) = [map LA.Read l]
    listRefsIns (ICallr a _ l) = [LA.Write a : map LA.Read l]
    listRefsIns (IResize a b) = [[LA.Read b], [LA.Write a]]
    listRefsIns INop = [[]]

    listRefsTerm :: IRTerm -> [[LA.Access Ref]]
    listRefsTerm (IJcc _ a b _ _) = [[LA.Read a, LA.Read b]]
    listRefsTerm (IJmp _) = [[]]
    listRefsTerm IRet = [[]]
    listRefsTerm (IRetr a) = [[LA.Read a]]
    listRefsTerm ITermNone = undefined

    -- Successor block ids of a terminator (empty for returns).
    listNextIds :: IRTerm -> [Id]
    listNextIds (IJcc _ _ _ a b) = [a, b]
    listNextIds (IJmp a) = [a]
    listNextIds IRet = []
    listNextIds (IRetr _) = []
    listNextIds ITermNone = undefined

    isTemp :: Ref -> Bool
    isTemp (Temp _ _) = True
    isTemp _ = False