{-# LANGUAGE DeriveFunctor, GeneralizedNewtypeDeriving, TupleSections, QuasiQuotes, ScopedTypeVariables #-}
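
-- | Code generation: lowers the IR produced by the earlier passes to
-- NASM-flavoured x64 assembly text.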
module CodeGen(codegen) where
import Control.Monad
import Control.Monad.Except
import Control.Monad.State.Strict
import Data.List
import Data.Maybe
import Data.Map.Strict ((!))
import qualified Data.Map.Strict as Map
import Debug.Trace
import AST
import Defs
import Intermediate
import qualified LifetimeAnalysis as LA
import RegAlloc
import Utils
import X64 (Register(..), CondCode(..), XRef(..), Ins(..), xref)
import qualified X64 as X64
import X64Optimiser
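
-- State carried through code generation: a fresh-id counter, the registers
-- pushed in the prologue that the epilogue must pop again, the size of the
-- current function's spill area, and the assembly emitted so far.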
data CGState = CGState
{ nextId :: Int,
regsToRestore :: [Register],
spillSize :: Size,
x64Result :: X64.Asm }
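
-- The code generation monad: CGState threaded through Except, so that
-- unsupported constructs abort with an error message instead of crashing.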
newtype CGMonad a = CGMonad { unCGMonad :: StateT CGState (Except String) a }
deriving (Functor, Applicative, Monad, MonadState CGState, MonadError String)
initState :: CGState
initState = CGState {nextId = 1, regsToRestore = [], spillSize = 0, x64Result = X64.Asm []}
execCGMonad :: CGMonad a -> Error X64.Asm
execCGMonad = fmap x64Result . runExcept . flip execStateT initState . unCGMonad
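
-- Append an instruction to the block of the most recently opened label.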
addIns :: X64.Ins -> CGMonad ()
addIns ins = modify $ \s ->
let (X64.Asm funcs) = x64Result s
(pre, (lab, inss)) = (init funcs, last funcs)
in s {x64Result = X64.Asm $ pre ++ [(lab, inss ++ [ins])]}
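
-- Open a new, empty label block at the end of the generated assembly.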
newLabel :: String -> CGMonad ()
newLabel lab = modify $ \s ->
let (X64.Asm funcs) = x64Result s
in s {x64Result = X64.Asm $ funcs ++ [(lab, [])]}
-- genId :: CGMonad Int
-- genId = state $ \s -> (nextId s, s {nextId = nextId s + 1})
setRegsToRestore :: [Register] -> CGMonad ()
setRegsToRestore regs = modify $ \s -> s {regsToRestore = regs}
setSpillSize :: Size -> CGMonad ()
setSpillSize sz = modify $ \s -> s {spillSize = sz}
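
-- Entry point: produce the complete NASM source for an IR program. Function
-- code goes into section .text (after verification and an optimisation
-- pass), global variables into section .data.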
codegen :: IRProgram -> Error String
codegen (IRProgram vars funcs) = do
x64 <- execCGMonad $ mapM_ codegenFunc funcs
-- traceShowM x64
X64.verify x64
varcg <- liftM unlines $ mapM codegenVar vars
x64opt <- x64Optimise x64
return $ "extern putc, putint, getc, _builtin_malloc\n" ++
"global main\ndefault rel\nsection .text\n" ++
X64.stringify x64opt ++
"\nsection .data\n" ++ varcg
codegenVar :: DVar -> Error String
codegenVar (DVar TInt n (ELit (LInt i) (Just TInt))) = Right $ n ++ ": dq " ++ show i
codegenVar _ = Left "Unsupported global variable declaration"
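
-- The result of register allocation: the x64 operand in which each IR
-- reference lives.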
type AllocMap = Map.Map Ref XRef
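
-- Generate code for a single function: run lifetime analysis over its basic
-- blocks, allocate the temporaries to the general-purpose registers R8..R15
-- (anything that does not fit is spilled to the stack), and emit the
-- prologue. After the prologue the stack holds, from low to high addresses:
-- the spill slots, the pushed registers, the saved RBP, the return address,
-- and then the arguments; the argument offsets computed below follow that
-- layout. The entry basic block is emitted first so that execution falls
-- through from the prologue into it.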
codegenFunc :: IRFunc -> CGMonad ()
codegenFunc (IRFunc _ name al bbs sid) = do
let temprefsperbb = collectTempRefs bbs
alltemprefs = uniq $ sort $ map LA.unAccess $ concat $ concat $ map fst temprefsperbb
lifespans = map (\r -> (findLifeSpan r, r)) alltemprefs
where findLifeSpan ref =
fromJust $ findFirstLast id $ concat $ LA.lifetimeAnalysis ref temprefsperbb
aliascandidates = findAliasCandidates bbs :: [(Ref, Ref)]
gpRegs = [R8, R9, R10, R11, R12, R13, R14, R15]
allocation = regalloc lifespans gpRegs aliascandidates :: Map.Map Ref (Allocation Register)
spillrefs = map fst $ filter (isAllocMem . snd) $ Map.toList allocation
(spilloffsets, spillsz) = initLast $ scanl (+) 0 $ map refSize spillrefs
spilloffsetmap = Map.fromList $ zip spillrefs spilloffsets
usedregs = uniq $ sort $ catMaybes $ flip map (Map.toList allocation) $ \(_, a) -> case a of
AllocReg reg -> Just reg
AllocMem -> Nothing
-- traceShowM temprefsperbb
-- traceShowM lifespans
traceM $ "ALLOCATION: " ++ show allocation
let nsaves = length usedregs
allocationXref = flip Map.mapWithKey allocation $ \ref alloc -> case alloc of
AllocReg reg -> XReg (fromIntegral $ refSize ref) reg
AllocMem -> XMem (fromIntegral $ refSize ref)
(Just RSP) (0, RAX) Nothing
(fromIntegral $ spilloffsetmap ! ref)
allocmap = foldl inserter allocationXref (zip al [0::Int ..])
where
inserter m ((t, n), i) =
let offset = fromIntegral spillsz + 8 * nsaves + 8 {- rbp -} + 8 {- ret addr -} + 8 * i
in Map.insert (Argument (sizeof t) n)
(XMem (fromIntegral $ sizeof t)
(Just RSP) (0, RAX) Nothing
(fromIntegral offset))
m
newLabel name
addIns $ PUSH (xref $ XReg 8 RBP)
addIns $ MOV (xref $ XReg 8 RBP) (xref $ XReg 8 RSP)
forM_ usedregs $ \reg -> addIns $ PUSH (xref $ XReg 8 reg)
when (spillsz /= 0) $ addIns $ SUB (xref $ XReg 8 RSP) (xref $ XImm $ fromIntegral spillsz)
setRegsToRestore usedregs
setSpillSize spillsz
let ([startbb], rest) = partition (\(BB i _ _) -> i == sid) bbs
codegenBB allocmap startbb
mapM_ (codegenBB allocmap) rest
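
-- Destination/source pairs from moves and arithmetic, used as hints for the
-- register allocator: giving both sides the same register makes the copy a
-- no-op that codegenIns can drop entirely.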
findAliasCandidates :: [BB] -> [(Ref, Ref)]
findAliasCandidates = concatMap (\(BB _ inss _) -> concatMap goI inss)
where
goI :: IRIns -> [(Ref, Ref)]
goI (IMov d s) = [(d, s)]
goI (IAri at d s1 s2)
| isCommutative at = [(d, s1), (d, s2)]
| otherwise = [(d, s1)]
goI _ = []
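
-- Indices of the first and last elements satisfying the predicate, if any.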
findFirstLast :: forall a. (a -> Bool) -> [a] -> Maybe (Int, Int)
findFirstLast f l = go Nothing 0 l
where
go :: Maybe (Int, Int) -> Int -> [a] -> Maybe (Int, Int)
go mr _ [] = mr
go mr i (x:xs)
| f x = go (note mr i) (i+1) xs
| otherwise = go mr (i+1) xs
note :: Maybe (Int, Int) -> Int -> Maybe (Int, Int)
note Nothing i = Just (i, i)
note (Just (a, _)) i = Just (a, i)
isAllocMem :: Allocation a -> Bool
isAllocMem AllocMem = True
isAllocMem _ = False
initLast :: [a] -> ([a], a)
initLast [] = undefined
initLast [x] = ([], x)
initLast (x:xs) = let (acc, l) = initLast xs in (x : acc, l)
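
-- A basic block becomes a local label followed by its instructions and its
-- terminator.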
codegenBB :: AllocMap -> BB -> CGMonad ()
codegenBB allocmap (BB bid inss term) = do
newLabel $ ".bb" ++ show bid
mapM_ (codegenIns allocmap) inss
codegenTerm allocmap term
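
-- Translate an IR reference into an x64 operand: constants become
-- immediates, globals become label-addressed memory operands, and
-- temporaries and arguments are looked up in the allocation map.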
mkxref :: Ref -> AllocMap -> XRef
mkxref (Constant _ v) _ = XImm v
mkxref (Global sz n) _ = XMem (fromIntegral sz) Nothing (0, RAX) (Just n) 0
mkxref r m = fromJust $ Map.lookup r m
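
-- Pick a mov variant that is encodable for the operand pair: only a register
-- destination can take a full 64-bit immediate, and a memory destination
-- takes at most a 32-bit immediate. The catch-all case deliberately emits a
-- CALL with the error text as its target, so an unsupported combination
-- shows up in the generated assembly rather than aborting code generation.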
mkmov :: XRef -> XRef -> X64.Ins
mkmov a@(XReg _ _) b@(XReg _ _) = MOV (xref a) (xref b)
mkmov a@(XReg _ _) b@(XMem _ _ _ _ _) = MOV (xref a) (xref b)
mkmov a@(XReg _ _) b@(XImm _) = MOVi64 (xref a) (xref b)
mkmov a@(XMem _ _ _ _ _) b@(XReg _ _) = MOV (xref a) (xref b)
mkmov a@(XMem _ _ _ _ _) b@(XImm v) | v < 2 ^ (32 :: Int) = MOVi (xref a) (xref b)
mkmov a b = CALL $ "Invalid mkmov: " ++ show a ++ "; " ++ show b
-- mkmov a b = error $ "Invalid mkmov: " ++ show a ++ "; " ++ show b
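
-- Comparisons against an immediate need the CMPi form; all other operand
-- pairs use plain CMP.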
mkcmp :: XRef -> XRef -> X64.Ins
mkcmp a b@(XImm _) = CMPi (xref a) (xref b)
mkcmp a b = CMP (xref a) (xref b)
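
-- Instruction selection for a single IR instruction. x64 allows at most one
-- memory operand per instruction, so RAX (and RBX where two scratch values
-- are needed) is used as a temporary whenever both operands were allocated
-- to memory.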
codegenIns :: AllocMap -> IRIns -> CGMonad ()
codegenIns m (IMov d s)
| dm == sm = return ()
| X64.isXMem dm && X64.isXMem sm = do
addIns $ mkmov (XReg (fromIntegral $ refSize s) RAX) sm
addIns $ mkmov dm (XReg (fromIntegral $ refSize d) RAX)
| otherwise = addIns $ mkmov dm sm
where dm = mkxref d m
sm = mkxref s m
codegenIns m (IStore d s) = do
sourcexref <- if X64.isXMem sm
then do
addIns $ mkmov (XReg sz RBX) sm
return $ XReg sz RBX
else return sm
destxref <- case dm of
XReg _ r -> return $ XMem sz (Just r) (0, RAX) Nothing 0
x@(XMem xsz _ _ _ _) -> do
addIns $ mkmov (XReg xsz RAX) x
return $ XMem sz (Just RAX) (0, RAX) Nothing 0
XImm _ -> throwError $ "IStore to [immediate] not expected"
addIns $ mkmov destxref sourcexref
where dm = mkxref d m
sm = mkxref s m
sz = fromIntegral $ refSize s
codegenIns m (ILoad d s) = do
sourcexref <- case sm of
XReg _ r -> return $ XMem sz (Just r) (0, RAX) Nothing 0
x@(XMem xsz _ _ _ _) -> do
addIns $ mkmov (XReg xsz RAX) x
return $ XMem sz (Just RAX) (0, RAX) Nothing 0
XImm _ -> throwError $ "ILoad from [immediate] not expected"
if X64.isXMem dm
then do
addIns $ mkmov (XReg sz RAX) sourcexref
addIns $ mkmov dm (XReg sz RAX)
else do
addIns $ mkmov dm sourcexref
where dm = mkxref d m
sm = mkxref s m
sz = fromIntegral $ refSize d
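
-- Multiplication, division and modulo use the single-operand IMUL/IDIV
-- forms operating on RDX:RAX: RDX is zeroed before dividing, and the result
-- is taken from RAX (product or quotient) or RDX (remainder).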
codegenIns m (IAri AMul d s1 s2) = do
let sz = fromIntegral $ refSize d
addIns $ mkmov (XReg sz RAX) (mkxref s1 m)
addIns $ mkmov (XReg sz RBX) (mkxref s2 m)
addIns $ IMULDA (xref $ XReg sz RBX)
addIns $ mkmov (mkxref d m) (XReg sz RAX)
codegenIns m (IAri ADiv d s1 s2) = do
let sz = fromIntegral $ refSize d
addIns $ XOR (xref $ XReg 4 RDX) (xref $ XReg 4 RDX)
addIns $ mkmov (XReg sz RAX) (mkxref s1 m)
addIns $ mkmov (XReg sz RBX) (mkxref s2 m)
addIns $ IDIVDA (xref $ XReg sz RBX)
addIns $ mkmov (mkxref d m) (XReg sz RAX)
codegenIns m (IAri AMod d s1 s2) = do
let sz = fromIntegral $ refSize d
addIns $ XOR (xref $ XReg 4 RDX) (xref $ XReg 4 RDX)
addIns $ mkmov (XReg sz RAX) (mkxref s1 m)
addIns $ mkmov (XReg sz RBX) (mkxref s2 m)
addIns $ IDIVDA (xref $ XReg sz RBX)
addIns $ mkmov (mkxref d m) (XReg sz RDX)
codegenIns m (IAri at d s1 s2) = case arithTypeToCondCode at of
Just cc -> do
arg2 <- if X64.isXMem s1m && X64.isXMem s2m
then do
addIns $ mkmov (XReg (fromIntegral $ refSize s2) RAX) s2m
return $ XReg (fromIntegral $ refSize s2) RAX
else return s2m
addIns $ mkcmp s1m arg2
addIns $ SETCC cc (xref $ X64.xrefSetSize 1 dm)
addIns $ AND (xref $ X64.xrefSetSize 4 dm) (xref $ XImm 0xff)
Nothing -> do
(_, s1m', s2', s2m') <-
if dm == s2m
then if dm == s1m
then return (s1, s1m, s2, s2m)
else if isCommutative at
then return (s2, s2m, s1, s1m)
else throwError "Noncommutative op with d==s2/=s1"
else return (s1, s1m, s2, s2m)
arg2 <- if X64.isXMem s1m' && X64.isXMem s2m'
then do
addIns $ mkmov (XReg (fromIntegral $ refSize s2') RAX) s2m'
return $ XReg (fromIntegral $ refSize s2') RAX
else return s2m'
when (dm /= s1m') $ addIns $ mkmov dm s1m'
addIns $ fromJust (arithTypeToIns at) dm arg2
where dm = mkxref d m
s1m = mkxref s1 m
s2m = mkxref s2 m
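
-- Calls use a simple stack-based convention: the arguments are stored right
-- to left just below the stack pointer, RSP is then moved past them for the
-- duration of the call, and a return value (see ICallr below) comes back in
-- RAX.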
codegenIns m (ICall n rs) = do
forM_ (zip (reverse rs) [1::Int ..]) $ \(r, i) ->
let sz = fromIntegral $ refSize r
src = (mkxref r m)
dst = (XMem sz (Just RSP) (0, RAX) Nothing (fromIntegral $ (-8) * i))
in if X64.isXMem (mkxref r m)
then do
addIns $ mkmov (XReg sz RAX) src
addIns $ mkmov dst (XReg sz RAX)
else do
addIns $ mkmov dst src
when (length rs > 0) $ addIns $ SUB (xref $ XReg 8 RSP) (xref $ XImm (fromIntegral $ 8 * length rs))
addIns $ CALL n
when (length rs > 0) $ addIns $ ADD (xref $ XReg 8 RSP) (xref $ XImm (fromIntegral $ 8 * length rs))
codegenIns m (ICallr d n rs) = do
codegenIns m (ICall n rs)
addIns $ mkmov (mkxref d m) (XReg (fromIntegral $ refSize d) RAX)
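
-- Resizing a value: growing sign-extends with MOVSX, shrinking copies using
-- the destination's smaller operand size, and equal sizes reduce to a plain
-- move.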
codegenIns m fullins@(IResize d s) = do
let dsz = fromIntegral $ refSize d
ssz = fromIntegral $ refSize s
dm = mkxref d m
sm = mkxref s m
when (X64.isXImm sm) $
throwError $ "Resized value is an immediate in " ++ show fullins ++
"; (dm = " ++ show dm ++ "; sm = " ++ show sm ++ ")"
case compare dsz ssz of
EQ -> codegenIns m (IMov d s)
GT -> if X64.isXMem dm
then do
addIns $ MOVSX (xref $ XReg dsz RAX) (xref sm)
addIns $ mkmov dm (XReg dsz RAX)
else do
addIns $ MOVSX (xref dm) (xref sm)
LT -> if X64.isXMem dm && X64.isXMem sm
then do
addIns $ mkmov (XReg dsz RAX) (X64.xrefSetSize dsz sm)
addIns $ mkmov dm (XReg dsz RAX)
else do
addIns $ mkmov dm (X64.xrefSetSize dsz sm)
codegenIns _ INop = return ()
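
-- Comparison operators are lowered as CMP followed by SETcc; this gives the
-- condition code corresponding to each such operator.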
arithTypeToCondCode :: ArithType -> Maybe X64.CondCode
arithTypeToCondCode AEq = Just CCE
arithTypeToCondCode ANeq = Just CCNE
arithTypeToCondCode AGt = Just CCG
arithTypeToCondCode ALt = Just CCL
arithTypeToCondCode AGeq = Just CCGE
arithTypeToCondCode ALeq = Just CCLE
arithTypeToCondCode _ = Nothing
cmpTypeToCondCode :: CmpType -> X64.CondCode
cmpTypeToCondCode CEq = CCE
cmpTypeToCondCode CNeq = CCNE
cmpTypeToCondCode CGt = CCG
cmpTypeToCondCode CLt = CCL
cmpTypeToCondCode CGeq = CCGE
cmpTypeToCondCode CLeq = CCLE
arithTypeToIns :: ArithType -> Maybe (XRef -> XRef -> X64.Ins)
arithTypeToIns AAdd = Just $ \a b -> ADD (xref a) (xref b)
arithTypeToIns ASub = Just $ \a b -> SUB (xref a) (xref b)
arithTypeToIns AAnd = Just $ \a b -> AND (xref a) (xref b)
arithTypeToIns AOr = Just $ \a b -> OR (xref a) (xref b)
arithTypeToIns AXor = Just $ \a b -> XOR (xref a) (xref b)
arithTypeToIns _ = Nothing
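
-- Lower a block terminator. Conditional jumps compare and branch to the two
-- successor blocks; returns emit the epilogue (release the spill area, pop
-- the saved registers, restore RBP) before RET.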
codegenTerm :: AllocMap -> IRTerm -> CGMonad ()
codegenTerm m (IJcc ct a b t e) = do
if X64.isXMem am && X64.isXMem bm
then do
addIns $ mkmov (XReg (fromIntegral $ refSize b) RAX) bm
addIns $ mkcmp am (XReg (fromIntegral $ refSize b) RAX)
else do
addIns $ mkcmp am bm
addIns $ JCC (cmpTypeToCondCode ct) (".bb" ++ show t)
addIns $ JMP (".bb" ++ show e)
where
am = mkxref a m
bm = mkxref b m
codegenTerm _ (IJmp i) = addIns $ JMP (".bb" ++ show i)
codegenTerm _ IRet = do
spillsz <- gets spillSize
when (spillsz /= 0) $ addIns $ ADD (xref $ XReg 8 RSP) (xref $ XImm $ fromIntegral spillsz)
usedregs <- gets regsToRestore
forM_ (reverse usedregs) $ \reg -> addIns $ POP (xref $ XReg 8 reg)
addIns $ mkmov (XReg 8 RSP) (XReg 8 RBP)
addIns $ POP (xref $ XReg 8 RBP)
addIns RET
codegenTerm m (IRetr r) = do
addIns $ mkmov (XReg (fromIntegral $ refSize r) RAX) (mkxref r m)
spillsz <- gets spillSize
when (spillsz /= 0) $ addIns $ ADD (xref $ XReg 8 RSP) (xref $ XImm $ fromIntegral spillsz)
usedregs <- gets regsToRestore
forM_ (reverse usedregs) $ \reg -> addIns $ POP (xref $ XReg 8 reg)
addIns $ mkmov (XReg 8 RSP) (XReg 8 RBP)
addIns $ POP (xref $ XReg 8 RBP)
addIns RET
codegenTerm _ ITermNone = undefined
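
-- For each basic block: the reads and writes of temporaries performed by
-- every instruction, paired with the indices of its successor blocks. This
-- is the input to the lifetime analysis that drives register allocation.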
collectTempRefs :: [BB] -> [([[LA.Access Ref]], [Int])]
collectTempRefs bbs =
flip map bbs $ \(BB _ inss term) ->
let refs = map (filter (isTemp . LA.unAccess)) $ concatMap listRefsIns inss ++ listRefsTerm term
nexts = map (\i -> fromJust $ findIndex (\(BB j _ _) -> j == i) bbs) $ listNextIds term
in (refs, nexts)
where
listRefsIns :: IRIns -> [[LA.Access Ref]]
listRefsIns (IMov a b) = [[LA.Read b], [LA.Write a]]
listRefsIns (IStore a b) = [[LA.Read a, LA.Read b]]
listRefsIns (ILoad a b) = [[LA.Read b], [LA.Write a]]
listRefsIns (IAri at a b c)
-- if not commutative, we don't want to have to xchg the operands
| isCommutative at = [[LA.Read b, LA.Read c], [LA.Write a]]
| otherwise = [[LA.Read b], [LA.Read c, LA.Write a]]
listRefsIns (ICall _ l) = [map LA.Read l]
listRefsIns (ICallr a _ l) = [LA.Write a : map LA.Read l]
listRefsIns (IResize a b) = [[LA.Read b], [LA.Write a]]
listRefsIns INop = [[]]
listRefsTerm :: IRTerm -> [[LA.Access Ref]]
listRefsTerm (IJcc _ a b _ _) = [[LA.Read a, LA.Read b]]
listRefsTerm (IJmp _) = [[]]
listRefsTerm IRet = [[]]
listRefsTerm (IRetr a) = [[LA.Read a]]
listRefsTerm ITermNone = undefined
listNextIds :: IRTerm -> [Id]
listNextIds (IJcc _ _ _ a b) = [a, b]
listNextIds (IJmp a) = [a]
listNextIds IRet = []
listNextIds (IRetr _) = []
listNextIds ITermNone = undefined
isTemp :: Ref -> Bool
isTemp (Temp _ _) = True
isTemp _ = False