1 (* Copyright (C) 2009 Matthew Fluet.
2 * Copyright (C) 1999-2008 Henry Cejtin, Matthew Fluet, Suresh
3 * Jagannathan, and Stephen Weeks.
4 * Copyright (C) 1997-2000 NEC Research Institute.
6 * MLton is released under a BSD-style license.
7 * See the file MLton-LICENSE for details.
10 functor amd64GenerateTransfers(S: AMD64_GENERATE_TRANSFERS_STRUCTS): AMD64_GENERATE_TRANSFERS =
24 structure CFunction = CFunction
27 val ones : int * WordSize.t -> WordX.t
28 = fn (i, ws) => (WordX.notb o WordX.lshift)
30 WordX.fromIntInf (IntInf.fromInt i, ws))
32 val tracerTop = amd64.tracerTop
(* Instantiate the live-transfer analysis for this code generator;
 * all argument structures share the same underlying amd64 structure. *)
34 structure amd64LiveTransfers
35 = amd64LiveTransfers(structure amd64 = amd64
36 structure amd64Liveness = amd64Liveness
37 structure amd64JumpInfo = amd64JumpInfo
38 structure amd64LoopInfo = amd64LoopInfo)
(* Operand sizes used throughout transfer generation; both are taken
 * from amd64MLton (on amd64 these presumably coincide at 8 bytes —
 * confirm in amd64MLton). *)
40 val pointerSize = amd64MLton.pointerSize
41 val wordSize = amd64MLton.wordSize
104 {frontierReg = Register.r12,
105 stackTopReg = Register.rbp,
106 transferRegs = fn Entry.Jump _ => transferRegs
107 | Entry.CReturn _ => Register.rax::Register.eax::Register.al::transferRegs
109 transferXmmRegs = fn Entry.Jump _ => transferXmmRegs
110 | Entry.CReturn _ => XmmRegister.xmm0D::XmmRegister.xmm0S::transferXmmRegs
158 val transferXmmRegs =
175 {frontierReg = Register.r12,
176 stackTopReg = Register.rbp,
177 transferRegs = fn Entry.Jump _ => transferRegs
178 | Entry.CReturn _ => Register.rax::Register.eax::Register.al::transferRegs
180 transferXmmRegs = fn Entry.Jump _ => transferXmmRegs
181 | Entry.CReturn _ => XmmRegister.xmm0D::XmmRegister.xmm0S::transferXmmRegs
(* %rax reserved as the index register (presumably for jump-table
 * dispatch in effectJumpTable — confirm at use site). *)
185 val indexReg = amd64.Register.rax
(* Abstract memory locations for the gcState stackTop/frontier fields;
 * these are the memlocs cached in stackTopReg/frontierReg (see the
 * directive_cache at far entries below). *)
187 val stackTop = amd64MLton.gcState_stackTopContents
188 val frontier = amd64MLton.gcState_frontierContents
190 datatype gef = GEF of {generate : gef ->
194 Assembly.t AppendList.t,
197 transfer : Transfer.t} ->
198 Assembly.t AppendList.t,
202 Assembly.t AppendList.t}
204 fun generateTransfers {chunk as Chunk.T {data, blocks, ...},
206 newProfileLabel: amd64.ProfileLabel.t -> amd64.ProfileLabel.t,
207 liveInfo : amd64Liveness.LiveInfo.t,
208 jumpInfo : amd64JumpInfo.t,
211 val {frontierReg, stackTopReg, transferRegs, transferXmmRegs} =
(* Snapshot the memloc class sets registered by amd64MLton (stored in
 * refs; dereferenced once here for the whole chunk). *)
215 val allClasses = !amd64MLton.Classes.allClasses
216 val livenessClasses = !amd64MLton.Classes.livenessClasses
217 val livenessClasses = ClassSet.+(livenessClasses,
219 [amd64MLton.Classes.StaticNonTemp,
220 amd64MLton.Classes.CArg])
(* Partition memloc classes to decide what must be flushed (committed
 * to memory) at the various transfer kinds. *)
221 val nonlivenessClasses = ClassSet.-(allClasses, livenessClasses)
222 val holdClasses = !amd64MLton.Classes.holdClasses
(* NOTE(review): farflushClasses and nearflushClasses are computed
 * identically here; the two names are kept distinct at their use
 * sites (far vs. near transfers) — confirm whether they are meant
 * to diverge. *)
223 val farflushClasses = ClassSet.-(nonlivenessClasses, holdClasses)
224 val nearflushClasses = ClassSet.-(nonlivenessClasses, holdClasses)
225 val runtimeClasses = !amd64MLton.Classes.runtimeClasses
226 val cstaticClasses = !amd64MLton.Classes.cstaticClasses
227 val heapClasses = !amd64MLton.Classes.heapClasses
(* Classes that must be committed around a C call: C-visible statics
 * plus the ML heap. *)
228 val ccallflushClasses = ClassSet.+(cstaticClasses, heapClasses)
230 fun removeHoldMemLocs memlocs
233 fn m => not (ClassSet.contains(holdClasses, MemLoc.class m)))
235 val stackAssume = {register = stackTopReg,
236 memloc = stackTop (),
240 val frontierAssume = {register = frontierReg,
241 memloc = frontier (),
245 val cStackAssume = {register = Register.rsp,
246 memloc = amd64MLton.c_stackPContents,
247 weight = 2048, (* ??? *)
253 val l = frontierAssume :: stackAssume :: l
255 Assembly.directive_assume {assumes = if reserveRsp
256 then cStackAssume :: l
260 fun runtimeTransfer live setup trans
263 (Assembly.directive_force
264 {commit_memlocs = removeHoldMemLocs live,
265 commit_classes = ClassSet.empty,
266 remove_memlocs = MemLocSet.empty,
267 remove_classes = ClassSet.empty,
268 dead_memlocs = MemLocSet.empty,
269 dead_classes = ClassSet.empty}),
272 [(Assembly.directive_force
273 {commit_memlocs = MemLocSet.empty,
274 commit_classes = farflushClasses,
275 remove_memlocs = MemLocSet.empty,
276 remove_classes = ClassSet.empty,
277 dead_memlocs = MemLocSet.empty,
278 dead_classes = ClassSet.empty})],
281 fun farEntry l = AppendList.cons (blockAssumes [], l)
283 fun farTransfer live setup trans
286 (Assembly.directive_force
287 {commit_memlocs = removeHoldMemLocs live,
288 commit_classes = ClassSet.empty,
289 remove_memlocs = MemLocSet.empty,
290 remove_classes = ClassSet.empty,
291 dead_memlocs = MemLocSet.empty,
292 dead_classes = ClassSet.empty}),
295 [(Assembly.directive_cache
296 {caches = [{register = stackTopReg,
297 memloc = stackTop (),
299 {register = frontierReg,
300 memloc = frontier (),
302 (Assembly.directive_xmmcache
304 (Assembly.directive_force
305 {commit_memlocs = MemLocSet.empty,
306 commit_classes = farflushClasses,
307 remove_memlocs = MemLocSet.empty,
308 remove_classes = ClassSet.empty,
309 dead_memlocs = MemLocSet.empty,
310 dead_classes = ClassSet.empty})],
(* Directive forcing the stackTop memloc to be committed (written back
 * to memory) without flushing anything else — keeps the in-memory
 * stack top current so the profiler observes correct frames. *)
313 val profileStackTopCommit' =
314 amd64.Assembly.directive_force
315 {commit_memlocs = MemLocSet.singleton (stackTop ()),
316 commit_classes = ClassSet.empty,
317 remove_memlocs = MemLocSet.empty,
318 remove_classes = ClassSet.empty,
319 dead_memlocs = MemLocSet.empty,
320 dead_classes = ClassSet.empty}
(* Emitted only when profiling is enabled; empty otherwise, so
 * non-profiled code pays nothing. *)
321 val profileStackTopCommit =
322 if !Control.profile <> Control.ProfileNone
323 then AppendList.single profileStackTopCommit'
324 else AppendList.empty
328 ("amd64GenerateTransfers.verifyLiveInfo",
329 fn () => amd64Liveness.LiveInfo.verifyLiveInfo {chunk = chunk,
330 liveInfo = liveInfo})
333 ("amd64GenerateTransfers.verifyJumpInfo",
334 fn () => amd64JumpInfo.verifyJumpInfo {chunk = chunk,
335 jumpInfo = jumpInfo})
339 ("amd64GenerateTransfers.verifyEntryTransfer",
340 fn () => amd64EntryTransfer.verifyEntryTransfer {chunk = chunk})
343 val {get: Label.t -> {block:Block.t},
346 = Property.destGetSetOnce
347 (Label.plist, Property.initRaise ("gotoInfo", Label.layout))
352 fn (block as Block.T {entry, ...}, labels)
354 val label = Entry.label entry
356 set(label, {block = block}) ;
364 (labels, ([], false),
365 fn (label, (labels, b))
366 => case amd64JumpInfo.getNear (jumpInfo, label)
367 of amd64JumpInfo.Count 0
369 val {block = Block.T {transfer, ...}}
373 (Transfer.nearTargets transfer,
375 => amd64JumpInfo.decNear (jumpInfo, label));
378 | _ => (label::labels, b))
382 else List.map (labels, #block o get)
384 val blocks = loop labels
388 val chunk = Chunk.T {data = data, blocks = blocks}
392 = amd64LoopInfo.createLoopInfo {chunk = chunk, farLoops = false}
394 = fn label => isLoopHeader(loopInfo, label)
398 = amd64LiveTransfers.computeLiveTransfers
400 transferRegs = transferRegs,
401 transferXmmRegs = transferXmmRegs,
406 val getLiveRegsTransfers
407 = #1 o amd64LiveTransfers.getLiveTransfers
408 val getLiveXmmRegsTransfers
409 = #2 o amd64LiveTransfers.getLiveTransfers
411 val {get = getLayoutInfo : Label.t -> Block.t option,
413 destroy = destLayoutInfo}
414 = Property.destGetSet(Label.plist,
415 Property.initRaise ("layoutInfo", Label.layout))
419 fn block as Block.T {entry, ...}
421 val label = Entry.label entry
423 setLayoutInfo(label, SOME block)
426 val {get = getProfileLabel : Label.t -> ProfileLabel.t option,
427 set = setProfileLabel,
428 destroy = destProfileLabel}
429 = Property.destGetSetOnce
431 Property.initRaise ("profileLabel", Label.layout))
435 fn Block.T {entry, profileLabel, ...}
437 val label = Entry.label entry
439 setProfileLabel(label, profileLabel)
444 val queue = ref (Queue.empty ())
446 fun enque x = queue := Queue.enque(!queue, x)
447 fun push x = stack := x::(!stack)
449 fun deque () = (case (!stack)
450 of [] => (case Queue.deque(!queue)
452 | SOME(queue', x) => (queue := queue';
454 | x::stack' => (stack := stack';
458 fun pushCompensationBlock {label, id}
460 val label' = Label.new label
461 val live = getLive(liveInfo, label)
462 val profileLabel = getProfileLabel label
463 val profileLabel' = Option.map (profileLabel, newProfileLabel)
465 = Block.T {entry = Entry.jump {label = label'},
466 profileLabel = profileLabel',
468 = (Assembly.directive_restoreregalloc
469 {live = MemLocSet.add
471 (LiveSet.toMemLocSet live,
476 transfer = Transfer.goto {target = label}}
478 setLive(liveInfo, label', live);
479 setProfileLabel(label', profileLabel');
480 incNear(jumpInfo, label');
481 Assert.assert("amd64GenerateTransfers.pushCompensationBlock",
482 fn () => getNear(jumpInfo, label') = Count 1);
483 amd64LiveTransfers.setLiveTransfersEmpty(liveTransfers, label');
484 setLayoutInfo(label', SOME block);
489 val c_stackP = amd64MLton.c_stackPContentsOperand
493 then AppendList.empty
496 ((* explicit cache in case there are no args *)
497 Assembly.directive_cache
498 {caches = [{register = Register.rsp,
499 memloc = valOf (Operand.deMemloc c_stackP),
502 fun unreserveRsp () =
504 then AppendList.empty
505 else AppendList.single (Assembly.directive_unreserve
506 {registers = [Register.rsp]})
(* Datatype replication: bring the Entry.t and Transfer.t constructors
 * into scope unqualified for the big case analyses below. *)
508 datatype z = datatype Entry.t
509 datatype z = datatype Transfer.t
510 fun generateAll (gef as GEF {effect,...})
511 {label, falling, unique} :
512 Assembly.t AppendList.t
513 = (case getLayoutInfo label
514 of NONE => AppendList.empty
515 | SOME (Block.T {entry, profileLabel, statements, transfer})
517 val _ = setLayoutInfo(label, NONE)
519 val isLoopHeader = fn _ => false
524 if isLoopHeader label handle _ => false
527 (Assembly.pseudoop_p2align
530 SOME (Immediate.int 7)))
532 then AppendList.empty
535 (Assembly.pseudoop_p2align
540 if falling andalso unique
541 then AppendList.empty
543 (* near entry & live transfer assumptions *)
547 (getLiveRegsTransfers
548 (liveTransfers, label),
549 fn (memloc,register,sync)
550 => {register = register,
555 (Assembly.directive_xmmassume
558 (getLiveXmmRegsTransfers
559 (liveTransfers, label),
560 fn (memloc,register,sync)
561 => {register = register,
565 reserve = false}))})]
570 (Assembly.label label),
572 (ProfileLabel.toAssemblyOpt profileLabel),
579 | CReturn {dsts, frameInfo, func, label}
582 if Vector.isEmpty dsts
583 then AppendList.empty
588 (Operand.cReturnTemps
589 (CFunction.return func),
592 (AppendList.fromList o Vector.fold2)
593 (dsts, srcs, [], fn ((dst,dstsize),src,stmts) =>
594 case Size.class dstsize of
596 (amd64.Assembly.instruction_mov
598 src = Operand.memloc src,
599 size = dstsize})::stmts
601 (amd64.Assembly.instruction_sse_movs
603 src = Operand.memloc src,
604 size = dstsize})::stmts)
610 val FrameInfo.T {size, frameLayoutsIndex}
616 = amd64MLton.gcState_stackTopContentsOperand ()
618 = amd64.Operand.immediate_int (~ size)
621 ((* stackTop += bytes *)
622 amd64.Assembly.instruction_binal
623 {oper = amd64.Instruction.ADD,
627 profileStackTopCommit)
634 [Assembly.pseudoop_p2align
635 (Immediate.int 4, NONE, NONE),
636 Assembly.pseudoop_long
637 [Immediate.int frameLayoutsIndex],
638 Assembly.label label],
640 (ProfileLabel.toAssemblyOpt profileLabel),
641 if CFunction.maySwitchThreads func
642 then (* entry from far assumptions *)
644 else (* near entry & live transfer assumptions *)
649 (getLiveRegsTransfers
650 (liveTransfers, label),
651 fn (memloc,register,sync)
652 => {register = register,
657 (Assembly.directive_xmmassume
660 (getLiveXmmRegsTransfers
661 (liveTransfers, label),
662 fn (memloc,register,sync)
663 => {register = register,
667 reserve = false}))})],
671 AppendList.append (near label, getReturn ())
674 => AppendList.appends
676 [Assembly.pseudoop_p2align
677 (Immediate.int 4, NONE, NONE),
678 Assembly.pseudoop_global label,
679 Assembly.pseudoop_hidden label,
680 Assembly.label label],
682 (ProfileLabel.toAssemblyOpt profileLabel),
683 (* entry from far assumptions *)
684 (farEntry AppendList.empty)]
686 frameInfo = FrameInfo.T {size,
692 [Assembly.pseudoop_p2align
693 (Immediate.int 4, NONE, NONE),
694 Assembly.pseudoop_long
695 [Immediate.int frameLayoutsIndex],
696 Assembly.label label],
698 (ProfileLabel.toAssemblyOpt profileLabel),
699 (* entry from far assumptions *)
703 = amd64MLton.gcState_stackTopContentsOperand ()
705 = amd64.Operand.immediate_int (~ size)
708 ((* stackTop += bytes *)
709 amd64.Assembly.instruction_binal
710 {oper = amd64.Instruction.ADD,
714 profileStackTopCommit)
716 | Handler {frameInfo = (FrameInfo.T
717 {frameLayoutsIndex, size}),
720 => AppendList.appends
722 [Assembly.pseudoop_p2align
723 (Immediate.int 4, NONE, NONE),
724 Assembly.pseudoop_long
725 [Immediate.int frameLayoutsIndex],
726 Assembly.label label],
728 (ProfileLabel.toAssemblyOpt profileLabel),
729 (* entry from far assumptions *)
733 = amd64MLton.gcState_stackTopContentsOperand ()
735 = amd64.Operand.immediate_int (~ size)
738 ((* stackTop += bytes *)
739 amd64.Assembly.instruction_binal
740 {oper = amd64.Instruction.ADD,
744 profileStackTopCommit)
748 [if !Control.Native.commented > 1
749 then AppendList.single
750 (Assembly.comment (Entry.toString entry))
751 else AppendList.empty,
752 if !Control.Native.commented > 2
753 then AppendList.single
756 (getLive(liveInfo, label),
760 MemLoc.toString memloc,
762 else AppendList.empty,
770 (livenessTransfer {transfer = transfer,
771 liveInfo = liveInfo})),
772 fn (assembly,(statements,live))
774 val Liveness.T {liveIn,dead, ...}
775 = livenessAssembly {assembly = assembly,
778 (if LiveSet.isEmpty dead
779 then assembly::statements
781 (Assembly.directive_force
782 {commit_memlocs = MemLocSet.empty,
783 commit_classes = ClassSet.empty,
784 remove_memlocs = MemLocSet.empty,
785 remove_classes = ClassSet.empty,
786 dead_memlocs = LiveSet.toMemLocSet dead,
787 dead_classes = ClassSet.empty})::
792 val statements = AppendList.fromList statements
794 val transfer = effect gef {label = label,
803 and effectDefault (gef as GEF {fall,...})
804 {label, transfer} : Assembly.t AppendList.t
806 (if !Control.Native.commented > 1
807 then AppendList.single
809 (Transfer.toString transfer))
810 else AppendList.empty,
815 live = getLive(liveInfo, target)}
816 | Iff {condition, truee, falsee}
819 = Instruction.condition_negate condition
822 = getLive(liveInfo, truee)
823 val truee_live_length
824 = LiveSet.size truee_live
827 = getLive(liveInfo, falsee)
828 val falsee_live_length
829 = LiveSet.size falsee_live
833 val id = Directive.Id.new ()
835 = pushCompensationBlock {label = falsee,
840 [Assembly.directive_force
841 {commit_memlocs = MemLocSet.empty,
842 commit_classes = nearflushClasses,
843 remove_memlocs = MemLocSet.empty,
844 remove_classes = ClassSet.empty,
845 dead_memlocs = MemLocSet.empty,
846 dead_classes = ClassSet.empty},
847 Assembly.instruction_jcc
848 {condition = condition_neg,
849 target = Operand.label falsee'},
850 Assembly.directive_saveregalloc
851 {live = MemLocSet.add
853 (LiveSet.toMemLocSet falsee_live,
864 val id = Directive.Id.new ()
865 val truee' = pushCompensationBlock {label = truee,
870 [Assembly.directive_force
871 {commit_memlocs = MemLocSet.empty,
872 commit_classes = nearflushClasses,
873 remove_memlocs = MemLocSet.empty,
874 remove_classes = ClassSet.empty,
875 dead_memlocs = MemLocSet.empty,
876 dead_classes = ClassSet.empty},
877 Assembly.instruction_jcc
878 {condition = condition,
879 target = Operand.label truee'},
880 Assembly.directive_saveregalloc
881 {live = MemLocSet.add
883 (LiveSet.toMemLocSet truee_live,
889 live = falsee_live}))
892 case (getLayoutInfo truee,
893 getLayoutInfo falsee)
894 of (NONE, SOME _) => fall_falsee ()
895 | (SOME _, NONE) => fall_truee ()
899 = if truee_live_length <= falsee_live_length
904 = case (getNear(jumpInfo, truee),
905 getNear(jumpInfo, falsee))
906 of (Count 1, Count 1) => default' ()
907 | (Count 1, _) => fall_truee ()
908 | (_, Count 1) => fall_falsee ()
911 case (getLoopDistance(loopInfo, label, truee),
912 getLoopDistance(loopInfo, label, falsee))
913 of (NONE, NONE) => default ()
914 | (SOME _, NONE) => fall_truee ()
915 | (NONE, SOME _) => fall_falsee ()
916 | (SOME dtruee, SOME dfalsee)
917 => (case Int.compare(dtruee, dfalsee)
918 of EQUAL => default ()
919 | LESS => fall_falsee ()
920 | GREATER => fall_truee ())
923 | Switch {test, cases, default}
925 val Liveness.T {dead, ...}
926 = livenessTransfer {transfer = transfer,
930 = case Operand.size test
935 = getLive(liveInfo, default)
938 = Transfer.Cases.mapToList
943 = getLive(liveInfo, target)
944 val id = Directive.Id.new ()
945 val target' = pushCompensationBlock
950 [Assembly.instruction_cmp
952 src2 = Operand.immediate_word k,
954 Assembly.instruction_jcc
955 {condition = Instruction.E,
956 target = Operand.label target'},
957 Assembly.directive_saveregalloc
958 {live = MemLocSet.add
960 (LiveSet.toMemLocSet target_live,
968 (Assembly.directive_force
969 {commit_memlocs = MemLocSet.empty,
970 commit_classes = nearflushClasses,
971 remove_memlocs = MemLocSet.empty,
972 remove_classes = ClassSet.empty,
973 dead_memlocs = MemLocSet.empty,
974 dead_classes = ClassSet.empty}),
975 AppendList.appends cases,
976 if LiveSet.isEmpty dead
977 then AppendList.empty
978 else AppendList.single
979 (Assembly.directive_force
980 {commit_memlocs = MemLocSet.empty,
981 commit_classes = ClassSet.empty,
982 remove_memlocs = MemLocSet.empty,
983 remove_classes = ClassSet.empty,
984 dead_memlocs = LiveSet.toMemLocSet dead,
985 dead_classes = ClassSet.empty}),
988 live = default_live})]
990 | Tail {target, live}
991 => (* flushing at far transfer *)
995 (Assembly.instruction_jmp
996 {target = Operand.label target,
998 | NonTail {target, live, return, handler, size}
1000 val _ = enque return
1001 val _ = case handler
1002 of SOME handler => enque handler
1006 = amd64MLton.stackTopTempContentsOperand ()
1007 val stackTopTempMinusWordDeref'
1008 = amd64MLton.stackTopTempMinusWordDeref ()
1009 val stackTopTempMinusWordDeref
1010 = amd64MLton.stackTopTempMinusWordDerefOperand ()
1012 = amd64MLton.gcState_stackTopContentsOperand ()
1013 val stackTopMinusWordDeref'
1014 = amd64MLton.gcState_stackTopMinusWordDeref ()
1015 val stackTopMinusWordDeref
1016 = amd64MLton.gcState_stackTopMinusWordDerefOperand ()
1018 = amd64.Operand.immediate_int size
1020 val liveReturn = amd64Liveness.LiveInfo.getLive(liveInfo, return)
1024 => amd64Liveness.LiveInfo.getLive(liveInfo, handler)
1025 | _ => LiveSet.empty
1026 val live = MemLocSet.unions [live,
1027 LiveSet.toMemLocSet liveReturn,
1028 LiveSet.toMemLocSet liveHandler]
1030 (* flushing at far transfer *)
1032 (if !Control.profile <> Control.ProfileNone
1033 then (AppendList.fromList
1034 [(* stackTopTemp = stackTop + bytes *)
1035 amd64.Assembly.instruction_mov
1036 {dst = stackTopTemp,
1038 size = pointerSize},
1039 amd64.Assembly.instruction_binal
1040 {oper = amd64.Instruction.ADD,
1043 size = pointerSize},
1044 (* *(stackTopTemp - WORD_SIZE) = return *)
1045 amd64.Assembly.instruction_lea
1046 {dst = stackTopTempMinusWordDeref,
1047 src = Operand.memloc_label return,
1048 size = pointerSize},
1049 amd64.Assembly.directive_force
1050 {commit_memlocs = MemLocSet.singleton stackTopTempMinusWordDeref',
1051 commit_classes = ClassSet.empty,
1052 remove_memlocs = MemLocSet.empty,
1053 remove_classes = ClassSet.empty,
1054 dead_memlocs = MemLocSet.empty,
1055 dead_classes = ClassSet.empty},
1056 (* stackTop = stackTopTemp *)
1057 amd64.Assembly.instruction_mov
1060 size = pointerSize},
1061 profileStackTopCommit'])
1062 else (AppendList.fromList
1063 [(* stackTop += bytes *)
1064 amd64.Assembly.instruction_binal
1065 {oper = amd64.Instruction.ADD,
1068 size = pointerSize},
1069 (* *(stackTop - WORD_SIZE) = return *)
1070 amd64.Assembly.instruction_lea
1071 {dst = stackTopMinusWordDeref,
1072 src = Operand.memloc_label return,
1073 size = pointerSize},
1074 amd64.Assembly.directive_force
1075 {commit_memlocs = MemLocSet.singleton stackTopMinusWordDeref',
1076 commit_classes = ClassSet.empty,
1077 remove_memlocs = MemLocSet.empty,
1078 remove_classes = ClassSet.empty,
1079 dead_memlocs = MemLocSet.empty,
1080 dead_classes = ClassSet.empty}]))
1082 (Assembly.instruction_jmp
1083 {target = Operand.label target,
1084 absolute = false})))
1088 val stackTopMinusWordDeref
1089 = amd64MLton.gcState_stackTopMinusWordDerefOperand ()
1091 (* flushing at far transfer *)
1095 (* jmp *(stackTop - WORD_SIZE) *)
1096 (amd64.Assembly.instruction_jmp
1097 {target = stackTopMinusWordDeref,
1103 = amd64MLton.gcState_exnStackContentsOperand ()
1105 = amd64MLton.stackTopTempContentsOperand ()
1107 = amd64MLton.gcState_stackTopContentsOperand ()
1109 = amd64MLton.gcState_stackBottomContentsOperand ()
1111 (* flushing at far transfer *)
1113 (if !Control.profile <> Control.ProfileNone
1114 then (AppendList.fromList
1115 [(* stackTopTemp = stackBottom + exnStack *)
1116 amd64.Assembly.instruction_mov
1117 {dst = stackTopTemp,
1119 size = pointerSize},
1120 amd64.Assembly.instruction_binal
1121 {oper = amd64.Instruction.ADD,
1124 size = pointerSize},
1125 (* stackTop = stackTopTemp *)
1126 amd64.Assembly.instruction_mov
1129 size = pointerSize},
1130 profileStackTopCommit'])
1131 else (AppendList.fromList
1132 [(* stackTop = stackBottom + exnStack *)
1133 amd64.Assembly.instruction_mov
1136 size = pointerSize},
1137 amd64.Assembly.instruction_binal
1138 {oper = amd64.Instruction.ADD,
1141 size = pointerSize}]))
1143 (* jmp *(stackTop - WORD_SIZE) *)
1144 (amd64.Assembly.instruction_jmp
1145 {target = amd64MLton.gcState_stackTopMinusWordDerefOperand (),
1148 | CCall {args, frameInfo, func, return}
(* Datatype replication: expose CFunction's Convention, SymbolScope,
 * and Target constructors unqualified for the C-call dispatch below. *)
1150 datatype z = datatype CFunction.Convention.t
1151 datatype z = datatype CFunction.SymbolScope.t
1152 datatype z = datatype CFunction.Target.t
1153 val CFunction.T {convention=_,
1157 val stackTopMinusWordDeref
1158 = amd64MLton.gcState_stackTopMinusWordDerefOperand ()
1159 val Liveness.T {dead, ...}
1160 = livenessTransfer {transfer = transfer,
1161 liveInfo = liveInfo}
(* Operands used to marshal C-call arguments: the C stack pointer, its
 * dereference slots for word/float/double stack arguments, and the
 * temporaries holding the callee function pointer and the values bound
 * for integer/xmm argument registers. *)
1162 val c_stackP = amd64MLton.c_stackPContentsOperand
1163 val c_stackPDerefWord = amd64MLton.c_stackPDerefWordOperand
1164 val c_stackPDerefFloat = amd64MLton.c_stackPDerefFloatOperand
1165 val c_stackPDerefDouble = amd64MLton.c_stackPDerefDoubleOperand
1166 val applyFFTempFun = amd64MLton.applyFFTempFunContentsOperand
1167 val applyFFTempRegArg = amd64MLton.applyFFTempRegArgContents
1168 val applyFFTempXmmRegArg = amd64MLton.applyFFTempXmmRegArgContents
1169 val (fptrArg, args) =
1171 Direct _ => (AppendList.empty, args)
1174 val (fptrArg, args) =
1176 fptrArg::args => (fptrArg, args)
1177 | _ => Error.bug "amd64GenerateTransfers.generateAll: CCall"
1180 (Assembly.instruction_mov
1182 dst = applyFFTempFun,
1183 size = #2 fptrArg}),
1186 val win64 = case !Control.Target.os of
1187 MLton.Platform.OS.Cygwin => true
1188 | MLton.Platform.OS.MinGW => true
1191 (reg_args, xmmreg_args),
1194 (args, (AppendList.empty,
1197 then [Register.rcx,Register.rdx,
1198 Register.r8,Register.r9]
1199 else [Register.rdi,Register.rsi,Register.rdx,
1200 Register.rcx,Register.r8,Register.r9],
1202 then [(XmmRegister.xmm0D,XmmRegister.xmm0S),
1203 (XmmRegister.xmm1D,XmmRegister.xmm1S),
1204 (XmmRegister.xmm2D,XmmRegister.xmm2S),
1205 (XmmRegister.xmm3D,XmmRegister.xmm3S)]
1206 else [(XmmRegister.xmm0D,XmmRegister.xmm0S),
1207 (XmmRegister.xmm1D,XmmRegister.xmm1S),
1208 (XmmRegister.xmm2D,XmmRegister.xmm2S),
1209 (XmmRegister.xmm3D,XmmRegister.xmm3S),
1210 (XmmRegister.xmm4D,XmmRegister.xmm4S),
1211 (XmmRegister.xmm5D,XmmRegister.xmm5S),
1212 (XmmRegister.xmm6D,XmmRegister.xmm6S),
1213 (XmmRegister.xmm7D,XmmRegister.xmm7S)])),
1216 (reg_args, xmmreg_args),
1218 (regs, xmmregs))) =>
1221 | prune (x::r) = if win64 then r else (x::r)
1223 (reg_args, xmmreg_args),
1226 if Size.eq (size, Size.DBLE)
1227 orelse Size.eq (size, Size.SNGL)
1228 then (case xmmregs of
1231 val i = List.length xmmregs
1232 val mem = applyFFTempXmmRegArg (size, i)
1234 if Size.eq (size, Size.DBLE)
1238 (AppendList.fromList
1239 [Assembly.instruction_sse_movs
1241 dst = Operand.memloc mem,
1243 Assembly.directive_xmmcache
1244 {caches = [{register = xmmreg,
1248 (mem, xmmreg)::xmmreg_args),
1250 (prune regs, xmmregs))
1253 (AppendList.fromList
1254 [Assembly.instruction_binal
1255 {oper = Instruction.SUB,
1257 src = Operand.immediate_int 8,
1258 size = pointerSize},
1259 Assembly.instruction_sse_movs
1261 dst = if Size.eq (size, Size.DBLE)
1262 then c_stackPDerefDouble
1263 else c_stackPDerefFloat,
1265 (reg_args, xmmreg_args),
1268 else if Size.eq (size, Size.BYTE)
1269 orelse Size.eq (size, Size.WORD)
1270 orelse Size.eq (size, Size.LONG)
1271 orelse Size.eq (size, Size.QUAD)
1275 val i = List.length regs
1276 val mem = applyFFTempRegArg i
1278 (AppendList.fromList
1279 [if Size.lt (size, Size.QUAD)
1280 then Assembly.instruction_movx
1281 {oper = Instruction.MOVZX,
1283 dst = Operand.memloc mem,
1286 else Assembly.instruction_mov
1288 dst = Operand.memloc mem,
1290 Assembly.directive_cache
1291 {caches = [{register = reg,
1294 ((mem,reg)::reg_args,
1297 (regs, prune xmmregs))
1300 (AppendList.fromList
1301 [Assembly.instruction_binal
1302 {oper = Instruction.SUB,
1304 src = Operand.immediate_int 8,
1305 size = pointerSize},
1306 if Size.lt (size, Size.QUAD)
1307 then Assembly.instruction_movx
1308 {oper = Instruction.MOVZX,
1310 dst = c_stackPDerefWord,
1313 else Assembly.instruction_mov
1315 dst = c_stackPDerefWord,
1317 (reg_args, xmmreg_args),
1320 else Error.bug "amd64GenerateTransfers.generateAll: CCall"
1322 (AppendList.append (setup_arg, setup_args),
1323 (reg_args, xmmreg_args),
1324 size_stack_arg + size_stack_args,
1327 val (setup_args, size_stack_args) =
1329 val space = 16 - (size_stack_args mod 16)
1332 then (setup_args, size_stack_args)
1333 else (AppendList.append
1335 (Assembly.instruction_binal
1336 {oper = Instruction.SUB,
1338 src = Operand.immediate_int space,
1339 size = pointerSize}),
1341 size_stack_args + space)
1343 (* Allocate shadow space *)
1344 val (setup_args, size_stack_args) =
1346 then (AppendList.append
1349 (Assembly.instruction_binal
1350 {oper = Instruction.SUB,
1352 src = Operand.immediate_int 32,
1353 size = pointerSize})),
1354 size_stack_args + 32)
1355 else (setup_args, size_stack_args)
1356 (* SysV ABI AMD64 requires %rax set to the number
1357 * of xmms registers passed for varags functions;
1358 * since %rax is caller-save, we conservatively
1359 * set %rax for all functions (not just varargs).
1361 val (reg_args, setup_args) =
1364 val mem = applyFFTempRegArg 8
1365 val reg = Register.rax
1367 ((mem,reg) :: reg_args,
1371 [Assembly.instruction_mov
1372 {src = Operand.immediate_int (List.length xmmreg_args),
1373 dst = Operand.memloc mem,
1375 Assembly.directive_cache
1376 {caches = [{register = reg,
1378 reserve = true}]}]))
1380 else (reg_args, setup_args)
1384 [amd64.Assembly.directive_xmmcache
1386 (xmmreg_args, fn (mem,reg) =>
1387 {register = reg, memloc = mem,
1389 amd64.Assembly.directive_cache
1391 (reg_args, fn (mem,reg) =>
1392 {register = reg, memloc = mem,
1397 SOME (FrameInfo.T {size, ...}) =>
1398 (* Entering runtime *)
1400 val return = valOf return
1401 val _ = enque return
1404 = amd64MLton.stackTopTempContentsOperand ()
1405 val stackTopTempMinusWordDeref'
1406 = amd64MLton.stackTopTempMinusWordDeref ()
1407 val stackTopTempMinusWordDeref
1408 = amd64MLton.stackTopTempMinusWordDerefOperand ()
1410 = amd64MLton.gcState_stackTopContentsOperand ()
1411 val stackTopMinusWordDeref'
1412 = amd64MLton.gcState_stackTopMinusWordDeref ()
1413 val stackTopMinusWordDeref
1414 = amd64MLton.gcState_stackTopMinusWordDerefOperand ()
1415 val bytes = amd64.Operand.immediate_int size
1418 amd64Liveness.LiveInfo.getLive(liveInfo, return)
1419 val {defs, ...} = Transfer.uses_defs_kills transfer
1425 case Operand.deMemloc oper of
1426 SOME memloc => LiveSet.remove (live, memloc)
1429 (runtimeTransfer (LiveSet.toMemLocSet live)
1430 (if !Control.profile <> Control.ProfileNone
1431 then (AppendList.fromList
1432 [(* stackTopTemp = stackTop + bytes *)
1433 amd64.Assembly.instruction_mov
1434 {dst = stackTopTemp,
1436 size = pointerSize},
1437 amd64.Assembly.instruction_binal
1438 {oper = amd64.Instruction.ADD,
1441 size = pointerSize},
1442 (* *(stackTopTemp - WORD_SIZE) = return *)
1443 amd64.Assembly.instruction_lea
1444 {dst = stackTopTempMinusWordDeref,
1445 src = Operand.memloc_label return,
1446 size = pointerSize},
1447 amd64.Assembly.directive_force
1448 {commit_memlocs = MemLocSet.singleton stackTopTempMinusWordDeref',
1449 commit_classes = ClassSet.empty,
1450 remove_memlocs = MemLocSet.empty,
1451 remove_classes = ClassSet.empty,
1452 dead_memlocs = MemLocSet.empty,
1453 dead_classes = ClassSet.empty},
1454 (* stackTop = stackTopTemp *)
1455 amd64.Assembly.instruction_mov
1458 size = pointerSize},
1459 profileStackTopCommit'])
1460 else (AppendList.fromList
1461 [(* stackTop += bytes *)
1462 amd64.Assembly.instruction_binal
1463 {oper = amd64.Instruction.ADD,
1466 size = pointerSize},
1467 (* *(stackTop - WORD_SIZE) = return *)
1468 amd64.Assembly.instruction_lea
1469 {dst = stackTopMinusWordDeref,
1470 src = Operand.memloc_label return,
1471 size = pointerSize},
1472 amd64.Assembly.directive_force
1473 {commit_memlocs = MemLocSet.singleton stackTopMinusWordDeref',
1474 commit_classes = ClassSet.empty,
1475 remove_memlocs = MemLocSet.empty,
1476 remove_classes = ClassSet.empty,
1477 dead_memlocs = MemLocSet.empty,
1478 dead_classes = ClassSet.empty}]))
1480 (Assembly.directive_force
1481 {commit_memlocs = LiveSet.toMemLocSet live,
1482 commit_classes = runtimeClasses,
1483 remove_memlocs = MemLocSet.empty,
1484 remove_classes = ClassSet.empty,
1485 dead_memlocs = MemLocSet.empty,
1486 dead_classes = ClassSet.empty})))
1490 (Assembly.directive_force
1491 {commit_memlocs = let
1492 val s = MemLocSet.empty
1493 val s = if CFunction.modifiesFrontier func
1497 val s = if CFunction.readsStackTop func
1504 commit_classes = ccallflushClasses,
1505 remove_memlocs = MemLocSet.empty,
1506 remove_classes = ClassSet.empty,
1507 dead_memlocs = LiveSet.toMemLocSet dead,
1508 dead_classes = ClassSet.empty})
1513 datatype z = datatype MLton.Platform.OS.t
1514 datatype z = datatype Control.Format.t
1516 val label = fn () => Label.fromString name
1518 (* how to access imported functions: *)
1519 (* Windows rewrites the symbol __imp__name *)
1520 val coff = fn () => Label.fromString ("_imp__" ^ name)
1521 val macho = fn () => label () (* @PLT is implicit *)
1522 val elf = fn () => Label.fromString (name ^ "@PLT")
1524 val importLabel = fn () =>
1525 case !Control.Target.os of
1527 | Darwin => macho ()
1531 val direct = fn () =>
1533 [Assembly.directive_ccall (),
1534 Assembly.instruction_call
1535 {target = Operand.label (label ()),
1540 [Assembly.directive_ccall (),
1541 Assembly.instruction_call
1542 {target = Operand.label (importLabel ()),
1545 val indirect = fn () =>
1547 [Assembly.directive_ccall (),
1548 Assembly.instruction_call
1549 {target = Operand.memloc_label (importLabel ()),
1554 !Control.positionIndependent) of
1555 (* Private functions can be easily reached
1556 * with a direct (rip-relative) call.
1558 (Private, _, _) => direct ()
1559 (* Call at the point of definition. *)
1560 | (Public, MinGW, _) => direct ()
1561 | (Public, Cygwin, _) => direct ()
1562 | (Public, Darwin, _) => direct ()
1563 (* ELF requires PLT even for public fns. *)
1564 | (Public, _, true) => plt ()
1565 | (Public, _, false) => direct ()
1566 (* Windows always does indirect calls to
1567 * imported functions. The importLabel has
1568 * the function address written to it.
1570 | (External, MinGW, _) => indirect ()
1571 | (External, Cygwin, _) => indirect ()
1572 | (External, Darwin, _) => plt ()
1573 (* ELF systems (and darwin too) create
1574 * procedure lookup tables (PLT) which
1575 * proxy the call to libraries. The PLT
1576 * does not contain an address, but instead
1577 * a stub function. Often the PLT is auto-
1578 * matically created. This applies to all
1579 * darwin-x86_64 function calls and calls
1580 * made from an ELF executable.
1582 | (External, _, true) => plt ()
1583 | (External, _, false) => direct ()
1587 [Assembly.directive_ccall (),
1588 Assembly.instruction_call
1589 {target = applyFFTempFun,
1591 val unreserve_args =
1593 [amd64.Assembly.directive_xmmunreserve
1594 {registers = List.map (xmmreg_args, #2)},
1595 amd64.Assembly.directive_unreserve
1596 {registers = List.map (reg_args, #2)}]
1598 = if isSome frameInfo
1599 then AppendList.single
1600 (Assembly.directive_force
1601 {commit_memlocs = MemLocSet.empty,
1602 commit_classes = ClassSet.empty,
1603 remove_memlocs = MemLocSet.empty,
1604 remove_classes = ClassSet.empty,
1605 dead_memlocs = MemLocSet.empty,
1606 dead_classes = runtimeClasses})
1607 else AppendList.single
1608 (Assembly.directive_force
1609 {commit_memlocs = MemLocSet.empty,
1610 commit_classes = ClassSet.empty,
1611 remove_memlocs = MemLocSet.empty,
1612 remove_classes = ClassSet.empty,
1614 val s = MemLocSet.empty
1615 val s = if CFunction.modifiesFrontier func
1619 val s = if CFunction.writesStackTop func
1626 dead_classes = ccallflushClasses})
1629 (Assembly.directive_return
1630 {returns = Operand.cReturnTemps returnTy})
1632 if size_stack_args > 0
1633 then (AppendList.single
1634 (Assembly.instruction_binal
1635 {oper = Instruction.ADD,
1637 src = Operand.immediate_int size_stack_args,
1638 size = pointerSize}))
1639 else AppendList.empty
1641 = if CFunction.maySwitchThreads func
1642 then (* Returning from runtime *)
1643 (farTransfer MemLocSet.empty
1646 (* jmp *(stackTop - WORD_SIZE) *)
1647 (amd64.Assembly.instruction_jmp
1648 {target = stackTopMinusWordDeref,
1651 of NONE => AppendList.empty
1652 | SOME l => (if isSome frameInfo
1653 then (* Don't need to trampoline,
1654 * since didn't switch threads,
1655 * but can't fall because
1656 * frame layout data is prefixed
1657 * to l's code; use fallNone
1658 * to force a jmp with near
1665 live = getLive (liveInfo, l)}
(* effectJumpTable: lower a Transfer.Switch via an indexed jump table when
 * the cases are dense enough; otherwise defer to effectDefault (see the
 * trailing branches).  The visible structure: pick a WordSize from the
 * scrutinee's operand size, analyze the case constants (min/max/parity),
 * optionally compress a common power-of-two stride into a shift/mask,
 * then emit: zero-extend of the test value, stride check, range check,
 * indirect jmp through the table, and the table itself in the data
 * section.  NOTE(review): this region of the file is shown with lines
 * elided; comments below describe only what the visible lines establish. *)
1682 and effectJumpTable (gef as GEF {...})
1683 {label, transfer} : Assembly.t AppendList.t
1685 of Switch {test, cases, default}
(* Map the test operand's assembly size to the matching word size;
 * any other size is a compiler bug for a Switch scrutinee. *)
1688 case Operand.size test of
1689 SOME Size.BYTE => WordSize.word8
1690 | SOME Size.WORD => WordSize.word16
1691 | SOME Size.LONG => WordSize.word32
1692 | SOME Size.QUAD => WordSize.word64
1693 | _ => Error.bug "amd64GenerateTransfers.effectJumpTable: Switch"
(* Unsigned word arithmetic helpers at the scrutinee's word size,
 * used below to analyze the case-constant set. *)
1695 val zero = WordX.zero ws
1696 val one = WordX.one ws
1697 val two = WordX.add (one, one)
1698 fun even w = WordX.isZero (WordX.mod (w, two, {signed = false}))
1699 fun incFn w = WordX.add (w, one)
1700 fun decFn w = WordX.sub (w, one)
1701 fun halfFn w = WordX.div (w, two, {signed = false})
1702 fun ltFn (w1, w2) = WordX.lt (w1, w2, {signed = false})
1703 val min = WordX.min (ws, {signed = false})
1704 fun minFn (w1, w2) = if WordX.lt (w1, w2, {signed = false})
1707 val max = WordX.max (ws, {signed = false})
1708 fun maxFn (w1, w2) = if WordX.gt (w1, w2, {signed = false})
1711 fun range (w1, w2) = WordX.sub (w2, w1)
(* Memlocs dead after this transfer, from the liveness analysis. *)
1713 val Liveness.T {dead, ...}
1714 = livenessTransfer {transfer = transfer,
1715 liveInfo = liveInfo}
(* Fold over the cases computing min/max key, count, and parity
 * (all-even / all-odd) of the case constants. *)
1721 val (minK,maxK,length,
1736 allEven andalso isEven,
1737 allOdd andalso not isEven)
(* If every key shares a factor of two, halve the keys and recurse,
 * accumulating a shift count and a parity mask — this compresses a
 * strided case set into a dense table index. *)
1740 if length > 1 andalso
1741 (allEven orelse allOdd)
1753 minK'', maxK'', length'',
1757 val shift' = 1 + shift''
1760 (WordX.lshift(mask'', WordX.one WordSize.word64),
1762 then WordX.one WordSize.word64
1763 else WordX.zero WordSize.word64)
1766 minK'', maxK'', length'',
1771 0, WordX.zero WordSize.word64)
(* doitTable: emit the actual jump-table code given the (possibly
 * compressed) key range [minK, minK+rangeK] and stride shift/mask. *)
1777 fun doitTable(cases,
1778 minK, _, rangeK, shift, mask)
1780 val jump_table_label
1781 = Label.newString "jumpTable"
1783 val idT = Directive.Id.new ()
(* The default target gains an extra near entry; compensation
 * blocks patch up register-allocation state at each table target. *)
1788 val _ = incNear(jumpInfo, default)
1790 pushCompensationBlock
(* filler: walk keys from minK upward; emit each case's label at its
 * slot and the default's label in every hole, so the table is dense. *)
1797 | (cases as (i,target)::cases',j)
1798 => if WordX.equals (i, j)
1801 = pushCompensationBlock
1805 (Immediate.label target')::
1806 (filler(cases', incFn j))
1808 else (Immediate.label
1809 (Promise.force defaultT))::
1810 (filler(cases, incFn j))
1812 val jump_table = filler (cases, minK)
1814 val idD = Directive.Id.new ()
1815 val defaultD = pushCompensationBlock
1819 val default_live = getLive(liveInfo, default)
(* Union of live sets across all targets — needed at the indirect jmp. *)
1824 fn ((_,target), live)
1825 => LiveSet.+(live, getLive(liveInfo, target)))
(* Temporaries: indexTemp holds the (zero-extended, adjusted) table
 * index, checkTemp holds the stride-parity check value. *)
1829 {base = Immediate.label (Label.fromString "indexTemp"),
1830 index = Immediate.zero,
1831 scale = Scale.Eight,
1833 class = MemLoc.Class.Temp}
1836 {base = Immediate.label (Label.fromString "checkTemp"),
1837 index = Immediate.zero,
1838 scale = Scale.Eight,
1840 class = MemLoc.Class.Temp}
(* address: jump_table_label[indexTemp * 8] — one quad per entry. *)
1843 {base = Immediate.label jump_table_label,
1845 scale = Scale.Eight,
1847 class = MemLoc.Class.Code}
1850 = case Operand.size test
1851 of SOME size => size
1853 val indexTemp' = indexTemp
1854 val indexTemp = Operand.memloc indexTemp
1855 val checkTemp' = checkTemp
1856 val checkTemp = Operand.memloc checkTemp
1857 val address = Operand.memloc address
(* Widen the test value to a QUAD index: MOVZX for sub-quad sizes,
 * plain MOV when it is already 64-bit. *)
1860 [if Size.lt(size, Size.QUAD)
1861 then AppendList.single
1862 (Assembly.instruction_movx
1863 {oper = Instruction.MOVZX,
1867 dstsize = Size.QUAD})
1868 else AppendList.single
1869 (Assembly.instruction_mov
(* Kill memlocs that die at this transfer before emitting the checks. *)
1873 if LiveSet.isEmpty dead
1874 then AppendList.empty
1875 else AppendList.single
1876 (Assembly.directive_force
1877 {commit_memlocs = MemLocSet.empty,
1878 commit_classes = ClassSet.empty,
1879 remove_memlocs = MemLocSet.empty,
1880 remove_classes = ClassSet.empty,
1881 dead_memlocs = LiveSet.toMemLocSet dead,
1882 dead_classes = ClassSet.empty}),
1885 val idC = Directive.Id.new ()
1886 val defaultC = pushCompensationBlock
1889 val _ = incNear(jumpInfo, default)
(* Stride check (only when shift > 0): AND the low `shift` bits into
 * checkTemp, subtract the expected parity mask, and jump to the
 * default on a nonzero result — i.e. keys off the stride go default. *)
1892 [AppendList.fromList
1893 [Assembly.instruction_mov
1897 Assembly.instruction_binal
1898 {oper = Instruction.AND,
1899 src = Operand.immediate_word
1900 (ones (shift, WordSize.word64)),
1903 if WordX.isZero mask
1904 then AppendList.empty
1905 else AppendList.single
1906 (Assembly.instruction_binal
1907 {oper = Instruction.SUB,
1908 src = Operand.immediate_word mask,
1912 [Assembly.directive_force
1913 {commit_memlocs = MemLocSet.empty,
1914 commit_classes = nearflushClasses,
1915 remove_memlocs = MemLocSet.empty,
1916 remove_classes = ClassSet.empty,
1917 dead_memlocs = MemLocSet.singleton checkTemp',
1918 dead_classes = ClassSet.empty},
1919 Assembly.instruction_jcc
1920 {condition = Instruction.NZ,
1921 target = Operand.label defaultC},
1922 Assembly.directive_saveregalloc
1924 live = MemLocSet.add
1926 (LiveSet.toMemLocSet default_live,
(* Divide the index by the stride: arithmetic shift right by `shift`. *)
1929 Assembly.instruction_sral
1930 {oper = Instruction.SAR,
1931 count = Operand.immediate_int shift,
1935 else AppendList.empty,
(* Rebase the index so the smallest key maps to table slot 0. *)
1936 if WordX.equals (minK, zero)
1937 then AppendList.empty
1938 else AppendList.single
1939 (Assembly.instruction_binal
1940 {oper = Instruction.SUB,
1941 src = Operand.immediate_word minK,
(* Range check: unsigned compare against rangeK; above → default.
 * Then the indirect jmp through the table, caching the index in
 * indexReg (rax) so the addressing mode can use it. *)
1945 [Assembly.directive_force
1946 {commit_memlocs = MemLocSet.empty,
1947 commit_classes = nearflushClasses,
1948 remove_memlocs = MemLocSet.empty,
1949 remove_classes = ClassSet.empty,
1950 dead_memlocs = MemLocSet.empty,
1951 dead_classes = ClassSet.empty},
1952 Assembly.directive_cache
1953 {caches = [{register = indexReg,
1954 memloc = indexTemp',
1956 Assembly.instruction_cmp
1958 src2 = Operand.immediate_word rangeK,
1960 Assembly.instruction_jcc
1961 {condition = Instruction.A,
1962 target = Operand.label defaultD},
1963 Assembly.directive_saveregalloc
1965 live = MemLocSet.add
1967 (LiveSet.toMemLocSet default_live,
1970 Assembly.instruction_jmp
1973 Assembly.directive_saveregalloc
1975 live = MemLocSet.add
1977 (LiveSet.toMemLocSet live,
1980 Assembly.directive_force
1981 {commit_memlocs = MemLocSet.empty,
1982 commit_classes = ClassSet.empty,
1983 remove_memlocs = MemLocSet.empty,
1984 remove_classes = ClassSet.empty,
1985 dead_memlocs = MemLocSet.singleton indexTemp',
1986 dead_classes = ClassSet.empty}],
(* Emit the table itself: switch to the data section, 16-byte align,
 * one .quad per entry, then back to text. *)
1988 [Assembly.pseudoop_data (),
1989 Assembly.pseudoop_p2align
1990 (Immediate.int 4, NONE, NONE),
1991 Assembly.label jump_table_label,
1992 Assembly.pseudoop_quad jump_table,
1993 Assembly.pseudoop_text ()]]
(* Density heuristic: use the table only when rangeK/2 < length
 * (visible condition); sort cases by key first.  Sparse or
 * non-word switches fall back to effectDefault. *)
2008 WordX.lt (WordX.div(rangeK,two,{signed=false}),
2009 WordX.fromIntInf (IntInf.fromInt length, ws),
2013 = List.insertionSort
2022 else effectDefault gef
2024 transfer = transfer}
2028 of Transfer.Cases.Word cases
2031 | _ => effectDefault gef
2033 transfer = transfer}
(* fallNone: transfer to a block we can never fall through to.
 * Flushes register-allocator state for a near transfer (cache
 * stackTop/frontier and the live transfer registers, commit the live
 * memlocs) and always emits an explicit near jmp to `label`, then
 * unreserves the transfer registers.  Compare fallDefault below, which
 * may omit the jmp when fallthrough is possible.
 * NOTE(review): lines are elided in this extract; comments cover only
 * what is visible. *)
2035 and fallNone (GEF {...})
2036 {label, live} : Assembly.t AppendList.t
(* Registers/xmm-registers carrying live values across this transfer,
 * as computed by amd64LiveTransfers. *)
2038 val liveRegsTransfer = getLiveRegsTransfers
2039 (liveTransfers, label)
2040 val liveXmmRegsTransfer = getLiveXmmRegsTransfers
2041 (liveTransfers, label)
(* Remove the register-carried memlocs from the live set: they travel
 * in registers, not in memory, across the transfer. *)
2047 fn ((memloc,_,_),live)
2048 => LiveSet.remove(live,memloc))
2051 (liveXmmRegsTransfer,
2053 fn ((memloc,_,_),live)
2054 => LiveSet.remove(live,memloc))
2057 = AppendList.fromList
2058 ((* flushing at near transfer *)
(* Pin stackTop/frontier into their dedicated registers (rbp/r12 per
 * the head of this file), reserved across the jump. *)
2059 (Assembly.directive_cache
2060 {caches = [{register = stackTopReg,
2061 memloc = stackTop (),
2063 {register = frontierReg,
2064 memloc = frontier (),
2065 reserve = true}]})::
2066 (Assembly.directive_xmmcache
2069 (liveXmmRegsTransfer,
2070 fn (temp,register,_)
2071 => {register = register,
2073 reserve = true})})::
2074 (Assembly.directive_cache
2078 fn (temp,register,_)
2079 => {register = register,
2081 reserve = true})})::
(* Commit everything still live in memory before leaving the block. *)
2082 (Assembly.directive_force
2083 {commit_memlocs = LiveSet.toMemLocSet live,
2084 commit_classes = nearflushClasses,
2085 remove_memlocs = MemLocSet.empty,
2086 remove_classes = ClassSet.empty,
2087 dead_memlocs = MemLocSet.empty,
2088 dead_classes = ClassSet.empty})::
(* Unconditional near jump — fallNone never falls through. *)
2089 (Assembly.instruction_jmp
2090 {target = Operand.label label,
2091 absolute = false})::
2092 (Assembly.directive_unreserve
2100 (Assembly.directive_xmmunreserve
2103 (liveXmmRegsTransfer,
(* If the target's layout info is still pending, generate it now. *)
2108 case getLayoutInfo label
2111 | SOME (Block.T {...})
(* fallDefault: transfer to a block that may be reached by fallthrough.
 * Performs the same near-transfer flush as fallNone (cache
 * stackTop/frontier and live transfer registers, commit live memlocs),
 * but emits the jmp only conditionally (visible if/then at 2173) and
 * consults the target's near-jump status (getNear at 2198) to decide
 * whether the code can simply fall into the target or must reset
 * allocator state.  NOTE(review): lines are elided in this extract;
 * comments cover only what is visible. *)
2116 and fallDefault (gef as GEF {generate,...})
2117 {label, live} : Assembly.t AppendList.t
(* Bring the jump-status constructors (e.g. near counts) into scope. *)
2119 datatype z = datatype amd64JumpInfo.status
2120 val liveRegsTransfer = getLiveRegsTransfers
2121 (liveTransfers, label)
2122 val liveXmmRegsTransfer = getLiveXmmRegsTransfers
2123 (liveTransfers, label)
(* Drop register-carried memlocs from the memory-live set. *)
2129 fn ((memloc,_,_),live)
2130 => LiveSet.remove(live,memloc))
2133 (liveXmmRegsTransfer,
2135 fn ((memloc,_,_),live)
2136 => LiveSet.remove(live,memloc))
2139 = AppendList.appends
2140 [AppendList.fromList
2141 [(* flushing at near transfer *)
2142 (Assembly.directive_cache
2143 {caches = [{register = stackTopReg,
2144 memloc = stackTop (),
2146 {register = frontierReg,
2147 memloc = frontier (),
2149 (Assembly.directive_cache
2153 fn (temp,register,_)
2154 => {register = register,
2157 (Assembly.directive_xmmcache
2160 (liveXmmRegsTransfer,
2161 fn (temp,register,_)
2162 => {register = register,
2165 (Assembly.directive_force
2166 {commit_memlocs = LiveSet.toMemLocSet live,
2167 commit_classes = nearflushClasses,
2168 remove_memlocs = MemLocSet.empty,
2169 remove_classes = ClassSet.empty,
2170 dead_memlocs = MemLocSet.empty,
2171 dead_classes = ClassSet.empty})],
(* Emit the jmp only when required (condition elided in this extract);
 * otherwise rely on fallthrough into the next emitted block. *)
2173 then AppendList.single
2174 (Assembly.instruction_jmp
2175 {target = Operand.label label,
2177 else AppendList.empty,
2179 [(Assembly.directive_unreserve
2187 (Assembly.directive_xmmunreserve
2190 (liveXmmRegsTransfer,
(* Pending layout info: dispatch on the target's near-jump count to
 * choose between generating it inline and resetting allocator state. *)
2194 case getLayoutInfo label
2197 | SOME (Block.T {...})
2198 => (case getNear(jumpInfo, label)
2204 | _ => AppendList.append
2207 (Assembly.directive_reset (),
(* Driver tail of generateTransfers: tie the generate/effect/fall
 * strategies together into a GEF record, pick the switch-lowering
 * strategy, seed the work queue, and drain it into assembly.
 * NOTE(review): heavily elided in this extract. *)
2214 fun make {generate, effect, fall}
2215 = generate (GEF {generate = generate,
(* Strategy selection (scrutinee elided — presumably the jump-table
 * control flag: 0 disables jump tables; TODO confirm): either plain
 * effectDefault or effectJumpTable for Switch transfers. *)
2221 of 0 => make {generate = generateAll,
2222 effect = effectDefault,
2224 | _ => make {generate = generateAll,
2225 effect = effectJumpTable,
(* Seed the queue with every function-entry block's label. *)
2228 val _ = List.foreach
2230 fn Block.T {entry, ...}
2232 of Func {label, ...} => enque label
(* doit: repeatedly dequeue a label, generate its assembly, and cons
 * non-empty results until the queue is exhausted. *)
2234 fun doit () : Assembly.t list list
2238 => (case AppendList.toList (generate {label = label,
2242 | block => block::(doit ())))
2243 val assembly = doit ()
(* Release per-chunk tables now that generation is finished. *)
2244 val _ = destLayoutInfo ()
2245 val _ = destProfileLabel ()
(* Prefix a .text pseudo-op; data (if any) is handled below (elided). *)
2247 val assembly = [Assembly.pseudoop_text ()]::assembly
2249 if List.isEmpty data
(* Export: generateTransfers paired with its timing/message hook
 * (presumably wrapped via tracerTop — continuation elided; TODO
 * confirm against the full source). *)
2256 val (generateTransfers, generateTransfers_msg)
(* generateTransfers_totals: print accumulated statistics for this pass
 * and each analysis it depends on (liveness, jump info, entry
 * transfers, loop info, live transfers), then unindent the report. *)
2261 fun generateTransfers_totals ()
2262 = (generateTransfers_msg ();
2264 amd64Liveness.LiveInfo.verifyLiveInfo_msg ();
2265 amd64JumpInfo.verifyJumpInfo_msg ();
2266 amd64EntryTransfer.verifyEntryTransfer_msg ();
2267 amd64LoopInfo.createLoopInfo_msg ();
2268 amd64LiveTransfers.computeLiveTransfers_totals ();
2269 Control.unindent ())