aboutsummaryrefslogtreecommitdiff
path: root/src/ARMeilleure/CodeGen/RegisterAllocators
diff options
context:
space:
mode:
authorTSR Berry <20988865+TSRBerry@users.noreply.github.com>2023-04-08 01:22:00 +0200
committerMary <thog@protonmail.com>2023-04-27 23:51:14 +0200
commitcee712105850ac3385cd0091a923438167433f9f (patch)
tree4a5274b21d8b7f938c0d0ce18736d3f2993b11b1 /src/ARMeilleure/CodeGen/RegisterAllocators
parentcd124bda587ef09668a971fa1cac1c3f0cfc9f21 (diff)
Move solution and projects to src
Diffstat (limited to 'src/ARMeilleure/CodeGen/RegisterAllocators')
-rw-r--r--src/ARMeilleure/CodeGen/RegisterAllocators/AllocationResult.cs19
-rw-r--r--src/ARMeilleure/CodeGen/RegisterAllocators/CopyResolver.cs259
-rw-r--r--src/ARMeilleure/CodeGen/RegisterAllocators/HybridAllocator.cs454
-rw-r--r--src/ARMeilleure/CodeGen/RegisterAllocators/IRegisterAllocator.cs12
-rw-r--r--src/ARMeilleure/CodeGen/RegisterAllocators/LinearScanAllocator.cs1101
-rw-r--r--src/ARMeilleure/CodeGen/RegisterAllocators/LiveInterval.cs396
-rw-r--r--src/ARMeilleure/CodeGen/RegisterAllocators/LiveIntervalList.cs40
-rw-r--r--src/ARMeilleure/CodeGen/RegisterAllocators/LiveRange.cs74
-rw-r--r--src/ARMeilleure/CodeGen/RegisterAllocators/RegisterMasks.cs50
-rw-r--r--src/ARMeilleure/CodeGen/RegisterAllocators/StackAllocator.cs25
-rw-r--r--src/ARMeilleure/CodeGen/RegisterAllocators/UseList.cs84
11 files changed, 2514 insertions, 0 deletions
diff --git a/src/ARMeilleure/CodeGen/RegisterAllocators/AllocationResult.cs b/src/ARMeilleure/CodeGen/RegisterAllocators/AllocationResult.cs
new file mode 100644
index 00000000..43e5c7e2
--- /dev/null
+++ b/src/ARMeilleure/CodeGen/RegisterAllocators/AllocationResult.cs
@@ -0,0 +1,19 @@
+namespace ARMeilleure.CodeGen.RegisterAllocators
+{
+ // Result of a register allocation pass: bit masks of the integer and
+ // vector registers the allocated code uses, plus the size of the stack
+ // region reserved for spilled values.
+ readonly struct AllocationResult
+ {
+ // Bit mask of integer registers assigned by the allocator.
+ public int IntUsedRegisters { get; }
+ // Bit mask of vector registers assigned by the allocator.
+ public int VecUsedRegisters { get; }
+ // Size of the stack spill region (taken from StackAllocator.TotalSize).
+ public int SpillRegionSize { get; }
+
+ public AllocationResult(
+ int intUsedRegisters,
+ int vecUsedRegisters,
+ int spillRegionSize)
+ {
+ IntUsedRegisters = intUsedRegisters;
+ VecUsedRegisters = vecUsedRegisters;
+ SpillRegionSize = spillRegionSize;
+ }
+ }
+} \ No newline at end of file
diff --git a/src/ARMeilleure/CodeGen/RegisterAllocators/CopyResolver.cs b/src/ARMeilleure/CodeGen/RegisterAllocators/CopyResolver.cs
new file mode 100644
index 00000000..587b1a02
--- /dev/null
+++ b/src/ARMeilleure/CodeGen/RegisterAllocators/CopyResolver.cs
@@ -0,0 +1,259 @@
+using ARMeilleure.IntermediateRepresentation;
+using System;
+using System.Collections.Generic;
+
+using static ARMeilleure.IntermediateRepresentation.Operand.Factory;
+using static ARMeilleure.IntermediateRepresentation.Operation.Factory;
+
+namespace ARMeilleure.CodeGen.RegisterAllocators
+{
+ // Emits the operations needed to connect the two halves of a split live
+ // interval: fills (stack to register), spills (register to stack) and
+ // register-to-register parallel copies.
+ class CopyResolver
+ {
+ // A set of register-to-register copies that must behave as if they all
+ // happened simultaneously (a copy's destination may be the source of
+ // another copy in the same set).
+ private class ParallelCopy
+ {
+ private readonly struct Copy
+ {
+ public Register Dest { get; }
+ public Register Source { get; }
+
+ public OperandType Type { get; }
+
+ public Copy(Register dest, Register source, OperandType type)
+ {
+ Dest = dest;
+ Source = source;
+ Type = type;
+ }
+ }
+
+ private readonly List<Copy> _copies;
+
+ public int Count => _copies.Count;
+
+ public ParallelCopy()
+ {
+ _copies = new List<Copy>();
+ }
+
+ public void AddCopy(Register dest, Register source, OperandType type)
+ {
+ _copies.Add(new Copy(dest, source, type));
+ }
+
+ // Appends to "sequence" an ordered list of operations equivalent to
+ // this parallel copy. Copies whose destination register is not needed
+ // as a source are emitted first (ready queue); the copies that remain
+ // form cycles, which are broken with XOR swaps so that no scratch
+ // register is required.
+ // "locations" maps an original source register to the register where
+ // its value currently lives; "sources" maps a destination register to
+ // its original source register.
+ public void Sequence(List<Operation> sequence)
+ {
+ Dictionary<Register, Register> locations = new Dictionary<Register, Register>();
+ Dictionary<Register, Register> sources = new Dictionary<Register, Register>();
+
+ Dictionary<Register, OperandType> types = new Dictionary<Register, OperandType>();
+
+ Queue<Register> pendingQueue = new Queue<Register>();
+ Queue<Register> readyQueue = new Queue<Register>();
+
+ foreach (Copy copy in _copies)
+ {
+ locations[copy.Source] = copy.Source;
+ sources[copy.Dest] = copy.Source;
+ types[copy.Dest] = copy.Type;
+
+ pendingQueue.Enqueue(copy.Dest);
+ }
+
+ foreach (Copy copy in _copies)
+ {
+ // If the destination is not used anywhere, we can assign it immediately.
+ if (!locations.ContainsKey(copy.Dest))
+ {
+ readyQueue.Enqueue(copy.Dest);
+ }
+ }
+
+ while (pendingQueue.TryDequeue(out Register current))
+ {
+ Register copyDest;
+ Register origSource;
+ Register copySource;
+
+ // Emit every copy whose destination register is currently free.
+ while (readyQueue.TryDequeue(out copyDest))
+ {
+ origSource = sources[copyDest];
+ copySource = locations[origSource];
+
+ OperandType type = types[copyDest];
+
+ EmitCopy(sequence, GetRegister(copyDest, type), GetRegister(copySource, type));
+
+ locations[origSource] = copyDest;
+
+ // If the value was copied out of its original register, and
+ // that register is itself the destination of another copy,
+ // then that copy has just become ready.
+ if (origSource == copySource && sources.ContainsKey(origSource))
+ {
+ readyQueue.Enqueue(origSource);
+ }
+ }
+
+ copyDest = current;
+ origSource = sources[copyDest];
+ copySource = locations[origSource];
+
+ // If the value is not already in place, we are inside a cycle of
+ // copies; break it by swapping the two registers instead of copying.
+ if (copyDest != copySource)
+ {
+ OperandType type = types[copyDest];
+
+ // Perform the swap on the full register width (the XOR trick
+ // operates on whole registers).
+ type = type.IsInteger() ? OperandType.I64 : OperandType.V128;
+
+ EmitXorSwap(sequence, GetRegister(copyDest, type), GetRegister(copySource, type));
+
+ locations[origSource] = copyDest;
+
+ Register swapOther = copySource;
+
+ if (copyDest != locations[sources[copySource]])
+ {
+ // Find the other swap destination register.
+ // To do that, we search all the pending registers, and pick
+ // the one where the copy source register is equal to the
+ // current destination register being processed (copyDest).
+ foreach (Register pending in pendingQueue)
+ {
+ // Is this a copy of pending <- copyDest?
+ if (copyDest == locations[sources[pending]])
+ {
+ swapOther = pending;
+
+ break;
+ }
+ }
+ }
+
+ // The value that was previously at "copyDest" now lives on
+ // "copySource" thanks to the swap, now we need to update the
+ // location for the next copy that is supposed to copy the value
+ // that used to live on "copyDest".
+ locations[sources[swapOther]] = copySource;
+ }
+ }
+ }
+
+ private static void EmitCopy(List<Operation> sequence, Operand x, Operand y)
+ {
+ sequence.Add(Operation(Instruction.Copy, x, y));
+ }
+
+ // Swaps x and y in place using the three-XOR trick, avoiding the
+ // need for a temporary register.
+ private static void EmitXorSwap(List<Operation> sequence, Operand x, Operand y)
+ {
+ sequence.Add(Operation(Instruction.BitwiseExclusiveOr, x, x, y));
+ sequence.Add(Operation(Instruction.BitwiseExclusiveOr, y, y, x));
+ sequence.Add(Operation(Instruction.BitwiseExclusiveOr, x, x, y));
+ }
+ }
+
+ // Lazily created: each is only allocated when a move of that kind is
+ // actually recorded.
+ private Queue<Operation> _fillQueue = null;
+ private Queue<Operation> _spillQueue = null;
+ private ParallelCopy _parallelCopy = null;
+
+ // True once at least one fill, spill or register copy has been added.
+ public bool HasCopy { get; private set; }
+
+ // Records the move needed to connect interval "left" (before the split
+ // point) to interval "right" (after it). Both intervals must belong to
+ // the same local variable. Stack-to-stack moves are not supported.
+ public void AddSplit(LiveInterval left, LiveInterval right)
+ {
+ if (left.Local != right.Local)
+ {
+ throw new ArgumentException("Intervals of different variables are not allowed.");
+ }
+
+ OperandType type = left.Local.Type;
+
+ if (left.IsSpilled && !right.IsSpilled)
+ {
+ // Move from the stack to a register.
+ AddSplitFill(left, right, type);
+ }
+ else if (!left.IsSpilled && right.IsSpilled)
+ {
+ // Move from a register to the stack.
+ AddSplitSpill(left, right, type);
+ }
+ else if (!left.IsSpilled && !right.IsSpilled && left.Register != right.Register)
+ {
+ // Move from one register to another.
+ AddSplitCopy(left, right, type);
+ }
+ else if (left.SpillOffset != right.SpillOffset)
+ {
+ // This would be the stack-to-stack move case, but this is not supported.
+ throw new ArgumentException("Both intervals were spilled.");
+ }
+ }
+
+ // Queues a fill of "left"'s stack slot into "right"'s register.
+ private void AddSplitFill(LiveInterval left, LiveInterval right, OperandType type)
+ {
+ if (_fillQueue == null)
+ {
+ _fillQueue = new Queue<Operation>();
+ }
+
+ Operand register = GetRegister(right.Register, type);
+ Operand offset = Const(left.SpillOffset);
+
+ _fillQueue.Enqueue(Operation(Instruction.Fill, register, offset));
+
+ HasCopy = true;
+ }
+
+ // Queues a spill of "left"'s register into "right"'s stack slot.
+ private void AddSplitSpill(LiveInterval left, LiveInterval right, OperandType type)
+ {
+ if (_spillQueue == null)
+ {
+ _spillQueue = new Queue<Operation>();
+ }
+
+ Operand offset = Const(right.SpillOffset);
+ Operand register = GetRegister(left.Register, type);
+
+ _spillQueue.Enqueue(Operation(Instruction.Spill, default, offset, register));
+
+ HasCopy = true;
+ }
+
+ // Adds a register-to-register copy to the parallel copy set.
+ private void AddSplitCopy(LiveInterval left, LiveInterval right, OperandType type)
+ {
+ if (_parallelCopy == null)
+ {
+ _parallelCopy = new ParallelCopy();
+ }
+
+ _parallelCopy.AddCopy(right.Register, left.Register, type);
+
+ HasCopy = true;
+ }
+
+ // Materializes all recorded moves as an ordered operation list: spills
+ // first (freeing their registers), then register copies, then fills.
+ public Operation[] Sequence()
+ {
+ List<Operation> sequence = new List<Operation>();
+
+ if (_spillQueue != null)
+ {
+ while (_spillQueue.TryDequeue(out Operation spillOp))
+ {
+ sequence.Add(spillOp);
+ }
+ }
+
+ _parallelCopy?.Sequence(sequence);
+
+ if (_fillQueue != null)
+ {
+ while (_fillQueue.TryDequeue(out Operation fillOp))
+ {
+ sequence.Add(fillOp);
+ }
+ }
+
+ return sequence.ToArray();
+ }
+
+ // Builds a register operand of the given type for a physical register.
+ private static Operand GetRegister(Register reg, OperandType type)
+ {
+ return Register(reg.Index, reg.Type, type);
+ }
+ }
+} \ No newline at end of file
diff --git a/src/ARMeilleure/CodeGen/RegisterAllocators/HybridAllocator.cs b/src/ARMeilleure/CodeGen/RegisterAllocators/HybridAllocator.cs
new file mode 100644
index 00000000..25952c77
--- /dev/null
+++ b/src/ARMeilleure/CodeGen/RegisterAllocators/HybridAllocator.cs
@@ -0,0 +1,454 @@
+using ARMeilleure.IntermediateRepresentation;
+using ARMeilleure.Translation;
+using System;
+using System.Diagnostics;
+using System.Numerics;
+using System.Runtime.CompilerServices;
+using static ARMeilleure.IntermediateRepresentation.Operand.Factory;
+using static ARMeilleure.IntermediateRepresentation.Operation.Factory;
+
+namespace ARMeilleure.CodeGen.RegisterAllocators
+{
+ // Fast two-pass register allocator. Locals whose entire live range sits
+ // inside a single block may be kept in a free register; all other locals
+ // live on the stack and are accessed through a small set of reserved spill
+ // temp registers, with fill/spill operations inserted around each use.
+ class HybridAllocator : IRegisterAllocator
+ {
+ // Facts gathered per block in the first pass: whether it contains a
+ // call, and which registers its operations write explicitly.
+ private readonly struct BlockInfo
+ {
+ public bool HasCall { get; }
+
+ public int IntFixedRegisters { get; }
+ public int VecFixedRegisters { get; }
+
+ public BlockInfo(bool hasCall, int intFixedRegisters, int vecFixedRegisters)
+ {
+ HasCall = hasCall;
+ IntFixedRegisters = intFixedRegisters;
+ VecFixedRegisters = vecFixedRegisters;
+ }
+ }
+
+ // Allocation bookkeeping for a single local variable.
+ private struct LocalInfo
+ {
+ // Total expected number of accesses (assignments + uses).
+ public int Uses { get; set; }
+ // Number of accesses already processed by the allocation pass.
+ public int UsesAllocated { get; set; }
+ // Operation sequence number at which Temp was assigned; a temp is
+ // only reused within the same operation.
+ public int Sequence { get; set; }
+ // Spill temp register currently holding this local, if any.
+ public Operand Temp { get; set; }
+ // Register allocated to this local, or default if it lives on the stack.
+ public Operand Register { get; set; }
+ // Constant operand holding the local's stack spill offset, if spilled.
+ public Operand SpillOffset { get; set; }
+ public OperandType Type { get; }
+
+ // Lowest and highest index of the blocks where this local appears.
+ private int _first;
+ private int _last;
+
+ // True if every access to this local happens inside a single block.
+ public bool IsBlockLocal => _first == _last;
+
+ public LocalInfo(OperandType type, int uses, int blkIndex)
+ {
+ Uses = uses;
+ Type = type;
+
+ UsesAllocated = 0;
+ Sequence = 0;
+ Temp = default;
+ Register = default;
+ SpillOffset = default;
+
+ _first = -1;
+ _last = -1;
+
+ SetBlockIndex(blkIndex);
+ }
+
+ // Widens the [_first, _last] block range to include blkIndex.
+ public void SetBlockIndex(int blkIndex)
+ {
+ if (_first == -1 || blkIndex < _first)
+ {
+ _first = blkIndex;
+ }
+
+ if (_last == -1 || blkIndex > _last)
+ {
+ _last = blkIndex;
+ }
+ }
+ }
+
+ // Number of spill temp registers reserved per bank; SelectSpillTemps
+ // asserts that exactly this many can be found.
+ private const int MaxIROperands = 4;
+ // The "visited" state is stored in the MSB of the local's value.
+ private const ulong VisitedMask = 1ul << 63;
+
+ private BlockInfo[] _blockInfo;
+ private LocalInfo[] _localInfo;
+
+ // Returns true if the local has already been numbered (i.e. has a
+ // LocalInfo entry).
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ private static bool IsVisited(Operand local)
+ {
+ Debug.Assert(local.Kind == OperandKind.LocalVariable);
+
+ return (local.GetValueUnsafe() & VisitedMask) != 0;
+ }
+
+ // Marks the local as numbered by setting the MSB of its value.
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ private static void SetVisited(Operand local)
+ {
+ Debug.Assert(local.Kind == OperandKind.LocalVariable);
+
+ local.GetValueUnsafe() |= VisitedMask;
+ }
+
+ // Gets the LocalInfo slot of a numbered local. The low bits of the
+ // local's value hold its 1-based index into _localInfo.
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ private ref LocalInfo GetLocalInfo(Operand local)
+ {
+ Debug.Assert(local.Kind == OperandKind.LocalVariable);
+ Debug.Assert(IsVisited(local), "Local variable not visited. Used before defined?");
+
+ return ref _localInfo[(uint)local.GetValueUnsafe() - 1];
+ }
+
+ public AllocationResult RunPass(ControlFlowGraph cfg, StackAllocator stackAlloc, RegisterMasks regMasks)
+ {
+ int intUsedRegisters = 0;
+ int vecUsedRegisters = 0;
+
+ int intFreeRegisters = regMasks.IntAvailableRegisters;
+ int vecFreeRegisters = regMasks.VecAvailableRegisters;
+
+ _blockInfo = new BlockInfo[cfg.Blocks.Count];
+ // Initial capacity is a guess; grown on demand when locals are numbered.
+ _localInfo = new LocalInfo[cfg.Blocks.Count * 3];
+
+ int localInfoCount = 0;
+
+ // First pass: number each local at its definition, record the blocks
+ // where it appears, and collect per-block call/fixed-register info.
+ for (int index = cfg.PostOrderBlocks.Length - 1; index >= 0; index--)
+ {
+ BasicBlock block = cfg.PostOrderBlocks[index];
+
+ int intFixedRegisters = 0;
+ int vecFixedRegisters = 0;
+
+ bool hasCall = false;
+
+ for (Operation node = block.Operations.First; node != default; node = node.ListNext)
+ {
+ if (node.Instruction == Instruction.Call)
+ {
+ hasCall = true;
+ }
+
+ foreach (Operand source in node.SourcesUnsafe)
+ {
+ if (source.Kind == OperandKind.LocalVariable)
+ {
+ GetLocalInfo(source).SetBlockIndex(block.Index);
+ }
+ else if (source.Kind == OperandKind.Memory)
+ {
+ // Memory operands may reference locals through their base
+ // address and index registers.
+ MemoryOperand memOp = source.GetMemory();
+
+ if (memOp.BaseAddress != default)
+ {
+ GetLocalInfo(memOp.BaseAddress).SetBlockIndex(block.Index);
+ }
+
+ if (memOp.Index != default)
+ {
+ GetLocalInfo(memOp.Index).SetBlockIndex(block.Index);
+ }
+ }
+ }
+
+ foreach (Operand dest in node.DestinationsUnsafe)
+ {
+ if (dest.Kind == OperandKind.LocalVariable)
+ {
+ if (IsVisited(dest))
+ {
+ GetLocalInfo(dest).SetBlockIndex(block.Index);
+ }
+ else
+ {
+ // First definition of this local: assign it a 1-based
+ // number and create its LocalInfo entry.
+ dest.NumberLocal(++localInfoCount);
+
+ if (localInfoCount > _localInfo.Length)
+ {
+ Array.Resize(ref _localInfo, localInfoCount * 2);
+ }
+
+ SetVisited(dest);
+ GetLocalInfo(dest) = new LocalInfo(dest.Type, UsesCount(dest), block.Index);
+ }
+ }
+ else if (dest.Kind == OperandKind.Register)
+ {
+ // Track registers written explicitly by the block.
+ if (dest.Type.IsInteger())
+ {
+ intFixedRegisters |= 1 << dest.GetRegister().Index;
+ }
+ else
+ {
+ vecFixedRegisters |= 1 << dest.GetRegister().Index;
+ }
+ }
+ }
+ }
+
+ _blockInfo[block.Index] = new BlockInfo(hasCall, intFixedRegisters, vecFixedRegisters);
+ }
+
+ int sequence = 0;
+
+ // Second pass: rewrite local variable operands into registers or spill
+ // temps, inserting fill/spill operations as needed.
+ for (int index = cfg.PostOrderBlocks.Length - 1; index >= 0; index--)
+ {
+ BasicBlock block = cfg.PostOrderBlocks[index];
+
+ ref BlockInfo blkInfo = ref _blockInfo[block.Index];
+
+ int intLocalFreeRegisters = intFreeRegisters & ~blkInfo.IntFixedRegisters;
+ int vecLocalFreeRegisters = vecFreeRegisters & ~blkInfo.VecFixedRegisters;
+
+ int intCallerSavedRegisters = blkInfo.HasCall ? regMasks.IntCallerSavedRegisters : 0;
+ int vecCallerSavedRegisters = blkInfo.HasCall ? regMasks.VecCallerSavedRegisters : 0;
+
+ // Reserve spill temps for this block, preferring caller-saved
+ // registers the block does not write explicitly.
+ int intSpillTempRegisters = SelectSpillTemps(
+ intCallerSavedRegisters & ~blkInfo.IntFixedRegisters,
+ intLocalFreeRegisters);
+ int vecSpillTempRegisters = SelectSpillTemps(
+ vecCallerSavedRegisters & ~blkInfo.VecFixedRegisters,
+ vecLocalFreeRegisters);
+
+ // Spill temps and (if the block calls) caller-saved registers are
+ // not available for block-local variables.
+ intLocalFreeRegisters &= ~(intSpillTempRegisters | intCallerSavedRegisters);
+ vecLocalFreeRegisters &= ~(vecSpillTempRegisters | vecCallerSavedRegisters);
+
+ for (Operation node = block.Operations.First; node != default; node = node.ListNext)
+ {
+ int intLocalUse = 0;
+ int vecLocalUse = 0;
+
+ // Returns the operand to use in place of "local" at this node:
+ // its allocated register, or a spill temp filled from its stack
+ // slot. On the local's last use, its register is returned to the
+ // free set.
+ Operand AllocateRegister(Operand local)
+ {
+ ref LocalInfo info = ref GetLocalInfo(local);
+
+ info.UsesAllocated++;
+
+ Debug.Assert(info.UsesAllocated <= info.Uses);
+
+ if (info.Register != default)
+ {
+ if (info.UsesAllocated == info.Uses)
+ {
+ Register reg = info.Register.GetRegister();
+
+ if (local.Type.IsInteger())
+ {
+ intLocalFreeRegisters |= 1 << reg.Index;
+ }
+ else
+ {
+ vecLocalFreeRegisters |= 1 << reg.Index;
+ }
+ }
+
+ return info.Register;
+ }
+ else
+ {
+ Operand temp = info.Temp;
+
+ // A temp assigned by an earlier operation is stale; pick a
+ // fresh one for this operation.
+ if (temp == default || info.Sequence != sequence)
+ {
+ temp = local.Type.IsInteger()
+ ? GetSpillTemp(local, intSpillTempRegisters, ref intLocalUse)
+ : GetSpillTemp(local, vecSpillTempRegisters, ref vecLocalUse);
+
+ info.Sequence = sequence;
+ info.Temp = temp;
+ }
+
+ // Load the spilled value into the temp before this node.
+ Operation fillOp = Operation(Instruction.Fill, temp, info.SpillOffset);
+
+ block.Operations.AddBefore(node, fillOp);
+
+ return temp;
+ }
+ }
+
+ bool folded = false;
+
+ // If operation is a copy of a local and that local is living on the stack, we turn the copy into
+ // a fill, instead of inserting a fill before it.
+ if (node.Instruction == Instruction.Copy)
+ {
+ Operand source = node.GetSource(0);
+
+ if (source.Kind == OperandKind.LocalVariable)
+ {
+ ref LocalInfo info = ref GetLocalInfo(source);
+
+ if (info.Register == default)
+ {
+ Operation fillOp = Operation(Instruction.Fill, node.Destination, info.SpillOffset);
+
+ block.Operations.AddBefore(node, fillOp);
+ block.Operations.Remove(node);
+
+ node = fillOp;
+
+ folded = true;
+ }
+ }
+ }
+
+ if (!folded)
+ {
+ foreach (ref Operand source in node.SourcesUnsafe)
+ {
+ if (source.Kind == OperandKind.LocalVariable)
+ {
+ source = AllocateRegister(source);
+ }
+ else if (source.Kind == OperandKind.Memory)
+ {
+ MemoryOperand memOp = source.GetMemory();
+
+ if (memOp.BaseAddress != default)
+ {
+ memOp.BaseAddress = AllocateRegister(memOp.BaseAddress);
+ }
+
+ if (memOp.Index != default)
+ {
+ memOp.Index = AllocateRegister(memOp.Index);
+ }
+ }
+ }
+ }
+
+ int intLocalAsg = 0;
+ int vecLocalAsg = 0;
+
+ foreach (ref Operand dest in node.DestinationsUnsafe)
+ {
+ if (dest.Kind != OperandKind.LocalVariable)
+ {
+ continue;
+ }
+
+ ref LocalInfo info = ref GetLocalInfo(dest);
+
+ // First access of this local: decide between a free register
+ // (block-local only) or a stack slot.
+ if (info.UsesAllocated == 0)
+ {
+ int mask = dest.Type.IsInteger()
+ ? intLocalFreeRegisters
+ : vecLocalFreeRegisters;
+
+ if (info.IsBlockLocal && mask != 0)
+ {
+ int selectedReg = BitOperations.TrailingZeroCount(mask);
+
+ info.Register = Register(selectedReg, info.Type.ToRegisterType(), info.Type);
+
+ if (dest.Type.IsInteger())
+ {
+ intLocalFreeRegisters &= ~(1 << selectedReg);
+ intUsedRegisters |= 1 << selectedReg;
+ }
+ else
+ {
+ vecLocalFreeRegisters &= ~(1 << selectedReg);
+ vecUsedRegisters |= 1 << selectedReg;
+ }
+ }
+ else
+ {
+ info.Register = default;
+ info.SpillOffset = Const(stackAlloc.Allocate(dest.Type.GetSizeInBytes()));
+ }
+ }
+
+ info.UsesAllocated++;
+
+ Debug.Assert(info.UsesAllocated <= info.Uses);
+
+ if (info.Register != default)
+ {
+ dest = info.Register;
+ }
+ else
+ {
+ Operand temp = info.Temp;
+
+ if (temp == default || info.Sequence != sequence)
+ {
+ temp = dest.Type.IsInteger()
+ ? GetSpillTemp(dest, intSpillTempRegisters, ref intLocalAsg)
+ : GetSpillTemp(dest, vecSpillTempRegisters, ref vecLocalAsg);
+
+ info.Sequence = sequence;
+ info.Temp = temp;
+ }
+
+ dest = temp;
+
+ // Store the temp back to the local's stack slot right after
+ // this node.
+ Operation spillOp = Operation(Instruction.Spill, default, info.SpillOffset, temp);
+
+ block.Operations.AddAfter(node, spillOp);
+
+ node = spillOp;
+ }
+ }
+
+ sequence++;
+
+ intUsedRegisters |= intLocalAsg | intLocalUse;
+ vecUsedRegisters |= vecLocalAsg | vecLocalUse;
+ }
+ }
+
+ return new AllocationResult(intUsedRegisters, vecUsedRegisters, stackAlloc.TotalSize);
+ }
+
+ // Selects MaxIROperands registers to serve as spill temps, taking the
+ // lowest set bits of mask0 first and falling back to mask1.
+ private static int SelectSpillTemps(int mask0, int mask1)
+ {
+ int selection = 0;
+ int count = 0;
+
+ while (count < MaxIROperands && mask0 != 0)
+ {
+ // Isolate the lowest set bit.
+ int mask = mask0 & -mask0;
+
+ selection |= mask;
+
+ mask0 &= ~mask;
+
+ count++;
+ }
+
+ while (count < MaxIROperands && mask1 != 0)
+ {
+ int mask = mask1 & -mask1;
+
+ selection |= mask;
+
+ mask1 &= ~mask;
+
+ count++;
+ }
+
+ Debug.Assert(count == MaxIROperands, "No enough registers for spill temps.");
+
+ return selection;
+ }
+
+ // Picks the lowest spill temp from freeMask not yet claimed in useMask,
+ // marks it as used, and returns it as a register operand of the local's
+ // type.
+ private static Operand GetSpillTemp(Operand local, int freeMask, ref int useMask)
+ {
+ int selectedReg = BitOperations.TrailingZeroCount(freeMask & ~useMask);
+
+ useMask |= 1 << selectedReg;
+
+ return Register(selectedReg, local.Type.ToRegisterType(), local.Type);
+ }
+
+ // Total number of accesses (assignments + uses) recorded for the local.
+ private static int UsesCount(Operand local)
+ {
+ return local.AssignmentsCount + local.UsesCount;
+ }
+ }
+} \ No newline at end of file
diff --git a/src/ARMeilleure/CodeGen/RegisterAllocators/IRegisterAllocator.cs b/src/ARMeilleure/CodeGen/RegisterAllocators/IRegisterAllocator.cs
new file mode 100644
index 00000000..8f236c25
--- /dev/null
+++ b/src/ARMeilleure/CodeGen/RegisterAllocators/IRegisterAllocator.cs
@@ -0,0 +1,12 @@
+using ARMeilleure.Translation;
+
+namespace ARMeilleure.CodeGen.RegisterAllocators
+{
+ // Common interface of the register allocation passes (implemented by
+ // HybridAllocator and LinearScanAllocator).
+ interface IRegisterAllocator
+ {
+ // Assigns registers and stack spill slots to the local variables of the
+ // given control flow graph, returning the used-register masks and the
+ // spill region size.
+ AllocationResult RunPass(
+ ControlFlowGraph cfg,
+ StackAllocator stackAlloc,
+ RegisterMasks regMasks);
+ }
+} \ No newline at end of file
diff --git a/src/ARMeilleure/CodeGen/RegisterAllocators/LinearScanAllocator.cs b/src/ARMeilleure/CodeGen/RegisterAllocators/LinearScanAllocator.cs
new file mode 100644
index 00000000..d80157af
--- /dev/null
+++ b/src/ARMeilleure/CodeGen/RegisterAllocators/LinearScanAllocator.cs
@@ -0,0 +1,1101 @@
+using ARMeilleure.Common;
+using ARMeilleure.IntermediateRepresentation;
+using ARMeilleure.Translation;
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Linq;
+using System.Numerics;
+
+namespace ARMeilleure.CodeGen.RegisterAllocators
+{
+ // Based on:
+ // "Linear Scan Register Allocation for the Java(tm) HotSpot Client Compiler".
+ // http://www.christianwimmer.at/Publications/Wimmer04a/Wimmer04a.pdf
+ class LinearScanAllocator : IRegisterAllocator
+ {
+ private const int InstructionGap = 2;
+ private const int InstructionGapMask = InstructionGap - 1;
+
+ private HashSet<int> _blockEdges;
+ private LiveRange[] _blockRanges;
+ private BitMap[] _blockLiveIn;
+
+ private List<LiveInterval> _intervals;
+ private LiveInterval[] _parentIntervals;
+
+ private List<(IntrusiveList<Operation>, Operation)> _operationNodes;
+ private int _operationsCount;
+
+ private class AllocationContext
+ {
+ public RegisterMasks Masks { get; }
+
+ public StackAllocator StackAlloc { get; }
+
+ public BitMap Active { get; }
+ public BitMap Inactive { get; }
+
+ public int IntUsedRegisters { get; set; }
+ public int VecUsedRegisters { get; set; }
+
+ private readonly int[] _intFreePositions;
+ private readonly int[] _vecFreePositions;
+ private readonly int _intFreePositionsCount;
+ private readonly int _vecFreePositionsCount;
+
+ public AllocationContext(StackAllocator stackAlloc, RegisterMasks masks, int intervalsCount)
+ {
+ StackAlloc = stackAlloc;
+ Masks = masks;
+
+ Active = new BitMap(Allocators.Default, intervalsCount);
+ Inactive = new BitMap(Allocators.Default, intervalsCount);
+
+ PopulateFreePositions(RegisterType.Integer, out _intFreePositions, out _intFreePositionsCount);
+ PopulateFreePositions(RegisterType.Vector, out _vecFreePositions, out _vecFreePositionsCount);
+
+ void PopulateFreePositions(RegisterType type, out int[] positions, out int count)
+ {
+ positions = new int[masks.RegistersCount];
+ count = BitOperations.PopCount((uint)masks.GetAvailableRegisters(type));
+
+ int mask = masks.GetAvailableRegisters(type);
+
+ for (int i = 0; i < positions.Length; i++)
+ {
+ if ((mask & (1 << i)) != 0)
+ {
+ positions[i] = int.MaxValue;
+ }
+ }
+ }
+ }
+
+ public void GetFreePositions(RegisterType type, in Span<int> positions, out int count)
+ {
+ if (type == RegisterType.Integer)
+ {
+ _intFreePositions.CopyTo(positions);
+
+ count = _intFreePositionsCount;
+ }
+ else
+ {
+ Debug.Assert(type == RegisterType.Vector);
+
+ _vecFreePositions.CopyTo(positions);
+
+ count = _vecFreePositionsCount;
+ }
+ }
+
+ public void MoveActiveToInactive(int bit)
+ {
+ Move(Active, Inactive, bit);
+ }
+
+ public void MoveInactiveToActive(int bit)
+ {
+ Move(Inactive, Active, bit);
+ }
+
+ private static void Move(BitMap source, BitMap dest, int bit)
+ {
+ source.Clear(bit);
+
+ dest.Set(bit);
+ }
+ }
+
+ public AllocationResult RunPass(
+ ControlFlowGraph cfg,
+ StackAllocator stackAlloc,
+ RegisterMasks regMasks)
+ {
+ NumberLocals(cfg, regMasks.RegistersCount);
+
+ var context = new AllocationContext(stackAlloc, regMasks, _intervals.Count);
+
+ BuildIntervals(cfg, context);
+
+ for (int index = 0; index < _intervals.Count; index++)
+ {
+ LiveInterval current = _intervals[index];
+
+ if (current.IsEmpty)
+ {
+ continue;
+ }
+
+ if (current.IsFixed)
+ {
+ context.Active.Set(index);
+
+ if (current.IsFixedAndUsed)
+ {
+ if (current.Register.Type == RegisterType.Integer)
+ {
+ context.IntUsedRegisters |= 1 << current.Register.Index;
+ }
+ else /* if (interval.Register.Type == RegisterType.Vector) */
+ {
+ context.VecUsedRegisters |= 1 << current.Register.Index;
+ }
+ }
+
+ continue;
+ }
+
+ AllocateInterval(context, current, index, regMasks.RegistersCount);
+ }
+
+ for (int index = regMasks.RegistersCount * 2; index < _intervals.Count; index++)
+ {
+ if (!_intervals[index].IsSpilled)
+ {
+ ReplaceLocalWithRegister(_intervals[index]);
+ }
+ }
+
+ InsertSplitCopies();
+ InsertSplitCopiesAtEdges(cfg);
+
+ return new AllocationResult(context.IntUsedRegisters, context.VecUsedRegisters, context.StackAlloc.TotalSize);
+ }
+
+ private void AllocateInterval(AllocationContext context, LiveInterval current, int cIndex, int registersCount)
+ {
+ // Check active intervals that already ended.
+ foreach (int iIndex in context.Active)
+ {
+ LiveInterval interval = _intervals[iIndex];
+
+ interval.Forward(current.GetStart());
+
+ if (interval.GetEnd() < current.GetStart())
+ {
+ context.Active.Clear(iIndex);
+ }
+ else if (!interval.Overlaps(current.GetStart()))
+ {
+ context.MoveActiveToInactive(iIndex);
+ }
+ }
+
+ // Check inactive intervals that already ended or were reactivated.
+ foreach (int iIndex in context.Inactive)
+ {
+ LiveInterval interval = _intervals[iIndex];
+
+ interval.Forward(current.GetStart());
+
+ if (interval.GetEnd() < current.GetStart())
+ {
+ context.Inactive.Clear(iIndex);
+ }
+ else if (interval.Overlaps(current.GetStart()))
+ {
+ context.MoveInactiveToActive(iIndex);
+ }
+ }
+
+ if (!TryAllocateRegWithoutSpill(context, current, cIndex, registersCount))
+ {
+ AllocateRegWithSpill(context, current, cIndex, registersCount);
+ }
+ }
+
+ private bool TryAllocateRegWithoutSpill(AllocationContext context, LiveInterval current, int cIndex, int registersCount)
+ {
+ RegisterType regType = current.Local.Type.ToRegisterType();
+
+ Span<int> freePositions = stackalloc int[registersCount];
+
+ context.GetFreePositions(regType, freePositions, out int freePositionsCount);
+
+ foreach (int iIndex in context.Active)
+ {
+ LiveInterval interval = _intervals[iIndex];
+ Register reg = interval.Register;
+
+ if (reg.Type == regType)
+ {
+ freePositions[reg.Index] = 0;
+ freePositionsCount--;
+ }
+ }
+
+ // If all registers are already active, return early. No point in inspecting the inactive set to look for
+ // holes.
+ if (freePositionsCount == 0)
+ {
+ return false;
+ }
+
+ foreach (int iIndex in context.Inactive)
+ {
+ LiveInterval interval = _intervals[iIndex];
+ Register reg = interval.Register;
+
+ ref int freePosition = ref freePositions[reg.Index];
+
+ if (reg.Type == regType && freePosition != 0)
+ {
+ int overlapPosition = interval.GetOverlapPosition(current);
+
+ if (overlapPosition != LiveInterval.NotFound && freePosition > overlapPosition)
+ {
+ freePosition = overlapPosition;
+ }
+ }
+ }
+
+ int selectedReg = GetHighestValueIndex(freePositions);
+ int selectedNextUse = freePositions[selectedReg];
+
+ // Intervals starts and ends at odd positions, unless they span an entire
+ // block, in this case they will have ranges at a even position.
+ // When a interval is loaded from the stack to a register, we can only
+ // do the split at a odd position, because otherwise the split interval
+ // that is inserted on the list to be processed may clobber a register
+ // used by the instruction at the same position as the split.
+ // The problem only happens when a interval ends exactly at this instruction,
+ // because otherwise they would interfere, and the register wouldn't be selected.
+ // When the interval is aligned and the above happens, there's no problem as
+ // the instruction that is actually with the last use is the one
+ // before that position.
+ selectedNextUse &= ~InstructionGapMask;
+
+ if (selectedNextUse <= current.GetStart())
+ {
+ return false;
+ }
+ else if (selectedNextUse < current.GetEnd())
+ {
+ LiveInterval splitChild = current.Split(selectedNextUse);
+
+ if (splitChild.UsesCount != 0)
+ {
+ Debug.Assert(splitChild.GetStart() > current.GetStart(), "Split interval has an invalid start position.");
+
+ InsertInterval(splitChild, registersCount);
+ }
+ else
+ {
+ Spill(context, splitChild);
+ }
+ }
+
+ current.Register = new Register(selectedReg, regType);
+
+ if (regType == RegisterType.Integer)
+ {
+ context.IntUsedRegisters |= 1 << selectedReg;
+ }
+ else /* if (regType == RegisterType.Vector) */
+ {
+ context.VecUsedRegisters |= 1 << selectedReg;
+ }
+
+ context.Active.Set(cIndex);
+
+ return true;
+ }
+
+ private void AllocateRegWithSpill(AllocationContext context, LiveInterval current, int cIndex, int registersCount)
+ {
+ RegisterType regType = current.Local.Type.ToRegisterType();
+
+ Span<int> usePositions = stackalloc int[registersCount];
+ Span<int> blockedPositions = stackalloc int[registersCount];
+
+ context.GetFreePositions(regType, usePositions, out _);
+ context.GetFreePositions(regType, blockedPositions, out _);
+
+ foreach (int iIndex in context.Active)
+ {
+ LiveInterval interval = _intervals[iIndex];
+ Register reg = interval.Register;
+
+ if (reg.Type == regType)
+ {
+ ref int usePosition = ref usePositions[reg.Index];
+ ref int blockedPosition = ref blockedPositions[reg.Index];
+
+ if (interval.IsFixed)
+ {
+ usePosition = 0;
+ blockedPosition = 0;
+ }
+ else
+ {
+ int nextUse = interval.NextUseAfter(current.GetStart());
+
+ if (nextUse != LiveInterval.NotFound && usePosition > nextUse)
+ {
+ usePosition = nextUse;
+ }
+ }
+ }
+ }
+
+ foreach (int iIndex in context.Inactive)
+ {
+ LiveInterval interval = _intervals[iIndex];
+ Register reg = interval.Register;
+
+ if (reg.Type == regType)
+ {
+ ref int usePosition = ref usePositions[reg.Index];
+ ref int blockedPosition = ref blockedPositions[reg.Index];
+
+ if (interval.IsFixed)
+ {
+ int overlapPosition = interval.GetOverlapPosition(current);
+
+ if (overlapPosition != LiveInterval.NotFound)
+ {
+ blockedPosition = Math.Min(blockedPosition, overlapPosition);
+ usePosition = Math.Min(usePosition, overlapPosition);
+ }
+ }
+ else if (interval.Overlaps(current))
+ {
+ int nextUse = interval.NextUseAfter(current.GetStart());
+
+ if (nextUse != LiveInterval.NotFound && usePosition > nextUse)
+ {
+ usePosition = nextUse;
+ }
+ }
+ }
+ }
+
+ int selectedReg = GetHighestValueIndex(usePositions);
+ int currentFirstUse = current.FirstUse();
+
+ Debug.Assert(currentFirstUse >= 0, "Current interval has no uses.");
+
+ if (usePositions[selectedReg] < currentFirstUse)
+ {
+ // All intervals on inactive and active are being used before current,
+ // so spill the current interval.
+ Debug.Assert(currentFirstUse > current.GetStart(), "Trying to spill a interval currently being used.");
+
+ LiveInterval splitChild = current.Split(currentFirstUse);
+
+ Debug.Assert(splitChild.GetStart() > current.GetStart(), "Split interval has an invalid start position.");
+
+ InsertInterval(splitChild, registersCount);
+
+ Spill(context, current);
+ }
+ else if (blockedPositions[selectedReg] > current.GetEnd())
+ {
+ // Spill made the register available for the entire current lifetime,
+ // so we only need to split the intervals using the selected register.
+ current.Register = new Register(selectedReg, regType);
+
+ SplitAndSpillOverlappingIntervals(context, current, registersCount);
+
+ context.Active.Set(cIndex);
+ }
+ else
+ {
+ // There are conflicts even after spill due to the use of fixed registers
+ // that can't be spilled, so we need to also split current at the point of
+ // the first fixed register use.
+ current.Register = new Register(selectedReg, regType);
+
+ int splitPosition = blockedPositions[selectedReg] & ~InstructionGapMask;
+
+ Debug.Assert(splitPosition > current.GetStart(), "Trying to split a interval at a invalid position.");
+
+ LiveInterval splitChild = current.Split(splitPosition);
+
+ if (splitChild.UsesCount != 0)
+ {
+ Debug.Assert(splitChild.GetStart() > current.GetStart(), "Split interval has an invalid start position.");
+
+ InsertInterval(splitChild, registersCount);
+ }
+ else
+ {
+ Spill(context, splitChild);
+ }
+
+ SplitAndSpillOverlappingIntervals(context, current, registersCount);
+
+ context.Active.Set(cIndex);
+ }
+ }
+
+ private static int GetHighestValueIndex(Span<int> span)
+ {
+ int highest = int.MinValue;
+
+ int selected = 0;
+
+ for (int index = 0; index < span.Length; index++)
+ {
+ int current = span[index];
+
+ if (highest < current)
+ {
+ highest = current;
+ selected = index;
+
+ if (current == int.MaxValue)
+ {
+ break;
+ }
+ }
+ }
+
+ return selected;
+ }
+
        // Evicts every non-fixed interval that conflicts with "current" on the
        // register that was just assigned to it, splitting/spilling each one.
        private void SplitAndSpillOverlappingIntervals(AllocationContext context, LiveInterval current, int registersCount)
        {
            // Active intervals necessarily overlap the current position, so sharing
            // the register is enough to require eviction.
            foreach (int iIndex in context.Active)
            {
                LiveInterval interval = _intervals[iIndex];

                if (!interval.IsFixed && interval.Register == current.Register)
                {
                    SplitAndSpillOverlappingInterval(context, current, interval, registersCount);

                    context.Active.Clear(iIndex);
                }
            }

            // Inactive intervals only conflict if they also overlap "current",
            // in addition to sharing the register.
            foreach (int iIndex in context.Inactive)
            {
                LiveInterval interval = _intervals[iIndex];

                if (!interval.IsFixed && interval.Register == current.Register && interval.Overlaps(current))
                {
                    SplitAndSpillOverlappingInterval(context, current, interval, registersCount);

                    context.Inactive.Clear(iIndex);
                }
            }
        }
+
        // Splits "interval" (which conflicts with "current" on the same register)
        // so the overlapping part is spilled, while any part with uses at or after
        // the conflict is re-queued to receive a new register.
        private void SplitAndSpillOverlappingInterval(
            AllocationContext context,
            LiveInterval current,
            LiveInterval interval,
            int registersCount)
        {
            // If there's a next use after the start of the current interval,
            // we need to split the spilled interval twice, and re-insert it
            // on the "pending" list to ensure that it will get a new register
            // on that use position.
            int nextUse = interval.NextUseAfter(current.GetStart());

            LiveInterval splitChild;

            if (interval.GetStart() < current.GetStart())
            {
                // The part before the conflict keeps its current register.
                splitChild = interval.Split(current.GetStart());
            }
            else
            {
                splitChild = interval;
            }

            if (nextUse != -1)
            {
                Debug.Assert(nextUse > current.GetStart(), "Trying to spill a interval currently being used.");

                if (nextUse > splitChild.GetStart())
                {
                    // Spill only the gap up to the next use; the tail is what gets
                    // re-inserted into the pending list below.
                    LiveInterval right = splitChild.Split(nextUse);

                    Spill(context, splitChild);

                    splitChild = right;
                }

                InsertInterval(splitChild, registersCount);
            }
            else
            {
                // No remaining uses: the rest of the interval can stay on the stack.
                Spill(context, splitChild);
            }
        }
+
        // Re-inserts a split interval into the sorted pending list so the main
        // allocation loop will process it again.
        private void InsertInterval(LiveInterval interval, int registersCount)
        {
            Debug.Assert(interval.UsesCount != 0, "Trying to insert a interval without uses.");
            Debug.Assert(!interval.IsEmpty, "Trying to insert a empty interval.");
            Debug.Assert(!interval.IsSpilled, "Trying to insert a spilled interval.");

            // The first registersCount * 2 entries of _intervals are the fixed
            // intervals (one integer and one vector per physical register); skip
            // them and binary search only the variable intervals, which are kept
            // sorted by start position (LiveInterval.CompareTo).
            int startIndex = registersCount * 2;

            int insertIndex = _intervals.BinarySearch(startIndex, _intervals.Count - startIndex, interval, null);

            if (insertIndex < 0)
            {
                // BinarySearch returns the complement of the insertion point when
                // no exact match exists.
                insertIndex = ~insertIndex;
            }

            _intervals.Insert(insertIndex, interval);
        }
+
        // Assigns a stack slot to an interval that has no remaining uses.
        private void Spill(AllocationContext context, LiveInterval interval)
        {
            Debug.Assert(!interval.IsFixed, "Trying to spill a fixed interval.");
            Debug.Assert(interval.UsesCount == 0, "Trying to spill a interval with uses.");

            // We first check if any of the siblings were spilled, if so we can reuse
            // the stack offset. Otherwise, we allocate a new space on the stack.
            // This prevents stack-to-stack copies being necessary for a split interval.
            if (!interval.TrySpillWithSiblingOffset())
            {
                interval.Spill(context.StackAlloc.Allocate(interval.Local.Type));
            }
        }
+
        // Inserts move operations between consecutive split children that are
        // adjacent inside a block, so a value relocated by a split actually gets
        // copied to its new location. Splits that land on block boundaries are
        // handled by InsertSplitCopiesAtEdges instead.
        private void InsertSplitCopies()
        {
            Dictionary<int, CopyResolver> copyResolvers = new Dictionary<int, CopyResolver>();

            // One resolver per split position, so all copies required at the same
            // position are sequenced together (handles swaps/cycles).
            CopyResolver GetCopyResolver(int position)
            {
                if (!copyResolvers.TryGetValue(position, out CopyResolver copyResolver))
                {
                    copyResolver = new CopyResolver();

                    copyResolvers.Add(position, copyResolver);
                }

                return copyResolver;
            }

            foreach (LiveInterval interval in _intervals.Where(x => x.IsSplit))
            {
                LiveInterval previous = interval;

                foreach (LiveInterval splitChild in interval.SplitChildren())
                {
                    int splitPosition = splitChild.GetStart();

                    // Only copy when the siblings are contiguous and the split point
                    // is not a block edge (edge copies are resolved on the CFG).
                    if (!_blockEdges.Contains(splitPosition) && previous.GetEnd() == splitPosition)
                    {
                        GetCopyResolver(splitPosition).AddSplit(previous, splitChild);
                    }

                    previous = splitChild;
                }
            }

            foreach (KeyValuePair<int, CopyResolver> kv in copyResolvers)
            {
                CopyResolver copyResolver = kv.Value;

                if (!copyResolver.HasCopy)
                {
                    continue;
                }

                int splitPosition = kv.Key;

                (IntrusiveList<Operation> nodes, Operation node) = GetOperationNode(splitPosition);

                Operation[] sequence = copyResolver.Sequence();

                // Insert the copy sequence just before the operation at the split
                // position, preserving the order produced by the resolver.
                nodes.AddBefore(node, sequence[0]);

                node = sequence[0];

                for (int index = 1; index < sequence.Length; index++)
                {
                    nodes.AddAfter(node, sequence[index]);

                    node = sequence[index];
                }
            }
        }
+
        // Inserts copies on control flow edges for intervals that were split
        // across blocks, moving values into the location the successor expects.
        // Critical edges are split so there is a safe place for the copies.
        private void InsertSplitCopiesAtEdges(ControlFlowGraph cfg)
        {
            int blocksCount = cfg.Blocks.Count;

            // Blocks created by SplitEdge get indices past the original count.
            bool IsSplitEdgeBlock(BasicBlock block)
            {
                return block.Index >= blocksCount;
            }

            // Reset iterators to beginning because GetSplitChild depends on the state of the iterator.
            foreach (LiveInterval interval in _intervals)
            {
                interval.Reset();
            }

            for (BasicBlock block = cfg.Blocks.First; block != null; block = block.ListNext)
            {
                if (IsSplitEdgeBlock(block))
                {
                    continue;
                }

                bool hasSingleOrNoSuccessor = block.SuccessorsCount <= 1;

                for (int i = 0; i < block.SuccessorsCount; i++)
                {
                    BasicBlock successor = block.GetSuccessor(i);

                    int succIndex = successor.Index;

                    // If the current node is a split node, then the actual successor node
                    // (the successor before the split) should be right after it.
                    if (IsSplitEdgeBlock(successor))
                    {
                        succIndex = successor.GetSuccessor(0).Index;
                    }

                    CopyResolver copyResolver = null;

                    // For each interval live into the successor, compare its location
                    // at the end of this block with its location at the start of the
                    // successor; differing split children mean a copy is needed.
                    foreach (int iIndex in _blockLiveIn[succIndex])
                    {
                        LiveInterval interval = _parentIntervals[iIndex];

                        if (!interval.IsSplit)
                        {
                            continue;
                        }

                        int lEnd = _blockRanges[block.Index].End - 1;
                        int rStart = _blockRanges[succIndex].Start;

                        LiveInterval left = interval.GetSplitChild(lEnd);
                        LiveInterval right = interval.GetSplitChild(rStart);

                        if (left != default && right != default && left != right)
                        {
                            if (copyResolver == null)
                            {
                                // Created lazily; most edges need no copies.
                                copyResolver = new CopyResolver();
                            }

                            copyResolver.AddSplit(left, right);
                        }
                    }

                    if (copyResolver == null || !copyResolver.HasCopy)
                    {
                        continue;
                    }

                    Operation[] sequence = copyResolver.Sequence();

                    if (hasSingleOrNoSuccessor)
                    {
                        // Sole successor: safe to append the copies to this block.
                        foreach (Operation operation in sequence)
                        {
                            block.Append(operation);
                        }
                    }
                    else if (successor.Predecessors.Count == 1)
                    {
                        // Successor has a single predecessor: prepend the copies
                        // to the successor instead.
                        successor.Operations.AddFirst(sequence[0]);

                        Operation prependNode = sequence[0];

                        for (int index = 1; index < sequence.Length; index++)
                        {
                            Operation operation = sequence[index];

                            successor.Operations.AddAfter(prependNode, operation);

                            prependNode = operation;
                        }
                    }
                    else
                    {
                        // Split the critical edge.
                        BasicBlock splitBlock = cfg.SplitEdge(block, successor);

                        foreach (Operation operation in sequence)
                        {
                            splitBlock.Append(operation);
                        }
                    }
                }
            }
        }
+
+ private void ReplaceLocalWithRegister(LiveInterval current)
+ {
+ Operand register = GetRegister(current);
+
+ foreach (int usePosition in current.UsePositions())
+ {
+ (_, Operation operation) = GetOperationNode(usePosition);
+
+ for (int index = 0; index < operation.SourcesCount; index++)
+ {
+ Operand source = operation.GetSource(index);
+
+ if (source == current.Local)
+ {
+ operation.SetSource(index, register);
+ }
+ else if (source.Kind == OperandKind.Memory)
+ {
+ MemoryOperand memOp = source.GetMemory();
+
+ if (memOp.BaseAddress == current.Local)
+ {
+ memOp.BaseAddress = register;
+ }
+
+ if (memOp.Index == current.Local)
+ {
+ memOp.Index = register;
+ }
+ }
+ }
+
+ for (int index = 0; index < operation.DestinationsCount; index++)
+ {
+ Operand dest = operation.GetDestination(index);
+
+ if (dest == current.Local)
+ {
+ operation.SetDestination(index, register);
+ }
+ }
+ }
+ }
+
+ private static Operand GetRegister(LiveInterval interval)
+ {
+ Debug.Assert(!interval.IsSpilled, "Spilled intervals are not allowed.");
+
+ return Operand.Factory.Register(
+ interval.Register.Index,
+ interval.Register.Type,
+ interval.Local.Type);
+ }
+
+ private (IntrusiveList<Operation>, Operation) GetOperationNode(int position)
+ {
+ return _operationNodes[position / InstructionGap];
+ }
+
        // Assigns a sequential id to every local variable (stored on the operand
        // itself via NumberLocal) and creates one LiveInterval per local, after
        // the two fixed intervals (integer and vector) per physical register.
        // Also records the (list, node) pair of every operation so positions can
        // later be mapped back to operations.
        private void NumberLocals(ControlFlowGraph cfg, int registersCount)
        {
            _operationNodes = new List<(IntrusiveList<Operation>, Operation)>();
            _intervals = new List<LiveInterval>();

            for (int index = 0; index < registersCount; index++)
            {
                _intervals.Add(new LiveInterval(new Register(index, RegisterType.Integer)));
                _intervals.Add(new LiveInterval(new Register(index, RegisterType.Vector)));
            }

            // The "visited" state is stored in the MSB of the local's value.
            const ulong VisitedMask = 1ul << 63;

            bool IsVisited(Operand local)
            {
                return (local.GetValueUnsafe() & VisitedMask) != 0;
            }

            void SetVisited(Operand local)
            {
                local.GetValueUnsafe() |= VisitedMask;
            }

            _operationsCount = 0;

            // Walk blocks in reverse post order so ids/positions follow the same
            // order used later by BuildIntervals.
            for (int index = cfg.PostOrderBlocks.Length - 1; index >= 0; index--)
            {
                BasicBlock block = cfg.PostOrderBlocks[index];

                for (Operation node = block.Operations.First; node != default; node = node.ListNext)
                {
                    _operationNodes.Add((block.Operations, node));

                    for (int i = 0; i < node.DestinationsCount; i++)
                    {
                        Operand dest = node.GetDestination(i);

                        if (dest.Kind == OperandKind.LocalVariable && !IsVisited(dest))
                        {
                            dest.NumberLocal(_intervals.Count);

                            _intervals.Add(new LiveInterval(dest));

                            SetVisited(dest);
                        }
                    }
                }

                _operationsCount += block.Operations.Count * InstructionGap;

                if (block.Operations.Count == 0)
                {
                    // Pretend we have a dummy instruction on the empty block.
                    _operationNodes.Add((default, default));

                    _operationsCount += InstructionGap;
                }
            }

            _parentIntervals = _intervals.ToArray();
        }
+
        // Computes live intervals for every local and fixed register using the
        // classic two phase approach: per-block gen/kill sets, a global liveness
        // fixpoint, then a backwards scan that materializes ranges and uses.
        private void BuildIntervals(ControlFlowGraph cfg, AllocationContext context)
        {
            _blockRanges = new LiveRange[cfg.Blocks.Count];

            int mapSize = _intervals.Count;

            BitMap[] blkLiveGen = new BitMap[cfg.Blocks.Count];
            BitMap[] blkLiveKill = new BitMap[cfg.Blocks.Count];

            // Compute local live sets.
            for (BasicBlock block = cfg.Blocks.First; block != null; block = block.ListNext)
            {
                BitMap liveGen = new BitMap(Allocators.Default, mapSize);
                BitMap liveKill = new BitMap(Allocators.Default, mapSize);

                for (Operation node = block.Operations.First; node != default; node = node.ListNext)
                {
                    for (int i = 0; i < node.SourcesCount; i++)
                    {
                        VisitSource(node.GetSource(i));
                    }

                    for (int i = 0; i < node.DestinationsCount; i++)
                    {
                        VisitDestination(node.GetDestination(i));
                    }

                    // A source is "gen" only if it was not already defined earlier
                    // in this block (i.e. not in the kill set yet).
                    void VisitSource(Operand source)
                    {
                        if (IsLocalOrRegister(source.Kind))
                        {
                            int id = GetOperandId(source);

                            if (!liveKill.IsSet(id))
                            {
                                liveGen.Set(id);
                            }
                        }
                        else if (source.Kind == OperandKind.Memory)
                        {
                            // Memory operands embed up to two register/local uses.
                            MemoryOperand memOp = source.GetMemory();

                            if (memOp.BaseAddress != default)
                            {
                                VisitSource(memOp.BaseAddress);
                            }

                            if (memOp.Index != default)
                            {
                                VisitSource(memOp.Index);
                            }
                        }
                    }

                    void VisitDestination(Operand dest)
                    {
                        liveKill.Set(GetOperandId(dest));
                    }
                }

                blkLiveGen [block.Index] = liveGen;
                blkLiveKill[block.Index] = liveKill;
            }

            // Compute global live sets.
            BitMap[] blkLiveIn = new BitMap[cfg.Blocks.Count];
            BitMap[] blkLiveOut = new BitMap[cfg.Blocks.Count];

            for (int index = 0; index < cfg.Blocks.Count; index++)
            {
                blkLiveIn [index] = new BitMap(Allocators.Default, mapSize);
                blkLiveOut[index] = new BitMap(Allocators.Default, mapSize);
            }

            bool modified;

            // Iterate to a fixpoint: liveOut = union of successors' liveIn, and
            // liveIn = gen | (liveOut & ~kill). Post order iteration converges
            // quickly for this backwards dataflow problem.
            do
            {
                modified = false;

                for (int index = 0; index < cfg.PostOrderBlocks.Length; index++)
                {
                    BasicBlock block = cfg.PostOrderBlocks[index];

                    BitMap liveOut = blkLiveOut[block.Index];

                    for (int i = 0; i < block.SuccessorsCount; i++)
                    {
                        BasicBlock succ = block.GetSuccessor(i);

                        modified |= liveOut.Set(blkLiveIn[succ.Index]);
                    }

                    BitMap liveIn = blkLiveIn[block.Index];

                    liveIn.Set (liveOut);
                    liveIn.Clear(blkLiveKill[block.Index]);
                    liveIn.Set (blkLiveGen [block.Index]);
                }
            }
            while (modified);

            _blockLiveIn = blkLiveIn;

            _blockEdges = new HashSet<int>();

            // Compute lifetime intervals, scanning blocks and instructions
            // backwards so range starts can be trimmed when definitions are found.
            int operationPos = _operationsCount;

            for (int index = 0; index < cfg.PostOrderBlocks.Length; index++)
            {
                BasicBlock block = cfg.PostOrderBlocks[index];

                // We handle empty blocks by pretending they have a dummy instruction,
                // because otherwise the block would have the same start and end position,
                // and this is not valid.
                int instCount = Math.Max(block.Operations.Count, 1);

                int blockStart = operationPos - instCount * InstructionGap;
                int blockEnd = operationPos;

                _blockRanges[block.Index] = new LiveRange(blockStart, blockEnd);

                _blockEdges.Add(blockStart);

                BitMap liveOut = blkLiveOut[block.Index];

                // Anything live out is conservatively live for the whole block;
                // the backwards scan below trims the range if it is defined here.
                foreach (int id in liveOut)
                {
                    _intervals[id].AddRange(blockStart, blockEnd);
                }

                if (block.Operations.Count == 0)
                {
                    operationPos -= InstructionGap;

                    continue;
                }

                for (Operation node = block.Operations.Last; node != default; node = node.ListPrevious)
                {
                    operationPos -= InstructionGap;

                    for (int i = 0; i < node.DestinationsCount; i++)
                    {
                        VisitDestination(node.GetDestination(i));
                    }

                    for (int i = 0; i < node.SourcesCount; i++)
                    {
                        VisitSource(node.GetSource(i));
                    }

                    if (node.Instruction == Instruction.Call)
                    {
                        // Calls clobber the caller saved registers; block them with
                        // short fixed ranges covering the call.
                        AddIntervalCallerSavedReg(context.Masks.IntCallerSavedRegisters, operationPos, RegisterType.Integer);
                        AddIntervalCallerSavedReg(context.Masks.VecCallerSavedRegisters, operationPos, RegisterType.Vector);
                    }

                    void VisitSource(Operand source)
                    {
                        if (IsLocalOrRegister(source.Kind))
                        {
                            LiveInterval interval = _intervals[GetOperandId(source)];

                            // Conservatively extend from the block start; a
                            // definition found later in the backwards scan trims
                            // the start via SetStart.
                            interval.AddRange(blockStart, operationPos + 1);
                            interval.AddUsePosition(operationPos);
                        }
                        else if (source.Kind == OperandKind.Memory)
                        {
                            MemoryOperand memOp = source.GetMemory();

                            if (memOp.BaseAddress != default)
                            {
                                VisitSource(memOp.BaseAddress);
                            }

                            if (memOp.Index != default)
                            {
                                VisitSource(memOp.Index);
                            }
                        }
                    }

                    void VisitDestination(Operand dest)
                    {
                        LiveInterval interval = _intervals[GetOperandId(dest)];

                        if (interval.IsFixed)
                        {
                            interval.IsFixedAndUsed = true;
                        }

                        // Definitions take effect just past the instruction
                        // position, hence the + 1.
                        interval.SetStart(operationPos + 1);
                        interval.AddUsePosition(operationPos + 1);
                    }
                }
            }

            // Point the range iterators back at the first range now that all
            // ranges have been built.
            foreach (LiveInterval interval in _parentIntervals)
            {
                interval.Reset();
            }
        }
+
+ private void AddIntervalCallerSavedReg(int mask, int operationPos, RegisterType regType)
+ {
+ while (mask != 0)
+ {
+ int regIndex = BitOperations.TrailingZeroCount(mask);
+
+ Register callerSavedReg = new Register(regIndex, regType);
+
+ LiveInterval interval = _intervals[GetRegisterId(callerSavedReg)];
+
+ interval.AddRange(operationPos + 1, operationPos + InstructionGap);
+
+ mask &= ~(1 << regIndex);
+ }
+ }
+
+ private static int GetOperandId(Operand operand)
+ {
+ if (operand.Kind == OperandKind.LocalVariable)
+ {
+ return operand.GetLocalNumber();
+ }
+ else if (operand.Kind == OperandKind.Register)
+ {
+ return GetRegisterId(operand.GetRegister());
+ }
+ else
+ {
+ throw new ArgumentException($"Invalid operand kind \"{operand.Kind}\".");
+ }
+ }
+
+ private static int GetRegisterId(Register register)
+ {
+ return (register.Index << 1) | (register.Type == RegisterType.Vector ? 1 : 0);
+ }
+
+ private static bool IsLocalOrRegister(OperandKind kind)
+ {
+ return kind == OperandKind.LocalVariable ||
+ kind == OperandKind.Register;
+ }
+ }
+} \ No newline at end of file
diff --git a/src/ARMeilleure/CodeGen/RegisterAllocators/LiveInterval.cs b/src/ARMeilleure/CodeGen/RegisterAllocators/LiveInterval.cs
new file mode 100644
index 00000000..d739ad28
--- /dev/null
+++ b/src/ARMeilleure/CodeGen/RegisterAllocators/LiveInterval.cs
@@ -0,0 +1,396 @@
+using ARMeilleure.IntermediateRepresentation;
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+
+namespace ARMeilleure.CodeGen.RegisterAllocators
+{
    /// <summary>
    /// A live interval: the positions where a local variable (or a fixed physical
    /// register) holds a value, stored as a sorted linked list of LiveRanges plus
    /// a list of use positions. The payload lives in arena memory, so this struct
    /// is only a pointer and copies alias the same interval.
    /// </summary>
    unsafe readonly struct LiveInterval : IComparable<LiveInterval>
    {
        public const int NotFound = -1;

        private struct Data
        {
            public int End;
            public int SpillOffset;

            // Sorted range list head, plus a one-position iterator (Prev/Curr)
            // used by Forward/Overlaps/Split to avoid rescanning from the head.
            public LiveRange FirstRange;
            public LiveRange PrevRange;
            public LiveRange CurrRange;

            // The interval this one was split from (itself when never split).
            public LiveInterval Parent;

            public UseList Uses;
            public LiveIntervalList Children;

            public Operand Local;
            public Register Register;

            public bool IsFixed;
            public bool IsFixedAndUsed;
        }

        private readonly Data* _data;

        private ref int End => ref _data->End;
        private ref LiveRange FirstRange => ref _data->FirstRange;
        private ref LiveRange CurrRange => ref _data->CurrRange;
        private ref LiveRange PrevRange => ref _data->PrevRange;
        private ref LiveInterval Parent => ref _data->Parent;
        private ref UseList Uses => ref _data->Uses;
        private ref LiveIntervalList Children => ref _data->Children;

        public Operand Local => _data->Local;
        public ref Register Register => ref _data->Register;
        public ref int SpillOffset => ref _data->SpillOffset;

        public bool IsFixed => _data->IsFixed;
        public ref bool IsFixedAndUsed => ref _data->IsFixedAndUsed;
        public bool IsEmpty => FirstRange == default;
        public bool IsSplit => Children.Count != 0;
        public bool IsSpilled => SpillOffset != -1;

        public int UsesCount => Uses.Count;

        public LiveInterval(Operand local = default, LiveInterval parent = default)
        {
            _data = Allocators.LiveIntervals.Allocate<Data>();
            *_data = default;

            _data->IsFixed = false;
            _data->Local = local;

            // A parentless interval is its own parent; this lets SplitChildren
            // and AddSplitChild work uniformly for parents and children.
            Parent = parent == default ? this : parent;
            Uses = new UseList();
            Children = new LiveIntervalList();

            FirstRange = default;
            CurrRange = default;
            PrevRange = default;

            SpillOffset = -1;
        }

        // Fixed intervals represent physical registers.
        public LiveInterval(Register register) : this(local: default, parent: default)
        {
            _data->IsFixed = true;

            Register = register;
        }

        // Rewinds the range iterator back to the first range.
        public void Reset()
        {
            PrevRange = default;
            CurrRange = FirstRange;
        }

        // Advances the range iterator past ranges that end before "position".
        // Positions must be visited in increasing order between Resets.
        public void Forward(int position)
        {
            LiveRange prev = PrevRange;
            LiveRange curr = CurrRange;

            while (curr != default && curr.Start < position && !curr.Overlaps(position))
            {
                prev = curr;
                curr = curr.Next;
            }

            PrevRange = prev;
            CurrRange = curr;
        }

        public int GetStart()
        {
            Debug.Assert(!IsEmpty, "Empty LiveInterval cannot have a start position.");

            return FirstRange.Start;
        }

        // Moves the start of the first range, or creates a one-position range
        // when the interval is still empty.
        public void SetStart(int position)
        {
            if (FirstRange != default)
            {
                Debug.Assert(position != FirstRange.End);

                FirstRange.Start = position;
            }
            else
            {
                FirstRange = new LiveRange(position, position + 1);
                End = position + 1;
            }
        }

        public int GetEnd()
        {
            Debug.Assert(!IsEmpty, "Empty LiveInterval cannot have an end position.");

            return End;
        }

        // Prepends or coalesces the [start, end) range. Callers add ranges in
        // decreasing start order (BuildIntervals scans backwards), so only the
        // first range needs to be considered for coalescing.
        public void AddRange(int start, int end)
        {
            Debug.Assert(start < end, $"Invalid range start position {start}, {end}");

            if (FirstRange != default)
            {
                // If the new range ends exactly where the first range start, then coalesce together.
                if (end == FirstRange.Start)
                {
                    FirstRange.Start = start;

                    return;
                }
                // If the new range is already contained, then coalesce together.
                else if (FirstRange.Overlaps(start, end))
                {
                    FirstRange.Start = Math.Min(FirstRange.Start, start);
                    FirstRange.End = Math.Max(FirstRange.End, end);
                    End = Math.Max(End, end);

                    Debug.Assert(FirstRange.Next == default || !FirstRange.Overlaps(FirstRange.Next));
                    return;
                }
            }

            FirstRange = new LiveRange(start, end, FirstRange);
            End = Math.Max(End, end);

            Debug.Assert(FirstRange.Next == default || !FirstRange.Overlaps(FirstRange.Next));
        }

        public void AddUsePosition(int position)
        {
            Uses.Add(position);
        }

        // True if "position" falls inside the interval, scanning from the
        // current iterator position (see Forward) onwards.
        public bool Overlaps(int position)
        {
            LiveRange curr = CurrRange;

            while (curr != default && curr.Start <= position)
            {
                if (curr.Overlaps(position))
                {
                    return true;
                }

                curr = curr.Next;
            }

            return false;
        }

        public bool Overlaps(LiveInterval other)
        {
            return GetOverlapPosition(other) != NotFound;
        }

        // Returns the first position where both intervals are live, or NotFound.
        // Both range lists are scanned from their current iterator positions.
        public int GetOverlapPosition(LiveInterval other)
        {
            LiveRange a = CurrRange;
            LiveRange b = other.CurrRange;

            while (a != default)
            {
                while (b != default && b.Start < a.Start)
                {
                    if (a.Overlaps(b))
                    {
                        return a.Start;
                    }

                    b = b.Next;
                }

                if (b == default)
                {
                    break;
                }
                else if (a.Overlaps(b))
                {
                    return a.Start;
                }

                a = a.Next;
            }

            return NotFound;
        }

        // All split children are stored on the parent, so every sibling sees the
        // full family through its parent.
        public ReadOnlySpan<LiveInterval> SplitChildren()
        {
            return Parent.Children.Span;
        }

        public ReadOnlySpan<int> UsePositions()
        {
            return Uses.Span;
        }

        public int FirstUse()
        {
            return Uses.FirstUse;
        }

        public int NextUseAfter(int position)
        {
            return Uses.NextUse(position);
        }

        // Splits the interval at "position": this interval keeps the part before
        // the split, the returned interval gets the part after it, including the
        // uses at or after "position".
        public LiveInterval Split(int position)
        {
            LiveInterval result = new(Local, Parent);
            result.End = End;

            LiveRange prev = PrevRange;
            LiveRange curr = CurrRange;

            // Find the range containing "position", or the first one after it.
            while (curr != default && curr.Start < position && !curr.Overlaps(position))
            {
                prev = curr;
                curr = curr.Next;
            }

            if (curr.Start >= position)
            {
                // Split falls between ranges: hand "curr" and its tail over.
                prev.Next = default;

                result.FirstRange = curr;

                End = prev.End;
            }
            else
            {
                // Split falls inside "curr": cut it in two at "position".
                result.FirstRange = new LiveRange(position, curr.End, curr.Next);

                curr.End = position;
                curr.Next = default;

                End = curr.End;
            }

            result.Uses = Uses.Split(position);

            AddSplitChild(result);

            Debug.Assert(!IsEmpty, "Left interval is empty after split.");
            Debug.Assert(!result.IsEmpty, "Right interval is empty after split.");

            // Make sure the iterator in the new split is pointing to the start.
            result.Reset();

            return result;
        }

        private void AddSplitChild(LiveInterval child)
        {
            Debug.Assert(!child.IsEmpty, "Trying to insert an empty interval.");

            Parent.Children.Add(child);
        }

        // Returns the split sibling (or this interval) that is live at
        // "position", or default when none is.
        public LiveInterval GetSplitChild(int position)
        {
            if (Overlaps(position))
            {
                return this;
            }

            foreach (LiveInterval splitChild in SplitChildren())
            {
                if (splitChild.Overlaps(position))
                {
                    return splitChild;
                }
                else if (splitChild.GetStart() > position)
                {
                    // Children are sorted by start position; nothing further on
                    // can contain "position".
                    break;
                }
            }

            return default;
        }

        // Reuses a sibling's spill slot when one exists, so all split pieces of
        // the same value share a single stack location.
        public bool TrySpillWithSiblingOffset()
        {
            foreach (LiveInterval splitChild in SplitChildren())
            {
                if (splitChild.IsSpilled)
                {
                    Spill(splitChild.SpillOffset);

                    return true;
                }
            }

            return false;
        }

        public void Spill(int offset)
        {
            SpillOffset = offset;
        }

        // Orders intervals by start position; empty intervals compare equal to
        // everything. Note this ordering is unrelated to Equals (identity).
        public int CompareTo(LiveInterval interval)
        {
            if (FirstRange == default || interval.FirstRange == default)
            {
                return 0;
            }

            return GetStart().CompareTo(interval.GetStart());
        }

        // Equality is identity of the arena payload, not value equality.
        public bool Equals(LiveInterval interval)
        {
            return interval._data == _data;
        }

        public override bool Equals(object obj)
        {
            return obj is LiveInterval interval && Equals(interval);
        }

        public static bool operator ==(LiveInterval a, LiveInterval b)
        {
            return a.Equals(b);
        }

        public static bool operator !=(LiveInterval a, LiveInterval b)
        {
            return !a.Equals(b);
        }

        public override int GetHashCode()
        {
            return HashCode.Combine((IntPtr)_data);
        }

        // Prints the ranges from the current iterator position onwards; the
        // iterator's current range is marked with a "*".
        public override string ToString()
        {
            LiveInterval self = this;

            IEnumerable<string> GetRanges()
            {
                LiveRange curr = self.CurrRange;

                while (curr != default)
                {
                    if (curr == self.CurrRange)
                    {
                        yield return "*" + curr;
                    }
                    else
                    {
                        yield return curr.ToString();
                    }

                    curr = curr.Next;
                }
            }

            return string.Join(", ", GetRanges());
        }
    }
+} \ No newline at end of file
diff --git a/src/ARMeilleure/CodeGen/RegisterAllocators/LiveIntervalList.cs b/src/ARMeilleure/CodeGen/RegisterAllocators/LiveIntervalList.cs
new file mode 100644
index 00000000..06b979ea
--- /dev/null
+++ b/src/ARMeilleure/CodeGen/RegisterAllocators/LiveIntervalList.cs
@@ -0,0 +1,40 @@
+using System;
+
+namespace ARMeilleure.CodeGen.RegisterAllocators
+{
    /// <summary>
    /// Grow-only list of LiveInterval kept sorted by ascending start position,
    /// backed by arena memory. Used to track the split children of an interval.
    /// </summary>
    unsafe struct LiveIntervalList
    {
        private LiveInterval* _items;
        private int _count;
        private int _capacity;

        public int Count => _count;
        public Span<LiveInterval> Span => new(_items, _count);

        // Inserts keeping the list sorted by start position; entries with equal
        // start keep insertion order (stable).
        public void Add(LiveInterval interval)
        {
            if (_count + 1 > _capacity)
            {
                var oldSpan = Span;

                // Arena allocation: the old buffer is intentionally not freed,
                // the arena releases everything at once.
                _capacity = Math.Max(4, _capacity * 2);
                _items = Allocators.References.Allocate<LiveInterval>((uint)_capacity);

                var newSpan = Span;

                oldSpan.CopyTo(newSpan);
            }

            // Insertion sort step: shift larger entries right, then insert.
            int position = interval.GetStart();
            int i = _count - 1;

            while (i >= 0 && _items[i].GetStart() > position)
            {
                _items[i + 1] = _items[i--];
            }

            _items[i + 1] = interval;
            _count++;
        }
    }
+} \ No newline at end of file
diff --git a/src/ARMeilleure/CodeGen/RegisterAllocators/LiveRange.cs b/src/ARMeilleure/CodeGen/RegisterAllocators/LiveRange.cs
new file mode 100644
index 00000000..e38b5190
--- /dev/null
+++ b/src/ARMeilleure/CodeGen/RegisterAllocators/LiveRange.cs
@@ -0,0 +1,74 @@
+using System;
+
+namespace ARMeilleure.CodeGen.RegisterAllocators
+{
+ unsafe readonly struct LiveRange : IEquatable<LiveRange>
+ {
+ private struct Data
+ {
+ public int Start;
+ public int End;
+ public LiveRange Next;
+ }
+
+ private readonly Data* _data;
+
+ public ref int Start => ref _data->Start;
+ public ref int End => ref _data->End;
+ public ref LiveRange Next => ref _data->Next;
+
+ public LiveRange(int start, int end, LiveRange next = default)
+ {
+ _data = Allocators.LiveRanges.Allocate<Data>();
+
+ Start = start;
+ End = end;
+ Next = next;
+ }
+
+ public bool Overlaps(int start, int end)
+ {
+ return Start < end && start < End;
+ }
+
+ public bool Overlaps(LiveRange range)
+ {
+ return Start < range.End && range.Start < End;
+ }
+
+ public bool Overlaps(int position)
+ {
+ return position >= Start && position < End;
+ }
+
+ public bool Equals(LiveRange range)
+ {
+ return range._data == _data;
+ }
+
+ public override bool Equals(object obj)
+ {
+ return obj is LiveRange range && Equals(range);
+ }
+
+ public static bool operator ==(LiveRange a, LiveRange b)
+ {
+ return a.Equals(b);
+ }
+
+ public static bool operator !=(LiveRange a, LiveRange b)
+ {
+ return !a.Equals(b);
+ }
+
+ public override int GetHashCode()
+ {
+ return HashCode.Combine((IntPtr)_data);
+ }
+
+ public override string ToString()
+ {
+ return $"[{Start}, {End})";
+ }
+ }
+} \ No newline at end of file
diff --git a/src/ARMeilleure/CodeGen/RegisterAllocators/RegisterMasks.cs b/src/ARMeilleure/CodeGen/RegisterAllocators/RegisterMasks.cs
new file mode 100644
index 00000000..bc948f95
--- /dev/null
+++ b/src/ARMeilleure/CodeGen/RegisterAllocators/RegisterMasks.cs
@@ -0,0 +1,50 @@
+using ARMeilleure.IntermediateRepresentation;
+using System;
+
+namespace ARMeilleure.CodeGen.RegisterAllocators
+{
+ readonly struct RegisterMasks
+ {
+ public int IntAvailableRegisters { get; }
+ public int VecAvailableRegisters { get; }
+ public int IntCallerSavedRegisters { get; }
+ public int VecCallerSavedRegisters { get; }
+ public int IntCalleeSavedRegisters { get; }
+ public int VecCalleeSavedRegisters { get; }
+ public int RegistersCount { get; }
+
+ public RegisterMasks(
+ int intAvailableRegisters,
+ int vecAvailableRegisters,
+ int intCallerSavedRegisters,
+ int vecCallerSavedRegisters,
+ int intCalleeSavedRegisters,
+ int vecCalleeSavedRegisters,
+ int registersCount)
+ {
+ IntAvailableRegisters = intAvailableRegisters;
+ VecAvailableRegisters = vecAvailableRegisters;
+ IntCallerSavedRegisters = intCallerSavedRegisters;
+ VecCallerSavedRegisters = vecCallerSavedRegisters;
+ IntCalleeSavedRegisters = intCalleeSavedRegisters;
+ VecCalleeSavedRegisters = vecCalleeSavedRegisters;
+ RegistersCount = registersCount;
+ }
+
+ public int GetAvailableRegisters(RegisterType type)
+ {
+ if (type == RegisterType.Integer)
+ {
+ return IntAvailableRegisters;
+ }
+ else if (type == RegisterType.Vector)
+ {
+ return VecAvailableRegisters;
+ }
+ else
+ {
+ throw new ArgumentException($"Invalid register type \"{type}\".");
+ }
+ }
+ }
+} \ No newline at end of file
diff --git a/src/ARMeilleure/CodeGen/RegisterAllocators/StackAllocator.cs b/src/ARMeilleure/CodeGen/RegisterAllocators/StackAllocator.cs
new file mode 100644
index 00000000..038312fe
--- /dev/null
+++ b/src/ARMeilleure/CodeGen/RegisterAllocators/StackAllocator.cs
@@ -0,0 +1,25 @@
+using ARMeilleure.IntermediateRepresentation;
+
+namespace ARMeilleure.CodeGen.RegisterAllocators
+{
+ class StackAllocator
+ {
+ private int _offset;
+
+ public int TotalSize => _offset;
+
+ public int Allocate(OperandType type)
+ {
+ return Allocate(type.GetSizeInBytes());
+ }
+
+ public int Allocate(int sizeInBytes)
+ {
+ int offset = _offset;
+
+ _offset += sizeInBytes;
+
+ return offset;
+ }
+ }
+} \ No newline at end of file
diff --git a/src/ARMeilleure/CodeGen/RegisterAllocators/UseList.cs b/src/ARMeilleure/CodeGen/RegisterAllocators/UseList.cs
new file mode 100644
index 00000000..c89f0854
--- /dev/null
+++ b/src/ARMeilleure/CodeGen/RegisterAllocators/UseList.cs
@@ -0,0 +1,84 @@
+using System;
+
+namespace ARMeilleure.CodeGen.RegisterAllocators
+{
    /// <summary>
    /// List of use positions for a live interval, kept sorted in descending
    /// order and backed by arena memory. Descending order makes insertion cheap
    /// because BuildIntervals adds positions while scanning backwards.
    /// </summary>
    unsafe struct UseList
    {
        private int* _items;
        private int _capacity;
        private int _count;

        public int Count => _count;
        // Items are descending, so the earliest use is the last element.
        public int FirstUse => _count > 0 ? _items[_count - 1] : LiveInterval.NotFound;
        public Span<int> Span => new(_items, _count);

        public void Add(int position)
        {
            if (_count + 1 > _capacity)
            {
                var oldSpan = Span;

                // Arena allocation: the old buffer is intentionally not freed,
                // the arena releases everything at once.
                _capacity = Math.Max(4, _capacity * 2);
                _items = Allocators.Default.Allocate<int>((uint)_capacity);

                var newSpan = Span;

                oldSpan.CopyTo(newSpan);
            }

            // Use positions are usually inserted in descending order, so inserting in descending order is faster,
            // since the number of half exchanges is reduced.
            int i = _count - 1;

            while (i >= 0 && _items[i] < position)
            {
                _items[i + 1] = _items[i--];
            }

            _items[i + 1] = position;
            _count++;
        }

        // Returns the first use at or after "position", or NotFound.
        public int NextUse(int position)
        {
            int index = NextUseIndex(position);

            return index != LiveInterval.NotFound ? _items[index] : LiveInterval.NotFound;
        }

        // Returns the index of the smallest item that is >= "position" (the list
        // is descending, so that is the right-most qualifying item), or NotFound.
        public int NextUseIndex(int position)
        {
            int i = _count - 1;

            // _items[0] is the largest position; if even that one is smaller than
            // "position" there can be no next use.
            if (i == -1 || position > _items[0])
            {
                return LiveInterval.NotFound;
            }

            while (i >= 0 && _items[i] < position)
            {
                i--;
            }

            return i;
        }

        public UseList Split(int position)
        {
            int index = NextUseIndex(position);

            // Since the list is in descending order, the new split list takes the front of the list and the current
            // list takes the back of the list.
            UseList result = new();
            result._count = index + 1;
            result._capacity = result._count;
            result._items = _items;

            _count = _count - result._count;
            _capacity = _count;
            _items = _items + result._count;

            // Both lists alias the original buffer; capacities are clamped to the
            // counts so the next Add on either list reallocates instead of
            // overwriting the other's items.
            return result;
        }
    }
+} \ No newline at end of file