aboutsummaryrefslogtreecommitdiff
path: root/src/Ryujinx.Audio/Backends/CompatLayer
diff options
context:
space:
mode:
authorTSR Berry <20988865+TSRBerry@users.noreply.github.com>2023-04-08 01:22:00 +0200
committerMary <thog@protonmail.com>2023-04-27 23:51:14 +0200
commitcee712105850ac3385cd0091a923438167433f9f (patch)
tree4a5274b21d8b7f938c0d0ce18736d3f2993b11b1 /src/Ryujinx.Audio/Backends/CompatLayer
parentcd124bda587ef09668a971fa1cac1c3f0cfc9f21 (diff)
Move solution and projects to src
Diffstat (limited to 'src/Ryujinx.Audio/Backends/CompatLayer')
-rw-r--r--src/Ryujinx.Audio/Backends/CompatLayer/CompatLayerHardwareDeviceDriver.cs186
-rw-r--r--src/Ryujinx.Audio/Backends/CompatLayer/CompatLayerHardwareDeviceSession.cs162
-rw-r--r--src/Ryujinx.Audio/Backends/CompatLayer/Downmixing.cs125
3 files changed, 473 insertions, 0 deletions
diff --git a/src/Ryujinx.Audio/Backends/CompatLayer/CompatLayerHardwareDeviceDriver.cs b/src/Ryujinx.Audio/Backends/CompatLayer/CompatLayerHardwareDeviceDriver.cs
new file mode 100644
index 00000000..22919f1e
--- /dev/null
+++ b/src/Ryujinx.Audio/Backends/CompatLayer/CompatLayerHardwareDeviceDriver.cs
@@ -0,0 +1,186 @@
+using Ryujinx.Audio.Backends.Common;
+using Ryujinx.Audio.Backends.Dummy;
+using Ryujinx.Audio.Common;
+using Ryujinx.Audio.Integration;
+using Ryujinx.Common.Logging;
+using Ryujinx.Memory;
+using System;
+using System.Threading;
+
+using static Ryujinx.Audio.Integration.IHardwareDeviceDriver;
+
+namespace Ryujinx.Audio.Backends.CompatLayer
+{
/// <summary>
/// Wraps a real <see cref="IHardwareDeviceDriver"/> and bridges the gap between the
/// configuration requested by the guest and what the real backend actually supports
/// (sample format, channel count), falling back to conversion sessions when needed.
/// </summary>
public class CompatLayerHardwareDeviceDriver : IHardwareDeviceDriver
{
    // The real backend driver every call is ultimately forwarded to. Set once in the constructor.
    private readonly IHardwareDeviceDriver _realDriver;

    public static bool IsSupported => true;

    public CompatLayerHardwareDeviceDriver(IHardwareDeviceDriver realDevice)
    {
        _realDriver = realDevice;
    }

    public void Dispose()
    {
        _realDriver.Dispose();
    }

    public ManualResetEvent GetUpdateRequiredEvent()
    {
        return _realDriver.GetUpdateRequiredEvent();
    }

    public ManualResetEvent GetPauseEvent()
    {
        return _realDriver.GetPauseEvent();
    }

    /// <summary>
    /// Pick the channel count the real driver should be opened with.
    /// Walks down 6 -> 2 -> 1 until the real driver reports support.
    /// </summary>
    /// <param name="targetChannelCount">The channel count requested by the user.</param>
    /// <returns>A channel count supported by the real driver.</returns>
    /// <exception cref="ArgumentException">If no supported configuration exists or the input count is invalid.</exception>
    private uint SelectHardwareChannelCount(uint targetChannelCount)
    {
        if (_realDriver.SupportsChannelCount(targetChannelCount))
        {
            return targetChannelCount;
        }

        return targetChannelCount switch
        {
            6 => SelectHardwareChannelCount(2),
            2 => SelectHardwareChannelCount(1),
            1 => throw new ArgumentException("No valid channel configuration found!"),
            _ => throw new ArgumentException($"Invalid targetChannelCount {targetChannelCount}")
        };
    }

    /// <summary>
    /// Pick the sample format the real driver should be opened with.
    /// Only conversion from PCM16 is implemented; preference order is
    /// PCM32 > PCM float > PCM24 > PCM8 (last one loses quality).
    /// </summary>
    /// <param name="targetSampleFormat">The sample format requested by the user.</param>
    /// <returns>A sample format supported by the real driver.</returns>
    /// <exception cref="ArgumentException">If no supported format could be selected.</exception>
    private SampleFormat SelectHardwareSampleFormat(SampleFormat targetSampleFormat)
    {
        if (_realDriver.SupportsSampleFormat(targetSampleFormat))
        {
            return targetSampleFormat;
        }

        // Attempt conversion from PCM16.
        if (targetSampleFormat == SampleFormat.PcmInt16)
        {
            // Prefer PCM32 if we need to convert.
            if (_realDriver.SupportsSampleFormat(SampleFormat.PcmInt32))
            {
                return SampleFormat.PcmInt32;
            }

            // If not supported, PCM float provides the best quality with a cost lower than PCM24.
            if (_realDriver.SupportsSampleFormat(SampleFormat.PcmFloat))
            {
                return SampleFormat.PcmFloat;
            }

            if (_realDriver.SupportsSampleFormat(SampleFormat.PcmInt24))
            {
                return SampleFormat.PcmInt24;
            }

            // If nothing is truly supported, attempt PCM8 at the cost of losing quality.
            if (_realDriver.SupportsSampleFormat(SampleFormat.PcmInt8))
            {
                return SampleFormat.PcmInt8;
            }
        }

        throw new ArgumentException("No valid sample format configuration found!");
    }

    /// <summary>
    /// Open a device session, inserting a compatibility wrapper when the real
    /// driver does not support the requested format or channel count directly.
    /// </summary>
    public IHardwareDeviceSession OpenDeviceSession(Direction direction, IVirtualMemoryManager memoryManager, SampleFormat sampleFormat, uint sampleRate, uint channelCount, float volume)
    {
        // Normalize unspecified (zero) parameters to sane defaults.
        if (channelCount == 0)
        {
            channelCount = 2;
        }

        if (sampleRate == 0)
        {
            sampleRate = Constants.TargetSampleRate;
        }

        volume = Math.Clamp(volume, 0, 1);

        if (!_realDriver.SupportsDirection(direction))
        {
            if (direction == Direction.Input)
            {
                Logger.Warning?.Print(LogClass.Audio, "The selected audio backend doesn't support audio input, fallback to dummy...");

                return new DummyHardwareDeviceSessionInput(this, memoryManager, sampleFormat, sampleRate, channelCount);
            }

            throw new NotImplementedException();
        }

        SampleFormat hardwareSampleFormat = SelectHardwareSampleFormat(sampleFormat);
        uint hardwareChannelCount = SelectHardwareChannelCount(channelCount);

        IHardwareDeviceSession realSession = _realDriver.OpenDeviceSession(direction, memoryManager, hardwareSampleFormat, sampleRate, hardwareChannelCount, volume);

        // Exact match: no compat wrapper needed.
        if (hardwareChannelCount == channelCount && hardwareSampleFormat == sampleFormat)
        {
            return realSession;
        }

        if (hardwareSampleFormat != sampleFormat)
        {
            Logger.Warning?.Print(LogClass.Audio, $"{sampleFormat} isn't supported by the audio device, conversion to {hardwareSampleFormat} will happen.");

            if (hardwareSampleFormat < sampleFormat)
            {
                Logger.Warning?.Print(LogClass.Audio, $"{hardwareSampleFormat} has lower quality than {sampleFormat}, expect some loss in audio fidelity.");
            }
        }

        if (direction == Direction.Input)
        {
            Logger.Warning?.Print(LogClass.Audio, "The selected audio backend doesn't support the requested audio input configuration, fallback to dummy...");

            // TODO: We currently don't support audio input upsampling/downsampling, implement this.
            realSession.Dispose();

            return new DummyHardwareDeviceSessionInput(this, memoryManager, sampleFormat, sampleRate, channelCount);
        }

        // It must be a HardwareDeviceSessionOutputBase.
        if (realSession is not HardwareDeviceSessionOutputBase realSessionOutputBase)
        {
            throw new InvalidOperationException($"Real driver session class type isn't based on {typeof(HardwareDeviceSessionOutputBase).Name}.");
        }

        // If we need to do post processing before sending to the hardware device, wrap around it.
        return new CompatLayerHardwareDeviceSession(realSessionOutputBase, sampleFormat, channelCount);
    }

    public bool SupportsChannelCount(uint channelCount)
    {
        return channelCount == 1 || channelCount == 2 || channelCount == 6;
    }

    public bool SupportsSampleFormat(SampleFormat sampleFormat)
    {
        // TODO: More formats.
        return sampleFormat == SampleFormat.PcmInt16;
    }

    public bool SupportsSampleRate(uint sampleRate)
    {
        // TODO: More sample rates.
        return sampleRate == Constants.TargetSampleRate;
    }

    public IHardwareDeviceDriver GetRealDeviceDriver()
    {
        return _realDriver;
    }

    public bool SupportsDirection(Direction direction)
    {
        return direction == Direction.Input || direction == Direction.Output;
    }
}
+} \ No newline at end of file
diff --git a/src/Ryujinx.Audio/Backends/CompatLayer/CompatLayerHardwareDeviceSession.cs b/src/Ryujinx.Audio/Backends/CompatLayer/CompatLayerHardwareDeviceSession.cs
new file mode 100644
index 00000000..f22a7a69
--- /dev/null
+++ b/src/Ryujinx.Audio/Backends/CompatLayer/CompatLayerHardwareDeviceSession.cs
@@ -0,0 +1,162 @@
+using Ryujinx.Audio.Backends.Common;
+using Ryujinx.Audio.Common;
+using Ryujinx.Audio.Renderer.Dsp;
+using System;
+using System.Runtime.InteropServices;
+
+namespace Ryujinx.Audio.Backends.CompatLayer
+{
/// <summary>
/// Output session wrapper that converts PCM16 user buffers to the real session's
/// sample format (<see cref="QueueBuffer"/>) and downmixes 5.1/stereo channel
/// layouts to what the real session was opened with (<see cref="RegisterBuffer"/>).
/// </summary>
class CompatLayerHardwareDeviceSession : HardwareDeviceSessionOutputBase
{
    // The real backend session; all operations are forwarded to it. Set once in the constructor.
    private readonly HardwareDeviceSessionOutputBase _realSession;
    // Sample format and channel count as seen by the user of this session.
    private readonly SampleFormat _userSampleFormat;
    private readonly uint _userChannelCount;

    public CompatLayerHardwareDeviceSession(HardwareDeviceSessionOutputBase realSession, SampleFormat userSampleFormat, uint userChannelCount) : base(realSession.MemoryManager, realSession.RequestedSampleFormat, realSession.RequestedSampleRate, userChannelCount)
    {
        _realSession = realSession;
        _userSampleFormat = userSampleFormat;
        _userChannelCount = userChannelCount;
    }

    public override void Dispose()
    {
        _realSession.Dispose();
    }

    public override ulong GetPlayedSampleCount()
    {
        return _realSession.GetPlayedSampleCount();
    }

    public override float GetVolume()
    {
        return _realSession.GetVolume();
    }

    public override void PrepareToClose()
    {
        _realSession.PrepareToClose();
    }

    /// <summary>
    /// Queue a buffer on the real session, converting its samples from the user
    /// format (PCM16 only) to the real session's format first when they differ.
    /// </summary>
    /// <exception cref="NotImplementedException">If a conversion other than from PCM16 is required.</exception>
    public override void QueueBuffer(AudioBuffer buffer)
    {
        SampleFormat realSampleFormat = _realSession.RequestedSampleFormat;

        if (_userSampleFormat != realSampleFormat)
        {
            if (_userSampleFormat != SampleFormat.PcmInt16)
            {
                throw new NotImplementedException("Converting formats other than PCM16 is not supported.");
            }

            int userSampleCount = buffer.Data.Length / BackendHelper.GetSampleSize(_userSampleFormat);

            ReadOnlySpan<short> samples = MemoryMarshal.Cast<byte, short>(buffer.Data);
            byte[] convertedSamples = new byte[BackendHelper.GetSampleSize(realSampleFormat) * userSampleCount];

            switch (realSampleFormat)
            {
                case SampleFormat.PcmInt8:
                    PcmHelper.ConvertSampleToPcm8(MemoryMarshal.Cast<byte, sbyte>(convertedSamples), samples);
                    break;
                case SampleFormat.PcmInt24:
                    PcmHelper.ConvertSampleToPcm24(convertedSamples, samples);
                    break;
                case SampleFormat.PcmInt32:
                    PcmHelper.ConvertSampleToPcm32(MemoryMarshal.Cast<byte, int>(convertedSamples), samples);
                    break;
                case SampleFormat.PcmFloat:
                    PcmHelper.ConvertSampleToPcmFloat(MemoryMarshal.Cast<byte, float>(convertedSamples), samples);
                    break;
                default:
                    throw new NotImplementedException($"Sample format conversion from {_userSampleFormat} to {realSampleFormat} not implemented.");
            }

            // NOTE(review): buffer.DataSize is left at the user-format size here — confirm
            // callers track consumption in user terms rather than converted bytes.
            buffer.Data = convertedSamples;
        }

        _realSession.QueueBuffer(buffer);
    }

    /// <summary>
    /// Register a buffer on the real session, downmixing the PCM16 sample data
    /// (5.1 -> stereo -> mono as needed) when the user channel count differs from
    /// the real session's channel count.
    /// </summary>
    /// <returns>True if the real session accepted the buffer.</returns>
    /// <exception cref="NotImplementedException">If the required downmix path or format is unsupported.</exception>
    public override bool RegisterBuffer(AudioBuffer buffer, byte[] samples)
    {
        if (samples == null)
        {
            return false;
        }

        if (_userChannelCount != _realSession.RequestedChannelCount)
        {
            if (_userSampleFormat != SampleFormat.PcmInt16)
            {
                throw new NotImplementedException("Downmixing formats other than PCM16 is not supported.");
            }

            ReadOnlySpan<short> samplesPCM16 = MemoryMarshal.Cast<byte, short>(samples);

            if (_userChannelCount == 6)
            {
                samplesPCM16 = Downmixing.DownMixSurroundToStereo(samplesPCM16);

                // 5.1 -> mono goes through stereo first.
                if (_realSession.RequestedChannelCount == 1)
                {
                    samplesPCM16 = Downmixing.DownMixStereoToMono(samplesPCM16);
                }
            }
            else if (_userChannelCount == 2 && _realSession.RequestedChannelCount == 1)
            {
                samplesPCM16 = Downmixing.DownMixStereoToMono(samplesPCM16);
            }
            else
            {
                throw new NotImplementedException($"Downmixing from {_userChannelCount} to {_realSession.RequestedChannelCount} not implemented.");
            }

            samples = MemoryMarshal.Cast<short, byte>(samplesPCM16).ToArray();
        }

        // Register under the same tag/pointer but with the (possibly smaller) downmixed size.
        AudioBuffer fakeBuffer = new AudioBuffer
        {
            BufferTag = buffer.BufferTag,
            DataPointer = buffer.DataPointer,
            DataSize = (ulong)samples.Length
        };

        bool result = _realSession.RegisterBuffer(fakeBuffer, samples);

        // Reflect the real registration back on the user's buffer on success.
        if (result)
        {
            buffer.Data = fakeBuffer.Data;
            buffer.DataSize = fakeBuffer.DataSize;
        }

        return result;
    }

    public override void SetVolume(float volume)
    {
        _realSession.SetVolume(volume);
    }

    public override void Start()
    {
        _realSession.Start();
    }

    public override void Stop()
    {
        _realSession.Stop();
    }

    public override void UnregisterBuffer(AudioBuffer buffer)
    {
        _realSession.UnregisterBuffer(buffer);
    }

    public override bool WasBufferFullyConsumed(AudioBuffer buffer)
    {
        return _realSession.WasBufferFullyConsumed(buffer);
    }
}
+} \ No newline at end of file
diff --git a/src/Ryujinx.Audio/Backends/CompatLayer/Downmixing.cs b/src/Ryujinx.Audio/Backends/CompatLayer/Downmixing.cs
new file mode 100644
index 00000000..6959c158
--- /dev/null
+++ b/src/Ryujinx.Audio/Backends/CompatLayer/Downmixing.cs
@@ -0,0 +1,125 @@
+using System;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+
+namespace Ryujinx.Audio.Backends.CompatLayer
+{
/// <summary>
/// Fixed-point PCM16 downmixing helpers (5.1 surround -> stereo, stereo -> mono).
/// Coefficients are stored with a 2^16 scale (note: despite the "Q15" names, the
/// scale used is 1 &lt;&lt; 16, i.e. Q16).
/// </summary>
public static class Downmixing
{
    // Interleaved 5.1 PCM16 frame layout.
    [StructLayout(LayoutKind.Sequential, Pack = 1)]
    private struct Channel51FormatPCM16
    {
        public short FrontLeft;
        public short FrontRight;
        public short FrontCenter;
        public short LowFrequency;
        public short BackLeft;
        public short BackRight;
    }

    // Interleaved stereo PCM16 frame layout.
    [StructLayout(LayoutKind.Sequential, Pack = 1)]
    private struct ChannelStereoFormatPCM16
    {
        public short Left;
        public short Right;
    }

    private const int Q15Bits = 16;
    private const int RawQ15One = 1 << Q15Bits;
    private const int RawQ15HalfOne = (int)(0.5f * RawQ15One);
    private const int Minus3dBInQ15 = (int)(0.707f * RawQ15One);
    private const int Minus6dBInQ15 = (int)(0.501f * RawQ15One);
    private const int Minus12dBInQ15 = (int)(0.251f * RawQ15One);

    // [front, center, lfe, back] gains for the 5.1 -> stereo mix.
    private static readonly int[] DefaultSurroundToStereoCoefficients = new int[4]
    {
        RawQ15One,
        Minus3dBInQ15,
        Minus12dBInQ15,
        Minus3dBInQ15
    };

    // [left, right] gains for the stereo -> mono mix.
    private static readonly int[] DefaultStereoToMonoCoefficients = new int[2]
    {
        Minus6dBInQ15,
        Minus6dBInQ15
    };

    private const int SurroundChannelCount = 6;
    private const int StereoChannelCount = 2;
    private const int MonoChannelCount = 1;

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    private static ReadOnlySpan<Channel51FormatPCM16> GetSurroundBuffer(ReadOnlySpan<short> data)
    {
        return MemoryMarshal.Cast<short, Channel51FormatPCM16>(data);
    }

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    private static ReadOnlySpan<ChannelStereoFormatPCM16> GetStereoBuffer(ReadOnlySpan<short> data)
    {
        return MemoryMarshal.Cast<short, ChannelStereoFormatPCM16>(data);
    }

    // Saturate a mixed sample to the PCM16 range instead of letting the cast wrap.
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    private static short Saturate(long value)
    {
        return (short)Math.Clamp(value, short.MinValue, short.MaxValue);
    }

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    private static short DownMixStereoToMono(ReadOnlySpan<int> coefficients, short left, short right)
    {
        // Accumulate in 64 bits: left * coeff can reach ~1.08e9 per term, so the
        // two-term sum overflows a 32-bit int at full scale; clamp on the way out.
        long mixed = (long)left * coefficients[0] + (long)right * coefficients[1];

        return Saturate(mixed >> Q15Bits);
    }

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    private static short DownMixSurroundToStereo(ReadOnlySpan<int> coefficients, short back, short lfe, short center, short front)
    {
        // 64-bit accumulation avoids int overflow of the four weighted terms;
        // RawQ15HalfOne applies round-to-nearest before the scale shift.
        long mixed = (long)coefficients[3] * back
                   + (long)coefficients[2] * lfe
                   + (long)coefficients[1] * center
                   + (long)coefficients[0] * front
                   + RawQ15HalfOne;

        return Saturate(mixed >> Q15Bits);
    }

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    private static short[] DownMixSurroundToStereo(ReadOnlySpan<int> coefficients, ReadOnlySpan<short> data)
    {
        int samplePerChannelCount = data.Length / SurroundChannelCount;

        short[] downmixedBuffer = new short[samplePerChannelCount * StereoChannelCount];

        ReadOnlySpan<Channel51FormatPCM16> channels = GetSurroundBuffer(data);

        for (int i = 0; i < samplePerChannelCount; i++)
        {
            Channel51FormatPCM16 channel = channels[i];

            downmixedBuffer[i * 2] = DownMixSurroundToStereo(coefficients, channel.BackLeft, channel.LowFrequency, channel.FrontCenter, channel.FrontLeft);
            downmixedBuffer[i * 2 + 1] = DownMixSurroundToStereo(coefficients, channel.BackRight, channel.LowFrequency, channel.FrontCenter, channel.FrontRight);
        }

        return downmixedBuffer;
    }

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    private static short[] DownMixStereoToMono(ReadOnlySpan<int> coefficients, ReadOnlySpan<short> data)
    {
        int samplePerChannelCount = data.Length / StereoChannelCount;

        short[] downmixedBuffer = new short[samplePerChannelCount * MonoChannelCount];

        ReadOnlySpan<ChannelStereoFormatPCM16> channels = GetStereoBuffer(data);

        for (int i = 0; i < samplePerChannelCount; i++)
        {
            ChannelStereoFormatPCM16 channel = channels[i];

            downmixedBuffer[i] = DownMixStereoToMono(coefficients, channel.Left, channel.Right);
        }

        return downmixedBuffer;
    }

    /// <summary>
    /// Downmix interleaved stereo PCM16 samples to mono (-6 dB per channel).
    /// </summary>
    /// <param name="data">Interleaved stereo PCM16 samples (L, R, L, R, ...).</param>
    /// <returns>A new buffer with one mono sample per input frame.</returns>
    public static short[] DownMixStereoToMono(ReadOnlySpan<short> data)
    {
        return DownMixStereoToMono(DefaultStereoToMonoCoefficients, data);
    }

    /// <summary>
    /// Downmix interleaved 5.1 PCM16 samples to stereo.
    /// </summary>
    /// <param name="data">Interleaved 5.1 PCM16 samples (FL, FR, FC, LFE, BL, BR, ...).</param>
    /// <returns>A new buffer with one stereo frame per input frame.</returns>
    public static short[] DownMixSurroundToStereo(ReadOnlySpan<short> data)
    {
        return DownMixSurroundToStereo(DefaultSurroundToStereoCoefficients, data);
    }
}
+} \ No newline at end of file