aboutsummaryrefslogtreecommitdiff
path: root/Ryujinx.Audio/Renderers/OpenAL/OpenALAudioOut.cs
diff options
context:
space:
mode:
authorMary <me@thog.eu>2020-08-18 21:03:55 +0200
committerGitHub <noreply@github.com>2020-08-18 21:03:55 +0200
commit5b26e4ef94afca8450f07c42393180e3c97f9c00 (patch)
tree5c698591bd557711a487430ba06921b10ce53761 /Ryujinx.Audio/Renderers/OpenAL/OpenALAudioOut.cs
parenta389dd59bd881cf2cff09a1f67f5c30de61123e6 (diff)
Misc audio fixes (#1348)
Changes: Implement software surround downmixing (fix #796). Fix a crash when no audio renderer were created when stopping emulation. NOTE: This PR also disable support of 5.1 surround on the OpenAL backend as we cannot detect if the hardware directly support it. (the downmixing applied by OpenAL on Windows is terribly slow)
Diffstat (limited to 'Ryujinx.Audio/Renderers/OpenAL/OpenALAudioOut.cs')
-rw-r--r--Ryujinx.Audio/Renderers/OpenAL/OpenALAudioOut.cs49
1 file changed, 43 insertions, 6 deletions
diff --git a/Ryujinx.Audio/Renderers/OpenAL/OpenALAudioOut.cs b/Ryujinx.Audio/Renderers/OpenAL/OpenALAudioOut.cs
index ea5ce621..fe82fced 100644
--- a/Ryujinx.Audio/Renderers/OpenAL/OpenALAudioOut.cs
+++ b/Ryujinx.Audio/Renderers/OpenAL/OpenALAudioOut.cs
@@ -104,15 +104,24 @@ namespace Ryujinx.Audio
_context.Dispose();
}
+ public bool SupportsChannelCount(int channels)
+ {
+ // NOTE: OpenAL doesn't give us a way to know if the 5.1 setup is supported by hardware or actually emulated.
+ // TODO: find a way to determine hardware support.
+ return channels == 1 || channels == 2;
+ }
+
/// <summary>
/// Creates a new audio track with the specified parameters
/// </summary>
/// <param name="sampleRate">The requested sample rate</param>
- /// <param name="channels">The requested channels</param>
+ /// <param name="hardwareChannels">The requested hardware channels</param>
+ /// <param name="virtualChannels">The requested virtual channels</param>
/// <param name="callback">A <see cref="ReleaseCallback" /> that represents the delegate to invoke when a buffer has been released by the audio track</param>
- public int OpenTrack(int sampleRate, int channels, ReleaseCallback callback)
+ /// <returns>The created track's Track ID</returns>
+ public int OpenHardwareTrack(int sampleRate, int hardwareChannels, int virtualChannels, ReleaseCallback callback)
{
- OpenALAudioTrack track = new OpenALAudioTrack(sampleRate, GetALFormat(channels), callback);
+ OpenALAudioTrack track = new OpenALAudioTrack(sampleRate, GetALFormat(hardwareChannels), hardwareChannels, virtualChannels, callback);
for (int id = 0; id < MaxTracks; id++)
{
@@ -204,9 +213,37 @@ namespace Ryujinx.Audio
{
int bufferId = track.AppendBuffer(bufferTag);
- int size = buffer.Length * Marshal.SizeOf<T>();
-
- AL.BufferData(bufferId, track.Format, buffer, size, track.SampleRate);
+ // Do we need to downmix?
+ if (track.HardwareChannels != track.VirtualChannels)
+ {
+ short[] downmixedBuffer;
+
+ ReadOnlySpan<short> bufferPCM16 = MemoryMarshal.Cast<T, short>(buffer);
+
+ if (track.VirtualChannels == 6)
+ {
+ downmixedBuffer = Downmixing.DownMixSurroundToStereo(bufferPCM16);
+
+ if (track.HardwareChannels == 1)
+ {
+ downmixedBuffer = Downmixing.DownMixStereoToMono(downmixedBuffer);
+ }
+ }
+ else if (track.VirtualChannels == 2)
+ {
+ downmixedBuffer = Downmixing.DownMixStereoToMono(bufferPCM16);
+ }
+ else
+ {
+ throw new NotImplementedException($"Downmixing from {track.VirtualChannels} to {track.HardwareChannels} not implemented!");
+ }
+
+ AL.BufferData(bufferId, track.Format, downmixedBuffer, downmixedBuffer.Length * sizeof(ushort), track.SampleRate);
+ }
+ else
+ {
+ AL.BufferData(bufferId, track.Format, buffer, buffer.Length * sizeof(ushort), track.SampleRate);
+ }
AL.SourceQueueBuffer(track.SourceId, bufferId);