-rw-r--r--  Ryujinx.Audio.Renderer/Dsp/AudioProcessor.cs                    7
-rw-r--r--  Ryujinx.Audio/Downmixing.cs                                    127
-rw-r--r--  Ryujinx.Audio/DspUtils.cs                                        2
-rw-r--r--  Ryujinx.Audio/IAalOutput.cs                                     31
-rw-r--r--  Ryujinx.Audio/Native/libsoundio/SoundIODevice.cs                 5
-rw-r--r--  Ryujinx.Audio/Renderers/DummyAudioOut.cs                        11
-rw-r--r--  Ryujinx.Audio/Renderers/OpenAL/OpenALAudioOut.cs                49
-rw-r--r--  Ryujinx.Audio/Renderers/OpenAL/OpenALAudioTrack.cs               8
-rw-r--r--  Ryujinx.Audio/Renderers/SoundIo/SoundIoAudioOut.cs              12
-rw-r--r--  Ryujinx.Audio/Renderers/SoundIo/SoundIoAudioTrack.cs            76
-rw-r--r--  Ryujinx.HLE/HOS/Services/Audio/AudioOutManager/IAudioOut.cs      6
11 files changed, 303 insertions(+), 31 deletions(-)
diff --git a/Ryujinx.Audio.Renderer/Dsp/AudioProcessor.cs b/Ryujinx.Audio.Renderer/Dsp/AudioProcessor.cs
index 90f6cd51..674f20f9 100644
--- a/Ryujinx.Audio.Renderer/Dsp/AudioProcessor.cs
+++ b/Ryujinx.Audio.Renderer/Dsp/AudioProcessor.cs
@@ -54,6 +54,11 @@ namespace Ryujinx.Audio.Renderer.Dsp
private long _playbackEnds;
private ManualResetEvent _event;
+ public AudioProcessor()
+ {
+ _event = new ManualResetEvent(false);
+ }
+
public void SetOutputDevices(HardwareDevice[] outputDevices)
{
_outputDevices = outputDevices;
@@ -63,7 +68,7 @@ namespace Ryujinx.Audio.Renderer.Dsp
{
_mailbox = new Mailbox<MailboxMessage>();
_sessionCommandList = new RendererSession[RendererConstants.AudioRendererSessionCountMax];
- _event = new ManualResetEvent(false);
+ _event.Reset();
_lastTime = PerformanceCounter.ElapsedNanoseconds;
StartThread();
diff --git a/Ryujinx.Audio/Downmixing.cs b/Ryujinx.Audio/Downmixing.cs
new file mode 100644
index 00000000..bd020b11
--- /dev/null
+++ b/Ryujinx.Audio/Downmixing.cs
@@ -0,0 +1,127 @@
+using System;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+
+namespace Ryujinx.Audio
+{
+ public static class Downmixing
+ {
+ [StructLayout(LayoutKind.Sequential, Pack = 1)]
+ private struct Channel51FormatPCM16
+ {
+ public short FrontLeft;
+ public short FrontRight;
+ public short FrontCenter;
+ public short LowFrequency;
+ public short BackLeft;
+ public short BackRight;
+ }
+
+ [StructLayout(LayoutKind.Sequential, Pack = 1)]
+ private struct ChannelStereoFormatPCM16
+ {
+ public short Left;
+ public short Right;
+ }
+
+ private const int Q15Bits = 16;
+ private const int RawQ15One = 1 << Q15Bits;
+ private const int RawQ15HalfOne = (int)(0.5f * RawQ15One);
+ private const int Minus3dBInQ15 = (int)(0.707f * RawQ15One);
+ private const int Minus6dBInQ15 = (int)(0.501f * RawQ15One);
+ private const int Minus12dBInQ15 = (int)(0.251f * RawQ15One);
+
+ private static int[] DefaultSurroundToStereoCoefficients = new int[4]
+ {
+ RawQ15One,
+ Minus3dBInQ15,
+ Minus12dBInQ15,
+ Minus3dBInQ15,
+ };
+
+ private static int[] DefaultStereoToMonoCoefficients = new int[2]
+ {
+ Minus6dBInQ15,
+ Minus6dBInQ15,
+ };
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ private static ReadOnlySpan<Channel51FormatPCM16> GetSurroundBuffer(ReadOnlySpan<short> data)
+ {
+ return MemoryMarshal.Cast<short, Channel51FormatPCM16>(data);
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ private static ReadOnlySpan<ChannelStereoFormatPCM16> GetStereoBuffer(ReadOnlySpan<short> data)
+ {
+ return MemoryMarshal.Cast<short, ChannelStereoFormatPCM16>(data);
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ private static short DownMixStereoToMono(ReadOnlySpan<int> coefficients, short left, short right)
+ {
+ return (short)((left * coefficients[0] + right * coefficients[1]) >> Q15Bits);
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ private static short DownMixSurroundToStereo(ReadOnlySpan<int> coefficients, short back, short lfe, short center, short front)
+ {
+ return (short)((coefficients[3] * back + coefficients[2] * lfe + coefficients[1] * center + coefficients[0] * front + RawQ15HalfOne) >> Q15Bits);
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ private static short[] DownMixSurroundToStereo(ReadOnlySpan<int> coefficients, ReadOnlySpan<short> data)
+ {
+ const int SurroundChannelCount = 6;
+ const int StereoChannelCount = 2;
+
+ int samplePerChannelCount = data.Length / SurroundChannelCount;
+
+ short[] downmixedBuffer = new short[samplePerChannelCount * StereoChannelCount];
+
+ ReadOnlySpan<Channel51FormatPCM16> channels = GetSurroundBuffer(data);
+
+ for (int i = 0; i < samplePerChannelCount; i++)
+ {
+ Channel51FormatPCM16 channel = channels[i];
+
+ downmixedBuffer[i * 2] = DownMixSurroundToStereo(coefficients, channel.BackLeft, channel.LowFrequency, channel.FrontCenter, channel.FrontLeft);
+ downmixedBuffer[i * 2 + 1] = DownMixSurroundToStereo(coefficients, channel.BackRight, channel.LowFrequency, channel.FrontCenter, channel.FrontRight);
+ }
+
+ return downmixedBuffer;
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ private static short[] DownMixStereoToMono(ReadOnlySpan<int> coefficients, ReadOnlySpan<short> data)
+ {
+ const int StereoChannelCount = 2;
+ const int MonoChannelCount = 1;
+
+ int samplePerChannelCount = data.Length / StereoChannelCount;
+
+ short[] downmixedBuffer = new short[samplePerChannelCount * MonoChannelCount];
+
+ ReadOnlySpan<ChannelStereoFormatPCM16> channels = GetStereoBuffer(data);
+
+ for (int i = 0; i < samplePerChannelCount; i++)
+ {
+ ChannelStereoFormatPCM16 channel = channels[i];
+
+ downmixedBuffer[i] = DownMixStereoToMono(coefficients, channel.Left, channel.Right);
+ }
+
+ return downmixedBuffer;
+ }
+
+ public static short[] DownMixStereoToMono(ReadOnlySpan<short> data)
+ {
+ return DownMixStereoToMono(DefaultStereoToMonoCoefficients, data);
+ }
+
+ public static short[] DownMixSurroundToStereo(ReadOnlySpan<short> data)
+ {
+ return DownMixSurroundToStereo(DefaultSurroundToStereoCoefficients, data);
+ }
+ }
+}
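
The downmix above is a plain fixed-point weighted sum: each stereo output sample is front + 0.707 * center + 0.251 * LFE + 0.707 * back, and each mono sample is 0.501 * (left + right), using the Q15-style coefficients declared at the top of the file. A minimal usage sketch of the two public helpers, chained the same way the backends fall back (the sample values are hypothetical, and it assumes a reference to the Ryujinx.Audio assembly):

using System;
using Ryujinx.Audio;

class DownmixExample
{
    static void Main()
    {
        // One hypothetical 5.1 PCM16 frame, in the struct order above: FL, FR, FC, LFE, BL, BR.
        short[] surround = { 1000, -1000, 500, 200, 300, -300 };

        // 5.1 -> stereo, then stereo -> mono, mirroring the fallback chain used by the renderers.
        short[] stereo = Downmixing.DownMixSurroundToStereo(surround);
        short[] mono   = Downmixing.DownMixStereoToMono(stereo);

        Console.WriteLine($"stereo: [{string.Join(", ", stereo)}]");
        Console.WriteLine($"mono:   [{string.Join(", ", mono)}]");
    }
}
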
diff --git a/Ryujinx.Audio/DspUtils.cs b/Ryujinx.Audio/DspUtils.cs
index c048161d..44e22d73 100644
--- a/Ryujinx.Audio/DspUtils.cs
+++ b/Ryujinx.Audio/DspUtils.cs
@@ -1,4 +1,4 @@
-namespace Ryujinx.Audio.Adpcm
+namespace Ryujinx.Audio
{
public static class DspUtils
{
diff --git a/Ryujinx.Audio/IAalOutput.cs b/Ryujinx.Audio/IAalOutput.cs
index 489f9002..821c1ffb 100644
--- a/Ryujinx.Audio/IAalOutput.cs
+++ b/Ryujinx.Audio/IAalOutput.cs
@@ -4,7 +4,34 @@ namespace Ryujinx.Audio
{
public interface IAalOutput : IDisposable
{
- int OpenTrack(int sampleRate, int channels, ReleaseCallback callback);
+ bool SupportsChannelCount(int channels);
+
+ private int SelectHardwareChannelCount(int targetChannelCount)
+ {
+ if (SupportsChannelCount(targetChannelCount))
+ {
+ return targetChannelCount;
+ }
+
+ switch (targetChannelCount)
+ {
+ case 6:
+ return SelectHardwareChannelCount(2);
+ case 2:
+ return SelectHardwareChannelCount(1);
+ case 1:
+ throw new ArgumentException("No valid channel configuration found!");
+ default:
+ throw new ArgumentException($"Invalid targetChannelCount {targetChannelCount}");
+ }
+ }
+
+ int OpenTrack(int sampleRate, int channels, ReleaseCallback callback)
+ {
+ return OpenHardwareTrack(sampleRate, SelectHardwareChannelCount(channels), channels, callback);
+ }
+
+ int OpenHardwareTrack(int sampleRate, int hardwareChannels, int virtualChannels, ReleaseCallback callback);
void CloseTrack(int trackId);
@@ -12,7 +39,7 @@ namespace Ryujinx.Audio
long[] GetReleasedBuffers(int trackId, int maxCount);
- void AppendBuffer<T>(int trackId, long bufferTag, T[] buffer) where T : struct;
+ void AppendBuffer<T>(int trackId, long bufferTag, T[] buffer) where T : struct;
void Start(int trackId);
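
The default interface method added above gives every backend one shared fallback policy: an unsupported 5.1 request degrades to stereo, stereo degrades to mono, and only a failed mono request is fatal. A standalone sketch of that cascade (MonoOnlyOutput is a hypothetical backend used only to exercise the path):

using System;

// Hypothetical backend that only exposes a mono device, to exercise the fallback path.
class MonoOnlyOutput
{
    public bool SupportsChannelCount(int channels) => channels == 1;

    // Same cascade as IAalOutput.SelectHardwareChannelCount: 6 -> 2 -> 1, then give up.
    public int SelectHardwareChannelCount(int targetChannelCount)
    {
        if (SupportsChannelCount(targetChannelCount))
        {
            return targetChannelCount;
        }

        return targetChannelCount switch
        {
            6 => SelectHardwareChannelCount(2),
            2 => SelectHardwareChannelCount(1),
            1 => throw new ArgumentException("No valid channel configuration found!"),
            _ => throw new ArgumentException($"Invalid targetChannelCount {targetChannelCount}")
        };
    }
}

class FallbackExample
{
    static void Main()
    {
        var output = new MonoOnlyOutput();
        Console.WriteLine(output.SelectHardwareChannelCount(6)); // prints 1: 6 -> 2 -> 1
    }
}
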
diff --git a/Ryujinx.Audio/Native/libsoundio/SoundIODevice.cs b/Ryujinx.Audio/Native/libsoundio/SoundIODevice.cs
index 81b78b67..dff945b6 100644
--- a/Ryujinx.Audio/Native/libsoundio/SoundIODevice.cs
+++ b/Ryujinx.Audio/Native/libsoundio/SoundIODevice.cs
@@ -197,6 +197,11 @@ namespace SoundIOSharp
return Natives.soundio_device_supports_sample_rate (handle, sampleRate);
}
+ public bool SupportsChannelCount(int channelCount)
+ {
+ return Natives.soundio_device_supports_layout(handle, SoundIOChannelLayout.GetDefault(channelCount).Handle);
+ }
+
public int GetNearestSampleRate (int sampleRate)
{
return Natives.soundio_device_nearest_sample_rate (handle, sampleRate);
diff --git a/Ryujinx.Audio/Renderers/DummyAudioOut.cs b/Ryujinx.Audio/Renderers/DummyAudioOut.cs
index 10943ae6..2698b928 100644
--- a/Ryujinx.Audio/Renderers/DummyAudioOut.cs
+++ b/Ryujinx.Audio/Renderers/DummyAudioOut.cs
@@ -30,7 +30,12 @@ namespace Ryujinx.Audio
public PlaybackState GetState(int trackId) => PlaybackState.Stopped;
- public int OpenTrack(int sampleRate, int channels, ReleaseCallback callback)
+ public bool SupportsChannelCount(int channels)
+ {
+ return true;
+ }
+
+ public int OpenHardwareTrack(int sampleRate, int hardwareChannels, int virtualChannels, ReleaseCallback callback)
{
if (!_trackIds.TryDequeue(out int trackId))
{
@@ -67,11 +72,11 @@ namespace Ryujinx.Audio
return bufferTags.ToArray();
}
- public void AppendBuffer<T>(int trackID, long bufferTag, T[] buffer) where T : struct
+ public void AppendBuffer<T>(int trackId, long bufferTag, T[] buffer) where T : struct
{
_buffers.Enqueue(bufferTag);
- if (_releaseCallbacks.TryGetValue(trackID, out var callback))
+ if (_releaseCallbacks.TryGetValue(trackId, out var callback))
{
callback?.Invoke();
}
diff --git a/Ryujinx.Audio/Renderers/OpenAL/OpenALAudioOut.cs b/Ryujinx.Audio/Renderers/OpenAL/OpenALAudioOut.cs
index ea5ce621..fe82fced 100644
--- a/Ryujinx.Audio/Renderers/OpenAL/OpenALAudioOut.cs
+++ b/Ryujinx.Audio/Renderers/OpenAL/OpenALAudioOut.cs
@@ -104,15 +104,24 @@ namespace Ryujinx.Audio
_context.Dispose();
}
+ public bool SupportsChannelCount(int channels)
+ {
+ // NOTE: OpenAL doesn't give us a way to know if the 5.1 setup is supported by hardware or actually emulated.
+ // TODO: find a way to determine hardware support.
+ return channels == 1 || channels == 2;
+ }
+
/// <summary>
/// Creates a new audio track with the specified parameters
/// </summary>
/// <param name="sampleRate">The requested sample rate</param>
- /// <param name="channels">The requested channels</param>
+ /// <param name="hardwareChannels">The requested hardware channels</param>
+ /// <param name="virtualChannels">The requested virtual channels</param>
/// <param name="callback">A <see cref="ReleaseCallback" /> that represents the delegate to invoke when a buffer has been released by the audio track</param>
- public int OpenTrack(int sampleRate, int channels, ReleaseCallback callback)
+ /// <returns>The created track's Track ID</returns>
+ public int OpenHardwareTrack(int sampleRate, int hardwareChannels, int virtualChannels, ReleaseCallback callback)
{
- OpenALAudioTrack track = new OpenALAudioTrack(sampleRate, GetALFormat(channels), callback);
+ OpenALAudioTrack track = new OpenALAudioTrack(sampleRate, GetALFormat(hardwareChannels), hardwareChannels, virtualChannels, callback);
for (int id = 0; id < MaxTracks; id++)
{
@@ -204,9 +213,37 @@ namespace Ryujinx.Audio
{
int bufferId = track.AppendBuffer(bufferTag);
- int size = buffer.Length * Marshal.SizeOf<T>();
-
- AL.BufferData(bufferId, track.Format, buffer, size, track.SampleRate);
+ // Do we need to downmix?
+ if (track.HardwareChannels != track.VirtualChannels)
+ {
+ short[] downmixedBuffer;
+
+ ReadOnlySpan<short> bufferPCM16 = MemoryMarshal.Cast<T, short>(buffer);
+
+ if (track.VirtualChannels == 6)
+ {
+ downmixedBuffer = Downmixing.DownMixSurroundToStereo(bufferPCM16);
+
+ if (track.HardwareChannels == 1)
+ {
+ downmixedBuffer = Downmixing.DownMixStereoToMono(downmixedBuffer);
+ }
+ }
+ else if (track.VirtualChannels == 2)
+ {
+ downmixedBuffer = Downmixing.DownMixStereoToMono(bufferPCM16);
+ }
+ else
+ {
+ throw new NotImplementedException($"Downmixing from {track.VirtualChannels} to {track.HardwareChannels} not implemented!");
+ }
+
+ AL.BufferData(bufferId, track.Format, downmixedBuffer, downmixedBuffer.Length * sizeof(ushort), track.SampleRate);
+ }
+ else
+ {
+ AL.BufferData(bufferId, track.Format, buffer, buffer.Length * sizeof(ushort), track.SampleRate);
+ }
AL.SourceQueueBuffer(track.SourceId, bufferId);
diff --git a/Ryujinx.Audio/Renderers/OpenAL/OpenALAudioTrack.cs b/Ryujinx.Audio/Renderers/OpenAL/OpenALAudioTrack.cs
index 8629dc96..2f150998 100644
--- a/Ryujinx.Audio/Renderers/OpenAL/OpenALAudioTrack.cs
+++ b/Ryujinx.Audio/Renderers/OpenAL/OpenALAudioTrack.cs
@@ -12,6 +12,9 @@ namespace Ryujinx.Audio
public ALFormat Format { get; private set; }
public PlaybackState State { get; set; }
+ public int HardwareChannels { get; }
+ public int VirtualChannels { get; }
+
private ReleaseCallback _callback;
private ConcurrentDictionary<long, int> _buffers;
@@ -21,13 +24,16 @@ namespace Ryujinx.Audio
private bool _disposed;
- public OpenALAudioTrack(int sampleRate, ALFormat format, ReleaseCallback callback)
+ public OpenALAudioTrack(int sampleRate, ALFormat format, int hardwareChannels, int virtualChannels, ReleaseCallback callback)
{
SampleRate = sampleRate;
Format = format;
State = PlaybackState.Stopped;
SourceId = AL.GenSource();
+ HardwareChannels = hardwareChannels;
+ VirtualChannels = virtualChannels;
+
_callback = callback;
_buffers = new ConcurrentDictionary<long, int>();
diff --git a/Ryujinx.Audio/Renderers/SoundIo/SoundIoAudioOut.cs b/Ryujinx.Audio/Renderers/SoundIo/SoundIoAudioOut.cs
index 1e487a6d..fa3961e4 100644
--- a/Ryujinx.Audio/Renderers/SoundIo/SoundIoAudioOut.cs
+++ b/Ryujinx.Audio/Renderers/SoundIo/SoundIoAudioOut.cs
@@ -65,14 +65,20 @@ namespace Ryujinx.Audio
_trackPool = new SoundIoAudioTrackPool(_audioContext, _audioDevice, MaximumTracks);
}
+ public bool SupportsChannelCount(int channels)
+ {
+ return _audioDevice.SupportsChannelCount(channels);
+ }
+
/// <summary>
/// Creates a new audio track with the specified parameters
/// </summary>
/// <param name="sampleRate">The requested sample rate</param>
- /// <param name="channels">The requested channels</param>
+ /// <param name="hardwareChannels">The requested hardware channels</param>
+ /// <param name="virtualChannels">The requested virtual channels</param>
/// <param name="callback">A <see cref="ReleaseCallback" /> that represents the delegate to invoke when a buffer has been released by the audio track</param>
/// <returns>The created track's Track ID</returns>
- public int OpenTrack(int sampleRate, int channels, ReleaseCallback callback)
+ public int OpenHardwareTrack(int sampleRate, int hardwareChannels, int virtualChannels, ReleaseCallback callback)
{
if (!_trackPool.TryGet(out SoundIoAudioTrack track))
{
@@ -80,7 +86,7 @@ namespace Ryujinx.Audio
}
// Open the output. We currently only support 16-bit signed LE
- track.Open(sampleRate, channels, callback, SoundIOFormat.S16LE);
+ track.Open(sampleRate, hardwareChannels, virtualChannels, callback, SoundIOFormat.S16LE);
return track.TrackID;
}
diff --git a/Ryujinx.Audio/Renderers/SoundIo/SoundIoAudioTrack.cs b/Ryujinx.Audio/Renderers/SoundIo/SoundIoAudioTrack.cs
index 97ba11d5..6fdeb991 100644
--- a/Ryujinx.Audio/Renderers/SoundIo/SoundIoAudioTrack.cs
+++ b/Ryujinx.Audio/Renderers/SoundIo/SoundIoAudioTrack.cs
@@ -3,6 +3,7 @@ using System;
using System.Collections.Concurrent;
using System.Linq;
using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
namespace Ryujinx.Audio.SoundIo
{
@@ -53,6 +54,9 @@ namespace Ryujinx.Audio.SoundIo
/// </summary>
public ConcurrentQueue<long> ReleasedBuffers { get; private set; }
+ private int _hardwareChannels;
+ private int _virtualChannels;
+
/// <summary>
/// Constructs a new instance of a <see cref="SoundIoAudioTrack"/>
/// </summary>
@@ -75,12 +79,14 @@ namespace Ryujinx.Audio.SoundIo
/// Opens the audio track with the specified parameters
/// </summary>
/// <param name="sampleRate">The requested sample rate of the track</param>
- /// <param name="channelCount">The requested channel count of the track</param>
+ /// <param name="hardwareChannels">The requested hardware channels</param>
+ /// <param name="virtualChannels">The requested virtual channels</param>
/// <param name="callback">A <see cref="ReleaseCallback" /> that represents the delegate to invoke when a buffer has been released by the audio track</param>
/// <param name="format">The requested sample format of the track</param>
public void Open(
int sampleRate,
- int channelCount,
+ int hardwareChannels,
+ int virtualChannels,
ReleaseCallback callback,
SoundIOFormat format = SoundIOFormat.S16LE)
{
@@ -100,10 +106,18 @@ namespace Ryujinx.Audio.SoundIo
throw new InvalidOperationException($"This sound device does not support SoundIOFormat.{Enum.GetName(typeof(SoundIOFormat), format)}");
}
+ if (!AudioDevice.SupportsChannelCount(hardwareChannels))
+ {
+ throw new InvalidOperationException($"This sound device does not support channel count {hardwareChannels}");
+ }
+
+ _hardwareChannels = hardwareChannels;
+ _virtualChannels = virtualChannels;
+
AudioStream = AudioDevice.CreateOutStream();
AudioStream.Name = $"SwitchAudioTrack_{TrackID}";
- AudioStream.Layout = SoundIOChannelLayout.GetDefault(channelCount);
+ AudioStream.Layout = SoundIOChannelLayout.GetDefault(hardwareChannels);
AudioStream.Format = format;
AudioStream.SampleRate = sampleRate;
@@ -490,24 +504,62 @@ namespace Ryujinx.Audio.SoundIo
/// <typeparam name="T">The audio sample type</typeparam>
/// <param name="bufferTag">The unqiue tag of the buffer being appended</param>
/// <param name="buffer">The buffer to append</param>
- public void AppendBuffer<T>(long bufferTag, T[] buffer)
+ public void AppendBuffer<T>(long bufferTag, T[] buffer) where T: struct
{
if (AudioStream == null)
{
return;
}
- // Calculate the size of the audio samples
- int size = Unsafe.SizeOf<T>();
+ int sampleSize = Unsafe.SizeOf<T>();
+ int targetSize = sampleSize * buffer.Length;
+
+ // Do we need to downmix?
+ if (_hardwareChannels != _virtualChannels)
+ {
+ if (sampleSize != sizeof(short))
+ {
+ throw new NotImplementedException("Downmixing formats other than PCM16 is not supported!");
+ }
+
+ short[] downmixedBuffer;
+
+ ReadOnlySpan<short> bufferPCM16 = MemoryMarshal.Cast<T, short>(buffer);
+
+ if (_virtualChannels == 6)
+ {
+ downmixedBuffer = Downmixing.DownMixSurroundToStereo(bufferPCM16);
+
+ if (_hardwareChannels == 1)
+ {
+ downmixedBuffer = Downmixing.DownMixStereoToMono(downmixedBuffer);
+ }
+ }
+ else if (_virtualChannels == 2)
+ {
+ downmixedBuffer = Downmixing.DownMixStereoToMono(bufferPCM16);
+ }
+ else
+ {
+ throw new NotImplementedException($"Downmixing from {_virtualChannels} to {_hardwareChannels} not implemented!");
+ }
+
+ targetSize = sampleSize * downmixedBuffer.Length;
- // Calculate the amount of bytes to copy from the buffer
- int bytesToCopy = size * buffer.Length;
+ // Copy the memory to our ring buffer
+ m_Buffer.Write(downmixedBuffer, 0, targetSize);
- // Copy the memory to our ring buffer
- m_Buffer.Write(buffer, 0, bytesToCopy);
+ // Keep track of "buffered" buffers
+ m_ReservedBuffers.Enqueue(new SoundIoBuffer(bufferTag, targetSize));
+ }
+ else
+ {
+ // Copy the memory to our ring buffer
+ m_Buffer.Write(buffer, 0, targetSize);
- // Keep track of "buffered" buffers
- m_ReservedBuffers.Enqueue(new SoundIoBuffer(bufferTag, bytesToCopy));
+ // Keep track of "buffered" buffers
+ m_ReservedBuffers.Enqueue(new SoundIoBuffer(bufferTag, targetSize));
+ }
}
/// <summary>
diff --git a/Ryujinx.HLE/HOS/Services/Audio/AudioOutManager/IAudioOut.cs b/Ryujinx.HLE/HOS/Services/Audio/AudioOutManager/IAudioOut.cs
index e6b7cb3d..d75fecf2 100644
--- a/Ryujinx.HLE/HOS/Services/Audio/AudioOutManager/IAudioOut.cs
+++ b/Ryujinx.HLE/HOS/Services/Audio/AudioOutManager/IAudioOut.cs
@@ -4,6 +4,7 @@ using Ryujinx.HLE.HOS.Ipc;
using Ryujinx.HLE.HOS.Kernel.Common;
using Ryujinx.HLE.HOS.Kernel.Threading;
using System;
+using System.Runtime.InteropServices;
namespace Ryujinx.HLE.HOS.Services.Audio.AudioOutManager
{
@@ -106,9 +107,10 @@ namespace Ryujinx.HLE.HOS.Services.Audio.AudioOutManager
context.Memory,
position);
- byte[] buffer = new byte[data.SampleBufferSize];
+ // NOTE: Assume PCM16 all the time; change if new formats are found.
+ short[] buffer = new short[data.SampleBufferSize / sizeof(short)];
- context.Memory.Read((ulong)data.SampleBufferPtr, buffer);
+ context.Memory.Read((ulong)data.SampleBufferPtr, MemoryMarshal.Cast<short, byte>(buffer));
_audioOut.AppendBuffer(_track, tag, buffer);
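
On the HLE side, allocating the staging buffer as short[] and reinterpreting it as bytes keeps the PCM16 view and the raw memory read backed by the same allocation. A small sketch of that MemoryMarshal.Cast pattern (the buffer size is a made-up value standing in for data.SampleBufferSize):

using System;
using System.Runtime.InteropServices;

class CastExample
{
    static void Main()
    {
        // Hypothetical sample buffer size in bytes, as a buffer descriptor would report it.
        int sampleBufferSize = 8;

        // Allocate PCM16 samples, but expose the same memory as a byte span for the raw read.
        short[] buffer = new short[sampleBufferSize / sizeof(short)];
        Span<short> samples = buffer;
        Span<byte> raw = MemoryMarshal.Cast<short, byte>(samples);

        // Whatever is written through 'raw' is immediately visible as shorts, with no copy.
        raw[0] = 0x34;
        raw[1] = 0x12;
        Console.WriteLine($"buffer[0] = 0x{buffer[0]:X4}"); // 0x1234 on little-endian hosts
    }
}
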