diff --git a/Ryujinx.Audio.Renderer/Dsp/AudioProcessor.cs b/Ryujinx.Audio.Renderer/Dsp/AudioProcessor.cs
index 90f6cd5176..674f20f967 100644
--- a/Ryujinx.Audio.Renderer/Dsp/AudioProcessor.cs
+++ b/Ryujinx.Audio.Renderer/Dsp/AudioProcessor.cs
@@ -54,6 +54,11 @@ namespace Ryujinx.Audio.Renderer.Dsp
         private long _playbackEnds;
         private ManualResetEvent _event;

+        public AudioProcessor()
+        {
+            _event = new ManualResetEvent(false);
+        }
+
         public void SetOutputDevices(HardwareDevice[] outputDevices)
         {
             _outputDevices = outputDevices;
@@ -63,7 +68,7 @@
         {
             _mailbox = new Mailbox<MailboxMessage>();
             _sessionCommandList = new RendererSession[RendererConstants.AudioRendererSessionCountMax];
-            _event = new ManualResetEvent(false);
+            _event.Reset();
             _lastTime = PerformanceCounter.ElapsedNanoseconds;

             StartThread();
diff --git a/Ryujinx.Audio/Downmixing.cs b/Ryujinx.Audio/Downmixing.cs
new file mode 100644
index 0000000000..bd020b1195
--- /dev/null
+++ b/Ryujinx.Audio/Downmixing.cs
@@ -0,0 +1,127 @@
+using System;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+
+namespace Ryujinx.Audio
+{
+    public static class Downmixing
+    {
+        [StructLayout(LayoutKind.Sequential, Pack = 1)]
+        private struct Channel51FormatPCM16
+        {
+            public short FrontLeft;
+            public short FrontRight;
+            public short FrontCenter;
+            public short LowFrequency;
+            public short BackLeft;
+            public short BackRight;
+        }
+
+        [StructLayout(LayoutKind.Sequential, Pack = 1)]
+        private struct ChannelStereoFormatPCM16
+        {
+            public short Left;
+            public short Right;
+        }
+
+        private const int Q15Bits = 16;
+        private const int RawQ15One = 1 << Q15Bits;
+        private const int RawQ15HalfOne = (int)(0.5f * RawQ15One);
+        private const int Minus3dBInQ15 = (int)(0.707f * RawQ15One);
+        private const int Minus6dBInQ15 = (int)(0.501f * RawQ15One);
+        private const int Minus12dBInQ15 = (int)(0.251f * RawQ15One);
+
+        private static int[] DefaultSurroundToStereoCoefficients = new int[4]
+        {
+            RawQ15One,
+            Minus3dBInQ15,
+            Minus12dBInQ15,
+            Minus3dBInQ15,
+        };
+
+        private static int[] DefaultStereoToMonoCoefficients = new int[2]
+        {
+            Minus6dBInQ15,
+            Minus6dBInQ15,
+        };
+
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
+        private static ReadOnlySpan<Channel51FormatPCM16> GetSurroundBuffer(ReadOnlySpan<short> data)
+        {
+            return MemoryMarshal.Cast<short, Channel51FormatPCM16>(data);
+        }
+
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
+        private static ReadOnlySpan<ChannelStereoFormatPCM16> GetStereoBuffer(ReadOnlySpan<short> data)
+        {
+            return MemoryMarshal.Cast<short, ChannelStereoFormatPCM16>(data);
+        }
+
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
+        private static short DownMixStereoToMono(ReadOnlySpan<int> coefficients, short left, short right)
+        {
+            return (short)((left * coefficients[0] + right * coefficients[1]) >> Q15Bits);
+        }
+
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
+        private static short DownMixSurroundToStereo(ReadOnlySpan<int> coefficients, short back, short lfe, short center, short front)
+        {
+            return (short)((coefficients[3] * back + coefficients[2] * lfe + coefficients[1] * center + coefficients[0] * front + RawQ15HalfOne) >> Q15Bits);
+        }
+
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
+        private static short[] DownMixSurroundToStereo(ReadOnlySpan<int> coefficients, ReadOnlySpan<short> data)
+        {
+            const int SurroundChannelCount = 6;
+            const int StereoChannelCount = 2;
+
+            int samplePerChannelCount = data.Length / SurroundChannelCount;
+
+            short[] downmixedBuffer = new short[samplePerChannelCount * StereoChannelCount];
+
+            ReadOnlySpan<Channel51FormatPCM16> channels = GetSurroundBuffer(data);
+
+            for (int i = 0; i < samplePerChannelCount; i++)
+            {
+                Channel51FormatPCM16 channel = channels[i];
+
+                downmixedBuffer[i * 2] = DownMixSurroundToStereo(coefficients, channel.BackLeft, channel.LowFrequency, channel.FrontCenter, channel.FrontLeft);
+                downmixedBuffer[i * 2 + 1] = DownMixSurroundToStereo(coefficients, channel.BackRight, channel.LowFrequency, channel.FrontCenter, channel.FrontRight);
+            }
+
+            return downmixedBuffer;
+        }
+
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
+        private static short[] DownMixStereoToMono(ReadOnlySpan<int> coefficients, ReadOnlySpan<short> data)
+        {
+            const int StereoChannelCount = 2;
+            const int MonoChannelCount = 1;
+
+            int samplePerChannelCount = data.Length / StereoChannelCount;
+
+            short[] downmixedBuffer = new short[samplePerChannelCount * MonoChannelCount];
+
+            ReadOnlySpan<ChannelStereoFormatPCM16> channels = GetStereoBuffer(data);
+
+            for (int i = 0; i < samplePerChannelCount; i++)
+            {
+                ChannelStereoFormatPCM16 channel = channels[i];
+
+                downmixedBuffer[i] = DownMixStereoToMono(coefficients, channel.Left, channel.Right);
+            }
+
+            return downmixedBuffer;
+        }
+
+        public static short[] DownMixStereoToMono(ReadOnlySpan<short> data)
+        {
+            return DownMixStereoToMono(DefaultStereoToMonoCoefficients, data);
+        }
+
+        public static short[] DownMixSurroundToStereo(ReadOnlySpan<short> data)
+        {
+            return DownMixSurroundToStereo(DefaultSurroundToStereoCoefficients, data);
+        }
+    }
+}
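Editor's note: the Q15 helpers above downmix with integer multiply-accumulates followed by a 16-bit shift, using 1.0 for the front pair, -3 dB (0.707) for the center and back channels, -12 dB (0.251) for the LFE, and -6 dB (0.501) per side when folding stereo to mono. The sketch below only illustrates how the public API introduced by this file is meant to be called on a single interleaved 5.1 frame; the sample values and the DownmixExample wrapper are invented for the illustration and are not part of the patch.

    using System;
    using Ryujinx.Audio;

    class DownmixExample
    {
        static void Main()
        {
            // One 5.1 frame, interleaved PCM16 in the Channel51FormatPCM16 order:
            // FrontLeft, FrontRight, FrontCenter, LowFrequency, BackLeft, BackRight.
            short[] surround = { 1000, -1000, 2000, 500, 300, -300 };

            // Left  = FL * 1.0 + FC * 0.707 + LFE * 0.251 + BL * 0.707 (plus rounding, then >> 16).
            // Right = FR * 1.0 + FC * 0.707 + LFE * 0.251 + BR * 0.707.
            short[] stereo = Downmixing.DownMixSurroundToStereo(surround);

            // Mono = L * 0.501 + R * 0.501 (about -6 dB per channel before summing).
            short[] mono = Downmixing.DownMixStereoToMono(stereo);

            Console.WriteLine($"stereo: [{stereo[0]}, {stereo[1]}], mono: [{mono[0]}]");
        }
    }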
diff --git a/Ryujinx.Audio/DspUtils.cs b/Ryujinx.Audio/DspUtils.cs
index c048161dae..44e22d73a8 100644
--- a/Ryujinx.Audio/DspUtils.cs
+++ b/Ryujinx.Audio/DspUtils.cs
@@ -1,4 +1,4 @@
-namespace Ryujinx.Audio.Adpcm
+namespace Ryujinx.Audio
 {
     public static class DspUtils
     {
diff --git a/Ryujinx.Audio/IAalOutput.cs b/Ryujinx.Audio/IAalOutput.cs
index 489f90028e..821c1ffb55 100644
--- a/Ryujinx.Audio/IAalOutput.cs
+++ b/Ryujinx.Audio/IAalOutput.cs
@@ -4,7 +4,34 @@ namespace Ryujinx.Audio
 {
     public interface IAalOutput : IDisposable
     {
-        int OpenTrack(int sampleRate, int channels, ReleaseCallback callback);
+        bool SupportsChannelCount(int channels);
+
+        private int SelectHardwareChannelCount(int targetChannelCount)
+        {
+            if (SupportsChannelCount(targetChannelCount))
+            {
+                return targetChannelCount;
+            }
+
+            switch (targetChannelCount)
+            {
+                case 6:
+                    return SelectHardwareChannelCount(2);
+                case 2:
+                    return SelectHardwareChannelCount(1);
+                case 1:
+                    throw new ArgumentException("No valid channel configuration found!");
+                default:
+                    throw new ArgumentException($"Invalid targetChannelCount {targetChannelCount}");
+            }
+        }
+
+        int OpenTrack(int sampleRate, int channels, ReleaseCallback callback)
+        {
+            return OpenHardwareTrack(sampleRate, SelectHardwareChannelCount(channels), channels, callback);
+        }
+
+        int OpenHardwareTrack(int sampleRate, int hardwareChannels, int virtualChannels, ReleaseCallback callback);

         void CloseTrack(int trackId);

@@ -12,7 +39,7 @@

         long[] GetReleasedBuffers(int trackId, int maxCount);

-        void AppendBuffer<T>(int trackId, long bufferTag, T[] buffer) where T : struct;
+        void AppendBuffer<T>(int trackId, long bufferTag, T[] buffer) where T : struct;

         void Start(int trackId);
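Editor's note: the default interface methods above keep the existing OpenTrack entry point while each backend only has to answer SupportsChannelCount and implement OpenHardwareTrack; unsupported layouts fall back 6 -> 2 -> 1 before failing. The fragment below restates that fallback walk as a free-standing method so it can be exercised on its own; ChannelFallbackExample, SelectChannels and the stereo-only predicate are illustrative names, not part of the patch.

    using System;

    static class ChannelFallbackExample
    {
        // Mirrors IAalOutput.SelectHardwareChannelCount: try the requested layout,
        // otherwise walk 6 -> 2 -> 1 and fail below mono.
        static int SelectChannels(int requested, Func<int, bool> supported)
        {
            if (supported(requested))
            {
                return requested;
            }

            switch (requested)
            {
                case 6:
                    return SelectChannels(2, supported);
                case 2:
                    return SelectChannels(1, supported);
                case 1:
                    throw new ArgumentException("No valid channel configuration found!");
                default:
                    throw new ArgumentException($"Invalid channel count {requested}");
            }
        }

        static void Main()
        {
            // A stereo-only device: a 5.1 (6 channel) request resolves to 2 hardware channels,
            // which OpenTrack then passes to OpenHardwareTrack together with the original
            // virtual channel count of 6, so AppendBuffer knows it must downmix.
            Func<int, bool> stereoOnly = channels => channels == 2;

            Console.WriteLine(SelectChannels(6, stereoOnly)); // 2
            Console.WriteLine(SelectChannels(2, stereoOnly)); // 2
        }
    }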
diff --git a/Ryujinx.Audio/Native/libsoundio/SoundIODevice.cs b/Ryujinx.Audio/Native/libsoundio/SoundIODevice.cs
index 81b78b679f..dff945b60c 100644
--- a/Ryujinx.Audio/Native/libsoundio/SoundIODevice.cs
+++ b/Ryujinx.Audio/Native/libsoundio/SoundIODevice.cs
@@ -197,6 +197,11 @@ namespace SoundIOSharp
             return Natives.soundio_device_supports_sample_rate (handle, sampleRate);
         }

+        public bool SupportsChannelCount(int channelCount)
+        {
+            return Natives.soundio_device_supports_layout(handle, SoundIOChannelLayout.GetDefault(channelCount).Handle);
+        }
+
         public int GetNearestSampleRate (int sampleRate)
         {
             return Natives.soundio_device_nearest_sample_rate (handle, sampleRate);
diff --git a/Ryujinx.Audio/Renderers/DummyAudioOut.cs b/Ryujinx.Audio/Renderers/DummyAudioOut.cs
index 10943ae62b..2698b92819 100644
--- a/Ryujinx.Audio/Renderers/DummyAudioOut.cs
+++ b/Ryujinx.Audio/Renderers/DummyAudioOut.cs
@@ -30,7 +30,12 @@ namespace Ryujinx.Audio

         public PlaybackState GetState(int trackId) => PlaybackState.Stopped;

-        public int OpenTrack(int sampleRate, int channels, ReleaseCallback callback)
+        public bool SupportsChannelCount(int channels)
+        {
+            return true;
+        }
+
+        public int OpenHardwareTrack(int sampleRate, int hardwareChannels, int virtualChannels, ReleaseCallback callback)
         {
             if (!_trackIds.TryDequeue(out int trackId))
             {
@@ -67,11 +72,11 @@ namespace Ryujinx.Audio
             return bufferTags.ToArray();
         }

-        public void AppendBuffer<T>(int trackID, long bufferTag, T[] buffer) where T : struct
+        public void AppendBuffer<T>(int trackId, long bufferTag, T[] buffer) where T : struct
         {
             _buffers.Enqueue(bufferTag);

-            if (_releaseCallbacks.TryGetValue(trackID, out var callback))
+            if (_releaseCallbacks.TryGetValue(trackId, out var callback))
             {
                 callback?.Invoke();
             }
diff --git a/Ryujinx.Audio/Renderers/OpenAL/OpenALAudioOut.cs b/Ryujinx.Audio/Renderers/OpenAL/OpenALAudioOut.cs
index ea5ce62146..fe82fced26 100644
--- a/Ryujinx.Audio/Renderers/OpenAL/OpenALAudioOut.cs
+++ b/Ryujinx.Audio/Renderers/OpenAL/OpenALAudioOut.cs
@@ -104,15 +104,24 @@ namespace Ryujinx.Audio
             _context.Dispose();
         }

+        public bool SupportsChannelCount(int channels)
+        {
+            // NOTE: OpenAL doesn't give us a way to know if the 5.1 setup is supported by hardware or actually emulated.
+            // TODO: find a way to determine hardware support.
+            return channels == 1 || channels == 2;
+        }
+
         /// <summary>
         /// Creates a new audio track with the specified parameters
         /// </summary>
         /// <param name="sampleRate">The requested sample rate</param>
-        /// <param name="channels">The requested channels</param>
+        /// <param name="hardwareChannels">The requested hardware channels</param>
+        /// <param name="virtualChannels">The requested virtual channels</param>
         /// <param name="callback">A <see cref="ReleaseCallback" /> that represents the delegate to invoke when a buffer has been released by the audio track</param>
-        public int OpenTrack(int sampleRate, int channels, ReleaseCallback callback)
+        /// <returns>The created track's Track ID</returns>
+        public int OpenHardwareTrack(int sampleRate, int hardwareChannels, int virtualChannels, ReleaseCallback callback)
         {
-            OpenALAudioTrack track = new OpenALAudioTrack(sampleRate, GetALFormat(channels), callback);
+            OpenALAudioTrack track = new OpenALAudioTrack(sampleRate, GetALFormat(hardwareChannels), hardwareChannels, virtualChannels, callback);

             for (int id = 0; id < MaxTracks; id++)
             {
@@ -204,9 +213,37 @@
             {
                 int bufferId = track.AppendBuffer(bufferTag);

-                int size = buffer.Length * Marshal.SizeOf<T>();
+                // Do we need to downmix?
+                if (track.HardwareChannels != track.VirtualChannels)
+                {
+                    short[] downmixedBuffer;

-                AL.BufferData(bufferId, track.Format, buffer, size, track.SampleRate);
+                    ReadOnlySpan<short> bufferPCM16 = MemoryMarshal.Cast<T, short>(buffer);
+
+                    if (track.VirtualChannels == 6)
+                    {
+                        downmixedBuffer = Downmixing.DownMixSurroundToStereo(bufferPCM16);
+
+                        if (track.HardwareChannels == 1)
+                        {
+                            downmixedBuffer = Downmixing.DownMixStereoToMono(downmixedBuffer);
+                        }
+                    }
+                    else if (track.VirtualChannels == 2)
+                    {
+                        downmixedBuffer = Downmixing.DownMixStereoToMono(bufferPCM16);
+                    }
+                    else
+                    {
+                        throw new NotImplementedException($"Downmixing from {track.VirtualChannels} to {track.HardwareChannels} not implemented!");
+                    }
+
+                    AL.BufferData(bufferId, track.Format, downmixedBuffer, downmixedBuffer.Length * sizeof(ushort), track.SampleRate);
+                }
+                else
+                {
+                    AL.BufferData(bufferId, track.Format, buffer, buffer.Length * sizeof(ushort), track.SampleRate);
+                }

                 AL.SourceQueueBuffer(track.SourceId, bufferId);
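Editor's note: in the OpenAL path above, the byte count handed to AL.BufferData is recomputed from the downmixed array (downmixedBuffer.Length * sizeof(ushort)) rather than from the source buffer, because downmixing shrinks the sample count. The small program below just works through those sizes for a hypothetical buffer of 240 frames; the frame count is made up for the illustration.

    using System;

    class BufferSizeExample
    {
        static void Main()
        {
            const int frames = 240;                // samples per channel in one appended buffer

            int surroundSamples = frames * 6;      // 1440 PCM16 samples arrive from the guest
            int stereoSamples   = frames * 2;      // 480 samples remain after DownMixSurroundToStereo
            int monoSamples     = frames * 1;      // 240 samples if a second pass goes down to mono

            // Two bytes per PCM16 sample, matching the sizeof(ushort) factor used above.
            Console.WriteLine($"5.1:    {surroundSamples * sizeof(ushort)} bytes"); // 2880
            Console.WriteLine($"stereo: {stereoSamples * sizeof(ushort)} bytes");   // 960
            Console.WriteLine($"mono:   {monoSamples * sizeof(ushort)} bytes");     // 480
        }
    }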
diff --git a/Ryujinx.Audio/Renderers/OpenAL/OpenALAudioTrack.cs b/Ryujinx.Audio/Renderers/OpenAL/OpenALAudioTrack.cs
index 8629dc969c..2f15099866 100644
--- a/Ryujinx.Audio/Renderers/OpenAL/OpenALAudioTrack.cs
+++ b/Ryujinx.Audio/Renderers/OpenAL/OpenALAudioTrack.cs
@@ -12,6 +12,9 @@ namespace Ryujinx.Audio
         public ALFormat Format { get; private set; }
         public PlaybackState State { get; set; }

+        public int HardwareChannels { get; }
+        public int VirtualChannels { get; }
+
         private ReleaseCallback _callback;

         private ConcurrentDictionary<long, int> _buffers;
@@ -21,13 +24,16 @@ namespace Ryujinx.Audio

         private bool _disposed;

-        public OpenALAudioTrack(int sampleRate, ALFormat format, ReleaseCallback callback)
+        public OpenALAudioTrack(int sampleRate, ALFormat format, int hardwareChannels, int virtualChannels, ReleaseCallback callback)
         {
             SampleRate = sampleRate;
             Format = format;
             State = PlaybackState.Stopped;
             SourceId = AL.GenSource();

+            HardwareChannels = hardwareChannels;
+            VirtualChannels = virtualChannels;
+
             _callback = callback;

             _buffers = new ConcurrentDictionary<long, int>();
diff --git a/Ryujinx.Audio/Renderers/SoundIo/SoundIoAudioOut.cs b/Ryujinx.Audio/Renderers/SoundIo/SoundIoAudioOut.cs
index 1e487a6d93..fa3961e4b4 100644
--- a/Ryujinx.Audio/Renderers/SoundIo/SoundIoAudioOut.cs
+++ b/Ryujinx.Audio/Renderers/SoundIo/SoundIoAudioOut.cs
@@ -65,14 +65,20 @@ namespace Ryujinx.Audio
             _trackPool = new SoundIoAudioTrackPool(_audioContext, _audioDevice, MaximumTracks);
         }

+        public bool SupportsChannelCount(int channels)
+        {
+            return _audioDevice.SupportsChannelCount(channels);
+        }
+
         /// <summary>
         /// Creates a new audio track with the specified parameters
         /// </summary>
         /// <param name="sampleRate">The requested sample rate</param>
-        /// <param name="channels">The requested channels</param>
+        /// <param name="hardwareChannels">The requested hardware channels</param>
+        /// <param name="virtualChannels">The requested virtual channels</param>
         /// <param name="callback">A <see cref="ReleaseCallback" /> that represents the delegate to invoke when a buffer has been released by the audio track</param>
         /// <returns>The created track's Track ID</returns>
-        public int OpenTrack(int sampleRate, int channels, ReleaseCallback callback)
+        public int OpenHardwareTrack(int sampleRate, int hardwareChannels, int virtualChannels, ReleaseCallback callback)
         {
             if (!_trackPool.TryGet(out SoundIoAudioTrack track))
             {
@@ -80,7 +86,7 @@ namespace Ryujinx.Audio
             }

             // Open the output. We currently only support 16-bit signed LE
-            track.Open(sampleRate, channels, callback, SoundIOFormat.S16LE);
+            track.Open(sampleRate, hardwareChannels, virtualChannels, callback, SoundIOFormat.S16LE);

             return track.TrackID;
         }
diff --git a/Ryujinx.Audio/Renderers/SoundIo/SoundIoAudioTrack.cs b/Ryujinx.Audio/Renderers/SoundIo/SoundIoAudioTrack.cs
index 97ba11d513..6fdeb99148 100644
--- a/Ryujinx.Audio/Renderers/SoundIo/SoundIoAudioTrack.cs
+++ b/Ryujinx.Audio/Renderers/SoundIo/SoundIoAudioTrack.cs
@@ -3,6 +3,7 @@ using System;
 using System.Collections.Concurrent;
 using System.Linq;
 using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;

 namespace Ryujinx.Audio.SoundIo
 {
@@ -53,6 +54,9 @@ namespace Ryujinx.Audio.SoundIo
         /// </summary>
         public ConcurrentQueue<long> ReleasedBuffers { get; private set; }

+        private int _hardwareChannels;
+        private int _virtualChannels;
+
         /// <summary>
         /// Constructs a new instance of a <see cref="SoundIoAudioTrack"/>
         /// </summary>
@@ -75,12 +79,14 @@
         /// Opens the audio track with the specified parameters
         /// </summary>
         /// <param name="sampleRate">The requested sample rate of the track</param>
-        /// <param name="channelCount">The requested channel count of the track</param>
+        /// <param name="hardwareChannels">The requested hardware channels</param>
+        /// <param name="virtualChannels">The requested virtual channels</param>
         /// <param name="callback">A <see cref="ReleaseCallback" /> that represents the delegate to invoke when a buffer has been released by the audio track</param>
         /// <param name="format">The requested sample format of the track</param>
         public void Open(
             int             sampleRate,
-            int             channelCount,
+            int             hardwareChannels,
+            int             virtualChannels,
             ReleaseCallback callback,
             SoundIOFormat   format = SoundIOFormat.S16LE)
         {
@@ -100,10 +106,18 @@
                 throw new InvalidOperationException($"This sound device does not support SoundIOFormat.{Enum.GetName(typeof(SoundIOFormat), format)}");
             }

+            if (!AudioDevice.SupportsChannelCount(hardwareChannels))
+            {
+                throw new InvalidOperationException($"This sound device does not support channel count {hardwareChannels}");
+            }
+
+            _hardwareChannels = hardwareChannels;
+            _virtualChannels = virtualChannels;
+
             AudioStream = AudioDevice.CreateOutStream();

             AudioStream.Name = $"SwitchAudioTrack_{TrackID}";
-            AudioStream.Layout = SoundIOChannelLayout.GetDefault(channelCount);
+            AudioStream.Layout = SoundIOChannelLayout.GetDefault(hardwareChannels);
             AudioStream.Format = format;
             AudioStream.SampleRate = sampleRate;

@@ -490,24 +504,62 @@
         /// <typeparam name="T">The audio sample type</typeparam>
         /// <param name="bufferTag">The unqiue tag of the buffer being appended</param>
         /// <param name="buffer">The buffer to append</param>
-        public void AppendBuffer<T>(long bufferTag, T[] buffer)
+        public void AppendBuffer<T>(long bufferTag, T[] buffer) where T: struct
         {
             if (AudioStream == null)
             {
                 return;
             }

-            // Calculate the size of the audio samples
-            int size = Unsafe.SizeOf<T>();
+            int sampleSize = Unsafe.SizeOf<T>();
+            int targetSize = sampleSize * buffer.Length;

-            // Calculate the amount of bytes to copy from the buffer
-            int bytesToCopy = size * buffer.Length;
+            // Do we need to downmix?
+            if (_hardwareChannels != _virtualChannels)
+            {
+                if (sampleSize != sizeof(short))
+                {
+                    throw new NotImplementedException("Downmixing formats other than PCM16 is not supported!");
+                }

-            // Copy the memory to our ring buffer
-            m_Buffer.Write(buffer, 0, bytesToCopy);
+                short[] downmixedBuffer;

-            // Keep track of "buffered" buffers
-            m_ReservedBuffers.Enqueue(new SoundIoBuffer(bufferTag, bytesToCopy));
+                ReadOnlySpan<short> bufferPCM16 = MemoryMarshal.Cast<T, short>(buffer);
+
+                if (_virtualChannels == 6)
+                {
+                    downmixedBuffer = Downmixing.DownMixSurroundToStereo(bufferPCM16);
+
+                    if (_hardwareChannels == 1)
+                    {
+                        downmixedBuffer = Downmixing.DownMixStereoToMono(downmixedBuffer);
+                    }
+                }
+                else if (_virtualChannels == 2)
+                {
+                    downmixedBuffer = Downmixing.DownMixStereoToMono(bufferPCM16);
+                }
+                else
+                {
+                    throw new NotImplementedException($"Downmixing from {_virtualChannels} to {_hardwareChannels} not implemented!");
+                }
+
+                targetSize = sampleSize * downmixedBuffer.Length;
+
+                // Copy the memory to our ring buffer
+                m_Buffer.Write(downmixedBuffer, 0, targetSize);
+
+                // Keep track of "buffered" buffers
+                m_ReservedBuffers.Enqueue(new SoundIoBuffer(bufferTag, targetSize));
+            }
+            else
+            {
+                // Copy the memory to our ring buffer
+                m_Buffer.Write(buffer, 0, targetSize);
+
+                // Keep track of "buffered" buffers
+                m_ReservedBuffers.Enqueue(new SoundIoBuffer(bufferTag, targetSize));
+            }
         }

         /// <summary>
diff --git a/Ryujinx.HLE/HOS/Services/Audio/AudioOutManager/IAudioOut.cs b/Ryujinx.HLE/HOS/Services/Audio/AudioOutManager/IAudioOut.cs
index e6b7cb3d1f..d75fecf2a4 100644
--- a/Ryujinx.HLE/HOS/Services/Audio/AudioOutManager/IAudioOut.cs
+++ b/Ryujinx.HLE/HOS/Services/Audio/AudioOutManager/IAudioOut.cs
@@ -4,6 +4,7 @@ using Ryujinx.HLE.HOS.Ipc;
 using Ryujinx.HLE.HOS.Kernel.Common;
 using Ryujinx.HLE.HOS.Kernel.Threading;
 using System;
+using System.Runtime.InteropServices;

 namespace Ryujinx.HLE.HOS.Services.Audio.AudioOutManager
 {
@@ -106,9 +107,10 @@ namespace Ryujinx.HLE.HOS.Services.Audio.AudioOutManager
                 context.Memory,
                 position);

-            byte[] buffer = new byte[data.SampleBufferSize];
+            // NOTE: Assume PCM16 all the time, change if new format are found.
+            short[] buffer = new short[data.SampleBufferSize / sizeof(short)];

-            context.Memory.Read((ulong)data.SampleBufferPtr, buffer);
+            context.Memory.Read((ulong)data.SampleBufferPtr, MemoryMarshal.Cast<short, byte>(buffer));

             _audioOut.AppendBuffer(_track, tag, buffer);