Move solution and projects to src

TSR Berry
2023-04-08 01:22:00 +02:00
committed by Mary
parent cd124bda58
commit cee7121058
3466 changed files with 55 additions and 55 deletions


@@ -0,0 +1,75 @@
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Renderer.Common;
using System;
using static Ryujinx.Audio.Renderer.Parameter.VoiceInParameter;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class AdpcmDataSourceCommandVersion1 : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.AdpcmDataSourceVersion1;
public uint EstimatedProcessingTime { get; set; }
public ushort OutputBufferIndex { get; }
public uint SampleRate { get; }
public float Pitch { get; }
public WaveBuffer[] WaveBuffers { get; }
public Memory<VoiceUpdateState> State { get; }
public ulong AdpcmParameter { get; }
public ulong AdpcmParameterSize { get; }
public DecodingBehaviour DecodingBehaviour { get; }
public AdpcmDataSourceCommandVersion1(ref Server.Voice.VoiceState serverState, Memory<VoiceUpdateState> state, ushort outputBufferIndex, int nodeId)
{
Enabled = true;
NodeId = nodeId;
OutputBufferIndex = outputBufferIndex;
SampleRate = serverState.SampleRate;
Pitch = serverState.Pitch;
WaveBuffers = new WaveBuffer[Constants.VoiceWaveBufferCount];
for (int i = 0; i < WaveBuffers.Length; i++)
{
ref Server.Voice.WaveBuffer voiceWaveBuffer = ref serverState.WaveBuffers[i];
WaveBuffers[i] = voiceWaveBuffer.ToCommon(1);
}
AdpcmParameter = serverState.DataSourceStateAddressInfo.GetReference(true);
AdpcmParameterSize = serverState.DataSourceStateAddressInfo.Size;
State = state;
DecodingBehaviour = serverState.DecodingBehaviour;
}
public void Process(CommandList context)
{
Span<float> outputBuffer = context.GetBuffer(OutputBufferIndex);
DataSourceHelper.WaveBufferInformation info = new DataSourceHelper.WaveBufferInformation
{
SourceSampleRate = SampleRate,
SampleFormat = SampleFormat.Adpcm,
Pitch = Pitch,
DecodingBehaviour = DecodingBehaviour,
ExtraParameter = AdpcmParameter,
ExtraParameterSize = AdpcmParameterSize,
ChannelIndex = 0,
ChannelCount = 1,
};
DataSourceHelper.ProcessWaveBuffers(context.MemoryManager, outputBuffer, ref info, WaveBuffers, ref State.Span[0], context.SampleRate, (int)context.SampleCount);
}
}
}


@@ -0,0 +1,173 @@
using Ryujinx.Audio.Renderer.Common;
using Ryujinx.Memory;
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using static Ryujinx.Audio.Renderer.Dsp.State.AuxiliaryBufferHeader;
using CpuAddress = System.UInt64;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class AuxiliaryBufferCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.AuxiliaryBuffer;
public uint EstimatedProcessingTime { get; set; }
public uint InputBufferIndex { get; }
public uint OutputBufferIndex { get; }
public AuxiliaryBufferAddresses BufferInfo { get; }
public CpuAddress InputBuffer { get; }
public CpuAddress OutputBuffer { get; }
public uint CountMax { get; }
public uint UpdateCount { get; }
public uint WriteOffset { get; }
public bool IsEffectEnabled { get; }
public AuxiliaryBufferCommand(uint bufferOffset, byte inputBufferOffset, byte outputBufferOffset,
ref AuxiliaryBufferAddresses sendBufferInfo, bool isEnabled, uint countMax,
CpuAddress outputBuffer, CpuAddress inputBuffer, uint updateCount, uint writeOffset, int nodeId)
{
Enabled = true;
NodeId = nodeId;
InputBufferIndex = bufferOffset + inputBufferOffset;
OutputBufferIndex = bufferOffset + outputBufferOffset;
BufferInfo = sendBufferInfo;
InputBuffer = inputBuffer;
OutputBuffer = outputBuffer;
CountMax = countMax;
UpdateCount = updateCount;
WriteOffset = writeOffset;
IsEffectEnabled = isEnabled;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private uint Read(IVirtualMemoryManager memoryManager, ulong bufferAddress, uint countMax, Span<int> outBuffer, uint count, uint readOffset, uint updateCount)
{
if (countMax == 0 || bufferAddress == 0)
{
return 0;
}
uint targetReadOffset = readOffset + AuxiliaryBufferInfo.GetReadOffset(memoryManager, BufferInfo.ReturnBufferInfo);
if (targetReadOffset > countMax)
{
return 0;
}
uint remaining = count;
uint outBufferOffset = 0;
while (remaining != 0)
{
uint countToWrite = Math.Min(countMax - targetReadOffset, remaining);
memoryManager.Read(bufferAddress + targetReadOffset * sizeof(int), MemoryMarshal.Cast<int, byte>(outBuffer.Slice((int)outBufferOffset, (int)countToWrite)));
targetReadOffset = (targetReadOffset + countToWrite) % countMax;
remaining -= countToWrite;
outBufferOffset += countToWrite;
}
if (updateCount != 0)
{
uint newReadOffset = (AuxiliaryBufferInfo.GetReadOffset(memoryManager, BufferInfo.ReturnBufferInfo) + updateCount) % countMax;
AuxiliaryBufferInfo.SetReadOffset(memoryManager, BufferInfo.ReturnBufferInfo, newReadOffset);
}
return count;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private uint Write(IVirtualMemoryManager memoryManager, ulong outBufferAddress, uint countMax, ReadOnlySpan<int> buffer, uint count, uint writeOffset, uint updateCount)
{
if (countMax == 0 || outBufferAddress == 0)
{
return 0;
}
uint targetWriteOffset = writeOffset + AuxiliaryBufferInfo.GetWriteOffset(memoryManager, BufferInfo.SendBufferInfo);
if (targetWriteOffset > countMax)
{
return 0;
}
uint remaining = count;
uint inBufferOffset = 0;
while (remaining != 0)
{
uint countToWrite = Math.Min(countMax - targetWriteOffset, remaining);
memoryManager.Write(outBufferAddress + targetWriteOffset * sizeof(int), MemoryMarshal.Cast<int, byte>(buffer.Slice((int)inBufferOffset, (int)countToWrite)));
targetWriteOffset = (targetWriteOffset + countToWrite) % countMax;
remaining -= countToWrite;
inBufferOffset += countToWrite;
}
if (updateCount != 0)
{
uint newWriteOffset = (AuxiliaryBufferInfo.GetWriteOffset(memoryManager, BufferInfo.SendBufferInfo) + updateCount) % countMax;
AuxiliaryBufferInfo.SetWriteOffset(memoryManager, BufferInfo.SendBufferInfo, newWriteOffset);
}
return count;
}
public void Process(CommandList context)
{
Span<float> inputBuffer = context.GetBuffer((int)InputBufferIndex);
Span<float> outputBuffer = context.GetBuffer((int)OutputBufferIndex);
if (IsEffectEnabled)
{
Span<int> inputBufferInt = MemoryMarshal.Cast<float, int>(inputBuffer);
Span<int> outputBufferInt = MemoryMarshal.Cast<float, int>(outputBuffer);
// Convert input data to the target format for the user (int)
DataSourceHelper.ToInt(inputBufferInt, inputBuffer, inputBuffer.Length);
// Send the input to the user
Write(context.MemoryManager, OutputBuffer, CountMax, inputBufferInt, context.SampleCount, WriteOffset, UpdateCount);
// Convert back to float just in case it's reused
DataSourceHelper.ToFloat(inputBuffer, inputBufferInt, inputBuffer.Length);
// Retrieve the input from the user
uint readResult = Read(context.MemoryManager, InputBuffer, CountMax, outputBufferInt, context.SampleCount, WriteOffset, UpdateCount);
// Convert the outputBuffer back to the target format of the renderer (float)
DataSourceHelper.ToFloat(outputBuffer, outputBufferInt, outputBuffer.Length);
if (readResult != context.SampleCount)
{
outputBuffer.Slice((int)readResult, (int)context.SampleCount - (int)readResult).Fill(0);
}
}
else
{
AuxiliaryBufferInfo.Reset(context.MemoryManager, BufferInfo.SendBufferInfo);
AuxiliaryBufferInfo.Reset(context.MemoryManager, BufferInfo.ReturnBufferInfo);
if (InputBufferIndex != OutputBufferIndex)
{
inputBuffer.CopyTo(outputBuffer);
}
}
}
}
}
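
The Read and Write helpers above treat the guest-side buffer as a ring: the offset advances modulo CountMax and each transfer is split into at most two contiguous chunks when it wraps. A minimal sketch of that wrap-around copy over a plain managed array (hypothetical helper, not part of this diff) looks like this:

using System;

static class RingBufferSketch
{
    // Copies `source` into `ring` starting at `writeOffset`, wrapping at ring.Length,
    // and returns the new write offset. Mirrors the two-chunk copy loop above.
    public static int Write(int[] ring, int writeOffset, ReadOnlySpan<int> source)
    {
        int remaining = source.Length;
        int sourceOffset = 0;

        while (remaining != 0)
        {
            // Copy at most up to the end of the ring, then wrap back to the start.
            int countToWrite = Math.Min(ring.Length - writeOffset, remaining);

            source.Slice(sourceOffset, countToWrite).CopyTo(ring.AsSpan(writeOffset, countToWrite));

            writeOffset = (writeOffset + countToWrite) % ring.Length;
            remaining -= countToWrite;
            sourceOffset += countToWrite;
        }

        return writeOffset;
    }
}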


@@ -0,0 +1,51 @@
using Ryujinx.Audio.Renderer.Dsp.State;
using Ryujinx.Audio.Renderer.Parameter;
using System;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class BiquadFilterCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.BiquadFilter;
public uint EstimatedProcessingTime { get; set; }
public Memory<BiquadFilterState> BiquadFilterState { get; }
public int InputBufferIndex { get; }
public int OutputBufferIndex { get; }
public bool NeedInitialization { get; }
private BiquadFilterParameter _parameter;
public BiquadFilterCommand(int baseIndex, ref BiquadFilterParameter filter, Memory<BiquadFilterState> biquadFilterStateMemory, int inputBufferOffset, int outputBufferOffset, bool needInitialization, int nodeId)
{
_parameter = filter;
BiquadFilterState = biquadFilterStateMemory;
InputBufferIndex = baseIndex + inputBufferOffset;
OutputBufferIndex = baseIndex + outputBufferOffset;
NeedInitialization = needInitialization;
Enabled = true;
NodeId = nodeId;
}
public void Process(CommandList context)
{
ref BiquadFilterState state = ref BiquadFilterState.Span[0];
ReadOnlySpan<float> inputBuffer = context.GetBuffer(InputBufferIndex);
Span<float> outputBuffer = context.GetBuffer(OutputBufferIndex);
if (NeedInitialization)
{
state = new BiquadFilterState();
}
BiquadFilterHelper.ProcessBiquadFilter(ref _parameter, ref state, outputBuffer, inputBuffer, context.SampleCount);
}
}
}
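
BiquadFilterHelper itself is not part of this diff. For context, a second-order IIR (biquad) section is commonly evaluated per sample in transposed direct form II; a standalone sketch using the conventional b0/b1/b2/a1/a2 coefficient names (not necessarily the BiquadFilterParameter layout) is shown below:

using System;

// Illustrative transposed direct form II biquad, independent of Ryujinx's types.
struct BiquadSketch
{
    private float _s1;
    private float _s2;

    public void Process(Span<float> output, ReadOnlySpan<float> input,
        float b0, float b1, float b2, float a1, float a2)
    {
        for (int i = 0; i < input.Length; i++)
        {
            float x = input[i];
            float y = b0 * x + _s1;

            _s1 = b1 * x - a1 * y + _s2;
            _s2 = b2 * x - a2 * y;

            output[i] = y;
        }
    }
}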


@@ -0,0 +1,136 @@
using Ryujinx.Audio.Renderer.Dsp.State;
using Ryujinx.Memory;
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using static Ryujinx.Audio.Renderer.Dsp.State.AuxiliaryBufferHeader;
using CpuAddress = System.UInt64;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class CaptureBufferCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.CaptureBuffer;
public uint EstimatedProcessingTime { get; set; }
public uint InputBufferIndex { get; }
public ulong CpuBufferInfoAddress { get; }
public ulong DspBufferInfoAddress { get; }
public CpuAddress OutputBuffer { get; }
public uint CountMax { get; }
public uint UpdateCount { get; }
public uint WriteOffset { get; }
public bool IsEffectEnabled { get; }
public CaptureBufferCommand(uint bufferOffset, byte inputBufferOffset, ulong sendBufferInfo, bool isEnabled,
uint countMax, CpuAddress outputBuffer, uint updateCount, uint writeOffset, int nodeId)
{
Enabled = true;
NodeId = nodeId;
InputBufferIndex = bufferOffset + inputBufferOffset;
CpuBufferInfoAddress = sendBufferInfo;
DspBufferInfoAddress = sendBufferInfo + (ulong)Unsafe.SizeOf<AuxiliaryBufferHeader>();
OutputBuffer = outputBuffer;
CountMax = countMax;
UpdateCount = updateCount;
WriteOffset = writeOffset;
IsEffectEnabled = isEnabled;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private uint Write(IVirtualMemoryManager memoryManager, ulong outBufferAddress, uint countMax, ReadOnlySpan<int> buffer, uint count, uint writeOffset, uint updateCount)
{
if (countMax == 0 || outBufferAddress == 0)
{
return 0;
}
uint targetWriteOffset = writeOffset + AuxiliaryBufferInfo.GetWriteOffset(memoryManager, DspBufferInfoAddress);
if (targetWriteOffset > countMax)
{
return 0;
}
uint remaining = count;
uint inBufferOffset = 0;
while (remaining != 0)
{
uint countToWrite = Math.Min(countMax - targetWriteOffset, remaining);
memoryManager.Write(outBufferAddress + targetWriteOffset * sizeof(int), MemoryMarshal.Cast<int, byte>(buffer.Slice((int)inBufferOffset, (int)countToWrite)));
targetWriteOffset = (targetWriteOffset + countToWrite) % countMax;
remaining -= countToWrite;
inBufferOffset += countToWrite;
}
if (updateCount != 0)
{
uint dspTotalSampleCount = AuxiliaryBufferInfo.GetTotalSampleCount(memoryManager, DspBufferInfoAddress);
uint cpuTotalSampleCount = AuxiliaryBufferInfo.GetTotalSampleCount(memoryManager, CpuBufferInfoAddress);
uint totalSampleCountDiff = dspTotalSampleCount - cpuTotalSampleCount;
if (totalSampleCountDiff >= countMax)
{
uint dspLostSampleCount = AuxiliaryBufferInfo.GetLostSampleCount(memoryManager, DspBufferInfoAddress);
uint cpuLostSampleCount = AuxiliaryBufferInfo.GetLostSampleCount(memoryManager, CpuBufferInfoAddress);
uint lostSampleCountDiff = dspLostSampleCount - cpuLostSampleCount;
uint newLostSampleCount = lostSampleCountDiff + updateCount;
if (lostSampleCountDiff > newLostSampleCount)
{
newLostSampleCount = cpuLostSampleCount - 1;
}
AuxiliaryBufferInfo.SetLostSampleCount(memoryManager, DspBufferInfoAddress, newLostSampleCount);
}
uint newWriteOffset = (AuxiliaryBufferInfo.GetWriteOffset(memoryManager, DspBufferInfoAddress) + updateCount) % countMax;
AuxiliaryBufferInfo.SetWriteOffset(memoryManager, DspBufferInfoAddress, newWriteOffset);
uint newTotalSampleCount = totalSampleCountDiff + newWriteOffset;
AuxiliaryBufferInfo.SetTotalSampleCount(memoryManager, DspBufferInfoAddress, newTotalSampleCount);
}
return count;
}
public void Process(CommandList context)
{
Span<float> inputBuffer = context.GetBuffer((int)InputBufferIndex);
if (IsEffectEnabled)
{
Span<int> inputBufferInt = MemoryMarshal.Cast<float, int>(inputBuffer);
// Convert input data to the target format for the user (int)
DataSourceHelper.ToInt(inputBufferInt, inputBuffer, inputBuffer.Length);
// Send the input to the user
Write(context.MemoryManager, OutputBuffer, CountMax, inputBufferInt, context.SampleCount, WriteOffset, UpdateCount);
// Convert back to float
DataSourceHelper.ToFloat(inputBuffer, inputBufferInt, inputBuffer.Length);
}
else
{
AuxiliaryBufferInfo.Reset(context.MemoryManager, DspBufferInfoAddress);
}
}
}
}


@@ -0,0 +1,76 @@
using Ryujinx.Audio.Renderer.Parameter.Sink;
using Ryujinx.Audio.Renderer.Server.MemoryPool;
using System.Diagnostics;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class CircularBufferSinkCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.CircularBufferSink;
public uint EstimatedProcessingTime { get; set; }
public ushort[] Input { get; }
public uint InputCount { get; }
public ulong CircularBuffer { get; }
public ulong CircularBufferSize { get; }
public ulong CurrentOffset { get; }
public CircularBufferSinkCommand(uint bufferOffset, ref CircularBufferParameter parameter, ref AddressInfo circularBufferAddressInfo, uint currentOffset, int nodeId)
{
Enabled = true;
NodeId = nodeId;
Input = new ushort[Constants.ChannelCountMax];
InputCount = parameter.InputCount;
for (int i = 0; i < InputCount; i++)
{
Input[i] = (ushort)(bufferOffset + parameter.Input[i]);
}
CircularBuffer = circularBufferAddressInfo.GetReference(true);
CircularBufferSize = parameter.BufferSize;
CurrentOffset = currentOffset;
Debug.Assert(CircularBuffer != 0);
}
public void Process(CommandList context)
{
const int targetChannelCount = 2;
ulong currentOffset = CurrentOffset;
if (CircularBufferSize > 0)
{
for (int i = 0; i < InputCount; i++)
{
unsafe
{
float* inputBuffer = (float*)context.GetBufferPointer(Input[i]);
ulong targetOffset = CircularBuffer + currentOffset;
for (int y = 0; y < context.SampleCount; y++)
{
context.MemoryManager.Write(targetOffset + (ulong)y * targetChannelCount, PcmHelper.Saturate(inputBuffer[y]));
}
currentOffset += context.SampleCount * targetChannelCount;
if (currentOffset >= CircularBufferSize)
{
currentOffset = 0;
}
}
}
}
}
}
}
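
PcmHelper.Saturate is not included in this diff; conceptually it clamps a float sample into the signed 16-bit range before it is written to the circular buffer. One reasonable standalone variant (the exact rounding Ryujinx uses may differ):

using System;

static class SaturateSketch
{
    // Clamp a float sample to [-32768, 32767] and round to the nearest 16-bit value.
    public static short Saturate(float value)
    {
        float clamped = Math.Clamp(value, short.MinValue, short.MaxValue);

        return (short)MathF.Round(clamped);
    }
}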


@@ -0,0 +1,24 @@
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class ClearMixBufferCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.ClearMixBuffer;
public uint EstimatedProcessingTime { get; set; }
public ClearMixBufferCommand(int nodeId)
{
Enabled = true;
NodeId = nodeId;
}
public void Process(CommandList context)
{
context.ClearBuffers();
}
}
}


@@ -0,0 +1,155 @@
using Ryujinx.Audio.Integration;
using Ryujinx.Audio.Renderer.Server;
using Ryujinx.Common;
using Ryujinx.Common.Logging;
using Ryujinx.Memory;
using System;
using System.Buffers;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class CommandList : IDisposable
{
public ulong StartTime { get; private set; }
public ulong EndTime { get; private set; }
public uint SampleCount { get; }
public uint SampleRate { get; }
public Memory<float> Buffers { get; }
public uint BufferCount { get; }
public List<ICommand> Commands { get; }
public IVirtualMemoryManager MemoryManager { get; }
public IHardwareDevice OutputDevice { get; private set; }
private readonly int _sampleCount;
private readonly int _buffersEntryCount;
private readonly MemoryHandle _buffersMemoryHandle;
public CommandList(AudioRenderSystem renderSystem) : this(renderSystem.MemoryManager,
renderSystem.GetMixBuffer(),
renderSystem.GetSampleCount(),
renderSystem.GetSampleRate(),
renderSystem.GetMixBufferCount(),
renderSystem.GetVoiceChannelCountMax())
{
}
public CommandList(IVirtualMemoryManager memoryManager, Memory<float> mixBuffer, uint sampleCount, uint sampleRate, uint mixBufferCount, uint voiceChannelCountMax)
{
SampleCount = sampleCount;
_sampleCount = (int)SampleCount;
SampleRate = sampleRate;
BufferCount = mixBufferCount + voiceChannelCountMax;
Buffers = mixBuffer;
Commands = new List<ICommand>();
MemoryManager = memoryManager;
_buffersEntryCount = Buffers.Length;
_buffersMemoryHandle = Buffers.Pin();
}
public void AddCommand(ICommand command)
{
Commands.Add(command);
}
public void AddCommand<T>(T command) where T : unmanaged, ICommand
{
throw new NotImplementedException();
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public unsafe IntPtr GetBufferPointer(int index)
{
if (index >= 0 && index < _buffersEntryCount)
{
return (IntPtr)((float*)_buffersMemoryHandle.Pointer + index * _sampleCount);
}
throw new ArgumentOutOfRangeException();
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public unsafe void ClearBuffer(int index)
{
Unsafe.InitBlock((void*)GetBufferPointer(index), 0, SampleCount * sizeof(float));
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public unsafe void ClearBuffers()
{
Unsafe.InitBlock(_buffersMemoryHandle.Pointer, 0, (uint)_buffersEntryCount * sizeof(float));
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public unsafe void CopyBuffer(int outputBufferIndex, int inputBufferIndex)
{
Unsafe.CopyBlock((void*)GetBufferPointer(outputBufferIndex), (void*)GetBufferPointer(inputBufferIndex), SampleCount * sizeof(float));
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public Span<float> GetBuffer(int index)
{
if (index < 0 || index >= _buffersEntryCount)
{
return Span<float>.Empty;
}
unsafe
{
return new Span<float>((float*)_buffersMemoryHandle.Pointer + index * _sampleCount, _sampleCount);
}
}
public ulong GetTimeElapsedSinceDspStartedProcessing()
{
return (ulong)PerformanceCounter.ElapsedNanoseconds - StartTime;
}
public void Process(IHardwareDevice outputDevice)
{
OutputDevice = outputDevice;
StartTime = (ulong)PerformanceCounter.ElapsedNanoseconds;
foreach (ICommand command in Commands)
{
if (command.Enabled)
{
bool shouldMeter = command.ShouldMeter();
long startTime = 0;
if (shouldMeter)
{
startTime = PerformanceCounter.ElapsedNanoseconds;
}
command.Process(this);
if (shouldMeter)
{
ulong effectiveElapsedTime = (ulong)(PerformanceCounter.ElapsedNanoseconds - startTime);
if (effectiveElapsedTime > command.EstimatedProcessingTime)
{
Logger.Warning?.Print(LogClass.AudioRenderer, $"Command {command.GetType().Name} took {effectiveElapsedTime}ns (expected {command.EstimatedProcessingTime}ns)");
}
}
}
}
EndTime = (ulong)PerformanceCounter.ElapsedNanoseconds;
}
public void Dispose()
{
_buffersMemoryHandle.Dispose();
}
}
}
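
As a rough usage sketch (hypothetical values; in practice the renderer wires this up from AudioRenderSystem): the caller supplies a mix buffer large enough for (mixBufferCount + voiceChannelCountMax) * sampleCount floats, appends commands, and then drives the list against an output device.

using System;
using Ryujinx.Audio.Integration;
using Ryujinx.Audio.Renderer.Dsp.Command;
using Ryujinx.Memory;

static class CommandListUsageSketch
{
    // Hypothetical driver: builds a minimal command list and runs it once.
    public static void RunOnce(IVirtualMemoryManager memoryManager, IHardwareDevice device)
    {
        const uint sampleCount = 240;
        const uint mixBufferCount = 24;
        const uint voiceChannelCountMax = 6;

        Memory<float> mixBuffer = new float[(mixBufferCount + voiceChannelCountMax) * sampleCount];

        using CommandList commandList = new CommandList(memoryManager, mixBuffer,
            sampleCount, 48000, mixBufferCount, voiceChannelCountMax);

        // A real list also contains data source, mix, effect and sink commands.
        commandList.AddCommand(new ClearMixBufferCommand(nodeId: 0));

        commandList.Process(device);
    }
}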


@@ -0,0 +1,37 @@
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public enum CommandType : byte
{
Invalid,
PcmInt16DataSourceVersion1,
PcmInt16DataSourceVersion2,
PcmFloatDataSourceVersion1,
PcmFloatDataSourceVersion2,
AdpcmDataSourceVersion1,
AdpcmDataSourceVersion2,
Volume,
VolumeRamp,
BiquadFilter,
Mix,
MixRamp,
MixRampGrouped,
DepopPrepare,
DepopForMixBuffers,
Delay,
Upsample,
DownMixSurroundToStereo,
AuxiliaryBuffer,
DeviceSink,
CircularBufferSink,
Reverb,
Reverb3d,
Performance,
ClearMixBuffer,
CopyMixBuffer,
LimiterVersion1,
LimiterVersion2,
GroupedBiquadFilter,
CaptureBuffer,
Compressor
}
}


@@ -0,0 +1,173 @@
using Ryujinx.Audio.Renderer.Dsp.Effect;
using Ryujinx.Audio.Renderer.Dsp.State;
using Ryujinx.Audio.Renderer.Parameter.Effect;
using System;
using System.Diagnostics;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class CompressorCommand : ICommand
{
private const int FixedPointPrecision = 15;
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.Compressor;
public uint EstimatedProcessingTime { get; set; }
public CompressorParameter Parameter => _parameter;
public Memory<CompressorState> State { get; }
public ushort[] OutputBufferIndices { get; }
public ushort[] InputBufferIndices { get; }
public bool IsEffectEnabled { get; }
private CompressorParameter _parameter;
public CompressorCommand(uint bufferOffset, CompressorParameter parameter, Memory<CompressorState> state, bool isEnabled, int nodeId)
{
Enabled = true;
NodeId = nodeId;
_parameter = parameter;
State = state;
IsEffectEnabled = isEnabled;
InputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
OutputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
for (int i = 0; i < _parameter.ChannelCount; i++)
{
InputBufferIndices[i] = (ushort)(bufferOffset + _parameter.Input[i]);
OutputBufferIndices[i] = (ushort)(bufferOffset + _parameter.Output[i]);
}
}
public void Process(CommandList context)
{
ref CompressorState state = ref State.Span[0];
if (IsEffectEnabled)
{
if (_parameter.Status == Server.Effect.UsageState.Invalid)
{
state = new CompressorState(ref _parameter);
}
else if (_parameter.Status == Server.Effect.UsageState.New)
{
state.UpdateParameter(ref _parameter);
}
}
ProcessCompressor(context, ref state);
}
private unsafe void ProcessCompressor(CommandList context, ref CompressorState state)
{
Debug.Assert(_parameter.IsChannelCountValid());
if (IsEffectEnabled && _parameter.IsChannelCountValid())
{
Span<IntPtr> inputBuffers = stackalloc IntPtr[Parameter.ChannelCount];
Span<IntPtr> outputBuffers = stackalloc IntPtr[Parameter.ChannelCount];
Span<float> channelInput = stackalloc float[Parameter.ChannelCount];
ExponentialMovingAverage inputMovingAverage = state.InputMovingAverage;
float unknown4 = state.Unknown4;
ExponentialMovingAverage compressionGainAverage = state.CompressionGainAverage;
float previousCompressionEmaAlpha = state.PreviousCompressionEmaAlpha;
for (int i = 0; i < _parameter.ChannelCount; i++)
{
inputBuffers[i] = context.GetBufferPointer(InputBufferIndices[i]);
outputBuffers[i] = context.GetBufferPointer(OutputBufferIndices[i]);
}
for (int sampleIndex = 0; sampleIndex < context.SampleCount; sampleIndex++)
{
for (int channelIndex = 0; channelIndex < _parameter.ChannelCount; channelIndex++)
{
channelInput[channelIndex] = *((float*)inputBuffers[channelIndex] + sampleIndex);
}
float newMean = inputMovingAverage.Update(FloatingPointHelper.MeanSquare(channelInput), _parameter.InputGain);
float y = FloatingPointHelper.Log10(newMean) * 10.0f;
float z = 0.0f;
bool unknown10OutOfRange = false;
if (newMean < 1.0e-10f)
{
z = 1.0f;
unknown10OutOfRange = state.Unknown10 < -100.0f;
}
if (y >= state.Unknown10 || unknown10OutOfRange)
{
float tmpGain;
if (y >= state.Unknown14)
{
tmpGain = ((1.0f / Parameter.Ratio) - 1.0f) * (y - Parameter.Threshold);
}
else
{
tmpGain = (y - state.Unknown10) * ((y - state.Unknown10) * -state.CompressorGainReduction);
}
z = FloatingPointHelper.DecibelToLinearExtended(tmpGain);
}
float unknown4New = z;
float compressionEmaAlpha;
if ((unknown4 - z) <= 0.08f)
{
compressionEmaAlpha = Parameter.ReleaseCoefficient;
if ((unknown4 - z) >= -0.08f)
{
if (MathF.Abs(compressionGainAverage.Read() - z) >= 0.001f)
{
unknown4New = unknown4;
}
compressionEmaAlpha = previousCompressionEmaAlpha;
}
}
else
{
compressionEmaAlpha = Parameter.AttackCoefficient;
}
float compressionGain = compressionGainAverage.Update(z, compressionEmaAlpha);
for (int channelIndex = 0; channelIndex < Parameter.ChannelCount; channelIndex++)
{
*((float*)outputBuffers[channelIndex] + sampleIndex) = channelInput[channelIndex] * compressionGain * state.OutputGain;
}
unknown4 = unknown4New;
previousCompressionEmaAlpha = compressionEmaAlpha;
}
state.InputMovingAverage = inputMovingAverage;
state.Unknown4 = unknown4;
state.CompressionGainAverage = compressionGainAverage;
state.PreviousCompressionEmaAlpha = previousCompressionEmaAlpha;
}
else
{
for (int i = 0; i < Parameter.ChannelCount; i++)
{
if (InputBufferIndices[i] != OutputBufferIndices[i])
{
context.CopyBuffer(OutputBufferIndices[i], InputBufferIndices[i]);
}
}
}
}
}
}
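
The compressor's envelope tracking relies on ExponentialMovingAverage, which is not part of this diff. Independent of Ryujinx's type, a one-pole exponential moving average of the general shape used here can be sketched as:

// Illustrative one-pole smoother; field names and the exact update rule are assumptions.
struct EmaSketch
{
    private float _mean;

    public float Read() => _mean;

    // alpha in [0, 1]: values close to 1 track the input quickly (attack),
    // values close to 0 decay slowly (release).
    public float Update(float value, float alpha)
    {
        _mean += alpha * (value - _mean);

        return _mean;
    }
}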


@@ -0,0 +1,30 @@
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class CopyMixBufferCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.CopyMixBuffer;
public uint EstimatedProcessingTime { get; set; }
public ushort InputBufferIndex { get; }
public ushort OutputBufferIndex { get; }
public CopyMixBufferCommand(uint inputBufferIndex, uint outputBufferIndex, int nodeId)
{
Enabled = true;
NodeId = nodeId;
InputBufferIndex = (ushort)inputBufferIndex;
OutputBufferIndex = (ushort)outputBufferIndex;
}
public void Process(CommandList context)
{
context.CopyBuffer(OutputBufferIndex, InputBufferIndex);
}
}
}


@@ -0,0 +1,108 @@
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Renderer.Common;
using System;
using static Ryujinx.Audio.Renderer.Parameter.VoiceInParameter;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class DataSourceVersion2Command : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType { get; }
public uint EstimatedProcessingTime { get; set; }
public ushort OutputBufferIndex { get; }
public uint SampleRate { get; }
public float Pitch { get; }
public WaveBuffer[] WaveBuffers { get; }
public Memory<VoiceUpdateState> State { get; }
public ulong ExtraParameter { get; }
public ulong ExtraParameterSize { get; }
public uint ChannelIndex { get; }
public uint ChannelCount { get; }
public DecodingBehaviour DecodingBehaviour { get; }
public SampleFormat SampleFormat { get; }
public SampleRateConversionQuality SrcQuality { get; }
public DataSourceVersion2Command(ref Server.Voice.VoiceState serverState, Memory<VoiceUpdateState> state, ushort outputBufferIndex, ushort channelIndex, int nodeId)
{
Enabled = true;
NodeId = nodeId;
ChannelIndex = channelIndex;
ChannelCount = serverState.ChannelsCount;
SampleFormat = serverState.SampleFormat;
SrcQuality = serverState.SrcQuality;
CommandType = GetCommandTypeBySampleFormat(SampleFormat);
OutputBufferIndex = (ushort)(channelIndex + outputBufferIndex);
SampleRate = serverState.SampleRate;
Pitch = serverState.Pitch;
WaveBuffers = new WaveBuffer[Constants.VoiceWaveBufferCount];
for (int i = 0; i < WaveBuffers.Length; i++)
{
ref Server.Voice.WaveBuffer voiceWaveBuffer = ref serverState.WaveBuffers[i];
WaveBuffers[i] = voiceWaveBuffer.ToCommon(2);
}
if (SampleFormat == SampleFormat.Adpcm)
{
ExtraParameter = serverState.DataSourceStateAddressInfo.GetReference(true);
ExtraParameterSize = serverState.DataSourceStateAddressInfo.Size;
}
State = state;
DecodingBehaviour = serverState.DecodingBehaviour;
}
private static CommandType GetCommandTypeBySampleFormat(SampleFormat sampleFormat)
{
switch (sampleFormat)
{
case SampleFormat.Adpcm:
return CommandType.AdpcmDataSourceVersion2;
case SampleFormat.PcmInt16:
return CommandType.PcmInt16DataSourceVersion2;
case SampleFormat.PcmFloat:
return CommandType.PcmFloatDataSourceVersion2;
default:
throw new NotImplementedException($"{sampleFormat}");
}
}
public void Process(CommandList context)
{
Span<float> outputBuffer = context.GetBuffer(OutputBufferIndex);
DataSourceHelper.WaveBufferInformation info = new DataSourceHelper.WaveBufferInformation
{
SourceSampleRate = SampleRate,
SampleFormat = SampleFormat,
Pitch = Pitch,
DecodingBehaviour = DecodingBehaviour,
ExtraParameter = ExtraParameter,
ExtraParameterSize = ExtraParameterSize,
ChannelIndex = (int)ChannelIndex,
ChannelCount = (int)ChannelCount,
SrcQuality = SrcQuality
};
DataSourceHelper.ProcessWaveBuffers(context.MemoryManager, outputBuffer, ref info, WaveBuffers, ref State.Span[0], context.SampleRate, (int)context.SampleCount);
}
}
}


@@ -0,0 +1,280 @@
using Ryujinx.Audio.Renderer.Dsp.State;
using Ryujinx.Audio.Renderer.Parameter.Effect;
using Ryujinx.Audio.Renderer.Server.Effect;
using Ryujinx.Audio.Renderer.Utils.Math;
using System;
using System.Diagnostics;
using System.Numerics;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class DelayCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.Delay;
public uint EstimatedProcessingTime { get; set; }
public DelayParameter Parameter => _parameter;
public Memory<DelayState> State { get; }
public ulong WorkBuffer { get; }
public ushort[] OutputBufferIndices { get; }
public ushort[] InputBufferIndices { get; }
public bool IsEffectEnabled { get; }
private DelayParameter _parameter;
private const int FixedPointPrecision = 14;
public DelayCommand(uint bufferOffset, DelayParameter parameter, Memory<DelayState> state, bool isEnabled, ulong workBuffer, int nodeId, bool newEffectChannelMappingSupported)
{
Enabled = true;
NodeId = nodeId;
_parameter = parameter;
State = state;
WorkBuffer = workBuffer;
IsEffectEnabled = isEnabled;
InputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
OutputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
for (int i = 0; i < Parameter.ChannelCount; i++)
{
InputBufferIndices[i] = (ushort)(bufferOffset + Parameter.Input[i]);
OutputBufferIndices[i] = (ushort)(bufferOffset + Parameter.Output[i]);
}
DataSourceHelper.RemapLegacyChannelEffectMappingToChannelResourceMapping(newEffectChannelMappingSupported, InputBufferIndices);
DataSourceHelper.RemapLegacyChannelEffectMappingToChannelResourceMapping(newEffectChannelMappingSupported, OutputBufferIndices);
}
[MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
private unsafe void ProcessDelayMono(ref DelayState state, float* outputBuffer, float* inputBuffer, uint sampleCount)
{
const ushort channelCount = 1;
float feedbackGain = FixedPointHelper.ToFloat(Parameter.FeedbackGain, FixedPointPrecision);
float inGain = FixedPointHelper.ToFloat(Parameter.InGain, FixedPointPrecision);
float dryGain = FixedPointHelper.ToFloat(Parameter.DryGain, FixedPointPrecision);
float outGain = FixedPointHelper.ToFloat(Parameter.OutGain, FixedPointPrecision);
for (int i = 0; i < sampleCount; i++)
{
float input = inputBuffer[i] * 64;
float delayLineValue = state.DelayLines[0].Read();
float temp = input * inGain + delayLineValue * feedbackGain;
state.UpdateLowPassFilter(ref temp, channelCount);
outputBuffer[i] = (input * dryGain + delayLineValue * outGain) / 64;
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
private unsafe void ProcessDelayStereo(ref DelayState state, Span<IntPtr> outputBuffers, ReadOnlySpan<IntPtr> inputBuffers, uint sampleCount)
{
const ushort channelCount = 2;
float delayFeedbackBaseGain = state.DelayFeedbackBaseGain;
float delayFeedbackCrossGain = state.DelayFeedbackCrossGain;
float inGain = FixedPointHelper.ToFloat(Parameter.InGain, FixedPointPrecision);
float dryGain = FixedPointHelper.ToFloat(Parameter.DryGain, FixedPointPrecision);
float outGain = FixedPointHelper.ToFloat(Parameter.OutGain, FixedPointPrecision);
Matrix2x2 delayFeedback = new Matrix2x2(delayFeedbackBaseGain, delayFeedbackCrossGain,
delayFeedbackCrossGain, delayFeedbackBaseGain);
for (int i = 0; i < sampleCount; i++)
{
Vector2 channelInput = new Vector2
{
X = *((float*)inputBuffers[0] + i) * 64,
Y = *((float*)inputBuffers[1] + i) * 64,
};
Vector2 delayLineValues = new Vector2()
{
X = state.DelayLines[0].Read(),
Y = state.DelayLines[1].Read(),
};
Vector2 temp = MatrixHelper.Transform(ref delayLineValues, ref delayFeedback) + channelInput * inGain;
state.UpdateLowPassFilter(ref Unsafe.As<Vector2, float>(ref temp), channelCount);
*((float*)outputBuffers[0] + i) = (channelInput.X * dryGain + delayLineValues.X * outGain) / 64;
*((float*)outputBuffers[1] + i) = (channelInput.Y * dryGain + delayLineValues.Y * outGain) / 64;
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
private unsafe void ProcessDelayQuadraphonic(ref DelayState state, Span<IntPtr> outputBuffers, ReadOnlySpan<IntPtr> inputBuffers, uint sampleCount)
{
const ushort channelCount = 4;
float delayFeedbackBaseGain = state.DelayFeedbackBaseGain;
float delayFeedbackCrossGain = state.DelayFeedbackCrossGain;
float inGain = FixedPointHelper.ToFloat(Parameter.InGain, FixedPointPrecision);
float dryGain = FixedPointHelper.ToFloat(Parameter.DryGain, FixedPointPrecision);
float outGain = FixedPointHelper.ToFloat(Parameter.OutGain, FixedPointPrecision);
Matrix4x4 delayFeedback = new Matrix4x4(delayFeedbackBaseGain, delayFeedbackCrossGain, delayFeedbackCrossGain, 0.0f,
delayFeedbackCrossGain, delayFeedbackBaseGain, 0.0f, delayFeedbackCrossGain,
delayFeedbackCrossGain, 0.0f, delayFeedbackBaseGain, delayFeedbackCrossGain,
0.0f, delayFeedbackCrossGain, delayFeedbackCrossGain, delayFeedbackBaseGain);
for (int i = 0; i < sampleCount; i++)
{
Vector4 channelInput = new Vector4
{
X = *((float*)inputBuffers[0] + i) * 64,
Y = *((float*)inputBuffers[1] + i) * 64,
Z = *((float*)inputBuffers[2] + i) * 64,
W = *((float*)inputBuffers[3] + i) * 64
};
Vector4 delayLineValues = new Vector4()
{
X = state.DelayLines[0].Read(),
Y = state.DelayLines[1].Read(),
Z = state.DelayLines[2].Read(),
W = state.DelayLines[3].Read()
};
Vector4 temp = MatrixHelper.Transform(ref delayLineValues, ref delayFeedback) + channelInput * inGain;
state.UpdateLowPassFilter(ref Unsafe.As<Vector4, float>(ref temp), channelCount);
*((float*)outputBuffers[0] + i) = (channelInput.X * dryGain + delayLineValues.X * outGain) / 64;
*((float*)outputBuffers[1] + i) = (channelInput.Y * dryGain + delayLineValues.Y * outGain) / 64;
*((float*)outputBuffers[2] + i) = (channelInput.Z * dryGain + delayLineValues.Z * outGain) / 64;
*((float*)outputBuffers[3] + i) = (channelInput.W * dryGain + delayLineValues.W * outGain) / 64;
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
private unsafe void ProcessDelaySurround(ref DelayState state, Span<IntPtr> outputBuffers, ReadOnlySpan<IntPtr> inputBuffers, uint sampleCount)
{
const ushort channelCount = 6;
float feedbackGain = FixedPointHelper.ToFloat(Parameter.FeedbackGain, FixedPointPrecision);
float delayFeedbackBaseGain = state.DelayFeedbackBaseGain;
float delayFeedbackCrossGain = state.DelayFeedbackCrossGain;
float inGain = FixedPointHelper.ToFloat(Parameter.InGain, FixedPointPrecision);
float dryGain = FixedPointHelper.ToFloat(Parameter.DryGain, FixedPointPrecision);
float outGain = FixedPointHelper.ToFloat(Parameter.OutGain, FixedPointPrecision);
Matrix6x6 delayFeedback = new Matrix6x6(delayFeedbackBaseGain, 0.0f, delayFeedbackCrossGain, 0.0f, delayFeedbackCrossGain, 0.0f,
0.0f, delayFeedbackBaseGain, delayFeedbackCrossGain, 0.0f, 0.0f, delayFeedbackCrossGain,
delayFeedbackCrossGain, delayFeedbackCrossGain, delayFeedbackBaseGain, 0.0f, 0.0f, 0.0f,
0.0f, 0.0f, 0.0f, feedbackGain, 0.0f, 0.0f,
delayFeedbackCrossGain, 0.0f, 0.0f, 0.0f, delayFeedbackBaseGain, delayFeedbackCrossGain,
0.0f, delayFeedbackCrossGain, 0.0f, 0.0f, delayFeedbackCrossGain, delayFeedbackBaseGain);
for (int i = 0; i < sampleCount; i++)
{
Vector6 channelInput = new Vector6
{
X = *((float*)inputBuffers[0] + i) * 64,
Y = *((float*)inputBuffers[1] + i) * 64,
Z = *((float*)inputBuffers[2] + i) * 64,
W = *((float*)inputBuffers[3] + i) * 64,
V = *((float*)inputBuffers[4] + i) * 64,
U = *((float*)inputBuffers[5] + i) * 64
};
Vector6 delayLineValues = new Vector6
{
X = state.DelayLines[0].Read(),
Y = state.DelayLines[1].Read(),
Z = state.DelayLines[2].Read(),
W = state.DelayLines[3].Read(),
V = state.DelayLines[4].Read(),
U = state.DelayLines[5].Read()
};
Vector6 temp = MatrixHelper.Transform(ref delayLineValues, ref delayFeedback) + channelInput * inGain;
state.UpdateLowPassFilter(ref Unsafe.As<Vector6, float>(ref temp), channelCount);
*((float*)outputBuffers[0] + i) = (channelInput.X * dryGain + delayLineValues.X * outGain) / 64;
*((float*)outputBuffers[1] + i) = (channelInput.Y * dryGain + delayLineValues.Y * outGain) / 64;
*((float*)outputBuffers[2] + i) = (channelInput.Z * dryGain + delayLineValues.Z * outGain) / 64;
*((float*)outputBuffers[3] + i) = (channelInput.W * dryGain + delayLineValues.W * outGain) / 64;
*((float*)outputBuffers[4] + i) = (channelInput.V * dryGain + delayLineValues.V * outGain) / 64;
*((float*)outputBuffers[5] + i) = (channelInput.U * dryGain + delayLineValues.U * outGain) / 64;
}
}
private unsafe void ProcessDelay(CommandList context, ref DelayState state)
{
Debug.Assert(Parameter.IsChannelCountValid());
if (IsEffectEnabled && Parameter.IsChannelCountValid())
{
Span<IntPtr> inputBuffers = stackalloc IntPtr[Parameter.ChannelCount];
Span<IntPtr> outputBuffers = stackalloc IntPtr[Parameter.ChannelCount];
for (int i = 0; i < Parameter.ChannelCount; i++)
{
inputBuffers[i] = context.GetBufferPointer(InputBufferIndices[i]);
outputBuffers[i] = context.GetBufferPointer(OutputBufferIndices[i]);
}
switch (Parameter.ChannelCount)
{
case 1:
ProcessDelayMono(ref state, (float*)outputBuffers[0], (float*)inputBuffers[0], context.SampleCount);
break;
case 2:
ProcessDelayStereo(ref state, outputBuffers, inputBuffers, context.SampleCount);
break;
case 4:
ProcessDelayQuadraphonic(ref state, outputBuffers, inputBuffers, context.SampleCount);
break;
case 6:
ProcessDelaySurround(ref state, outputBuffers, inputBuffers, context.SampleCount);
break;
default:
throw new NotImplementedException(Parameter.ChannelCount.ToString());
}
}
else
{
for (int i = 0; i < Parameter.ChannelCount; i++)
{
if (InputBufferIndices[i] != OutputBufferIndices[i])
{
context.CopyBuffer(OutputBufferIndices[i], InputBufferIndices[i]);
}
}
}
}
public void Process(CommandList context)
{
ref DelayState state = ref State.Span[0];
if (IsEffectEnabled)
{
if (Parameter.Status == UsageState.Invalid)
{
state = new DelayState(ref _parameter, WorkBuffer);
}
else if (Parameter.Status == UsageState.New)
{
state.UpdateParameter(ref _parameter);
}
}
ProcessDelay(context, ref state);
}
}
}
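
In ProcessDelayStereo the Matrix2x2 feedback keeps DelayFeedbackBaseGain of each channel's own delay line and cross-feeds DelayFeedbackCrossGain of the other channel's. Written out without the matrix helper (illustrative only):

using System.Numerics;

static class DelayFeedbackSketch
{
    // Expanded form of the symmetric 2x2 feedback transform used in ProcessDelayStereo.
    public static Vector2 Feedback(Vector2 delayLineValues, float baseGain, float crossGain)
    {
        return new Vector2(
            baseGain * delayLineValues.X + crossGain * delayLineValues.Y,
            crossGain * delayLineValues.X + baseGain * delayLineValues.Y);
    }
}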


@@ -0,0 +1,92 @@
using System;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class DepopForMixBuffersCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.DepopForMixBuffers;
public uint EstimatedProcessingTime { get; set; }
public uint MixBufferOffset { get; }
public uint MixBufferCount { get; }
public float Decay { get; }
public Memory<float> DepopBuffer { get; }
public DepopForMixBuffersCommand(Memory<float> depopBuffer, uint bufferOffset, uint mixBufferCount, int nodeId, uint sampleRate)
{
Enabled = true;
NodeId = nodeId;
MixBufferOffset = bufferOffset;
MixBufferCount = mixBufferCount;
DepopBuffer = depopBuffer;
if (sampleRate == 48000)
{
Decay = 0.962189f;
}
else // if (sampleRate == 32000)
{
Decay = 0.943695f;
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private unsafe float ProcessDepopMix(float* buffer, float depopValue, uint sampleCount)
{
if (depopValue < 0)
{
depopValue = -depopValue;
for (int i = 0; i < sampleCount; i++)
{
depopValue = FloatingPointHelper.MultiplyRoundDown(Decay, depopValue);
buffer[i] -= depopValue;
}
return -depopValue;
}
else
{
for (int i = 0; i < sampleCount; i++)
{
depopValue = FloatingPointHelper.MultiplyRoundDown(Decay, depopValue);
buffer[i] += depopValue;
}
return depopValue;
}
}
public void Process(CommandList context)
{
Span<float> depopBuffer = DepopBuffer.Span;
uint bufferCount = Math.Min(MixBufferOffset + MixBufferCount, context.BufferCount);
for (int i = (int)MixBufferOffset; i < bufferCount; i++)
{
float depopValue = depopBuffer[i];
if (depopValue != 0)
{
unsafe
{
float* buffer = (float*)context.GetBufferPointer(i);
depopBuffer[i] = ProcessDepopMix(buffer, depopValue, context.SampleCount);
}
}
}
}
}
}


@@ -0,0 +1,57 @@
using Ryujinx.Audio.Renderer.Common;
using System;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class DepopPrepareCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.DepopPrepare;
public uint EstimatedProcessingTime { get; set; }
public uint MixBufferCount { get; }
public ushort[] OutputBufferIndices { get; }
public Memory<VoiceUpdateState> State { get; }
public Memory<float> DepopBuffer { get; }
public DepopPrepareCommand(Memory<VoiceUpdateState> state, Memory<float> depopBuffer, uint mixBufferCount, uint bufferOffset, int nodeId, bool enabled)
{
Enabled = enabled;
NodeId = nodeId;
MixBufferCount = mixBufferCount;
OutputBufferIndices = new ushort[Constants.MixBufferCountMax];
for (int i = 0; i < Constants.MixBufferCountMax; i++)
{
OutputBufferIndices[i] = (ushort)(bufferOffset + i);
}
State = state;
DepopBuffer = depopBuffer;
}
public void Process(CommandList context)
{
ref VoiceUpdateState state = ref State.Span[0];
Span<float> depopBuffer = DepopBuffer.Span;
for (int i = 0; i < MixBufferCount; i++)
{
if (state.LastSamples[i] != 0)
{
depopBuffer[OutputBufferIndices[i]] += state.LastSamples[i];
state.LastSamples[i] = 0;
}
}
}
}
}


@@ -0,0 +1,91 @@
using Ryujinx.Audio.Integration;
using Ryujinx.Audio.Renderer.Server.Sink;
using System;
using System.Runtime.CompilerServices;
using System.Text;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class DeviceSinkCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.DeviceSink;
public uint EstimatedProcessingTime { get; set; }
public string DeviceName { get; }
public int SessionId { get; }
public uint InputCount { get; }
public ushort[] InputBufferIndices { get; }
public Memory<float> Buffers { get; }
public DeviceSinkCommand(uint bufferOffset, DeviceSink sink, int sessionId, Memory<float> buffers, int nodeId)
{
Enabled = true;
NodeId = nodeId;
DeviceName = Encoding.ASCII.GetString(sink.Parameter.DeviceName).TrimEnd('\0');
SessionId = sessionId;
InputCount = sink.Parameter.InputCount;
InputBufferIndices = new ushort[InputCount];
for (int i = 0; i < Math.Min(InputCount, Constants.ChannelCountMax); i++)
{
InputBufferIndices[i] = (ushort)(bufferOffset + sink.Parameter.Input[i]);
}
if (sink.UpsamplerState != null)
{
Buffers = sink.UpsamplerState.OutputBuffer;
}
else
{
Buffers = buffers;
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private Span<float> GetBuffer(int index, int sampleCount)
{
return Buffers.Span.Slice(index * sampleCount, sampleCount);
}
public void Process(CommandList context)
{
IHardwareDevice device = context.OutputDevice;
if (device.GetSampleRate() == Constants.TargetSampleRate)
{
int channelCount = (int)device.GetChannelCount();
uint bufferCount = Math.Min(device.GetChannelCount(), InputCount);
const int sampleCount = Constants.TargetSampleCount;
short[] outputBuffer = new short[bufferCount * sampleCount];
for (int i = 0; i < bufferCount; i++)
{
ReadOnlySpan<float> inputBuffer = GetBuffer(InputBufferIndices[i], sampleCount);
for (int j = 0; j < sampleCount; j++)
{
outputBuffer[i + j * channelCount] = PcmHelper.Saturate(inputBuffer[j]);
}
}
device.AppendBuffer(outputBuffer, InputCount);
}
else
{
// TODO: support resampling for devices that only support a sample rate other than the target rate
throw new NotImplementedException();
}
}
}
}
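
The inner loops above turn the renderer's planar per-channel buffers into the interleaved layout the output device consumes (L R L R ... for stereo). A standalone sketch of that interleaving step (hypothetical helper):

using System;

static class InterleaveSketch
{
    // Writes channels[c][s] to output[s * channelCount + c].
    public static void Interleave(short[] output, ReadOnlySpan<short[]> channels, int sampleCount)
    {
        int channelCount = channels.Length;

        for (int c = 0; c < channelCount; c++)
        {
            for (int s = 0; s < sampleCount; s++)
            {
                output[c + s * channelCount] = channels[c][s];
            }
        }
    }
}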


@@ -0,0 +1,68 @@
using System;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class DownMixSurroundToStereoCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.DownMixSurroundToStereo;
public uint EstimatedProcessingTime { get; set; }
public ushort[] InputBufferIndices { get; }
public ushort[] OutputBufferIndices { get; }
public float[] Coefficients { get; }
public DownMixSurroundToStereoCommand(uint bufferOffset, Span<byte> inputBufferOffset, Span<byte> outputBufferOffset, float[] downMixParameter, int nodeId)
{
Enabled = true;
NodeId = nodeId;
InputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
OutputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
for (int i = 0; i < Constants.VoiceChannelCountMax; i++)
{
InputBufferIndices[i] = (ushort)(bufferOffset + inputBufferOffset[i]);
OutputBufferIndices[i] = (ushort)(bufferOffset + outputBufferOffset[i]);
}
Coefficients = downMixParameter;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static float DownMixSurroundToStereo(ReadOnlySpan<float> coefficients, float back, float lfe, float center, float front)
{
return FloatingPointHelper.RoundUp(coefficients[3] * back + coefficients[2] * lfe + coefficients[1] * center + coefficients[0] * front);
}
public void Process(CommandList context)
{
ReadOnlySpan<float> frontLeft = context.GetBuffer(InputBufferIndices[0]);
ReadOnlySpan<float> frontRight = context.GetBuffer(InputBufferIndices[1]);
ReadOnlySpan<float> frontCenter = context.GetBuffer(InputBufferIndices[2]);
ReadOnlySpan<float> lowFrequency = context.GetBuffer(InputBufferIndices[3]);
ReadOnlySpan<float> backLeft = context.GetBuffer(InputBufferIndices[4]);
ReadOnlySpan<float> backRight = context.GetBuffer(InputBufferIndices[5]);
Span<float> stereoLeft = context.GetBuffer(OutputBufferIndices[0]);
Span<float> stereoRight = context.GetBuffer(OutputBufferIndices[1]);
for (int i = 0; i < context.SampleCount; i++)
{
stereoLeft[i] = DownMixSurroundToStereo(Coefficients, backLeft[i], lowFrequency[i], frontCenter[i], frontLeft[i]);
stereoRight[i] = DownMixSurroundToStereo(Coefficients, backRight[i], lowFrequency[i], frontCenter[i], frontRight[i]);
}
context.ClearBuffer(OutputBufferIndices[2]);
context.ClearBuffer(OutputBufferIndices[3]);
context.ClearBuffer(OutputBufferIndices[4]);
context.ClearBuffer(OutputBufferIndices[5]);
}
}
}
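
Each stereo output sample above is a weighted sum of four surround inputs, with Coefficients[0..3] applied to the front, center, LFE and back contributions respectively (the real command also rounds the sum up via FloatingPointHelper.RoundUp). Spelled out for the left channel, with the right channel as its mirror image:

using System;

static class DownMixSketch
{
    // stereoLeft = c0 * frontLeft + c1 * frontCenter + c2 * lowFrequency + c3 * backLeft
    public static float DownMixLeft(ReadOnlySpan<float> coefficients, float frontLeft, float frontCenter, float lowFrequency, float backLeft)
    {
        return coefficients[0] * frontLeft
            + coefficients[1] * frontCenter
            + coefficients[2] * lowFrequency
            + coefficients[3] * backLeft;
    }
}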


@@ -0,0 +1,62 @@
using Ryujinx.Audio.Renderer.Dsp.State;
using Ryujinx.Audio.Renderer.Parameter;
using System;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class GroupedBiquadFilterCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.GroupedBiquadFilter;
public uint EstimatedProcessingTime { get; set; }
private BiquadFilterParameter[] _parameters;
private Memory<BiquadFilterState> _biquadFilterStates;
private int _inputBufferIndex;
private int _outputBufferIndex;
private bool[] _isInitialized;
public GroupedBiquadFilterCommand(int baseIndex, ReadOnlySpan<BiquadFilterParameter> filters, Memory<BiquadFilterState> biquadFilterStateMemory, int inputBufferOffset, int outputBufferOffset, ReadOnlySpan<bool> isInitialized, int nodeId)
{
_parameters = filters.ToArray();
_biquadFilterStates = biquadFilterStateMemory;
_inputBufferIndex = baseIndex + inputBufferOffset;
_outputBufferIndex = baseIndex + outputBufferOffset;
_isInitialized = isInitialized.ToArray();
Enabled = true;
NodeId = nodeId;
}
public void Process(CommandList context)
{
Span<BiquadFilterState> states = _biquadFilterStates.Span;
ReadOnlySpan<float> inputBuffer = context.GetBuffer(_inputBufferIndex);
Span<float> outputBuffer = context.GetBuffer(_outputBufferIndex);
for (int i = 0; i < _parameters.Length; i++)
{
if (!_isInitialized[i])
{
states[i] = new BiquadFilterState();
}
}
// NOTE: Nintendo only implements single and double biquad filters, with no generic path, even though the command definition suggests one could exist.
// As such, for simplicity, we currently handle the double biquad case through a generic path.
if (_parameters.Length == 1)
{
BiquadFilterHelper.ProcessBiquadFilter(ref _parameters[0], ref states[0], outputBuffer, inputBuffer, context.SampleCount);
}
else
{
BiquadFilterHelper.ProcessBiquadFilter(_parameters, states, outputBuffer, inputBuffer, context.SampleCount);
}
}
}
}


@@ -0,0 +1,20 @@
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public interface ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType { get; }
public uint EstimatedProcessingTime { get; }
public void Process(CommandList context);
public bool ShouldMeter()
{
return false;
}
}
}


@@ -0,0 +1,144 @@
using Ryujinx.Audio.Renderer.Dsp.State;
using Ryujinx.Audio.Renderer.Parameter.Effect;
using System;
using System.Diagnostics;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class LimiterCommandVersion1 : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.LimiterVersion1;
public uint EstimatedProcessingTime { get; set; }
public LimiterParameter Parameter => _parameter;
public Memory<LimiterState> State { get; }
public ulong WorkBuffer { get; }
public ushort[] OutputBufferIndices { get; }
public ushort[] InputBufferIndices { get; }
public bool IsEffectEnabled { get; }
private LimiterParameter _parameter;
public LimiterCommandVersion1(uint bufferOffset, LimiterParameter parameter, Memory<LimiterState> state, bool isEnabled, ulong workBuffer, int nodeId)
{
Enabled = true;
NodeId = nodeId;
_parameter = parameter;
State = state;
WorkBuffer = workBuffer;
IsEffectEnabled = isEnabled;
InputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
OutputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
for (int i = 0; i < Parameter.ChannelCount; i++)
{
InputBufferIndices[i] = (ushort)(bufferOffset + Parameter.Input[i]);
OutputBufferIndices[i] = (ushort)(bufferOffset + Parameter.Output[i]);
}
}
public void Process(CommandList context)
{
ref LimiterState state = ref State.Span[0];
if (IsEffectEnabled)
{
if (Parameter.Status == Server.Effect.UsageState.Invalid)
{
state = new LimiterState(ref _parameter, WorkBuffer);
}
else if (Parameter.Status == Server.Effect.UsageState.New)
{
state.UpdateParameter(ref _parameter);
}
}
ProcessLimiter(context, ref state);
}
private unsafe void ProcessLimiter(CommandList context, ref LimiterState state)
{
Debug.Assert(Parameter.IsChannelCountValid());
if (IsEffectEnabled && Parameter.IsChannelCountValid())
{
Span<IntPtr> inputBuffers = stackalloc IntPtr[Parameter.ChannelCount];
Span<IntPtr> outputBuffers = stackalloc IntPtr[Parameter.ChannelCount];
for (int i = 0; i < Parameter.ChannelCount; i++)
{
inputBuffers[i] = context.GetBufferPointer(InputBufferIndices[i]);
outputBuffers[i] = context.GetBufferPointer(OutputBufferIndices[i]);
}
for (int channelIndex = 0; channelIndex < Parameter.ChannelCount; channelIndex++)
{
for (int sampleIndex = 0; sampleIndex < context.SampleCount; sampleIndex++)
{
float rawInputSample = *((float*)inputBuffers[channelIndex] + sampleIndex);
float inputSample = (rawInputSample / short.MaxValue) * Parameter.InputGain;
float sampleInputMax = Math.Abs(inputSample);
float inputCoefficient = Parameter.ReleaseCoefficient;
if (sampleInputMax > state.DetectorAverage[channelIndex].Read())
{
inputCoefficient = Parameter.AttackCoefficient;
}
float detectorValue = state.DetectorAverage[channelIndex].Update(sampleInputMax, inputCoefficient);
float attenuation = 1.0f;
if (detectorValue > Parameter.Threshold)
{
attenuation = Parameter.Threshold / detectorValue;
}
float outputCoefficient = Parameter.ReleaseCoefficient;
if (state.CompressionGainAverage[channelIndex].Read() > attenuation)
{
outputCoefficient = Parameter.AttackCoefficient;
}
float compressionGain = state.CompressionGainAverage[channelIndex].Update(attenuation, outputCoefficient);
ref float delayedSample = ref state.DelayedSampleBuffer[channelIndex * Parameter.DelayBufferSampleCountMax + state.DelayedSampleBufferPosition[channelIndex]];
float outputSample = delayedSample * compressionGain * Parameter.OutputGain;
*((float*)outputBuffers[channelIndex] + sampleIndex) = outputSample * short.MaxValue;
delayedSample = inputSample;
state.DelayedSampleBufferPosition[channelIndex]++;
while (state.DelayedSampleBufferPosition[channelIndex] >= Parameter.DelayBufferSampleCountMin)
{
state.DelayedSampleBufferPosition[channelIndex] -= Parameter.DelayBufferSampleCountMin;
}
}
}
}
else
{
for (int i = 0; i < Parameter.ChannelCount; i++)
{
if (InputBufferIndices[i] != OutputBufferIndices[i])
{
context.CopyBuffer(OutputBufferIndices[i], InputBufferIndices[i]);
}
}
}
}
}
}


@@ -0,0 +1,163 @@
using Ryujinx.Audio.Renderer.Dsp.State;
using Ryujinx.Audio.Renderer.Parameter;
using Ryujinx.Audio.Renderer.Parameter.Effect;
using System;
using System.Diagnostics;
using System.Runtime.InteropServices;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class LimiterCommandVersion2 : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.LimiterVersion2;
public uint EstimatedProcessingTime { get; set; }
public LimiterParameter Parameter => _parameter;
public Memory<LimiterState> State { get; }
public Memory<EffectResultState> ResultState { get; }
public ulong WorkBuffer { get; }
public ushort[] OutputBufferIndices { get; }
public ushort[] InputBufferIndices { get; }
public bool IsEffectEnabled { get; }
private LimiterParameter _parameter;
public LimiterCommandVersion2(uint bufferOffset, LimiterParameter parameter, Memory<LimiterState> state, Memory<EffectResultState> resultState, bool isEnabled, ulong workBuffer, int nodeId)
{
Enabled = true;
NodeId = nodeId;
_parameter = parameter;
State = state;
ResultState = resultState;
WorkBuffer = workBuffer;
IsEffectEnabled = isEnabled;
InputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
OutputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
for (int i = 0; i < Parameter.ChannelCount; i++)
{
InputBufferIndices[i] = (ushort)(bufferOffset + Parameter.Input[i]);
OutputBufferIndices[i] = (ushort)(bufferOffset + Parameter.Output[i]);
}
}
public void Process(CommandList context)
{
ref LimiterState state = ref State.Span[0];
if (IsEffectEnabled)
{
if (Parameter.Status == Server.Effect.UsageState.Invalid)
{
state = new LimiterState(ref _parameter, WorkBuffer);
}
else if (Parameter.Status == Server.Effect.UsageState.New)
{
state.UpdateParameter(ref _parameter);
}
}
ProcessLimiter(context, ref state);
}
private unsafe void ProcessLimiter(CommandList context, ref LimiterState state)
{
Debug.Assert(Parameter.IsChannelCountValid());
if (IsEffectEnabled && Parameter.IsChannelCountValid())
{
if (!ResultState.IsEmpty && Parameter.StatisticsReset)
{
ref LimiterStatistics statistics = ref MemoryMarshal.Cast<byte, LimiterStatistics>(ResultState.Span[0].SpecificData)[0];
statistics.Reset();
}
Span<IntPtr> inputBuffers = stackalloc IntPtr[Parameter.ChannelCount];
Span<IntPtr> outputBuffers = stackalloc IntPtr[Parameter.ChannelCount];
for (int i = 0; i < Parameter.ChannelCount; i++)
{
inputBuffers[i] = context.GetBufferPointer(InputBufferIndices[i]);
outputBuffers[i] = context.GetBufferPointer(OutputBufferIndices[i]);
}
for (int channelIndex = 0; channelIndex < Parameter.ChannelCount; channelIndex++)
{
for (int sampleIndex = 0; sampleIndex < context.SampleCount; sampleIndex++)
{
float rawInputSample = *((float*)inputBuffers[channelIndex] + sampleIndex);
float inputSample = (rawInputSample / short.MaxValue) * Parameter.InputGain;
float sampleInputMax = Math.Abs(inputSample);
float inputCoefficient = Parameter.ReleaseCoefficient;
if (sampleInputMax > state.DetectorAverage[channelIndex].Read())
{
inputCoefficient = Parameter.AttackCoefficient;
}
float detectorValue = state.DetectorAverage[channelIndex].Update(sampleInputMax, inputCoefficient);
float attenuation = 1.0f;
if (detectorValue > Parameter.Threshold)
{
attenuation = Parameter.Threshold / detectorValue;
}
float outputCoefficient = Parameter.ReleaseCoefficient;
if (state.CompressionGainAverage[channelIndex].Read() > attenuation)
{
outputCoefficient = Parameter.AttackCoefficient;
}
float compressionGain = state.CompressionGainAverage[channelIndex].Update(attenuation, outputCoefficient);
ref float delayedSample = ref state.DelayedSampleBuffer[channelIndex * Parameter.DelayBufferSampleCountMax + state.DelayedSampleBufferPosition[channelIndex]];
float outputSample = delayedSample * compressionGain * Parameter.OutputGain;
*((float*)outputBuffers[channelIndex] + sampleIndex) = outputSample * short.MaxValue;
delayedSample = inputSample;
state.DelayedSampleBufferPosition[channelIndex]++;
while (state.DelayedSampleBufferPosition[channelIndex] >= Parameter.DelayBufferSampleCountMin)
{
state.DelayedSampleBufferPosition[channelIndex] -= Parameter.DelayBufferSampleCountMin;
}
if (!ResultState.IsEmpty)
{
ref LimiterStatistics statistics = ref MemoryMarshal.Cast<byte, LimiterStatistics>(ResultState.Span[0].SpecificData)[0];
statistics.InputMax[channelIndex] = Math.Max(statistics.InputMax[channelIndex], sampleInputMax);
statistics.CompressionGainMin[channelIndex] = Math.Min(statistics.CompressionGainMin[channelIndex], compressionGain);
}
}
}
}
else
{
for (int i = 0; i < Parameter.ChannelCount; i++)
{
if (InputBufferIndices[i] != OutputBufferIndices[i])
{
context.CopyBuffer(OutputBufferIndices[i], InputBufferIndices[i]);
}
}
}
}
}
}
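
For readers following the gain math in ProcessLimiter: the per-sample logic reduces to an attack/release envelope follower driving a threshold divider. The standalone sketch below restates it with the look-ahead delay buffer and statistics omitted, and it assumes the DetectorAverage and CompressionGainAverage helpers are one-pole smoothers of the form mean += coefficient * (value - mean); all names here are illustrative, not part of this commit.

using System;

static class LimiterSketch
{
    // Minimal restatement of the limiter gain computation above. The real command
    // applies the smoothed gain to a delayed (look-ahead) sample; this sketch applies
    // it to the current sample for brevity.
    public static float LimitSample(
        float inputSample,         // sample already scaled to [-1, 1] and input-gained
        float threshold,
        float attackCoefficient,
        float releaseCoefficient,
        ref float detectorAverage, // per-channel envelope state
        ref float gainAverage)     // per-channel smoothed gain state
    {
        float magnitude = Math.Abs(inputSample);

        // Envelope follower: attack when the signal rises above the envelope, release otherwise.
        float envelopeCoefficient = magnitude > detectorAverage ? attackCoefficient : releaseCoefficient;
        detectorAverage += envelopeCoefficient * (magnitude - detectorAverage);

        // Target attenuation: unity below the threshold, threshold / envelope above it.
        float attenuation = detectorAverage > threshold ? threshold / detectorAverage : 1.0f;

        // Smooth the gain with the same attack/release scheme (attack while the gain is falling).
        float gainCoefficient = gainAverage > attenuation ? attackCoefficient : releaseCoefficient;
        gainAverage += gainCoefficient * (attenuation - gainAverage);

        return inputSample * gainAverage;
    }
}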

View File

@@ -0,0 +1,137 @@
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.Arm;
using System.Runtime.Intrinsics.X86;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class MixCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.Mix;
public uint EstimatedProcessingTime { get; set; }
public ushort InputBufferIndex { get; }
public ushort OutputBufferIndex { get; }
public float Volume { get; }
public MixCommand(uint inputBufferIndex, uint outputBufferIndex, int nodeId, float volume)
{
Enabled = true;
NodeId = nodeId;
InputBufferIndex = (ushort)inputBufferIndex;
OutputBufferIndex = (ushort)outputBufferIndex;
Volume = volume;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessMixAvx(Span<float> outputMix, ReadOnlySpan<float> inputMix)
{
Vector256<float> volumeVec = Vector256.Create(Volume);
ReadOnlySpan<Vector256<float>> inputVec = MemoryMarshal.Cast<float, Vector256<float>>(inputMix);
Span<Vector256<float>> outputVec = MemoryMarshal.Cast<float, Vector256<float>>(outputMix);
int sisdStart = inputVec.Length * 8;
for (int i = 0; i < inputVec.Length; i++)
{
outputVec[i] = Avx.Add(outputVec[i], Avx.Ceiling(Avx.Multiply(inputVec[i], volumeVec)));
}
for (int i = sisdStart; i < inputMix.Length; i++)
{
outputMix[i] += FloatingPointHelper.MultiplyRoundUp(inputMix[i], Volume);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessMixSse41(Span<float> outputMix, ReadOnlySpan<float> inputMix)
{
Vector128<float> volumeVec = Vector128.Create(Volume);
ReadOnlySpan<Vector128<float>> inputVec = MemoryMarshal.Cast<float, Vector128<float>>(inputMix);
Span<Vector128<float>> outputVec = MemoryMarshal.Cast<float, Vector128<float>>(outputMix);
int sisdStart = inputVec.Length * 4;
for (int i = 0; i < inputVec.Length; i++)
{
outputVec[i] = Sse.Add(outputVec[i], Sse41.Ceiling(Sse.Multiply(inputVec[i], volumeVec)));
}
for (int i = sisdStart; i < inputMix.Length; i++)
{
outputMix[i] += FloatingPointHelper.MultiplyRoundUp(inputMix[i], Volume);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessMixAdvSimd(Span<float> outputMix, ReadOnlySpan<float> inputMix)
{
Vector128<float> volumeVec = Vector128.Create(Volume);
ReadOnlySpan<Vector128<float>> inputVec = MemoryMarshal.Cast<float, Vector128<float>>(inputMix);
Span<Vector128<float>> outputVec = MemoryMarshal.Cast<float, Vector128<float>>(outputMix);
int sisdStart = inputVec.Length * 4;
for (int i = 0; i < inputVec.Length; i++)
{
outputVec[i] = AdvSimd.Add(outputVec[i], AdvSimd.Ceiling(AdvSimd.Multiply(inputVec[i], volumeVec)));
}
for (int i = sisdStart; i < inputMix.Length; i++)
{
outputMix[i] += FloatingPointHelper.MultiplyRoundUp(inputMix[i], Volume);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessMixSlowPath(Span<float> outputMix, ReadOnlySpan<float> inputMix)
{
for (int i = 0; i < inputMix.Length; i++)
{
outputMix[i] += FloatingPointHelper.MultiplyRoundUp(inputMix[i], Volume);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessMix(Span<float> outputMix, ReadOnlySpan<float> inputMix)
{
if (Avx.IsSupported)
{
ProcessMixAvx(outputMix, inputMix);
}
else if (Sse41.IsSupported)
{
ProcessMixSse41(outputMix, inputMix);
}
else if (AdvSimd.IsSupported)
{
ProcessMixAdvSimd(outputMix, inputMix);
}
else
{
ProcessMixSlowPath(outputMix, inputMix);
}
}
public void Process(CommandList context)
{
ReadOnlySpan<float> inputBuffer = context.GetBuffer(InputBufferIndex);
Span<float> outputBuffer = context.GetBuffer(OutputBufferIndex);
ProcessMix(outputBuffer, inputBuffer);
}
}
}
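
The three vector paths and the scalar tail above compute the same thing: the output accumulates the ceiling of input times volume. As a point of reference, here is the scalar form on its own, assuming FloatingPointHelper.MultiplyRoundUp(a, b) is equivalent to MathF.Ceiling(a * b), which is what the hardware Ceiling intrinsics mirror.

using System;

static class MixSketch
{
    // Scalar reference for MixCommand: accumulate ceil(input * volume) into the output.
    public static void Mix(Span<float> output, ReadOnlySpan<float> input, float volume)
    {
        for (int i = 0; i < input.Length; i++)
        {
            output[i] += MathF.Ceiling(input[i] * volume);
        }
    }
}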

View File

@@ -0,0 +1,68 @@
using Ryujinx.Audio.Renderer.Common;
using System;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class MixRampCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.MixRamp;
public uint EstimatedProcessingTime { get; set; }
public ushort InputBufferIndex { get; }
public ushort OutputBufferIndex { get; }
public float Volume0 { get; }
public float Volume1 { get; }
public Memory<VoiceUpdateState> State { get; }
public int LastSampleIndex { get; }
public MixRampCommand(float volume0, float volume1, uint inputBufferIndex, uint outputBufferIndex, int lastSampleIndex, Memory<VoiceUpdateState> state, int nodeId)
{
Enabled = true;
NodeId = nodeId;
InputBufferIndex = (ushort)inputBufferIndex;
OutputBufferIndex = (ushort)outputBufferIndex;
Volume0 = volume0;
Volume1 = volume1;
State = state;
LastSampleIndex = lastSampleIndex;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private float ProcessMixRamp(Span<float> outputBuffer, ReadOnlySpan<float> inputBuffer, int sampleCount)
{
float ramp = (Volume1 - Volume0) / sampleCount;
float volume = Volume0;
float state = 0;
for (int i = 0; i < sampleCount; i++)
{
state = FloatingPointHelper.MultiplyRoundUp(inputBuffer[i], volume);
outputBuffer[i] += state;
volume += ramp;
}
return state;
}
public void Process(CommandList context)
{
ReadOnlySpan<float> inputBuffer = context.GetBuffer(InputBufferIndex);
Span<float> outputBuffer = context.GetBuffer(OutputBufferIndex);
State.Span[0].LastSamples[LastSampleIndex] = ProcessMixRamp(outputBuffer, inputBuffer, (int)context.SampleCount);
}
}
}
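
To make the ramp behaviour concrete, the sketch below walks one frame with hypothetical values (Volume0 = 0, Volume1 = 1, 240 samples): sample i is mixed at Volume0 + i * (Volume1 - Volume0) / sampleCount, so the frame ends one ramp step short of Volume1 and the next frame is expected to start from Volume1.

using System;

static class MixRampSketch
{
    public static void Main()
    {
        const float volume0 = 0.0f;   // hypothetical starting gain
        const float volume1 = 1.0f;   // hypothetical target gain
        const int sampleCount = 240;  // hypothetical samples per frame

        float ramp = (volume1 - volume0) / sampleCount;
        float volume = volume0;

        for (int i = 0; i < sampleCount; i++)
        {
            // Sample i would be mixed at this gain, then the gain advances by one ramp step.
            volume += ramp;
        }

        // After the loop the gain has reached volume1 (up to rounding), which is where
        // the next frame's ramp would begin.
        Console.WriteLine($"ramp step = {ramp}, gain after frame = {volume}");
    }
}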

View File

@@ -0,0 +1,91 @@
using Ryujinx.Audio.Renderer.Common;
using System;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class MixRampGroupedCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.MixRampGrouped;
public uint EstimatedProcessingTime { get; set; }
public uint MixBufferCount { get; }
public ushort[] InputBufferIndices { get; }
public ushort[] OutputBufferIndices { get; }
public float[] Volume0 { get; }
public float[] Volume1 { get; }
public Memory<VoiceUpdateState> State { get; }
public MixRampGroupedCommand(uint mixBufferCount, uint inputBufferIndex, uint outputBufferIndex, Span<float> volume0, Span<float> volume1, Memory<VoiceUpdateState> state, int nodeId)
{
Enabled = true;
MixBufferCount = mixBufferCount;
NodeId = nodeId;
InputBufferIndices = new ushort[Constants.MixBufferCountMax];
OutputBufferIndices = new ushort[Constants.MixBufferCountMax];
Volume0 = new float[Constants.MixBufferCountMax];
Volume1 = new float[Constants.MixBufferCountMax];
for (int i = 0; i < mixBufferCount; i++)
{
InputBufferIndices[i] = (ushort)inputBufferIndex;
OutputBufferIndices[i] = (ushort)(outputBufferIndex + i);
Volume0[i] = volume0[i];
Volume1[i] = volume1[i];
}
State = state;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private float ProcessMixRampGrouped(Span<float> outputBuffer, ReadOnlySpan<float> inputBuffer, float volume0, float volume1, int sampleCount)
{
float ramp = (volume1 - volume0) / sampleCount;
float volume = volume0;
float state = 0;
for (int i = 0; i < sampleCount; i++)
{
state = FloatingPointHelper.MultiplyRoundUp(inputBuffer[i], volume);
outputBuffer[i] += state;
volume += ramp;
}
return state;
}
public void Process(CommandList context)
{
for (int i = 0; i < MixBufferCount; i++)
{
ReadOnlySpan<float> inputBuffer = context.GetBuffer(InputBufferIndices[i]);
Span<float> outputBuffer = context.GetBuffer(OutputBufferIndices[i]);
float volume0 = Volume0[i];
float volume1 = Volume1[i];
ref VoiceUpdateState state = ref State.Span[0];
if (volume0 != 0 || volume1 != 0)
{
state.LastSamples[i] = ProcessMixRampGrouped(outputBuffer, inputBuffer, volume0, volume1, (int)context.SampleCount);
}
else
{
state.LastSamples[i] = 0;
}
}
}
}
}

View File

@@ -0,0 +1,74 @@
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Renderer.Common;
using System;
using static Ryujinx.Audio.Renderer.Parameter.VoiceInParameter;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class PcmFloatDataSourceCommandVersion1 : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.PcmFloatDataSourceVersion1;
public uint EstimatedProcessingTime { get; set; }
public ushort OutputBufferIndex { get; }
public uint SampleRate { get; }
public uint ChannelIndex { get; }
public uint ChannelCount { get; }
public float Pitch { get; }
public WaveBuffer[] WaveBuffers { get; }
public Memory<VoiceUpdateState> State { get; }
public DecodingBehaviour DecodingBehaviour { get; }
public PcmFloatDataSourceCommandVersion1(ref Server.Voice.VoiceState serverState, Memory<VoiceUpdateState> state, ushort outputBufferIndex, ushort channelIndex, int nodeId)
{
Enabled = true;
NodeId = nodeId;
OutputBufferIndex = (ushort)(channelIndex + outputBufferIndex);
SampleRate = serverState.SampleRate;
ChannelIndex = channelIndex;
ChannelCount = serverState.ChannelsCount;
Pitch = serverState.Pitch;
WaveBuffers = new WaveBuffer[Constants.VoiceWaveBufferCount];
for (int i = 0; i < WaveBuffers.Length; i++)
{
ref Server.Voice.WaveBuffer voiceWaveBuffer = ref serverState.WaveBuffers[i];
WaveBuffers[i] = voiceWaveBuffer.ToCommon(1);
}
State = state;
DecodingBehaviour = serverState.DecodingBehaviour;
}
public void Process(CommandList context)
{
Span<float> outputBuffer = context.GetBuffer(OutputBufferIndex);
DataSourceHelper.WaveBufferInformation info = new DataSourceHelper.WaveBufferInformation
{
SourceSampleRate = SampleRate,
SampleFormat = SampleFormat.PcmFloat,
Pitch = Pitch,
DecodingBehaviour = DecodingBehaviour,
ExtraParameter = 0,
ExtraParameterSize = 0,
ChannelIndex = (int)ChannelIndex,
ChannelCount = (int)ChannelCount,
};
DataSourceHelper.ProcessWaveBuffers(context.MemoryManager, outputBuffer, ref info, WaveBuffers, ref State.Span[0], context.SampleRate, (int)context.SampleCount);
}
}
}

View File

@@ -0,0 +1,74 @@
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Renderer.Common;
using System;
using static Ryujinx.Audio.Renderer.Parameter.VoiceInParameter;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class PcmInt16DataSourceCommandVersion1 : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.PcmInt16DataSourceVersion1;
public uint EstimatedProcessingTime { get; set; }
public ushort OutputBufferIndex { get; }
public uint SampleRate { get; }
public uint ChannelIndex { get; }
public uint ChannelCount { get; }
public float Pitch { get; }
public WaveBuffer[] WaveBuffers { get; }
public Memory<VoiceUpdateState> State { get; }
public DecodingBehaviour DecodingBehaviour { get; }
public PcmInt16DataSourceCommandVersion1(ref Server.Voice.VoiceState serverState, Memory<VoiceUpdateState> state, ushort outputBufferIndex, ushort channelIndex, int nodeId)
{
Enabled = true;
NodeId = nodeId;
OutputBufferIndex = (ushort)(channelIndex + outputBufferIndex);
SampleRate = serverState.SampleRate;
ChannelIndex = channelIndex;
ChannelCount = serverState.ChannelsCount;
Pitch = serverState.Pitch;
WaveBuffers = new WaveBuffer[Constants.VoiceWaveBufferCount];
for (int i = 0; i < WaveBuffers.Length; i++)
{
ref Server.Voice.WaveBuffer voiceWaveBuffer = ref serverState.WaveBuffers[i];
WaveBuffers[i] = voiceWaveBuffer.ToCommon(1);
}
State = state;
DecodingBehaviour = serverState.DecodingBehaviour;
}
public void Process(CommandList context)
{
Span<float> outputBuffer = context.GetBuffer(OutputBufferIndex);
DataSourceHelper.WaveBufferInformation info = new DataSourceHelper.WaveBufferInformation
{
SourceSampleRate = SampleRate,
SampleFormat = SampleFormat.PcmInt16,
Pitch = Pitch,
DecodingBehaviour = DecodingBehaviour,
ExtraParameter = 0,
ExtraParameterSize = 0,
ChannelIndex = (int)ChannelIndex,
ChannelCount = (int)ChannelCount,
};
DataSourceHelper.ProcessWaveBuffers(context.MemoryManager, outputBuffer, ref info, WaveBuffers, ref State.Span[0], context.SampleRate, (int)context.SampleCount);
}
}
}

View File

@@ -0,0 +1,47 @@
using Ryujinx.Audio.Renderer.Server.Performance;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class PerformanceCommand : ICommand
{
public enum Type
{
Invalid,
Start,
End
}
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.Performance;
public uint EstimatedProcessingTime { get; set; }
public PerformanceEntryAddresses PerformanceEntryAddresses { get; }
public Type PerformanceType { get; set; }
public PerformanceCommand(ref PerformanceEntryAddresses performanceEntryAddresses, Type performanceType, int nodeId)
{
Enabled = true;
PerformanceEntryAddresses = performanceEntryAddresses;
PerformanceType = performanceType;
NodeId = nodeId;
}
public void Process(CommandList context)
{
if (PerformanceType == Type.Start)
{
PerformanceEntryAddresses.SetStartTime(context.GetTimeElapsedSinceDspStartedProcessing());
}
else if (PerformanceType == Type.End)
{
PerformanceEntryAddresses.SetProcessingTime(context.GetTimeElapsedSinceDspStartedProcessing());
PerformanceEntryAddresses.IncrementEntryCount();
}
}
}
}
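
Conceptually, a Start/End pair brackets the commands of one node: Start stamps the entry with the DSP-relative time, End stamps it again and bumps the entry count, and the two stamps together presumably let the profiler work out how long the bracketed commands took. The sketch below illustrates only that pairing; Stopwatch stands in for GetTimeElapsedSinceDspStartedProcessing() and is not the renderer's API.

using System;
using System.Diagnostics;

static class PerformanceSketch
{
    public static void Main()
    {
        var dspClock = Stopwatch.StartNew();

        long startTime = dspClock.ElapsedTicks;   // what a Start command would record
        // ... the node's commands would run here ...
        long endTime = dspClock.ElapsedTicks;     // what an End command would record

        Console.WriteLine($"bracketed region took {endTime - startTime} ticks");
    }
}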

View File

@@ -0,0 +1,254 @@
using Ryujinx.Audio.Renderer.Dsp.State;
using Ryujinx.Audio.Renderer.Parameter.Effect;
using Ryujinx.Audio.Renderer.Server.Effect;
using System;
using System.Diagnostics;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class Reverb3dCommand : ICommand
{
private static readonly int[] OutputEarlyIndicesTableMono = new int[20] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
private static readonly int[] TargetEarlyDelayLineIndicesTableMono = new int[20] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19 };
private static readonly int[] TargetOutputFeedbackIndicesTableMono = new int[1] { 0 };
private static readonly int[] OutputEarlyIndicesTableStereo = new int[20] { 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1 };
private static readonly int[] TargetEarlyDelayLineIndicesTableStereo = new int[20] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19 };
private static readonly int[] TargetOutputFeedbackIndicesTableStereo = new int[2] { 0, 1 };
private static readonly int[] OutputEarlyIndicesTableQuadraphonic = new int[20] { 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 1, 1, 1, 0, 0, 0, 0, 3, 3, 3 };
private static readonly int[] TargetEarlyDelayLineIndicesTableQuadraphonic = new int[20] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19 };
private static readonly int[] TargetOutputFeedbackIndicesTableQuadraphonic = new int[4] { 0, 1, 2, 3 };
private static readonly int[] OutputEarlyIndicesTableSurround = new int[40] { 4, 5, 0, 5, 0, 5, 1, 5, 1, 5, 1, 5, 1, 5, 2, 5, 2, 5, 2, 5, 1, 5, 1, 5, 1, 5, 0, 5, 0, 5, 0, 5, 0, 5, 3, 5, 3, 5, 3, 5 };
private static readonly int[] TargetEarlyDelayLineIndicesTableSurround = new int[40] { 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, 14, 14, 15, 15, 16, 16, 17, 17, 18, 18, 19, 19 };
private static readonly int[] TargetOutputFeedbackIndicesTableSurround = new int[6] { 0, 1, 2, 3, -1, 3 };
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.Reverb3d;
public uint EstimatedProcessingTime { get; set; }
public ushort InputBufferIndex { get; }
public ushort OutputBufferIndex { get; }
public Reverb3dParameter Parameter => _parameter;
public Memory<Reverb3dState> State { get; }
public ulong WorkBuffer { get; }
public ushort[] OutputBufferIndices { get; }
public ushort[] InputBufferIndices { get; }
public bool IsEffectEnabled { get; }
private Reverb3dParameter _parameter;
public Reverb3dCommand(uint bufferOffset, Reverb3dParameter parameter, Memory<Reverb3dState> state, bool isEnabled, ulong workBuffer, int nodeId, bool newEffectChannelMappingSupported)
{
Enabled = true;
IsEffectEnabled = isEnabled;
NodeId = nodeId;
_parameter = parameter;
State = state;
WorkBuffer = workBuffer;
InputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
OutputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
for (int i = 0; i < Parameter.ChannelCount; i++)
{
InputBufferIndices[i] = (ushort)(bufferOffset + Parameter.Input[i]);
OutputBufferIndices[i] = (ushort)(bufferOffset + Parameter.Output[i]);
}
            // NOTE: We do the opposite of what Nintendo does here for now, to restore the previous behaviour.
// TODO: Update reverb 3d processing and remove this to use RemapLegacyChannelEffectMappingToChannelResourceMapping.
DataSourceHelper.RemapChannelResourceMappingToLegacy(newEffectChannelMappingSupported, InputBufferIndices);
DataSourceHelper.RemapChannelResourceMappingToLegacy(newEffectChannelMappingSupported, OutputBufferIndices);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessReverb3dMono(ref Reverb3dState state, ReadOnlySpan<IntPtr> outputBuffers, ReadOnlySpan<IntPtr> inputBuffers, uint sampleCount)
{
ProcessReverb3dGeneric(ref state, outputBuffers, inputBuffers, sampleCount, OutputEarlyIndicesTableMono, TargetEarlyDelayLineIndicesTableMono, TargetOutputFeedbackIndicesTableMono);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessReverb3dStereo(ref Reverb3dState state, ReadOnlySpan<IntPtr> outputBuffers, ReadOnlySpan<IntPtr> inputBuffers, uint sampleCount)
{
ProcessReverb3dGeneric(ref state, outputBuffers, inputBuffers, sampleCount, OutputEarlyIndicesTableStereo, TargetEarlyDelayLineIndicesTableStereo, TargetOutputFeedbackIndicesTableStereo);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessReverb3dQuadraphonic(ref Reverb3dState state, ReadOnlySpan<IntPtr> outputBuffers, ReadOnlySpan<IntPtr> inputBuffers, uint sampleCount)
{
ProcessReverb3dGeneric(ref state, outputBuffers, inputBuffers, sampleCount, OutputEarlyIndicesTableQuadraphonic, TargetEarlyDelayLineIndicesTableQuadraphonic, TargetOutputFeedbackIndicesTableQuadraphonic);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessReverb3dSurround(ref Reverb3dState state, ReadOnlySpan<IntPtr> outputBuffers, ReadOnlySpan<IntPtr> inputBuffers, uint sampleCount)
{
ProcessReverb3dGeneric(ref state, outputBuffers, inputBuffers, sampleCount, OutputEarlyIndicesTableSurround, TargetEarlyDelayLineIndicesTableSurround, TargetOutputFeedbackIndicesTableSurround);
}
private unsafe void ProcessReverb3dGeneric(ref Reverb3dState state, ReadOnlySpan<IntPtr> outputBuffers, ReadOnlySpan<IntPtr> inputBuffers, uint sampleCount, ReadOnlySpan<int> outputEarlyIndicesTable, ReadOnlySpan<int> targetEarlyDelayLineIndicesTable, ReadOnlySpan<int> targetOutputFeedbackIndicesTable)
{
const int delayLineSampleIndexOffset = 1;
bool isMono = Parameter.ChannelCount == 1;
bool isSurround = Parameter.ChannelCount == 6;
Span<float> outputValues = stackalloc float[Constants.ChannelCountMax];
Span<float> channelInput = stackalloc float[Parameter.ChannelCount];
Span<float> feedbackValues = stackalloc float[4];
Span<float> feedbackOutputValues = stackalloc float[4];
Span<float> values = stackalloc float[4];
for (int sampleIndex = 0; sampleIndex < sampleCount; sampleIndex++)
{
outputValues.Fill(0);
float tapOut = state.PreDelayLine.TapUnsafe(state.ReflectionDelayTime, delayLineSampleIndexOffset);
for (int i = 0; i < targetEarlyDelayLineIndicesTable.Length; i++)
{
int earlyDelayIndex = targetEarlyDelayLineIndicesTable[i];
int outputIndex = outputEarlyIndicesTable[i];
float tempTapOut = state.PreDelayLine.TapUnsafe(state.EarlyDelayTime[earlyDelayIndex], delayLineSampleIndexOffset);
outputValues[outputIndex] += tempTapOut * state.EarlyGain[earlyDelayIndex];
}
float targetPreDelayValue = 0;
for (int channelIndex = 0; channelIndex < Parameter.ChannelCount; channelIndex++)
{
channelInput[channelIndex] = *((float*)inputBuffers[channelIndex] + sampleIndex);
targetPreDelayValue += channelInput[channelIndex];
}
for (int i = 0; i < Parameter.ChannelCount; i++)
{
outputValues[i] *= state.EarlyReflectionsGain;
}
state.PreviousPreDelayValue = (targetPreDelayValue * state.TargetPreDelayGain) + (state.PreviousPreDelayValue * state.PreviousPreDelayGain);
state.PreDelayLine.Update(state.PreviousPreDelayValue);
for (int i = 0; i < state.FdnDelayLines.Length; i++)
{
float fdnValue = state.FdnDelayLines[i].Read();
float feedbackOutputValue = fdnValue * state.DecayDirectFdnGain[i] + state.PreviousFeedbackOutputDecayed[i];
state.PreviousFeedbackOutputDecayed[i] = (fdnValue * state.DecayCurrentFdnGain[i]) + (feedbackOutputValue * state.DecayCurrentOutputGain[i]);
feedbackOutputValues[i] = feedbackOutputValue;
}
feedbackValues[0] = feedbackOutputValues[2] + feedbackOutputValues[1];
feedbackValues[1] = -feedbackOutputValues[0] - feedbackOutputValues[3];
feedbackValues[2] = feedbackOutputValues[0] - feedbackOutputValues[3];
feedbackValues[3] = feedbackOutputValues[1] - feedbackOutputValues[2];
for (int i = 0; i < state.DecayDelays1.Length; i++)
{
float temp = state.DecayDelays1[i].Update(tapOut * state.LateReverbGain + feedbackValues[i]);
values[i] = state.DecayDelays2[i].Update(temp);
state.FdnDelayLines[i].Update(values[i]);
}
for (int channelIndex = 0; channelIndex < targetOutputFeedbackIndicesTable.Length; channelIndex++)
{
int targetOutputFeedbackIndex = targetOutputFeedbackIndicesTable[channelIndex];
if (targetOutputFeedbackIndex >= 0)
{
*((float*)outputBuffers[channelIndex] + sampleIndex) = (outputValues[channelIndex] + values[targetOutputFeedbackIndex] + channelInput[channelIndex] * state.DryGain);
}
}
if (isMono)
{
*((float*)outputBuffers[0] + sampleIndex) += values[1];
}
if (isSurround)
{
*((float*)outputBuffers[4] + sampleIndex) += (outputValues[4] + state.FrontCenterDelayLine.Update((values[2] - values[3]) * 0.5f) + channelInput[4] * state.DryGain);
}
}
}
public void ProcessReverb3d(CommandList context, ref Reverb3dState state)
{
Debug.Assert(Parameter.IsChannelCountValid());
if (IsEffectEnabled && Parameter.IsChannelCountValid())
{
Span<IntPtr> inputBuffers = stackalloc IntPtr[Parameter.ChannelCount];
Span<IntPtr> outputBuffers = stackalloc IntPtr[Parameter.ChannelCount];
for (int i = 0; i < Parameter.ChannelCount; i++)
{
inputBuffers[i] = context.GetBufferPointer(InputBufferIndices[i]);
outputBuffers[i] = context.GetBufferPointer(OutputBufferIndices[i]);
}
switch (Parameter.ChannelCount)
{
case 1:
ProcessReverb3dMono(ref state, outputBuffers, inputBuffers, context.SampleCount);
break;
case 2:
ProcessReverb3dStereo(ref state, outputBuffers, inputBuffers, context.SampleCount);
break;
case 4:
ProcessReverb3dQuadraphonic(ref state, outputBuffers, inputBuffers, context.SampleCount);
break;
case 6:
ProcessReverb3dSurround(ref state, outputBuffers, inputBuffers, context.SampleCount);
break;
default:
throw new NotImplementedException(Parameter.ChannelCount.ToString());
}
}
else
{
for (int i = 0; i < Parameter.ChannelCount; i++)
{
if (InputBufferIndices[i] != OutputBufferIndices[i])
{
context.CopyBuffer(OutputBufferIndices[i], InputBufferIndices[i]);
}
}
}
}
public void Process(CommandList context)
{
ref Reverb3dState state = ref State.Span[0];
if (IsEffectEnabled)
{
if (Parameter.ParameterStatus == UsageState.Invalid)
{
state = new Reverb3dState(ref _parameter, WorkBuffer);
}
else if (Parameter.ParameterStatus == UsageState.New)
{
state.UpdateParameter(ref _parameter);
}
}
ProcessReverb3d(context, ref state);
}
}
}
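
The four feedback values computed inline above (and again in ReverbCommand below) are a fixed 4x4 mixing matrix applied to the FDN delay-line outputs. Spelled out as a matrix-vector product, purely for clarity:

using System;

static class FdnFeedbackSketch
{
    // feedback[0] =  out[1] + out[2]
    // feedback[1] = -out[0] - out[3]
    // feedback[2] =  out[0] - out[3]
    // feedback[3] =  out[1] - out[2]
    private static readonly float[,] FeedbackMatrix =
    {
        {  0f,  1f,  1f,  0f },
        { -1f,  0f,  0f, -1f },
        {  1f,  0f,  0f, -1f },
        {  0f,  1f, -1f,  0f },
    };

    public static float[] Mix(ReadOnlySpan<float> fdnOutputs)
    {
        var feedback = new float[4];

        for (int row = 0; row < 4; row++)
        {
            for (int column = 0; column < 4; column++)
            {
                feedback[row] += FeedbackMatrix[row, column] * fdnOutputs[column];
            }
        }

        return feedback;
    }
}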

View File

@@ -0,0 +1,279 @@
using Ryujinx.Audio.Renderer.Dsp.State;
using Ryujinx.Audio.Renderer.Parameter.Effect;
using System;
using System.Diagnostics;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class ReverbCommand : ICommand
{
private static readonly int[] OutputEarlyIndicesTableMono = new int[10] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
private static readonly int[] TargetEarlyDelayLineIndicesTableMono = new int[10] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };
private static readonly int[] OutputIndicesTableMono = new int[4] { 0, 0, 0, 0 };
private static readonly int[] TargetOutputFeedbackIndicesTableMono = new int[4] { 0, 1, 2, 3 };
private static readonly int[] OutputEarlyIndicesTableStereo = new int[10] { 0, 0, 1, 1, 0, 1, 0, 0, 1, 1 };
private static readonly int[] TargetEarlyDelayLineIndicesTableStereo = new int[10] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };
private static readonly int[] OutputIndicesTableStereo = new int[4] { 0, 0, 1, 1 };
private static readonly int[] TargetOutputFeedbackIndicesTableStereo = new int[4] { 2, 0, 3, 1 };
private static readonly int[] OutputEarlyIndicesTableQuadraphonic = new int[10] { 0, 0, 1, 1, 0, 1, 2, 2, 3, 3 };
private static readonly int[] TargetEarlyDelayLineIndicesTableQuadraphonic = new int[10] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };
private static readonly int[] OutputIndicesTableQuadraphonic = new int[4] { 0, 1, 2, 3 };
private static readonly int[] TargetOutputFeedbackIndicesTableQuadraphonic = new int[4] { 0, 1, 2, 3 };
private static readonly int[] OutputEarlyIndicesTableSurround = new int[20] { 0, 5, 0, 5, 1, 5, 1, 5, 4, 5, 4, 5, 2, 5, 2, 5, 3, 5, 3, 5 };
private static readonly int[] TargetEarlyDelayLineIndicesTableSurround = new int[20] { 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9 };
private static readonly int[] OutputIndicesTableSurround = new int[Constants.ChannelCountMax] { 0, 1, 2, 3, 4, 5 };
private static readonly int[] TargetOutputFeedbackIndicesTableSurround = new int[Constants.ChannelCountMax] { 0, 1, 2, 3, -1, 3 };
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.Reverb;
public uint EstimatedProcessingTime { get; set; }
public ReverbParameter Parameter => _parameter;
public Memory<ReverbState> State { get; }
public ulong WorkBuffer { get; }
public ushort[] OutputBufferIndices { get; }
public ushort[] InputBufferIndices { get; }
public bool IsLongSizePreDelaySupported { get; }
public bool IsEffectEnabled { get; }
private ReverbParameter _parameter;
private const int FixedPointPrecision = 14;
public ReverbCommand(uint bufferOffset, ReverbParameter parameter, Memory<ReverbState> state, bool isEnabled, ulong workBuffer, int nodeId, bool isLongSizePreDelaySupported, bool newEffectChannelMappingSupported)
{
Enabled = true;
IsEffectEnabled = isEnabled;
NodeId = nodeId;
_parameter = parameter;
State = state;
WorkBuffer = workBuffer;
InputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
OutputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
for (int i = 0; i < Parameter.ChannelCount; i++)
{
InputBufferIndices[i] = (ushort)(bufferOffset + Parameter.Input[i]);
OutputBufferIndices[i] = (ushort)(bufferOffset + Parameter.Output[i]);
}
IsLongSizePreDelaySupported = isLongSizePreDelaySupported;
            // NOTE: We do the opposite of what Nintendo does here for now, to restore the previous behaviour.
// TODO: Update reverb processing and remove this to use RemapLegacyChannelEffectMappingToChannelResourceMapping.
DataSourceHelper.RemapChannelResourceMappingToLegacy(newEffectChannelMappingSupported, InputBufferIndices);
DataSourceHelper.RemapChannelResourceMappingToLegacy(newEffectChannelMappingSupported, OutputBufferIndices);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessReverbMono(ref ReverbState state, ReadOnlySpan<IntPtr> outputBuffers, ReadOnlySpan<IntPtr> inputBuffers, uint sampleCount)
{
ProcessReverbGeneric(ref state,
outputBuffers,
inputBuffers,
sampleCount,
OutputEarlyIndicesTableMono,
TargetEarlyDelayLineIndicesTableMono,
TargetOutputFeedbackIndicesTableMono,
OutputIndicesTableMono);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessReverbStereo(ref ReverbState state, ReadOnlySpan<IntPtr> outputBuffers, ReadOnlySpan<IntPtr> inputBuffers, uint sampleCount)
{
ProcessReverbGeneric(ref state,
outputBuffers,
inputBuffers,
sampleCount,
OutputEarlyIndicesTableStereo,
TargetEarlyDelayLineIndicesTableStereo,
TargetOutputFeedbackIndicesTableStereo,
OutputIndicesTableStereo);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessReverbQuadraphonic(ref ReverbState state, ReadOnlySpan<IntPtr> outputBuffers, ReadOnlySpan<IntPtr> inputBuffers, uint sampleCount)
{
ProcessReverbGeneric(ref state,
outputBuffers,
inputBuffers,
sampleCount,
OutputEarlyIndicesTableQuadraphonic,
TargetEarlyDelayLineIndicesTableQuadraphonic,
TargetOutputFeedbackIndicesTableQuadraphonic,
OutputIndicesTableQuadraphonic);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessReverbSurround(ref ReverbState state, ReadOnlySpan<IntPtr> outputBuffers, ReadOnlySpan<IntPtr> inputBuffers, uint sampleCount)
{
ProcessReverbGeneric(ref state,
outputBuffers,
inputBuffers,
sampleCount,
OutputEarlyIndicesTableSurround,
TargetEarlyDelayLineIndicesTableSurround,
TargetOutputFeedbackIndicesTableSurround,
OutputIndicesTableSurround);
}
private unsafe void ProcessReverbGeneric(ref ReverbState state, ReadOnlySpan<IntPtr> outputBuffers, ReadOnlySpan<IntPtr> inputBuffers, uint sampleCount, ReadOnlySpan<int> outputEarlyIndicesTable, ReadOnlySpan<int> targetEarlyDelayLineIndicesTable, ReadOnlySpan<int> targetOutputFeedbackIndicesTable, ReadOnlySpan<int> outputIndicesTable)
{
bool isSurround = Parameter.ChannelCount == 6;
float reverbGain = FixedPointHelper.ToFloat(Parameter.ReverbGain, FixedPointPrecision);
float lateGain = FixedPointHelper.ToFloat(Parameter.LateGain, FixedPointPrecision);
float outGain = FixedPointHelper.ToFloat(Parameter.OutGain, FixedPointPrecision);
float dryGain = FixedPointHelper.ToFloat(Parameter.DryGain, FixedPointPrecision);
Span<float> outputValues = stackalloc float[Constants.ChannelCountMax];
Span<float> feedbackValues = stackalloc float[4];
Span<float> feedbackOutputValues = stackalloc float[4];
Span<float> channelInput = stackalloc float[Parameter.ChannelCount];
for (int sampleIndex = 0; sampleIndex < sampleCount; sampleIndex++)
{
outputValues.Fill(0);
for (int i = 0; i < targetEarlyDelayLineIndicesTable.Length; i++)
{
int earlyDelayIndex = targetEarlyDelayLineIndicesTable[i];
int outputIndex = outputEarlyIndicesTable[i];
float tapOutput = state.PreDelayLine.TapUnsafe(state.EarlyDelayTime[earlyDelayIndex], 0);
outputValues[outputIndex] += tapOutput * state.EarlyGain[earlyDelayIndex];
}
if (isSurround)
{
outputValues[5] *= 0.2f;
}
float targetPreDelayValue = 0;
for (int channelIndex = 0; channelIndex < Parameter.ChannelCount; channelIndex++)
{
channelInput[channelIndex] = *((float*)inputBuffers[channelIndex] + sampleIndex) * 64;
targetPreDelayValue += channelInput[channelIndex] * reverbGain;
}
state.PreDelayLine.Update(targetPreDelayValue);
float lateValue = state.PreDelayLine.Tap(state.PreDelayLineDelayTime) * lateGain;
for (int i = 0; i < state.FdnDelayLines.Length; i++)
{
feedbackOutputValues[i] = state.FdnDelayLines[i].Read() * state.HighFrequencyDecayDirectGain[i] + state.PreviousFeedbackOutput[i] * state.HighFrequencyDecayPreviousGain[i];
state.PreviousFeedbackOutput[i] = feedbackOutputValues[i];
}
feedbackValues[0] = feedbackOutputValues[2] + feedbackOutputValues[1];
feedbackValues[1] = -feedbackOutputValues[0] - feedbackOutputValues[3];
feedbackValues[2] = feedbackOutputValues[0] - feedbackOutputValues[3];
feedbackValues[3] = feedbackOutputValues[1] - feedbackOutputValues[2];
for (int i = 0; i < state.FdnDelayLines.Length; i++)
{
feedbackOutputValues[i] = state.DecayDelays[i].Update(feedbackValues[i] + lateValue);
state.FdnDelayLines[i].Update(feedbackOutputValues[i]);
}
for (int i = 0; i < targetOutputFeedbackIndicesTable.Length; i++)
{
int targetOutputFeedbackIndex = targetOutputFeedbackIndicesTable[i];
int outputIndex = outputIndicesTable[i];
if (targetOutputFeedbackIndex >= 0)
{
outputValues[outputIndex] += feedbackOutputValues[targetOutputFeedbackIndex];
}
}
if (isSurround)
{
outputValues[4] += state.FrontCenterDelayLine.Update((feedbackOutputValues[2] - feedbackOutputValues[3]) * 0.5f);
}
for (int channelIndex = 0; channelIndex < Parameter.ChannelCount; channelIndex++)
{
*((float*)outputBuffers[channelIndex] + sampleIndex) = (outputValues[channelIndex] * outGain + channelInput[channelIndex] * dryGain) / 64;
}
}
}
private void ProcessReverb(CommandList context, ref ReverbState state)
{
Debug.Assert(Parameter.IsChannelCountValid());
if (IsEffectEnabled && Parameter.IsChannelCountValid())
{
Span<IntPtr> inputBuffers = stackalloc IntPtr[Parameter.ChannelCount];
Span<IntPtr> outputBuffers = stackalloc IntPtr[Parameter.ChannelCount];
for (int i = 0; i < Parameter.ChannelCount; i++)
{
inputBuffers[i] = context.GetBufferPointer(InputBufferIndices[i]);
outputBuffers[i] = context.GetBufferPointer(OutputBufferIndices[i]);
}
switch (Parameter.ChannelCount)
{
case 1:
ProcessReverbMono(ref state, outputBuffers, inputBuffers, context.SampleCount);
break;
case 2:
ProcessReverbStereo(ref state, outputBuffers, inputBuffers, context.SampleCount);
break;
case 4:
ProcessReverbQuadraphonic(ref state, outputBuffers, inputBuffers, context.SampleCount);
break;
case 6:
ProcessReverbSurround(ref state, outputBuffers, inputBuffers, context.SampleCount);
break;
default:
throw new NotImplementedException(Parameter.ChannelCount.ToString());
}
}
else
{
for (int i = 0; i < Parameter.ChannelCount; i++)
{
if (InputBufferIndices[i] != OutputBufferIndices[i])
{
context.CopyBuffer(OutputBufferIndices[i], InputBufferIndices[i]);
}
}
}
}
public void Process(CommandList context)
{
ref ReverbState state = ref State.Span[0];
if (IsEffectEnabled)
{
if (Parameter.Status == Server.Effect.UsageState.Invalid)
{
state = new ReverbState(ref _parameter, WorkBuffer, IsLongSizePreDelaySupported);
}
else if (Parameter.Status == Server.Effect.UsageState.New)
{
state.UpdateParameter(ref _parameter);
}
}
ProcessReverb(context, ref state);
}
}
}
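
ReverbGain, LateGain, OutGain and DryGain arrive as Q14 fixed-point values (FixedPointPrecision = 14). FixedPointHelper.ToFloat is not part of this diff; the sketch below shows the standard Q14 conversion it is assumed to perform.

static class FixedPointSketch
{
    // Assumed behaviour of FixedPointHelper.ToFloat(value, fractionBits):
    // interpret the integer as a fixed-point number with the given number of
    // fractional bits, so 1 << 14 (16384) maps to a gain of 1.0f.
    public static float ToFloat(int value, int fractionBits)
    {
        return value / (float)(1 << fractionBits);
    }
}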

View File

@@ -0,0 +1,70 @@
using Ryujinx.Audio.Renderer.Server.Upsampler;
using System;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class UpsampleCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.Upsample;
public uint EstimatedProcessingTime { get; set; }
public uint BufferCount { get; }
public uint InputBufferIndex { get; }
public uint InputSampleCount { get; }
public uint InputSampleRate { get; }
public UpsamplerState UpsamplerInfo { get; }
public Memory<float> OutBuffer { get; }
public UpsampleCommand(uint bufferOffset, UpsamplerState info, uint inputCount, Span<byte> inputBufferOffset, uint bufferCount, uint sampleCount, uint sampleRate, int nodeId)
{
Enabled = true;
NodeId = nodeId;
InputBufferIndex = 0;
OutBuffer = info.OutputBuffer;
BufferCount = bufferCount;
InputSampleCount = sampleCount;
InputSampleRate = sampleRate;
info.SourceSampleCount = inputCount;
info.InputBufferIndices = new ushort[inputCount];
for (int i = 0; i < inputCount; i++)
{
info.InputBufferIndices[i] = (ushort)(bufferOffset + inputBufferOffset[i]);
}
            // Keep the existing per-buffer states when the buffer count is unchanged; reallocate otherwise.
            if (info.BufferStates?.Length != (int)inputCount)
            {
                info.BufferStates = new UpsamplerBufferState[(int)inputCount];
}
UpsamplerInfo = info;
}
private Span<float> GetBuffer(int index, int sampleCount)
{
return UpsamplerInfo.OutputBuffer.Span.Slice(index * sampleCount, sampleCount);
}
public void Process(CommandList context)
{
uint bufferCount = Math.Min(BufferCount, UpsamplerInfo.SourceSampleCount);
for (int i = 0; i < bufferCount; i++)
{
Span<float> inputBuffer = context.GetBuffer(UpsamplerInfo.InputBufferIndices[i]);
Span<float> outputBuffer = GetBuffer(UpsamplerInfo.InputBufferIndices[i], (int)UpsamplerInfo.SampleCount);
UpsamplerHelper.Upsample(outputBuffer, inputBuffer, (int)UpsamplerInfo.SampleCount, (int)InputSampleCount, ref UpsamplerInfo.BufferStates[i]);
}
}
}
}
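
UpsamplerHelper.Upsample itself is not part of this diff. As a conceptual stand-in only, the sketch below resamples inputSampleCount source samples to outputSampleCount destination samples with linear interpolation; the real helper is stateful (note the ref UpsamplerBufferState argument) and uses a proper filter rather than this naive approach.

using System;

static class UpsampleSketch
{
    // Naive linear-interpolation resampler, for illustration only.
    public static void Upsample(Span<float> output, ReadOnlySpan<float> input, int outputSampleCount, int inputSampleCount)
    {
        double step = (double)inputSampleCount / outputSampleCount;

        for (int i = 0; i < outputSampleCount; i++)
        {
            double position = i * step;
            int index = Math.Min((int)position, inputSampleCount - 1);
            int nextIndex = Math.Min(index + 1, inputSampleCount - 1);
            float fraction = (float)(position - index);

            output[i] = input[index] + (input[nextIndex] - input[index]) * fraction;
        }
    }
}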

View File

@@ -0,0 +1,137 @@
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.Arm;
using System.Runtime.Intrinsics.X86;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class VolumeCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.Volume;
public uint EstimatedProcessingTime { get; set; }
public ushort InputBufferIndex { get; }
public ushort OutputBufferIndex { get; }
public float Volume { get; }
public VolumeCommand(float volume, uint bufferIndex, int nodeId)
{
Enabled = true;
NodeId = nodeId;
InputBufferIndex = (ushort)bufferIndex;
OutputBufferIndex = (ushort)bufferIndex;
Volume = volume;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessVolumeAvx(Span<float> outputBuffer, ReadOnlySpan<float> inputBuffer)
{
Vector256<float> volumeVec = Vector256.Create(Volume);
ReadOnlySpan<Vector256<float>> inputVec = MemoryMarshal.Cast<float, Vector256<float>>(inputBuffer);
Span<Vector256<float>> outputVec = MemoryMarshal.Cast<float, Vector256<float>>(outputBuffer);
int sisdStart = inputVec.Length * 8;
for (int i = 0; i < inputVec.Length; i++)
{
outputVec[i] = Avx.Ceiling(Avx.Multiply(inputVec[i], volumeVec));
}
for (int i = sisdStart; i < inputBuffer.Length; i++)
{
outputBuffer[i] = FloatingPointHelper.MultiplyRoundUp(inputBuffer[i], Volume);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessVolumeSse41(Span<float> outputBuffer, ReadOnlySpan<float> inputBuffer)
{
Vector128<float> volumeVec = Vector128.Create(Volume);
ReadOnlySpan<Vector128<float>> inputVec = MemoryMarshal.Cast<float, Vector128<float>>(inputBuffer);
Span<Vector128<float>> outputVec = MemoryMarshal.Cast<float, Vector128<float>>(outputBuffer);
int sisdStart = inputVec.Length * 4;
for (int i = 0; i < inputVec.Length; i++)
{
outputVec[i] = Sse41.Ceiling(Sse.Multiply(inputVec[i], volumeVec));
}
for (int i = sisdStart; i < inputBuffer.Length; i++)
{
outputBuffer[i] = FloatingPointHelper.MultiplyRoundUp(inputBuffer[i], Volume);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessVolumeAdvSimd(Span<float> outputBuffer, ReadOnlySpan<float> inputBuffer)
{
Vector128<float> volumeVec = Vector128.Create(Volume);
ReadOnlySpan<Vector128<float>> inputVec = MemoryMarshal.Cast<float, Vector128<float>>(inputBuffer);
Span<Vector128<float>> outputVec = MemoryMarshal.Cast<float, Vector128<float>>(outputBuffer);
int sisdStart = inputVec.Length * 4;
for (int i = 0; i < inputVec.Length; i++)
{
outputVec[i] = AdvSimd.Ceiling(AdvSimd.Multiply(inputVec[i], volumeVec));
}
for (int i = sisdStart; i < inputBuffer.Length; i++)
{
outputBuffer[i] = FloatingPointHelper.MultiplyRoundUp(inputBuffer[i], Volume);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessVolume(Span<float> outputBuffer, ReadOnlySpan<float> inputBuffer)
{
if (Avx.IsSupported)
{
ProcessVolumeAvx(outputBuffer, inputBuffer);
}
else if (Sse41.IsSupported)
{
ProcessVolumeSse41(outputBuffer, inputBuffer);
}
else if (AdvSimd.IsSupported)
{
ProcessVolumeAdvSimd(outputBuffer, inputBuffer);
}
else
{
ProcessVolumeSlowPath(outputBuffer, inputBuffer);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessVolumeSlowPath(Span<float> outputBuffer, ReadOnlySpan<float> inputBuffer)
{
for (int i = 0; i < outputBuffer.Length; i++)
{
outputBuffer[i] = FloatingPointHelper.MultiplyRoundUp(inputBuffer[i], Volume);
}
}
public void Process(CommandList context)
{
ReadOnlySpan<float> inputBuffer = context.GetBuffer(InputBufferIndex);
Span<float> outputBuffer = context.GetBuffer(OutputBufferIndex);
ProcessVolume(outputBuffer, inputBuffer);
}
}
}
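
The Avx/Sse41/AdvSimd bodies above all follow the same shape: MemoryMarshal.Cast reinterprets the buffer as whole vectors, and sisdStart = vectorCount * laneCount marks where the scalar tail begins, so lengths that are not a multiple of the vector width are still fully covered. The sketch below shows that pattern with the cross-platform Vector128 helpers (plain multiply, no rounding), just to isolate the tail handling.

using System;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;

static class VectorTailSketch
{
    public static void Scale(Span<float> buffer, float volume)
    {
        Vector128<float> volumeVec = Vector128.Create(volume);
        Span<Vector128<float>> vectors = MemoryMarshal.Cast<float, Vector128<float>>(buffer);

        // Full vectors first...
        for (int i = 0; i < vectors.Length; i++)
        {
            vectors[i] = Vector128.Multiply(vectors[i], volumeVec);
        }

        // ...then the scalar tail that did not fill a whole vector.
        for (int i = vectors.Length * Vector128<float>.Count; i < buffer.Length; i++)
        {
            buffer[i] *= volume;
        }
    }
}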

View File

@@ -0,0 +1,56 @@
using System;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class VolumeRampCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.VolumeRamp;
public uint EstimatedProcessingTime { get; set; }
public ushort InputBufferIndex { get; }
public ushort OutputBufferIndex { get; }
public float Volume0 { get; }
public float Volume1 { get; }
public VolumeRampCommand(float volume0, float volume1, uint bufferIndex, int nodeId)
{
Enabled = true;
NodeId = nodeId;
InputBufferIndex = (ushort)bufferIndex;
OutputBufferIndex = (ushort)bufferIndex;
Volume0 = volume0;
Volume1 = volume1;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessVolumeRamp(Span<float> outputBuffer, ReadOnlySpan<float> inputBuffer, int sampleCount)
{
float ramp = (Volume1 - Volume0) / sampleCount;
float volume = Volume0;
for (int i = 0; i < sampleCount; i++)
{
outputBuffer[i] = FloatingPointHelper.MultiplyRoundUp(inputBuffer[i], volume);
volume += ramp;
}
}
public void Process(CommandList context)
{
ReadOnlySpan<float> inputBuffer = context.GetBuffer(InputBufferIndex);
Span<float> outputBuffer = context.GetBuffer(OutputBufferIndex);
ProcessVolumeRamp(outputBuffer, inputBuffer, (int)context.SampleCount);
}
}
}