From d1604aa762a3f669a3fecff0a30b7360399954bc Mon Sep 17 00:00:00 2001 From: Ac_K Date: Tue, 12 Oct 2021 22:55:57 +0200 Subject: nvdec: Adding Vp8 codec support (#2707) * first try * second try * working update * Final impl * Fixes nits * Fix everything * remove leftover * Update FFmpegContext.cs * Update Surface.cs * Addresses gdkchan feedback * bool not byte * Addresses gdkchan feedback --- Ryujinx.Graphics.Nvdec.FFmpeg/FFmpegContext.cs | 162 +++++++++++++++++++++ Ryujinx.Graphics.Nvdec.FFmpeg/H264/Decoder.cs | 56 +++++++ .../H264/H264BitStreamWriter.cs | 121 +++++++++++++++ .../H264/SpsAndPpsReconstruction.cs | 159 ++++++++++++++++++++ .../Ryujinx.Graphics.Nvdec.FFmpeg.csproj | 17 +++ Ryujinx.Graphics.Nvdec.FFmpeg/Surface.cs | 39 +++++ Ryujinx.Graphics.Nvdec.FFmpeg/Vp8/Decoder.cs | 53 +++++++ Ryujinx.Graphics.Nvdec.H264/Decoder.cs | 55 ------- Ryujinx.Graphics.Nvdec.H264/FFmpegContext.cs | 162 --------------------- Ryujinx.Graphics.Nvdec.H264/H264BitStreamWriter.cs | 121 --------------- .../Ryujinx.Graphics.Nvdec.H264.csproj | 16 -- .../SpsAndPpsReconstruction.cs | 159 -------------------- Ryujinx.Graphics.Nvdec.H264/Surface.cs | 39 ----- Ryujinx.Graphics.Nvdec/H264Decoder.cs | 6 +- Ryujinx.Graphics.Nvdec/NvdecDecoderContext.cs | 20 ++- Ryujinx.Graphics.Nvdec/NvdecDevice.cs | 3 + .../Ryujinx.Graphics.Nvdec.csproj | 2 +- Ryujinx.Graphics.Nvdec/Types/Vp8/PictureInfo.cs | 75 ++++++++++ Ryujinx.Graphics.Nvdec/Vp8Decoder.cs | 33 +++++ Ryujinx.Graphics.Video/Vp8PictureInfo.cs | 11 ++ Ryujinx.sln | 14 +- Ryujinx/Ryujinx.csproj | 2 +- 22 files changed, 755 insertions(+), 570 deletions(-) create mode 100644 Ryujinx.Graphics.Nvdec.FFmpeg/FFmpegContext.cs create mode 100644 Ryujinx.Graphics.Nvdec.FFmpeg/H264/Decoder.cs create mode 100644 Ryujinx.Graphics.Nvdec.FFmpeg/H264/H264BitStreamWriter.cs create mode 100644 Ryujinx.Graphics.Nvdec.FFmpeg/H264/SpsAndPpsReconstruction.cs create mode 100644 Ryujinx.Graphics.Nvdec.FFmpeg/Ryujinx.Graphics.Nvdec.FFmpeg.csproj create mode 100644 Ryujinx.Graphics.Nvdec.FFmpeg/Surface.cs create mode 100644 Ryujinx.Graphics.Nvdec.FFmpeg/Vp8/Decoder.cs delete mode 100644 Ryujinx.Graphics.Nvdec.H264/Decoder.cs delete mode 100644 Ryujinx.Graphics.Nvdec.H264/FFmpegContext.cs delete mode 100644 Ryujinx.Graphics.Nvdec.H264/H264BitStreamWriter.cs delete mode 100644 Ryujinx.Graphics.Nvdec.H264/Ryujinx.Graphics.Nvdec.H264.csproj delete mode 100644 Ryujinx.Graphics.Nvdec.H264/SpsAndPpsReconstruction.cs delete mode 100644 Ryujinx.Graphics.Nvdec.H264/Surface.cs create mode 100644 Ryujinx.Graphics.Nvdec/Types/Vp8/PictureInfo.cs create mode 100644 Ryujinx.Graphics.Nvdec/Vp8Decoder.cs create mode 100644 Ryujinx.Graphics.Video/Vp8PictureInfo.cs diff --git a/Ryujinx.Graphics.Nvdec.FFmpeg/FFmpegContext.cs b/Ryujinx.Graphics.Nvdec.FFmpeg/FFmpegContext.cs new file mode 100644 index 00000000..1e3b88bb --- /dev/null +++ b/Ryujinx.Graphics.Nvdec.FFmpeg/FFmpegContext.cs @@ -0,0 +1,162 @@ +using FFmpeg.AutoGen; +using Ryujinx.Common.Logging; +using System; +using System.Diagnostics; +using System.IO; +using System.Runtime.InteropServices; + +namespace Ryujinx.Graphics.Nvdec.FFmpeg +{ + unsafe class FFmpegContext : IDisposable + { + private readonly AVCodec_decode _decodeFrame; + private static readonly av_log_set_callback_callback _logFunc; + private readonly AVCodec* _codec; + private AVPacket* _packet; + private AVCodecContext* _context; + + public FFmpegContext(AVCodecID codecId) + { + _codec = ffmpeg.avcodec_find_decoder(codecId); + _context = ffmpeg.avcodec_alloc_context3(_codec); + + 
ffmpeg.avcodec_open2(_context, _codec, null); + + _packet = ffmpeg.av_packet_alloc(); + + _decodeFrame = Marshal.GetDelegateForFunctionPointer(_codec->decode.Pointer); + } + + static FFmpegContext() + { + SetRootPath(); + + _logFunc = Log; + + // Redirect log output. + ffmpeg.av_log_set_level(ffmpeg.AV_LOG_MAX_OFFSET); + ffmpeg.av_log_set_callback(_logFunc); + } + + private static void SetRootPath() + { + if (OperatingSystem.IsLinux()) + { + // Configure FFmpeg search path + Process lddProcess = Process.Start(new ProcessStartInfo + { + FileName = "/bin/sh", + Arguments = "-c \"ldd $(which ffmpeg 2>/dev/null) | grep libavfilter\" 2>/dev/null", + UseShellExecute = false, + RedirectStandardOutput = true + }); + + string lddOutput = lddProcess.StandardOutput.ReadToEnd(); + + lddProcess.WaitForExit(); + lddProcess.Close(); + + if (lddOutput.Contains(" => ")) + { + ffmpeg.RootPath = Path.GetDirectoryName(lddOutput.Split(" => ")[1]); + } + else + { + Logger.Error?.PrintMsg(LogClass.FFmpeg, "FFmpeg wasn't found. Make sure that you have it installed and up to date."); + } + } + } + + private static void Log(void* p0, int level, string format, byte* vl) + { + if (level > ffmpeg.av_log_get_level()) + { + return; + } + + int lineSize = 1024; + byte* lineBuffer = stackalloc byte[lineSize]; + int printPrefix = 1; + + ffmpeg.av_log_format_line(p0, level, format, vl, lineBuffer, lineSize, &printPrefix); + + string line = Marshal.PtrToStringAnsi((IntPtr)lineBuffer).Trim(); + + switch (level) + { + case ffmpeg.AV_LOG_PANIC: + case ffmpeg.AV_LOG_FATAL: + case ffmpeg.AV_LOG_ERROR: + Logger.Error?.Print(LogClass.FFmpeg, line); + break; + case ffmpeg.AV_LOG_WARNING: + Logger.Warning?.Print(LogClass.FFmpeg, line); + break; + case ffmpeg.AV_LOG_INFO: + Logger.Info?.Print(LogClass.FFmpeg, line); + break; + case ffmpeg.AV_LOG_VERBOSE: + case ffmpeg.AV_LOG_DEBUG: + case ffmpeg.AV_LOG_TRACE: + Logger.Debug?.Print(LogClass.FFmpeg, line); + break; + } + } + + public int DecodeFrame(Surface output, ReadOnlySpan bitstream) + { + ffmpeg.av_frame_unref(output.Frame); + + int result; + int gotFrame; + + fixed (byte* ptr = bitstream) + { + _packet->data = ptr; + _packet->size = bitstream.Length; + result = _decodeFrame(_context, output.Frame, &gotFrame, _packet); + } + + if (gotFrame == 0) + { + ffmpeg.av_frame_unref(output.Frame); + + // If the frame was not delivered, it was probably delayed. + // Get the next delayed frame by passing a 0 length packet. + _packet->data = null; + _packet->size = 0; + result = _decodeFrame(_context, output.Frame, &gotFrame, _packet); + + // We need to set B frames to 0 as we already consumed all delayed frames. + // This prevents the decoder from trying to return a delayed frame next time. + _context->has_b_frames = 0; + } + + ffmpeg.av_packet_unref(_packet); + + if (gotFrame == 0) + { + ffmpeg.av_frame_unref(output.Frame); + + return -1; + } + + return result < 0 ? 
result : 0; + } + + public void Dispose() + { + fixed (AVPacket** ppPacket = &_packet) + { + ffmpeg.av_packet_free(ppPacket); + } + + ffmpeg.avcodec_close(_context); + + fixed (AVCodecContext** ppContext = &_context) + { + ffmpeg.avcodec_free_context(ppContext); + } + } + } +} diff --git a/Ryujinx.Graphics.Nvdec.FFmpeg/H264/Decoder.cs b/Ryujinx.Graphics.Nvdec.FFmpeg/H264/Decoder.cs new file mode 100644 index 00000000..8deda42a --- /dev/null +++ b/Ryujinx.Graphics.Nvdec.FFmpeg/H264/Decoder.cs @@ -0,0 +1,56 @@ +using FFmpeg.AutoGen; +using Ryujinx.Graphics.Video; +using System; + +namespace Ryujinx.Graphics.Nvdec.FFmpeg.H264 +{ + public sealed class Decoder : IH264Decoder + { + public bool IsHardwareAccelerated => false; + + private const int WorkBufferSize = 0x200; + + private readonly byte[] _workBuffer = new byte[WorkBufferSize]; + + private FFmpegContext _context = new FFmpegContext(AVCodecID.AV_CODEC_ID_H264); + + private int _oldOutputWidth; + private int _oldOutputHeight; + + public ISurface CreateSurface(int width, int height) + { + return new Surface(width, height); + } + + public bool Decode(ref H264PictureInfo pictureInfo, ISurface output, ReadOnlySpan bitstream) + { + Surface outSurf = (Surface)output; + + if (outSurf.RequestedWidth != _oldOutputWidth || + outSurf.RequestedHeight != _oldOutputHeight) + { + _context.Dispose(); + _context = new FFmpegContext(AVCodecID.AV_CODEC_ID_H264); + + _oldOutputWidth = outSurf.RequestedWidth; + _oldOutputHeight = outSurf.RequestedHeight; + } + + Span bs = Prepend(bitstream, SpsAndPpsReconstruction.Reconstruct(ref pictureInfo, _workBuffer)); + + return _context.DecodeFrame(outSurf, bs) == 0; + } + + private static byte[] Prepend(ReadOnlySpan data, ReadOnlySpan prep) + { + byte[] output = new byte[data.Length + prep.Length]; + + prep.CopyTo(output); + data.CopyTo(new Span(output).Slice(prep.Length)); + + return output; + } + + public void Dispose() => _context.Dispose(); + } +} diff --git a/Ryujinx.Graphics.Nvdec.FFmpeg/H264/H264BitStreamWriter.cs b/Ryujinx.Graphics.Nvdec.FFmpeg/H264/H264BitStreamWriter.cs new file mode 100644 index 00000000..3d3b3293 --- /dev/null +++ b/Ryujinx.Graphics.Nvdec.FFmpeg/H264/H264BitStreamWriter.cs @@ -0,0 +1,121 @@ +using System; +using System.Numerics; + +namespace Ryujinx.Graphics.Nvdec.FFmpeg.H264 +{ + struct H264BitStreamWriter + { + private const int BufferSize = 8; + + private readonly byte[] _workBuffer; + + private int _offset; + private int _buffer; + private int _bufferPos; + + public H264BitStreamWriter(byte[] workBuffer) + { + _workBuffer = workBuffer; + _offset = 0; + _buffer = 0; + _bufferPos = 0; + } + + public void WriteBit(bool value) + { + WriteBits(value ? 
1 : 0, 1); + } + + public void WriteBits(int value, int valueSize) + { + int valuePos = 0; + + int remaining = valueSize; + + while (remaining > 0) + { + int copySize = remaining; + + int free = GetFreeBufferBits(); + + if (copySize > free) + { + copySize = free; + } + + int mask = (1 << copySize) - 1; + + int srcShift = (valueSize - valuePos) - copySize; + int dstShift = (BufferSize - _bufferPos) - copySize; + + _buffer |= ((value >> srcShift) & mask) << dstShift; + + valuePos += copySize; + _bufferPos += copySize; + remaining -= copySize; + } + } + + private int GetFreeBufferBits() + { + if (_bufferPos == BufferSize) + { + Flush(); + } + + return BufferSize - _bufferPos; + } + + public void Flush() + { + if (_bufferPos != 0) + { + _workBuffer[_offset++] = (byte)_buffer; + + _buffer = 0; + _bufferPos = 0; + } + } + + public void End() + { + WriteBit(true); + + Flush(); + } + + public Span AsSpan() + { + return new Span(_workBuffer).Slice(0, _offset); + } + + public void WriteU(uint value, int valueSize) => WriteBits((int)value, valueSize); + public void WriteSe(int value) => WriteExpGolombCodedInt(value); + public void WriteUe(uint value) => WriteExpGolombCodedUInt(value); + + private void WriteExpGolombCodedInt(int value) + { + int sign = value <= 0 ? 0 : 1; + + if (value < 0) + { + value = -value; + } + + value = (value << 1) - sign; + + WriteExpGolombCodedUInt((uint)value); + } + + private void WriteExpGolombCodedUInt(uint value) + { + int size = 32 - BitOperations.LeadingZeroCount(value + 1); + + WriteBits(1, size); + + value -= (1u << (size - 1)) - 1; + + WriteBits((int)value, size - 1); + } + } +} \ No newline at end of file diff --git a/Ryujinx.Graphics.Nvdec.FFmpeg/H264/SpsAndPpsReconstruction.cs b/Ryujinx.Graphics.Nvdec.FFmpeg/H264/SpsAndPpsReconstruction.cs new file mode 100644 index 00000000..5c16ef3d --- /dev/null +++ b/Ryujinx.Graphics.Nvdec.FFmpeg/H264/SpsAndPpsReconstruction.cs @@ -0,0 +1,159 @@ +using Ryujinx.Common.Memory; +using Ryujinx.Graphics.Video; +using System; + +namespace Ryujinx.Graphics.Nvdec.FFmpeg.H264 +{ + static class SpsAndPpsReconstruction + { + public static Span Reconstruct(ref H264PictureInfo pictureInfo, byte[] workBuffer) + { + H264BitStreamWriter writer = new H264BitStreamWriter(workBuffer); + + // Sequence Parameter Set. 
+ writer.WriteU(1, 24); + writer.WriteU(0, 1); + writer.WriteU(3, 2); + writer.WriteU(7, 5); + writer.WriteU(100, 8); // Profile idc + writer.WriteU(0, 8); // Reserved + writer.WriteU(31, 8); // Level idc + writer.WriteUe(0); // Seq parameter set id + writer.WriteUe(pictureInfo.ChromaFormatIdc); + + if (pictureInfo.ChromaFormatIdc == 3) + { + writer.WriteBit(false); // Separate colour plane flag + } + + writer.WriteUe(0); // Bit depth luma minus 8 + writer.WriteUe(0); // Bit depth chroma minus 8 + writer.WriteBit(pictureInfo.QpprimeYZeroTransformBypassFlag); + writer.WriteBit(false); // Scaling matrix present flag + + writer.WriteUe(pictureInfo.Log2MaxFrameNumMinus4); + writer.WriteUe(pictureInfo.PicOrderCntType); + + if (pictureInfo.PicOrderCntType == 0) + { + writer.WriteUe(pictureInfo.Log2MaxPicOrderCntLsbMinus4); + } + else if (pictureInfo.PicOrderCntType == 1) + { + writer.WriteBit(pictureInfo.DeltaPicOrderAlwaysZeroFlag); + + writer.WriteSe(0); // Offset for non-ref pic + writer.WriteSe(0); // Offset for top to bottom field + writer.WriteUe(0); // Num ref frames in pic order cnt cycle + } + + writer.WriteUe(16); // Max num ref frames + writer.WriteBit(false); // Gaps in frame num value allowed flag + writer.WriteUe(pictureInfo.PicWidthInMbsMinus1); + writer.WriteUe(pictureInfo.PicHeightInMapUnitsMinus1); + writer.WriteBit(pictureInfo.FrameMbsOnlyFlag); + + if (!pictureInfo.FrameMbsOnlyFlag) + { + writer.WriteBit(pictureInfo.MbAdaptiveFrameFieldFlag); + } + + writer.WriteBit(pictureInfo.Direct8x8InferenceFlag); + writer.WriteBit(false); // Frame cropping flag + writer.WriteBit(false); // VUI parameter present flag + + writer.End(); + + // Picture Parameter Set. + writer.WriteU(1, 24); + writer.WriteU(0, 1); + writer.WriteU(3, 2); + writer.WriteU(8, 5); + + writer.WriteUe(0); // Pic parameter set id + writer.WriteUe(0); // Seq parameter set id + + writer.WriteBit(pictureInfo.EntropyCodingModeFlag); + writer.WriteBit(pictureInfo.PicOrderPresentFlag); + writer.WriteUe(0); // Num slice groups minus 1 + writer.WriteUe(pictureInfo.NumRefIdxL0ActiveMinus1); + writer.WriteUe(pictureInfo.NumRefIdxL1ActiveMinus1); + writer.WriteBit(pictureInfo.WeightedPredFlag); + writer.WriteU(pictureInfo.WeightedBipredIdc, 2); + writer.WriteSe(pictureInfo.PicInitQpMinus26); + writer.WriteSe(0); // Pic init qs minus 26 + writer.WriteSe(pictureInfo.ChromaQpIndexOffset); + writer.WriteBit(pictureInfo.DeblockingFilterControlPresentFlag); + writer.WriteBit(pictureInfo.ConstrainedIntraPredFlag); + writer.WriteBit(pictureInfo.RedundantPicCntPresentFlag); + writer.WriteBit(pictureInfo.Transform8x8ModeFlag); + + writer.WriteBit(pictureInfo.ScalingMatrixPresent); + + if (pictureInfo.ScalingMatrixPresent) + { + for (int index = 0; index < 6; index++) + { + writer.WriteBit(true); + + WriteScalingList(ref writer, pictureInfo.ScalingLists4x4[index]); + } + + if (pictureInfo.Transform8x8ModeFlag) + { + for (int index = 0; index < 2; index++) + { + writer.WriteBit(true); + + WriteScalingList(ref writer, pictureInfo.ScalingLists8x8[index]); + } + } + } + + writer.WriteSe(pictureInfo.SecondChromaQpIndexOffset); + + writer.End(); + + return writer.AsSpan(); + } + + // ZigZag LUTs from libavcodec. 
+ private static readonly byte[] ZigZagDirect = new byte[] + { + 0, 1, 8, 16, 9, 2, 3, 10, + 17, 24, 32, 25, 18, 11, 4, 5, + 12, 19, 26, 33, 40, 48, 41, 34, + 27, 20, 13, 6, 7, 14, 21, 28, + 35, 42, 49, 56, 57, 50, 43, 36, + 29, 22, 15, 23, 30, 37, 44, 51, + 58, 59, 52, 45, 38, 31, 39, 46, + 53, 60, 61, 54, 47, 55, 62, 63 + }; + + private static readonly byte[] ZigZagScan = new byte[] + { + 0 + 0 * 4, 1 + 0 * 4, 0 + 1 * 4, 0 + 2 * 4, + 1 + 1 * 4, 2 + 0 * 4, 3 + 0 * 4, 2 + 1 * 4, + 1 + 2 * 4, 0 + 3 * 4, 1 + 3 * 4, 2 + 2 * 4, + 3 + 1 * 4, 3 + 2 * 4, 2 + 3 * 4, 3 + 3 * 4 + }; + + private static void WriteScalingList(ref H264BitStreamWriter writer, IArray list) + { + byte[] scan = list.Length == 16 ? ZigZagScan : ZigZagDirect; + + int lastScale = 8; + + for (int index = 0; index < list.Length; index++) + { + byte value = list[scan[index]]; + + int deltaScale = value - lastScale; + + writer.WriteSe(deltaScale); + + lastScale = value; + } + } + } +} diff --git a/Ryujinx.Graphics.Nvdec.FFmpeg/Ryujinx.Graphics.Nvdec.FFmpeg.csproj b/Ryujinx.Graphics.Nvdec.FFmpeg/Ryujinx.Graphics.Nvdec.FFmpeg.csproj new file mode 100644 index 00000000..b437f36e --- /dev/null +++ b/Ryujinx.Graphics.Nvdec.FFmpeg/Ryujinx.Graphics.Nvdec.FFmpeg.csproj @@ -0,0 +1,17 @@ + + + + net5.0 + true + + + + + + + + + + + + diff --git a/Ryujinx.Graphics.Nvdec.FFmpeg/Surface.cs b/Ryujinx.Graphics.Nvdec.FFmpeg/Surface.cs new file mode 100644 index 00000000..20cee4a1 --- /dev/null +++ b/Ryujinx.Graphics.Nvdec.FFmpeg/Surface.cs @@ -0,0 +1,39 @@ +using FFmpeg.AutoGen; +using Ryujinx.Graphics.Video; +using System; + +namespace Ryujinx.Graphics.Nvdec.FFmpeg +{ + unsafe class Surface : ISurface + { + public AVFrame* Frame { get; } + + public int RequestedWidth { get; } + public int RequestedHeight { get; } + + public Plane YPlane => new Plane((IntPtr)Frame->data[0], Stride * Height); + public Plane UPlane => new Plane((IntPtr)Frame->data[1], UvStride * UvHeight); + public Plane VPlane => new Plane((IntPtr)Frame->data[2], UvStride * UvHeight); + + public int Width => Frame->width; + public int Height => Frame->height; + public int Stride => Frame->linesize[0]; + public int UvWidth => (Frame->width + 1) >> 1; + public int UvHeight => (Frame->height + 1) >> 1; + public int UvStride => Frame->linesize[1]; + + public Surface(int width, int height) + { + RequestedWidth = width; + RequestedHeight = height; + + Frame = ffmpeg.av_frame_alloc(); + } + + public void Dispose() + { + ffmpeg.av_frame_unref(Frame); + ffmpeg.av_free(Frame); + } + } +} diff --git a/Ryujinx.Graphics.Nvdec.FFmpeg/Vp8/Decoder.cs b/Ryujinx.Graphics.Nvdec.FFmpeg/Vp8/Decoder.cs new file mode 100644 index 00000000..f12de287 --- /dev/null +++ b/Ryujinx.Graphics.Nvdec.FFmpeg/Vp8/Decoder.cs @@ -0,0 +1,53 @@ +using FFmpeg.AutoGen; +using Ryujinx.Graphics.Video; +using System; + +namespace Ryujinx.Graphics.Nvdec.FFmpeg.Vp8 +{ + public sealed class Decoder : IDecoder + { + public bool IsHardwareAccelerated => false; + + private readonly FFmpegContext _context = new FFmpegContext(AVCodecID.AV_CODEC_ID_VP8); + + public ISurface CreateSurface(int width, int height) + { + return new Surface(width, height); + } + + public bool Decode(ref Vp8PictureInfo pictureInfo, ISurface output, ReadOnlySpan bitstream) + { + Surface outSurf = (Surface)output; + + int uncompHeaderSize = pictureInfo.KeyFrame ? 10 : 3; + + byte[] frame = new byte[bitstream.Length + uncompHeaderSize]; + + uint firstPartSizeShifted = pictureInfo.FirstPartSize << 5; + + frame[0] = (byte)(pictureInfo.KeyFrame ? 
0 : 1); + frame[0] |= (byte)((pictureInfo.Version & 7) << 1); + frame[0] |= 1 << 4; + frame[0] |= (byte)firstPartSizeShifted; + frame[1] |= (byte)(firstPartSizeShifted >> 8); + frame[2] |= (byte)(firstPartSizeShifted >> 16); + + if (pictureInfo.KeyFrame) + { + frame[3] = 0x9d; + frame[4] = 0x01; + frame[5] = 0x2a; + frame[6] = (byte)pictureInfo.FrameWidth; + frame[7] = (byte)((pictureInfo.FrameWidth >> 8) & 0x3F); + frame[8] = (byte)pictureInfo.FrameHeight; + frame[9] = (byte)((pictureInfo.FrameHeight >> 8) & 0x3F); + } + + bitstream.CopyTo(new Span(frame).Slice(uncompHeaderSize)); + + return _context.DecodeFrame(outSurf, frame) == 0; + } + + public void Dispose() => _context.Dispose(); + } +} \ No newline at end of file diff --git a/Ryujinx.Graphics.Nvdec.H264/Decoder.cs b/Ryujinx.Graphics.Nvdec.H264/Decoder.cs deleted file mode 100644 index fed64af4..00000000 --- a/Ryujinx.Graphics.Nvdec.H264/Decoder.cs +++ /dev/null @@ -1,55 +0,0 @@ -using Ryujinx.Graphics.Video; -using System; - -namespace Ryujinx.Graphics.Nvdec.H264 -{ - public sealed class Decoder : IH264Decoder - { - public bool IsHardwareAccelerated => false; - - private const int WorkBufferSize = 0x200; - - private readonly byte[] _workBuffer = new byte[WorkBufferSize]; - - private FFmpegContext _context = new FFmpegContext(); - - private int _oldOutputWidth; - private int _oldOutputHeight; - - public ISurface CreateSurface(int width, int height) - { - return new Surface(width, height); - } - - public bool Decode(ref H264PictureInfo pictureInfo, ISurface output, ReadOnlySpan bitstream) - { - Surface outSurf = (Surface)output; - - if (outSurf.RequestedWidth != _oldOutputWidth || - outSurf.RequestedHeight != _oldOutputHeight) - { - _context.Dispose(); - _context = new FFmpegContext(); - - _oldOutputWidth = outSurf.RequestedWidth; - _oldOutputHeight = outSurf.RequestedHeight; - } - - Span bs = Prepend(bitstream, SpsAndPpsReconstruction.Reconstruct(ref pictureInfo, _workBuffer)); - - return _context.DecodeFrame(outSurf, bs) == 0; - } - - private static byte[] Prepend(ReadOnlySpan data, ReadOnlySpan prep) - { - byte[] output = new byte[data.Length + prep.Length]; - - prep.CopyTo(output); - data.CopyTo(new Span(output).Slice(prep.Length)); - - return output; - } - - public void Dispose() => _context.Dispose(); - } -} diff --git a/Ryujinx.Graphics.Nvdec.H264/FFmpegContext.cs b/Ryujinx.Graphics.Nvdec.H264/FFmpegContext.cs deleted file mode 100644 index 66b1e6c1..00000000 --- a/Ryujinx.Graphics.Nvdec.H264/FFmpegContext.cs +++ /dev/null @@ -1,162 +0,0 @@ -using FFmpeg.AutoGen; -using Ryujinx.Common.Logging; -using System; -using System.Diagnostics; -using System.IO; -using System.Runtime.InteropServices; - -namespace Ryujinx.Graphics.Nvdec.H264 -{ - unsafe class FFmpegContext : IDisposable - { - private readonly AVCodec_decode _h264Decode; - private static readonly av_log_set_callback_callback _logFunc; - private readonly AVCodec* _codec; - private AVPacket* _packet; - private AVCodecContext* _context; - - public FFmpegContext() - { - _codec = ffmpeg.avcodec_find_decoder(AVCodecID.AV_CODEC_ID_H264); - _context = ffmpeg.avcodec_alloc_context3(_codec); - _context->debug |= ffmpeg.FF_DEBUG_MMCO; - - ffmpeg.avcodec_open2(_context, _codec, null); - - _packet = ffmpeg.av_packet_alloc(); - - _h264Decode = Marshal.GetDelegateForFunctionPointer(_codec->decode.Pointer); - } - - static FFmpegContext() - { - SetRootPath(); - - _logFunc = Log; - - // Redirect log output. 
- ffmpeg.av_log_set_level(ffmpeg.AV_LOG_MAX_OFFSET); - ffmpeg.av_log_set_callback(_logFunc); - } - - private static void SetRootPath() - { - if (OperatingSystem.IsLinux()) - { - // Configure FFmpeg search path - Process lddProcess = Process.Start(new ProcessStartInfo - { - FileName = "/bin/sh", - Arguments = "-c \"ldd $(which ffmpeg 2>/dev/null) | grep libavfilter\" 2>/dev/null", - UseShellExecute = false, - RedirectStandardOutput = true - }); - - string lddOutput = lddProcess.StandardOutput.ReadToEnd(); - - lddProcess.WaitForExit(); - lddProcess.Close(); - - if (lddOutput.Contains(" => ")) - { - ffmpeg.RootPath = Path.GetDirectoryName(lddOutput.Split(" => ")[1]); - } - else - { - Logger.Error?.PrintMsg(LogClass.FFmpeg, "FFmpeg wasn't found. Make sure that you have it installed and up to date."); - } - } - } - - private static void Log(void* p0, int level, string format, byte* vl) - { - if (level > ffmpeg.av_log_get_level()) - { - return; - } - - int lineSize = 1024; - byte* lineBuffer = stackalloc byte[lineSize]; - int printPrefix = 1; - - ffmpeg.av_log_format_line(p0, level, format, vl, lineBuffer, lineSize, &printPrefix); - - string line = Marshal.PtrToStringAnsi((IntPtr)lineBuffer).Trim(); - - switch (level) - { - case ffmpeg.AV_LOG_PANIC: - case ffmpeg.AV_LOG_FATAL: - case ffmpeg.AV_LOG_ERROR: - Logger.Error?.Print(LogClass.FFmpeg, line); - break; - case ffmpeg.AV_LOG_WARNING: - Logger.Warning?.Print(LogClass.FFmpeg, line); - break; - case ffmpeg.AV_LOG_INFO: - Logger.Info?.Print(LogClass.FFmpeg, line); - break; - case ffmpeg.AV_LOG_VERBOSE: - case ffmpeg.AV_LOG_DEBUG: - case ffmpeg.AV_LOG_TRACE: - Logger.Debug?.Print(LogClass.FFmpeg, line); - break; - } - } - - public int DecodeFrame(Surface output, ReadOnlySpan bitstream) - { - ffmpeg.av_frame_unref(output.Frame); - - int result; - int gotFrame; - - fixed (byte* ptr = bitstream) - { - _packet->data = ptr; - _packet->size = bitstream.Length; - result = _h264Decode(_context, output.Frame, &gotFrame, _packet); - } - - if (gotFrame == 0) - { - ffmpeg.av_frame_unref(output.Frame); - - // If the frame was not delivered, it was probably delayed. - // Get the next delayed frame by passing a 0 length packet. - _packet->data = null; - _packet->size = 0; - result = _h264Decode(_context, output.Frame, &gotFrame, _packet); - - // We need to set B frames to 0 as we already consumed all delayed frames. - // This prevents the decoder from trying to return a delayed frame next time. - _context->has_b_frames = 0; - } - - ffmpeg.av_packet_unref(_packet); - - if (gotFrame == 0) - { - ffmpeg.av_frame_unref(output.Frame); - return -1; - } - - return result < 0 ? 
result : 0; - } - - public void Dispose() - { - fixed (AVPacket** ppPacket = &_packet) - { - ffmpeg.av_packet_free(ppPacket); - } - - ffmpeg.avcodec_close(_context); - - fixed (AVCodecContext** ppContext = &_context) - { - ffmpeg.avcodec_free_context(ppContext); - } - } - } -} diff --git a/Ryujinx.Graphics.Nvdec.H264/H264BitStreamWriter.cs b/Ryujinx.Graphics.Nvdec.H264/H264BitStreamWriter.cs deleted file mode 100644 index c0e2357d..00000000 --- a/Ryujinx.Graphics.Nvdec.H264/H264BitStreamWriter.cs +++ /dev/null @@ -1,121 +0,0 @@ -using System; -using System.Numerics; - -namespace Ryujinx.Graphics.Nvdec.H264 -{ - struct H264BitStreamWriter - { - private const int BufferSize = 8; - - private readonly byte[] _workBuffer; - - private int _offset; - private int _buffer; - private int _bufferPos; - - public H264BitStreamWriter(byte[] workBuffer) - { - _workBuffer = workBuffer; - _offset = 0; - _buffer = 0; - _bufferPos = 0; - } - - public void WriteBit(bool value) - { - WriteBits(value ? 1 : 0, 1); - } - - public void WriteBits(int value, int valueSize) - { - int valuePos = 0; - - int remaining = valueSize; - - while (remaining > 0) - { - int copySize = remaining; - - int free = GetFreeBufferBits(); - - if (copySize > free) - { - copySize = free; - } - - int mask = (1 << copySize) - 1; - - int srcShift = (valueSize - valuePos) - copySize; - int dstShift = (BufferSize - _bufferPos) - copySize; - - _buffer |= ((value >> srcShift) & mask) << dstShift; - - valuePos += copySize; - _bufferPos += copySize; - remaining -= copySize; - } - } - - private int GetFreeBufferBits() - { - if (_bufferPos == BufferSize) - { - Flush(); - } - - return BufferSize - _bufferPos; - } - - public void Flush() - { - if (_bufferPos != 0) - { - _workBuffer[_offset++] = (byte)_buffer; - - _buffer = 0; - _bufferPos = 0; - } - } - - public void End() - { - WriteBit(true); - - Flush(); - } - - public Span AsSpan() - { - return new Span(_workBuffer).Slice(0, _offset); - } - - public void WriteU(uint value, int valueSize) => WriteBits((int)value, valueSize); - public void WriteSe(int value) => WriteExpGolombCodedInt(value); - public void WriteUe(uint value) => WriteExpGolombCodedUInt(value); - - private void WriteExpGolombCodedInt(int value) - { - int sign = value <= 0 ? 
0 : 1; - - if (value < 0) - { - value = -value; - } - - value = (value << 1) - sign; - - WriteExpGolombCodedUInt((uint)value); - } - - private void WriteExpGolombCodedUInt(uint value) - { - int size = 32 - BitOperations.LeadingZeroCount(value + 1); - - WriteBits(1, size); - - value -= (1u << (size - 1)) - 1; - - WriteBits((int)value, size - 1); - } - } -} \ No newline at end of file diff --git a/Ryujinx.Graphics.Nvdec.H264/Ryujinx.Graphics.Nvdec.H264.csproj b/Ryujinx.Graphics.Nvdec.H264/Ryujinx.Graphics.Nvdec.H264.csproj deleted file mode 100644 index fdcdae06..00000000 --- a/Ryujinx.Graphics.Nvdec.H264/Ryujinx.Graphics.Nvdec.H264.csproj +++ /dev/null @@ -1,16 +0,0 @@ - - - - net5.0 - true - - - - - - - - - - - diff --git a/Ryujinx.Graphics.Nvdec.H264/SpsAndPpsReconstruction.cs b/Ryujinx.Graphics.Nvdec.H264/SpsAndPpsReconstruction.cs deleted file mode 100644 index 6fd1ce79..00000000 --- a/Ryujinx.Graphics.Nvdec.H264/SpsAndPpsReconstruction.cs +++ /dev/null @@ -1,159 +0,0 @@ -using Ryujinx.Common.Memory; -using Ryujinx.Graphics.Video; -using System; - -namespace Ryujinx.Graphics.Nvdec.H264 -{ - static class SpsAndPpsReconstruction - { - public static Span Reconstruct(ref H264PictureInfo pictureInfo, byte[] workBuffer) - { - H264BitStreamWriter writer = new H264BitStreamWriter(workBuffer); - - // Sequence Parameter Set. - writer.WriteU(1, 24); - writer.WriteU(0, 1); - writer.WriteU(3, 2); - writer.WriteU(7, 5); - writer.WriteU(100, 8); // Profile idc - writer.WriteU(0, 8); // Reserved - writer.WriteU(31, 8); // Level idc - writer.WriteUe(0); // Seq parameter set id - writer.WriteUe(pictureInfo.ChromaFormatIdc); - - if (pictureInfo.ChromaFormatIdc == 3) - { - writer.WriteBit(false); // Separate colour plane flag - } - - writer.WriteUe(0); // Bit depth luma minus 8 - writer.WriteUe(0); // Bit depth chroma minus 8 - writer.WriteBit(pictureInfo.QpprimeYZeroTransformBypassFlag); - writer.WriteBit(false); // Scaling matrix present flag - - writer.WriteUe(pictureInfo.Log2MaxFrameNumMinus4); - writer.WriteUe(pictureInfo.PicOrderCntType); - - if (pictureInfo.PicOrderCntType == 0) - { - writer.WriteUe(pictureInfo.Log2MaxPicOrderCntLsbMinus4); - } - else if (pictureInfo.PicOrderCntType == 1) - { - writer.WriteBit(pictureInfo.DeltaPicOrderAlwaysZeroFlag); - - writer.WriteSe(0); // Offset for non-ref pic - writer.WriteSe(0); // Offset for top to bottom field - writer.WriteUe(0); // Num ref frames in pic order cnt cycle - } - - writer.WriteUe(16); // Max num ref frames - writer.WriteBit(false); // Gaps in frame num value allowed flag - writer.WriteUe(pictureInfo.PicWidthInMbsMinus1); - writer.WriteUe(pictureInfo.PicHeightInMapUnitsMinus1); - writer.WriteBit(pictureInfo.FrameMbsOnlyFlag); - - if (!pictureInfo.FrameMbsOnlyFlag) - { - writer.WriteBit(pictureInfo.MbAdaptiveFrameFieldFlag); - } - - writer.WriteBit(pictureInfo.Direct8x8InferenceFlag); - writer.WriteBit(false); // Frame cropping flag - writer.WriteBit(false); // VUI parameter present flag - - writer.End(); - - // Picture Parameter Set. 
- writer.WriteU(1, 24); - writer.WriteU(0, 1); - writer.WriteU(3, 2); - writer.WriteU(8, 5); - - writer.WriteUe(0); // Pic parameter set id - writer.WriteUe(0); // Seq parameter set id - - writer.WriteBit(pictureInfo.EntropyCodingModeFlag); - writer.WriteBit(pictureInfo.PicOrderPresentFlag); - writer.WriteUe(0); // Num slice groups minus 1 - writer.WriteUe(pictureInfo.NumRefIdxL0ActiveMinus1); - writer.WriteUe(pictureInfo.NumRefIdxL1ActiveMinus1); - writer.WriteBit(pictureInfo.WeightedPredFlag); - writer.WriteU(pictureInfo.WeightedBipredIdc, 2); - writer.WriteSe(pictureInfo.PicInitQpMinus26); - writer.WriteSe(0); // Pic init qs minus 26 - writer.WriteSe(pictureInfo.ChromaQpIndexOffset); - writer.WriteBit(pictureInfo.DeblockingFilterControlPresentFlag); - writer.WriteBit(pictureInfo.ConstrainedIntraPredFlag); - writer.WriteBit(pictureInfo.RedundantPicCntPresentFlag); - writer.WriteBit(pictureInfo.Transform8x8ModeFlag); - - writer.WriteBit(pictureInfo.ScalingMatrixPresent); - - if (pictureInfo.ScalingMatrixPresent) - { - for (int index = 0; index < 6; index++) - { - writer.WriteBit(true); - - WriteScalingList(ref writer, pictureInfo.ScalingLists4x4[index]); - } - - if (pictureInfo.Transform8x8ModeFlag) - { - for (int index = 0; index < 2; index++) - { - writer.WriteBit(true); - - WriteScalingList(ref writer, pictureInfo.ScalingLists8x8[index]); - } - } - } - - writer.WriteSe(pictureInfo.SecondChromaQpIndexOffset); - - writer.End(); - - return writer.AsSpan(); - } - - // ZigZag LUTs from libavcodec. - private static readonly byte[] ZigZagDirect = new byte[] - { - 0, 1, 8, 16, 9, 2, 3, 10, - 17, 24, 32, 25, 18, 11, 4, 5, - 12, 19, 26, 33, 40, 48, 41, 34, - 27, 20, 13, 6, 7, 14, 21, 28, - 35, 42, 49, 56, 57, 50, 43, 36, - 29, 22, 15, 23, 30, 37, 44, 51, - 58, 59, 52, 45, 38, 31, 39, 46, - 53, 60, 61, 54, 47, 55, 62, 63 - }; - - private static readonly byte[] ZigZagScan = new byte[] - { - 0 + 0 * 4, 1 + 0 * 4, 0 + 1 * 4, 0 + 2 * 4, - 1 + 1 * 4, 2 + 0 * 4, 3 + 0 * 4, 2 + 1 * 4, - 1 + 2 * 4, 0 + 3 * 4, 1 + 3 * 4, 2 + 2 * 4, - 3 + 1 * 4, 3 + 2 * 4, 2 + 3 * 4, 3 + 3 * 4 - }; - - private static void WriteScalingList(ref H264BitStreamWriter writer, IArray list) - { - byte[] scan = list.Length == 16 ? 
ZigZagScan : ZigZagDirect; - - int lastScale = 8; - - for (int index = 0; index < list.Length; index++) - { - byte value = list[scan[index]]; - - int deltaScale = value - lastScale; - - writer.WriteSe(deltaScale); - - lastScale = value; - } - } - } -} diff --git a/Ryujinx.Graphics.Nvdec.H264/Surface.cs b/Ryujinx.Graphics.Nvdec.H264/Surface.cs deleted file mode 100644 index 3dbc980e..00000000 --- a/Ryujinx.Graphics.Nvdec.H264/Surface.cs +++ /dev/null @@ -1,39 +0,0 @@ -using FFmpeg.AutoGen; -using Ryujinx.Graphics.Video; -using System; - -namespace Ryujinx.Graphics.Nvdec.H264 -{ - unsafe class Surface : ISurface - { - public AVFrame* Frame { get; } - - public int RequestedWidth { get; } - public int RequestedHeight { get; } - - public Plane YPlane => new Plane((IntPtr)Frame->data[0], Stride * Height); - public Plane UPlane => new Plane((IntPtr)Frame->data[1], UvStride * UvHeight); - public Plane VPlane => new Plane((IntPtr)Frame->data[2], UvStride * UvHeight); - - public int Width => Frame->width; - public int Height => Frame->height; - public int Stride => Frame->linesize[0]; - public int UvWidth => (Frame->width + 1) >> 1; - public int UvHeight => (Frame->height + 1) >> 1; - public int UvStride => Frame->linesize[1]; - - public Surface(int width, int height) - { - RequestedWidth = width; - RequestedHeight = height; - - Frame = ffmpeg.av_frame_alloc(); - } - - public void Dispose() - { - ffmpeg.av_frame_unref(Frame); - ffmpeg.av_free(Frame); - } - } -} diff --git a/Ryujinx.Graphics.Nvdec/H264Decoder.cs b/Ryujinx.Graphics.Nvdec/H264Decoder.cs index 1ee3997b..69eeb494 100644 --- a/Ryujinx.Graphics.Nvdec/H264Decoder.cs +++ b/Ryujinx.Graphics.Nvdec/H264Decoder.cs @@ -1,4 +1,4 @@ -using Ryujinx.Graphics.Nvdec.H264; +using Ryujinx.Graphics.Nvdec.FFmpeg.H264; using Ryujinx.Graphics.Nvdec.Image; using Ryujinx.Graphics.Nvdec.Types.H264; using Ryujinx.Graphics.Video; @@ -10,7 +10,7 @@ namespace Ryujinx.Graphics.Nvdec { private const int MbSizeInPixels = 16; - public unsafe static void Decode(NvdecDecoderContext context, ResourceManager rm, ref NvdecRegisters state) + public static void Decode(NvdecDecoderContext context, ResourceManager rm, ref NvdecRegisters state) { PictureInfo pictureInfo = rm.Gmm.DeviceRead(state.SetPictureInfoOffset); H264PictureInfo info = pictureInfo.Convert(); @@ -25,7 +25,7 @@ namespace Ryujinx.Graphics.Nvdec uint lumaOffset = state.SetSurfaceLumaOffset[surfaceIndex]; uint chromaOffset = state.SetSurfaceChromaOffset[surfaceIndex]; - Decoder decoder = context.GetDecoder(); + Decoder decoder = context.GetH264Decoder(); ISurface outputSurface = rm.Cache.Get(decoder, 0, 0, width, height); diff --git a/Ryujinx.Graphics.Nvdec/NvdecDecoderContext.cs b/Ryujinx.Graphics.Nvdec/NvdecDecoderContext.cs index 90da0bee..54934bc5 100644 --- a/Ryujinx.Graphics.Nvdec/NvdecDecoderContext.cs +++ b/Ryujinx.Graphics.Nvdec/NvdecDecoderContext.cs @@ -1,21 +1,29 @@ -using Ryujinx.Graphics.Nvdec.H264; using System; namespace Ryujinx.Graphics.Nvdec { class NvdecDecoderContext : IDisposable { - private Decoder _decoder; + private FFmpeg.H264.Decoder _h264Decoder; + private FFmpeg.Vp8.Decoder _vp8Decoder; - public Decoder GetDecoder() + public FFmpeg.H264.Decoder GetH264Decoder() { - return _decoder ??= new Decoder(); + return _h264Decoder ??= new FFmpeg.H264.Decoder(); + } + + public FFmpeg.Vp8.Decoder GetVp8Decoder() + { + return _vp8Decoder ??= new FFmpeg.Vp8.Decoder(); } public void Dispose() { - _decoder?.Dispose(); - _decoder = null; + _h264Decoder?.Dispose(); + _h264Decoder = null; + + 
_vp8Decoder?.Dispose(); + _vp8Decoder = null; } } } \ No newline at end of file diff --git a/Ryujinx.Graphics.Nvdec/NvdecDevice.cs b/Ryujinx.Graphics.Nvdec/NvdecDevice.cs index 5319429b..18c2fc13 100644 --- a/Ryujinx.Graphics.Nvdec/NvdecDevice.cs +++ b/Ryujinx.Graphics.Nvdec/NvdecDevice.cs @@ -68,6 +68,9 @@ namespace Ryujinx.Graphics.Nvdec case CodecId.H264: H264Decoder.Decode(_currentContext, _rm, ref _state.State); break; + case CodecId.Vp8: + Vp8Decoder.Decode(_currentContext, _rm, ref _state.State); + break; case CodecId.Vp9: Vp9Decoder.Decode(_rm, ref _state.State); break; diff --git a/Ryujinx.Graphics.Nvdec/Ryujinx.Graphics.Nvdec.csproj b/Ryujinx.Graphics.Nvdec/Ryujinx.Graphics.Nvdec.csproj index 4c20979d..095e0e59 100644 --- a/Ryujinx.Graphics.Nvdec/Ryujinx.Graphics.Nvdec.csproj +++ b/Ryujinx.Graphics.Nvdec/Ryujinx.Graphics.Nvdec.csproj @@ -9,7 +9,7 @@ - + diff --git a/Ryujinx.Graphics.Nvdec/Types/Vp8/PictureInfo.cs b/Ryujinx.Graphics.Nvdec/Types/Vp8/PictureInfo.cs new file mode 100644 index 00000000..844f2103 --- /dev/null +++ b/Ryujinx.Graphics.Nvdec/Types/Vp8/PictureInfo.cs @@ -0,0 +1,75 @@ +using Ryujinx.Common.Memory; +using Ryujinx.Graphics.Video; + +namespace Ryujinx.Graphics.Nvdec.Types.Vp8 +{ + struct PictureInfo + { +#pragma warning disable CS0649 + public Array13 Unknown0; + public uint GpTimerTimeoutValue; + public ushort FrameWidth; + public ushort FrameHeight; + public byte KeyFrame; // 1: key frame - 0: not + public byte Version; + public byte Flags0; + // TileFormat : 2 // 0: TBL; 1: KBL; + // GobHeight : 3 // Set GOB height, 0: GOB_2, 1: GOB_4, 2: GOB_8, 3: GOB_16, 4: GOB_32 (NVDEC3 onwards) + // ReserverdSurfaceFormat : 3 + public byte ErrorConcealOn; // 1: error conceal on - 0: off + public uint FirstPartSize; // the size of first partition (frame header and mb header partition) + public uint HistBufferSize; // in units of 256 + public uint VLDBufferSize; // in units of 1 + public Array2 FrameStride; // [y_c] + public uint LumaTopOffset; // offset of luma top field in units of 256 + public uint LumaBotOffset; // offset of luma bottom field in units of 256 + public uint LumaFrameOffset; // offset of luma frame in units of 256 + public uint ChromaTopOffset; // offset of chroma top field in units of 256 + public uint ChromaBotOffset; // offset of chroma bottom field in units of 256 + public uint ChromaFrameOffset; // offset of chroma frame in units of 256 + public uint Flags1; + // EnableTFOutput : 1; // =1, enable dbfdma to output the display surface; if disable, then the following configure on tf is useless. + // Remap for VC1 + // VC1MapYFlag : 1 + // MapYValue : 3 + // VC1MapUVFlag : 1 + // MapUVValue : 3 + // TF + // OutStride : 8 + // TilingFormat : 3; + // OutputStructure : 1 // 0:frame, 1:field + // Reserved0 : 11 + public Array2 OutputTop; // in units of 256 + public Array2 OutputBottom; // in units of 256 + // Histogram + public uint Flags2; + // EnableHistogram : 1 // enable histogram info collection + // HistogramStartX : 12 // start X of Histogram window + // HistogramStartY : 12 // start Y of Histogram window + // Reserved1 : 7 + // HistogramEndX : 12 // end X of Histogram window + // HistogramEndY : 12 // end y of Histogram window + // Reserved2 : 8 + // Decode picture buffer related + public sbyte CurrentOutputMemoryLayout; + public Array3 OutputMemoryLayout; // output NV12/NV24 setting. 
item 0:golden - 1: altref - 2: last + public byte SegmentationFeatureDataUpdate; + public Array3 Reserved3; + public uint ResultValue; // ucode return result + public Array8 PartitionOffset; + public Array3 Reserved4; +#pragma warning restore CS0649 + + public Vp8PictureInfo Convert() + { + return new Vp8PictureInfo() + { + KeyFrame = KeyFrame != 0, + FirstPartSize = FirstPartSize, + Version = Version, + FrameWidth = FrameWidth, + FrameHeight = FrameHeight + }; + } + } +} diff --git a/Ryujinx.Graphics.Nvdec/Vp8Decoder.cs b/Ryujinx.Graphics.Nvdec/Vp8Decoder.cs new file mode 100644 index 00000000..8a369984 --- /dev/null +++ b/Ryujinx.Graphics.Nvdec/Vp8Decoder.cs @@ -0,0 +1,33 @@ +using Ryujinx.Graphics.Nvdec.FFmpeg.Vp8; +using Ryujinx.Graphics.Nvdec.Image; +using Ryujinx.Graphics.Nvdec.Types.Vp8; +using Ryujinx.Graphics.Video; +using System; + +namespace Ryujinx.Graphics.Nvdec +{ + static class Vp8Decoder + { + public static void Decode(NvdecDecoderContext context, ResourceManager rm, ref NvdecRegisters state) + { + PictureInfo pictureInfo = rm.Gmm.DeviceRead(state.SetPictureInfoOffset); + ReadOnlySpan bitstream = rm.Gmm.DeviceGetSpan(state.SetBitstreamOffset, (int)pictureInfo.VLDBufferSize); + + Decoder decoder = context.GetVp8Decoder(); + + ISurface outputSurface = rm.Cache.Get(decoder, 0, 0, pictureInfo.FrameWidth, pictureInfo.FrameHeight); + + Vp8PictureInfo info = pictureInfo.Convert(); + + uint lumaOffset = state.SetSurfaceLumaOffset[3]; + uint chromaOffset = state.SetSurfaceChromaOffset[3]; + + if (decoder.Decode(ref info, outputSurface, bitstream)) + { + SurfaceWriter.Write(rm.Gmm, outputSurface, lumaOffset, chromaOffset); + } + + rm.Cache.Put(outputSurface); + } + } +} \ No newline at end of file diff --git a/Ryujinx.Graphics.Video/Vp8PictureInfo.cs b/Ryujinx.Graphics.Video/Vp8PictureInfo.cs new file mode 100644 index 00000000..878674b8 --- /dev/null +++ b/Ryujinx.Graphics.Video/Vp8PictureInfo.cs @@ -0,0 +1,11 @@ +namespace Ryujinx.Graphics.Video +{ + public ref struct Vp8PictureInfo + { + public bool KeyFrame; + public uint FirstPartSize; + public uint Version; + public ushort FrameWidth; + public ushort FrameHeight; + } +} \ No newline at end of file diff --git a/Ryujinx.sln b/Ryujinx.sln index 9504bbc2..e0d35bdb 100644 --- a/Ryujinx.sln +++ b/Ryujinx.sln @@ -51,8 +51,6 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Ryujinx.Graphics.Nvdec.Vp9" EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Ryujinx.Graphics.Vic", "Ryujinx.Graphics.Vic\Ryujinx.Graphics.Vic.csproj", "{81BB2C11-9408-4EA3-822E-42987AF54429}" EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Ryujinx.Graphics.Nvdec.H264", "Ryujinx.Graphics.Nvdec.H264\Ryujinx.Graphics.Nvdec.H264.csproj", "{990F9601-343E-46CB-8529-B498FA761A92}" -EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Ryujinx.Graphics.Video", "Ryujinx.Graphics.Video\Ryujinx.Graphics.Video.csproj", "{FD4A2C14-8E3D-4957-ABBE-3C38897B3E2D}" EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Ryujinx.Audio.Backends.OpenAL", "Ryujinx.Audio.Backends.OpenAL\Ryujinx.Audio.Backends.OpenAL.csproj", "{0BE11899-DF2D-4BDE-B9EE-2489E8D35E7D}" @@ -67,7 +65,9 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Ryujinx.SDL2.Common", "Ryuj EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Ryujinx.Audio.Backends.SDL2", "Ryujinx.Audio.Backends.SDL2\Ryujinx.Audio.Backends.SDL2.csproj", "{D99A395A-8569-4DB0-B336-900647890052}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = 
"Ryujinx.Headless.SDL2", "Ryujinx.Headless.SDL2\Ryujinx.Headless.SDL2.csproj", "{390DC343-5CB4-4C79-A5DD-E3ED235E4C49}" +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Ryujinx.Headless.SDL2", "Ryujinx.Headless.SDL2\Ryujinx.Headless.SDL2.csproj", "{390DC343-5CB4-4C79-A5DD-E3ED235E4C49}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Ryujinx.Graphics.Nvdec.FFmpeg", "Ryujinx.Graphics.Nvdec.FFmpeg\Ryujinx.Graphics.Nvdec.FFmpeg.csproj", "{BEE1C184-C9A4-410B-8DFC-FB74D5C93AEB}" EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution @@ -159,10 +159,6 @@ Global {81BB2C11-9408-4EA3-822E-42987AF54429}.Debug|Any CPU.Build.0 = Debug|Any CPU {81BB2C11-9408-4EA3-822E-42987AF54429}.Release|Any CPU.ActiveCfg = Release|Any CPU {81BB2C11-9408-4EA3-822E-42987AF54429}.Release|Any CPU.Build.0 = Release|Any CPU - {990F9601-343E-46CB-8529-B498FA761A92}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {990F9601-343E-46CB-8529-B498FA761A92}.Debug|Any CPU.Build.0 = Debug|Any CPU - {990F9601-343E-46CB-8529-B498FA761A92}.Release|Any CPU.ActiveCfg = Release|Any CPU - {990F9601-343E-46CB-8529-B498FA761A92}.Release|Any CPU.Build.0 = Release|Any CPU {FD4A2C14-8E3D-4957-ABBE-3C38897B3E2D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {FD4A2C14-8E3D-4957-ABBE-3C38897B3E2D}.Debug|Any CPU.Build.0 = Debug|Any CPU {FD4A2C14-8E3D-4957-ABBE-3C38897B3E2D}.Release|Any CPU.ActiveCfg = Release|Any CPU @@ -195,6 +191,10 @@ Global {390DC343-5CB4-4C79-A5DD-E3ED235E4C49}.Debug|Any CPU.Build.0 = Debug|Any CPU {390DC343-5CB4-4C79-A5DD-E3ED235E4C49}.Release|Any CPU.ActiveCfg = Release|Any CPU {390DC343-5CB4-4C79-A5DD-E3ED235E4C49}.Release|Any CPU.Build.0 = Release|Any CPU + {BEE1C184-C9A4-410B-8DFC-FB74D5C93AEB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {BEE1C184-C9A4-410B-8DFC-FB74D5C93AEB}.Debug|Any CPU.Build.0 = Debug|Any CPU + {BEE1C184-C9A4-410B-8DFC-FB74D5C93AEB}.Release|Any CPU.ActiveCfg = Release|Any CPU + {BEE1C184-C9A4-410B-8DFC-FB74D5C93AEB}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE diff --git a/Ryujinx/Ryujinx.csproj b/Ryujinx/Ryujinx.csproj index 86afe71e..aba9b53c 100644 --- a/Ryujinx/Ryujinx.csproj +++ b/Ryujinx/Ryujinx.csproj @@ -20,7 +20,7 @@ - + -- cgit v1.2.3