Set up entry point and integrated MoonWorks functionality

This commit is contained in:
2024-07-05 14:32:58 +02:00
parent e7a4a862be
commit 8334a24fd1
116 changed files with 16988 additions and 3 deletions

View File

@ -0,0 +1,67 @@
using System;
using System.IO;
using Nerfed.Runtime.Graphics;
namespace Nerfed.Runtime.Video;
/// <summary>
/// Represents an AV1 video file in .obu (open bitstream unit) format that can be
/// decoded and rendered by a VideoPlayer.
/// </summary>
public unsafe class VideoAV1 : GraphicsResource
{
    /// <summary>Path of the video file this instance was created from.</summary>
    public string Filename { get; }

    /// <summary>Width of the luma (Y) plane in pixels.</summary>
    public int Width => width;

    /// <summary>Height of the luma (Y) plane in pixels.</summary>
    public int Height => height;

    /// <summary>
    /// Playback framerate. Supplied by the caller because the raw .obu bitstream
    /// carries no timing information of its own.
    /// </summary>
    public double FramesPerSecond { get; set; }

    /// <summary>YUV chroma subsampling layout reported by dav1dfile.</summary>
    public Dav1dfile.PixelLayout PixelLayout => pixelLayout;

    /// <summary>Width of the chroma (U/V) planes, derived from the pixel layout.</summary>
    public int UVWidth { get; }

    /// <summary>Height of the chroma (U/V) planes, derived from the pixel layout.</summary>
    public int UVHeight { get; }

    private int width;
    private int height;
    private Dav1dfile.PixelLayout pixelLayout;

    /// <summary>
    /// Opens an AV1 file so it can be loaded by VideoPlayer. You must also provide a playback framerate.
    /// </summary>
    /// <param name="device">Graphics device that owns this resource.</param>
    /// <param name="filename">Path to the AV1 .obu file.</param>
    /// <param name="framesPerSecond">Playback framerate for the stream.</param>
    /// <exception cref="ArgumentException">The file does not exist.</exception>
    /// <exception cref="IOException">dav1dfile failed to open the file.</exception>
    /// <exception cref="NotSupportedException">The pixel layout is not I420, I422 or I444.</exception>
    public VideoAV1(GraphicsDevice device, string filename, double framesPerSecond) : base(device)
    {
        if (!File.Exists(filename))
        {
            throw new ArgumentException("Video file not found!", nameof(filename));
        }

        // Open the file once just to probe dimensions and pixel layout; the
        // stream used for actual playback is opened separately by VideoAV1Stream.
        if (Dav1dfile.df_fopen(filename, out IntPtr handle) == 0)
        {
            // IOException (rather than bare Exception) so callers can catch a
            // meaningful type; still compatible with existing catch (Exception).
            throw new IOException("Failed to open video file!");
        }

        Dav1dfile.df_videoinfo(handle, out width, out height, out pixelLayout);
        Dav1dfile.df_close(handle);

        // Chroma plane dimensions depend on the subsampling scheme.
        switch (pixelLayout)
        {
            case Dav1dfile.PixelLayout.I420: // chroma halved in both dimensions
                UVWidth = Width / 2;
                UVHeight = Height / 2;
                break;
            case Dav1dfile.PixelLayout.I422: // chroma halved horizontally only
                UVWidth = Width / 2;
                UVHeight = Height;
                break;
            case Dav1dfile.PixelLayout.I444: // full-resolution chroma
                UVWidth = Width;
                UVHeight = Height;
                break;
            default:
                throw new NotSupportedException("Unrecognized YUV format!");
        }

        FramesPerSecond = framesPerSecond;
        Filename = filename;
    }
}

View File

@ -0,0 +1,156 @@
using System;
using System.Collections.Concurrent;
using System.Threading;
using Nerfed.Runtime.Graphics;
namespace Nerfed.Runtime.Video;
// Decodes an AV1 stream on a dedicated worker thread. All public methods are
// asynchronous in the sense that they enqueue work onto that thread and return
// immediately; they do not return Tasks.
internal class VideoAV1Stream : GraphicsResource
{
// Native dav1dfile handle; IntPtr.Zero when no file is loaded.
public IntPtr Handle => handle;
IntPtr handle;
// True once a file has been successfully opened.
public bool Loaded => handle != IntPtr.Zero;
// True when the decoder has reached end-of-stream.
public bool Ended => Dav1dfile.df_eos(Handle) == 1;
// Pointers into the decoder's plane buffers for the most recently decoded frame.
// NOTE(review): presumably only valid while the native stream is open; consumers
// synchronize access by locking this instance (see ReadNextFrameHelper).
public IntPtr yDataHandle;
public IntPtr uDataHandle;
public IntPtr vDataHandle;
// Byte lengths and row strides for the luma (y) and chroma (uv) planes.
public uint yDataLength;
public uint uvDataLength;
public uint yStride;
public uint uvStride;
// Set after a successful decode; the consumer clears it once the data is copied.
public bool FrameDataUpdated { get; set; }
// Work queue consumed by the decoder thread.
private BlockingCollection<Action> Actions = new BlockingCollection<Action>();
private bool Running = false;
Thread Thread;
// Starts the decoder thread immediately; no file is loaded yet.
public VideoAV1Stream(GraphicsDevice device) : base(device)
{
handle = IntPtr.Zero;
Running = true;
Thread = new Thread(ThreadMain);
Thread.Start();
}
// Decoder thread main loop: runs queued actions in order until Dispose
// clears Running, then drains any remaining queued actions before exiting.
private void ThreadMain()
{
while (Running)
{
// block until we can take an action, then run it
Action action = Actions.Take();
action.Invoke();
}
// shutting down... drain whatever is still queued so Unload always runs.
while (Actions.TryTake(out Action action))
{
action.Invoke();
}
}
// Asynchronously opens the given file on the decoder thread.
public void Load(string filename)
{
Actions.Add(() => LoadHelper(filename));
}
// Asynchronously rewinds the stream to its beginning.
public void Reset()
{
Actions.Add(ResetHelper);
}
// Asynchronously decodes the next frame into the plane buffers.
public void ReadNextFrame()
{
Actions.Add(ReadNextFrameHelper);
}
// Asynchronously closes the native stream.
public void Unload()
{
Actions.Add(UnloadHelper);
}
// Runs on the decoder thread. No-op if a file is already loaded.
private void LoadHelper(string filename)
{
if (!Loaded)
{
if (Dav1dfile.df_fopen(filename, out handle) == 0)
{
Log.Error("Failed to load video file: " + filename);
throw new Exception("Failed to load video file!");
}
// Queue a reset so the first frame is decoded right after loading.
Reset();
}
}
// Runs on the decoder thread. Rewinds and pre-decodes the first frame.
private void ResetHelper()
{
if (Loaded)
{
Dav1dfile.df_reset(handle);
ReadNextFrame();
}
}
// Runs on the decoder thread. Decodes one frame and publishes its plane
// pointers/lengths/strides under the lock.
private void ReadNextFrameHelper()
{
if (Loaded && !Ended)
{
// NOTE(review): locks `this` because VideoPlayer synchronizes on the stream
// instance (lock (Stream)) while copying plane data; replacing this with a
// private lock object would silently break that external synchronization.
lock (this)
{
if (Dav1dfile.df_readvideo(
handle,
1,
out IntPtr yDataHandle,
out IntPtr uDataHandle,
out IntPtr vDataHandle,
out uint yDataLength,
out uint uvDataLength,
out uint yStride,
out uint uvStride) == 1
) {
this.yDataHandle = yDataHandle;
this.uDataHandle = uDataHandle;
this.vDataHandle = vDataHandle;
this.yDataLength = yDataLength;
this.uvDataLength = uvDataLength;
this.yStride = yStride;
this.uvStride = uvStride;
FrameDataUpdated = true;
}
}
}
}
// Runs on the decoder thread. Closes the native stream if open.
private void UnloadHelper()
{
if (Loaded)
{
Dav1dfile.df_close(handle);
handle = IntPtr.Zero;
}
}
// Queues an unload, stops the worker loop, and (when disposing) joins the
// thread so all pending actions finish before the resource is released.
// The queued Unload guarantees Actions.Take() wakes up so the loop can exit.
protected override void Dispose(bool disposing)
{
if (!IsDisposed)
{
Unload();
Running = false;
if (disposing)
{
Thread.Join();
}
}
base.Dispose(disposing);
}
}

View File

@ -0,0 +1,347 @@
using System;
using System.Diagnostics;
using System.Threading.Tasks;
using Nerfed.Runtime.Graphics;
namespace Nerfed.Runtime.Video;
/// <summary>
/// A structure for continuous decoding of AV1 videos and rendering them into a texture.
/// </summary>
public unsafe class VideoPlayer : GraphicsResource
{
    /// <summary>Target texture that decoded frames are composited into.</summary>
    public Texture RenderTexture { get; private set; } = null;

    /// <summary>Current playback state.</summary>
    public VideoState State { get; private set; } = VideoState.Stopped;

    /// <summary>When true, playback restarts from the beginning after the stream ends.</summary>
    public bool Loop { get; set; }

    /// <summary>Playback speed multiplier; 1 is normal speed.</summary>
    public float PlaybackSpeed { get; set; } = 1;

    private VideoAV1 Video = null;
    private VideoAV1Stream Stream { get; }

    // One texture per YUV plane; combined into RenderTexture by the video pipeline.
    private Texture yTexture = null;
    private Texture uTexture = null;
    private Texture vTexture = null;
    private Sampler LinearSampler;
    private TransferBuffer TransferBuffer;

    private int currentFrame;

    // Wall-clock playback timing; timeElapsed accumulates scaled milliseconds.
    private Stopwatch timer;
    private double lastTimestamp;
    private double timeElapsed;

    public VideoPlayer(GraphicsDevice device) : base(device)
    {
        Stream = new VideoAV1Stream(device);
        LinearSampler = new Sampler(device, SamplerCreateInfo.LinearClamp);
        timer = new Stopwatch();
    }

    /// <summary>
    /// Prepares a VideoAV1 for decoding and rendering.
    /// </summary>
    /// <param name="video">The video to load; a no-op if it is already loaded.</param>
    public void Load(VideoAV1 video)
    {
        if (Video != video)
        {
            Unload();

            // Create textures on first use, then recreate any whose size no
            // longer matches the incoming video's dimensions.
            if (RenderTexture == null)
            {
                RenderTexture = CreateRenderTexture(Device, video.Width, video.Height);
            }

            if (yTexture == null)
            {
                yTexture = CreateSubTexture(Device, video.Width, video.Height);
            }

            if (uTexture == null)
            {
                uTexture = CreateSubTexture(Device, video.UVWidth, video.UVHeight);
            }

            if (vTexture == null)
            {
                vTexture = CreateSubTexture(Device, video.UVWidth, video.UVHeight);
            }

            if (video.Width != RenderTexture.Width || video.Height != RenderTexture.Height)
            {
                RenderTexture.Dispose();
                RenderTexture = CreateRenderTexture(Device, video.Width, video.Height);
            }

            if (video.Width != yTexture.Width || video.Height != yTexture.Height)
            {
                yTexture.Dispose();
                yTexture = CreateSubTexture(Device, video.Width, video.Height);
            }

            if (video.UVWidth != uTexture.Width || video.UVHeight != uTexture.Height)
            {
                uTexture.Dispose();
                uTexture = CreateSubTexture(Device, video.UVWidth, video.UVHeight);
            }

            if (video.UVWidth != vTexture.Width || video.UVHeight != vTexture.Height)
            {
                vTexture.Dispose();
                vTexture = CreateSubTexture(Device, video.UVWidth, video.UVHeight);
            }

            Video = video;
            InitializeDav1dStream();
        }
    }

    /// <summary>
    /// Starts playing back and decoding the loaded video.
    /// </summary>
    public void Play()
    {
        if (Video == null) { return; }

        if (State == VideoState.Playing)
        {
            return;
        }

        timer.Start();
        State = VideoState.Playing;
    }

    /// <summary>
    /// Pauses playback and decoding of the currently playing video.
    /// </summary>
    public void Pause()
    {
        if (Video == null) { return; }

        if (State != VideoState.Playing)
        {
            return;
        }

        timer.Stop();
        State = VideoState.Paused;
    }

    /// <summary>
    /// Stops and resets decoding of the currently playing video.
    /// </summary>
    public void Stop()
    {
        if (Video == null) { return; }

        if (State == VideoState.Stopped)
        {
            return;
        }

        timer.Stop();
        timer.Reset();
        lastTimestamp = 0;
        timeElapsed = 0;
        ResetDav1dStreams();
        State = VideoState.Stopped;
    }

    /// <summary>
    /// Unloads the currently playing video.
    /// </summary>
    public void Unload()
    {
        if (Video == null)
        {
            return;
        }

        timer.Stop();
        timer.Reset();
        lastTimestamp = 0;
        timeElapsed = 0;
        State = VideoState.Stopped;
        Stream.Unload();
        Video = null;
    }

    /// <summary>
    /// Renders the video data into RenderTexture. Call once per frame; advances
    /// playback by the wall-clock time elapsed since the previous call, scaled
    /// by PlaybackSpeed.
    /// </summary>
    public void Render()
    {
        if (Video == null || State == VideoState.Stopped)
        {
            return;
        }

        timeElapsed += (timer.Elapsed.TotalMilliseconds - lastTimestamp) * PlaybackSpeed;
        lastTimestamp = timer.Elapsed.TotalMilliseconds;

        int thisFrame = ((int) (timeElapsed / (1000.0 / Video.FramesPerSecond)));
        if (thisFrame > currentFrame)
        {
            // Upload the most recently decoded frame (if any), then kick off
            // decoding of the next one on the stream's worker thread.
            if (Stream.FrameDataUpdated)
            {
                UpdateRenderTexture();
                Stream.FrameDataUpdated = false;
            }

            currentFrame = thisFrame;
            Stream.ReadNextFrame();
        }

        if (Stream.Ended)
        {
            timer.Stop();
            timer.Reset();
            Stream.Reset();

            if (Loop)
            {
                // Start over!
                currentFrame = -1;
                timer.Start();
            }
            else
            {
                State = VideoState.Stopped;
            }
        }
    }

    // Copies the stream's Y/U/V plane data through the transfer buffer into the
    // three plane textures, then draws them into RenderTexture via the video pipeline.
    private void UpdateRenderTexture()
    {
        uint uOffset;
        uint vOffset;
        uint yStride;
        uint uvStride;

        // Lock the stream so the decoder thread cannot overwrite the plane
        // pointers while we copy from them.
        lock (Stream)
        {
            Span<byte> ySpan = new Span<byte>((void*) Stream.yDataHandle, (int) Stream.yDataLength);
            Span<byte> uSpan = new Span<byte>((void*) Stream.uDataHandle, (int) Stream.uvDataLength);
            Span<byte> vSpan = new Span<byte>((void*) Stream.vDataHandle, (int) Stream.uvDataLength);

            if (TransferBuffer == null || TransferBuffer.Size < ySpan.Length + uSpan.Length + vSpan.Length)
            {
                TransferBuffer?.Dispose();
                TransferBuffer = new TransferBuffer(Device, TransferBufferUsage.Upload, (uint) (ySpan.Length + uSpan.Length + vSpan.Length));
            }

            // Pack the planes back-to-back: [Y][U][V].
            TransferBuffer.SetData(ySpan, 0, true);
            TransferBuffer.SetData(uSpan, (uint) ySpan.Length, false);
            TransferBuffer.SetData(vSpan, (uint) (ySpan.Length + uSpan.Length), false);

            uOffset = (uint) ySpan.Length;
            // Fix: V plane starts after Y + U. The original used vSpan.Length
            // here, which was only correct because U and V share uvDataLength.
            vOffset = (uint) (ySpan.Length + uSpan.Length);
            yStride = Stream.yStride;
            uvStride = Stream.uvStride;
        }

        CommandBuffer commandBuffer = Device.AcquireCommandBuffer();

        CopyPass copyPass = commandBuffer.BeginCopyPass();
        copyPass.UploadToTexture(
            new TextureTransferInfo(TransferBuffer, 0, yStride, yTexture.Height),
            yTexture,
            true
        );
        copyPass.UploadToTexture(
            new TextureTransferInfo(TransferBuffer, uOffset, uvStride, uTexture.Height),
            uTexture,
            true
        );
        copyPass.UploadToTexture(
            new TextureTransferInfo(TransferBuffer, vOffset, uvStride, vTexture.Height),
            vTexture,
            true
        );
        commandBuffer.EndCopyPass(copyPass);

        RenderPass renderPass = commandBuffer.BeginRenderPass(
            new ColorAttachmentInfo(RenderTexture, true, Color.Black)
        );
        renderPass.BindGraphicsPipeline(Device.VideoPipeline);
        renderPass.BindFragmentSampler(new TextureSamplerBinding(yTexture, LinearSampler), 0);
        renderPass.BindFragmentSampler(new TextureSamplerBinding(uTexture, LinearSampler), 1);
        renderPass.BindFragmentSampler(new TextureSamplerBinding(vTexture, LinearSampler), 2);
        renderPass.DrawPrimitives(0, 1);
        commandBuffer.EndRenderPass(renderPass);

        Device.Submit(commandBuffer);
    }

    // RGBA target the composited video is rendered into.
    private static Texture CreateRenderTexture(GraphicsDevice graphicsDevice, int width, int height)
    {
        return Texture.CreateTexture2D(
            graphicsDevice,
            (uint) width,
            (uint) height,
            TextureFormat.R8G8B8A8,
            TextureUsageFlags.ColorTarget | TextureUsageFlags.Sampler
        );
    }

    // Single-channel texture holding one YUV plane.
    private static Texture CreateSubTexture(GraphicsDevice graphicsDevice, int width, int height)
    {
        return Texture.CreateTexture2D(
            graphicsDevice,
            (uint) width,
            (uint) height,
            TextureFormat.R8,
            TextureUsageFlags.Sampler
        );
    }

    private void InitializeDav1dStream()
    {
        Stream.Load(Video.Filename);
        currentFrame = -1;
    }

    private void ResetDav1dStreams()
    {
        Stream.Reset();
        currentFrame = -1;
    }

    protected override void Dispose(bool disposing)
    {
        if (!IsDisposed)
        {
            if (disposing)
            {
                Unload();
                RenderTexture?.Dispose();
                yTexture?.Dispose();
                uTexture?.Dispose();
                vTexture?.Dispose();
                // Fix: also release the owned sampler, transfer buffer, and
                // decoder stream (stops/joins its worker thread); these were
                // previously leaked on dispose.
                LinearSampler?.Dispose();
                TransferBuffer?.Dispose();
                Stream?.Dispose();
            }
        }
        base.Dispose(disposing);
    }
}

View File

@ -0,0 +1,8 @@
namespace Nerfed.Runtime.Video;
/// <summary>
/// Describes the playback state of a VideoPlayer.
/// </summary>
public enum VideoState
{
/// <summary>A video is currently playing.</summary>
Playing,
/// <summary>Playback is suspended and can be resumed where it left off.</summary>
Paused,
/// <summary>No playback in progress; the next Play starts from the beginning.</summary>
Stopped
}