WinUI: Magewell Video Capture Development Notes

Published: 2025-06-10

Background

We needed to fuse live video with the airway and lesion models extracted from the patient's CT, so that we could check in real time whether the bronchoscope is in the correct position.

Development Environment

Hardware: Magewell USB Capture HDMI Gen 2

IDE: Visual Studio 2022

Framework: .NET 6, WinUI 3

Packages: MVVM Toolkit (CommunityToolkit.Mvvm), NLog

A Quick Look at the Magewell Demo Source

I looked through the demo source that Magewell provides (download MWCaptureSDK 3.3.1.1513 from the official site; after installation it can be found in the install directory). It ships WinForms samples only.

Below is the core playback loop from the demo (the audio path is shown; the video loop in the sample is structured the same way):

void audio_play()
{
    Console.WriteLine("audio_play in");
    while (m_capturing)
    {
        // Pull the next frame from the ring buffer.
        CRingBuffer.st_frame_t frame = m_capture.m_audio_buffer.get_frame_to_render();
        if (frame.buffer_len == 0)
        {
            // Nothing ready yet; back off briefly instead of spinning.
            Thread.Sleep(5);
            continue;
        }
        // Hand the frame to the DirectSound renderer.
        LibMWMedia.MWDSoundRendererPushFrame(m_dsound_render, frame.p_buffer, frame.frame_len);
    }
    m_capture.m_audio_buffer.stop_render();
    Console.WriteLine("audio_play out");
}

On the video side, m_d3d_renderer is initialized with the Handle of the host Form. That means that when the video loop pushes a frame to the renderer, in the same way that

LibMWMedia.MWDSoundRendererPushFrame(m_dsound_render, frame.p_buffer, frame.frame_len);

pushes audio above, the frame's data is drawn onto the Form that owns the Handle. In other words, for each image frame of the video stream, the raw data lives in frame.p_buffer.

In WinUI 3, only a Window exposes a window handle. Rendering that way would mean managing multiple Windows, which is not recommended; displaying the video inside a Page is a better fit.
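For reference, the handle of a WinUI 3 Window can be obtained like this (a minimal sketch; WindowHelper and the window parameter are illustrative names):

internal static class WindowHelper
{
    // Only Window objects are backed by an HWND in WinUI 3;
    // Pages and other controls are not.
    internal static IntPtr GetHandle(Microsoft.UI.Xaml.Window window)
        => WinRT.Interop.WindowNative.GetWindowHandle(window);
}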

Testing showed that building on the raw video stream is feasible. One approach is to convert each buffered frame (YUY2) into a bitmap (i.e., YUY2 to BGRA) and draw the image on the UI; as long as the frame rate stays above roughly 24 fps, playback looks smooth.

The YUY2-to-BGRA conversion class:

    /// <summary>
    /// Converts YUY2 frames to BGRA format.
    /// </summary>
    internal class YUY2ToBGRA
    {
        internal static void ConvertYUY2ToBGRA(byte[] inputPtr, byte[] outputPtr)
        {
            // Each iteration consumes 4 input bytes (2 pixels) and writes 8 output bytes.
            for (int i = 0, j = 0; i < inputPtr.Length; i += 4, j += 8)
            {
                // Read the YUY2 data (every 4 bytes hold 2 pixels: Y0 U Y1 V).
                byte y0 = inputPtr[i];
                byte u = inputPtr[i + 1];
                byte y1 = inputPtr[i + 2];
                byte v = inputPtr[i + 3];

                // Convert to RGB (simplified).
                YUVToRGB(y0, u, v, out byte r0, out byte g0, out byte b0);
                YUVToRGB(y1, u, v, out byte r1, out byte g1, out byte b1);

                // Write out in BGRA order.
                outputPtr[j] = b0;     // B
                outputPtr[j + 1] = g0; // G
                outputPtr[j + 2] = r0; // R
                outputPtr[j + 3] = 255; // A

                outputPtr[j + 4] = b1;
                outputPtr[j + 5] = g1;
                outputPtr[j + 6] = r1;
                outputPtr[j + 7] = 255;
            }
        }

        // YUV to RGB conversion
        private static void YUVToRGB(byte y, byte u, byte v, out byte r, out byte g, out byte b)
        {
            // Normalize the YUV values (limited/TV range).
            double yD = (y - 16) / 219.0;
            double uD = (u - 128) / 224.0;
            double vD = (v - 128) / 224.0;

            // Conversion matrix (BT.601).
            r = (byte)(255 * Math.Clamp(yD + 1.402 * vD, 0, 1));
            g = (byte)(255 * Math.Clamp(yD - 0.344 * uD - 0.714 * vD, 0, 1));
            b = (byte)(255 * Math.Clamp(yD + 1.772 * uD, 0, 1));
        }
    }
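For a 1920*1080 frame the input is Width * Height * 2 bytes and the output Width * Height * 4 bytes. A quick usage sketch (the buffer sizes are the only assumption):

    // One YUY2 frame in, one BGRA frame out.
    byte[] yuy2Frame = new byte[1920 * 1080 * 2];
    byte[] bgraFrame = new byte[1920 * 1080 * 4];
    YUY2ToBGRA.ConvertYUY2ToBGRA(yuy2Frame, bgraFrame);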

Following the Magewell sample, I added the video capture class below:

    /// <summary>
    /// Video capture.
    /// </summary>
    internal class VedioCapture
    {
        Boolean m_capturing = false;
        protected CMWCapture m_capture = null; 
        protected IntPtr m_d3d_renderer = IntPtr.Zero;
        private byte[] conversionBuffer; // buffer used for the format conversion
        // The video format is assumed to be YUY2 (default 1920x1080).
        private int Width = 1920;
        private int Height = 1080;
        //private const int BytesPerPixel = 2; // YUY2 is 2 bytes/pixel

        /// <summary>
        /// Starts capturing.
        /// </summary>
        /// <param name="index">Capture device index.</param>
        /// <returns>true if capture started successfully.</returns>
        internal Boolean Start_capture(Int32 index = 0)
        {
            if (m_capturing)
            {
                return true;
            }
            m_capture = CMWCapture.mw_capture_factory(index);
            if (null == m_capture)
            {
                return false;
            }
            if (!m_capture.set_device(index))
            {
                return false;
            }
            if (!m_capture.start_capture(true, false))
            {
                return false;
            }
            m_capturing = true;

            m_capture.get_mw_fourcc(out uint mw_fourcc);
            m_capture.get_mirror_and_reverse(out bool mirror, out bool reverse);
            m_capture.get_resolution(out Width, out Height);

            NlogHelper.Logger.Info($"获取分辨率,Width: {Width}, Height: {Height}");
            // Kick off the playback loop on a background thread.
            Task.Run(() =>
            {
                _ = Video_play();
            });
            return true;
        }


        /// <summary>
        /// Video playback loop.
        /// </summary>
        /// <returns>A task representing the playback loop.</returns>
        async Task Video_play()
        {
            NlogHelper.Logger.Info("video_play in");
            conversionBuffer = new byte[Width * Height * 4];
            while (m_capturing)
            {
                CRingBuffer.st_frame_t frame = m_capture.m_video_buffer.get_frame_to_render();
                if (frame.buffer_len == 0)
                {
                    // No frame ready yet; yield briefly instead of spinning the CPU.
                    await Task.Delay(5);
                    continue;
                }
                UpdateFrame(frame.p_buffer);

                await Task.Delay(5);
            }

            NlogHelper.Logger.Info("video_play out");
        }

        /// <summary>
        /// Stops capturing video.
        /// </summary>
        internal void Stop_capture()
        {
            if (!m_capturing)
            {
                return;
            }
            m_capturing = false;

            if (m_capture != null)
            {
                // Blank the last buffered frame so the UI does not keep showing a stale image.
                CRingBuffer.st_frame_t out_frame = m_capture.m_video_buffer.get_buffer_by_index(0);
                Array.Clear(out_frame.p_buffer, 0, out_frame.p_buffer.Length);
                m_capture.m_video_buffer.stop_render();
                m_capture.Dispose();
                m_capture = null;
            }
        }

        public void UpdateFrame(byte[] rawData)
        {
            if (rawData.Length != Width * Height * 2)
            {
                // Frame size does not match the expected YUY2 layout; skip it.
                NlogHelper.Logger.Info($"Unexpected rawData.Length {rawData.Length}");
                return;
            }

            // Convert YUY2 to BGRA, then publish the converted frame to the UI.
            YUY2ToBGRA.ConvertYUY2ToBGRA(rawData, conversionBuffer);
            WeakReferenceMessenger.Default.Send(conversionBuffer, "VedioCaptureResult");
        }


    }
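Note that conversionBuffer is reused for every frame, so the UI may still be reading it while the next frame is being written into it. If tearing becomes visible, sending a snapshot avoids the race (a defensive sketch, not part of the original code):

            // Send a copy so the UI never observes a half-written frame.
            byte[] snapshot = (byte[])conversionBuffer.Clone();
            WeakReferenceMessenger.Default.Send(snapshot, "VedioCaptureResult");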

In the code above, the capture hardware's default output resolution is 1920*1080. If you need to change it, this can be configured in the installed Magewell utility.

It is called from the ViewModel as follows:

private void StartCapture()
{
    CMWCapture.Init();
    CMWCapture.RefreshDevices();
    int m_channel_count = CMWCapture.GetChannelCount();

    if (m_channel_count == 0)
    {
        ToUIMessage.SendMessage("GpuEncodeGui", "Can't find capture devices!");
    }
    else
    {
        VedioCapture = new();
        VedioCapture.Start_capture();
        InitSegment();
    }
}

Because CMWCapture wraps unmanaged resources, they must be released manually when you are done. The ViewModel does this as follows:

[RelayCommand]
private void ReleaseResource()
{
    VedioCapture?.Stop_capture();
    WeakReferenceMessenger.Default.UnregisterAll(this);
}

VedioCapture is the VedioCapture instance constructed when the ViewModel is initialized.

Handling the Video Stream in WinUI

The UI first needs to import the Win2D namespace xmlns:win2d="using:Microsoft.Graphics.Canvas.UI.Xaml" and add a CanvasSwapChainPanel:

<win2d:CanvasSwapChainPanel
    x:Name="swapChainPanel"
    Width="1920"
    Height="1080"
    VerticalAlignment="Top" />

The page's code-behind registers for the capture results:

 WeakReferenceMessenger.Default.Register<byte[], string>(this, "VedioCaptureResult", (r, conversionBuffer) =>
 {
     UpdateFrame(conversionBuffer);
 });

The UpdateFrame method called above draws each frame:

 private void UpdateFrame(byte[] conversionBuffer)
 {
     using (var drawingSession = swapChain.CreateDrawingSession(Colors.Black))
     using (var bitmap = CanvasBitmap.CreateFromBytes(
      canvasDevice,
      conversionBuffer,
      width,
      height,
      DirectXPixelFormat.B8G8R8A8UIntNormalized))
     {
         // Draw the converted frame into the swap chain.
         drawingSession.DrawImage(bitmap);
     }
     // Present the back buffer to the SwapChainPanel.
     swapChain.Present();
 }
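UpdateFrame relies on swapChain, canvasDevice, width, and height fields whose initialization is not shown. A minimal sketch, assuming those field names and that width/height match the capture resolution (requires using Microsoft.Graphics.Canvas):

 private CanvasDevice canvasDevice;
 private CanvasSwapChain swapChain;
 private readonly int width = 1920;  // must match the capture resolution
 private readonly int height = 1080;

 private void InitSwapChain()
 {
     canvasDevice = CanvasDevice.GetSharedDevice();
     // 96 DPI gives a 1:1 mapping between buffer pixels and layout pixels.
     swapChain = new CanvasSwapChain(canvasDevice, width, height, 96);
     swapChainPanel.SwapChain = swapChain;
 }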

Note: the code above uses the Messenger from the MVVM Toolkit. When leaving the Page, all messenger registrations must be released, e.g. WeakReferenceMessenger.Default.UnregisterAll(this); otherwise the Page will never be released and memory will leak.
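A natural place to do this is the Page's OnNavigatedFrom override (a sketch; it assumes the registration above used this page as the recipient):

protected override void OnNavigatedFrom(Microsoft.UI.Xaml.Navigation.NavigationEventArgs e)
{
    base.OnNavigatedFrom(e);
    // Drop every messenger registration so the Page can be garbage-collected.
    WeakReferenceMessenger.Default.UnregisterAll(this);
}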