2012-04-23 49 views
1

事實上,這是一種技術的混搭,但我的問題(我認爲)的答案最接近於Direct3D 9.我掛鉤到一個任意的D3D9應用程序,在大多數情況下它是一個遊戲,並注入我自己的代碼來模仿EndScene函數的行爲。後臺緩衝區被複制到一個表面中,該表面被設置爲指向推送源DirectShow過濾器中的位圖。過濾器以25 fps採樣位圖並將視頻流傳輸到.avi文件。遊戲屏幕上顯示的文字覆蓋圖告訴用戶有關應該停止遊戲捕捉的熱鍵組合,但此覆蓋圖不應顯示在再現的視頻中。除了一個令人討厭的事實外,一切都快速而美麗。在隨機的場合,帶有文字疊加的幀會偶爾混入再現的視頻。這是一個不希望出現的僞影,最終用戶只想看到他在視頻中的遊戲玩法,而沒有其他任何東西。我很想聽聽有沒有人可以分享爲什麼會發生這種情況。這裏是EndScene鉤子的源代碼:掛鉤IDirect3DDevice9 :: EndScene方法捕捉遊戲視頻:無法擺脫錄製的視頻中的文本覆蓋

using System; 
using SlimDX; 
using SlimDX.Direct3D9; 
using System.Diagnostics; 
using DirectShowLib; 
using System.Runtime.InteropServices; 

[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)] 
[System.Security.SuppressUnmanagedCodeSecurity] 
[Guid("EA2829B9-F644-4341-B3CF-82FF92FD7C20")] 

public interface IScene 
{ 
    unsafe int PassMemoryPtr(void* ptr, bool noheaders); 
    int SetBITMAPINFO([MarshalAs(UnmanagedType.LPArray, SizeParamIndex = 1)]byte[] ptr, bool noheaders); 
} 

public class Class1
{
    // Guards all capture state touched inside the hooked EndScene.
    readonly object _lockRenderTarget = new object();

    // Overlay text drawn on the game screen; it must NOT end up in the video.
    public string StatusMess { get; set; }

    // System-memory copy of the back buffer, shared with the push source filter.
    Surface _renderTarget;
    //points to image bytes
    unsafe void* bytesptr;
    //used to store headers AND image bytes
    byte[] bytes;

    // DirectShow graph objects, built once on the first grabbed frame.
    IFilterGraph2 ifg2;
    ICaptureGraphBuilder2 icgb2;
    IBaseFilter push;
    IBaseFilter compressor; // NOTE(review): never assigned in this listing — confirm it is used elsewhere
    IScene scene;
    IBaseFilter mux;
    IFileSinkFilter sink;
    IMediaControl media;
    bool NeedRunGraphInit = true;
    bool NeedRunGraphClean = true;
    DataStream s;
    DataRectangle dr;

    /// <summary>
    /// Replacement body for the hooked IDirect3DDevice9::EndScene. Copies the
    /// back buffer into a system-memory surface that a DirectShow push source
    /// filter streams into an .avi file, draws the status overlay, then calls
    /// the device's real EndScene.
    /// </summary>
    /// <param name="devicePtr">Raw pointer to the game's IDirect3DDevice9.</param>
    /// <returns>The HRESULT code of the device's EndScene call.</returns>
    unsafe int EndSceneHook(IntPtr devicePtr)
    {
        int hr;

        using (Device device = Device.FromPointer(devicePtr))
        {
            try
            {
                lock (_lockRenderTarget)
                {
                    bool TimeToGrabFrame = false;

                    //....
                    //logic based on elapsed milliseconds deciding if it is time to grab another frame

                    if (TimeToGrabFrame)
                    {
                        //First ensure we have a Surface to render target data into
                        //(created only once, sized/formatted like the back buffer)
                        if (_renderTarget == null)
                        {
                            //Create offscreen surface to use as copy of render target data
                            using (SwapChain sc = device.GetSwapChain(0))
                            {
                                //Att: created in system memory, not in video memory,
                                //so the CPU-side DirectShow filter can read the pixels.
                                _renderTarget = Surface.CreateOffscreenPlain(device, sc.PresentParameters.BackBufferWidth, sc.PresentParameters.BackBufferHeight, sc.PresentParameters.BackBufferFormat, Pool.SystemMemory);
                            } //end using
                        } // end if

                        using (Surface backBuffer = device.GetBackBuffer(0, 0))
                        {
                            //Main action: the Direct3D 9 back buffer is copied into
                            //Surface _renderTarget, which is connected by reference
                            //to DirectShow's bitmap capture filter. Inside the filter
                            //(code not in this listing) the bitmap is periodically
                            //scanned to create a streaming video.
                            //NOTE(review): some D3D9 apps do not refresh the back
                            //buffer between EndScene calls, so a frame still carrying
                            //the previous frame's text overlay can occasionally be
                            //captured here — see the accompanying discussion.
                            device.GetRenderTargetData(backBuffer, _renderTarget);

                            if (NeedRunGraphInit) //ran only once
                            {
                                ifg2 = (IFilterGraph2)new FilterGraph();
                                icgb2 = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
                                icgb2.SetFiltergraph(ifg2);
                                push = (IBaseFilter)new PushSourceFilter();
                                scene = (IScene)push;

                                //this way we get bitmapfile and bitmapinfo headers
                                //ToStream is slow, but run it only once to get the headers
                                s = Surface.ToStream(_renderTarget, ImageFileFormat.Bmp);
                                bytes = new byte[s.Length];
                                s.Read(bytes, 0, (int)s.Length);
                                hr = scene.SetBITMAPINFO(bytes, false);

                                //we just supplied the header to the PushSource filter;
                                //now pass a reference to just the image bytes from LockRectangle.
                                //NOTE(review): the data pointer is used after UnlockRectangle —
                                //this relies on the surface living in system memory so the
                                //address stays valid; confirm this holds for all drivers.
                                dr = _renderTarget.LockRectangle(LockFlags.None);
                                s = dr.Data;
                                _renderTarget.UnlockRectangle();
                                bytesptr = s.DataPointer.ToPointer();
                                hr = scene.PassMemoryPtr(bytesptr, true);

                                //continue building the graph
                                ifg2.AddFilter(push, "MyPushSource");

                                //BUGFIX: the original literal "C:\foo.avi" contained the
                                //escape sequence \f (form feed), producing a broken path;
                                //a verbatim string keeps the backslash intact.
                                icgb2.SetOutputFileName(MediaSubType.Avi, @"C:\foo.avi", out mux, out sink);

                                icgb2.RenderStream(null, null, push, null, mux);

                                media = (IMediaControl)ifg2;

                                media.Run();

                                NeedRunGraphInit = false;
                                NeedRunGraphClean = true;

                                StatusMess = "now capturing, press shift-F11 to stop";
                            } //end if

                        } // end using backbuffer
                    } // end if Time to grab frame

                } //end lock
            } // end try

            //It is usually thrown when the user makes the game window inactive,
            //or it is thrown deliberately when time is up, or the user pressed
            //F11 and it resulted in stopping a capture. If it is thrown for
            //another reason, it is still a good idea to stop recording and
            //free the graph.
            catch (Exception ex)
            {
                //..
                //stop the DirectShow graph and cleanup

            } // end catch

            //draw overlay
            //BUGFIX: the GDI font was previously created inline and never
            //disposed, leaking a GDI handle on every hooked EndScene call.
            using (var gdiFont = new System.Drawing.Font("Times New Roman", 26.0f, FontStyle.Bold))
            using (SlimDX.Direct3D9.Font font = new SlimDX.Direct3D9.Font(device, gdiFont))
            {
                font.DrawString(null, StatusMess, 20, 100, System.Drawing.Color.FromArgb(255, 255, 255, 255));
            }

            return device.EndScene().Code;

        } // end using device

    } //end EndSceneHook
}

回答

0

最終,我自己找到了這個問題的答案,寫在這裏供有興趣的人蔘考。事實證明,每次調用掛鉤的EndScene時,某些Direct3D9應用程序中的後臺緩衝區不一定會刷新。因此,有時帶有前一次EndScene掛鉤調用所繪文字覆蓋的後臺緩衝區會被傳遞給負責收集輸入幀的DirectShow源過濾器。我開始用已知RGB值的3像素小覆蓋圖在每個幀上加蓋標記,並在將幀傳遞給DirectShow過濾器之前檢查此虛擬覆蓋圖是否仍存在。如果疊加層還在,則傳遞先前緩存的幀而不是當前的幀。該方法有效地從DirectShow圖表錄製的視頻中消除了文本疊加。