When implementing an RTSP or RTMP player on Windows, there are many points to consider, such as multi-instance design, compatibility across rendering modes, software and hardware decoding, snapshots, and automatic TCP/UDP switching for RTSP. Below we take a closer look at a few of these aspects.
1. Video rendering modes
When implementing playback on Windows, D3D is generally the first choice for rendering; if D3D is not supported, fall back to having the decoded data called back to the application and drawing it with GDI. The usual flow is to probe D3D first. Taking the daniulive SDK (大牛直播SDK) player (Github) as an example: call NT_SP_IsSupportD3DRender() to check whether D3D rendering is supported; if it is, call NT_SP_SetRenderWindow() and then choose whether to scale proportionally via NT_SP_SetRenderScaleMode().
bool is_support_d3d_render = false;
Int32 in_support_d3d_render = 0;
if (NT.NTBaseCodeDefine.NT_ERC_OK == NTSmartPlayerSDK.NT_SP_IsSupportD3DRender(player_handle_, playWnd.Handle, ref in_support_d3d_render))
{
if (1 == in_support_d3d_render)
{
is_support_d3d_render = true;
}
}
if (is_support_d3d_render)
{
is_gdi_render_ = false;
// D3D rendering is supported, so draw with D3D
NTSmartPlayerSDK.NT_SP_SetRenderWindow(player_handle_, playWnd.Handle);
if (btn_check_render_scale_mode.Checked)
{
NTSmartPlayerSDK.NT_SP_SetRenderScaleMode(player_handle_, 1);
}
else
{
NTSmartPlayerSDK.NT_SP_SetRenderScaleMode(player_handle_, 0);
}
}
else
{
is_gdi_render_ = true;
playWnd.Visible = false;
// D3D is not supported, so have the player call the decoded data back and draw it with GDI
//video frame callback (YUV/RGB)
//for the format, see NT_SP_E_VIDEO_FRAME_FORMAT; to receive YUV, set NT_SP_E_VIDEO_FRAME_FROMAT_I420
video_frame_call_back_ = new SP_SDKVideoFrameCallBack(SetVideoFrameCallBack);
NTSmartPlayerSDK.NT_SP_SetVideoFrameCallBack(player_handle_, (Int32)NT.NTSmartPlayerDefine.NT_SP_E_VIDEO_FRAME_FORMAT.NT_SP_E_VIDEO_FRAME_FORMAT_RGB32, IntPtr.Zero, video_frame_call_back_);
}
When D3D is not supported, the RGB data callback registered above delivers each decoded frame to SetVideoFrameCallBack(), which handles the data as follows:
public void SetVideoFrameCallBack(IntPtr handle, IntPtr userData, UInt32 status, IntPtr frame)
{
if (frame == IntPtr.Zero)
{
return;
}
//to process the RGB data directly, follow the flow below
NT_SP_VideoFrame video_frame = (NT_SP_VideoFrame)Marshal.PtrToStructure(frame, typeof(NT_SP_VideoFrame));
NT_SP_VideoFrame pVideoFrame = new NT_SP_VideoFrame();
pVideoFrame.format_ = video_frame.format_;
pVideoFrame.width_ = video_frame.width_;
pVideoFrame.height_ = video_frame.height_;
pVideoFrame.timestamp_ = video_frame.timestamp_;
pVideoFrame.stride0_ = video_frame.stride0_;
pVideoFrame.stride1_ = video_frame.stride1_;
pVideoFrame.stride2_ = video_frame.stride2_;
pVideoFrame.stride3_ = video_frame.stride3_;
Int32 argb_size = video_frame.stride0_ * video_frame.height_; // RGB32: stride * height bytes per frame
pVideoFrame.plane0_ = Marshal.AllocHGlobal(argb_size); // copy out of the SDK-owned buffer
CopyMemory(pVideoFrame.plane0_, video_frame.plane0_, (UInt32)argb_size);
// hand the copy to the UI thread when called back on the SDK's worker thread
if (playWnd.InvokeRequired)
{
BeginInvoke(set_video_frame_call_back_, status, pVideoFrame);
}
else
{
set_video_frame_call_back_(status, pVideoFrame);
}
}
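Two pieces referenced above are not shown in the snippet: the CopyMemory P/Invoke and the UI-thread delegate behind set_video_frame_call_back_. A minimal sketch of both, assuming cur_video_frame_ is the field the Paint handler reads and that the previous buffer must be freed to avoid leaking unmanaged memory (the handler name, the invalidation region and the exact declarations are illustrative and may differ from the demo):
// Win32 memcpy-style P/Invoke used by the callback above
[DllImport("kernel32.dll", EntryPoint = "RtlMoveMemory")]
public static extern void CopyMemory(IntPtr dest, IntPtr src, UInt32 count);
// hypothetical UI-thread handler bound to set_video_frame_call_back_
private void OnVideoFrameOnUIThread(UInt32 status, NT_SP_VideoFrame frame)
{
    if (cur_video_frame_.plane0_ != IntPtr.Zero)
    {
        Marshal.FreeHGlobal(cur_video_frame_.plane0_); // release the previous copy
    }
    cur_video_frame_ = frame; // keep the latest frame for the Paint handler
    // repaint only the display area
    Invalidate(new Rectangle(playWnd.Left, playWnd.Top, this.Width - 60, this.Height - playWnd.Top - 60), false);
}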
Drawing is then done in the form's Paint handler:
private void SmartPlayerForm_Paint(object sender, PaintEventArgs e)
{
if (player_handle_ == IntPtr.Zero || !is_gdi_render_ || !is_playing_)
{
return;
}
if (cur_video_frame_.plane0_ == IntPtr.Zero)
{
return;
}
Bitmap bitmap = new Bitmap(cur_video_frame_.width_, cur_video_frame_.height_, cur_video_frame_.stride0_,
System.Drawing.Imaging.PixelFormat.Format32bppRgb, cur_video_frame_.plane0_);
int image_width = cur_video_frame_.width_;
int image_height = cur_video_frame_.height_;
Graphics g = e.Graphics; // the form's drawing surface
g.SmoothingMode = SmoothingMode.HighSpeed;
int limit_w = this.Width - 60;
int limit_h = this.Height - playWnd.Top - 60;
if (btn_check_render_scale_mode.Checked)
{
int d_w = 0, d_h = 0;
int left_offset = 0;
int top_offset = 0;
Brush brush = new SolidBrush(Color.Black);
g.FillRectangle(brush, playWnd.Left, playWnd.Top, limit_w, limit_h); // clear the display area to black
brush.Dispose();
GetRenderRect(limit_w, limit_h, image_width, image_height, ref left_offset, ref top_offset, ref d_w, ref d_h);
g.DrawImage(bitmap, playWnd.Left + left_offset, playWnd.Top + top_offset, d_w, d_h); // draw the in-memory frame, letterboxed to keep the aspect ratio
}
else
{
g.DrawImage(bitmap, playWnd.Left, playWnd.Top, limit_w, limit_h); // draw the in-memory frame stretched to the display area
}
bitmap.Dispose(); // the Bitmap wraps the frame buffer; release the GDI+ object every paint
}
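GetRenderRect() used above is a local helper rather than an SDK call, and the demo's version is not shown here; a minimal sketch of the letterbox calculation it is assumed to perform:
private void GetRenderRect(int limit_w, int limit_h, int image_w, int image_h,
    ref int left_offset, ref int top_offset, ref int dst_w, ref int dst_h)
{
    if (limit_w < 1 || limit_h < 1 || image_w < 1 || image_h < 1)
        return;
    // fit the image into the display area while keeping its aspect ratio
    double scale = Math.Min((double)limit_w / image_w, (double)limit_h / image_h);
    dst_w = Math.Max(1, (int)(image_w * scale));
    dst_h = Math.Max(1, (int)(image_h * scale));
    left_offset = (limit_w - dst_w) / 2; // center horizontally
    top_offset = (limit_h - dst_h) / 2;  // center vertically
}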
2. Hardware decoding on specific machines
Hardware decoding on Windows is mainly aimed at PCs with weaker CPUs or at scenarios that need many simultaneous streams; as long as software decoding keeps up, we generally recommend sticking with it. The recommended handling is to first check whether the system supports hardware decoding and enable it only if it does, so that machines without hardware support simply fall back to software decoding. Do the check before calling NT_SP_Open(); since each handle returned by NT_SP_Open() corresponds to one player instance, the check only needs to run once even when multiple instances are created:
is_support_h264_hardware_decoder_ = NT.NTBaseCodeDefine.NT_ERC_OK == NT.NTSmartPlayerSDK.NT_SP_IsSupportH264HardwareDecoder();
is_support_h265_hardware_decoder_ = NT.NTBaseCodeDefine.NT_ERC_OK == NT.NTSmartPlayerSDK.NT_SP_IsSupportH265HardwareDecoder();
if (player_handle_ == IntPtr.Zero)
{
player_handle_ = new IntPtr();
UInt32 ret_open = NTSmartPlayerSDK.NT_SP_Open(out player_handle_, IntPtr.Zero, 0, IntPtr.Zero);
if (ret_open != 0)
{
player_handle_ = IntPtr.Zero;
MessageBox.Show("調用NT_SP_Open失敗..");
return;
}
}
Before starting playback, configure hardware decoding:
if (checkBox_hardware_decoder.Checked)
{
NTSmartPlayerSDK.NT_SP_SetH264HardwareDecoder(player_handle_, is_support_h264_hardware_decoder_ ? 1 : 0, 0);
NTSmartPlayerSDK.NT_SP_SetH265HardwareDecoder(player_handle_, is_support_h265_hardware_decoder_ ? 1 : 0, 0);
}
else
{
NTSmartPlayerSDK.NT_SP_SetH264HardwareDecoder(player_handle_, 0, 0);
NTSmartPlayerSDK.NT_SP_SetH265HardwareDecoder(player_handle_, 0, 0);
}
3. Decoding key frames only
Decoding only key frames again targets multi-stream playback, for example typical surveillance walls. With many streams on screen and a reasonably short key-frame interval (say one key frame every 1-2 seconds), the operator still gets a good overall picture of the site; when a few streams need closer attention, the option can be switched off on the fly to resume full-frame playback. For this to be useful, key-frame-only decoding must be exposed as an interface that can be toggled in real time during playback.
// set whether to decode only video key frames
if (btn_check_only_decode_video_key_frame.Checked)
{
NTSmartPlayerSDK.NT_SP_SetOnlyDecodeVideoKeyFrame(player_handle_, 1);
}
else
{
NTSmartPlayerSDK.NT_SP_SetOnlyDecodeVideoKeyFrame(player_handle_, 0);
}
4. Video view rotation
Many field engineers run into this problem: some cameras are mounted at the wrong angle, so the picture comes out sideways or upside down and is awkward to watch. When there are many devices on site, re-installing every one of them is not realistic, and this is where real-time view rotation proves its value:
/*
 * Set rotation, clockwise
 * degress: only 0, 90, 180 and 270 degrees are valid; other values are ignored
 * Note: any angle other than 0 costs extra CPU during playback
 * Returns NT_ERC_OK on success
 */
[DllImport(@"SmartPlayerSDK.dll")]
public static extern UInt32 NT_SP_SetRotation(IntPtr handle, Int32 degress);
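A usage sketch, assuming the declaration above sits in the NTSmartPlayerSDK wrapper class (the 90-degree value and the error handling are illustrative):
// rotate the view 90 degrees clockwise; only 0/90/180/270 are accepted
UInt32 rotate_ret = NTSmartPlayerSDK.NT_SP_SetRotation(player_handle_, 90);
if (NT.NTBaseCodeDefine.NT_ERC_OK != rotate_ret)
{
    // rotation was not applied; handle the error as needed
}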
Note that any non-zero view rotation consumes some additional CPU.
5. Real-time snapshot
Needless to say, real-time snapshots are a must-have feature for any good RTSP or RTMP player. A snapshot re-encodes the decoded YUV data into a PNG, so it carries some CPU cost; avoid triggering it too frequently. The implementation is as follows:
if ( String.IsNullOrEmpty(capture_image_path_) )
{
MessageBox.Show("請先設定儲存截圖檔案的目錄! 點選截圖左邊的按鈕設定!");
return;
}
if ( player_handle_ == IntPtr.Zero )
{
return;
}
if ( !is_playing_)
{
MessageBox.Show("請在播放狀态下截圖!");
return;
}
String name = capture_image_path_ + "\\" + DateTime.Now.ToString("HH-mm-ss") + ".png"; // 24-hour format avoids AM/PM name collisions
byte[] buffer1 = Encoding.Default.GetBytes(name);
byte[] buffer2 = Encoding.Convert(Encoding.Default, Encoding.UTF8, buffer1, 0, buffer1.Length); // the SDK expects a UTF-8 path
byte[] buffer3 = new byte[buffer2.Length + 1]; // one extra byte for the terminating NUL
buffer3[buffer2.Length] = 0;
Array.Copy(buffer2, buffer3, buffer2.Length);
IntPtr file_name_ptr = Marshal.AllocHGlobal(buffer3.Length);
Marshal.Copy(buffer3, 0, file_name_ptr, buffer3.Length);
capture_image_call_back_ = new SP_SDKCaptureImageCallBack(SDKCaptureImageCallBack);
UInt32 ret = NTSmartPlayerSDK.NT_SP_CaptureImage(player_handle_, file_name_ptr, IntPtr.Zero, capture_image_call_back_);
Marshal.FreeHGlobal(file_name_ptr);
if (NT.NTBaseCodeDefine.NT_ERC_OK == ret)
{
// the snapshot request was submitted successfully
}
else if ((UInt32)NT.NTSmartPlayerDefine.SP_E_ERROR_CODE.NT_ERC_SP_TOO_MANY_CAPTURE_IMAGE_REQUESTS == ret)
{
// ask the user to retry later
MessageBox.Show("Too many capture image requests!");
}
else
{
// other failure codes
}
The snapshot callback converts the UTF-8 file name back to a managed string and dispatches the result to the UI thread:
public void SDKCaptureImageCallBack(IntPtr handle, IntPtr userData, UInt32 result, IntPtr file_name)
{
if (file_name == IntPtr.Zero)
return;
// find the terminating NUL to get the length of the UTF-8 string
int index = 0;
while (true)
{
if (0 == Marshal.ReadByte(file_name, index))
break;
index++;
}
byte[] file_name_buffer = new byte[index];
Marshal.Copy(file_name, file_name_buffer, 0, index);
byte[] dst_buffer = Encoding.Convert(Encoding.UTF8, Encoding.Default, file_name_buffer, 0, file_name_buffer.Length);
String image_name = Encoding.Default.GetString(dst_buffer, 0, dst_buffer.Length);
if (playWnd.InvokeRequired)
{
BeginInvoke(set_capture_image_call_back_, result, image_name);
}
else
{
set_capture_image_call_back_(result, image_name);
}
}
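As with the video frame path, the UI-side delegate behind set_capture_image_call_back_ is not shown; a minimal sketch that simply reports the result (the handler name and the messages are illustrative):
// hypothetical UI-thread handler bound to set_capture_image_call_back_
private void OnCaptureImageOnUIThread(UInt32 result, String image_name)
{
    if (NT.NTBaseCodeDefine.NT_ERC_OK == result)
    {
        MessageBox.Show("Snapshot saved to: " + image_name);
    }
    else
    {
        MessageBox.Show("Snapshot failed, error code: " + result);
    }
}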
In follow-up posts we will take a closer look at other aspects of RTSP and RTMP player design; stay tuned.