RTP Sending
Here we reuse the RTP sending program written last time. RTP sending still uses H.264; H.265 will wait until the next article.
Building on that, rendering is no longer done with GDI but with SDL2, and the CPU usage is given at the end. The code used here is wrapped into a libRtpReceive library, and an SDLDraw class is added. The full code is shown below:
#pragma once
#include <windows.h> // HWND is used by draw_init below
extern "C"
{
#include "SDL2\SDL.h"
};
#pragma comment(lib,"../lib/sdl2.lib")
#define SFM_REFRESH_EVENT (SDL_USEREVENT + 1)
#define SFM_BREAK_EVENT (SDL_USEREVENT + 2)
//int g_fps = 30;
//int thread_exit = 0;
//int thread_pause = 0;
//int sfp_refresh_thread(void *opaque)
//{
// thread_exit = 0;
// thread_pause = 0;
//
// while (!thread_exit)
// {
// if (!thread_pause)
// {
// SDL_Event event;
// event.type = SFM_REFRESH_EVENT;
// SDL_PushEvent(&event);
// }
// SDL_Delay(1000 / g_fps);
// }
// thread_exit = 0;
// thread_pause = 0;
// //Break
// SDL_Event event;
// event.type = SFM_BREAK_EVENT;
// SDL_PushEvent(&event);
// return 0;
//}
class SDLDraw
{
int m_w = 0, m_h = 0;
SDL_Window *screen = NULL;
SDL_Renderer *sdlRenderer = NULL;
SDL_Texture *sdlTexture = NULL;
SDL_Rect sdlRect;
SDL_Thread *video_tid;
SDL_Event event;
struct SwsContext *img_convert_ctx = NULL;
bool m_window_init = false;
public:
int draw_init(HWND hWnd, int w, int h)
{
if (m_window_init == false)
{
SDL_Init(SDL_INIT_VIDEO);
//screen = SDL_CreateWindow("FF", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
// w, h, SDL_WINDOW_OPENGL);
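// Bind SDL to the existing Win32 window handle instead of creating a new top-level window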
screen = SDL_CreateWindowFrom((void *)(hWnd));
if (screen == NULL)
{
//printf("Window could not be created! SDL_Error: %s\n", SDL_GetError());
return -1;
}
sdlRenderer = SDL_CreateRenderer(screen, 0, SDL_RENDERER_ACCELERATED | SDL_RENDERER_PRESENTVSYNC);
SDL_SetHint(SDL_HINT_RENDER_SCALE_QUALITY, "1");
m_window_init = true;
}
return 0;
}
void draw(uint8_t *data,int w, int h)
{
if (m_w != w || m_h != h)
{
if (sdlTexture != NULL)
{
SDL_DestroyTexture(sdlTexture);
sdlTexture = NULL;
}
m_w = w;
m_h = h;
}
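// clamp the drawn height so it never exceeds a 16:9 area for the given width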
int nh = w * 9 / 16;
if (nh > h)
nh = h;
if (sdlTexture == NULL)
{
sdlTexture = SDL_CreateTexture(sdlRenderer, SDL_PIXELFORMAT_BGR24,
SDL_TEXTUREACCESS_STREAMING, w, nh);
sdlRect.x = 0;
sdlRect.y = 0;
sdlRect.w = w;
sdlRect.h = nh;
}
//sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
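// The disabled branch below would upload the frame bottom-up (pointer to the last row,
// negative pitch); the active branch uploads the buffer as-is with a normal positive pitch.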
if (0)
{
uint8_t *ndata = data + w * (nh - 1) * 3;
SDL_UpdateTexture(sdlTexture, &sdlRect, ndata ,-w * 3);
}
else
{
uint8_t *ndata = data ;
SDL_UpdateTexture(sdlTexture, &sdlRect, ndata, w*3);
}
SDL_RenderClear(sdlRenderer);
SDL_RenderCopy(sdlRenderer, sdlTexture, NULL, NULL);
SDL_RenderPresent(sdlRenderer);
//video_tid = SDL_CreateThread(sfp_refresh_thread, NULL, NULL);
}
SDLDraw();
~SDLDraw();
};
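Before moving on, here is a minimal sketch of how SDLDraw is meant to be driven from a decoder. The callback name, the window handle and the RGB24 buffer are illustrative assumptions, not part of libRtpReceive:
#include <windows.h>
#include "SDLDraw.h"
// Hypothetical per-frame hook: hWndVideo is an existing child window,
// rgb24 holds one decoded RGB24 frame of size w x h.
void on_decoded_frame(SDLDraw &drawer, HWND hWndVideo, uint8_t *rgb24, int w, int h)
{
    drawer.draw_init(hWndVideo, w, h); // does its work only on the first call
    drawer.draw(rgb24, w, h);          // upload the frame and present it
}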
The SDLDraw class above is very simple and compact, meant only as an example. Next is the c_drawrgb24 class, which renders through either GDI or SDL2.
#include "common.h"
#include "SDLDraw.h"
class c_drawrgb24
{
public:
c_drawrgb24(void);
~c_drawrgb24(void);
protected:
void CreateDoubleBuffer(HDC hdc1, int cxClient1 ,int cyClient1,HDC hdc2,int cxClient2,int cyClient2);
public:
void Draw2(HWND hWnd,HWND hWnd2, unsigned char * buffer, int SrcW, int SrcH);
void DrawSDL(HWND hWnd, unsigned char * buffer, int SrcW, int SrcH);
//void DrawPInP(HWND hWnd
public:
//flip the image vertically
void SetVertial();
private:
LPBITMAPINFO m_lpBmpInfo;
bool m_bInit;
HBITMAP _hBm1;
HDC _hdc_buffer1;
HBITMAP _hBm2;
HDC _hdc_buffer2;
SDLDraw _sdldraw;
};
#ifdef _WIN32
#include <windows.h>
#endif
#include "c_drawrgb24.h"
c_drawrgb24::c_drawrgb24(void):
m_bInit(false),
m_lpBmpInfo(NULL),
_hBm1(NULL),
_hdc_buffer1(NULL),
_hBm2(NULL),
_hdc_buffer2(NULL)
{
}
c_drawrgb24::~c_drawrgb24(void)
{
if(m_lpBmpInfo)
delete m_lpBmpInfo;
}
void c_drawrgb24::SetVertial()
{
if(m_lpBmpInfo!=NULL)
m_lpBmpInfo->bmiHeader.biHeight = 0-m_lpBmpInfo->bmiHeader.biHeight;
}
void c_drawrgb24::CreateDoubleBuffer(HDC hdc1, int cxClient1 ,int cyClient1,HDC hdc2,int cxClient2,int cyClient2)
{
//create the off-screen bitmaps
if(hdc1!=NULL)
{
_hBm1 = CreateCompatibleBitmap(hdc1,cxClient1,cyClient1);
//create a memory DC compatible with hdc1
_hdc_buffer1 = CreateCompatibleDC(hdc1);
SelectObject(_hdc_buffer1,_hBm1);
}
if(hdc2!=NULL)
{
_hBm2 = CreateCompatibleBitmap(hdc2,cxClient2,cyClient2);
//create a memory DC compatible with hdc2
_hdc_buffer2 = CreateCompatibleDC(hdc2);
SelectObject(_hdc_buffer2,_hBm2);
}
}
void c_drawrgb24::Draw2(HWND hWnd, HWND hWnd2,unsigned char * buffer, int SrcW, int SrcH)
{
HDC hDCDst1 = NULL;
HDC hDCDst2 = NULL;
RECT destRect1;
RECT destRect2;
if(hWnd!=NULL)
{
hDCDst1 = GetDC(hWnd);
GetClientRect(hWnd,&destRect1);
}
if(hWnd2!=NULL)
{
hDCDst2 = GetDC(hWnd2);
GetClientRect(hWnd2,&destRect2);
}
if(!m_bInit)
{
m_bInit = true;
m_lpBmpInfo=new BITMAPINFO;
m_lpBmpInfo->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
m_lpBmpInfo->bmiHeader.biWidth = SrcW;
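// a negative biHeight marks a top-down DIB: the first row in the buffer is the top of the image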
m_lpBmpInfo->bmiHeader.biHeight= -SrcH;
m_lpBmpInfo->bmiHeader.biPlanes= 1;
m_lpBmpInfo->bmiHeader.biBitCount = 24;
m_lpBmpInfo->bmiHeader.biCompression = 0;
m_lpBmpInfo->bmiHeader.biSizeImage = 0;
m_lpBmpInfo->bmiHeader.biXPelsPerMeter = 0;
m_lpBmpInfo->bmiHeader.biYPelsPerMeter = 0;
m_lpBmpInfo->bmiHeader.biClrUsed=0;
m_lpBmpInfo->bmiHeader.biClrImportant = 0;
//CDC * dc = CDC::FromHandle(hDCDst);
//m_pMemDC = new CMemDC(*dc,DestRect);
}
if(hDCDst1!=NULL)
{
int DstWidth = destRect1.right-destRect1.left;
int DstHeight = destRect1.bottom- destRect1.top;
SetStretchBltMode(hDCDst1,STRETCH_HALFTONE);
::StretchDIBits(
//m_pMemDC->GetDC().GetSafeHdc(),
hDCDst1,
0, 0, DstWidth, DstHeight,
0, 0, SrcW, SrcH,
buffer, m_lpBmpInfo, DIB_RGB_COLORS, SRCCOPY );
ReleaseDC(hWnd,hDCDst1);
}
if(hDCDst2!=NULL)
{
int DstWidth = destRect2.right-destRect2.left;
int DstHeight = destRect2.bottom- destRect2.top;
SetStretchBltMode(hDCDst2,STRETCH_HALFTONE);
::StretchDIBits(
//m_pMemDC->GetDC().GetSafeHdc(),
hDCDst2,
0, 0, DstWidth, DstHeight,
0, 0, SrcW, SrcH,
buffer, m_lpBmpInfo, DIB_RGB_COLORS, SRCCOPY );
ReleaseDC(hWnd2,hDCDst2);
}
}
void c_drawrgb24::DrawSDL(HWND hWnd, unsigned char * buffer, int SrcW, int SrcH)
{
_sdldraw.draw_init(hWnd, SrcW, SrcH);
_sdldraw.draw(buffer, SrcW, SrcH);
}
Both methods are shown at once: the large picture is rendered with SDL2 and the small one in the top-right corner with GDI. In practice neither of them uses much CPU so far.
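As a sketch of that setup (the two window handles and the render_frame callback are hypothetical; only DrawSDL and Draw2 come from the class above):
#include <windows.h>
#include "c_drawrgb24.h"
c_drawrgb24 g_draw;
// Hypothetical per-frame render step: main window via SDL2,
// a small picture-in-picture window in the top-right corner via GDI.
void render_frame(HWND hWndMain, HWND hWndPip, unsigned char *rgb24, int w, int h)
{
    g_draw.DrawSDL(hWndMain, rgb24, w, h);    // large picture, SDL2 texture path
    g_draw.Draw2(hWndPip, NULL, rgb24, w, h); // small GDI copy, second HWND left unused
}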
Encoding takes some CPU and decoding is practically zero, because the resolution is not large and my CPU has 20 cores, so there is nothing to worry about.
All of the code has already been uploaded; the address can be found in my other RTP-related articles. I will keep revising it and adding more features: 1. H.265 2. AAC audio 3. screenshots 4. … Just keep following me.