RTSP video decoding, analysis, and forwarding to HTTP-FLV / WS-FLV / WebRTC
Posted by qianbo_insist
Overview
Because this code is not fully finished, the complete source is not posted here; if you need it, send me your email address and I will send you the code. The most important parts are all shown below, however. The figure (not reproduced here) shows receiving video from an rtsp server, playing it, analysing it, and forwarding flv.
Once the framework is finished, analysis can either call OpenCV directly from C++ or hand off to Python; I will decide after thinking it through.
RTSP decoding
Receiving uses live555, running in its own thread:
class c_rtspthread : public c_thread
{
    int v_headlen = 0;
    c_rtsp *v_rtsp = nullptr;
    // 32-bit hash value
    uint32_t v_key = 0; // hash(live/1001);
    uint32_t _recv_stamp = 0;
    uint32_t _first_stamp = 0;
    sp_buffer _spbuffer;
    c_flvserver *v_flv;
    std::string v_livename; // live/1001
private:
    // used by the decoder
    AVCodec *v_codec = NULL;
    AVCodecContext *v_codecctx = NULL;
    AVFrame *v_frame = NULL;
    c_analyse *v_analyse = NULL;
    int do_decode_init(const char *name, const char *codec);
    int do_decode_unit();
    int width()
    {
        if (v_codecctx != NULL)
            return v_codecctx->width;
        return 0;
    }
    int height()
    {
        if (v_codecctx != NULL)
            return v_codecctx->height;
        return 0;
    }
    int v_width = 0;
    int v_height = 0;
    int v_fps = 0;
    int v_towidth = 0;
    int v_toheight = 0;
    int decode2YUV(uint8_t *src,
                   int srcLen,
                   uint8_t *destYuv,
                   int destw, int desth);
    void decode2RGB(uint8_t *src, int &srcLen);
    struct SwsContext *_img_convert_ctx = NULL;
public:
    void init_start(c_flvserver *flv, const char *url, const char *livename, int towidth, int toheight, uint32_t key);
    int callback(const char *flag, uint8_t *data, long size, uint32_t ts);
    // override the Stop function
    void Stop();
    // reconnect when the connection drops
    void Run();
};
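For orientation, here is a minimal sketch of how this class might be driven. The hash() helper and the c_flvserver construction are assumptions, since only their call sites appear above:

// Hypothetical wiring of the pieces above; hash() and the c_flvserver
// constructor are assumed, only their usage is visible in this post.
c_flvserver flv;
c_rtspthread rtsp_thread;
uint32_t key = hash("live/1001");              // 32-bit hash of the live name
rtsp_thread.init_start(&flv,
                       "rtsp://192.168.1.64:8554/ch1", // example source url
                       "live/1001",            // published stream name
                       640, 480,               // target width/height for analysis
                       key);
// ... later, on shutdown:
rtsp_thread.Stop();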
Analysis with OpenCV
To stick with the OpenCV that everyone uses, OpenCV is called directly here (calling it from Python still needs consideration). Tying FFmpeg's AVFrame to OpenCV's cv::Mat mainly comes down to the following lines:
AVFrame *dframe = av_frame_alloc();
cv::Mat nmat;
nmat.create(cv::Size(w, h), CV_8UC3);
//printf("frame %3d\n", v_codecctx->frame_number);
// align = 1 so dframe->linesize matches the Mat's packed BGR rows (w * 3 bytes)
av_image_fill_arrays(dframe->data, dframe->linesize, nmat.data, AV_PIX_FMT_BGR24,
                     w, h, 1);
When decoding, bind the cv::Mat and the AVFrame to the same memory directly, rather than copying back and forth. In practice an OpenCV Mat is normally in BGR order anyway; if you need a grayscale image, decode to YUV and just take the Y plane.
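For that grayscale case, a minimal sketch under the same zero-copy idea: wrap the Y plane of a decoded YUV420P frame in a cv::Mat header. Here frame stands for the output of avcodec_receive_frame, as in decode2YUV below:

// Zero-copy grayscale view of a YUV420P frame: data[0] is the Y plane,
// linesize[0] its stride in bytes.
cv::Mat y_plane(cv::Size(frame->width, frame->height), CV_8UC1,
                frame->data[0], frame->linesize[0]);
// y_plane only borrows the AVFrame's memory; clone() it if the Mat must
// outlive the frame.
cv::Mat gray = y_plane.clone();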
int c_rtspthread::decode2YUV(uint8_t *src, int srcLen, uint8_t *destYuv, int destw, int desth)
{
    AVPacket pkt;
    av_init_packet(&pkt);
    pkt.data = src;
    pkt.size = srcLen;
    int ret = avcodec_send_packet(v_codecctx, &pkt);
    av_packet_unref(&pkt);
    if (ret < 0)
    {
        fprintf(stderr, "Error sending a packet for decoding\n");
        return -1;
    }
    // one packet may yield more than one frame, so drain in a loop
    while (ret >= 0) {
        AVFrame *frame = av_frame_alloc();
        ret = avcodec_receive_frame(v_codecctx, frame);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
        {
            av_frame_free(&frame);
            return 0;
        }
        else if (ret < 0) {
            av_frame_free(&frame);
            //fprintf(stderr, "Error during decoding\n");
            return 0;
        }
        //printf("frame %3d\n", v_codecctx->frame_number);
        if (v_analyse != NULL)
        {
            // hands the allocated frame to the analyser, which is
            // expected to free it when done
            v_analyse->pushdata2(frame);
        }
    }
#if 0
    // disabled fallback: scale into the caller-provided destYuv buffer
    if (_img_convert_ctx == NULL)
    {
        if (v_destframe == NULL)
            v_destframe = av_frame_alloc();
        if (destw == 0)
            destw = Width;
        if (desth == 0)
            desth = Height;
        av_image_fill_arrays(v_destframe->data, v_destframe->linesize, destYuv, AV_PIX_FMT_YUV420P, destw, desth, 1);
        _img_convert_ctx = sws_getContext(Width, Height,
            _codecCtx->pix_fmt, //PIX_FMT_YUV420P,
            destw,
            desth,
            AV_PIX_FMT_YUV420P,
            SWS_POINT,
            //SWS_BICUBIC,
            NULL,
            NULL,
            NULL);
    }
    sws_scale(_img_convert_ctx, _Frame->data, _Frame->linesize, 0, Height, _yuvFrame->data, _yuvFrame->linesize);
#endif
    return -1;
}
void c_rtspthread::decode2RGB(uint8_t *src, int &srcLen)
{
    AVPacket pkt;
    av_init_packet(&pkt);
    pkt.data = src;
    pkt.size = srcLen;
    int ret = avcodec_send_packet(v_codecctx, &pkt);
    av_packet_unref(&pkt);
    if (ret < 0)
    {
        fprintf(stderr, "Error sending a packet for decoding\n");
        return;
    }
    while (ret >= 0)
    {
        ret = avcodec_receive_frame(v_codecctx, v_frame);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
        {
            break;
        }
        else if (ret < 0) {
            //fprintf(stderr, "Error during decoding\n");
            break;
        }
        int w = v_towidth;  //v_frame->width;
        int h = v_toheight; //v_frame->height;
        if (_img_convert_ctx == NULL)
        {
            _img_convert_ctx = sws_getContext(v_frame->width, v_frame->height,
                v_codecctx->pix_fmt /*AV_PIX_FMT_YUV420P*/,
                w,
                h,
                AV_PIX_FMT_BGR24,
                //SWS_POINT,
                SWS_BICUBIC,
                NULL,
                NULL,
                NULL);
        }
        AVFrame *dframe = av_frame_alloc();
        cv::Mat nmat;
        nmat.create(cv::Size(w, h), CV_8UC3);
        //printf("frame %3d\n", v_codecctx->frame_number);
        // align = 1 so dframe->linesize matches the Mat's packed rows
        av_image_fill_arrays(dframe->data, dframe->linesize, nmat.data, AV_PIX_FMT_BGR24,
            w, h, 1);
        sws_scale(_img_convert_ctx, v_frame->data, v_frame->linesize, 0,
            v_frame->height,
            dframe->data, dframe->linesize);
        if (v_analyse != NULL)
        {
            v_analyse->pushdata(nmat);
        }
        // frees only the AVFrame struct; the pixel buffer belongs to nmat
        av_frame_free(&dframe);
    }
}
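The internals of c_analyse are not shown in this post. Purely to illustrate the hand-off, a hypothetical pushdata could queue the Mat for a worker thread like this; the mutex, queue, and condition variable members are all assumptions:

// Hypothetical sketch of the analysis side; not the author's actual class.
void c_analyse::pushdata(cv::Mat &frame)
{
    std::lock_guard<std::mutex> lock(v_mutex); // assumed std::mutex member
    // storing by value is safe: decode2RGB creates a fresh nmat per frame,
    // and cv::Mat's reference counting keeps the pixel buffer alive
    v_queue.push(frame);                       // assumed std::queue<cv::Mat>
    v_cond.notify_one();                       // assumed std::condition_variable
}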
Forwarding FLV
This part can be done in two ways: push the stream to an existing flv server, or act as the flv server yourself. In terms of efficiency, being the flv server is the preferred option. Start by building an http server with boost coroutines, because a websocket server is built on top of an http server.
class c_http_session : public std::enable_shared_from_this<c_http_session>
{
public:
    uint32_t v_key = 0;
    uint32_t v_start_ts = 0;
    tcp::socket v_socket;
    int v_has_send_meta = 0;
    int v_has_send_video = 0;
    int v_has_send_audio = 0;
    int v_has_sent_key_frame = 0;
    asio::strand<asio::io_context::executor_type> v_strand;
    void close()
    {
        if (v_socket.is_open())
            v_socket.close();
        /*if (v_key > 0)
            c_flvhubs::instance()->pop(v_key, shared_from_this());*/
    }
public:
    bool func_hand_shake(boost::asio::yield_context &yield)
    {
        return false;
    }
    void go()
    {
        auto self(shared_from_this());
        boost::asio::spawn(v_strand,
            [this, self](boost::asio::yield_context yield)
            {
                //try
                //{
                //timer_.expires_from_now(std::chrono::seconds(10));
                if (func_hand_shake(yield) == false)
                {
                    std::cout << "not hand shake" << std::endl;
                    return;
                }
                for (;;)
                {
                    //bool ret = func_recv_message(yield);
                    /*if (!ret)
                    {
                        close();
                        break;
                    }*/
                }
                //}
                //catch (std::exception& e)
                //{
                //    std::cout << "some is error:" << e.what() << std::endl;
                //    close();
                //    //timer_.cancel();
                //}
            });
    }
};
The class above is not used as-is, because once you write the websocket server you also have to cover the http server case. In fact, an http server sends less data than a websocket one, apart from the initial headers: a websocket server has to send the size of each frame back to the peer, which in effect solves TCP's message-framing ("sticky packet") problem. Then again, the FLV tag header already carries this data length, so http-flv can simply send the data directly.
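To make that concrete: an http-flv session only has to emit the 9-byte FLV file header once, after which every tag carries its own 11-byte header whose DataSize field tells the player how long the payload is. A sketch of both, following the FLV specification (all multi-byte fields big-endian):

// FLV file header: "FLV", version 1, flags 0x05 (audio + video),
// header size 9, followed by PreviousTagSize0 = 0.
static const uint8_t flv_file_header[13] = {
    'F', 'L', 'V', 0x01, 0x05, 0x00, 0x00, 0x00, 0x09,
    0x00, 0x00, 0x00, 0x00
};

// 11-byte FLV tag header. type: 8 = audio, 9 = video, 18 = script data.
void write_flv_tag_header(uint8_t out[11], uint8_t type,
                          uint32_t data_size, uint32_t timestamp_ms)
{
    out[0] = type;
    out[1] = (data_size >> 16) & 0xff;     // DataSize, 24 bits
    out[2] = (data_size >> 8) & 0xff;
    out[3] = data_size & 0xff;
    out[4] = (timestamp_ms >> 16) & 0xff;  // Timestamp, lower 24 bits
    out[5] = (timestamp_ms >> 8) & 0xff;
    out[6] = timestamp_ms & 0xff;
    out[7] = (timestamp_ms >> 24) & 0xff;  // TimestampExtended, upper 8 bits
    out[8] = out[9] = out[10] = 0;         // StreamID, always 0
}
// Each tag is followed by a 4-byte big-endian PreviousTagSize (11 + data_size).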
**Per RFC 6455,** once you understand the principle and the header layout you can build a compact websocket server. Note that the data a browser sends is masked (XOR-masked, not truly encrypted), so it has to be unmasked once on receipt. Since the whole process is an interaction with the browser, it is easy to debug; write a small debugging page in javascript, as follows.
This html can display the images the server sends back, so it is useful as a debugging tool.
<!DOCTYPE HTML>
<html>
<head>
    <meta charset="utf-8">
    <title></title>
</head>
<body>
    <div id="imgDiv"></div>
    <div id="sse">
        <a href="javascript:WebSocketTest()">Run WebSocket</a>
    </div>
    <script type="text/javascript">
        function init() {
            canvas = document.createElement('canvas');
            content = canvas.getContext('2d');
            canvas.width = 320;
            canvas.height = 240;
            content.scale(1, -1);
            content.translate(0, -240);
            document.body.appendChild(canvas);
            // container.appendChild(canvas);
            img = new Image();
            img.src = "bg1.jpg";
            canvas.style.position = 'absolute';
            img.onload = function () {
                content.drawImage(img, 0, 0, canvas.width, canvas.height);
                //URL.revokeObjectURL(url);
                //imgDate = content.getImageData(0, 0, canvas.width, canvas.height);
                //createPotCloud(); // create the point cloud
            };
        }
        init();
        function WebSocketTest() {
            if ("WebSocket" in window) {
                // open a websocket
                var ws = new WebSocket("ws://127.0.0.1:9000/live/image");
                console.log(ws);
                ws.onopen = function (evt) {
                    console.log("connected");
                    /*let obj = JSON.stringify({
                        test: "qianbo0423"
                    })
                    ws.send(obj);*/
                };
                ws.onmessage = function (evt) {
                    if (typeof (evt.data) == "string") {
                        //textHandler(JSON.parse(evt.data));
                    } else {
                        // binary frame: turn it into a data url and show it
                        var reader = new FileReader();
                        reader.onload = function (evt) {
                            if (evt.target.readyState == FileReader.DONE) {
                                var url = evt.target.result;
                                img.src = url;
                                //var imga = document.getElementById("imgDiv");
                                //imga.innerHTML = "<img src = " + url + " />";
                            }
                        }
                        reader.readAsDataURL(evt.data);
                    }
                };
                ws.onclose = function () {
                    alert("Connection closed...");
                };
            } else {
                alert("Your browser does not support WebSocket!");
            }
        }
    </script>
</body>
</html>
Below is the websocket server code. It is fairly compact; as a first version it does not yet handle everything, so readers will need to add their own error handling. I am currently working on serving http and websocket on the same port, with proper error handling.
class c_ws_session : public std::enable_shared_from_this<c_ws_session>
{
private:
    // these two members are implied by the constructor below
    tcp::socket v_socket;
    asio::strand<asio::io_context::executor_type> v_strand;
    void SetSendBufferSize(int nSize)
    {
        boost::asio::socket_base::send_buffer_size size_option(nSize);
        v_socket.set_option(size_option);
    }
public:
    //do not need this, we just need the key
    //std::string v_app_stream;
    uint32_t v_key = 0;
    //time stamp record, different for every session
    uint32_t v_start_ts = 0;
public:
    explicit c_ws_session(boost::asio::io_context& io_context, tcp::socket socket)
        : v_socket(std::move(socket)),
          /*timer_(io_context),*/
          v_strand(io_context.get_executor())
    {
        SetSendBufferSize(1 * 1024 * 1024);
    }
    /*
    The handshake from the client looks as follows:
    GET /chat HTTP/1.1
    Host: server.example.com
    Upgrade: websocket
    Connection: Upgrade
    Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==
    Origin: http://example.com
    Sec-WebSocket-Protocol: chat, superchat
    Sec-WebSocket-Version: 13

    GET /chat HTTP/1.1
    Host: 127.0.0.1:9000
    Connection: Upgrade
    Pragma: no-cache
    Cache-Control: no-cache
    Upgrade: websocket
    Origin: file://
    Sec-WebSocket-Version: 13
    User-Agent: Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.104 Safari/537.36
    Accept-Encoding: gzip, deflate, br
    Accept-Language: zh-CN,zh;q=0.8
    Sec-WebSocket-Key: 1M9Y1T8iMgTLepYQGDFoxg==
    Sec-WebSocket-Extensions: permessage-deflate; client_max_window_bits
    */
    bool func_hand_shake(boost::asio::yield_context &yield)
    {
        DEFINE_EC
        asio::streambuf content_;
        size_t length = asio::async_read_until(v_socket, content_, "\r\n\r\n", yield[ec]);
        ERROR_RETURN_FALSE
        asio::streambuf::const_buffers_type bufs = content_.data();
        std::string lines(asio::buffers_begin(bufs), asio::buffers_begin(bufs) + length);
        //c_header_map hmap;
        //fetch_head_info(lines, hmap, v_app_stream);
        //the url length must not exceed 1024
        char buf[1024];
        fetch_head_get(lines.c_str(), buf, 1023);
        //v_app_stream = buf;
        cout << "get:" << buf << endl; // e.g. live/1001 -- an rtmp server expects this form
        std::string response, key, encrypted_key;
        //find the GET line
        size_t n = lines.find_first_of('\r');
        //find the Sec-WebSocket-Key
        size_t pos = lines.find("Sec-WebSocket-Key");
        if (pos == lines.npos)
            return false;
        size_t end = lines.find("\r\n", pos);
        key = lines.substr(pos + 19, end - pos - 19); // 19 = strlen("Sec-WebSocket-Key: ")
        // ... the original post is cut off here; building encrypted_key and
        // sending the 101 response back are sketched below
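Since the listing breaks off, here is a hedged sketch of the two pieces RFC 6455 still requires to finish this server: computing Sec-WebSocket-Accept (SHA-1 of the client key concatenated with the fixed GUID, then base64), and framing server-to-client binary messages (server frames are never masked; client frames must be unmasked with their 4-byte key). The sha1_base64 helper is an assumption, e.g. built on boost::uuids::detail::sha1 plus any base64 encoder:

// Sec-WebSocket-Accept per RFC 6455 section 4.2.2; the GUID is fixed by the RFC.
// sha1_base64() is an assumed helper, not shown here.
std::string make_accept_key(const std::string &sec_websocket_key)
{
    static const char GUID[] = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11";
    return sha1_base64(sec_websocket_key + GUID);
    // reply with:
    // HTTP/1.1 101 Switching Protocols\r\n
    // Upgrade: websocket\r\nConnection: Upgrade\r\n
    // Sec-WebSocket-Accept: <result>\r\n\r\n
}

// Header for a server->client binary frame (FIN = 1, opcode 0x2, no mask).
// Returns the header length actually used: 2, 4 or 10 bytes.
size_t make_ws_frame_header(uint8_t out[10], uint64_t payload_len)
{
    out[0] = 0x82;                         // FIN + binary opcode
    if (payload_len < 126) {
        out[1] = (uint8_t)payload_len;     // 7-bit length
        return 2;
    }
    if (payload_len <= 0xffff) {
        out[1] = 126;                      // 16-bit extended length follows
        out[2] = (payload_len >> 8) & 0xff;
        out[3] = payload_len & 0xff;
        return 4;
    }
    out[1] = 127;                          // 64-bit extended length follows
    for (int i = 0; i < 8; ++i)
        out[2 + i] = (uint8_t)(payload_len >> (8 * (7 - i)));
    return 10;
}

// Client->server payloads are XOR-masked (RFC 6455 section 5.3).
void unmask(uint8_t *payload, size_t len, const uint8_t mask[4])
{
    for (size_t i = 0; i < len; ++i)
        payload[i] ^= mask[i % 4];
}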