1,download source
https://github.com/rgaufman/live555
2,编译
live$ ./genMakefiles linux-gdb
gdb 为添加 -g 功能 通过config.xxxxx 文件可以判断 在config.linux-gdb中添加-DDEBUG
3 结构图
RTSPServer.cpp 为rtsp服务端结构,里面有rtsp协议解析,以及socket/bind/listen/accept/read/write的操作
通过testProgs/testOnDemandRTSPServer.cpp,可以查看基本的ServerMediaSession的各种操作
{
// Excerpt from testProgs/testOnDemandRTSPServer.cpp: serve the file
// "test.264" under the stream name "h264ESVideoTest".
// (env, descriptionString, reuseFirstSource, rtspServer are defined
// earlier in that file — not shown here.)
char const* streamName = "h264ESVideoTest";
char const* inputFileName = "test.264";
// One ServerMediaSession per stream (i.e. per rtsp:// URL).
ServerMediaSession* sms = ServerMediaSession::createNew(*env,
streamName, streamName, descriptionString);
// One subsession per track; here a single H264 video track from a file.
sms->addSubsession(
H264VideoFileServerMediaSubsession::createNew(*env,
inputFileName, reuseFirstSource));
rtspServer->addServerMediaSession(sms);
announceStream(rtspServer, sms, streamName, inputFileName);
}
在mediaServer/DynamicRTSPServer.cpp,继承RTSPServerSupportingHTTPStreaming:public RTSPServer {}
主要实现virtual ServerMediaSession *lookupServerMediaSession(....) 当需要播放时,RTSPServer就会调用,返回一个ServerMediaSession对象.
当同时有视频和音频时,需要同时向ServerMediaSession中添加Subsession
这里实现一个循环播放H264文件功能
// Subclass that loops an H264 file: only createNewStreamSource() is
// redefined, so the base class keeps all RTSP/SDP handling and we swap in
// a looping byte-stream source.
class H264VideoFileServerMediaSubsessionLoop: public H264VideoFileServerMediaSubsession {
protected: // redefined virtual functions
// The key override: called by live555 whenever a client starts streaming;
// returns the FramedSource that will feed the RTP sink.
virtual FramedSource* createNewStreamSource(unsigned clientSessionId,
unsigned& estBitrate);
}; // FIX: a C++ class definition must end with ';' (missing in the original notes)
//函数实现
// Creates the per-client stream source: a looping byte-stream reader wrapped
// in an H.264 framer that splits the raw bytes into NAL units.
FramedSource* H264VideoFileServerMediaSubsessionLoop::createNewStreamSource(unsigned /*clientSessionId*/, unsigned& estBitrate) {
estBitrate = 500; // kbps, rough estimate reported to the caller

// Open the looping raw byte source for the configured file.
ByteStreamFileSourceLoop* source = ByteStreamFileSourceLoop::createNew(envir(), fFileName);
if (source == NULL) {
return NULL;
}
fFileSize = source->fileSize();

// Wrap the byte stream in a framer that parses it into H.264 NAL units.
return H264VideoStreamFramer::createNew(envir(), source);
}
接下来实现ByteStreamFileSourceLoop
//复制ByteStreamFileSource.cpp 重命名 ByteStreamFileSourceLoop
// Copy of ByteStreamFileSource.cpp renamed to ByteStreamFileSourceLoop:
// reads the file as a raw byte stream, but rewinds at EOF so playback loops.
class ByteStreamFileSourceLoop: public FramedFileSource {
private:
// The key method to reimplement: delivers the next chunk of file bytes.
virtual void doGetNextFrame();
};
void ByteStreamFileSourceLoop::doGetNextFrame() {
// The modification: where the stock ByteStreamFileSource would stop at
// EOF / read error / byte limit, seek back to the start of the file so
// the stream loops forever.
if (feof(fFid) || ferror(fFid) || (fLimitNumBytesToStream && fNumBytesToStream == 0)) {
fseek(fFid, 0, SEEK_SET);
}
.......... // (rest of the original ByteStreamFileSource::doGetNextFrame body unchanged)
}
handleClosure(); // signals end-of-stream to live555 (stops delivery)
// Schedules FramedSource::afterGetting(this) through the task scheduler,
// i.e. deferred to the event loop — do NOT drop this line when copying.
nextTask() = envir().taskScheduler().scheduleDelayedTask(0,
(TaskFunc*)FramedSource::afterGetting, this);
FramedSource::afterGetting(this); // same effect, but invoked synchronously — use ONE of the two, not both
好了,基本的循环播放H264文件就成功了
接下来使用ffmpeg读取H264文件
// Called once at program startup (registers all FFmpeg (de)muxers).
av_register_all();
// Open the input file and probe its streams.
AVFormatContext *fmt_ctx = NULL;
int video_stream;
int audio_stream;
AVBitStreamFilterContext* h264bsfc = NULL;
// NOTE(review): the return codes below are stored but never checked —
// a failed open/probe should abort before the calls that follow.
result = avformat_open_input(&fmt_ctx, fileName, NULL, NULL);
result = avformat_find_stream_info(fmt_ctx, NULL);
// Locate the video stream index.
video_stream = av_find_best_stream(fmt_ctx, AVMEDIA_TYPE_VIDEO, -1, -1,
NULL, 0);
// Locate the audio stream index.
audio_stream = av_find_best_stream(fmt_ctx, AVMEDIA_TYPE_AUDIO, -1, -1,
NULL, 0);
// Bitstream filter that converts MP4 (AVCC) H264 to Annex-B (adds SPS/PPS
// and start codes) — needed only for MP4 input.
h264bsfc = av_bitstream_filter_init("h264_mp4toannexb");
// Log the video dimensions.
// NOTE(review): streams[i]->codec is deprecated in newer FFmpeg; the
// replacement is streams[i]->codecpar — confirm against the FFmpeg version in use.
AVCodecContext *origin_ctx = NULL;
origin_ctx = fmt_ctx->streams[video_stream]->codec;
log_d("video_stream:%d", video_stream);
log_d("height:%d", origin_ctx->height);
log_d("width:%d", origin_ctx->width);
/*** Read one frame — this code belongs in StreamFileSource::doGetNextFrame() *****/
AVPacket packet;
AVCodecContext *origin_ctx = NULL;
av_init_packet(&packet);
while (1) {
	if ((result = av_read_frame(fmt_ctx, &packet)) != 0) {
		// End of file (or read error): tell live555 this source is finished.
		handleClosure();
		return;
	}
	origin_ctx = fmt_ctx->streams[packet.stream_index]->codec;
	if (origin_ctx->codec_id == AV_CODEC_ID_H264) {
		if (strstr(ffileName, ".mp4")) { // MP4 container: convert AVCC to Annex-B H264
			// NOTE(review): the filter may allocate a new packet.data buffer
			// that av_free_packet() does not release — free it (av_free) after
			// the copy to avoid a per-frame leak. TODO confirm against the
			// FFmpeg av_bitstream_filter_filter documentation.
			av_bitstream_filter_filter(h264bsfc, origin_ctx, NULL,
					&packet.data, &packet.size,
					packet.data, packet.size, 0);
		}
		// FIX: never copy more than fMaxSize bytes into fTo (the original
		// memcpy'd packet.size unconditionally — buffer overflow), and report
		// overflow via fNumTruncatedBytes the way live555 expects:
		// fNumTruncatedBytes = bytes dropped, NOT the whole frame size
		// (the original set fNumTruncatedBytes = fFrameSize after the loop).
		if ((unsigned)packet.size > fMaxSize) {
			fFrameSize = fMaxSize;
			fNumTruncatedBytes = packet.size - fMaxSize;
		} else {
			fFrameSize = packet.size;
			fNumTruncatedBytes = 0;
		}
		memcpy(fTo, packet.data, fFrameSize); // copy the frame into live555's buffer
		av_free_packet(&packet); // must free the packet
		break;
	}
	av_free_packet(&packet); // must free the packet (non-H264 packets too)
}
gettimeofday(&fPresentationTime, NULL); // wall-clock presentation time
fDurationInMicroseconds = 0;            // deliver as fast as the sink requests
FramedSource::afterGetting(this);       // hand the frame back to live555
// Teardown — run once when the source is destroyed, NOT after every frame:
av_bitstream_filter_close(h264bsfc);
avformat_close_input(&fmt_ctx);
关于FramedSource类,用于读取数据帧 virtual void doGetNextFrame() = 0;
unsigned char* fTo; // in 输出帧数据保存位置
unsigned fMaxSize; // in 输出帧最大值
unsigned fFrameSize; // out 输出帧大小
unsigned fNumTruncatedBytes; // out 被截断的字节数: 当 fFrameSize 超过 fMaxSize 时为 fFrameSize-fMaxSize, 否则为 0
struct timeval fPresentationTime; // out 时间
unsigned fDurationInMicroseconds; // out 控制播放速度
当你的数据大于fTo的fMaxSize,就需要把剩下的数据帧缓存起来,下次在doGetNextFrame()时,先读取缓存数据到fTo
当然Live555会出现数据过长错误.
参考 :https://blog.csdn.net/sunxiaopengsun/article/details/56834457
需要修改
virtual unsigned maxFrameSize() const;
或者修改
#define BANK_SIZE 150000
live/liveMedia/StreamParser.cpp
ServerMediaSession *sms添加到RTSPServer中,Subsession添加到ServerMediaSession中,这两个类添加后貌似就不会释放.
但是通过Subsession::createNewStreamSource()出来的FileSource会被调用析构函数
如:H264视频文件的Subsession返回的FrameSource
// Upstream live555 implementation, quoted for comparison: the returned
// FramedSource (file source + framer) IS destroyed when the client session
// ends, unlike the session/subsession objects above.
FramedSource* H264VideoFileServerMediaSubsession::createNewStreamSource(unsigned /*clientSessionId*/, unsigned& estBitrate) {
estBitrate = 500; // kbps, estimate
// Create the video source:
ByteStreamFileSource* fileSource = ByteStreamFileSource::createNew(envir(), fFileName);
if (fileSource == NULL) return NULL;
fFileSize = fileSource->fileSize();
// Create a framer for the Video Elementary Stream:
return H264VideoStreamFramer::createNew(envir(), fileSource);
}
这是别人的解释:原文:https://blog.csdn.net/zhangjikuan/article/details/38554589
H264VideoStreamFramer把自己的缓冲(其实是sink的)传给H264VideoStreamParser,每当H264VideoStreamFramer要获取一个NALU时,就跟H264VideoStreamParser要,H264VideoStreamParser就从ByteStreamFileSource读一坨数据,然后进行分析,如果取得了一个NALU,就传给H264VideoStreamFramer.
相关知识: H264视频的NALU数据 mp4中读取到的AVPacket需要h264_mp4toannexb_filter得到sps及pps
RTP 传输视频数据 / RTCP 监控视频传输质量 ------- RFC3550/RFC3551
RTSP 视频控制----- RFC2326
RTP 数据协议负责对流媒体数据进行封包并实现媒体流的实时传输,每一个RTP数据报都由头部(Header)和负载(Payload)两个部分组成,其中头部前12个字节的含义是固定的,而负载则可以是音频或者视频数据。
CSRC计数\负载类型PT(编码算法、采样频率、承载通道等)\***\时间戳
GetNextFrame()调用栈
关于numTruncatedBytes 到afterGettingFrame1中,则表示 fNumTruncatedBytes = fFrameSize-fMaxSize; 表示多余的数据量
这是通过eclipse查看的类继承关系 Medium
ServerMediaSubsession
FramedSource