MC3302_SDK_V1.1.9_202507281.../media/sample/system/rpcvideodemo/stream.c
2025-11-11 12:08:31 +08:00

341 lines
8.9 KiB
C
Executable File

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <fcntl.h>
#include <sys/time.h>
#include <sys/select.h>
#include <errno.h>
#include <math.h>
#include <signal.h>
#include <sys/prctl.h>
#include <sys/stat.h>
#include <pthread.h>
//#include "fy_type.h"
#include <librtsp/include/librtsp.h>
#include <libdmc/include/libdmc.h>
#include <libdmc/include/libdmc_pes.h>
#include <libdmc/include/libdmc_rtsp.h>
#include <libdmc/include/libdmc_record_raw.h>
#include <sys/time.h>
#include "types/type_def.h"
#include "dsp/fh_system_mpi.h"
#include "dsp/fh_venc_mpi.h"
#include "dsp/fh_jpege_mpi.h"
#include "dsp/fh_vdec_mpi.h"
#include "sample_opts.h"
#include "vmm_api.h"
#include <libdmc/include/libdmc_http_mjpeg.h>
/* TCP port served by the HTTP MJPEG subscriber (see dmc_http_mjpeg_subscribe). */
#define HTTP_MJPEG_PORT (1111)
/* NOTE(review): not referenced in this file -- presumably a YUV buffer count
 * used elsewhere; confirm before removing. */
#define YUV_BUF_NUM 3
/* Bitmask of output sinks a stream group can be published to. */
enum stream_type {
FH_PES = 0x1,   /* PES packetized output (libdmc_pes) */
FH_RTSP = 0x2,  /* RTSP server output (libdmc_rtsp) */
FH_HTTP = 0x4,  /* HTTP MJPEG output (libdmc_http_mjpeg) */
FH_RAW = 0x8,   /* raw record output (libdmc_record_raw) */
};
/* Per-group routing entry: which sinks (stream_type bits) a group feeds. */
struct stream_info {
FH_UINT32 grpid;        /* encoder group id */
enum stream_type type;  /* OR-ed stream_type flags */
};
#define MAX_GRP_NUM 4
/* Routing table indexed by group; populated in _get_stream_info_type(). */
static struct stream_info g_stream_info[MAX_GRP_NUM];
/*
 * Reset the per-group routing table and enable RTSP output on group 0
 * only.  Called once from sample_dmc_init() before the media dispatch
 * center is brought up.
 *
 * Fix: declare the parameter list as (FH_VOID) -- an empty () in a C
 * definition means "unspecified arguments", and the rest of this file
 * already uses the explicit FH_VOID convention.
 */
static FH_VOID _get_stream_info_type(FH_VOID)
{
    memset(g_stream_info, 0, sizeof(g_stream_info));
    g_stream_info[0].type |= FH_RTSP; /* group 0 is published over RTSP */
}
/*
 * Bring up the media dispatch center and attach its output sinks.
 *
 * dst_ip - unused in the current RTSP/HTTP pull model; kept so existing
 *          callers keep compiling.
 * port   - TCP port passed to the RTSP subscriber.
 *
 * Always returns 0 (the dmc_* calls' results are not checked here --
 * this mirrors the sample's best-effort style).
 */
FH_SINT32 sample_dmc_init(FH_CHAR *dst_ip, FH_UINT32 port)
{
    FH_SINT32 rtspgrp;

    (void)dst_ip; /* explicitly unused: RTSP/HTTP serve pull requests, no push target */

    _get_stream_info_type();
    dmc_init();

    /* NOTE(review): group id 7 looks like a debug leftover -- confirm it
     * against the dmc_rtsp_subscribe() contract before relying on it. */
    rtspgrp = 7;
    dmc_rtsp_subscribe(rtspgrp, port);
    dmc_http_mjpeg_subscribe(HTTP_MJPEG_PORT);
    return 0;
}
/*
 * Tear down everything sample_dmc_init() set up: the RTSP and
 * HTTP-MJPEG sinks are unsubscribed first, then the dispatch core is
 * deinitialized.  Always returns 0.
 */
FH_SINT32 sample_dmc_deinit(FH_VOID)
{
/* Sinks are detached before the dispatch core is shut down. */
dmc_rtsp_unsubscribe();
dmc_http_mjpeg_unsubscribe();
dmc_deinit();
return 0;
}
#define DUMP_MJPEG 0
#define DUMP_STREAM 0
/*
 * Encoder (H.264/H.265/MJPEG) stream pump thread.
 *
 * arg points to an unsigned int pair:
 *   arg[0] - channel bitmask (printed for reference; the loop actually
 *            drains every channel via FH_STREAM_ALL)
 *   arg[1] - stop flag: caller sets it non-zero to request exit; the
 *            thread clears it again to acknowledge shutdown.
 *
 * Each NALU of an H.264/H.265 frame (and each complete MJPEG frame) is
 * forwarded to the media dispatch center via dmc_input(), after which
 * the stream buffer is returned with FH_VENC_ReleaseStream().
 * Always returns NULL.
 *
 * Cleanup in this revision: the dead per-iteration `chn` counter and its
 * disabled (#if 0) per-channel fetch were removed; the DUMP_STREAM debug
 * path now bounds-checks its 4-entry arrays, uses snprintf, nulls the
 * FILE* after fclose, and closes any still-open dump files on exit.
 */
FH_VOID *sample_common_get_stream_proc(FH_VOID *arg)
{
    FH_SINT32 ret, i;
    FH_SINT32 end_flag;
    FH_SINT32 subtype;
    FH_VENC_STREAM stream;
    unsigned int *p = (unsigned int *)arg;
    unsigned int chns = p[0];
    unsigned int *stop = p + 1;
#if(DUMP_STREAM)
    int chan_cnts[4] = {0, 0, 0, 0};
    FILE *fps[4] = {NULL, NULL, NULL, NULL};
    char fname[128];
#define DUMP_CNT 50
#endif
    prctl(PR_SET_NAME, "demo_get_stream");
    printf("chns: 0x%x\n", chns);
    while (*stop == 0)
    {
        /* Block until one encoded frame is available on any channel. */
        ret = FH_VENC_GetStream_Block(FH_STREAM_ALL, &stream);
        if (ret != 0)
        {
            usleep(1000); /* brief back-off before retrying */
            continue;
        }
        if (stream.stmtype == FH_STREAM_H264)
        {
            subtype = stream.h264_stream.frame_type == FH_FRAME_I ? DMC_MEDIA_SUBTYPE_IFRAME : DMC_MEDIA_SUBTYPE_PFRAME;
            printf("chan:%d, frame_type:%d,nalu_cnt:%d\n", stream.chan, subtype, stream.h264_stream.nalu_cnt);
            for (i = 0; i < stream.h264_stream.nalu_cnt; i++)
            {
                /* end_flag marks the last NALU so the sink can close the frame. */
                end_flag = (i == (stream.h264_stream.nalu_cnt - 1)) ? 1 : 0;
                dmc_input(stream.chan,
                          DMC_MEDIA_TYPE_H264,
                          subtype,
                          stream.h264_stream.time_stamp,
                          stream.h264_stream.nalu[i].start,
                          stream.h264_stream.nalu[i].length,
                          end_flag);
            }
        }
        /* One H.265 frame: forward NALU by NALU. */
        else if (stream.stmtype == FH_STREAM_H265)
        {
            subtype = stream.h265_stream.frame_type == FH_FRAME_I ? DMC_MEDIA_SUBTYPE_IFRAME : DMC_MEDIA_SUBTYPE_PFRAME;
            for (i = 0; i < stream.h265_stream.nalu_cnt; i++)
            {
                end_flag = (i == (stream.h265_stream.nalu_cnt - 1)) ? 1 : 0;
#if(DUMP_STREAM)
                /* Debug arrays are sized 4 -- guard against higher channel ids. */
                if (stream.chan < 4 && chan_cnts[stream.chan] < DUMP_CNT)
                {
                    if (fps[stream.chan] == NULL)
                    {
                        snprintf(fname, sizeof(fname), "venc_chan%d.265", stream.chan);
                        fps[stream.chan] = fopen(fname, "wb");
                    }
                    if (fps[stream.chan])
                        fwrite(stream.h265_stream.nalu[i].start, 1, stream.h265_stream.nalu[i].length, fps[stream.chan]);
                }
#endif
                dmc_input(stream.chan,
                          DMC_MEDIA_TYPE_H265,
                          subtype,
                          stream.h265_stream.time_stamp,
                          stream.h265_stream.nalu[i].start,
                          stream.h265_stream.nalu[i].length,
                          end_flag);
            }
#if(DUMP_STREAM)
            if (stream.chan < 4)
            {
                chan_cnts[stream.chan]++;
                if (chan_cnts[stream.chan] == DUMP_CNT && fps[stream.chan])
                {
                    fclose(fps[stream.chan]);
                    fps[stream.chan] = NULL; /* was left dangling before */
                }
            }
#endif
        }
        /* One MJPEG frame: self-contained, no subtype/timestamp, end_flag=1. */
        else if (stream.stmtype == FH_STREAM_MJPEG)
        {
            dmc_input(stream.chan,
                      DMC_MEDIA_TYPE_MJPEG,
                      0,
                      0,
                      stream.mjpeg_stream.start,
                      stream.mjpeg_stream.length,
                      1);
        }
        /* Must pair with the Get call above to release the stream buffer. */
        ret = FH_VENC_ReleaseStream(&stream);
        if (ret)
        {
            printf("Error(%d - %x): FH_VENC_ReleaseStream failed for chan(%d)!\n", ret, ret, stream.chan);
        }
    }
#if(DUMP_STREAM)
    /* Close any dump files still open (previously leaked on early stop). */
    for (i = 0; i < 4; i++)
    {
        if (fps[i])
            fclose(fps[i]);
    }
#endif
    printf("%s exit:\n", __FUNCTION__);
    *stop = 0;
    return NULL;
}
//#define JPEG_DECODE_SAMPLE 1
/*
 * MJPEG snapshot pump thread.
 *
 * arg points to an unsigned int pair:
 *   arg[0] - bitmask of JPEG encoder channels to poll (bits 0..7)
 *   arg[1] - stop flag: caller sets it non-zero to request exit; the
 *            thread clears it again to acknowledge shutdown.
 *
 * Each captured JPEG is forwarded to the media dispatch center as one
 * complete MJPEG frame.  The optional JPEG_DECODE_SAMPLE path feeds the
 * same bitstream back through decoder channel 0 as a loopback test.
 * Always returns NULL.
 *
 * Fixes in this revision: the DUMP_MJPEG debug write now runs only after
 * FH_JPEGE_GetStream() succeeds (it previously dumped stale/zeroed data
 * on failure), the 4-entry debug arrays are bounds-checked against chn
 * (which iterates up to 7 -- an out-of-bounds write before), the dump
 * filename uses an .mjpeg extension instead of the copy-pasted .265,
 * sprintf became snprintf, and the FILE* is nulled after fclose.
 */
FH_VOID *sample_common_mjpeg_get_stream_thread(FH_VOID *arg)
{
    int s32Ret = 0;
    unsigned int *p = (unsigned int *)arg;
    unsigned int chns = p[0];
    JPEGE_CHN chn;
    JPEGE_STREAM_S stStream;
    unsigned int *stop = p + 1;
#if(DUMP_MJPEG)
    int chan_cnts[4] = {0, 0, 0, 0};
    FILE *fps[4] = {NULL, NULL, NULL, NULL};
    char fname[128];
#define DUMP_CNT 50
#endif
#if(JPEG_DECODE_SAMPLE)
    VDEC_CHN_ATTR_S stVdecChnAttr;
    VDEC_CHN_ATTR_S *pstVdecChnAttr = &stVdecChnAttr;
    VDEC_STREAM_S stVdecStream;
    VIDEO_FRAME_INFO_S stFrameInfo;
    VDEC_CHN_STAT_S stChanStat;
    unsigned int vfcnt = 0;
    /* JPEG decode loopback: route encoded JPEGs back through decoder chn 0. */
    pstVdecChnAttr->enType = PT_JPEG;
    pstVdecChnAttr->u32BufSize = 1920 * 1080 / 2;
    pstVdecChnAttr->u32Priority = 5;
    pstVdecChnAttr->u32PicWidth = 3840;
    pstVdecChnAttr->u32PicHeight = 2160;
    pstVdecChnAttr->stVdecJpegAttr.enMode = VIDEO_MODE_FRAME;
    pstVdecChnAttr->stVdecJpegAttr.enJpegFormat = JPG_COLOR_FMT_YCBCR420;
    FH_VDEC_SetChnVBCnt(0, 1);
    FH_VDEC_CreateChn(0, pstVdecChnAttr);
    FH_VDEC_StartRecvStream(0);
#endif
    printf("Enter %s\n", __FUNCTION__);
    while (*stop == 0) {
        for (chn = 0; chn < 8; chn++) {
            if (!(chns & (1 << chn)))
                continue;
            memset(&stStream, 0, sizeof(stStream));
            stStream.s32MilliSec = 100; /* wait up to 100 ms per channel */
            s32Ret = FH_JPEGE_GetStream(chn, &stStream);
            if (s32Ret == 0) {
#if(DUMP_MJPEG)
                /* Dump only on success and only for channels the 4-entry
                 * debug arrays can hold. */
                if (chn < 4 && chan_cnts[chn] < DUMP_CNT) {
                    if (fps[chn] == NULL) {
                        snprintf(fname, sizeof(fname), "mjpeg%d.mjpeg", chn);
                        fps[chn] = fopen(fname, "wb");
                    }
                    if (fps[chn])
                        fwrite(stStream.pu8Addr, 1, stStream.u32Len, fps[chn]);
                }
#endif
                dmc_input(chn, DMC_MEDIA_TYPE_MJPEG,
                          0,
                          0,
                          stStream.pu8Addr, stStream.u32Len,
                          1);
#if(JPEG_DECODE_SAMPLE)
                stVdecStream.pu8Addr = stStream.pu8Addr;
                stVdecStream.u32Len = stStream.u32Len;
                stVdecStream.u64PTS = stStream.u64PTS;
                stVdecStream.bEndOfFrame = FH_TRUE;
                stVdecStream.bEndOfStream = FH_FALSE;
                if (chn == 0) {
                    s32Ret = FH_VDEC_SendStream(0, &stVdecStream, 100);
                    if (s32Ret == 0) {
                        s32Ret = FH_VDEC_GetImage(0, &stFrameInfo, 0);
                        FH_VDEC_ReleaseImage(0, &stFrameInfo);
                    }
                    vfcnt++;
                    /* Periodically report send/decode statistics. */
                    if (vfcnt % 100 == 0) {
                        stChanStat.u32DecodeStreamFrames = 0;
                        FH_VDEC_Query(0, &stChanStat);
                        printf("send_cnt:%d,dec_count:%d\n", stChanStat.u32RecvStreamFrames, stChanStat.u32DecodeStreamFrames);
                    }
                }
#endif
#if(DUMP_MJPEG)
                if (chn < 4) {
                    chan_cnts[chn]++;
                    if (chan_cnts[chn] == DUMP_CNT && fps[chn]) {
                        fclose(fps[chn]);
                        fps[chn] = NULL; /* was left dangling before */
                    }
                }
#endif
                FH_JPEGE_ReleaseStream(chn, &stStream);
            }
        }
    }
#if(JPEG_DECODE_SAMPLE)
    FH_VDEC_StopRecvStream(0);
    FH_VDEC_DestroyChn(0);
#endif
    printf("Exit %s\n", __FUNCTION__);
    *stop = 0;
    return NULL;
}