|
发表于 2022-1-20 16:56:41
只看该作者
12#
/*
 * Audio capture -> AAC encode -> RTSP streaming worker thread.
 *
 * Pipeline: AI (ALSA "default" node, 16 kHz, mono, FLTP) -> AENC (AAC,
 * 64 kbps), bound together so PCM flows into the encoder automatically.
 * Each encoded frame is prefixed with a 7-byte ADTS header and pushed to
 * the audio RTSP session, then the media buffer is released.
 *
 * data:    unused — the creator passes NULL (kept for pthread signature).
 * returns: NULL on setup failure; the streaming loop itself never exits.
 */
static void *AudioRtspStream(void *data) {
  (void)data; /* caller passes NULL; no AudioParams is actually consumed */
  printf("--------------------------------Get packet-AudioRtspStream-------into---------------------------\n");
  RK_U8 aac_header[7]; /* ADTS header without CRC is always 7 bytes */
  int ret = 0;
  RK_U32 u32SampleRate = 16000;
  RK_U32 u32BitRate = 64000; /* 64 kbps */
  RK_U32 u32ChnCnt = 1;
  RK_U32 u32FrameCnt = 1024; /* always 1024 samples per frame for AAC */
  SAMPLE_FORMAT_E enSampleFmt = RK_SAMPLE_FMT_FLTP;
  /* default: CARD=rockchiprk809co */
  RK_CHAR *pDeviceName = "default";

  MPP_CHN_S mpp_chn_ai, mpp_chn_aenc;
  mpp_chn_ai.enModId = RK_ID_AI;
  mpp_chn_ai.s32ChnId = 0;
  mpp_chn_aenc.enModId = RK_ID_AENC;
  mpp_chn_aenc.s32ChnId = 0;

  /* 1. create AI (audio capture channel) */
  AI_CHN_ATTR_S ai_attr;
  ai_attr.pcAudioNode = pDeviceName;
  ai_attr.enSampleFormat = enSampleFmt;
  ai_attr.u32NbSamples = u32FrameCnt;
  ai_attr.u32SampleRate = u32SampleRate;
  ai_attr.u32Channels = u32ChnCnt;
  ai_attr.enAiLayout = AI_LAYOUT_NORMAL;
  ret = RK_MPI_AI_SetChnAttr(mpp_chn_ai.s32ChnId, &ai_attr);
  ret |= RK_MPI_AI_EnableChn(mpp_chn_ai.s32ChnId);
  if (ret) {
    printf("Create AI[0] failed! ret=%d\n", ret);
    return NULL;
  }

  /* 2. create AENC (AAC encoder channel) */
  AENC_CHN_ATTR_S aenc_attr;
  aenc_attr.enCodecType = RK_CODEC_TYPE_AAC;
  aenc_attr.u32Bitrate = u32BitRate;
  aenc_attr.u32Quality = 1;
  aenc_attr.stAencAAC.u32Channels = u32ChnCnt;
  aenc_attr.stAencAAC.u32SampleRate = u32SampleRate;
  ret = RK_MPI_AENC_CreateChn(mpp_chn_aenc.s32ChnId, &aenc_attr);
  if (ret) {
    /* NOTE(review): the AI channel stays enabled on this error path —
     * add a matching disable call here if the SDK provides one. */
    printf("Create AENC[0] failed! ret=%d\n", ret);
    return NULL;
  }

  /* 3. bind AI -> AENC */
  ret = RK_MPI_SYS_Bind(&mpp_chn_ai, &mpp_chn_aenc);
  if (ret) {
    printf("Bind AI[0] to AENC[0] failed! ret=%d\n", ret);
    return NULL;
  }

  MEDIA_BUFFER buffer;
  while (1) {
    /* Block until an encoded AAC frame is available (-1 = wait forever). */
    buffer = RK_MPI_SYS_GetMediaBuffer(RK_ID_AENC, 0, -1);
    if (!buffer)
      continue;
    size_t frame_size = RK_MPI_MB_GetSize(buffer);
    printf("#Get packet-AudioRtspStream, size %zu\n", frame_size);

    /* Build the 7-byte ADTS header for this frame, then assemble
     * header + raw AAC payload into one contiguous buffer for RTSP. */
    GetAdtsHeader(aac_header, u32SampleRate, u32ChnCnt, frame_size);
    char *adts_aac = (char *)malloc(frame_size + sizeof(aac_header));
    if (adts_aac) { /* fix: original never checked malloc */
      memcpy(adts_aac, aac_header, sizeof(aac_header));
      /* fix: write payload at a fixed offset instead of the original
       * error-prone "adts_aac += 7; ...; adts_aac -= 7" pointer juggling */
      memcpy(adts_aac + sizeof(aac_header), RK_MPI_MB_GetPtr(buffer),
             frame_size);
      if (g_audiortsplive && g_audiortsp_session) {
        rtsp_tx_audio(g_audiortsp_session, (unsigned char *)adts_aac,
                      (frame_size + sizeof(aac_header)),
                      RK_MPI_MB_GetTimestamp(buffer));
        rtsp_do_event(g_audiortsplive);
      }
      free(adts_aac); /* fix: original leaked this buffer on every frame */
    }
    RK_MPI_MB_ReleaseBuffer(buffer);
  }
  return NULL;
}
/*
 * Video RTSP streaming worker thread.
 *
 * Pulls every encoded packet coming out of VENC channel 0 and forwards it
 * to the H.264 RTSP session, then releases the media buffer. Loops forever.
 *
 * data:    unused pthread argument.
 * returns: NULL (never reached in practice).
 */
static void *VideoRtspStream(void *data) {
  printf("--------------------------------Get packet-VideoRtspStream-----------------------------------\n");
  MEDIA_BUFFER mb;
  for (;;) {
    /* Wait indefinitely (-1) for the next encoded video packet. */
    mb = RK_MPI_SYS_GetMediaBuffer(RK_ID_VENC, 0, -1);
    if (mb == NULL)
      continue;
    printf("#Get packet-VideoRtspStream, size %zu\n", RK_MPI_MB_GetSize(mb));
    if (g_rtsplive && g_rtsp_session) {
      unsigned char *payload = (unsigned char *)RK_MPI_MB_GetPtr(mb);
      rtsp_tx_video(g_rtsp_session, payload, RK_MPI_MB_GetSize(mb),
                    RK_MPI_MB_GetTimestamp(mb));
      rtsp_do_event(g_rtsplive);
    }
    RK_MPI_MB_ReleaseBuffer(mb);
  }
  return NULL;
}
/*
 * camera_thread constructor: brings up the full video pipeline
 *   ISP -> VI (NV12 capture) -> RGA (90-degree rotation) -> VENC (H.264 CBR)
 * plus two RTSP servers (video on :8554, audio on :8555) and starts the
 * audio streaming worker thread. On any setup failure the constructor
 * logs and returns early, leaving the object only partially initialized.
 *
 * NOTE(review): `parent` is not forwarded to a QObject base class here,
 * and VideoRtspStream is not started in this constructor — confirm both
 * are handled elsewhere.
 */
camera_thread::camera_thread(QObject *parent)
{
    (void)parent; /* NOTE(review): intentionally unused? confirm */

    /* 1. ISP bring-up: sensor 0, normal (non-HDR) mode, IQ files from iq_dir. */
    ret = SAMPLE_COMM_ISP_Init(0, RK_AIQ_WORKING_MODE_NORMAL, RK_TRUE, iq_dir);
    if (ret)
        return;
    SAMPLE_COMM_ISP_Run(0);
    SAMPLE_COMM_ISP_SET_Contrast(0, 110); /* image contrast tweak */
    SAMPLE_COMM_ISP_SetFrameRate(0, 30);
    RK_MPI_SYS_Init();

    /* 2. VI channel 0: NV12 capture from the ISP scaler node. */
    memset(&vi_chn_attr, 0, sizeof(vi_chn_attr));
    vi_chn_attr.pcVideoNode = "rkispp_scale0";
    vi_chn_attr.u32BufCnt = 4;
    vi_chn_attr.u32Width = video_width;
    vi_chn_attr.u32Height = video_height;
    vi_chn_attr.enPixFmt = IMAGE_TYPE_NV12;
    vi_chn_attr.enWorkMode = VI_WORK_MODE_NORMAL;
    vi_chn_attr.enBufType = VI_CHN_BUF_TYPE_MMAP;
    ret = RK_MPI_VI_SetChnAttr(0, 0, &vi_chn_attr);
    ret |= RK_MPI_VI_EnableChn(0, 0);
    if (ret) {
        printf("Create vi[1] failed! ret=%d\n", ret);
        return;
    }

    /* 3. RGA channel 0: rotate 90 degrees — note the width/height swap on
     * the output image relative to the display dimensions. */
    memset(&stRgaAttr, 0, sizeof(stRgaAttr));
    stRgaAttr.bEnBufPool = RK_TRUE;
    stRgaAttr.u16BufPoolCnt = 4;
    stRgaAttr.u16Rotaion = 90; /* fix: removed dead store of 0 right before this */
    stRgaAttr.stImgIn.u32X = 0;
    stRgaAttr.stImgIn.u32Y = 0;
    stRgaAttr.stImgIn.imgType = IMAGE_TYPE_NV12;
    stRgaAttr.stImgIn.u32Width = video_width;
    stRgaAttr.stImgIn.u32Height = video_height;
    stRgaAttr.stImgIn.u32HorStride = video_width;
    stRgaAttr.stImgIn.u32VirStride = video_height;
    stRgaAttr.stImgOut.u32X = 0;
    stRgaAttr.stImgOut.u32Y = 0;
    stRgaAttr.stImgOut.imgType = IMAGE_TYPE_NV12;
    stRgaAttr.stImgOut.u32Width = disp_height;
    stRgaAttr.stImgOut.u32Height = disp_width;
    stRgaAttr.stImgOut.u32HorStride = disp_height;
    stRgaAttr.stImgOut.u32VirStride = disp_width;
    ret = RK_MPI_RGA_CreateChn(0, &stRgaAttr);
    if (ret) {
        printf("Create rga[0] failed! ret=%d\n", ret); /* fix: "falied" typo */
        return;
    }

    /* 4. VENC channel 0: H.264 CBR, 30/30 fps, GOP 30, main profile (77),
     * sized to the rotated (swapped) display dimensions. */
    memset(&venc_chn_attr, 0, sizeof(venc_chn_attr));
    venc_chn_attr.stVencAttr.enType = RK_CODEC_TYPE_H264;
    venc_chn_attr.stRcAttr.enRcMode = VENC_RC_MODE_H264CBR;
    venc_chn_attr.stRcAttr.stH264Cbr.u32Gop = 30;
    venc_chn_attr.stRcAttr.stH264Cbr.u32BitRate = disp_width * disp_height;
    /* frame rate: in 30/1, out 30/1. */
    venc_chn_attr.stRcAttr.stH264Cbr.fr32DstFrameRateDen = 1;
    venc_chn_attr.stRcAttr.stH264Cbr.fr32DstFrameRateNum = 30;
    venc_chn_attr.stRcAttr.stH264Cbr.u32SrcFrameRateDen = 1;
    venc_chn_attr.stRcAttr.stH264Cbr.u32SrcFrameRateNum = 30;
    venc_chn_attr.stVencAttr.imageType = IMAGE_TYPE_NV12;
    venc_chn_attr.stVencAttr.u32PicWidth = disp_height;
    venc_chn_attr.stVencAttr.u32PicHeight = disp_width;
    venc_chn_attr.stVencAttr.u32VirWidth = disp_height;
    venc_chn_attr.stVencAttr.u32VirHeight = disp_width;
    venc_chn_attr.stVencAttr.u32Profile = 77;
    ret = RK_MPI_VENC_CreateChn(0, &venc_chn_attr);
    if (ret) {
        printf("ERROR: create VENC[0] error! ret=%d\n", ret);
        return; /* fix: original fell through and kept binding after failure */
    }

    /* 5. bind VI[0] -> RGA[0] */
    stSrcChn.enModId = RK_ID_VI;
    stSrcChn.s32DevId = 0;
    stSrcChn.s32ChnId = 0;
    stDestChn.enModId = RK_ID_RGA;
    stDestChn.s32DevId = 0;
    stDestChn.s32ChnId = 0;
    ret = RK_MPI_SYS_Bind(&stSrcChn, &stDestChn);
    if (ret) {
        printf("Bind vi[0] to rga[0] failed! ret=%d\n", ret);
        return;
    }

    /* 6. bind RGA[0] -> VENC[0] */
    stSrcChn.enModId = RK_ID_RGA;
    stSrcChn.s32DevId = 0;
    stSrcChn.s32ChnId = 0;
    stDestChn.enModId = RK_ID_VENC;
    stDestChn.s32DevId = 0;
    stDestChn.s32ChnId = 0;
    ret = RK_MPI_SYS_Bind(&stSrcChn, &stDestChn);
    if (ret) {
        printf("ERROR: register output callback for VENC[0] error! ret=%d\n", ret);
        return; /* fix: don't start RTSP on an incomplete pipeline */
    }

    /* 7. RTSP servers: H.264 video on :8554, AAC audio on :8555. */
    g_rtsplive = create_rtsp_demo(8554);
    g_rtsp_session = rtsp_new_session(g_rtsplive, "/H264_stream_0");
    if (!g_rtsplive || !g_rtsp_session)
        printf("ERROR: create video rtsp server/session failed!\n");
    rtsp_set_video(g_rtsp_session, RTSP_CODEC_ID_VIDEO_H264, NULL, 0);
    rtsp_sync_video_ts(g_rtsp_session, rtsp_get_reltime(), rtsp_get_ntptime());

    g_audiortsplive = create_rtsp_demo(8555);
    g_audiortsp_session = rtsp_new_session(g_audiortsplive, "/audio_stream_0");
    if (!g_audiortsplive || !g_audiortsp_session)
        printf("ERROR: create audio rtsp server/session failed!\n");
    rtsp_set_audio(g_audiortsp_session, RTSP_CODEC_ID_AUDIO_AAC, NULL, 0);
    rtsp_sync_audio_ts(g_audiortsp_session, rtsp_get_reltime(), rtsp_get_ntptime());

    /* 8. start the audio streaming worker (fix: check pthread_create). */
    if (pthread_create(&audio_rtsp_thread, NULL, AudioRtspStream, NULL))
        printf("ERROR: create audio rtsp thread failed!\n");
}
大致是这样~参数有一些声明没贴。这个可以生成相应的视频音频流,我打算直接一起拉这个两个流一起播放,看了效果,没有明显的口型对不上的问题。 |
|