std::map<int,LivePusher *> pushers = {};
vector<MiniViewData *> pushMiniDatas = {};
MiniViewData *mainView = nullptr;
YUVData *mainYuvData = nullptr;
int mainStreamCode = 0;
pthread_mutex_t pushVideoMutex, pushNV21Mutex, pushAudioMutex;
ANativeWindow *nativeMainWindow = nullptr;

SafeQueue<YUVData *> pushFrames;
AudioChannel *audioChannel = nullptr;
VideoChannel *videoChannel = nullptr;
LiveViewCall *jvmMainCall = nullptr;

void packetCallBack(RTMPPacket *packet){
    //LOGI("packetCallBack safequeue packets:%d",&packets);

}

/**
 * Crop the picture
 * Crops the main picture
 * @param pData
 * @param data
 */


    // Allocate the destination YUV memory
    int dstYSize = miniWidth * miniHeight;
    cutYUV(yuvData,width,height,cropX,cropY,dstData,miniWidth,miniHeight);
}

/**
 * Crop the picture
 * @param yuvData   source data
 * @param width     source width
 * @param height    source height
 * @param cropX     crop position X
 * @param cropY     crop position Y
 * @param dstData   destination data
 * @param dstWidth  destination width
 * @param dstHeight destination height
 */
void cutYUV(uint8_t *yuvData[3],int width,int height,int cropX,int cropY,uint8_t *dstData[],int dstWidth,int dstHeight){
    // Allocate the destination YUV planes
    int dstYSize = dstWidth * dstHeight;
    int dstUSize = dstYSize / 4;

    dstData[0] = new uint8_t[dstYSize];
    dstData[1] = new uint8_t[dstUSize];
    dstData[2] = new uint8_t[dstUSize];

    // Crop the Y plane
    for (int y = 0; y < dstHeight; ++y) {
        memcpy(dstData[0] + y * dstWidth, yuvData[0] + (cropY + y) * width + cropX,
               dstWidth);
    }

    int srcChromaWidth = width / 2;
    int cropChromaX = cropX / 2;
    int cropChromaY = cropY / 2;
    int cropChromaWidth = dstWidth / 2;
    int cropChromaHeight = dstHeight / 2;

    // Crop the U plane (chroma resolution is half of Y)
    for (int y = 0; y < cropChromaHeight; ++y) {
        memcpy(dstData[1] + y * cropChromaWidth,
               yuvData[1] + (cropChromaY + y) * srcChromaWidth + cropChromaX,
               cropChromaWidth);
    }

    // Crop the V plane
    for (int y = 0; y < cropChromaHeight; ++y) {
        memcpy(dstData[2] + y * cropChromaWidth,
               yuvData[2] + (cropChromaY + y) * srcChromaWidth + cropChromaX,
               cropChromaWidth);
    }
}

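// Minimal usage sketch for cutYUV (illustrative only, not part of the original code):
// crop a 64x64 block from the top-left of a 1280x720 I420 frame. Assumes srcPlanes
// already holds valid Y/U/V planes of that size and that the crop origin and size stay
// even and inside the source. cutYUVExample is a hypothetical name.
void cutYUVExample(uint8_t *srcPlanes[3]) {
    uint8_t *cropped[3];
    cutYUV(srcPlanes, 1280, 720, /*cropX=*/0, /*cropY=*/0, cropped, 64, 64);
    // ... use cropped[0..2] (Y/U/V planes of the 64x64 block) ...
    // cutYUV allocates the planes with new[]; releaseYuvData (defined below) also works here.
    delete[] cropped[0];
    delete[] cropped[1];
    delete[] cropped[2];
}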

void waterYUV(int index,uint8_t *mainData[3]) {
    //LOGI("waterYUV add watermark (picture-in-picture)");
    for (int i = 0; i < index; ++i) {
        MiniViewData *miniView = pushMiniDatas[i];
        if(miniView->streamCode == mainStreamCode){
            continue;
        }
        if(!miniView->videoOn){
            continue;
        }
        if(miniView->streamType == 6){
            int64_t t = getCurrentTimestamp();
            //***** crop the picture
            int scaleWidth = (FRAME_WIDTH * miniView->viewRate);
            int scaleHeight = (FRAME_WIDTH / (miniView->width * 1.0 / miniView->height) * miniView->viewRate);
            // For YUV 4:2:0 images the width and height usually need to be multiples of 2
            if(scaleWidth % 2 == 1){
                scaleWidth += 1;
            }
            if(scaleHeight % 2 == 1){
                scaleHeight += 1;
            }
            if(scaleWidth > FRAME_WIDTH){
                scaleWidth = FRAME_WIDTH;
            }
            if(scaleHeight > FRAME_HEIGHT){
                scaleHeight = FRAME_HEIGHT;
            }
            YUVData *yuvFrame = new YUVData;
            yuvFrame->width = scaleWidth;
            yuvFrame->height = scaleHeight;
            yuvFrame->pYrate = miniView->pYrate;
            yuvFrame->pXrate = miniView->pXrate;
            uint8_t *dstData[3];
            // position
            int offsetY = (FRAME_HEIGHT - scaleHeight) * miniView->pYrate;
            int offsetX = (FRAME_WIDTH - scaleWidth) * miniView->pXrate;
            cutYUV(mainData,FRAME_WIDTH,FRAME_HEIGHT,offsetX,offsetY,dstData,scaleWidth,scaleHeight);
            //addBlackBorder(yuvFrame->yuvData,scaleWidth,scaleHeight,3);
            uint8_t *rgbaData = jvmMainCall->drawText(miniView->streamCode,dstData,scaleWidth,scaleHeight);
            releaseYuvData(dstData);
            rgbaToYUV((uint8_t *)rgbaData,scaleWidth,scaleHeight,yuvFrame->yuvData);
            miniView->scaledYuvFrames.push(yuvFrame);
            av_free(rgbaData);
            LOGI("drawText took:%lld ms",(long long)(getCurrentTimestamp() - t));
        }
        if(!miniView->scaledYuvFrame && miniView->scaledYuvFrames.size() == 0 ){
            //LOGE("mini view dropped a frame 1");
            continue;
        }
        if(miniView->scaledYuvFrames.size() > 0){
            releaseYuvFrameData(miniView->scaledYuvFrame);
            miniView->scaledYuvFrames.pop(miniView->scaledYuvFrame);
        }else{
            LOGE("mini view dropped a frame 2");
        }
        // Discard stale yuv frames
        if(miniView->yuvFrames.size() > 1){
            if(!miniView->yuvFrame){
                miniView->yuvFrames.pop(miniView->yuvFrame);
            }
            while (miniView->yuvFrame->timestamp < miniView->scaledYuvFrame->timestamp && miniView->yuvFrames.size() > 1 ){
                releaseYuvFrameData(miniView->yuvFrame);
                miniView->yuvFrames.pop(miniView->yuvFrame);
            }
        }

        YUVData *scaledYuvFrame = miniView->scaledYuvFrame;
        // position
        int offsetY = (mainHeight - scaledYuvFrame->height) * scaledYuvFrame->pYrate;
        int offsetX = (mainWidth - scaledYuvFrame->width) * scaledYuvFrame->pXrate;

        //LOGI("waterYUV %dx%d x:%d,y:%d x:%d,y:%d ",scaledYuvFrame->width,scaledYuvFrame->height,miniView->pYrate,miniView->pXrate,offsetX,offsetY);

        // Bounds check so the overlay never writes outside the main frame
        if (offsetX + scaledYuvFrame->width > mainWidth ) {

    sws_freeContext(sws_ctx);
    return rgba_data;
}
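
// The inline even-size adjustment in waterYUV (scaleWidth/scaleHeight must be multiples
// of 2 for 4:2:0 chroma subsampling) can be written as a tiny helper. Sketch only;
// roundUpToEven is a hypothetical name and waterYUV above keeps the inline form.
static inline int roundUpToEven(int v) {
    return (v % 2 == 1) ? v + 1 : v;   // e.g. 359 -> 360, 360 -> 360
}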

void argbToYUV(uint8_t *rgba,int width,int height,uint8_t *src_slices[3]){

    int frameSize = width * height;
    int uvSize = (width / 2) * (height / 2);
    // Allocate memory for the YUV data
    int yuvSize = frameSize * 3 / 2; // size of a YUV420P frame

    src_slices[0] = new uint8_t[frameSize];
    src_slices[1] = new uint8_t[uvSize];
    src_slices[2] = new uint8_t[uvSize];

    // Use libyuv to convert RGBA to YUV420P
    libyuv::ARGBToI420(
            reinterpret_cast<uint8_t*>(rgba), width * 4, // RGBA data and stride in bytes (4 bytes per pixel)
            src_slices[0], width,     // Y plane
            src_slices[1], width / 2, // U plane
            src_slices[2], width / 2, // V plane
            width,                    // width
            height                    // height
    );
    //LOGI("pushRGBA width:%d height:%d %d",miniView->width,miniView->height,rgbaData);
}
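
// Minimal usage sketch for argbToYUV (illustrative only, not part of the original code):
// convert a width x height RGBA buffer into three newly allocated I420 planes and free
// them afterwards. Assumes width and height are even, as 4:2:0 subsampling requires;
// argbToYUVExample is a hypothetical name.
void argbToYUVExample(uint8_t *rgba, int width, int height) {
    uint8_t *planes[3];
    argbToYUV(rgba, width, height, planes);
    // ... hand the planes to the pusher (e.g. pushYUV) or copy them elsewhere ...
    delete[] planes[0];  // the planes are new[]-allocated inside argbToYUV
    delete[] planes[1];
    delete[] planes[2];
}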

void rgbaToYUV(uint8_t *rgba,int width,int height,uint8_t *yuvData[3]){

    int frameSize = width * height;
    int uvSize = (width / 2) * (height / 2);

    yuvData[0] = new uint8_t[frameSize];
    yuvData[1] = new uint8_t[uvSize];
    yuvData[2] = new uint8_t[uvSize];

    // Convert RGBA to YUV420P with libyuv
    /*libyuv::RGBAToI420(
            reinterpret_cast<uint8_t*>(rgba), width * 4, // RGBA data and stride in bytes (4 bytes per pixel)
            src_slices[0], width,     // Y plane
            src_slices[1], width / 2, // U plane
            src_slices[2], width / 2, // V plane
            width,                    // width
            height                    // height
    );*/

    // 1. Create the SwsContext
    SwsContext* swsCtx = sws_getContext(
            width, height,
            AV_PIX_FMT_RGBA,     // input format; switch to BGRA, ARGB, etc. as needed
            width, height,
            AV_PIX_FMT_YUV420P,  // output format
            SWS_BILINEAR,
            nullptr, nullptr, nullptr
    );
    // 2. Wrap the input data
    uint8_t* inData[1] = { rgba };
    int inLineSize[1] = { width * 4 };

    // 3. Output plane line sizes (YUV420P: U and V are half the width of Y)
    int yuv_linesize[3] = { width, width / 2, width / 2 };

    // 4. Run the conversion
    sws_scale(swsCtx, inData, inLineSize, 0, height, yuvData, yuv_linesize);

    // 5. Release the scaler context
    sws_freeContext(swsCtx);
    // Note: the caller frees the yuvData planes (e.g. via releaseYuvData) when they are no longer needed
    //LOGI("pushRGBA width:%d height:%d %d",miniView->width,miniView->height,rgbaData);
}
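
// Optional variant (sketch only, not part of the original code): rgbaToYUV above creates
// and frees a SwsContext on every frame; sws_getCachedContext can reuse it across calls.
// Assumes this is only ever called from a single thread; rgbaToYUVCached and cachedSwsCtx
// are hypothetical names.
static SwsContext *cachedSwsCtx = nullptr;

void rgbaToYUVCached(uint8_t *rgba, int width, int height, uint8_t *yuvData[3]) {
    cachedSwsCtx = sws_getCachedContext(cachedSwsCtx,
                                        width, height, AV_PIX_FMT_RGBA,
                                        width, height, AV_PIX_FMT_YUV420P,
                                        SWS_BILINEAR, nullptr, nullptr, nullptr);
    yuvData[0] = new uint8_t[width * height];
    yuvData[1] = new uint8_t[(width / 2) * (height / 2)];
    yuvData[2] = new uint8_t[(width / 2) * (height / 2)];
    const uint8_t *inData[1] = { rgba };
    int inLineSize[1] = { width * 4 };
    int yuvLineSize[3] = { width, width / 2, width / 2 };
    sws_scale(cachedSwsCtx, inData, inLineSize, 0, height, yuvData, yuvLineSize);
    // The cached context is freed once at shutdown with sws_freeContext(cachedSwsCtx).
}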

void copyYUV(uint8_t *yuvData[3],int width,int height, uint8_t *dstData[3]){
    // Size of each plane
    size_t y_size = width * height;  // Y plane size

    addBlackBorder(scaleYuvFrame->yuvData,scaleWidth,scaleHeight,3);
    miniView->scaledYuvFrames.push(scaleYuvFrame);
    // The number of cached frames must not exceed max (5-10) frames
    int max = 3; int min = 3;
    if(miniView->yuvFrames.size() > (mainStreamCode == miniView->streamCode ? max:min) ){
        if(mainStreamCode == miniView->streamCode){
            LOGE("%d frame overflow pushyuv %d",streamCode,miniView->yuvFrames.size() - (miniView->videoOn ? max:min));

    if(miniView->width == 0 || miniView->height == 0){
        return;
    }
    //LOGI("pushRGBA width:%d height:%d %d",miniView->width,miniView->height,rgbaData);
    uint8_t* src_slices[3];
    argbToYUV(rgbaData,miniView->width,miniView->height,src_slices);
    pushYUV(streamCode,src_slices);
    // Release resources
    delete[] src_slices[0];
    delete[] src_slices[1];
    delete[] src_slices[2];
}

void pushNV21(int streamCode,uint8_t *yData,uint8_t *uData,uint8_t *vData,jint y_stride, jint u_stride,jint v_stride, jint uv_stride,jint angle,jint width,jint height){

}
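
// pushNV21's body is not shown above. One possible sketch (an assumption, not the original
// implementation): convert the camera planes to I420 with libyuv::Android420ToI420, which
// handles both planar and interleaved (NV21-style) chroma via the uv pixel stride, then hand
// the planes to pushYUV. Rotation by `angle` is omitted here (libyuv::I420Rotate could do it).
// pushNV21Sketch is a hypothetical name.
void pushNV21Sketch(int streamCode,uint8_t *yData,uint8_t *uData,uint8_t *vData,
                    jint y_stride, jint u_stride, jint v_stride, jint uv_stride,
                    jint angle, jint width, jint height){
    uint8_t *planes[3];
    planes[0] = new uint8_t[width * height];
    planes[1] = new uint8_t[(width / 2) * (height / 2)];
    planes[2] = new uint8_t[(width / 2) * (height / 2)];
    libyuv::Android420ToI420(yData, y_stride,
                             uData, u_stride,
                             vData, v_stride,
                             uv_stride,          // pixel stride of the chroma planes
                             planes[0], width,
                             planes[1], width / 2,
                             planes[2], width / 2,
                             width, height);
    pushYUV(streamCode, planes);
    delete[] planes[0];
    delete[] planes[1];
    delete[] planes[2];
}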

void loadSurface(ANativeWindow *nativeWindow,uint8_t *yuvData[3],int width,int height){
    uint8_t* rgba_data = yuvToRGBA(yuvData,width,height);
    loadSurface(nativeWindow,rgba_data,width,height);
    delete[] rgba_data;
}
void loadSurface(ANativeWindow *nativeWindow,uint8_t *rgba_data,int width,int height){
    if(nativeWindow){
        int64_t t = getCurrentTimestamp();

        int64_t t1 = getCurrentTimestamp();
        // buffer
        //LOGI("video buffering");

        }
        int64_t t4 = getCurrentTimestamp();
        ANativeWindow_unlockAndPost(nativeWindow);
        //LOGI("loadSurface elapsed:%d sws_scale:%d memcpy:%d ANativeWindow_lock:%d video buffering:%d %dx%d",getCurrentTimestamp() - t,t1-t,t4-t3,t3-t2,t2-t1,width,height);
    }
}

}
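
// The buffering step elided in loadSurface above is typically an ANativeWindow lock followed
// by a row-by-row copy of the RGBA frame. Minimal sketch under that assumption (not the
// original code); blitRGBAToWindow is a hypothetical name.
static void blitRGBAToWindow(ANativeWindow *window, uint8_t *rgba_data, int width, int height) {
    ANativeWindow_setBuffersGeometry(window, width, height, WINDOW_FORMAT_RGBA_8888);
    ANativeWindow_Buffer buffer;
    if (ANativeWindow_lock(window, &buffer, nullptr) != 0) {
        return; // lock failed, drop this frame
    }
    uint8_t *dst = static_cast<uint8_t *>(buffer.bits);
    int dstLineSize = buffer.stride * 4; // stride is in pixels, 4 bytes per RGBA pixel
    int srcLineSize = width * 4;
    for (int y = 0; y < height; ++y) {
        memcpy(dst + y * dstLineSize, rgba_data + y * srcLineSize, srcLineSize);
    }
    ANativeWindow_unlockAndPost(window);
}
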
void releaseYuvData(uint8_t *yuvData[3]){
    if(yuvData[0] != NULL){
        // Free the planes to avoid leaks
        delete[] yuvData[0];
        delete[] yuvData[1];
        delete[] yuvData[2];
        yuvData[0] = yuvData[1] = yuvData[2] = nullptr;
    }
}