2 files added
7 files modified
New file |
| | |
| | | // |
| | | // Created by 倪路朋 on 4/13/25. |
| | | // |
| | | |
| | | #include "live_view_call.h" |
| | | #include "server_global.h" |
| | | |
| | | LiveViewCall::LiveViewCall(JNIEnv *env_, jobject instance_) { |
| | | this->env = env_; |
| | | this->env->GetJavaVM(&javaVM); |
| | | this->instance = env->NewGlobalRef(instance_); |
| | | jclass localClazz = env->FindClass("com/runt/live/cpp/LiveMiniView"); |
| | | if (localClazz == nullptr || env->ExceptionCheck()) { |
| | | env->ExceptionDescribe(); |
| | | env->ExceptionClear(); |
| | | return; |
| | | } |
| | | |
| | | // Take a global ref: this is critical! |
| | | // jobject/jclass local references are reclaimed once the native method returns |
| | | miniClazz = (jclass)env->NewGlobalRef(localClazz); |
| | | jmd_draw_text = env->GetStaticMethodID(miniClazz,"drawText","([BIII)[B"); |
| | | }; |
| | | |
| | | LiveViewCall::~LiveViewCall() { |
| | | javaVM = 0 ; |
| | | env->DeleteGlobalRef(instance); |
| | | env->DeleteGlobalRef(miniClazz); |
| | | instance = 0 ; |
| | | } |
| | | uint8_t *LiveViewCall::drawText(int stream_code, uint8_t *yuvData[3],int width,int height) { |
| | | |
| | | //****** Convert YUV to RGBA |
| | | int size = width * height * 4; |
| | | uint8_t *rgba_data = yuvToRGBA(yuvData,width,height); |
| | | // Convert to a Java byte array |
| | | jint res = javaVM->AttachCurrentThread(&env, nullptr); |
| | | |
| | | jbyteArray java_array = env->NewByteArray(size); |
| | | env->SetByteArrayRegion(java_array, 0, size, (jbyte *)rgba_data); |
| | | delete[] rgba_data; |
| | | jbyteArray rgbaArray = (jbyteArray)env->CallStaticObjectMethod(miniClazz, jmd_draw_text,java_array,stream_code,width,height); |
| | | env->DeleteLocalRef(java_array); |
| | | // 2. Allocate a native buffer |
| | | uint8_t* rgba_copy = new uint8_t[size]; |
| | | // 3. Copy the Java result back |
| | | env->GetByteArrayRegion(rgbaArray, 0, size, reinterpret_cast<jbyte*>(rgba_copy)); |
| | | env->DeleteLocalRef(rgbaArray); |
| | | javaVM->DetachCurrentThread(); |
| | | return rgba_copy; |
| | | } |
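CallStaticObjectMethod can return with a pending Java exception (or a null array) if LiveMiniView.drawText throws, and the code above uses the result directly. A minimal defensive wrapper, sketched against the same miniClazz / jmd_draw_text members and not part of the patch:

    #include <jni.h>

    // Sketch: invoke LiveMiniView.drawText and bail out cleanly if the Java side throws.
    static jbyteArray callDrawTextChecked(JNIEnv *env, jclass miniClazz, jmethodID jmdDrawText,
                                          jbyteArray rgba, jint streamCode, jint width, jint height) {
        jobject result = env->CallStaticObjectMethod(miniClazz, jmdDrawText, rgba, streamCode, width, height);
        if (env->ExceptionCheck()) {
            env->ExceptionDescribe(); // log the Java stack trace
            env->ExceptionClear();    // clear it so later JNI calls remain valid
            return nullptr;
        }
        return (jbyteArray) result;   // may still be null if drawText returned null
    }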
New file |
| | |
| | | // |
| | | // Created by 倪路朋 on 4/13/25. |
| | | // |
| | | |
| | | #ifndef LIVEPROJECT_LIVE_VIEW_CALL_H |
| | | #define LIVEPROJECT_LIVE_VIEW_CALL_H |
| | | |
| | | |
| | | #include <jni.h> |
| | | |
| | | class LiveViewCall { |
| | | public: |
| | | LiveViewCall(JNIEnv *env_, jobject instance_); |
| | | ~LiveViewCall(); |
| | | |
| | | uint8_t *drawText(int stream_code,uint8_t *yuvData[3],int width,int height); |
| | | private: |
| | | JavaVM *javaVM; |
| | | JNIEnv *env; |
| | | jobject instance; |
| | | jclass miniClazz; |
| | | jmethodID jmd_draw_text; |
| | | }; |
| | | |
| | | #endif //LIVEPROJECT_LIVE_VIEW_CALL_H |
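A minimal usage sketch of the new class (the function name and arguments are illustrative, not part of the patch): drawText converts the YUV planes to RGBA, hands them to the Java LiveMiniView.drawText, and returns a new[] copy of the result, so the caller releases it with delete[].

    #include "live_view_call.h"

    // Sketch: one overlay pass, assuming yuv[3] holds an I420 frame of the given size.
    void renderOverlayOnce(JNIEnv *env, jobject javaInstance, uint8_t *yuv[3], int width, int height) {
        LiveViewCall call(env, javaInstance);        // caches the JavaVM and takes global refs
        uint8_t *rgba = call.drawText(/*stream_code=*/1, yuv, width, height);
        if (rgba != nullptr) {
            // ... feed the RGBA buffer to rgbaToYUV() / loadSurface() here ...
            delete[] rgba;                           // drawText returns a new[] copy; the caller frees it
        }
    }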
| | |
| | | void *task_pushFrames(void *args){ |
| | | int64_t time = getCurrentTimestamp(); |
| | | while (TRUE){ |
| | | YUVData *mainYuvData; |
| | | int result = pushFrames.pop(mainYuvData); |
| | | if(!result){ |
| | | continue; |
| | |
| | | LOGE("nativeWindow 回收"); |
| | | } |
| | | if(!jvmMainCall){ |
| | | jvmMainCall = new LiveViewCall(env,clazz); |
| | | } |
| | | |
| | | } |
| | |
| | | yuv[2] = reinterpret_cast<uint8_t *>(data + ySize + uSize); // V plane |
| | | pushYUV(stream_code,yuv); |
| | | env->ReleaseByteArrayElements(bytes,data,0); |
| | | } |
| | | extern "C" JNIEXPORT jbyteArray JNICALL |
| | | Java_com_runt_live_cpp_LiveMiniView_native_1get_1cut_1frame(JNIEnv *env, jclass clazz, jint index, jint stream_code) { |
| | | uint8_t *yuvData[3]; |
| | | if(mainYuvData){ |
| | | pthread_mutex_lock(&pushVideoMutex); |
| | | copyYUV(mainYuvData->yuvData,FRAME_WIDTH,FRAME_HEIGHT,yuvData); |
| | | pthread_mutex_unlock(&pushVideoMutex); |
| | | }else{ |
| | | copyYUV(blackYUV,FRAME_WIDTH,FRAME_HEIGHT,yuvData); |
| | | } |
| | | waterYUV(index,yuvData); |
| | | //***** Crop the frame |
| | | MiniViewData *miniView = getMiniView(stream_code); |
| | | int scaleWidth = (FRAME_WIDTH * miniView->viewRate); |
| | | int scaleHeight = (FRAME_WIDTH / (miniView->width * 1.0 / miniView->height) * miniView->viewRate); |
| | | // For YUV 4:2:0 images, width and height generally need to be even |
| | | if(scaleWidth % 2 == 1){ |
| | | scaleWidth+=1; |
| | | } |
| | | if(scaleHeight % 2 == 1){ |
| | | scaleHeight+=1; |
| | | } |
| | | if(scaleWidth > FRAME_WIDTH){ |
| | | scaleWidth = FRAME_WIDTH; |
| | | } |
| | | if(scaleHeight > FRAME_HEIGHT){ |
| | | scaleHeight = FRAME_HEIGHT; |
| | | } |
| | | if(!miniView->scaledYuvFrame){ |
| | | miniView->scaledYuvFrame = new YUVData; |
| | | } |
| | | miniView->scaledYuvFrame->width = scaleWidth; |
| | | miniView->scaledYuvFrame->height = scaleHeight; |
| | | miniView->scaledYuvFrame->pYrate = miniView->pYrate; |
| | | miniView->scaledYuvFrame->pXrate = miniView->pXrate; |
| | | uint8_t *dstData[3]; |
| | | // Position |
| | | int offsetY = (FRAME_HEIGHT - scaleHeight) * miniView->pYrate; |
| | | int offsetX = (FRAME_WIDTH - scaleWidth) * miniView->pXrate; |
| | | cutYUV(yuvData,FRAME_WIDTH,FRAME_HEIGHT,offsetX,offsetY,dstData,scaleWidth,scaleHeight); |
| | | LOGE("index:%d x:%d y:%d %dx%d view:%dx%d",index,offsetX,offsetY,scaleWidth,scaleHeight,miniView->width,miniView->height); |
| | | copyYUV(dstData,scaleWidth,scaleHeight,miniView->scaledYuvFrame->yuvData); |
| | | addBlackBorder(miniView->scaledYuvFrame->yuvData,scaleWidth,scaleHeight,3); |
| | | // The source data is no longer needed |
| | | delete[] yuvData[0]; |
| | | delete[] yuvData[1]; |
| | | delete[] yuvData[2]; |
| | | //****** Convert to RGBA |
| | | int size = scaleWidth * scaleHeight * 4; |
| | | uint8_t *rgba_data = yuvToRGBA(dstData,scaleWidth,scaleHeight); |
| | | // The YUV data is no longer needed |
| | | delete[] dstData[0]; |
| | | delete[] dstData[1]; |
| | | delete[] dstData[2]; |
| | | // Convert to a Java byte array |
| | | jbyteArray java_array = env->NewByteArray(size); |
| | | env->SetByteArrayRegion(java_array, 0, size, (jbyte *)rgba_data); |
| | | delete[] rgba_data; |
| | | return java_array; |
| | | } |
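The even-dimension rounding and clamping above is repeated in waterYUV and in the Kotlin updateText below; the same arithmetic as a standalone helper, a sketch only (alignDimension is not part of the patch):

    // Sketch: round a scaled dimension up to an even value (YUV 4:2:0 chroma planes are
    // half resolution, so width/height must be even) and clamp it to the frame bound.
    static int alignDimension(int value, int maxValue) {
        if (value % 2 == 1) {
            value += 1;          // make it even
        }
        if (value > maxValue) {
            value = maxValue;    // never exceed the full frame
        }
        return value;
    }

    // e.g. int scaleWidth = alignDimension((int)(FRAME_WIDTH * miniView->viewRate), FRAME_WIDTH);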
| | |
| | | std::map<int,LivePusher *> pushers = {}; |
| | | vector<MiniViewData *> pushMiniDatas = {}; |
| | | MiniViewData *mainView = 0; |
| | | YUVData *mainYuvData = 0; |
| | | int mainStreamCode = 0; |
| | | pthread_mutex_t pushVideoMutex,pushNV21Mutex,pushAudioMutex; |
| | | ANativeWindow *nativeMainWindow = 0; |
| | |
| | | SafeQueue<YUVData *> pushFrames; |
| | | AudioChannel *audioChannel = 0; |
| | | VideoChannel *videoChannel = 0; |
| | | LiveViewCall *jvmMainCall = 0; |
| | | |
| | | void packetCallBack(RTMPPacket *packet){ |
| | | //LOGI("packetCallBack safequeue packets:%d",&packets); |
| | |
| | | } |
| | | } |
| | | } |
| | | |
| | | /** |
| | | * Crop the main frame |
| | | * @param pData |
| | | * @param data |
| | | */ |
| | |
| | | |
| | | // Allocate destination YUV memory |
| | | int dstYSize = miniWidth * miniHeight; |
| | | cutYUV(yuvData,width,height,cropX,cropY,dstData,miniWidth,miniHeight); |
| | | } |
| | | |
| | | /** |
| | | * Crop a region of the frame |
| | | * @param yuvData source data |
| | | * @param width source width |
| | | * @param height source height |
| | | * @param cropX crop X position |
| | | * @param cropY crop Y position |
| | | * @param dstData destination data |
| | | * @param dstWidth destination width |
| | | * @param dstHeight destination height |
| | | */ |
| | | void cutYUV(uint8_t *yuvData[3],int width,int height,int cropX,int cropY,uint8_t *dstData[],int dstWidth,int dstHeight){ |
| | | // Allocate destination YUV memory |
| | | int dstYSize = dstWidth * dstHeight; |
| | | int dstUSize = dstYSize / 4; |
| | | |
| | | dstData[0] = new uint8_t[dstYSize]; |
| | |
| | | dstData[2] = new uint8_t[dstUSize]; |
| | | |
| | | // Crop the Y plane |
| | | for (int y = 0; y < dstHeight; ++y) { |
| | | memcpy(dstData[0] + y * dstWidth,yuvData[0]+ (cropY + y) * width + cropX, |
| | | dstWidth); |
| | | } |
| | | |
| | | int srcChromaWidth = width / 2; |
| | | int cropChromaX = cropX / 2; |
| | | int cropChromaY = cropY / 2; |
| | | int cropChromaWidth = dstWidth / 2; |
| | | int cropChromaHeight = dstHeight / 2; |
| | | |
| | | // Crop the U plane (chroma planes are half the Y resolution) |
| | | for (int y = 0; y < cropChromaHeight; ++y) { |
| | | memcpy(dstData[1] + y * (cropChromaWidth), |
| | | yuvData[1] + (cropChromaY + y) * (srcChromaWidth) + cropChromaX, |
| | | cropChromaWidth); |
| | | } |
| | | |
| | | // Crop the V plane |
| | | for (int y = 0; y < cropChromaHeight; ++y) { |
| | | memcpy(dstData[2] + y * (cropChromaWidth), |
| | | yuvData[2] + (cropChromaY + y) * (srcChromaWidth) + cropChromaX, |
| | | cropChromaWidth); |
| | | } |
| | | } |
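cutYUV allocates the three destination planes itself, so ownership passes to the caller; a short usage sketch (cropCenter and its even-offset math are illustrative, not part of the patch):

    // Sketch: crop a centred dstWidth x dstHeight window out of an I420 frame,
    // then release the planes that cutYUV allocated with new[].
    void cropCenter(uint8_t *src[3], int srcWidth, int srcHeight, int dstWidth, int dstHeight) {
        int cropX = ((srcWidth - dstWidth) / 2) & ~1;   // keep the crop origin even for the chroma planes
        int cropY = ((srcHeight - dstHeight) / 2) & ~1;
        uint8_t *dst[3];
        cutYUV(src, srcWidth, srcHeight, cropX, cropY, dst, dstWidth, dstHeight);
        // ... use dst here, e.g. copyYUV() it into a YUVData frame or convert it with yuvToRGBA() ...
        releaseYuvData(dst);
    }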
| | | |
| | |
| | | void waterYUV(int index,uint8_t *mainData[3]) { |
| | | //LOGI("waterYUV 加水印(画中画)"); |
| | | for (int i = 0; i < index; ++i) { |
| | | MiniViewData *miniView = pushMiniDatas[i]; |
| | | if(miniView->streamCode == mainStreamCode){ |
| | | continue; |
| | | } |
| | | if(!miniView->videoOn){ |
| | | continue; |
| | | } |
| | | if(miniView->streamType == 6){ |
| | | int64_t t = getCurrentTimestamp(); |
| | | //***** Crop the frame |
| | | int scaleWidth = (FRAME_WIDTH * miniView->viewRate); |
| | | int scaleHeight = (FRAME_WIDTH / (miniView->width * 1.0 / miniView->height) * miniView->viewRate); |
| | | // For YUV 4:2:0 images, width and height generally need to be even |
| | | if(scaleWidth % 2 == 1){ |
| | | scaleWidth+=1; |
| | | } |
| | | if(scaleHeight % 2 == 1){ |
| | | scaleHeight+=1; |
| | | } |
| | | if(scaleWidth > FRAME_WIDTH){ |
| | | scaleWidth = FRAME_WIDTH; |
| | | } |
| | | if(scaleHeight > FRAME_HEIGHT){ |
| | | scaleHeight = FRAME_HEIGHT; |
| | | } |
| | | YUVData *yuvFrame = new YUVData; |
| | | yuvFrame->width = scaleWidth; |
| | | yuvFrame->height = scaleHeight; |
| | | yuvFrame->pYrate = miniView->pYrate; |
| | | yuvFrame->pXrate = miniView->pXrate; |
| | | uint8_t *dstData[3]; |
| | | // Position |
| | | int offsetY = (FRAME_HEIGHT - scaleHeight) * miniView->pYrate; |
| | | int offsetX = (FRAME_WIDTH - scaleWidth) * miniView->pXrate; |
| | | cutYUV(mainData,FRAME_WIDTH,FRAME_HEIGHT,offsetX,offsetY,dstData,scaleWidth,scaleHeight); |
| | | //addBlackBorder(yuvFrame->yuvData,scaleWidth,scaleHeight,3); |
| | | uint8_t *rgbaData = jvmMainCall->drawText(miniView->streamCode,dstData,scaleWidth,scaleHeight); |
| | | releaseYuvData(dstData); |
| | | rgbaToYUV((uint8_t *)rgbaData,scaleWidth,scaleHeight,yuvFrame->yuvData); |
| | | miniView->scaledYuvFrames.push(yuvFrame); |
| | | delete[] rgbaData; |
| | | LOGI("drawText elapsed:%lld",(long long)(getCurrentTimestamp() - t)); |
| | | } |
| | | if(!miniView->scaledYuvFrame && miniView->scaledYuvFrames.size() == 0 ){ |
| | | //LOGE("mini view dropped frame 1"); |
| | | continue; |
| | | } |
| | | if(miniView->scaledYuvFrames.size() > 0){ |
| | | releaseYuvFrameData(miniView->scaledYuvFrame); |
| | | miniView->scaledYuvFrames.pop(miniView->scaledYuvFrame); |
| | | }else{ |
| | | LOGE("mini view dropped frame 2"); |
| | | } |
| | | // Drop stale YUV frames |
| | | if(miniView->yuvFrames.size() > 1){ |
| | | if(!miniView->yuvFrame){ |
| | | miniView->yuvFrames.pop(miniView->yuvFrame); |
| | | } |
| | | while (miniView->yuvFrame->timestamp < miniView->scaledYuvFrame->timestamp && miniView->yuvFrames.size() > 1 ){ |
| | | releaseYuvFrameData(miniView->yuvFrame); |
| | | miniView->yuvFrames.pop(miniView->yuvFrame); |
| | | } |
| | | } |
| | | |
| | | YUVData *scaledYuvFrame = miniView->scaledYuvFrame; |
| | | // Position |
| | | int offsetY = (mainHeight - scaledYuvFrame->height) * scaledYuvFrame->pYrate; |
| | | int offsetX = (mainWidth - scaledYuvFrame->width) * scaledYuvFrame->pXrate; |
| | | |
| | | //LOGI("waterYUV %dx%d x:%d,y:%d x:%d,y:%d ",scaledYuvFrame->width,scaledYuvFrame->height,miniView->pYrate,miniView->pXrate,offsetX,offsetY); |
| | | |
| | | // Bounds check to make sure we do not write out of range |
| | | if (offsetX + scaledYuvFrame->width > mainWidth ) { |
| | |
| | | sws_freeContext(sws_ctx); |
| | | return rgba_data; |
| | | } |
| | | |
| | | void argbToYUV(uint8_t *rgba,int width,int height,uint8_t *src_slices[3]){ |
| | | |
| | | int frameSize = width * height; |
| | | int uvSize = (width / 2) * (height / 2); |
| | | // Allocate memory for the YUV data |
| | | int yuvSize = frameSize * 3 / 2; // total size of a YUV420P frame |
| | | |
| | | src_slices[0] = new uint8_t[frameSize]; |
| | | src_slices[1] = new uint8_t[uvSize]; |
| | | src_slices[2] = new uint8_t[uvSize]; |
| | | |
| | | // Use libyuv to convert RGBA to YUV420P |
| | | libyuv::ARGBToI420( |
| | | reinterpret_cast<uint8_t*>(rgba), width * 4, // RGBA data and stride (4 bytes per pixel) |
| | | src_slices[0], width, // Y plane |
| | | src_slices[1], width / 2, // U plane |
| | | src_slices[2], width / 2, // V plane |
| | | width, // width |
| | | height // height |
| | | ); |
| | | //LOGI("pushRGBA width:%d height:%d %d",miniView->width,miniView->height,rgbaData); |
| | | } |
| | | |
| | | void rgbaToYUV(uint8_t *rgba,int width,int height,uint8_t *yuvData[3]){ |
| | | |
| | | int frameSize = width * height; |
| | | int uvSize = (width / 2) * (height / 2); |
| | | |
| | | yuvData[0] = new uint8_t[frameSize]; |
| | | yuvData[1] = new uint8_t[uvSize]; |
| | | yuvData[2] = new uint8_t[uvSize]; |
| | | |
| | | // (Previously used libyuv to convert RGBA to YUV420P) |
| | | /*libyuv::RGBAToI420( |
| | | reinterpret_cast<uint8_t*>(rgba), width * 4, // RGBA data and stride (4 bytes per pixel) |
| | | src_slices[0], width, // Y plane |
| | | src_slices[1], width / 2, // U plane |
| | | src_slices[2], width / 2, // V plane |
| | | width, // width |
| | | height // height |
| | | );*/ |
| | | |
| | | // 1. Create the SwsContext |
| | | SwsContext* swsCtx = sws_getContext( |
| | | width, height, |
| | | AV_PIX_FMT_RGBA, // input format; change to BGRA, ARGB, etc. to match the actual source |
| | | width, height, |
| | | AV_PIX_FMT_YUV420P, // output format |
| | | SWS_BILINEAR, |
| | | nullptr, nullptr, nullptr |
| | | ); |
| | | // 2. Wrap the input data |
| | | uint8_t* inData[1] = { rgba }; |
| | | int inLineSize[1] = { width * 4 }; |
| | | |
| | | // 3. Output plane strides |
| | | int yuv_linesize[3] = { width, width / 2, width / 2 }; // YUV420P: U and V are half the width of Y |
| | | |
| | | // 4. Perform the conversion |
| | | sws_scale(swsCtx, inData, inLineSize, 0, height, yuvData, yuv_linesize); |
| | | |
| | | // 5. Release resources |
| | | sws_freeContext(swsCtx); |
| | | // Note: the output planes were allocated above with new[]; the caller frees them when done |
| | | //LOGI("pushRGBA width:%d height:%d %d",miniView->width,miniView->height,rgbaData); |
| | | } |
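Like cutYUV, rgbaToYUV allocates its output planes with new[] and leaves releasing them to the caller; a short round-trip sketch (consumeRgbaFrame is illustrative, not part of the patch):

    // Sketch: convert an RGBA buffer back to I420 and free the planes when done.
    void consumeRgbaFrame(uint8_t *rgba, int width, int height) {
        uint8_t *planes[3];
        rgbaToYUV(rgba, width, height, planes);  // allocates and fills Y/U/V
        // ... e.g. pushYUV(streamCode, planes) or wrap the planes in a YUVData frame ...
        releaseYuvData(planes);                  // matching delete[] for all three planes
    }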
| | | |
| | | void copyYUV(uint8_t *yuvData[3],int width,int height, uint8_t *dstData[3]){ |
| | | // Compute the size of each plane |
| | | size_t y_size = width * height; // Y plane size |
| | |
| | | addBlackBorder(scaleYuvFrame->yuvData,scaleWidth,scaleHeight,3); |
| | | miniView->scaledYuvFrames.push(scaleYuvFrame); |
| | | // The cached frame count must not exceed max (5-10 frames) |
| | | int max = 3;int min = 3; |
| | | if(miniView->yuvFrames.size() > (mainStreamCode == miniView->streamCode ? max:min) ){ |
| | | if(mainStreamCode == miniView->streamCode){ |
| | | LOGE("%d frame overflow pushyuv %d",streamCode,miniView->yuvFrames.size() - (miniView->videoOn ? max:min)); |
| | |
| | | if(miniView->width == 0 || miniView->height == 0){ |
| | | return; |
| | | } |
| | | uint8_t* src_slices[3]; |
| | | argbToYUV(rgbaData,miniView->width,miniView->height,src_slices); |
| | | pushYUV(streamCode,src_slices); |
| | | // Release resources |
| | | delete[] src_slices[0]; |
| | | delete[] src_slices[1]; |
| | | delete[] src_slices[2]; |
| | | } |
| | | |
| | | void pushNV21(int streamCode,uint8_t *yData,uint8_t *uData,uint8_t *vData,jint y_stride, jint u_stride,jint v_stride, jint uv_stride,jint angle,jint width,jint height){ |
| | |
| | | } |
| | | |
| | | void loadSurface(ANativeWindow *nativeWindow,uint8_t *yuvData[3],int width,int height){ |
| | | uint8_t* rgba_data = yuvToRGBA(yuvData,width,height); |
| | | loadSurface(nativeWindow,rgba_data,width,height); |
| | | delete[] rgba_data; |
| | | } |
| | | void loadSurface(ANativeWindow *nativeWindow,uint8_t *rgba_data,int width,int height){ |
| | | if(nativeWindow){ |
| | | int64_t t = getCurrentTimestamp(); |
| | | |
| | | int64_t t1 = getCurrentTimestamp(); |
| | | // Buffer region |
| | | //LOGI("video buffering"); |
| | |
| | | } |
| | | int64_t t4 = getCurrentTimestamp(); |
| | | ANativeWindow_unlockAndPost(nativeWindow); |
| | | //LOGI("loadSurface elapsed:%d sws_scale:%d memcpy:%d ANativeWindow_lock:%d buffering:%d %dx%d",getCurrentTimestamp() - t,t1-t,t4-t3,t3-t2,t2-t1,width,height); |
| | | } |
| | | } |
| | |
| | | } |
| | | void releaseYuvData(uint8_t *yuvData[3]){ |
| | | if(yuvData[0] != NULL){ |
| | | // Free the memory to avoid leaks |
| | | delete[] yuvData[0]; |
| | | delete[] yuvData[1]; |
| | | delete[] yuvData[2]; |
| | | yuvData[0] = yuvData[1] = yuvData[2] = nullptr; |
| | | } |
| | | } |
| | | |
| | |
| | | #include "yuv_data.h" |
| | | #include "video_channel.h" |
| | | #include "audio_channel.h" |
| | | #include "live_view_call.h" |
| | | |
| | | extern "C"{ |
| | | #include <libyuv.h> |
| | |
| | | const int FRAME_WIDTH = 1080,FRAME_HEIGHT = 1920,FRAME_PS = 30,FRAME_RATE = 4000; |
| | | extern AudioChannel *audioChannel; |
| | | extern VideoChannel *videoChannel; |
| | | extern LiveViewCall *jvmMainCall; |
| | | extern uint8_t *blackYUV[3]; |
| | | extern uint32_t start_time; |
| | | |
| | |
| | | void waterYUV(int index,uint8_t *mainData[3]); |
| | | void getPushYUV(int index,uint8_t *mainData[3]); |
| | | uint8_t* yuvToRGBA(uint8_t *yuvData[3],int width,int height); |
| | | void argbToYUV(uint8_t *rgba,int width,int height,uint8_t *yuvData[3]); |
| | | void rgbaToYUV(uint8_t *rgba,int width,int height,uint8_t *yuvData[3]); |
| | | void pushRGBA(int streamCode,uint8_t *rgbaData); |
| | | void pushNV21(int streamCode, jbyte *data); |
| | | void pushNV21(int streamCode,uint8_t *yData,uint8_t *uData,uint8_t *vData,jint y_stride, jint u_stride,jint v_stride, jint uv_stride,jint angle,jint width,jint height); |
| | | void loadSurface(ANativeWindow *nativeWindow,uint8_t *yuvData[3],int width,int height); |
| | | void loadSurface(ANativeWindow *nativeWindow,uint8_t *rgba,int width,int height); |
| | | void addBlackBorder(uint8_t *yuvData[3],int width,int height,int borderWidth); |
| | | void addCornerAndBlackBorder(uint8_t *yuvData[3],int width,int height,int borderWidth,int cornerRadius); |
| | | void playPCM(uint8_t *out_buffer,int out_buffer_size); |
| | |
| | | package com.runt.live.cpp; |
| | | |
| | | import android.media.AudioFormat; |
| | | import android.graphics.Bitmap; |
| | | import android.graphics.Canvas; |
| | | import android.graphics.Rect; |
| | | import android.util.Log; |
| | | import android.view.Surface; |
| | | |
| | | import com.runt.live.R; |
| | | import com.runt.live.data.StreamWindow; |
| | | import com.runt.live.ui.stream.LiveLayoutView; |
| | | import com.runt.live.util.AudioUtil; |
| | | import com.runt.live.util.BitmapUtils; |
| | | import com.runt.open.mvi.OpenApplication; |
| | | |
| | | import java.nio.ByteBuffer; |
| | | import java.util.ArrayList; |
| | |
| | | static final String TAG = "LiveMiniView"; |
| | | public static AudioUtil audioUtil = new AudioUtil(); |
| | | static HashMap<String,PcmData> pcmdatas = new HashMap<>(); |
| | | static HashMap<String,Bitmap> textBitmaps = new HashMap<>(); |
| | | private static final ReentrantReadWriteLock lock = new ReentrantReadWriteLock(); |
| | | public static int mainStreamCode = 0 ; |
| | | public static final int FRAME_WIDTH = 1080,FRAME_HEIGHT = 1920,FRAME_PS = 30; |
| | |
| | | public static native void native_push_pcm(byte[] bytes); |
| | | public static native void native_set_main_surface(Surface surface); |
| | | public static native void native_release_main_surface(); |
| | | public static native byte[] native_convert_nv21_to_rgba(byte[] bytes,int width,int height); |
| | | |
| | | public static byte[] drawText(byte[] rgba,int streamCode,int width,int height){ |
| | | |
| | | int length = rgba.length; |
| | | |
| | | byte[] argb = new byte[length]; |
| | | StreamWindow streamWindow = LiveLayoutView.Companion.getLiveStreamsState().getValue().getStream(streamCode); |
| | | String key = streamWindow.getId()+""; |
| | | if(!textBitmaps.containsKey(key)){ |
| | | Bitmap bitmap = BitmapUtils.Companion.getInstance().textToBitmap(streamWindow.getRemark(),130f , OpenApplication.Companion.getApplication().getResources().getColor(R.color.white), |
| | | OpenApplication.Companion.getApplication().getResources().getColor(R.color.transparent)); |
| | | textBitmaps.put(key,bitmap); |
| | | } |
| | | Bitmap bitmap = textBitmaps.get(key); |
| | | Bitmap mainBitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); |
| | | //Log.e(TAG , "updateText: ${width}x${cutHeight} ${streamWindow.sizeState.value} ${( streamWindow.sizeState.value.x * 1.0 / streamWindow.sizeState.value.y )}", ) |
| | | ByteBuffer buffer = ByteBuffer.wrap(rgba); |
| | | mainBitmap.copyPixelsFromBuffer(buffer); |
| | | Bitmap scaledBitmap = Bitmap.createScaledBitmap(bitmap,width,height, true); |
| | | |
| | | Canvas canvas = new Canvas(mainBitmap); |
| | | |
| | | // Build the destination rect |
| | | Rect destRect = new Rect(0,0,width,height); |
| | | // Draw the small bitmap onto the large one |
| | | canvas.drawBitmap(scaledBitmap, null, destRect, null); |
| | | |
| | | ByteBuffer buffer2 = ByteBuffer.allocate(mainBitmap.getByteCount()); |
| | | mainBitmap.copyPixelsToBuffer(buffer2); |
| | | argb = buffer2.array(); |
| | | return argb; |
| | | } |
| | | |
| | | /* external fun native_update_mini_sn(code : Int ,streamCode : Int, sn : Int) |
| | | external fun native_update_mini_live(code : Int ,streamCode : Int, isLive:Boolean) |
| | |
| | | } |
| | | return null; |
| | | } |
| | | fun getStream(streamId:Int):StreamWindow?{ |
| | | for ( i in subStreamWindows.value.indices){ |
| | | if(streamId == subStreamWindows.value[i].id){ |
| | | return subStreamWindows.value[i] |
| | | } |
| | | } |
| | | return null; |
| | | } |
| | | |
| | | } |
| | |
| | | package com.runt.live.ui.stream |
| | | |
| | | import android.graphics.Bitmap |
| | | import android.graphics.Canvas |
| | | import android.graphics.Point |
| | | import android.graphics.Rect |
| | | import android.os.Handler |
| | | import android.util.Log |
| | | import android.view.SurfaceHolder |
| | | import com.runt.live.R |
| | | import com.runt.live.cpp.LiveMiniView |
| | | import com.runt.live.cpp.LiveMiniView.FRAME_HEIGHT |
| | | import com.runt.live.cpp.LiveMiniView.FRAME_WIDTH |
| | | import com.runt.live.data.StreamWindow |
| | | import com.runt.live.enum.LiveState |
| | | import com.runt.live.enum.StreamType |
| | | import com.runt.live.native.LivePuller |
| | | import com.runt.live.native.LivePusher |
| | | import com.runt.live.native.MediaPlayer |
| | | import com.runt.live.ui.stream.LiveLayoutView.Companion.subStreamsState |
| | | import com.runt.live.util.BitmapUtils |
| | | import com.runt.open.mvi.base.model.BaseViewModel |
| | | import java.nio.ByteBuffer |
| | | import java.util.concurrent.ConcurrentSkipListMap |
| | | import kotlin.concurrent.thread |
| | | |
| | |
| | | streamWindow.audioState.value == LiveState.IN_LIVE,streamWindow.videoState.value == LiveState.IN_LIVE, |
| | | streamWindow.videoDelay,streamWindow.streamType.value, |
| | | streamWindow.mainPositionRateState.value, streamWindow.viewRateState.value) |
| | | |
| | | } |
| | | |
| | | fun removeMiniView(streamCode :Int){ |
| | | LiveMiniView.native_remove_mini_view(streamCode) |
| | | } |
| | | |
| | | fun updateText(streamWindow : StreamWindow){ |
| | | thread { |
| | | var bytes = LiveMiniView.native_get_cut_frame(subStreamsState.value.indexOf(streamWindow),streamWindow.id) |
| | | var bitmap = BitmapUtils.instance!!.textToBitmap(streamWindow.remark!!,130f,getActivity().resources.getColor(R.color.white)!!,getActivity().resources.getColor(R.color.transparent)!!) |
| | | |
| | | var cutwidth : Int = (FRAME_WIDTH * streamWindow.viewRateState.value).toInt() |
| | | var cutHeight : Int = (FRAME_WIDTH / ( streamWindow.sizeState.value.x * 1.0 / streamWindow.sizeState.value.y ) * streamWindow.viewRateState.value).toInt() |
| | | |
| | | // For YUV 4:2:0 images, width and height generally need to be even |
| | | if (cutwidth % 2 == 1) { |
| | | cutwidth += 1 |
| | | } |
| | | if (cutHeight % 2 == 1) { |
| | | cutHeight += 1 |
| | | } |
| | | if (cutwidth > FRAME_WIDTH) { |
| | | cutwidth = FRAME_WIDTH |
| | | } |
| | | if (cutHeight > FRAME_HEIGHT) { |
| | | cutHeight = FRAME_HEIGHT |
| | | } |
| | | val cutBitmap = Bitmap.createBitmap(cutwidth, cutHeight, Bitmap.Config.ARGB_8888) |
| | | Log.e(TAG , "updateText: ${cutwidth}x${cutHeight} ${streamWindow.sizeState.value} ${( streamWindow.sizeState.value.x * 1.0 / streamWindow.sizeState.value.y )}", ) |
| | | val buffer = ByteBuffer.wrap(bytes) |
| | | cutBitmap.copyPixelsFromBuffer(buffer) |
| | | |
| | | val dstBitmap = Bitmap.createBitmap(bitmap.width, bitmap.height, Bitmap.Config.ARGB_8888) |
| | | |
| | | val canvas = Canvas(dstBitmap) |
| | | |
| | | // Build the destination rect |
| | | val destRect = Rect(0,0,dstBitmap.width,dstBitmap.height) |
| | | |
| | | // Draw the small bitmap onto the large one |
| | | canvas.drawBitmap(cutBitmap, null, destRect, null) |
| | | //canvas.drawBitmap(bitmap, null, destRect, null) |
| | | |
| | | val width : Int = bitmap.getWidth() |
| | | val height : Int = bitmap.getHeight() |
| | | val argb = IntArray(width * height) |
| | | dstBitmap.getPixels(argb , 0 , width , 0 , 0 , width , height) |
| | | //LiveMiniView.native_push_nv21(streamWindow.id,BitmapUtils.instance!!.argbToNV21(argb,width,height)); |
| | | } |
| | | |
| | | } |
| | | fun openText(streamWindow : StreamWindow){ |
| | | |
| | | var bitmap = BitmapUtils.instance!!.textToBitmap(streamWindow.remark!!,130f,getActivity().resources.getColor(R.color.white)!!,getActivity().resources.getColor(R.color.transparent)!!) |
| | |
| | | streamWindow.listener?.onSurfaceUpdate?.let { it(holder.surface) } |
| | | thread { |
| | | BitmapUtils.instance!!.cavansSurface(holder,bitmap); |
| | | val width : Int = bitmap.getWidth() |
| | | val height : Int = bitmap.getHeight() |
| | | val argb = IntArray(width * height) |
| | | bitmap.getPixels(argb , 0 , width , 0 , 0 , width , height) |
| | | LiveMiniView.native_push_nv21(streamWindow.id,BitmapUtils.instance!!.argbToNV21(argb,width,height)); |
| | | } |
| | | //Log.w(TAG , "surfaceChanged: ${holder.hashCode()}" , ) |
| | | } |
| | |
| | | //Log.w(TAG , "surfaceDestroyed: ${holder.hashCode()} ${id}" , ) |
| | | } |
| | | } |
| | | Handler().postDelayed({streamWindow.listener?.onStarted?.invoke()},500) |
| | | } |
| | | |
| | | /** |
| | |
| | | val bmpHeight = ceil((lineHeight * lines.size).toDouble()).toInt() |
| | | |
| | | val bitmap = Bitmap.createBitmap(bmpWidth , bmpHeight , Bitmap.Config.ARGB_8888) |
| | | bitmap.eraseColor(Color.TRANSPARENT) // start fully transparent |
| | | val canvas = Canvas(bitmap) |
| | | canvas.drawColor(bgColor) // background color |
| | | |
| | | |
| | | // Draw the text line by line |