#include <jni.h>
#include <unwind.h>
#include <dlfcn.h>
#include <csignal>
#include <cstring>
#include <chrono>
#include <thread>
#include <android/log.h>
#include <android/native_window_jni.h>
#include "server_global.h"
#include "yuv_data.h"
extern "C"{
#include "libavutil/avutil.h"
#include "librtmp/rtmp.h"
}

struct BacktraceState {
    void** current;
    void** end;
};

static _Unwind_Reason_Code unwindCallback(struct _Unwind_Context* context, void* arg) {
    BacktraceState* state = (BacktraceState*)arg;
    uintptr_t pc = _Unwind_GetIP(context);
    if (pc) {
        if (state->current == state->end) {
            return _URC_END_OF_STACK;
        } else {
            *state->current++ = (void*)pc;
        }
    }
    return _URC_NO_REASON;
}

size_t captureBacktrace(void** buffer, size_t max) {
    BacktraceState state = {buffer, buffer + max};
    _Unwind_Backtrace(unwindCallback, &state);
    return state.current - buffer;
}

void printBacktrace() {
    const size_t max = 30;
    void* buffer[max];
    size_t size = captureBacktrace(buffer, max);
    for (size_t i = 0; i < size; i++) {
        const void* addr = buffer[i];
        const char* symbol = "";
        Dl_info info;
        if (dladdr(addr, &info) && info.dli_sname) {
            symbol = info.dli_sname;
        }
        __android_log_print(ANDROID_LOG_ERROR, "NativeCrash", "  #%zu pc %p %s", i, addr, symbol);
    }
}

void signalHandler(int sig) {
    __android_log_print(ANDROID_LOG_ERROR, "NativeCrash", "Caught signal: %d", sig);
    printBacktrace(); // dump the call stack at the moment of the crash
    signal(sig, SIG_DFL); // restore the default handler, then re-raise so the system still reports the crash
    raise(sig);
}

void setupSignalHandlers() {
    signal(SIGSEGV, signalHandler); // segmentation fault
    signal(SIGABRT, signalHandler); // abort()
    signal(SIGFPE, signalHandler);  // floating-point exception
    signal(SIGILL, signalHandler);  // illegal instruction
}
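// A minimal sketch of how the absolute pc values logged above could be turned
// into library-relative offsets for offline symbolization (e.g. feeding them
// to addr2line against the unstripped .so). relativePC is a hypothetical
// helper, not part of the original file; it reuses the same dladdr() lookup
// that printBacktrace() performs.
static uintptr_t relativePC(const void* addr) {
    Dl_info info;
    if (dladdr(addr, &info) && info.dli_fbase) {
        // Subtract the shared object's load base to get an ASLR-independent offset.
        return (uintptr_t)addr - (uintptr_t)info.dli_fbase;
    }
    return (uintptr_t)addr; // fall back to the absolute address
}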
void *task_load_main_surface(void *args){
    while (TRUE){
        YUVData *data;
        int result = mainSurfaceFrames.pop(data);
        if(!result){
            continue;
        }
        pthread_mutex_lock(&loadSurfaceMutex);
        loadSurface(nativeMainWindow, data->yuvData, data->width, data->height);
        pthread_mutex_unlock(&loadSurfaceMutex);
        releaseYuvData(data->yuvData);
        // If rendering falls behind, drop one queued frame to catch up.
        if(mainSurfaceFrames.size() > 1){
            delete data; // free the rendered frame before reusing the pointer
            data = 0;
            int dropped = mainSurfaceFrames.pop(data);
            if(dropped){
                releaseYuvData(data->yuvData);
            }
        }
        delete data;
    }
    return 0;
}

void *task_count_down(void *args){
    const double frameInterval = 1000.0 / FRAME_PS; // ms per frame, e.g. 16.67 ms at 60 FPS
    while (TRUE){
        auto startTime = std::chrono::high_resolution_clock::now();
        YUVData *yuv = new YUVData;
        yuv->timestamp = RTMP_GetTime() - start_time;
        pushFrames.push(yuv);
        // Measure how long this iteration took and sleep off the remainder of the frame slot.
        auto endTime = std::chrono::high_resolution_clock::now();
        std::chrono::duration<double, std::milli> elapsed = endTime - startTime;
        auto waitTime = std::chrono::milliseconds(static_cast<int64_t>(frameInterval)) - elapsed;
        if (waitTime.count() > 0) {
            // Wait out the rest of the slot so the producer holds a steady FRAME_PS rate.
            std::this_thread::sleep_for(waitTime);
        }
    }
    return 0;
}

void *task_pushFrames(void *args){
    int64_t time = getCurrentTimestamp();
    while (TRUE){
        int result = pushFrames.pop(mainYuvData);
        if(!result){
            continue;
        }
        //LOGI("start composing the frame");
        float rate = 0;
        int width = 0;
        int height = 0;
        pthread_mutex_lock(&pushVideoMutex);
        bool flag = mainView && (mainView->yuvFrames.size() > 0 || mainView->yuvFrame); // a main view is currently available
        if(flag){
            if(mainView->yuvFrames.size() > 0){
                releaseYuvFrameData(mainView->yuvFrame);
                mainView->yuvFrames.pop(mainView->yuvFrame);
                // Discard scaled frames that are older than the current main frame.
                if(mainView->scaledYuvFrames.size() > 5){
                    if(!mainView->scaledYuvFrame){
                        mainView->scaledYuvFrames.pop(mainView->scaledYuvFrame);
                    }
                    while (mainView->scaledYuvFrame->timestamp < mainView->yuvFrame->timestamp
                            && mainView->scaledYuvFrames.size() > 5){
                        releaseYuvFrameData(mainView->scaledYuvFrame);
                        mainView->scaledYuvFrames.pop(mainView->scaledYuvFrame);
                    }
                }
            }else{
                LOGE("main view dropped a frame");
            }
            rate = mainView->yuvFrame->width * 1.0 / mainView->yuvFrame->height;
            width = mainView->yuvFrame->width;
            height = mainView->yuvFrame->height;
        } else {
            rate = FRAME_WIDTH * 1.0 / FRAME_HEIGHT;
            width = FRAME_WIDTH;
            height = FRAME_HEIGHT;
        }
        int64_t t = getCurrentTimestamp();
        //LOGI("pushMainYUV");
        // Aspect ratio differs from the output frame: crop to fit.
        if(flag && rate != FRAME_WIDTH*1.0/FRAME_HEIGHT){
            if (rate < FRAME_WIDTH*1.0/FRAME_HEIGHT){
                height = width / 9 * 16; // source is taller than 9:16 — keep width, crop height
            }else{
                width = height / 16 * 9; // source is wider than 9:16 — keep height, crop width
            }
            int64_t t1 = getCurrentTimestamp();
            cutYUV(mainView->yuvFrame->yuvData, mainView->yuvFrame->width, mainView->yuvFrame->height,
                   mainView->yuvFrame->mainPositionRate, mainYuvData->yuvData);
            //LOGI("cutYUV crop took: %d", getCurrentTimestamp() - t1);
        }else if(flag){
            int64_t t1 = getCurrentTimestamp();
            copyYUV(mainView->yuvFrame->yuvData, width, height, mainYuvData->yuvData);
            //LOGI("copyYUV took: %d", getCurrentTimestamp() - t1);
        }else{
            copyYUV(blackYUV, width, height, mainYuvData->yuvData); // no main view: push a black frame
        }
        if(nativeMainWindow && mainView){
            YUVData *mainYUV = new YUVData;
            mainYUV->width = width;
            mainYUV->height = height;
            copyYUV(mainYuvData->yuvData, width, height, mainYUV->yuvData);
            mainSurfaceFrames.push(mainYUV);
        }
        do {
            if(pushers.size() == 0){
                break;
            }
            if(width != FRAME_WIDTH){
                // Scale up to the 1080-wide output frame.
                uint8_t *dstData[3];
                copyYUV(mainYuvData->yuvData, width, height, dstData);
                delete[] mainYuvData->yuvData[0];
                delete[] mainYuvData->yuvData[1];
                delete[] mainYuvData->yuvData[2];
                int64_t t1 = getCurrentTimestamp();
                scaleYUV(dstData, width, height, FRAME_WIDTH, FRAME_HEIGHT, mainYuvData->yuvData);
                //LOGI("scaling to 1080 took: %d", getCurrentTimestamp() - t1);
                delete[] dstData[0];
                delete[] dstData[1];
                delete[] dstData[2];
            }
            // Apply the watermark.
            int64_t t2 = getCurrentTimestamp();
            waterYUV(mainYuvData->yuvData);
            //LOGI("watermark took: %d", getCurrentTimestamp() - t2);
            // Encode and push the main frame.
            //LOGI("pushYUV pushers: %d", pushers.size());
            int64_t t1 = getCurrentTimestamp();
            videoChannel->pushYUV(mainYuvData->yuvData);
            //LOGI("pushYUV encode took: %d", getCurrentTimestamp() - t1);
            //LOGI("pushYUV total: %d", getCurrentTimestamp() - t);
        } while (0);
        releaseYuvFrameData(mainYuvData);
        pthread_mutex_unlock(&pushVideoMutex);
    }
    return 0;
}

pthread_t mainFrameStart;
pthread_t countDownStart;
pthread_t pushFrameStart;

jint JNI_OnLoad(JavaVM *vm, void *reserved){
    // Plane sizes for one YUV420p frame.
    size_t y_size = FRAME_WIDTH * FRAME_HEIGHT;             // Y plane
    size_t u_size = (FRAME_WIDTH / 2) * (FRAME_HEIGHT / 2); // U plane
    size_t v_size = (FRAME_WIDTH / 2) * (FRAME_HEIGHT / 2); // V plane
    blackYUV[0] = new uint8_t[y_size];
    blackYUV[1] = new uint8_t[u_size];
    blackYUV[2] = new uint8_t[v_size];
    memset(blackYUV[0], 0, y_size);   // Y = 0: black luma
    memset(blackYUV[1], 128, u_size); // U = 128: neutral chroma
    memset(blackYUV[2], 128, v_size); // V = 128: neutral chroma
    setupSignalHandlers();
    pthread_mutex_init(&pushVideoMutex, 0);
    pthread_mutex_init(&pushAudioMutex, 0);
    pthread_mutex_init(&pushNV21Mutex, 0);
    pthread_mutex_init(&loadSurfaceMutex, 0);
    mainSurfaceFrames.setWork(1);
    mainSurfaceFrames.setReleaseCallback(releaseYuvDataCallBack);
    pthread_create(&mainFrameStart, 0, task_load_main_surface, NULL);
    pushFrames.setWork(1);
    pushFrames.setReleaseCallback(releaseYuvDataCallBack);
    pthread_create(&countDownStart, 0, task_count_down, NULL);
    pthread_create(&pushFrameStart, 0, task_pushFrames, NULL);
    // Record the timestamp at which streaming starts.
    start_time = RTMP_GetTime();
    videoChannel = new VideoChannel();
    audioChannel = new AudioChannel();
    videoChannel->setVideoCallback(packetCallBack);
    videoChannel->initVideoEncoder(FRAME_WIDTH, FRAME_HEIGHT, FRAME_PS, FRAME_RATE);
    audioChannel->setAudioCallback(packetCallBack);
    LOGI("encoderInit");
    audioChannel->initAudioEncoder(44100, 2);
    return JNI_VERSION_1_6;
}
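// A minimal sketch (hypothetical helper, not part of the original code) that
// generalizes the black-frame setup in JNI_OnLoad above: one YUV420p frame is
// a full-resolution Y plane plus two 2x2-subsampled chroma planes, so for a
// width x height frame the plane sizes are w*h, (w/2)*(h/2) and (w/2)*(h/2)
// bytes. Assumes even dimensions.
static void allocSolidYUV420(uint8_t *planes[3], int width, int height,
                             uint8_t y, uint8_t u, uint8_t v) {
    size_t ySize = (size_t)width * height;             // luma, one byte per pixel
    size_t cSize = (size_t)(width / 2) * (height / 2); // each chroma plane
    planes[0] = new uint8_t[ySize];
    planes[1] = new uint8_t[cSize];
    planes[2] = new uint8_t[cSize];
    memset(planes[0], y, ySize);
    memset(planes[1], u, cSize);
    memset(planes[2], v, cSize);
}
// e.g. allocSolidYUV420(blackYUV, FRAME_WIDTH, FRAME_HEIGHT, 0, 128, 128)
// would reproduce the black frame built inline above (black = Y 0, U/V 128).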
extern "C"
JNIEXPORT jstring JNICALL
Java_com_runt_live_native_LivePlayer_ffpeg_1version(JNIEnv *env, jobject thiz) {
    //return env->NewStringUTF(av_version_info());
    char version[50];
    snprintf(version, sizeof(version), "librtmp version :%d", RTMP_LibVersion());
    return env->NewStringUTF(version);
}

extern "C"
JNIEXPORT void JNICALL
Java_com_runt_live_cpp_LiveMiniView_native_1set_1main_1stream_1code(JNIEnv *env, jclass clazz,
                                                                    jint stream_code) {
    pthread_mutex_lock(&pushVideoMutex);
    mainStreamCode = stream_code;
    mainView = getMainViewData();
    LOGI("main view changed to %d", stream_code);
    pthread_mutex_unlock(&pushVideoMutex);
}

extern "C"
JNIEXPORT void JNICALL
Java_com_runt_live_cpp_LiveMiniView_native_1remove_1mini_1view(JNIEnv *env, jclass clazz,
                                                               jint stream_code) {
    pthread_mutex_lock(&pushVideoMutex);
    LOGI("%d closing view", stream_code);
    if(!pushMiniDatas.empty()){
        for(int i = 0; i < pushMiniDatas.size(); i++){
            if(pushMiniDatas[i]->streamCode == stream_code){
                releaseYuvFrameData(pushMiniDatas[i]->yuvFrame);
                releaseYuvFrameData(pushMiniDatas[i]->scaledYuvFrame);
                pushMiniDatas[i]->yuvFrame = 0;
                pushMiniDatas[i]->scaledYuvFrame = 0;
                pushMiniDatas[i]->yuvFrames.setWork(0);
                //pushMiniDatas[i]->yuvFrames.clear();
                pushMiniDatas[i]->scaledYuvFrames.setWork(0);
                //pushMiniDatas[i]->scaledYuvFrames.clear();
                DELETE(pushMiniDatas[i])
                pushMiniDatas.erase(pushMiniDatas.begin() + i);
                break;
            }
        }
    }
    if(mainStreamCode == stream_code){
        mainView = 0;
    }
    pthread_mutex_unlock(&pushVideoMutex);
}

extern "C"
JNIEXPORT void JNICALL
Java_com_runt_live_cpp_LiveMiniView_native_1update_1mini_1view(JNIEnv *env, jclass clazz,
        jint stream_code, jint sn, jint width, jint height, float p_x_rate, float p_y_rate,
        jboolean audioOn, jboolean videoOn, jlong videoDelay, jint stream_type,
        jfloat main_position_rate, jfloat view_rate) {
    pthread_mutex_lock(&pushVideoMutex);
    //LOGI("%d view changed", stream_code);
    MiniViewData *data = 0;
    if(pushMiniDatas.size() > sn && pushMiniDatas[sn]){
        data = pushMiniDatas[sn]; // existing view: update in place
        //LOGI("%d view resized %dx%d => %dx%d", stream_code, data->width, data->height, width, height);
    }else{
        data = new MiniViewData; // new view
        pushMiniDatas.push_back(data);
        data->scaledYuvFrame = 0;
        data->yuvFrame = 0;
        data->yuvFrames.setWork(1);
        data->scaledYuvFrames.setWork(1);
        //LOGI("%d new view %f %f", stream_code, data->viewRate, view_rate);
    }
    data->streamCode = stream_code;
    data->width = width;
    data->sn = sn;
    data->audioOn = audioOn;
    data->videoOn = videoOn;
    data->videoDelay = videoDelay;
    data->height = height;
    data->pYrate = p_y_rate;
    data->pXrate = p_x_rate;
    data->mainPositionRate = main_position_rate;
    data->viewRate = view_rate;
    data->streamType = stream_type;
    pthread_mutex_unlock(&pushVideoMutex);
}

extern "C"
JNIEXPORT void JNICALL
Java_com_runt_live_cpp_LiveMiniView_native_1push_1nv21(JNIEnv *env, jclass clazz,
                                                       jint stream_code, jbyteArray bytes) {
    jbyte *data = env->GetByteArrayElements(bytes, NULL);
    //LOGI("native_1push_1nv21");
    pushNV21(stream_code, data);
    env->ReleaseByteArrayElements(bytes, data, 0);
}
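// An alternative sketch for frame-push entry points like native_push_nv21
// above: for large, short-lived buffers, JNI's GetPrimitiveArrayCritical can
// avoid a copy on some VMs. The constraint is that no other JNI calls and no
// blocking may happen while the critical region is held. Illustrative only;
// the original code uses GetByteArrayElements.
// jbyte *data = (jbyte *)env->GetPrimitiveArrayCritical(bytes, NULL);
// if (data) {
//     pushNV21(stream_code, data); // must be quick and must not call back into Java
//     env->ReleasePrimitiveArrayCritical(bytes, data, JNI_ABORT); // no write-back needed
// }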
extern "C"
JNIEXPORT void JNICALL
Java_com_runt_live_cpp_LiveMiniView_native_1set_1main_1surface(JNIEnv *env, jclass clazz,
                                                               jobject surface) {
    pthread_mutex_lock(&loadSurfaceMutex);
    nativeMainWindow = ANativeWindow_fromSurface(env, surface);
    pthread_mutex_unlock(&loadSurfaceMutex);
    if(!nativeMainWindow){
        LOGE("nativeWindow was reclaimed");
    }
    if(!jvmMainCall){
        jvmMainCall = new JavaMainCall(env, clazz);
    }
}

extern "C"
JNIEXPORT void JNICALL
Java_com_runt_live_cpp_LiveMiniView_native_1release_1main_1surface(JNIEnv *env, jclass clazz) {
    pthread_mutex_lock(&loadSurfaceMutex);
    if(nativeMainWindow){
        ANativeWindow_release(nativeMainWindow);
        nativeMainWindow = 0;
    }
    pthread_mutex_unlock(&loadSurfaceMutex);
}

extern "C"
JNIEXPORT void JNICALL
Java_com_runt_live_cpp_LiveMiniView_native_1push_1rgba(JNIEnv *env, jclass clazz,
                                                       jint stream_code, jbyteArray bytes) {
    jbyte *data = env->GetByteArrayElements(bytes, NULL);
    pushRGBA(stream_code, reinterpret_cast<uint8_t *>(data));
    env->ReleaseByteArrayElements(bytes, data, 0);
}

extern "C"
JNIEXPORT void JNICALL
Java_com_runt_live_cpp_LiveMiniView_native_1push_1nv_121_1data(JNIEnv *env, jclass clazz,
        jint stream_code, jobject y_data, jobject u_data, jobject v_data,
        jint y_stride, jint u_stride, jint v_stride, jint uv_stride,
        jint angle, jint width, jint height) {
    uint8_t *yData = (uint8_t *)env->GetDirectBufferAddress(y_data);
    uint8_t *uData = (uint8_t *)env->GetDirectBufferAddress(u_data);
    uint8_t *vData = (uint8_t *)env->GetDirectBufferAddress(v_data);
    pushNV21(stream_code, yData, uData, vData, y_stride, u_stride, v_stride, uv_stride,
             angle, width, height);
    LOGI("native_1push_1nv_121_1data, y_stride:%d, u_stride:%d, v_stride:%d, uv_stride:%d",
         y_stride, u_stride, v_stride, uv_stride);
}

extern "C"
JNIEXPORT void JNICALL
Java_com_runt_live_cpp_LiveMiniView_native_1push_1pcm(JNIEnv *env, jclass clazz, jbyteArray bytes) {
    jbyte *data = env->GetByteArrayElements(bytes, NULL);
    pushPCM(data);
    //LOGI("native_1push_1pcm safequeue packets:%d", &pusher->audioPackets);
    env->ReleaseByteArrayElements(bytes, data, 0);
}

extern "C"
JNIEXPORT jbyteArray JNICALL
Java_com_runt_live_cpp_LiveMiniView_native_1convert_1nv21_1to_1rgba(JNIEnv *env, jclass clazz,
        jbyteArray bytes, jint width, jint height) {
    // TODO: implement native_convert_nv21_to_rgba()
    return NULL; // not implemented yet; returning NULL avoids undefined behavior
}

extern "C"
JNIEXPORT void JNICALL
Java_com_runt_live_cpp_LiveMiniView_native_1push_1yuyv(JNIEnv *env, jclass clazz,
                                                       jint stream_code, jbyteArray bytes) {
    jbyte *data = env->GetByteArrayElements(bytes, NULL);
    pushYUYV(stream_code, data);
    env->ReleaseByteArrayElements(bytes, data, 0);
}

extern "C"
JNIEXPORT void JNICALL
Java_com_runt_live_cpp_LiveMiniView_native_1push_1yuv(JNIEnv *env, jclass clazz,
                                                      jint stream_code, jbyteArray bytes) {
    jbyte *data = env->GetByteArrayElements(bytes, NULL);
    uint8_t *yuv[3];
    MiniViewData *view = getMiniView(stream_code);
    // Plane sizes for an I420 buffer of this view's dimensions.
    int ySize = view->width * view->height;
    int uSize = ySize / 4;
    // Point each plane at its offset within the packed byte array.
    yuv[0] = reinterpret_cast<uint8_t *>(data);                 // Y plane
    yuv[1] = reinterpret_cast<uint8_t *>(data) + ySize;         // U plane
    yuv[2] = reinterpret_cast<uint8_t *>(data) + ySize + uSize; // V plane
    pushYUV(stream_code, yuv);
    env->ReleaseByteArrayElements(bytes, data, 0);
}
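// Worked example of the I420 offsets used in native_push_yuv above, with an
// illustrative 640x480 view (numbers are examples only):
//   ySize = 640 * 480 = 307,200 bytes -> Y plane starts at data
//   uSize = ySize / 4 =  76,800 bytes -> U plane starts at data + 307,200
//   vSize = ySize / 4 =  76,800 bytes -> V plane starts at data + 384,000
// so the incoming jbyteArray must hold ySize * 3 / 2 = 460,800 bytes.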
extern "C"
JNIEXPORT jbyteArray JNICALL
Java_com_runt_live_cpp_LiveMiniView_native_1get_1cut_1frame(JNIEnv *env, jclass clazz,
                                                            jint index, jint stream_code) {
    uint8_t *yuvData[3];
    if(mainYuvData){
        pthread_mutex_lock(&pushVideoMutex);
        copyYUV(mainYuvData->yuvData, FRAME_WIDTH, FRAME_HEIGHT, yuvData);
        pthread_mutex_unlock(&pushVideoMutex);
    }else{
        copyYUV(blackYUV, FRAME_WIDTH, FRAME_HEIGHT, yuvData);
    }
    waterYUV(index, yuvData);
    // Crop the mini view's region out of the full frame.
    MiniViewData *miniView = getMiniView(stream_code);
    int scaleWidth = (FRAME_WIDTH * miniView->viewRate);
    int scaleHeight = (FRAME_WIDTH / (miniView->width * 1.0 / miniView->height) * miniView->viewRate);
    // YUV 4:2:0 subsamples chroma 2x2, so width and height must be even.
    if(scaleWidth % 2 == 1){
        scaleWidth += 1;
    }
    if(scaleHeight % 2 == 1){
        scaleHeight += 1;
    }
    if(scaleWidth > FRAME_WIDTH){
        scaleWidth = FRAME_WIDTH;
    }
    if(scaleHeight > FRAME_HEIGHT){
        scaleHeight = FRAME_HEIGHT;
    }
    if(!miniView->scaledYuvFrame){
        miniView->scaledYuvFrame = new YUVData;
    }
    miniView->scaledYuvFrame->width = scaleWidth;
    miniView->scaledYuvFrame->height = scaleHeight;
    miniView->scaledYuvFrame->pYrate = miniView->pYrate;
    miniView->scaledYuvFrame->pXrate = miniView->pXrate;
    uint8_t *dstData[3];
    // Position of the crop inside the full frame.
    int offsetY = (FRAME_HEIGHT - scaleHeight) * miniView->pYrate;
    int offsetX = (FRAME_WIDTH - scaleWidth) * miniView->pXrate;
    cutYUV(yuvData, FRAME_WIDTH, FRAME_HEIGHT, offsetX, offsetY, dstData, scaleWidth, scaleHeight);
    LOGE("index:%d x:%d y:%d %dx%d view:%dx%d", index, offsetX, offsetY, scaleWidth, scaleHeight,
         miniView->width, miniView->height);
    copyYUV(dstData, scaleWidth, scaleHeight, miniView->scaledYuvFrame->yuvData);
    addBlackBorder(miniView->scaledYuvFrame->yuvData, scaleWidth, scaleHeight, 3);
    // The source frame is no longer needed.
    delete[] yuvData[0];
    delete[] yuvData[1];
    delete[] yuvData[2];
    // Convert the cropped frame to RGBA.
    int size = scaleWidth * scaleHeight * 4;
    uint8_t *rgba_data = yuvToRGBA(dstData, scaleWidth, scaleHeight);
    // The intermediate YUV planes are no longer needed.
    delete[] dstData[0];
    delete[] dstData[1];
    delete[] dstData[2];
    // Wrap the RGBA pixels in a Java byte array.
    jbyteArray java_array = env->NewByteArray(size);
    env->SetByteArrayRegion(java_array, 0, size, (jbyte *)rgba_data);
    delete[] rgba_data;
    return java_array;
}
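// A minimal sketch (hypothetical helper, not in the original file) of the
// thumbnail-sizing rule used in native_get_cut_frame above: scale the output
// width by viewRate, derive the height from the mini view's aspect ratio,
// force even dimensions (YUV 4:2:0 chroma is subsampled 2x2), and clamp to
// the full output frame.
static void computeScaledSize(int viewWidth, int viewHeight, float viewRate,
                              int *outW, int *outH) {
    int w = (int)(FRAME_WIDTH * viewRate);
    int h = (int)(FRAME_WIDTH / (viewWidth * 1.0 / viewHeight) * viewRate);
    if (w % 2 == 1) w += 1;                // even width for 4:2:0
    if (h % 2 == 1) h += 1;                // even height for 4:2:0
    if (w > FRAME_WIDTH)  w = FRAME_WIDTH; // clamp to the output frame
    if (h > FRAME_HEIGHT) h = FRAME_HEIGHT;
    *outW = w;
    *outH = h;
}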