UVC 相机 FPS 计算
网络和 CPU 性能下降导致的画面编码内存增加问题
| | |
| | | } |
| | | } |
| | | }else{ |
| | | //LOGE("主窗丢帧"); |
| | | LOGE("主窗丢帧"); |
| | | } |
| | | rate = mainView->yuvFrame->width * 1.0 / mainView->yuvFrame->height; |
| | | width = mainView->yuvFrame->width; |
| | |
| | | int64_t t = 0; |
| | | bool n = fps == 0 || fps < 20; |
| | | int64_t t2 = getCurrentTimestamp(); |
| | | int64_t lastPts = 00; |
| | | int t_fps = 0 ; |
| | | int d_fpsl = 0; |
| | | double interval = 0.0; |
| | |
| | | } |
| | | t_fps += 1; |
| | | if(getCurrentTimestamp() - t2 > 1000){ |
| | | LOGE("%s fps: %d" ,url,t_fps); |
| | | LOGE("%d %s fps: %d 弃帧:%d" ,streamCode,url,t_fps,d_fpsl); |
| | | fps = t_fps; |
| | | t_fps = 0 ; |
| | | d_fpsl = 0; |
| | | t2 = getCurrentTimestamp(); |
| | | int out = fps - FRAME_PS; |
| | | interval = fps / out; // 删除的间隔 |
| | | interval = 00; // 删除的间隔 |
| | | } |
| | | result = avcodec_send_packet(codecContext,packet); |
| | | //1094995529 |
| | |
| | | miniView->width = frame->width; |
| | | miniView->height = frame->height; |
| | | pullerCall->setViewSize(width,height); |
| | | LOGE("提取视频流 width:%d,height:%d format:%d,fps:%f",width,height,frame->format,fps); |
| | | LOGE("%d 提取视频流 width:%d,height:%d format:%d,fps:%f",streamCode,width,height,frame->format,fps); |
| | | } |
| | | // 获取帧的时间戳(以微秒为单位) |
| | | // pts = packet->pts * av_q2d(timeBase) * 1000000; |
| | |
| | | } |
| | | int out = fps - FRAME_PS; |
| | | if(out > 10) { |
| | | int elapsed_ms = getCurrentTimestamp() - t; |
| | | //LOGI("ms:%d", elapsed_ms); |
| | | int elapsed_ms = packet->pts - lastPts; |
| | | LOGI("ms:%d", elapsed_ms); |
| | | // fps 原始木头数量 |
| | | // FRAME_PS 目标数量 |
| | | // out 需要删除的数量 |
| | | if(interval > 1){ |
| | | interval = interval - 1; |
| | | lastPts = packet->pts; |
| | | //降低帧率,保证主画面30帧,当下保证主画面平均10s丢一帧 |
| | | if(t_fps > interval){ |
| | | interval = interval + fps / out; |
| | | d_fpsl++; |
| | | //LOGE("丢帧 %d",d_fpsl); |
| | | //LOGE("弃帧 %d",d_fpsl); |
| | | av_frame_free(&frame); |
| | | DELETE(frame); |
| | | av_packet_free(&packet); |
| | | DELETE(packet); |
| | | continue; // 跳过当前木头 |
| | | } |
| | | interval++; |
| | | } |
| | | |
| | | t = getCurrentTimestamp(); |
| | |
| | | void LivePullerCall::onVideoStream(uint8_t *yuvData[3],int width,int height) { |
| | | |
| | | pthread_mutex_lock(&loadVideoMutex); |
| | | int64_t t = getCurrentTimestamp(); |
| | | loadSurface(nativeWindow,yuvData,width,height); |
| | | //LOGI("loadSurface 耗时:%d ",getCurrentTimestamp() - t); |
| | | /*int size = av_image_get_buffer_size(AV_PIX_FMT_RGBA, width, height, 1); |
| | | uint8_t *dst_data = yuvToRGBA(yuvData,width,height); |
| | | //转为java字节数组,传给应用层渲染画面,导致内存抖动,弃用 |
| | |
| | | continue; |
| | | } |
| | | if(!pData->scaledYuvFrame && pData->scaledYuvFrames.size() == 0 ){ |
| | | LOGE("小窗丢帧1"); |
| | | //LOGE("小窗丢帧1"); |
| | | continue; |
| | | } |
| | | if(pData->scaledYuvFrames.size() > 0){ |
| | |
| | | // 边界检查,确保不会越界 |
| | | if (offsetX + scaledYuvFrame->width > mainWidth ) { |
| | | offsetX = mainWidth - scaledYuvFrame->width - 1; |
| | | // 若超出边界,直接返回 |
| | | LOGE("超出边界 %d",scaledYuvFrame->width); |
| | | } |
| | | if (offsetY + scaledYuvFrame->height > mainHeight) { |
| | | // 若超出边界,直接返回 |
| | | offsetY = mainHeight - scaledYuvFrame->height - 1; |
| | | LOGE("超出边界 %d",scaledYuvFrame->height); |
| | | } |
| | | if( scaledYuvFrame->height > mainHeight || scaledYuvFrame->width > mainWidth){ |
| | | LOGE("超出边界"); |
| | |
| | | addBlackBorder(scaleYuvFrame->yuvData,scaleWidth,scaleHeight,3); |
| | | miniView->scaledYuvFrames.push(scaleYuvFrame); |
| | | //最大缓存画面数量不能超过max(5-10)帧 |
| | | int max = 5;int min = 3; |
| | | int max = 15;int min = 3; |
| | | if(miniView->yuvFrames.size() > (mainStreamCode == miniView->streamCode ? max:min) ){ |
| | | if(mainStreamCode == miniView->streamCode){ |
| | | //LOGE("丢帧 pushyuv %d",miniView->yuvFrames.size() ); |
| | | LOGE("%d 溢帧 pushyuv %d",streamCode,miniView->yuvFrames.size() - (miniView->videoOn ? max:min)); |
| | | } |
| | | while (miniView->yuvFrames.size() > (mainStreamCode == miniView->streamCode ? max:min)){ |
| | | releaseYuvFrameData(miniView->yuvFrame); |
| | |
| | | } |
| | | if(miniView->scaledYuvFrames.size() > (miniView->videoOn ? max:min)){ |
| | | if(miniView->videoOn && mainStreamCode != miniView->streamCode){ |
| | | //LOGE("缩放丢帧 pushyuv %d",miniView->scaledYuvFrames.size() ); |
| | | LOGE("%d 溢帧 pushyuv %d",streamCode,miniView->scaledYuvFrames.size() - (miniView->videoOn ? max:min) ); |
| | | } |
| | | while (miniView->scaledYuvFrames.size() > (miniView->videoOn ? max:min)){ |
| | | releaseYuvFrameData(miniView->scaledYuvFrame); |
| | |
| | | LOGE("VideoChannel 提取消息失败 %d",result); |
| | | continue; |
| | | } |
| | | int64_t t = getCurrentTimestamp(); |
| | | channel->encodeData(data->yuvData); |
| | | delete[] data->yuvData[0]; |
| | | delete[] data->yuvData[1]; |
| | | delete[] data->yuvData[2]; |
| | | delete data; |
| | | //LOGI("encode_push 耗时:%d ,yuvDatas_size:%d", getCurrentTimestamp() - t,channel->yuvDatas.size()); |
| | | if(channel->yuvDatas.size() > 30){ |
| | | LOGE("编码推流性能下降 %d",channel->yuvDatas.size()); |
| | | while (channel->yuvDatas.size() > 30){ |
| | | int result = channel->yuvDatas.pop(data); |
| | | delete[] data->yuvData[0]; |
| | | delete[] data->yuvData[1]; |
| | | delete[] data->yuvData[2]; |
| | | delete data; |
| | | } |
| | | } |
| | | } |
| | | return 0; |
| | | } |
| | |
| | | static HashMap<String,PcmData> pcmdatas = new HashMap<>(); |
| | | private static final ReentrantReadWriteLock lock = new ReentrantReadWriteLock(); |
| | | public static int mainStreamCode = 0 ; |
| | | public static final int FRAME_WIDTH = 1080,FRAME_HEIGHT = 1920,FRAME_RATE = 30; |
| | | public static final int FRAME_WIDTH = 1080,FRAME_HEIGHT = 1920,FRAME_PS = 30; |
| | | public static final int SAMPLE_RATE = 44100; |
| | | public static final int SAMPLE_SIZE = (SAMPLE_RATE * 2 * 16 / 8 / 4096);//每秒音频帧的数量 |
| | | |
| | |
| | | val mLifecycle:LifecycleRegistry; |
| | | var startTime = 0L; |
| | | var dateFmt = SimpleDateFormat("hh:MM:ss"); |
| | | var t_fps = 0 |
| | | |
| | | |
| | | |
| | |
| | | val cameraProvider = cameraProviderFuture!!.get() |
| | | cameraProvider.unbindAll() |
| | | var t2 = Date().time; |
| | | var t_fps = 0 |
| | | imageAnalysis.setAnalyzer(ContextCompat.getMainExecutor(mContext)) { image : ImageProxy -> |
| | | t_fps += 1; |
| | | if (Date().time - t2 > 1000) { |
| | |
| | | if (monitor.streamWindow.surfaceHolder != null) { |
| | | camera.setPreviewDisplay(monitor.streamWindow.surfaceHolder!!.surface) |
| | | camera.startPreview() |
| | | var t2 = Date().time; |
| | | var t_fps = 0 |
| | | camera.setFrameCallback({ |
| | | t_fps += 1; |
| | | if (Date().time - t2 > 1000) { |
| | | Log.e(TAG , "uvc ${device.deviceName} fps: ${t_fps}" ) |
| | | t_fps = 0 |
| | | t2 = Date().time |
| | | } |
| | | val bytes = ByteArray(it.remaining()) // 确保数组大小等于缓冲区剩余大小 |
| | | it.get(bytes) |
| | | monitor.imagesQueue.put(BytesData(bytes, Date().time)) |