//
// Created by 倪路朋 on 10/24/24.
//

#include "server_global.h"
#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
#include <sys/resource.h>
#include <thread>
std::map<int,LivePusher *> pushers = {};
vector<MiniViewData *> pushMiniDatas = {};
MiniViewData *mainView = 0;
int mainStreamCode = 0;
pthread_mutex_t pushVideoMutex,pushNV21Mutex,pushAudioMutex;
ANativeWindow *nativeMainWindow = 0;
pthread_mutex_t loadSurfaceMutex;
int mainWidth = FRAME_WIDTH;
int mainHeight = FRAME_HEIGHT;
float STREAM_RATE = FRAME_WIDTH*1.0/FRAME_HEIGHT;
uint8_t *blackYUV[3];
uint32_t start_time;
SafeQueue<YUVData *> mainSurfaceFrames;
SafeQueue<YUVData *> pushFrames;
AudioChannel *audioChannel = 0;
VideoChannel *videoChannel = 0;
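
// Helper sketch (illustrative, not used elsewhere in this file): the I420
// buffer size used throughout below -- a full-resolution Y plane plus
// quarter-resolution U and V planes.
static inline size_t i420FrameSize(int width, int height) {
    return (size_t) width * height * 3 / 2; // w*h (Y) + w*h/4 (U) + w*h/4 (V)
}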

void packetCallBack(RTMPPacket *packet){
    //LOGI("packetCallBack safequeue packets:%d",&packets);
    if(packet){
        //LOGI("packets.push ");
        if(packet->m_packetType == RTMP_PACKET_TYPE_AUDIO && packet->m_body[1] == 0x00){
            LOGI("RTMP1 sending audio sequence header");
        }
        if(packet->m_nTimeStamp == -1){
            packet->m_nTimeStamp = RTMP_GetTime() - start_time;
        }
        if(pushers.size() > 0){
            for(auto i : pushers){
                if(packet->m_packetType == RTMP_PACKET_TYPE_VIDEO){
                    i.second->pushYUV(packet);
                }else{
                    i.second->pushPCM(packet);
                }
                //LOGI("pushPCM2.2");
            }
        }
        // Each pusher keeps its own copy, so the packet body is released here
        // regardless of how many pushers are connected.
        RTMPPacket_Free(packet);
    }
}

int CopyRTMPPacket(RTMPPacket *dest, const RTMPPacket *src) {
    // Initialize the destination RTMPPacket
    RTMPPacket_Alloc(dest, src->m_nBodySize);
    if (!dest->m_body) {
        return 0; // allocation failed
    }

    // Copy the body and the header fields
    memcpy(dest->m_body, src->m_body, src->m_nBodySize);
    dest->m_headerType = src->m_headerType;
    dest->m_packetType = src->m_packetType;
    dest->m_nChannel = src->m_nChannel;
    dest->m_nTimeStamp = src->m_nTimeStamp;
    dest->m_nInfoField2 = src->m_nInfoField2;
    dest->m_hasAbsTimestamp = src->m_hasAbsTimestamp;
    dest->m_nBodySize = src->m_nBodySize;

    return 1; // success
}
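
// Illustrative sketch (not called in this file): duplicating a packet with
// CopyRTMPPacket before handing it to another consumer, then releasing the
// copy's body with the matching librtmp call.
static void exampleDuplicatePacket(const RTMPPacket *src) {
    RTMPPacket copy;
    if (CopyRTMPPacket(&copy, src)) {
        // ... enqueue or send `copy` ...
        RTMPPacket_Free(&copy); // frees the body allocated by RTMPPacket_Alloc
    }
}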

int64_t getCurrentTimestamp(){
    auto now = std::chrono::system_clock::now();
    auto millis = std::chrono::duration_cast<std::chrono::milliseconds>(now.time_since_epoch());
    return millis.count();
}

int getIndexOfStream(int streamCode){
    int index = -1;
    for(int i = 0 ; i < pushMiniDatas.size() ; i ++){
        if(pushMiniDatas[i]->streamCode == streamCode){
            index = i;
            break;
        }
    }
    // Returns -1 when no view matches the stream code
    return index;
}

int getIndexOfMain(){
    int index = -1;
    for(int i = 0 ; i < pushMiniDatas.size() ; i ++){
        if(pushMiniDatas[i]->streamCode == mainStreamCode){
            index = i;
            break;
        }
    }
    // Returns -1 when the main stream is not in the list
    return index;
}

/**
 * Get the main view.
 * @return the MiniViewData whose streamCode matches mainStreamCode, or 0 if none
 */
MiniViewData *getMainViewData(){
    MiniViewData *found = 0;
    for (int i = 0; i < pushMiniDatas.size(); ++i) {
        MiniViewData *miniView = pushMiniDatas[i];
        if (miniView->streamCode == mainStreamCode) {
            found = miniView;
            break;
        }
    }
    return found;
}

MiniViewData *getMiniView(int streamCode){
    int index = getIndexOfStream(streamCode);
    if(index == -1){
        return 0;
    }
    //LOGI("pushYUV");
    return pushMiniDatas[index];
}

/**
 * Draw a black border around an I420 frame (in place).
 * @param yuvData     Y/U/V plane pointers
 * @param srcWidth    frame width
 * @param srcHeight   frame height
 * @param borderWidth border thickness in pixels
 */
void addBlackBorder(uint8_t *yuvData[3],int srcWidth,int srcHeight,int borderWidth)
{
    int uvWidth = srcWidth / 2;
    int uvHeight = srcHeight / 2;

    // Fill the Y-plane border
    // top
    for (int y = 0; y < borderWidth; y++) {
        memset(yuvData[0] + y * srcWidth, 0, srcWidth);
    }
    // bottom
    for (int y = srcHeight - borderWidth; y < srcHeight; y++) {
        memset(yuvData[0] + y * srcWidth, 0, srcWidth);
    }
    // left and right
    for (int y = borderWidth; y < srcHeight - borderWidth; y++) {
        memset(yuvData[0] + y * srcWidth, 0, borderWidth); // left
        memset(yuvData[0] + y * srcWidth + srcWidth - borderWidth, 0, borderWidth); // right
    }

    // Fill the U and V plane borders (the border width is halved on the chroma planes)
    int uvBorderWidth = borderWidth / 2;
    // top
    for (int y = 0; y < uvBorderWidth; y++) {
        memset(yuvData[1] + y * uvWidth, 128, uvWidth);
        memset(yuvData[2] + y * uvWidth, 128, uvWidth);
    }
    // bottom
    for (int y = uvHeight - uvBorderWidth; y < uvHeight; y++) {
        memset(yuvData[1] + y * uvWidth, 128, uvWidth);
        memset(yuvData[2] + y * uvWidth, 128, uvWidth);
    }
    // left and right
    for (int y = uvBorderWidth; y < uvHeight - uvBorderWidth; y++) {
        memset(yuvData[1] + y * uvWidth, 128, uvBorderWidth); // left
        memset(yuvData[1] + y * uvWidth + uvWidth - uvBorderWidth, 128, uvBorderWidth); // right
        memset(yuvData[2] + y * uvWidth, 128, uvBorderWidth); // left
        memset(yuvData[2] + y * uvWidth + uvWidth - uvBorderWidth, 128, uvBorderWidth); // right
    }
}
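
// Illustrative note: "black" in I420 is Y = 0 with neutral chroma U = V = 128,
// which is why the luma border is memset to 0 and the chroma border to 128.
// A minimal usage sketch (the 2-pixel border value is only an example):
static inline void exampleBorderUsage(YUVData *frame) {
    addBlackBorder(frame->yuvData, frame->width, frame->height, 2);
}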

/**
 * Draw a black border with rounded corners on an I420 frame (in place).
 * @param yuvData      Y/U/V plane pointers
 * @param srcWidth     frame width
 * @param srcHeight    frame height
 * @param borderWidth  border thickness in pixels
 * @param cornerRadius corner radius in pixels
 */
void addCornerAndBlackBorder(uint8_t *yuvData[3],int srcWidth,int srcHeight,int borderWidth,int cornerRadius){
    int uvWidth = srcWidth / 2;
    int uvHeight = srcHeight / 2;
    int uvBorderWidth = borderWidth / 2;
    int uvCornerRadius = cornerRadius / 2;

    // Top and bottom black borders on the Y plane
    for (int y = 0; y < borderWidth; y++) {
        memset(yuvData[0] + y * srcWidth, 0, srcWidth); // top
        memset(yuvData[0] + (srcHeight - y - 1) * srcWidth, 0, srcWidth); // bottom
    }
    // Left and right black borders on the Y plane
    for (int y = borderWidth; y < srcHeight - borderWidth; y++) {
        memset(yuvData[0] + y * srcWidth, 0, borderWidth); // left
        memset(yuvData[0] + y * srcWidth + srcWidth - borderWidth, 0, borderWidth); // right
    }

    // Round off the four corners of the Y plane
    for (int y = 0; y < cornerRadius; y++) {
        for (int x = 0; x < cornerRadius; x++) {
            // Is this pixel outside the quarter circle?
            if (sqrt((cornerRadius - x) * (cornerRadius - x) + (cornerRadius - y) * (cornerRadius - y)) >= cornerRadius) {
                // Fill with mid grey to fake transparency
                yuvData[0][y * srcWidth + x] = 128; // top-left
                yuvData[0][y * srcWidth + (srcWidth - x - 1)] = 128; // top-right
                yuvData[0][(srcHeight - y - 1) * srcWidth + x] = 128; // bottom-left
                yuvData[0][(srcHeight - y - 1) * srcWidth + (srcWidth - x - 1)] = 128; // bottom-right
            }
        }
    }

    // Round off the corners of the U and V planes
    for (int y = 0; y < uvCornerRadius; y++) {
        for (int x = 0; x < uvCornerRadius; x++) {
            if (sqrt((uvCornerRadius - x) * (uvCornerRadius - x) + (uvCornerRadius - y) * (uvCornerRadius - y)) >= uvCornerRadius) {
                yuvData[1][y * uvWidth + x] = 128; // top-left
                yuvData[1][y * uvWidth + (uvWidth - x - 1)] = 128; // top-right
                yuvData[1][(uvHeight - y - 1) * uvWidth + x] = 128; // bottom-left
                yuvData[1][(uvHeight - y - 1) * uvWidth + (uvWidth - x - 1)] = 128; // bottom-right

                yuvData[2][y * uvWidth + x] = 128; // top-left
                yuvData[2][y * uvWidth + (uvWidth - x - 1)] = 128; // top-right
                yuvData[2][(uvHeight - y - 1) * uvWidth + x] = 128; // bottom-left
                yuvData[2][(uvHeight - y - 1) * uvWidth + (uvWidth - x - 1)] = 128; // bottom-right
            }
        }
    }
}

/**
 * Scale an I420 frame.
 * @param yuvData   source Y/U/V plane pointers
 * @param width     source width
 * @param height    source height
 * @param dstWidth  destination width
 * @param dstHeight destination height
 * @param dstData   receives newly allocated Y/U/V planes (caller releases)
 */
void scaleYUV(uint8_t *yuvData[3],int width,int height,int dstWidth,int dstHeight,uint8_t *dstData[3]) {
    //LOGI("scaleYUV source %dx%d",width,height);
    //LOGI("scaleYUV target %dx%d",dstWidth,dstHeight);

    // Source and destination strides
    int srcStride[] = { width, width / 2, width / 2 };
    int dstStride[] = { dstWidth, dstWidth / 2, dstWidth / 2 };

    // Allocate the scaled YUV buffers
    int dst_y_size = dstWidth * dstHeight;
    int dst_uv_size = (dstWidth/2) * (dstHeight/2);

    dstData[0] = new uint8_t[dst_y_size];
    dstData[1] = new uint8_t[dst_uv_size];
    dstData[2] = new uint8_t[dst_uv_size];

    // Scale the image
    //LOGI("scaleYUV scaling started");
    libyuv::I420Scale(
            yuvData[0], srcStride[0],
            yuvData[1], srcStride[1],
            yuvData[2], srcStride[2],
            width, height,
            dstData[0], dstStride[0],
            dstData[1], dstStride[1],
            dstData[2], dstStride[2],
            dstWidth, dstHeight,
            libyuv::kFilterBox // Box filter (alternatives: kFilterNone, kFilterLinear, kFilterBilinear)
    );
    //LOGI("scaleYUV scaling finished");
}
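
// Illustrative sketch: scaleYUV allocates the three destination planes with
// new[], so the caller owns them; releaseYuvData in this file pairs with that
// allocation. The halved size below assumes even source dimensions, as I420
// requires.
static void exampleScaleUsage(uint8_t *src[3], int width, int height) {
    uint8_t *scaled[3];
    scaleYUV(src, width, height, width / 2, height / 2, scaled); // downscale to half size
    // ... consume the scaled planes ...
    releaseYuvData(scaled); // frees scaled[0..2]
}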

void rotate_yuv(uint8_t* srcData[3], int width, int height,int angle,uint8_t* dstData[3],int new_width,int new_height) {
    // Allocate the rotated YUV buffers
    int dst_y_size = new_width * new_height;
    int dst_uv_size = (new_width/2) * (new_height/2);
    dstData[0] = new uint8_t[dst_y_size];
    dstData[1] = new uint8_t[dst_uv_size];
    dstData[2] = new uint8_t[dst_uv_size];

    uint8_t* src_y = srcData[0];
    uint8_t* src_u = srcData[1];
    uint8_t* src_v = srcData[2];

    uint8_t* dst_y = dstData[0];
    uint8_t* dst_u = dstData[1];
    uint8_t* dst_v = dstData[2];

    if (angle == 90) {
        // 90 degrees clockwise
        for (int y = 0; y < height; ++y) {
            for (int x = 0; x < width; ++x) {
                dst_y[x * new_width + (new_width - y - 1)] = src_y[y * width + x];
            }
        }

        for (int y = 0; y < height / 2; ++y) {
            for (int x = 0; x < width / 2; ++x) {
                dst_u[x * new_width / 2 + (new_width / 2 - y - 1)] = src_u[y * width / 2 + x];
                dst_v[x * new_width / 2 + (new_width / 2 - y - 1)] = src_v[y * width / 2 + x];
            }
        }
    } else if (angle == 180) {
        // 180 degrees
        for (int y = 0; y < height; ++y) {
            for (int x = 0; x < width; ++x) {
                dst_y[(height - y - 1) * width + (width - x - 1)] = src_y[y * width + x];
            }
        }

        for (int y = 0; y < height / 2; ++y) {
            for (int x = 0; x < width / 2; ++x) {
                dst_u[(height / 2 - y - 1) * width / 2 + (width / 2 - x - 1)] = src_u[y * width / 2 + x];
                dst_v[(height / 2 - y - 1) * width / 2 + (width / 2 - x - 1)] = src_v[y * width / 2 + x];
            }
        }
    } else if (angle == 270) {
        // 270 degrees clockwise
        for (int y = 0; y < height; ++y) {
            for (int x = 0; x < width; ++x) {
                dst_y[(new_height - x - 1) * new_width + y] = src_y[y * width + x];
            }
        }

        for (int y = 0; y < height / 2; ++y) {
            for (int x = 0; x < width / 2; ++x) {
                dst_u[(new_height / 2 - x - 1) * new_width / 2 + y] = src_u[y * width / 2 + x];
                dst_v[(new_height / 2 - x - 1) * new_width / 2 + y] = src_v[y * width / 2 + x];
            }
        }
    }
}
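
// Alternative sketch (not wired in): libyuv ships an equivalent rotation that
// is usually faster than the per-pixel loops above. This assumes libyuv's
// rotate header is pulled in via server_global.h, and that dst already points
// at planes sized for new_width x new_height.
static void exampleRotateWithLibyuv(uint8_t *src[3], int width, int height,
                                    uint8_t *dst[3], int new_width, int new_height,
                                    libyuv::RotationMode mode /* kRotate90 / kRotate180 / kRotate270 */) {
    libyuv::I420Rotate(
            src[0], width,     src[1], width / 2,     src[2], width / 2,
            dst[0], new_width, dst[1], new_width / 2, dst[2], new_width / 2,
            width, height, mode);
}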

/**
 * Crop the frame to the stream's aspect ratio.
 * @param yuvData      source Y/U/V plane pointers
 * @param width        source width
 * @param height       source height
 * @param positionRate where to place the crop window along the longer axis (0.0 - 1.0)
 * @param dstData      receives newly allocated Y/U/V planes (caller releases)
 */
void cutYUV(uint8_t *yuvData[3],int width,int height ,float positionRate,uint8_t *dstData[3]) {
    float rate = width * 1.0/height;
    int miniHeight = 0;
    int miniWidth = 0;
    if (rate < STREAM_RATE){
        miniWidth = width;
        miniHeight = (miniWidth / 9 * 16);
    }else{
        miniHeight = height;
        miniWidth = (miniHeight / 16 * 9);
    }
    int cropX =(rate < STREAM_RATE) ? 0 : (width) * positionRate;
    int cropY = (rate < STREAM_RATE) ? (height) * positionRate : 0;
    if(cropX + miniWidth > width){
        cropX = width - miniWidth;
    }
    if(cropY + miniHeight > height){
        cropY = height - miniHeight;
    }
    //LOGI("cutYUV crop %dx%d mini:%dx%d crop:%dx%d",width,height,miniWidth,miniHeight,cropX,cropY);

    // Allocate the destination YUV planes
    int dstYSize = miniWidth * miniHeight;
    int dstUSize = dstYSize / 4;

    dstData[0] = new uint8_t[dstYSize];
    dstData[1] = new uint8_t[dstUSize];
    dstData[2] = new uint8_t[dstUSize];

    // Crop the Y plane
    for (int y = 0; y < miniHeight; ++y) {
        memcpy(dstData[0] + y * miniWidth,yuvData[0]+ (cropY + y) * width + cropX,
               miniWidth);
    }

    // Crop the U plane (chroma resolution is half of Y)
    for (int y = 0; y < miniHeight / 2; ++y) {
        memcpy(dstData[1] + y * (miniWidth / 2),
               yuvData[1] + (cropY / 2 + y) * (width / 2) + cropX / 2,
               miniWidth / 2);
    }

    // Crop the V plane
    for (int y = 0; y < miniHeight / 2; ++y) {
        memcpy(dstData[2] + y * (miniWidth / 2),
               yuvData[2] + (cropY / 2 + y) * (width / 2) + cropX / 2,
               miniWidth / 2);
    }
}

/**
 * Overlay the mini views onto the main frame (picture-in-picture watermark).
 * @param mainData Y/U/V plane pointers of the main frame
 */
void waterYUV(uint8_t *mainData[3]) {
    //LOGI("waterYUV overlay mini views (picture-in-picture)");
    int size = pushMiniDatas.size();
    for (int i = 0; i < pushMiniDatas.size(); ++i) {
        MiniViewData *pData = pushMiniDatas[i];
        if(pData->streamCode == mainStreamCode){
            continue;
        }
        if(!pData->videoOn){
            continue;
        }
        if(!pData->scaledYuvFrame && pData->scaledYuvFrames.size() == 0 ){
            //LOGE("mini view dropped frame 1");
            continue;
        }
        if(pData->scaledYuvFrames.size() > 0){
            releaseYuvFrameData(pData->scaledYuvFrame);
            pData->scaledYuvFrames.pop(pData->scaledYuvFrame);
        }else{
            LOGE("mini view dropped frame 2");
        }
        // Drop full-size frames that are older than the current scaled frame
        if(pData->yuvFrames.size() > 1){
            if(!pData->yuvFrame){
                pData->yuvFrames.pop(pData->yuvFrame);
            }
            while (pData->yuvFrame->timestamp < pData->scaledYuvFrame->timestamp && pData->yuvFrames.size() > 1 ){
                releaseYuvFrameData(pData->yuvFrame);
                pData->yuvFrames.pop(pData->yuvFrame);
            }
        }

        YUVData *scaledYuvFrame = pData->scaledYuvFrame;
        // Overlay position
        int offsetY = (mainHeight - scaledYuvFrame->height) * scaledYuvFrame->pYrate;
        int offsetX = (mainWidth - scaledYuvFrame->width) * scaledYuvFrame->pXrate;

        //LOGI("waterYUV %dx%d x:%d,y:%d x:%f,y:%f [0]:%d",pData->scaleWidth,miniHeight,pData->pYrate,pData->pXrate,offsetX,offsetY,pData->scaledYUVData[0]);

        // Bounds check so the overlay never writes outside the main frame
        if (offsetX + scaledYuvFrame->width > mainWidth ) {
            offsetX = mainWidth - scaledYuvFrame->width - 1;
        }
        if (offsetY + scaledYuvFrame->height > mainHeight) {
            offsetY = mainHeight - scaledYuvFrame->height - 1;
        }
        if( scaledYuvFrame->height > mainHeight || scaledYuvFrame->width > mainWidth){
            LOGE("mini view exceeds main frame bounds");
            return;
        }
        // Copy the Y plane
        for (int y = 0; y < scaledYuvFrame->height; y++) {
            uint8_t* main_y_row = mainData[0] + (offsetY + y) * mainWidth + offsetX;
            uint8_t* mini_y_row = scaledYuvFrame->yuvData[0] + y * scaledYuvFrame->width;

            memcpy(main_y_row, mini_y_row, scaledYuvFrame->width);
        }

        // Copy the U plane
        for (int y = 0; y < scaledYuvFrame->height / 2; y++) {
            uint8_t* main_u_row = mainData[1] + ((offsetY / 2) + y) * (mainWidth / 2) + (offsetX / 2);
            uint8_t* mini_u_row = scaledYuvFrame->yuvData[1] + y * (scaledYuvFrame->width / 2);

            memcpy(main_u_row, mini_u_row, scaledYuvFrame->width / 2);
        }

        // Copy the V plane
        for (int y = 0; y < scaledYuvFrame->height / 2; y++) {
            uint8_t* main_v_row = mainData[2] + ((offsetY / 2) + y) * (mainWidth / 2) + (offsetX / 2);
            uint8_t* mini_v_row = scaledYuvFrame->yuvData[2] + y * (scaledYuvFrame->width / 2);

            memcpy(main_v_row, mini_v_row, scaledYuvFrame->width / 2);
        }
    }
}
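
// Note (illustrative): the chroma copies above use offsetX / 2 and offsetY / 2,
// so an odd luma offset shifts the overlay's chroma by half a sample. A tiny
// helper like this can keep computed offsets even:
static inline int makeEven(int value) {
    return value & ~1; // clear the lowest bit
}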

uint8_t* yuvToRGBA(uint8_t *yuvData[3],int width,int height){
    // Convert I420 to RGBA
    // Allocate the RGBA buffer (4 bytes per pixel)
    int rgba_size = width * height * 4;
    uint8_t* rgba_data = new uint8_t[rgba_size];

    /*int y_stride = width;        // Y plane stride
    int uv_stride = width / 2;     // U and V plane stride
    int rgba_stride = width * 4;   // RGBA stride, 4 bytes per pixel

    // YUV -> RGBA_8888 via libyuv (kept for reference)
    libyuv::I420ToARGB(yuvData[0], y_stride, yuvData[2], uv_stride, yuvData[1], uv_stride, rgba_data, rgba_stride, width, height);*/

    // Initialize the SwsContext
    struct SwsContext* sws_ctx = sws_getContext(width, height, AV_PIX_FMT_YUV420P, // input format
                                                width, height, AV_PIX_FMT_RGBA,    // output format
                                                SWS_BILINEAR, nullptr, nullptr, nullptr);

    // YUV420P line sizes (Y, U, V): U and V are half the width of Y
    int yuv_linesize[3] = { width, width / 2, width / 2 };

    uint8_t* dest[4] = { rgba_data, nullptr, nullptr, nullptr };
    int dest_linesize[4] = { width * 4, 0, 0, 0 };

    // Convert YUV to RGBA
    sws_scale(sws_ctx, yuvData, yuv_linesize, 0, height, dest, dest_linesize);
    // Release the context
    sws_freeContext(sws_ctx);
    return rgba_data;
}
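
// Illustrative sketch: yuvToRGBA returns a buffer allocated with new[], so the
// caller must delete[] it after use (loadSurface below does exactly that once
// the pixels have been copied into the window buffer).
static void exampleYuvToRgbaUsage(uint8_t *yuv[3], int width, int height) {
    uint8_t *rgba = yuvToRGBA(yuv, width, height);
    // ... copy / upload the RGBA pixels somewhere ...
    delete[] rgba;
}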

void copyYUV(uint8_t *yuvData[3],int width,int height, uint8_t *dstData[3]){
    // Size of each plane
    size_t y_size = width * height;             // Y plane
    size_t u_size = (width / 2) * (height / 2); // U plane
    size_t v_size = (width / 2) * (height / 2); // V plane

    dstData[0] = new uint8_t[y_size];
    dstData[1] = new uint8_t[u_size];
    dstData[2] = new uint8_t[v_size];

    size_t slice_sizes[3] = {y_size, u_size, v_size};

    for (int i = 0; i < 3; ++i) {
        if (yuvData[i] && dstData[i] && slice_sizes[i] > 0) {
            // Copy the data from the source plane to the destination plane
            memcpy(dstData[i], yuvData[i], slice_sizes[i]);
        }
    }
}
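
// Illustrative sketch: copyYUV allocates fresh planes for the copy, so a
// YUVData built this way is later released through releaseYuvFrameData once it
// leaves the queues (this mirrors what pushYUV does below).
static YUVData *exampleSnapshotFrame(uint8_t *src[3], int width, int height) {
    YUVData *frame = new YUVData();
    frame->width = width;
    frame->height = height;
    copyYUV(src, width, height, frame->yuvData);
    return frame; // caller releases with releaseYuvFrameData(frame)
}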

void pushYUV(int streamCode, uint8_t *yuvData[3]) {
    //LOGI("pushYUV");
    MiniViewData *miniView = getMiniView(streamCode);
    if(!miniView){
        return;
    }
    if(miniView->videoOn || miniView->streamCode == mainStreamCode){
        pthread_mutex_lock(&pushVideoMutex);
        YUVData *yuvFrame = new YUVData();
        yuvFrame->width = miniView->width;
        yuvFrame->height = miniView->height;
        yuvFrame->timestamp = RTMP_GetTime() - start_time;
        yuvFrame->mainPositionRate = miniView->mainPositionRate;
        copyYUV(yuvData,miniView->width,miniView->height,yuvFrame->yuvData);
        miniView->yuvFrames.push(yuvFrame);
        // Scaled size of the mini view
        //LOGI("pushYUV viewRate:%f", miniView->viewRate);
        int scaleWidth = (FRAME_WIDTH * miniView->viewRate);
        int scaleHeight = (FRAME_WIDTH / (miniView->width * 1.0 / miniView->height) * miniView->viewRate);
        // YUV 4:2:0 requires even width and height
        if(scaleWidth % 2 == 1){
            scaleWidth += 1;
        }
        if(scaleHeight % 2 == 1){
            scaleHeight += 1;
        }
        if(scaleWidth > FRAME_WIDTH){
            scaleWidth = FRAME_WIDTH;
        }
        if(scaleHeight > FRAME_HEIGHT){
            scaleHeight = FRAME_HEIGHT;
        }
        YUVData *scaleYuvFrame = new YUVData();
        scaleYuvFrame->width = scaleWidth;
        scaleYuvFrame->height = scaleHeight;
        scaleYuvFrame->pYrate = miniView->pYrate;
        scaleYuvFrame->pXrate = miniView->pXrate;
        scaleYuvFrame->timestamp = yuvFrame->timestamp;
        scaleYUV(yuvFrame->yuvData,yuvFrame->width,yuvFrame->height,scaleYuvFrame->width,scaleYuvFrame->height,scaleYuvFrame->yuvData);
        addBlackBorder(scaleYuvFrame->yuvData,scaleWidth,scaleHeight,3);
        miniView->scaledYuvFrames.push(scaleYuvFrame);
        // Cap the cached frame count (max for the main stream, min for the rest)
        int max = 15;
        int min = 3;
        if(miniView->yuvFrames.size() > (mainStreamCode == miniView->streamCode ? max:min) ){
            if(mainStreamCode == miniView->streamCode){
                LOGE("%d frame overflow pushyuv %d",streamCode,miniView->yuvFrames.size() - (miniView->videoOn ? max:min));
            }
            while (miniView->yuvFrames.size() > (mainStreamCode == miniView->streamCode ? max:min)){
                releaseYuvFrameData(miniView->yuvFrame);
                miniView->yuvFrames.pop(miniView->yuvFrame);
            }
        }
        if(miniView->scaledYuvFrames.size() > (miniView->videoOn ? max:min)){
            if(miniView->videoOn && mainStreamCode != miniView->streamCode){
                LOGE("%d frame overflow pushyuv %d",streamCode,miniView->scaledYuvFrames.size() - (miniView->videoOn ? max:min) );
            }
            while (miniView->scaledYuvFrames.size() > (miniView->videoOn ? max:min)){
                releaseYuvFrameData(miniView->scaledYuvFrame);
                miniView->scaledYuvFrames.pop(miniView->scaledYuvFrame);
            }
        }
        pthread_mutex_unlock(&pushVideoMutex);
    }
}

void pushYUYV(int streamCode,jbyte *yuyvData){
    MiniViewData *miniView = getMiniView(streamCode);
    if(!miniView){
        return;
    }
    int width = miniView->width;
    int height = miniView->height;
    int y_size = width * height;
    int uv_size = y_size / 4;

    uint8_t *data[3];
    // Allocate the I420 planes
    data[0] = new uint8_t[y_size];  // Y
    data[1] = new uint8_t[uv_size]; // U
    data[2] = new uint8_t[uv_size]; // V

    // YUYV (packed 4:2:2) -> YUV420P: copy every Y sample, and take U/V from
    // even rows only to get the 2x2 chroma subsampling of I420.
    const uint8_t *src = (const uint8_t *) yuyvData;
    for (int row = 0; row < height; row++) {
        const uint8_t *srcRow = src + row * width * 2; // 2 bytes per pixel
        uint8_t *dstY = data[0] + row * width;
        for (int col = 0; col < width; col += 2) {
            dstY[col]     = srcRow[col * 2];     // Y0
            dstY[col + 1] = srcRow[col * 2 + 2]; // Y1
            if (row % 2 == 0) {
                int uvIndex = (row / 2) * (width / 2) + col / 2;
                data[1][uvIndex] = srcRow[col * 2 + 1]; // U
                data[2][uvIndex] = srcRow[col * 2 + 3]; // V
            }
        }
    }
    pushYUV(streamCode,data);
    // pushYUV copies the planes, so the temporaries can be released here
    releaseYuvData(data);
}
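
// Alternative sketch (not wired in): libyuv has a packed-YUYV converter that
// does the same job as the loop above, assuming the YUY2 entry point from
// libyuv's convert header is available in this build. dst must already point
// at I420 planes sized for width x height.
static void exampleYuyvWithLibyuv(const uint8_t *yuyv, uint8_t *dst[3], int width, int height) {
    libyuv::YUY2ToI420(
            yuyv, width * 2, // packed source, 2 bytes per pixel
            dst[0], width,
            dst[1], width / 2,
            dst[2], width / 2,
            width, height);
}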

void pushNV21(int streamCode, jbyte *data) {
    MiniViewData *miniView = getMiniView(streamCode);
    if(!miniView){
        return;
    }
    if(miniView->width == 0 || miniView->height == 0){
        return;
    }
    pthread_mutex_lock(&pushNV21Mutex);
    int src_frame_size = miniView->width * miniView->height * 3 / 2;
    uint8_t *nv21_data = (uint8_t*)data;

    //LOGI("pushNV21 %dx%d",miniView->width ,miniView->height);
    //LOGI("pushNV21 %d",src_frame_size);
    // Buffer for the converted YUV420P data
    uint8_t* srcYUV420P = (uint8_t*)malloc(src_frame_size);

    int frame_size = miniView->width * miniView->height;
    uint8_t* y_plane = srcYUV420P;
    uint8_t* u_plane = srcYUV420P + frame_size;
    uint8_t* v_plane = srcYUV420P + frame_size + frame_size / 4;

    // Manual conversion kept for reference:
    /*memcpy(y_plane, nv21_data, frame_size);

    // NV21 stores UV interleaved as VU VU VU...
    uint8_t* uv_plane = nv21_data + frame_size;
    for (int i = 0; i < frame_size / 4; i++) {
        v_plane[i] = uv_plane[2 * i];     // V comes first in NV21
        u_plane[i] = uv_plane[2 * i + 1]; // U second
    }*/
    uint8_t *src_yplane = nv21_data;
    uint8_t *src_uvplane = nv21_data + miniView->width * miniView->height;

    libyuv::NV21ToI420(
            (const uint8_t *) src_yplane, miniView->width,
            (const uint8_t *) src_uvplane, miniView->width,
            (uint8_t *) y_plane, miniView->width,
            (uint8_t *) u_plane, miniView->width / 2,
            (uint8_t *) v_plane, miniView->width / 2,
            miniView->width, miniView->height);
    // Plane pointers of the converted frame
    uint8_t* src_slices[3] = {srcYUV420P,srcYUV420P + frame_size,srcYUV420P + frame_size + frame_size / 4};
    pushYUV(streamCode,src_slices);
    free(srcYUV420P);
    pthread_mutex_unlock(&pushNV21Mutex);
}

void pushRGBA(int streamCode, uint8_t *rgbaData){
    MiniViewData *miniView = getMiniView(streamCode);
    if(!miniView){
        return;
    }
    //LOGI("pushRGBA %d",rgbaData);
    // Convert RGBA to YUV420P
    if(miniView->width == 0 || miniView->height == 0){
        return;
    }
    int frameSize = miniView->width * miniView->height;
    int uvSize = (miniView->width / 2) * (miniView->height / 2);
    // Allocate the YUV420P buffer
    int yuvSize = frameSize * 3 / 2;
    uint8_t *yuvData = new uint8_t[yuvSize];

    // Split yuvData into its three planes (Y, U, V)
    uint8_t *yPlane = yuvData;
    uint8_t *uPlane = yuvData + frameSize;
    uint8_t *vPlane = uPlane + uvSize;

    // Convert RGBA to YUV420P with libyuv
    libyuv::ARGBToI420(
            reinterpret_cast<uint8_t*>(rgbaData), miniView->width * 4, // source pixels, 4 bytes each
            yPlane, miniView->width,     // Y plane
            uPlane, miniView->width / 2, // U plane
            vPlane, miniView->width / 2, // V plane
            miniView->width,             // width
            miniView->height             // height
    );
    //LOGI("pushRGBA width:%d height:%d %d",miniView->width,miniView->height,rgbaData);
    // Plane pointers of the converted frame
    uint8_t* src_slices[3] = {yuvData,yuvData + frameSize,yuvData + frameSize + frameSize / 4};

    pushYUV(streamCode,src_slices);
    // Release the temporary buffer (allocated with new[], so delete[] is required)
    delete[] yuvData;
}
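
// Note (illustrative): libyuv's "ARGB" converters expect B,G,R,A byte order in
// memory. If the incoming buffer is literally R,G,B,A per pixel (e.g. Android
// RGBA_8888), the matching entry point is ABGRToI420 with the same argument
// layout; which call is correct depends on how the caller fills rgbaData.
static void exampleRgbaWithAbgrPath(const uint8_t *rgba, uint8_t *dst[3], int width, int height) {
    libyuv::ABGRToI420(
            rgba, width * 4, // 4 bytes per pixel
            dst[0], width,
            dst[1], width / 2,
            dst[2], width / 2,
            width, height);
}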

void pushNV21(int streamCode,uint8_t *yData,uint8_t *uData,uint8_t *vData,jint y_stride, jint u_stride,jint v_stride, jint uv_stride,jint angle,jint width,jint height){
    MiniViewData *miniView = getMiniView(streamCode);
    if(!miniView){
        return;
    }
    //LOGI("pushNV21");
    // Convert the camera planes to I420
    if(miniView->width == 0 || miniView->height == 0){
        return;
    }
    pthread_mutex_lock(&pushNV21Mutex);
    // Destination I420 buffers
    int ySize = width * height;
    int uvSize = width * height / 4; // chroma plane size for YUV 4:2:0

    uint8_t* i420Y = new uint8_t[ySize];
    uint8_t* i420U = new uint8_t[uvSize];
    uint8_t* i420V = new uint8_t[uvSize];

    // Convert the Android 420 planes to tightly packed I420 with libyuv
    libyuv::Android420ToI420(yData, y_stride,uData, u_stride,vData, v_stride,uv_stride,
                             i420Y, width,i420U, width/2,i420V,width/2,
                             width, height);
    // Plane pointers of the converted frame
    uint8_t* src_slices[3] = {i420Y,i420U,i420V};
    if(angle != 0){
        uint8_t *dstData[3];
        int new_width = (angle%180 == 0) ? width : height;
        int new_height = (angle%180 == 0) ? height : width;
        LOGI("rotate_yuv angle:%d",angle);
        rotate_yuv(src_slices, width, height,angle,dstData,new_width,new_height);
        pushYUV(streamCode,dstData);
        // pushYUV copies the planes, so the rotated temporaries can be freed here
        releaseYuvData(dstData);
    }else{
        pushYUV(streamCode,src_slices);
    }
    // The intermediate I420 planes are always owned here
    delete[] i420Y;
    delete[] i420U;
    delete[] i420V;
    pthread_mutex_unlock(&pushNV21Mutex);
}

void loadSurface(ANativeWindow *nativeWindow,uint8_t *yuvData[3],int width,int height){
    if(nativeWindow){
        int64_t t = getCurrentTimestamp();

        uint8_t* rgba_data = yuvToRGBA(yuvData,width,height);
        int64_t t1 = getCurrentTimestamp();
        // Configure the window buffer
        //LOGI("configuring video buffer");
        ANativeWindow_setBuffersGeometry(nativeWindow,width,height,WINDOW_FORMAT_RGBA_8888);
        ANativeWindow_Buffer outBuffer;
        int64_t t2 = getCurrentTimestamp();

        ANativeWindow_lock(nativeWindow , &outBuffer,NULL);
        int64_t t3 = getCurrentTimestamp();
        // Render: fill the locked buffer with the converted pixels
        //LOGI("rendering started");
        uint8_t *dst_data = static_cast<uint8_t *>(outBuffer.bits);
        int dst_lineSize = width * 4; // RGBA, 4 bytes per pixel
        // Copy the RGBA data row by row (the window stride may be wider than the frame)
        for (int i = 0; i < height; i++) {
            memcpy(dst_data + i * outBuffer.stride * 4, rgba_data + i * dst_lineSize, dst_lineSize);
        }
        int64_t t4 = getCurrentTimestamp();
        ANativeWindow_unlockAndPost(nativeWindow);
        delete[] rgba_data;
        //LOGI("loadSurface total:%d yuvToRGBA:%d memcpy:%d ANativeWindow_lock:%d setBuffersGeometry:%d %dx%d",getCurrentTimestamp() - t,t1-t,t4-t3,t3-t2,t2-t1,width,height);
    }
}

void pushPCM(int8_t *data){
    //LOGI("pushPCM1");
    pthread_mutex_lock(&pushAudioMutex);
    //LOGI("pushPCM2");
    audioChannel->encodeData(data);
    //LOGI("pushPCM3");
    pthread_mutex_unlock(&pushAudioMutex);
}

void playPcm(uint8_t *out_buffer,int out_buffer_size){

}

void releaseYuvFrameData(YUVData *yuvData){
    if(yuvData){
        releaseYuvData(yuvData->yuvData);
        delete yuvData;
    }
}

void releaseYuvData(uint8_t *yuvData[3]){
    if(yuvData[0] != NULL){
        delete[] yuvData[0];
        delete[] yuvData[1];
        delete[] yuvData[2];
        yuvData[0] = NULL;
    }
}

void releaseYuvDataCallBack(YUVData **yuvData){
    if (!yuvData || !*yuvData) return; // null-check first
    //free((*yuvData)->yuvData);
    //YUVData yuv; // stack variable (must not be deleted)
    //delete *yuvData;
}