package com.runt.live.media
|
|
import android.annotation.SuppressLint
|
import android.content.Context
|
import android.graphics.Bitmap
|
import android.graphics.ImageFormat
|
import android.graphics.Point
|
import android.hardware.camera2.CameraCharacteristics
|
import android.hardware.camera2.CameraManager
|
import android.util.Log
|
import android.util.Size
|
import androidx.annotation.OptIn
|
import androidx.camera.camera2.interop.ExperimentalCamera2Interop
|
import androidx.camera.core.Camera
|
import androidx.camera.core.CameraSelector
|
import androidx.camera.core.ImageAnalysis
|
import androidx.camera.core.ImageCapture
|
import androidx.camera.core.ImageProxy
|
import androidx.camera.core.Preview
|
import androidx.camera.core.resolutionselector.ResolutionSelector
|
import androidx.camera.core.resolutionselector.ResolutionStrategy
|
import androidx.camera.lifecycle.ProcessCameraProvider
|
import androidx.core.content.ContextCompat
|
import androidx.lifecycle.Lifecycle
|
import androidx.lifecycle.LifecycleOwner
|
import androidx.lifecycle.LifecycleRegistry
|
import com.google.common.util.concurrent.ListenableFuture
|
import com.runt.live.cpp.LiveMiniView
|
import com.runt.live.cpp.LiveMiniView.mainStreamCode
|
import com.runt.live.data.StreamWindow
|
import com.runt.live.enum.LiveState
|
import com.runt.live.ui.stream.LiveViewModel
|
import com.runt.live.util.BitmapUtils
|
import com.runt.open.mvi.OpenApplication
|
import java.text.SimpleDateFormat
|
import java.util.Date
|
import java.util.concurrent.LinkedBlockingQueue
|
import kotlin.concurrent.thread
|
|
|
/**
 * Camera capture helper for live streaming: binds the CameraX preview,
 * still-capture and image-analysis use cases to its own lifecycle, converts
 * analyzed frames to NV21 and queues them for the stream-push thread.
 *
 * @author Runt (qingingrunt2010@qq.com)
 * @date 9/22/24
 */
|
class CameraHelper : LifecycleOwner {

    val TAG = "CameraHelper"

    /** One analyzed frame: decoded bitmap, its NV21 bytes, and capture timestamp in ms. */
    data class BytesData(val bitmap : Bitmap, val bytes : ByteArray, val timestamp : Long)

    // Frames produced by the analyzer callback, consumed by the thread started
    // in startPushData().
    protected var imagesQueue = LinkedBlockingQueue<BytesData>()

    private var cameraProviderFuture : ListenableFuture<ProcessCameraProvider>? = null
    var camera : Camera? = null
    var cameraSelector = CameraSelector.DEFAULT_FRONT_CAMERA

    // Use case: on-screen preview.
    val preview = Preview.Builder().build()

    // Use case: still capture, tuned for minimal shutter latency.
    val imageCapture = ImageCapture.Builder()
            .setCaptureMode(ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY).build()

    // Analysis resolution: frame size comes from the native layer (LiveMiniView);
    // FALLBACK_RULE_NONE means no substitute size is accepted if unavailable.
    val strategy = ResolutionStrategy(Size(LiveMiniView.FRAME_HEIGHT, LiveMiniView.FRAME_WIDTH), ResolutionStrategy.FALLBACK_RULE_NONE)
    val resolutionBuilder = ResolutionSelector.Builder().setResolutionStrategy(strategy)
    val imageAnalysis = ImageAnalysis.Builder().setOutputImageRotationEnabled(true) // allow the output stream to be rotated
            .setOutputImageFormat(ImageAnalysis.OUTPUT_IMAGE_FORMAT_YUV_420_888)
            .setResolutionSelector(resolutionBuilder.build()).build()

    val mContext : Context
    var mStreamWindow : StreamWindow? = null
    var mViewModel : LiveViewModel
    val mLifecycle : LifecycleRegistry
    var startTime = 0L

    // FIX: pattern was "hh:MM:ss" — 'MM' is month-of-year and 'hh' is the 12-hour
    // clock; "HH:mm:ss" is the intended 24-hour hour:minute:second format.
    var dateFmt = SimpleDateFormat("HH:mm:ss")

    constructor(context : Context, viewModel : LiveViewModel) {
        mContext = context
        mViewModel = viewModel
        // FIX: this helper is itself the LifecycleOwner (see the `lifecycle` override
        // and bindToLifecycle(this, ...)). Building the registry on
        // `mContext as LifecycleOwner` threw ClassCastException whenever the Context
        // (e.g. a Service) did not implement LifecycleOwner.
        mLifecycle = LifecycleRegistry(this)
        mLifecycle.currentState = Lifecycle.State.CREATED
        mLifecycle.currentState = Lifecycle.State.STARTED

        // Diagnostic dump: every camera id, its facing direction and the YUV output
        // sizes it supports.
        val cameraManager = context.getSystemService(Context.CAMERA_SERVICE) as CameraManager
        for (cameraId in cameraManager.cameraIdList) {
            val characteristics = cameraManager.getCameraCharacteristics(cameraId)
            val outputSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)!!
                    .getOutputSizes(ImageFormat.YUV_420_888)
            val lensFacing = characteristics.get(CameraCharacteristics.LENS_FACING)
            Log.d("CameraX" , "Camera ID: $cameraId, Facing: $lensFacing")
            for (size in outputSizes) {
                Log.i(TAG , "cameraId: ${cameraId} size: ${size}")
            }
        }
        startPushData()
        //detectInputDeviceUsb(context)
    }

    /** Unbinds all use cases and stops analysis; the lifecycle itself stays alive. */
    private fun stopCamera() {
        cameraProviderFuture?.get()?.unbindAll()
        imageAnalysis.clearAnalyzer()
        camera = null
    }

    /** Full shutdown: unbind the camera, destroy the lifecycle, drop queued frames. */
    fun closeCamera() {
        stopCamera()
        //audioRecord!!.stop()
        mLifecycle.currentState = Lifecycle.State.DESTROYED
        imagesQueue.clear()
    }

    /**
     * Toggles between the front and back lens and rebinds all use cases.
     * (Method name kept as-is — existing callers use `swichCamera`.)
     */
    @OptIn(ExperimentalCamera2Interop::class)
    @SuppressLint("RestrictedApi")
    fun swichCamera() {
        Log.i(TAG , "swichCamera: ${cameraSelector}")
        // NOTE(review): an earlier variant selected LENS_FACING_EXTERNAL when coming
        // from the back camera; currently we simply flip front <-> back.
        val lensFacing = if (cameraSelector == CameraSelector.DEFAULT_FRONT_CAMERA) {
            CameraSelector.LENS_FACING_BACK
        } else {
            CameraSelector.LENS_FACING_FRONT
        }
        cameraSelector = CameraSelector.Builder().requireLensFacing(lensFacing).build()
        stopCamera()
        Log.i(TAG , "bindCamera: ${cameraSelector}")
        bindCamera(cameraSelector)
    }

    /**
     * Starts the consumer thread that drains [imagesQueue]: applies the configured
     * video delay, keeps the window's size/ratio state in sync with the frame,
     * pushes NV21 bytes to the encoder while live, and renders the bitmap to the
     * preview surface when the app is in the foreground.
     */
    fun startPushData() {
        thread {
            // NOTE(review): take() blocks; after DESTROYED the thread only exits once
            // one more frame arrives — confirm whether a poll timeout is wanted.
            while (mLifecycle.currentState != Lifecycle.State.DESTROYED) {
                val imageData = imagesQueue.take()
                // FIX: the original dereferenced mStreamWindow!! below even though the
                // field is nullable; a null window now drops the frame instead of
                // killing this thread with an NPE.
                val streamWindow = mStreamWindow
                if (streamWindow == null) {
                    imageData.bitmap.recycle()
                    continue
                }
                if (streamWindow.videoDelay > 0) {
                    // Hold the frame until capture-time + configured delay has passed.
                    val now = Date().time
                    val wait = (imageData.timestamp + streamWindow.videoDelay) - now
                    Log.i(TAG , "startPushData: ${streamWindow.videoDelay} ${wait}")
                    if (wait < streamWindow.videoDelay && wait > 0) {
                        Thread.sleep(wait)
                    }
                }
                if (streamWindow.sizeState.value.x != imageData.bitmap.width) {
                    // Landscape frames need a wider minimum view ratio.
                    if (imageData.bitmap.width > imageData.bitmap.height && streamWindow.viewRateState.value < 0.4) {
                        streamWindow.viewRateState.value = 0.4f
                    }
                    streamWindow.sizeState.value = Point(imageData.bitmap.width , imageData.bitmap.height)
                }
                if (streamWindow.videoState.value == LiveState.IN_LIVE || streamWindow.id == mainStreamCode) {
                    //val bytes = BitmapUtils.instance !!.YUV420toNV21(image) !!
                    mViewModel.pushNV21(streamWindow.id , imageData.bytes)
                }
                if (OpenApplication.getApplication().isInfront()) {
                    streamWindow.surfaceHolder?.let {
                        //Log.i(TAG , "render: ${it.surfaceFrame.width()} ${it.surfaceFrame.height()} ${it.hashCode()}")
                        BitmapUtils.instance!!.cavansSurface(it , imageData.bitmap)
                    }
                }
                // FIX: recycle on every path — previously the bitmap was only recycled
                // while the app was in the foreground, leaking background frames.
                imageData.bitmap.recycle()
            }
        }
    }

    /**
     * Opens the front camera for [streamWindow]; once the provider is ready the use
     * cases are bound and the window's onStarted listener is invoked.
     */
    fun openCamera(streamWindow : StreamWindow?) {
        mStreamWindow = streamWindow
        cameraProviderFuture = ProcessCameraProvider.getInstance(mContext)
        cameraSelector = CameraSelector.DEFAULT_FRONT_CAMERA
        cameraProviderFuture!!.addListener(Runnable {
            bindCamera(cameraSelector)
            mStreamWindow?.listener?.onStarted?.invoke()
        } , ContextCompat.getMainExecutor(mContext))
        startTime = Date().time
    }

    /**
     * Binds preview, still-capture and analysis to this helper's lifecycle. The
     * analyzer converts each frame to NV21 bytes plus a Bitmap and enqueues the
     * pair for the push thread, logging the measured FPS roughly once per second.
     */
    @SuppressLint("UnsafeOptInUsageError")
    private fun bindCamera(selector : CameraSelector) {
        val cameraProvider = cameraProviderFuture!!.get()
        cameraProvider.unbindAll()
        var fpsWindowStart = Date().time
        var frameCount = 0
        imageAnalysis.setAnalyzer(ContextCompat.getMainExecutor(mContext)) { image : ImageProxy ->
            frameCount += 1
            if (Date().time - fpsWindowStart > 1000) {
                Log.e(TAG , "bindCamera fps: ${frameCount}")
                frameCount = 0
                fpsWindowStart = Date().time
            }
            val bytes : ByteArray = BitmapUtils.instance!!.decodeToNV21(image)!!
            val bitmap = image.toBitmap()
            // Close the ImageProxy promptly so CameraX can reuse the buffer.
            image.close()
            imagesQueue.put(BytesData(bitmap , bytes , Date().time))
        }
        //preview.setSurfaceProvider(mStreamWindow.surfaceHolder!!)
        camera = cameraProvider.bindToLifecycle(this , selector , preview , imageCapture , imageAnalysis)
        mLifecycle.currentState = Lifecycle.State.RESUMED
    }

    /** Callback for consumers that want raw analyzer frames. */
    interface OnFrameUpdateListener {
        fun onUpdate(image : ImageProxy)
    }

    override val lifecycle : Lifecycle
        get() = mLifecycle
}
|