LiveProject/controller/CameraCapture.swift
@@ -0,0 +1,87 @@
//
//  CameraCapture.swift
//  LiveProject
//
//  Created by 倪路朋 on 6/27/25.
//
import AVFoundation
class CameraCapture: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
    private let session = AVCaptureSession()
    private var videoOutput: AVCaptureVideoDataOutput?
    private var input: AVCaptureDeviceInput?
    var onFrame: ((CVPixelBuffer) -> Void)?
    func start() {
        guard let device = AVCaptureDevice.default(for: .video),
              let input = try? AVCaptureDeviceInput(device: device) else {
            print("❌ 相机设备无法创建")
            return
        }
        self.input = input
        session.beginConfiguration()
        session.sessionPreset = .hd1920x1080
        if session.canAddInput(input) {
            session.addInput(input)
        }
        let output = AVCaptureVideoDataOutput()
        output.videoSettings = [
            kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
        ]
        output.setSampleBufferDelegate(self, queue: DispatchQueue(label: "camera.queue"))
        if session.canAddOutput(output) {
            session.addOutput(output)
        }
        self.videoOutput = output
        // Configure the video connection's orientation and mirroring
        if let videoConnection = output.connection(with: .video) {
            // Handle mirroring manually rather than letting the session adjust it
            videoConnection.automaticallyAdjustsVideoMirroring = false
            videoConnection.videoOrientation = .portrait // or derive from the current UI orientation
            // Enable mirroring for the front camera (requires tracking the device
            // position, e.g. a `cameraPosition` property not defined in this class):
            /*if videoConnection.isVideoMirroringSupported {
                videoConnection.isVideoMirrored = (cameraPosition == .front)
            }*/
        }
        session.commitConfiguration()
        // startRunning() blocks while the session spins up, so call it off the main queue
        DispatchQueue.global(qos: .userInitiated).async { [weak self] in
            self?.session.startRunning()
            print("📷 Camera started")
        }
    }
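    // Usage note: the session delivers frames only when the app has camera permission
    // (and an NSCameraUsageDescription entry in Info.plist). A minimal caller-side
    // sketch, where `cameraCapture` stands in for an instance owned by the caller:
    //
    //     AVCaptureDevice.requestAccess(for: .video) { granted in
    //         if granted { cameraCapture.start() }
    //     }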
    func captureOutput(_ output: AVCaptureOutput,
                       didOutput sampleBuffer: CMSampleBuffer,
                       from connection: AVCaptureConnection) {
        guard let buffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
        // Debug logging, left disabled:
        //print("Buffer Size: \(CVPixelBufferGetWidth(buffer))x\(CVPixelBufferGetHeight(buffer))")
        //print("Connection orientation: \(connection.videoOrientation.rawValue)")
        //print("Rotation angle: \(connection.videoRotationAngle)")
        onFrame?(buffer)
    }
    func stop() {
        session.stopRunning()
        session.beginConfiguration()
        if let input = input {
            session.removeInput(input)
        }
        if let output = videoOutput {
            session.removeOutput(output)
        }
        session.commitConfiguration()
        input = nil
        videoOutput = nil
        print("📷 相机已关闭")
    }
}
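// Usage sketch (illustrative): `CameraPreviewDriver` is a hypothetical owner type
// showing how the onFrame callback and the start/stop pair are meant to be wired up.
final class CameraPreviewDriver {
    private let capture = CameraCapture()
    func begin(render: @escaping (CVPixelBuffer) -> Void) {
        // Frames arrive on the "camera.queue" delegate queue as 32BGRA pixel buffers.
        capture.onFrame = render
        capture.start()
    }
    func end() {
        capture.stop()
    }
}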