//
//  CameraCapture.swift
//  LiveProject
//
//  Created by 倪路朋 on 6/27/25.
//
import AVFoundation
/// Streams live video from the default camera and delivers each captured
/// frame's `CVPixelBuffer` (BGRA) to the `onFrame` callback.
class CameraCapture: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {

    private let session = AVCaptureSession()

    /// Serial queue used both for sample-buffer delivery and for the blocking
    /// session start/stop calls, keeping them off the main thread.
    private let sessionQueue = DispatchQueue(label: "camera.queue")

    /// Invoked once per captured frame with a 32BGRA pixel buffer.
    /// NOTE(review): called on the capture queue, not the main thread —
    /// callers doing UI work must hop to the main actor themselves.
    var onFrame: ((CVPixelBuffer) -> Void)?

    /// Configures the capture session (default video device, `.high` preset,
    /// BGRA video-data output) and starts streaming. Logs and returns early
    /// if the camera device or its input cannot be created.
    func start() {
        guard let device = AVCaptureDevice.default(for: .video),
              let input = try? AVCaptureDeviceInput(device: device) else {
            print("❌ 相机设备无法创建")
            return
        }

        session.beginConfiguration()
        session.sessionPreset = .high

        if session.canAddInput(input) {
            session.addInput(input)
        }

        let output = AVCaptureVideoDataOutput()
        output.videoSettings = [
            kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
        ]
        output.setSampleBufferDelegate(self, queue: sessionQueue)

        if session.canAddOutput(output) {
            session.addOutput(output)
        }

        session.commitConfiguration()

        // AVCaptureSession.startRunning() blocks until capture is live;
        // per Apple's documentation it must not run on the main thread.
        sessionQueue.async { [weak self] in
            self?.session.startRunning()
        }
    }

    /// Stops the capture session and releases the camera.
    /// Counterpart to `start()`; safe to call even if capture never started.
    func stop() {
        sessionQueue.async { [weak self] in
            self?.session.stopRunning()
        }
    }

    // MARK: - AVCaptureVideoDataOutputSampleBufferDelegate

    func captureOutput(_ output: AVCaptureOutput,
                       didOutput sampleBuffer: CMSampleBuffer,
                       from connection: AVCaptureConnection) {
        guard let buffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
        onFrame?(buffer)
    }
}
|