//
//  LiveViewModel.swift
//  LiveProject
//
//  Created by 倪路朋 on 6/27/25.
//

import UIKit
import AVFoundation
import Combine   // ObservableObject / @Published live in Combine

class LiveViewModel: ObservableObject {

    /// Latest captured frame, published on the main thread for preview rendering.
    @Published var pixelBuffer: CVPixelBuffer?

    /// H.264 encoder configured for 1080x1920 at 30 fps and 1 Mbps.
    let encoder = H264Encoder(width: 1080, height: 1920, fps: 30, bitrate: 1_000_000)
    /// Number of frames received from the camera so far.
    var frameIndex: Int64 = 0
    /// Serial queue intended for encoding work.
    let encodeQueue = DispatchQueue(label: "encoder.queue")

    lazy var camera = CameraCapture()
    /// Wall-clock time (ms) of the most recently captured frame, used to measure encode latency.
    var timestamp = Int(Date().timeIntervalSince1970 * 1000)

    func newWindowAction(device: DeviceInfo, completion: @escaping (Bool) -> Void = { _ in }) {
        switch device.type {
        case StreamType.CAMERA:
            requestCameraPermission(mediaType: .video) { status in
                if status {
                    self.camera.onFrame = { [weak self] buffer in
                        guard let self = self else { return }

                        let width = CVPixelBufferGetWidth(buffer)
                        let height = CVPixelBufferGetHeight(buffer)
                        guard width > 0 && height > 0 else {
                            print("Invalid pixel buffer size: \(width)x\(height)")
                            return
                        }

                        self.frameIndex += 1
                        // Record the capture time so the encode callback can report latency.
                        self.timestamp = Int(Date().timeIntervalSince1970 * 1000)

                        // Use the media clock for the presentation timestamp (millisecond timescale).
                        let cmTime = CMTimeMake(value: Int64(CACurrentMediaTime() * 1000), timescale: 1000)
                        self.encoder.encode(pixelBuffer: buffer, pts: cmTime)

                        // Publish the raw frame on the main thread for preview rendering.
                        DispatchQueue.main.async {
                            self.pixelBuffer = buffer
                        }
                        // print("Frame updated")
                    }

                    // Register the encode callback before starting capture so early frames are not missed.
                    self.encoder.onEncoded = { (data: Data, ctime: CMTime, isKey: Bool) in
                        let now = Int(Date().timeIntervalSince1970 * 1000)
                        print("Encode latency: \(now - self.timestamp) ms")
                        print("Encoded NALU size: \(data.count), key frame: \(isKey)")
                        // See annexBFrame(from:) below for one way to frame this NALU.
                    }

                    DispatchQueue.global(qos: .userInitiated).async {
                        self.camera.start()
                    }
                    print("Camera started")
                }
                completion(status)
            }
        default:
            completion(true)
        }
    }
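
    // MARK: - Annex-B framing sketch (not part of the original file)
    //
    // A minimal sketch of how an encoded NALU delivered by `onEncoded` above could
    // be framed for a raw .h264 elementary stream. It assumes `data` is a single
    // NALU without a start code; H264Encoder's actual output format is defined
    // elsewhere in the project and may already include framing or length prefixes.
    private func annexBFrame(from nalu: Data) -> Data {
        var framed = Data([0x00, 0x00, 0x00, 0x01])   // 4-byte Annex-B start code
        framed.append(nalu)
        return framed
    }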

    func closeWindowAction(device: DeviceInfo) {
        switch device.type {
        case StreamType.CAMERA:
            // Capture teardown is not implemented here yet; only log the request.
            print("Closing camera")
        default:
            break
        }
    }

    /// Checks the authorization status for the given media type and asks for
    /// access if it has not been determined yet. Note that capture also requires
    /// an NSCameraUsageDescription entry in the app's Info.plist.
    func requestCameraPermission(mediaType: AVMediaType, completion: @escaping (Bool) -> Void) {
        let status = AVCaptureDevice.authorizationStatus(for: mediaType)
        switch status {
        case .authorized:
            completion(true)
        case .notDetermined:
            AVCaptureDevice.requestAccess(for: mediaType) { granted in
                DispatchQueue.main.async {
                    completion(granted)
                }
            }
        default:
            // denied / restricted
            completion(false)
        }
    }

    /// Deep-copies a CVPixelBuffer so the result no longer references the
    /// capture session's buffer pool.
    func copyPixelBuffer(_ src: CVPixelBuffer) -> CVPixelBuffer? {
        let width = CVPixelBufferGetWidth(src)
        let height = CVPixelBufferGetHeight(src)
        let pixelFormat = CVPixelBufferGetPixelFormatType(src)

        var dst: CVPixelBuffer?
        let attrs: [String: Any] = [
            kCVPixelBufferIOSurfacePropertiesKey as String: [:]
        ]

        let status = CVPixelBufferCreate(
            kCFAllocatorDefault,
            width,
            height,
            pixelFormat,
            attrs as CFDictionary,
            &dst
        )

        guard status == kCVReturnSuccess, let dstBuffer = dst else {
            print("❌ Failed to copy pixel buffer")
            return nil
        }

        CVPixelBufferLockBaseAddress(src, .readOnly)
        CVPixelBufferLockBaseAddress(dstBuffer, [])
        defer {
            CVPixelBufferUnlockBaseAddress(src, .readOnly)
            CVPixelBufferUnlockBaseAddress(dstBuffer, [])
        }

        // Copy row by row so a bytes-per-row (alignment) mismatch between the two
        // buffers cannot skew the image or write out of bounds. Non-planar formats
        // such as BGRA are treated as a single plane.
        let isPlanar = CVPixelBufferIsPlanar(src)
        let planeCount = isPlanar ? CVPixelBufferGetPlaneCount(src) : 1

        for plane in 0..<planeCount {
            guard
                let srcBase = isPlanar
                    ? CVPixelBufferGetBaseAddressOfPlane(src, plane)
                    : CVPixelBufferGetBaseAddress(src),
                let dstBase = isPlanar
                    ? CVPixelBufferGetBaseAddressOfPlane(dstBuffer, plane)
                    : CVPixelBufferGetBaseAddress(dstBuffer)
            else { continue }

            let rows = isPlanar ? CVPixelBufferGetHeightOfPlane(src, plane) : height
            let srcBytesPerRow = isPlanar ? CVPixelBufferGetBytesPerRowOfPlane(src, plane) : CVPixelBufferGetBytesPerRow(src)
            let dstBytesPerRow = isPlanar ? CVPixelBufferGetBytesPerRowOfPlane(dstBuffer, plane) : CVPixelBufferGetBytesPerRow(dstBuffer)
            let rowBytes = min(srcBytesPerRow, dstBytesPerRow)

            for row in 0..<rows {
                memcpy(dstBase + row * dstBytesPerRow, srcBase + row * srcBytesPerRow, rowBytes)
            }
        }

        return dstBuffer
    }
}
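
// MARK: - Preview rendering sketch (not part of the original file)
//
// A minimal sketch of how a SwiftUI view might observe `pixelBuffer` and draw it
// via Core Image. `LivePreviewView` is a hypothetical name; the project may render
// frames differently (e.g. with AVSampleBufferDisplayLayer or Metal). The import
// below would conventionally live at the top of the file.
import SwiftUI

struct LivePreviewView: View {
    @ObservedObject var viewModel: LiveViewModel
    let ciContext = CIContext()

    var body: some View {
        Group {
            if let buffer = viewModel.pixelBuffer, let cgImage = makeCGImage(from: buffer) {
                Image(decorative: cgImage, scale: 1)
                    .resizable()
                    .aspectRatio(contentMode: .fit)
            } else {
                Color.black
            }
        }
    }

    // Converts the captured CVPixelBuffer into a CGImage for display.
    private func makeCGImage(from buffer: CVPixelBuffer) -> CGImage? {
        let ciImage = CIImage(cvPixelBuffer: buffer)
        return ciContext.createCGImage(ciImage, from: ciImage.extent)
    }
}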