Runt
2025-07-09 3b7521a47ae731f0bf0a922822e4417493489539
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
//
//  LiveViewModel.swift
//  LiveProject
//
//  Created by 倪路朋 on 6/27/25.
//
import UIKit
import AVFoundation
 
/// View model for the live-capture screen: requests camera permission,
/// receives camera frames, forwards them to the H.264 encoder, and
/// publishes the latest frame for the UI preview.
class LiveViewModel: ObservableObject {
    /// Most recent camera frame; mutated on the main thread for the UI.
    @Published var pixelBuffer: CVPixelBuffer?

    let encoder = H264Encoder(width: 1080, height: 1920, fps: 30, bitrate: 1_000_000)
    /// Count of frames handed to the encoder since capture started.
    var frameIndex: Int64 = 0
    let encodeQueue = DispatchQueue(label: "encoder.queue")

    lazy var camera = CameraCapture()
    /// Wall-clock millisecond timestamp of the last frame sent to the encoder;
    /// used to log encode latency in `onEncoded`.
    var timestamp = Int(Date().timeIntervalSince1970 * 1000)

    /// Opens the stream for `device`. For camera streams this requests
    /// permission, wires the frame/encoder callbacks, and starts capture.
    /// - Parameters:
    ///   - device: The device whose stream should be opened.
    ///   - completion: Called with `true` on success (always `true` for
    ///     non-camera types), `false` when camera permission is denied.
    func newWindowAction(device: DeviceInfo, completion: @escaping (Bool) -> Void = { _ in }) {
        switch device.type {
        case StreamType.CAMERA:
            requestCameraPermission(mediaType: .video) { [weak self] granted in
                guard let self = self else { return }
                if granted {
                    self.camera.onFrame = { [weak self] buffer in
                        guard let self = self else { return }

                        let width = CVPixelBufferGetWidth(buffer)
                        let height = CVPixelBufferGetHeight(buffer)
                        guard width > 0 && height > 0 else {
                            print("Invalid pixel buffer size: \(width)x\(height)")
                            return
                        }

                        self.frameIndex += 1
                        self.timestamp = Int(Date().timeIntervalSince1970 * 1000)

                        // PTS from the media clock so timestamps are monotonic
                        // even if the wall clock is adjusted.
                        let cmTime = CMTimeMake(value: Int64(CACurrentMediaTime() * 1000), timescale: 1000)
                        self.encoder.encode(pixelBuffer: buffer, pts: cmTime)

                        // @Published properties must be mutated on the main thread.
                        DispatchQueue.main.async {
                            self.pixelBuffer = buffer
                        }
                    }
                    // [weak self] breaks the retain cycle self -> encoder -> onEncoded -> self.
                    // Installed BEFORE capture starts so no encoded frame is dropped.
                    self.encoder.onEncoded = { [weak self] (data: Data, ctime: CMTime, isKey: Bool) in
                        guard let self = self else { return }
                        let now = Int(Date().timeIntervalSince1970 * 1000)
                        print("编码时间2 \(now - self.timestamp)")
                        print("Encoded NALU size: \(data.count), key frame: \(isKey)")
                    }
                    DispatchQueue.global(qos: .userInitiated).async {
                        self.camera.start()
                    }
                    print("启动相机")
                }
                completion(granted)
            }
        default:
            completion(true)
        }
    }

    /// Tears down the stream for `device`.
    func closeWindowAction(device: DeviceInfo) {
        switch device.type {
        case StreamType.CAMERA:
            print("关闭相机")
            // NOTE(review): capture is never actually stopped here — CameraCapture
            // shows no stop() counterpart to start() in this file. Confirm the API
            // and stop the session + detach onFrame/onEncoded, or the camera keeps
            // running after the window closes.
        default:
            break
        }
    }

    /// Resolves camera/microphone permission for `mediaType`.
    /// - Parameters:
    ///   - mediaType: The capture media type to authorize (e.g. `.video`).
    ///   - completion: Receives `true` when authorized. For the `.notDetermined`
    ///     path it is delivered on the main queue; for already-resolved states it
    ///     runs synchronously on the caller's thread.
    func requestCameraPermission(mediaType: AVMediaType, completion: @escaping (Bool) -> Void) {
        let status = AVCaptureDevice.authorizationStatus(for: mediaType)
        switch status {
        case .authorized:
            completion(true)
        case .notDetermined:
            // Use the requested mediaType (the original hard-coded .video,
            // silently ignoring the parameter for audio callers).
            AVCaptureDevice.requestAccess(for: mediaType) { granted in
                DispatchQueue.main.async {
                    completion(granted)
                }
            }
        default:
            // denied / restricted
            completion(false)
        }
    }

    /// Deep-copies a pixel buffer into a new IOSurface-backed buffer with the
    /// same dimensions and pixel format. Handles both planar (e.g. NV12) and
    /// non-planar (e.g. BGRA) layouts, copying row-by-row so differing
    /// bytes-per-row strides between source and destination are safe.
    /// - Returns: The copy, or `nil` if allocation or address lookup failed.
    func copyPixelBuffer(_ src: CVPixelBuffer) -> CVPixelBuffer? {
        let width = CVPixelBufferGetWidth(src)
        let height = CVPixelBufferGetHeight(src)
        let pixelFormat = CVPixelBufferGetPixelFormatType(src)

        var dst: CVPixelBuffer?
        let attrs: [String: Any] = [
            kCVPixelBufferIOSurfacePropertiesKey as String: [:]
        ]

        let status = CVPixelBufferCreate(
            kCFAllocatorDefault,
            width,
            height,
            pixelFormat,
            attrs as CFDictionary,
            &dst
        )

        guard status == kCVReturnSuccess, let dstBuffer = dst else {
            print("❌ 复制 PixelBuffer 失败")
            return nil
        }

        CVPixelBufferLockBaseAddress(src, .readOnly)
        CVPixelBufferLockBaseAddress(dstBuffer, [])
        // defer guarantees both buffers are unlocked on every exit path.
        defer {
            CVPixelBufferUnlockBaseAddress(dstBuffer, [])
            CVPixelBufferUnlockBaseAddress(src, .readOnly)
        }

        let planeCount = CVPixelBufferGetPlaneCount(src)
        if planeCount == 0 {
            // Non-planar layout: the original loop copied nothing here,
            // returning a blank buffer. Copy the single contiguous plane.
            guard let srcBase = CVPixelBufferGetBaseAddress(src),
                  let dstBase = CVPixelBufferGetBaseAddress(dstBuffer) else {
                print("❌ 复制 PixelBuffer 失败")
                return nil
            }
            let srcBPR = CVPixelBufferGetBytesPerRow(src)
            let dstBPR = CVPixelBufferGetBytesPerRow(dstBuffer)
            let rowBytes = min(srcBPR, dstBPR)
            for row in 0..<height {
                memcpy(dstBase + row * dstBPR, srcBase + row * srcBPR, rowBytes)
            }
        } else {
            for plane in 0..<planeCount {
                guard let srcAddr = CVPixelBufferGetBaseAddressOfPlane(src, plane),
                      let dstAddr = CVPixelBufferGetBaseAddressOfPlane(dstBuffer, plane) else {
                    print("❌ 复制 PixelBuffer 失败")
                    return nil
                }
                let planeHeight = CVPixelBufferGetHeightOfPlane(src, plane)
                let srcBPR = CVPixelBufferGetBytesPerRowOfPlane(src, plane)
                let dstBPR = CVPixelBufferGetBytesPerRowOfPlane(dstBuffer, plane)
                let rowBytes = min(srcBPR, dstBPR)
                for row in 0..<planeHeight {
                    memcpy(dstAddr + row * dstBPR, srcAddr + row * srcBPR, rowBytes)
                }
            }
        }

        return dstBuffer
    }
}