From acf8e83cbf106b4350536d54eb46379dd86a623c Mon Sep 17 00:00:00 2001
From: Runt <qingingrunt2010@qq.com>
Date: Fri, 04 Jul 2025 17:05:00 +0000
Subject: [PATCH] Input dialog, icon rework, camera adjustments
---
LiveProject/views/VideoRendererView.swift | 38 +
.gitignore | 1
LiveProject/enum/Icons.swift | 24 +
LiveProject/tool/H264Encoder.swift | 166 ++++++++
LiveProject/activity/stream/LiveActivity.swift | 102 +++-
LiveProject/tool/MetalRenderer.swift | 127 +++--
LiveProject/data/DeviceInfo.swift | 2
LiveProject/shape/IconBack.swift | 36 -
LiveProject/shape/IconCamera.swift | 60 -
LiveProject/shape/IconLandscape.swift | 27 -
LiveProject/shape/IconMic.swift | 63 -
LiveProject/shape/IconPortrait.swift | 28 -
LiveProject/shape/IconRect.swift | 22 -
LiveProject/tool/CameraHelper.swift | 12 -
LiveProject/views/FlowLayout.swift | 190 ++------
LiveProject/controller/CameraCapture.swift | 31 +
LiveProject/tool/PixelBufferConverter.swift | 44 ++
LiveProject/activity/stream/LiveViewModel.swift | 101 ++++
LiveProject/tool/MetalPixelConverter.swift | 227 +++++++++++
LiveProject/data/IconInfo.swift | 12
LiveProject/views/MButton.swift | 16
LiveProject/views/LTextField.swift | 73 +++
LiveProject/views/TitleBarView.swift | 8
23 files changed, 925 insertions(+), 249 deletions(-)
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..85e7c1d
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1 @@
+/.idea/
diff --git a/LiveProject/activity/stream/LiveActivity.swift b/LiveProject/activity/stream/LiveActivity.swift
index ef284a2..57e7890 100644
--- a/LiveProject/activity/stream/LiveActivity.swift
+++ b/LiveProject/activity/stream/LiveActivity.swift
@@ -11,10 +11,9 @@
import MetalKit
struct LiveActivity: View {
- @State private var pixelBuffer: CVPixelBuffer?
-
@State private var showDeviceDialog = false
+ @State private var showInputDialog = false
@State private var streamRate = Float(9/16.0);
@State private var fpsState = 30;
@@ -22,27 +21,27 @@
@State private var displaySize : CGSize = .zero;
- @State private var devices = [DeviceInfo(name: "相机", type: .CAMERA, deviceId: UUID().uuidString,icon: IconCamera()),
- DeviceInfo(name: "话筒", type: .MICROPHONE,deviceId: UUID().uuidString,icon: IconMic()),
- DeviceInfo(name: "系统", type: .SYSTEM,deviceId : UUID().uuidString,icon: IconPortrait())]
+ @State private var devices = [DeviceInfo(name: "相机", type: .CAMERA, deviceId: UUID().uuidString,icon: Icons.CAMERA),
+ DeviceInfo(name: "话筒", type: .MICROPHONE,deviceId: UUID().uuidString,icon: Icons.MIC),
+ DeviceInfo(name: "系统", type: .SYSTEM,deviceId : UUID().uuidString,icon: Icons.PORTRAIT)]
- private let mViewModel = LiveViewModel()
+ @StateObject private var mViewModel = LiveViewModel()
var body: some View {
ZStack{
Color.clear
.ignoresSafeArea() // 填满全屏
VStack{
- VideoRendererView(renderer:mViewModel.renderer).background(Color.black).frame(width: mainSize.width,height:mainSize.height)
+ VideoRendererView(pixelBuffer: $mViewModel.pixelBuffer).background(Color.black).frame(width: mainSize.width,height:mainSize.height)
Spacer()
}.border(Color.blue)
VStack{
Spacer()
BottomBtns().frame(alignment: .bottom).border(Color.green)
}
- if showDeviceDialog {
- DialogDevices()
+ if showInputDialog{
+ DialogInput()
}
}.frame(minWidth: 0, maxWidth: .infinity, minHeight: 0, maxHeight: .infinity, alignment: .topLeading)
.background(
@@ -99,20 +98,59 @@
print("updateWindow:\(mainSize)")
}
- func DialogDevices() -> some View{
+ func DialogInput(onCancel: @escaping () -> Void = {}, onConfirm: @escaping () -> Void = {}) -> some View{
ZStack{
Color.black.opacity(0.4)
.edgesIgnoringSafeArea(.all)
.onTapGesture {
withAnimation {
- showDeviceDialog = false
+ showInputDialog = false
}
}
VStack {
- Spacer()
- VStack(spacing: 20) {
- Spacer().frame(height:40)
- FlowLayout(devices){ device in
+ VStack(alignment: .leading, spacing: 40) {
+ Text("请输入直播地址")
+ .font(Font.system(size: 20))
+ LTextField().environmentObject(LText())
+ HStack{
+ Spacer()
+ Button(action:{
+ showInputDialog.toggle();
+ onCancel()
+ }){
+ Text("取消")
+ .font(Font.system(size: 20))
+ .foregroundColor(Color.gray)
+ }
+
+ Spacer().frame(width: 30)
+ Button(action:{
+ showInputDialog.toggle();
+ onConfirm()
+ }){
+ Text("确认")
+ .font(Font.system(size: 20))
+ .foregroundColor(Color.colorTextLink)
+ }
+
+ }
+ }
+ .padding(30)
+ .background(Color.white)
+ .cornerRadius(20)
+ .transition(.move(edge: .bottom))
+ }
+ .padding(60)
+ .zIndex(1)
+ .animation(.default, value: showInputDialog)
+ }
+ }
+
+ func DialogDevices() -> some View{
+ VStack{
+ VStack(spacing: 20) {
+ Spacer().frame(height:20)
+ FlowLayout(){
+
+ ForEach(devices, id: \.self) { device in
MButton(icon: device.icon,text: device.name){
mViewModel.newWindowAction(device: device){ status in
withAnimation{
@@ -122,17 +160,12 @@
print("\(device.name) click")
}
}
- .padding()
}
- .frame(maxWidth: .infinity)
- .padding()
- .background(Color.white)
- .cornerRadius(20)
- .transition(.move(edge: .bottom))
}
- .zIndex(1)
- .animation(.default, value: devices)
+ .frame(maxWidth: .infinity,alignment:.leading)
+ .padding()
}
+ .frame(maxHeight: .infinity,alignment:.topLeading)
}
func BottomBtns() -> some View{
@@ -140,7 +173,7 @@
HStack(){
//横竖屏控制
- MButton(icon:streamRate == (9/16.0) ? IconPortrait() : IconLandscape() ){
+ MButton(icon:streamRate == (9/16.0) ? Icons.PORTRAIT : Icons.LANDSCAPE ){
streamRate = streamRate == (9/16.0) ? (16/9.0) : (9/16.0)
updateWindowSize()
}
@@ -156,18 +189,29 @@
HStack{
LButton(text: "设备"){
print("Click 设备 button")
- withAnimation{
- showDeviceDialog.toggle()
+ showDeviceDialog.toggle()
+ }.sheet(isPresented:$showDeviceDialog, content: {
+ VStack {
+ ScrollView {
+ DialogDevices()
+ }
}
- }
+ .presentationDetents([.height(200),.medium])
+ })
LButton(text: "RTMP"){
-
+ print("Click RTMP button")
+ withAnimation{
+ showInputDialog.toggle()
+ }
}
/*flLButton(text: "文件"){
}*/
LButton(text: "文本"){
-
+ print("Click 文本 button")
+ withAnimation{
+ showInputDialog.toggle()
+ }
}
}
HStack{
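
Note on the @StateObject change above: a plain `let mViewModel` property would be
recreated every time SwiftUI re-initializes the LiveActivity struct, silently
dropping camera and encoder state, while @StateObject keeps one instance alive
for the view's lifetime and republishes pixelBuffer so the preview updates. A
minimal sketch of the difference (DemoView is hypothetical):

    import SwiftUI

    struct DemoView: View {
        @StateObject private var model = LiveViewModel() // created once per view identity
        // let model = LiveViewModel()                   // recreated on every view rebuild

        var body: some View {
            Text(model.pixelBuffer == nil ? "waiting for frames" : "live")
        }
    }
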
diff --git a/LiveProject/activity/stream/LiveViewModel.swift b/LiveProject/activity/stream/LiveViewModel.swift
index 347d761..ab408ed 100644
--- a/LiveProject/activity/stream/LiveViewModel.swift
+++ b/LiveProject/activity/stream/LiveViewModel.swift
@@ -7,22 +7,54 @@
import UIKit
import AVFoundation
-class LiveViewModel{
+class LiveViewModel: ObservableObject {
+ @Published var pixelBuffer: CVPixelBuffer?
+ let encoder = H264Encoder(width: 1080, height: 1920, fps: 30, bitrate: 1_000_000)
+ var frameIndex: Int64 = 0
+ let encodeQueue = DispatchQueue(label: "encoder.queue")
+
lazy var camera = CameraCapture()
- lazy var renderer = MetalRenderer()
+ var timestamp = Int(Date().timeIntervalSince1970 * 1000)
func newWindowAction(device:DeviceInfo,completion: @escaping (Bool) -> Void = {b in}){
switch device.type{
case StreamType.CAMERA:
requestCameraPermission(mediaType: .video){ staus in
if(staus){
- self.camera.onFrame = { buffer in
- self.renderer.updateFrame(pixelBuffer: buffer)
- print("画面更新")
+ self.camera.onFrame = { [weak self] buffer in
+ guard let self = self else { return }
+
+ let width = CVPixelBufferGetWidth(buffer)
+ let height = CVPixelBufferGetHeight(buffer)
+
+ guard width > 0 && height > 0 else {
+ print("Invalid pixel buffer size: \(width)x\(height)")
+ return
+ }
+
+ self.frameIndex += 1
+ let ts = Int(Date().timeIntervalSince1970 * 1000)
+
+ self.timestamp = ts;
+ let cmTime = CMTimeMake(value: Int64(CACurrentMediaTime() * 1000), timescale: 1000);
+ self.encoder.encode(pixelBuffer: buffer, pts: cmTime)
+ DispatchQueue.main.async {
+ self.pixelBuffer = buffer;
+ }
+ //print("画面更新")
}
- self.camera.start()
+ DispatchQueue.global(qos: .userInitiated).async {
+ self.camera.start()
+ }
print("启动相机")
+ self.encoder.onEncoded = { (data: Data, ctime: CMTime, isKey: Bool) in
+ let timestamp2 = Int(Date().timeIntervalSince1970 * 1000)
+ print("编码时间2 \(timestamp2 - self.timestamp)")
+ print("Encoded NALU size: \(data.count), key frame: \(isKey)")
+
+ }
}else{
}
@@ -33,6 +65,17 @@
break;
}
}
+
+ func closeWindowAction(device:DeviceInfo){
+ switch device.type{
+ case StreamType.CAMERA:
+ print("关闭相机")
+ break;
+ default:
+ break;
+ }
+ }
+
func requestCameraPermission(mediaType: AVMediaType,completion: @escaping (Bool) -> Void) {
@@ -51,4 +94,50 @@
completion(false)
}
}
+
+ func copyPixelBuffer(_ src: CVPixelBuffer) -> CVPixelBuffer? {
+ let width = CVPixelBufferGetWidth(src)
+ let height = CVPixelBufferGetHeight(src)
+ let pixelFormat = CVPixelBufferGetPixelFormatType(src)
+
+ var dst: CVPixelBuffer?
+ let attrs: [String: Any] = [
+ kCVPixelBufferIOSurfacePropertiesKey as String: [:]
+ ]
+
+ let status = CVPixelBufferCreate(
+ kCFAllocatorDefault,
+ width,
+ height,
+ pixelFormat,
+ attrs as CFDictionary,
+ &dst
+ )
+
+ guard status == kCVReturnSuccess, let dstBuffer = dst else {
+ print("❌ 复制 PixelBuffer 失败")
+ return nil
+ }
+
+ CVPixelBufferLockBaseAddress(src, .readOnly)
+ CVPixelBufferLockBaseAddress(dstBuffer, [])
+
+ let planeCount = CVPixelBufferGetPlaneCount(src)
+ for i in 0..<planeCount {
+ let srcAddr = CVPixelBufferGetBaseAddressOfPlane(src, i)
+ let dstAddr = CVPixelBufferGetBaseAddressOfPlane(dstBuffer, i)
+
+ let height = CVPixelBufferGetHeightOfPlane(src, i)
+ let bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(src, i)
+
+ memcpy(dstAddr, srcAddr, height * bytesPerRow)
+ }
+
+ CVPixelBufferUnlockBaseAddress(src, .readOnly)
+ CVPixelBufferUnlockBaseAddress(dstBuffer, [])
+
+ return dstBuffer
+ }
+
+
}
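
Note: copyPixelBuffer only iterates planes, so for a non-planar format such as
kCVPixelFormatType_32BGRA (plane count 0) it copies nothing. A hedged sketch of
the missing branch, assuming both buffers share format/dimensions and are
already locked as in the method above (helper name is hypothetical):

    import Foundation
    import CoreVideo

    func copyNonPlanarPixels(from src: CVPixelBuffer, to dst: CVPixelBuffer) {
        guard CVPixelBufferGetPlaneCount(src) == 0,
              var srcPtr = CVPixelBufferGetBaseAddress(src),
              var dstPtr = CVPixelBufferGetBaseAddress(dst) else { return }
        let srcStride = CVPixelBufferGetBytesPerRow(src)
        let dstStride = CVPixelBufferGetBytesPerRow(dst)
        let rowBytes = min(srcStride, dstStride) // strides may differ between buffers
        for _ in 0..<CVPixelBufferGetHeight(src) {
            memcpy(dstPtr, srcPtr, rowBytes)     // copy row by row, honoring each stride
            srcPtr += srcStride
            dstPtr += dstStride
        }
    }
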
diff --git a/LiveProject/controller/CameraCapture.swift b/LiveProject/controller/CameraCapture.swift
index 455b88a..3345602 100644
--- a/LiveProject/controller/CameraCapture.swift
+++ b/LiveProject/controller/CameraCapture.swift
@@ -8,6 +8,8 @@
class CameraCapture: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
private let session = AVCaptureSession()
+ private var videoOutput: AVCaptureVideoDataOutput?
+ private var input: AVCaptureDeviceInput?
var onFrame: ((CVPixelBuffer) -> Void)?
func start() {
@@ -17,8 +19,10 @@
return
}
+ self.input = input
+
session.beginConfiguration()
- session.sessionPreset = .high
+ session.sessionPreset = .hd1920x1080
if session.canAddInput(input) {
session.addInput(input)
@@ -34,14 +38,39 @@
session.addOutput(output)
}
+ self.videoOutput = output
+
session.commitConfiguration()
session.startRunning()
+ print("📷 相机已开启")
}
func captureOutput(_ output: AVCaptureOutput,
didOutput sampleBuffer: CMSampleBuffer,
from connection: AVCaptureConnection) {
guard let buffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
+ //let width = CVPixelBufferGetWidth(buffer)
+ //let height = CVPixelBufferGetHeight(buffer)
+ //print("📷 Current frame size: \(width)x\(height)")
onFrame?(buffer)
}
+
+ func stop(){
+ session.stopRunning()
+ session.beginConfiguration()
+
+ if let input = input {
+ session.removeInput(input)
+ }
+
+ if let output = videoOutput {
+ session.removeOutput(output)
+ }
+
+ session.commitConfiguration()
+
+ input = nil
+ videoOutput = nil
+ print("📷 相机已关闭")
+ }
}
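
Note: the hunk above does not show how `output` is created, and both
PixelBufferConverter and the CIImage path in MetalRenderer assume 32BGRA
frames. A sketch of an output configured under that assumption (function name
and queue label are hypothetical):

    import AVFoundation

    func makeVideoOutput(delegate: AVCaptureVideoDataOutputSampleBufferDelegate) -> AVCaptureVideoDataOutput {
        let output = AVCaptureVideoDataOutput()
        // Request BGRA so the converter and renderer get the format they expect.
        output.videoSettings = [
            kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
        ]
        // Drop late frames instead of queueing them when the encoder falls behind.
        output.alwaysDiscardsLateVideoFrames = true
        output.setSampleBufferDelegate(delegate, queue: DispatchQueue(label: "camera.frames"))
        return output
    }
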
diff --git a/LiveProject/data/DeviceInfo.swift b/LiveProject/data/DeviceInfo.swift
index 86d4ecf..ff6bf3f 100644
--- a/LiveProject/data/DeviceInfo.swift
+++ b/LiveProject/data/DeviceInfo.swift
@@ -10,7 +10,7 @@
let name:String
let type:StreamType
let deviceId:String;
- var icon : (any View)? = nil;
+ var icon : IconInfo? = nil;
func hash(into hasher: inout Hasher){
hasher.combine(deviceId)
diff --git a/LiveProject/data/IconInfo.swift b/LiveProject/data/IconInfo.swift
new file mode 100644
index 0000000..9a07bbd
--- /dev/null
+++ b/LiveProject/data/IconInfo.swift
@@ -0,0 +1,12 @@
+//
+// IconInfo.swift
+// LiveProject
+//
+// Created by 倪路朋 on 7/4/25.
+//
+import SwiftUI
+
+struct IconInfo{
+ var name:String
+ var size:CGSize = CGSize(width: 20, height: 20)
+}
diff --git a/LiveProject/enum/Icons.swift b/LiveProject/enum/Icons.swift
new file mode 100644
index 0000000..c1b1d7b
--- /dev/null
+++ b/LiveProject/enum/Icons.swift
@@ -0,0 +1,24 @@
+//
+// Icons.swift
+// LiveProject
+//
+// Created by 倪路朋 on 7/4/25.
+//
+import SwiftUI
+
+struct Icons{
+ static let CAMERA = IconInfo(name: "camera",size: CGSize(width: 25, height: 20))
+ static let MIC = IconInfo(name: "mic",size: CGSize(width: 15, height: 23))
+ static let MIC_MUTE = IconInfo(name: "mic.slash",size: CGSize(width: 20, height: 23))
+ static let PORTRAIT = IconInfo(name: "ipad",size: CGSize(width: 18, height: 23))
+ static let LANDSCAPE = IconInfo(name: "ipad.landscape",size: CGSize(width: 25, height: 20))
+ static let BACK = IconInfo(name: "arrow.left",size: CGSize(width: 25, height: 20))
+ static let SPEAKER = IconInfo(name: "speaker",size: CGSize(width: 18, height: 23))
+ static let SPEAKER_MUTE = IconInfo(name: "speaker.slash",size: CGSize(width: 18, height: 23))
+ static let IMAGE = IconInfo(name: "photo",size: CGSize(width: 25, height: 23))
+ static let IMAGE_MUTE = IconInfo(name: "photo.slash",size: CGSize(width: 25, height: 23))
+ static let ROTATE_LEFT = IconInfo(name: "rotate.left",size: CGSize(width: 25, height: 25))
+ static let ROTATE_RIGHT = IconInfo(name: "rotate.right",size: CGSize(width: 25, height: 25))
+ static let INFO = IconInfo(name: "info.circle",size: CGSize(width: 25, height: 25))
+ static let PAINT = IconInfo(name: "paintpalette",size: CGSize(width: 25, height: 25))
+}
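
A small usage sketch for these tables — rendering an IconInfo as an SF Symbol
image, the same pattern the MButton change below adopts (IconView is
hypothetical):

    import SwiftUI

    struct IconView: View {
        let info: IconInfo
        var body: some View {
            Image(systemName: info.name)
                .resizable()
                .aspectRatio(contentMode: .fit)
                .frame(width: info.size.width, height: info.size.height)
        }
    }

    // e.g. IconView(info: Icons.CAMERA)
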
diff --git a/LiveProject/shape/IconBack.swift b/LiveProject/shape/IconBack.swift
deleted file mode 100644
index 5d760f8..0000000
--- a/LiveProject/shape/IconBack.swift
+++ /dev/null
@@ -1,36 +0,0 @@
-//
-// IconBack.swift
-// LiveProject
-//
-// Created by 倪路朋 on 6/26/25.
-//
-
-import SwiftUI
-
-struct IconBack: Shape {
-
- func path(in rect: CGRect) -> Path {
- var path = Path()
-
- let arrowHeadLength = rect.width * 0.4
- let shaftY = rect.midY
- let headHeight = rect.height * 0.5
-
- // 箭头头部(三角形)
- path.move(to: CGPoint(x: arrowHeadLength, y: rect.minY))
- path.addLine(to: CGPoint(x: 0, y: shaftY))
- path.addLine(to: CGPoint(x: arrowHeadLength, y: rect.maxY))
-
- // 箭身(横线)
- path.move(to: CGPoint(x: 0, y: shaftY))
- path.addLine(to: CGPoint(x: rect.maxX, y: shaftY))
- return path
- }
-}
-
-struct IconBack_Previews : PreviewProvider{
- static var previews: some View {
- IconBack()
- .stroke(Color.primary, lineWidth: 3).frame(width: 30,height: 25)
- }
-}
diff --git a/LiveProject/shape/IconCamera.swift b/LiveProject/shape/IconCamera.swift
deleted file mode 100644
index 2bd8d01..0000000
--- a/LiveProject/shape/IconCamera.swift
+++ /dev/null
@@ -1,60 +0,0 @@
-//
-// IconCamera.swift
-// LiveProject
-//
-// Created by 倪路朋 on 6/27/25.
-//
-
-import SwiftUI
-
-struct IconCameraShape: Shape {
- func path(in rect: CGRect) -> Path {
- var path = Path()
-
- // 比例参数
- let cornerRadius = rect.height * 0.1
- let bodyInset = rect.height * 0.1
- let lensDiameter = rect.height * 0.3
- let flashSize = CGSize(width: rect.width * 0.08, height: rect.height * 0.08)
-
- // 相机机身(主圆角矩形)
- let bodyRect = rect.insetBy(dx: 0, dy: bodyInset)
- path.addRoundedRect(in: bodyRect, cornerSize: CGSize(width: cornerRadius, height: cornerRadius))
-
- // 镜头(中间大圆)
- let lensOrigin = CGPoint(
- x: rect.midX - lensDiameter / 2,
- y: rect.midY - lensDiameter / 2
- )
- let lensRect = CGRect(origin: lensOrigin, size: CGSize(width: lensDiameter, height: lensDiameter))
- path.addEllipse(in: lensRect)
-
- // 闪光灯(机身内的小圆点)
- let flashOrigin = CGPoint(
- x: bodyRect.maxX - flashSize.width * 1.5,
- y: bodyRect.minY + flashSize.height * 1.5
- )
- let flashRect = CGRect(origin: flashOrigin, size: flashSize)
- path.addEllipse(in: flashRect)
-
- return path
- }
-}
-
-struct IconCamera: View {
- var color: Color = .white
- var size: CGSize = CGSize(width: 20, height:20)
- var lineWidth: CGFloat = 2.0
-
- var body: some View {
- IconCameraShape()
- .stroke(color, lineWidth: lineWidth)
- .frame(width: size.width, height: size.height)
- }
-}
-
-struct IconCamera_Previews : PreviewProvider{
- static var previews: some View {
- IconCamera(color: .black).background(.red)
- }
-}
diff --git a/LiveProject/shape/IconLandscape.swift b/LiveProject/shape/IconLandscape.swift
deleted file mode 100644
index 793c0d8..0000000
--- a/LiveProject/shape/IconLandscape.swift
+++ /dev/null
@@ -1,27 +0,0 @@
-//
-// IconLand.swift
-// LiveProject
-//
-// Created by 倪路朋 on 6/25/25.
-//
-
-
-import SwiftUI
-
-struct IconLandscape: View {
- var color: Color = .white
- var size: CGSize = CGSize(width: 20, height:15)
- var lineWidth: CGFloat = 2.0
-
- var body: some View {
- IconRect()
- .stroke(color, lineWidth: lineWidth)
- .frame(width: size.width, height: size.height)
- }
-}
-
-struct IconLandscape_Previews : PreviewProvider{
- static var previews: some View {
- IconLandscape(color: .black,size: CGSize(width: 90, height: 60))
- }
-}
diff --git a/LiveProject/shape/IconMic.swift b/LiveProject/shape/IconMic.swift
deleted file mode 100644
index 91aec35..0000000
--- a/LiveProject/shape/IconMic.swift
+++ /dev/null
@@ -1,63 +0,0 @@
-//
-// IconMic.swift
-// LiveProject
-//
-// Created by 倪路朋 on 6/27/25.
-//
-import SwiftUI
-
-struct IconMicShape: Shape {
- func path(in rect: CGRect) -> Path {
- var path = Path()
-
- // 计算容器高度(麦克风主体 + 支架 + 底座 + 一点留白)
- let micHeight = rect.height * 0.5
- let stemHeight = rect.height * 0.1
- let baseHeight = rect.height * 0.02
- let containerHeight = micHeight + stemHeight + baseHeight
-
- // 计算顶部偏移量,确保垂直居中
- let topOffset = (rect.height - containerHeight) / 2
-
- // 主体:麦克风(居中)
- let micWidth = rect.width * 0.3
- let micRect = CGRect(
- x: rect.midX - micWidth / 2,
- y: topOffset,
- width: micWidth,
- height: micHeight
- )
- path.addRoundedRect(in: micRect, cornerSize: CGSize(width: micWidth / 2, height: micWidth / 2))
-
- // 支架
- let stemTop = CGPoint(x: rect.midX, y: micRect.maxY)
- let stemBottom = CGPoint(x: rect.midX, y: stemTop.y + stemHeight)
- path.move(to: stemTop)
- path.addLine(to: stemBottom)
-
- // 底座
- let baseWidth = rect.width * 0.3
- path.move(to: CGPoint(x: rect.midX - baseWidth / 2, y: stemBottom.y))
- path.addLine(to: CGPoint(x: rect.midX + baseWidth / 2, y: stemBottom.y))
-
- return path
- }
-}
-struct IconMic: View {
- var color: Color = .white
- var size: CGSize = CGSize(width: 20, height:30)
- var lineWidth: CGFloat = 2.0
-
- var body: some View {
- IconMicShape()
- .stroke(color, lineWidth: lineWidth)
- .frame(width: size.width, height: size.height)
- }
-}
-
-
-struct IconMic_Previews : PreviewProvider{
- static var previews: some View {
- IconMic(color: .black).background(.red)
- }
-}
diff --git a/LiveProject/shape/IconPortrait.swift b/LiveProject/shape/IconPortrait.swift
deleted file mode 100644
index 8aa380b..0000000
--- a/LiveProject/shape/IconPortrait.swift
+++ /dev/null
@@ -1,28 +0,0 @@
-//
-// IconPortrait.swift
-// LiveProject
-//
-// Created by 倪路朋 on 6/25/25.
-//
-
-import SwiftUI
-
-struct IconPortrait: View {
- var color: Color = .white
- var size: CGSize = CGSize(width: 15, height:20)
- var lineWidth: CGFloat = 2.0
- var padding = EdgeInsets(top: 0, leading: 2.5, bottom: 0, trailing: 2.5)
-
- var body: some View {
- IconRect()
- .stroke(color, lineWidth: lineWidth)
- .frame(width: size.width, height: size.height)
- .padding(padding)
- }
-}
-
-struct IconPortrait_Previews : PreviewProvider{
- static var previews: some View {
- IconPortrait(color: .black,size: CGSize(width: 60, height: 90))
- }
-}
diff --git a/LiveProject/shape/IconRect.swift b/LiveProject/shape/IconRect.swift
deleted file mode 100644
index c54307f..0000000
--- a/LiveProject/shape/IconRect.swift
+++ /dev/null
@@ -1,22 +0,0 @@
-//
-// File.swift
-// LiveProject
-//
-// Created by 倪路朋 on 6/25/25.
-//
-
-import Foundation
-import SwiftUI
-
-struct IconRect: Shape {
-
- func path(in rect: CGRect) -> Path {
- var path = Path()
-
- let cornerRadius = rect.width * 0.08
- let borderRect = CGRect(origin: .zero, size: rect.size)
- path.addRoundedRect(in: borderRect, cornerSize: CGSize(width: cornerRadius, height: cornerRadius))
-
- return path
- }
-}
diff --git a/LiveProject/tool/CameraHelper.swift b/LiveProject/tool/CameraHelper.swift
deleted file mode 100644
index 299f2ce..0000000
--- a/LiveProject/tool/CameraHelper.swift
+++ /dev/null
@@ -1,12 +0,0 @@
-//
-// CameraHelper.swift
-// LiveProject
-//
-// Created by 倪路朋 on 6/27/25.
-//
-import UIKit
-import AVFoundation
-
-class CameraHelper{
-
-}
diff --git a/LiveProject/tool/H264Encoder.swift b/LiveProject/tool/H264Encoder.swift
new file mode 100644
index 0000000..8413eca
--- /dev/null
+++ b/LiveProject/tool/H264Encoder.swift
@@ -0,0 +1,166 @@
+//
+// H264Encoder.swift
+// LiveProject
+//
+// Created by 倪路朋 on 6/28/25.
+//
+import Foundation
+import AVFoundation
+import VideoToolbox
+
+class H264Encoder {
+ private var session: VTCompressionSession?
+ private let width: Int
+ private let height: Int
+ private let fps: Int
+ private let bitrate: Int
+
+ private let converter : PixelBufferConverter = PixelBufferConverter()
+
+ var onEncoded: ((Data, CMTime, Bool) -> Void)?
+
+ init(width: Int, height: Int, fps: Int, bitrate: Int) {
+ self.width = width
+ self.height = height
+ self.fps = fps
+ self.bitrate = bitrate
+ setupSession()
+ }
+
+ private func setupSession() {
+ let status = VTCompressionSessionCreate(
+ allocator: nil,
+ width: Int32(width),
+ height: Int32(height),
+ codecType: kCMVideoCodecType_H264,
+ encoderSpecification: nil,
+ imageBufferAttributes: nil,
+ compressedDataAllocator: nil,
+ outputCallback: encodeCallback,
+ // Pass self unretained: the callback fires once per frame, so a retained
+ // refcon with a per-frame release would over-release the encoder. The
+ // encoder must outlive the session; call invalidate() before discarding it.
+ refcon: UnsafeMutableRawPointer(Unmanaged.passUnretained(self).toOpaque()),
+ compressionSessionOut: &session
+ )
+
+ guard status == noErr, let session = session else {
+ print("❌ Failed to create session: \(status)")
+ return
+ }
+
+ VTSessionSetProperty(session, key: kVTCompressionPropertyKey_RealTime, value: kCFBooleanTrue)
+ VTSessionSetProperty(session, key: kVTCompressionPropertyKey_AverageBitRate, value: bitrate as CFTypeRef)
+ let frameInterval = Int(fps)
+ VTSessionSetProperty(session, key: kVTCompressionPropertyKey_MaxKeyFrameInterval, value: frameInterval as CFTypeRef)
+ VTSessionSetProperty(session, key: kVTCompressionPropertyKey_ExpectedFrameRate, value: frameInterval as CFTypeRef)
+
+ VTCompressionSessionPrepareToEncodeFrames(session)
+ }
+
+ func encode(pixelBuffer: CVPixelBuffer, pts: CMTime) {
+ guard let session = session else {
+ print("❌ Session is nil")
+ return
+ }
+
+ let format = CVPixelBufferGetPixelFormatType(pixelBuffer)
+ // VideoToolbox H.264 expects a 4:2:0 bi-planar (NV12) input:
+ // kCVPixelFormatType_420YpCbCr8BiPlanarFullRange or ...VideoRange.
+ print("input format: \(decodeOSType(format))")
+
+ if let buffer = converter.convertBGRAtoNV12(pixelBuffer) {
+ print("converter \(decodeOSType(CVPixelBufferGetPixelFormatType(buffer)))")
+ let timestamp = CMTimeMake(value: Int64(CACurrentMediaTime() * 1000), timescale: 1000)
+
+ var flags = VTEncodeInfoFlags()
+ let status = VTCompressionSessionEncodeFrame(
+ session,
+ imageBuffer:buffer ,
+ presentationTimeStamp: pts,
+ duration: .invalid,
+ frameProperties: nil,
+ sourceFrameRefcon: nil,
+ infoFlagsOut: &flags
+ )
+ if status != noErr {
+ print("❌ Encoding failed: \(status)")
+ }
+ }
+
+ }
+
+ func finish() {
+ guard let session = session else { return }
+ VTCompressionSessionCompleteFrames(session, untilPresentationTimeStamp: .invalid)
+ }
+
+ func invalidate() {
+ guard let session = session else { return }
+ VTCompressionSessionInvalidate(session)
+ self.session = nil
+ }
+
+ func decodeOSType(_ format: OSType) -> String {
+ let characters = [
+ UInt8((format >> 24) & 0xFF),
+ UInt8((format >> 16) & 0xFF),
+ UInt8((format >> 8) & 0xFF),
+ UInt8(format & 0xFF)
+ ]
+ return String(bytes: characters, encoding: .ascii) ?? "????"
+ }
+}
+
+// MARK: - VideoToolbox Callback
+
+private func encodeCallback(
+ outputCallbackRefCon: UnsafeMutableRawPointer?,
+ sourceFrameRefCon: UnsafeMutableRawPointer?,
+ status: OSStatus,
+ infoFlags: VTEncodeInfoFlags,
+ sampleBuffer: CMSampleBuffer?
+) {
+ guard
+ status == noErr,
+ let sampleBuffer = sampleBuffer,
+ CMSampleBufferDataIsReady(sampleBuffer),
+ let ref = outputCallbackRefCon
+ else { return }
+
+ let encoder = Unmanaged<H264Encoder>.fromOpaque(ref).takeUnretainedValue()
+
+ guard let blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer) else { return }
+
+ var length = 0
+ var dataPointer: UnsafeMutablePointer<Int8>?
+ guard CMBlockBufferGetDataPointer(blockBuffer, atOffset: 0, lengthAtOffsetOut: nil, totalLengthOut: &length, dataPointerOut: &dataPointer) == noErr else {
+ return
+ }
+
+ let data = Data(bytes: dataPointer!, count: length)
+
+ var isKeyframe = true
+ if let attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, createIfNecessary: false),
+ let dict = CFArrayGetValueAtIndex(attachments, 0) {
+ let d = unsafeBitCast(dict, to: CFDictionary.self) as NSDictionary
+ if let notSync = d[kCMSampleAttachmentKey_NotSync] as? Bool {
+ isKeyframe = !notSync
+ }
+ }
+
+ let pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
+ encoder.onEncoded?(data, pts, isKeyframe)
+
+ // No release here: refcon was passed unretained (see setupSession), and
+ // this callback runs once per encoded frame.
+}
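
Note: the callback hands onEncoded the block buffer as-is, i.e. AVCC layout
(each NALU prefixed with a 4-byte big-endian length) with no SPS/PPS included.
An RTMP/FLV or MP4 muxer also needs the parameter sets, which live in the
sample's format description; a hedged sketch of extracting them (helper name
is hypothetical):

    import Foundation
    import CoreMedia

    func parameterSets(from sampleBuffer: CMSampleBuffer) -> (sps: Data, pps: Data)? {
        guard let format = CMSampleBufferGetFormatDescription(sampleBuffer) else { return nil }
        var spsPointer: UnsafePointer<UInt8>?; var spsLength = 0
        var ppsPointer: UnsafePointer<UInt8>?; var ppsLength = 0
        // Index 0 is the SPS, index 1 the PPS.
        guard CMVideoFormatDescriptionGetH264ParameterSetAtIndex(
                format, parameterSetIndex: 0, parameterSetPointerOut: &spsPointer,
                parameterSetSizeOut: &spsLength, parameterSetCountOut: nil,
                nalUnitHeaderLengthOut: nil) == noErr,
              CMVideoFormatDescriptionGetH264ParameterSetAtIndex(
                format, parameterSetIndex: 1, parameterSetPointerOut: &ppsPointer,
                parameterSetSizeOut: &ppsLength, parameterSetCountOut: nil,
                nalUnitHeaderLengthOut: nil) == noErr,
              let sps = spsPointer, let pps = ppsPointer else { return nil }
        return (Data(bytes: sps, count: spsLength), Data(bytes: pps, count: ppsLength))
    }
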
diff --git a/LiveProject/tool/MetalPixelConverter.swift b/LiveProject/tool/MetalPixelConverter.swift
new file mode 100644
index 0000000..23a00d9
--- /dev/null
+++ b/LiveProject/tool/MetalPixelConverter.swift
@@ -0,0 +1,227 @@
+//
+// MetalPixelConverter.swift
+// LiveProject
+//
+// Created by 倪路朋 on 7/1/25.
+//
+
+import Metal
+import MetalKit
+import CoreVideo
+
+final class MetalPixelConverter {
+
+ // MARK: - Properties
+ private let device: MTLDevice
+ private var textureCache: CVMetalTextureCache?
+ private var commandQueue: MTLCommandQueue?
+ private var computePipeline: MTLComputePipelineState?
+
+ // MARK: - Initialization
+ init?(metalDevice: MTLDevice? = MTLCreateSystemDefaultDevice()) {
+ guard let device = metalDevice else {
+ print("⚠️ Metal 不可用")
+ return nil
+ }
+ self.device = device
+
+ // 1. Create the texture cache
+ guard CVMetalTextureCacheCreate(
+ kCFAllocatorDefault,
+ nil,
+ device,
+ nil,
+ &textureCache
+ ) == kCVReturnSuccess else {
+ print("❌ 创建 Metal 纹理缓存失败")
+ return nil
+ }
+
+ // 2. Create the command queue
+ self.commandQueue = device.makeCommandQueue()
+
+ // 3. Load the compute shader
+ do {
+ self.computePipeline = try makeComputePipeline(device: device)
+ } catch {
+ print("❌ Failed to load the compute shader: \(error)")
+ return nil
+ }
+ }
+
+ // MARK: - Public Methods
+ /// Converts a BGRA CVPixelBuffer to an NV12 CVPixelBuffer
+ func convertBGRAtoNV12(
+ _ inputBuffer: CVPixelBuffer,
+ completion: @escaping (Result<CVPixelBuffer, Error>) -> Void
+ ) {
+ // 0. Validate the input format
+ guard CVPixelBufferGetPixelFormatType(inputBuffer) == kCVPixelFormatType_32BGRA else {
+ completion(.failure(ConversionError.invalidInputFormat))
+ return
+ }
+
+ // 1. Create the output NV12 buffer
+ let (width, height) = (CVPixelBufferGetWidth(inputBuffer), CVPixelBufferGetHeight(inputBuffer))
+ guard let outputBuffer = createNV12PixelBuffer(width: width, height: height) else {
+ completion(.failure(ConversionError.outputBufferCreationFailed))
+ return
+ }
+
+ // 2. Process asynchronously (avoids blocking the main thread)
+ DispatchQueue.global(qos: .userInitiated).async { [weak self] in
+ guard let self = self else { return }
+
+ do {
+ let result = try self.performConversion(
+ inputBuffer: inputBuffer,
+ outputBuffer: outputBuffer
+ )
+ DispatchQueue.main.async {
+ completion(.success(result))
+ }
+ } catch {
+ DispatchQueue.main.async {
+ completion(.failure(error))
+ }
+ }
+ }
+ }
+
+ // MARK: - Private Methods
+ private func makeComputePipeline(device: MTLDevice) throws -> MTLComputePipelineState {
+ // 1. Get the default Metal library (requires a .metal file in the project)
+ let library = try device.makeDefaultLibrary(bundle: Bundle(for: Self.self))
+
+ // 2. Load the kernel function
+ guard let kernelFunction = library.makeFunction(name: "bgraToNV12") else {
+ throw ConversionError.shaderNotFound
+ }
+
+ // 3. Create the compute pipeline
+ return try device.makeComputePipelineState(function: kernelFunction)
+ }
+
+ private func createNV12PixelBuffer(width: Int, height: Int) -> CVPixelBuffer? {
+ var pixelBuffer: CVPixelBuffer?
+ let status = CVPixelBufferCreate(
+ kCFAllocatorDefault,
+ width,
+ height,
+ kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
+ nil,
+ &pixelBuffer
+ )
+ return status == kCVReturnSuccess ? pixelBuffer : nil
+ }
+
+ private func performConversion(
+ inputBuffer: CVPixelBuffer,
+ outputBuffer: CVPixelBuffer
+ ) throws -> CVPixelBuffer {
+ // 1. Lock the buffers
+ CVPixelBufferLockBaseAddress(inputBuffer, .readOnly)
+ CVPixelBufferLockBaseAddress(outputBuffer, [])
+ defer {
+ CVPixelBufferUnlockBaseAddress(inputBuffer, .readOnly)
+ CVPixelBufferUnlockBaseAddress(outputBuffer, [])
+ }
+
+ // 2. Create the Metal textures
+ guard let textureCache = textureCache,
+ let inputTexture = createMetalTexture(
+ from: inputBuffer,
+ pixelFormat: .bgra8Unorm,
+ textureCache: textureCache
+ ),
+ let yTexture = createMetalTexture(
+ from: outputBuffer,
+ planeIndex: 0,
+ pixelFormat: .r8Unorm,
+ textureCache: textureCache
+ ),
+ let uvTexture = createMetalTexture(
+ from: outputBuffer,
+ planeIndex: 1,
+ pixelFormat: .rg8Unorm,
+ textureCache: textureCache
+ ) else {
+ throw ConversionError.textureCreationFailed
+ }
+
+ // 3. Encode the Metal compute pass
+ guard let commandBuffer = commandQueue?.makeCommandBuffer(),
+ let encoder = commandBuffer.makeComputeCommandEncoder() else {
+ throw ConversionError.metalCommandFailed
+ }
+
+ encoder.setComputePipelineState(computePipeline!)
+ encoder.setTexture(inputTexture, index: 0)
+ encoder.setTexture(yTexture, index: 1)
+ encoder.setTexture(uvTexture, index: 2)
+
+ // 4. Dispatch the compute grid
+ let threadgroupSize = MTLSize(width: 16, height: 16, depth: 1)
+ let threadgroupCount = MTLSize(
+ width: (inputTexture.width + threadgroupSize.width - 1) / threadgroupSize.width,
+ height: (inputTexture.height + threadgroupSize.height - 1) / threadgroupSize.height,
+ depth: 1
+ )
+ encoder.dispatchThreadgroups(threadgroupCount, threadsPerThreadgroup: threadgroupSize)
+ encoder.endEncoding()
+
+ // 5. Commit and wait for completion
+ commandBuffer.commit()
+ commandBuffer.waitUntilCompleted()
+
+ return outputBuffer
+ }
+
+ private func createMetalTexture(
+ from pixelBuffer: CVPixelBuffer,
+ planeIndex: Int = 0,
+ pixelFormat: MTLPixelFormat,
+ textureCache: CVMetalTextureCache
+ ) -> MTLTexture? {
+ let width = CVPixelBufferGetWidthOfPlane(pixelBuffer, planeIndex)
+ let height = CVPixelBufferGetHeightOfPlane(pixelBuffer, planeIndex)
+
+ var cvMetalTexture: CVMetalTexture?
+ let status = CVMetalTextureCacheCreateTextureFromImage(
+ nil,
+ textureCache,
+ pixelBuffer,
+ nil,
+ pixelFormat,
+ width,
+ height,
+ planeIndex,
+ &cvMetalTexture
+ )
+
+ guard status == kCVReturnSuccess, let texture = cvMetalTexture else {
+ return nil
+ }
+
+ return CVMetalTextureGetTexture(texture)
+ }
+
+ // MARK: - Error Handling
+ enum ConversionError: Error, LocalizedError {
+ case invalidInputFormat
+ case outputBufferCreationFailed
+ case shaderNotFound
+ case textureCreationFailed
+ case metalCommandFailed
+
+ var errorDescription: String? {
+ switch self {
+ case .invalidInputFormat: return "Input format must be BGRA"
+ case .outputBufferCreationFailed: return "Failed to create the NV12 output buffer"
+ case .shaderNotFound: return "Metal shader not found"
+ case .textureCreationFailed: return "Failed to create a Metal texture"
+ case .metalCommandFailed: return "Metal command execution failed"
+ }
+ }
+ }
+}
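
Note: unlike the Core Image-based PixelBufferConverter, this class is
asynchronous and requires a `bgraToNV12` compute kernel in a .metal file in
the bundle; nothing in this patch wires it into the encode path yet. A usage
sketch:

    import CoreVideo

    let metalConverter = MetalPixelConverter()

    func convert(_ bgraBuffer: CVPixelBuffer) {
        metalConverter?.convertBGRAtoNV12(bgraBuffer) { result in
            switch result {
            case .success(let nv12):
                print("NV12 ready: \(CVPixelBufferGetWidth(nv12))x\(CVPixelBufferGetHeight(nv12))")
            case .failure(let error):
                print("conversion failed: \(error)")
            }
        }
    }
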
diff --git a/LiveProject/tool/MetalRenderer.swift b/LiveProject/tool/MetalRenderer.swift
index a2aa4ba..61bd769 100644
--- a/LiveProject/tool/MetalRenderer.swift
+++ b/LiveProject/tool/MetalRenderer.swift
@@ -4,68 +4,99 @@
// 渲染工具
// Created by 倪路朋 on 6/26/25.
//
+import Foundation
+import Metal
import MetalKit
+import AVFoundation
class MetalRenderer: NSObject, MTKViewDelegate {
- private var device: MTLDevice!
- private var commandQueue: MTLCommandQueue!
- private var textureCache: CVMetalTextureCache!
+ private let device: MTLDevice
+ private let commandQueue: MTLCommandQueue
+ private let ciContext: CIContext
+
private var currentPixelBuffer: CVPixelBuffer?
+ private let textureCache: CVMetalTextureCache
- func setup(view: MTKView) {
- self.device = view.device
- self.commandQueue = device.makeCommandQueue()
- CVMetalTextureCacheCreate(nil, nil, device, nil, &textureCache)
+ init(mtkView: MTKView) {
+ guard let device = MTLCreateSystemDefaultDevice(),
+ let commandQueue = device.makeCommandQueue() else {
+ fatalError("Unable to create Metal device or command queue")
+ }
+
+ self.device = device
+ self.commandQueue = commandQueue
+ self.ciContext = CIContext(mtlDevice: device)
+
+ var tmpCache: CVMetalTextureCache?
+ CVMetalTextureCacheCreate(nil, nil, device, nil, &tmpCache)
+ guard let textureCache = tmpCache else {
+ fatalError("Unable to create texture cache")
+ }
+ self.textureCache = textureCache
+
+ super.init()
+
+ // ✅ Key setup: the view drives rendering through its delegate
+ mtkView.device = device
+ mtkView.framebufferOnly = false
+ mtkView.isPaused = false
+ mtkView.enableSetNeedsDisplay = false
+ mtkView.delegate = self
+ print("MetalRenderer init")
}
-
- func updateFrame(pixelBuffer: CVPixelBuffer) {
- self.currentPixelBuffer = pixelBuffer
- }
-
- func mtkView(_ view: MTKView, drawableSizeWillChange size: CGSize) {}
func draw(in view: MTKView) {
- guard let drawable = view.currentDrawable,
- let descriptor = view.currentRenderPassDescriptor,
- let pixelBuffer = currentPixelBuffer else { return }
+ let size = view.drawableSize
+ //print("🧾 drawableSize = \(size)")
- var textureRef: CVMetalTexture?
- let width = CVPixelBufferGetWidth(pixelBuffer)
- let height = CVPixelBufferGetHeight(pixelBuffer)
-
- let status = CVMetalTextureCacheCreateTextureFromImage(
- nil, textureCache, pixelBuffer, nil,
- .bgra8Unorm, width, height, 0, &textureRef)
-
- guard status == kCVReturnSuccess,
- let cvTexture = textureRef,
- let texture = CVMetalTextureGetTexture(cvTexture) else { return }
-
- let commandBuffer = commandQueue.makeCommandBuffer()!
- let encoder = commandBuffer.makeRenderCommandEncoder(descriptor: descriptor)!
- encoder.setFragmentTexture(texture, index: 0)
- encoder.endEncoding()
-
- // 简单拷贝(不做 shader 处理)
- let blitEncoder = commandBuffer.makeBlitCommandEncoder()!
- let dstTexture = drawable.texture
- if dstTexture.width != texture.width || dstTexture.height != texture.height {
- print("❌ 尺寸不一致,无法 blit:src = \(texture.width)x\(texture.height), dst = \(dstTexture.width)x\(dstTexture.height)")
+ if !size.width.isFinite || !size.height.isFinite || size.width <= 0 || size.height <= 0 {
+ print("❌ 非法尺寸,跳过渲染 \(size)")
return
}
- blitEncoder.copy(from: texture,
- sourceSlice: 0,
- sourceLevel: 0,
- sourceOrigin: MTLOrigin(x: 0, y: 0, z: 0),
- sourceSize: MTLSize(width: width, height: height, depth: 1),
+ guard let drawable = view.currentDrawable,
+ let commandBuffer = commandQueue.makeCommandBuffer(),
+ let pixelBuffer = currentPixelBuffer else {
+ return
+ }
+
+ let drawableSize = view.drawableSize
+ guard drawableSize.width > 0, drawableSize.height > 0 else { return }
+
+ // Orientation fix: rotate 90° clockwise
+ let ciImage = CIImage(cvPixelBuffer: pixelBuffer).oriented(.right)
+
+ // Scale uniformly, then center
+ let sourceExtent = ciImage.extent
+ let scaleX = drawableSize.width / sourceExtent.width
+ let scaleY = drawableSize.height / sourceExtent.height
+ let scale = min(scaleX, scaleY)
+
+ let scaledImage = ciImage.transformed(by: CGAffineTransform(scaleX: scale, y: scale))
+
+ let xOffset = (drawableSize.width - scaledImage.extent.width) / 2
+ let yOffset = (drawableSize.height - scaledImage.extent.height) / 2
+ let translatedImage = scaledImage.transformed(by: CGAffineTransform(translationX: xOffset, y: yOffset))
+
+ // Render
+ ciContext.render(translatedImage,
to: drawable.texture,
- destinationSlice: 0,
- destinationLevel: 0,
- destinationOrigin: MTLOrigin(x: 0, y: 0, z: 0))
- blitEncoder.endEncoding()
+ commandBuffer: commandBuffer,
+ bounds: CGRect(origin: .zero, size: drawableSize),
+ colorSpace: CGColorSpaceCreateDeviceRGB())
commandBuffer.present(drawable)
commandBuffer.commit()
- print("绘制画面")
+ //print("绘制画面")
+ }
+
+ func mtkView(_ view: MTKView, drawableSizeWillChange size: CGSize) {
+ // No-op: Handle size change if needed
+ }
+
+ func display(pixelBuffer: CVPixelBuffer) {
+ self.currentPixelBuffer = pixelBuffer
+ //print("display")
+ //刷新
+
}
}
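
Note: the view is left unpaused, so draw(in:) runs at the display refresh rate
and re-renders the last buffer even when no new camera frame has arrived. A
hedged sketch of an on-demand alternative, assuming the renderer keeps a weak
reference to its view (class name is hypothetical):

    import MetalKit
    import CoreVideo

    final class OnDemandRenderer: NSObject {
        private weak var mtkView: MTKView?
        private var currentPixelBuffer: CVPixelBuffer?

        init(mtkView: MTKView) {
            self.mtkView = mtkView
            super.init()
            mtkView.isPaused = true               // no continuous redraw loop
            mtkView.enableSetNeedsDisplay = true  // draw only when asked
        }

        func display(pixelBuffer: CVPixelBuffer) {
            currentPixelBuffer = pixelBuffer
            mtkView?.setNeedsDisplay()            // schedule exactly one draw
        }
    }
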
diff --git a/LiveProject/tool/PixelBufferConverter.swift b/LiveProject/tool/PixelBufferConverter.swift
new file mode 100644
index 0000000..bd88d1f
--- /dev/null
+++ b/LiveProject/tool/PixelBufferConverter.swift
@@ -0,0 +1,44 @@
+//
+// PixelBufferConverter.swift
+// LiveProject
+//
+// Created by 倪路朋 on 6/30/25.
+//
+
+import Foundation
+import CoreImage
+import CoreVideo
+
+
+class PixelBufferConverter {
+ private let ciContext = CIContext()
+
+ func convertBGRAtoNV12(_ srcBuffer: CVPixelBuffer) -> CVPixelBuffer? {
+ let width = CVPixelBufferGetWidth(srcBuffer)
+ let height = CVPixelBufferGetHeight(srcBuffer)
+
+ let attrs: [CFString: Any] = [
+ kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,
+ kCVPixelBufferWidthKey: width,
+ kCVPixelBufferHeightKey: height,
+ kCVPixelBufferIOSurfacePropertiesKey: [:]
+ ]
+
+ var dstBuffer: CVPixelBuffer?
+ let status = CVPixelBufferCreate(nil, width, height, kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, attrs as CFDictionary, &dstBuffer)
+
+ guard status == kCVReturnSuccess, let output = dstBuffer else {
+ print("❌ 创建 NV12 失败,状态: \(status)")
+ return nil
+ }
+
+ // Render: Core Image converts BGRA to the buffer's NV12 format here
+ let ciImage = CIImage(cvPixelBuffer: srcBuffer)
+
+ CVPixelBufferLockBaseAddress(output, [])
+ ciContext.render(ciImage, to: output)
+ CVPixelBufferUnlockBaseAddress(output, [])
+
+ return output
+ }
+}
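
Note: ciContext.render(_:to:) performs the BGRA→YCbCr conversion here because
the destination buffer is NV12. A quick sanity-check sketch of the expected
bi-planar layout (full-size Y plane, half-height interleaved CbCr plane):

    import CoreVideo

    func verifyNV12(_ buffer: CVPixelBuffer) {
        assert(CVPixelBufferGetPlaneCount(buffer) == 2)
        let yHeight = CVPixelBufferGetHeightOfPlane(buffer, 0)
        let uvHeight = CVPixelBufferGetHeightOfPlane(buffer, 1)
        assert(uvHeight == yHeight / 2)
        print("Y plane: \(CVPixelBufferGetWidthOfPlane(buffer, 0))x\(yHeight), "
            + "CbCr plane: \(CVPixelBufferGetWidthOfPlane(buffer, 1))x\(uvHeight)")
    }
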
diff --git a/LiveProject/views/FlowLayout.swift b/LiveProject/views/FlowLayout.swift
index 151cd68..038d158 100644
--- a/LiveProject/views/FlowLayout.swift
+++ b/LiveProject/views/FlowLayout.swift
@@ -5,163 +5,89 @@
// Created by 倪路朋 on 6/26/25.
//
import SwiftUI
-import SwiftUI
-/// 完全自定义的自动换行布局容器
-struct FlowLayout<Data: RandomAccessCollection, Content: View>: View where Data.Element: Hashable {
- // MARK: - 属性
-
- /// 要显示的数据集合
- let data: Data
- /// 水平间距
- let horizontalSpacing: CGFloat
- /// 垂直间距
- let verticalSpacing: CGFloat
- /// 对齐方式
- let alignment: HorizontalAlignment
- /// 内容构建闭包
- let content: (Data.Element) -> Content
- /// 总高度状态
- @State private var totalHeight: CGFloat = 0
-
- // MARK: - 初始化
-
- /// 初始化FlowLayout
- /// - Parameters:
- /// - data: 要显示的数据集合
- /// - horizontalSpacing: 水平间距,默认为8
- /// - verticalSpacing: 垂直间距,默认为8
- /// - alignment: 对齐方式,默认为.leading
- /// - content: 内容构建闭包
- init(
- _ data: Data,
- horizontalSpacing: CGFloat = 8,
- verticalSpacing: CGFloat = 8,
- alignment: HorizontalAlignment = .leading,
- @ViewBuilder content: @escaping (Data.Element) -> Content
- ) {
- self.data = data
- self.horizontalSpacing = horizontalSpacing
- self.verticalSpacing = verticalSpacing
- self.alignment = alignment
- self.content = content
- }
-
- // MARK: - 主体视图
-
- var body: some View {
- GeometryReader { geometry in
- self.contentView(in: geometry)
- .background(
- HeightReader(height: $totalHeight)
- )
- }
- .frame(height: totalHeight)
- }
-
- // MARK: - 私有方法
-
- /// 构建内容视图
- private func contentView(in geometry: GeometryProxy) -> some View {
+struct FlowLayout: Layout {
+ var spacing: CGFloat = 8
+ var lineSpacing: CGFloat = 8
+
+ func sizeThatFits(proposal: ProposedViewSize, subviews: Subviews, cache: inout ()) -> CGSize {
var width: CGFloat = 0
var height: CGFloat = 0
- var lastHeight: CGFloat = 0
-
- return ZStack(alignment: Alignment(horizontal: alignment, vertical: .top)) {
- ForEach(data.map { $0 }, id: \.self) { item in
- content(item)
- .padding(.trailing, horizontalSpacing)
- .padding(.bottom, verticalSpacing)
- .alignmentGuide(.leading) { dimensions in
- // 检查是否需要换行
- if abs(width - dimensions.width) > geometry.size.width {
- width = 0
- height += lastHeight + verticalSpacing
- }
-
- let result = width
-
- // 更新宽度计算
- if item == data.last {
- width = 0 // 重置为0,最后一项
- } else {
- width -= dimensions.width + horizontalSpacing
- }
-
- // 记录当前行高度
- lastHeight = dimensions.height
- return result
- }
- .alignmentGuide(.top) { dimensions in
- let result = height
-
- // 如果是最后一项,更新总高度
- if item == data.last {
- height += lastHeight + verticalSpacing
- }
-
- return result
- }
+ var currentLineWidth: CGFloat = 0
+ var currentLineHeight: CGFloat = 0
+ let maxWidth = proposal.width ?? .infinity
+
+ for view in subviews {
+ let size = view.sizeThatFits(.unspecified)
+ if currentLineWidth + size.width > maxWidth {
+ width = max(width, currentLineWidth)
+ height += currentLineHeight + lineSpacing
+ currentLineWidth = size.width
+ currentLineHeight = size.height
+ } else {
+ currentLineWidth += size.width + spacing
+ currentLineHeight = max(currentLineHeight, size.height)
}
}
+
+ width = max(width, currentLineWidth)
+ height += currentLineHeight
+
+ return CGSize(width: width, height: height)
}
-}
-// MARK: - 高度读取器
+ func placeSubviews(in bounds: CGRect, proposal: ProposedViewSize, subviews: Subviews, cache: inout ()) {
+ var x: CGFloat = 0
+ var y: CGFloat = 0
+ var lineHeight: CGFloat = 0
-/// 用于读取视图高度的辅助视图
-private struct HeightReader: View {
- @Binding var height: CGFloat
-
- var body: some View {
- GeometryReader { geometry in
- Color.clear
- .preference(
- key: HeightPreferenceKey.self,
- value: geometry.size.height
- )
- }
- .onPreferenceChange(HeightPreferenceKey.self) { newHeight in
- DispatchQueue.main.async {
- self.height = newHeight
+ for view in subviews {
+ let size = view.sizeThatFits(.unspecified)
+
+ if x + size.width > bounds.width {
+ x = 0
+ y += lineHeight + lineSpacing
+ lineHeight = 0
}
+
+ view.place(
+ at: CGPoint(x: bounds.minX + x, y: bounds.minY + y),
+ proposal: ProposedViewSize(width: size.width, height: size.height)
+ )
+
+ x += size.width + spacing
+ lineHeight = max(lineHeight, size.height)
}
}
}
-
-// MARK: - 高度偏好键
-
-/// 用于传递高度值的PreferenceKey
-private struct HeightPreferenceKey: PreferenceKey {
- static var defaultValue: CGFloat = 0
-
- static func reduce(value: inout CGFloat, nextValue: () -> CGFloat) {
- value = nextValue()
- }
-}
-
// MARK: - 使用示例
struct FlowLayoutExample: View {
let tags = [
- "Swift", "SwiftUI", "UIKit", "Combine", "Core Data",
- "Xcode", "Interface Builder", "Core Animation", "ARKit",
- "Metal", "Core ML", "Vision", "MapKit", "CloudKit"
+ "Swift"
]
@State private var newTag = ""
@State private var customTags = ["自定义标签1", "自定义标签2"]
var body: some View {
- VStack {
- FlowLayout(customTags + tags, horizontalSpacing: 10, verticalSpacing: 10) { tag in
- MButton(text:tag){
+ VStack{
+ VStack(spacing: 20) {
+ FlowLayout(){
- }
+ ForEach(tags, id: \.self) { item in
+ Text(item)
+ .padding(.horizontal, 12)
+ .padding(.vertical, 6)
+ .background(Color.blue.opacity(0.2))
+ .cornerRadius(8)
+ }
+ }.frame(alignment:.leading)
+ .background(Color.red)
}
- .padding()
- .animation(.default, value: customTags)
+ .frame(maxWidth: .infinity,alignment:.leading)
}
+ .background(Color.black)
}
private func addTag() {
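
Note: this Layout measures every subview twice, once in sizeThatFits and again
in placeSubviews, and it can wrap even for the first item on a line. The
protocol's cache hook covers both; a hedged sketch (type name is hypothetical):

    import SwiftUI

    struct MeasuredFlowLayout: Layout {
        var spacing: CGFloat = 8
        var lineSpacing: CGFloat = 8

        // Measure each subview once and reuse the sizes in both passes.
        func makeCache(subviews: Subviews) -> [CGSize] {
            subviews.map { $0.sizeThatFits(.unspecified) }
        }

        func updateCache(_ cache: inout [CGSize], subviews: Subviews) {
            cache = subviews.map { $0.sizeThatFits(.unspecified) }
        }

        func sizeThatFits(proposal: ProposedViewSize, subviews: Subviews, cache: inout [CGSize]) -> CGSize {
            let maxWidth = proposal.width ?? .infinity
            var x: CGFloat = 0, y: CGFloat = 0, lineHeight: CGFloat = 0, width: CGFloat = 0
            for size in cache {
                if x > 0, x + size.width > maxWidth { // never wrap the first item on a line
                    y += lineHeight + lineSpacing
                    x = 0; lineHeight = 0
                }
                x += size.width + spacing
                lineHeight = max(lineHeight, size.height)
                width = max(width, x - spacing)
            }
            return CGSize(width: width, height: y + lineHeight)
        }

        func placeSubviews(in bounds: CGRect, proposal: ProposedViewSize, subviews: Subviews, cache: inout [CGSize]) {
            var x: CGFloat = 0, y: CGFloat = 0, lineHeight: CGFloat = 0
            for (view, size) in zip(subviews, cache) {
                if x > 0, x + size.width > bounds.width {
                    y += lineHeight + lineSpacing
                    x = 0; lineHeight = 0
                }
                view.place(at: CGPoint(x: bounds.minX + x, y: bounds.minY + y),
                           proposal: ProposedViewSize(size))
                x += size.width + spacing
                lineHeight = max(lineHeight, size.height)
            }
        }
    }
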
diff --git a/LiveProject/views/LTextField.swift b/LiveProject/views/LTextField.swift
new file mode 100644
index 0000000..a945b34
--- /dev/null
+++ b/LiveProject/views/LTextField.swift
@@ -0,0 +1,73 @@
+//
+// LTextField.swift
+// LiveProject
+//
+// Created by 倪路朋 on 7/1/25.
+//
+
+
+import SwiftUI
+
+class LText:ObservableObject{
+ @Published var input = ""
+
+ func update( _ text : String){
+ input = text;
+ }
+}
+
+struct LTextField: View {
+
+ var hint:String = "请输入RTMP直播地址";
+ @EnvironmentObject var text:LText;
+ @State var onFocus = false;
+ @FocusState var isfocused:Bool
+ @State var inputType : UIKeyboardType = UIKeyboardType.default;
+ @State var strl = "";
+ var lines = 1;
+
+ var body: some View {
+
+ VStack(alignment: .leading) {// iOS
+ let binding = Binding<String>(get: {
+ //print(self.text.input);
+ return text.input
+ }, set: { str in
+ text.update(str)
+ })
+
+ TextField(hint,text: binding){ change in
+ print(hint+" \(change)")
+ self.onFocus = change
+ } onCommit: {
+ print("onCommit")
+ }
+ .font(Font.system(size: 16))
+ .focused($isfocused)
+ .foregroundColor(Color.colorText)
+ .padding(.leading,24)
+ .padding(.trailing,24)
+ .cornerRadius(12)
+ .frame(height: 60)
+ .keyboardType(inputType)
+ .onSubmit {
+ print("onSubmit")
+ }
+ .overlay(
+ RoundedRectangle(cornerRadius: 12, style: .continuous)
+ .stroke(Color.colorText, lineWidth: 2)
+ )
+ }.frame(minWidth: 0, maxWidth: .infinity, alignment: .topLeading)
+ .onTapGesture {
+ isfocused = true
+ print("Click ATextField button")
+ //textField.focused(SwiftUI.FocusState<Bool>)
+ }
+ }
+}
+
+struct LTextField_Previews: PreviewProvider {
+ static var previews: some View {
+ LTextField().environmentObject(LText())
+ }
+}
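
Note: LiveActivity's DialogInput creates a fresh LText for the field
(`LTextField().environmentObject(LText())`), so the confirm button has no way
to read what was typed. A sketch of owning the model at the caller instead
(InputExample is hypothetical):

    import SwiftUI

    struct InputExample: View {
        @StateObject private var text = LText() // shared, survives re-renders

        var body: some View {
            VStack {
                LTextField().environmentObject(text)
                Button("确认") { print("RTMP URL: \(text.input)") }
            }
        }
    }
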
diff --git a/LiveProject/views/MButton.swift b/LiveProject/views/MButton.swift
index ce8b915..54b2d2d 100644
--- a/LiveProject/views/MButton.swift
+++ b/LiveProject/views/MButton.swift
@@ -12,7 +12,7 @@
var valid :ValidState = .VALID;
- var icon : (any View)? = nil;
+ var icon : IconInfo? = nil;
var text : String? = nil;
@@ -30,19 +30,23 @@
ZStack() {// iOS
HStack() {// iOS
- if let iconView = icon {
- AnyView(iconView)
+ if let info = icon{
+ Image(systemName: info.name)
+ .resizable()
+ .frame(width: info.size.width, height: info.size.height)
+ .aspectRatio(contentMode: .fit)
+ .foregroundColor(Color.white)
}
if let str = text {
Text(str)
.font(Font.system(size: 16))
- .foregroundColor(Color.init("ColorWhite"))
+ .foregroundColor(Color.white)
.frame(width: .infinity, height: 40)
}
}.frame(minWidth: 40, maxHeight: 40).padding(EdgeInsets(top: 0, leading: 15, bottom: 0, trailing: 15))
}.frame(maxHeight: 40).background(
RoundedRectangle(cornerRadius: 20, style: .continuous)
- .fill(Color.init(valid == .INVALID ?"ColorGray":"ColorText"))
+ .fill(valid == .INVALID ? Color.colorGray : Color.colorText)
)
}.buttonStyle( TextBtnStyle())
@@ -51,6 +55,6 @@
struct MButton_Previews: PreviewProvider {
static var previews: some View {
- MButton(icon: IconPortrait())
+ MButton(icon: Icons.IMAGE_MUTE)
}
}
diff --git a/LiveProject/views/TitleBarView.swift b/LiveProject/views/TitleBarView.swift
index 7cfce53..d063a17 100644
--- a/LiveProject/views/TitleBarView.swift
+++ b/LiveProject/views/TitleBarView.swift
@@ -12,7 +12,7 @@
@Environment(\.presentationMode) var presentationMode
var title = ""
- var iconBack = IconBack();
+ var iconBack = Icons.BACK;
var imgRight = "";
var titleColor = Color.colorText
@@ -24,7 +24,11 @@
print("Click back button")
self.presentationMode.wrappedValue.dismiss()
}) {
- iconBack.stroke(Color.primary, lineWidth: 2.5).frame(width: 18,height: 14)
+ Image(systemName: iconBack.name)
+ .resizable()
+ .frame(width: iconBack.size.width, height: iconBack.size.height)
+ .aspectRatio(contentMode: .fit)
+ .foregroundColor(Color.white)
}
Spacer()
Text(title).foregroundColor(titleColor)
diff --git a/LiveProject/views/VideoRendererView.swift b/LiveProject/views/VideoRendererView.swift
index 67fdc70..d157f06 100644
--- a/LiveProject/views/VideoRendererView.swift
+++ b/LiveProject/views/VideoRendererView.swift
@@ -8,19 +8,33 @@
import MetalKit
struct VideoRendererView: UIViewRepresentable {
- let renderer: MetalRenderer // 自定义 Metal 渲染器,支持传入 RGBA/YUV 数据帧
+ @Binding var pixelBuffer: CVPixelBuffer?
- func makeUIView(context: Context) -> MTKView {
- let view = MTKView()
- view.device = MTLCreateSystemDefaultDevice()
- view.colorPixelFormat = .bgra8Unorm
- view.clearColor = MTLClearColor(red: 0.2, green: 0.5, blue: 0.7, alpha: 1.0)
- view.delegate = renderer
- view.isPaused = false
- view.enableSetNeedsDisplay = false
- renderer.setup(view: view)
- return view
+ // Use a Coordinator to cache the MTKView and renderer instances
+ func makeCoordinator() -> Coordinator {
+ return Coordinator()
}
- func updateUIView(_ uiView: MTKView, context: Context) {}
+ func makeUIView(context: Context) -> MTKView {
+ return context.coordinator.mtkView
+ }
+
+ func updateUIView(_ uiView: MTKView, context: Context) {
+ if let buffer = pixelBuffer {
+ //print("updateUIView")
+ context.coordinator.renderer.display(pixelBuffer: buffer)
+ }
+ }
+
+ class Coordinator {
+ let mtkView: MTKView
+ let renderer: MetalRenderer
+
+ init() {
+ print("📦 MetalRendererWrapper 初始化了")
+ mtkView = MTKView()
+ renderer = MetalRenderer(mtkView: mtkView)
+ }
+ }
}
+
--
Gitblit v1.9.1