
Swift AVFoundation methods deprecated

  •  Ahmed Zaidan  ·  1 year ago

    I have some code below that merges video clips into a single video. I don't have much experience with AVFoundation, so I'm not sure how to replace the deprecated calls with the newer APIs without causing the error: Type 'CameraViewModel' does not conform to protocol 'AVCaptureFileOutputRecordingDelegate'. This code works on iOS 15, but the APIs it uses were deprecated in iOS 16. The first snippet below is the original code with the deprecated calls (specifically inside the mergeVideos function). In the second snippet I tried to replace the deprecated calls with the updated ones, but in doing so I get the error above.

    In my code, I changed the deprecated asset.duration to the async asset.load(.duration).

    I also changed asset.tracks(withMediaType: .audio)[0] to the async asset.loadTracks(withMediaType: .audio).
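
    For illustration, here is a minimal sketch of those two replacements in isolation (the helper function name and the reduced error handling are my own, not part of the project):

    import AVFoundation

    // Hypothetical helper that isolates the two API changes, assuming iOS 16's async loading API.
    func loadDurationAndAudioTrack(from asset: AVURLAsset) async throws -> (CMTime, AVAssetTrack?) {
        // Deprecated synchronous accessors:
        //   let duration = asset.duration
        //   let audioTrack = asset.tracks(withMediaType: .audio).first
        let duration = try await asset.load(.duration)                           // replaces asset.duration
        let audioTrack = try await asset.loadTracks(withMediaType: .audio).first // replaces asset.tracks(withMediaType:)[0]
        return (duration, audioTrack)
    }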

    import SwiftUI
    import AVFoundation
    
    class CameraViewModel: NSObject, ObservableObject, AVCaptureFileOutputRecordingDelegate{
        @Published var session = AVCaptureSession()
        @Published var alert = false
        @Published var output = AVCaptureMovieFileOutput()
        @Published var preview : AVCaptureVideoPreviewLayer!
        @Published var isRecording: Bool = false
        @Published var recordedURLs: [URL] = []
        @Published var previewURL: URL?
        @Published var showPreview: Bool = false
    
        func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
            if let error = error {
                print(error.localizedDescription)
                return
            }
    
            self.recordedURLs.append(outputFileURL)
            if self.recordedURLs.count == 1{
                self.previewURL = outputFileURL
                return
            }
    
            let assets = recordedURLs.compactMap { url -> AVURLAsset in
                return AVURLAsset(url: url)
            }
    
            self.previewURL = nil
            mergeVideos(assets: assets) { exporter in // ADD AWAIT HERE
                exporter.exportAsynchronously {
                    if exporter.status == .failed{
                        print(exporter.error!)
                    }
                    else{
                        if let finalURL = exporter.outputURL{
                            print(finalURL)
                            DispatchQueue.main.async {
                                self.previewURL = finalURL
                            }
                        }
                    }
                }
            }
        }
    
        func mergeVideos(assets: [AVURLAsset],completion: @escaping (_ exporter: AVAssetExportSession)->()){
            
        let composition = AVMutableComposition()
        var lastTime: CMTime = .zero
        
        guard let videoTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else { return }
        guard let audioTrack = composition.addMutableTrack(withMediaType: .audio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else { return }
            
            for asset in assets {
                // Linking Audio and Video
                do{
                    try videoTrack.insertTimeRange(CMTimeRange(start: .zero, duration: asset.duration), of: asset.tracks(withMediaType: .video)[0], at: lastTime)
                    // Safe Check if Video has Audio
                    if !asset.tracks(withMediaType: .audio).isEmpty{
                        try audioTrack.insertTimeRange(CMTimeRange(start: .zero, duration: asset.duration), of: asset.tracks(withMediaType: .audio)[0], at: lastTime)
                    }
                }
                catch{
                    // HANDLE ERROR
                    print(error.localizedDescription)
                }
                
                // Updating Last Time
                lastTime = CMTimeAdd(lastTime, asset.duration)
            }
    
            //more code
        }
    }
    
    import SwiftUI
    import AVFoundation
    
    class CameraViewModel: NSObject, ObservableObject, AVCaptureFileOutputRecordingDelegate{
        @Published var session = AVCaptureSession()
        @Published var alert = false
        @Published var output = AVCaptureMovieFileOutput()
        @Published var preview : AVCaptureVideoPreviewLayer!
        @Published var isRecording: Bool = false
        @Published var recordedURLs: [URL] = []
        @Published var previewURL: URL?
        @Published var showPreview: Bool = false
        
        func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) async { //made func async
            if let error = error {
                print(error.localizedDescription)
                return
            }
    
            self.recordedURLs.append(outputFileURL)
            if self.recordedURLs.count == 1{
                self.previewURL = outputFileURL
                return
            }
            
            let assets = recordedURLs.compactMap { url -> AVURLAsset in
                return AVURLAsset(url: url)
            }
            
            self.previewURL = nil
            await mergeVideos(assets: assets) { exporter in // ADD AWAIT HERE
                exporter.exportAsynchronously {
                    if exporter.status == .failed{
                        print(exporter.error!)
                    }
                    else{
                        if let finalURL = exporter.outputURL{
                            print(finalURL)
                            DispatchQueue.main.async {
                                self.previewURL = finalURL
                            }
                        }
                    }
                }
            }
        }
        
        func mergeVideos(assets: [AVURLAsset],completion: @escaping (_ exporter: AVAssetExportSession)->()) async {
        let composition = AVMutableComposition()
        var lastTime: CMTime = .zero
        
        guard let videoTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else { return }
        guard let audioTrack = composition.addMutableTrack(withMediaType: .audio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else { return }
            
            for asset in assets {
                do {
                    //Changes
                    try await videoTrack.insertTimeRange(CMTimeRange(start: .zero, duration: asset.load(.duration)), of: asset.loadTracks(withMediaType: .video)[0], at: lastTime)
                    if try await !asset.loadTracks(withMediaType: .audio).isEmpty {
                        try await audioTrack.insertTimeRange(CMTimeRange(start: .zero, duration: asset.load(.duration)), of: asset.loadTracks(withMediaType: .audio)[0], at: lastTime)
                    }
                }
                catch {
                    print(error.localizedDescription)
                }
            
                do {
                    lastTime = try await CMTimeAdd(lastTime, asset.load(.duration))
                } catch {
                    print(error.localizedDescription)
                }
            }
        }
    }
    
1 Answer

  •   David B.  ·  1 year ago

    Instead of making func fileOutput async, just wrap the mergeVideos call in a Task. That keeps the protocol conformance intact.

    Task {
        await mergeVideos(assets: assets) { exporter in
            exporter.exportAsynchronously {
                if exporter.status == .failed {
                    print(exporter.error!)
                } else {
                    if let finalURL = exporter.outputURL {
                        print(finalURL)
                        DispatchQueue.main.async {
                            self.previewURL = finalURL
                        }
                    }
                }
            }
        }
    }
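
    This keeps the delegate method's signature unchanged: AVCaptureFileOutputRecordingDelegate declares fileOutput(_:didFinishRecordingTo:from:error:) as a synchronous method, so turning it into an async func means the class no longer provides the required method, which is exactly the conformance error you are seeing. Wrapping the merge in a Task lets the callback return immediately while the asset loading, merging, and export continue asynchronously; previewURL is still updated on the main thread via the existing DispatchQueue.main.async call.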