Ответ 1
Вы можете добиться желаемого, записывая видео 30-секундными фрагментами, а затем ставя их в очередь AVQueuePlayer для бесшовного воспроизведения. Записывать видео по фрагментам было бы очень просто с помощью AVCaptureFileOutput
на macOS, но, к сожалению, на iOS он не умеет начинать новый фрагмент без потери кадров, поэтому придётся использовать более многословный низкоуровневый API — AVAssetWriter
:
import UIKit
import AVFoundation
// TODO: delete old videos
// TODO: audio
/// Records live camera video in ~30-second chunk files via AVAssetWriter and
/// queues each finished chunk into an AVQueuePlayer for delayed playback.
class ViewController: UIViewController {
    // MARK: Capture
    let captureSession = AVCaptureSession()

    // MARK: Playback
    let player = AVQueuePlayer()
    var playerLayer: AVPlayerLayer! = nil

    // MARK: Output — sadly not AVCaptureMovieFileOutput: on iOS it can't
    // switch output files without dropping frames, so chunking is
    // re-implemented on top of AVAssetWriter.
    var assetWriter: AVAssetWriter! = nil
    var assetWriterInput: AVAssetWriterInput! = nil
    var chunkNumber = 0
    var chunkStartTime: CMTime! = nil
    var chunkOutputURL: URL! = nil

    override func viewDidLoad() {
        super.viewDidLoad()

        playerLayer = AVPlayerLayer(player: player)
        view.layer.addSublayer(playerLayer)

        // Inputs. Guard rather than force-unwrap / try!: the default camera is
        // nil in the simulator or when camera access is denied, and the device
        // input initializer can throw — crashing here gave no diagnostics.
        guard let videoCaptureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo) else {
            print("No video capture device available")
            return
        }
        let videoInput: AVCaptureDeviceInput
        do {
            videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
        } catch {
            print("Could not create video capture input: \(error)")
            return
        }
        guard captureSession.canAddInput(videoInput) else {
            print("Cannot add video input to the capture session")
            return
        }
        captureSession.addInput(videoInput)

        // Outputs.
        // iOS AVCaptureFileOutput/AVCaptureMovieFileOutput still don't support
        // dynamically switching files (?) so we have to re-implement with AVAssetWriter.
        let videoOutput = AVCaptureVideoDataOutput()
        // TODO: probably something else
        videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
        guard captureSession.canAddOutput(videoOutput) else {
            print("Cannot add video output to the capture session")
            return
        }
        captureSession.addOutput(videoOutput)
        captureSession.startRunning()
    }

    override func viewDidLayoutSubviews() {
        super.viewDidLayoutSubviews()
        // Keep the playback layer filling the view on rotation/resize.
        playerLayer.frame = view.layer.bounds
    }

    /// Creates a fresh AVAssetWriter + input for the next chunk file
    /// (Documents/chunkN.mov) and starts its session.
    ///
    /// - Parameters:
    ///   - presentationTimeStamp: Timestamp of the first frame of the new
    ///     chunk; becomes the session's source start time.
    ///   - width/height: Output dimensions, defaulting to 1080p for backward
    ///     compatibility. (TODO: derive from
    ///     CMSampleBufferGetImageBuffer(sampleBuffer) instead.)
    func createWriterInput(for presentationTimeStamp: CMTime, width: Int = 1920, height: Int = 1080) {
        let fileManager = FileManager.default
        chunkOutputURL = fileManager.urls(for: .documentDirectory, in: .userDomainMask)[0].appendingPathComponent("chunk\(chunkNumber).mov")
        // AVAssetWriter refuses to overwrite an existing file; removal is
        // best-effort because the file usually doesn't exist yet.
        try? fileManager.removeItem(at: chunkOutputURL)

        do {
            assetWriter = try AVAssetWriter(outputURL: chunkOutputURL, fileType: AVFileTypeQuickTimeMovie)
        } catch {
            // Documents is always writable, so this indicates a programming
            // error; surface it instead of crashing via try!.
            print("Could not create AVAssetWriter: \(error)")
            return
        }

        let outputSettings: [String: Any] = [
            AVVideoCodecKey: AVVideoCodecH264,
            AVVideoWidthKey: width,
            AVVideoHeightKey: height
        ]
        assetWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: outputSettings)
        // Frames arrive from a live capture session; don't stall the pipeline.
        assetWriterInput.expectsMediaDataInRealTime = true
        assetWriter.add(assetWriterInput)

        chunkNumber += 1
        chunkStartTime = presentationTimeStamp
        assetWriter.startWriting()
        assetWriter.startSession(atSourceTime: chunkStartTime)
    }
}
// MARK: - AVCaptureVideoDataOutputSampleBufferDelegate
extension ViewController: AVCaptureVideoDataOutputSampleBufferDelegate {
    /// Called for every captured video frame (on the main queue, as configured
    /// in viewDidLoad). Lazily creates the first chunk writer, rolls over to a
    /// new chunk once the current one exceeds 30 seconds, and appends the
    /// frame to whichever writer input is active afterwards.
    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
        let presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
        if assetWriter == nil {
            // Very first frame: start chunk 0 at this frame's timestamp.
            createWriterInput(for: presentationTimeStamp)
        } else {
            // Elapsed time within the current chunk, in seconds.
            let chunkDuration = CMTimeGetSeconds(CMTimeSubtract(presentationTimeStamp, chunkStartTime))
            if chunkDuration > 30 {
                assetWriter.endSession(atSourceTime: presentationTimeStamp)
                // Copy into locals: finishWriting is asynchronous, and
                // createWriterInput below reassigns the instance properties
                // before the completion handler runs.
                let newChunkURL = chunkOutputURL!
                let chunkAssetWriter = assetWriter!
                chunkAssetWriter.finishWriting {
                    print("finishWriting says: \(chunkAssetWriter.status.rawValue, chunkAssetWriter.error)")
                    print("queuing \(newChunkURL)")
                    // Enqueue the finished chunk for playback (~30 s delayed).
                    self.player.insert(AVPlayerItem(url: newChunkURL), after: nil)
                    self.player.play()
                }
                // Start the next chunk at the exact timestamp the previous one
                // ended so no gap appears between chunks.
                createWriterInput(for: presentationTimeStamp)
            }
        }
        // Append the frame to the (possibly just re-created) writer input.
        if !assetWriterInput.append(sampleBuffer) {
            print("append says NO: \(assetWriter.status.rawValue, assetWriter.error)")
        }
    }
}
P.S. Очень любопытно, зачем вам показывать происходившее 30 секунд назад. Чем именно вы занимаетесь?