I am working on a video editing software in Swift and I am using AVComposition for video processing and editing them frame by frame.
Initially, I built the composition with one single track (the full clip). Now I allow users to make a cut anywhere in the middle, but because AVComposition doesn't update on the fly the way AVAudioMix does, I have to call the rebuild-composition function every single time, which causes a jarring black-screen transition in our video player.
Basically, when I make a cut using the button, a black flash appears while the composition is being rebuilt, and any time I call the buildComposition function, that flash appears again.
I want it to behave like Final Cut Pro: any change to the AVComposition should update smoothly, without causing any visual disturbance.
Here is my sample code :- https://github.com/zaidbren/SimpleEditor
/// Editing state for a single project.
///
/// Conforms to `Equatable` so SwiftUI's `onChange(of:)` can observe it and
/// trigger a composition rebuild whenever an edit is made.
struct Project: Equatable {
    /// Whether the leading trim ("cut") is currently applied.
    var isCut: Bool = false
    /// Stable identity for this project value.
    var id: UUID = UUID()
}
/// Simple cut/uncut editor UI around a `Renderer`-built `AVComposition`.
struct ProjectEditor: View {
    @StateObject private var renderer: Renderer
    @State private var player: AVPlayer?
    @State private var project = Project(isCut: false)

    init(videoURL: URL) {
        _renderer = StateObject(wrappedValue: Renderer(videoURL: videoURL))
    }

    var body: some View {
        VStack(spacing: 20) {
            if let player {
                VideoPlayer(player: player)
                    .aspectRatio(calculateAspectRatio(), contentMode: .fit)
                    .frame(maxWidth: 800, maxHeight: 450)
                    .onAppear {
                        player.play()
                    }
            } else {
                // Placeholder while the first composition is being built.
                Rectangle()
                    .fill(Color.gray.opacity(0.3))
                    .aspectRatio(16/9, contentMode: .fit)
                    .frame(maxWidth: 800, maxHeight: 450)
                    .overlay(Text("Loading..."))
            }

            HStack(spacing: 12) {
                Button {
                    project.isCut = true
                } label: {
                    Label("Cut", systemImage: "scissors")
                }
                .buttonStyle(.bordered)
                .tint(project.isCut ? .blue : .gray)

                Button {
                    project.isCut = false
                } label: {
                    Label("Uncut", systemImage: "arrow.uturn.backward")
                }
                .buttonStyle(.bordered)
                .tint(!project.isCut ? .blue : .gray)
            }

            Text(project.isCut ? "3-5 seconds trimmed from video" : "Full video")
                .font(.caption)
                .foregroundColor(.secondary)
                .multilineTextAlignment(.center)
                .padding(.horizontal)
        }
        .padding()
        .onDisappear {
            Task {
                await renderer.cleanup()
            }
        }
        .onAppear {
            Task {
                await buildInitialComposition()
            }
        }
        .onChange(of: project) { _, _ in
            Task {
                await rebuildComposition()
            }
        }
    }

    /// Aspect ratio for the player view, falling back to 16:9 until the
    /// renderer has discovered the source's natural size.
    private func calculateAspectRatio() -> CGFloat {
        let size = renderer.compositionSize
        guard size.width > 0 && size.height > 0 else {
            return 16/9
        }
        return size.width / size.height
    }

    /// Builds the first composition and creates the player for it.
    private func buildInitialComposition() async {
        let playerItem = await renderer.buildComposition(isCut: project.isCut)
        player = AVPlayer(playerItem: playerItem)
    }

    /// Rebuilds the composition after an edit and swaps it into the existing
    /// player, minimizing the black flash that `replaceCurrentItem(with:)`
    /// shows while the incoming item decodes its first frame.
    ///
    /// Fixes vs. the previous version:
    /// - preserves the user's playback position instead of snapping to zero;
    /// - pre-seeks the *detached* item (with zero tolerance) before attaching
    ///   it, so the item's first rendered frame is already at the resume
    ///   point when the swap happens — this is what shortens the blank gap;
    /// - resumes playback only if the player was actually playing.
    private func rebuildComposition() async {
        let playerItem = await renderer.buildComposition(isCut: project.isCut)
        await MainActor.run {
            guard let player else { return }
            let wasPlaying = player.rate > 0
            // NOTE(review): this keeps the raw timeline position; it does not
            // re-map the time across the trimmed range. For frame-accurate
            // continuity, offset `resumeTime` by the removed range — confirm
            // the desired UX before shipping.
            let resumeTime = player.currentTime()
            player.pause()
            // Seeking an AVPlayerItem is legal before it is attached to a
            // player; doing it here primes the item near the resume point.
            playerItem.seek(to: resumeTime,
                            toleranceBefore: .zero,
                            toleranceAfter: .zero) { _ in }
            player.replaceCurrentItem(with: playerItem)
            if wasPlaying {
                player.play()
            }
        }
    }
}
/// Builds `AVComposition`s for a single source video and keeps the shared
/// `CustomVideoCompositor` registry in sync with the current project state.
@MainActor
class Renderer: ObservableObject {
    @Published var isLoading = false
    /// Natural size of the source video; drives the player's aspect ratio.
    @Published var compositionSize: CGSize = CGSize(width: 640, height: 360)

    private let compositorId: String
    private let sourceAsset: AVAsset
    private let videoURL: URL
    private var currentProject = Project()
    // NOTE(review): removed the unused `renderQueue` DispatchQueue — nothing
    // in this class dispatched onto it.

    init(videoURL: URL) {
        self.videoURL = videoURL
        self.compositorId = UUID().uuidString
        // AVURLAsset is the concrete, non-deprecated initializer for a file
        // URL; the stored type stays AVAsset so the rest of the class and
        // all callers are unaffected.
        self.sourceAsset = AVURLAsset(url: videoURL)
        Task {
            await CustomVideoCompositor.setProject(currentProject, forId: compositorId)
        }
    }

    /// Builds a fresh composition (plus custom video composition) reflecting
    /// the requested cut state and returns a player item for it.
    /// - Parameter isCut: when true, the first 4 seconds of the source are
    ///   trimmed; otherwise the full clip is used.
    /// - Returns: a ready-to-attach `AVPlayerItem`.
    func buildComposition(isCut: Bool) async -> AVPlayerItem {
        currentProject.isCut = isCut
        await CustomVideoCompositor.updateProject(currentProject, forId: compositorId)

        let composition = AVMutableComposition()
        let videoTrack = composition.addMutableTrack(
            withMediaType: .video,
            preferredTrackID: kCMPersistentTrackID_Invalid
        )!

        // Async property loading replaces the deprecated blocking accessors
        // (`tracks(withMediaType:)`, `.duration`, `.naturalSize`), which can
        // stall the main actor while the asset is parsed.
        guard let sourceTrack = try? await sourceAsset.loadTracks(withMediaType: .video).first else {
            // Precondition of this editor: the asset must contain video.
            fatalError("No video track found in source asset")
        }
        let naturalSize = (try? await sourceTrack.load(.naturalSize)) ?? compositionSize
        // Already on the main actor (@MainActor class) — the previous
        // `await MainActor.run { … }` hop was redundant.
        compositionSize = naturalSize

        let duration = (try? await sourceAsset.load(.duration)) ?? .zero

        // Time range: either the full clip, or everything after the 4 s trim.
        let timeRange: CMTimeRange
        if isCut {
            let trimDuration = CMTime(seconds: 4.0, preferredTimescale: 600)
            let remainingDuration = CMTimeSubtract(duration, trimDuration)
            timeRange = CMTimeRange(start: trimDuration, duration: remainingDuration)
        } else {
            timeRange = CMTimeRange(start: .zero, duration: duration)
        }

        do {
            try videoTrack.insertTimeRange(timeRange, of: sourceTrack, at: .zero)
        } catch {
            fatalError("Failed to insert video track: \(error)")
        }

        // Mirror the same range onto an audio track, if the asset has one.
        // Insert failures are deliberately best-effort (video still plays).
        if let sourceAudioTrack = try? await sourceAsset.loadTracks(withMediaType: .audio).first,
           let audioTrack = composition.addMutableTrack(
               withMediaType: .audio,
               preferredTrackID: kCMPersistentTrackID_Invalid
           ) {
            try? audioTrack.insertTimeRange(timeRange, of: sourceAudioTrack, at: .zero)
        }

        let videoComposition = AVMutableVideoComposition()
        videoComposition.frameDuration = CMTime(value: 1, timescale: 30) // 30 fps
        videoComposition.renderSize = naturalSize

        // Route frames through the app's custom compositor, keyed by id so
        // multiple renderers can coexist in the shared registry.
        let instruction = CompositorInstruction()
        instruction.timeRange = CMTimeRange(start: .zero, duration: timeRange.duration)
        instruction.compositorId = compositorId
        instruction.requiredSourceTrackIDs = [NSNumber(value: videoTrack.trackID)]
        videoComposition.instructions = [instruction]
        videoComposition.customVideoCompositorClass = CustomVideoCompositor.self

        let playerItem = AVPlayerItem(asset: composition)
        playerItem.videoComposition = videoComposition
        return playerItem
    }

    /// Unregisters this renderer's project from the shared compositor table.
    func cleanup() async {
        await CustomVideoCompositor.removeProject(forId: compositorId)
    }
}
