I've been able to take a 16:9 landscape video, crop it around its center, and create a 9:16 portrait version of it, similar to what Apple does in the Photos app.
The only issue is that the resulting portrait video isn't centered in the middle of the screen (images below).
How can I get the resulting portrait video centered on the screen?
func createExportSession(for videoURL: URL) {
    let asset = AVURLAsset(url: videoURL)
    let exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality)!
    exporter.videoComposition = turnHorizontalVideoToPortraitVideo(asset: asset)
    exporter.outputURL = // ...
    exporter.outputFileType = AVFileType.mp4
    exporter.shouldOptimizeForNetworkUse = true
    exporter.exportAsynchronously { [weak self] in
        // ...
        // the exporter.outputURL is eventually added to an AVURLAsset and played inside an AVPlayer
    }
}
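For context, here is a minimal sketch of what the elided completion handling might look like; the function name is mine and not from the original project, it just does the standard status check and builds the player mentioned in the comment above:

import AVFoundation

// Sketch only (not the original code): check the export status, then wrap the
// finished file in an AVURLAsset and hand it to an AVPlayer.
func makePlayer(from exporter: AVAssetExportSession) -> AVPlayer? {
    switch exporter.status {
    case .completed:
        guard let url = exporter.outputURL else { return nil }
        return AVPlayer(playerItem: AVPlayerItem(asset: AVURLAsset(url: url)))
    case .failed, .cancelled:
        print("Export failed:", exporter.error?.localizedDescription ?? "unknown error")
        return nil
    default:
        return nil
    }
}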
func turnHorizontalVideoToPortraitVideo(asset: AVURLAsset) -> AVVideoComposition {
    let track = asset.tracks(withMediaType: AVMediaType.video)[0]
    let renderSize = CGSize(width: 720, height: 1280)

    var transform1 = track.preferredTransform
    transform1 = transform1.concatenating(CGAffineTransform(rotationAngle: CGFloat(90.0 * .pi / 180)))
    transform1 = transform1.concatenating(CGAffineTransform(translationX: track.naturalSize.width, y: 0))

    let transform2 = CGAffineTransform(translationX: track.naturalSize.height, y: (track.naturalSize.width - track.naturalSize.height) / 2)
    let transform3 = transform2.rotated(by: CGFloat(Double.pi / 2)).concatenating(transform1)

    let translate = CGAffineTransform(translationX: renderSize.width, y: renderSize.height)
    let rotateFromUpsideDown = translate.rotated(by: CGFloat(Double.pi)) // without this the portrait video is always upside down
    let finalTransform = transform3.concatenating(rotateFromUpsideDown)

    let transformer = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
    transformer.setTransform(finalTransform, at: .zero)

    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRange(start: .zero, duration: asset.duration)
    instruction.layerInstructions = [transformer]

    let videoComposition = AVMutableVideoComposition()
    videoComposition.frameDuration = CMTime(value: 1, timescale: 30)
    videoComposition.renderSize = renderSize
    videoComposition.instructions = [instruction]
    return videoComposition
}
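To put a number on the offset, here is the same transform chain rebuilt standalone and evaluated for an assumed 1280x720 landscapeRight source (identity preferredTransform); the sizes are only for illustration, not taken from my project:

import CoreGraphics

// Sketch only: for an assumed 1280x720 source the whole chain reduces to a
// plain translation whose y-offset is twice what a centered, unscaled crop
// into 720x1280 needs, which is why the output sits too low.
let naturalSize = CGSize(width: 1280, height: 720)     // assumed for illustration
let renderSize = CGSize(width: 720, height: 1280)

var t1 = CGAffineTransform.identity                    // identity preferredTransform
t1 = t1.concatenating(CGAffineTransform(rotationAngle: .pi / 2))
t1 = t1.concatenating(CGAffineTransform(translationX: naturalSize.width, y: 0))
let t2 = CGAffineTransform(translationX: naturalSize.height,
                           y: (naturalSize.width - naturalSize.height) / 2)
let t3 = t2.rotated(by: .pi / 2).concatenating(t1)
let flip = CGAffineTransform(translationX: renderSize.width, y: renderSize.height)
    .rotated(by: .pi)
let netTransform = t3.concatenating(flip)

print(netTransform.tx, netTransform.ty)                // ≈ (-280, 560): pushed down
print((renderSize.width - naturalSize.width) / 2,      // centered would be
      (renderSize.height - naturalSize.height) / 2)    // (-280, 280)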
Initial horizontal video:
Resulting portrait video after running the above code. The portrait video is incorrectly centered:
This is the way that it should be centered:
I'm the same person who posted the other question and the two previous answers. This is what I came up with to handle both landscapeRight and landscapeLeft videos:
func turnHorizontalVideoToPortraitVideo(asset: AVURLAsset) -> AVVideoComposition {
    let track = asset.tracks(withMediaType: AVMediaType.video)[0]
    let renderSize = CGSize(width: 720, height: 1280)
    let t = track.preferredTransform

    // Treat an identity preferredTransform as landscapeRight and a 180° rotation
    // (a == -1, d == -1) as landscapeLeft.
    if (t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0) {
        print("landscapeRight")
    }
    var isLandscapeLeft = false
    if (t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0) {
        print("landscapeLeft")
        isLandscapeLeft = true
    }

    var transform1 = t
    transform1 = transform1.concatenating(CGAffineTransform(rotationAngle: CGFloat(90.0 * .pi / 180)))
    transform1 = transform1.concatenating(CGAffineTransform(translationX: track.naturalSize.width, y: 0))

    let transform2 = CGAffineTransform(translationX: track.naturalSize.height, y: (track.naturalSize.width - track.naturalSize.height) / 2)

    // Rotate the opposite way for landscapeLeft so the content ends up upright.
    var p = Double.pi / 2
    if isLandscapeLeft {
        p = -Double.pi / 2
    }
    let transform3 = transform2.rotated(by: CGFloat(p)).concatenating(transform1)
    let finalTransform = transform3

    let transformer = AVMutableVideoCompositionLayerInstruction(assetTrack: track)

    if isLandscapeLeft {
        // Shift the frame down by a fraction (ty / 2.5) of the transform's y-offset
        // to move it toward the center.
        let ty = finalTransform.ty
        let dividedNum = ty / 2.5
        let translation = CGAffineTransform(translationX: 0, y: dividedNum)
        let new_finalTransform = finalTransform.concatenating(translation)
        transformer.setTransform(new_finalTransform, at: .zero)
    }

    if !isLandscapeLeft {
        // Flip the 180°-rotated result back upright, then pull it up by half of the
        // resulting y-offset so it sits vertically centered.
        let translate = CGAffineTransform(translationX: renderSize.width, y: renderSize.height)
        let rotateFromUpsideDown = translate.rotated(by: CGFloat(Double.pi))
        let transformRotated = finalTransform.concatenating(rotateFromUpsideDown)
        let ty = transformRotated.ty
        var dividedNum = ty / 2
        if dividedNum < 0 {
            dividedNum = 0
        }
        let translation = CGAffineTransform(translationX: 0, y: -dividedNum)
        let new_finalTransform = transformRotated.concatenating(translation)
        transformer.setTransform(new_finalTransform, at: .zero)
    }

    let instruction = AVMutableVideoCompositionInstruction()
    //instruction.backgroundColor = UIColor.yellow.cgColor
    instruction.timeRange = CMTimeRange(start: .zero, duration: asset.duration)
    instruction.layerInstructions = [transformer]

    let videoComposition = AVMutableVideoComposition()
    videoComposition.frameDuration = CMTime(value: 1, timescale: 30)
    videoComposition.renderSize = renderSize
    videoComposition.instructions = [instruction]
    return videoComposition
}
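For comparison, here is a minimal sketch of the same idea computed directly rather than with per-orientation adjustments (the function name is mine, and like the code above it does not scale the video): apply the track's preferredTransform, move the transformed bounding box back to the origin, then offset it by half the difference between the render size and the box so it sits centered for both landscapeRight and landscapeLeft sources.

import AVFoundation

// Sketch only: derive the centering translation from preferredTransform and
// naturalSize instead of hand-tuned offsets. Assumes a landscape source and
// no scaling, the same as the code above.
func centeredPortraitComposition(asset: AVURLAsset,
                                 renderSize: CGSize = CGSize(width: 720, height: 1280)) -> AVVideoComposition? {
    guard let track = asset.tracks(withMediaType: .video).first else { return nil }

    // Bounding box of the video once its preferredTransform is applied
    // (handles both the identity and the 180° landscapeLeft case).
    let rect = CGRect(origin: .zero, size: track.naturalSize).applying(track.preferredTransform)

    // Move the transformed rect to the origin, then shift it so its center
    // matches the center of the render canvas.
    var transform = track.preferredTransform
        .concatenating(CGAffineTransform(translationX: -rect.minX, y: -rect.minY))
    transform = transform.concatenating(
        CGAffineTransform(translationX: (renderSize.width - rect.width) / 2,
                          y: (renderSize.height - rect.height) / 2))

    let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
    layerInstruction.setTransform(transform, at: .zero)

    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRange(start: .zero, duration: asset.duration)
    instruction.layerInstructions = [layerInstruction]

    let videoComposition = AVMutableVideoComposition()
    videoComposition.frameDuration = CMTime(value: 1, timescale: 30)
    videoComposition.renderSize = renderSize
    videoComposition.instructions = [instruction]
    return videoComposition
}

With a 1280x720 source this works out to a translation of (-280, 280): the middle 720 points of the frame are shown horizontally, with equal 280-point bands above and below the video.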