I'm the same person who posted the other question and the two previous answers. This is what I came up with for both landscapeRight and landscapeLeft videos:
func turnHorizontalVideoToPortraitVideo(asset: AVURLAsset) -> AVVideoComposition {
    let track = asset.tracks(withMediaType: AVMediaType.video)[0]
    let renderSize = CGSize(width: 720, height: 1280)
    let t = track.preferredTransform

    // identity transform on a horizontal track -> landscapeRight
    if (t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0) {
        print("landscapeRight")
    }

    var isLandscapeLeft = false
    if (t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0) {
        print("landscapeLeft")
        isLandscapeLeft = true
    }

    // rotate 90° and shift the rotated frame back into view
    var transform1 = t
    transform1 = transform1.concatenating(CGAffineTransform(rotationAngle: CGFloat(90.0 * .pi / 180)))
    transform1 = transform1.concatenating(CGAffineTransform(translationX: track.naturalSize.width, y: 0))

    // position the rotated track within the portrait frame
    let transform2 = CGAffineTransform(translationX: track.naturalSize.height, y: (track.naturalSize.width - track.naturalSize.height) / 2)

    var p = Double.pi / 2
    if isLandscapeLeft {
        p = -Double.pi / 2
    }

    let transform3 = transform2.rotated(by: CGFloat(p)).concatenating(transform1)
    let finalTransform = transform3

    let transformer = AVMutableVideoCompositionLayerInstruction(assetTrack: track)

    if isLandscapeLeft {
        let ty = finalTransform.ty
        let dividedNum = ty / 2.5
        let translation = CGAffineTransform(translationX: 0, y: dividedNum)
        let new_finalTransform = finalTransform.concatenating(translation)
        transformer.setTransform(new_finalTransform, at: .zero)
    }

    if !isLandscapeLeft {
        // landscapeRight comes out upside down, so flip it 180° around the render size
        let translate = CGAffineTransform(translationX: renderSize.width, y: renderSize.height)
        let rotateFromUpsideDown = translate.rotated(by: CGFloat(Double.pi))
        let transformRotated = finalTransform.concatenating(rotateFromUpsideDown)
        let ty = transformRotated.ty
        var dividedNum = ty / 2
        if dividedNum < 0 {
            dividedNum = 0
        }
        let translation = CGAffineTransform(translationX: 0, y: -dividedNum)
        let new_finalTransform = transformRotated.concatenating(translation)
        transformer.setTransform(new_finalTransform, at: .zero)
    }

    let instruction = AVMutableVideoCompositionInstruction()
    //instruction.backgroundColor = UIColor.yellow.cgColor
    instruction.timeRange = CMTimeRange(start: .zero, duration: asset.duration)
    instruction.layerInstructions = [transformer]

    let videoComposition = AVMutableVideoComposition()
    videoComposition.frameDuration = CMTime(value: 1, timescale: 30)
    videoComposition.renderSize = renderSize
    videoComposition.instructions = [instruction]

    return videoComposition
}
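To actually use the returned composition you hand it to an export session. This is just a minimal sketch, not the exact code from my project; asset and outputURL are assumed to already exist:

// minimal usage sketch (asset and outputURL are assumptions)
let videoComposition = turnHorizontalVideoToPortraitVideo(asset: asset)

if let exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality) {
    exporter.videoComposition = videoComposition   // apply the portrait transform
    exporter.outputURL = outputURL                 // assumed destination URL
    exporter.outputFileType = .mov
    exporter.exportAsynchronously {
        print("export status: \(exporter.status.rawValue)")
    }
}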
I posted the above answer and it should be noted that the answer only works for videos that have an orientation of .landscapeRight.
You must check the following before using the above code:
let t = track.preferredTransform

// LandscapeRight
if (t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0) {
    print(" *** the above answer will only work for landscapeRight *** ")
    transform1 = transform1.concatenating(CGAffineTransform(rotationAngle: CGFloat(90.0 * .pi / 180)))
}

// LandscapeLeft
if (t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0) {
    print(" *** the above answer will NOT work for landscapeLeft *** ")
}
If anyone has a better answer, please post it and I'll check and accept it.
Unbeknownst to me, the video was in the correct position, but the negative black-bar space made it appear misaligned. Setting the .backgroundColor on the AVMutableVideoCompositionInstruction() shows the negative black-bar space in yellow:
instruction.backgroundColor = UIColor.yellow.cgColor
To fix it, I divided finalTransform.ty in half and applied that as a negative y translation, so the code is now:
// ...
let finalTransform = transform3.concatenating(rotateFromUpsideDown)
let ty = finalTransform.ty
var divided = ty/2
if divided < 0 {
    divided = 0
}
let translation = CGAffineTransform(translationX: 0, y: -divided)
let new_finalTransform = finalTransform.concatenating(translation)
let transformer = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
transformer.setTransform(new_finalTransform, at: .zero)
// ...
The fix:
The fix with the negative black bar space in yellow to show how it's now centered:
I got it working for both portrait and landscape.
I tested this answer with videos recorded in portrait, landscape left/right, upside down, front camera, and the back camera. I haven't had any issues. I'm far from a CGAffineTransform expert, so if anyone has a better answer please post it.
Ray Wenderlich's merging code works, but not for videos with different orientations. I used this answer to check the properties of the preferredTransform for the orientation check.
One thing to point out: comments from DonMag told me about the benefit of using 720x1280. The code below merges all of the videos together with a renderSize of 720x1280, which keeps them the same size.
code:
// class property
let renderSize = CGSize(width: 720, height: 1280) // for higher quality use CGSize(width: 1080, height: 1920)
func mergeVideos() {
    let mixComposition = AVMutableComposition()
    let videoCompositionTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
    let audioCompositionTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))

    var count = 0
    var insertTime = CMTime.zero
    var instructions = [AVMutableVideoCompositionInstruction]()

    for videoAsset in arrOfAssets {
        guard let firstTrack = videoAsset.tracks.first, let _ = videoAsset.tracks(withMediaType: .video).first else { continue }

        do {
            // append this asset's video (and audio, if present) to the composition tracks
            try videoCompositionTrack?.insertTimeRange(CMTimeRangeMake(start: .zero, duration: videoAsset.duration), of: videoAsset.tracks(withMediaType: .video)[0], at: insertTime)

            if let audioTrack = videoAsset.tracks(withMediaType: .audio).first {
                try audioCompositionTrack?.insertTimeRange(CMTimeRangeMake(start: .zero, duration: videoAsset.duration), of: audioTrack, at: insertTime)
            }

            let layerInstruction = videoCompositionInstruction(firstTrack, asset: videoAsset, count: count)

            let compositionInstruction = AVMutableVideoCompositionInstruction()
            compositionInstruction.timeRange = CMTimeRangeMake(start: insertTime, duration: videoAsset.duration)
            compositionInstruction.layerInstructions = [layerInstruction]

            instructions.append(compositionInstruction)

            insertTime = CMTimeAdd(insertTime, videoAsset.duration)
            count += 1
        } catch { }
    }

    let videoComposition = AVMutableVideoComposition()
    videoComposition.instructions = instructions
    videoComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
    videoComposition.renderSize = self.renderSize // <--- **** IMPORTANT ****

    // ...
    exporter.videoComposition = videoComposition
}
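The // ... above elides the export setup. For completeness, here's a rough sketch of what it can look like; the outputURL and completion handling are assumptions, not the exact code from my project:

// rough export sketch (outputURL and error handling are assumptions)
guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else { return }
exporter.outputURL = outputURL          // assumed destination, e.g. a file in the temp directory
exporter.outputFileType = .mp4
exporter.videoComposition = videoComposition
exporter.exportAsynchronously {
    DispatchQueue.main.async {
        print("merge finished with status: \(exporter.status.rawValue)")
    }
}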
Most important part of this answer that replaces the RW code:
func videoCompositionInstruction(_ firstTrack: AVAssetTrack, asset: AVAsset, count: Int) -> AVMutableVideoCompositionLayerInstruction {
    let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: firstTrack)

    let assetTrack = asset.tracks(withMediaType: .video)[0]
    let t = assetTrack.fixedPreferredTransform // new transform fix
    let assetInfo = orientationFromTransform(t)

    if assetInfo.isPortrait {
        // scale the portrait track to fill the render width
        let scaleToFitRatio = self.renderSize.width / assetTrack.naturalSize.height
        let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
        var finalTransform = assetTrack.fixedPreferredTransform.concatenating(scaleFactor)

        // From the OP that I used for the portrait part. I haven't tested this, his words:
        // "if video not taking entire screen and leaving some parts black - don't know when actually needed so test"
        if assetInfo.orientation == .rightMirrored || assetInfo.orientation == .leftMirrored {
            finalTransform = finalTransform.translatedBy(x: -t.ty, y: 0)
        }

        instruction.setTransform(finalTransform, at: CMTime.zero)
    } else {
        // scale the landscape track to the render width and center it vertically
        let renderRect = CGRect(x: 0, y: 0, width: self.renderSize.width, height: self.renderSize.height)
        let videoRect = CGRect(origin: .zero, size: assetTrack.naturalSize).applying(assetTrack.fixedPreferredTransform)

        let scale = renderRect.width / videoRect.width
        let transform = CGAffineTransform(scaleX: renderRect.width / videoRect.width, y: (videoRect.height * scale) / assetTrack.naturalSize.height)
        let translate = CGAffineTransform(translationX: .zero, y: ((self.renderSize.height - (videoRect.height * scale))) / 2)

        instruction.setTransform(assetTrack.fixedPreferredTransform.concatenating(transform).concatenating(translate), at: .zero)
    }

    if count == 0 {
        instruction.setOpacity(0.0, at: asset.duration)
    }

    return instruction
}
New orientation check:
func orientationFromTransform(_ transform: CGAffineTransform) -> (orientation: UIImage.Orientation, isPortrait: Bool) {
    var assetOrientation = UIImage.Orientation.up
    var isPortrait = false

    if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
        assetOrientation = .right
        isPortrait = true
    } else if transform.a == 0 && transform.b == 1.0 && transform.c == 1.0 && transform.d == 0 {
        assetOrientation = .rightMirrored
        isPortrait = true
    } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
        assetOrientation = .left
        isPortrait = true
    } else if transform.a == 0 && transform.b == -1.0 && transform.c == -1.0 && transform.d == 0 {
        assetOrientation = .leftMirrored
        isPortrait = true
    } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
        assetOrientation = .up
    } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
        assetOrientation = .down
    }

    return (assetOrientation, isPortrait)
}
preferredTransform fix:
extension AVAssetTrack {
    var fixedPreferredTransform: CGAffineTransform {
        var t = preferredTransform
        switch (t.a, t.b, t.c, t.d) {
        case (1, 0, 0, 1):
            t.tx = 0
            t.ty = 0
        case (1, 0, 0, -1):
            t.tx = 0
            t.ty = naturalSize.height
        case (-1, 0, 0, 1):
            t.tx = naturalSize.width
            t.ty = 0
        case (-1, 0, 0, -1):
            t.tx = naturalSize.width
            t.ty = naturalSize.height
        case (0, -1, 1, 0):
            t.tx = 0
            t.ty = naturalSize.width
        case (0, 1, -1, 0):
            t.tx = naturalSize.height
            t.ty = 0
        case (0, 1, 1, 0):
            t.tx = 0
            t.ty = 0
        case (0, -1, -1, 0):
            t.tx = naturalSize.height
            t.ty = naturalSize.width
        default:
            break
        }
        return t
    }
}
@HeshanY is correct, use the answer from Yodagama on SO. The only thing is he didn't add how to detect the orientation.
if let photoOutputConnection = self.photoOutput.connection(with: .video) {
    // USE the below function HERE
    photoOutputConnection.videoOrientation = videoOrientation()
}

photoOutput.capturePhoto(with: settings, delegate: self)
Function to detect the device orientation:
func videoOrientation() -> AVCaptureVideoOrientation {
    var videoOrientation: AVCaptureVideoOrientation!

    let orientation: UIDeviceOrientation = UIDevice.current.orientation

    switch orientation {
    case .faceUp, .faceDown, .unknown:
        // let interfaceOrientation = UIApplication.shared.statusBarOrientation
        if let interfaceOrientation = UIApplication.shared.windows.first(where: { $0.isKeyWindow })?.windowScene?.interfaceOrientation {
            switch interfaceOrientation {
            case .portrait, .portraitUpsideDown, .unknown:
                videoOrientation = .portrait
            case .landscapeLeft:
                videoOrientation = .landscapeRight
            case .landscapeRight:
                videoOrientation = .landscapeLeft
            @unknown default:
                videoOrientation = .portrait
            }
        }
    case .portrait, .portraitUpsideDown:
        videoOrientation = .portrait
    case .landscapeLeft:
        videoOrientation = .landscapeRight
    case .landscapeRight:
        videoOrientation = .landscapeLeft
    @unknown default:
        videoOrientation = .portrait
    }

    return videoOrientation
}
So it seems that although the code from my question did convert the audio file to a video file, there still wasn't a video track. I know this for a fact because after I got the exporter's videoURL from my question, I tried to add a watermark to it, and the watermark code kept crashing on
let videoTrack = asset.tracks(withMediaType: AVMediaType.video)[0]
Basically, the code from my question converts audio to video but doesn't add a video track.
What I assume happened is that when the Files app reads the file, it knows it's a .mov or .mp4 file and will play the audio track even if the video track is missing.
Conversely, when the Photos app reads the file, it also knows it's a .mov or .mp4 file, but if there isn't a video track, it won't play anything.
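A quick sanity check makes the missing track obvious. This is just a sketch using standard AVFoundation calls, not the code from my project; videoURL is assumed to be the exporter's output URL:

// sketch: check whether the exported file actually contains a video track
let checkAsset = AVURLAsset(url: videoURL)

if let videoTrack = checkAsset.tracks(withMediaType: .video).first {
    print("video track found: \(videoTrack.naturalSize)")
} else {
    print("no video track - Photos won't play this file")
}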
I had to combine these 2 answers to get the audio to play as a video in the Photos app.
1st- I added my app icon as a single image in an array of images to make a video track, using the code from How do I export UIImage array as a movie? answered by scootermg. The code from scootermg's answer is on GitHub here, by dldnh.
2nd- I combined the app icon video that I just made with the audio url from my question, using the code from Swift Merge audio and video files into one video answered by TungFam.
In the mixComposition from TungFam's answer I used the audio url's asset duration for the length of the video.
do {
    try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(start: .zero, duration: aAudioAssetTrack.timeRange.duration),
                                                        of: aVideoAssetTrack,
                                                        at: .zero)

    try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(start: .zero, duration: aAudioAssetTrack.timeRange.duration),
                                                        of: aAudioAssetTrack,
                                                        at: .zero)

    if let aAudioOfVideoAssetTrack = aAudioOfVideoAssetTrack {
        try mutableCompositionAudioOfVideoTrack[0].insertTimeRange(CMTimeRangeMake(start: .zero, duration: aAudioAssetTrack.timeRange.duration),
                                                                   of: aAudioOfVideoAssetTrack,
                                                                   at: .zero)
    }
} catch {
    print(error.localizedDescription)
}
The answer that I thought worked didn't work.
The bottom error is actually www.google.com. I didn't add the first dot after www because it wouldn't let me post it using the entire web address. It said "Please fix the following URL related errors ..."
Apparently the issue is com.apple.developer.icloud-container-environment: Development vs Production.
Any of the iCloud posts that I made under Development aren't accessible while the entitlement is set to Production. On the flip side, any of the iCloud posts that I made under Production aren't accessible while the entitlement is set to Development.
I have no idea why it works this way, but in short the NotAuthenticated issue occurs when trying to view a post that was made while in Development but the value is currently set to Production.
Can anyone explain why it works this way?
If I could delete this I would. It was a weird mistake.
On the developer.apple.com sign-in page, I signed in with the correct email info, but for some reason it says I have two developer accounts. One is current (the one from this question) and one is old and no longer active. For some reason I still have access to that old account, and that's the account I was using to check my iCloud container.
I never realized that old account was still available. It's an odd mistake considering it's no longer active, but I can still access it and it's tied to my current active account.
Wow, who would've thought ...
The issue was that both names began with new, as in newBodyText and newHttpsStr. That's not allowed. I found the answer here by @Silfverstrom: https://coderedirect.com/questions/397168/cfstring-release-message-sent-to-deallocated-instance
From Apple's documentation (https://developer.apple.com/library/ios/releasenotes/objectivec/rn-transitioningtoarc/introduction/introduction.html):
To allow interoperation with manual retain-release code, ARC imposes a constraint on method naming: You cannot give an accessor a name that begins with new. This in turn means that you can't, for example, declare a property whose name begins with new unless you specify a different getter.
Once I changed them from newBodyText and newHttpsStr to updatedBodyText and updatedHttpsStr the crash went away.
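To illustrate the rename, here's a minimal hypothetical sketch; the class and its @objc exposure are assumptions, only the property names come from my code:

// hypothetical class just to show the renamed properties; ARC's rule is that
// an Objective-C-visible accessor name can't begin with "new"
@objc class PostContent: NSObject {
    // @objc var newBodyText: String?   // name begins with "new" - this is what caused the crash
    // @objc var newHttpsStr: String?
    @objc var updatedBodyText: String?  // renamed - crash went away
    @objc var updatedHttpsStr: String?
}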
@eskimo
You're correct. I'm doing something wrong. What I thought was working, upon further testing, isn't working, and I think this may be causing the issue that I'm having in the other question.
I changed my PeerConnection class to follow the code the way that you did, but it's not working correctly. There are 3 issues.
1st issue: the echoData is always sent; however, inside the PeerConnection class, in connection.receive(minimumIncompleteLength.., inside if let data = data, !data.isEmpty { } I call receivedIncoming() again, but the returned echoData is nil.
2nd issue: the only way I can get it to keep listening is if I move receivedIncoming() to the very top of the completionHandler, like below, but the echoData that comes back is still nil:
connection.receive(minimumIncompleteLength.. { ...
    self?.receivedIncoming()

    if let err = error { ... }

    if let data = data, !data.isEmpty {
        // ...
    } else {
        print("=-=-=-=-= Receive data is nil -=-=-=-=-=") // returned echoData is always nil
    }
}
3rd issue: inside the same connection.receive(minimumIncompleteLength.., if there are any problems I remove the connection via delegate?.removeIncomingConnection(connection); the ViewController class that conforms to it will then remove the connection, thus removing the encodedData (a cell shows the data). This is a separate issue relating more to my other question, but the fact that the returned echoData is nil is causing collateral damage.
Coded like your example:
protocol PeerConnectionDelegate: AnyObject {
    func sendOutgoing(_ connection: NWConnection)
    func removeIncomingConnection(_ connection: NWConnection)
    func readDataFromIncomingConnection(_ data: Data, _ connection: NWConnection)
}

final class PeerConnection: Hashable {

    // outgoing init is the same
    // incoming init is the same

    func startIncomingConnection() {
        connection.stateUpdateHandler = { [weak self] (nwConnectionState) in
            switch nwConnectionState {
            case .ready:
                self?.receivedIncoming()
            default:
                break
            }
        }
    }

    func receivedIncoming() {
        guard let connection = connection else { return }

        connection.receive(minimumIncompleteLength: 1, maximumLength: 65535) { [weak self] (data, context, isComplete, error) in

            if let err = error {
                print("received error: \(err.localizedDescription)")
                self?.delegate?.removeIncomingConnection(connection)
                return
            }

            if let data = data, !data.isEmpty {
                let echoString = String(decoding: data, as: UTF8.self)
                if echoString == "12345" {
                    print("echo received, stop listening")
                    return
                }
                self?.delegate?.readDataFromIncomingConnection(data, connection) // vc conforms to this
                self?.receivedIncoming()
            } else {
                print("=-=-=-=-= Receive data is nil -=-=-=-=-=") // returned echoData gets hit here
                self?.delegate?.removeIncomingConnection(connection)
            }
        }
    }
}
extension ViewController: PeerConnectionDelegate {

    // ... other peerConnection Delegate methods

    func readDataFromIncomingConnection(_ data: Data, _ connection: NWConnection) {

        guard let decodedData = NSKeyedUnarchiver.unarchiveTopLevelObjectWithData(data) ... else { return }

        // display decodedData inside a cell

        // first make sure connection isn't already in the array, the arrOfConnections is an ivar
        arrOfConnections.append(connection)

        let echoData = "12345".data(using: String.Encoding.utf8)
        let message = NWProtocolWebSocket.Metadata(opcode: .text)
        let context = NWConnection.ContentContext(identifier: "send", metadata: [message])

        connection.send(content: echoData, contentContext: context, isComplete: true, completion: .contentProcessed({ (error) in

            if let error = error { return }

            print("echoData successfully sent") // this always prints

            guard let echoData = echoData else { return }
            let echoString = String(decoding: echoData, as: UTF8.self)
            if echoString == "12345" {
                print("here is the echoData that was sent: \(echoString)") // always prints
            }
        }))
    }
}
I found this answer - https://stackoverflow.com/a/51319158/4833705 - on Stack Overflow. The workaround is, for whatever view controller is using CallKit, simply make sure the CallKit object doesn't get initialized inside that view controller if the country is China. Like this:
var callObserver: CXCallObserver?

override func viewDidLoad() {
    super.viewDidLoad()

    // only init for countries that aren't China
    if Locale.current.regionCode != "CN" &&
        Locale.current.regionCode != "CHN" {
        callObserver = CXCallObserver()
    }
}
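Since callObserver is now an optional, everything else that touches it has to tolerate nil. A small sketch; the delegate conformance and usage are assumptions, not part of the linked answer:

// sketch: treat callObserver as optional so these lines do nothing when it was never created
callObserver?.setDelegate(self, queue: nil)   // assumes self conforms to CXCallObserverDelegate
let activeCalls = callObserver?.calls ?? []
print("active calls: \(activeCalls.count)")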
I just typed a very descriptive response to the other question that you were helping me out with, and I lost it all because it said that I was unauthorized to respond. That is extremely frustrating.
I wonder if the 3 new problems that I encountered in my other question, based on your last response to that question, are related to what you said above. From what you're saying, my setup is incorrect, and that could be the problem right there.
Should I start a new thread or open a TSI?