Here's what I'm doing....it's probably wrong, but it's all I have:
Goal: To have more control of bluring whats behind the view
A video is playing with AVPlayer on a viewcontroller
Once every 1/30th of a second I capture a screenshot of the current frame with AVAssetImageGenerator
This screenshot is then passed to my function to be blurred
On a different view on a different viewcontroller which is presented over the top. I try to show the image with the following class:
import UIKit
import Metal
import MetalKit
let queue = dispatch_queue_create("com.metalQueue", DISPATCH_QUEUE_SERIAL)
class BlurView : UIImageView
{
var device:MTLDevice!
var commandQueue:MTLCommandQueue!
var defaultLibrary:MTLLibrary!
var commandBuffer:MTLCommandBuffer!
var computeCommandEncoder:MTLComputeCommandEncoder!
var pipelineState:MTLComputePipelineState?
var displayLink:CADisplayLink!
let bytesPerPixel: Int = 4
var outTexture: MTLTexture!
@IBOutlet weak var videoViewController:VideoViewController!
deinit
{
DLog("deinit \(String(self.dynamicType))")
if displayLink != nil
{
displayLink.invalidate()
}
}
required init?(coder aDecoder: NSCoder) {
super.init(coder: aDecoder)
setup()
}
func setup()
{
dispatch_async(queue) { () -> Void in
/
self.device = MTLCreateSystemDefaultDevice()!
/
self.commandQueue = self.device.newCommandQueue()
/
self.defaultLibrary = self.device.newDefaultLibrary()!
/
let function:MTLFunction = self.defaultLibrary.newFunctionWithName("pixelate")!
do
{
self.pipelineState = try self.device.newComputePipelineStateWithFunction(function)
}
catch
{
fatalError("Unable to create pipeline state")
}
}
}
override func didMoveToSuperview() {
super.didMoveToSuperview()
if let _ = self.superview
{
self.displayLink = CADisplayLink(target: self, selector: #selector(BlurView.displayLinkDidFire(_:)))
self.displayLink.addToRunLoop(NSRunLoop.mainRunLoop(), forMode: NSDefaultRunLoopMode)
} else {
self.displayLink.invalidate()
self.displayLink = nil
}
}
func displayLinkDidFire(displayLink:CADisplayLink)
{
autoreleasepool {
self.apply()
}
}
func apply()
{
dispatch_async(queue) { () -> Void in
// img from screenshot
guard let img = self.videoViewController?.currentImage else {
return
}
let inTexture:MTLTexture = self.textureFromImage(img)
let threadGroupCount = MTLSizeMake(16, 16, 1)
let threadGroups = MTLSizeMake(inTexture.width / threadGroupCount.width, inTexture.height / threadGroupCount.height, 1)
let commandBuffer = self.commandQueue.commandBuffer()
let commandEncoder = commandBuffer.computeCommandEncoder()
commandEncoder.setComputePipelineState(self.pipelineState!)
commandEncoder.setTexture(inTexture, atIndex: 0)
commandEncoder.setTexture(self.outTexture, atIndex: 1)
var pixelSize: UInt = 60
let buffer = self.device.newBufferWithBytes(&pixelSize, length: sizeof(UInt), options: [MTLResourceOptions.StorageModeShared])
commandEncoder.setBuffer(buffer, offset: 0, atIndex: 0)
commandEncoder.dispatchThreadgroups(threadGroups, threadsPerThreadgroup: threadGroupCount)
commandEncoder.endEncoding()
commandBuffer.commit()
commandBuffer.waitUntilCompleted()
let finalResult = self.imageFromTexture(self.outTexture)
dispatch_async(dispatch_get_main_queue(), { () -> Void in
// Called, but doesn't seem to update with correct image.
self.image = finalResult
})
DLog("should show: \(NSDate())")
}
}
func textureFromImage(image: UIImage) -> MTLTexture {
guard let cgImage = image.CGImage else {
fatalError("Can't open image \(image)")
}
let textureLoader = MTKTextureLoader(device: self.device)
do {
let textureOut = try textureLoader.newTextureWithCGImage(cgImage, options: nil)
let textureDescriptor = MTLTextureDescriptor.texture2DDescriptorWithPixelFormat(textureOut.pixelFormat, width: textureOut.width, height: textureOut.height, mipmapped: false)
outTexture = self.device.newTextureWithDescriptor(textureDescriptor)
return textureOut
}
catch {
fatalError("Can't load texture")
}
}
func imageFromTexture(texture: MTLTexture) -> UIImage {
/
let imageByteCount = texture.width * texture.height * bytesPerPixel
/
let bytesPerRow = texture.width * bytesPerPixel
/
var src = [UInt8](count: Int(imageByteCount), repeatedValue: 0)
/
let region = MTLRegionMake2D(0, 0, texture.width, texture.height)
texture.getBytes(&src, bytesPerRow: bytesPerRow, fromRegion: region, mipmapLevel: 0)
/
let bitmapInfo = CGBitmapInfo(rawValue: (CGBitmapInfo.ByteOrder32Big.rawValue | CGImageAlphaInfo.PremultipliedLast.rawValue))
let bitsPerComponent = 8
let colorSpace = CGColorSpaceCreateDeviceRGB()
let context = CGBitmapContextCreate(&src, texture.width, texture.height, bitsPerComponent, bytesPerRow, colorSpace, bitmapInfo.rawValue);
/
let dstImage = CGBitmapContextCreateImage(context);
/
return UIImage(CGImage: dstImage!, scale: 0.0, orientation: UIImageOrientation.DownMirrored)
}
}
Why am I not using UIVisualEffectView? - The designer is not happy with it.