Good day, everyone. I have a question.
I get a color frame from the iPad camera and render it in an SCNView using Metal. The frame data's pixel format is kCVPixelFormatType_420YpCbCr8BiPlanarFullRange.
I can't change the color frame's pixel format because it comes from a private SDK.
I receive a CMSampleBuffer every frame, and a Metal compute shader converts it to an RGBA texture. The strange part is that the Metal texture converted from YCbCr ends up darker or brighter than the original image.
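For what it's worth, this is roughly how I can check what the buffer reports about itself (a throwaway sketch; the function name is mine, and the YCbCr matrix attachment may simply be absent on buffers coming from the private SDK):

import CoreVideo

// Throwaway sketch: log what the pixel buffer says about its own format.
func logBufferColorInfo(_ buffer: CVPixelBuffer) {
    let format = CVPixelBufferGetPixelFormatType(buffer)
    print("is 420f full range:",
          format == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)

    if let matrix = CVBufferGetAttachment(buffer, kCVImageBufferYCbCrMatrixKey, nil)?
            .takeUnretainedValue() {
        // Typically kCVImageBufferYCbCrMatrix_ITU_R_601_4 or ..._ITU_R_709_2
        print("YCbCr matrix attachment:", matrix)
    } else {
        print("no YCbCr matrix attachment")
    }
}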
This is my Metal shader code:
// https://github.com/google/filament/blob/main/filament/backend/src/metal/MetalExternalImage.mm
#include <metal_stdlib>
#include <simd/simd.h>

using namespace metal;

kernel void
ycbcrToRgb(texture2d<half, access::read> inYTexture [[texture(0)]],
           texture2d<half, access::read> inCbCrTexture [[texture(1)]],
           texture2d<half, access::write> outTexture [[texture(2)]],
           uint2 gid [[thread_position_in_grid]])
{
    if (gid.x >= outTexture.get_width() || gid.y >= outTexture.get_height()) {
        return;
    }

    half luminance = inYTexture.read(gid).r;
    // The chroma plane is half the size of the luminance plane.
    half2 color = inCbCrTexture.read(gid / 2).rg;
    half4 ycbcr = half4(luminance, color, 1.0);

    // Column-major full-range BT.601 transform; the last column subtracts
    // the 0.5 offset from Cb and Cr.
    const half4x4 ycbcrToRGBTransform = half4x4(
        half4(+1.0000f, +1.0000f, +1.0000f, +0.0000f),
        half4(+0.0000f, -0.3441f, +1.7720f, +0.0000f),
        half4(+1.4020f, -0.7141f, +0.0000f, +0.0000f),
        half4(-0.7010f, +0.5291f, -0.8860f, +1.0000f)
    );

    outTexture.write(ycbcrToRGBTransform * ycbcr, gid);
}
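As far as I understand, that matrix is the full-range BT.601 conversion with Cb and Cr centered at 0.5; written out per pixel it should be equivalent to this (a throwaway Swift sketch I use only to check the math, the function name is made up):

import simd

// Same full-range BT.601 math as the shader matrix, one pixel at a time.
// Sanity-check helper only, not part of the real pipeline.
func bt601FullRangeToRGB(y: Float, cb: Float, cr: Float) -> SIMD3<Float> {
    let r = y + 1.4020 * (cr - 0.5)
    let g = y - 0.3441 * (cb - 0.5) - 0.7141 * (cr - 0.5)
    let b = y + 1.7720 * (cb - 0.5)
    return SIMD3<Float>(r, g, b)
}

// Neutral chroma should pass luminance through unchanged:
// bt601FullRangeToRGB(y: 0.5, cb: 0.5, cr: 0.5) == SIMD3<Float>(0.5, 0.5, 0.5)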
And this is my render code:
func render(colorFrame: STColorFrame) {
    // var frame = CGImage.create(sampleBuffer: colorFrame.sampleBuffer)!
    let buffer = CMSampleBufferGetImageBuffer(colorFrame.sampleBuffer)!
    convertVideoFrameToImage1(buffer)
    scnview.scene?.background.contents = outTexture
}
private func convertVideoFrameToImage1(_ buffer: CVImageBuffer) {
    // buffer format: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
    CVPixelBufferLockBaseAddress(buffer, CVPixelBufferLockFlags(rawValue: 0))

    let commandQueue = device!.makeCommandQueue()!
    let library = device!.makeDefaultLibrary()!
    let commandBuffer = commandQueue.makeCommandBuffer()!
    let encoder = commandBuffer.makeComputeCommandEncoder()!
    encoder.setComputePipelineState(
        try! device!.makeComputePipelineState(function:
            library.makeFunction(name: "ycbcrToRgb")!))

    // Input: extract the Y and CbCr planes as separate textures
    // (createTexture is my own helper; see the sketch after this function).
    // https://stackoverflow.com/questions/58175811/how-to-convert-an-rgba-texture-to-y-and-cbcr-textures-in-metal
    let imageTextureY = createTexture(fromPixelBuffer: buffer,
                                      pixelFormat: .r8Unorm, planeIndex: 0)!
    let imageTextureCbCr = createTexture(fromPixelBuffer: buffer,
                                         pixelFormat: .rg8Unorm, planeIndex: 1)!
    let width = CVPixelBufferGetWidth(buffer)
    let height = CVPixelBufferGetHeight(buffer)
    encoder.setTexture(imageTextureY, index: 0)
    encoder.setTexture(imageTextureCbCr, index: 1)

    // Output: one RGBA texture, created lazily and reused across frames.
    if outTexture == nil {
        let descriptor = MTLTextureDescriptor()
        descriptor.textureType = .type2D
        descriptor.pixelFormat = .rgba32Float
        descriptor.width = width
        descriptor.height = height
        descriptor.usage = [.shaderWrite, .shaderRead]
        outTexture = device!.makeTexture(descriptor: descriptor)
    }
    encoder.setTexture(outTexture, index: 2)

    // Dispatch one thread per output pixel; the shader's bounds check covers
    // the partial threadgroups at the right and bottom edges.
    let threadsPerThreadgroup = MTLSize(width: 16, height: 16, depth: 1)
    let numThreadgroups = MTLSize(width: (width + 15) / 16,
                                  height: (height + 15) / 16, depth: 1)
    encoder.dispatchThreadgroups(numThreadgroups,
                                 threadsPerThreadgroup: threadsPerThreadgroup)
    encoder.endEncoding()

    commandBuffer.commit()
    commandBuffer.waitUntilCompleted()
    CVPixelBufferUnlockBaseAddress(buffer, CVPixelBufferLockFlags(rawValue: 0))
}
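createTexture(fromPixelBuffer:pixelFormat:planeIndex:) isn't shown above; a helper like it is usually written around a CVMetalTextureCache, roughly like this sketch (simplified, not my exact code; the cache would be created once from the same device):

import CoreVideo
import Metal

// Rough sketch of a CVMetalTextureCache-based plane extractor,
// shown only so the calls above make sense.
var textureCache: CVMetalTextureCache?

func makeTextureCache(device: MTLDevice) {
    CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, device, nil, &textureCache)
}

func createTexture(fromPixelBuffer pixelBuffer: CVPixelBuffer,
                   pixelFormat: MTLPixelFormat,
                   planeIndex: Int) -> MTLTexture? {
    guard let cache = textureCache else { return nil }
    let width = CVPixelBufferGetWidthOfPlane(pixelBuffer, planeIndex)
    let height = CVPixelBufferGetHeightOfPlane(pixelBuffer, planeIndex)
    var cvTexture: CVMetalTexture?
    let status = CVMetalTextureCacheCreateTextureFromImage(
        kCFAllocatorDefault, cache, pixelBuffer, nil,
        pixelFormat, width, height, planeIndex, &cvTexture)
    guard status == kCVReturnSuccess, let cvTexture = cvTexture else { return nil }
    // The returned MTLTexture shares storage with the pixel buffer plane.
    return CVMetalTextureGetTexture(cvTexture)
}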
The original image looks like this,
but it renders in the SCNView like this.
They are a little bit different... I've been thinking about this for three days. Can you help me?