Visual isTranslatable: NO; reason: observation failure: noObservations, when trying to play custom compositor video with AVPlayer

I am trying to achieve an animated gradient effect that changes values over time based on the current playback time in seconds. I am using AVPlayer and AVMutableVideoComposition along with a custom compositing instruction and compositor class to generate the effect. I didn't want to load any video file; instead I generate the frames entirely from my own set of instructions, using a Metal compute shader, and make the composition 20 seconds long.

However, when I run the code, I get a frozen player with the gradient applied, and when I try to play the video, I get this warning in the console: Visual isTranslatable: NO; reason: observation failure: noObservations

Here is the screenshot: https://i.sstatic.net/KPi85OdG.png

My entire code:


import AVFoundation
import Metal

/// Custom video compositor that synthesizes every output frame on the GPU
/// with a Metal compute shader. No source tracks are read; each frame is a
/// time-animated gradient rendered into the buffer vended by the render context.
class GradientVideoCompositorTest: NSObject, AVVideoCompositing {
    /// Format accepted for source frames (unused in practice — we have no sources).
    var sourcePixelBufferAttributes: [String: Any]? = [
        kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
    ]

    /// Format of the pixel buffers the render context must vend to us.
    var requiredPixelBufferAttributesForRenderContext: [String: Any] = [
        kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
    ]

    private var renderContext: AVVideoCompositionRenderContext?
    private var metalDevice: MTLDevice!
    private var metalCommandQueue: MTLCommandQueue!
    private var metalLibrary: MTLLibrary!
    private var metalPipeline: MTLComputePipelineState!
    // Created once and reused for every frame. The previous implementation
    // allocated a fresh CVMetalTextureCache per request, which defeats the
    // whole point of the cache and wastes work on every frame.
    private var textureCache: CVMetalTextureCache?

    override init() {
        super.init()
        setupMetal()
    }

    /// One-time Metal setup. A missing device/library/function is a programmer
    /// or bundle error, so failing hard here (matching the original behavior)
    /// is acceptable. Pipeline creation is now inside the guard as well, so a
    /// compile failure cannot silently leave `metalPipeline` nil and crash later.
    func setupMetal() {
        guard let device = MTLCreateSystemDefaultDevice(),
              let queue = device.makeCommandQueue(),
              let library = try? device.makeDefaultLibrary(),
              let function = library.makeFunction(name: "gradientShader"),
              let pipeline = try? device.makeComputePipelineState(function: function) else {
            fatalError("Metal setup failed")
        }

        self.metalDevice = device
        self.metalCommandQueue = queue
        self.metalLibrary = library
        self.metalPipeline = pipeline

        // Build the shared texture cache once, up front.
        CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, device, nil, &textureCache)
    }

    func renderContextChanged(_ newRenderContext: AVVideoCompositionRenderContext) {
        renderContext = newRenderContext
    }

    /// Renders one frame. The GPU work is waited on before `finish(...)` is
    /// called: handing the pixel buffer back while the compute pass is still
    /// in flight would let AVFoundation display an unwritten (frozen/blank)
    /// frame, which matches the symptom described in the post.
    func startRequest(_ request: AVAsynchronousVideoCompositionRequest) {
        guard let outputPixelBuffer = renderContext?.newPixelBuffer(),
              let metalTexture = createMetalTexture(from: outputPixelBuffer) else {
            request.finish(with: NSError(domain: "com.example.gradient", code: -1, userInfo: nil))
            return
        }

        let time = Float(request.compositionTime.seconds)
        renderGradient(to: metalTexture, time: time)

        request.finish(withComposedVideoFrame: outputPixelBuffer)
    }

    /// Wraps the CVPixelBuffer's IOSurface in an MTLTexture via the shared
    /// texture cache, so GPU writes land directly in the buffer we hand back.
    private func createMetalTexture(from pixelBuffer: CVPixelBuffer) -> MTLTexture? {
        var texture: MTLTexture?

        CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly)
        if let cache = createTextureCache(),
           let cvTexture = createCVMetalTexture(from: pixelBuffer, cache: cache) {
            texture = CVMetalTextureGetTexture(cvTexture)
        }
        CVPixelBufferUnlockBaseAddress(pixelBuffer, .readOnly)

        return texture
    }

    /// Encodes and runs the gradient compute pass, blocking until the GPU has
    /// finished so the caller can safely hand the backing buffer to AVFoundation.
    private func renderGradient(to texture: MTLTexture, time: Float) {
        guard let commandBuffer = metalCommandQueue.makeCommandBuffer(),
              let commandEncoder = commandBuffer.makeComputeCommandEncoder() else { return }

        commandEncoder.setComputePipelineState(metalPipeline)
        commandEncoder.setTexture(texture, index: 0)
        var mutableTime = time
        commandEncoder.setBytes(&mutableTime, length: MemoryLayout<Float>.size, index: 0)

        // 16x16 threadgroups, rounded up so the whole texture is covered.
        // (The shader must bounds-check, since the grid can exceed the texture.)
        let threadsPerGroup = MTLSize(width: 16, height: 16, depth: 1)
        let threadGroups = MTLSize(
            width: (texture.width + 15) / 16,
            height: (texture.height + 15) / 16,
            depth: 1
        )

        commandEncoder.dispatchThreadgroups(threadGroups, threadsPerThreadgroup: threadsPerGroup)
        commandEncoder.endEncoding()
        commandBuffer.commit()
        // Without this wait, finish(withComposedVideoFrame:) can run before the
        // compute pass has written a single pixel.
        commandBuffer.waitUntilCompleted()
    }

    /// Returns the shared texture cache (created in setupMetal); falls back to
    /// lazily creating one if setup has not populated it yet.
    private func createTextureCache() -> CVMetalTextureCache? {
        if let cache = textureCache { return cache }
        CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, metalDevice, nil, &textureCache)
        return textureCache
    }

    private func createCVMetalTexture(from pixelBuffer: CVPixelBuffer, cache: CVMetalTextureCache) -> CVMetalTexture? {
        var cvTexture: CVMetalTexture?
        let width = CVPixelBufferGetWidth(pixelBuffer)
        let height = CVPixelBufferGetHeight(pixelBuffer)

        CVMetalTextureCacheCreateTextureFromImage(
            kCFAllocatorDefault,
            cache,
            pixelBuffer,
            nil,
            .bgra8Unorm,
            width,
            height,
            0,        // plane index: BGRA is single-plane
            &cvTexture
        )

        return cvTexture
    }
}

/// Composition instruction covering the gradient's time range.
/// Carries no source tracks: the custom compositor synthesizes every frame,
/// so `requiredSourceTrackIDs` is nil and there is no passthrough track.
class GradientCompositionInstructionTest: NSObject, AVVideoCompositionInstructionProtocol {
    /// The interval of the composition this instruction applies to.
    var timeRange: CMTimeRange
    var enablePostProcessing = true
    var containsTweening = true
    var requiredSourceTrackIDs: [NSValue]? = nil
    var passthroughTrackID = kCMPersistentTrackID_Invalid

    init(timeRange: CMTimeRange) {
        self.timeRange = timeRange
        super.init()
    }
}

/// Builds a source-less video composition driven entirely by the custom
/// compositor: 30 fps frames of `size`, animated over `duration`.
/// - Parameters:
///   - duration: Total length of the gradient clip.
///   - size: Output render size in pixels.
/// - Returns: A video composition whose single instruction spans `duration`.
func createGradientVideoComposition(duration: CMTime, size: CGSize) -> AVMutableVideoComposition {
    // NOTE: the original also built an unused AVMutableComposition here;
    // it was dead code and has been removed.
    let instruction = GradientCompositionInstructionTest(timeRange: CMTimeRange(start: .zero, duration: duration))

    let videoComposition = AVMutableVideoComposition()
    videoComposition.customVideoCompositorClass = GradientVideoCompositorTest.self
    videoComposition.renderSize = size
    videoComposition.frameDuration = CMTime(value: 1, timescale: 30) // 30 FPS
    videoComposition.instructions = [instruction]

    return videoComposition
}
#include <metal_stdlib>
using namespace metal;

// Writes a vertical two-color gradient whose endpoint colors oscillate with
// `time`. One thread per pixel; `time` is seconds into the composition.
kernel void gradientShader(texture2d<float, access::write> output [[texture(0)]],
                           constant float &time [[buffer(0)]],
                           uint2 id [[thread_position_in_grid]]) {
    // The host dispatches ceil(size/16) threadgroups, so the grid can extend
    // past the texture edge; writing out of bounds is undefined behavior.
    if (id.x >= output.get_width() || id.y >= output.get_height()) {
        return;
    }

    float2 uv = float2(id) / float2(output.get_width(), output.get_height());

    // Animated colors based on time
    float3 color1 = float3(sin(time) * 0.8 + 0.1, 0.6, 1.0);
    float3 color2 = float3(0.12, 0.99, cos(time) * 0.9 + 0.3);

    // Linear interpolation for gradient
    float3 gradientColor = mix(color1, color2, uv.y);

    output.write(float4(gradientColor, 1.0), id);
}

Thanks for the detailed post.

Do you get the same results with just the relevant code in a small test project? If so, please share a link to your test project. That'll help us better understand what's going on. If you're not familiar with preparing a test project, take a look at Creating a test project.

I believe if you can provide a complete project showing the issue, you may have better luck with replies to your post.

Albert Pascual
  Worldwide Developer Relations.

Visual isTranslatable: NO; reason: observation failure: noObservations, when trying to play custom compositor video with AVPlayer
 
 
Q