Mac OS 使用Metal渲染NV12、YUV420、CMSampleBufferRef视频

需求 · MTKView初始化 · 摄像头采集CMSampleBufferRef · 渲染CMSampleBufferRef · yuv420转NV12 · 渲染NV12

END

资料较少,整合后仅作为记录学习使用。

需求

yuv420原始视频数据使用metal渲染。

MTKView初始化

// ---- Metal rendering state --------------------------------------------------
// NOTE(review): the `id` declarations below are untyped; id<MTLDevice>,
// id<MTLCommandQueue>, id<MTLRenderPipelineState>, id<MTLBuffer> and
// id<MTLTexture> would give compile-time checking of the message sends.

vector_uint2 viewportSize; // current drawable size, fed to the viewport each frame

MTKView *mMtkview; // the Metal-backed view rendered into (becomes self.view)

id mDevice; // MTLDevice taken from the MTKView

id mCmdQueue; // command queue, created once and reused for every frame

id mPipeline; // render pipeline state built from verfunc/fragfunc

id mBuffer; // vertex buffer bound at vertex index 0

id mTexture; // single BGRA texture for the (commented-out) RGB path

CVMetalTextureCacheRef mTextureCache; // zero-copy CVPixelBuffer -> MTLTexture cache

NSUInteger vertexCount; // number of vertices drawn from mBuffer

id mTextureY; // NV12 luma plane texture (r8Unorm, fragment index 0)

id mTextureUV; // NV12 interleaved chroma plane texture (rg8Unorm, fragment index 1)

id mConvertMatrix; // MTLBuffer holding the YUV->RGB matrix + offset (HQHConvertMatrix)

setMetal

// setMetal — create the MTKView with the system default device and prepare
// the texture cache used to wrap captured pixel buffers.
mMtkview = [[MTKView alloc] initWithFrame:self.view.frame device:MTLCreateSystemDefaultDevice()];

mDevice = mMtkview.device;

// Replace the controller's view so the MTKView fills the window.
self.view = mMtkview;

mMtkview.delegate = self;

mCmdQueue = [mDevice newCommandQueue];

// Cache that wraps CVPixelBuffer planes as Metal textures without copying.
// NOTE(review): the CVReturn result is discarded — check for kCVReturnSuccess.
CVMetalTextureCacheCreate(NULL, NULL, mDevice, NULL, &mTextureCache);

setPipeline

// setPipeline — compile the render pipeline from the default .metallib.
id library = [mDevice newDefaultLibrary];

id vertexfunc = [library newFunctionWithName:@"verfunc"];

id fragfunc = [library newFunctionWithName:@"fragfunc"];

MTLRenderPipelineDescriptor *renderdes = [MTLRenderPipelineDescriptor new];

renderdes.vertexFunction = vertexfunc;

renderdes.fragmentFunction = fragfunc;

// The color attachment format must match the view's, or encoding will fail.
renderdes.colorAttachments[0].pixelFormat = mMtkview.colorPixelFormat;

// NOTE(review): error:nil hides pipeline-compilation failures — pass an NSError **.
mPipeline = [mDevice newRenderPipelineStateWithDescriptor:renderdes error:nil];

setupMatrix

// setupMatrix — upload the YUV -> RGB conversion constants to a shared buffer
// the fragment shader reads at HQHFragmentInputindexMatrix.
matrix_float3x3 kColorConversion601FullRangeMatrix = (matrix_float3x3){

(simd_float3){1.0, 1.0, 1.0},

(simd_float3){0.0, -0.343, 1.765},

(simd_float3){1.4, -0.711, 0.0},

};

// Per-channel offsets applied to (Y, Cb, Cr) before the matrix multiply.
// NOTE(review): -(16/255) is the BT.601 *video-range* luma offset; a true
// full-range matrix would use 0.0 here — confirm against the capture format
// (the capture output below requests BiPlanarFullRange).
vector_float3 kColorConversion601FullRangeOffset = (vector_float3){ -(16.0/255.0), -0.5, -0.5};

HQHConvertMatrix converMatrix;

converMatrix.matrix = kColorConversion601FullRangeMatrix;

converMatrix.offset = kColorConversion601FullRangeOffset;

// Shared storage: CPU writes once here, GPU reads it every frame.
mConvertMatrix = [mDevice newBufferWithBytes:&converMatrix length:sizeof(HQHConvertMatrix) options:MTLResourceStorageModeShared];

MTKViewDelegate

// MTKViewDelegate: remember the new drawable size so drawInMTKView: can set
// a viewport that covers the whole drawable.
- (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size {
    vector_uint2 updatedSize;
    updatedSize.x = size.width;
    updatedSize.y = size.height;
    viewportSize = updatedSize;
}

// MTKViewDelegate: encode one frame. Draws the NV12 textures through the
// YUV->RGB pipeline; does nothing until both planes have been uploaded.
- (void)drawInMTKView:(MTKView *)view {
    // Skip the frame until setTexture:/renderYUV420P: has provided both planes.
    if (!(mTextureY && mTextureUV)) {
        return;
    }

    MTLRenderPassDescriptor *passdes = view.currentRenderPassDescriptor;
    id<CAMetalDrawable> drawable = view.currentDrawable;
    // The view may have no drawable this frame (e.g. while resizing).
    // Fix: the original presented view.currentDrawable without a nil check.
    if (passdes == nil || drawable == nil) {
        return;
    }

    id cmdBuffer = [mCmdQueue commandBuffer];
    id cmdEncoder = [cmdBuffer renderCommandEncoderWithDescriptor:passdes];

    // Fix: znear/zfar must lie in [0, 1] per the MTLViewport documentation;
    // the original used -1.0 for znear.
    [cmdEncoder setViewport:(MTLViewport){0.0, 0.0, viewportSize.x, viewportSize.y, 0.0, 1.0}];
    [cmdEncoder setRenderPipelineState:mPipeline];

    // Vertex data (positions + texture coordinates) at buffer index 0.
    [cmdEncoder setVertexBuffer:mBuffer offset:0 atIndex:0];

    // Fragment inputs: luma plane, interleaved CbCr plane, conversion constants.
    [cmdEncoder setFragmentTexture:mTextureY atIndex:0];
    [cmdEncoder setFragmentTexture:mTextureUV atIndex:1];
    [cmdEncoder setFragmentBuffer:mConvertMatrix offset:0 atIndex:HQHFragmentInputindexMatrix];

    [cmdEncoder drawPrimitives:MTLPrimitiveTypeTriangle vertexStart:0 vertexCount:vertexCount];

    /**
     RGB path (single BGRA texture), kept for reference:

     [cmdEncoder setFragmentTexture:mTexture atIndex:0];
     [cmdEncoder drawPrimitives:MTLPrimitiveTypeTriangleStrip vertexStart:0 vertexCount:vertexCount];
     */

    [cmdEncoder endEncoding];
    [cmdBuffer presentDrawable:drawable];
    [cmdBuffer commit];
}

摄像头采集CMSampleBufferRef

// ---- Camera capture state ---------------------------------------------------

AVCaptureSession *mCaptureSession; // owns the capture graph (input -> output)

AVCaptureDeviceInput *mCaptureInput; // camera device input

AVCaptureVideoDataOutput *mCaptureOutput; // delivers CMSampleBufferRef frames to the delegate

setupSession

// setupSession — build a 720p capture pipeline delivering NV12 frames and
// start it running.
mCaptureSession = [[AVCaptureSession alloc] init];

mCaptureSession.sessionPreset = AVCaptureSessionPreset1280x720;

if (@available(macOS 10.15, *)) {

// NOTE(review): AVCaptureDevicePositionBack is unusual on macOS (built-in
// cameras report Unspecified) — this may return nil; verify on target hardware.
// error:nil also hides input-creation failures.
AVCaptureDevice *cameraDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionBack];

mCaptureInput = [[AVCaptureDeviceInput alloc] initWithDevice:cameraDevice error:nil];

} else {

// Pre-10.15 fallback; devicesWithMediaType: is deprecated but functional.
AVCaptureDevice *cameraDevice = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo].firstObject;

mCaptureInput = [[AVCaptureDeviceInput alloc] initWithDevice:cameraDevice error:nil];

}

if ([mCaptureSession canAddInput:mCaptureInput]) {

[mCaptureSession addInput:mCaptureInput];

}

mCaptureOutput = [[AVCaptureVideoDataOutput alloc] init];

// Keep every frame; set to YES if rendering cannot keep up with capture.
[mCaptureOutput setAlwaysDiscardsLateVideoFrames:NO];

// Frames arrive on this private serial queue, NOT the main thread.
[mCaptureOutput setSampleBufferDelegate:self queue:dispatch_queue_create("bd", DISPATCH_QUEUE_SERIAL)];

if ([mCaptureSession canAddOutput:mCaptureOutput]) {

[mCaptureSession addOutput:mCaptureOutput];

}

NSLog(@"out = %@ ary = %@",mCaptureOutput,[mCaptureOutput availableVideoCodecTypes]);

// Request NV12 (biplanar, full range) so buffers map onto the Y/UV textures.
[mCaptureOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];

AVCaptureConnection *connection = [mCaptureOutput connectionWithMediaType:AVMediaTypeVideo];

[connection setVideoOrientation:AVCaptureVideoOrientationPortrait];

[mCaptureSession startRunning];

AVCaptureVideoDataOutputSampleBufferDelegate

// AVCaptureVideoDataOutputSampleBufferDelegate — invoked on the "bd" serial
// queue for every captured frame.
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{

NSLog(@"data"); // per-frame log; remove or gate behind a debug macro in production

// Enable to upload the frame's Y/CbCr planes for rendering:
// [self setTexture:sampleBuffer];

}

渲染CMSampleBufferRef

// Wraps the Y and CbCr planes of a captured NV12 sample buffer as Metal
// textures (via the texture cache, zero-copy) for the YUV render path.
// Runs on the capture queue; drawInMTKView: picks the textures up later.
- (void)setTexture:(CMSampleBufferRef)samplebuffer {
    CVPixelBufferRef pixelbuffer = CMSampleBufferGetImageBuffer(samplebuffer);
    // Fix: guard against sample buffers that carry no image data.
    if (pixelbuffer == NULL) {
        return;
    }

    // Plane 0: full-resolution luma, one byte per pixel -> r8Unorm.
    {
        size_t width = CVPixelBufferGetWidthOfPlane(pixelbuffer, 0);
        size_t height = CVPixelBufferGetHeightOfPlane(pixelbuffer, 0);
        CVMetalTextureRef texture = NULL;
        CVReturn status = CVMetalTextureCacheCreateTextureFromImage(NULL, mTextureCache, pixelbuffer, NULL, MTLPixelFormatR8Unorm, width, height, 0, &texture);
        if (status == kCVReturnSuccess) {
            mTextureY = CVMetalTextureGetTexture(texture);
            CFRelease(texture); // the MTLTexture keeps its own reference
        }
    }

    // Plane 1: half-resolution interleaved CbCr, two bytes per pixel -> rg8Unorm.
    {
        size_t width = CVPixelBufferGetWidthOfPlane(pixelbuffer, 1);
        size_t height = CVPixelBufferGetHeightOfPlane(pixelbuffer, 1);
        CVMetalTextureRef texture = NULL;
        CVReturn status = CVMetalTextureCacheCreateTextureFromImage(NULL, mTextureCache, pixelbuffer, NULL, MTLPixelFormatRG8Unorm, width, height, 1, &texture);
        if (status == kCVReturnSuccess) {
            mTextureUV = CVMetalTextureGetTexture(texture);
            CFRelease(texture);
        }
    }

    // Match the drawable size to the frame so the viewport covers it exactly.
    // Fix: the original tried to create a BGRA texture from this biplanar
    // buffer first — that always fails for NV12, so drawableSize was never
    // actually updated (and mTexture was never set on this path).
    size_t width = CVPixelBufferGetWidth(pixelbuffer);
    size_t height = CVPixelBufferGetHeight(pixelbuffer);
    mMtkview.drawableSize = CGSizeMake(width, height);
}

yuv420转NV12

// Repack planar I420 (Y plane, then U plane, then V plane) into NV12
// (Y plane, then interleaved CbCr plane). Dst must hold Width*Height*3/2
// bytes; Width and Height are assumed even (4:2:0 subsampling).
void YUV420PtoNV12(unsigned char *Src, unsigned char* Dst,int Width,int Height){
    const int lumaSize = Width * Height;
    const int chromaCount = lumaSize / 4; // samples per chroma plane

    // Luma is laid out identically in both formats — copy it wholesale.
    memcpy(Dst, Src, lumaSize);

    const unsigned char *uPlane = Src + lumaSize;
    const unsigned char *vPlane = uPlane + chromaCount;
    unsigned char *uvOut = Dst + lumaSize;

    // Interleave U and V samples pairwise: U0 V0 U1 V1 ...
    for (int i = 0; i < chromaCount; i++) {
        uvOut[2 * i]     = uPlane[i];
        uvOut[2 * i + 1] = vPlane[i];
    }
}

渲染NV12

// Converts an I420 frame to NV12, wraps it in an IOSurface-backed
// CVPixelBuffer, and uploads the Y / CbCr planes as Metal textures for the
// YUV render path. Assumes even frame dimensions (4:2:0).
- (void)renderYUV420P:(EVFrame *)frame {
    int w = frame.vsize.width;
    int h = frame.vsize.height;
    unsigned char *i420 = frame.data;

    // NV12 needs exactly w*h luma bytes + w*h/2 interleaved chroma bytes.
    // Fix: the original over-allocated w*h*10.
    size_t lumaSize = (size_t)w * (size_t)h;
    size_t chromaSize = lumaSize / 2;
    unsigned char *buffer = malloc(lumaSize + chromaSize);
    if (buffer == NULL) {
        return;
    }
    YUV420PtoNV12(i420, buffer, w, h);

    // IOSurface backing is required for CVMetalTextureCache interop.
    NSDictionary *pixelAttributes = @{(NSString *)kCVPixelBufferIOSurfacePropertiesKey : @{}};
    CVPixelBufferRef pixelBuffer = NULL;
    // NOTE(review): VideoRange here vs the FullRange conversion offset set up
    // in setupMatrix — confirm which range the source frames actually use.
    CVReturn result = CVPixelBufferCreate(kCFAllocatorDefault, w, h, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, (__bridge CFDictionaryRef)(pixelAttributes), &pixelBuffer);
    // Fix: the original locked the buffer BEFORE checking result, which
    // dereferences NULL when creation fails (and leaked `buffer`).
    if (result != kCVReturnSuccess || pixelBuffer == NULL) {
        NSLog(@"Unable to create cvpixelbuffer %d", result);
        free(buffer);
        return;
    }

    CVPixelBufferLockBaseAddress(pixelBuffer, 0);

    // Copy packed NV12 bytes into the buffer's planes row by row.
    // Fix: the original memcpy'd whole planes, ignoring each plane's
    // bytes-per-row, which corrupts the image when rows are padded.
    uint8_t *yDest = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
    size_t yStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
    for (int row = 0; row < h; row++) {
        memcpy(yDest + (size_t)row * yStride, buffer + (size_t)row * w, w);
    }

    uint8_t *uvDest = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
    size_t uvStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
    const unsigned char *uvSrc = buffer + lumaSize;
    for (int row = 0; row < h / 2; row++) {
        // CbCr plane: w bytes per row (w/2 interleaved sample pairs).
        memcpy(uvDest + (size_t)row * uvStride, uvSrc + (size_t)row * w, w);
    }

    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);

    // Plane 0: luma -> r8Unorm texture.
    {
        size_t width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0);
        size_t height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
        CVMetalTextureRef texture = NULL;
        CVReturn status = CVMetalTextureCacheCreateTextureFromImage(NULL, mTextureCache, pixelBuffer, NULL, MTLPixelFormatR8Unorm, width, height, 0, &texture);
        if (status == kCVReturnSuccess) {
            mTextureY = CVMetalTextureGetTexture(texture);
            CFRelease(texture);
        }
    }

    // Plane 1: interleaved CbCr -> rg8Unorm texture.
    {
        size_t width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 1);
        size_t height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1);
        CVMetalTextureRef texture = NULL;
        CVReturn status = CVMetalTextureCacheCreateTextureFromImage(NULL, mTextureCache, pixelBuffer, NULL, MTLPixelFormatRG8Unorm, width, height, 1, &texture);
        if (status == kCVReturnSuccess) {
            mTextureUV = CVMetalTextureGetTexture(texture);
            CFRelease(texture);
        }
    }

    // Match the drawable size to the frame.
    // Fix: the original tried to wrap this biplanar buffer as a BGRA texture
    // first — that always fails for NV12, so drawableSize was never updated.
    mMtkview.drawableSize = CGSizeMake(w, h);

    CVPixelBufferRelease(pixelBuffer);
    free(buffer);
}

END

metal确实比OpenGL简单好用很多

查看原文