AVCaptureVideoDataOutputSampleBufferDelegate
Called each time the capture output produces a new video frame:

- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection;
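A minimal setup sketch (assumed; not shown in the original post) that registers the delegate and requests 32BGRA frames. The channel swap in convertToMLImage below relies on this pixel format, and the queue name is arbitrary:

#import <AVFoundation/AVFoundation.h>

- (void)configureVideoOutputForSession:(AVCaptureSession *)session {
    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
    // Request BGRA frames; the default delivery format is biplanar YCbCr.
    output.videoSettings = @{
        (__bridge id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)
    };
    // Drop late frames instead of queueing them when processing falls behind.
    output.alwaysDiscardsLateVideoFrames = YES;
    // Delegate callbacks arrive on this serial queue, not the main thread.
    dispatch_queue_t queue = dispatch_queue_create("videoFrameQueue", DISPATCH_QUEUE_SERIAL);
    [output setSampleBufferDelegate:self queue:queue];
    if ([session canAddOutput:output]) {
        [session addOutput:output];
    }
}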
The conversion runs CMSampleBufferRef -> CVImageBufferRef -> MLRImage:
- (MLRImage *)convertToMLImage {
    UIImageOrientation imageOrientation = [self imageOrientationFromSampleBuffer];
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(self.sampleBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    // Rows may be padded for alignment, so bytesPerRow can exceed width * 4.
    size_t padding = bytesPerRow - width * 4;

    Byte *bytes = (Byte *)malloc(sizeof(Byte) * width * height * 4);
    if (!bytes) {
        return nil;
    }

    CVPixelBufferLockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t offset = 0;
    for (size_t i = 0; i < height; i++) {
        for (size_t j = 0; j < width; j++) {
            size_t dstPixelOffset = (i * width + j) * 4;          // tightly packed RGBA
            size_t srcPixelOffset = (i * width + j) * 4 + offset; // row-padded BGRA
            // Swap channel order: BGRA (camera buffer) -> RGBA (MLRImage input).
            bytes[dstPixelOffset + 0] = baseAddress[srcPixelOffset + 2]; // R
            bytes[dstPixelOffset + 1] = baseAddress[srcPixelOffset + 1]; // G
            bytes[dstPixelOffset + 2] = baseAddress[srcPixelOffset + 0]; // B
            bytes[dstPixelOffset + 3] = baseAddress[srcPixelOffset + 3]; // A
        }
        offset += padding; // skip the per-row padding in the source buffer
    }
    CVPixelBufferUnlockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);

    // NSMutableData takes ownership of `bytes` and frees it on deallocation.
    NSMutableData *rgbaData = [NSMutableData dataWithBytesNoCopy:bytes length:width * height * 4];
    return [MLRImage createImageWithRawData:rgbaData
                               rawImageSize:CGSizeMake(width, height)
                           imageOrientation:imageOrientation];
}
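The imageOrientationFromSampleBuffer helper used above is not shown here; a minimal sketch, assuming a back-facing camera, could map the current device orientation like this:

- (UIImageOrientation)imageOrientationFromSampleBuffer {
    // Assumed implementation: the sensor delivers landscape frames, so the
    // device orientation decides how the consumer should rotate them.
    switch ([[UIDevice currentDevice] orientation]) {
        case UIDeviceOrientationPortrait:           return UIImageOrientationRight;
        case UIDeviceOrientationPortraitUpsideDown: return UIImageOrientationLeft;
        case UIDeviceOrientationLandscapeLeft:      return UIImageOrientationUp;
        case UIDeviceOrientationLandscapeRight:     return UIImageOrientationDown;
        default:                                    return UIImageOrientationRight;
    }
}

A front-facing camera would need mirrored mappings.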