RGB2YUV

If you work with audio and video, YUV and RGB are certainly familiar. This post records how to convert an RGB-format pixelBuffer, after it has been processed by OpenGL, into a YUV-format buffer for video export.

DIY

Doing the matrix math and the YUV storage layout by hand is a good way to deepen your understanding of YUV; the reverse conversion, or other samplings such as YUV 4:2:2 and 4:4:4, works along the same lines.
So it is worth walking through once: DO IT YOURSELF.

The main things to get right are memory alignment and the YUV 4:2:0 sampling/storage layout; the rest is not complicated.
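For reference, here is a minimal sketch of the NV12 (YUV420SP) layout this conversion targets; the variable names are illustrative only and not part of the original code:

// NV12 / YUV420SP layout for a w x h frame (sketch):
//   plane 0: Y  - one byte per pixel, h rows, each row at least w bytes wide
//   plane 1: UV - interleaved U,V pairs, one pair per 2x2 block of pixels,
//                 h/2 rows, each row at least w bytes wide
int w = 1920, h = 1080;             // example frame size
size_t ySize  = (size_t)w * h;      // number of luma samples
size_t uvSize = (size_t)w * h / 2;  // U and V together: (w/2 * h/2) samples of each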

[Screenshots: the source BGRA frame and, after conversion, the resulting YUV frame.]

If you see this kind of pixel misalignment, it usually means the row padding introduced by memory alignment was not taken into account; see the stride field in the code below.
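A minimal sketch of the addressing rule, assuming a locked 32BGRA CVPixelBufferRef named pixelBuffer (the coordinate variables are placeholders, not part of the original code):

size_t width       = CVPixelBufferGetWidth(pixelBuffer);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);  // may include alignment padding
size_t padding     = bytesPerRow - width * 4;                   // extra bytes at the end of every row
uint8_t *base      = (uint8_t *)CVPixelBufferGetBaseAddress(pixelBuffer);
size_t x = 0, y = 0;                                            // some pixel coordinate
uint8_t *pixel = base + y * bytesPerRow + x * 4;                // correct: step rows by bytesPerRow
// uint8_t *bad = base + (y * width + x) * 4;                   // ignores padding -> the skewed image above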

#include <CoreVideo/CoreVideo.h>
#include <iostream>
#include <cstring>
#include <cstdlib>

CFDictionaryRef CreateCFDictionary(CFTypeRef* keys, CFTypeRef* values, size_t size) {
    return CFDictionaryCreate(kCFAllocatorDefault,
                              keys,
                              values,
                              size,
                              &kCFTypeDictionaryKeyCallBacks,
                              &kCFTypeDictionaryValueCallBacks);
}

// BT.709, 8-bit, TV (video) range
static void bt709_rgb2yuv8bit_TV(uint8_t R, uint8_t G, uint8_t B, uint8_t &Y, uint8_t &U, uint8_t &V)
{
    Y =  0.183 * R + 0.614 * G + 0.062 * B + 16;
    U = -0.101 * R - 0.339 * G + 0.439 * B + 128;
    V =  0.439 * R - 0.399 * G - 0.040 * B + 128;
}

CVPixelBufferRef RGB2YCbCr8Bit(CVPixelBufferRef pixelBuffer)
{
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(pixelBuffer);
    int w = (int) CVPixelBufferGetWidth(pixelBuffer);
    int h = (int) CVPixelBufferGetHeight(pixelBuffer);
    // BytesPerRow may include alignment padding, so the pixel stride is not simply w
    int stride = (int) CVPixelBufferGetBytesPerRow(pixelBuffer) / 4;

    OSType pixelFormat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;

    CVPixelBufferRef pixelBufferCopy = NULL;
    const size_t attributes_size = 1;
    CFTypeRef keys[attributes_size] = {
        kCVPixelBufferIOSurfacePropertiesKey,
    };
    CFDictionaryRef io_surface_value = CreateCFDictionary(nullptr, nullptr, 0);
    CFTypeRef values[attributes_size] = {io_surface_value};

    CFDictionaryRef attributes = CreateCFDictionary(keys, values, attributes_size);
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                          w,
                                          h,
                                          pixelFormat,
                                          attributes,
                                          &pixelBufferCopy);
    if (status != kCVReturnSuccess) {
        std::cout << "YUVBufferCopyWithPixelBuffer :: failed" << std::endl;
        return nullptr;
    }
    if (attributes) {
        CFRelease(attributes);
        attributes = nullptr;
    }

    CVPixelBufferLockBaseAddress(pixelBufferCopy, 0);

    size_t y_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBufferCopy, 0);
    size_t uv_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBufferCopy, 1);

    int plane_h1 = (int) CVPixelBufferGetHeightOfPlane(pixelBufferCopy, 0);
    int plane_h2 = (int) CVPixelBufferGetHeightOfPlane(pixelBufferCopy, 1);

    uint8_t *y = (uint8_t *) CVPixelBufferGetBaseAddressOfPlane(pixelBufferCopy, 0);
    memset(y, 0x80, plane_h1 * y_stride);

    uint8_t *uv = (uint8_t *) CVPixelBufferGetBaseAddressOfPlane(pixelBufferCopy, 1);
    memset(uv, 0x80, plane_h2 * uv_stride);

    int y_bufferSize = w * h;
    int uv_bufferSize = w * h / 4;
    uint8_t *y_planeData = (uint8_t *) malloc(y_bufferSize * sizeof(uint8_t));
    uint8_t *u_planeData = (uint8_t *) malloc(uv_bufferSize * sizeof(uint8_t));
    uint8_t *v_planeData = (uint8_t *) malloc(uv_bufferSize * sizeof(uint8_t));

    int u_offset = 0;
    int v_offset = 0;
    uint8_t R, G, B;
    uint8_t Y, U, V;

    for (int i = 0; i < h; i ++) {
        for (int j = 0; j < w; j ++) {
            // Address the BGRA source with the padded pixel stride, not w
            int offset = i * stride + j;
            B = baseAddress[offset * 4];
            G = baseAddress[offset * 4 + 1];
            R = baseAddress[offset * 4 + 2];
            bt709_rgb2yuv8bit_TV(R, G, B, Y, U, V);
            y_planeData[i * w + j] = Y;
            // 4:2:0 subsampling: take U from even columns of even rows,
            // V from even columns of odd rows
            if (j % 2 == 0) {
                (i % 2 == 0) ? u_planeData[u_offset++] = U : v_planeData[v_offset++] = V;
            }
        }
    }

    for (int i = 0; i < plane_h1; i ++) {
        memcpy(y + i * y_stride, y_planeData + i * w, w);
        if (i < plane_h2) {
            for (int j = 0; j < w; j += 2) {
                // NV12 and NV21 are both YUV420SP ("semi-planar"): the Y plane comes
                // first, then U and V are stored interleaved in a single plane.
                // NV12 is the variant used on iOS: Y plane first, then alternating U/V.
                // Note the UV plane has its own stride (uv_stride), not y_stride.
                memcpy(uv + i * uv_stride + j, u_planeData + i * w/2 + j/2, 1);
                memcpy(uv + i * uv_stride + j + 1, v_planeData + i * w/2 + j/2, 1);
            }
        }
    }
    free(y_planeData);
    free(u_planeData);
    free(v_planeData);

    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    CVPixelBufferUnlockBaseAddress(pixelBufferCopy, 0);
    return pixelBufferCopy;
}
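A minimal usage sketch; bgraPixelBuffer stands in for the OpenGL output, and tagging the colour matrix is an optional extra, neither of which comes from the original code:

CVPixelBufferRef nv12Buffer = RGB2YCbCr8Bit(bgraPixelBuffer);   // bgraPixelBuffer: hypothetical 32BGRA input
if (nv12Buffer) {
    // The conversion above hard-codes BT.709 video range, so it may help downstream
    // consumers (encoder / AVAssetWriter) to tag the buffer accordingly.
    CVBufferSetAttachment(nv12Buffer, kCVImageBufferYCbCrMatrixKey,
                          kCVImageBufferYCbCrMatrix_ITU_R_709_2,
                          kCVAttachmentMode_ShouldPropagate);
    // ... hand nv12Buffer to the encoder / writer ...
    CVPixelBufferRelease(nv12Buffer);   // RGB2YCbCr8Bit returns a +1 reference
}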

LibYUV

After the hand-rolled version above, LibYUV is much easier to follow. Here it is pulled in as a pod dependency (LibYUV for iOS) rather than compiled from source.

pod 'Libyuv','1703'

At the time I assumed LibYUV could not convert RGB to NV12 directly, so the conversion below goes through I420 as an intermediate step (see the update at the end: direct ARGB-to-NV12/NV21 routines do exist). Note that libyuv's "ARGB" routines expect B, G, R, A byte order in memory, which is exactly what a kCVPixelFormatType_32BGRA buffer contains.
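In outline, the two-step path boils down to the two calls below (buffer creation, locking and error handling omitted; the pointer and stride names match the full listing that follows):

// Step 1: BGRA (libyuv "ARGB") -> I420
libyuv::ARGBToI420(baseAddress, (int)bgraStride,
                   y, (int)y_stride, u, (int)u_stride, v, (int)v_stride,
                   width, height);
// Step 2: I420 -> NV12
libyuv::I420ToNV12(y, (int)y_stride, u, (int)u_stride, v, (int)v_stride,
                   _y, (int)_y_stride, _uv, (int)_uv_stride,
                   _width, _height);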

CVPixelBufferLockBaseAddress(pixelBuffer, 0);

uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(pixelBuffer);
size_t bgraStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);

int w = (int) CVPixelBufferGetWidth(pixelBuffer);
int h = (int) CVPixelBufferGetHeight(pixelBuffer);

// Intermediate I420 (three-plane) buffer
OSType pixelFormat = kCVPixelFormatType_420YpCbCr8Planar;

CVPixelBufferRef pixelBufferCopy = NULL;
const size_t attributes_size = 1;
CFTypeRef keys[attributes_size] = {
    kCVPixelBufferIOSurfacePropertiesKey
};
CFDictionaryRef io_surface_value = vtc::CreateCFDictionary(nullptr, nullptr, 0);
CFTypeRef values[attributes_size] = {io_surface_value};

CFDictionaryRef attributes = vtc::CreateCFDictionary(keys, values, attributes_size);
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                      w,
                                      h,
                                      pixelFormat,
                                      attributes,
                                      &pixelBufferCopy);
if (status != kCVReturnSuccess) {
    std::cout << "YUVBufferCopyWithPixelBuffer :: failed" << std::endl;
    return nullptr;
}
if (attributes) {
    CFRelease(attributes);
    attributes = nullptr;
}

CVPixelBufferLockBaseAddress(pixelBufferCopy, 0);

unsigned char* y = (unsigned char*)CVPixelBufferGetBaseAddressOfPlane(pixelBufferCopy, 0);
unsigned char* u = (unsigned char*)CVPixelBufferGetBaseAddressOfPlane(pixelBufferCopy, 1);
unsigned char* v = (unsigned char*)CVPixelBufferGetBaseAddressOfPlane(pixelBufferCopy, 2);

int32_t width = (int32_t)CVPixelBufferGetWidth(pixelBufferCopy);
int32_t height = (int32_t)CVPixelBufferGetHeight(pixelBufferCopy);

size_t y_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBufferCopy, 0);
size_t u_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBufferCopy, 1);
size_t v_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBufferCopy, 2);

// Step 1: BGRA (libyuv "ARGB") -> I420
libyuv::ARGBToI420(baseAddress, (int)bgraStride, y, (int)y_stride, u, (int)u_stride, v, (int)v_stride, width, height);

// Destination NV12 buffer
CVPixelBufferRef pixelBufferNV12 = NULL;
const size_t size = 1;
CFTypeRef _keys[size] = {
    kCVPixelBufferIOSurfacePropertiesKey
};
CFDictionaryRef _io_surface_value = vtc::CreateCFDictionary(nullptr, nullptr, 0);
CFTypeRef _values[size] = {_io_surface_value};

CFDictionaryRef _attributes = vtc::CreateCFDictionary(_keys, _values, size);
CVReturn _status = CVPixelBufferCreate(kCFAllocatorDefault,
                                       w,
                                       h,
                                       kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
                                       _attributes,
                                       &pixelBufferNV12);
if (_status != kCVReturnSuccess) {
    std::cout << "YUVBufferCopyWithPixelBuffer :: failed" << std::endl;
    return nullptr;
}
if (_attributes) {
    CFRelease(_attributes);
    _attributes = nullptr;
}
CVPixelBufferLockBaseAddress(pixelBufferNV12, 0);

unsigned char* _y = (unsigned char*)CVPixelBufferGetBaseAddressOfPlane(pixelBufferNV12, 0);
unsigned char* _uv = (unsigned char*)CVPixelBufferGetBaseAddressOfPlane(pixelBufferNV12, 1);

size_t _y_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBufferNV12, 0);
size_t _uv_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBufferNV12, 1);

int32_t _width = (int32_t)CVPixelBufferGetWidth(pixelBufferNV12);
int32_t _height = (int32_t)CVPixelBufferGetHeight(pixelBufferNV12);

// Step 2: I420 -> NV12
libyuv::I420ToNV12(y, (int)y_stride, u, (int)u_stride, v, (int)v_stride, _y, (int)_y_stride, _uv, (int)_uv_stride, _width, _height);

CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
CVPixelBufferUnlockBaseAddress(pixelBufferCopy, 0);
CVPixelBufferUnlockBaseAddress(pixelBufferNV12, 0);

// The intermediate I420 buffer is no longer needed
CVPixelBufferRelease(pixelBufferCopy);

return pixelBufferNV12;

RGB->NV12

Update:
When I first used libyuv I missed it: there are in fact direct ARGB-to-NV12/NV21 routines, so the I420 intermediate step above is not needed. The buffer created below is NV12 (kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange), so libyuv::ARGBToNV12 is the matching call; libyuv::ARGBToNV21 also exists if a consumer expects the V/U interleave order instead.

CVPixelBufferLockBaseAddress(pixelBuffer, 0);
uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(pixelBuffer);
size_t bgraStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
int w = (int) CVPixelBufferGetWidth(pixelBuffer);
int h = (int) CVPixelBufferGetHeight(pixelBuffer);
CVPixelBufferRef pixelBufferNV12 = NULL;
const size_t attributes_size = 1;
CFTypeRef keys[attributes_size] = {
    kCVPixelBufferIOSurfacePropertiesKey
};
CFDictionaryRef io_surface_value = vtc::CreateCFDictionary(nullptr, nullptr, 0);
CFTypeRef values[attributes_size] = {io_surface_value};
CFDictionaryRef attributes = vtc::CreateCFDictionary(keys, values, attributes_size);
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                      w,
                                      h,
                                      kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
                                      attributes,
                                      &pixelBufferNV12);
if (status != kCVReturnSuccess) {
    std::cout << "YUVBufferCopyWithPixelBuffer :: failed" << std::endl;
    return nullptr;
}
if (attributes) {
    CFRelease(attributes);
    attributes = nullptr;
}
CVPixelBufferLockBaseAddress(pixelBufferNV12, 0);
unsigned char* y = (unsigned char*)CVPixelBufferGetBaseAddressOfPlane(pixelBufferNV12, 0);
unsigned char* uv = (unsigned char*)CVPixelBufferGetBaseAddressOfPlane(pixelBufferNV12, 1);
size_t y_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBufferNV12, 0);
size_t uv_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBufferNV12, 1);
int32_t width = (int32_t)CVPixelBufferGetWidth(pixelBufferNV12);
int32_t height = (int32_t)CVPixelBufferGetHeight(pixelBufferNV12);

// BGRA (libyuv "ARGB") -> NV12 in a single call, no I420 intermediate
libyuv::ARGBToNV12(baseAddress, (int)bgraStride, y, (int)y_stride, uv, (int)uv_stride, width, height);

CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
CVPixelBufferUnlockBaseAddress(pixelBufferNV12, 0);

return pixelBufferNV12;
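To close the loop with the video export mentioned at the start, here is a hedged sketch of appending the converted buffer through an AVAssetWriterInputPixelBufferAdaptor; adaptor, writerInput and pts are assumed to be an already-configured adaptor, its AVAssetWriterInput and the frame's presentation timestamp (none of this setup is in the original post):

if (writerInput.readyForMoreMediaData) {
    if (![adaptor appendPixelBuffer:pixelBufferNV12 withPresentationTime:pts]) {
        NSLog(@"appendPixelBuffer failed");
    }
}
CVPixelBufferRelease(pixelBufferNV12);   // the conversion snippets return a +1 reference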