Today I'd like to share how video encoding works in this project. Since iOS only supports hardware encoding from 8.0 onward, software encoding is included as a fallback; either openH264 or x264 will do, and x264 is what this post covers.
Hardware encoding barely touches the CPU and encodes efficiently, so it is the first choice. Software encoding (x264) is CPU-heavy: on an iPhone 4s, 720p at 20 fps is probably about the limit. Its advantage is that it also works on iOS 6.x and 7.x.
First, the encoder design:
H264Encoder, the base class, exposes a factory method that picks the implementation at runtime: hardware encoding on iOS 8.0 and above, software encoding otherwise.
+ (id)create
{
    if (Version_iOS_8) {
        H264VideoToolboxEncoder *encoder = [[H264VideoToolboxEncoder alloc] init];
        return encoder;
    } else {
        H264EncoderImpl *encoder = [[H264EncoderImpl alloc] init];
        return encoder;
    }
}
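The Version_iOS_8 macro used above isn't shown in the post. A minimal sketch of what such a check might look like (the macro name comes from the code above; the body is an assumption):

// Assumed implementation of the Version_iOS_8 check; the project's
// actual macro may differ. NSNumericSearch compares version strings
// numerically, so "10.0" correctly sorts after "8.0".
#define Version_iOS_8 ([[[UIDevice currentDevice] systemVersion] \
    compare:@"8.0" options:NSNumericSearch] != NSOrderedAscending)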
The encode entry point first checks whether the captured resolution or frame rate has changed. If it has, the existing encoder is destroyed and recreated; otherwise the frame is encoded directly.
- (int)checkEncoder:(struct VideoCapability*)capability
{
    if ((*capability) != (*_usingParam))
    {
        memcpy(_usingParam, capability, sizeof(struct VideoCapability));
        [self finiEncoder];
        if (_pTmpOut)
        {
            free(_pTmpOut);
            _pTmpOut = 0;
        }
        [self initEncoder];
        if (!_pTmpCfg)
            _pTmpCfg = (uint8_t*)malloc(100);
        if (!_pTmpOut)
            _pTmpOut = (uint8_t*)malloc(_usingParam->width * _usingParam->height * 2 + 100);
    }
    return 0;
}
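Note that (*capability) != (*_usingParam) compares two structs with an overloaded operator, which means this file is compiled as Objective-C++. The VideoCapability struct itself isn't shown; a minimal sketch, assuming it carries only the fields this post references:

// Hypothetical shape of VideoCapability; the real struct may hold more.
struct VideoCapability
{
    int width;
    int height;
    int fps;

    bool operator!=(const struct VideoCapability& other) const
    {
        return width != other.width ||
               height != other.height ||
               fps != other.fps;
    }
};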
- (int)encode:(NativeVideoFrame*)avframe Capability:(struct VideoCapability*)capability
{
    NSAutoLock* autolock = [[NSAutoLock alloc] initWithLock:_lock];
    UNUSED(autolock);
    if (!_running)
        return 0;
    [self checkEncoder:capability];
    return [self realEncode:avframe TimeStamp:[Utils now_ms]];
}
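NSAutoLock works as a scope guard: it takes the lock in its initializer and releases it in dealloc, so under ARC the otherwise-unused local keeps the method locked for the method's whole lifetime. A sketch, assuming it wraps an NSLock:

// Sketch of an RAII-style guard; the project's NSAutoLock may differ.
@interface NSAutoLock : NSObject
- (instancetype)initWithLock:(NSLock *)lock;
@end

@implementation NSAutoLock
{
    NSLock *_lock;
}
- (instancetype)initWithLock:(NSLock *)lock
{
    self = [super init];
    if (self) {
        _lock = lock;
        [_lock lock];   // acquire on construction
    }
    return self;
}
- (void)dealloc
{
    [_lock unlock];     // release when the guard is deallocated
}
@end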
H264Encoder has two subclasses, H264EncoderImpl (software) and H264VideoToolboxEncoder (hardware); each implements the base class's three protected methods.
As an aside, here is how "protected" methods are written in Objective-C:
@interface H264EncoderImpl (Protected)
- (BOOL)initEncoder;
- (void)finiEncoder;
- (int)realEncode:(NativeVideoFrame *)avFrame TimeStamp:(long)ts;
@end
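Objective-C has no real access control for methods, so the "protected" effect is purely a convention: the three methods are declared in a category (named Protected here) inside the implementation file rather than in the public header, so outside callers never see the selectors, while the base class can still invoke them (as checkEncoder and encode do above). Each subclass repeats the category declaration for itself and implements the methods in its main @implementation.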
Let's start with H264EncoderImpl.
//
// H264EncoderImpl.m
// AVSession
//
// Created by whw on 2016/11/22.
// Copyright © 2016 meixin. All rights reserved.
//
#import "H264EncoderImpl.h"
#import "x264.h"
#import "AVDefine.h"
#import "libavformat/avformat.h"
#import "Utils.h"
#import "libyuv.h"
#include "VideoDefines.h"
#include "VideoFrame.h"
typedef struct
{
    x264_param_t *param;
    x264_t *handle;
    x264_picture_t *picture;
    x264_nal_t *nal;
} Encoder;
@interface H264EncoderImpl ()
{
    Encoder* _encoder;
}
@end
@interface H264EncoderImpl (Protected)
- (BOOL)initEncoder;
- (void)finiEncoder;
- (int)realEncode:(NativeVideoFrame *)avFrame TimeStamp:(long)ts;
@end
@implementation H264EncoderImpl
- (instancetype)init
{
    self = [super init];
    if (self) {
    }
    return self;
}
- (int)realEncode:(NativeVideoFrame *)raw TimeStamp:(long)ts
{
    raw->native2i420();
    int framesize = raw->width() * raw->height();
    _encoder->picture->img.i_stride[kYPlane] = raw->stride(kYPlane);
    _encoder->picture->img.i_stride[kUPlane] = raw->stride(kUPlane);
    _encoder->picture->img.i_stride[kVPlane] = raw->stride(kVPlane);
    _encoder->picture->img.i_stride[kNumOfPlanes] = 0;
    memcpy(_encoder->picture->img.plane[kYPlane], raw->buffer(kYPlane), framesize);
    memcpy(_encoder->picture->img.plane[kUPlane], raw->buffer(kUPlane), framesize >> 2);
    memcpy(_encoder->picture->img.plane[kVPlane], raw->buffer(kVPlane), framesize >> 2);
    _encoder->picture->img.plane[kNumOfPlanes] = 0;
    _encoder->picture->img.i_csp = X264_CSP_I420;
    return [self CompressBuffer:_encoder TS:ts];
}
- (BOOL)initEncoder
{
    Encoder *en = (Encoder *)malloc(sizeof(Encoder));
    en->param = (x264_param_t *)malloc(sizeof(x264_param_t));
    en->picture = (x264_picture_t *)malloc(sizeof(x264_picture_t));
    x264_param_default_preset(en->param, "superfast", "zerolatency");
    en->param->b_sliced_threads = 0;
    en->param->i_threads = 1;
    en->param->rc.i_rc_method = X264_RC_ABR;
    int realBitrate = [Utils calcBiteRate:_usingParam->width heght:_usingParam->height fps:_usingParam->fps];
    realBitrate = realBitrate >> 10; // x264's rate-control fields are in kbit/s
    en->param->rc.i_vbv_max_bitrate = 2 * realBitrate;
    en->param->rc.i_bitrate = realBitrate;
    en->param->rc.i_vbv_buffer_size = 2 * realBitrate;
    en->param->i_fps_num = _usingParam->fps;
    en->param->i_fps_den = 1;
    en->param->i_keyint_min = _usingParam->fps * 2;
    en->param->i_keyint_max = _usingParam->fps * 2;
    en->param->i_timebase_num = 1;
    en->param->i_timebase_den = 1000;
    x264_param_apply_profile(en->param, "baseline");
    en->param->i_csp = X264_CSP_I420;
    en->param->i_log_level = X264_LOG_NONE;
    en->param->i_width = _usingParam->width;   // frame width
    en->param->i_height = _usingParam->height; // frame height
    if ((en->handle = x264_encoder_open(en->param)) == 0) {
        free(en->param);
        free(en->picture);
        free(en);
        return NO;
    }
    /* Allocate the reusable input picture */
    x264_picture_alloc(en->picture, X264_CSP_I420, en->param->i_width, en->param->i_height);
    _encoder = en;
    return YES;
}
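calcBiteRate (the selector's spelling is kept as it appears in the project) isn't shown. It evidently returns bits per second, since the result is shifted right by 10 to get the kbit/s that x264's rc fields expect. A hypothetical sketch of such a helper, assuming a simple bits-per-pixel heuristic:

// Hypothetical bitrate heuristic in bit/s; Utils' real implementation
// may weight resolution and frame rate differently.
+ (int)calcBiteRate:(int)width heght:(int)height fps:(int)fps
{
    // ~0.1 bit per pixel per frame is a reasonable starting point
    // for baseline-profile live video.
    return (int)(width * height * (double)fps * 0.1);
}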
- (void)finiEncoder
{
    if (!_encoder)
        return;
    if (_encoder->picture)
    {
        x264_picture_clean(_encoder->picture);
        free(_encoder->picture);
        _encoder->picture = 0;
    }
    if (_encoder->param)
    {
        free(_encoder->param);
        _encoder->param = 0;
    }
    if (_encoder->handle)
    {
        x264_encoder_close(_encoder->handle);
    }
    free(_encoder);
    _encoder = 0; // guard against a double free if finiEncoder is called again
    _cfgLen = 0;
}
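CompressBuffer below runs the actual x264 encode and, in the same pass, packages the NALUs as FLV VIDEODATA tag bodies, which is the payload RTMP expects. For readability, here is the byte layout the code produces (summarized from the code itself):

// Sequence header (built in _pTmpCfg, sent ahead of every key frame):
//   0x17 0x00 0x00 0x00 0x00          key frame + AVC, seq header, cts = 0
//   0x01 profile compat level 0xff    AVCDecoderConfigurationRecord head
//   0xe1 [2-byte SPS length] SPS
//   0x01 [2-byte PPS length] PPS
// Key frame (built in _pTmpOut):
//   0x17 0x01 0x00 0x00 0x00          key frame + AVC, NALU, cts = 0
//   [4-byte NALU length] NALU ...
// Inter frame (built in _pTmpOut):
//   0x27 0x01 0x00 0x00 0x00          inter frame + AVC, NALU, cts = 0
//   [4-byte NALU length] NALU ...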
- (int)CompressBuffer:(Encoder *)en TS:(long)ts
{
    x264_picture_t pic_out;
    int nNal = -1;
    int result = 0;
    int i = 0;
    en->picture->i_type = X264_TYPE_AUTO;
    en->picture->i_pts = ts;
    int ret = x264_encoder_encode(en->handle, &(en->nal), &nNal, en->picture, &pic_out);
    if (ret < 0)
    {
        AVLogWarn(@"******************encode failed");
        return -1;
    }
    if (!nNal)
    {
        return 0;
    }
    if (!_pTmpOut || !_pTmpCfg)
        return -1;
    unsigned char* pTmp = _pTmpOut;
    for (i = 0; i < nNal; i++)
    {
        if (en->nal[i].i_type == 6) // skip SEI
            continue;
        if (pic_out.b_keyframe)
        {
            if (en->nal[i].i_type == 5)
            {
                // IDR slice: replace the 3-byte start code with a 4-byte length
                int32_t length = ntohl(en->nal[i].i_payload - 3);
                memcpy(pTmp, &length, 4);
                memcpy(pTmp + 4, en->nal[i].p_payload + 3, en->nal[i].i_payload - 3);
                pTmp += en->nal[i].i_payload + 1;
                result += en->nal[i].i_payload + 1;
            }
            else if (en->nal[i].i_type == 7)
            {
                // SPS: start the AVC sequence header in _pTmpCfg
                pTmp = _pTmpCfg;
                uint8_t* pData = en->nal[i].p_payload + 4;
                pTmp[0] = 0x17;     // key frame + AVC
                pTmp[1] = 0x00;     // AVC sequence header
                pTmp[2] = 0x00;
                pTmp[3] = 0x00;
                pTmp[4] = 0x00;     // composition time
                pTmp[5] = 0x01;     // configurationVersion
                pTmp[6] = pData[1]; // AVCProfileIndication
                pTmp[7] = pData[2]; // profile_compatibility
                pTmp[8] = pData[3]; // AVCLevelIndication
                pTmp[9] = 0xff;     // 4-byte NALU lengths
                pTmp[10] = 0xe1;    // one SPS follows
                short length = ntohs(en->nal[i].i_payload - 4);
                memcpy(pTmp + 11, &length, 2);
                memcpy(pTmp + 13, pData, en->nal[i].i_payload - 4);
                result = 9 + en->nal[i].i_payload;
                pTmp += result;
            }
            else if (en->nal[i].i_type == 8)
            {
                // PPS: finish the sequence header
                pTmp[0] = 0x1;      // one PPS follows
                short length = ntohs(en->nal[i].i_payload - 4);
                memcpy(pTmp + 1, &length, 2);
                memcpy(pTmp + 3, en->nal[i].p_payload + 4, en->nal[i].i_payload - 4);
                result += en->nal[i].i_payload - 1;
                _cfgLen = result;
                // switch to _pTmpOut and write the key-frame tag header
                pTmp = _pTmpOut;
                pTmp[0] = 0x17;     // key frame + AVC
                pTmp[1] = 0x01;     // AVC NALU
                pTmp[2] = 0x00;
                pTmp[3] = 0x00;
                pTmp[4] = 0x00;     // composition time
                pTmp += 5;
                result = 5;
            }
        }
        else
        {
            // inter (P) frame
            if (result == 0)
            {
                pTmp = _pTmpOut;
                pTmp[0] = 0x27;     // inter frame + AVC
                pTmp[1] = 0x01;     // AVC NALU
                pTmp[2] = 0x00;
                pTmp[3] = 0x00;
                pTmp[4] = 0x00;     // composition time
                pTmp += 5;
                result = 5;
                // copy the whole payload, then overwrite its 4-byte start code with the length
                memcpy(pTmp, en->nal[i].p_payload, en->nal[i].i_payload);
                int32_t length = ntohl(en->nal[i].i_payload - 4);
                memcpy(pTmp, &length, 4);
                pTmp += en->nal[i].i_payload;
                result += en->nal[i].i_payload;
            }
            else
            {
                int32_t length = ntohl(en->nal[i].i_payload - 3);
                memcpy(pTmp, &length, 4);
                memcpy(pTmp + 4, en->nal[i].p_payload + 3, en->nal[i].i_payload - 3);
                pTmp += en->nal[i].i_payload + 1;
                result += en->nal[i].i_payload + 1;
            }
        }
    }
    if (!result)
    {
        return 0;
    }
    unsigned int outts = (unsigned int)(pic_out.i_dts != 0 ? pic_out.i_dts : ts);
    @autoreleasepool
    {
        if (pic_out.b_keyframe && _cfgLen > 0)
        {
            if (_delegate)
            {
                [_delegate encoded:_pTmpCfg length:_cfgLen timestamp:outts];
                [_delegate encoded:_pTmpOut length:result timestamp:outts];
            }
        }
        else if (!pic_out.b_keyframe)
        {
            if (_delegate)
            {
                [_delegate encoded:_pTmpOut length:result timestamp:outts];
            }
        }
    }
    return result;
}
@end
Now let's look at the hardware-encoder implementation.
//
// H264VideoToolboxEncoder.m
// AVSession
//
// Created by whw on 2016/11/22.
// Copyright © 2016 meixin. All rights reserved.
//
#import "H264VideoToolboxEncoder.h"
#import <UIKit/UIKit.h>
#import "Utils.h"
#import "libyuv.h"
#include "VideoDefines.h"
#include "VideoFrame.h"
const float kLimitToAverageBitRateFactor = 1.5f; // 2.0f
static OSType KVideoPixelFormatType = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
CVPixelBufferRef createCVPixelBuffer(NativeVideoFrame* buffer)
{
    CFDictionaryRef pixelBufferAttributes = CreateCFDictionary(nil, nil, 0);
    CVPixelBufferRef pixelBuffer;
    CVPixelBufferCreate(NULL, buffer->width(), buffer->height(), KVideoPixelFormatType, pixelBufferAttributes, &pixelBuffer);
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    uint8_t* dst_y = reinterpret_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0));
    int dst_stride_y = (int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
    uint8_t* dst_uv = reinterpret_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1));
    int dst_stride_uv = (int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
    // Convert I420 to NV12.
    libyuv::I420ToNV12(buffer->buffer(kYPlane), buffer->stride(kYPlane),
                       buffer->buffer(kUPlane), buffer->stride(kUPlane),
                       buffer->buffer(kVPlane), buffer->stride(kVPlane),
                       dst_y, dst_stride_y, dst_uv, dst_stride_uv,
                       buffer->width(), buffer->height());
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    CFRelease(pixelBufferAttributes);
    return pixelBuffer;
}
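CreateCFDictionary is a small helper that isn't shown in the post; a sketch of what it presumably wraps:

// Assumed helper around CFDictionaryCreate with the standard CF
// callbacks; the project's version may differ slightly.
CFDictionaryRef CreateCFDictionary(CFTypeRef* keys, CFTypeRef* values, size_t size)
{
    return CFDictionaryCreate(kCFAllocatorDefault,
                              (const void**)keys, (const void**)values, size,
                              &kCFTypeDictionaryKeyCallBacks,
                              &kCFTypeDictionaryValueCallBacks);
}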
// Convenience function for setting a VT property. int32_t
void SetVTSessionProperty(VTSessionRef session, CFStringRef key, int32_t value)
{
    CFNumberRef cfNum = CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &value);
    OSStatus status = VTSessionSetProperty(session, key, cfNum);
    CFRelease(cfNum);
    if (status != noErr) {
        NSString *key_string = (__bridge NSString *)(key);
        AVLogError(@"VTSessionSetProperty failed to set:%@ to %d : %d", key_string, value, (int)status);
    }
}
// Convenience function for setting a VT property. uint32_t
void SetVTSessionProperty(VTSessionRef session, CFStringRef key, uint32_t value)
{
    int64_t value_64 = value; // widen so the value fits in a signed CFNumber
    CFNumberRef cfNum = CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt64Type, &value_64);
    OSStatus status = VTSessionSetProperty(session, key, cfNum);
    CFRelease(cfNum);
    if (status != noErr) {
        NSString *key_string = (__bridge NSString *)(key);
        AVLogError(@"VTSessionSetProperty failed to set:%@ to %d : %d", key_string, (int)value, (int)status);
    }
}
// Convenience function for setting a VT property. bool
void SetVTSessionProperty(VTSessionRef session, CFStringRef key, bool value)
{
    CFBooleanRef cf_bool = value ? kCFBooleanTrue : kCFBooleanFalse;
    OSStatus status = VTSessionSetProperty(session, key, cf_bool);
    if (status != noErr) {
        NSString *key_string = (__bridge NSString *)(key);
        AVLogError(@"VTSessionSetProperty failed to set:%@ to %d : %d", key_string, (int)value, (int)status);
    }
}
// Convenience function for setting a VT property. CFStringRef
void SetVTSessionProperty(VTSessionRef session, CFStringRef key, CFStringRef value)
{
    OSStatus status = VTSessionSetProperty(session, key, value);
    if (status != noErr) {
        NSString *key_string = (__bridge NSString *)(key);
        NSString *value_string = (__bridge NSString *)(value); // was (key), which logged the key twice
        AVLogError(@"VTSessionSetProperty failed to set:%@ to %@ : %d", key_string, value_string, (int)status);
    }
}
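These four SetVTSessionProperty functions differ only in the type of their last parameter; overloading like this is a C++ feature, so this file also has to be compiled as Objective-C++ (which the C++ casts and libyuv calls above already require). The TODO(tkchin) comments further down suggest this hardware path is adapted from WebRTC's VideoToolbox encoder.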
// This is the callback VideoToolbox invokes when an encode is complete.
static void encodeOutputCallback(void *encoder, void *params, OSStatus status, VTEncodeInfoFlags infoFlags, CMSampleBufferRef sampleBuffer)
{
    H264VideoToolboxEncoder *encoderSession = (__bridge H264VideoToolboxEncoder*)encoder;
    [encoderSession encoded:sampleBuffer status:status flags:infoFlags];
}
@interface H264VideoToolboxEncoder ()
{
    VTCompressionSessionRef _encodeSession;
}
@end
@interface H264VideoToolboxEncoder (Protected)
- (BOOL)initEncoder;
- (void)finiEncoder;
- (int)realEncode:(NativeVideoFrame *)avFrame TimeStamp:(long)ts;
@end
@implementation H264VideoToolboxEncoder
- (instancetype)init
{
    self = [super init];
    if (self) {
    }
    return self;
}
- (int)realEncode:(NativeVideoFrame *)raw TimeStamp:(long)ts
{
    if (!_encodeSession) {
        return -1;
    }
    if ([UIApplication sharedApplication].applicationState != UIApplicationStateActive) {
        // Ignore all encode requests when the app isn't active; in that
        // state the OS has invalidated the hardware encoder.
        return -1;
    }
    // Get a pixel buffer from the pool and copy frame data over.
    CVPixelBufferPoolRef pixel_buffer_pool = VTCompressionSessionGetPixelBufferPool(_encodeSession);
    if (!pixel_buffer_pool) {
        [self finiEncoder];
        [self initEncoder];
        pixel_buffer_pool = VTCompressionSessionGetPixelBufferPool(_encodeSession);
        AVLogWarn(@"Resetting compression session due to invalid pool.");
    }
    CVPixelBufferRef pixel_buffer = static_cast<CVPixelBufferRef>(raw->native_handle());
    if (pixel_buffer) {
        // This pixel buffer might have a higher resolution than the one the
        // compression session is configured for. The session handles that and
        // outputs frames at the configured resolution regardless of the input
        // pixel buffer resolution.
        CVBufferRetain(pixel_buffer);
        pixel_buffer_pool = nullptr;
    } else {
        pixel_buffer = createCVPixelBuffer(raw);
    }
    CMTime presentation_time_stamp = CMTimeMake(ts, 1000);
    CFDictionaryRef frame_properties = NULL;
    VTEncodeInfoFlags flags;
    OSStatus status = VTCompressionSessionEncodeFrame(_encodeSession, pixel_buffer, presentation_time_stamp, kCMTimeInvalid, frame_properties, NULL, &flags);
    if (frame_properties) {
        CFRelease(frame_properties);
    }
    if (pixel_buffer) {
        CVBufferRelease(pixel_buffer);
    }
    if (status != noErr) {
        AVLogError(@"Failed to encode frame with code: %d", (int)status);
        return -1;
    }
    return 0;
}
- (BOOL)initEncoder
{
    OSStatus status = -1;
    const size_t attributes_size = 3;
    CFTypeRef keys[attributes_size] = {
        kCVPixelBufferOpenGLESCompatibilityKey,
        kCVPixelBufferIOSurfacePropertiesKey,
        kCVPixelBufferPixelFormatTypeKey
    };
    CFDictionaryRef io_surface_value = CreateCFDictionary(nil, nil, 0);
    int64_t nv12type = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
    CFNumberRef pixel_format = CFNumberCreate(nil, kCFNumberLongType, &nv12type);
    CFTypeRef values[attributes_size] = {kCFBooleanTrue, io_surface_value, pixel_format};
    CFDictionaryRef source_attributes = CreateCFDictionary(keys, values, attributes_size);
    if (io_surface_value) {
        CFRelease(io_surface_value);
        io_surface_value = nil;
    }
    if (pixel_format) {
        CFRelease(pixel_format);
        pixel_format = nil;
    }
    status = VTCompressionSessionCreate(kCFAllocatorDefault,
                                        _usingParam->width,
                                        _usingParam->height,
                                        kCMVideoCodecType_H264,
                                        NULL,
                                        source_attributes,
                                        kCFAllocatorDefault,
                                        encodeOutputCallback,
                                        (__bridge void *)self,
                                        &(_encodeSession));
    if (source_attributes) {
        CFRelease(source_attributes);
        source_attributes = nil;
    }
    if (status != noErr) {
        AVLogError(@"VTCompressionSessionCreate failed. ret = %d", (int)status);
        return NO; // was -1, which reads as YES for a BOOL
    }
    [self configureCompressionSession:_usingParam];
    VTCompressionSessionPrepareToEncodeFrames(_encodeSession);
    return YES; // was `return status` (noErr == 0 == NO), inverting the result
}
- (void)configureCompressionSession:(struct VideoCapability *)param
{
    if (_encodeSession) {
        // Real-time encoding output, which lowers encoding latency.
        SetVTSessionProperty(_encodeSession,
                             kVTCompressionPropertyKey_RealTime,
                             true);
        // H.264 profile. Live streaming usually uses baseline, which avoids
        // the latency introduced by B-frames.
        SetVTSessionProperty(_encodeSession,
                             kVTCompressionPropertyKey_ProfileLevel,
                             kVTProfileLevel_H264_Baseline_AutoLevel);
        SetVTSessionProperty(_encodeSession,
                             kVTCompressionPropertyKey_AllowFrameReordering,
                             false);
        // Unlike x264's rc fields, VideoToolbox takes the bitrate in bit/s,
        // so there is no >>10 conversion here.
        int realBitrate = [Utils calcBiteRate:_usingParam->width heght:_usingParam->height fps:_usingParam->fps];
        [self SetEncoderBitrateBps:realBitrate];
        // Keyframe interval, i.e. the GOP size.
        SetVTSessionProperty(_encodeSession,
                             kVTCompressionPropertyKey_MaxKeyFrameInterval,
                             param->fps * 3);
        SetVTSessionProperty(_encodeSession,
                             kVTCompressionPropertyKey_ExpectedFrameRate,
                             param->fps);
    }
}
- (void)SetEncoderBitrateBps:(uint32_t)bps
{
    if (_encodeSession) {
        // Set the encoding bitrate. If it isn't set, the encoder defaults to
        // a very low bitrate and the output looks blurry.
        SetVTSessionProperty(_encodeSession,
                             kVTCompressionPropertyKey_AverageBitRate,
                             bps);
        // TODO(tkchin): Add a helper method to set array value.
        int64_t data_limit_bytes_per_second_value = (int64_t)(bps * kLimitToAverageBitRateFactor / 8);
        CFNumberRef bytes_per_second = CFNumberCreate(kCFAllocatorDefault,
                                                      kCFNumberSInt64Type,
                                                      &data_limit_bytes_per_second_value);
        int64_t one_second_value = 1;
        CFNumberRef one_second = CFNumberCreate(kCFAllocatorDefault,
                                                kCFNumberSInt64Type,
                                                &one_second_value);
        const void* nums[2] = {bytes_per_second, one_second};
        CFArrayRef data_rate_limits = CFArrayCreate(NULL, nums, 2, &kCFTypeArrayCallBacks);
        OSStatus status = VTSessionSetProperty(_encodeSession,
                                               kVTCompressionPropertyKey_DataRateLimits,
                                               data_rate_limits);
        if (bytes_per_second) {
            CFRelease(bytes_per_second);
        }
        if (one_second) {
            CFRelease(one_second);
        }
        if (data_rate_limits) {
            CFRelease(data_rate_limits);
        }
        if (status != noErr) {
            AVLogError(@"Failed to set data rate limit: %d", (int)status);
        }
    }
}
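kVTCompressionPropertyKey_DataRateLimits takes an array of alternating byte-count and duration values; the single pair set here caps the encoder's hard limit at kLimitToAverageBitRateFactor (1.5x) times the average bitrate, measured over a one-second window.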
- (void)finiEncoder
{
    if (_encodeSession) {
        VTCompressionSessionInvalidate(_encodeSession);
        CFRelease(_encodeSession);
        _encodeSession = nil;
    }
}
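One thing to know before reading the output callback: VideoToolbox hands back NALUs in AVCC format, where each NAL unit is prefixed with a 4-byte big-endian length rather than an Annex B start code, and the SPS/PPS travel separately in the sample buffer's format description. That is why the loop below reads and byte-swaps a 4-byte length to hop from NALU to NALU, while the parameter sets are pulled out with CMVideoFormatDescriptionGetH264ParameterSetAtIndex.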
- (void)encoded:(CMSampleBufferRef)sampleBuffer status:(OSStatus)status flags:(VTEncodeInfoFlags)infoFlags
{
    if (status != noErr) {
        AVLogError(@"H264 encode failed.");
        return;
    }
    // Convert the sample buffer into a buffer suitable for RTP packetization.
    // TODO(tkchin): Allocate buffers through a pool.
    if (!CMSampleBufferDataIsReady(sampleBuffer))
    {
        AVLogWarn(@"encodeOutputCallback data is not ready status:%d infoFlags:%d", (int)status, (int)infoFlags);
        return;
    }
    int result = 0;
    unsigned char* pTmp = _pTmpOut;
    // Check whether this is a key frame first.
    CFDictionaryRef theDic = (CFDictionaryRef)CFArrayGetValueAtIndex(CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, true), 0);
    BOOL keyframe = !CFDictionaryContainsKey(theDic, kCMSampleAttachmentKey_NotSync);
    if (keyframe) {
        CMFormatDescriptionRef format = CMSampleBufferGetFormatDescription(sampleBuffer);
        size_t sparameterSetSize, sparameterSetCount;
        const uint8_t *sparameterSet;
        OSStatus statusCode = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 0, &sparameterSet, &sparameterSetSize, &sparameterSetCount, 0);
        if (statusCode == noErr)
        {
            // SPS: build the AVC sequence header in _pTmpCfg,
            // same layout as the software path.
            pTmp = _pTmpCfg;
            uint8_t* pData = (uint8_t *)sparameterSet;
            pTmp[0] = 0x17;     // key frame + AVC
            pTmp[1] = 0x00;     // AVC sequence header
            pTmp[2] = 0x00;
            pTmp[3] = 0x00;
            pTmp[4] = 0x00;     // composition time
            pTmp[5] = 0x01;     // configurationVersion
            pTmp[6] = pData[1]; // AVCProfileIndication
            pTmp[7] = pData[2]; // profile_compatibility
            pTmp[8] = pData[3]; // AVCLevelIndication
            pTmp[9] = 0xff;     // 4-byte NALU lengths
            pTmp[10] = 0xe1;    // one SPS follows
            short length = ntohs(sparameterSetSize);
            memcpy(pTmp + 11, &length, 2);
            memcpy(pTmp + 13, pData, sparameterSetSize);
            result = 13 + (int)sparameterSetSize;
            pTmp += result;
            // Found the SPS; now check for the PPS.
            size_t pparameterSetSize, pparameterSetCount;
            const uint8_t *pparameterSet;
            OSStatus ppsStatus = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 1, &pparameterSet, &pparameterSetSize, &pparameterSetCount, 0);
            if (ppsStatus == noErr)
            {
                pTmp[0] = 0x1;  // one PPS follows
                short length = ntohs(pparameterSetSize);
                memcpy(pTmp + 1, &length, 2);
                memcpy(pTmp + 3, pparameterSet, pparameterSetSize);
                result += pparameterSetSize + 3;
                _cfgLen = result;
                // Switch to _pTmpOut and write the key-frame tag header.
                pTmp = _pTmpOut;
                pTmp[0] = 0x17; // key frame + AVC
                pTmp[1] = 0x01; // AVC NALU
                pTmp[2] = 0x00;
                pTmp[3] = 0x00;
                pTmp[4] = 0x00; // composition time
                pTmp += 5;
                result = 5;
            }
        }
    }
    CMBlockBufferRef dataBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
    size_t length, totalLength;
    char *dataPointer;
    OSStatus statusCodeRet = CMBlockBufferGetDataPointer(dataBuffer, 0, &length, &totalLength, &dataPointer);
    if (statusCodeRet == noErr) {
        size_t bufferOffset = 0;
        static const int AVCCHeaderLength = 4;
        while (bufferOffset < totalLength - AVCCHeaderLength) {
            // Read the NAL unit length.
            uint32_t ipayload = 0;
            memcpy(&ipayload, dataPointer + bufferOffset, AVCCHeaderLength);
            // Convert the length from big-endian to host order.
            ipayload = CFSwapInt32BigToHost(ipayload);
            int len = ntohl(ipayload); // big-endian copy used when writing back out
            int type = dataPointer[bufferOffset + AVCCHeaderLength] & 0x1f;
            if (type != 6) // skip SEI
            {
                if (type == 5) // IDR slice
                {
                    memcpy(pTmp, &len, 4);
                    memcpy(pTmp + 4, dataPointer + bufferOffset + AVCCHeaderLength, ipayload);
                    pTmp += ipayload + 4;
                    result += ipayload + 4;
                }
                else
                {
                    if (result == 0)
                    {
                        // Inter frame: write the tag header first.
                        pTmp = _pTmpOut;
                        pTmp[0] = 0x27; // inter frame + AVC
                        pTmp[1] = 0x01; // AVC NALU
                        pTmp[2] = 0x00;
                        pTmp[3] = 0x00;
                        pTmp[4] = 0x00; // composition time
                        pTmp += 5;
                        result = 5;
                    }
                    memcpy(pTmp, &len, 4);
                    memcpy(pTmp + 4, dataPointer + bufferOffset + AVCCHeaderLength, ipayload);
                    pTmp += ipayload + 4;
                    result += ipayload + 4;
                }
            }
            // Move to the next NAL unit in the block buffer.
            bufferOffset += AVCCHeaderLength + ipayload;
        }
        @autoreleasepool
        {
            if (keyframe && _cfgLen > 0)
            {
                if (_delegate)
                {
                    [_delegate encoded:_pTmpCfg length:_cfgLen timestamp:0];
                    [_delegate encoded:_pTmpOut length:result timestamp:0];
                }
            }
            else if (!keyframe)
            {
                if (_delegate)
                {
                    [_delegate encoded:_pTmpOut length:result timestamp:0];
                }
            }
        }
    }
}
@end
The RTMP-style packaging of the encoded data here is not written particularly well; it isn't factored out into its own class. When I find the time I'll write an H264RtmpPacket to encapsulate it.
Please credit the original source when reposting. Thanks!
Source code: https://github.com/haowei8196/VideoEngineMgr