@@ -8,20 +8,20 @@

 #import "BFAVParseHandler.h"
 #import <UIKit/UIKit.h>
-#include "log4cplus.h"
+#include "BFMediaLog.h"

 #pragma mark - Global Var

 #define kModuleName "BFAVParseHandler"

-static const int kXDXParseSupportMaxFps = 60;
-static const int kXDXParseFpsOffSet = 5;
-static const int kXDXParseWidth1920 = 1920;
-static const int kXDXParseHeight1080 = 1080;
-static const int kXDXParseSupportMaxWidth = 3840;
-static const int kXDXParseSupportMaxHeight = 2160;
+static const int kBFParseSupportMaxFps = 60;
+static const int kBFParseFpsOffSet = 5;
+static const int kBFParseWidth1920 = 1920;
+static const int kBFParseHeight1080 = 1080;
+static const int kBFParseSupportMaxWidth = 3840;
+static const int kBFParseSupportMaxHeight = 2160;

-@interface XDXAVParseHandler ()
+@interface BFAVParseHandler ()
 {
     /* Flag */
     BOOL m_isStopParse;
@@ -40,7 +40,7 @@ static const int kXDXParseSupportMaxHeight = 2160;

 @end

-@implementation XDXAVParseHandler
+@implementation BFAVParseHandler

 #pragma mark - C Function
 static int GetAVStreamFPSTimeBase(AVStream *st) {
@@ -77,7 +77,7 @@ static int GetAVStreamFPSTimeBase(AVStream *st) {
 }

 #pragma mark - public methods
-- (void)startParseWithCompletionHandler:(void (^)(BOOL isVideoFrame, BOOL isFinish, struct XDXParseVideoDataInfo *videoInfo, struct XDXParseAudioDataInfo *audioInfo))handler {
+- (void)startParseWithCompletionHandler:(void (^)(BOOL isVideoFrame, BOOL isFinish, struct BFParseVideoDataInfo *videoInfo, struct BFParseAudioDataInfo *audioInfo))handler {
     [self startParseWithFormatContext:m_formatContext
                      videoStreamIndex:m_videoStreamIndex
                      audioStreamIndex:m_audioStreamIndex
@@ -115,7 +115,7 @@ static int GetAVStreamFPSTimeBase(AVStream *st) {
     m_formatContext = [self createFormatContextbyFilePath:path];

     if (m_formatContext == NULL) {
-        log4cplus_error(kModuleName, "%s: create format context failed.",__func__);
+        NSLog(@"create format context failed.");
         return;
     }

@@ -128,7 +128,7 @@ static int GetAVStreamFPSTimeBase(AVStream *st) {
     m_video_width = videoStream->codecpar->width;
     m_video_height = videoStream->codecpar->height;
     m_video_fps = GetAVStreamFPSTimeBase(videoStream);
-    log4cplus_info(kModuleName, "%s: video index:%d, width:%d, height:%d, fps:%d",__func__,m_videoStreamIndex,m_video_width,m_video_height,m_video_fps);
+    NSLog(@"video index:%d, width:%d, height:%d, fps:%d",m_videoStreamIndex,m_video_width,m_video_height,m_video_fps);

     BOOL isSupport = [self isSupportVideoStream:videoStream
                                   formatContext:m_formatContext
@@ -136,7 +136,7 @@ static int GetAVStreamFPSTimeBase(AVStream *st) {
                                    sourceHeight:m_video_height
                                       sourceFps:m_video_fps];
     if (!isSupport) {
-        log4cplus_error(kModuleName, "%s: Not support the video stream",__func__);
+        NSLog(@"Not support the video stream");
         return;
     }

@@ -150,14 +150,14 @@ static int GetAVStreamFPSTimeBase(AVStream *st) {
     isSupport = [self isSupportAudioStream:audioStream
                              formatContext:m_formatContext];
     if (!isSupport) {
-        log4cplus_error(kModuleName, "%s: Not support the audio stream",__func__);
+        NSLog(@"Not support the audio stream");
         return;
     }
 }

 - (AVFormatContext *)createFormatContextbyFilePath:(NSString *)filePath {
     if (filePath == nil) {
-        log4cplus_error(kModuleName, "%s: file path is NULL",__func__);
+        NSLog(@"file path is NULL");
         return NULL;
     }

@@ -193,7 +193,7 @@ static int GetAVStreamFPSTimeBase(AVStream *st) {
     }

     if (avStreamIndex == -1) {
-        log4cplus_error(kModuleName, "%s: Not find video stream",__func__);
+        NSLog(@"Not find video stream");
         return NULL;
     }else {
         return avStreamIndex;
@@ -203,10 +203,10 @@ static int GetAVStreamFPSTimeBase(AVStream *st) {
 - (BOOL)isSupportVideoStream:(AVStream *)stream formatContext:(AVFormatContext *)formatContext sourceWidth:(int)sourceWidth sourceHeight:(int)sourceHeight sourceFps:(int)sourceFps {
     if (stream->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) { // Video
         AVCodecID codecID = stream->codecpar->codec_id;
-        log4cplus_info(kModuleName, "%s: Current video codec format is %s",__func__, avcodec_find_decoder(codecID)->name);
+        NSLog(@"Current video codec format is %s", avcodec_find_decoder(codecID)->name);
         // Only H.264 and H.265 (HEVC, iOS 11+) encoded video files are currently supported
         if ((codecID != AV_CODEC_ID_H264 && codecID != AV_CODEC_ID_HEVC) || (codecID == AV_CODEC_ID_HEVC && [[UIDevice currentDevice].systemVersion floatValue] < 11.0)) {
-            log4cplus_error(kModuleName, "%s: Not suuport the codec",__func__);
+            NSLog(@"Not support the codec");
             return NO;
         }

@@ -216,7 +216,7 @@ static int GetAVStreamFPSTimeBase(AVStream *st) {
         if (tag != NULL) {
             int rotate = [[NSString stringWithFormat:@"%s",tag->value] intValue];
             if (rotate != 0 /* && >= iPhone 8P*/) {
-                log4cplus_error(kModuleName, "%s: Not support rotate for device ",__func__);
+                NSLog(@"Not support rotate for device ");
             }
         }

@@ -243,32 +243,32 @@ static int GetAVStreamFPSTimeBase(AVStream *st) {
         */

         // Up to 60 FPS is currently supported
-        if (sourceFps > kXDXParseSupportMaxFps + kXDXParseFpsOffSet) {
-            log4cplus_error(kModuleName, "%s: Not support the fps",__func__);
+        if (sourceFps > kBFParseSupportMaxFps + kBFParseFpsOffSet) {
+            NSLog(@"Not support the fps");
             return NO;
         }

         // Up to 3840*2160 is currently supported
-        if (sourceWidth > kXDXParseSupportMaxWidth || sourceHeight > kXDXParseSupportMaxHeight) {
-            log4cplus_error(kModuleName, "%s: Not support the resolution",__func__);
+        if (sourceWidth > kBFParseSupportMaxWidth || sourceHeight > kBFParseSupportMaxHeight) {
+            NSLog(@"Not support the resolution");
             return NO;
         }

         // 60FPS -> 1080P
-        if (sourceFps > kXDXParseSupportMaxFps - kXDXParseFpsOffSet && (sourceWidth > kXDXParseWidth1920 || sourceHeight > kXDXParseHeight1080)) {
-            log4cplus_error(kModuleName, "%s: Not support the fps and resolution",__func__);
+        if (sourceFps > kBFParseSupportMaxFps - kBFParseFpsOffSet && (sourceWidth > kBFParseWidth1920 || sourceHeight > kBFParseHeight1080)) {
+            NSLog(@"Not support the fps and resolution");
             return NO;
         }

         // 30FPS -> 4K
-        if (sourceFps > kXDXParseSupportMaxFps / 2 + kXDXParseFpsOffSet && (sourceWidth >= kXDXParseSupportMaxWidth || sourceHeight >= kXDXParseSupportMaxHeight)) {
-            log4cplus_error(kModuleName, "%s: Not support the fps and resolution",__func__);
+        if (sourceFps > kBFParseSupportMaxFps / 2 + kBFParseFpsOffSet && (sourceWidth >= kBFParseSupportMaxWidth || sourceHeight >= kBFParseSupportMaxHeight)) {
+            NSLog(@"Not support the fps and resolution");
             return NO;
         }

         // 6S
-//        if ([[XDXAnywhereTool deviceModelName] isEqualToString:@"iPhone 6s"] && sourceFps > kXDXParseSupportMaxFps - kXDXParseFpsOffSet && (sourceWidth >= kXDXParseWidth1920 || sourceHeight >= kXDXParseHeight1080)) {
-//            log4cplus_error(kModuleName, "%s: Not support the fps and resolution",__func__);
+//        if ([[BFAnywhereTool deviceModelName] isEqualToString:@"iPhone 6s"] && sourceFps > kBFParseSupportMaxFps - kBFParseFpsOffSet && (sourceWidth >= kBFParseWidth1920 || sourceHeight >= kBFParseHeight1080)) {
+//            NSLog(@"Not support the fps and resolution");
 //            return NO;
 //        }
         return YES;
@@ -281,10 +281,10 @@ static int GetAVStreamFPSTimeBase(AVStream *st) {
 - (BOOL)isSupportAudioStream:(AVStream *)stream formatContext:(AVFormatContext *)formatContext {
     if (stream->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
         AVCodecID codecID = stream->codecpar->codec_id;
-        log4cplus_info(kModuleName, "%s: Current audio codec format is %s",__func__, avcodec_find_decoder(codecID)->name);
+        NSLog(@"Current audio codec format is %s", avcodec_find_decoder(codecID)->name);
         // Only AAC audio is supported in this project
         if (codecID != AV_CODEC_ID_AAC) {
-            log4cplus_error(kModuleName, "%s: Only support AAC format for the demo.",__func__);
+            NSLog(@"Only support AAC format for the demo.");
             return NO;
         }

@@ -295,7 +295,7 @@ static int GetAVStreamFPSTimeBase(AVStream *st) {
 }

 #pragma mark Start Parse
-- (void)startParseWithFormatContext:(AVFormatContext *)formatContext videoStreamIndex:(int)videoStreamIndex audioStreamIndex:(int)audioStreamIndex completionHandler:(void (^)(BOOL isVideoFrame, BOOL isFinish, struct XDXParseVideoDataInfo *videoInfo, struct XDXParseAudioDataInfo *audioInfo))handler{
+- (void)startParseWithFormatContext:(AVFormatContext *)formatContext videoStreamIndex:(int)videoStreamIndex audioStreamIndex:(int)audioStreamIndex completionHandler:(void (^)(BOOL isVideoFrame, BOOL isFinish, struct BFParseVideoDataInfo *videoInfo, struct BFParseAudioDataInfo *audioInfo))handler{
     m_isStopParse = NO;

     dispatch_queue_t parseQueue = dispatch_queue_create("parse_queue", DISPATCH_QUEUE_SERIAL);
@@ -317,12 +317,12 @@ static int GetAVStreamFPSTimeBase(AVStream *st) {
             int size = av_read_frame(formatContext, &packet);
             if (size < 0 || packet.size < 0) {
                 handler(YES, YES, NULL, NULL);
-                log4cplus_error(kModuleName, "%s: Parse finish",__func__);
+                NSLog(@"Parse finish");
                 break;
             }

             if (packet.stream_index == videoStreamIndex) {
-                XDXParseVideoDataInfo videoInfo = {0};
+                BFParseVideoDataInfo videoInfo = {0};

                 // get the rotation angle of video
                 AVDictionaryEntry *tag = NULL;
@@ -346,7 +346,7 @@ static int GetAVStreamFPSTimeBase(AVStream *st) {
                 }

                 if (videoInfo.videoRotate != 0 /* && <= iPhone 8*/) {
-                    log4cplus_error(kModuleName, "%s: Not support the angle",__func__);
+                    NSLog(@"Not support the angle");
                     break;
                 }

@@ -357,10 +357,10 @@ static int GetAVStreamFPSTimeBase(AVStream *st) {
                 static char filter_name[32];
                 if (formatContext->streams[videoStreamIndex]->codecpar->codec_id == AV_CODEC_ID_H264) {
                     strncpy(filter_name, "h264_mp4toannexb", 32);
-                    videoInfo.videoFormat = XDXH264EncodeFormat;
+                    videoInfo.videoFormat = BFH264EncodeFormat;
                 } else if (formatContext->streams[videoStreamIndex]->codecpar->codec_id == AV_CODEC_ID_HEVC) {
                     strncpy(filter_name, "hevc_mp4toannexb", 32);
-                    videoInfo.videoFormat = XDXH265EncodeFormat;
+                    videoInfo.videoFormat = BFH265EncodeFormat;
                 } else {
                     break;
                 }
@@ -381,7 +381,7 @@ static int GetAVStreamFPSTimeBase(AVStream *st) {
                 }
                 av_bitstream_filter_filter(self->m_bitFilterContext, formatContext->streams[videoStreamIndex]->codec, NULL, &new_packet.data, &new_packet.size, packet.data, packet.size, 0);

-                //log4cplus_info(kModuleName, "%s: extra data : %s , size : %d",__func__,formatContext->streams[videoStreamIndex]->codec->extradata,formatContext->streams[videoStreamIndex]->codec->extradata_size);
+                //NSLog(@" extra data : %s , size : %d",formatContext->streams[videoStreamIndex]->codec->extradata,formatContext->streams[videoStreamIndex]->codec->extradata_size);

                 CMSampleTimingInfo timingInfo;
                 CMTime presentationTimeStamp = kCMTimeInvalid;
@@ -410,7 +410,7 @@ static int GetAVStreamFPSTimeBase(AVStream *st) {
             }

             if (packet.stream_index == audioStreamIndex) {
-                XDXParseAudioDataInfo audioInfo = {0};
+                BFParseAudioDataInfo audioInfo = {0};
                 audioInfo.data = (uint8_t *)malloc(packet.size);
                 memcpy(audioInfo.data, packet.data, packet.size);
                 audioInfo.dataSize = packet.size;
@@ -447,7 +447,7 @@ static int GetAVStreamFPSTimeBase(AVStream *st) {
             int size = av_read_frame(formatContext, &packet);
             if (size < 0 || packet.size < 0) {
                 handler(YES, YES, packet);
-                log4cplus_error(kModuleName, "%s: Parse finish",__func__);
+                NSLog(@"Parse finish");
                 break;
             }

@@ -466,7 +466,7 @@ static int GetAVStreamFPSTimeBase(AVStream *st) {


 - (void)freeAllResources {
-    log4cplus_error(kModuleName, "%s: Free all resources !",__func__);
+    NSLog(@"Free all resources !");
     if (m_formatContext) {
         avformat_close_input(&m_formatContext);
         m_formatContext = NULL;