增加换肤功能 (Add skin/theme switching)

This commit is contained in:
启星
2025-08-14 10:07:49 +08:00
parent f6964c1e89
commit 4f9318d98e
8789 changed files with 978530 additions and 2 deletions

Pods/QGVAPPlayer/LICENSE.txt (generated, new file, +1259)

File diff suppressed because it is too large.

Pods/QGVAPPlayer/README.md (generated, new file, +76)

@@ -0,0 +1,76 @@
# VAP
[![License](https://img.shields.io/badge/license-MIT-blue.svg?style=flat)](http://opensource.org/licenses/MIT)
Simplified Chinese | [English](./README_en.md)
VAP (Video Animation Player) is a solution developed by Penguin Esports (企鹅电竞) for playing rich animated effects.
* Compared with animated-image formats such as WebP and APNG, it offers a higher compression ratio (smaller assets) and hardware decoding (faster decoding).
* Compared with Lottie, it can render more complex effects (for example, particle effects).
For a detailed introduction to the project, see [Introduction.md](./Introduction.md).
Effect showcase:
[Showcase page](https://egame.qq.com/vap)
![](./images/anim1.gif)
VAP can also merge custom attributes (such as a user name or avatar) into the animation:
![](./images/anim2.gif)
## Performance overview
-|File size|Decoding|Effect support
---|---|---|---
Lottie|cannot export|software|no particle effects
GIF|4.6 MB|software|8-bit color only
APNG|10.6 MB|software|full support
WebP|9.2 MB|software|full support
MP4|1.5 MB|hardware|no transparent background
VAP|***1.5 MB***|***hardware***|***full support***
See [Introduction.md](./Introduction.md) for the test parameters.
## Platform support
Supported platforms: [Android](./Android), [iOS](./iOS), [web](./web). Integration guides are in the corresponding platform directories.
Asset creation tool: [VapTool](./tool) (usage instructions are in the tool directory)
Playback preview tools: [Mac](https://github.com/Tencent/vap/releases/download/VapPreview1.2.0/vap-player_mac_1.2.0.zip), [Windows](https://github.com/Tencent/vap/releases/download/VapPreview1.2.0/vap-player_1.2.0.exe)
## QQ group
If you run into any problems or have suggestions, feel free to open an issue or join the QQ group.
VAP QQ group: 719738292
## FAQ
[常见问题解答](https://github.com/Tencent/vap/wiki/FAQ)
## Apps using VAP
![Apps using VAP](https://user-images.githubusercontent.com/3285051/195480843-1b9a3be4-8b74-4754-95b8-67638776f036.png)
To have your app's icon shown here, join the QQ group; the group announcement explains how to submit it.
## License
VAP is under the MIT license. See the [LICENSE](./LICENSE.txt) file for details.


@@ -0,0 +1,33 @@
// QGBaseDecoder.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <Foundation/Foundation.h>
#import "QGAnimatedImageDecodeThread.h"
#import "QGBaseDFileInfo.h"
extern NSString* kQGVAPDecoderSeekStart;
extern NSString* kQGVAPDecoderSeekFinish;
@interface QGBaseDecoder : NSObject
@property (atomic, assign) NSInteger currentDecodeFrame; // index of the frame currently being decoded
@property (nonatomic, readonly) QGBaseDFileInfo *fileInfo; // info of the file being decoded; can only be set via the initializer
- (instancetype)initWith:(QGBaseDFileInfo *)fileInfo error:(NSError **)error;
- (void)decodeFrame:(NSInteger)frameIndex buffers:(NSMutableArray *)buffers;
- (BOOL)shouldStopDecode:(NSInteger)nextFrameIndex;
- (BOOL)isFrameIndexBeyondEnd:(NSInteger)frameIndex;
@end
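
QGBaseDecoder is an abstract template: `decodeFrame:buffers:`, `shouldStopDecode:` and `isFrameIndexBeyondEnd:` are no-ops here and are meant to be overridden by concrete decoders (QGMP4FrameHWDecoder below is the real one). A minimal sketch of what a subclass looks like; `MyToyDecoder` is a made-up name used only for illustration.

```objc
// Illustration only; not part of the pod.
#import "QGBaseDecoder.h"

@interface MyToyDecoder : QGBaseDecoder
@end

@implementation MyToyDecoder

// Decode the requested frame and append the resulting frame object to the shared buffer array.
- (void)decodeFrame:(NSInteger)frameIndex buffers:(NSMutableArray *)buffers {
    // ... produce a frame for frameIndex and [buffers addObject:frame];
}

// Return YES once no further frames can be decoded.
- (BOOL)shouldStopDecode:(NSInteger)nextFrameIndex {
    return NO;
}

- (BOOL)isFrameIndexBeyondEnd:(NSInteger)frameIndex {
    return NO;
}

@end
```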


@@ -0,0 +1,73 @@
// QGBaseDecoder.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGBaseDecoder.h"
#import "QGAnimatedImageDecodeThreadPool.h"
NSString* kQGVAPDecoderSeekStart = @"kQGVAPDecoderSeekStart";
NSString* kQGVAPDecoderSeekFinish = @"kQGVAPDecoderSeekFinish";
@interface QGBaseDecoder() {
QGBaseDFileInfo *_fileInfo;
}
@end
@implementation QGBaseDecoder
- (instancetype)initWith:(QGBaseDFileInfo *)fileInfo error:(NSError **)error {
if (self = [super init]) {
_currentDecodeFrame = -1;
_fileInfo = fileInfo;
_fileInfo.occupiedCount ++;
}
return self;
}
- (QGBaseDFileInfo *)fileInfo {
return _fileInfo;
}
/**
Whether decoding should stop before decoding the next frame; return YES to stop.
@param nextFrameIndex index of the next frame to be decoded
@return YES if decoding should stop
*/
- (BOOL)shouldStopDecode:(NSInteger)nextFrameIndex {
// No implementation here. Meant to be overriden in subclass.
return NO;
}
- (BOOL)isFrameIndexBeyondEnd:(NSInteger)frameIndex {
return NO;
}
/**
Decode the frame at the given index asynchronously and append the result to buffers.
@param frameIndex index of the frame to decode
@param buffers array that receives the decoded frames
*/
- (void)decodeFrame:(NSInteger)frameIndex buffers:(NSMutableArray *)buffers {
// No implementation here. Meant to be overriden in subclass.
}
@end


@@ -0,0 +1,42 @@
// QGMP4FrameHWDecoder.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGBaseDecoder.h"
#import "QGMP4HWDFileInfo.h"
#import <UIKit/UIKit.h>
/* Error codes of the MP4 hardware decoder */
typedef NS_ENUM(NSInteger, QGMP4HWDErrorCode){
QGMP4HWDErrorCode_FileNotExist = 10000, // file does not exist
QGMP4HWDErrorCode_InvalidMP4File = 10001, // invalid mp4 file
QGMP4HWDErrorCode_CanNotGetStreamInfo = 10002, // cannot get video stream info
QGMP4HWDErrorCode_CanNotGetStream = 10003, // cannot get the video stream
QGMP4HWDErrorCode_ErrorCreateVTBDesc = 10004, // failed to create the VideoToolbox format description
QGMP4HWDErrorCode_ErrorCreateVTBSession = 10005, // failed to create the VideoToolbox decompression session
};
@interface UIDevice (HWD)
- (BOOL)hwd_isSimulator;
@end
@interface QGMP4FrameHWDecoder : QGBaseDecoder
+ (NSString *)errorDescriptionForCode:(QGMP4HWDErrorCode)errorCode;
@end
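
The error codes above map to human-readable descriptions via `errorDescriptionForCode:`, and the `UIDevice (HWD)` category detects the simulator, where VideoToolbox hardware decoding is unavailable. A small usage sketch; the two helper functions are hypothetical and only illustrate the calls.

```objc
#import "QGMP4FrameHWDecoder.h"

// Hypothetical helper: log a decoder error in readable form.
static void LogVAPDecodeError(QGMP4HWDErrorCode code) {
    NSString *message = [QGMP4FrameHWDecoder errorDescriptionForCode:code];
    NSLog(@"VAP decode failed (%ld): %@", (long)code, message);
}

// Hardware decoding is not available in the simulator, so callers may want to bail out early.
static BOOL CanUseHardwareDecoding(void) {
    return ![[UIDevice currentDevice] hwd_isSimulator];
}
```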


@@ -0,0 +1,574 @@
// QGMP4FrameHWDecoder.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGMP4FrameHWDecoder.h"
#import "QGVAPWeakProxy.h"
#import "QGMP4AnimatedImageFrame.h"
#import "QGBaseAnimatedImageFrame+Displaying.h"
#import <VideoToolbox/VideoToolbox.h>
#import "QGHWDMP4OpenGLView.h"
#import "QGMP4Parser.h"
#import "QGVAPSafeMutableArray.h"
#import "NSNotificationCenter+VAPThreadSafe.h"
#include <sys/sysctl.h>
#import <AVFoundation/AVFoundation.h>
@implementation UIDevice (HWD)
- (BOOL)hwd_isSimulator {
static dispatch_once_t token;
static BOOL isSimulator = NO;
dispatch_once(&token, ^{
NSString *model = [self machineName];
if ([model isEqualToString:@"x86_64"] || [model isEqualToString:@"i386"]) {
isSimulator = YES;
}
});
return isSimulator;
}
- (NSString *)machineName {
static dispatch_once_t token;
static NSString *name;
dispatch_once(&token, ^{
size_t size;
sysctlbyname("hw.machine", NULL, &size, NULL, 0);
char *machineName = malloc(size);
sysctlbyname("hw.machine", machineName, &size, NULL, 0);
name = [NSString stringWithUTF8String:machineName];
free(machineName);
});
return name;
}
@end
@interface NSArray (SafeOperation)
@end
@implementation NSArray (SafeOperation)
- (id)safeObjectAtIndex:(NSUInteger)index
{
if (index >= self.count) {
NSAssert(0, @"Error: access to array index which is beyond bounds! ");
return nil;
}
return self[index];
}
@end
@interface QGMP4FrameHWDecoder() {
NSMutableArray *_buffers;
int _videoStream;
int _outputWidth, _outputHeight;
OSStatus _status;
BOOL _isFinish;
VTDecompressionSessionRef _mDecodeSession;
CMFormatDescriptionRef _mFormatDescription;
NSInteger _finishFrameIndex;
NSError *_constructErr;
QGMP4ParserProxy *_mp4Parser;
int _invalidRetryCount;
}
@property (atomic, strong) dispatch_queue_t decodeQueue; //dispatch decode task
@property (nonatomic, strong) NSData *ppsData; //Picture Parameter Set
@property (nonatomic, strong) NSData *spsData; //Sequence Parameter Set
/** Video Parameter Set */
@property (nonatomic, strong) NSData *vpsData;
@property (atomic, assign) NSInteger lastDecodeFrame;
@end
NSString *const QGMP4HWDErrorDomain = @"QGMP4HWDErrorDomain";
@implementation QGMP4FrameHWDecoder
+ (NSString *)errorDescriptionForCode:(QGMP4HWDErrorCode)errorCode {
NSArray *errorDescs = @[@"文件不存在",@"非法文件格式",@"无法获取视频流信息",@"无法获取视频流",@"VTB创建desc失败",@"VTB创建session失败"];
NSString *desc = @"";
switch (errorCode) {
case QGMP4HWDErrorCode_FileNotExist:
desc = [errorDescs safeObjectAtIndex:0];
break;
case QGMP4HWDErrorCode_InvalidMP4File:
desc = [errorDescs safeObjectAtIndex:1];
break;
case QGMP4HWDErrorCode_CanNotGetStreamInfo:
desc = [errorDescs safeObjectAtIndex:2];
break;
case QGMP4HWDErrorCode_CanNotGetStream:
desc = [errorDescs safeObjectAtIndex:3];
break;
case QGMP4HWDErrorCode_ErrorCreateVTBDesc:
desc = [errorDescs safeObjectAtIndex:4];
break;
case QGMP4HWDErrorCode_ErrorCreateVTBSession:
desc = [errorDescs safeObjectAtIndex:5];
break;
default:
break;
}
return desc;
}
- (instancetype)initWith:(QGMP4HWDFileInfo *)fileInfo error:(NSError *__autoreleasing *)error{
if (self = [super initWith:fileInfo error:error]) {
_decodeQueue = dispatch_queue_create("com.qgame.vap.decode", DISPATCH_QUEUE_SERIAL);
_lastDecodeFrame = -1;
_mp4Parser = fileInfo.mp4Parser;
BOOL isOpenSuccess = [self onInputStart];
if (!isOpenSuccess) {
VAP_Event(kQGVAPModuleCommon, @"onInputStart fail!");
if (error) {
*error = _constructErr;
}
self = nil;
return nil;
}
[self registerNotification];
}
return self;
}
- (void)registerNotification {
}
- (void)hwd_didReceiveEnterBackgroundNotification:(NSNotification *)notification {
}
- (void)decodeFrame:(NSInteger)frameIndex buffers:(NSMutableArray *)buffers {
if (frameIndex == self.currentDecodeFrame) {
VAP_Event(kQGVAPModuleCommon, @"already in decode");
return ;
}
self.currentDecodeFrame = frameIndex;
_buffers = buffers;
dispatch_async(self.decodeQueue, ^{
if (frameIndex != self.lastDecodeFrame + 1) {
// only decode strictly sequential frames; drop out-of-order requests
return;
}
[self _decodeFrame:frameIndex drop:NO];
});
}
- (void)_decodeFrame:(NSInteger)frameIndex drop:(BOOL)dropFlag {
if (_isFinish) {
return ;
}
if (!_buffers) {
return ;
}
if (self.spsData == nil || self.ppsData == nil) {
return ;
}
// record when decoding starts so the decode time can be measured
NSDate *startDate = [NSDate date];
NSData *packetData = [_mp4Parser readPacketOfSample:frameIndex];
if (!packetData.length) {
_finishFrameIndex = frameIndex;
[self _onInputEnd];
return;
}
// read the pts of this sample; pts values were collected while parsing the mp4 boxes
uint64_t currentPts = [_mp4Parser.videoSamples[frameIndex] pts];
CVPixelBufferRef outputPixelBuffer = NULL;
// 4. get NALUnit payload into a CMBlockBuffer,
CMBlockBufferRef blockBuffer = NULL;
_status = CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault,
(void *)packetData.bytes,
packetData.length,
kCFAllocatorNull, NULL, 0,
packetData.length, 0,
&blockBuffer);
// 6. create a CMSampleBuffer.
CMSampleBufferRef sampleBuffer = NULL;
const size_t sampleSizeArray[] = {packetData.length};
_status = CMSampleBufferCreateReady(kCFAllocatorDefault,
blockBuffer,
_mFormatDescription,
1, 0, NULL, 1, sampleSizeArray,
&sampleBuffer);
if (blockBuffer) {
CFRelease(blockBuffer);
}
// 7. use VTDecompressionSessionDecodeFrame
if (@available(iOS 9.0, *)) {
__typeof(self) __weak weakSelf = self;
VTDecodeFrameFlags flags = 0;
VTDecodeInfoFlags flagOut = 0;
OSStatus status = VTDecompressionSessionDecodeFrameWithOutputHandler(_mDecodeSession, sampleBuffer, flags, &flagOut, ^(OSStatus status, VTDecodeInfoFlags infoFlags, CVImageBufferRef _Nullable imageBuffer, CMTime presentationTimeStamp, CMTime presentationDuration) {
__typeof(self) strongSelf = weakSelf;
if (strongSelf == nil) {
return;
}
[strongSelf handleDecodePixelBuffer:imageBuffer
sampleBuffer:sampleBuffer
frameIndex:frameIndex
currentPts:currentPts
startDate:startDate
status:status
needDrop:dropFlag];
});
if (status == kVTInvalidSessionErr) {
CFRelease(sampleBuffer);
// the decode session became invalid (e.g. the app entered the background); retry only a few times
if (_invalidRetryCount >= 3) {
return;
}
[self resetDecoder];
// seek back to the nearest key frame (I-frame) and decode forward to the current frame
[self findKeyFrameAndDecodeToCurrent:frameIndex];
} else {
_invalidRetryCount = 0;
}
} else {
// 7. use VTDecompressionSessionDecodeFrame
VTDecodeFrameFlags flags = 0;
VTDecodeInfoFlags flagOut = 0;
_status = VTDecompressionSessionDecodeFrame(_mDecodeSession, sampleBuffer, flags, &outputPixelBuffer, &flagOut);
if (_status == kVTInvalidSessionErr) {
CFRelease(sampleBuffer);
// the decode session became invalid (e.g. the app entered the background); retry only a few times
if (_invalidRetryCount >= 3) {
return;
}
[self resetDecoder];
// seek back to the nearest key frame (I-frame) and decode forward to the current frame
[self findKeyFrameAndDecodeToCurrent:frameIndex];
return;
} else {
_invalidRetryCount = 0;
}
[self handleDecodePixelBuffer:outputPixelBuffer
sampleBuffer:sampleBuffer
frameIndex:frameIndex
currentPts:currentPts
startDate:startDate
status:_status
needDrop:dropFlag];
}
}
- (void)handleDecodePixelBuffer:(CVPixelBufferRef)pixelBuffer
sampleBuffer:(CMSampleBufferRef)sampleBuffer
frameIndex:(NSInteger)frameIndex
currentPts:(uint64_t)currentPts
startDate:(NSDate *)startDate
status:(OSStatus)status
needDrop:(BOOL)dropFlag {
self.lastDecodeFrame = frameIndex;
CFRelease(sampleBuffer);
if(status == kVTInvalidSessionErr) {
VAP_Error(kQGVAPModuleCommon, @"decompress fail! frame:%@ kVTInvalidSessionErr error:%@", @(frameIndex), @(status));
} else if(status == kVTVideoDecoderBadDataErr) {
VAP_Error(kQGVAPModuleCommon, @"decompress fail! frame:%@ kVTVideoDecoderBadDataErr error:%@", @(frameIndex), @(status));
} else if(status != noErr) {
VAP_Error(kQGVAPModuleCommon, @"decompress fail! frame:%@ error:%@", @(frameIndex), @(status));
}
if (dropFlag) {
return;
}
QGMP4AnimatedImageFrame *newFrame = [[QGMP4AnimatedImageFrame alloc] init];
// retain the pixel buffer so it stays valid for the lifetime of the frame
CVPixelBufferRetain(pixelBuffer);
newFrame.pixelBuffer = pixelBuffer;
newFrame.frameIndex = frameIndex; // frame index follows decode (dts) order
NSTimeInterval decodeTime = [[NSDate date] timeIntervalSinceDate:startDate]*1000;
newFrame.decodeTime = decodeTime;
newFrame.defaultFps = (int)_mp4Parser.fps;
newFrame.pts = currentPts;
// 8. insert into buffer
[_buffers addObject:newFrame];
// 9. sort
[_buffers sortUsingComparator:^NSComparisonResult(QGMP4AnimatedImageFrame * _Nonnull obj1, QGMP4AnimatedImageFrame * _Nonnull obj2) {
return [@(obj1.pts) compare:@(obj2.pts)];
}];
}
#pragma mark - override
- (BOOL)shouldStopDecode:(NSInteger)nextFrameIndex {
return _isFinish;
}
- (BOOL)isFrameIndexBeyondEnd:(NSInteger)frameIndex {
if (_finishFrameIndex > 0) {
return (frameIndex >= _finishFrameIndex);
}
return NO;
}
-(void)dealloc {
[[NSNotificationCenter defaultCenter] removeObserver:self];
[self _onInputEnd];
self.fileInfo.occupiedCount --;
if (self.fileInfo.occupiedCount <= 0) {
}
}
#pragma mark - private methods
- (BOOL)onInputStart {
NSFileManager *fileMgr = [NSFileManager defaultManager];
if (![fileMgr fileExistsAtPath:self.fileInfo.filePath]) {
_constructErr = [NSError errorWithDomain:QGMP4HWDErrorDomain code:QGMP4HWDErrorCode_FileNotExist userInfo:[self errorUserInfo]];
return NO;
}
_isFinish = NO;
self.vpsData = nil;
self.spsData = nil;
self.ppsData = nil;
_outputWidth = (int)_mp4Parser.picWidth;
_outputHeight = (int)_mp4Parser.picHeight;
BOOL paramsSetInitSuccess = [self initPPSnSPS];
return paramsSetInitSuccess;
}
- (BOOL)initPPSnSPS {
VAP_Info(kQGVAPModuleCommon, @"initPPSnSPS");
if (self.spsData && self.ppsData) {
VAP_Error(kQGVAPModuleCommon, @"sps&pps is already has value.");
return YES;
}
self.spsData = _mp4Parser.spsData;
self.ppsData = _mp4Parser.ppsData;
self.vpsData = _mp4Parser.vpsData;
// 2. create CMFormatDescription
if (self.spsData != nil && self.ppsData != nil && _mp4Parser.videoCodecID != QGMP4VideoStreamCodecIDUnknown) {
if (_mp4Parser.videoCodecID == QGMP4VideoStreamCodecIDH264) {
const uint8_t* const parameterSetPointers[2] = { (const uint8_t*)[self.spsData bytes], (const uint8_t*)[self.ppsData bytes] };
const size_t parameterSetSizes[2] = { [self.spsData length], [self.ppsData length] };
_status = CMVideoFormatDescriptionCreateFromH264ParameterSets(kCFAllocatorDefault,
2,
parameterSetPointers,
parameterSetSizes,
4,
&_mFormatDescription);
if (_status != noErr) {
VAP_Event(kQGVAPModuleCommon, @"CMVideoFormatDescription. Creation: %@.", (_status == noErr) ? @"successfully." : @"failed.");
_constructErr = [NSError errorWithDomain:QGMP4HWDErrorDomain code:QGMP4HWDErrorCode_ErrorCreateVTBDesc userInfo:[self errorUserInfo]];
return NO;
}
} else if (_mp4Parser.videoCodecID == QGMP4VideoStreamCodecIDH265) {
if (@available(iOS 11.0, *)) {
if(VTIsHardwareDecodeSupported(kCMVideoCodecType_HEVC)) {
const uint8_t* const parameterSetPointers[3] = {(const uint8_t*)[self.vpsData bytes], (const uint8_t*)[self.spsData bytes], (const uint8_t*)[self.ppsData bytes]};
const size_t parameterSetSizes[3] = {[self.vpsData length], [self.spsData length], [self.ppsData length]};
_status = CMVideoFormatDescriptionCreateFromHEVCParameterSets(kCFAllocatorDefault,
3, // parameter_set_count
parameterSetPointers, // &parameter_set_pointers
parameterSetSizes, // &parameter_set_sizes
4, // nal_unit_header_length
NULL,
&_mFormatDescription);
if (_status != noErr) {
VAP_Event(kQGVAPModuleCommon, @"CMVideoFormatDescription. Creation: %@.", (_status == noErr) ? @"successfully." : @"failed.");
_constructErr = [NSError errorWithDomain:QGMP4HWDErrorDomain code:QGMP4HWDErrorCode_ErrorCreateVTBDesc userInfo:[self errorUserInfo]];
return NO;
}
} else {
VAP_Event(kQGVAPModuleCommon, @"H.265 decoding is un-supported because of the hardware");
return NO;
}
} else {
VAP_Event(kQGVAPModuleCommon, @"System version is too low to support H.265 decoding");
return NO;
}
}
}
// 3. create VTDecompressionSession
return [self createDecompressionSession];
}
- (BOOL)createDecompressionSession {
CFDictionaryRef attrs = NULL;
const void *keys[] = {kCVPixelBufferPixelFormatTypeKey};
// kCVPixelFormatType_420YpCbCr8Planar is YUV420
// kCVPixelFormatType_420YpCbCr8BiPlanarFullRange is NV12
uint32_t v = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
const void *values[] = { CFNumberCreate(NULL, kCFNumberSInt32Type, &v) };
attrs = CFDictionaryCreate(NULL, keys, values, 1, NULL, NULL);
if ([UIDevice currentDevice].systemVersion.floatValue >= 9.0) {
_status = VTDecompressionSessionCreate(kCFAllocatorDefault,
_mFormatDescription,
NULL,
attrs,
NULL,
&_mDecodeSession);
if (_status != noErr) {
CFRelease(attrs);
_constructErr = [NSError errorWithDomain:QGMP4HWDErrorDomain code:QGMP4HWDErrorCode_ErrorCreateVTBSession userInfo:[self errorUserInfo]];
return NO;
}
} else {
VTDecompressionOutputCallbackRecord callBackRecord;
callBackRecord.decompressionOutputCallback = didDecompress;
callBackRecord.decompressionOutputRefCon = NULL;
_status = VTDecompressionSessionCreate(kCFAllocatorDefault,
_mFormatDescription,
NULL, attrs,
&callBackRecord,
&_mDecodeSession);
if (_status != noErr) {
CFRelease(attrs);
_constructErr = [NSError errorWithDomain:QGMP4HWDErrorDomain code:QGMP4HWDErrorCode_ErrorCreateVTBSession userInfo:[self errorUserInfo]];
return NO;
}
}
CFRelease(attrs);
return YES;
}
- (void)resetDecoder {
// delete
if (_mDecodeSession) {
VTDecompressionSessionWaitForAsynchronousFrames(_mDecodeSession);
VTDecompressionSessionInvalidate(_mDecodeSession);
CFRelease(_mDecodeSession);
_mDecodeSession = NULL;
}
// recreate
[self createDecompressionSession];
}
- (void)findKeyFrameAndDecodeToCurrent:(NSInteger)frameIndex {
[[NSNotificationCenter defaultCenter] postNotificationName:kQGVAPDecoderSeekStart object:self];
NSArray<NSNumber *> *keyframeIndexes = [_mp4Parser videoSyncSampleIndexes];
NSInteger index = [[keyframeIndexes firstObject] integerValue];
for(NSNumber *number in keyframeIndexes) {
if(number.integerValue < frameIndex) {
index = number.integerValue;
continue;
} else {
break;
}
}
// seek to last key frame
while (index < frameIndex) {
[self _decodeFrame:index drop:YES];
index++;
}
[self _decodeFrame:frameIndex drop:NO];
[[NSNotificationCenter defaultCenter] postNotificationName:kQGVAPDecoderSeekFinish object:self];
}
- (void)_onInputEnd {
if (_isFinish) {
return ;
}
_isFinish = YES;
if (_mDecodeSession) {
VTDecompressionSessionWaitForAsynchronousFrames(_mDecodeSession);
VTDecompressionSessionInvalidate(_mDecodeSession);
CFRelease(_mDecodeSession);
_mDecodeSession = NULL;
}
if (self.spsData || self.ppsData || self.vpsData) {
self.spsData = nil;
self.ppsData = nil;
self.vpsData = nil;
}
if (_mFormatDescription) {
CFRelease(_mFormatDescription);
_mFormatDescription = NULL;
}
}
- (void)onInputEnd {
// make sure teardown runs on the decode queue so it does not race with in-flight decodes
__weak __typeof(self) weakSelf = self;
if ([NSThread isMainThread]) {
dispatch_sync(self.decodeQueue, ^{
[weakSelf _onInputEnd];
});
} else {
dispatch_async(self.decodeQueue, ^{
[weakSelf _onInputEnd];
});
}
}
//decode callback
static void didDecompress(void *decompressionOutputRefCon, void *sourceFrameRefCon, OSStatus status, VTDecodeInfoFlags infoFlags, CVImageBufferRef pixelBuffer, CMTime presentationTimeStamp, CMTime presentationDuration ){
CVPixelBufferRef *outputPixelBuffer = (CVPixelBufferRef *)sourceFrameRefCon;
*outputPixelBuffer = CVPixelBufferRetain(pixelBuffer);
}
- (NSDictionary *)errorUserInfo {
NSDictionary *userInfo = @{@"location" : self.fileInfo.filePath ? : @""};
return userInfo;
}
@end


@@ -0,0 +1,29 @@
// QGAnimatedImageBufferManager.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <Foundation/Foundation.h>
#import "QGAnimatedImageDecodeConfig.h"
#import "QGBaseAnimatedImageFrame.h"
@interface QGAnimatedImageBufferManager : NSObject
@property (nonatomic, strong) NSMutableArray *buffers;// frame buffers
- (instancetype)initWithConfig:(QGAnimatedImageDecodeConfig *)config;
- (QGBaseAnimatedImageFrame *)getBufferedFrame:(NSInteger)frameIndex;
- (BOOL)isBufferFull;
- (QGBaseAnimatedImageFrame *)popVideoFrame;
@end
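
The buffer manager is a small producer/consumer helper: decoders append frames to `buffers`, and the playback side either pops them in order or looks them up by index. A consumption sketch, assuming some decoder is already filling `buffers` on its own queue; the wrapping function and the "wait until full" policy are illustrative only.

```objc
#import "QGAnimatedImageBufferManager.h"

// Hypothetical consumer step, for illustration only.
static void ConsumeOneFrame(QGAnimatedImageBufferManager *bufferManager) {
    if (![bufferManager isBufferFull]) {
        return; // wait until every slot holds a decoded frame before starting playback
    }
    QGBaseAnimatedImageFrame *frame = [bufferManager popVideoFrame]; // earliest buffered frame
    NSLog(@"next frame to display: %ld", (long)frame.frameIndex);
}
```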


@@ -0,0 +1,101 @@
// QGAnimatedImageBufferManager.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGAnimatedImageBufferManager.h"
#import "QGVAPSafeMutableArray.h"
@interface QGAnimatedImageBufferManager() {
QGAnimatedImageDecodeConfig *_config; // decode configuration
}
@end
@implementation QGAnimatedImageBufferManager
- (instancetype)initWithConfig:(QGAnimatedImageDecodeConfig *)config {
if (self = [super init]) {
_config = config;
[self createBuffersWithConfig:config];
}
return self;
}
- (void)createBuffersWithConfig:(QGAnimatedImageDecodeConfig *)config {
_buffers = [[QGVAPSafeMutableArray alloc] initWithCapacity:config.bufferCount];
}
/**
Return the buffered frame for the given index, or nil if it has not been decoded yet.
@param frameIndex index of the wanted frame
@return the buffered frame, or nil if it is not in the buffer
*/
- (QGBaseAnimatedImageFrame *)getBufferedFrame:(NSInteger)frameIndex {
if (_buffers.count == 0) {
//NSLog(@"fail buffer is nil");
return nil;
}
NSInteger bufferIndex = frameIndex%_buffers.count;
if (bufferIndex > _buffers.count-1) {
//NSLog(@"fail");
return nil;
}
id frame = [_buffers objectAtIndex:bufferIndex];
if (![frame isKindOfClass:[QGBaseAnimatedImageFrame class]] || ([(QGBaseAnimatedImageFrame*)frame frameIndex] != frameIndex)) {
return nil;
}
return frame;
}
- (QGBaseAnimatedImageFrame *)popVideoFrame {
if (!_buffers.count) {
return nil;
}
if (![_buffers.firstObject isKindOfClass:[QGBaseAnimatedImageFrame class]]) {
return nil;
}
QGBaseAnimatedImageFrame *frame = _buffers.firstObject;
[_buffers removeObjectAtIndex:0];
return frame;
}
/**
Whether every buffer slot currently holds a decoded QGBaseAnimatedImageFrame.
@return YES if the buffer is full
*/
- (BOOL)isBufferFull {
__block BOOL isFull = YES;
[_buffers enumerateObjectsUsingBlock:^(id _Nonnull obj, NSUInteger idx, BOOL * _Nonnull stop) {
if (![obj isKindOfClass:[QGBaseAnimatedImageFrame class]]) {
isFull = NO;
*stop = YES;
}
}];
return isFull;
}
- (void)dealloc {
}
@end


@@ -0,0 +1,25 @@
// QGAnimatedImageDecodeConfig.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <Foundation/Foundation.h>
@interface QGAnimatedImageDecodeConfig : NSObject
@property (nonatomic, assign) NSInteger threadCount;// number of decode threads
@property (nonatomic, assign) NSInteger bufferCount;// number of buffered frames
+ (instancetype)defaultConfig;
@end


@@ -0,0 +1,28 @@
// QGAnimatedImageDecodeConfig.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGAnimatedImageDecodeConfig.h"
@implementation QGAnimatedImageDecodeConfig
+ (instancetype)defaultConfig {
QGAnimatedImageDecodeConfig *config = [QGAnimatedImageDecodeConfig new];
config.threadCount = 1;
config.bufferCount = 5;
return config;
}
@end


@@ -0,0 +1,62 @@
// QGAnimatedImageDecodeManager.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <Foundation/Foundation.h>
#import "QGBaseDecoder.h"
#import "QGBaseAnimatedImageFrame.h"
#import "QGAnimatedImageDecodeConfig.h"
@class QGAnimatedImageDecodeManager;
@protocol QGAnimatedImageDecoderDelegate <NSObject>
/**
Required. Returns the decoder class that the manager should instantiate.
@param manager the decode manager
@return the decoder Class (a QGBaseDecoder subclass)
*/
- (Class)decoderClassForManager:(QGAnimatedImageDecodeManager *)manager;
@optional
- (BOOL)shouldSetupAudioPlayer;
/**
Called when decoding reaches the end of the file.
@param decoder the decoder that finished
*/
- (void)decoderDidFinishDecode:(QGBaseDecoder *)decoder;
- (void)decoderDidFailDecode:(QGBaseDecoder *)decoder error:(NSError *)error;
@end
@interface QGAnimatedImageDecodeManager : NSObject
@property (nonatomic, weak) id<QGAnimatedImageDecoderDelegate> decoderDelegate;
- (instancetype)initWith:(QGBaseDFileInfo *)fileInfo
config:(QGAnimatedImageDecodeConfig *)config
delegate:(id<QGAnimatedImageDecoderDelegate>)delegate;
- (QGBaseAnimatedImageFrame *)consumeDecodedFrame:(NSInteger)frameIndex;
- (void)tryToStartAudioPlay;
- (void)tryToStopAudioPlay;
- (void)tryToPauseAudioPlay;
- (void)tryToResumeAudioPlay;
- (BOOL)containsThisDeocder:(id)decoder;
@end
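
The manager drives one or more decoders and is configured entirely through this delegate; the only required method returns the decoder class to instantiate. A sketch of a delegate implementation, assuming the VideoToolbox MP4 decoder declared above; `MyVapDecodeDelegate` is a made-up name.

```objc
#import "QGAnimatedImageDecodeManager.h"
#import "QGMP4FrameHWDecoder.h"

// Hypothetical delegate, for illustration only.
@interface MyVapDecodeDelegate : NSObject <QGAnimatedImageDecoderDelegate>
@end

@implementation MyVapDecodeDelegate

- (Class)decoderClassForManager:(QGAnimatedImageDecodeManager *)manager {
    return [QGMP4FrameHWDecoder class]; // use the VideoToolbox-based MP4 decoder
}

- (BOOL)shouldSetupAudioPlayer {
    return YES; // let the manager create an AVAudioPlayer if the mp4 has an audio track
}

- (void)decoderDidFinishDecode:(QGBaseDecoder *)decoder {
    NSLog(@"VAP decode finished");
}

- (void)decoderDidFailDecode:(QGBaseDecoder *)decoder error:(NSError *)error {
    NSLog(@"VAP decode failed: %@", error);
}

@end
```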


@@ -0,0 +1,215 @@
// QGAnimatedImageDecodeManager.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGAnimatedImageDecodeManager.h"
#import "QGAnimatedImageBufferManager.h"
#import "QGBaseDecoder.h"
#import "QGVAPSafeMutableArray.h"
#import "QGMP4FrameHWDecoder.h"
#import "QGVAPLogger.h"
#import <sys/stat.h>
#import <AVFoundation/AVFoundation.h>
@interface QGAnimatedImageDecodeManager() {
QGAnimatedImageDecodeConfig *_config; // decode configuration
QGBaseDFileInfo *_fileInfo; // info of the file being decoded (mp4/sharpP)
NSMutableArray *_decoders; // decoder instances
QGAnimatedImageBufferManager *_bufferManager; // frame buffer manager
AVAudioPlayer *_audioPlayer;
}
@end
@implementation QGAnimatedImageDecodeManager
- (instancetype)initWith:(QGBaseDFileInfo *)fileInfo
config:(QGAnimatedImageDecodeConfig *)config
delegate:(id<QGAnimatedImageDecoderDelegate>)delegate {
if (self = [super init]) {
_config = config;
_fileInfo = fileInfo;
_decoderDelegate = delegate;
[self createDecodersByConfig:config];
_bufferManager = [[QGAnimatedImageBufferManager alloc] initWithConfig:config];
[self initializeBuffersFromIndex:0];
[self setupAudioPlayerIfNeed];
}
return self;
}
/**
Pop the decoded frame for the given index and schedule decoding of a following frame.
@param frameIndex index of the frame to consume
@return the decoded frame, or nil if it is not ready yet
*/
- (QGBaseAnimatedImageFrame *)consumeDecodedFrame:(NSInteger)frameIndex {
@synchronized (self) {
// for the very first frame, wait until the buffer has been filled
if (frameIndex == 0 && _bufferManager.buffers.count < _config.bufferCount) {
return nil;
}
BOOL decodeFinish = [self checkIfDecodeFinish:frameIndex];
QGBaseAnimatedImageFrame *frame = [_bufferManager popVideoFrame];
if (frame) {
// buffered frames are ordered by pts; stamp the display index onto the popped frame
frame.frameIndex = frameIndex;
[self decodeFrame:frameIndex+_config.bufferCount];
}
else if (!decodeFinish){
// the buffer is empty: if decoding has finished, notify the delegate; otherwise refill from this index
NSInteger decoderIndex = _decoders.count==1?0:frameIndex%_decoders.count;
QGBaseDecoder *decoder = _decoders[decoderIndex];
if ([decoder shouldStopDecode:frameIndex]) {
// decoding has stopped; tell the delegate it finished
if ([self.decoderDelegate respondsToSelector:@selector(decoderDidFinishDecode:)]) {
[self.decoderDelegate decoderDidFinishDecode:decoder];
}
return nil;
}
[self initializeBuffersFromIndex:frameIndex];
}
return frame;
}
}
- (void)tryToStartAudioPlay {
if (!_audioPlayer) {
return ;
}
[_audioPlayer play];
}
- (void)tryToStopAudioPlay {
if (!_audioPlayer) {
return;
}
// AVAudioPlayer (Core Audio) may deliver audioPlayerDidFinishPlaying:successfully: asynchronously on another thread,
// so only stop the player here and leave _audioPlayer in place.
[_audioPlayer stop];
}
- (void)tryToPauseAudioPlay {
if (!_audioPlayer) {
return;
}
[_audioPlayer pause];
}
- (void)tryToResumeAudioPlay {
if (!_audioPlayer) {
return;
}
[_audioPlayer play];
}
#pragma mark - private methods
- (BOOL)checkIfDecodeFinish:(NSInteger)frameIndex {
NSInteger decoderIndex = _decoders.count==1?0:frameIndex%_decoders.count;
QGBaseDecoder *decoder = _decoders[decoderIndex];
if ([decoder isFrameIndexBeyondEnd:frameIndex]) {
if ([self.decoderDelegate respondsToSelector:@selector(decoderDidFinishDecode:)]) {
[self.decoderDelegate decoderDidFinishDecode:decoder];
}
return YES;
}
return NO;
}
- (void)decodeFrame:(NSInteger)frameIndex {
if (!_decoders || _decoders.count == 0) {
//NSLog(@"error! can't find decoder");
return ;
}
NSInteger decoderIndex = _decoders.count==1?0:frameIndex%_decoders.count;
QGBaseDecoder *decoder = _decoders[decoderIndex];
if ([decoder shouldStopDecode:frameIndex]) {
return ;
}
[decoder decodeFrame:frameIndex buffers:_bufferManager.buffers];
}
- (void)createDecodersByConfig:(QGAnimatedImageDecodeConfig *)config {
if (!self.decoderDelegate || ![self.decoderDelegate respondsToSelector:@selector(decoderClassForManager:)]) {
VAP_Event(kQGVAPModuleCommon, @"you MUST implement the delegate in invoker!");
NSAssert(0, @"you MUST implement the delegate in invoker!");
return ;
}
_decoders = [QGVAPSafeMutableArray new];
for (int i = 0; i < config.threadCount; i ++) {
Class class = [self.decoderDelegate decoderClassForManager:self];
NSError *error = nil;
QGBaseDecoder *decoder = [class alloc];
decoder = [decoder initWith:_fileInfo error:&error];
if (!decoder) {
if ([self.decoderDelegate respondsToSelector:@selector(decoderDidFailDecode:error:)]) {
[self.decoderDelegate decoderDidFailDecode:nil error:error];
}
break ;
}
[_decoders addObject:decoder];
}
}
- (void)initializeBuffersFromIndex:(NSInteger)start {
for (int i = 0; i < _config.bufferCount; i++) {
[self decodeFrame:start+i];
}
}
- (void)setupAudioPlayerIfNeed {
if ([_decoderDelegate respondsToSelector:@selector(shouldSetupAudioPlayer)]) {
BOOL should = [_decoderDelegate shouldSetupAudioPlayer];
if (!should) {
return;
}
}
if ([_fileInfo isKindOfClass:[QGMP4HWDFileInfo class]]) {
QGMP4ParserProxy *mp4Parser = [(QGMP4HWDFileInfo *)_fileInfo mp4Parser];
if (!mp4Parser.audioTrackBox) {
_audioPlayer = nil;
return ;
}
NSError *error;
_audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:[NSURL URLWithString:_fileInfo.filePath] error:&error];
}
}
- (void)dealloc {
}
- (BOOL)containsThisDeocder:(id)decoder {
for (id d in _decoders) {
if (d == decoder) {
return YES;
}
}
return NO;
}
@end


@@ -0,0 +1,23 @@
// QGAnimatedImageDecodeThread.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <Foundation/Foundation.h>
@interface QGAnimatedImageDecodeThread : NSThread
@property (nonatomic, assign) BOOL occupied; // whether the thread is currently occupied by a decoder
@property (nonatomic, readonly) NSString *sequenceDec; // identification string for the thread
@end


@@ -0,0 +1,29 @@
// QGAnimatedImageDecodeThread.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGAnimatedImageDecodeThread.h"
@implementation QGAnimatedImageDecodeThread
- (NSString *)sequenceDec
{
#ifdef DEBUG
return [NSString stringWithFormat:@"%@",@([[self valueForKeyPath:@"private.seqNum"] integerValue])];// DEBUG only: read the thread's private sequence number via KVC
#else
return [self description];
#endif
}
@end


@@ -0,0 +1,24 @@
// QGAnimatedImageDecodeThreadPool.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <Foundation/Foundation.h>
#import "QGAnimatedImageDecodeThread.h"
@interface QGAnimatedImageDecodeThreadPool : NSObject
+ (instancetype)sharedPool;
- (QGAnimatedImageDecodeThread *)getDecodeThread;
@end


@@ -0,0 +1,77 @@
// QGAnimatedImageDecodeThreadPool.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGAnimatedImageDecodeThreadPool.h"
#import "QGAnimatedImageDecodeThread.h"
#import "QGVAPSafeMutableArray.h"
@interface QGAnimatedImageDecodeThreadPool (){
NSMutableArray *_threads;
}
@end
@implementation QGAnimatedImageDecodeThreadPool
+ (instancetype)sharedPool {
static QGAnimatedImageDecodeThreadPool *instance;
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
instance = [[QGAnimatedImageDecodeThreadPool alloc] init];
});
return instance;
}
- (instancetype)init {
if (self = [super init]) {
_threads = [QGVAPSafeMutableArray new];
}
return self;
}
/**
Return an idle decode thread, creating and starting a new one if none is free.
@return a decode thread from the pool
*/
- (QGAnimatedImageDecodeThread *)getDecodeThread {
QGAnimatedImageDecodeThread *freeThread = nil;
for (QGAnimatedImageDecodeThread *thread in _threads) {
if (!thread.occupied) {
freeThread = thread;
}
}
if (!freeThread) {
freeThread = [[QGAnimatedImageDecodeThread alloc] initWithTarget:self selector:@selector(run) object:nil];
[freeThread start];
[_threads addObject:freeThread];
}
return freeThread;
}
- (void)run{
// keep the thread's run loop alive so it can keep receiving decode tasks
@autoreleasepool {
[[NSRunLoop currentRunLoop] addPort:[NSPort port] forMode:NSDefaultRunLoopMode];
NSRunLoop *runLoop = [NSRunLoop currentRunLoop];
[runLoop run];
}
}
@end
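
Threads handed out by the pool keep a live run loop, so callers can push work onto them with `performSelector:onThread:withObject:waitUntilDone:`. A usage sketch; the `ScheduleDecode` wrapper, the `decoder` object and the `decodeNextFrame` selector are all hypothetical.

```objc
#import "QGAnimatedImageDecodeThreadPool.h"

// Illustration only: schedule work on a pooled run-loop-backed decode thread.
static void ScheduleDecode(id decoder) {
    QGAnimatedImageDecodeThread *thread = [[QGAnimatedImageDecodeThreadPool sharedPool] getDecodeThread];
    thread.occupied = YES; // mark it busy so other callers are given a different thread
    [decoder performSelector:@selector(decodeNextFrame)
                    onThread:thread
                  withObject:nil
               waitUntilDone:NO];
}
```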


@@ -0,0 +1,44 @@
// QGVAPConfigManager.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <Foundation/Foundation.h>
#import "QGMP4HWDFileInfo.h"
#import "QGVAPConfigModel.h"
#import "VAPMacros.h"
@class QGVAPSourceInfo;
@protocol QGVAPConfigDelegate <NSObject>
- (void)onVAPConfigResourcesLoaded:(QGVAPConfigModel *)config error:(NSError *)error;
@optional
- (NSString *)vap_contentForTag:(NSString *)tag resource:(QGVAPSourceInfo *)info; // resolve a resource placeholder tag from the config; return tag unchanged if it should not be replaced
- (void)vap_loadImageWithURL:(NSString *)urlStr context:(NSDictionary *)context completion:(VAPImageCompletionBlock)completionBlock;
@end
@interface QGVAPConfigManager : NSObject
@property (nonatomic, weak) id<QGVAPConfigDelegate> delegate;
@property (nonatomic, assign) BOOL hasValidConfig;
@property (nonatomic, strong) QGVAPConfigModel *model;
- (instancetype)initWith:(QGMP4HWDFileInfo *)fileInfo;
- (void)loadConfigResources;
- (void)loadMTLTextures:(id<MTLDevice>)device;
- (void)loadMTLBuffers:(id<MTLDevice>)device;
@end
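
The config manager parses the `vapc` box and asks its delegate to resolve text and image placeholders before rendering. A sketch of a delegate implementation; the class name and the `[userName]` tag are made up, and the `VAPImageCompletionBlock` usage follows its call site in QGVAPConfigManager.m below.

```objc
#import "QGVAPConfigManager.h"

// Hypothetical delegate, for illustration only.
@interface MyVapConfigDelegate : NSObject <QGVAPConfigDelegate>
@end

@implementation MyVapConfigDelegate

- (void)onVAPConfigResourcesLoaded:(QGVAPConfigModel *)config error:(NSError *)error {
    NSLog(@"VAP resources loaded, error: %@", error);
}

- (NSString *)vap_contentForTag:(NSString *)tag resource:(QGVAPSourceInfo *)info {
    if ([tag isEqualToString:@"[userName]"]) { // made-up tag defined by the asset designer
        return @"Alice"; // merge the current user's name into the text resource
    }
    return tag; // unknown tags are returned unchanged
}

- (void)vap_loadImageWithURL:(NSString *)urlStr context:(NSDictionary *)context completion:(VAPImageCompletionBlock)completionBlock {
    // Fetch urlStr with any image loader, then hand the result back, e.g.:
    // completionBlock(image, nil, urlStr);
}

@end
```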


@@ -0,0 +1,258 @@
// QGVAPConfigManager.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGVAPConfigManager.h"
#import "QGMP4Parser.h"
#import "QGVAPLogger.h"
#import "NSDictionary+VAPUtil.h"
#import "UIColor+VAPUtil.h"
#import "NSArray+VAPUtil.h"
#import "QGHWDMetalRenderer.h"
#import "QGVAPTextureLoader.h"
@interface QGVAPConfigManager () {
QGMP4HWDFileInfo *_fileInfo;
}
@end
@implementation QGVAPConfigManager
- (instancetype)initWith:(QGMP4HWDFileInfo *)fileInfo {
if (self = [super init]) {
_fileInfo = fileInfo;
[self setupConfig];
}
return self;
}
- (void)setupConfig {
QGMP4Box *vapc = [_fileInfo.mp4Parser.rootBox subBoxOfType:QGMP4BoxType_vapc];
if (!vapc) {
self.hasValidConfig = NO;
VAP_Error(kQGVAPModuleCommon, @"config can not find vapc box");
return ;
}
self.hasValidConfig = YES;
NSData *vapcData = [_fileInfo.mp4Parser readDataOfBox:vapc length:vapc.length-8 offset:8];
NSError *error = nil;
NSDictionary *configDictionary = [NSJSONSerialization JSONObjectWithData:vapcData options:kNilOptions error:&error];
if (error) {
VAP_Error(kQGVAPModuleCommon, @"fail to parse config as dictionary file %@", vapc);
}
[self parseConfigDictionary:configDictionary];
}
#pragma mark - resource loader
- (void)loadConfigResources {
if (self.model.resources.count == 0) {
if ([self.delegate respondsToSelector:@selector(onVAPConfigResourcesLoaded:error:)]) {
[self.delegate onVAPConfigResourcesLoaded:self.model error:nil];
}
return ;
}
// resolve the content tags into concrete values via the delegate
if ([self.delegate respondsToSelector:@selector(vap_contentForTag:resource:)]) {
[self.model.resources enumerateObjectsUsingBlock:^(QGVAPSourceInfo * _Nonnull resource, NSUInteger idx, BOOL * _Nonnull stop) {
resource.contentTagValue = [self.delegate vap_contentForTag:resource.contentTag resource:resource];
}];
}
if (![self.delegate respondsToSelector:@selector(vap_loadImageWithURL:context:completion:)]) {
return ;
}
__block NSError *loadError = nil;
dispatch_group_t group = dispatch_group_create();
[self.model.resources enumerateObjectsUsingBlock:^(QGVAPSourceInfo * _Nonnull resource, NSUInteger idx, BOOL * _Nonnull stop) {
NSString *tagContent = resource.contentTagValue;
if ([resource.type isEqualToString:kQGAGAttachmentSourceTypeText] && [resource.loadType isEqualToString:QGAGAttachmentSourceLoadTypeLocal]) {
resource.sourceImage = [QGVAPTextureLoader drawingImageForText:tagContent color:resource.color size:resource.size bold:[resource.style isEqualToString:kQGAGAttachmentSourceStyleBoldText]];
}
if ([resource.type isEqualToString:kQGAGAttachmentSourceTypeImg] && [resource.loadType isEqualToString:QGAGAttachmentSourceLoadTypeNet]) {
NSString *imageURL = tagContent;
NSDictionary *context = @{@"resource":resource};
dispatch_group_enter(group);
[self.delegate vap_loadImageWithURL:imageURL context:context completion:^(UIImage *image, NSError *error, NSString *imageURL) {
if (!image || error) {
VAP_Error(kQGVAPModuleCommon, @"loadImageWithURL %@ error:%@", imageURL, error);
loadError = (loadError ?: (error ?: ([NSError errorWithDomain:[NSString stringWithFormat:@"loadImageError:%@", imageURL] code:-1 userInfo:nil])));
}
resource.sourceImage = image;
dispatch_group_leave(group);
}];
}
}];
dispatch_group_notify(group, dispatch_get_main_queue(), ^{
if ([self.delegate respondsToSelector:@selector(onVAPConfigResourcesLoaded:error:)]) {
[self.delegate onVAPConfigResourcesLoaded:self.model error:loadError];
}
});
}
- (void)loadMTLTextures:(id<MTLDevice>)device {
[self.model.resources enumerateObjectsUsingBlock:^(QGVAPSourceInfo * _Nonnull obj, NSUInteger idx, BOOL * _Nonnull stop) {
id<MTLTexture> texture = [QGVAPTextureLoader loadTextureWithImage:obj.sourceImage device:device];
obj.sourceImage = nil;
obj.texture = texture;
}];
}
- (void)loadMTLBuffers:(id<MTLDevice>)device {
[self.model.resources enumerateObjectsUsingBlock:^(QGVAPSourceInfo * _Nonnull obj, NSUInteger idx, BOOL * _Nonnull stop) {
id<MTLBuffer> buffer = [QGVAPTextureLoader loadVapColorFillBufferWith:obj.color device:device];
obj.colorParamsBuffer = buffer;
}];
}
#pragma mark - parse json
- (void)parseConfigDictionary:(NSDictionary *)configDic {
NSDictionary *commonInfoDic = [configDic hwd_dicValue:@"info"];
NSArray *sourcesArr = [configDic hwd_arrValue:@"src"];
NSArray *framesArr = [configDic hwd_arrValue:@"frame"];
if (!commonInfoDic) {
VAP_Error(kQGVAPModuleCommon, @"has no commonInfoDic:%@", configDic);
return ;
}
QGVAPConfigModel *configModel = [QGVAPConfigModel new];
//parse
NSInteger version = [commonInfoDic hwd_integerValue:@"v"];
NSInteger frameCount = [commonInfoDic hwd_integerValue:@"f"];
CGFloat w = [commonInfoDic hwd_floatValue:@"w"];
CGFloat h = [commonInfoDic hwd_floatValue:@"h"];
CGFloat video_w = [commonInfoDic hwd_floatValue:@"videoW"];
CGFloat video_h = [commonInfoDic hwd_floatValue:@"videoH"];
CGFloat orientaion = [commonInfoDic hwd_integerValue:@"orien"];
NSInteger fps = [commonInfoDic hwd_integerValue:@"fps"];
BOOL isMerged = ([commonInfoDic hwd_integerValue:@"isVapx"] == 1);
NSArray *a_frame = [commonInfoDic hwd_arrValue:@"aFrame"];
NSArray *rgb_frame = [commonInfoDic hwd_arrValue:@"rgbFrame"];
self.model = configModel;
// build the common-info model
QGVAPCommonInfo *commonInfo = [QGVAPCommonInfo new];
commonInfo.version = version;
commonInfo.framesCount = frameCount;
commonInfo.size = CGSizeMake(w, h);
commonInfo.videoSize = CGSizeMake(video_w, video_h);
commonInfo.targetOrientaion = orientaion;
commonInfo.fps = fps;
commonInfo.isMerged = isMerged;
commonInfo.alphaAreaRect = a_frame ? [a_frame hwd_rectValue] : CGRectZero;
commonInfo.rgbAreaRect = rgb_frame ? [rgb_frame hwd_rectValue] : CGRectZero;
configModel.info = commonInfo;
// propagate the fps from the config down to the mp4 parser
_fileInfo.mp4Parser.fps = fps;
if (!sourcesArr) {
VAP_Error(kQGVAPModuleCommon, @"has no sourcesArr:%@", configDic);
return ;
}
// parse the source (resource) list
NSMutableDictionary <NSString *, QGVAPSourceInfo *>*sources = [NSMutableDictionary new];
[sourcesArr enumerateObjectsUsingBlock:^(NSDictionary *sourceDic, NSUInteger idx, BOOL * _Nonnull stop) {
if (![sourceDic isKindOfClass:[NSDictionary class]]) {
VAP_Error(kQGVAPModuleCommon, @"sourceDic is not dic:%@", sourceDic);
return ;
}
NSString *sourceID = [sourceDic hwd_stringValue:@"srcId"];
if (!sourceID) {
VAP_Error(kQGVAPModuleCommon, @"has no sourceID:%@", sourceDic);
return ;
}
//parse
QGAGAttachmentSourceType sourceType = [sourceDic hwd_stringValue:@"srcType"];
QGAGAttachmentSourceLoadType loadType = [sourceDic hwd_stringValue:@"loadType"];
NSString *contentTag = [sourceDic hwd_stringValue:@"srcTag"];
UIColor *color = [UIColor hwd_colorWithHexString:[sourceDic hwd_stringValue:@"color"]];
QGAGAttachmentSourceStyle style = [sourceDic hwd_stringValue:@"style"];
CGFloat width = [sourceDic hwd_floatValue:@"w"];
CGFloat height = [sourceDic hwd_floatValue:@"h"];
QGAGAttachmentFitType fitType = [sourceDic hwd_stringValue:@"fitType"];
QGVAPSourceInfo *sourceInfo = [QGVAPSourceInfo new];
sourceInfo.type = sourceType;
sourceInfo.style = style;
sourceInfo.contentTag = contentTag;
sourceInfo.color = color;
sourceInfo.size = CGSizeMake(width, height);
sourceInfo.fitType = fitType;
sourceInfo.loadType = loadType;
sources[sourceID] = sourceInfo;
}];
configModel.resources = sources.allValues;
// parse the per-frame merge info
if (!framesArr) {
VAP_Error(kQGVAPModuleCommon, @"has no framesArr:%@", configDic);
return ;
}
NSMutableDictionary <NSNumber *, NSArray<QGVAPMergedInfo *>*> *mergedConfig = [NSMutableDictionary new];
[framesArr enumerateObjectsUsingBlock:^(NSDictionary *frameMergedDic, NSUInteger idx, BOOL * _Nonnull stop) {
if (![frameMergedDic isKindOfClass:[NSDictionary class]]) {
VAP_Error(kQGVAPModuleCommon, @"frameMergedDic is not dic:%@", frameMergedDic);
return ;
}
NSInteger frameIndex = [frameMergedDic hwd_integerValue:@"i"];
NSMutableArray <QGVAPMergedInfo *> *mergedInfos = [NSMutableArray new];
NSArray *mergedObjs = [frameMergedDic hwd_arrValue:@"obj"];
[mergedObjs enumerateObjectsUsingBlock:^(NSDictionary *mergeInfoDic, NSUInteger idx, BOOL * _Nonnull stop) {
if (![mergeInfoDic isKindOfClass:[NSDictionary class]]) {
VAP_Error(kQGVAPModuleCommon, @"mergeInfoDic is not dic:%@", mergeInfoDic);
return ;
}
NSString *sourceID = [mergeInfoDic hwd_stringValue:@"srcId"];
QGVAPSourceInfo *sourceInfo = sources[sourceID];
if (!sourceInfo) {
VAP_Error(kQGVAPModuleCommon, @"sourceInfo is nil:%@", mergeInfoDic);
return ;
}
//parse
NSArray *frame = [mergeInfoDic hwd_arrValue:@"frame"];
NSArray *m_frame = [mergeInfoDic hwd_arrValue:@"mFrame"];
NSInteger renderIndex = [mergeInfoDic hwd_integerValue:@"z"];
NSInteger rotationAngle = [mergeInfoDic hwd_integerValue:@"mt"];
QGVAPMergedInfo *mergeInfo = [QGVAPMergedInfo new];
mergeInfo.source = sourceInfo;
mergeInfo.renderIndex = renderIndex;
mergeInfo.needMask = (m_frame != nil);
mergeInfo.renderRect = frame ? [frame hwd_rectValue] : CGRectZero;
mergeInfo.maskRect = m_frame ? [m_frame hwd_rectValue] : CGRectZero;
mergeInfo.maskRotation = rotationAngle;
[mergedInfos addObject:mergeInfo];
}];
NSArray *sortedMergeInfos = [mergedInfos sortedArrayUsingComparator:^NSComparisonResult(QGVAPMergedInfo *info1, QGVAPMergedInfo *info2) {
return [@(info1.renderIndex) compare:@(info2.renderIndex)];
}];
mergedConfig[@(frameIndex)] = sortedMergeInfos;
}];
configModel.mergedConfig = mergedConfig;
}
@end


@@ -0,0 +1,238 @@
// QGMP4Box.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <Foundation/Foundation.h>
#define ATOM_TYPE(a, b, c, d) ((d) | ((c) << 8) | ((b) << 16) | ((unsigned)(a) << 24))
#define READ32BIT(bytes) ((((bytes)[0]&0xff)<<24)+(((bytes)[1]&0xff)<<16)+(((bytes)[2]&0xff)<<8)+((bytes)[3]&0xff))
extern NSInteger const kQGBoxSizeLengthInBytes;
extern NSInteger const kQGBoxTypeLengthInBytes;
extern NSInteger const kQGBoxLargeSizeLengthInBytes;
extern NSInteger const kQGBoxLargeSizeFlagLengthInBytes;
@class QGMP4Box;
typedef NSData* (^QGMp4BoxDataFetcher)(QGMP4Box *box);
typedef NS_ENUM(NSUInteger, QGMP4CodecType) {
QGMP4CodecTypeUnknown = 0,
QGMP4CodecTypeVideo,
QGMP4CodecTypeAudio
};
typedef NS_ENUM(uint32_t, QGMP4TrackType) {
QGMP4TrackType_Video = ATOM_TYPE('v','i','d','e'),
QGMP4TrackType_Audio = ATOM_TYPE('s','o','u','n'),
QGMP4TrackType_Hint = ATOM_TYPE('h','i','n','t')
};
typedef NS_ENUM(NSUInteger, QGMP4BoxType) {
QGMP4BoxType_unknown = 0x0,
QGMP4BoxType_ftyp = ATOM_TYPE('f','t','y','p'),//0x66747970,
QGMP4BoxType_free = ATOM_TYPE('f','r','e','e'),//0x66726565,
QGMP4BoxType_mdat = ATOM_TYPE('m','d','a','t'),//0x6d646174,
QGMP4BoxType_moov = ATOM_TYPE('m','o','o','v'),//0x6d6f6f76,
QGMP4BoxType_mvhd = ATOM_TYPE('m','v','h','d'),//0x6d766864,
QGMP4BoxType_iods = ATOM_TYPE('i','o','d','s'),//0x696f6473,
QGMP4BoxType_trak = ATOM_TYPE('t','r','a','k'),//0x7472616b,
QGMP4BoxType_tkhd = ATOM_TYPE('t','k','h','d'),//0x746b6864,
QGMP4BoxType_edts = ATOM_TYPE('e','d','t','s'),//0x65647473,
QGMP4BoxType_elst = ATOM_TYPE('e','l','s','t'),//0x656c7374,
QGMP4BoxType_mdia = ATOM_TYPE('m','d','i','a'),//0x6d646961,
QGMP4BoxType_mdhd = ATOM_TYPE('m','d','h','d'),//0x6d646864,
QGMP4BoxType_hdlr = ATOM_TYPE('h','d','l','r'),//0x68646c72,
QGMP4BoxType_minf = ATOM_TYPE('m','i','n','f'),//0x6d696e66,
QGMP4BoxType_vmhd = ATOM_TYPE('v','m','h','d'),//0x766d6864,
QGMP4BoxType_dinf = ATOM_TYPE('d','i','n','f'),//0x64696e66,
QGMP4BoxType_dref = ATOM_TYPE('d','r','e','f'),//0x64726566,
QGMP4BoxType_url = ATOM_TYPE( 0 ,'u','r','l'),//0x75726c,
QGMP4BoxType_stbl = ATOM_TYPE('s','t','b','l'),//0x7374626c,
QGMP4BoxType_stsd = ATOM_TYPE('s','t','s','d'),//0x73747364,
QGMP4BoxType_avc1 = ATOM_TYPE('a','v','c','1'),//0x61766331,
QGMP4BoxType_avcC = ATOM_TYPE('a','v','c','C'),//0x61766343,
QGMP4BoxType_stts = ATOM_TYPE('s','t','t','s'),//0x73747473,
QGMP4BoxType_stss = ATOM_TYPE('s','t','s','s'),//0x73747373,
QGMP4BoxType_stsc = ATOM_TYPE('s','t','s','c'),//0x73747363,
QGMP4BoxType_stsz = ATOM_TYPE('s','t','s','z'),//0x7374737a,
QGMP4BoxType_stco = ATOM_TYPE('s','t','c','o'),//0x7374636f,
QGMP4BoxType_ctts = ATOM_TYPE('c', 't', 't', 's'),// video only; stores pts-dts offsets, from which pts can be computed
QGMP4BoxType_udta = ATOM_TYPE('u','d','t','a'),//0x75647461,
QGMP4BoxType_meta = ATOM_TYPE('m','e','t','a'),//0x6d657461,
QGMP4BoxType_ilst = ATOM_TYPE('i','l','s','t'),//0x696c7374,
QGMP4BoxType_data = ATOM_TYPE('d','a','t','a'),//0x64617461,
QGMP4BoxType_wide = ATOM_TYPE('w','i','d','e'),//0x77696465,
QGMP4BoxType_loci = ATOM_TYPE('l','o','c','i'),//0x6c6f6369,
QGMP4BoxType_smhd = ATOM_TYPE('s','m','h','d'),//0x736d6864,
QGMP4BoxType_vapc = ATOM_TYPE('v','a','p','c'),//0x76617063, VAP-specific box that stores the JSON config
QGMP4BoxType_hvc1 = ATOM_TYPE('h','v','c','1'),
QGMP4BoxType_hvcC = ATOM_TYPE('h','v','c','C')
};
typedef NS_ENUM(NSUInteger, QGMP4VideoStreamCodecID) {
QGMP4VideoStreamCodecIDUnknown = 0,
QGMP4VideoStreamCodecIDH264,
QGMP4VideoStreamCodecIDH265
};
/**
* QGCttsEntry
*/
@interface QGCttsEntry : NSObject
/** sampleCount */
@property (nonatomic, assign) uint32_t sampleCount;
/** compositionOffset */
@property (nonatomic, assign) uint32_t compositionOffset;
@end
@interface QGMP4BoxFactory : NSObject
+ (BOOL)isTypeValueValid:(QGMP4BoxType)type;
+ (Class)boxClassForType:(QGMP4BoxType)type;
+ (QGMP4Box *)createBoxForType:(QGMP4BoxType)type startIndex:(unsigned long long)startIndexInBytes length:(unsigned long long)length;
@end
@protocol QGMP4BoxDelegate <NSObject>
@optional
- (void)boxDidParsed:(QGMp4BoxDataFetcher)datablock;
@end
@interface QGMP4Box : NSObject <QGMP4BoxDelegate>
@property (nonatomic, assign) QGMP4BoxType type;
@property (nonatomic, assign) unsigned long long length;
@property (nonatomic, assign) unsigned long long startIndexInBytes;
@property (nonatomic, weak) QGMP4Box *superBox;
@property (nonatomic, strong) NSMutableArray *subBoxes;
- (instancetype)initWithType:(QGMP4BoxType)type startIndex:(unsigned long long)startIndexInBytes length:(unsigned long long)length;
- (id)subBoxOfType:(QGMP4BoxType)type;
- (id)superBoxOfType:(QGMP4BoxType)type;
@end
// holds the actual media data; how it is partitioned is described in the moov box.
@interface QGMP4MdatBox : QGMP4Box
@end
@interface QGMP4AvccBox : QGMP4Box
@end
@interface QGMP4HvccBox : QGMP4Box
@end
@interface QGMP4MvhdBox : QGMP4Box
@end
//sample description
@interface QGMP4StsdBox : QGMP4Box
@end
/**Samples within the media data are grouped into chunks. Chunks can be of different sizes, and the samples within a chunk can have different sizes. This table can be used to find the chunk that contains a sample, its position, and the associated sample description.
The table is compactly coded. Each entry gives the index of the first chunk of a run of chunks with the same characteristics. By subtracting one entry here from the previous one, you can compute how many chunks are in this run. You can convert this to a sample count by multiplying by the appropriate samples-per-chunk.*/
//https://blog.csdn.net/tung214/article/details/30492895
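//Worked example (illustrative numbers): entries {firstChunk:1, samplesPerChunk:3} and {firstChunk:5, samplesPerChunk:2}
//mean chunks 1-4 hold 3 samples each and every chunk from 5 on holds 2, so sample #11 (1-based)
//falls into chunk 4 as the 2nd sample of that chunk.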
@interface QGStscEntry : NSObject
@property (nonatomic, assign) uint32_t firstChunk;
@property (nonatomic, assign) uint32_t samplesPerChunk;
@property (nonatomic, assign) uint32_t sampleDescriptionIndex;
@end
@interface QGMP4StscBox : QGMP4Box
@property (nonatomic, strong) NSMutableArray<QGStscEntry *> *entries;
@end
@interface QGMP4StcoBox : QGMP4Box
@property (nonatomic, assign) uint32_t chunkCount;
@property (nonatomic, strong) NSMutableArray<NSNumber *> *chunkOffsets;
@end
@interface QGMP4StssBox : QGMP4Box
@property(nonatomic, strong) NSMutableArray<NSNumber *> *syncSamples;
@end
/**
* ctts
*/
@interface QGMP4CttsBox : QGMP4Box
/** compositionOffsets */
@property (nonatomic, strong) NSMutableArray<NSNumber *> *compositionOffsets;
@end
//This box contains a compact version of a table that allows indexing from decoding time to sample number. Other tables give sample sizes and pointers, from the sample number. Each entry in the table gives the number of consecutive samples with the same time delta, and the delta of those samples. By adding the deltas a complete time-to-sample map may be built.
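//Worked example (illustrative numbers): a single entry {sampleCount:150, sampleDelta:512} on a track with
//timescale 12800 describes 150 frames of 512/12800 s = 40 ms each, i.e. a constant 25 fps.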
@interface QGSttsEntry : NSObject
@property (nonatomic, assign) uint32_t sampleCount;
@property (nonatomic, assign) uint32_t sampleDelta;
@end
@interface QGMP4SttsBox : QGMP4Box
@property (nonatomic, strong) NSMutableArray<QGSttsEntry *> *entries;
@end
//sample size
@interface QGMP4StszBox : QGMP4Box
@property (nonatomic, assign) uint32_t sampleCount;
@property (nonatomic, strong) NSMutableArray<NSNumber *> *sampleSizes;
@end
@interface QGMP4TrackBox : QGMP4Box
@end
@interface QGMP4HdlrBox : QGMP4Box
@property (nonatomic, assign) QGMP4TrackType trackType;
@end
@interface QGMP4Sample : NSObject
@property (nonatomic, assign) QGMP4CodecType codecType;
@property (nonatomic, assign) uint32_t sampleDelta;
@property (nonatomic, assign) uint32_t sampleSize;
@property (nonatomic, assign) uint32_t sampleIndex;
@property (nonatomic, assign) uint32_t chunkIndex;
@property (nonatomic, assign) uint32_t streamOffset;
@property (nonatomic, assign) uint64_t pts;
@property (nonatomic, assign) uint64_t dts;
@property (nonatomic, assign) BOOL isKeySample;
@end
@interface QGChunkOffsetEntry : NSObject
@property (nonatomic, assign) uint32_t samplesPerChunk;
@property (nonatomic, assign) uint32_t offset;
@end

View File

@@ -0,0 +1,416 @@
// QGMP4Box.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGMP4Box.h"
#import "QGMP4Parser.h"
NSInteger const kQGBoxSizeLengthInBytes = 4;
NSInteger const kQGBoxTypeLengthInBytes = 4;
NSInteger const kQGBoxLargeSizeLengthInBytes = 8;
NSInteger const kQGBoxLargeSizeFlagLengthInBytes = 1;
#pragma mark - boxes
#pragma mark -- base box
@implementation QGMP4Box
- (instancetype)initWithType:(QGMP4BoxType)type startIndex:(unsigned long long)startIndexInBytes length:(unsigned long long)length {
if (self = [super init]) {
_type = type;
_startIndexInBytes = startIndexInBytes;
_length = length;
}
return self;
}
/**
 Recursively search the sub boxes for a box of the given type.
 @param type box type to look for
 @return the first matching sub box, or nil if none is found
 */
- (id)subBoxOfType:(QGMP4BoxType)type {
if (self.subBoxes) {
for (QGMP4Box *subBox in self.subBoxes) {
if (subBox.type == type) {
return subBox;
}
QGMP4Box *box = [subBox subBoxOfType:type];
if (box) {
return box;
}
}
}
return nil;
}
/**
 Walk up the super-box chain looking for a box of the given type.
 @param type box type to look for
 @return the first matching super box, or nil if none is found
 */
- (id)superBoxOfType:(QGMP4BoxType)type {
if (self.superBox) {
if (self.superBox.type == type) {
return self.superBox;
}
QGMP4Box *box = [self.superBox superBoxOfType:type];
if (box) {
return box;
}
}
return nil;
}
- (NSString *)description {
return [self descriptionForRecursionLevel:0];
}
- (NSString *)descriptionForRecursionLevel:(NSInteger)level {
__block NSString *des = [NSString stringWithFormat:@"Box:%@ offset:%@ size:%@ ",self.typeString,@(self.startIndexInBytes),@(self.length)];
for (int i = 0; i < level; i++) {
des = [NSString stringWithFormat:@"|--%@",des];
}
des = [NSString stringWithFormat:@"\n%@",des];
[self.subBoxes enumerateObjectsUsingBlock:^(QGMP4Box *obj, NSUInteger idx, BOOL * _Nonnull stop) {
des = [des stringByAppendingString:[obj descriptionForRecursionLevel:(level+1)]];
}];
return des;
}
- (NSString *)typeString {
NSUInteger value = self.type;
NSString *des = @"";
while (value > 0) {
NSUInteger hexValue = value&0xff;
value = value>>8;
des = [NSString stringWithFormat:@"%c%@",(int)hexValue,des];
}
return des;
}
@end
#pragma mark -- hvcc box
/**
* QGMP4HvccBox
*/
@implementation QGMP4HvccBox
@end
/**
 * QGCttsEntry: one ctts run, i.e. the pts-dts composition offset shared by a number of consecutive samples
 */
@implementation QGCttsEntry
@end
/**
 * QGMP4CttsBox: composition time to sample box; expands the per-run offsets into one pts-dts offset per sample
 */
@implementation QGMP4CttsBox
- (void)boxDidParsed:(QGMp4BoxDataFetcher)datablock {
if (!_compositionOffsets) {
_compositionOffsets = [NSMutableArray new];
}
NSData *cttsData = datablock(self);
const char *bytes = cttsData.bytes;
uint32_t entryCount = READ32BIT(&bytes[12]);
for (int i = 0; i < entryCount; ++i) {
uint32_t sampleCount = READ32BIT(&bytes[16+i*8]);
uint32_t compositionOffset = READ32BIT(&bytes[16+i*8+4]);
for (int j = 0; j < sampleCount; j++) {
[_compositionOffsets addObject:@(compositionOffset)];
}
}
}
@end
#pragma mark -- mdat box
@implementation QGMP4MdatBox
@end
#pragma mark -- avcc box
@implementation QGMP4AvccBox
@end
@implementation QGMP4MvhdBox
@end
/**
 For the video track, the stsd box is where the sps & pps ultimately come from.
 After the atom header it carries a version field and an entry count, followed by one
 sample description entry per count; the entry type determines the layout: a video track
 uses a VisualSampleEntry, an audio track an AudioSampleEntry. The SPS/PPS sit in the
 avcC (or hvcC) box nested inside that entry.
 */
@implementation QGMP4StsdBox
@end
/**
 There are two variants of the sample size box. The first variant has a fixed size 32-bit field for representing the sample sizes; it permits defining a constant size for all samples in a track. The second variant permits smaller size fields, to save space when the sizes are varying but small. One of these boxes must be present; the first version is preferred for maximum compatibility.
 Records the size of every sample.
 */
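//Worked example (illustrative numbers): sample_size == 0 with sample_count == 3 and a table of {4120, 980, 1010}
//yields three samples of those byte sizes; a non-zero sample_size would apply to every sample and no table is read.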
@implementation QGMP4StszBox
- (void)boxDidParsed:(QGMp4BoxDataFetcher)datablock {
if (!_sampleSizes) {
_sampleSizes = [NSMutableArray new];
}
NSData *stszData = datablock(self);
const char *bytes = stszData.bytes;
uint32_t sampleSize = READ32BIT(&bytes[12]);
uint32_t sampleCount = READ32BIT(&bytes[16]);
self.sampleCount = sampleCount;
for (int i = 0; i < sampleCount; i ++) {
if (sampleSize > 0) {
[self.sampleSizes addObject:@(sampleSize)];
} else {
uint32_t entryValue = READ32BIT(&bytes[20+i*4]);
[self.sampleSizes addObject:@(entryValue)];
}
}
}
@end
/**
Samples within the media data are grouped into chunks. Chunks can be of different sizes, and the samples within a chunk can have different sizes. This table can be used to find the chunk that contains a sample, its position, and the associated sample description.
The stsc box maps chunks to samples: https://img-blog.csdn.net/20140613154636296
*/
@implementation QGStscEntry
@end
/*
 stsc: for each run of chunks it records how many samples every chunk holds and which sample description they use
 */
@implementation QGMP4StscBox
- (void)boxDidParsed:(QGMp4BoxDataFetcher)datablock {
if (!_entries) {
_entries = [NSMutableArray new];
}
NSData *stscData = datablock(self);
const char *bytes = stscData.bytes;
uint32_t entry_count = READ32BIT(&bytes[12]);
for (int i = 0; i < entry_count; ++i) {
QGStscEntry *entry = [QGStscEntry new];
entry.firstChunk = READ32BIT(&bytes[16+i*12]);
entry.samplesPerChunk = READ32BIT(&bytes[16+i*12+4]);
entry.sampleDescriptionIndex = READ32BIT(&bytes[16+i*12+8]);
[_entries addObject:entry];
}
}
@end
/**
stco: gives the file offset of every chunk; combined with stsc it locates each sample in the stream
entry_count is an integer that gives the number of entries in the following table
chunk_offset is a 32 or 64 bit integer that gives the offset of the start of a chunk into its containing
media file.
*/
@implementation QGMP4StcoBox
- (void)boxDidParsed:(QGMp4BoxDataFetcher)datablock {
if (!_chunkOffsets) {
_chunkOffsets = [NSMutableArray new];
}
NSData *stcoData = datablock(self);
const char *bytes = stcoData.bytes;
uint32_t entry_count = READ32BIT(&bytes[12]);
self.chunkCount = entry_count;
for (int i = 0; i < entry_count; ++i) {
[self.chunkOffsets addObject:@(READ32BIT(&bytes[16+i*4]))];
}
}
@end
@implementation QGMP4StssBox
- (void)boxDidParsed:(QGMp4BoxDataFetcher)datablock {
if (!_syncSamples) {
_syncSamples = [NSMutableArray new];
}
NSData *stssData = datablock(self);
const char *bytes = stssData.bytes;
uint32_t sample_count = READ32BIT(&bytes[12]);
for (int i = 0; i < sample_count; i++) {
NSInteger index = READ32BIT(&bytes[16 + 4 * i]) - 1;
[_syncSamples addObject:[NSNumber numberWithInteger:index]];
}
}
@end
/**
Decoding Time to Sample Box
Records the per-sample dts deltas.
*/
@implementation QGSttsEntry
@end
/*
 stts groups samples into entries; each entry stores a sample count and a shared delta.
 Accumulating the deltas yields every sample's dts on the media (atom) timeline.
 */
@implementation QGMP4SttsBox
- (void)boxDidParsed:(QGMp4BoxDataFetcher)datablock {
if (!_entries) {
_entries = [NSMutableArray new];
}
NSData *sttsData = datablock(self);
const char *bytes = sttsData.bytes;
uint32_t entry_count = READ32BIT(&bytes[12]);
for (int i = 0; i < entry_count; ++i) {
QGSttsEntry *entry = [QGSttsEntry new];
entry.sampleCount = READ32BIT(&bytes[16+i*8]);
entry.sampleDelta = READ32BIT(&bytes[16+i*8+4]);
[_entries addObject:entry];
}
}
@end
@implementation QGMP4TrackBox
@end
@implementation QGMP4HdlrBox
- (void)boxDidParsed:(QGMp4BoxDataFetcher)datablock {
NSData *hdlrData = datablock(self);
const char *bytes = hdlrData.bytes;
uint32_t trackType = READ32BIT(&bytes[16]);
self.trackType = trackType;
}
@end
@implementation QGMP4Sample
@end
@implementation QGChunkOffsetEntry
@end
@implementation QGMP4BoxFactory
+ (QGMP4Box *)createBoxForType:(QGMP4BoxType)type startIndex:(unsigned long long)startIndexInBytes length:(unsigned long long)length {
Class boxClass = [self boxClassForType:type] ?: [QGMP4Box class];
QGMP4Box *box = [[boxClass alloc] initWithType:type startIndex:startIndexInBytes length:length];
return box;
}
+ (Class)boxClassForType:(QGMP4BoxType)type {
switch (type) {
case QGMP4BoxType_ftyp:
case QGMP4BoxType_free:
case QGMP4BoxType_moov:
case QGMP4BoxType_mvhd:
case QGMP4BoxType_trak:
case QGMP4BoxType_tkhd:
case QGMP4BoxType_edts:
case QGMP4BoxType_elst:
case QGMP4BoxType_mdia:
case QGMP4BoxType_minf:
case QGMP4BoxType_vmhd:
case QGMP4BoxType_dinf:
case QGMP4BoxType_dref:
case QGMP4BoxType_url:
case QGMP4BoxType_stbl:
case QGMP4BoxType_avc1:
case QGMP4BoxType_udta:
case QGMP4BoxType_meta:
case QGMP4BoxType_ilst:
case QGMP4BoxType_data:
case QGMP4BoxType_iods:
case QGMP4BoxType_wide:
case QGMP4BoxType_loci:
case QGMP4BoxType_smhd:
return [QGMP4Box class];
case QGMP4BoxType_stss:
return [QGMP4StssBox class];
case QGMP4BoxType_mdat:
return [QGMP4MdatBox class];
case QGMP4BoxType_avcC:
return [QGMP4AvccBox class];
case QGMP4BoxType_mdhd:
return [QGMP4MvhdBox class];
case QGMP4BoxType_stsd:
return [QGMP4StsdBox class];
case QGMP4BoxType_stsz:
return [QGMP4StszBox class];
case QGMP4BoxType_hdlr:
return [QGMP4HdlrBox class];
case QGMP4BoxType_stsc:
return [QGMP4StscBox class];
case QGMP4BoxType_stts:
return [QGMP4SttsBox class];
case QGMP4BoxType_stco:
return [QGMP4StcoBox class];
case QGMP4BoxType_hvcC:
return [QGMP4HvccBox class];
case QGMP4BoxType_ctts:
return [QGMP4CttsBox class];
default:
return nil;
}
}
/**
 Uses boxClassForType: to decide whether a box type read from the stream is one we recognise.
 @param type box type
 @return YES if the type maps to a known QGMP4BoxType (i.e. not QGMP4BoxType_unknown)
 */
+ (BOOL)isTypeValueValid:(QGMP4BoxType)type {
Class class = [self boxClassForType:type];
if (class) {
return YES;
}
return NO;
}
@end

View File

@@ -0,0 +1,65 @@
// QGMP4Parser.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <Foundation/Foundation.h>
#import "QGMP4Box.h"
@class QGMP4Parser;
@protocol QGMP4ParserDelegate <NSObject>
@optional
- (void)didParseMP4Box:(QGMP4Box *)box parser:(QGMP4Parser *)parser;
- (void)MP4FileDidFinishParse:(QGMP4Parser *)parser;
@end
@interface QGMP4Parser : NSObject
@property (nonatomic, strong) QGMP4Box *rootBox;
@property (nonatomic, strong) NSFileHandle *fileHandle;
@property (nonatomic, weak) id<QGMP4ParserDelegate> delegate;
- (instancetype)initWithFilePath:(NSString *)filePath;
- (void)parse;
- (NSData *)readDataForBox:(QGMP4Box *)box;
- (NSInteger)readValue:(const char*)bytes length:(NSInteger)length;
@end
@interface QGMP4ParserProxy : NSObject
- (instancetype)initWithFilePath:(NSString *)filePath;
@property (nonatomic, assign) NSInteger picWidth; //video width
@property (nonatomic, assign) NSInteger picHeight; //video height
@property (nonatomic, assign) NSInteger fps; //video fps
@property (nonatomic, assign) double duration; //video duration in seconds
@property (nonatomic, strong) NSData *spsData; //sps
@property (nonatomic, strong) NSData *ppsData; //pps
@property (nonatomic, strong) NSArray *videoSamples; //every video sample, with its offset, size and timing info
@property (nonatomic, strong) NSArray *videoSyncSampleIndexes; //indexes of all key frames (sync samples)
@property (nonatomic, strong) QGMP4Box *rootBox; //root box of the mp4 file
@property (nonatomic, strong) QGMP4TrackBox *videoTrackBox; //video track
@property (nonatomic, strong) QGMP4TrackBox *audioTrackBox; //audio track
/** vps */
@property (nonatomic, strong) NSData *vpsData;
/** codec ID of the video stream */
@property (nonatomic, assign) QGMP4VideoStreamCodecID videoCodecID;
- (void)parse;
- (NSData *)readPacketOfSample:(NSInteger)sampleIndex;
- (NSData *)readDataOfBox:(QGMP4Box *)box length:(NSInteger)length offset:(NSInteger)offset;
@end
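/*
 Usage sketch (illustrative; the file path is a placeholder):

     QGMP4ParserProxy *mp4 = [[QGMP4ParserProxy alloc] initWithFilePath:@"/path/to/demo.mp4"];
     [mp4 parse];
     NSLog(@"%ldx%ld @%ldfps, %.2fs, %lu samples",
           (long)mp4.picWidth, (long)mp4.picHeight, (long)mp4.fps,
           mp4.duration, (unsigned long)mp4.videoSamples.count);
     NSData *packet = [mp4 readPacketOfSample:0]; //length-prefixed NALUs of the first frame
 */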

View File

@@ -0,0 +1,595 @@
// QGMP4Parser.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGMP4Parser.h"
#import "QGVAPLogger.h"
#pragma mark - mp4 parser
@interface QGMP4Parser() {
QGMp4BoxDataFetcher _boxDataFetcher;
}
@property (nonatomic, strong) NSString *filePath;
@end
@implementation QGMP4Parser
#pragma mark -- life cycle
- (instancetype)initWithFilePath:(NSString *)filePath {
if (self = [super init]) {
_filePath = filePath;
_fileHandle = [NSFileHandle fileHandleForReadingAtPath:_filePath];
__weak __typeof(self) weakSelf = self;
_boxDataFetcher = ^NSData *(QGMP4Box *box) {return [weakSelf readDataForBox:box];};
}
return self;
}
- (void)dealloc {
[_fileHandle closeFile];
}
#pragma mark -- methods
- (void)parse {
if (!_filePath || !_fileHandle) {
return ;
}
unsigned long long fileSize = [_fileHandle seekToEndOfFile];
[_fileHandle seekToFileOffset:0];
_rootBox = [QGMP4BoxFactory createBoxForType:QGMP4BoxType_unknown startIndex:0 length:fileSize];
NSMutableArray *BFSQueue = [NSMutableArray new];
[BFSQueue addObject:_rootBox];
QGMP4Box *calBox = _rootBox;
//breadth-first traversal of the box tree (queue + loop)
while ((calBox = [BFSQueue firstObject])) {
[BFSQueue removeObjectAtIndex:0];
if (calBox.length <= 2*(kQGBoxSizeLengthInBytes+kQGBoxTypeLengthInBytes)) {
//too small to contain any sub box
continue ;
}
unsigned long long offset = 0;
unsigned long long length = 0;
QGMP4BoxType type = QGMP4BoxType_unknown;
//offset of the first potential sub box (skip this box's own size + type header)
offset = calBox.superBox ? (calBox.startIndexInBytes + kQGBoxSizeLengthInBytes + kQGBoxTypeLengthInBytes) : 0;
//some container types carry extra header fields that must be skipped first
if ([self shouldResetOffset:calBox.type]) {
[self calibrateOffset:&offset boxType:calBox.type];
}
//scan the payload for sub boxes
do {
//not enough bytes left for another box header
if ((offset+kQGBoxSizeLengthInBytes+kQGBoxTypeLengthInBytes)>(calBox.startIndexInBytes+calBox.length)) {
break ;
}
if (![self readBoxTypeAndLength:offset type:&type length:&length]) {
break;
}
if ((offset+length)>(calBox.startIndexInBytes+calBox.length)) {
//reach to super box end or not a box
break ;
}
if (![QGMP4BoxFactory isTypeValueValid:type] && (offset == (calBox.startIndexInBytes + kQGBoxSizeLengthInBytes + kQGBoxTypeLengthInBytes))) {
//the very first candidate is not a valid box type, so treat this box as having no sub boxes
break ;
}
QGMP4Box *subBox = [QGMP4BoxFactory createBoxForType:type startIndex:offset length:length];
subBox.superBox = calBox;
if (!calBox.subBoxes) {
calBox.subBoxes = [NSMutableArray new];
}
//record the sub box
[calBox.subBoxes addObject:subBox];
//enqueue it so its own sub boxes get scanned (breadth-first)
[BFSQueue addObject:subBox];
[self didParseBox:subBox];
//advance to the next sibling box
offset += length;
} while(1);
}
[self didFinisheParseFile];
}
- (BOOL)readBoxTypeAndLength:(uint64_t)offset type:(QGMP4BoxType *)type length:(uint64_t*)length {
[_fileHandle seekToFileOffset:offset];
NSData *data = [_fileHandle readDataOfLength:(kQGBoxSizeLengthInBytes + kQGBoxTypeLengthInBytes)];
if (data.length < kQGBoxSizeLengthInBytes + kQGBoxTypeLengthInBytes) {
VAP_Error(kQGVAPModuleCommon, @"read box length and type error");
return NO;
}
const char *bytes = data.bytes;
*length = [self readValue:bytes length:kQGBoxSizeLengthInBytes];
*type = [self readValue:&bytes[kQGBoxSizeLengthInBytes] length:kQGBoxTypeLengthInBytes];
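//a 32-bit size of 1 means the real size is stored in the 64-bit "largesize" field that follows the type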
if (*length == kQGBoxLargeSizeFlagLengthInBytes) {
offset += kQGBoxSizeLengthInBytes + kQGBoxTypeLengthInBytes;
[_fileHandle seekToFileOffset:offset];
data = [_fileHandle readDataOfLength:kQGBoxLargeSizeLengthInBytes];
if (data.length < kQGBoxLargeSizeLengthInBytes) {
VAP_Error(kQGVAPModuleCommon, @"read box length and type error");
return NO;
}
bytes = data.bytes;
*length = [self readValue:bytes length:kQGBoxLargeSizeLengthInBytes];
if (*length == 0) {
VAP_Error(kQGVAPModuleCommon, @"read box length is 0");
return NO;
}
}
return YES;
}
- (BOOL)shouldResetOffset:(QGMP4BoxType)type {
return type == QGMP4BoxType_stsd ||
type == QGMP4BoxType_avc1 ||
type == QGMP4BoxType_hvc1;
}
- (void)calibrateOffset:(uint64_t*)offset boxType:(QGMP4BoxType)type {
switch (type) {
case QGMP4BoxType_stsd:
*offset += 8;
break;
case QGMP4BoxType_avc1:
case QGMP4BoxType_hvc1:
*offset += (24 + 2 + 2 + 14 + 32 + 4);
break;
default:
break;
}
}
- (NSData *)readDataForBox:(QGMP4Box *)box {
if (!box) {
return nil;
}
[_fileHandle seekToFileOffset:box.startIndexInBytes];
return [_fileHandle readDataOfLength:(NSUInteger)box.length];
}
- (NSInteger)readValue:(const char*)bytes length:(NSInteger)length {
NSInteger value = 0;
for (int i = 0; i < length; i++) {
value += (bytes[i]&0xff)<<((length-i-1)*8);
}
VAP_Debug(kQGVAPModuleCommon, @"readValue length:%ld value:%ld", (long)length, (long)value);
return value;
}
#pragma mark -- private methods
- (void)didParseBox:(QGMP4Box *)box {
if ([box respondsToSelector:@selector(boxDidParsed:)]) {
[box boxDidParsed:_boxDataFetcher];
}
if ([self.delegate respondsToSelector:@selector(didParseMP4Box:parser:)]) {
[self.delegate didParseMP4Box:box parser:self];
}
}
- (void)didFinisheParseFile {
if ([self.delegate respondsToSelector:@selector(MP4FileDidFinishParse:)]) {
[self.delegate MP4FileDidFinishParse:self];
}
}
@end
#pragma mark - parser proxy
@interface QGMP4ParserProxy() <QGMP4ParserDelegate> {
QGMP4Parser *_parser;
}
@end
@implementation QGMP4ParserProxy
- (instancetype)initWithFilePath:(NSString *)filePath {
if (self = [super init]) {
_parser = [[QGMP4Parser alloc] initWithFilePath:filePath];
_parser.delegate = self;
}
return self;
}
- (NSInteger)picWidth {
if (_picWidth == 0) {
_picWidth = [self readPicWidth];
}
return _picWidth;
}
- (NSInteger)picHeight {
if (_picHeight == 0) {
_picHeight = [self readPicHeight];
}
return _picHeight;
}
- (NSInteger)fps {
if (_fps == 0) {
if (self.videoSamples.count == 0) {
return 0;
}
_fps = lround(self.videoSamples.count/self.duration);
}
return _fps;
}
- (double)duration {
if (_duration == 0) {
_duration = [self readDuration];
}
return _duration;
}
- (NSArray *)videoSamples {
if (_videoSamples) {
return _videoSamples;
}
NSMutableArray *videoSamples = [NSMutableArray new];
uint64_t tmp = 0;
QGMP4SttsBox *sttsBox = [self.videoTrackBox subBoxOfType:QGMP4BoxType_stts];
QGMP4StszBox *stszBox = [self.videoTrackBox subBoxOfType:QGMP4BoxType_stsz];
QGMP4StscBox *stscBox = [self.videoTrackBox subBoxOfType:QGMP4BoxType_stsc];
QGMP4StcoBox *stcoBox = [self.videoTrackBox subBoxOfType:QGMP4BoxType_stco];
QGMP4CttsBox *cttsBox = [self.videoTrackBox subBoxOfType:QGMP4BoxType_ctts];
uint32_t stscEntryIndex = 0;
uint32_t stscEntrySampleIndex = 0;
uint32_t stscEntrySampleOffset = 0;
uint32_t sttsEntryIndex = 0;
uint32_t sttsEntrySampleIndex = 0;
uint32_t stcoChunkLogicIndex = 0;
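//Walk the per-track tables in parallel to build each sample:
// size        <- stsz (one entry per sample)
// file offset <- stco chunk offset + bytes already consumed inside that chunk (run lengths from stsc)
// dts         <- accumulated stts deltas
// pts         <- dts + ctts composition offset, when a ctts box is present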
for (int i = 0; i < stszBox.sampleCount; ++i) {
if (stscEntryIndex >= stscBox.entries.count ||
sttsEntryIndex >= sttsBox.entries.count ||
stcoChunkLogicIndex >= stcoBox.chunkOffsets.count) {
break;
}
QGStscEntry *stscEntry = stscBox.entries[stscEntryIndex];
QGSttsEntry *sttsEntry = sttsBox.entries[sttsEntryIndex];
uint32_t sampleOffset = [stcoBox.chunkOffsets[stcoChunkLogicIndex] unsignedIntValue] + stscEntrySampleOffset;
uint32_t ctts = 0;
if (i < cttsBox.compositionOffsets.count) {
ctts = [cttsBox.compositionOffsets[i] unsignedIntValue];
}
QGMP4Sample *sample = [QGMP4Sample new];
sample.codecType = QGMP4CodecTypeVideo;
sample.sampleIndex = i;
sample.chunkIndex = stcoChunkLogicIndex;
sample.sampleDelta = sttsEntry.sampleDelta;
sample.sampleSize = [stszBox.sampleSizes[i] unsignedIntValue];
sample.pts = tmp + ctts;
sample.streamOffset = sampleOffset;
[videoSamples addObject:sample];
stscEntrySampleOffset += sample.sampleSize;
tmp += sample.sampleDelta;
stscEntrySampleIndex++;
if (stscEntrySampleIndex >= stscEntry.samplesPerChunk) {
if (stcoChunkLogicIndex + 1 < stcoBox.chunkOffsets.count) {
stcoChunkLogicIndex++;
}
stscEntrySampleIndex = 0;
stscEntrySampleOffset = 0;
}
sttsEntrySampleIndex++;
if (sttsEntrySampleIndex >= sttsEntry.sampleCount) {
sttsEntrySampleIndex = 0;
if (sttsEntryIndex + 1 < sttsBox.entries.count) {
sttsEntryIndex++;
}
}
if (stscEntryIndex + 1 < stscBox.entries.count) {
if (stcoChunkLogicIndex >= stscBox.entries[stscEntryIndex + 1].firstChunk - 1) {
stscEntryIndex++;
}
}
}
_videoSamples = videoSamples;
return _videoSamples;
}
- (NSArray *)videoSyncSampleIndexes {
QGMP4StssBox *stssBox = [self.videoTrackBox subBoxOfType:QGMP4BoxType_stss];
return stssBox.syncSamples;
}
/**
 Parse the mp4 file and extract the video decoder configuration (sps/pps, plus vps for HEVC).
 */
- (void)parse {
[_parser parse];
_rootBox = _parser.rootBox;
//parse sps/pps (and vps for HEVC) from the decoder configuration record
[self parseVideoDecoderConfigRecord];
}
#pragma mark - Private
- (void)parseVideoDecoderConfigRecord {
if (self.videoCodecID == QGMP4VideoStreamCodecIDH264) {
[self parseAvccDecoderConfigRecord];
} else if (self.videoCodecID == QGMP4VideoStreamCodecIDH265) {
[self parseHvccDecoderConfigRecord];
}
}
- (void)parseAvccDecoderConfigRecord {
self.spsData = [self parseAvccSPSData];
self.ppsData = [self parseAvccPPSData];
}
- (void)parseHvccDecoderConfigRecord {
NSData *extraData = [_parser readDataForBox:[self.videoTrackBox subBoxOfType:QGMP4BoxType_hvcC]];
if (extraData.length <= 8) {
return;
}
const char *bytes = extraData.bytes;
int index = 30; //4 (size) + 4 (type) + 22-byte HEVCDecoderConfigurationRecord header; points at numOfArrays
//int lengthSize = ((bytes[index++] & 0xff) & 0x03) + 1;
int arrayNum = bytes[index++] & 0xff;
//one parameter-set array per NALU type (vps / sps / pps)
for (int i = 0; i < arrayNum; i++) {
int value = bytes[index++] & 0xff;
int naluType = value & 0x3F;
//number of NALUs of this type
int naluNum = ((bytes[index] & 0xff) << 8) + (bytes[index + 1] & 0xff);
index += 2;
for (int j = 0; j < naluNum; j++) {
int naluLength = ((bytes[index] & 0xff) << 8) + (bytes[index + 1] & 0xff);
index += 2;
NSData *paramData = [NSData dataWithBytes:&bytes[index] length:naluLength];
if (naluType == 32) {
// vps
self.vpsData = paramData;
} else if (naluType == 33) {
// sps
self.spsData = paramData;
} else if (naluType == 34) {
// pps
self.ppsData = paramData;
}
index += naluLength;
}
}
}
- (NSData *)parseAvccSPSData {
//boxsize(32)+boxtype(32)+prefix(40)+(3)+spsCount(5)+spssize(16)+...+ppscount(8)+ppssize(16)+...
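//e.g. with a single SPS, bytes[14..15] hold its length and the SPS NALU (type 7) starts at bytes[16], as read below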
NSData *extraData = [_parser readDataForBox:[self.videoTrackBox subBoxOfType:QGMP4BoxType_avcC]];
if (extraData.length <= 8) {
return nil;
}
const char *bytes = extraData.bytes;
//sps count, normally 1; not used here
//NSInteger spsCount = bytes[13]&0x1f;
NSInteger spsLength = ((bytes[14]&0xff)<<8) + (bytes[15]&0xff);
NSInteger naluType = (uint8_t)bytes[16]&0x1F;
if (spsLength + 16 > extraData.length || naluType != 7) {
return nil;
}
NSData *spsData = [NSData dataWithBytes:&bytes[16] length:spsLength];
return spsData;
}
- (NSData *)parseAvccPPSData {
NSData *extraData = [_parser readDataForBox:[self.videoTrackBox subBoxOfType:QGMP4BoxType_avcC]];
if (extraData.length <= 8) {
return nil;
}
const char *bytes = extraData.bytes;
NSInteger spsCount = bytes[13]&0x1f;
NSInteger spsLength = ((bytes[14]&0xff)<<8) + (bytes[15]&0xff);
NSInteger prefixLength = 16 + spsLength;
while (--spsCount > 0) {
if (prefixLength+2 >= extraData.length) {
return nil;
}
NSInteger nextSpsLength = ((bytes[prefixLength]&0xff)<<8)+(bytes[prefixLength+1]&0xff); //parenthesised so the low byte is masked before the addition
prefixLength += nextSpsLength;
}
//pps count, normally 1
// NSInteger ppsCount = bytes[prefixLength]&0xff;
if (prefixLength+3 >= extraData.length) {
return nil;
}
NSInteger ppsLength = ((bytes[prefixLength+1]&0xff)<<8)+(bytes[prefixLength+2]&0xff);
NSInteger naluType = (uint8_t)bytes[prefixLength+3]&0x1F;
if (naluType != 8 || (ppsLength+prefixLength+3) > extraData.length) {
return nil;
}
NSData *ppsData = [NSData dataWithBytes:&bytes[prefixLength+3] length:ppsLength];
return ppsData;
}
- (NSInteger)readPicWidth {
if (self.videoCodecID == QGMP4VideoStreamCodecIDUnknown) {
return 0;
}
QGMP4BoxType boxType = self.videoCodecID == QGMP4VideoStreamCodecIDH264 ? QGMP4BoxType_avc1 : QGMP4BoxType_hvc1;
NSInteger sizeIndex = 32;
NSUInteger readLength = 2;
QGMP4Box *avc1 = [self.videoTrackBox subBoxOfType:boxType];
[_parser.fileHandle seekToFileOffset:avc1.startIndexInBytes+sizeIndex];
NSData *widthData = [_parser.fileHandle readDataOfLength:readLength];
if (widthData.length < readLength) {
return 0;
}
const char *bytes = widthData.bytes;
NSInteger width = ((bytes[0]&0xff)<<8)+(bytes[1]&0xff);
return width;
}
- (NSInteger)readPicHeight {
if (self.videoCodecID == QGMP4VideoStreamCodecIDUnknown) {
return 0;
}
QGMP4BoxType boxType = self.videoCodecID == QGMP4VideoStreamCodecIDH264 ? QGMP4BoxType_avc1 : QGMP4BoxType_hvc1;
NSInteger sizeIndex = 34;
NSUInteger readLength = 2;
QGMP4Box *avc1 = [self.videoTrackBox subBoxOfType:boxType];
[_parser.fileHandle seekToFileOffset:avc1.startIndexInBytes+sizeIndex];
NSData *heightData = [_parser.fileHandle readDataOfLength:readLength];
if (heightData.length < readLength) {
return 0;
}
const char *bytes = heightData.bytes;
NSInteger height = ((bytes[0]&0xff)<<8)+(bytes[1]&0xff);
return height;
}
- (double)readDuration {
QGMP4MvhdBox *mdhdBox = [self.rootBox subBoxOfType:QGMP4BoxType_mvhd];
NSData *mvhdData = [_parser readDataForBox:mdhdBox];
const char *bytes = mvhdData.bytes;
NSInteger version = READ32BIT(&bytes[8]);
NSInteger timescaleIndex = 20;
NSInteger timescaleLength = 4;
NSInteger durationIndex = 24;
NSInteger durationLength = 4;
if (version == 1) {
timescaleIndex = 28;
durationIndex = 32;
durationLength = 8;
}
NSInteger scale = [_parser readValue:&bytes[timescaleIndex] length:timescaleLength];
NSInteger duration = [_parser readValue:&bytes[durationIndex] length:durationLength];
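//e.g. (illustrative numbers) timescale 12800 with duration 38400 ticks -> 38400/12800 = 3.0 seconds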
if (scale == 0) {
return 0;
}
double result = duration/(double)scale;
return result;
}
- (NSData *)readPacketOfSample:(NSInteger)sampleIndex {
if (sampleIndex >= self.videoSamples.count) {
VAP_Error(kQGVAPModuleCommon, @"readPacketOfSample beyond bounds!:%@ > %@", @(sampleIndex), @(self.videoSamples.count-1));
return nil;
}
QGMP4Sample *videoSample = self.videoSamples[sampleIndex];
NSInteger currentSampleSize = videoSample.sampleSize;
[_parser.fileHandle seekToFileOffset:videoSample.streamOffset];
// sampleIndex was validated above (sampleIndex < self.videoSamples.count); readDataOfLength: returns 0 bytes if the read fails
NSData *packetData = [_parser.fileHandle readDataOfLength:currentSampleSize];
return packetData;
}
- (NSData *)readDataOfBox:(QGMP4Box *)box length:(NSInteger)length offset:(NSInteger)offset {
if (length <= 0 || offset + length > box.length) {
return nil;
}
[_parser.fileHandle seekToFileOffset:box.startIndexInBytes+offset];
NSData *data = [_parser.fileHandle readDataOfLength:length];
return data;
}
#pragma mark -- delegate
- (void)MP4FileDidFinishParse:(QGMP4Parser *)parser {
}
- (void)didParseMP4Box:(QGMP4Box *)box parser:(QGMP4Parser *)parser {
switch (box.type) {
case QGMP4BoxType_hdlr: {
QGMP4TrackType trackType = ((QGMP4HdlrBox*)box).trackType;
QGMP4TrackBox *trackBox = (QGMP4TrackBox*)[box superBoxOfType:QGMP4BoxType_trak];
switch (trackType) {
case QGMP4TrackType_Video:
self.videoTrackBox = trackBox;
break;
case QGMP4TrackType_Audio:
self.audioTrackBox = trackBox;
break;
default:
break;
}
} break;
case QGMP4BoxType_avc1: {
self.videoCodecID = QGMP4VideoStreamCodecIDH264;
} break;
case QGMP4BoxType_hvc1: {
self.videoCodecID = QGMP4VideoStreamCodecIDH265;
} break;
default:
break;
}
}
@end

View File

@@ -0,0 +1,25 @@
// QGBaseAnimatedImageFrame+Displaying.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGBaseAnimatedImageFrame.h"
@interface QGBaseAnimatedImageFrame (Displaying)
@property (nonatomic, strong) NSDate *startDate; //time when this frame started displaying
@property (nonatomic, assign) NSTimeInterval decodeTime; //time spent decoding this frame
- (BOOL)shouldFinishDisplaying; //whether the frame has been on screen long enough (decided by its duration)
@end

View File

@@ -0,0 +1,34 @@
// QGBaseAnimatedImageFrame+Displaying.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGBaseAnimatedImageFrame+Displaying.h"
#import <objc/runtime.h>
#import "VAPMacros.h"
@implementation QGBaseAnimatedImageFrame (Displaying)
HWDSYNTH_DYNAMIC_PROPERTY_CTYPE(decodeTime, setDecodeTime, NSTimeInterval)
HWDSYNTH_DYNAMIC_PROPERTY_OBJECT(startDate, setStartDate, OBJC_ASSOCIATION_RETAIN);
- (BOOL)shouldFinishDisplaying {
if (!self.startDate) {
return YES;
}
NSTimeInterval timeInterval = [[NSDate date] timeIntervalSinceDate:self.startDate];
//frames are paced against VSYNC (~16ms); finishing ~10ms early keeps the switch aligned with the next refresh
return timeInterval*1000 + 10 >= self.duration;
}
@end

View File

@@ -0,0 +1,25 @@
// QGBaseAnimatedImageFrame.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <Foundation/Foundation.h>
@interface QGBaseAnimatedImageFrame : NSObject
@property (atomic, assign) NSInteger frameIndex; //index of the current frame
@property (atomic, assign) NSTimeInterval duration; //display duration of this frame, in milliseconds
/** pts */
@property (atomic, assign) uint64_t pts;
@end

View File

@@ -0,0 +1,20 @@
// QGBaseAnimatedImageFrame.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGBaseAnimatedImageFrame.h"
@implementation QGBaseAnimatedImageFrame
@end

View File

@@ -0,0 +1,23 @@
// QGBaseDFileInfo.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <Foundation/Foundation.h>
@interface QGBaseDFileInfo : NSObject
@property (nonatomic, strong) NSString *filePath; //file path
@property (atomic, assign) NSInteger occupiedCount; //acts like a retain count for this file info
@end

View File

@@ -0,0 +1,20 @@
// QGBaseDFileInfo.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGBaseDFileInfo.h"
@implementation QGBaseDFileInfo
@end

View File

@@ -0,0 +1,24 @@
// QGMP4AnimatedImageFrame.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGBaseAnimatedImageFrame.h"
#import <CoreVideo/CoreVideo.h>
@interface QGMP4AnimatedImageFrame : QGBaseAnimatedImageFrame
@property (nonatomic, assign) CVPixelBufferRef pixelBuffer;
@property (nonatomic, assign) int defaultFps;
@end

View File

@@ -0,0 +1,28 @@
// QGMP4AnimatedImageFrame.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGMP4AnimatedImageFrame.h"
@implementation QGMP4AnimatedImageFrame
- (void)dealloc {
//manually release the CVPixelBuffer held by this frame
if (self.pixelBuffer) {
CVPixelBufferRelease(self.pixelBuffer);
}
}
@end

View File

@@ -0,0 +1,23 @@
// QGMP4HWDFileInfo.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGBaseDFileInfo.h"
#import "QGMP4Parser.h"
@interface QGMP4HWDFileInfo : QGBaseDFileInfo
@property (nonatomic, strong) QGMP4ParserProxy *mp4Parser;
@end

View File

@@ -0,0 +1,20 @@
// QGMP4HWDFileInfo.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGMP4HWDFileInfo.h"
@implementation QGMP4HWDFileInfo
@end

View File

@@ -0,0 +1,112 @@
// QGVAPConfigModel.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
#import <Metal/Metal.h>
typedef NS_ENUM(NSInteger, QGVAPOrientation){
QGVAPOrientation_None = 0, // unspecified, kept for compatibility
QGVAPOrientation_Portrait = 1, // portrait
QGVAPOrientation_landscape = 2, // landscape
};
typedef NSString * QGAGAttachmentSourceType NS_EXTENSIBLE_STRING_ENUM;//resource type
typedef NSString * QGAGAttachmentSourceLoadType NS_EXTENSIBLE_STRING_ENUM;//how the resource is loaded
typedef NSString * QGAGAttachmentSourceStyle NS_EXTENSIBLE_STRING_ENUM;//font style
typedef NSString * QGAGAttachmentFitType NS_EXTENSIBLE_STRING_ENUM;//how the resource is fitted into its render rect
//fit types
UIKIT_EXTERN QGAGAttachmentFitType const kQGAGAttachmentFitTypeFitXY; //scale to the specified size
UIKIT_EXTERN QGAGAttachmentFitType const kQGAGAttachmentFitTypeCenterFull; //show at the resource's own size by default; if it is smaller than the mask, scale it up proportionally until it fills
//resource types
UIKIT_EXTERN QGAGAttachmentSourceType const kQGAGAttachmentSourceTypeTextStr; //text
UIKIT_EXTERN QGAGAttachmentSourceType const kQGAGAttachmentSourceTypeImgUrl; //image
UIKIT_EXTERN QGAGAttachmentSourceType const kQGAGAttachmentSourceTypeText;
UIKIT_EXTERN QGAGAttachmentSourceType const kQGAGAttachmentSourceTypeImg;
UIKIT_EXTERN QGAGAttachmentSourceLoadType const QGAGAttachmentSourceLoadTypeLocal;
UIKIT_EXTERN QGAGAttachmentSourceLoadType const QGAGAttachmentSourceLoadTypeNet;
//font styles
UIKIT_EXTERN QGAGAttachmentSourceStyle const kQGAGAttachmentSourceStyleBoldText; //bold
//https://docs.qq.com/sheet/DTGl0bXdidFVkS3pn?tab=7od8yj&c=C25A0I0
@class QGVAPCommonInfo,QGVAPSourceInfo,QGVAPMergedInfo;
@interface QGVAPConfigModel : NSObject
@property (nonatomic, strong) QGVAPCommonInfo *info;
@property (nonatomic, strong) NSArray<QGVAPSourceInfo *> *resources;
@property (nonatomic, strong) NSDictionary<NSNumber *, NSArray<QGVAPMergedInfo*> *> *mergedConfig; ///@{frame index : @[merge infos for that frame]}
@end
#pragma mark - common info
@interface QGVAPCommonInfo : NSObject
@property (nonatomic, assign) NSInteger version;
@property (nonatomic, assign) NSInteger framesCount;
@property (nonatomic, assign) CGSize size;
@property (nonatomic, assign) CGSize videoSize;
@property (nonatomic, assign) QGVAPOrientation targetOrientaion;
@property (nonatomic, assign) NSInteger fps;
@property (nonatomic, assign) BOOL isMerged;
@property (nonatomic, assign) CGRect alphaAreaRect;
@property (nonatomic, assign) CGRect rgbAreaRect;
@end
#pragma mark - render source info
@interface QGVAPSourceInfo : NSObject
//raw info from the vap config
@property (nonatomic, strong) QGAGAttachmentSourceType type;
@property (nonatomic, strong) QGAGAttachmentSourceLoadType loadType;
@property (nonatomic, strong) NSString *contentTag;
@property (nonatomic, strong) NSString *contentTagValue;
@property (nonatomic, strong) UIColor *color;
@property (nonatomic, strong) QGAGAttachmentSourceStyle style;
@property (nonatomic, assign) CGSize size;
@property (nonatomic, strong) QGAGAttachmentFitType fitType;
//loaded content
@property (nonatomic, strong) UIImage *sourceImage;
@property (nonatomic, strong) id<MTLTexture> texture;
@property (nonatomic, strong) id<MTLBuffer> colorParamsBuffer;
@end
@interface QGVAPSourceDisplayItem : NSObject
@property (nonatomic, assign) CGRect frame;
@property (nonatomic, strong) QGVAPSourceInfo *sourceInfo;
@end
#pragma mark - merge info
@interface QGVAPMergedInfo : NSObject
@property (nonatomic, strong) QGVAPSourceInfo *source;
@property (nonatomic, assign) NSInteger renderIndex;
@property (nonatomic, assign) CGRect renderRect;
@property (nonatomic, assign) BOOL needMask;
@property (nonatomic, assign) CGRect maskRect;
@property (nonatomic, assign) NSInteger maskRotation;
//loaded content
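//Builds a 4-vertex buffer; each vertex interleaves 4 position floats, 2 source-texture coords and 2 mask coords (32 floats total)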
- (id<MTLBuffer>)vertexBufferWithContainerSize:(CGSize)size maskContianerSize:(CGSize)mSize device:(id<MTLDevice>)device;
@end

View File

@@ -0,0 +1,108 @@
// QGVAPConfigModel.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGVAPConfigModel.h"
#import "NSDictionary+VAPUtil.h"
#import "QGVAPMetalUtil.h"
#import "QGVAPLogger.h"
#import "UIDevice+VAPUtil.h"
//fit types
QGAGAttachmentFitType const kQGAGAttachmentFitTypeFitXY = @"fitXY"; //scale to the specified size
QGAGAttachmentFitType const kQGAGAttachmentFitTypeCenterFull = @"centerFull"; //show at the resource's own size, scaling up proportionally to fill if needed
//resource types
QGAGAttachmentSourceType const kQGAGAttachmentSourceTypeTextStr = @"textStr"; //text
QGAGAttachmentSourceType const kQGAGAttachmentSourceTypeImgUrl = @"imgUrl"; //image
QGAGAttachmentSourceType const kQGAGAttachmentSourceTypeText = @"txt"; //text
QGAGAttachmentSourceType const kQGAGAttachmentSourceTypeImg = @"img"; //image
QGAGAttachmentSourceLoadType const QGAGAttachmentSourceLoadTypeLocal = @"local";
QGAGAttachmentSourceLoadType const QGAGAttachmentSourceLoadTypeNet = @"net";
//font styles
QGAGAttachmentSourceStyle const kQGAGAttachmentSourceStyleBoldText = @"b"; //bold
@implementation QGVAPConfigModel
- (NSString *)description {
return [NSString stringWithFormat:@"<%@: %p> {info:%@, configs:%@}", self.class, self, _info, _mergedConfig];
}
@end
@implementation QGVAPCommonInfo
- (NSString *)description {
return [NSString stringWithFormat:@"<%@: %p> {version:%@, frames:%@, size:(%@,%@), videoSize:(%@,%@) orien:%@, fps:%@, merged:%@, alpha:(%@,%@,%@,%@), rgb:(%@,%@,%@,%@)}", self.class, self, @(_version), @(_framesCount), @(_size.width), @(_size.height), @(_videoSize.width), @(_videoSize.height), @(_targetOrientaion), @(_fps), @(_isMerged), @(_alphaAreaRect.origin.x), @(_alphaAreaRect.origin.y), @(_alphaAreaRect.size.width), @(_alphaAreaRect.size.height), @(_rgbAreaRect.origin.x), @(_rgbAreaRect.origin.y), @(_rgbAreaRect.size.width), @(_rgbAreaRect.size.height)];
}
@end
@implementation QGVAPSourceInfo
- (NSString *)description {
return [NSString stringWithFormat:@"<%@: %p> {type:%@, tag:%@-%@ color:%@, style:%@, size:(%@,%@), fitType:%@}", self.class, self, _type, _contentTag, _contentTagValue, _color, _style, @(_size.width), @(_size.height), _fitType];
}
@end
@implementation QGVAPSourceDisplayItem
@end
@implementation QGVAPMergedInfo
- (id<MTLBuffer>)vertexBufferWithContainerSize:(CGSize)size maskContianerSize:(CGSize)mSize device:(id<MTLDevice>)device {
if (size.width <= 0 || size.height <= 0 || mSize.width <= 0 || mSize.height <= 0) {
VAP_Error(kQGVAPModuleCommon, @"vertexBufferWithContainerSize size error! :%@ - %@", [NSValue valueWithCGSize:size], [NSValue valueWithCGSize:mSize]);
NSAssert(0, @"vertexBufferWithContainerSize size error!");
return nil;
}
const int colunmCountForVertices = 4, colunmCountForCoordinate = 2, vertexDataLength = 32;
float vertices[16], maskCoordinates[8], sourceCoordinates[8];
genMTLVertices(self.renderRect, size, vertices, NO);
genMTLTextureCoordinates(self.maskRect, mSize, maskCoordinates,YES, self.maskRotation);
if ([self.source.fitType isEqualToString:kQGAGAttachmentFitTypeCenterFull]) {
CGRect sourceRect = vapRectForCenterFull(self.source.size, self.renderRect.size);
CGSize sourceSize = vapSourceSizeForCenterFull(self.source.size, self.renderRect.size);
genMTLTextureCoordinates(sourceRect, sourceSize, sourceCoordinates,NO, 0);
} else {
replaceArrayElements(sourceCoordinates, (void*)kVAPMTLTextureCoordinatesIdentity, 8);
}
static float vertexData[vertexDataLength];
int indexForVertexData = 0;
//interleave per vertex: 4 position floats + 2 source texture coords + 2 mask texture coords (8 floats x 4 vertices = 32)
for (int i = 0; i < 16; i ++) {
vertexData[indexForVertexData++] = ((float*)vertices)[i];
if (i%colunmCountForVertices == colunmCountForVertices-1) {
int row = i/colunmCountForVertices;
vertexData[indexForVertexData++] = ((float*)sourceCoordinates)[row*colunmCountForCoordinate];
vertexData[indexForVertexData++] = ((float*)sourceCoordinates)[row*colunmCountForCoordinate+1];
vertexData[indexForVertexData++] = ((float*)maskCoordinates)[row*colunmCountForCoordinate];
vertexData[indexForVertexData++] = ((float*)maskCoordinates)[row*colunmCountForCoordinate+1];
}
}
NSUInteger allocationSize = vertexDataLength * sizeof(float);
id<MTLBuffer> vertexBuffer = [device newBufferWithBytes:vertexData length:allocationSize options:kDefaultMTLResourceOption];
return vertexBuffer;
}
- (NSString *)description {
return [NSString stringWithFormat:@"<%@: %p> {index:%@, rect:(%@,%@,%@,%@), mask:%@, maskRect:(%@,%@,%@,%@), maskRotation:%@, source:%@}", self.class, self, @(_renderIndex), @(_renderRect.origin.x), @(_renderRect.origin.y), @(_renderRect.size.width), @(_renderRect.size.height), @(_needMask), @(_maskRect.origin.x), @(_maskRect.origin.y), @(_maskRect.size.width), @(_maskRect.size.height), @(_maskRotation), _source];
}
@end

View File

@@ -0,0 +1,40 @@
// QGVAPMaskInfo.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
#import <Metal/Metal.h>
typedef NSUInteger QGVAPMaskValues;
NS_ASSUME_NONNULL_BEGIN
// To change data, sampleRect or dataSize, create a new QGVAPMaskInfo instead of mutating this one
@interface QGVAPMaskInfo : NSObject
/** mask data, one byte (0/1) per pixel */
@property (nonatomic, strong) NSData *data;
/** sampling rect, in the same units as dataSize */
@property (nonatomic, assign) CGRect sampleRect;
/** mask size, in pixels */
@property (nonatomic, assign) CGSize dataSize;
/** blur range, in pixels */
@property (nonatomic, assign) NSInteger blurLength;
/** mask texture */
@property (nonatomic, strong, readonly) id<MTLTexture> texture;
@end
NS_ASSUME_NONNULL_END

View File

@@ -0,0 +1,31 @@
// QGVAPMaskInfo.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGVAPMaskInfo.h"
#import "QGVAPTextureLoader.h"
#import "QGHWDMetalRenderer.h"
@implementation QGVAPMaskInfo
@synthesize texture = _texture;
- (id<MTLTexture>)texture {
if (!_texture) {
_texture = [QGVAPTextureLoader loadTextureWithData:self.data device:kQGHWDMetalRendererDevice width:self.dataSize.width height:self.dataSize.height];
}
return _texture;
}
@end

View File

@@ -0,0 +1,31 @@
// QGVAPTextureLoader.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <UIKit/UIKit.h>
#import <Metal/Metal.h>
@interface QGVAPTextureLoader : NSObject
+ (id<MTLBuffer>)loadVapColorFillBufferWith:(UIColor *)color device:(id<MTLDevice>)device;
+ (id<MTLTexture>)loadTextureWithImage:(UIImage *)image device:(id<MTLDevice>)device;
+ (id<MTLTexture>)loadTextureWithData:(NSData *)data device:(id<MTLDevice>)device width:(CGFloat)width height:(CGFloat)height;
+ (UIImage *)drawingImageForText:(NSString *)textStr color:(UIColor *)color size:(CGSize)size bold:(BOOL)bold;
+ (UIFont *)getAppropriateFontWith:(NSString *)text rect:(CGRect)fitFrame designedSize:(CGFloat)designedFontSize bold:(BOOL)isBold textSize:(CGSize *)textSize;
@end
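/*
 Usage sketch (illustrative; the text and sizes are placeholders):

     id<MTLDevice> device = MTLCreateSystemDefaultDevice();
     UIImage *img = [QGVAPTextureLoader drawingImageForText:@"nickname" color:[UIColor whiteColor] size:CGSizeMake(200, 60) bold:YES];
     id<MTLTexture> texture = [QGVAPTextureLoader loadTextureWithImage:img device:device];
 */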

View File

@@ -0,0 +1,183 @@
// QGVAPTextureLoader.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGVAPTextureLoader.h"
#import <MetalKit/MetalKit.h>
#import "QGHWDShaderTypes.h"
#import "QGVAPLogger.h"
#import "UIDevice+VAPUtil.h"
@implementation QGVAPTextureLoader
#if TARGET_OS_SIMULATOR //Metal is unavailable in the simulator build, so fall back to stub implementations
+ (id<MTLBuffer>)loadVapColorFillBufferWith:(UIColor *)color device:(id<MTLDevice>)device {return nil;}
+ (id<MTLTexture>)loadTextureWithImage:(UIImage *)image device:(id<MTLDevice>)device {return nil;}
+ (UIImage *)drawingImageForText:(NSString *)textStr color:(UIColor *)color size:(CGSize)size bold:(BOOL)bold {return nil;}
+ (UIFont *)getAppropriateFontWith:(NSString *)text rect:(CGRect)fitFrame designedSize:(CGFloat)designedFontSize bold:(BOOL)isBold textSize:(CGSize *)textSize {return nil;}
#else
+ (id<MTLBuffer>)loadVapColorFillBufferWith:(UIColor *)color device:(id<MTLDevice>)device {
CGFloat red = 0.0, green = 0.0, blue = 0.0, alpha = 0.0;
if (color) {
[color getRed:&red green:&green blue:&blue alpha:&alpha];
}
struct VapAttachmentFragmentParameter colorParams[] = {{color != nil ? 0 : 1, {red, green, blue, alpha}}};
NSUInteger colorParamsSize = sizeof(struct VapAttachmentFragmentParameter);
id<MTLBuffer> buffer = [device newBufferWithBytes:colorParams length:colorParamsSize options:kDefaultMTLResourceOption];
return buffer;
}
+ (id<MTLTexture>)loadTextureWithImage:(UIImage *)image device:(id<MTLDevice>)device {
if (!image) {
VAP_Error(kQGVAPModuleCommon, @"attemp to loadTexture with nil image");
return nil;
}
if (@available(iOS 10.0, *)) {
MTKTextureLoader *loader = [[MTKTextureLoader alloc] initWithDevice:device];
NSError *error = nil;
id<MTLTexture> texture = [loader newTextureWithCGImage:image.CGImage options:@{MTKTextureLoaderOptionOrigin : MTKTextureLoaderOriginFlippedVertically,MTKTextureLoaderOptionSRGB:@(NO)} error:&error];
if (!texture || error) {
VAP_Error(kQGVAPModuleCommon, @"loadTexture error:%@", error);
return nil;
}
return texture;
}
return [self cg_loadTextureWithImage:image device:device];
}
+ (UIImage *)drawingImageForText:(NSString *)textStr color:(UIColor *)color size:(CGSize)size bold:(BOOL)bold {
if (textStr.length == 0) {
VAP_Error(kQGVAPModuleCommon, @"draw text resource fail cuz text is nil !!");
return nil;
}
if (!color) {
color = [UIColor blackColor];
}
CGRect rect = CGRectMake(0, 0, size.width/2.0, size.height/2.0);
CGSize textSize = CGSizeZero;
UIFont *font = [QGVAPTextureLoader getAppropriateFontWith:textStr rect:rect designedSize:rect.size.height*0.8 bold:bold textSize:&textSize];
if (!font) {
VAP_Error(kQGVAPModuleCommon, @"draw text resource:%@ fail cuz font is nil !!", textStr);
return nil;
}
NSMutableParagraphStyle *paragraphStyle = [NSMutableParagraphStyle new];
paragraphStyle.alignment = NSTextAlignmentCenter;
paragraphStyle.lineBreakMode = NSLineBreakByTruncatingTail;
NSDictionary *attr = @{NSFontAttributeName:font, NSParagraphStyleAttributeName:paragraphStyle, NSForegroundColorAttributeName:color};
UIGraphicsBeginImageContextWithOptions(rect.size, NO, [UIScreen mainScreen].scale);
rect.origin.y = (rect.size.height - font.lineHeight)/2.0;
[textStr drawWithRect:rect options:NSStringDrawingUsesLineFragmentOrigin attributes:attr context:nil];
UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
if (!image) {
VAP_Error(kQGVAPModuleCommon, @"draw text resource:%@ fail cuz UIGraphics fail.", textStr);
return nil;
}
return image;
}
+ (id<MTLTexture>)cg_loadTextureWithImage:(UIImage *)image device:(id<MTLDevice>)device {
CGImageRef imageRef = image.CGImage;
if (!device || imageRef == nil) {
VAP_Error(kQGVAPModuleCommon, @"load texture fail,cuz device/image is nil-device:%@ imaghe%@", device, imageRef);
return nil;
}
CGFloat width = CGImageGetWidth(imageRef), height = CGImageGetHeight(imageRef);
NSInteger bytesPerPixel = 4, bytesPerRow = bytesPerPixel * width, bitsPerComponent = 8;
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
void *rawData = calloc(height * width * bytesPerPixel, sizeof(uint8_t));
if (rawData == nil) {
VAP_Error(kQGVAPModuleCommon, @"load texture fail,cuz alloc mem fail!width:%@ height:%@ bytesPerPixel:%@", @(width), @(height), @(bytesPerPixel));
CGColorSpaceRelease(colorSpace);
colorSpace = NULL;
return nil;
}
CGContextRef context = CGBitmapContextCreate(rawData, width, height, bitsPerComponent, bytesPerRow, colorSpace, kCGImageAlphaPremultipliedLast|kCGImageByteOrder32Big);
CGColorSpaceRelease(colorSpace);
colorSpace = NULL;
if (context == nil) {
VAP_Error(kQGVAPModuleCommon, @"CGBitmapContextCreate error width:%@ height:%@ bitsPerComponent:%@ bytesPerRow:%@", @(width), @(height), @(bitsPerComponent), @(bytesPerRow));
free(rawData);
return nil;
}
CGContextTranslateCTM(context, 0, height);
CGContextScaleCTM(context, 1, -1);
CGContextDrawImage(context, CGRectMake(0, 0, width, height), imageRef);
MTLTextureDescriptor *textureDescriptor = [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatRGBA8Unorm width:width height:height mipmapped:NO];
id<MTLTexture> texture = [device newTextureWithDescriptor:textureDescriptor];
if (!texture) {
VAP_Error(kQGVAPModuleCommon, @"load texture fail,cuz fail getting texture");
free(rawData);
CGContextRelease(context);
return nil;
}
MTLRegion region = MTLRegionMake3D(0, 0, 0, width, height, 1);
[texture replaceRegion:region mipmapLevel:0 withBytes:rawData bytesPerRow:bytesPerRow];
free(rawData);
CGContextRelease(context);
return texture;
}
+ (id<MTLTexture>)loadTextureWithData:(NSData *)data device:(id<MTLDevice>)device width:(CGFloat)width height:(CGFloat)height {
if (!data) {
VAP_Error(kQGVAPModuleCommon, @"attemp to loadTexture with nil data");
return nil;
}
MTLTextureDescriptor *textureDescriptor = [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatR8Unorm width:width height:height mipmapped:NO];
id<MTLTexture> texture = [device newTextureWithDescriptor:textureDescriptor];
if (!texture) {
VAP_Error(kQGVAPModuleCommon, @"load texture fail,cuz fail getting texture");
return nil;
}
MTLRegion region = MTLRegionMake3D(0, 0, 0, width, height, 1);
const void *bytes = [data bytes];
[texture replaceRegion:region mipmapLevel:0 withBytes:bytes bytesPerRow:width];
return texture;
}
// Find a font size at which the text fits the given rect, shrinking from the designed size if necessary.
+ (UIFont *)getAppropriateFontWith:(NSString *)text rect:(CGRect)fitFrame designedSize:(CGFloat)designedFontSize bold:(BOOL)isBold textSize:(CGSize *)textSize {
UIFont *designedFont = isBold? [UIFont boldSystemFontOfSize:designedFontSize] : [UIFont systemFontOfSize:designedFontSize];
if (text.length == 0 || CGRectEqualToRect(CGRectZero, fitFrame) || !designedFont) {
*textSize = fitFrame.size;
return designedFont ;
}
CGSize stringSize = [text sizeWithAttributes:@{NSFontAttributeName:designedFont}];
CGFloat fontSize = designedFontSize;
NSInteger remainExcuteCount = 100;
while (stringSize.width > fitFrame.size.width && fontSize > 2.0 && remainExcuteCount > 0) {
fontSize *= 0.9;
remainExcuteCount -= 1;
designedFont = isBold? [UIFont boldSystemFontOfSize:fontSize] : [UIFont systemFontOfSize:fontSize];
stringSize = [text sizeWithAttributes:@{NSFontAttributeName:designedFont}];
}
if (remainExcuteCount < 1 || fontSize < 5.0) {
VAP_Event(kQGVAPModuleCommon, @"data exception content:%@ rect:%@ designedSize:%@ isBold:%@", text, [NSValue valueWithCGRect:fitFrame], @(designedFontSize), @(isBold));
}
*textSize = stringSize;
return designedFont;
}
#endif
@end
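/*
 Illustrative usage sketch (added for documentation, not part of the original source).
 Assumes a valid MTLDevice obtained elsewhere; shows how the text-drawing and texture-loading helpers combine:

     id<MTLDevice> device = MTLCreateSystemDefaultDevice();
     UIImage *textImage = [QGVAPTextureLoader drawingImageForText:@"nickname" color:[UIColor whiteColor] size:CGSizeMake(200, 60) bold:YES];
     id<MTLTexture> texture = [QGVAPTextureLoader loadTextureWithImage:textImage device:device];
 */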

View File

@@ -0,0 +1,82 @@
// QGVAPWrapView.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <UIKit/UIKit.h>
#import "UIView+VAP.h"
NS_ASSUME_NONNULL_BEGIN
typedef NS_ENUM(NSUInteger, QGVAPWrapViewContentMode) {
QGVAPWrapViewContentModeScaleToFill,
QGVAPWrapViewContentModeAspectFit,
QGVAPWrapViewContentModeAspectFill,
};
@protocol VAPWrapViewDelegate <NSObject>
@optional
// Asked right before playback starts: return YES to start immediately, NO to abandon playback.
- (BOOL)vapWrap_viewshouldStartPlayMP4:(VAPView *)container config:(QGVAPConfigModel *)config;
- (void)vapWrap_viewDidStartPlayMP4:(VAPView *)container;
- (void)vapWrap_viewDidPlayMP4AtFrame:(QGMP4AnimatedImageFrame*)frame view:(VAPView *)container;
- (void)vapWrap_viewDidStopPlayMP4:(NSInteger)lastFrameIndex view:(VAPView *)container;
- (void)vapWrap_viewDidFinishPlayMP4:(NSInteger)totalFrameCount view:(VAPView *)container;
- (void)vapWrap_viewDidFailPlayMP4:(NSError *)error;
//vap APIs
- (NSString *)vapWrapview_contentForVapTag:(NSString *)tag resource:(QGVAPSourceInfo *)info; // replace the resource placeholder from the config; return the tag unchanged if no substitution is needed
- (void)vapWrapView_loadVapImageWithURL:(NSString *)urlStr context:(NSDictionary *)context completion:(VAPImageCompletionBlock)completionBlock; // the component has no built-in network image loader, so image loading must be supplied externally
@end
/*
 Wraps VAPView; the wrap view itself does not respond to gestures.
 Provides contentMode support.
 Automatically removes the inner VAPView after playback finishes (optional).
 */
@interface QGVAPWrapView : UIView
/// default is QGVAPWrapViewContentModeScaleToFill
@property (nonatomic, assign) QGVAPWrapViewContentMode contentMode;
// Whether to remove the inner VAPView automatically after playback finishes; disable this if the caller reuses the view.
@property (nonatomic, assign) BOOL autoDestoryAfterFinish;
- (void)playHWDMP4:(NSString *)filePath
repeatCount:(NSInteger)repeatCount
delegate:(id<VAPWrapViewDelegate>)delegate;
- (void)stopHWDMP4;
- (void)pauseHWDMP4;
- (void)resumeHWDMP4;
// Mute playback. Note: set this before playback starts; changing it mid-playback has no effect.
- (void)setMute:(BOOL)isMute;
// Add a tap gesture recognizer. With autoDestoryAfterFinish enabled, the gesture stops working once playback finishes.
- (void)addVapTapGesture:(VAPGestureEventBlock)handler;
// Generic gesture-recognizer entry point. With autoDestoryAfterFinish enabled, the gesture stops working once playback finishes.
- (void)addVapGesture:(UIGestureRecognizer *)gestureRecognizer callback:(VAPGestureEventBlock)handler;
/*
 QGVAPWrapView itself does not respond to gestures; only its subviews do. Add gestures via vapWrapView_addVapTapGesture / vapWrapView_addVapGesture instead.
 */
- (void)addGestureRecognizer:(UIGestureRecognizer *)gestureRecognizer NS_UNAVAILABLE;
@end
NS_ASSUME_NONNULL_END
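/*
 Illustrative usage sketch (added for documentation, not part of the original source).
 Assumes the caller conforms to VAPWrapViewDelegate and `filePath` points to a local VAP mp4:

     QGVAPWrapView *wrapView = [[QGVAPWrapView alloc] initWithFrame:self.view.bounds];
     wrapView.contentMode = QGVAPWrapViewContentModeAspectFit;
     wrapView.autoDestoryAfterFinish = YES;
     [self.view addSubview:wrapView];
     [wrapView playHWDMP4:filePath repeatCount:0 delegate:self];
 */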

View File

@@ -0,0 +1,229 @@
// QGVAPWrapView.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGVAPWrapView.h"
#import "QGVAPConfigModel.h"
@interface QGVAPWrapView()<VAPWrapViewDelegate, HWDMP4PlayDelegate>
@property (nonatomic, weak) id<VAPWrapViewDelegate> delegate;
@property (nonatomic, strong) VAPView *vapView;
@end
@implementation QGVAPWrapView
- (instancetype)init {
if (self = [super init]) {
[self commonInit];
}
return self;
}
- (instancetype)initWithFrame:(CGRect)frame {
if (self = [super initWithFrame:frame]) {
[self commonInit];
}
return self;
}
- (void)commonInit {
_autoDestoryAfterFinish = YES;
}
// Lazily create the inner VAPView on first use.
- (void)initVAPViewIfNeed {
if (!_vapView) {
_vapView = [[VAPView alloc] initWithFrame:self.bounds];
[self addSubview:_vapView];
}
}
- (void)playHWDMP4:(NSString *)filePath
repeatCount:(NSInteger)repeatCount
delegate:(id<VAPWrapViewDelegate>)delegate {
self.delegate = delegate;
[self initVAPViewIfNeed];
[self.vapView playHWDMP4:filePath repeatCount:repeatCount delegate:self];
}
- (void)stopHWDMP4 {
[self.vapView stopHWDMP4];
}
- (void)pauseHWDMP4 {
[self.vapView pauseHWDMP4];
}
- (void)resumeHWDMP4 {
[self.vapView resumeHWDMP4];
}
- (void)setMute:(BOOL)isMute {
[self initVAPViewIfNeed];
[self.vapView setMute:isMute];
}
- (void)addVapGesture:(UIGestureRecognizer *)gestureRecognizer callback:(VAPGestureEventBlock)handler {
[self initVAPViewIfNeed];
[self.vapView addVapGesture:gestureRecognizer callback:handler];
}
- (void)addVapTapGesture:(VAPGestureEventBlock)handler {
[self initVAPViewIfNeed];
[self.vapView addVapTapGesture:handler];
}
#pragma mark - UIView
// Pass touches through to subviews only; the wrap view itself never becomes the hit-test target.
- (UIView *)hitTest:(CGPoint)point withEvent:(UIEvent *)event {
if (!self.isUserInteractionEnabled || self.isHidden || self.alpha < 0.01) {
return nil;
}
if ([self pointInside:point withEvent:event]) {
for (UIView *subview in [self.subviews reverseObjectEnumerator]) {
CGPoint convertedPoint = [self convertPoint:point toView:subview];
UIView *hitView = [subview hitTest:convertedPoint withEvent:event];
if (hitView) {
return hitView;
}
}
return nil;
}
return nil;
}
#pragma mark - Private
- (void)p_setupContentModeWithConfig:(QGVAPConfigModel *)config {
CGFloat realWidth = 0.;
CGFloat realHeight = 0.;
CGFloat layoutWidth = self.bounds.size.width;
CGFloat layoutHeight = self.bounds.size.height;
CGFloat layoutRatio = self.bounds.size.width / self.bounds.size.height;
CGFloat videoRatio = config.info.size.width / config.info.size.height;
switch (self.contentMode) {
case QGVAPWrapViewContentModeScaleToFill: {
}
break;
case QGVAPWrapViewContentModeAspectFit: {
if (layoutRatio < videoRatio) {
realWidth = layoutWidth;
realHeight = realWidth / videoRatio;
} else {
realHeight = layoutHeight;
realWidth = videoRatio * realHeight;
}
self.vapView.frame = CGRectMake(0, 0, realWidth, realHeight);
self.vapView.center = self.center;
}
break;
case QGVAPWrapViewContentModeAspectFill: {
if (layoutRatio > videoRatio) {
realWidth = layoutWidth;
realHeight = realWidth / videoRatio;
} else {
realHeight = layoutHeight;
realWidth = videoRatio * realHeight;
}
self.vapView.frame = CGRectMake(0, 0, realWidth, realHeight);
self.vapView.center = self.center;
}
break;
default:
break;
}
}
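// Worked example (added for documentation only): with a 375x667 wrap view and a 750x1500 video,
// AspectFit keeps the height (667) and shrinks the width to 667 * (750/1500) = 333.5,
// while AspectFill keeps the width (375) and grows the height to 375 / (750/1500) = 750.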
#pragma mark - mp4 hwd delegate
#pragma mark --
- (void)viewDidStartPlayMP4:(VAPView *)container {
if ([self.delegate respondsToSelector:@selector(vapWrap_viewDidStartPlayMP4:)]) {
[self.delegate vapWrap_viewDidStartPlayMP4:container];
}
}
- (void)viewDidFinishPlayMP4:(NSInteger)totalFrameCount view:(UIView *)container {
// Note: called on a background thread.
if ([self.delegate respondsToSelector:@selector(vapWrap_viewDidFinishPlayMP4:view:)]) {
[self.delegate vapWrap_viewDidFinishPlayMP4:totalFrameCount view:container];
}
}
- (void)viewDidPlayMP4AtFrame:(QGMP4AnimatedImageFrame *)frame view:(UIView *)container {
// Note: called on a background thread.
if ([self.delegate respondsToSelector:@selector(vapWrap_viewDidPlayMP4AtFrame:view:)]) {
[self.delegate vapWrap_viewDidPlayMP4AtFrame:frame view:container];
}
}
- (void)viewDidStopPlayMP4:(NSInteger)lastFrameIndex view:(UIView *)container {
// Note: called on a background thread.
if ([self.delegate respondsToSelector:@selector(vapWrap_viewDidStopPlayMP4:view:)]) {
[self.delegate vapWrap_viewDidStopPlayMP4:lastFrameIndex view:container];
}
dispatch_async(dispatch_get_main_queue(), ^{
if (self.autoDestoryAfterFinish) {
[self.vapView removeFromSuperview];
self.vapView = nil;
}
});
}
- (BOOL)shouldStartPlayMP4:(VAPView *)container config:(QGVAPConfigModel *)config {
[self p_setupContentModeWithConfig:config];
if ([self.delegate respondsToSelector:@selector(vapWrap_viewshouldStartPlayMP4:config:)]) {
return [self.delegate vapWrap_viewshouldStartPlayMP4:container config:config];
}
return YES;
}
- (void)viewDidFailPlayMP4:(NSError *)error {
if ([self.delegate respondsToSelector:@selector(vapWrap_viewDidFailPlayMP4:)]) {
[self.delegate vapWrap_viewDidFailPlayMP4:error];
}
}
#pragma mark -- vapx
//provide the content for tags, maybe text or url string ...
- (NSString *)contentForVapTag:(NSString *)tag resource:(QGVAPSourceInfo *)info {
if ([self.delegate respondsToSelector:@selector(vapWrapview_contentForVapTag:resource:)]) {
return [self.delegate vapWrapview_contentForVapTag:tag resource:info];
}
return nil;
}
//provide image for url from tag content
- (void)loadVapImageWithURL:(NSString *)urlStr context:(NSDictionary *)context completion:(VAPImageCompletionBlock)completionBlock {
if ([self.delegate respondsToSelector:@selector(vapWrapView_loadVapImageWithURL:context:completion:)]) {
[self.delegate vapWrapView_loadVapImageWithURL:urlStr context:context completion:completionBlock];
}
}
@end

View File

@@ -0,0 +1,27 @@
// QGVAPlayer.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <UIKit/UIKit.h>
#import "UIView+VAP.h"
//! Project version number for QGVAPlayer.
FOUNDATION_EXPORT double QGVAPlayerVersionNumber;
//! Project version string for QGVAPlayer.
FOUNDATION_EXPORT const unsigned char QGVAPlayerVersionString[];
// In this header, you should import all the public headers of your framework using statements like #import <QGVAPlayer/PublicHeader.h>

View File

@@ -0,0 +1,99 @@
// UIView+VAP.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <UIKit/UIKit.h>
#import "VAPMacros.h"
#import "QGVAPLogger.h"
// Behavior when the app enters the background.
typedef NS_ENUM(NSUInteger, HWDMP4EBOperationType) {
HWDMP4EBOperationTypeStop, // stop VAP playback when entering the background
HWDMP4EBOperationTypePauseAndResume, // pause when entering the background and resume automatically on returning to the foreground; resuming decodes from the last keyframe to the current frame to work around VTSession invalidation, so avoid this on low-end devices when the pause may be long (high CPU cost)
HWDMP4EBOperationTypeDoNothing, // VAP itself does nothing; use this when playback is controlled externally, so VAP does not override an external pause call
};
@class QGMP4AnimatedImageFrame,QGVAPConfigModel, QGVAPSourceInfo;
/** Note: delegate callbacks are executed on a background thread. */
@protocol HWDMP4PlayDelegate <NSObject>
@optional
// Asked right before playback starts: return YES to start immediately, NO to abandon playback.
- (BOOL)shouldStartPlayMP4:(VAPView *)container config:(QGVAPConfigModel *)config;
- (void)viewDidStartPlayMP4:(VAPView *)container;
- (void)viewDidPlayMP4AtFrame:(QGMP4AnimatedImageFrame*)frame view:(VAPView *)container;
- (void)viewDidStopPlayMP4:(NSInteger)lastFrameIndex view:(VAPView *)container;
- (void)viewDidFinishPlayMP4:(NSInteger)totalFrameCount view:(VAPView *)container;
- (void)viewDidFailPlayMP4:(NSError *)error;
//vap APIs
- (NSString *)contentForVapTag:(NSString *)tag resource:(QGVAPSourceInfo *)info; // replace the resource placeholder from the config; return the tag unchanged if no substitution is needed
- (void)loadVapImageWithURL:(NSString *)urlStr context:(NSDictionary *)context completion:(VAPImageCompletionBlock)completionBlock; // the component has no built-in network image loader, so image loading must be supplied externally
@end
@interface UIView (VAP)
@property (nonatomic, weak) id<HWDMP4PlayDelegate> hwd_Delegate;
@property (nonatomic, readonly) QGMP4AnimatedImageFrame *hwd_currentFrame;
@property (nonatomic, strong) NSString *hwd_MP4FilePath;
@property (nonatomic, assign) NSInteger hwd_fps; //fps for display; each frame's duration is derived from this value before display.
@property (nonatomic, assign) BOOL hwd_renderByOpenGL; // whether to render with OpenGL; Metal is used by default
@property (nonatomic, assign) HWDMP4EBOperationType hwd_enterBackgroundOP; // behavior when entering the background; defaults to stopping playback
- (void)playHWDMp4:(NSString *)filePath;
- (void)playHWDMP4:(NSString *)filePath delegate:(id<HWDMP4PlayDelegate>)delegate;
- (void)playHWDMP4:(NSString *)filePath repeatCount:(NSInteger)repeatCount delegate:(id<HWDMP4PlayDelegate>)delegate;
- (void)stopHWDMP4;
- (void)pauseHWDMP4;
- (void)resumeHWDMP4;
+ (void)registerHWDLog:(QGVAPLoggerFunc)logger;
// For assets without a vapc box, call this with enable set to YES before playback; otherwise the asset cannot be played.
- (void)enableOldVersion:(BOOL)enable;
// Mute playback. Note: set before playback starts; changing it mid-playback has no effect. For looped playback the new value takes effect from the next loop.
- (void)setMute:(BOOL)isMute;
@end
@interface UIView (VAPGesture)
// Add a tap gesture recognizer.
- (void)addVapTapGesture:(VAPGestureEventBlock)handler;
// Generic gesture-recognizer entry point.
- (void)addVapGesture:(UIGestureRecognizer *)gestureRecognizer callback:(VAPGestureEventBlock)handler;
@end
@interface UIView (VAPMask)
@property (nonatomic, strong) QGVAPMaskInfo *vap_maskInfo;
@end
@interface UIView (MP4HWDDeprecated)
- (void)playHWDMP4:(NSString *)filePath blendMode:(QGHWDTextureBlendMode)mode delegate:(id<HWDMP4PlayDelegate>)delegate __attribute__((deprecated("QGHWDTextureBlendMode is no longer work in vap, use playHWDMP4:delegate: instead")));
- (void)playHWDMP4:(NSString *)filePath blendMode:(QGHWDTextureBlendMode)mode repeatCount:(NSInteger)repeatCount delegate:(id<HWDMP4PlayDelegate>)delegate __attribute__((deprecated("QGHWDTextureBlendMode is no longer work in vap, use playHWDMP4:repeatCount:delegate: instead")));
- (void)playHWDMP4:(NSString *)filePath fps:(NSInteger)fps delegate:(id<HWDMP4PlayDelegate>)delegate __attribute__((deprecated("customized fps is not recommended, use playHWDMP4:delegate: instead")));
- (void)playHWDMP4:(NSString *)filePath fps:(NSInteger)fps repeatCount:(NSInteger)repeatCount delegate:(id<HWDMP4PlayDelegate>)delegate __attribute__((deprecated("customized fps is not recommended, use playHWDMP4:repeatCount:delegate: instead")));
- (void)playHWDMP4:(NSString *)filePath fps:(NSInteger)fps blendMode:(QGHWDTextureBlendMode)mode delegate:(id<HWDMP4PlayDelegate>)delegate __attribute__((deprecated("customized fps is not recommended, use playHWDMP4:delegate: instead")));
- (void)playHWDMP4:(NSString *)filePath fps:(NSInteger)fps blendMode:(QGHWDTextureBlendMode)mode repeatCount:(NSInteger)repeatCount delegate:(id<HWDMP4PlayDelegate>)delegate __attribute__((deprecated("customized fps is not recommended, use playHWDMP4:repeatCount:delegate: instead")));
@end
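/*
 Illustrative usage sketch (added for documentation, not part of the original source).
 Assumes `animationView` is any on-screen UIView, the caller conforms to HWDMP4PlayDelegate,
 and `path` points to a local VAP mp4:

     animationView.hwd_enterBackgroundOP = HWDMP4EBOperationTypePauseAndResume;
     [animationView setMute:NO];
     [animationView playHWDMP4:path repeatCount:-1 delegate:self];
     // later: [animationView stopHWDMP4];
 */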

View File

@@ -0,0 +1,783 @@
// UIView+VAP.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <UIKit/UIKit.h>
#import <objc/runtime.h>
#import "UIView+VAP.h"
#import "QGAnimatedImageDecodeManager.h"
#import "QGMP4HWDFileInfo.h"
#import "QGMP4FrameHWDecoder.h"
#import "QGBaseAnimatedImageFrame+Displaying.h"
#import "QGHWDMP4OpenGLView.h"
#import "QGVAPWeakProxy.h"
#import "NSNotificationCenter+VAPThreadSafe.h"
#import "QGHWDMP4OpenGLView.h"
#import "QGMP4FrameHWDecoder.h"
#import "QGMP4AnimatedImageFrame.h"
#import "QGMP4FrameHWDecoder.h"
#import "QGHWDMetalView.h"
#import "QGVAPMetalView.h"
#import "QGBaseAnimatedImageFrame+Displaying.h"
#import "QGVAPConfigManager.h"
#import "QGHWDMetalRenderer.h"
#import "UIGestureRecognizer+VAPUtil.h"
NSInteger const kQGHWDMP4DefaultFPS = 20;
NSInteger const kQGHWDMP4MinFPS = 1;
NSInteger const QGHWDMP4MaxFPS = 60;
NSInteger const VapMaxCompatibleVersion = 2;
@interface UIView () <QGAnimatedImageDecoderDelegate,QGHWDMP4OpenGLViewDelegate, QGHWDMetelViewDelegate, QGVAPMetalViewDelegate, QGVAPConfigDelegate>
@property (nonatomic, assign) QGHWDTextureBlendMode hwd_blendMode; //position of the alpha data in the frame
@property (nonatomic, strong) QGMP4AnimatedImageFrame *hwd_currentFrameInstance; //store the frame value
@property (nonatomic, strong) QGMP4HWDFileInfo *hwd_fileInfo; //MP4 file info
@property (nonatomic, strong) QGAnimatedImageDecodeManager *hwd_decodeManager; //decode manager
@property (nonatomic, strong) QGAnimatedImageDecodeConfig *hwd_decodeConfig; //decode thread count and buffer settings
@property (nonatomic, strong) NSOperationQueue *hwd_callbackQueue; //queue on which delegate callbacks are dispatched
@property (nonatomic, assign) BOOL hwd_onPause; //whether playback is paused
@property (nonatomic, assign) BOOL hwd_onSeek; //whether the decoder is seeking
@property (nonatomic, strong) QGHWDMP4OpenGLView *hwd_openGLView; //opengl render view
@property (nonatomic, strong) QGHWDMetalView *hwd_metalView; //metal render view (plain alpha-mp4 pipeline)
@property (nonatomic, strong) QGVAPMetalView *vap_metalView; //metal render view for vap-format mp4
@property (nonatomic, assign) BOOL hwd_isFinish; //whether playback has finished
@property (nonatomic, assign) NSInteger hwd_repeatCount; //remaining repeat count, -1 means loop forever
@property (nonatomic, strong) QGVAPConfigManager *hwd_configManager; //vap config manager
@property (nonatomic, strong) dispatch_queue_t vap_renderQueue; //serial render queue
@property (nonatomic, assign) BOOL vap_enableOldVersion; //allow playing assets without a vapc box
@property (nonatomic, assign) BOOL vap_isMute; //whether audio is muted
@end
@implementation UIView (VAP)
#pragma mark - private methods
- (void)hwd_registerNotification {
[[NSNotificationCenter defaultCenter] hwd_addSafeObserver:self selector:@selector(hwd_didReceiveEnterBackgroundNotification:) name:UIApplicationDidEnterBackgroundNotification object:nil];
[[NSNotificationCenter defaultCenter] hwd_addSafeObserver:self selector:@selector(hwd_didReceiveWillEnterForegroundNotification:) name:UIApplicationWillEnterForegroundNotification object:nil];
[[NSNotificationCenter defaultCenter] hwd_addSafeObserver:self selector:@selector(hwd_didReceiveSeekStartNotification:) name:kQGVAPDecoderSeekStart object:nil];
[[NSNotificationCenter defaultCenter] hwd_addSafeObserver:self selector:@selector(hwd_didReceiveSeekFinishNotification:) name:kQGVAPDecoderSeekFinish object:nil];
}
- (void)hwd_didReceiveEnterBackgroundNotification:(NSNotification *)notification {
switch (self.hwd_enterBackgroundOP) {
case HWDMP4EBOperationTypePauseAndResume:
[self pauseHWDMP4];
break;
case HWDMP4EBOperationTypeDoNothing:
break;
default:
[self stopHWDMP4];
}
}
- (void)hwd_didReceiveWillEnterForegroundNotification:(NSNotification *)notification {
switch (self.hwd_enterBackgroundOP) {
case HWDMP4EBOperationTypePauseAndResume:
[self resumeHWDMP4];
break;
default:
break;
}
}
- (void)hwd_didReceiveSeekStartNotification:(NSNotification *)notification {
if ([self.hwd_decodeManager containsThisDeocder:notification.object]) {
self.hwd_onSeek = YES;
}
}
- (void)hwd_didReceiveSeekFinishNotification:(NSNotification *)notification {
if ([self.hwd_decodeManager containsThisDeocder:notification.object]) {
self.hwd_onSeek = NO;
}
}
// Stop playback and release decode/render resources.
- (void)hwd_stopHWDMP4 {
VAP_Info(kQGVAPModuleCommon, @"hwd stop playing");
self.hwd_repeatCount = 0;
if (self.hwd_isFinish) {
VAP_Info(kQGVAPModuleCommon, @"isFinish already set");
return ;
}
self.hwd_isFinish = YES;
self.hwd_onPause = YES;
if (self.hwd_openGLView) {
self.hwd_openGLView.pause = YES;
if ([EAGLContext currentContext] != self.hwd_openGLView.glContext) {
[EAGLContext setCurrentContext:self.hwd_openGLView.glContext];
}
[self.hwd_openGLView dispose];
glFinish();
}
if (self.hwd_metalView) {
[self.hwd_metalView dispose];
}
if (self.vap_metalView) {
[self.vap_metalView dispose];
}
[self.hwd_decodeManager tryToStopAudioPlay];
[self.hwd_callbackQueue addOperationWithBlock:^{
// Notify the delegate that playback has stopped.
if ([self.hwd_Delegate respondsToSelector:@selector(viewDidStopPlayMP4:view:)]) {
[self.hwd_Delegate viewDidStopPlayMP4:self.hwd_currentFrame.frameIndex view:self];
}
}];
self.hwd_decodeManager = nil;
self.hwd_decodeConfig = nil;
self.hwd_currentFrameInstance = nil;
self.hwd_fileInfo = nil;
[EAGLContext setCurrentContext:nil];
}
// One pass of the animation has finished displaying.
- (void)hwd_didFinishDisplay {
VAP_Info(kQGVAPModuleCommon, @"hwd didFinishDisplay");
[self.hwd_callbackQueue addOperationWithBlock:^{
// Notify the delegate that this pass has finished.
if ([self.hwd_Delegate respondsToSelector:@selector(viewDidFinishPlayMP4:view:)]) {
[self.hwd_Delegate viewDidFinishPlayMP4:self.hwd_currentFrame.frameIndex+1 view:self];
}
}];
NSInteger currentCount = self.hwd_repeatCount;
if (currentCount == -1 || currentCount-- > 0) {
//continuing
VAP_Info(kQGVAPModuleCommon, @"continue to display. currentCount:%@", @(currentCount));
[self p_playHWDMP4:self.hwd_fileInfo.filePath
fps:self.hwd_fps
blendMode:self.hwd_blendMode
repeatCount:currentCount
delegate:self.hwd_Delegate];
return ;
}
[self hwd_stopHWDMP4];
}
- (void)hwd_loadMetalViewIfNeed:(QGHWDTextureBlendMode)mode {
if (self.hwd_renderByOpenGL) {
return ;
}
//use vap metal
if (self.useVapMetalView) {
if (self.vap_metalView) {
self.vap_metalView.commonInfo = self.hwd_configManager.model.info;
return ;
}
QGVAPMetalView *vapMetalView = [[QGVAPMetalView alloc] initWithFrame:self.bounds];
vapMetalView.commonInfo = self.hwd_configManager.model.info;
vapMetalView.maskInfo = self.vap_maskInfo;
vapMetalView.delegate = self;
[self addSubview:vapMetalView];
vapMetalView.translatesAutoresizingMaskIntoConstraints = false;
NSDictionary *views = @{@"vapMetalView": vapMetalView};
[self addConstraints:[NSLayoutConstraint constraintsWithVisualFormat:@"V:|[vapMetalView]|" options:0 metrics:nil views:views]];
[self addConstraints:[NSLayoutConstraint constraintsWithVisualFormat:@"H:|[vapMetalView]|" options:0 metrics:nil views:views]];
self.vap_metalView = vapMetalView;
[self hwd_registerNotification];
return ;
}
//use hwd metal
if (self.hwd_metalView) {
self.hwd_metalView.blendMode = mode;
return ;
}
QGHWDMetalView *metalView = [[QGHWDMetalView alloc] initWithFrame:self.bounds blendMode:mode];
if (!metalView) {
VAP_Event(kQGVAPModuleCommon, @"metal view is nil!");
return ;
}
metalView.blendMode = mode;
metalView.delegate = self;
[self addSubview:metalView];
metalView.translatesAutoresizingMaskIntoConstraints = false;
NSDictionary *views = @{@"metalView": metalView};
[self addConstraints:[NSLayoutConstraint constraintsWithVisualFormat:@"V:|[metalView]|" options:0 metrics:nil views:views]];
[self addConstraints:[NSLayoutConstraint constraintsWithVisualFormat:@"H:|[metalView]|" options:0 metrics:nil views:views]];
self.hwd_metalView = metalView;
[self hwd_registerNotification];
}
- (void)hwd_loadMetalDataIfNeed {
[self.hwd_configManager loadMTLTextures:kQGHWDMetalRendererDevice]; // preload Metal textures for attachment resources
[self.hwd_configManager loadMTLBuffers:kQGHWDMetalRendererDevice]; // preload Metal buffers for attachment rendering
}
- (void)hwd_loadOpenglViewIfNeed:(QGHWDTextureBlendMode)mode {
if (!self.hwd_renderByOpenGL) {
return ;
}
if (self.hwd_openGLView) {
self.hwd_openGLView.blendMode = mode;
self.hwd_openGLView.pause = NO;
VAP_Info(kQGVAPModuleCommon, @"quit loading openglView for already loaded.");
return ;
}
QGHWDMP4OpenGLView *openGLView = [[QGHWDMP4OpenGLView alloc] initWithFrame:self.bounds];
openGLView.displayDelegate = self;
openGLView.blendMode = mode;
[self addSubview:openGLView];
openGLView.userInteractionEnabled = NO;
[openGLView setupGL];
self.hwd_openGLView = openGLView;
NSDictionary *views = @{@"openGLView": openGLView};
[self addConstraints:[NSLayoutConstraint constraintsWithVisualFormat:@"V:|[openGLView]|" options:0 metrics:nil views:views]];
[self addConstraints:[NSLayoutConstraint constraintsWithVisualFormat:@"H:|[openGLView]|" options:0 metrics:nil views:views]];
[self hwd_registerNotification];
}
// Prefer the externally specified fps; otherwise use the fps carried by the mp4, and fall back to the default if neither is valid.
- (NSTimeInterval)hwd_appropriateDurationForFrame:(QGMP4AnimatedImageFrame *)frame {
NSInteger fps = self.hwd_fps;
if (fps < kQGHWDMP4MinFPS || fps > QGHWDMP4MaxFPS) {
if (frame.defaultFps >= kQGHWDMP4MinFPS && frame.defaultFps <= QGHWDMP4MaxFPS) {
fps = frame.defaultFps;
}else {
fps = kQGHWDMP4DefaultFPS;
}
}
return 1000/(double)fps;
}
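// Worked example (added for documentation only): with hwd_fps == 0 (out of range) and a frame whose
// defaultFps is 25, this returns 1000/25 = 40ms; if the file carries no usable fps either,
// it falls back to kQGHWDMP4DefaultFPS (20), i.e. 50ms per frame.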
#pragma mark - main
/**
 See playHWDMP4:blendMode:repeatCount:delegate:.
 Defaults: alpha data on the left, no repeat, no delegate.
 */
- (void)playHWDMp4:(NSString *)filePath {
[self playHWDMP4:filePath delegate:nil];
}
/**
 See playHWDMP4:blendMode:repeatCount:delegate:.
 Defaults: alpha data on the left, no repeat.
 */
- (void)playHWDMP4:(NSString *)filePath delegate:(id<HWDMP4PlayDelegate>)delegate {
[self p_playHWDMP4:filePath fps:0 blendMode:QGHWDTextureBlendMode_AlphaLeft repeatCount:0 delegate:delegate];
}
/**
 See playHWDMP4:blendMode:repeatCount:delegate:.
 Default: alpha data on the left.
 */
- (void)playHWDMP4:(NSString *)filePath repeatCount:(NSInteger)repeatCount delegate:(id<HWDMP4PlayDelegate>)delegate {
[self p_playHWDMP4:filePath fps:0 blendMode:QGHWDTextureBlendMode_AlphaLeft repeatCount:repeatCount delegate:delegate];
}
- (void)p_playHWDMP4:(NSString *)filePath
fps:(NSInteger)fps
blendMode:(QGHWDTextureBlendMode)mode
repeatCount:(NSInteger)repeatCount
delegate:(id<HWDMP4PlayDelegate>)delegate {
VAP_Info(kQGVAPModuleCommon, @"try to display mp4:%@ blendMode:%@ fps:%@ repeatCount:%@", filePath, @(mode), @(fps), @(repeatCount));
NSAssert([NSThread isMainThread], @"HWDMP4 needs to be accessed on the main thread.");
//filePath check
if (!filePath || filePath.length == 0) {
VAP_Error(kQGVAPModuleCommon, @"playHWDMP4 error! has no filePath!");
return ;
}
NSFileManager *fileMgr = [NSFileManager defaultManager];
if (![fileMgr fileExistsAtPath:filePath]) {
VAP_Error(kQGVAPModuleCommon, @"playHWDMP4 error! fileNotExistsAtPath filePath:%#", filePath);
return ;
}
self.hwd_isFinish = NO;
self.hwd_blendMode = mode;
self.hwd_fps = fps;
self.hwd_repeatCount = repeatCount;
self.hwd_Delegate = delegate;
if (self.hwd_Delegate && !self.hwd_callbackQueue) {
NSOperationQueue *queue = [[NSOperationQueue alloc] init];
queue.maxConcurrentOperationCount = 1;
self.hwd_callbackQueue = queue;
}
//mp4 info
QGMP4HWDFileInfo *fileInfo = [[QGMP4HWDFileInfo alloc] init];
fileInfo.filePath = filePath;
fileInfo.mp4Parser = [[QGMP4ParserProxy alloc] initWithFilePath:fileInfo.filePath];
[fileInfo.mp4Parser parse];
self.hwd_fileInfo = fileInfo;
//config manager
QGVAPConfigManager *configManager = [[QGVAPConfigManager alloc] initWith:fileInfo];
configManager.delegate = self;
self.hwd_configManager = configManager;
if (configManager.model.info.version > VapMaxCompatibleVersion) {
VAP_Error(kQGVAPModuleCommon, @"playHWDMP4 error! not compatible vap version:%@!", @(configManager.model.info.version));
[self stopHWDMP4];
return ;
}
if (!configManager.hasValidConfig && !self.vap_enableOldVersion) {
VAP_Error(kQGVAPModuleCommon, @"playHWDMP4 error! don't has vapc box and enableOldVersion is false!");
[self stopHWDMP4];
return ;
}
//reset
self.hwd_currentFrameInstance = nil;
self.hwd_decodeManager = nil;
self.hwd_onPause = NO;
if (!self.hwd_decodeConfig) {
self.hwd_decodeConfig = [QGAnimatedImageDecodeConfig defaultConfig];
}
//OpenGLView
[self hwd_loadOpenglViewIfNeed:mode];
//metalView
[self hwd_loadMetalViewIfNeed:mode];
if ([[UIDevice currentDevice] hwd_isSimulator]) {
VAP_Error(kQGVAPModuleCommon, @"playHWDMP4 error! not allowed in Simulator!");
[self stopHWDMP4];
return ;
}
if (!self.vap_renderQueue) {
self.vap_renderQueue = dispatch_queue_create("com.qgame.vap.render", DISPATCH_QUEUE_SERIAL);
}
self.hwd_decodeManager = [[QGAnimatedImageDecodeManager alloc] initWith:self.hwd_fileInfo config:self.hwd_decodeConfig delegate:self];
[self.hwd_configManager loadConfigResources]; // asynchronous; playback continues in -onVAPConfigResourcesLoaded:error:
}
#pragma mark - play run
- (void)hwd_renderVideoRun {
static NSTimeInterval durationForWaitingFrame = 16/1000.0;
static NSTimeInterval minimumDurationForLoop = 1/1000.0;
__block NSTimeInterval lastRenderingInterval = 0;
__block NSTimeInterval lastRenderingDuration = 0;
dispatch_async(self.vap_renderQueue, ^{
if (self.hwd_onPause || self.hwd_isFinish) {
return ;
}
// Check self.hwd_onPause each loop; wrap every iteration in an @autoreleasepool so memory stays flat while the loop runs.
while (YES) {
@autoreleasepool {
if (self.hwd_isFinish) {
break ;
}
if (self.hwd_onPause || self.hwd_onSeek) {
lastRenderingInterval = NSDate.timeIntervalSinceReferenceDate;
[NSThread sleepForTimeInterval:durationForWaitingFrame];
continue;
}
__block QGMP4AnimatedImageFrame *nextFrame = nil;
dispatch_sync(dispatch_get_main_queue(), ^{
nextFrame = [self hwd_displayNext];
});
NSTimeInterval duration = nextFrame.duration/1000.0;
if (duration == 0) {
duration = durationForWaitingFrame;
}
NSTimeInterval currentTimeInterval = NSDate.timeIntervalSinceReferenceDate;
if (nextFrame && nextFrame.frameIndex != 0) {
duration -= ((currentTimeInterval-lastRenderingInterval) - lastRenderingDuration); // compensate for the previous iteration's overrun
}
duration = MAX(minimumDurationForLoop, duration);
lastRenderingInterval = currentTimeInterval;
lastRenderingDuration = duration;
[NSThread sleepForTimeInterval:duration];
}
}
});
}
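// Worked example (added for documentation only): if a frame is scheduled for 40ms but the previous
// iteration actually took 46ms (lastRenderingDuration 40ms, measured gap 46ms), the next sleep is
// shortened to 40 - (46 - 40) = 34ms, clamped to at least minimumDurationForLoop.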
- (QGMP4AnimatedImageFrame *)hwd_displayNext {
if (self.hwd_onPause || self.hwd_isFinish) {
return nil;
}
NSInteger nextIndex = self.hwd_currentFrame.frameIndex + 1;
if (!self.hwd_currentFrame) {
nextIndex = 0;
}
QGMP4AnimatedImageFrame *nextFrame = (QGMP4AnimatedImageFrame *)[self.hwd_decodeManager consumeDecodedFrame:nextIndex];
// Drop frames that are missing, out of order, or of an unexpected type.
if (!nextFrame || nextFrame.frameIndex != nextIndex || ![nextFrame isKindOfClass:[QGMP4AnimatedImageFrame class]]) {
return nil;
}
// Start audio playback together with the first frame.
if (nextIndex == 0) {
[self.hwd_decodeManager tryToStartAudioPlay];
}
nextFrame.duration = [self hwd_appropriateDurationForFrame:nextFrame];
//VAP_Debug(kQGVAPModuleCommon, @"display frame:%@, has frameBuffer:%@",@(nextIndex),@(nextFrame.pixelBuffer != nil));
if (self.hwd_renderByOpenGL) {
[self.hwd_openGLView displayPixelBuffer:nextFrame.pixelBuffer];
} else if (self.useVapMetalView) {
NSArray<QGVAPMergedInfo *> *mergeInfos = self.hwd_configManager.model.mergedConfig[@(nextFrame.frameIndex)];
[self.vap_metalView display:nextFrame.pixelBuffer mergeInfos:mergeInfos];
} else {
[self.hwd_metalView display:nextFrame.pixelBuffer];
}
self.hwd_currentFrameInstance = nextFrame;
[self.hwd_callbackQueue addOperationWithBlock:^{
if (nextIndex == 0 && [self.hwd_Delegate respondsToSelector:@selector(viewDidStartPlayMP4:)]) {
[self.hwd_Delegate viewDidStartPlayMP4:self];
}
// Per-frame callback.
if ([self.hwd_Delegate respondsToSelector:@selector(viewDidPlayMP4AtFrame:view:)]) {
[self.hwd_Delegate viewDidPlayMP4AtFrame:self.hwd_currentFrame view:self];
}
}];
return nextFrame;
}
// Public playback controls.
- (void)stopHWDMP4 {
[self hwd_stopHWDMP4];
}
- (void)pauseHWDMP4 {
VAP_Info(kQGVAPModuleCommon, @"pauseHWDMP4");
self.hwd_onPause = YES;
[self.hwd_decodeManager tryToPauseAudioPlay];
// Pausing is not treated as stopping, so the stop callback below is intentionally left disabled; call stopHWDMP4 instead if the view will not be resumed.
// [self.hwd_callbackQueue addOperationWithBlock:^{
// //
// if ([self.hwd_Delegate respondsToSelector:@selector(viewDidStopPlayMP4:view:)]) {
// [self.hwd_Delegate viewDidStopPlayMP4:self.hwd_currentFrame.frameIndex view:self];
// }
// }];
}
- (void)resumeHWDMP4 {
VAP_Info(kQGVAPModuleCommon, @"resumeHWDMP4");
self.hwd_onPause = NO;
self.hwd_openGLView.pause = NO;
// Resume audio if it is in use.
[self.hwd_decodeManager tryToResumeAudioPlay];
}
+ (void)registerHWDLog:(QGVAPLoggerFunc)logger {
[QGVAPLogger registerExternalLog:logger];
}
- (void)enableOldVersion:(BOOL)enable {
self.vap_enableOldVersion = enable;
}
- (void)setMute:(BOOL)isMute {
self.vap_isMute = isMute;
}
#pragma mark - delegate
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation"
//decoder
- (Class)decoderClassForManager:(QGAnimatedImageDecodeManager *)manager {
return [QGMP4FrameHWDecoder class];
}
- (BOOL)shouldSetupAudioPlayer {
return !self.vap_isMute;
}
- (void)decoderDidFinishDecode:(QGBaseDecoder *)decoder {
VAP_Info(kQGVAPModuleCommon, @"decoderDidFinishDecode.");
[self hwd_didFinishDisplay];
}
- (void)decoderDidFailDecode:(QGBaseDecoder *)decoder error:(NSError *)error{
VAP_Error(kQGVAPModuleCommon, @"decoderDidFailDecode:%@", error);
[self hwd_stopHWDMP4];
[self.hwd_callbackQueue addOperationWithBlock:^{
//
if ([self.hwd_Delegate respondsToSelector:@selector(viewDidFailPlayMP4:)]) {
[self.hwd_Delegate viewDidFailPlayMP4:error];
}
}];
}
//opengl
- (void)onViewUnavailableStatus {
VAP_Error(kQGVAPModuleCommon, @"onViewUnavailableStatus");
[self hwd_stopHWDMP4];
}
//metal
- (void)onMetalViewUnavailable {
VAP_Error(kQGVAPModuleCommon, @"onMetalViewUnavailable");
[self stopHWDMP4];
}
//config resources loaded
- (void)onVAPConfigResourcesLoaded:(QGVAPConfigModel *)config error:(NSError *)error {
[self hwd_loadMetalDataIfNeed];
if ([self.hwd_Delegate respondsToSelector:@selector(shouldStartPlayMP4:config:)]) {
BOOL shouldStart = [self.hwd_Delegate shouldStartPlayMP4:self config:self.hwd_configManager.model];
if (!shouldStart) {
VAP_Event(kQGVAPModuleCommon, @"shouldStartPlayMP4 return no!");
[self hwd_stopHWDMP4];
return ;
}
}
[self hwd_renderVideoRun];
}
- (NSString *)vap_contentForTag:(NSString *)tag resource:(QGVAPSourceInfo *)info {
if ([self.hwd_Delegate respondsToSelector:@selector(contentForVapTag:resource:)]) {
return [self.hwd_Delegate contentForVapTag:tag resource:info];
}
return nil;
}
- (void)vap_loadImageWithURL:(NSString *)urlStr context:(NSDictionary *)context completion:(VAPImageCompletionBlock)completionBlock {
if ([self.hwd_Delegate respondsToSelector:@selector(loadVapImageWithURL:context:completion:)]) {
[self.hwd_Delegate loadVapImageWithURL:urlStr context:context completion:completionBlock];
} else if (completionBlock) {
NSError *error = [NSError errorWithDomain:NSURLErrorDomain code:-1 userInfo:@{@"msg" : @"hwd_Delegate doesn't responds to selector loadVapImageWithURL:context:completion:"}];
completionBlock(nil, error, nil);
}
}
#pragma clang diagnostic pop
#pragma mark - setters&getters
- (BOOL)useVapMetalView {
return self.hwd_configManager.hasValidConfig;
}
- (QGMP4AnimatedImageFrame *)hwd_currentFrame {
return self.hwd_currentFrameInstance;
}
- (id<HWDMP4PlayDelegate>)hwd_Delegate {
return objc_getAssociatedObject(self, @"MP4PlayDelegate");
}
- (void)setHwd_Delegate:(id<HWDMP4PlayDelegate>)MP4PlayDelegate {
// Wrap the delegate in a weak proxy to avoid a retain cycle.
id weakDelegate = MP4PlayDelegate;
if (![MP4PlayDelegate isKindOfClass:[QGVAPWeakProxy class]]) {
weakDelegate = [QGVAPWeakProxy proxyWithTarget:MP4PlayDelegate];
}
return objc_setAssociatedObject(self, @"MP4PlayDelegate", weakDelegate, OBJC_ASSOCIATION_RETAIN);
}
//category methods
HWDSYNTH_DYNAMIC_PROPERTY_CTYPE(hwd_onPause, setHwd_onPause, BOOL)
HWDSYNTH_DYNAMIC_PROPERTY_CTYPE(hwd_onSeek, setHwd_onSeek, BOOL)
HWDSYNTH_DYNAMIC_PROPERTY_CTYPE(hwd_enterBackgroundOP, setHwd_enterBackgroundOP, HWDMP4EBOperationType)
HWDSYNTH_DYNAMIC_PROPERTY_CTYPE(hwd_renderByOpenGL, setHwd_renderByOpenGL, BOOL)
HWDSYNTH_DYNAMIC_PROPERTY_CTYPE(hwd_isFinish, setHwd_isFinish, BOOL)
HWDSYNTH_DYNAMIC_PROPERTY_CTYPE(hwd_fps, setHwd_fps, NSInteger)
HWDSYNTH_DYNAMIC_PROPERTY_CTYPE(hwd_blendMode, setHwd_blendMode, NSInteger)
HWDSYNTH_DYNAMIC_PROPERTY_CTYPE(hwd_repeatCount, setHwd_repeatCount, NSInteger)
HWDSYNTH_DYNAMIC_PROPERTY_OBJECT(hwd_currentFrameInstance, setHwd_currentFrameInstance, OBJC_ASSOCIATION_RETAIN)
HWDSYNTH_DYNAMIC_PROPERTY_OBJECT(hwd_MP4FilePath, setHwd_MP4FilePath, OBJC_ASSOCIATION_RETAIN)
HWDSYNTH_DYNAMIC_PROPERTY_OBJECT(hwd_decodeManager, setHwd_decodeManager, OBJC_ASSOCIATION_RETAIN)
HWDSYNTH_DYNAMIC_PROPERTY_OBJECT(hwd_fileInfo, setHwd_fileInfo, OBJC_ASSOCIATION_RETAIN)
HWDSYNTH_DYNAMIC_PROPERTY_OBJECT(hwd_decodeConfig, setHwd_decodeConfig, OBJC_ASSOCIATION_RETAIN)
HWDSYNTH_DYNAMIC_PROPERTY_OBJECT(hwd_callbackQueue, setHwd_callbackQueue, OBJC_ASSOCIATION_RETAIN)
HWDSYNTH_DYNAMIC_PROPERTY_OBJECT(hwd_openGLView, setHwd_openGLView, OBJC_ASSOCIATION_RETAIN)
HWDSYNTH_DYNAMIC_PROPERTY_OBJECT(hwd_metalView, setHwd_metalView, OBJC_ASSOCIATION_RETAIN)
HWDSYNTH_DYNAMIC_PROPERTY_OBJECT(vap_metalView, setVap_metalView, OBJC_ASSOCIATION_RETAIN)
HWDSYNTH_DYNAMIC_PROPERTY_OBJECT(hwd_attachmentsModel, setHwd_attachmentsModel, OBJC_ASSOCIATION_RETAIN)
HWDSYNTH_DYNAMIC_PROPERTY_OBJECT(hwd_configManager, setHwd_configManager, OBJC_ASSOCIATION_RETAIN)
HWDSYNTH_DYNAMIC_PROPERTY_OBJECT(vap_renderQueue, setVap_renderQueue, OBJC_ASSOCIATION_RETAIN)
HWDSYNTH_DYNAMIC_PROPERTY_CTYPE(vap_enableOldVersion, setVap_enableOldVersion, BOOL)
HWDSYNTH_DYNAMIC_PROPERTY_CTYPE(vap_isMute, setVap_isMute, BOOL)
@end
/// VAP gesture handling
@implementation UIView (VAPGesture)
/// Adds a gesture recognizer whose callback also reports whether the touch landed on a merged (attachment) source.
/// @param gestureRecognizer The gesture recognizer to attach.
/// @param handler Invoked when the gestureRecognizer fires, together with the hit source, if any.
/// @note [mp4View addVapGesture:[UILongPressGestureRecognizer new] callback:^(UIGestureRecognizer *gestureRecognizer, BOOL insideSource,QGVAPSourceDisplayItem *source) { NSLog(@"long press"); }];
- (void)addVapGesture:(UIGestureRecognizer *)gestureRecognizer callback:(VAPGestureEventBlock)handler {
if (!gestureRecognizer) {
VAP_Event(kQGVAPModuleCommon, @"addVapTapGesture with empty gestureRecognizer!");
return ;
}
if (!handler) {
VAP_Event(kQGVAPModuleCommon, @"addVapTapGesture with empty handler!");
return ;
}
__weak __typeof(self) weakSelf = self;
[gestureRecognizer addVapActionBlock:^(UITapGestureRecognizer *sender) {
QGVAPSourceDisplayItem *diplaySource = [weakSelf displayingSourceAt:[sender locationInView:weakSelf]];
if (diplaySource) {
handler(sender, YES, diplaySource);
} else {
handler(sender, NO, nil);
}
}];
[self addGestureRecognizer:gestureRecognizer];
}
/// Adds a tap gesture recognizer.
/// @param handler Invoked on tap, together with the hit source, if any.
- (void)addVapTapGesture:(VAPGestureEventBlock)handler {
UITapGestureRecognizer *tapGesture = [[UITapGestureRecognizer alloc] init];
[self addVapGesture:tapGesture callback:handler];
}
/// Returns the merged source being displayed at the given point, or nil if there is none.
/// @param point A point in the view's own coordinate space.
- (QGVAPSourceDisplayItem *)displayingSourceAt:(CGPoint)point {
NSArray<QGVAPMergedInfo *> *mergeInfos = self.hwd_configManager.model.mergedConfig[@(self.hwd_currentFrame.frameIndex)];
mergeInfos = [mergeInfos sortedArrayUsingComparator:^NSComparisonResult(QGVAPMergedInfo *obj1, QGVAPMergedInfo *obj2) {
return [@(obj2.renderIndex) compare:@(obj1.renderIndex)];
}];
CGSize renderingPixelSize = self.hwd_configManager.model.info.size;
if (renderingPixelSize.width <= 0 || renderingPixelSize.height <= 0) {
return nil;
}
__block QGVAPMergedInfo *targetMergeInfo = nil;
__block CGRect targetSourceFrame = CGRectZero;
CGSize viewSize = self.frame.size;
CGFloat xRatio = viewSize.width / renderingPixelSize.width;
CGFloat yRatio = viewSize.height / renderingPixelSize.height;
[mergeInfos enumerateObjectsUsingBlock:^(QGVAPMergedInfo * mergeInfo, NSUInteger idx, BOOL * _Nonnull stop) {
CGRect sourceRenderingRect = mergeInfo.renderRect;
CGRect sourceRenderingFrame = CGRectMake(CGRectGetMinX(sourceRenderingRect) * xRatio, CGRectGetMinY(sourceRenderingRect) * yRatio, CGRectGetWidth(sourceRenderingRect) * xRatio, CGRectGetHeight(sourceRenderingRect) * yRatio);
BOOL inside = CGRectContainsPoint(sourceRenderingFrame, point);
if (inside) {
targetMergeInfo = mergeInfo;
targetSourceFrame = sourceRenderingFrame;
*stop = YES;
}
}];
if (!targetMergeInfo) {
return nil;
}
QGVAPSourceDisplayItem *diplayItem = [QGVAPSourceDisplayItem new];
diplayItem.sourceInfo = targetMergeInfo.source;
diplayItem.frame = targetSourceFrame;
return diplayItem;
}
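// Worked example (added for documentation only): for a 375-pt-wide view rendering a 750-px-wide video,
// xRatio is 0.5, so a merge rect at x = 100 px maps to x = 50 pt when hit-testing the tap point.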
@end
@implementation UIView (VAPMask)
- (void)setVap_maskInfo:(QGVAPMaskInfo *)vap_maskInfo {
objc_setAssociatedObject(self, @"VAPMaskInfo", vap_maskInfo, OBJC_ASSOCIATION_RETAIN);
[self.vap_metalView setMaskInfo:vap_maskInfo];
}
- (QGVAPMaskInfo *)vap_maskInfo {
return objc_getAssociatedObject(self, @"VAPMaskInfo");
}
@end
@implementation UIView (MP4HWDDeprecated)
/**
 See playHWDMP4:blendMode:repeatCount:delegate:.
 @param fps Overrides the fps carried by the mp4; out-of-range values fall back to the file's own fps or the default.
 Defaults: alpha data on the left, no repeat.
 */
- (void)playHWDMP4:(NSString *)filePath fps:(NSInteger)fps delegate:(id<HWDMP4PlayDelegate>)delegate {
[self p_playHWDMP4:filePath fps:fps blendMode:QGHWDTextureBlendMode_AlphaLeft repeatCount:0 delegate:delegate];
}
/**
 See playHWDMP4:blendMode:repeatCount:delegate:.
 @param fps Overrides the fps carried by the mp4; out-of-range values fall back to the file's own fps or the default.
 Default: alpha data on the left.
 */
- (void)playHWDMP4:(NSString *)filePath fps:(NSInteger)fps repeatCount:(NSInteger)repeatCount delegate:(id<HWDMP4PlayDelegate>)delegate {
[self p_playHWDMP4:filePath fps:fps blendMode:QGHWDTextureBlendMode_AlphaLeft repeatCount:repeatCount delegate:delegate];
}
/**
playHWDMP4:blendMode:repeatCount:delegate:
*/
- (void)playHWDMP4:(NSString *)filePath blendMode:(QGHWDTextureBlendMode)mode delegate:(id<HWDMP4PlayDelegate>)delegate {
[self p_playHWDMP4:filePath fps:0 blendMode:mode repeatCount:0 delegate:delegate];
}
/**
 Plays an mp4 (H.264) animation using GPU hardware decoding.
 @param filePath Path to the mp4 file.
 @param mode Position of the alpha data; defaults to QGHWDTextureBlendMode_AlphaLeft.
 @param repeatCount The animation plays repeatCount+1 times in total; pass -1 to loop forever.
 @param delegate Delegate for playback callbacks (invoked on a background thread).
 @note Call on the main thread.
 */
- (void)playHWDMP4:(NSString *)filePath blendMode:(QGHWDTextureBlendMode)mode repeatCount:(NSInteger)repeatCount delegate:(id<HWDMP4PlayDelegate>)delegate {
[self p_playHWDMP4:filePath fps:0 blendMode:mode repeatCount:repeatCount delegate:delegate];
}
/**
 See playHWDMP4:blendMode:repeatCount:delegate:.
 @param fps Overrides the fps carried by the mp4; out-of-range values fall back to the file's own fps or the default.
 */
- (void)playHWDMP4:(NSString *)filePath fps:(NSInteger)fps blendMode:(QGHWDTextureBlendMode)mode delegate:(id<HWDMP4PlayDelegate>)delegate {
[self p_playHWDMP4:filePath fps:fps blendMode:mode repeatCount:0 delegate:delegate];
}
/**
 See playHWDMP4:blendMode:repeatCount:delegate:.
 @param fps Overrides the fps carried by the mp4; out-of-range values fall back to the file's own fps or the default.
 */
- (void)playHWDMP4:(NSString *)filePath fps:(NSInteger)fps blendMode:(QGHWDTextureBlendMode)mode repeatCount:(NSInteger)repeatCount delegate:(id<HWDMP4PlayDelegate>)delegate {
[self p_playHWDMP4:filePath fps:fps blendMode:mode repeatCount:repeatCount delegate:delegate];
}
@end

View File

@@ -0,0 +1,22 @@
// NSArray+VAPUtil.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <UIKit/UIKit.h>
@interface NSArray (VAPUtil)
- (CGRect)hwd_rectValue;
@end
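/*
 Illustrative usage sketch (added for documentation, not part of the original source):

     NSArray *box = @[@10, @"20", @300, @150];
     CGRect rect = [box hwd_rectValue]; // {10, 20, 300, 150}; malformed arrays return CGRectZero
 */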

View File

@@ -0,0 +1,37 @@
// NSArray+VAPUtil.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "NSArray+VAPUtil.h"
@implementation NSArray (VAPUtil)
- (CGRect)hwd_rectValue {
if (self.count < 4) {
return CGRectZero;
}
for (int i = 0; i < self.count; i++) {
id value = self[i];
if (i >= 4) {
break ;
}
if (![value isKindOfClass:[NSString class]] && ![value isKindOfClass:[NSNumber class]]) {
return CGRectZero;
}
}
return CGRectMake([self[0] floatValue], [self[1] floatValue], [self[2] floatValue], [self[3] floatValue]);
}
@end

View File

@@ -0,0 +1,28 @@
// NSDictionary+VAPUtil.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <UIKit/UIKit.h>
@interface NSDictionary (VAPUtil)
- (CGFloat)hwd_floatValue:(NSString *)key;
- (NSInteger)hwd_integerValue:(NSString *)key;
- (NSString *)hwd_stringValue:(NSString *)key;
- (NSDictionary *)hwd_dicValue:(NSString *)key;
- (NSArray *)hwd_arrValue:(NSString *)key;
@end

View File

@@ -0,0 +1,71 @@
// NSDictionary+VAPUtil.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "NSDictionary+VAPUtil.h"
#define HWD_RETURN_VALUE(_type_, _default_) \
if (!key) return _default_; \
id value = self[key]; \
if (!value || value == [NSNull null]) return _default_; \
if ([value isKindOfClass:[NSNumber class]]) return ((NSNumber *)value)._type_; \
if ([value isKindOfClass:[NSString class]]) return ((NSString *)value)._type_; \
return _default_;
@implementation NSDictionary (VAPUtil)
- (CGFloat)hwd_floatValue:(NSString *)key {
HWD_RETURN_VALUE(floatValue, 0.0);
}
- (NSInteger)hwd_integerValue:(NSString *)key {
HWD_RETURN_VALUE(integerValue, 0);
}
- (NSString *)hwd_stringValue:(NSString *)key {
NSString *defaultValue = @"";
if (!key) return defaultValue;
id value = self[key];
if (!value || value == [NSNull null]) return defaultValue;
if ([value isKindOfClass:[NSString class]]) return value;
if ([value isKindOfClass:[NSNumber class]]) return ((NSNumber *)value).description;
return defaultValue;
}
- (NSDictionary *)hwd_dicValue:(NSString *)key {
if (!key) {
return nil;
}
id value = self[key];
if (![value isKindOfClass:[NSDictionary class]]) {
return nil;
}
return value;
}
- (NSArray *)hwd_arrValue:(NSString *)key {
if (!key) {
return nil;
}
id value = self[key];
if (![value isKindOfClass:[NSArray class]]) {
return nil;
}
return value;
}
@end
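/*
 Illustrative usage sketch (added for documentation, not part of the original source).
 The accessors tolerate numbers stored as strings as well as missing keys:

     NSDictionary *json = @{@"w": @750, @"h": @"1334", @"src": @[@1, @2]};
     CGFloat w = [json hwd_floatValue:@"w"];          // 750
     NSInteger h = [json hwd_integerValue:@"h"];      // 1334
     NSArray *src = [json hwd_arrValue:@"src"];       // @[@1, @2]
     NSString *missing = [json hwd_stringValue:@"x"]; // @""
 */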

View File

@@ -0,0 +1,54 @@
// NSNotificationCenter+VAPThreadSafe.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <Foundation/Foundation.h>
@interface NSNotificationCenter (VAPThreadSafe)
/**
Guarantees that delivering the notification and removing the observer are thread safe,
so the observer does not need to be removed manually.
(On systems below iOS 9, the usual use of addObserver:selector:name:object: is not safe.)
@note Intended to replace the pattern of removing the observer in dealloc.
@param observer Object registering as an observer. This value must not be nil.
@param aSelector Selector that specifies the message the receiver sends observer to notify it of the notification posting. The method specified by aSelector must have one and only one argument (an instance of NSNotification).
@param aName The name of the notification for which to register the observer; that is, only notifications with this name are delivered to the observer.
If you pass nil, the notification center doesn't use a notification's name to decide whether to deliver it to the observer.
@param anObject The object whose notifications the observer wants to receive; that is, only notifications sent by this sender are delivered to the observer.
If you pass nil, the notification center doesn't use a notification's sender to decide whether to deliver it to the observer.
*/
- (void)hwd_addSafeObserver:(id)observer selector:(SEL)aSelector name:(NSNotificationName)aName object:(id)anObject;
- (void)hwd_addWeakObserver:( id)weakObserver name:(NSNotificationName)aName usingBlock:(void (^)(NSNotification *note,id observer))block;
/**
1. Sets the queue on which the notification callback is delivered.
2. Then calls - (void)addSafeObserver:(id)observer selector:(SEL)aSelector name:(NSNotificationName)aName object:(id)anObject
@param observer Object registering as an observer. This value must not be nil.
@param aSelector Selector that specifies the message the receiver sends observer to notify it of the notification posting. The method specified by aSelector must have one and only one argument (an instance of NSNotification).
@param aName The name of the notification for which to register the observer; that is, only notifications with this name are delivered to the observer.
If you pass nil, the notification center doesn't use a notification's name to decide whether to deliver it to the observer.
@param anObject The object whose notifications the observer wants to receive; that is, only notifications sent by this sender are delivered to the observer.
If you pass nil, the notification center doesn't use a notification's sender to decide whether to deliver it to the observer.
@param queue The operation queue to which the callback operation should be added.
If you pass nil, the block is run asynchronously on a serial queue associated with the notification name.
*/
- (void)hwd_addSafeObserver:(id)observer selector:(SEL)aSelector name:(NSNotificationName)aName object:(id)anObject queue:(NSOperationQueue *)queue;
@end
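/*
 Illustrative usage sketch (added for documentation, not part of the original source).
 The observer is removed automatically once it deallocates, so no removal in dealloc is needed;
 the selector name below is an assumed example:

     [[NSNotificationCenter defaultCenter] hwd_addSafeObserver:self
                                                      selector:@selector(onEnterBackground:)
                                                          name:UIApplicationDidEnterBackgroundNotification
                                                        object:nil];
 */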

View File

@@ -0,0 +1,89 @@
// NSNotificationCenter+ThreadSafe.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "NSNotificationCenter+VAPThreadSafe.h"
#import "QGVAPSafeMutableDictionary.h"
#import <UIKit/UIKit.h>
#import <objc/runtime.h>
@interface NSObject (SafeNotification)
@property (nonatomic, strong) NSOperationQueue *notificationOperationQueue;
@end
@implementation NSObject (SafeNotification)
- (NSOperationQueue *)notificationOperationQueue {
@synchronized (self) {
NSOperationQueue *queue = objc_getAssociatedObject(self, @"notificationOperationQueue");
if (!queue) {
queue = [[NSOperationQueue alloc] init];
queue.maxConcurrentOperationCount = 1;
self.notificationOperationQueue = queue;
}
return queue;
}
}
- (void)setNotificationOperationQueue:(NSOperationQueue *)notificationOperationQueue {
@synchronized (self) {
objc_setAssociatedObject(self, @"notificationOperationQueue", notificationOperationQueue, OBJC_ASSOCIATION_RETAIN);
}
}
@end
@implementation NSNotificationCenter (VAPThreadSafe)
- (void)hwd_addSafeObserver:(id)observer selector:(SEL)aSelector name:(NSNotificationName)aName object:(id)anObject {
double sysVersion = [[[UIDevice currentDevice] systemVersion] doubleValue];
if (sysVersion >= 9.0) {
return [self addObserver:observer selector:aSelector name:aName object:anObject];
}
__weak typeof(observer) weakObserver = observer;
__block NSObject *blockObserver = [self addObserverForName:aName object:anObject queue:aName.notificationOperationQueue usingBlock:^(NSNotification * _Nonnull note) {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Warc-performSelector-leaks"
__strong __typeof__(weakObserver) strongObserver = weakObserver;
[strongObserver performSelector:aSelector withObject:note];
#pragma clang diagnostic pop
if (!weakObserver) {
[[NSNotificationCenter defaultCenter] removeObserver:blockObserver];
blockObserver = nil;
}
}];
}
- (void)hwd_addWeakObserver:( id)Observer name:(NSNotificationName)aName usingBlock:(void (^)(NSNotification *note,id observer))block{
__weak id weakObserver=Observer;
__block NSObject *blockObserver = [self addObserverForName:aName object:nil queue:nil usingBlock:^(NSNotification * _Nonnull note) {
__strong id strongObserver = weakObserver;
if(!weakObserver ){
[[NSNotificationCenter defaultCenter] removeObserver:blockObserver];
blockObserver = nil;
}else{
block(note,strongObserver);
}
}];
}
- (void)hwd_addSafeObserver:(id)observer selector:(SEL)aSelector name:(NSNotificationName)aName object:(id)anObject queue:(NSOperationQueue *)queue {
aName.notificationOperationQueue = queue;
[self hwd_addSafeObserver:observer selector:aSelector name:aName object:anObject];
}
@end

View File

@@ -0,0 +1,26 @@
// UIColor+VAPUtil.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <UIKit/UIKit.h>
NS_ASSUME_NONNULL_BEGIN
@interface UIColor (VAPUtil)
+ (instancetype)hwd_colorWithHexString:(NSString *)hexStr;
@end
NS_ASSUME_NONNULL_END

View File

@@ -0,0 +1,66 @@
// UIColor+VAPUtil.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "UIColor+VAPUtil.h"
@implementation UIColor (VAPUtil)
static inline NSUInteger hwd_hexStrToInt(NSString *str) {
uint32_t result = 0;
sscanf([str UTF8String], "%X", &result);
return result;
}
static BOOL hwd_hexStrToRGBA(NSString *str,
CGFloat *r, CGFloat *g, CGFloat *b, CGFloat *a) {
NSCharacterSet *cset = [NSCharacterSet whitespaceAndNewlineCharacterSet];
str = [[str stringByTrimmingCharactersInSet:cset] uppercaseString];
if ([str hasPrefix:@"#"]) {
str = [str substringFromIndex:1];
} else if ([str hasPrefix:@"0X"]) {
str = [str substringFromIndex:2];
}
NSUInteger length = [str length];
//RGB||RGBA||RRGGBB||RRGGBBAA
if (length != 3 && length != 4 && length != 6 && length != 8) {
return NO;
}
if (length < 5) {
*r = hwd_hexStrToInt([str substringWithRange:NSMakeRange(0, 1)]) / 255.0f;
*g = hwd_hexStrToInt([str substringWithRange:NSMakeRange(1, 1)]) / 255.0f;
*b = hwd_hexStrToInt([str substringWithRange:NSMakeRange(2, 1)]) / 255.0f;
if (length == 4) *a = hwd_hexStrToInt([str substringWithRange:NSMakeRange(3, 1)]) / 255.0f;
else *a = 1;
} else {
*r = hwd_hexStrToInt([str substringWithRange:NSMakeRange(0, 2)]) / 255.0f;
*g = hwd_hexStrToInt([str substringWithRange:NSMakeRange(2, 2)]) / 255.0f;
*b = hwd_hexStrToInt([str substringWithRange:NSMakeRange(4, 2)]) / 255.0f;
if (length == 8) *a = hwd_hexStrToInt([str substringWithRange:NSMakeRange(6, 2)]) / 255.0f;
else *a = 1;
}
return YES;
}
+ (instancetype)hwd_colorWithHexString:(NSString *)hexStr {
CGFloat r, g, b, a;
if (hwd_hexStrToRGBA(hexStr, &r, &g, &b, &a)) {
return [UIColor colorWithRed:r green:g blue:b alpha:a];
}
return nil;
}
@end
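/*
 A minimal usage sketch (illustrative only). The parser above accepts "RGB", "RGBA", "RRGGBB"
 and "RRGGBBAA" strings, with an optional "#" or "0X"/"0x" prefix:

     UIColor *opaqueRed    = [UIColor hwd_colorWithHexString:@"#FF0000"];
     UIColor *halfAlphaRed = [UIColor hwd_colorWithHexString:@"0xFF000080"]; // alpha = 0x80 / 255
     UIColor *invalid      = [UIColor hwd_colorWithHexString:@"not-a-color"]; // returns nil
 */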

View File

@@ -0,0 +1,45 @@
// UIDevice+VAPUtil.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <UIKit/UIKit.h>
#import <Metal/Metal.h>
#ifndef kHWDSystemVersion
#define kHWDSystemVersion [UIDevice systemVersionNum]
#endif
#ifndef kHWDiOS9Later
#define kHWDiOS9Later (kHWDSystemVersion >= 9)
#endif
#define kDefaultMTLResourceOption getDefaultMTLResourceOption()
#ifdef __cplusplus
extern "C" {
#endif
MTLResourceOptions getDefaultMTLResourceOption(void);
#ifdef __cplusplus
}
#endif
NS_ASSUME_NONNULL_BEGIN
@interface UIDevice (VAPUtil)
+ (double)systemVersionNum;
@end
NS_ASSUME_NONNULL_END
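/*
 A minimal usage sketch (illustrative only; the device and buffer below are hypothetical):
 kDefaultMTLResourceOption resolves to a storage mode that is valid on the running OS, so it
 can be passed directly to Metal resource creation.

     id<MTLDevice> device = MTLCreateSystemDefaultDevice();
     id<MTLBuffer> vertexBuffer = [device newBufferWithLength:16 * sizeof(float)
                                                      options:kDefaultMTLResourceOption];
 */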

View File

@@ -0,0 +1,38 @@
// UIDevice+VAPUtil.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "UIDevice+VAPUtil.h"
MTLResourceOptions getDefaultMTLResourceOption() {
if (@available(iOS 9.0, *)) {
return MTLResourceStorageModeShared;
} else {
return MTLResourceCPUCacheModeDefaultCache;
}
}
@implementation UIDevice (VAPUtil)
+ (double)systemVersionNum {
static double version;
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
version = [UIDevice currentDevice].systemVersion.doubleValue;
});
return version;
}
@end

View File

@@ -0,0 +1,26 @@
// UIGestureRecognizer+VAPUtil.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <UIKit/UIKit.h>
@interface UIGestureRecognizer (VAPUtil)
- (instancetype)initWithVapActionBlock:(void (^)(id sender))block;
- (void)addVapActionBlock:(void (^)(id sender))block;
- (void)removeAllVapActionBlocks;
@end
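/*
 A minimal usage sketch (illustrative only; effectView is a hypothetical view):

     UITapGestureRecognizer *tap =
         [[UITapGestureRecognizer alloc] initWithVapActionBlock:^(id sender) {
             NSLog(@"vap view tapped");
         }];
     [effectView addGestureRecognizer:tap];

     // Later, when the callbacks are no longer needed:
     [tap removeAllVapActionBlocks];
 */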

View File

@@ -0,0 +1,84 @@
// UIGestureRecognizer+VAPUtil.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "UIGestureRecognizer+VAPUtil.h"
#import <objc/runtime.h>
#import "QGVAPSafeMutableArray.h"
static const int vap_block_key;
@interface _VAPUIGestureRecognizerBlockTarget : NSObject
@property (nonatomic, copy) void (^block)(id sender);
- (id)initWithBlock:(void (^)(id sender))block;
- (void)invoke:(id)sender;
@end
@implementation _VAPUIGestureRecognizerBlockTarget
- (id)initWithBlock:(void (^)(id sender))block{
if (self = [super init]) {
_block = [block copy];
}
return self;
}
- (void)invoke:(id)sender {
if (_block) _block(sender);
}
@end
@implementation UIGestureRecognizer (VAPUtil)
- (instancetype)initWithVapActionBlock:(void (^)(id sender))block {
if (self = [self init]) {
[self addVapActionBlock:block];
}
return self;
}
- (void)addVapActionBlock:(void (^)(id sender))block {
_VAPUIGestureRecognizerBlockTarget *target = [[_VAPUIGestureRecognizerBlockTarget alloc] initWithBlock:block];
[self addTarget:target action:@selector(invoke:)];
NSMutableArray *targets = [self _vap_allUIGestureRecognizerBlockTargets];
[targets addObject:target];
}
- (NSMutableArray *)_vap_allUIGestureRecognizerBlockTargets {
NSMutableArray *targets = objc_getAssociatedObject(self, &vap_block_key);
if (!targets) {
targets = [QGVAPSafeMutableArray new];
objc_setAssociatedObject(self, &vap_block_key, targets, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}
return targets;
}
- (void)removeAllVapActionBlocks {
NSMutableArray *targets = [self _vap_allUIGestureRecognizerBlockTargets];
[targets enumerateObjectsUsingBlock:^(id target, NSUInteger idx, BOOL *stop) {
[self removeTarget:target action:@selector(invoke:)];
}];
[targets removeAllObjects];
}
@end

View File

@@ -0,0 +1,25 @@
// UIView+MP4HWDecode.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <UIKit/UIKit.h>
#import "UIView+VAP.h"
NS_ASSUME_NONNULL_BEGIN
@interface UIView (MP4HWDecode)
@end
NS_ASSUME_NONNULL_END

View File

@@ -0,0 +1,20 @@
// UIView+MP4HWDecode.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "UIView+MP4HWDecode.h"
@implementation UIView (MP4HWDecode)
@end

View File

@@ -0,0 +1,65 @@
// QGVAPLogger.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <Foundation/Foundation.h>
#define kQGVAPModuleCommon @"kQGVAPModuleCommon"
NS_ASSUME_NONNULL_BEGIN
#define VAP_Logger(level, module, format, ...) if(external_VAP_Logger)external_VAP_Logger(level, __FILE__, __LINE__, __FUNCTION__, module, format, ##__VA_ARGS__); else internal_VAP_Logger_handler(level, __FILE__, __LINE__, __FUNCTION__, module, format, ##__VA_ARGS__);
#define VAP_Error(module, format, ...) VAP_Logger(VAPLogLevelError, module, format, ##__VA_ARGS__)
#define VAP_Event(module, format, ...) VAP_Logger(VAPLogLevelEvent, module, format, ##__VA_ARGS__)
#define VAP_Warn(module, format, ...) VAP_Logger(VAPLogLevelWarn, module, format, ##__VA_ARGS__)
#define VAP_Info(module, format, ...) VAP_Logger(VAPLogLevelInfo, module, format, ##__VA_ARGS__)
#define VAP_Debug(module, format, ...) VAP_Logger(VAPLogLevelDebug, module, format, ##__VA_ARGS__)
typedef enum {
VAPLogLevelAll = 0,
VAPLogLevelDebug, // Detailed information on the flow through the system.
VAPLogLevelInfo, // Interesting runtime events (startup/shutdown), should be conservative and keep to a minimum.
VAPLogLevelEvent,
VAPLogLevelWarn, // Other runtime situations that are undesirable or unexpected, but not necessarily "wrong".
VAPLogLevelError, // Other runtime errors or unexpected conditions.
VAPLogLevelFatal, // Severe errors that cause premature termination.
VAPLogLevelNone, // Special level used to disable all log messages.
} VAPLogLevel;
typedef VAPLogLevel HWDLogLevel;
//void qg_VAP_Logger(VAPLogLevel level, const char* file, int line, const char* func, NSString *MODULE, NSString *format, ...);
typedef void (*QGVAPLoggerFunc)(VAPLogLevel, const char*, int, const char*, NSString *, NSString *, ...);
#if defined __cplusplus
extern "C" {
#endif
extern QGVAPLoggerFunc external_VAP_Logger;
void internal_VAP_Logger_handler(VAPLogLevel level, const char* file, int line, const char* func, NSString *module, NSString *format, ...);
#if defined __cplusplus
};
#endif
@interface QGVAPLogger : NSObject
+ (void)registerExternalLog:(QGVAPLoggerFunc)externalLog;
+ (void)log:(VAPLogLevel)level file:(NSString *)file line:(NSInteger)line func:(NSString *)func module:(NSString *)module message:(NSString *)message;
@end
NS_ASSUME_NONNULL_END
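/*
 A minimal usage sketch (illustrative only; MyAppVAPLog is a hypothetical function in the host
 app). Registering an external logger redirects every VAP_xxx macro away from the built-in
 NSLog handler:

     static void MyAppVAPLog(VAPLogLevel level, const char *file, int line,
                             const char *func, NSString *module, NSString *format, ...) {
         va_list args;
         va_start(args, format);
         NSString *message = [[NSString alloc] initWithFormat:format arguments:args];
         va_end(args);
         // forward `message` to the app's own logging pipeline here
     }

     [QGVAPLogger registerExternalLog:MyAppVAPLog];
     VAP_Info(kQGVAPModuleCommon, @"decode started, fps:%@", @(25));
 */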

View File

@@ -0,0 +1,53 @@
// QGVAPLogger.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGVAPLogger.h"
QGVAPLoggerFunc external_VAP_Logger;
@implementation QGVAPLogger
#pragma mark - External log
void internal_VAP_Logger_handler(VAPLogLevel level, const char* file, int line, const char* func, NSString *module, NSString *format, ...) {
#ifdef DEBUG
va_list arg_list;
va_start (arg_list, format);
NSString *formattedString = [[NSString alloc] initWithFormat:format arguments:arg_list];
va_end(arg_list);
file = [NSString stringWithUTF8String:file].lastPathComponent.UTF8String;
NSLog(@"<%@> %s(%@):%s [%@] - %@",@(level), file, @(line), func, module, formattedString);
#endif
}
+ (void)registerExternalLog:(QGVAPLoggerFunc)externalLog {
external_VAP_Logger = externalLog;
}
+ (void)log:(VAPLogLevel)level file:(NSString *)file line:(NSInteger)line func:(NSString *)func module:(NSString *)module message:(NSString *)message {
if ([message containsString:@"%"]) {
// strip "%" so the message cannot be misinterpreted as a format string and crash a downstream formatter
message = [message stringByReplacingOccurrencesOfString:@"%" withString:@""];
}
if (external_VAP_Logger) {
external_VAP_Logger(level, file.UTF8String, (int)line, func.UTF8String, module, message);
} else {
internal_VAP_Logger_handler(level, file.UTF8String, (int)line, func.UTF8String, module, message);
}
}
@end

View File

@@ -0,0 +1,24 @@
// QGVAPMetalShaderFunctionLoader.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <Foundation/Foundation.h>
#import <MetalKit/MetalKit.h>
@interface QGVAPMetalShaderFunctionLoader : NSObject
- (instancetype)initWithDevice:(id<MTLDevice>)device;
- (id<MTLFunction>)loadFunctionWithName:(NSString *)funcName;
@end
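/*
 A minimal usage sketch (illustrative only; the device is hypothetical and the function name is
 one of the constants exported by QGVAPMetalUtil.h):

     id<MTLDevice> device = MTLCreateSystemDefaultDevice();
     QGVAPMetalShaderFunctionLoader *loader =
         [[QGVAPMetalShaderFunctionLoader alloc] initWithDevice:device];
     id<MTLFunction> vertexFunction = [loader loadFunctionWithName:kVAPVertexFunctionName];
 */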

View File

@@ -0,0 +1,92 @@
// QGVAPMetalShaderFunctionLoader.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGVAPMetalShaderFunctionLoader.h"
#import "QGHWDMetalShaderSourceDefine.h"
#import "QGHWDShaderTypes.h"
#import "QGVAPLogger.h"
@interface QGVAPMetalShaderFunctionLoader () {
BOOL _alreadyLoadDefaultLibrary;
BOOL _alreadyLoadHWDLibrary;
}
@property (nonatomic, strong) id<MTLDevice> device;
@property (nonatomic, strong) id<MTLLibrary> defaultLibrary;
@property (nonatomic, strong) id<MTLLibrary> hwdLibrary;
@end
@implementation QGVAPMetalShaderFunctionLoader
- (instancetype)initWithDevice:(id<MTLDevice>)device {
if (self = [super init]) {
_device = device;
}
return self;
}
- (id<MTLFunction>)loadFunctionWithName:(NSString *)funcName {
id<MTLFunction> program = nil;
[self loadDefaultLibraryIfNeed];
program = [self.defaultLibrary newFunctionWithName:funcName];
//if there is no default metallib, or it does not contain the requested function, fall back to the library compiled from source
if (!program) {
[self loadHWDLibraryIfNeed];
program = [self.hwdLibrary newFunctionWithName:funcName];
}
return program;
}
- (void)loadDefaultLibraryIfNeed {
if (self.defaultLibrary || _alreadyLoadDefaultLibrary) {
return ;
}
NSBundle *bundle = [NSBundle bundleForClass:self.class];
NSString *metalLibPath = [bundle pathForResource:@"default" ofType:@"metallib"];
if (metalLibPath.length == 0) {
return ;
}
NSError *error = nil;
id<MTLLibrary> defaultLibrary = [self.device newLibraryWithFile:metalLibPath error:&error];
if (!defaultLibrary || error) {
VAP_Error(kQGVAPModuleCommon, @"loadDefaultLibrary error!:%@", error);
return ;
}
self.defaultLibrary = defaultLibrary;
_alreadyLoadDefaultLibrary = YES;
}
- (void)loadHWDLibraryIfNeed {
if (self.hwdLibrary || _alreadyLoadHWDLibrary) {
return ;
}
NSError *error = nil;
NSString *sourceString = [NSString stringWithFormat:@"%@%@%@", kQGHWDMetalShaderSourceImports, kQGHWDMetalShaderTypeDefines, kQGHWDMetalShaderSourceString];
id<MTLLibrary> hwdLibrary = [self.device newLibraryWithSource:sourceString options:nil error:&error];
if (!hwdLibrary || error) {
VAP_Error(kQGVAPModuleCommon, @"loadHWDLibrary error!:%@", error);
return ;
}
self.hwdLibrary = hwdLibrary;
_alreadyLoadHWDLibrary = YES;
}
@end

View File

@@ -0,0 +1,46 @@
// QGVAPMetalUtil.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <UIKit/UIKit.h>
UIKIT_EXTERN NSString *const kHWDAttachmentVertexFunctionName;
UIKIT_EXTERN NSString *const kVAPAttachmentVertexFunctionName;
UIKIT_EXTERN NSString *const kVAPAttachmentFragmentFunctionName;
UIKIT_EXTERN NSString *const kVAPVertexFunctionName;
UIKIT_EXTERN NSString *const kVAPYUVFragmentFunctionName;
UIKIT_EXTERN NSString *const kVAPMaskFragmentFunctionName;
UIKIT_EXTERN NSString *const kVAPMaskBlurFragmentFunctionName;
extern float const kVAPMTLVerticesIdentity[16];
extern float const kVAPMTLTextureCoordinatesIdentity[8];
extern float const kVAPMTLTextureCoordinatesFor90[8];
#ifdef __cplusplus
extern "C" {
#endif
void genMTLVertices(CGRect rect, CGSize containerSize, float vertices[16], BOOL reverse); //generate vertex coordinates
void genMTLTextureCoordinates(CGRect rect, CGSize containerSize, float coordinates[8], BOOL reverse, NSInteger degree); //generate texture coordinates
void replaceArrayElements(float arr0[], float arr1[], int size); //arr0[0...(size-1)] <- arr1[0...(size-1)]
CGSize vapSourceSizeForCenterFull(CGSize sourceSize, CGSize renderSize);
CGRect vapRectForCenterFull(CGSize sourceSize, CGSize renderSize);
CGRect vapRectWithContentModeInsideRect(CGRect boundingRect, CGSize aspectRatio, UIViewContentMode contentMode);
#ifdef __cplusplus
}
#endif
@interface QGVAPMetalUtil : NSObject
@end
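/*
 A minimal usage sketch (illustrative only; the 750x1334 render size is hypothetical): build
 the quad vertices and texture coordinates for a full-screen draw.

     float vertices[16];
     float coordinates[8];
     CGSize renderSize = CGSizeMake(750, 1334);
     CGRect fullRect = CGRectMake(0, 0, renderSize.width, renderSize.height);
     genMTLVertices(fullRect, renderSize, vertices, NO);
     genMTLTextureCoordinates(fullRect, renderSize, coordinates, NO, 0);
 */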

View File

@@ -0,0 +1,190 @@
// QGVAPMetalUtil.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGVAPMetalUtil.h"
#import <AVFoundation/AVFoundation.h>
#import "QGVAPLogger.h"
#import <UIKit/UIKit.h>
NSString *const kVAPAttachmentVertexFunctionName = @"vapAttachment_vertexShader";
NSString *const kVAPAttachmentFragmentFunctionName = @"vapAttachment_FragmentShader";
NSString *const kVAPVertexFunctionName = @"vap_vertexShader";
NSString *const kVAPYUVFragmentFunctionName = @"vap_yuvFragmentShader";
NSString *const kVAPMaskFragmentFunctionName = @"vap_maskFragmentShader";
NSString *const kVAPMaskBlurFragmentFunctionName = @"vap_maskBlurFragmentShader";
float const kVAPMTLVerticesIdentity[16] = {-1.0, -1.0, 0.0, 1.0, -1.0, 1.0, 0.0, 1.0, 1.0, -1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0};
float const kVAPMTLTextureCoordinatesIdentity[8] = {0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0};
float const kVAPMTLTextureCoordinatesFor90[8] = {0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0};
void replaceArrayElements(float arr0[], float arr1[], int size) {
if ((arr0 == NULL || arr1 == NULL) && size > 0) {
assert(0);
}
if (size < 0) {
assert(0);
}
for (int i = 0; i < size; i++) {
arr0[i] = arr1[i];
}
}
//builds the 4-vertex quad (x, y, z, w per vertex) for rect inside containerSize, in normalized device coordinates
void genMTLVertices(CGRect rect, CGSize containerSize, float vertices[16], BOOL reverse) {
if (vertices == NULL) {
VAP_Error(kQGVAPModuleCommon, @"generateMTLVertices params illegal.");
assert(0);
return ;
}
if (containerSize.width <= 0 || containerSize.height <= 0) {
VAP_Error(kQGVAPModuleCommon, @"generateMTLVertices params containerSize illegal.");
assert(0);
return ;
}
float originX, originY, width, height;
originX = -1+2*rect.origin.x/containerSize.width;
originY = 1-2*rect.origin.y/containerSize.height;
width = 2*rect.size.width/containerSize.width;
height = 2*rect.size.height/containerSize.height;
if (reverse) {
float tempVertices[] = {originX, originY-height, 0.0, 1.0, originX, originY, 0.0, 1.0, originX+width, originY-height, 0.0, 1.0, originX+width, originY, 0.0, 1.0};
replaceArrayElements(vertices, tempVertices, 16);
return ;
}
float tempVertices[] = {originX, originY, 0.0, 1.0, originX, originY-height, 0.0, 1.0, originX+width, originY, 0.0, 1.0 , originX+width, originY-height, 0.0, 1.0};
replaceArrayElements(vertices, tempVertices, 16);
}
//builds the texture coordinates for rect inside containerSize
void genMTLTextureCoordinates(CGRect rect, CGSize containerSize, float coordinates[8], BOOL reverse, NSInteger degree) {
//NOTE: the degree parameter is not applied in this implementation; kVAPMTLTextureCoordinatesFor90 provides the 90-degree mapping
if (coordinates == NULL) {
VAP_Error(kQGVAPModuleCommon, @"generateMTLTextureCoordinates params coordinates illegal.");
assert(0);
return ;
}
if (containerSize.width <= 0 || containerSize.height <= 0) {
VAP_Error(kQGVAPModuleCommon, @"generateMTLTextureCoordinates params containerSize illegal.");
assert(0);
return ;
}
float originX, originY, width, height;
originX = rect.origin.x/containerSize.width;
originY = rect.origin.y/containerSize.height;
width = rect.size.width/containerSize.width;
height = rect.size.height/containerSize.height;
if (reverse) {
float tempCoordintes[] = {originX, originY, originX, originY+height , originX+width, originY,originX+width, originY+height};
replaceArrayElements(coordinates, tempCoordintes, 8);
return ;
}
float tempCoordintes[] = {originX, originY+height, originX, originY, originX+width, originY+height, originX+width, originY};
replaceArrayElements(coordinates, tempCoordintes, 8);
}
CGSize vapSourceSizeForCenterFull(CGSize sourceSize, CGSize renderSize) {
//if the source already covers the render area in both dimensions, keep its size
if (sourceSize.width >= renderSize.width && sourceSize.height >= renderSize.height) {
return sourceSize;
}
CGRect rectForAspectFill = vapRectWithContentModeInsideRect(CGRectMake(0, 0, renderSize.width, renderSize.height), sourceSize, UIViewContentModeScaleAspectFill);
return rectForAspectFill.size;
}
CGRect vapRectForCenterFull(CGSize sourceSize, CGSize renderSize) {
//if the source already covers the render area, center-crop it to the render size
if (sourceSize.width >= renderSize.width && sourceSize.height >= renderSize.height) {
return CGRectMake((sourceSize.width-renderSize.width)/2.0, (sourceSize.height-renderSize.height)/2.0, renderSize.width, renderSize.height);
}
CGRect rectForAspectFill = vapRectWithContentModeInsideRect(CGRectMake(0, 0, renderSize.width, renderSize.height), sourceSize, UIViewContentModeScaleAspectFill);
CGRect intersection = CGRectMake(-rectForAspectFill.origin.x, -rectForAspectFill.origin.y, renderSize.width, renderSize.height);
return intersection;
}
CGRect vapRectWithContentModeInsideRect(CGRect boundingRect, CGSize aspectRatio, UIViewContentMode contentMode) {
if (aspectRatio.width <= 0 || aspectRatio.height <= 0) {
return boundingRect;
}
CGRect desRect = CGRectZero;
switch (contentMode) {
case UIViewContentModeScaleToFill: {
desRect = boundingRect;
}
break;
case UIViewContentModeScaleAspectFit: {
desRect = AVMakeRectWithAspectRatioInsideRect(aspectRatio, boundingRect);
}
break;
case UIViewContentModeScaleAspectFill: {
CGFloat ratio = MAX(CGRectGetWidth(boundingRect)/aspectRatio.width, CGRectGetHeight(boundingRect)/aspectRatio.height);
CGSize contentSize = CGSizeMake(aspectRatio.width*ratio, aspectRatio.height*ratio);
desRect = CGRectMake(boundingRect.origin.x+(CGRectGetWidth(boundingRect)-contentSize.width)/2.0, boundingRect.origin.y+(CGRectGetHeight(boundingRect)-contentSize.height)/2.0, contentSize.width, contentSize.height);
}
break;
case UIViewContentModeCenter: {
desRect = CGRectMake(boundingRect.origin.x+(CGRectGetWidth(boundingRect)-aspectRatio.width)/2.0, boundingRect.origin.y+(CGRectGetHeight(boundingRect)-aspectRatio.height)/2.0, aspectRatio.width, aspectRatio.height);
}
break;
case UIViewContentModeTop: {
desRect = CGRectMake(boundingRect.origin.x+(CGRectGetWidth(boundingRect)-aspectRatio.width)/2.0, boundingRect.origin.y, aspectRatio.width, aspectRatio.height);
}
break;
case UIViewContentModeBottom: {
desRect = CGRectMake(boundingRect.origin.x+(CGRectGetWidth(boundingRect)-aspectRatio.width)/2.0, boundingRect.origin.y+CGRectGetHeight(boundingRect)-aspectRatio.height, aspectRatio.width, aspectRatio.height);
}
break;
case UIViewContentModeLeft: {
desRect = CGRectMake(boundingRect.origin.x, boundingRect.origin.y+(CGRectGetHeight(boundingRect)-aspectRatio.height)/2.0, aspectRatio.width, aspectRatio.height);
}
break;
case UIViewContentModeRight: {
desRect = CGRectMake(boundingRect.origin.x+CGRectGetWidth(boundingRect)-aspectRatio.width, boundingRect.origin.y+(CGRectGetHeight(boundingRect)-aspectRatio.height)/2.0, aspectRatio.width, aspectRatio.height);
}
break;
case UIViewContentModeTopLeft: {
desRect = CGRectMake(boundingRect.origin.x, boundingRect.origin.y, aspectRatio.width, aspectRatio.height);
}
break;
case UIViewContentModeTopRight: {
desRect = CGRectMake(boundingRect.origin.x+CGRectGetWidth(boundingRect)-aspectRatio.width, boundingRect.origin.y, aspectRatio.width, aspectRatio.height);
}
break;
case UIViewContentModeBottomLeft: {
desRect = CGRectMake(boundingRect.origin.x, boundingRect.origin.y+CGRectGetHeight(boundingRect)-aspectRatio.height, aspectRatio.width, aspectRatio.height);
}
break;
case UIViewContentModeBottomRight: {
desRect = CGRectMake(boundingRect.origin.x+CGRectGetWidth(boundingRect)-aspectRatio.width, boundingRect.origin.y+CGRectGetHeight(boundingRect)-aspectRatio.height, aspectRatio.width, aspectRatio.height);
}
break;
default:
break;
}
return desRect;
}
@implementation QGVAPMetalUtil
@end

View File

@@ -0,0 +1,28 @@
// QGVAPSafeMutableArray.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <Foundation/Foundation.h>
/**
This class inherits from NSMutableArray and makes it thread safe, allowing recursive locking.
@discussion Access is slower than a plain NSMutableArray or a semaphore-guarded one, but roughly on par with @synchronized.
@warning Fast enumeration and enumerators are not thread safe.
*/
@interface QGVAPSafeMutableArray : NSMutableArray
@end

View File

@@ -0,0 +1,384 @@
// QGVAPSafeMutableArray.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGVAPSafeMutableArray.h"
#define VAP_INIT(...) self = super.init; \
if (!self) return nil; \
__VA_ARGS__; \
if (!_arr) return nil; \
_lock = [[NSRecursiveLock alloc] init]; \
return self;
#define VAP_LOCK(...) [_lock lock]; \
__VA_ARGS__; \
[_lock unlock];
@interface QGVAPSafeMutableArray (){
NSMutableArray *_arr;
NSRecursiveLock *_lock;
}
@end
@implementation QGVAPSafeMutableArray
#pragma mark - init
-(instancetype)init{
VAP_INIT(_arr = [NSMutableArray new]);
}
-(instancetype)initWithCapacity:(NSUInteger)numItems{
VAP_INIT(_arr = [[NSMutableArray alloc] initWithCapacity:numItems]);
}
-(instancetype)initWithArray:(NSArray *)array{
VAP_INIT(_arr = [[NSMutableArray alloc] initWithArray:array]);
}
-(instancetype)initWithObjects:(id _Nonnull const [])objects count:(NSUInteger)cnt{
VAP_INIT(_arr = [[NSMutableArray alloc] initWithObjects:objects count:cnt]);
}
- (instancetype)initWithContentsOfFile:(NSString *)path {
VAP_INIT(_arr = [[NSMutableArray alloc] initWithContentsOfFile:path]);
}
- (instancetype)initWithContentsOfURL:(NSURL *)url {
VAP_INIT(_arr = [[NSMutableArray alloc] initWithContentsOfURL:url]);
}
#pragma mark - methods
- (NSUInteger)count {
VAP_LOCK(NSUInteger count = _arr.count); return count;
}
- (id)objectAtIndex:(NSUInteger)index {
VAP_LOCK(id obj = [_arr objectAtIndex:index]); return obj;
}
- (NSArray *)arrayByAddingObject:(id)anObject {
VAP_LOCK(NSArray * arr = [_arr arrayByAddingObject:anObject]); return arr;
}
- (NSArray *)arrayByAddingObjectsFromArray:(NSArray *)otherArray {
VAP_LOCK(NSArray * arr = [_arr arrayByAddingObjectsFromArray:otherArray]); return arr;
}
- (NSString *)componentsJoinedByString:(NSString *)separator {
VAP_LOCK(NSString * str = [_arr componentsJoinedByString:separator]); return str;
}
- (BOOL)containsObject:(id)anObject {
VAP_LOCK(BOOL c = [_arr containsObject:anObject]); return c;
}
- (NSString *)description {
VAP_LOCK(NSString * d = _arr.description); return d;
}
- (NSString *)descriptionWithLocale:(id)locale {
VAP_LOCK(NSString * d = [_arr descriptionWithLocale:locale]); return d;
}
- (NSString *)descriptionWithLocale:(id)locale indent:(NSUInteger)level {
VAP_LOCK(NSString * d = [_arr descriptionWithLocale:locale indent:level]); return d;
}
- (id)firstObjectCommonWithArray:(NSArray *)otherArray {
VAP_LOCK(id o = [_arr firstObjectCommonWithArray:otherArray]); return o;
}
- (void)getObjects:(id __unsafe_unretained[])objects range:(NSRange)range {
VAP_LOCK([_arr getObjects:objects range:range]);
}
- (NSUInteger)indexOfObject:(id)anObject {
VAP_LOCK(NSUInteger i = [_arr indexOfObject:anObject]); return i;
}
- (NSUInteger)indexOfObject:(id)anObject inRange:(NSRange)range {
VAP_LOCK(NSUInteger i = [_arr indexOfObject:anObject inRange:range]); return i;
}
- (NSUInteger)indexOfObjectIdenticalTo:(id)anObject {
VAP_LOCK(NSUInteger i = [_arr indexOfObjectIdenticalTo:anObject]); return i;
}
- (NSUInteger)indexOfObjectIdenticalTo:(id)anObject inRange:(NSRange)range {
VAP_LOCK(NSUInteger i = [_arr indexOfObjectIdenticalTo:anObject inRange:range]); return i;
}
- (id)firstObject {
VAP_LOCK(id o = _arr.firstObject); return o;
}
- (id)lastObject {
VAP_LOCK(id o = _arr.lastObject); return o;
}
- (NSEnumerator *)objectEnumerator {
VAP_LOCK(NSEnumerator * e = [_arr objectEnumerator]); return e;
}
- (NSEnumerator *)reverseObjectEnumerator {
VAP_LOCK(NSEnumerator * e = [_arr reverseObjectEnumerator]); return e;
}
- (NSData *)sortedArrayHint {
VAP_LOCK(NSData * d = [_arr sortedArrayHint]); return d;
}
- (NSArray *)sortedArrayUsingFunction:(__attribute__((noescape)) NSInteger (*)(id, id, void *))comparator context:(void *)context {
VAP_LOCK(NSArray * arr = [_arr sortedArrayUsingFunction:comparator context:context]) return arr;
}
- (NSArray *)sortedArrayUsingFunction:(__attribute__((noescape)) NSInteger (*)(id, id, void *))comparator context:(void *)context hint:(NSData *)hint {
VAP_LOCK(NSArray * arr = [_arr sortedArrayUsingFunction:comparator context:context hint:hint]); return arr;
}
- (NSArray *)sortedArrayUsingSelector:(SEL)comparator {
VAP_LOCK(NSArray * arr = [_arr sortedArrayUsingSelector:comparator]); return arr;
}
- (NSArray *)subarrayWithRange:(NSRange)range {
VAP_LOCK(NSArray * arr = [_arr subarrayWithRange:range]) return arr;
}
- (void)makeObjectsPerformSelector:(SEL)aSelector {
VAP_LOCK([_arr makeObjectsPerformSelector:aSelector]);
}
- (void)makeObjectsPerformSelector:(SEL)aSelector withObject:(id)argument {
VAP_LOCK([_arr makeObjectsPerformSelector:aSelector withObject:argument]);
}
- (NSArray *)objectsAtIndexes:(NSIndexSet *)indexes {
VAP_LOCK(NSArray * arr = [_arr objectsAtIndexes:indexes]); return arr;
}
- (id)objectAtIndexedSubscript:(NSUInteger)idx {
VAP_LOCK(id o = [_arr objectAtIndexedSubscript:idx]); return o;
}
- (void)enumerateObjectsUsingBlock:(__attribute__((noescape)) void (^)(id obj, NSUInteger idx, BOOL *stop))block {
VAP_LOCK([_arr enumerateObjectsUsingBlock:block]);
}
- (void)enumerateObjectsWithOptions:(NSEnumerationOptions)opts usingBlock:(__attribute__((noescape)) void (^)(id obj, NSUInteger idx, BOOL *stop))block {
VAP_LOCK([_arr enumerateObjectsWithOptions:opts usingBlock:block]);
}
- (void)enumerateObjectsAtIndexes:(NSIndexSet *)s options:(NSEnumerationOptions)opts usingBlock:(__attribute__((noescape)) void (^)(id obj, NSUInteger idx, BOOL *stop))block {
VAP_LOCK([_arr enumerateObjectsAtIndexes:s options:opts usingBlock:block]);
}
- (NSUInteger)indexOfObjectPassingTest:(__attribute__((noescape)) BOOL (^)(id obj, NSUInteger idx, BOOL *stop))predicate {
VAP_LOCK(NSUInteger i = [_arr indexOfObjectPassingTest:predicate]); return i;
}
- (NSUInteger)indexOfObjectWithOptions:(NSEnumerationOptions)opts passingTest:(__attribute__((noescape)) BOOL (^)(id obj, NSUInteger idx, BOOL *stop))predicate {
VAP_LOCK(NSUInteger i = [_arr indexOfObjectWithOptions:opts passingTest:predicate]); return i;
}
- (NSUInteger)indexOfObjectAtIndexes:(NSIndexSet *)s options:(NSEnumerationOptions)opts passingTest:(__attribute__((noescape)) BOOL (^)(id obj, NSUInteger idx, BOOL *stop))predicate {
VAP_LOCK(NSUInteger i = [_arr indexOfObjectAtIndexes:s options:opts passingTest:predicate]); return i;
}
- (NSIndexSet *)indexesOfObjectsPassingTest:(__attribute__((noescape)) BOOL (^)(id obj, NSUInteger idx, BOOL *stop))predicate {
VAP_LOCK(NSIndexSet * i = [_arr indexesOfObjectsPassingTest:predicate]); return i;
}
- (NSIndexSet *)indexesOfObjectsWithOptions:(NSEnumerationOptions)opts passingTest:(__attribute__((noescape)) BOOL (^)(id obj, NSUInteger idx, BOOL *stop))predicate {
VAP_LOCK(NSIndexSet * i = [_arr indexesOfObjectsWithOptions:opts passingTest:predicate]); return i;
}
- (NSIndexSet *)indexesOfObjectsAtIndexes:(NSIndexSet *)s options:(NSEnumerationOptions)opts passingTest:(__attribute__((noescape)) BOOL (^)(id obj, NSUInteger idx, BOOL *stop))predicate {
VAP_LOCK(NSIndexSet * i = [_arr indexesOfObjectsAtIndexes:s options:opts passingTest:predicate]); return i;
}
- (NSArray *)sortedArrayUsingComparator:(__attribute__((noescape)) NSComparator)cmptr {
VAP_LOCK(NSArray * a = [_arr sortedArrayUsingComparator:cmptr]); return a;
}
- (NSArray *)sortedArrayWithOptions:(NSSortOptions)opts usingComparator:(__attribute__((noescape)) NSComparator)cmptr {
VAP_LOCK(NSArray * a = [_arr sortedArrayWithOptions:opts usingComparator:cmptr]); return a;
}
- (NSUInteger)indexOfObject:(id)obj inSortedRange:(NSRange)r options:(NSBinarySearchingOptions)opts usingComparator:(__attribute__((noescape)) NSComparator)cmp {
VAP_LOCK(NSUInteger i = [_arr indexOfObject:obj inSortedRange:r options:opts usingComparator:cmp]); return i;
}
#pragma mark - mutable
- (void)addObject:(id)anObject {
VAP_LOCK([_arr addObject:anObject]);
}
- (void)insertObject:(id)anObject atIndex:(NSUInteger)index {
VAP_LOCK([_arr insertObject:anObject atIndex:index]);
}
- (void)removeLastObject {
VAP_LOCK([_arr removeLastObject]);
}
- (void)removeObjectAtIndex:(NSUInteger)index {
VAP_LOCK([_arr removeObjectAtIndex:index]);
}
- (void)replaceObjectAtIndex:(NSUInteger)index withObject:(id)anObject {
VAP_LOCK([_arr replaceObjectAtIndex:index withObject:anObject]);
}
- (void)addObjectsFromArray:(NSArray *)otherArray {
VAP_LOCK([_arr addObjectsFromArray:otherArray]);
}
- (void)exchangeObjectAtIndex:(NSUInteger)idx1 withObjectAtIndex:(NSUInteger)idx2 {
VAP_LOCK([_arr exchangeObjectAtIndex:idx1 withObjectAtIndex:idx2]);
}
- (void)removeAllObjects {
VAP_LOCK([_arr removeAllObjects]);
}
- (void)removeObject:(id)anObject inRange:(NSRange)range {
VAP_LOCK([_arr removeObject:anObject inRange:range]);
}
- (void)removeObject:(id)anObject {
VAP_LOCK([_arr removeObject:anObject]);
}
- (void)removeObjectIdenticalTo:(id)anObject inRange:(NSRange)range {
VAP_LOCK([_arr removeObjectIdenticalTo:anObject inRange:range]);
}
- (void)removeObjectIdenticalTo:(id)anObject {
VAP_LOCK([_arr removeObjectIdenticalTo:anObject]);
}
- (void)removeObjectsInArray:(NSArray *)otherArray {
VAP_LOCK([_arr removeObjectsInArray:otherArray]);
}
- (void)removeObjectsInRange:(NSRange)range {
VAP_LOCK([_arr removeObjectsInRange:range]);
}
- (void)replaceObjectsInRange:(NSRange)range withObjectsFromArray:(NSArray *)otherArray range:(NSRange)otherRange {
VAP_LOCK([_arr replaceObjectsInRange:range withObjectsFromArray:otherArray range:otherRange]);
}
- (void)replaceObjectsInRange:(NSRange)range withObjectsFromArray:(NSArray *)otherArray {
VAP_LOCK([_arr replaceObjectsInRange:range withObjectsFromArray:otherArray]);
}
- (void)setArray:(NSArray *)otherArray {
VAP_LOCK([_arr setArray:otherArray]);
}
- (void)sortUsingFunction:(__attribute__((noescape)) NSInteger (*)(id, id, void *))compare context:(void *)context {
VAP_LOCK([_arr sortUsingFunction:compare context:context]);
}
- (void)sortUsingSelector:(SEL)comparator {
VAP_LOCK([_arr sortUsingSelector:comparator]);
}
- (void)insertObjects:(NSArray *)objects atIndexes:(NSIndexSet *)indexes {
VAP_LOCK([_arr insertObjects:objects atIndexes:indexes]);
}
- (void)removeObjectsAtIndexes:(NSIndexSet *)indexes {
VAP_LOCK([_arr removeObjectsAtIndexes:indexes]);
}
- (void)replaceObjectsAtIndexes:(NSIndexSet *)indexes withObjects:(NSArray *)objects {
VAP_LOCK([_arr replaceObjectsAtIndexes:indexes withObjects:objects]);
}
- (void)setObject:(id)obj atIndexedSubscript:(NSUInteger)idx {
VAP_LOCK([_arr setObject:obj atIndexedSubscript:idx]);
}
- (void)sortUsingComparator:(__attribute__((noescape)) NSComparator)cmptr {
VAP_LOCK([_arr sortUsingComparator:cmptr]);
}
- (void)sortWithOptions:(NSSortOptions)opts usingComparator:(__attribute__((noescape)) NSComparator)cmptr {
VAP_LOCK([_arr sortWithOptions:opts usingComparator:cmptr]);
}
- (BOOL)isEqualToArray:(NSArray *)otherArray {
if (otherArray == self) return YES;
if ([otherArray isKindOfClass:QGVAPSafeMutableArray.class]) {
QGVAPSafeMutableArray *other = (id)otherArray;
BOOL isEqual;
[_lock lock];
[other->_lock lock];
isEqual = [_arr isEqualToArray:other->_arr];
[other->_lock unlock];
[_lock unlock];
return isEqual;
}
return NO;
}
#pragma mark - protocol
- (id)copyWithZone:(NSZone *)zone {
return [self mutableCopyWithZone:zone];
}
- (id)mutableCopyWithZone:(NSZone *)zone {
VAP_LOCK(id copiedArr = [[self.class allocWithZone:zone] initWithArray:_arr]);
return copiedArr;
}
- (NSUInteger)countByEnumeratingWithState:(NSFastEnumerationState *)state
objects:(id __unsafe_unretained[])stackbuf
count:(NSUInteger)len {
VAP_LOCK(NSUInteger count = [_arr countByEnumeratingWithState:state objects:stackbuf count:len]);
return count;
}
- (BOOL)isEqual:(id)object {
if (object == self) return YES;
if ([object isKindOfClass:[QGVAPSafeMutableArray class]]) {
QGVAPSafeMutableArray *other = object;
BOOL isEqual;
[_lock lock];
[other->_lock lock];
isEqual = [_arr isEqual:other->_arr];
[other->_lock unlock];
[_lock unlock];
return isEqual;
}
return NO;
}
- (NSUInteger)hash {
VAP_LOCK(NSUInteger hash = [_arr hash]);
return hash;
}
@end

View File

@@ -0,0 +1,27 @@
// QGVAPSafeMutableDictionary.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <Foundation/Foundation.h>
/**
This class inherits from NSMutableDictionary and makes it thread safe, allowing recursive locking.
@discussion Access is slower than a plain NSMutableDictionary or a semaphore-guarded one, but roughly on par with @synchronized.
@warning Fast enumeration and enumerators are not thread safe.
*/
@interface QGVAPSafeMutableDictionary : NSMutableDictionary
@end
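/*
 A minimal usage sketch (illustrative only), mirroring QGVAPSafeMutableArray:

     QGVAPSafeMutableDictionary *cache = [QGVAPSafeMutableDictionary new];
     dispatch_apply(100, dispatch_get_global_queue(QOS_CLASS_DEFAULT, 0), ^(size_t i) {
         cache[@(i).stringValue] = @(i);
     });
     // cache.count == 100
 */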

View File

@@ -0,0 +1,237 @@
// QGVAPSafeMutableDictionary.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGVAPSafeMutableDictionary.h"
#define VAP_INIT(...) self = super.init; \
if (!self) return nil; \
__VA_ARGS__; \
if (!_dic) return nil; \
_lock = [[NSRecursiveLock alloc] init]; \
return self;
#define VAP_LOCK(...) [_lock lock]; \
__VA_ARGS__; \
[_lock unlock];
@interface QGVAPSafeMutableDictionary(){
NSMutableDictionary *_dic;
NSRecursiveLock *_lock;
}
@end
@implementation QGVAPSafeMutableDictionary
#pragma mark - init
- (instancetype)init {
VAP_INIT(_dic = [[NSMutableDictionary alloc] init]);
}
- (instancetype)initWithObjects:(NSArray *)objects forKeys:(NSArray *)keys {
VAP_INIT(_dic = [[NSMutableDictionary alloc] initWithObjects:objects forKeys:keys]);
}
- (instancetype)initWithCapacity:(NSUInteger)capacity {
VAP_INIT(_dic = [[NSMutableDictionary alloc] initWithCapacity:capacity]);
}
- (instancetype)initWithObjects:(const id[])objects forKeys:(const id <NSCopying>[])keys count:(NSUInteger)cnt {
VAP_INIT(_dic = [[NSMutableDictionary alloc] initWithObjects:objects forKeys:keys count:cnt]);
}
- (instancetype)initWithDictionary:(NSDictionary *)otherDictionary {
VAP_INIT(_dic = [[NSMutableDictionary alloc] initWithDictionary:otherDictionary]);
}
- (instancetype)initWithDictionary:(NSDictionary *)otherDictionary copyItems:(BOOL)flag {
VAP_INIT(_dic = [[NSMutableDictionary alloc] initWithDictionary:otherDictionary copyItems:flag]);
}
#pragma mark - method
- (NSUInteger)count {
VAP_LOCK(NSUInteger c = _dic.count); return c;
}
- (id)objectForKey:(id)aKey {
VAP_LOCK(id o = [_dic objectForKey:aKey]); return o;
}
- (NSEnumerator *)keyEnumerator {
VAP_LOCK(NSEnumerator * e = [_dic keyEnumerator]); return e;
}
- (NSArray *)allKeys {
VAP_LOCK(NSArray * a = [_dic allKeys]); return a;
}
- (NSArray *)allKeysForObject:(id)anObject {
VAP_LOCK(NSArray * a = [_dic allKeysForObject:anObject]); return a;
}
- (NSArray *)allValues {
VAP_LOCK(NSArray * a = [_dic allValues]); return a;
}
- (NSString *)description {
VAP_LOCK(NSString * d = [_dic description]); return d;
}
- (NSString *)descriptionInStringsFileFormat {
VAP_LOCK(NSString * d = [_dic descriptionInStringsFileFormat]); return d;
}
- (NSString *)descriptionWithLocale:(id)locale {
VAP_LOCK(NSString * d = [_dic descriptionWithLocale:locale]); return d;
}
- (NSString *)descriptionWithLocale:(id)locale indent:(NSUInteger)level {
VAP_LOCK(NSString * d = [_dic descriptionWithLocale:locale indent:level]); return d;
}
- (BOOL)isEqualToDictionary:(NSDictionary *)otherDictionary {
if (otherDictionary == self) return YES;
if ([otherDictionary isKindOfClass:QGVAPSafeMutableDictionary.class]) {
QGVAPSafeMutableDictionary *other = (id)otherDictionary;
BOOL isEqual;
[_lock lock];
[other->_lock lock];
isEqual = [_dic isEqual:other->_dic];
[other->_lock unlock];
[_lock unlock];
return isEqual;
}
return NO;
}
- (NSEnumerator *)objectEnumerator {
VAP_LOCK(NSEnumerator * e = [_dic objectEnumerator]); return e;
}
- (NSArray *)objectsForKeys:(NSArray *)keys notFoundMarker:(id)marker {
VAP_LOCK(NSArray * a = [_dic objectsForKeys:keys notFoundMarker:marker]); return a;
}
- (NSArray *)keysSortedByValueUsingSelector:(SEL)comparator {
VAP_LOCK(NSArray * a = [_dic keysSortedByValueUsingSelector:comparator]); return a;
}
- (void)getObjects:(id __unsafe_unretained[])objects andKeys:(id __unsafe_unretained[])keys {
VAP_LOCK([_dic getObjects:objects andKeys:keys]);
}
- (id)objectForKeyedSubscript:(id)key {
VAP_LOCK(id o = [_dic objectForKeyedSubscript:key]); return o;
}
- (void)enumerateKeysAndObjectsUsingBlock:(__attribute__((noescape)) void (^)(id key, id obj, BOOL *stop))block {
VAP_LOCK([_dic enumerateKeysAndObjectsUsingBlock:block]);
}
- (void)enumerateKeysAndObjectsWithOptions:(NSEnumerationOptions)opts usingBlock:(__attribute__((noescape)) void (^)(id key, id obj, BOOL *stop))block {
VAP_LOCK([_dic enumerateKeysAndObjectsWithOptions:opts usingBlock:block]);
}
- (NSArray *)keysSortedByValueUsingComparator:(__attribute__((noescape)) NSComparator)cmptr {
VAP_LOCK(NSArray * a = [_dic keysSortedByValueUsingComparator:cmptr]); return a;
}
- (NSArray *)keysSortedByValueWithOptions:(NSSortOptions)opts usingComparator:(__attribute__((noescape)) NSComparator)cmptr {
VAP_LOCK(NSArray * a = [_dic keysSortedByValueWithOptions:opts usingComparator:cmptr]); return a;
}
- (NSSet *)keysOfEntriesPassingTest:(__attribute__((noescape)) BOOL (^)(id key, id obj, BOOL *stop))predicate {
VAP_LOCK(NSSet * a = [_dic keysOfEntriesPassingTest:predicate]); return a;
}
- (NSSet *)keysOfEntriesWithOptions:(NSEnumerationOptions)opts passingTest:(__attribute__((noescape)) BOOL (^)(id key, id obj, BOOL *stop))predicate {
VAP_LOCK(NSSet * a = [_dic keysOfEntriesWithOptions:opts passingTest:predicate]); return a;
}
#pragma mark - mutable
- (void)removeObjectForKey:(id)aKey {
VAP_LOCK([_dic removeObjectForKey:aKey]);
}
- (void)setObject:(id)anObject forKey:(id <NSCopying> )aKey {
VAP_LOCK([_dic setObject:anObject forKey:aKey]);
}
- (void)addEntriesFromDictionary:(NSDictionary *)otherDictionary {
VAP_LOCK([_dic addEntriesFromDictionary:otherDictionary]);
}
- (void)removeAllObjects {
VAP_LOCK([_dic removeAllObjects]);
}
- (void)removeObjectsForKeys:(NSArray *)keyArray {
VAP_LOCK([_dic removeObjectsForKeys:keyArray]);
}
- (void)setDictionary:(NSDictionary *)otherDictionary {
VAP_LOCK([_dic setDictionary:otherDictionary]);
}
- (void)setObject:(id)obj forKeyedSubscript:(id <NSCopying> )key {
VAP_LOCK([_dic setObject:obj forKeyedSubscript:key]);
}
#pragma mark - protocol
- (id)copyWithZone:(NSZone *)zone {
return [self mutableCopyWithZone:zone];
}
- (id)mutableCopyWithZone:(NSZone *)zone {
VAP_LOCK(id copiedDictionary = [[self.class allocWithZone:zone] initWithDictionary:_dic]);
return copiedDictionary;
}
- (NSUInteger)countByEnumeratingWithState:(NSFastEnumerationState *)state
objects:(id __unsafe_unretained[])stackbuf
count:(NSUInteger)len {
VAP_LOCK(NSUInteger count = [_dic countByEnumeratingWithState:state objects:stackbuf count:len]);
return count;
}
- (BOOL)isEqual:(id)object {
if (object == self) return YES;
if ([object isKindOfClass:QGVAPSafeMutableDictionary.class]) {
QGVAPSafeMutableDictionary *other = object;
BOOL isEqual;
[_lock lock];
[other->_lock lock];
isEqual = [_dic isEqual:other->_dic];
[other->_lock unlock];
[_lock unlock];
return isEqual;
}
return NO;
}
- (NSUInteger)hash {
VAP_LOCK(NSUInteger hash = [_dic hash]);
return hash;
}
@end

View File

@@ -0,0 +1,24 @@
// QGVAPWeakProxy.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <Foundation/Foundation.h>
@interface QGVAPWeakProxy : NSObject
- (instancetype)initWithTarget:(id)target;
+ (instancetype)proxyWithTarget:(id)target;
@end

View File

@@ -0,0 +1,52 @@
// QGVAPWeakProxy.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGVAPWeakProxy.h"
@implementation QGVAPWeakProxy {
__weak id _target;
}
- (instancetype)initWithTarget:(id)target {
if (self = [super init]) {
_target = target;
}
return self;
}
+ (instancetype)proxyWithTarget:(id)target {
return [[QGVAPWeakProxy alloc] initWithTarget:target];
}
// 1. Fast forwarding: hand every message straight to the weak target.
- (id)forwardingTargetForSelector:(SEL)aSelector {
return _target;
}
// 2. If the target in <1> has already been released, returning nil would end in an "unrecognized selector" crash, so provide a placeholder method signature and swallow the invocation.
- (NSMethodSignature *)methodSignatureForSelector:(SEL)aSelector {
return [NSObject instanceMethodSignatureForSelector:@selector(init)];
}
- (void)forwardInvocation:(NSInvocation *)anInvocation {
void *null = NULL;
[anInvocation setReturnValue:&null];
}
- (BOOL)respondsToSelector:(SEL)aSelector {
return [_target respondsToSelector:aSelector];
}
@end
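/*
 A minimal usage sketch (illustrative only; onDisplayTick: is a hypothetical selector): the
 proxy is typically used as an NSTimer/CADisplayLink target so the timer does not retain the
 real object, and forwarded calls become no-ops once that object is gone.

     QGVAPWeakProxy *proxy = [QGVAPWeakProxy proxyWithTarget:self];
     NSTimer *timer = [NSTimer scheduledTimerWithTimeInterval:1.0 / 30.0
                                                       target:proxy
                                                     selector:@selector(onDisplayTick:)
                                                     userInfo:nil
                                                      repeats:YES];
     // Remember to invalidate the timer when playback stops.
 */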

View File

@@ -0,0 +1,78 @@
// VAPMacros.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#ifndef VAPMacros_h
#define VAPMacros_h
#define STRINGIZE(x) #x
#define STRINGIZE2(x) STRINGIZE(x)
#define SHADER_STRING(text) @ STRINGIZE2(text)
#ifndef HWDSYNTH_DYNAMIC_PROPERTY_OBJECT
#define HWDSYNTH_DYNAMIC_PROPERTY_OBJECT(_dynamic_getter_, _dynamic_setter_, _association_policy_) \
- (void)_dynamic_setter_ : (id)object { \
[self willChangeValueForKey:@#_dynamic_getter_]; \
objc_setAssociatedObject(self, _cmd, object, _association_policy_); \
[self didChangeValueForKey:@#_dynamic_getter_]; \
} \
- (id)_dynamic_getter_ { \
return objc_getAssociatedObject(self, @selector(_dynamic_setter_:)); \
}
#endif
#ifndef HWDSYNTH_DYNAMIC_PROPERTY_CTYPE
#define HWDSYNTH_DYNAMIC_PROPERTY_CTYPE(_dynamic_getter_, _dynamic_setter_, _type_) \
- (void)_dynamic_setter_ : (_type_)object { \
[self willChangeValueForKey:@#_dynamic_getter_]; \
NSValue *value = [NSValue value:&object withObjCType:@encode(_type_)]; \
objc_setAssociatedObject(self, _cmd, value, OBJC_ASSOCIATION_RETAIN); \
[self didChangeValueForKey:@#_dynamic_getter_]; \
} \
- (_type_)_dynamic_getter_ { \
_type_ cValue = { 0 }; \
NSValue *value = objc_getAssociatedObject(self, @selector(_dynamic_setter_:)); \
[value getValue:&cValue]; \
return cValue; \
}
#endif
#import "QGHWDShaderTypes.h"
#import <UIKit/UIKit.h>
#import "QGVAPMaskInfo.h"
extern NSInteger const kQGHWDMP4DefaultFPS; //default fps: 25
extern NSInteger const kQGHWDMP4MinFPS; //minimum fps: 1
extern NSInteger const QGHWDMP4MaxFPS; //maximum fps: 60
extern NSInteger const VapMaxCompatibleVersion; //maximum compatible config version
@class QGVAPSourceDisplayItem;
typedef UIView VAPView; //container view in which the effect is played
/* Location of the alpha-channel data within each frame of the mp4 asset */
typedef NS_ENUM(NSInteger, QGHWDTextureBlendMode){
QGHWDTextureBlendMode_AlphaLeft = 0, // alpha data on the left side
QGHWDTextureBlendMode_AlphaRight = 1, // alpha data on the right side
QGHWDTextureBlendMode_AlphaTop = 2, // alpha data on the top side
QGHWDTextureBlendMode_AlphaBottom = 3, // alpha data on the bottom side
};
typedef void(^VAPImageCompletionBlock)(UIImage * image, NSError * error,NSString *imageURL);
typedef void(^VAPGestureEventBlock)(UIGestureRecognizer *gestureRecognizer, BOOL insideSource, QGVAPSourceDisplayItem *source);
#endif /* VAPMacros_h */
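/*
 A minimal usage sketch (illustrative only; the UIView (VAPDemo) category and hwd_demoTag
 property are hypothetical): HWDSYNTH_DYNAMIC_PROPERTY_OBJECT synthesizes associated-object
 accessors inside a category (<objc/runtime.h> must be imported by the implementation file).

     @interface UIView (VAPDemo)
     @property (nonatomic, strong) NSString *hwd_demoTag;
     @end

     @implementation UIView (VAPDemo)
     HWDSYNTH_DYNAMIC_PROPERTY_OBJECT(hwd_demoTag, setHwd_demoTag, OBJC_ASSOCIATION_RETAIN_NONATOMIC)
     @end
 */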

View File

@@ -0,0 +1,54 @@
// QGHWDMetalRenderer.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <UIKit/UIKit.h>
#import <Metal/Metal.h>
#import "VAPMacros.h"
UIKIT_EXTERN NSString *const kQGHWDVertexFunctionName;
UIKIT_EXTERN NSString *const kQGHWDYUVFragmentFunctionName;
extern matrix_float3x3 const kQGColorConversionMatrix601Default;
extern matrix_float3x3 const kQGColorConversionMatrix601FullRangeDefault;
extern matrix_float3x3 const kQGColorConversionMatrix709Default;
extern matrix_float3x3 const kQGColorConversionMatrix709FullRangeDefault;
extern matrix_float3x3 const kQGBlurWeightMatrixDefault;
extern id<MTLDevice> kQGHWDMetalRendererDevice;
#if TARGET_OS_SIMULATOR //simulator
@interface QGHWDMetalRenderer : NSObject
@property (nonatomic, assign) QGHWDTextureBlendMode blendMode;
- (instancetype)initWithMetalLayer:(id)layer blendMode:(QGHWDTextureBlendMode)mode;
- (void)renderPixelBuffer:(CVPixelBufferRef)pixelBuffer metalLayer:(id)layer;
- (void)dispose;
@end
#else
@interface QGHWDMetalRenderer : NSObject
@property (nonatomic, assign) QGHWDTextureBlendMode blendMode;
- (instancetype)initWithMetalLayer:(CAMetalLayer *)layer blendMode:(QGHWDTextureBlendMode)mode;
- (void)renderPixelBuffer:(CVPixelBufferRef)pixelBuffer metalLayer:(CAMetalLayer *)layer;
- (void)dispose;
@end
#endif

View File

@@ -0,0 +1,321 @@
// QGHWDMetalRenderer.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGHWDMetalRenderer.h"
#import "QGHWDShaderTypes.h"
#import "QGVAPLogger.h"
#import <simd/simd.h>
#import <MetalKit/MetalKit.h>
#import "UIDevice+VAPUtil.h"
#import "QGVAPMetalUtil.h"
#import "QGVAPMetalShaderFunctionLoader.h"
#pragma mark - constants
NSString *const kQGHWDVertexFunctionName = @"hwd_vertexShader";
NSString *const kQGHWDYUVFragmentFunctionName = @"hwd_yuvFragmentShader";
static NSInteger const kQGQuadVerticesConstantsRow = 4;
static NSInteger const kQGQuadVerticesConstantsColumn = 32;
static NSInteger const kQGHWDVertexCount = 4;
id<MTLDevice> kQGHWDMetalRendererDevice;
// BT.601, which is the standard for SDTV.
matrix_float3x3 const kQGColorConversionMatrix601Default = {{
{1.164, 1.164, 1.164},
{0.0, -0.392, 2.017},
{1.596, -0.813, 0.0}
}};
/*
 Row-major reference (approximate values):
 | 1.0   0.0     1.4   |
 | 1.0  -0.343  -0.711 |
 | 1.0   1.765   0.0   |
 */
//ITU BT.601 Full Range
matrix_float3x3 const kQGColorConversionMatrix601FullRangeDefault = {{
{1.0, 1.0, 1.0},
{0.0, -0.34413, 1.772},
{1.402, -0.71414, 0.0}
}};
// BT.709, which is the standard for HDTV.
matrix_float3x3 const kQGColorConversionMatrix709Default = {{
{1.164, 1.164, 1.164},
{0.0, -0.213, 2.112},
{1.793, -0.533, 0.0}
}};
// BT.709 Full Range.
matrix_float3x3 const kQGColorConversionMatrix709FullRangeDefault = {{
{1.0, 1.0, 1.0},
{0.0, -.18732, 1.8556},
{1.57481, -.46813, 0.0}
}};
// Blur weight matrix.
matrix_float3x3 const kQGBlurWeightMatrixDefault = {{
{0.0625, 0.125, 0.0625},
{0.125, 0.25, 0.125},
{0.0625, 0.125, 0.0625}
}};
//vertex data matching QGHWDVertex: position + rgb texture coords + alpha texture coords
static const float kQGQuadVerticesConstants[kQGQuadVerticesConstantsRow][kQGQuadVerticesConstantsColumn] = {
//alpha in the left half
{-1.0, -1.0, 0.0, 1.0, 0.5, 1.0, 0.0, 1.0,
-1.0, 1.0, 0.0, 1.0, 0.5, 0.0, 0.0, 0.0,
1.0, -1.0, 0.0, 1.0, 1.0, 1.0, 0.5, 1.0,
1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.5, 0.0},
//alpha in the right half
{-1.0, -1.0, 0.0, 1.0, 0.0, 1.0, 0.5, 1.0,
-1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.5, 0.0,
1.0, -1.0, 0.0, 1.0, 0.5, 1.0, 1.0, 1.0,
1.0, 1.0, 0.0, 1.0, 0.5, 0.0, 1.0, 0.0},
//alpha in the top half
{-1.0, -1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.5,
-1.0, 1.0, 0.0, 1.0, 0.0, 0.5, 0.0, 0.0,
1.0, -1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.5,
1.0, 1.0, 0.0, 1.0, 1.0, 0.5, 1.0, 0.0},
//alpha in the bottom half
{-1.0, -1.0, 0.0, 1.0, 0.0, 0.5, 0.0, 1.0,
-1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.5,
1.0, -1.0, 0.0, 1.0, 1.0, 0.5, 1.0, 1.0,
1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.5}
};
#if TARGET_OS_SIMULATOR //simulator
#else
@interface QGHWDMetalRenderer () {
BOOL _renderingResourcesDisposed; //rendering resources have been released; rebuild them before the next render
matrix_float3x3 _currentColorConversionMatrix;
}
@property (nonatomic, strong) id<MTLBuffer> vertexBuffer;
@property (nonatomic, strong) id<MTLBuffer> yuvMatrixBuffer;
@property (nonatomic, strong) id<MTLRenderPipelineState> pipelineState; //This will keep track of the compiled render pipeline you're about to create.
@property (nonatomic, strong) id<MTLCommandQueue> commandQueue;
@property (nonatomic, assign) int vertexCount;
@property (nonatomic, assign) CVMetalTextureCacheRef videoTextureCache;//need release
@property (nonatomic, strong) QGVAPMetalShaderFunctionLoader *shaderFuncLoader;
@end
@implementation QGHWDMetalRenderer
#pragma mark - main
- (instancetype)initWithMetalLayer:(CAMetalLayer *)layer blendMode:(QGHWDTextureBlendMode)mode {
self = [super init];
if (self) {
_blendMode = mode;
if (!kQGHWDMetalRendererDevice) {
kQGHWDMetalRendererDevice = MTLCreateSystemDefaultDevice();
}
layer.device = kQGHWDMetalRendererDevice;
[self setupConstants];
[self setupPipelineStatesWithMetalLayer:layer];
}
return self;
}
/**
 Release all rendering resources (buffers, pipeline state, command queue and texture cache); they are rebuilt on the next render.
 */
- (void)dispose {
_commandQueue = nil;
_pipelineState = nil;
_vertexBuffer = nil;
_yuvMatrixBuffer = nil;
_shaderFuncLoader = nil;
if (_videoTextureCache) {
CVMetalTextureCacheFlush(_videoTextureCache, 0);
CFRelease(_videoTextureCache);
_videoTextureCache = NULL;
}
_renderingResourcesDisposed = YES;
}
- (void)dealloc {
[self dispose];
}
- (void)setupConstants {
//buffers
const void *vertices = [self suitableQuadVertices];
NSUInteger allocationSize = kQGQuadVerticesConstantsColumn * sizeof(float);
_vertexBuffer = [kQGHWDMetalRendererDevice newBufferWithBytes:vertices length:allocationSize options:kDefaultMTLResourceOption];
_vertexCount = kQGHWDVertexCount;
_currentColorConversionMatrix = kQGColorConversionMatrix601FullRangeDefault;
struct ColorParameters yuvMatrixs[] = {{_currentColorConversionMatrix,{0.5, 0.5}}};
NSUInteger yuvMatrixsDataSize = sizeof(struct ColorParameters);
_yuvMatrixBuffer = [kQGHWDMetalRendererDevice newBufferWithBytes:yuvMatrixs length:yuvMatrixsDataSize options:kDefaultMTLResourceOption];
}
- (void)updateMetalPropertiesIfNeed:(CVPixelBufferRef)pixelBuffer {
if (!pixelBuffer) {
return ;
}
CFTypeRef yCbCrMatrixType = CVBufferGetAttachment(pixelBuffer, kCVImageBufferYCbCrMatrixKey, NULL);
matrix_float3x3 matrix = kQGColorConversionMatrix601FullRangeDefault;
if (yCbCrMatrixType && CFStringCompare((CFStringRef)yCbCrMatrixType, kCVImageBufferYCbCrMatrix_ITU_R_709_2, 0) == kCFCompareEqualTo) {
matrix = kQGColorConversionMatrix709FullRangeDefault;
}
if (simd_equal(_currentColorConversionMatrix, matrix)) {
return ;
}
_currentColorConversionMatrix = matrix;
struct ColorParameters yuvMatrixs[] = {{_currentColorConversionMatrix,{0.5, 0.5}}};
NSUInteger yuvMatrixsDataSize = sizeof(struct ColorParameters);
_yuvMatrixBuffer = [kQGHWDMetalRendererDevice newBufferWithBytes:yuvMatrixs length:yuvMatrixsDataSize options:kDefaultMTLResourceOption];
}
- (void)setupPipelineStatesWithMetalLayer:(CAMetalLayer *)metalLayer {
self.shaderFuncLoader = [[QGVAPMetalShaderFunctionLoader alloc] initWithDevice:kQGHWDMetalRendererDevice];
id<MTLFunction> vertexProgram = [self.shaderFuncLoader loadFunctionWithName:kQGHWDVertexFunctionName];
id<MTLFunction> fragmentProgram = [self.shaderFuncLoader loadFunctionWithName:kQGHWDYUVFragmentFunctionName];
if (!vertexProgram || !fragmentProgram) {
VAP_Error(kQGVAPModuleCommon, @"setupPipelineStatesWithMetalLayer fail! cuz: shader load fail");
NSAssert(0, @"check if the .metal files have been compiled into the correct target!");
return ;
}
MTLRenderPipelineDescriptor *pipelineStateDescriptor = [MTLRenderPipelineDescriptor new];
pipelineStateDescriptor.vertexFunction = vertexProgram;
pipelineStateDescriptor.fragmentFunction = fragmentProgram;
pipelineStateDescriptor.colorAttachments[0].pixelFormat = metalLayer.pixelFormat;
NSError *psError = nil;
id<MTLRenderPipelineState> pipelineState = [kQGHWDMetalRendererDevice newRenderPipelineStateWithDescriptor:pipelineStateDescriptor error:&psError];
if (!pipelineState || psError) {
VAP_Error(kQGVAPModuleCommon, @"newRenderPipelineStateWithDescriptor error!:%@", psError);
return ;
}
self.pipelineState = pipelineState;
self.commandQueue = [kQGHWDMetalRendererDevice newCommandQueue];
CVReturn textureCacheError = CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, kQGHWDMetalRendererDevice, nil, &_videoTextureCache);
if (textureCacheError != kCVReturnSuccess) {
VAP_Error(kQGVAPModuleCommon, @"create texture cache fail!:%@", textureCacheError);
return ;
}
}
/**
 Render a CVPixelBufferRef directly with Metal; calls must stay on one rendering thread.
 @param pixelBuffer the decoded frame to render
 @param layer the target metalLayer
 */
- (void)renderPixelBuffer:(CVPixelBufferRef)pixelBuffer metalLayer:(CAMetalLayer *)layer {
if (!layer.superlayer || layer.bounds.size.width <= 0 || layer.bounds.size.height <= 0) {
//https://forums.developer.apple.com/thread/26278
VAP_Error(kQGVAPModuleCommon, @"quit rendering cuz layer.superlayer or size error is nil! superlayer:%@ height:%@ width:%@", layer.superlayer, @(layer.bounds.size.height), @(layer.bounds.size.width));
return ;
}
[self reconstructIfNeed:layer];
if (pixelBuffer == NULL || !self.commandQueue || !self.pipelineState) {
VAP_Error(kQGVAPModuleCommon, @"quit rendering cuz pixelbuffer is nil!");
return ;
}
[self updateMetalPropertiesIfNeed:pixelBuffer];
CVMetalTextureCacheFlush(_videoTextureCache, 0);
CVMetalTextureRef yTextureRef = nil, uvTextureRef = nil;
size_t yWidth = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0);
size_t yHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
size_t uvWidth = CVPixelBufferGetWidthOfPlane(pixelBuffer, 1);
size_t uvHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1);
//r8Unorm
CVReturn yStatus = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _videoTextureCache, pixelBuffer, nil, MTLPixelFormatR8Unorm, yWidth, yHeight, 0, &yTextureRef);
//rg8Unorm
CVReturn uvStatus = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _videoTextureCache, pixelBuffer, nil, MTLPixelFormatRG8Unorm, uvWidth, uvHeight, 1, &uvTextureRef);
if (yStatus != kCVReturnSuccess || uvStatus != kCVReturnSuccess) {
VAP_Error(kQGVAPModuleCommon, @"quit rendering cuz failing getting yuv texture-yStatus%@:uvStatus%@", @(yStatus), @(uvStatus));
return ;
}
id<MTLTexture> yTexture = CVMetalTextureGetTexture(yTextureRef);
id<MTLTexture> uvTexture = CVMetalTextureGetTexture(uvTextureRef);
CVBufferRelease(yTextureRef);
CVBufferRelease(uvTextureRef);
CVMetalTextureCacheFlush(_videoTextureCache, 0);
yTextureRef = NULL;
uvTextureRef = NULL;
if (!yTexture || !uvTexture || !layer) {
VAP_Error(kQGVAPModuleCommon, @"quit rendering cuz content is nil! y:%@ uv:%@, layer:%@", @(yTexture != nil), @(uvTexture != nil), @(layer != nil));
return ;
}
if (layer.drawableSize.width <= 0 || layer.drawableSize.height <= 0) {
VAP_Error(kQGVAPModuleCommon, @"quit rendering cuz drawableSize is 0");
return ;
}
id<CAMetalDrawable> drawable = layer.nextDrawable;
if (!drawable) {
VAP_Error(kQGVAPModuleCommon, @"quit rendering cuz nextDrawable is nil!");
return ;
}
MTLRenderPassDescriptor *renderPassDescriptor = [MTLRenderPassDescriptor new];
renderPassDescriptor.colorAttachments[0].texture = drawable.texture; //the texture to draw into for the result to appear on screen
renderPassDescriptor.colorAttachments[0].loadAction = MTLLoadActionClear; //clear the texture to clearColor before any drawing
renderPassDescriptor.colorAttachments[0].clearColor = MTLClearColorMake(1.0, 1.0, 1.0, 1.0);
id<MTLCommandBuffer> commandBuffer = [self.commandQueue commandBuffer];
id<MTLRenderCommandEncoder> renderEncoder = [commandBuffer renderCommandEncoderWithDescriptor:renderPassDescriptor];
[renderEncoder setRenderPipelineState:self.pipelineState];
[renderEncoder setVertexBuffer:self.vertexBuffer offset:0 atIndex:0];
[renderEncoder setFragmentBuffer:self.yuvMatrixBuffer offset:0 atIndex:0];
[renderEncoder setFragmentTexture:yTexture atIndex:QGHWDYUVFragmentTextureIndexLuma];
[renderEncoder setFragmentTexture:uvTexture atIndex:QGHWDYUVFragmentTextureIndexChroma];
[renderEncoder drawPrimitives:MTLPrimitiveTypeTriangleStrip vertexStart:0 vertexCount:self.vertexCount instanceCount:1];
[renderEncoder endEncoding];
[commandBuffer presentDrawable:drawable];
[commandBuffer commit];
}
#pragma mark - private
/**
 Rebuild the rendering resources if they have been disposed, so the renderer can be reused.
 @param layer the target metalLayer
 */
- (void)reconstructIfNeed:(CAMetalLayer *)layer {
if (_renderingResourcesDisposed) {
[self setupConstants];
[self setupPipelineStatesWithMetalLayer:layer];
_renderingResourcesDisposed = NO;
}
}
- (const void *)suitableQuadVertices {
switch (self.blendMode) {
case QGHWDTextureBlendMode_AlphaLeft:
return kQGQuadVerticesConstants[0];
case QGHWDTextureBlendMode_AlphaRight:
return kQGQuadVerticesConstants[1];
case QGHWDTextureBlendMode_AlphaTop:
return kQGQuadVerticesConstants[2];
case QGHWDTextureBlendMode_AlphaBottom:
return kQGQuadVerticesConstants[3];
default:
break;
}
return kQGQuadVerticesConstants[0];
}
@end
#endif
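
The matrices above are column-major (each inner brace of a matrix_float3x3 initializer is one column), and the renderer pairs them with a (0.5, 0.5) chroma offset in its ColorParameters buffer. A small CPU-side sketch of the same arithmetic for one full-range BT.601 sample, handy for sanity checks and not part of the renderer itself:

#import <simd/simd.h>
#import "QGHWDMetalRenderer.h"

// Sanity-check sketch: convert one normalized full-range BT.601 YUV sample to RGB,
// mirroring what the YUV fragment shader computes on the GPU.
static simd_float3 VAPConvertSample601FullRange(float y, float u, float v) {
    // Chroma is re-centered around zero before applying the column-major matrix.
    simd_float3 yuv = simd_make_float3(y, u - 0.5f, v - 0.5f);
    simd_float3 rgb = simd_mul(kQGColorConversionMatrix601FullRangeDefault, yuv);
    simd_float3 lo = simd_make_float3(0.0f, 0.0f, 0.0f);
    simd_float3 hi = simd_make_float3(1.0f, 1.0f, 1.0f);
    return simd_clamp(rgb, lo, hi); // clamp back into the displayable range
}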

View File

@@ -0,0 +1,47 @@
// QGHWDMetalView.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <UIKit/UIKit.h>
#import "VAPMacros.h"
//https://developer.apple.com/library/archive/documentation/Miscellaneous/Conceptual/MetalProgrammingGuide/Device/Device.html#//apple_ref/doc/uid/TP40014221-CH2-SW1
/*
2018-12-31 00:01:51.349229+0800 MetalTest[28134:2050088] [DYMTLInitPlatform] platform initialization successful
2018-12-31 00:01:51.413574+0800 MetalTest[28134:2050043] Metal GPU Frame Capture Enabled
2018-12-31 00:01:51.414037+0800 MetalTest[28134:2050043] Metal API Validation Enabled
2018-12-31 00:01:54.008682+0800 MetalTest[28134:2050086] Execution of the command buffer was aborted due to an error during execution. Insufficient Permission (to submit GPU work from background) (IOAF code 6)
2018-12-31 00:01:54.009053+0800 MetalTest[28134:2050086] Execution of the command buffer was aborted due to an error during execution. Insufficient Permission (to submit GPU work from background) (IOAF code 6)
2018-12-31 00:01:54.011370+0800 MetalTest[28134:2050086] Execution of the command buffer was aborted due to an error during execution. Insufficient Permission (to submit GPU work from background) (IOAF code 6)
2018-12-31 00:01:54.011710+0800 MetalTest[28134:2050086] Execution of the command buffer was aborted due to an error during execution. Insufficient Permission (to submit GPU work from background) (IOAF code 6)
*/
@protocol QGHWDMetelViewDelegate <NSObject>
- (void)onMetalViewUnavailable;
@end
@interface QGHWDMetalView : UIView
@property (nonatomic, weak) id<QGHWDMetelViewDelegate> delegate;
@property (nonatomic, assign) QGHWDTextureBlendMode blendMode;
- (instancetype)initWithFrame:(CGRect)frame blendMode:(QGHWDTextureBlendMode)mode;
- (void)display:(CVPixelBufferRef)pixelBuffer;
- (void)dispose;
@end

View File

@@ -0,0 +1,134 @@
// QGHWDMetalView.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGHWDMetalView.h"
#import "QGVAPLogger.h"
#import "QGHWDMetalRenderer.h"
#if TARGET_OS_SIMULATOR //simulator
@implementation QGHWDMetalView
- (instancetype)initWithFrame:(CGRect)frame blendMode:(QGHWDTextureBlendMode)mode {
return [self initWithFrame:frame];
}
- (void)display:(CVPixelBufferRef)pixelBuffer {}
-(void)dispose {}
@end
#else
@interface QGHWDMetalView ()
@property (nonatomic, strong) CAMetalLayer *metalLayer;
@property (nonatomic, strong) QGHWDMetalRenderer *renderer;
@property (nonatomic, assign) BOOL drawableSizeShouldUpdate;
@end
@implementation QGHWDMetalView
#pragma mark - override
+ (Class)layerClass {
return [CAMetalLayer class];
}
- (instancetype)initWithCoder:(NSCoder *)aDecoder {
NSAssert(0, @"initWithCoder: has not been implemented");
return nil;
}
- (instancetype)initWithFrame:(CGRect)frame {
if (self = [super initWithFrame:frame]) {
_drawableSizeShouldUpdate = YES;
_blendMode = QGHWDTextureBlendMode_AlphaLeft;
}
return self;
}
- (void)didMoveToWindow {
[super didMoveToWindow];
self.drawableSizeShouldUpdate = YES;
}
- (void)layoutSubviews {
[super layoutSubviews];
self.drawableSizeShouldUpdate = YES;
}
- (void)dealloc {
[self onMetalViewUnavailable];
}
#pragma mark - main
- (instancetype)initWithFrame:(CGRect)frame blendMode:(QGHWDTextureBlendMode)mode {
if (self = [super initWithFrame:frame]) {
_drawableSizeShouldUpdate = YES;
_blendMode = QGHWDTextureBlendMode_AlphaLeft;
_metalLayer = (CAMetalLayer *)self.layer;
_metalLayer.frame = self.frame;
_metalLayer.opaque = NO;
_blendMode = mode;
_renderer = [[QGHWDMetalRenderer alloc] initWithMetalLayer:_metalLayer blendMode:mode];
_metalLayer.contentsScale = [UIScreen mainScreen].scale;
_metalLayer.pixelFormat = MTLPixelFormatBGRA8Unorm;
_metalLayer.framebufferOnly = YES;
}
return self;
}
- (void)display:(CVPixelBufferRef)pixelBuffer {
if (!self.window) {
VAP_Event(kQGVAPModuleCommon, @"quit display pixelbuffer, cuz window is nil!");
[self onMetalViewUnavailable];
return ;
}
if (self.drawableSizeShouldUpdate) {
CGFloat nativeScale = [UIScreen mainScreen].nativeScale;
CGSize drawableSize = CGSizeMake(CGRectGetWidth(self.bounds)*nativeScale, CGRectGetHeight(self.bounds)*nativeScale);
self.metalLayer.drawableSize = drawableSize;
VAP_Event(kQGVAPModuleCommon, @"update drawablesize :%@", [NSValue valueWithCGSize:drawableSize]);
self.drawableSizeShouldUpdate = NO;
}
self.renderer.blendMode = self.blendMode;
[self.renderer renderPixelBuffer:pixelBuffer metalLayer:self.metalLayer];
}
/**
 Release the underlying renderer's resources.
 */
- (void)dispose {
[self.renderer dispose];
}
#pragma mark - private
- (void)onMetalViewUnavailable{
if ([self.delegate respondsToSelector:@selector(onMetalViewUnavailable)]) {
[self.delegate onMetalViewUnavailable];
}
}
@end
#endif
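
A minimal sketch of driving this view from a decode callback (the controller and method names are hypothetical; the pixel buffers are assumed to come from the player's hardware decoder):

#import <CoreVideo/CoreVideo.h>
#import "QGHWDMetalView.h"

@interface MyHWDPlayerController : UIViewController <QGHWDMetelViewDelegate> // hypothetical host
@property (nonatomic, strong) QGHWDMetalView *metalView;
@end

@implementation MyHWDPlayerController

- (void)viewDidLoad {
    [super viewDidLoad];
    // This asset packs its alpha data into the left half of each decoded frame.
    self.metalView = [[QGHWDMetalView alloc] initWithFrame:self.view.bounds
                                                 blendMode:QGHWDTextureBlendMode_AlphaLeft];
    self.metalView.delegate = self;
    [self.view addSubview:self.metalView];
}

// Called with each hardware-decoded frame.
- (void)onDecodedFrame:(CVPixelBufferRef)pixelBuffer {
    [self.metalView display:pixelBuffer];
}

- (void)onMetalViewUnavailable {
    // e.g. the view left the window; stop feeding frames and release GPU resources.
    [self.metalView dispose];
}

@end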

View File

@@ -0,0 +1,48 @@
// QGVAPMetalRenderer.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <Foundation/Foundation.h>
#import "QGVAPConfigModel.h"
#import <Metal/Metal.h>
#import "VAPMacros.h"
#if TARGET_OS_SIMULATOR //simulator
@interface QGVAPMetalRenderer : NSObject
@property (nonatomic, strong) QGVAPCommonInfo *commonInfo;
- (instancetype)initWithMetalLayer:(id)layer;
- (void)renderPixelBuffer:(CVPixelBufferRef)pixelBuffer metalLayer:(id)layer mergeInfos:(NSArray<QGVAPMergedInfo *> *)infos;
- (void)dispose;
@end
#else
@interface QGVAPMetalRenderer : NSObject
@property (nonatomic, strong) QGVAPCommonInfo *commonInfo;
@property (nonatomic, strong) QGVAPMaskInfo *maskInfo;
- (instancetype)initWithMetalLayer:(CAMetalLayer *)layer;
- (void)renderPixelBuffer:(CVPixelBufferRef)pixelBuffer metalLayer:(CAMetalLayer *)layer mergeInfos:(NSArray<QGVAPMergedInfo *> *)infos;
- (void)dispose;
@end
#endif

View File

@@ -0,0 +1,426 @@
// QGVAPMetalRenderer.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGVAPMetalRenderer.h"
#import "QGHWDMetalRenderer.h"
#import <MetalKit/MetalKit.h>
#import "QGVAPLogger.h"
#import <simd/simd.h>
#import "UIDevice+VAPUtil.h"
#import "QGVAPMetalUtil.h"
#import "QGVAPMetalShaderFunctionLoader.h"
#if TARGET_OS_SIMULATOR //simulator
#else
@interface QGVAPMetalRenderer () {
BOOL _renderingResourcesDisposed; //rendering resources have been released; rebuild them before the next render
matrix_float3x3 _currentColorConversionMatrix;
}
@property (nonatomic, strong) id<MTLBuffer> vertexBuffer;
@property (nonatomic, strong) id<MTLBuffer> yuvMatrixBuffer;
@property (nonatomic, strong) id<MTLBuffer> maskBlurBuffer;
@property (nonatomic, strong) id<MTLRenderPipelineState> attachmentPipelineState;
@property (nonatomic, strong) id<MTLRenderPipelineState> defaultMainPipelineState;
@property (nonatomic, strong) id<MTLRenderPipelineState> mainPipelineStateForMask; //main render pipeline when a mask is applied
@property (nonatomic, strong) id<MTLRenderPipelineState> mainPipelineStateForMaskBlur; //main render pipeline when the mask edge is blurred
@property (nonatomic, strong) id<MTLCommandQueue> commandQueue;
@property (nonatomic, assign) CVMetalTextureCacheRef videoTextureCache;//need release
@property (nonatomic, strong) QGVAPMetalShaderFunctionLoader *shaderFuncLoader;
@property (nonatomic, weak) CAMetalLayer *metalLayer;
@end
@implementation QGVAPMetalRenderer
- (instancetype)initWithMetalLayer:(CAMetalLayer *)layer {
if (self = [super init]) {
if (!kQGHWDMetalRendererDevice) {
kQGHWDMetalRendererDevice = MTLCreateSystemDefaultDevice();
}
layer.device = kQGHWDMetalRendererDevice;
_metalLayer = layer;
[self setupRenderContext];
}
return self;
}
#pragma mark - main
- (void)renderPixelBuffer:(CVPixelBufferRef)pixelBuffer metalLayer:(CAMetalLayer *)layer mergeInfos:(NSArray<QGVAPMergedInfo *> *)infos {
if (!layer.superlayer || layer.bounds.size.width <= 0 || layer.bounds.size.height <= 0) {
//https://forums.developer.apple.com/thread/26278
VAP_Error(kQGVAPModuleCommon, @"quit rendering cuz layer.superlayer or size error is nil! superlayer:%@ height:%@ width:%@", layer.superlayer, @(layer.bounds.size.height), @(layer.bounds.size.width));
return ;
}
[self reconstructIfNeed:layer];
if (pixelBuffer == NULL || !self.commandQueue) {
VAP_Error(kQGVAPModuleCommon, @"quit rendering cuz pixelbuffer is nil!");
return ;
}
[self updateMetalPropertiesIfNeed:pixelBuffer];
CVMetalTextureCacheFlush(_videoTextureCache, 0);
CVMetalTextureRef yTextureRef = nil, uvTextureRef = nil;
size_t yWidth = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0);
size_t yHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
size_t uvWidth = CVPixelBufferGetWidthOfPlane(pixelBuffer, 1);
size_t uvHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1);
//r8Unorm
CVReturn yStatus = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _videoTextureCache, pixelBuffer, nil, MTLPixelFormatR8Unorm, yWidth, yHeight, 0, &yTextureRef);
//rg8Unorm
CVReturn uvStatus = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _videoTextureCache, pixelBuffer, nil, MTLPixelFormatRG8Unorm, uvWidth, uvHeight, 1, &uvTextureRef);
if (yStatus != kCVReturnSuccess || uvStatus != kCVReturnSuccess) {
VAP_Error(kQGVAPModuleCommon, @"quit rendering cuz failing getting yuv texture-yStatus%@:uvStatus%@", @(yStatus), @(uvStatus));
return ;
}
id<MTLTexture> yTexture = CVMetalTextureGetTexture(yTextureRef);
id<MTLTexture> uvTexture = CVMetalTextureGetTexture(uvTextureRef);
CVBufferRelease(yTextureRef);
CVBufferRelease(uvTextureRef);
CVMetalTextureCacheFlush(_videoTextureCache, 0);
yTextureRef = NULL;
uvTextureRef = NULL;
if (!yTexture || !uvTexture || !layer) {
VAP_Error(kQGVAPModuleCommon, @"quit rendering cuz content is nil! y:%@ uv:%@, layer:%@", @(yTexture != nil), @(uvTexture != nil), @(layer != nil));
return ;
}
if (layer.drawableSize.width <= 0 || layer.drawableSize.height <= 0) {
VAP_Error(kQGVAPModuleCommon, @"quit rendering cuz drawableSize is 0");
return ;
}
id<CAMetalDrawable> drawable = layer.nextDrawable;
if (!drawable) {
VAP_Error(kQGVAPModuleCommon, @"quit rendering cuz nextDrawable is nil!");
return ;
}
MTLRenderPassDescriptor *renderPassDescriptor = [MTLRenderPassDescriptor new];
renderPassDescriptor.colorAttachments[0].texture = drawable.texture; //the texture to draw into for the result to appear on screen
renderPassDescriptor.colorAttachments[0].loadAction = MTLLoadActionClear; //clear the texture to clearColor before any drawing
renderPassDescriptor.colorAttachments[0].clearColor = MTLClearColorMake(0.0, 0.0, 0.0, 0.0);
id<MTLCommandBuffer> commandBuffer = [self.commandQueue commandBuffer];
id<MTLRenderCommandEncoder> renderEncoder = [commandBuffer renderCommandEncoderWithDescriptor:renderPassDescriptor];
if (renderEncoder == nil) {
VAP_Error(kQGVAPModuleCommon, @"quit rendering cuz renderEncoder:%p or self.pipelineState:%p is nil!", renderEncoder);
return ;
}
if (self.vertexBuffer == nil || self.yuvMatrixBuffer == nil) {
VAP_Error(kQGVAPModuleCommon, @"quit rendering cuz vertexBuffer:%p or yuvMatrixBuffer:%p is nil!", self.vertexBuffer, self.yuvMatrixBuffer);
[renderEncoder endEncoding];
return ;
}
[self drawBackground:yTexture uvTexture:uvTexture encoder:renderEncoder];
[self drawMergedAttachments:infos yTexture:yTexture uvTexture:uvTexture renderEncoder:renderEncoder metalLayer:layer];
[renderEncoder endEncoding];
[commandBuffer presentDrawable:drawable];
[commandBuffer commit];
}
- (void)drawBackground:(id<MTLTexture>)yTexture uvTexture:(id<MTLTexture>)uvTexture encoder:(id<MTLRenderCommandEncoder>)renderEncoder {
if (self.maskInfo) {
id<MTLTexture> maskTexture = self.maskInfo.texture;
if (!maskTexture) {
VAP_Error(kQGVAPModuleCommon, @"maskTexture error! maskTexture is nil");
return;
}
if (!self.mainPipelineStateForMask) {
VAP_Error(kQGVAPModuleCommon, @"maskPipelineState error! maskTexture is nil");
return;
}
if (self.maskInfo.blurLength > 0) {
[renderEncoder setRenderPipelineState:self.mainPipelineStateForMaskBlur];
[renderEncoder setVertexBuffer:self.vertexBuffer offset:0 atIndex:0];
[renderEncoder setFragmentBuffer:self.yuvMatrixBuffer offset:0 atIndex:0];
[renderEncoder setFragmentBuffer:self.maskBlurBuffer offset:0 atIndex:1];
[renderEncoder setFragmentTexture:yTexture atIndex:QGHWDYUVFragmentTextureIndexLuma];
[renderEncoder setFragmentTexture:uvTexture atIndex:QGHWDYUVFragmentTextureIndexChroma];
[renderEncoder setFragmentTexture:maskTexture atIndex:QGHWDYUVFragmentTextureIndexAttachmentStart];
[renderEncoder drawPrimitives:MTLPrimitiveTypeTriangleStrip vertexStart:0 vertexCount:4 instanceCount:1];
} else {
[renderEncoder setRenderPipelineState:self.mainPipelineStateForMask];
[renderEncoder setVertexBuffer:self.vertexBuffer offset:0 atIndex:0];
[renderEncoder setFragmentBuffer:self.yuvMatrixBuffer offset:0 atIndex:0];
[renderEncoder setFragmentTexture:yTexture atIndex:QGHWDYUVFragmentTextureIndexLuma];
[renderEncoder setFragmentTexture:uvTexture atIndex:QGHWDYUVFragmentTextureIndexChroma];
[renderEncoder setFragmentTexture:maskTexture atIndex:QGHWDYUVFragmentTextureIndexAttachmentStart];
[renderEncoder drawPrimitives:MTLPrimitiveTypeTriangleStrip vertexStart:0 vertexCount:4 instanceCount:1];
}
} else {
if (!self.defaultMainPipelineState) {
VAP_Error(kQGVAPModuleCommon, @"yuvPipelineState error! maskTexture is nil");
return;
}
[renderEncoder setRenderPipelineState:self.defaultMainPipelineState];
[renderEncoder setVertexBuffer:self.vertexBuffer offset:0 atIndex:0];
[renderEncoder setFragmentBuffer:self.yuvMatrixBuffer offset:0 atIndex:0];
[renderEncoder setFragmentTexture:yTexture atIndex:QGHWDYUVFragmentTextureIndexLuma];
[renderEncoder setFragmentTexture:uvTexture atIndex:QGHWDYUVFragmentTextureIndexChroma];
[renderEncoder drawPrimitives:MTLPrimitiveTypeTriangleStrip vertexStart:0 vertexCount:4 instanceCount:1];
}
}
- (void)dispose {
_commandQueue = nil;
_vertexBuffer = nil;
_yuvMatrixBuffer = nil;
_attachmentPipelineState = nil;
_shaderFuncLoader = nil;
if (_videoTextureCache) {
CVMetalTextureCacheFlush(_videoTextureCache, 0);
CFRelease(_videoTextureCache);
_videoTextureCache = NULL;
}
_renderingResourcesDisposed = YES;
_mainPipelineStateForMask = nil;
_defaultMainPipelineState = nil;
}
-(void)dealloc {
[self dispose];
}
- (void)setupRenderContext {
//constants
_currentColorConversionMatrix = kQGColorConversionMatrix601FullRangeDefault;
struct ColorParameters yuvMatrixs[] = {{_currentColorConversionMatrix,{0.5, 0.5}}};
NSUInteger yuvMatrixsDataSize = sizeof(struct ColorParameters);
_yuvMatrixBuffer = [kQGHWDMetalRendererDevice newBufferWithBytes:yuvMatrixs length:yuvMatrixsDataSize options:kDefaultMTLResourceOption];
//function loader
self.shaderFuncLoader = [[QGVAPMetalShaderFunctionLoader alloc] initWithDevice:kQGHWDMetalRendererDevice];
//command queue
self.commandQueue = [kQGHWDMetalRendererDevice newCommandQueue];
//texture cache
CVReturn textureCacheError = CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, kQGHWDMetalRendererDevice, nil, &_videoTextureCache);
if (textureCacheError != kCVReturnSuccess) {
VAP_Error(kQGVAPModuleCommon, @"create texture cache fail!:%@", textureCacheError);
}
}
- (void)drawMergedAttachments:(NSArray<QGVAPMergedInfo *> *)infos
yTexture:(id<MTLTexture>)yTexture
uvTexture:(id<MTLTexture>)uvTexture
renderEncoder:(id<MTLRenderCommandEncoder>)encoder
metalLayer:(CAMetalLayer *)layer {
if (infos.count == 0) {
return ;
}
if (!encoder || !self.commonInfo || !self.attachmentPipelineState) {
VAP_Error(kQGVAPModuleCommon, @"renderMergedAttachments error! infos:%@ encoder:%p commonInfo:%@ attachmentPipelineState:%p", @(infos.count), encoder, @(self.commonInfo != nil), self.attachmentPipelineState);
return ;
}
if (yTexture == nil || uvTexture == nil) {
VAP_Error(kQGVAPModuleCommon, @"renderMergedAttachments error! cuz yTexture:%p or uvTexture:%p is nil!", yTexture, uvTexture);
return ;
}
[infos enumerateObjectsUsingBlock:^(QGVAPMergedInfo * _Nonnull mergeInfo, NSUInteger idx, BOOL * _Nonnull stop) {
[encoder setRenderPipelineState:self.attachmentPipelineState];
id<MTLTexture> sourceTexture = mergeInfo.source.texture; //texture of the merged source (e.g. avatar or text image)
id<MTLBuffer> vertexBuffer = [mergeInfo vertexBufferWithContainerSize:self.commonInfo.size maskContianerSize:self.commonInfo.videoSize device:kQGHWDMetalRendererDevice];
id<MTLBuffer> colorParamsBuffer = mergeInfo.source.colorParamsBuffer;
id<MTLBuffer> yuvMatrixBuffer = self.yuvMatrixBuffer;
if (!sourceTexture || !vertexBuffer || !colorParamsBuffer || !yuvMatrixBuffer) {
//VAP_Error(kQGVAPModuleCommon, @"quit attachment:%p cuz-source:%p vertex:%p",mergeInfo, sourceTexture, vertexBuffer);
return ;
}
[encoder setVertexBuffer:vertexBuffer offset:0 atIndex:0];
[encoder setFragmentBuffer:yuvMatrixBuffer offset:0 atIndex:0];
[encoder setFragmentBuffer:colorParamsBuffer offset:0 atIndex:1];
//set the textures: video luma/chroma planes plus the attachment source
[encoder setFragmentTexture:yTexture atIndex:QGHWDYUVFragmentTextureIndexLuma];
[encoder setFragmentTexture:uvTexture atIndex:QGHWDYUVFragmentTextureIndexChroma];
[encoder setFragmentTexture:sourceTexture atIndex:QGHWDYUVFragmentTextureIndexAttachmentStart];
[encoder drawPrimitives:MTLPrimitiveTypeTriangleStrip vertexStart:0 vertexCount:4 instanceCount:1];
}];
}
#pragma mark - setter&getter
- (id<MTLBuffer>)maskBlurBuffer {
if (!_maskBlurBuffer) {
struct MaskParameters parameters[] = {{kQGBlurWeightMatrixDefault, 3, 0.01}};
NSUInteger parametersSize = sizeof(struct MaskParameters);
_maskBlurBuffer = [kQGHWDMetalRendererDevice newBufferWithBytes:parameters length:parametersSize options:kDefaultMTLResourceOption];
}
return _maskBlurBuffer;
}
- (void)setCommonInfo:(QGVAPCommonInfo *)commonInfo {
_commonInfo = commonInfo;
[self updateMainVertexBuffer];
}
- (void)setMaskInfo:(QGVAPMaskInfo *)maskInfo {
if (maskInfo && (!maskInfo.data || maskInfo.dataSize.width <= 0 || maskInfo.dataSize.height <= 0)) {
VAP_Error(kQGVAPModuleCommon, @"setMaskInfo fail: data:%@, size:%@", maskInfo.data, NSStringFromCGSize(maskInfo.dataSize));
return;
}
if (_maskInfo == maskInfo) {
return ;
}
_maskInfo = maskInfo;
if (_vertexBuffer) {
[self updateMainVertexBuffer];
}
}
#pragma mark - pipelines
- (id<MTLRenderPipelineState>)createPipelineState:(NSString *)vertexFunction fragmentFunction:(NSString *)fragmentFunction {
id<MTLFunction> vertexProgram = [self.shaderFuncLoader loadFunctionWithName:vertexFunction];
id<MTLFunction> fragmentProgram = [self.shaderFuncLoader loadFunctionWithName:fragmentFunction];
if (!vertexProgram || !fragmentProgram) {
VAP_Error(kQGVAPModuleCommon, @"setupPipelineStatesWithMetalLayer fail! cuz: shader load fail!");
NSAssert(0, @"check if .metal files been compiled to correct target!");
return nil;
}
//alpha blending setup; references:
//https://objccn.io/issue-3-1/
//https://www.andersriggelsen.dk/glblendfunc.php
MTLRenderPipelineDescriptor *pipelineStateDescriptor = [MTLRenderPipelineDescriptor new];
pipelineStateDescriptor.vertexFunction = vertexProgram;
pipelineStateDescriptor.fragmentFunction = fragmentProgram;
pipelineStateDescriptor.colorAttachments[0].pixelFormat = _metalLayer.pixelFormat;
[pipelineStateDescriptor.colorAttachments[0] setBlendingEnabled:YES];
pipelineStateDescriptor.colorAttachments[0].rgbBlendOperation = MTLBlendOperationAdd;
pipelineStateDescriptor.colorAttachments[0].alphaBlendOperation = MTLBlendOperationAdd;
pipelineStateDescriptor.colorAttachments[0].sourceRGBBlendFactor = MTLBlendFactorSourceAlpha;
pipelineStateDescriptor.colorAttachments[0].sourceAlphaBlendFactor = MTLBlendFactorSourceAlpha;
pipelineStateDescriptor.colorAttachments[0].destinationRGBBlendFactor = MTLBlendFactorOneMinusSourceAlpha;
pipelineStateDescriptor.colorAttachments[0].destinationAlphaBlendFactor = MTLBlendFactorOneMinusSourceAlpha;
NSError *psError = nil;
id<MTLRenderPipelineState> pipelineState = [kQGHWDMetalRendererDevice newRenderPipelineStateWithDescriptor:pipelineStateDescriptor error:&psError];
if (!pipelineState || psError) {
VAP_Error(kQGVAPModuleCommon, @"newRenderPipelineStateWithDescriptor error!:%@", psError);
return nil;
}
return pipelineState;
}
- (id<MTLRenderPipelineState>)defaultMainPipelineState {
if (!_defaultMainPipelineState) {
_defaultMainPipelineState = [self createPipelineState:kVAPVertexFunctionName fragmentFunction:kVAPYUVFragmentFunctionName];
}
return _defaultMainPipelineState;
}
- (id<MTLRenderPipelineState>)mainPipelineStateForMask {
if (!_mainPipelineStateForMask) {
_mainPipelineStateForMask = [self createPipelineState:kVAPVertexFunctionName fragmentFunction:kVAPMaskFragmentFunctionName];
}
return _mainPipelineStateForMask;
}
- (id<MTLRenderPipelineState>)attachmentPipelineState {
if (!_attachmentPipelineState) {
_attachmentPipelineState = [self createPipelineState:kVAPAttachmentVertexFunctionName fragmentFunction:kVAPAttachmentFragmentFunctionName];
}
return _attachmentPipelineState;
}
- (id<MTLRenderPipelineState>)mainPipelineStateForMaskBlur {
if (!_mainPipelineStateForMaskBlur) {
_mainPipelineStateForMaskBlur = [self createPipelineState:kVAPVertexFunctionName fragmentFunction:kVAPMaskBlurFragmentFunctionName];
}
return _mainPipelineStateForMaskBlur;
}
#pragma mark - private
- (void)reconstructIfNeed:(CAMetalLayer *)layer {
if (_renderingResourcesDisposed) {
[self setupRenderContext];
_renderingResourcesDisposed = NO;
}
}
- (void)updateMetalPropertiesIfNeed:(CVPixelBufferRef)pixelBuffer {
if (!pixelBuffer) {
return ;
}
CFTypeRef yCbCrMatrixType = CVBufferGetAttachment(pixelBuffer, kCVImageBufferYCbCrMatrixKey, NULL);
matrix_float3x3 matrix = kQGColorConversionMatrix601FullRangeDefault;
if (yCbCrMatrixType && CFStringCompare((CFStringRef)yCbCrMatrixType, kCVImageBufferYCbCrMatrix_ITU_R_709_2, 0) == kCFCompareEqualTo) {
matrix = kQGColorConversionMatrix709FullRangeDefault;
}
if (simd_equal(_currentColorConversionMatrix, matrix)) {
return ;
}
_currentColorConversionMatrix = matrix;
struct ColorParameters yuvMatrixs[] = {{_currentColorConversionMatrix,{0.5, 0.5}}};
NSUInteger yuvMatrixsDataSize = sizeof(struct ColorParameters);
_yuvMatrixBuffer = [kQGHWDMetalRendererDevice newBufferWithBytes:yuvMatrixs length:yuvMatrixsDataSize options:kDefaultMTLResourceOption];
}
- (void)updateMainVertexBuffer {
const int colunmCountForVertices = 4, colunmCountForCoordinate = 2, vertexDataLength = 40; //per vertex: position (x,y,z,w) + three (u,v) texture coords
static float vertexData[vertexDataLength]; //position + rgb/alpha/mask texture coords, 10 floats per vertex
float rgbCoordinates[8], alphaCoordinates[8];
float maskCoordinates[8] = {0};
const void *vertices = kVAPMTLVerticesIdentity;
genMTLTextureCoordinates(self.commonInfo.rgbAreaRect, self.commonInfo.videoSize, rgbCoordinates, NO, 0);
genMTLTextureCoordinates(self.commonInfo.alphaAreaRect, self.commonInfo.videoSize, alphaCoordinates, NO, 0);
if (self.maskInfo) {
genMTLTextureCoordinates(self.maskInfo.sampleRect, self.maskInfo.dataSize, maskCoordinates, NO, 0);
}
int indexForVertexData = 0;
//interleave the positions with the rgb/alpha/mask texture coordinates
for (int i = 0; i < 4 * colunmCountForVertices; i ++) {
//copy the position component
vertexData[indexForVertexData++] = ((float*)vertices)[i];
//after the last position component of each vertex, append its texture coordinates
if (i%colunmCountForVertices == colunmCountForVertices-1) {
int row = i/colunmCountForVertices;
//rgb
vertexData[indexForVertexData++] = ((float*)rgbCoordinates)[row*colunmCountForCoordinate];
vertexData[indexForVertexData++] = ((float*)rgbCoordinates)[row*colunmCountForCoordinate+1];
//alpha
vertexData[indexForVertexData++] = ((float*)alphaCoordinates)[row*colunmCountForCoordinate];
vertexData[indexForVertexData++] = ((float*)alphaCoordinates)[row*colunmCountForCoordinate+1];
//mask
vertexData[indexForVertexData++] = ((float*)maskCoordinates)[row*colunmCountForCoordinate];
vertexData[indexForVertexData++] = ((float*)maskCoordinates)[row*colunmCountForCoordinate+1];
}
}
NSUInteger allocationSize = vertexDataLength * sizeof(float);
id<MTLBuffer> vertexBuffer = [kQGHWDMetalRendererDevice newBufferWithBytes:vertexData length:allocationSize options:kDefaultMTLResourceOption];
_vertexBuffer = vertexBuffer;
}
@end
#endif
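
updateMainVertexBuffer above packs, for each of the four quad vertices, a clip-space position followed by rgb, alpha and mask texture coordinates. An illustrative struct describing that layout (the renderer itself writes a flat float array; the struct name is hypothetical):

// Illustrative layout only; QGVAPMetalRenderer packs these as plain floats,
// 10 per vertex and 40 in total for the triangle-strip quad.
typedef struct {
    float position[4];   // clip-space x, y, z, w
    float rgbCoord[2];   // samples the colour half of the decoded frame
    float alphaCoord[2]; // samples the alpha half of the decoded frame
    float maskCoord[2];  // samples the optional mask texture (zeros when no mask is set)
} VAPMainVertexSketch;   // sizeof == 40 bytes; four of these make up the 40-float vertexData array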

View File

@@ -0,0 +1,37 @@
// QGVAPMetalView.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <UIKit/UIKit.h>
#import "QGVAPConfigModel.h"
@class QGVAPMaskInfo;
@protocol QGVAPMetalViewDelegate <NSObject>
- (void)onMetalViewUnavailable;
@end
@interface QGVAPMetalView : UIView
@property (nonatomic, weak) id<QGVAPMetalViewDelegate> delegate;
@property (nonatomic, strong) QGVAPCommonInfo *commonInfo;
@property (nonatomic, strong) QGVAPMaskInfo *maskInfo;
- (void)display:(CVPixelBufferRef)pixelBuffer mergeInfos:(NSArray<QGVAPMergedInfo *> *)infos;
- (void)dispose;
@end

View File

@@ -0,0 +1,132 @@
// QGVAPMetalView.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGVAPMetalView.h"
#import "QGVAPMetalRenderer.h"
#import "QGVAPLogger.h"
#if TARGET_OS_SIMULATOR //simulator
@implementation QGVAPMetalView
- (void)display:(CVPixelBufferRef)pixelBuffer mergeInfos:(NSArray<QGVAPMergedInfo *> *)infos {}
- (void)dispose {}
@end
#else
@interface QGVAPMetalView ()
@property (nonatomic, strong) CAMetalLayer *metalLayer;
@property (nonatomic, strong) QGVAPMetalRenderer *renderer;
@property (nonatomic, assign) BOOL drawableSizeShouldUpdate;
@end
@implementation QGVAPMetalView
#pragma mark - override
+ (Class)layerClass {
return [CAMetalLayer class];
}
- (instancetype)initWithCoder:(NSCoder *)aDecoder {
NSAssert(0, @"initWithCoder: has not been implemented");
if (self = [super initWithCoder:aDecoder]) {
}
return self;
}
- (instancetype)initWithFrame:(CGRect)frame {
if (self = [super initWithFrame:frame]) {
_drawableSizeShouldUpdate = YES;
_metalLayer = (CAMetalLayer *)self.layer;
_metalLayer.frame = self.frame;
_metalLayer.opaque = NO;
_renderer = [[QGVAPMetalRenderer alloc] initWithMetalLayer:_metalLayer];
_metalLayer.contentsScale = [UIScreen mainScreen].scale;
_metalLayer.pixelFormat = MTLPixelFormatBGRA8Unorm;
_metalLayer.framebufferOnly = YES;
}
return self;
}
- (void)didMoveToWindow {
[super didMoveToWindow];
self.drawableSizeShouldUpdate = YES;
}
- (void)layoutSubviews {
[super layoutSubviews];
self.drawableSizeShouldUpdate = YES;
}
- (void)dealloc {
[self onMetalViewUnavailable];
}
#pragma mark - getter&setter
- (QGVAPCommonInfo *)commonInfo {
return self.renderer.commonInfo;
}
- (void)setCommonInfo:(QGVAPCommonInfo *)commonInfo {
[self.renderer setCommonInfo:commonInfo];
}
- (void)setMaskInfo:(QGVAPMaskInfo *)maskInfo {
[self.renderer setMaskInfo:maskInfo];
}
#pragma mark - main
- (void)display:(CVPixelBufferRef)pixelBuffer mergeInfos:(NSArray<QGVAPMergedInfo *> *)infos {
if (!self.window) {
VAP_Event(kQGVAPModuleCommon, @"quit display pixelbuffer, cuz window is nil!");
[self onMetalViewUnavailable];
return ;
}
if (self.drawableSizeShouldUpdate) {
CGFloat nativeScale = [UIScreen mainScreen].nativeScale;
CGSize drawableSize = CGSizeMake(CGRectGetWidth(self.bounds)*nativeScale, CGRectGetHeight(self.bounds)*nativeScale);
self.metalLayer.drawableSize = drawableSize;
VAP_Event(kQGVAPModuleCommon, @"update drawablesize :%@", [NSValue valueWithCGSize:drawableSize]);
self.drawableSizeShouldUpdate = NO;
}
[self.renderer renderPixelBuffer:pixelBuffer metalLayer:self.metalLayer mergeInfos:infos];
}
- (void)dispose {
[self.renderer dispose];
}
#pragma mark - private
- (void)onMetalViewUnavailable{
if ([self.delegate respondsToSelector:@selector(onMetalViewUnavailable)]) {
[self.delegate onMetalViewUnavailable];
}
}
@end
#endif
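
A minimal sketch of feeding this view from a decode loop (the host object and its property are hypothetical; parsing the VAP config into QGVAPCommonInfo and per-frame QGVAPMergedInfo arrays is assumed to happen elsewhere in the player):

#import <CoreVideo/CoreVideo.h>
#import "QGVAPMetalView.h"

@interface MyVAPFrameSink : NSObject // hypothetical host object, for illustration only
@property (nonatomic, strong) QGVAPMetalView *vapMetalView;
@end

@implementation MyVAPFrameSink

- (void)renderDecodedFrame:(CVPixelBufferRef)pixelBuffer
                commonInfo:(QGVAPCommonInfo *)commonInfo
                mergeInfos:(NSArray<QGVAPMergedInfo *> *)mergeInfos {
    if (self.vapMetalView.commonInfo != commonInfo) {
        self.vapMetalView.commonInfo = commonInfo; // triggers the main vertex buffer rebuild
    }
    // mergeInfos carry the per-frame attachments (avatar, nickname, ...) to composite.
    [self.vapMetalView display:pixelBuffer mergeInfos:mergeInfos];
}

@end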

View File

@@ -0,0 +1,41 @@
// QGHWDMP4OpenGLView.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import <UIKit/UIKit.h>
#import <OpenGLES/ES2/gl.h>
#import <OpenGLES/ES2/glext.h>
#import "UIView+VAP.h"
@protocol QGHWDMP4OpenGLViewDelegate <NSObject>
- (void)onViewUnavailableStatus;
@end
@interface QGHWDMP4OpenGLView : UIView
@property (nonatomic, strong) EAGLContext *glContext;
@property (nonatomic, weak) id<QGHWDMP4OpenGLViewDelegate> displayDelegate;
@property (nonatomic, assign) QGHWDTextureBlendMode blendMode;
@property (nonatomic, assign) BOOL pause;
- (void)setupGL;
- (void)displayPixelBuffer:(CVPixelBufferRef)pixelBuffer;
- (void)dispose;
//update glcontext's viewport size by layer bounds
- (void)updateBackingSize;
@end
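
A minimal wiring sketch for this view's lifecycle (the helper functions and the container are hypothetical; error handling is omitted):

#import <CoreVideo/CoreVideo.h>
#import "QGHWDMP4OpenGLView.h"

// Hypothetical wiring, for illustration only; `container` stands in for the host view
// and `pixelBuffer` for a hardware-decoded frame.
static QGHWDMP4OpenGLView *VAPMakeOpenGLFallbackView(UIView *container) {
    QGHWDMP4OpenGLView *glView = [[QGHWDMP4OpenGLView alloc] initWithFrame:container.bounds];
    glView.blendMode = QGHWDTextureBlendMode_AlphaLeft; // alpha packed in the left half
    [container addSubview:glView];
    [glView setupGL]; // compile shaders, create buffers and the texture cache
    return glView;
}

static void VAPRenderFrameOnGLView(QGHWDMP4OpenGLView *glView, CVPixelBufferRef pixelBuffer) {
    // must run on the thread that owns the EAGLContext
    [glView displayPixelBuffer:pixelBuffer];
}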

View File

@@ -0,0 +1,645 @@
// QGHWDMP4OpenGLView.m
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#import "QGHWDMP4OpenGLView.h"
#import <QuartzCore/QuartzCore.h>
#import <AVFoundation/AVUtilities.h>
#import <mach/mach_time.h>
#import <GLKit/GLKit.h>
#import "VAPMacros.h"
// Uniform index.
enum {
HWD_UNIFORM_Y,
HWD_UNIFORM_UV,
HWD_UNIFORM_COLOR_CONVERSION_MATRIX,
HWD_NUM_UNIFORMS
};
GLint hwd_uniforms[HWD_NUM_UNIFORMS];
// Attribute index.
enum {
ATTRIB_VERTEX,
ATTRIB_TEXCOORD_RGB,
ATTRIB_TEXCOORD_ALPHA,
NUM_ATTRIBUTES
};
// BT.709-HDTV.
static const GLfloat kQGColorConversion709[] = {
1.164, 1.164, 1.164,
0.0, -0.213, 2.112,
1.793, -0.533, 0.0,
};
// BT.601 full range-http://www.equasys.de/colorconversion.html
const GLfloat kQGColorConversion601FullRange[] = {
1.0, 1.0, 1.0,
0.0, -0.343, 1.765,
1.4, -0.711, 0.0,
};
// texture coords for blend
const GLfloat textureCoordLeft[] = { //samples the left half
0.5, 0.0,
0.0, 0.0,
0.5, 1.0,
0.0, 1.0
};
const GLfloat textureCoordRight[] = { //samples the right half
1.0, 0.0,
0.5, 0.0,
1.0, 1.0,
0.5, 1.0
};
const GLfloat textureCoordTop[] = { //samples the top half
1.0, 0.0,
0.0, 0.0,
1.0, 0.5,
0.0, 0.5
};
const GLfloat textureCoordBottom[] = { //samples the bottom half
1.0, 0.5,
0.0, 0.5,
1.0, 1.0,
0.0, 1.0
};
#undef cos
#undef sin
NSString *const kVertexShaderSource = SHADER_STRING
(
attribute vec4 position;
attribute vec2 RGBTexCoord;
attribute vec2 alphaTexCoord;
varying vec2 RGBTexCoordVarying;
varying vec2 alphaTexCoordVarying;
void main()
{
float preferredRotation = 3.14;
mat4 rotationMatrix = mat4(cos(preferredRotation), -sin(preferredRotation), 0.0, 0.0,sin(preferredRotation),cos(preferredRotation), 0.0, 0.0,0.0,0.0,1.0,0.0,0.0,0.0, 0.0,1.0);
gl_Position = rotationMatrix * position;
RGBTexCoordVarying = RGBTexCoord;
alphaTexCoordVarying = alphaTexCoord;
}
);
NSString *const kFragmentShaderSource = SHADER_STRING
(
varying highp vec2 RGBTexCoordVarying;
varying highp vec2 alphaTexCoordVarying;
precision mediump float;
uniform sampler2D SamplerY;
uniform sampler2D SamplerUV;
uniform mat3 colorConversionMatrix;
void main()
{
mediump vec3 yuv_rgb;
lowp vec3 rgb_rgb;
mediump vec3 yuv_alpha;
lowp vec3 rgb_alpha;
// Subtract constants so that the video range starts at 0
yuv_rgb.x = (texture2D(SamplerY, RGBTexCoordVarying).r);// - (16.0/255.0));
yuv_rgb.yz = (texture2D(SamplerUV, RGBTexCoordVarying).ra - vec2(0.5, 0.5));
rgb_rgb = colorConversionMatrix * yuv_rgb;
yuv_alpha.x = (texture2D(SamplerY, alphaTexCoordVarying).r);// - (16.0/255.0));
yuv_alpha.yz = (texture2D(SamplerUV, alphaTexCoordVarying).ra - vec2(0.5, 0.5));
rgb_alpha = colorConversionMatrix * yuv_alpha;
gl_FragColor = vec4(rgb_rgb,rgb_alpha.r);
// gl_FragColor = vec4(1, 0, 0, 1);
}
);
@interface QGHWDMP4OpenGLView() {
GLint _backingWidth;
GLint _backingHeight;
CVOpenGLESTextureRef _lumaTexture;
CVOpenGLESTextureRef _chromaTexture;
CVOpenGLESTextureCacheRef _textureCache;
GLuint _frameBufferHandle;
GLuint _colorBufferHandle;
const GLfloat *_preferredConversion;
}
@property GLuint program;
- (void)setupBuffers;
- (void)cleanupTextures;
- (BOOL)isValidateProgram:(GLuint)prog;
- (BOOL)loadShaders;
- (BOOL)compileShader:(GLuint *)shader type:(GLenum)type source:(NSString *)source;
- (BOOL)linkProgram:(GLuint)prog;
@end
@implementation QGHWDMP4OpenGLView
+ (Class)layerClass {
return [CAEAGLLayer class];
}
- (id)initWithCoder:(NSCoder *)aDecoder {
if ((self = [super initWithCoder:aDecoder])) {
if (![self commonInit]) {
return nil;
}
}
return self;
}
- (instancetype)init {
if (self = [super init]) {
if (![self commonInit]) {
return nil;
}
}
return self;
}
- (instancetype)initWithFrame:(CGRect)frame {
if (self = [super initWithFrame:frame]) {
if (![self commonInit]) {
return nil;
}
}
return self;
}
- (BOOL)commonInit {
self.contentScaleFactor = [[UIScreen mainScreen] scale];
CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;
eaglLayer.opaque = NO;
eaglLayer.drawableProperties = @{ kEAGLDrawablePropertyRetainedBacking :[NSNumber numberWithBool:NO],
kEAGLDrawablePropertyColorFormat : kEAGLColorFormatRGBA8};
_glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
if (!_glContext || ![EAGLContext setCurrentContext:_glContext] || ![self loadShaders]) {
return NO;
}
_preferredConversion = kQGColorConversion709;
return YES;
}
- (void)dealloc {
[self cleanupTextures];
if(_textureCache) {
CFRelease(_textureCache);
}
if ([self.displayDelegate respondsToSelector:@selector(onViewUnavailableStatus)]) {
[self.displayDelegate onViewUnavailableStatus];
}
}
# pragma mark - OpenGL setup
/**
 Set up the OpenGL ES environment: buffers, shader program, uniforms and the texture cache.
 */
- (void)setupGL {
VAP_Info(kQGVAPModuleCommon, @"setupGL");
[EAGLContext setCurrentContext:_glContext];
[self setupBuffers];
[self loadShaders];
glUseProgram(self.program);
glUniform1i(hwd_uniforms[HWD_UNIFORM_Y], 0);
glUniform1i(hwd_uniforms[HWD_UNIFORM_UV], 1);
glUniformMatrix3fv(hwd_uniforms[HWD_UNIFORM_COLOR_CONVERSION_MATRIX], 1, GL_FALSE, _preferredConversion);
// Create CVOpenGLESTextureCacheRef for optimal CVPixelBufferRef to GLES texture conversion.
if (!_textureCache) {
CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, _glContext, NULL, &_textureCache);
if (err != noErr) {
VAP_Event(kQGVAPModuleCommon, @"Error at CVOpenGLESTextureCacheCreate %d", err);
return;
}
}
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
}
#pragma mark - Utilities
- (void)setupBuffers {
glDisable(GL_DEPTH_TEST);
glEnableVertexAttribArray(ATTRIB_VERTEX);
glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), 0);
glEnableVertexAttribArray(ATTRIB_TEXCOORD_RGB);
glVertexAttribPointer(ATTRIB_TEXCOORD_RGB, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), 0);
glEnableVertexAttribArray(ATTRIB_TEXCOORD_ALPHA);
glVertexAttribPointer(ATTRIB_TEXCOORD_ALPHA, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), 0);
glGenFramebuffers(1, &_frameBufferHandle);
glBindFramebuffer(GL_FRAMEBUFFER, _frameBufferHandle);
glGenRenderbuffers(1, &_colorBufferHandle);
glBindRenderbuffer(GL_RENDERBUFFER, _colorBufferHandle);
[_glContext renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer *)self.layer];
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_backingWidth);
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_backingHeight);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _colorBufferHandle);
if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
VAP_Event(kQGVAPModuleCommon, @"Failed to make complete framebuffer object %x", glCheckFramebufferStatus(GL_FRAMEBUFFER));
}
}
- (void)layoutSubviews {
[super layoutSubviews];
[self updateBackingSize];
}
/**
 Update the render buffer backing size from the current layer bounds.
 */
- (void)updateBackingSize {
if ([EAGLContext currentContext] != _glContext) {
[EAGLContext setCurrentContext:_glContext];
}
[_glContext renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer *)self.layer];
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_backingWidth);
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_backingHeight);
}
- (void)cleanupTextures {
if (_lumaTexture) {
CFRelease(_lumaTexture);
_lumaTexture = NULL;
}
if (_chromaTexture) {
CFRelease(_chromaTexture);
_chromaTexture = NULL;
}
CVOpenGLESTextureCacheFlush(_textureCache, 0);
}
#pragma mark - OpenGLES drawing
/**
 Render one decoded frame.
 @param pixelBuffer pixel buffer obtained from the decoded sample buffer
 */
- (void)displayPixelBuffer:(CVPixelBufferRef)pixelBuffer {
if (!self.window && [self.displayDelegate respondsToSelector:@selector(onViewUnavailableStatus)]) {
[self.displayDelegate onViewUnavailableStatus];
return ;
}
if ([EAGLContext currentContext] != _glContext) {
[EAGLContext setCurrentContext:_glContext];
}
CVReturn err;
if (pixelBuffer != NULL) {
int frameWidth = (int)CVPixelBufferGetWidth(pixelBuffer);
int frameHeight = (int)CVPixelBufferGetHeight(pixelBuffer);
if (!_textureCache) {
VAP_Event(kQGVAPModuleCommon, @"No video texture cache");
return;
}
[self cleanupTextures];
_preferredConversion = kQGColorConversion601FullRange;
//y
glActiveTexture(GL_TEXTURE0);
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
_textureCache,
pixelBuffer,
NULL,
GL_TEXTURE_2D,
GL_LUMINANCE,
frameWidth,
frameHeight,
GL_LUMINANCE,
GL_UNSIGNED_BYTE,
0,
&_lumaTexture);
if (err) {
VAP_Event(kQGVAPModuleCommon, @"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
}
glBindTexture(CVOpenGLESTextureGetTarget(_lumaTexture), CVOpenGLESTextureGetName(_lumaTexture));
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
// uv
glActiveTexture(GL_TEXTURE1);
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
_textureCache,
pixelBuffer,
NULL,
GL_TEXTURE_2D,
GL_LUMINANCE_ALPHA,
frameWidth / 2.0,
frameHeight / 2.0,
GL_LUMINANCE_ALPHA,
GL_UNSIGNED_BYTE,
1,
&_chromaTexture);
if (err) {
VAP_Error(kQGVAPModuleCommon, @"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
}
glBindTexture(CVOpenGLESTextureGetTarget(_chromaTexture), CVOpenGLESTextureGetName(_chromaTexture));
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glBindFramebuffer(GL_FRAMEBUFFER, _frameBufferHandle);
// Set the view port to the entire view.
glViewport(0, 0, _backingWidth, _backingHeight);
}
// glClearColor(0.1f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(self.program);
glUniformMatrix3fv(hwd_uniforms[HWD_UNIFORM_COLOR_CONVERSION_MATRIX], 1, GL_FALSE, _preferredConversion);
//fit the video into the layer while preserving its aspect ratio
CGRect vertexRect = AVMakeRectWithAspectRatioInsideRect(CGSizeMake(_backingWidth, _backingHeight), self.layer.bounds);
//normalized sampling size
CGSize normalizedSamplingSize = CGSizeMake(0.0, 0.0);
CGSize cropScaleAmount = CGSizeMake(vertexRect.size.width/self.layer.bounds.size.width, vertexRect.size.height/self.layer.bounds.size.height);
if (cropScaleAmount.width > cropScaleAmount.height) {
normalizedSamplingSize.width = 1.0;
normalizedSamplingSize.height = cropScaleAmount.height/cropScaleAmount.width;
} else {
normalizedSamplingSize.width = 1.0;
normalizedSamplingSize.height = cropScaleAmount.width/cropScaleAmount.height;
}
GLfloat quadVertexData [] = {
-1 * normalizedSamplingSize.width, -1 * normalizedSamplingSize.height,
normalizedSamplingSize.width, -1 * normalizedSamplingSize.height,
-1 * normalizedSamplingSize.width, normalizedSamplingSize.height,
normalizedSamplingSize.width, normalizedSamplingSize.height,
};
    // Upload quad positions and RGB/alpha texture coordinates, then draw the strip
glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, quadVertexData);
glEnableVertexAttribArray(ATTRIB_VERTEX);
glVertexAttribPointer(ATTRIB_TEXCOORD_RGB, 2, GL_FLOAT, 0, 0, [self quadTextureRGBData]);
glEnableVertexAttribArray(ATTRIB_TEXCOORD_RGB);
glVertexAttribPointer(ATTRIB_TEXCOORD_ALPHA, 2, GL_FLOAT, 0, 0, [self quedTextureAlphaData]);
glEnableVertexAttribArray(ATTRIB_TEXCOORD_ALPHA);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glBindRenderbuffer(GL_RENDERBUFFER, _colorBufferHandle);
if ([EAGLContext currentContext] == _glContext && !self.pause && self.window && [UIApplication sharedApplication].applicationState != UIApplicationStateBackground) {
[_glContext presentRenderbuffer:GL_RENDERBUFFER];
}
}
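/*
 A minimal caller-side sketch of feeding frames into -displayPixelBuffer: (illustrative only:
 `videoOutput` (an AVPlayerItemVideoOutput), `hwdView` (an instance of this view) and the
 display link driving it are assumptions, not part of this file):

 - (void)onDisplayLink:(CADisplayLink *)link {
     CMTime itemTime = [videoOutput itemTimeForHostTime:link.timestamp + link.duration];
     if (![videoOutput hasNewPixelBufferForItemTime:itemTime]) {
         return;
     }
     CVPixelBufferRef pixelBuffer = [videoOutput copyPixelBufferForItemTime:itemTime itemTimeForDisplay:NULL];
     if (pixelBuffer) {
         [hwdView displayPixelBuffer:pixelBuffer]; // upload + draw one decoded frame
         CVPixelBufferRelease(pixelBuffer);        // the copied buffer is owned by the caller
     }
 }
*/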
- (const void *)quedTextureAlphaData {
switch (self.blendMode) {
case QGHWDTextureBlendMode_AlphaLeft:
return textureCoordLeft;
case QGHWDTextureBlendMode_AlphaRight:
return textureCoordRight;
case QGHWDTextureBlendMode_AlphaTop:
return textureCoordTop;
case QGHWDTextureBlendMode_AlphaBottom:
return textureCoordBottom;
default:
return textureCoordLeft;
}
}
- (const void *)quadTextureRGBData {
switch (self.blendMode) {
case QGHWDTextureBlendMode_AlphaLeft:
return textureCoordRight;
case QGHWDTextureBlendMode_AlphaRight:
return textureCoordLeft;
case QGHWDTextureBlendMode_AlphaTop:
return textureCoordBottom;
case QGHWDTextureBlendMode_AlphaBottom:
return textureCoordTop;
default:
return textureCoordRight;
}
}
#pragma mark - OpenGL ES 2 shader compilation
- (BOOL)loadShaders {
GLuint vShader, fShader;
self.program = glCreateProgram();
// Create and compile the vertex shader.
if (![self compileShader:&vShader type:GL_VERTEX_SHADER source:kVertexShaderSource]) {
VAP_Error(kQGVAPModuleCommon, @"Failed to compile vertex shader");
return NO;
}
// Create and compile fragment shader.
if (![self compileShader:&fShader type:GL_FRAGMENT_SHADER source:kFragmentShaderSource]) {
VAP_Error(kQGVAPModuleCommon, @"Failed to compile fragment shader");
return NO;
}
// Attach vertex shader to program.
glAttachShader(self.program, vShader);
// Attach fragment shader to program.
glAttachShader(self.program, fShader);
// Bind attribute locations. This needs to be done prior to linking.
glBindAttribLocation(self.program, ATTRIB_VERTEX, "position");
glBindAttribLocation(self.program, ATTRIB_TEXCOORD_RGB, "RGBTexCoord");
glBindAttribLocation(self.program, ATTRIB_TEXCOORD_ALPHA, "alphaTexCoord");
// Link the program.
if (![self linkProgram:self.program]) {
VAP_Event(kQGVAPModuleCommon, @"Failed to link program: %d", self.program);
if (vShader) {
glDeleteShader(vShader);
vShader = 0;
}
if (fShader) {
glDeleteShader(fShader);
fShader = 0;
}
if (self.program) {
glDeleteProgram(self.program);
self.program = 0;
}
return NO;
}
// Get uniforms' location.
hwd_uniforms[HWD_UNIFORM_Y] = glGetUniformLocation(self.program, "SamplerY");
hwd_uniforms[HWD_UNIFORM_UV] = glGetUniformLocation(self.program, "SamplerUV");
hwd_uniforms[HWD_UNIFORM_COLOR_CONVERSION_MATRIX] = glGetUniformLocation(self.program, "colorConversionMatrix");
// Release vertex and fragment shaders.
if (vShader) {
glDetachShader(self.program, vShader);
glDeleteShader(vShader);
}
if (fShader) {
glDetachShader(self.program, fShader);
glDeleteShader(fShader);
}
return YES;
}
- (BOOL)compileShader:(GLuint *)shader type:(GLenum)type source:(const NSString *)sourceString {
GLint status;
const GLchar *source;
source = (GLchar *)[sourceString UTF8String];
*shader = glCreateShader(type);
glShaderSource(*shader, 1, &source, NULL);
glCompileShader(*shader);
#if defined(DEBUG)
GLint lengthOfLog;
glGetShaderiv(*shader, GL_INFO_LOG_LENGTH, &lengthOfLog);
if (lengthOfLog > 0) {
GLchar *log = (GLchar *)malloc(lengthOfLog);
glGetShaderInfoLog(*shader, lengthOfLog, &lengthOfLog, log);
        VAP_Info(kQGVAPModuleCommon, @"MODULE_DECODE Shader compile log:\n%s", log);
free(log);
}
#endif
glGetShaderiv(*shader, GL_COMPILE_STATUS, &status);
if (status == 0) {
glDeleteShader(*shader);
return NO;
}
return YES;
}
- (BOOL)compileShader:(GLuint *)shader type:(GLenum)type URL:(NSURL *)URL {
VAP_Info(kQGVAPModuleCommon, @"compileShader");
NSError *error;
NSString *sourceString = [[NSString alloc] initWithContentsOfURL:URL encoding:NSUTF8StringEncoding error:&error];
if (sourceString == nil) {
        VAP_Event(kQGVAPModuleCommon, @"Failed to load shader source: %@", [error localizedDescription]);
return NO;
}
const GLchar *source;
source = (GLchar *)[sourceString UTF8String];
*shader = glCreateShader(type);
glShaderSource(*shader, 1, &source, NULL);
glCompileShader(*shader);
#if defined(DEBUG)
GLint lengthOfLog;
glGetShaderiv(*shader, GL_INFO_LOG_LENGTH, &lengthOfLog);
if (lengthOfLog > 0) {
GLchar *log = (GLchar *)malloc(lengthOfLog);
glGetShaderInfoLog(*shader, lengthOfLog, &lengthOfLog, log);
VAP_Info(kQGVAPModuleCommon, @"Shader compile log:\n%s", log);
free(log);
}
#endif
GLint status;
glGetShaderiv(*shader, GL_COMPILE_STATUS, &status);
if (status == 0) {
glDeleteShader(*shader);
return NO;
}
return YES;
}
- (BOOL)linkProgram:(GLuint)prog {
GLint status;
glLinkProgram(prog);
#if defined(DEBUG)
GLint lengthOfLog;
glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &lengthOfLog);
if (lengthOfLog > 0) {
GLchar *log = (GLchar *)malloc(lengthOfLog);
glGetProgramInfoLog(prog, lengthOfLog, &lengthOfLog, log);
VAP_Info(kQGVAPModuleCommon, @"Program link log:\n%s", log);
free(log);
}
#endif
glGetProgramiv(prog, GL_LINK_STATUS, &status);
if (status == 0) {
return NO;
}
return YES;
}
- (BOOL)isValidateProgram:(GLuint)prog {
GLint logLength, status;
glValidateProgram(prog);
glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &logLength);
if (logLength > 0) {
GLchar *log = (GLchar *)malloc(logLength);
glGetProgramInfoLog(prog, logLength, &logLength, log);
VAP_Info(kQGVAPModuleCommon, @"Program validate log:\n%s", log);
free(log);
}
glGetProgramiv(prog, GL_VALIDATE_STATUS, &status);
if (status == 0) {
        VAP_Event(kQGVAPModuleCommon, @"program is not valid: %@", @(status));
        return NO;
    }
    VAP_Info(kQGVAPModuleCommon, @"program is valid");
return YES;
}
- (void)dispose {
glDisableVertexAttribArray(ATTRIB_VERTEX);
glDisableVertexAttribArray(ATTRIB_TEXCOORD_RGB);
glDisableVertexAttribArray(ATTRIB_TEXCOORD_ALPHA);
}
@end


@@ -0,0 +1,224 @@
// QGHWDMetalShaderSourceDefine.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#ifndef QGHWDMetalShaderSourceDefine_h
#define QGHWDMetalShaderSourceDefine_h
#import "VAPMacros.h"
#import "QGHWDShaderTypes.h"
/*
 !!!!!!!!! IMPORTANT !!!!!!!!!
 !! Every update to the .metal files must be synced into this file.
 !! The shader code in this file is the fallback: when the precompiled shaders cannot be found,
 !! the shader strings defined here are compiled at run time instead.
 !!!!!!!!!!!!!!!!!!!!!!!!!!
*/
//The source may only import the Metal standard library. There is no search path to find other functions.
// Header imports
static NSString * const kQGHWDMetalShaderSourceImports =
@"#include <metal_stdlib> \n#import <simd/simd.h>\n";
// Type definitions (mirrors QGHWDShaderTypes.h)
static NSString * const kQGHWDMetalShaderTypeDefines =
SHADER_STRING(
typedef struct {
packed_float4 position;
packed_float2 textureColorCoordinate;
packed_float2 textureAlphaCoordinate;
} QGHWDVertex;
typedef struct {
packed_float4 position;
packed_float2 textureColorCoordinate;
packed_float2 textureAlphaCoordinate;
packed_float2 textureMaskCoordinate;
} QGVAPVertex;
struct ColorParameters {
matrix_float3x3 matrix;
packed_float2 offset;
};
struct MaskParameters {
matrix_float3x3 weightMatrix;
int coreSize;
float texelOffset;
};
typedef struct {
packed_float4 position;
packed_float2 sourceTextureCoordinate;
packed_float2 maskTextureCoordinate;
} QGHWDAttachmentVertex;
struct VapAttachmentFragmentParameter {
int needOriginRGB;
packed_float4 fillColor;
};
);
// Shader source code
static NSString * const kQGHWDMetalShaderSourceString =
SHADER_STRING(
//QGHWDShaders.metal
using namespace metal;
typedef struct {
float4 clipSpacePostion [[ position ]];
float2 textureColorCoordinate;
float2 textureAlphaCoordinate;
} HWDRasterizerData;
typedef struct {
float4 clipSpacePostion [[ position ]];
float2 textureColorCoordinate;
float2 textureAlphaCoordinate;
float2 textureMaskCoordinate;
} VAPRasterizerData;
typedef struct {
float4 position [[ position ]];
float2 sourceTextureCoordinate;
float2 maskTextureCoordinate;
} VAPAttachmentRasterizerData;
float3 RGBColorFromYuvTextures(sampler textureSampler, float2 coordinate, texture2d<float> texture_luma, texture2d<float> texture_chroma, matrix_float3x3 rotationMatrix, float2 offset) {
float3 color;
color.x = texture_luma.sample(textureSampler, coordinate).r;
color.yz = texture_chroma.sample(textureSampler, coordinate).rg - offset;
return float3(rotationMatrix * color);
}
float4 RGBAColor(sampler textureSampler, float2 colorCoordinate, float2 alphaCoordinate, texture2d<float> lumaTexture, texture2d<float> chromaTexture, constant ColorParameters *colorParameters) {
matrix_float3x3 rotationMatrix = colorParameters[0].matrix;
float2 offset = colorParameters[0].offset;
float3 color = RGBColorFromYuvTextures(textureSampler, colorCoordinate, lumaTexture, chromaTexture, rotationMatrix, offset);
float3 alpha = RGBColorFromYuvTextures(textureSampler, alphaCoordinate, lumaTexture, chromaTexture, rotationMatrix, offset);
return float4(color, alpha.r);
}
vertex HWDRasterizerData hwd_vertexShader(uint vertexID [[ vertex_id ]], constant QGHWDVertex *vertexArray [[ buffer(0) ]]) {
HWDRasterizerData out;
out.clipSpacePostion = vertexArray[vertexID].position;
out.textureColorCoordinate = vertexArray[vertexID].textureColorCoordinate;
out.textureAlphaCoordinate = vertexArray[vertexID].textureAlphaCoordinate;
return out;
}
fragment float4 hwd_yuvFragmentShader(HWDRasterizerData input [[ stage_in ]],
texture2d<float> lumaTexture [[ texture(0) ]],
texture2d<float> chromaTexture [[ texture(1) ]],
constant ColorParameters *colorParameters [[ buffer(0) ]]) {
//signifies that an expression may be computed at compile-time rather than runtime
constexpr sampler textureSampler (mag_filter::linear, min_filter::linear);
return RGBAColor(textureSampler, input.textureColorCoordinate, input.textureAlphaCoordinate, lumaTexture, chromaTexture, colorParameters);
}
vertex VAPRasterizerData vap_vertexShader(uint vertexID [[ vertex_id ]], constant QGVAPVertex *vertexArray [[ buffer(0) ]]) {
VAPRasterizerData out;
out.clipSpacePostion = vertexArray[vertexID].position;
out.textureColorCoordinate = vertexArray[vertexID].textureColorCoordinate;
out.textureAlphaCoordinate = vertexArray[vertexID].textureAlphaCoordinate;
out.textureMaskCoordinate = vertexArray[vertexID].textureMaskCoordinate;
return out;
}
fragment float4 vap_yuvFragmentShader(VAPRasterizerData input [[ stage_in ]],
texture2d<float> lumaTexture [[ texture(0) ]],
texture2d<float> chromaTexture [[ texture(1) ]],
constant ColorParameters *colorParameters [[ buffer(0) ]]) {
//signifies that an expression may be computed at compile-time rather than runtime
constexpr sampler textureSampler (mag_filter::linear, min_filter::linear);
return RGBAColor(textureSampler, input.textureColorCoordinate, input.textureAlphaCoordinate, lumaTexture, chromaTexture, colorParameters);
}
fragment float4 vap_maskFragmentShader(VAPRasterizerData input [[ stage_in ]],
texture2d<float> lumaTexture [[ texture(0) ]],
texture2d<float> chromaTexture [[ texture(1) ]],
texture2d<float> maskTexture [[ texture(2) ]],
constant ColorParameters *colorParameters [[ buffer(0) ]]) {
//signifies that an expression may be computed at compile-time rather than runtime
constexpr sampler textureSampler (mag_filter::linear, min_filter::linear);
float4 originColor = RGBAColor(textureSampler, input.textureColorCoordinate, input.textureAlphaCoordinate, lumaTexture, chromaTexture, colorParameters);
float4 maskColor = maskTexture.sample(textureSampler, input.textureMaskCoordinate);
float needMask = maskColor.r * 255;
return float4(originColor.rgb, (1 - needMask) * originColor.a);
}
fragment float4 vap_maskBlurFragmentShader(VAPRasterizerData input [[ stage_in ]],
texture2d<float> lumaTexture [[ texture(0) ]],
texture2d<float> chromaTexture [[ texture(1) ]],
texture2d<float> maskTexture [[ texture(2) ]],
constant ColorParameters *colorParameters [[ buffer(0) ]],
constant MaskParameters *maskParameters [[ buffer(1) ]]) {
//signifies that an expression may be computed at compile-time rather than runtime
constexpr sampler textureSampler (mag_filter::linear, min_filter::linear);
float4 originColor = RGBAColor(textureSampler, input.textureColorCoordinate, input.textureAlphaCoordinate, lumaTexture, chromaTexture, colorParameters);
int uniform = 255;
float3x3 weightMatrix = maskParameters[0].weightMatrix;
int coreSize = maskParameters[0].coreSize;
float texelOffset = maskParameters[0].texelOffset;
float alphaResult = 0;
    // Loops coreSize x coreSize times (9 for a 3x3 kernel), written as nested for loops
for (int y = 0; y < coreSize; y++) {
for (int x = 0; x < coreSize; x++) {
float2 nearMaskColor = float2(input.textureMaskCoordinate.x + (-1.0 + float(x)) * texelOffset, input.textureMaskCoordinate.y + (-1.0 + float(y)) * texelOffset);
alphaResult += maskTexture.sample(textureSampler, nearMaskColor).r * uniform * weightMatrix[x][y];
}
}
int needOrigin = step(alphaResult, 0.01) + step(originColor.a, 0.01);
return float4(originColor.rgb, needOrigin * originColor.a + (1 - needOrigin) * (1 - alphaResult));
}
vertex VAPAttachmentRasterizerData vapAttachment_vertexShader(uint vertexID [[ vertex_id ]], constant QGHWDAttachmentVertex *vertexArray [[ buffer(0) ]]) {
VAPAttachmentRasterizerData out;
out.position = vertexArray[vertexID].position;
out.sourceTextureCoordinate = vertexArray[vertexID].sourceTextureCoordinate;
out.maskTextureCoordinate = vertexArray[vertexID].maskTextureCoordinate;
return out;
}
fragment float4 vapAttachment_FragmentShader(VAPAttachmentRasterizerData input [[ stage_in ]],
texture2d<float> lumaTexture [[ texture(0) ]],
texture2d<float> chromaTexture [[ texture(1) ]],
texture2d<float> sourceTexture [[ texture(2) ]],
constant ColorParameters *colorParameters [[ buffer(0) ]],
constant VapAttachmentFragmentParameter *fillParams [[ buffer(1) ]]) {
constexpr sampler textureSampler (mag_filter::linear, min_filter::linear);
matrix_float3x3 rotationMatrix = colorParameters[0].matrix;
float2 offset = colorParameters[0].offset;
float3 mask = RGBColorFromYuvTextures(textureSampler, input.maskTextureCoordinate, lumaTexture, chromaTexture, rotationMatrix, offset);
float4 source = sourceTexture.sample(textureSampler, input.sourceTextureCoordinate);
return float4(source.rgb, source.a * mask.r);
}
);
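/*
 A minimal sketch of how the fallback strings above might be compiled at run time when the
 precompiled shader library is unavailable (illustrative only; the library's actual fallback
 path and pipeline setup may differ):

 NSString *source = [NSString stringWithFormat:@"%@%@%@",
                     kQGHWDMetalShaderSourceImports,
                     kQGHWDMetalShaderTypeDefines,
                     kQGHWDMetalShaderSourceString];
 NSError *error = nil;
 id<MTLDevice> device = MTLCreateSystemDefaultDevice();
 id<MTLLibrary> library = [device newLibraryWithSource:source options:nil error:&error];
 id<MTLFunction> vertexFunction = [library newFunctionWithName:@"hwd_vertexShader"];
 id<MTLFunction> fragmentFunction = [library newFunctionWithName:@"hwd_yuvFragmentShader"];
*/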
#endif /* QGHWDMetalShaderSourceDefine_h */


@@ -0,0 +1,68 @@
// QGHWDShaderTypes.h
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#ifndef QGHWDShaderTypes_h
#define QGHWDShaderTypes_h
/*
 Note: after updating this file, sync the changes into QGHWDMetalShaderSourceDefine.h.
*/
#import <simd/simd.h>
typedef struct {
packed_float4 position;
packed_float2 textureColorCoordinate;
packed_float2 textureAlphaCoordinate;
} QGHWDVertex;
typedef struct {
packed_float4 position;
packed_float2 textureColorCoordinate;
packed_float2 textureAlphaCoordinate;
packed_float2 textureMaskCoordinate;
} QGVAPVertex;
struct ColorParameters {
matrix_float3x3 matrix;
packed_float2 offset;
};
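/*
 A host-side sketch of populating ColorParameters (illustrative values, assuming BT.601 full-range
 content; the fragment shaders compute rgb = matrix * (y, cb - offset.x, cr - offset.y)):

     struct ColorParameters colorParams;
     colorParams.matrix = simd_matrix(simd_make_float3(1.0f,     1.0f,     1.0f),    // Y  contribution
                                      simd_make_float3(0.0f,    -0.3441f,  1.7720f), // Cb contribution
                                      simd_make_float3(1.4020f, -0.7141f,  0.0f));   // Cr contribution
     colorParams.offset = simd_make_float2(0.5f, 0.5f); // full-range chroma midpoint
*/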
struct MaskParameters {
matrix_float3x3 weightMatrix;
int coreSize;
float texelOffset;
};
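/*
 A host-side sketch of populating MaskParameters for vap_maskBlurFragmentShader (values are
 illustrative assumptions; `maskTextureWidth` is hypothetical):

     float w = 1.0f / 9.0f;                            // uniform 3x3 box kernel, weights sum to 1
     struct MaskParameters maskParams;
     maskParams.weightMatrix = simd_matrix(simd_make_float3(w, w, w),
                                           simd_make_float3(w, w, w),
                                           simd_make_float3(w, w, w));
     maskParams.coreSize    = 3;                       // 3x3 sampling neighborhood
     maskParams.texelOffset = 1.0f / maskTextureWidth; // one texel step in normalized coordinates
*/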
struct VapAttachmentFragmentParameter {
int needOriginRGB;
packed_float4 fillColor;
};
typedef struct {
packed_float4 position;
packed_float2 sourceTextureCoordinate;
packed_float2 maskTextureCoordinate;
} QGHWDAttachmentVertex;
typedef enum QGHWDYUVFragmentTextureIndex {
QGHWDYUVFragmentTextureIndexLuma = 0,
QGHWDYUVFragmentTextureIndexChroma = 1,
QGHWDYUVFragmentTextureIndexAttachmentStart = 2,
} QGHWDYUVFragmentTextureIndex;
#endif /* QGHWDShaderTypes_h */


@@ -0,0 +1,163 @@
// Shaders.metal
// Tencent is pleased to support the open source community by making vap available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
#include <metal_stdlib>
#import "QGHWDShaderTypes.h"
/*
 Note: after updating this file, sync the changes into QGHWDMetalShaderSourceDefine.h.
*/
using namespace metal;
typedef struct {
float4 clipSpacePostion [[ position ]];
float2 textureColorCoordinate;
float2 textureAlphaCoordinate;
} HWDRasterizerData;
typedef struct {
float4 clipSpacePostion [[ position ]];
float2 textureColorCoordinate;
float2 textureAlphaCoordinate;
float2 textureMaskCoordinate;
} VAPRasterizerData;
typedef struct {
float4 position [[ position ]];
float2 sourceTextureCoordinate;
float2 maskTextureCoordinate;
} VAPAttachmentRasterizerData;
float3 RGBColorFromYuvTextures(sampler textureSampler, float2 coordinate, texture2d<float> texture_luma, texture2d<float> texture_chroma, matrix_float3x3 rotationMatrix, float2 offset) {
float3 color;
color.x = texture_luma.sample(textureSampler, coordinate).r;
color.yz = texture_chroma.sample(textureSampler, coordinate).rg - offset;
return float3(rotationMatrix * color);
}
float4 RGBAColor(sampler textureSampler, float2 colorCoordinate, float2 alphaCoordinate, texture2d<float> lumaTexture, texture2d<float> chromaTexture, constant ColorParameters *colorParameters) {
matrix_float3x3 rotationMatrix = colorParameters[0].matrix;
float2 offset = colorParameters[0].offset;
float3 color = RGBColorFromYuvTextures(textureSampler, colorCoordinate, lumaTexture, chromaTexture, rotationMatrix, offset);
float3 alpha = RGBColorFromYuvTextures(textureSampler, alphaCoordinate, lumaTexture, chromaTexture, rotationMatrix, offset);
return float4(color, alpha.r);
}
vertex HWDRasterizerData hwd_vertexShader(uint vertexID [[ vertex_id ]], constant QGHWDVertex *vertexArray [[ buffer(0) ]]) {
HWDRasterizerData out;
out.clipSpacePostion = vertexArray[vertexID].position;
out.textureColorCoordinate = vertexArray[vertexID].textureColorCoordinate;
out.textureAlphaCoordinate = vertexArray[vertexID].textureAlphaCoordinate;
return out;
}
fragment float4 hwd_yuvFragmentShader(HWDRasterizerData input [[ stage_in ]],
texture2d<float> lumaTexture [[ texture(0) ]],
texture2d<float> chromaTexture [[ texture(1) ]],
constant ColorParameters *colorParameters [[ buffer(0) ]]) {
//signifies that an expression may be computed at compile-time rather than runtime
constexpr sampler textureSampler (mag_filter::linear, min_filter::linear);
return RGBAColor(textureSampler, input.textureColorCoordinate, input.textureAlphaCoordinate, lumaTexture, chromaTexture, colorParameters);
}
vertex VAPRasterizerData vap_vertexShader(uint vertexID [[ vertex_id ]], constant QGVAPVertex *vertexArray [[ buffer(0) ]]) {
VAPRasterizerData out;
out.clipSpacePostion = vertexArray[vertexID].position;
out.textureColorCoordinate = vertexArray[vertexID].textureColorCoordinate;
out.textureAlphaCoordinate = vertexArray[vertexID].textureAlphaCoordinate;
out.textureMaskCoordinate = vertexArray[vertexID].textureMaskCoordinate;
return out;
}
fragment float4 vap_yuvFragmentShader(VAPRasterizerData input [[ stage_in ]],
texture2d<float> lumaTexture [[ texture(0) ]],
texture2d<float> chromaTexture [[ texture(1) ]],
constant ColorParameters *colorParameters [[ buffer(0) ]]) {
//signifies that an expression may be computed at compile-time rather than runtime
constexpr sampler textureSampler (mag_filter::linear, min_filter::linear);
return RGBAColor(textureSampler, input.textureColorCoordinate, input.textureAlphaCoordinate, lumaTexture, chromaTexture, colorParameters);
}
fragment float4 vap_maskFragmentShader(VAPRasterizerData input [[ stage_in ]],
texture2d<float> lumaTexture [[ texture(0) ]],
texture2d<float> chromaTexture [[ texture(1) ]],
texture2d<float> maskTexture [[ texture(2) ]],
constant ColorParameters *colorParameters [[ buffer(0) ]]) {
//signifies that an expression may be computed at compile-time rather than runtime
constexpr sampler textureSampler (mag_filter::linear, min_filter::linear);
float4 originColor = RGBAColor(textureSampler, input.textureColorCoordinate, input.textureAlphaCoordinate, lumaTexture, chromaTexture, colorParameters);
float4 maskColor = maskTexture.sample(textureSampler, input.textureMaskCoordinate);
float needMask = maskColor.r * 255;
return float4(originColor.rgb, (1 - needMask) * originColor.a);
}
fragment float4 vap_maskBlurFragmentShader(VAPRasterizerData input [[ stage_in ]],
texture2d<float> lumaTexture [[ texture(0) ]],
texture2d<float> chromaTexture [[ texture(1) ]],
texture2d<float> maskTexture [[ texture(2) ]],
constant ColorParameters *colorParameters [[ buffer(0) ]],
constant MaskParameters *maskParameters [[ buffer(1) ]]) {
//signifies that an expression may be computed at compile-time rather than runtime
constexpr sampler textureSampler (mag_filter::linear, min_filter::linear);
float4 originColor = RGBAColor(textureSampler, input.textureColorCoordinate, input.textureAlphaCoordinate, lumaTexture, chromaTexture, colorParameters);
int uniform = 255;
float3x3 weightMatrix = maskParameters[0].weightMatrix;
int coreSize = maskParameters[0].coreSize;
float texelOffset = maskParameters[0].texelOffset;
float alphaResult = 0;
    // Loops coreSize x coreSize times (9 for a 3x3 kernel), written as nested for loops
for (int y = 0; y < coreSize; y++) {
for (int x = 0; x < coreSize; x++) {
float2 nearMaskColor = float2(input.textureMaskCoordinate.x + (-1.0 + float(x)) * texelOffset, input.textureMaskCoordinate.y + (-1.0 + float(y)) * texelOffset);
alphaResult += maskTexture.sample(textureSampler, nearMaskColor).r * uniform * weightMatrix[x][y];
}
}
int needOrigin = step(alphaResult, 0.01) + step(originColor.a, 0.01);
return float4(originColor.rgb, needOrigin * originColor.a + (1 - needOrigin) * (1 - alphaResult));
}
vertex VAPAttachmentRasterizerData vapAttachment_vertexShader(uint vertexID [[ vertex_id ]], constant QGHWDAttachmentVertex *vertexArray [[ buffer(0) ]]) {
VAPAttachmentRasterizerData out;
out.position = vertexArray[vertexID].position;
out.sourceTextureCoordinate = vertexArray[vertexID].sourceTextureCoordinate;
out.maskTextureCoordinate = vertexArray[vertexID].maskTextureCoordinate;
return out;
}
fragment float4 vapAttachment_FragmentShader(VAPAttachmentRasterizerData input [[ stage_in ]],
texture2d<float> lumaTexture [[ texture(0) ]],
texture2d<float> chromaTexture [[ texture(1) ]],
texture2d<float> sourceTexture [[ texture(2) ]],
constant ColorParameters *colorParameters [[ buffer(0) ]],
constant VapAttachmentFragmentParameter *fillParams [[ buffer(1) ]]) {
constexpr sampler textureSampler (mag_filter::linear, min_filter::linear);
matrix_float3x3 rotationMatrix = colorParameters[0].matrix;
float2 offset = colorParameters[0].offset;
float3 mask = RGBColorFromYuvTextures(textureSampler, input.maskTextureCoordinate, lumaTexture, chromaTexture, rotationMatrix, offset);
float4 source = sourceTexture.sample(textureSampler, input.sourceTextureCoordinate);
return float4(source.rgb, source.a * mask.r);
}
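/*
 Host-side sketch (Objective-C) of encoding one frame with hwd_vertexShader / hwd_yuvFragmentShader.
 The texture and buffer indices follow the [[ texture(n) ]] / [[ buffer(n) ]] attributes above;
 everything else (`encoder`, `pipelineState`, the quad buffer and textures) is illustrative:

 [encoder setRenderPipelineState:pipelineState];
 [encoder setVertexBuffer:quadVertexBuffer offset:0 atIndex:0];        // 4 QGHWDVertex entries (triangle strip)
 [encoder setFragmentTexture:lumaTexture atIndex:0];                   // Y plane
 [encoder setFragmentTexture:chromaTexture atIndex:1];                 // UV plane
 [encoder setFragmentBuffer:colorParametersBuffer offset:0 atIndex:0]; // ColorParameters
 [encoder drawPrimitives:MTLPrimitiveTypeTriangleStrip vertexStart:0 vertexCount:4];
*/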