在 AppDelegate.m 中引入 #import <TuSDK/TuSDK.h>。(1)model 文件、鉴权文件,必须保留。
(2)others 文件夹,包含使用到的滤镜资源文件的索引,进行滤镜资源文件操作时需要进行替换。
(3)stickers 文件夹,包含打包到本地使用的贴纸的资源文件,进行资源文件操作时需要进行替换(无贴纸功能的可删除)。
(4)textures 文件夹,包含打包到本地使用的滤镜的资源文件,进行资源文件操作时需要进行替换。
在七牛PLMediaStreamingSession的代理方法中添加如下代码:
// Camera-capture data callback (PLMediaStreamingSession delegate).
// Routes every captured frame through the TuSDK pipeline and returns the
// processed buffer; falls back to the original buffer when the pipeline
// yields nothing.
- (CVPixelBufferRef)mediaStreamingSession:(PLMediaStreamingSession *)session cameraSourceDidGetPixelBuffer:(CVPixelBufferRef)pixelBuffer {
    CVPixelBufferRef processedBuffer = [[TTPipeMediator shareInstance] sendPixelBuffer:pixelBuffer];
    return processedBuffer ? processedBuffer : pixelBuffer;
}
开启自定义视频采集
// Swift
// 调用 Agora 的 setExternalVideoSource 方法,告知 SDK 使用自采集的视频数据。
agoraKit.setExternalVideoSource(true, useTexture: true, pushMode: true)
核心处理&渲染代码
// AgoraCameraSourcePush callback: run the captured frame through the TuSDK
// pipeline, preview it locally, then push the processed frame to Agora.
func myVideoCapture(_ capture: AgoraCameraSourcePush, didOutputSampleBuffer pixelBuffer: CVPixelBuffer, rotation: Int, timeStamp: CMTime) {
        let milliseconds = Int64(timeStamp.seconds * 1000)
        let processedBuffer = TTPipeMediator.shareInstance()
            .send(pixelBuffer, withTimestamp: milliseconds, rotation: Int32(rotation))
            .takeUnretainedValue()

        // Local preview. In a real project, rendering via TUPFPImage is
        // recommended; this CIImage route is for test projects only.
        self.displayView.image = UIImage(ciImage: CIImage(cvPixelBuffer: processedBuffer))

        // Wrap the processed buffer and hand it to the Agora SDK.
        let frame = AgoraVideoFrame()
        frame.format = 12 // 12 = CVPixelBuffer input (per AgoraVideoFrame.format docs)
        frame.textureBuf = processedBuffer
        frame.time = timeStamp
        frame.rotation = Int32(rotation)
        agoraKit?.pushExternalVideoFrame(frame)
}
设置声网为MediaIO方式
agoraKit.setVideoSource(customCamera)
实现 AgoraVideoSourceProtocol 协议 (参考AgoraCameraSourceMediaIO)
/// AVCapture output callback (MediaIO custom video source): locks the frame,
/// runs it through the TuSDK pipeline, and forwards the processed buffer to
/// the Agora MediaIO consumer.
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        // Bail out when the sample has no image buffer or locking fails; the
        // guard returns before the defer is registered, so no unlock is owed.
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer), CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly) == kCVReturnSuccess else {
            return
        }
        // Balance the lock taken in the guard above on every exit path.
        defer {
            CVPixelBufferUnlockBaseAddress(pixelBuffer, .readOnly)
        }
        let time = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
        let rotation = viewOrientation.agoraRotation()
        // Timestamp in milliseconds for the TuSDK pipeline.
        let ts: Int64 = Int64(time.seconds * 1000)
        // NOTE(review): the pipeline is fed a hard-coded rotation of 90 while
        // the computed `rotation` is only passed to the consumer below —
        // confirm this is intentional and not a portrait-only leftover.
        let newPixelBuffer = TTPipeMediator.shareInstance().send(pixelBuffer, withTimestamp: ts, rotation: 90).takeUnretainedValue()
        consumer?.consumePixelBuffer(newPixelBuffer, withTimestamp: time, rotation: rotation)
    }
首先需要调用 V2TXLivePusher 的 enableCustomVideoCapture 接口开启自定义采集。
// Custom-capture callback (V2TXLivePusher): feed the raw sample buffer
// through the TuSDK pipeline and push the processed pixel buffer to the
// pusher. Requires enableCustomVideoCapture to have been called first.
- (void)onVideoSampleBuffer:(CMSampleBufferRef)videoBuffer {
    CVPixelBufferRef processedBuffer = [[TTPipeMediator shareInstance] sendVideoSampleBuffer:videoBuffer];

    // Wrap the processed buffer in a V2TXLiveVideoFrame and send it.
    V2TXLiveVideoFrame *frame = [[V2TXLiveVideoFrame alloc] init];
    frame.pixelFormat = V2TXLivePixelFormatI420;
    frame.bufferType = V2TXLiveBufferTypePixelBuffer;
    frame.pixelBuffer = processedBuffer;
    [self.livePusher sendCustomVideoFrame:frame];
}
首先需要调用 V2TXLivePusher 的 enableCustomVideoProcess 开启自定义视频处理,才会收到这个回调通知。
// Custom video-process callback (V2TXLivePusher): beautify the incoming
// OpenGL texture via TuSDK and hand the processed texture back in dstFrame.
// Requires enableCustomVideoProcess to have been called beforehand.
- (void)onProcessVideoFrame:(V2TXLiveVideoFrame *)srcFrame dstFrame:(V2TXLiveVideoFrame *)dstFrame {
    // One-time lazy setup on the first frame: bind the mediator to the GL
    // context that is current on this callback's thread, and declare
    // 2D-texture input. Order matters — the context must be captured here,
    // not earlier, so it is the one the SDK renders with.
    if (!_currentContext) {
        _currentContext = [EAGLContext currentContext];
        [TTLiveMediator setupContext:_currentContext];
        [[TTLiveMediator shareInstance] setPixelFormat:TTVideoPixelFormat_Texture2D];
    }
    // Run the source texture through the beauty pipeline ...
    TUPFPImage *fpImage = [[TTLiveMediator shareInstance] sendVideoTexture2D:srcFrame.textureId width:(int)srcFrame.width height:(int)srcFrame.height];
    // ... and return the processed texture to the SDK via dstFrame.
    dstFrame.bufferType = V2TXLiveBufferTypeTexture;
    dstFrame.pixelFormat = V2TXLivePixelFormatTexture2D;
    dstFrame.textureId = [fpImage getTextureID];
}
在即构的ZGCustomVideoCapturePixelBufferDelegate的代理回调中添加如下方法
// ZGCustomVideoCapturePixelBufferDelegate callback: run captured frames
// through the TuSDK pipeline before handing them to the ZEGO engine.
// Fix: the original extracted the image buffer and timestamp from the
// sample twice across duplicated send branches; both are now hoisted into
// locals and the two branches collapsed — behavior is unchanged.
- (void)captureDevice:(id<ZGCaptureDevice>)device didCapturedData:(CMSampleBufferRef)data {
    if (self.captureBufferType == ZGCustomVideoCaptureBufferTypeCVPixelBuffer) {
        // BufferType: CVPixelBuffer — extract once, process, then send.
        CVPixelBufferRef sourceBuffer = CMSampleBufferGetImageBuffer(data);
        CMTime timestamp = CMSampleBufferGetPresentationTimeStamp(data);
        CVPixelBufferRef ttPixelBuffer = [[[TTLiveMediator shareInstance] sendVideoPixelBuffer:sourceBuffer] getCVPixelBuffer];
        // Prefer the beautified buffer; fall back to the raw capture when the
        // pipeline produced nothing. Send pixel buffer to ZEGO SDK.
        [[ZegoExpressEngine sharedEngine] sendCustomVideoCapturePixelBuffer:(ttPixelBuffer ? ttPixelBuffer : sourceBuffer)
                                                                 timestamp:timestamp];
    } else if (self.captureBufferType == ZGCustomVideoCaptureBufferTypeEncodedFrame) {
        // BufferType: Encoded frame (H.264) — must be encoded before sending.
        [self.encoder encodeBuffer:data];
    }
}
设置美颜参数也是使用 TTPipeMediator 中 TTBeautyManager 提供的函数,更多的使用方式可以参考我们 demo 里的做法。
VideoFair 就是该滤镜的 filterCode,在 _videoFilters = @[@"VideoFair"]; 中可以设置要使用的滤镜。
                ©2019-2025 TUTUCLOUD. All Rights Reserved. 杭州元凡视觉智能科技有限公司 | 浙ICP备14040447号-1 | 
浙公网安备33010602001649号