You need to enable JavaScript to run this app.
导航
画中画
最近更新时间:2025.11.13 20:31:26 | 首次发布时间:2025.06.24 20:12:16
复制全文
我的收藏
有用
有用
无用
无用

画中画(Picture-in-Picture, PiP)功能允许用户在屏幕一角的小窗口中观看视频,同时在主屏幕上继续与其他应用或内容进行交互。适用于用户希望在浏览网页、回复消息或使用其他应用时,仍能保持视频播放不中断的场景。本文将指导您如何基于苹果官方的 AVPictureInPictureController 与 AVSampleBufferDisplayLayer,并结合播放器 SDK 的视频帧回调,实现高性能、无缝切换的画中画功能。

前提条件

  • 设备系统版本:iOS 15 及以上版本
  • 系统画中画设置:请确保用户已在设备的设置 > 通用 > 画中画页面中,开启了自动开启画中画开关。
    Image

实现步骤

步骤 1:配置项目能力

首先,您需要在 Xcode 项目中为您的应用开启画中画所需的后台模式。在 Xcode 项目中:

  1. 选择 App 的 Target。
  2. 单击 Signing & Capabilities 页签。
  3. 单击 + Capability 按钮添加 Background Modes 功能,勾选 Audio, AirPlay, and Picture in Picture。
    Image

步骤 2:初始化画中画环境

在您的播放器 UIViewController 中,需要完成画中画所需的核心组件的初始化,包括配置音频会话、自定义渲染视图和画中画控制器。

// 自定义 DisplayView,提供 AVSampleBufferDisplayLayer
// Display view whose backing layer is an AVSampleBufferDisplayLayer, so decoded
// frames can be enqueued directly for both on-screen and PiP rendering.
@implementation VideoPlayerPipDisplayView

// Replace the default CALayer backing class with AVSampleBufferDisplayLayer.
+ (Class)layerClass {
    return AVSampleBufferDisplayLayer.class;
}

@end

// 配置 AudioSession,以便系统正确处理画中画模式下的音频播放。
// Configure the shared AVAudioSession so the system keeps audio playing
// correctly while the app is in Picture-in-Picture mode.
- (void)__updateAudioSession {
    AVAudioSession *audioSession = [AVAudioSession sharedInstance];

    // Check the BOOL return value, not the error pointer: Cocoa only guarantees
    // the NSError out-parameter is meaningful when the call reports failure.
    NSError *categoryError = nil;
    BOOL categorySet = [audioSession setCategory:AVAudioSessionCategoryPlayback
                                            mode:AVAudioSessionModeMoviePlayback
                                         options:AVAudioSessionCategoryOptionOverrideMutedMicrophoneInterruption
                                           error:&categoryError];
    if (!categorySet) {
        NSLog(@"volc--set audio session category error: %@", categoryError.localizedDescription);
    }

    NSError *activeError = nil;
    if (![audioSession setActive:YES error:&activeError]) {
        NSLog(@"volc--set audio session active error: %@", activeError.localizedDescription);
    }
}

// 创建并配置显示视图
// Create the frame-rendering view, capture its AVSampleBufferDisplayLayer,
// and pin the view edge-to-edge over the controller's root view.
- (void)__setupDisplayerView {
    VideoPlayerPipDisplayView *displayView = [[VideoPlayerPipDisplayView alloc] init];
    displayView.userInteractionEnabled = NO;
    displayView.clipsToBounds = YES;
    self.displayView = displayView;

    // The view's backing layer is an AVSampleBufferDisplayLayer (see layerClass).
    AVSampleBufferDisplayLayer *layer = (AVSampleBufferDisplayLayer *)displayView.layer;
    layer.opaque = YES;
    layer.videoGravity = AVLayerVideoGravityResizeAspect;
    self.displayLayer = layer;

    [self.view addSubview:displayView];
    [displayView mas_makeConstraints:^(MASConstraintMaker *make) {
        make.edges.equalTo(self.view);
    }];
}

// 创建并配置 pipController
// AVPictureInPictureController 是 iOS 系统管理画中画生命周期的核心控制器,用来启动和停止画中画模式
// Build the AVPictureInPictureController that manages the PiP lifecycle,
// feeding it the sample-buffer layer as its content source.
- (void)__setupPipController {
    AVPictureInPictureControllerContentSource *source =
        [[AVPictureInPictureControllerContentSource alloc]
            initWithSampleBufferDisplayLayer:self.displayLayer
                            playbackDelegate:self];

    AVPictureInPictureController *controller =
        [[AVPictureInPictureController alloc] initWithContentSource:source];
    // Allow the system to enter PiP automatically when the app is backgrounded.
    controller.canStartPictureInPictureAutomaticallyFromInline = YES;
    // Linear playback hides the skip-forward/backward controls in the PiP window.
    controller.requiresLinearPlayback = YES;
    controller.delegate = self;
    self.pipController = controller;
}

步骤 3:接管并渲染视频帧

为了让主播放窗口和画中画窗口能共享同一份视频数据,您需要“接管”播放器解码后的视频帧 CVPixelBufferRef,然后手动将其渲染到您创建的 displayLayer 上。

  1. 在您的 .m 文件顶部(或一个公共的工具类中)定义以下辅助函数。

    // Run `block` on the main thread: synchronously when already there,
    // otherwise asynchronously via the main dispatch queue.
    // The parameter is _Nullable, so guard against nil: calling `block()`
    // with a nil block, or passing NULL to dispatch_async, is undefined behavior.
    static inline void RunOnMainThread(void (^ _Nullable block)(void)) {
        if (block == nil) {
            return;
        }
        if ([NSThread isMainThread]) {
            block();
        } else {
            dispatch_async(dispatch_get_main_queue(), block);
        }
    }
    
  2. 添加设置视频帧回调、处理视频帧并将其送入渲染队列的核心逻辑:

    // --- 视频帧处理逻辑 ---
    // C 风格的回调函数,用于接收来自播放器 SDK 的视频帧
    // --- Video frame handling ---
    // C trampoline invoked by the player SDK for every decoded video frame.
    // `context` carries an unretained pointer to the owning PipViewController.
    static void processPixelBuffer(void *context, CVPixelBufferRef frame, int64_t timestamp) {
        // Bridge the raw context back to the controller (no ownership transfer)
        // and forward the pixel buffer to the Objective-C handler.
        PipViewController *controller = (__bridge PipViewController *)context;
        [controller __dispatchPixelBuffer:frame];
    }
    
    // 视频帧回调的释放函数
    static void releasePixelBuffer(void *context) {
        NSLog(@"volc--Frame callback context released.");
    }
    
    // 调用此方法来设置视频帧回调。
    - (void)__startObserveVideoFrame {
        EngineVideoWrapper *wrapper = malloc(sizeof(EngineVideoWrapper));
        wrapper->process = processPixelBuffer;
        wrapper->release = releasePixelBuffer;
        wrapper->context = (__bridge void *)self;
        
        // 将这个 wrapper 设置给播放器引擎
        [self.videoEngine setVideoWrapper:wrapper];
    }
    
    // 将播放器回调的 CVPixelBufferRef 转换为 CMSampleBufferRef
    - (void)__dispatchPixelBuffer:(CVPixelBufferRef)pixelBuffer {
        if (!pixelBuffer) {
            return;
        }
        
        // 1. 根据 PixelBuffer 创建视频格式描述
        CMVideoFormatDescriptionRef videoInfo = NULL;
        OSStatus result = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &videoInfo);
        NSParameterAssert(result == 0 && videoInfo != NULL);
    
        // 2. 创建一个不带具体时间的 Sample Timing Info
        CMSampleTimingInfo timing = {kCMTimeInvalid, kCMTimeInvalid, kCMTimeInvalid};
        
        // 3. 将 PixelBuffer 包装成 CMSampleBuffer
        CMSampleBufferRef sampleBuffer = NULL;
        result = CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, videoInfo, &timing, &sampleBuffer);
        NSParameterAssert(result == 0 && sampleBuffer != NULL);
        CFRelease(videoInfo);
    
        // 4. 设置 SampleBuffer 的附件,使其立即显示
        CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, YES);
        CFMutableDictionaryRef dict = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachments, 0);
        CFDictionarySetValue(dict, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue);
        
        // 5. 将 SampleBuffer 送入渲染队列
        [self enqueueSampleBuffer:sampleBuffer toLayer:self.displayLayer];
        
        // 6. 释放创建的 SampleBuffer
        CFRelease(sampleBuffer);
    }
    
    // 将 CMSampleBufferRef 安全地送入 AVSampleBufferDisplayLayer 的渲染队列
    - (void)enqueueSampleBuffer:(CMSampleBufferRef)sampleBuffer toLayer:(AVSampleBufferDisplayLayer*)layer {
        if (!sampleBuffer || !layer.readyForMoreMediaData) {
            NSLog(@"volc--sampleBuffer is invalid or layer is not ready. Skipping frame.");
            return;
        }
        // 兼容性处理:在某些系统版本,如果 Layer 状态异常,先 flush
        if (@available(iOS 16.0, *)) {
            if (layer.status == AVQueuedSampleBufferRenderingStatusFailed) {
                NSLog(@"volc--sampleBufferLayer failed with error: %@. Flushing layer.", layer.error);
                [layer flush];
            }
        } else {
            // 在较低版本上,更保守地在每次 enqueue 前 flush 可能有助于避免一些渲染问题
            [layer flush];
        }
        
        // 在 iOS 15 之前的版本,enqueueSampleBuffer 可能需要在主线程调用
        if (@available(iOS 15.0, *)) {
            [layer enqueueSampleBuffer:sampleBuffer];
        } else {
            RunOnMainThread(^{
                [layer enqueueSampleBuffer:sampleBuffer];
            });
        }
    }
    

步骤 4:开启或关闭画中画功能

在用户界面中添加画中画控制按钮,实现开启或关闭画中画功能。

  1. 实现画中画切换方法:

    // Toggle Picture-in-Picture: stop it when active, start it otherwise.
    if (self.pipController.isPictureInPictureActive) {
        [self.pipController stopPictureInPicture];
    } else {
        [self.pipController startPictureInPicture];
    }
    
  2. 将此方法绑定到用户界面上的画中画按钮。用户可以通过点击画中画按钮在全屏播放和画中画模式之间切换,实现视频内容的无缝观看体验。

完整示例代码

// 在支持画中画的 VC 中,实现TTVideoEngineDelegate、
// AVPictureInPictureSampleBufferPlaybackDelegate、
// AVPictureInPictureControllerDelegate
// 以便与画中画交互
@interface PipViewController () <TTVideoEngineDelegate, AVPictureInPictureSampleBufferPlaybackDelegate, AVPictureInPictureControllerDelegate>

// Player SDK engine that decodes and plays the video.
@property (nonatomic, strong) TTVideoEngine* engine;
// View backed by an AVSampleBufferDisplayLayer; hosts the rendered frames.
@property (nonatomic, strong) VideoPlayerPipDisplayView *displayView;
// The displayView's backing layer, kept for direct sample-buffer enqueueing.
@property (nonatomic, strong) AVSampleBufferDisplayLayer *displayLayer;
// System controller that starts/stops PiP for the sample-buffer layer.
@property (nonatomic, strong) AVPictureInPictureController *pipController;

@end

@implementation PipViewController

// View lifecycle entry point: set up the PiP environment once the view exists.
- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view.
    ...
    // Initialize PiP: audio session, display view/layer, and the PiP controller.
    [self __updateAudioSession];
    [self __setupDisplayerView];
    [self __setupPipController];
    ...
}

// Configure the playback source, start playback, and take over decoded frames.
- (void)play {
    // Set the playback source and start playing.
    ...
    // Take over decoded video frames. Fixed selector name: step 3 defines
    // __startObserveVideoFrame; the previous call to __startObservingVideoFrame
    // referenced a method that does not exist.
    [self __startObserveVideoFrame];
    ...
}

// 手动开启/关闭画中画
- (void)switchPip {
    if (self.pipController.isPictureInPictureActive) {
        [self.pipController stopPictureInPicture];
    } else {
        [self.pipController startPictureInPicture];
    }
}

#pragma mark - AVPictureInPictureSampleBufferPlaybackDelegate
- (void)pictureInPictureController:(nonnull AVPictureInPictureController *)pictureInPictureController didTransitionToRenderSize:(CMVideoDimensions)newRenderSize {
    
}

// 响应画中画播放按钮点击事件
- (void)pictureInPictureController:(nonnull AVPictureInPictureController *)pictureInPictureController setPlaying:(BOOL)playing {
    if (playing) {
        [self.engine play];
    } else {
        [self.engine pause];
    }
    RunOnMainThread(^{
        [self.pipController invalidatePlaybackState];
    });
}

// 响应画中画查询播放状态事件
- (BOOL)pictureInPictureControllerIsPlaybackPaused:(nonnull AVPictureInPictureController *)pictureInPictureController {
    TTVideoEnginePlaybackState state = self.engine.playbackState;
    if (state != TTVideoEnginePlaybackStatePaused && state != TTVideoEnginePlaybackStateStopped && state != TTVideoEnginePlaybackStateError) {
        return NO;
    }

    return YES;
}

// 响应画中画查询播放进度事件
- (CMTimeRange)pictureInPictureControllerTimeRangeForPlayback:(nonnull AVPictureInPictureController *)pictureInPictureController {
    TTVideoEnginePlaybackState state = self.engine.playbackState;
    if (state != TTVideoEnginePlaybackStatePlaying && state != TTVideoEnginePlaybackStatePaused) {
        return CMTimeRangeMake(CMTimeMake(0, 1), CMTimeMake(100, 1));
    }

    NSInteger pos = self.engine.currentPlaybackTime;
    NSInteger dura = self.engine.duration;
    NSInteger interval = dura - pos;
    NSInteger timeBase = CACurrentMediaTime();
    NSInteger start = timeBase - pos;
    NSInteger end = timeBase + interval;
    CMTime t1 = CMTimeMakeWithSeconds(start, 1);
    CMTime t2 = CMTimeMakeWithSeconds(end, 1);

    return CMTimeRangeFromTimeToTime(t1, t2);
}

#pragma mark - AVPictureInPictureControllerDelegate
- (void)pictureInPictureControllerDidStartPictureInPicture:(AVPictureInPictureController *)pictureInPictureController {
    // 进入画中画后,开启后台解码,确保 seek、播放下一集可以正常渲染画面
    [TTVideoEngine setGlobalForKey:VEGSKeyEnableBackgroundMode value:@(1)];
}

// After PiP stops, disable background decoding to release system resources.
- (void)pictureInPictureControllerDidStopPictureInPicture:(AVPictureInPictureController *)pictureInPictureController {
    [TTVideoEngine setGlobalForKey:VEGSKeyEnableBackgroundMode value:@(0)];
}

#pragma mark - TTVideoEngineDelegate
// 播放状态变化时,通知画中画更新状态
// When the engine's playback state changes, tell the PiP controller (on the
// main thread) to re-query state so its play/pause UI stays in sync.
- (void)videoEngine:(TTVideoEngine *)videoEngine playbackStateDidChanged:(TTVideoEnginePlaybackState)playbackState {
    RunOnMainThread(^{
        [self.pipController invalidatePlaybackState];
    });
}