You need to enable JavaScript to run this app.
最新活动
产品
解决方案
定价
生态与合作
支持与服务
开发者
了解我们
导航

iOS

最近更新时间2023.09.22 16:33:51

首次发布时间2022.07.18 21:23:18

本章节介绍 iOS 端互动直播场景核心功能的实现方式。

前提条件

版本说明

  • 互动直播功能需要使用互动直播版 SDK,请您在安装 SDK 时选择正确的版本。

  • 以下接入内容对应互动直播版本为 v1.37.3。

主播端核心功能实现

以下是主播端核心功能实现的时序图和参考接入代码。

主播开播

时序图

示例代码

  • 创建 RTC 视频引擎,设置本地预览视图,设置视频编码参数。
// Initialize the ByteRTCVideo engine object
self.rtcVideo = [ByteRTCVideo createRTCVideo:self.appId delegate:self parameters:@{}];

// Bind the local preview view
ByteRTCVideoCanvas *canvasView = [[ByteRTCVideoCanvas alloc] init];
canvasView.view = view;
canvasView.renderMode = ByteRTCRenderModeHidden;
[self.rtcVideo setLocalVideoCanvas:ByteRTCStreamIndexMain withCanvas:canvasView];

// Configure the video encoder parameters
ByteRTCVideoEncoderConfig *solution = [[ByteRTCVideoEncoderConfig alloc] init];
solution.width = self.config.captureWidth;
solution.height = self.config.captureHeight;
solution.frameRate = self.config.captureFps;
solution.maxBitrate = self.config.videoEncoderKBitrate;
// (fix) Objective-C selectors are case-sensitive; the SDK method is the
// lowerCamelCase `setMaxVideoEncoderConfig:` — the original
// `SetMaxVideoEncoderConfig:` would not resolve.
[self.rtcVideo setMaxVideoEncoderConfig:solution];
  • 订阅 RTC 本地音视频数据。
// 订阅本地视频数据
[self.rtcVideo setLocalVideoSink:ByteRTCStreamIndexMain
                        withSink:self
                 withPixelFormat:(ByteRTCVideoSinkPixelFormatI420)];

// 订阅本地音频数据
ByteRTCAudioFormat *audioFormat = [[ByteRTCAudioFormat alloc] init];
audioFormat.channel = ByteRTCAudioChannelStereo;
audioFormat.sampleRate = ByteRTCAudioSampleRate44100;
[self.rtcVideo enableAudioFrameCallback:(ByteRTCAudioFrameCallbackRecord) format:audioFormat];
[self.rtcVideo setAudioFrameObserver:self];
  • 创建推流引擎,设置推流视频编码参数,设置推流地址。
// 创建推流引擎
self.liveEngine = [[LiveCore alloc] initWithMode:(LiveCoreModuleLiveStreaming)];

// 配置推流参数
LiveStreamConfiguration *streamConfig = [LiveStreamConfiguration defaultConfiguration];
streamConfig.outputSize  = CGSizeMake(self.config.videoEncoderWith, self.config.videoEncoderHeight);
streamConfig.bitrate     = self.config.videoEncoderKBitrate * 1000;
streamConfig.minBitrate  = self.config.videoEncoderKBitrate * 1000;
streamConfig.maxBitrate  = self.config.videoEncoderKBitrate * 1000;
streamConfig.videoFPS    = self.config.videoEncoderFps;

// url 为推流地址
streamConfig.URLs = @[url];
[self.liveEngine setupLiveSessionWithConfig:streamConfig];
  • 开启 RTC 音视频采集。
// 开始视频采集
[self.rtcVideo startVideoCapture];

// 开始音频采集
[self.rtcVideo startAudioCapture];
  • 开启推流引擎推流。
// 开始推流
[self.liveEngine startStreaming];
  • RTC 本地音视频回调数据发送给推流引擎。
// 视频采集回调, 发送视频数据给推流引擎
// Video capture callback: forward the captured frame to the streaming engine.
- (void)renderPixelBuffer:(CVPixelBufferRef)pixelBuffer
                 rotation:(ByteRTCVideoRotation)rotation
              contentType:(ByteRTCVideoContentType)contentType
             extendedData:(NSData *)extendedData {
    // Build a nanosecond-resolution presentation timestamp from the host clock.
    static const int32_t kNanosPerSecond = 1000000000;
    int64_t nowNanos = (int64_t)(CACurrentMediaTime() * kNanosPerSecond);
    CMTime presentationTime = CMTimeMake(nowNanos, kNanosPerSecond);
    [self.liveEngine.liveSession pushVideoBuffer:pixelBuffer
                                       andCMTime:presentationTime
                                        rotation:(int)rotation];
}

// 音频采集回调,发送音频数据给推流引擎
// Audio capture callback: forward captured PCM data to the streaming engine.
- (void)onRecordAudioFrame:(ByteRTCAudioFrame * _Nonnull)audioFrame {
    // Map the SDK channel layout to a channel count; anything other than
    // mono is treated as stereo (same default as before).
    int channelCount = (audioFrame.channel == ByteRTCAudioChannelMono) ? 1 : 2;

    // Nanosecond-resolution presentation timestamp from the host clock.
    static const int32_t kNanosPerSecond = 1000000000;
    int64_t nowNanos = (int64_t)(CACurrentMediaTime() * kNanosPerSecond);
    CMTime presentationTime = CMTimeMake(nowNanos, kNanosPerSecond);

    // Frame size calculation assumes 16-bit PCM samples.
    int bytesPerSampleFrame = 16 * channelCount / 8;
    int frameCount = (int)(audioFrame.buffer.length / bytesPerSampleFrame);

    [self.liveEngine pushAudioBuffer:(uint8_t *)[audioFrame.buffer bytes]
                          andDataLen:(size_t)audioFrame.buffer.length
                   andInNumberFrames:frameCount
                           andCMTime:presentationTime];
}

主播美颜(可选)

主播美颜功能都是通过 RTC 引擎进行对接,使用方式请参考 美颜特效(CV)

主播上麦

主播停止推流引擎推流,通过 RTC 引擎加入房间连麦,并开启 RTC 服务端合流转推。

时序图

代码示例

  • 停止推流引擎推流。
// 停止推流引擎推流
[self.liveEngine stopStreaming];
  • 创建 RTC 房间,设置用户信息,加入 RTC 房间。参考使用 Token 完成鉴权了解如何通过业务服务器获取鉴权 token。
// 创建 RTC 房间
self.rtcRoom = [self.rtcVideo createRTCRoom:self.roomId];
self.rtcRoom.delegate = self;

 // 设置用户信息
ByteRTCUserInfo *userInfo = [[ByteRTCUserInfo alloc] init];
userInfo.userId = self.userId;

ByteRTCRoomConfig *config = [ByteRTCRoomConfig new];
config.isAutoPublish = YES;
config.isAutoSubscribeAudio = YES;
config.isAutoSubscribeVideo = YES;

// 加入房间,token 信息通过业务服务器申请
[self.rtcRoom joinRoomByToken:token userInfo:userInfo roomConfig:config];
  • 收到加入 RTC 房间成功回调,开启 RTC 服务端合流转推。
// 加入房间成功通知
// Room state change notification. Starts server-side stream mixing (transcoding)
// once joining the room has succeeded.
- (void)rtcRoom:(ByteRTCRoom *_Nonnull)rtcRoom
   onRoomStateChanged:(NSString *_Nonnull)roomId
            withUid:(nonnull NSString *)uid
          state:(NSInteger)state
      extraInfo:(NSString *_Nonnull)extraInfo {

      // (fix) This callback fires for every room state change, not only on a
      // successful join; the original started transcoding unconditionally.
      // state == 0 indicates the join succeeded.
      if (state != 0) {
          return;
      }

      // Create the server-side mixing (transcoding) configuration
      self.rtcLiveTranscoding = [ByteRTCLiveTranscoding defaultTranscoding];
      self.rtcLiveTranscoding.roomId = self.roomId;
      self.rtcLiveTranscoding.userId = self.userId;

      // Video encoding parameters; these must match the push-stream encoder settings
      self.rtcLiveTranscoding.video.width = self.config.videoEncoderWith;
      self.rtcLiveTranscoding.video.height = self.config.videoEncoderHeight;
      self.rtcLiveTranscoding.video.fps = self.config.videoEncoderFps;
      self.rtcLiveTranscoding.video.kBitRate = self.config.videoEncoderKBitrate;

      // Audio encoding parameters; these must match the push-stream encoder settings
      self.rtcLiveTranscoding.audio.sampleRate = self.config.audioEncoderSampleRate;
      self.rtcLiveTranscoding.audio.channels = self.config.audioEncoderChannel;
      self.rtcLiveTranscoding.audio.kBitRate = self.config.audioEncoderKBitrate;

      // Push address: the anchor's RTMP push URL
      self.rtcLiveTranscoding.url = self.streamUrl;

      // Mix on the server side
      self.rtcLiveTranscoding.expectedMixingType = ByteRTCStreamMixingTypeByServer;

      // Initialize the composed layout
      ByteRTCVideoCompositingLayout *layout = [[ByteRTCVideoCompositingLayout alloc] init];

      // Background color (for reference only)
      layout.backgroundColor = @"#000000";

      NSMutableArray *regions = [[NSMutableArray alloc] initWithCapacity:6];

      // Anchor's region in the mixed layout
      ByteRTCVideoCompositingRegion *region = [[ByteRTCVideoCompositingRegion alloc] init];
      region.uid          = self.userId; // anchor uid
      region.roomId       = self.roomId;
      region.localUser    = YES;
      region.renderMode   = ByteRTCRenderModeHidden;
      region.x        = 0.0; // for reference only
      region.y        = 0.0; // for reference only
      region.width    = 0.5; // for reference only
      region.height   = 0.5; // for reference only
      region.zOrder   = 0;   // for reference only
      region.alpha    = 1.0; // for reference only
      [regions addObject:region];
      layout.regions = regions;

      // Apply the mixing layout
      self.rtcLiveTranscoding.layout = layout;

      // Mixing task ID; an empty string selects the default task
      self.rtcTaskId = @"";

      // Start server-side mixing and relay
      [self.rtcVideo startLiveTranscoding:self.rtcTaskId transcoding:self.rtcLiveTranscoding observer:self];
}
  • 收到房间内连麦用户的音视频流发布通知后,调整用户视图以及合流布局。
// A co-host in the room published a stream: attach their render view and
// update the server-side mixing layout to anchor + co-host.
// (fix) The original body referenced an undefined `streamType` (the parameter
// was named `type`), an undefined `uid`, and an undeclared `layout`.
- (void)rtcRoom:(ByteRTCRoom *)rtcRoom onUserPublishStream:(NSString *)userId type:(ByteRTCMediaStreamType)streamType {

     if (streamType == ByteRTCMediaStreamTypeVideo || streamType == ByteRTCMediaStreamTypeBoth) {
          // Bind the co-host's render view
         ByteRTCVideoCanvas *canvasView = [[ByteRTCVideoCanvas alloc] init];
         canvasView.uid = userId;
         canvasView.view = view; // `view` is the UIView supplied by your UI layer
         canvasView.roomId = self.roomId;
         canvasView.renderMode = ByteRTCRenderModeHidden;
         [self.rtcVideo setRemoteVideoCanvas:userId withIndex:(ByteRTCStreamIndexMain) withCanvas:canvasView];
     }

     // Rebuild the mixing layout (the original used `layout` without declaring it)
     ByteRTCVideoCompositingLayout *layout = [[ByteRTCVideoCompositingLayout alloc] init];
     NSMutableArray *regions = [[NSMutableArray alloc] initWithCapacity:6];

      // Anchor's region in the mixed layout
      ByteRTCVideoCompositingRegion *region = [[ByteRTCVideoCompositingRegion alloc] init];
      region.uid          = self.userId; // anchor uid
      region.roomId       = self.roomId;
      region.localUser    = YES;
      region.renderMode   = ByteRTCRenderModeHidden;
      region.x        = 0.0; // for reference only
      region.y        = 0.0; // for reference only
      region.width    = 0.5; // for reference only
      region.height   = 0.5; // for reference only
      region.zOrder   = 0;   // for reference only
      region.alpha    = 1.0; // for reference only
      [regions addObject:region];

      // Co-host's region in the mixed layout
      ByteRTCVideoCompositingRegion *regionRemote = [[ByteRTCVideoCompositingRegion alloc] init];
      regionRemote.uid          = userId; // co-host uid
      regionRemote.roomId       = self.roomId;
      regionRemote.localUser    = NO;
      regionRemote.renderMode   = ByteRTCRenderModeHidden;
      regionRemote.x        = 0.5; // for reference only
      regionRemote.y        = 0.0; // for reference only
      regionRemote.width    = 0.5; // for reference only
      regionRemote.height   = 0.5; // for reference only
      regionRemote.zOrder   = 1;   // for reference only
      regionRemote.alpha    = 1.0; // for reference only
      [regions addObject:regionRemote];

      layout.regions = regions;

      // Push the updated layout to the server-side mixing task
      self.rtcLiveTranscoding.layout = layout;
      [self.rtcVideo updateLiveTranscoding:self.rtcTaskId transcoding:self.rtcLiveTranscoding];
}

// A co-host stopped publishing: detach their render view and shrink the
// server-side mixing layout back to the anchor only.
- (void)manager:(VeLiveAnchorManager *)manager onUserUnPublishStream:(NSString *)uid type:(ByteRTCMediaStreamType)streamType reason:(ByteRTCStreamRemoveReason)reason {

     if (streamType == ByteRTCMediaStreamTypeVideo || streamType == ByteRTCMediaStreamTypeBoth) {
          // Remove the co-host's view: binding a canvas with a nil view clears it
         ByteRTCVideoCanvas *canvasView = [[ByteRTCVideoCanvas alloc] init];
         canvasView.uid = uid;
         canvasView.view = nil;
         canvasView.roomId = self.roomId;
         canvasView.renderMode = ByteRTCRenderModeHidden;
         [self.rtcVideo setRemoteVideoCanvas:uid withIndex:(ByteRTCStreamIndexMain) withCanvas:canvasView];
     }

     // Rebuild the mixing layout (fix: the original used `layout` without declaring it)
     ByteRTCVideoCompositingLayout *layout = [[ByteRTCVideoCompositingLayout alloc] init];
     NSMutableArray *regions = [[NSMutableArray alloc] initWithCapacity:6];

      // Keep only the anchor's region in the mixed layout
      ByteRTCVideoCompositingRegion *region = [[ByteRTCVideoCompositingRegion alloc] init];
      region.uid          = self.userId; // anchor uid
      region.roomId       = self.roomId;
      region.localUser    = YES;
      region.renderMode   = ByteRTCRenderModeHidden;
      region.x        = 0.0; // for reference only
      region.y        = 0.0; // for reference only
      region.width    = 0.5; // for reference only
      region.height   = 0.5; // for reference only
      region.zOrder   = 0;   // for reference only
      region.alpha    = 1.0; // for reference only
      [regions addObject:region];

      layout.regions = regions;

      // Push the updated layout to the server-side mixing task
      self.rtcLiveTranscoding.layout = layout;
      [self.rtcVideo updateLiveTranscoding:self.rtcTaskId transcoding:self.rtcLiveTranscoding];
}

主播下麦

主播停止 RTC 服务端合流转推,离开 RTC 房间,开启推流引擎推流。

时序图

示例代码

  • 停止 RTC 服务端合流,离开房间,移除连麦用户视图。
// 停止 RTC 服务端合流转推
[self.rtcVideo stopLiveTranscoding:self.rtcTaskId];

// 离开 RTC 房间
[self.rtcRoom leaveRoom];

// 移除连麦用户视图
ByteRTCVideoCanvas *canvasView = [[ByteRTCVideoCanvas alloc] init];
canvasView.uid = uid;
canvasView.view = nil; // 设置为nil
canvasView.roomId = self.roomId;
canvasView.renderMode = ByteRTCRenderModeHidden;
[self.rtcVideo setRemoteVideoCanvas:uid withIndex:(ByteRTCStreamIndexMain) withCanvas:canvasView];
  • 开启推流引擎推流。
// 开始推流引擎推流
[self.liveEngine startStreaming];

主播停播

主播停止直播,销毁 RTC 引擎和推流引擎。

时序图

代码示例

  • 停止推流引擎推流,销毁推流引擎。
// 停止推流引擎推流
[self.liveEngine stopStreaming];

// 销毁推流引擎
self.liveEngine = nil;
  • 停止 RTC 音视频采集,移除本地预览视图。
// Stop RTC video capture
[self.rtcVideo stopVideoCapture];

// Stop RTC audio capture
// (fix) the original called startAudioCapture here, which would restart the
// microphone instead of stopping it.
[self.rtcVideo stopAudioCapture];

// Remove the RTC local preview view: binding a canvas with a nil view clears it
ByteRTCVideoCanvas *canvasView = [[ByteRTCVideoCanvas alloc] init];
canvasView.view = nil;
canvasView.renderMode = ByteRTCRenderModeHidden;
[self.rtcVideo setLocalVideoCanvas:ByteRTCStreamIndexMain withCanvas:canvasView];
  • 销毁 RTC 房间,销毁 RTC 视频引擎。
// 销毁 RTC 房间
[self.rtcRoom destroy];
self.rtcRoom = nil;

// 销毁 RTC 视频引擎
[ByteRTCVideo destroyRTCVideo];
self.rtcVideo = nil;

观众端核心功能实现

以下是观众端核心功能实现的时序图和参考接入代码。

观众拉流

观众端通过播放器拉流观看直播。

时序图

示例代码

  • 创建和设置播放器。
// 创建播放器
TVLManager *livePlayer = [[TVLManager alloc] init];
self.livePlayer = livePlayer;

// 设置播放器回调
[self.livePlayer setObserver:self];

// 播放器参数设置
VeLivePlayerConfiguration *config = [[VeLivePlayerConfiguration alloc]init];
config.enableStatisticsCallback = YES;
config.enableLiveDNS = YES;
config.enableSei = YES;
[self.livePlayer setConfig:config];
  • 设置播放器视图,设置直播地址,开启拉流播放。
// 设置播放器视图
self.livePlayer.playerView.frame = UIScreen.mainScreen.bounds;
[self.view addSubview:self.livePlayer.playerView];

// 设置播放地址
[self.livePlayer setPlayUrl:LIVE_PULL_URL];

// 开始播放
[self.livePlayer play];

观众上麦

连麦观众停止播放器拉流播放,通过 RTC 引擎进行连麦。

时序图

示例代码

  • 停止播放,移除播放器视图。
// 停止播放
[self.livePlayer stop];

// 移除播放器视图
self.livePlayer.playerView.hidden = YES;
  • 创建 RTC 视频引擎,设置本地预览视图,设置视频编码参数。
// Initialize the ByteRTCVideo engine object
self.rtcVideo = [ByteRTCVideo createRTCVideo:self.appId delegate:self parameters:@{}];

// Bind the local preview view
ByteRTCVideoCanvas *canvasView = [[ByteRTCVideoCanvas alloc] init];
canvasView.view = view;
canvasView.renderMode = ByteRTCRenderModeHidden;
[self.rtcVideo setLocalVideoCanvas:ByteRTCStreamIndexMain withCanvas:canvasView];

// Configure the video encoder parameters
ByteRTCVideoEncoderConfig *solution = [[ByteRTCVideoEncoderConfig alloc] init];
solution.width = self.config.captureWidth;
solution.height = self.config.captureHeight;
solution.frameRate = self.config.captureFps;
solution.maxBitrate = self.config.videoEncoderKBitrate;
// (fix) Objective-C selectors are case-sensitive; the SDK method is the
// lowerCamelCase `setMaxVideoEncoderConfig:` — the original
// `SetMaxVideoEncoderConfig:` would not resolve.
[self.rtcVideo setMaxVideoEncoderConfig:solution];
  • 开启 RTC 音视频采集。
// 开始视频采集
[self.rtcVideo startVideoCapture];

// 开始音频采集
[self.rtcVideo startAudioCapture];
  • 创建 RTC 房间,设置用户信息,加入 RTC 房间。
// 创建 RTC 房间
self.rtcRoom = [self.rtcVideo createRTCRoom:self.roomId];
self.rtcRoom.delegate = self;

// 设置用户信息
ByteRTCUserInfo *userInfo = [[ByteRTCUserInfo alloc] init];
userInfo.userId = self.userId;

// 加入房间,开始连麦
ByteRTCRoomConfig *config = [ByteRTCRoomConfig new];
config.isAutoPublish = YES;
config.isAutoSubscribeAudio = YES;
config.isAutoSubscribeVideo = YES;
// token 信息通过业务服务器申请
[self.rtcRoom joinRoomByToken:token userInfo:userInfo roomConfig:config];
  • 收到房间内连麦用户的音视频流发布通知后,调整用户视图。
// A remote co-host published a stream: attach their render view.
// (fix) The original body referenced an undefined `streamType` (the parameter
// was named `type`) and an undefined `uid`.
- (void)rtcRoom:(ByteRTCRoom *)rtcRoom onUserPublishStream:(NSString *)userId type:(ByteRTCMediaStreamType)streamType {

     if (streamType == ByteRTCMediaStreamTypeVideo || streamType == ByteRTCMediaStreamTypeBoth) {
          // Bind the co-host's render view
         ByteRTCVideoCanvas *canvasView = [[ByteRTCVideoCanvas alloc] init];
         canvasView.uid = userId;
         canvasView.view = view; // `view` is the UIView supplied by your UI layer
         canvasView.roomId = self.roomId;
         canvasView.renderMode = ByteRTCRenderModeHidden;
         [self.rtcVideo setRemoteVideoCanvas:userId withIndex:(ByteRTCStreamIndexMain) withCanvas:canvasView];
     }
}

// A remote co-host stopped publishing: detach their render view.
- (void)manager:(VeLiveAnchorManager *)manager onUserUnPublishStream:(NSString *)uid type:(ByteRTCMediaStreamType)streamType reason:(ByteRTCStreamRemoveReason)reason {
     BOOL hasVideo = (streamType == ByteRTCMediaStreamTypeVideo ||
                      streamType == ByteRTCMediaStreamTypeBoth);
     if (!hasVideo) {
         return;
     }
     // Binding a canvas whose view is nil removes the previous view binding.
     ByteRTCVideoCanvas *emptyCanvas = [[ByteRTCVideoCanvas alloc] init];
     emptyCanvas.uid = uid;
     emptyCanvas.view = nil;
     emptyCanvas.roomId = self.roomId;
     emptyCanvas.renderMode = ByteRTCRenderModeHidden;
     [self.rtcVideo setRemoteVideoCanvas:uid withIndex:(ByteRTCStreamIndexMain) withCanvas:emptyCanvas];
}

观众美颜(可选)

观众连麦时,美颜功能都是通过 RTC 引擎进行对接,使用方式请参考 美颜特效(CV)

观众下麦

观众停止 RTC 引擎连麦,恢复拉流观看直播。

时序图

示例代码

  • 离开 RTC 房间,停止 RTC 音视频采集,移除 RTC 本地和远端视图。
// Leave the RTC room
[self.rtcRoom leaveRoom];

// Remove the local preview view
// (fix) the original declared `canvasView` twice in the same scope, which does
// not compile; the two canvases are given distinct names here.
ByteRTCVideoCanvas *localCanvas = [[ByteRTCVideoCanvas alloc] init];
localCanvas.view = nil; // a nil view clears the binding
localCanvas.renderMode = ByteRTCRenderModeHidden;
[self.rtcVideo setLocalVideoCanvas:ByteRTCStreamIndexMain withCanvas:localCanvas];

// Remove the co-host user's view
ByteRTCVideoCanvas *remoteCanvas = [[ByteRTCVideoCanvas alloc] init];
remoteCanvas.uid = uid;
remoteCanvas.view = nil; // a nil view clears the binding
remoteCanvas.roomId = self.roomId;
remoteCanvas.renderMode = ByteRTCRenderModeHidden;
[self.rtcVideo setRemoteVideoCanvas:uid withIndex:(ByteRTCStreamIndexMain) withCanvas:remoteCanvas];

// Stop video capture
[self.rtcVideo stopVideoCapture];

// Stop audio capture
[self.rtcVideo stopAudioCapture];
  • 设置播放器视图,启动拉流播放。
// 添加播放器视图
self.livePlayer.playerView.hidden = NO;

// 开始播放
[self.livePlayer play];

观众离开

观众停止观看直播,销毁播放器。

时序图

示例代码

  • 停止播放,移除播放器视图,销毁播放器。
// Stop playback
[self.livePlayer stop];

// Hide the player view
self.livePlayer.playerView.hidden = YES;

// Destroy the player
[self.livePlayer destroy];
self.livePlayer = nil; // (fix) the original line was missing its trailing semicolon
  • 销毁 RTC 房间,销毁 RTC 引擎。
// 销毁 RTC 房间
[self.rtcRoom destroy];
self.rtcRoom = nil;

// 销毁 RTC 引擎
[ByteRTCVideo destroyRTCVideo];
self.rtcVideo = nil;