SellyCloudSDK_demo/Example/SellyCloudSDK/VideoCall/SellyCallPiPManager.m

//
// SellyCallPiPManager.m
// SellyCloudSDK_Example
//
// Created by Caleb on 19/11/25.
// Copyright © 2025 Caleb. All rights reserved.
//
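// Manages Picture in Picture for a video call by mirroring decoded frames into an
// AVSampleBufferDisplayLayer and driving it through an
// AVPictureInPictureControllerContentSource (iOS 15+).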
#import "SellyCallPiPManager.h"
#import <AVKit/AVKit.h>
#import <AVFoundation/AVFoundation.h>
// Conformance to the PiP delegate protocols (may also be declared in the header).
@interface SellyCallPiPManager () <AVPictureInPictureControllerDelegate, AVPictureInPictureSampleBufferPlaybackDelegate>
@property (nonatomic, weak) UIView *renderView;
@property (nonatomic, strong) AVSampleBufferDisplayLayer *pipSampleBufferLayer;
@property (nonatomic, strong) AVPictureInPictureController *pipController;
@property (nonatomic, strong) dispatch_queue_t pipQueue;
@property (nonatomic, assign, readwrite) BOOL pipPossible;
@property (nonatomic, assign, readwrite) BOOL pipActive;
@end
@implementation SellyCallPiPManager
- (instancetype)initWithRenderView:(UIView *)renderView {
    self = [super init];
    if (self) {
        _renderView = renderView;
    }
    return self;
}
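// Lazily creates the PiP pipeline: a serial queue for enqueueing frames, the
// AVSampleBufferDisplayLayer that PiP renders from, and the AVPictureInPictureController
// built on a sample-buffer content source. Sets pipPossible to NO on devices or
// OS versions without PiP support.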
- (void)setupIfNeeded {
    if (@available(iOS 15.0, *)) {
        if (![AVPictureInPictureController isPictureInPictureSupported]) {
            self.pipPossible = NO;
            return;
        }
        self.pipQueue = dispatch_queue_create("com.sellycloud.pip.queue", DISPATCH_QUEUE_SERIAL);
        // Sample buffer layer (you can decide whether to add it to renderView for on-screen display)
        self.pipSampleBufferLayer = [[AVSampleBufferDisplayLayer alloc] init];
        self.pipSampleBufferLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
        if (self.renderView) {
            self.pipSampleBufferLayer.frame = self.renderView.bounds;
            [self.renderView.layer addSublayer:self.pipSampleBufferLayer];
        }
        // PiP content source
        AVPictureInPictureControllerContentSource *source =
            [[AVPictureInPictureControllerContentSource alloc] initWithSampleBufferDisplayLayer:self.pipSampleBufferLayer
                                                                               playbackDelegate:self];
        self.pipController = [[AVPictureInPictureController alloc] initWithContentSource:source];
        self.pipController.delegate = self;
        self.pipPossible = (self.pipController != nil);
    } else {
        self.pipPossible = NO;
    }
}
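// Starts PiP if it is currently possible and inactive, otherwise stops it.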
- (void)togglePiP {
    if (@available(iOS 15.0, *)) {
        if (!self.pipController || !self.pipController.isPictureInPicturePossible) {
            NSLog(@"[PiP] not possible");
            return;
        }
        if (!self.pipController.isPictureInPictureActive) {
            [self.pipController startPictureInPicture];
        } else {
            [self.pipController stopPictureInPicture];
        }
    }
}
#pragma mark - Feed frame
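// Wraps each decoded SDK frame in a CMSampleBuffer and enqueues it on the display
// layer from the serial PiP queue; a failed layer is flushed before enqueueing again.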
- (void)feedVideoFrame:(SellyRTCVideoFrame *)frame {
    if (!self.pipSampleBufferLayer || !frame.pixelBuffer) return;
    if (!self.pipPossible) return;
    CMSampleBufferRef sb = [self createSampleBufferFromVideoFrame:frame];
    if (!sb) return;
    dispatch_async(self.pipQueue, ^{
        if (self.pipSampleBufferLayer.status == AVQueuedSampleBufferRenderingStatusFailed) {
            NSLog(@"[PiP] display layer failed: %@", self.pipSampleBufferLayer.error);
            [self.pipSampleBufferLayer flushAndRemoveImage];
        }
        [self.pipSampleBufferLayer enqueueSampleBuffer:sb];
        CFRelease(sb);
    });
}
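// Builds a ready-to-display CMSampleBuffer around the frame's CVPixelBuffer.
// Uses the frame timestamp (assumed to be nanoseconds) as the presentation time,
// falling back to CACurrentMediaTime() when no timestamp is provided. Returns NULL
// on failure; the caller owns the returned buffer and must CFRelease it.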
- (CMSampleBufferRef)createSampleBufferFromVideoFrame:(SellyRTCVideoFrame *)videoData {
    if (!videoData || !videoData.pixelBuffer) {
        return NULL;
    }
    CVPixelBufferRef pixelBuffer = videoData.pixelBuffer;
    CMVideoFormatDescriptionRef videoInfo = NULL;
    OSStatus status = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault,
                                                                   pixelBuffer,
                                                                   &videoInfo);
    if (status != noErr || !videoInfo) {
        return NULL;
    }
    CMTime pts;
    if (videoData.timestamp > 0) {
        // Assumes the timestamp is in nanoseconds
        pts = CMTimeMake(videoData.timestamp, 1000000000);
    } else {
        CFTimeInterval t = CACurrentMediaTime();
        int64_t ms = (int64_t)(t * 1000);
        pts = CMTimeMake(ms, 1000);
    }
    CMSampleTimingInfo timingInfo;
    timingInfo.duration = kCMTimeInvalid;
    timingInfo.decodeTimeStamp = kCMTimeInvalid;
    timingInfo.presentationTimeStamp = pts;
    CMSampleBufferRef sampleBuffer = NULL;
    status = CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault,
                                                      pixelBuffer,
                                                      videoInfo,
                                                      &timingInfo,
                                                      &sampleBuffer);
    CFRelease(videoInfo);
    if (status != noErr) {
        if (sampleBuffer) {
            CFRelease(sampleBuffer);
        }
        return NULL;
    }
    // Mark the sample for immediate display
    CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, YES);
    if (attachments && CFArrayGetCount(attachments) > 0) {
        CFMutableDictionaryRef attachment =
            (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachments, 0);
        CFDictionarySetValue(attachment,
                             kCMSampleAttachmentKey_DisplayImmediately,
                             kCFBooleanTrue);
    }
    return sampleBuffer; // Caller is responsible for CFRelease
}
#pragma mark - AVPictureInPictureSampleBufferPlaybackDelegate
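// A live call has no seekable timeline: report an effectively infinite time range,
// never report the stream as paused, and treat skip requests as no-ops.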
- (void)pictureInPictureController:(AVPictureInPictureController *)pictureInPictureController
                        setPlaying:(BOOL)playing {
    NSLog(@"[PiP] setPlaying = %d", playing);
}
- (CMTimeRange)pictureInPictureControllerTimeRangeForPlayback:(AVPictureInPictureController *)pictureInPictureController {
    return CMTimeRangeMake(kCMTimeZero, CMTimeMake(INT64_MAX, 1000));
}
- (BOOL)pictureInPictureControllerIsPlaybackPaused:(AVPictureInPictureController *)pictureInPictureController {
    return NO;
}
- (void)pictureInPictureController:(AVPictureInPictureController *)pictureInPictureController
         didTransitionToRenderSize:(CMVideoDimensions)newRenderSize {
    NSLog(@"[PiP] render size = %d x %d", newRenderSize.width, newRenderSize.height);
}
- (void)pictureInPictureController:(AVPictureInPictureController *)pictureInPictureController
                    skipByInterval:(CMTime)skipInterval
                 completionHandler:(void (^)(void))completionHandler {
    if (completionHandler) {
        completionHandler();
    }
}
- (BOOL)pictureInPictureControllerShouldProhibitBackgroundAudioPlayback:(AVPictureInPictureController *)pictureInPictureController {
    return NO;
}
- (void)invalidatePlaybackState {
    // Generally not needed for a live stream
}
#pragma mark - AVPictureInPictureControllerDelegate
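// Tracks whether the PiP window is currently on screen via pipActive.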
- (void)pictureInPictureControllerWillStartPictureInPicture:(AVPictureInPictureController *)pictureInPictureController {
    NSLog(@"[PiP] will start");
    self.pipActive = YES;
}
- (void)pictureInPictureControllerDidStartPictureInPicture:(AVPictureInPictureController *)pictureInPictureController {
    NSLog(@"[PiP] did start");
    self.pipActive = YES;
}
- (void)pictureInPictureControllerDidStopPictureInPicture:(AVPictureInPictureController *)pictureInPictureController {
    NSLog(@"[PiP] did stop");
    self.pipActive = NO;
}
@end
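
/*
 Minimal usage sketch (hypothetical call sites: the view-controller property names and
 the callback delivering remote frames are assumptions, not part of SellyCloudSDK):

     // During call setup, e.g. in viewDidLoad:
     self.pipManager = [[SellyCallPiPManager alloc] initWithRenderView:self.remoteVideoView];
     [self.pipManager setupIfNeeded];

     // Wherever the SDK delivers decoded remote video frames:
     [self.pipManager feedVideoFrame:frame];

     // From a button tap, or when the app is about to enter the background:
     [self.pipManager togglePiP];
 */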