initial commit
This commit is contained in:
265
Example/SellyCloudSDK/VideoCall/SellyCallPiPManager.m
Normal file
265
Example/SellyCloudSDK/VideoCall/SellyCallPiPManager.m
Normal file
@@ -0,0 +1,265 @@
|
||||
//
|
||||
// SellyCallPiPManager.m
|
||||
// SellyCloudSDK_Example
|
||||
//
|
||||
// Created by Caleb on 19/11/25.
|
||||
// Copyright © 2025 Caleb. All rights reserved.
|
||||
//
|
||||
|
||||
#import "SellyCallPiPManager.h"
|
||||
#import <AVKit/AVKit.h>
|
||||
#import <AVFoundation/AVFoundation.h>
|
||||
|
||||
// Private class extension: holds the PiP plumbing that callers never need to
// see, and adopts the two AVKit protocols this class implements below — the
// original passed `self` as playbackDelegate and delegate without declaring
// conformance anywhere visible in this file.
@interface SellyCallPiPManager () <AVPictureInPictureControllerDelegate,
                                   AVPictureInPictureSampleBufferPlaybackDelegate>

// View whose layer hosts the sample-buffer layer; weak so the manager does
// not retain the view hierarchy that (presumably) owns it.
@property (nonatomic, weak) UIView *renderView;

// Layer that receives decoded frames and backs the system PiP window.
@property (nonatomic, strong) AVSampleBufferDisplayLayer *pipSampleBufferLayer;
// Controller driving Picture-in-Picture (iOS 15+ content-source API).
@property (nonatomic, strong) AVPictureInPictureController *pipController;
// Serial queue on which sample buffers are enqueued to the display layer.
@property (nonatomic, strong) dispatch_queue_t pipQueue;
// Readwrite redeclarations of the publicly readonly state flags.
@property (nonatomic, assign, readwrite) BOOL pipPossible;
@property (nonatomic, assign, readwrite) BOOL pipActive;

@end
|
||||
|
||||
@implementation SellyCallPiPManager
|
||||
|
||||
// Designated initializer. Only records a weak reference to the view that will
// host the PiP sample-buffer layer; call -setupIfNeeded before starting PiP.
- (instancetype)initWithRenderView:(UIView *)renderView {
    if ((self = [super init])) {
        _renderView = renderView;
    }
    return self;
}
|
||||
|
||||
// Lazily builds the PiP pipeline (queue + display layer + controller).
// Now actually "IfNeeded": a repeat call is a no-op — the original rebuilt
// everything and stacked a duplicate AVSampleBufferDisplayLayer onto
// renderView each time. Sets pipPossible to reflect the outcome.
- (void)setupIfNeeded {
    if (@available(iOS 15.0, *)) {
        // Already configured — nothing to do.
        if (self.pipController) {
            return;
        }
        if (![AVPictureInPictureController isPictureInPictureSupported]) {
            self.pipPossible = NO;
            return;
        }

        self.pipQueue = dispatch_queue_create("com.sellycloud.pip.queue", DISPATCH_QUEUE_SERIAL);

        // Sample-buffer layer (optionally also rendered inside renderView).
        self.pipSampleBufferLayer = [[AVSampleBufferDisplayLayer alloc] init];
        self.pipSampleBufferLayer.videoGravity = AVLayerVideoGravityResizeAspect;

        if (self.renderView) {
            self.pipSampleBufferLayer.frame = self.renderView.bounds;
            [self.renderView.layer addSublayer:self.pipSampleBufferLayer];
        }

        // PiP content source backed by the sample-buffer layer; `self` must
        // conform to AVPictureInPictureSampleBufferPlaybackDelegate.
        AVPictureInPictureControllerContentSource *source =
            [[AVPictureInPictureControllerContentSource alloc] initWithSampleBufferDisplayLayer:self.pipSampleBufferLayer
                                                                               playbackDelegate:self];

        self.pipController = [[AVPictureInPictureController alloc] initWithContentSource:source];
        self.pipController.delegate = self;
        // ObjC BOOL literals rather than C99 true/false.
        self.pipController.canStartPictureInPictureAutomaticallyFromInline = YES;
        self.pipController.requiresLinearPlayback = YES;

        self.pipPossible = (self.pipController != nil);
    } else {
        self.pipPossible = NO;
    }
}
|
||||
|
||||
// Starts PiP when inactive, stops it when active. The verbose logging is kept
// intentionally for field debugging of PiP availability problems.
- (void)togglePiP {
    if (@available(iOS 15.0, *)) {
        AVPictureInPictureController *controller = self.pipController;

        NSLog(@"[PiP] togglePiP called");
        NSLog(@"[PiP] pipController: %@", controller ? @"存在" : @"nil");
        NSLog(@"[PiP] isPictureInPicturePossible: %d", controller.isPictureInPicturePossible);
        NSLog(@"[PiP] isPictureInPictureActive: %d", controller.isPictureInPictureActive);
        NSLog(@"[PiP] isPictureInPictureSupported: %d", [AVPictureInPictureController isPictureInPictureSupported]);
        NSLog(@"[PiP] sampleBufferLayer status: %ld", (long)self.pipSampleBufferLayer.status);

        if (!controller) {
            NSLog(@"[PiP] ❌ pipController is nil");
            return;
        }
        if (!controller.isPictureInPicturePossible) {
            NSLog(@"[PiP] ❌ isPictureInPicturePossible is NO");
            return;
        }

        if (controller.isPictureInPictureActive) {
            NSLog(@"[PiP] ✅ 停止画中画...");
            [controller stopPictureInPicture];
        } else {
            // Keep feeding frames so the layer has content when PiP starts.
            NSLog(@"[PiP] ✅ 开始启动画中画...");
            [controller startPictureInPicture];
        }
    }
}
|
||||
|
||||
#pragma mark - Feed frame
|
||||
|
||||
// Converts an incoming video frame to a CMSampleBuffer and enqueues it on the
// PiP display layer. Frames keep flowing even while PiP is active so the
// system window always shows fresh content.
- (void)feedVideoFrame:(SellyRTCVideoFrame *)frame {
    if (!self.pipSampleBufferLayer || !frame.pixelBuffer) return;
    if (!self.pipPossible) return;

    // +1 (Create-rule) buffer; released inside the block below.
    CMSampleBufferRef sb = [self createSampleBufferFromVideoFrame:frame];
    if (!sb) return;

    dispatch_async(self.pipQueue, ^{
        AVSampleBufferDisplayLayer *layer = self.pipSampleBufferLayer;
        if (layer.status == AVQueuedSampleBufferRenderingStatusFailed) {
            NSLog(@"[PiP] display layer failed: %@", layer.error);
            [layer flush]; // recover by discarding the failed pipeline state
        }
        // Drop the frame instead of piling buffers onto a layer that is not
        // ready — per AVQueuedSampleBufferRendering, enqueueing regardless of
        // readiness lets the queue grow without bound on a live source.
        if (layer.isReadyForMoreMediaData) {
            [layer enqueueSampleBuffer:sb];
        }
        CFRelease(sb); // balances createSampleBufferFromVideoFrame:
    });
}
|
||||
|
||||
// Wraps the frame's CVPixelBuffer in a ready-to-display CMSampleBuffer.
// Returns a +1 (Create-rule) CMSampleBufferRef — the CALLER must CFRelease —
// or nil when the frame is empty or any Core Media call fails.
- (CMSampleBufferRef)createSampleBufferFromVideoFrame:(SellyRTCVideoFrame *)videoData {
    if (!videoData || !videoData.pixelBuffer) {
        return nil;
    }

    CVPixelBufferRef pixelBuffer = videoData.pixelBuffer;

    // Format description matching the pixel buffer's layout.
    CMVideoFormatDescriptionRef videoInfo = NULL;
    OSStatus status = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault,
                                                                   pixelBuffer,
                                                                   &videoInfo);
    if (status != noErr || !videoInfo) {
        return nil;
    }

    // Presentation timestamp: prefer the frame's own clock when available.
    CMTime pts;
    if (videoData.timestamp > 0) {
        // Assumes timestamp is in nanoseconds — TODO(review): confirm the unit
        // SellyRTCVideoFrame uses; a ms/us source would skew the PTS badly.
        pts = CMTimeMake(videoData.timestamp, 1000000000);
    } else {
        // Fallback: host clock, millisecond precision.
        CFTimeInterval t = CACurrentMediaTime();
        int64_t ms = (int64_t)(t * 1000);
        pts = CMTimeMake(ms, 1000);
    }

    // No duration or decode timestamp — a single live frame, shown on arrival.
    CMSampleTimingInfo timingInfo;
    timingInfo.duration = kCMTimeInvalid;
    timingInfo.decodeTimeStamp = kCMTimeInvalid;
    timingInfo.presentationTimeStamp = pts;

    CMSampleBufferRef sampleBuffer = NULL;
    status = CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault,
                                                      pixelBuffer,
                                                      videoInfo,
                                                      &timingInfo,
                                                      &sampleBuffer);
    CFRelease(videoInfo); // no longer needed whether creation succeeded or not

    if (status != noErr) {
        // Defensive: CMSampleBufferCreateReadyWithImageBuffer should leave the
        // out-param NULL on failure, but release it if one was produced.
        if (sampleBuffer) {
            CFRelease(sampleBuffer);
        }
        return nil;
    }

    // Mark the sample for immediate display, ignoring its timestamp.
    CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, YES);
    if (attachments && CFArrayGetCount(attachments) > 0) {
        CFMutableDictionaryRef attachment =
            (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachments, 0);
        CFDictionarySetValue(attachment,
                             kCMSampleAttachmentKey_DisplayImmediately,
                             kCFBooleanTrue);
    }

    return sampleBuffer; // caller must CFRelease
}
|
||||
|
||||
#pragma mark - AVPictureInPictureSampleBufferPlaybackDelegate
|
||||
|
||||
// Play/pause toggle from the PiP window. A live call stream has no
// user-controllable playback state, so this only logs the request.
- (void)pictureInPictureController:(AVPictureInPictureController *)controller
                        setPlaying:(BOOL)playing {
    NSLog(@"[PiP] setPlaying = %d", playing);
}
|
||||
|
||||
// Tell the system this is LIVE content. Apple's sample-buffer PiP API defines
// the (-infinity, +infinity) range as the live-stream signal (no scrubber);
// the original's huge-but-finite (0, INT64_MAX/1000) range presents as
// seekable VOD in the PiP window.
- (CMTimeRange)pictureInPictureControllerTimeRangeForPlayback:(AVPictureInPictureController *)controller {
    return CMTimeRangeMake(kCMTimeNegativeInfinity, kCMTimePositiveInfinity);
}
|
||||
|
||||
// A live call is never "paused" — always report playing so the PiP window
// does not show a paused state.
- (BOOL)pictureInPictureControllerIsPlaybackPaused:(AVPictureInPictureController *)controller {
    return NO;
}
|
||||
|
||||
// The PiP window was resized; log the new dimensions for debugging.
- (void)pictureInPictureController:(AVPictureInPictureController *)controller
         didTransitionToRenderSize:(CMVideoDimensions)newRenderSize {
    NSLog(@"[PiP] render size = %d x %d", newRenderSize.width, newRenderSize.height);
}
|
||||
|
||||
// Skip forward/backward request from the PiP controls. Seeking is meaningless
// on a realtime stream, so acknowledge immediately without doing anything.
- (void)pictureInPictureController:(AVPictureInPictureController *)controller
                    skipByInterval:(CMTime)skipInterval
                 completionHandler:(void (^)(void))completionHandler {
    if (completionHandler) {
        completionHandler();
    }
}
|
||||
|
||||
// Allow other audio (the call itself) to keep playing in the background
// while the PiP window is up.
- (BOOL)pictureInPictureControllerShouldProhibitBackgroundAudioPlayback:(AVPictureInPictureController *)controller {
    return NO;
}
|
||||
|
||||
// Intentionally empty: realtime streams have no playback state to invalidate.
// NOTE(review): this is an instance method on the manager, not a delegate
// callback — nothing in this file calls it; confirm it is used elsewhere.
- (void)invalidatePlaybackState {
}
|
||||
|
||||
#pragma mark - AVPictureInPictureControllerDelegate
|
||||
|
||||
// The system is about to animate the PiP window in.
- (void)pictureInPictureControllerWillStartPictureInPicture:(AVPictureInPictureController *)controller {
    NSLog(@"[PiP] will start");
    self.pipActive = YES;
}
|
||||
|
||||
// PiP is now on screen (pipActive was already set in willStart; this keeps
// the flag correct even if willStart was skipped).
- (void)pictureInPictureControllerDidStartPictureInPicture:(AVPictureInPictureController *)controller {
    NSLog(@"[PiP] did start");
    self.pipActive = YES;
}
|
||||
|
||||
// The PiP window has been dismissed.
- (void)pictureInPictureControllerDidStopPictureInPicture:(AVPictureInPictureController *)controller {
    NSLog(@"[PiP] did stop");
    self.pipActive = NO;
}
|
||||
|
||||
// PiP failed to start (e.g. missing entitlement, backgrounding restriction);
// keep the active flag in sync and surface the error in the log.
- (void)pictureInPictureController:(AVPictureInPictureController *)controller
failedToStartPictureInPictureWithError:(NSError *)error {
    NSLog(@"[PiP] ❌❌❌ failed to start with error: %@", error);
    self.pipActive = NO;
}
|
||||
|
||||
// The PiP window is closing and the app should restore its full-screen call
// UI. No restoration work is needed here, so report success immediately.
- (void)pictureInPictureController:(AVPictureInPictureController *)controller
restoreUserInterfaceForPictureInPictureStopWithCompletionHandler:(void (^)(BOOL))completionHandler {
    NSLog(@"[PiP] restore UI");
    if (completionHandler) {
        completionHandler(YES);
    }
}
|
||||
|
||||
// Tears down the whole PiP pipeline. Order matters: stop an active session
// first, then detach and drop the controller, then flush/remove the display
// layer, and finally reset the public state flags. Idempotent — every step is
// a nil-safe message send, so calling this twice is harmless.
- (void)invalidate {
    if (@available(iOS 15.0, *)) {
        if (self.pipController.isPictureInPictureActive) {
            [self.pipController stopPictureInPicture];
        }
        self.pipController.delegate = nil;
        self.pipController = nil;
        // Discard any queued sample buffers before detaching the layer.
        [self.pipSampleBufferLayer flush];
        [self.pipSampleBufferLayer removeFromSuperlayer];
        self.pipSampleBufferLayer = nil;
        self.pipPossible = NO;
        self.pipActive = NO;
    }
}
|
||||
|
||||
// Ensure the PiP session and layer are torn down with the manager.
// NOTE(review): -invalidate goes through property accessors, which is
// generally discouraged from dealloc; here the accessors are plain
// synthesized ones, so this is benign — confirm no KVO is attached.
- (void)dealloc {
    [self invalidate];
}
|
||||
|
||||
@end
|
||||
Reference in New Issue
Block a user