Compare commits

7 Commits
main ... uu

50 changed files with 3966 additions and 1929 deletions

644
README.md
View File

@@ -1,643 +1 @@
SellyCloudSDK
SellyRTC Android SDK 接入文档
本文档介绍如何在 Android 中使用 SellyRTC 快速集成一对一或多人音视频通话功能,包括:
- 基本接入
- 音视频控制
- 数据处理(如美颜)
- 事件回调
- 通话统计
- Token 生成与更新机制
---
## 目录
1. 准备工作
2. 快速开始
- 创建引擎
- 设置本地/远端画面
- 配置视频参数
- 加入频道
- 结束通话
3. 常用功能
- 开关本地音视频
- 切换摄像头
- 静音远端音视频
- 音频输出控制(扬声器 / 听筒)
- 发送自定义消息
- 美颜开关
4. 视频帧处理(美颜等)
5. 事件回调 (InteractiveRtcEngineEventHandler)
6. 通话统计
7. Token 过期机制
8. 常见问题
---
# 1. 准备工作
## 1.1 集成 SellyCloudSDK
或如果目前是通过本地 AAR 集成demo 方式):
```gradle
dependencies {
implementation files("libs/sellycloudsdk-release.aar")
}
```
> 注意:如果你的业务侧还依赖 WebRTC、ijkplayer、美颜等第三方库请保持与 SDK Demo 中的依赖版本一致。
## 1.2 必要权限
`AndroidManifest.xml` 中声明音视频必需权限:
```xml
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
```
在 Android 6.0+ 设备上运行时还需要动态申请权限示例见后文Demo 中的 `requiredPermissions` + `ActivityResultContracts.RequestMultiplePermissions` 已经实现)。
## 1.3 获取 AppId / Secret / Token
从 SellyCloud 控制台获取:
- `signaling_app_id`
- `signaling_secret`(用于服务端生成 Token
- 或直接配置一个测试用的 `signaling_token`
在 Demo 中,这些值通常配置在 `res/values/strings.xml`
```xml
<string name="signaling_app_id">your-app-id</string>
<string name="signaling_secret">your-secret</string>
<string name="signaling_token"></string> <!-- 可选:直接写死 token -->
```
> 生产环境建议:
> 不要在 App 里写 secret而是在你们自己的业务服务器上生成 TokenApp 只向服务器请求 Token。
---
# 2. 快速开始
以下示例基于 Demo 中的 `InteractiveLiveActivity`,展示最小接入流程。
## 2.1 创建引擎 InteractiveRtcEngine
`Activity` 中创建并配置 RTC 引擎:
```kotlin
private var rtcEngine: InteractiveRtcEngine? = null
private var beautyRenderer: FURenderer? = null
private var fuFrameInterceptor: FuVideoFrameInterceptor? = null
@Volatile private var isFrontCamera = true
@Volatile private var beautyEnabled: Boolean = true
private fun initRtcEngine() {
val appId = getString(R.string.signaling_app_id)
val token = getString(R.string.signaling_token).takeIf { it.isNotBlank() }
// 可选:初始化美颜
beautyRenderer = FURenderer(this).also { it.setup() }
fuFrameInterceptor = beautyRenderer?.let {
FuVideoFrameInterceptor(it).apply {
setFrontCamera(isFrontCamera)
setEnabled(beautyEnabled)
}
}
rtcEngine = InteractiveRtcEngine.create(
InteractiveRtcEngineConfig(
context = applicationContext,
appId = appId,
defaultToken = token
)
).apply {
// 设置回调
setEventHandler(rtcEventHandler)
// 角色:主播/观众Demo 里默认主播BROADCASTER
setClientRole(InteractiveRtcEngine.ClientRole.BROADCASTER)
// 配置视频参数(可选,见下一节)
setVideoEncoderConfiguration(
InteractiveVideoEncoderConfig(
width = 640,
height = 480,
fps = 20,
minBitrateKbps = 150,
maxBitrateKbps = 350
)
)
// 默认走扬声器
setDefaultAudioRoutetoSpeakerphone(true)
// 视频采集前拦截(用于美颜等)
setCaptureVideoFrameInterceptor { frame ->
if (!beautyEnabled) return@setCaptureVideoFrameInterceptor frame
fuFrameInterceptor?.process(frame) ?: frame
}
}
}
```
生命周期注意:
`onDestroy` 中记得 `leaveChannel()` 并销毁引擎,避免内存泄漏:
```kotlin
override fun onDestroy() {
super.onDestroy()
rtcEngine?.setCaptureVideoFrameInterceptor(null)
leaveChannel()
InteractiveRtcEngine.destroy(rtcEngine)
rtcEngine = null
// 释放 renderer / 美颜资源...
}
```
## 2.2 设置本地 & 远端画面
SellyRTC 使用 `InteractiveVideoCanvas + SurfaceViewRenderer` 来承载视频画面。
### 初始化本地与远端渲染 View
```kotlin
private var localRenderer: SurfaceViewRenderer? = null
private val remoteRendererMap = mutableMapOf<String, SurfaceViewRenderer>()
private fun createRenderer(): SurfaceViewRenderer =
SurfaceViewRenderer(this).apply {
setZOrderMediaOverlay(false)
}
private fun setupVideoSlots() {
// 本地 slot
if (localRenderer == null) {
localRenderer = createRenderer()
}
localRenderer?.let { renderer ->
// Demo 中使用自定义的 VideoReportLayout 来承载
binding.flLocal.attachRenderer(renderer)
}
// 远端 slot 见 Demo 中的 remoteSlots / ensureRemoteRenderer
}
```
### 绑定本地视频
在加入频道前/时,设置本地视频 canvas
```kotlin
val renderer = localRenderer ?: createRenderer().also { localRenderer = it }
rtcEngine?.setupLocalVideo(InteractiveVideoCanvas(renderer, localUserId))
```
### 绑定远端视频
`onUserJoined` 或业务逻辑中,为某个 `userId` 分配一个远端窗口:
```kotlin
private fun ensureRemoteRenderer(userId: String): SurfaceViewRenderer {
return remoteRendererMap[userId] ?: createRenderer().also { renderer ->
remoteRendererMap[userId] = renderer
rtcEngine?.setupRemoteVideo(InteractiveVideoCanvas(renderer, userId))
}
}
```
> 多人会议:为不同的 `userId` 分配不同的 View / slot即可实现多路画面显示。
## 2.3 配置视频参数(可选)
视频编码参数需要在加入频道前配置:
```kotlin
rtcEngine?.setVideoEncoderConfiguration(
InteractiveVideoEncoderConfig(
width = 640,
height = 480,
fps = 20,
minBitrateKbps = 150,
maxBitrateKbps = 350
)
)
// 不设置则使用 SDK 默认配置
```
## 2.4 加入频道 / 发起通话
### 1准备 CallType 等入会参数
```kotlin
val options = InteractiveChannelMediaOptions(
callType = if (isP2P) CallType.ONE_TO_ONE else CallType.GROUP
)
```
其中:
- `CallType.ONE_TO_ONE`:一对一视频通话
- `CallType.GROUP`:多人会议 / 互动直播
### 2生成 Token
Demo 中的策略(简化):
```kotlin
private val defaultTokenTtlSeconds = InteractiveCallConfig.DEFAULT_TOKEN_TTL_SECONDS
private fun buildToken(appId: String, callId: String, userId: String): TokenBundle? {
val manualToken = getString(R.string.signaling_token).takeIf { it.isNotBlank() }
if (manualToken != null) {
return TokenBundle(
token = manualToken,
expiresAtSec = parseExprTime(manualToken),
secret = null
)
}
val secret = getString(R.string.signaling_secret)
if (secret.isBlank()) {
Toast.makeText(
this,
"请在 strings.xml 配置 signaling_secret 用于生成 token或直接填写 signaling_token",
Toast.LENGTH_LONG
).show()
return null
}
return try {
val generated = TokenGenerator.generate(
appId = appId,
userId = userId,
callId = callId,
secret = secret,
ttlSeconds = defaultTokenTtlSeconds
)
TokenBundle(
token = generated.token,
expiresAtSec = generated.expiresAtSec,
secret = secret
)
} catch (t: Throwable) {
Toast.makeText(this, "生成 token 失败: ${t.message}", Toast.LENGTH_LONG).show()
null
}
}
```
> 生产环境建议:
> 将 `TokenGenerator` 放在你的业务服务器,客户端只请求业务服务器获取 Token。
### 3调用 joinChannel
```kotlin
rtcEngine?.joinChannel(
token = request.token,
callId = request.callId,
userId = request.userId,
options = request.options, // CallType 等
tokenSecret = request.tokenSecret, // 可为空
tokenExpiresAtSec = request.tokenExpiresAtSec,
tokenTtlSeconds = request.tokenTtlSeconds
)
```
成功后,会回调:
```kotlin
override fun onJoinChannelSuccess(channel: String, userId: String, code: Int) {
// 已成功加入频道,可更新 UI 状态
}
```
## 2.5 结束通话
业务结束通话时调用:
```kotlin
private fun leaveChannel() {
rtcEngine?.leaveChannel()
resetUiAfterLeave() // 清 UI、清理 renderer 等
}
```
SDK 会通过:
```kotlin
override fun onLeaveChannel(durationSeconds: Int) {
// 通话结束时长(秒)
}
```
通知已经离开频道。
---
# 3. 常用功能
以下示例同样来自 Demo可直接复用。
## 3.1 开/关本地视频
```kotlin
private var isLocalVideoEnabled = true
private var isLocalPreviewEnabled = true
binding.btnToggleCamera.setOnClickListener {
isLocalVideoEnabled = !isLocalVideoEnabled
rtcEngine?.enableLocalVideo(isLocalVideoEnabled)
isLocalPreviewEnabled = isLocalVideoEnabled
updateControlButtons()
}
```
## 3.2 开/关本地音频采集
```kotlin
private var isLocalAudioEnabled = true
binding.btnToggleMic.setOnClickListener {
isLocalAudioEnabled = !isLocalAudioEnabled
rtcEngine?.enableLocalAudio(isLocalAudioEnabled)
updateControlButtons()
}
```
## 3.3 切换前后摄像头
```kotlin
binding.btnSwitchCamera.setOnClickListener {
isFrontCamera = !isFrontCamera
fuFrameInterceptor?.setFrontCamera(isFrontCamera)
rtcEngine?.switchCamera()
}
```
## 3.4 静音远端音视频
按用户静音远端音频 / 视频:
```kotlin
private fun muteRemoteUserAudio(targetUserId: String, muted: Boolean) {
rtcEngine?.muteRemoteAudioStream(targetUserId, muted)
}
private fun muteRemoteUserVideo(targetUserId: String, muted: Boolean) {
rtcEngine?.muteRemoteVideoStream(targetUserId, muted)
}
```
## 3.5 控制音频输出(扬声器 / 听筒)
```kotlin
private var isSpeakerOn = true
binding.btnToggleAudioRoute.setOnClickListener {
isSpeakerOn = !isSpeakerOn
rtcEngine?.setDefaultAudioRoutetoSpeakerphone(isSpeakerOn)
updateControlButtons()
}
```
## 3.6 发送自定义消息
```kotlin
binding.btnSendMessage.setOnClickListener {
val text = binding.etMessage.text?.toString()?.trim().orEmpty()
if (text.isEmpty()) {
Toast.makeText(this, "请输入消息内容", Toast.LENGTH_SHORT).show()
} else if (currentCallId == null) {
Toast.makeText(this, "请先加入频道", Toast.LENGTH_SHORT).show()
} else {
rtcEngine?.sendMessage(text) { error ->
runOnUiThread {
if (error != null) {
Toast.makeText(this, "发送失败: ${error.message}", Toast.LENGTH_SHORT).show()
} else {
Toast.makeText(this, "已发送", Toast.LENGTH_SHORT).show()
binding.etMessage.text?.clear()
binding.tvMessageLog.text = "我: $text"
}
}
}
}
}
```
收到消息的回调见后文 `onMessageReceived`
## 3.7 美颜开关
```kotlin
binding.btnToggleBeauty.setOnClickListener {
beautyEnabled = !beautyEnabled
fuFrameInterceptor?.setEnabled(beautyEnabled)
updateControlButtons()
}
```
---
# 4. 视频帧处理(美颜等)
SellyRTC 提供视频采集前拦截接口,可以在推流前做美颜、滤镜等处理。
在创建引擎后设置:
```kotlin
rtcEngine?.setCaptureVideoFrameInterceptor { frame ->
if (!beautyEnabled) return@setCaptureVideoFrameInterceptor frame
fuFrameInterceptor?.process(frame) ?: frame
}
```
其中 `FuVideoFrameInterceptor` 内部使用 `FURenderer` 做实际美颜处理。
> 你也可以替换为自己的处理逻辑:
> - 对 `frame` 做 GPU 或 CPU 处理
> - 返回处理后的帧给 SDK 继续编码和发送
---
# 5. 事件回调 (InteractiveRtcEngineEventHandler)
实现 `InteractiveRtcEngineEventHandler`,监听通话过程中发生的事件:
```kotlin
private val rtcEventHandler = object : InteractiveRtcEngineEventHandler {
override fun onJoinChannelSuccess(channel: String, userId: String, code: Int) { ... }
override fun onLeaveChannel(durationSeconds: Int) { ... }
override fun onUserJoined(userId: String, code: Int) { ... }
override fun onUserLeave(userId: String, code: Int) { ... }
override fun onConnectionStateChanged(
state: InteractiveConnectionState,
reason: Int,
userId: String?
) { ... }
override fun onError(code: String, message: String) { ... }
override fun onLocalVideoStats(stats: InteractiveStreamStats) { ... }
override fun onRemoteVideoStats(stats: InteractiveStreamStats) { ... }
override fun onMessageReceived(message: String, userId: String?) { ... }
override fun onTokenWillExpire(token: String?, expiresAt: Long) { ... }
override fun onTokenExpired(token: String?, expiresAt: Long) { ... }
override fun onDuration(durationSeconds: Long) { ... }
override fun onRemoteVideoEnabled(enabled: Boolean, userId: String?) { ... }
override fun onRemoteAudioEnabled(enabled: Boolean, userId: String?) { ... }
override fun onStreamStateChanged(
peerId: String,
state: RemoteState,
code: Int,
message: String?
) { ... }
}
```
**常见事件说明:**
- `onConnectionStateChanged`连接状态变化Disconnected / Connecting / Connected / Reconnecting / Failed
- `onUserJoined` / `onUserLeave`:远端用户加入/离开频道
- `onRemoteVideoEnabled` / `onRemoteAudioEnabled`:远端用户开关音视频
- `onMessageReceived`:收到自定义消息
- `onDuration`:通话时长更新(秒)
- `onError`:错误回调(建议弹窗 + 打日志)
---
# 6. 通话统计信息
## 6.1 单路流统计InteractiveStreamStats
在本地/远端视频统计回调中获取:
```kotlin
override fun onLocalVideoStats(stats: InteractiveStreamStats) {
// stats.width / height / fps / videoBitrateKbps / audioBitrateKbps / rttMs 等
}
override fun onRemoteVideoStats(stats: InteractiveStreamStats) {
// 针对某个 userId 的码率、分辨率、丢包、RTT 等
}
```
你可以将这些信息显示在 UI 上Demo 中的 `buildStatsLabel` 已经示范了如何构造:
```kotlin
private fun buildStatsLabel(header: String, stats: InteractiveStreamStats?): String {
// Res: WxH, FPS, Codec, Video/Audio Kbps, RTT 等
}
```
## 6.2 通话结束时长onLeaveChannel
`onLeaveChannel` 中可以拿到本次通话时长(秒),无论是主动离开还是断网/失败结束,只要曾加入成功都会回调:
```kotlin
override fun onLeaveChannel(durationSeconds: Int) {
Log.d(TAG, "onLeaveChannel duration=${durationSeconds}s")
}
```
---
# 7. Token 过期机制
SDK 在 Token 生命周期内会通过事件提醒你续期:
## 7.1 Token 即将过期
```kotlin
override fun onTokenWillExpire(token: String?, expiresAt: Long) {
Toast.makeText(
this@InteractiveLiveActivity,
"Token 即将过期,请及时续期",
Toast.LENGTH_LONG
).show()
// 1. 通知业务服务器刷新 Token
// 2. 拿到新 Token 后调用 rtcEngine?.renewToken(newToken)(具体接口以实际 SDK 为准)
}
```
## 7.2 Token 已过期
```kotlin
override fun onTokenExpired(token: String?, expiresAt: Long) {
Toast.makeText(
this@InteractiveLiveActivity,
"Token 已过期,断线后将无法重连",
Toast.LENGTH_LONG
).show()
}
```
> 说明:
> - Token 过期后,**当前通话不会立刻中断**,但网络异常时自动重连会失败。
> - 请务必在 `onTokenWillExpire` 阶段就完成续期。
---
# 8. 常见问题 (FAQ)
## Q1多人远端画面如何渲染
为每一个远端用户(`userId`)分配一个 `SurfaceViewRenderer`,并调用:
```kotlin
val canvas = InteractiveVideoCanvas(renderer, userId)
rtcEngine?.setupRemoteVideo(canvas)
```
在布局层面,你可以将多个 `renderer` 放到不同的容器中(网格布局 / 自定义九宫格等),参考 Demo 中的 `remoteSlots`
---
## Q2远端画面不显示怎么办
排查方向:
1. 是否收到了 `onUserJoined` 回调?
2. 有没有为该 `userId` 调用 `setupRemoteVideo` 并绑定到一个可见的 View
3. View 是否被其他控件覆盖?
4. 远端用户是否已开启视频(可监听 `onRemoteVideoEnabled` 回调)?
---
## Q3如何实现画中画 / 小窗布局?
这是布局层面的工作,与 SDK 解耦:
- 将远端大画面放在父容器(如 `FrameLayout`)中
- 再将本地小窗 View 作为子 View 添加在右下角,并设置合适的 `layoutParams`
- SDK 会把视频渲染到对应的 View 上,你只需要控制 View 的大小和位置即可
---
## Q4如何在后台保持通话
Demo 中使用了一个前台 Service
```kotlin
InteractiveForegroundService.start(this)
// 离开频道后记得 stop
InteractiveForegroundService.stop(this)
```
文档参考 doc目录下

View File

@@ -1,6 +1,6 @@
# Selly Live SDK 推拉流接入文档Android
> 统一 SDK 名称:**SellyCloudSDK**
> 统一 SDK 名称:**SellyCloudSDK**
> 本文档适用于 Android 客户端,面向对外集成方与内部使用。
---
@@ -12,13 +12,17 @@ Selly Live SDK 提供完整的音视频直播能力,支持 **推流(直播
### 主要能力
- 支持 **RTMP / RTC** 推流与播放模式
- 支持 **SurfaceView / TextureView** 两套渲染后端
- 直播播放器与点播播放器支持 **SurfaceTexture** 高级渲染接入
- 高性能音视频采集与编码
- 灵活的视频参数配置(分辨率 / 帧率 / 码率)
- 推流状态与统计回调
- 拉流播放状态与错误回调
- 支持视频帧处理(美颜 / 滤镜 / 水印)
- 基于 **Token 的安全鉴权机制**
- 支持 **RTMP H264 + AAC payload XOR 保护(可选)**
- 支持 **RTMP Payload XOR 保护(可选)**
- 支持 **RTCWHEP/WHIPWebRTC Frame XOR 加解密(可选)**
- 支持 **外部代理地址注入**(如洋葱盾等第三方安全代理)
---
@@ -34,8 +38,8 @@ Selly Live SDK 提供完整的音视频直播能力,支持 **推流(直播
### 3.1 项目结构参考
- `example/`Android Demo 工程
- 推流示例:`example/src/main/java/com/demo/SellyCloudSDK/live/LivePushActivity.kt`
- `example/`Android Demo 工程
- 推流示例:`example/src/main/java/com/demo/SellyCloudSDK/live/LivePushActivity.kt`
- 拉流示例:`example/src/main/java/com/demo/SellyCloudSDK/live/LivePlayActivity.kt`
- `example/libs/`:本地 AAR 依赖存放目录
@@ -43,7 +47,7 @@ Selly Live SDK 提供完整的音视频直播能力,支持 **推流(直播
```gradle
dependencies {
implementation files("libs/sellycloudsdk-1.0.0.aar")
implementation files("libs/sellycloudsdk-1.0.1.aar")
}
```
@@ -60,9 +64,81 @@ dependencies {
---
## 4. Token 鉴权机制(重点)
## 4. SDK 初始化与代理配置
### 4.1 Token 注入方式
### 4.1 SDK 初始化
在使用任何推流 / 拉流功能前,必须先初始化 SDK
```kotlin
SellyCloudManager.initialize(
context = applicationContext,
appId = "your-app-id",
config = SellyCloudConfig(
vhost = "your-vhost",
vhostKey = "your-vhost-key",
defaultStreamId = "default-stream",
defaultLiveMode = SellyLiveMode.RTMP
)
)
```
`initialize` 参数说明:
| 参数 | 类型 | 说明 |
| ---- | ---- | ---- |
| `context` | Context | 应用上下文 |
| `appId` | String | 应用 ID权威值会覆盖 config 中的 appId |
| `config` | SellyCloudConfig? | 可选配置,不传则使用默认值 |
`SellyCloudConfig` 字段说明:
| 字段 | 类型 | 说明 |
| ---- | ---- | ---- |
| `vhost` | String | 虚拟主机 |
| `vhostKey` | String | vhost 密钥(用于鉴权签名) |
| `defaultStreamId` | String | 默认流 ID |
| `logEnabled` | Boolean | 是否启用日志,默认 true |
| `defaultLiveMode` | SellyLiveMode | 默认推拉流模式RTMP / RTC |
| `appName` | String | 应用名称,为空时自动使用 appId一般无需设置 |
> `config.appId` 无需设置SDK 内部会用 `initialize(appId=)` 参数覆盖。
### 4.2 代理地址配置(可选)
SDK 支持通过外部代理(如洋葱盾等安全加速服务)进行流媒体连接。代理地址由业务方在 SDK 外部获取,然后通过以下接口注入:
```kotlin
// 设置代理地址
SellyCloudManager.setProxyAddress("http://127.0.0.1:12345")
// 清除代理(恢复直连)
SellyCloudManager.setProxyAddress(null)
// 查询当前代理地址
val proxy = SellyCloudManager.getProxyAddress() // null 表示未设置
```
**格式要求:**
- 必须以 `http://` 或 `https://` 开头
- 传 `null` 或空字符串表示清除代理
- 格式不合法时抛出 `IllegalArgumentException`
**生效范围:**
- 设置后对 RTMP 推拉流、RTCWHEP/WHIP播放推流、Signaling 信令连接均生效
- SDK 内部通过代理地址解析真实服务器 IP对上层透明
**时机要求:**
- 必须在推流 / 拉流 **开始之前** 设置
- 推流 / 拉流过程中修改代理地址,需停止后重新开始才能生效
> Demo 中使用 `KiwiHelper` 封装了洋葱盾 SDK 的初始化与代理地址获取流程,通过 `SellyCloudManager.setProxyAddress()` 将结果传给 SDK。详见 `example/src/main/java/com/demo/SellyCloudSDK/KiwiHelper.kt`。
---
## 5. Token 鉴权机制(重点)
### 5.1 Token 注入方式
| 场景 | 设置位置 |
| ---- | ---- |
@@ -76,7 +152,7 @@ dependencies {
- SDK 内部在建立连接时自动携带当前 Token
- 直接使用 RTMP 地址推/拉流不需要 Token可不设置
### 4.2 Token 设置时机(强约束)
### 5.2 Token 设置时机(强约束)
#### 推流
@@ -91,9 +167,9 @@ dependencies {
- `prepareToPlay()`
- `play()`
> ⚠️ 在连接建立后修改 Token不会影响当前连接。
> 在连接建立后修改 Token不会影响当前连接。
### 4.3 Token 刷新机制说明
### 5.3 Token 刷新机制说明
- SDK **不提供自动刷新**
- 业务层可在任意时刻 **重新设置 token 属性**
@@ -104,17 +180,18 @@ dependencies {
2. 调用 `pusher.token = newToken` / `player.token = newToken`
3. 停止并重新开始推流 / 拉流流程
### 4.4 RTMP Payload XOR 保护(可选)
### 5.4 RTMP / WebRTC XOR 保护(可选)
用途:
- 防止他人拿到 RTMP 地址后直接播放、转码或截图
- 提高流地址泄露后被直接播放、转码或抓流的门槛
生效范围与约束:
- 仅对 **RTMP** 生效
- 仅支持 **H264 + AAC**(当前版本)
- 只处理 payload配置帧SPS/PPS、AAC Sequence Header保持不变
- **RTMP** 推拉流:支持 payload XOR当前仅支持 **H264 + AAC**
- **RTCWHEP/WHIP** 推拉流:支持 WebRTC frame XOR 加解密
- 当前这里的 WebRTC 指直播 RTC 推拉流,不包含互动通话高层 API
- RTMP 只处理 payload配置帧SPS/PPS、AAC Sequence Header保持不变
- 推流端与播放端必须使用**同一个 key**
Key 格式:
@@ -122,18 +199,19 @@ Key 格式:
- `hex` 字符串,建议 16 或 32 字节(即 32/64 个 hex 字符)
- 支持 `0x` 前缀
- 长度必须为偶数
- 非法 key 会被忽略并关闭 XOR会输出 warning 日志)
- 非法 key 会直接抛出 `IllegalArgumentException`,不会静默降级
时机要求:
- 推流:请在 `startLiveWithStreamId(...)` / `startLiveWithUrl(...)` 之前设置 key
- 推流:请在 `startLiveWithStreamId(...)` / `startLiveWithUrl(...)` 之前调用 `setXorKey(...)`
- 拉流:请在 `initWithStreamId(...)` / `initWithUrl(...)` 创建播放器时传入 `xorKeyHex`
- 运行中修改 key 不会影响当前连接,需重启推流或重建播放器实例
---
## 5. 推流接入详解
## 6. 推流接入详解
### 5.1 创建推流实例
### 6.1 创建推流实例
```kotlin
val pusher = SellyLiveVideoPusher.initWithLiveMode(
@@ -147,7 +225,7 @@ pusher.delegate = object : SellyLiveVideoPusherDelegate {
}
```
### 5.2 视频参数配置与预览
### 6.2 视频参数配置与预览
```kotlin
val config = SellyLiveVideoConfiguration.defaultConfiguration().apply {
@@ -158,7 +236,7 @@ val config = SellyLiveVideoConfiguration.defaultConfiguration().apply {
outputImageOrientation = SellyLiveOrientation.PORTRAIT
}
pusher.attachPreview(previewContainer)
pusher.attachPreview(previewContainer, useTextureView = false)
pusher.startRunning(
cameraPosition = SellyLiveCameraPosition.FRONT,
videoConfig = config,
@@ -166,13 +244,44 @@ pusher.startRunning(
)
```
### 5.3 设置推流 Token使用 streamId 时)
### 6.2.1 预览后端选择
推流预览支持两种接入方式:
- `attachPreview(container, useTextureView = false)`SDK 创建预览 View默认走旧的 `Surface/OpenGL` 预览链路
- `attachPreview(container, useTextureView = true)`SDK 创建 `TextureView` 预览,适合需要普通 View 层级混排的场景
- `setPreviewView(view)`:手动传入预览 View
- `setPreviewView(view, mode)`:当传入 `TextureView` 时,建议使用这个显式协议版本
示例:
```kotlin
// 默认旧路径
pusher.attachPreview(previewContainer, useTextureView = false)
// TextureView 路径
pusher.attachPreview(previewContainer, useTextureView = true)
```
```kotlin
// 手动指定 TextureView 时,建议显式传入 liveMode
val textureView = com.sellycloud.sellycloudsdk.widget.AspectRatioTextureView(this)
pusher.setPreviewView(textureView, SellyLiveMode.RTMP)
```
说明:
- `RTMP` 模式下SDK 内部会根据预览 View 类型自动选择 `OpenGlView``TextureView`
- `RTC/WHIP` 预览也支持 `TextureView`
- 当前版本建议在 **开始采集/推流前** 选定预览后端;不保证运行中热切换预览后端
### 6.3 设置推流 Token使用 streamId 时)
```kotlin
pusher.token = pushToken
```
#### RTMP Payload XOR可选)
#### 推流 XORRTMP / RTC-WHIP可选)
```kotlin
val xorKeyHex = "A1B2C3D4E5F6A7B8C9D0E1F2A3B4C5D6"
@@ -181,9 +290,9 @@ val xorKeyHex = "A1B2C3D4E5F6A7B8C9D0E1F2A3B4C5D6"
pusher.setXorKey(xorKeyHex)
```
> 若在推流中修改 key需停止并重新开始推流后才会使用新 key。
> `setXorKey(...)` 同时作用于 RTMP 推流与 RTC/WHIP 推流。若在推流中修改 key需停止并重新开始推流后才会使用新 key。
### 5.4 开始/停止推流
### 6.4 开始/停止推流
```kotlin
pusher.startLiveWithStreamId(streamId)
@@ -212,7 +321,7 @@ pusher.stopLive { error ->
}
```
### 5.5 常用控制接口
### 6.5 常用控制接口
- `setMuted(true/false)`:静音
- `switchCameraPosition(...)`:切换摄像头
@@ -222,11 +331,119 @@ pusher.stopLive { error ->
- `setCameraEnabled(true/false)`:关闭/开启摄像头
- `setStreamOrientation(...)`:切换推流方向
- `setVideoConfiguration(...)` + `changeResolution(...)`:动态调整分辨率
- `setAutoFramingEnabled(...)` / `getAutoFramingCapability()` / `getAutoFramingState()`:自动取景
- `setBeautyEngine(...)` + `setBeautyEnabled(...)`:接入美颜
- `setBeautyLevel(level)`:设置美颜强度
- `setBitmapAsVideoSource(...)` / `restoreCameraVideoSource()`:背景图推流
### 5.6 生命周期建议
### 6.5.1 美颜引擎接入
当前版本推荐通过 `BeautyEngine` + `VideoProcessor` 接入美颜。Demo 使用 `FaceUnityBeautyEngine`,位于:
- `example/src/main/java/com/demo/SellyCloudSDK/beauty/FaceUnityBeautyEngine.kt`
接入示例:
```kotlin
val beautyEngine = FaceUnityBeautyEngine()
pusher.setBeautyEngine(beautyEngine)
pusher.setBeautyEnabled(true)
pusher.setBeautyLevel(3.0f)
```
说明:
- `BeautyEngine.createProcessor()` 返回的是 SDK V2 `VideoProcessor`
- 当前 Demo 的美颜实现走 `TEXTURE_2D + READ_WRITE`
- 美颜属于“完整重写输出”的场景,建议在 `VideoProcessorConfig` 中设置 `fullRewrite = true`
- `RTC/WHIP` 路径优先推荐 `TEXTURE_2D`,避免对 texture-backed 帧做额外的 texture-to-CPU 转换
### 6.5.2 推流前帧处理与观察
直播推流支持:
- 一个可写 `VideoProcessor`
- 多个只读 `VideoFrameObserver`
只读观测示例:
```kotlin
val disposable = pusher.addVideoFrameObserver(object : VideoFrameObserver {
override val config = VideoFrameObserverConfig(
preferredFormat = VideoProcessFormat.TEXTURE_2D
)
override fun onTextureFrame(frame: VideoTextureFrame) {
// 只读观测,不修改输出
}
})
```
可写处理示例:
```kotlin
pusher.setVideoProcessor(object : VideoProcessor {
override val config = VideoProcessorConfig(
preferredFormat = VideoProcessFormat.TEXTURE_2D,
mode = VideoProcessMode.READ_WRITE
)
override fun processTexture(input: VideoTextureFrame, outputTextureId: Int) {
// 将滤镜/水印直接写入 SDK 提供的 outputTextureId
}
})
```
当前 SDK / Demo 的处理建议:
- `RTC/WHIP` 路径优先使用 `TEXTURE_2D`
- `RTMP` 在确实需要 CPU 像素时,可使用 `I420` / `RGBA`
- `READ_WRITE` 模式下SDK 会准备输出缓冲;只有“完整覆盖输出”的场景才建议 `fullRewrite = true`
- `outputTextureId` 由 SDK 管理,处理器不应转移所有权,也不应在回调里主动删除纹理
- `VideoFrameObserverConfig` 的默认值仍为 `I420` 以兼容旧接入;新接入建议显式声明 `preferredFormat`
Demo 中当前可直接验证的模式:
- `帧回调纹理``TEXTURE_2D` observer
- `帧回调空CPU`:声明 `I420`,不处理像素
- `帧回调单CPU`:单个 `I420` observer
- `帧回调双CPU`:两个 `I420` observer共享同一次 CPU 转换
- `改帧``RTC` 下走 `TEXTURE_2D``RTMP` 示例走 `RGBA`
### 6.5.3 自动取景Auto Framing
当前高层 API 已暴露:
- `setAutoFramingEnabled(enabled)`:开启 / 关闭自动取景
- `getAutoFramingCapability()`:查询当前是否支持及原因
- `getAutoFramingState()`:读取当前状态
- `delegate.onAutoFramingStateChanged(state)`:接收状态变化回调
状态枚举:
- `OFF`
- `INACTIVE`
- `FRAMING`
- `CONVERGED`
- `UNSUPPORTED`
当前约束:
- 当前自动取景只在 **RTMP 推流** 路径可用
- `RTC / WHIP` 推流当前会返回 `UNSUPPORTED`
- 需要摄像头已启动后再查询 capability相机关闭、背景图推流等场景也会返回不支持
示例:
```kotlin
val capability = pusher.getAutoFramingCapability()
if (capability.supported) {
pusher.setAutoFramingEnabled(true)
}
```
### 6.6 生命周期建议
在宿主 Activity 中对齐生命周期:
@@ -234,7 +451,7 @@ pusher.stopLive { error ->
- `onPause()``pusher.onPause()`
- `onDestroy()``pusher.release()`
### 5.7 状态与统计回调
### 6.7 状态与统计回调
**状态枚举:**
@@ -251,21 +468,25 @@ pusher.stopLive { error ->
- videoBitrateKbps / audioBitrateKbps
- rttMs
- cpu 使用率Demo 通过 `CpuUsage` 读取)
- auto framing state通过 `onAutoFramingStateChanged` / `getAutoFramingState()` 获取)
### 5.8 推流 API 速览(含 Demo 未覆盖)
### 6.8 推流 API 速览(含 Demo 未覆盖)
初始化与预览:
- `initWithLiveMode(context, liveMode)`:创建推流实例
- `setPreviewView(view)`:设置预览 View
- `attachPreview(container)`:将预览 View 添加到容器
- `setPreviewView(view)`:设置预览 View`TextureView` 会按当前 `liveMode` 选择协议
- `setPreviewView(view, mode)`:显式设置预览 View 与协议,`TextureView` 推荐使用
- `attachPreview(container)`:将默认预览 View 添加到容器
- `attachPreview(container, useTextureView)`:创建并绑定 `Surface/OpenGL` 或 `TextureView` 预览
- `getPreviewView()`:获取当前预览 View
采集与推流:
- `startRunning(cameraPosition, videoConfig, audioConfig)`:开始采集预览
- `setVideoConfiguration(config)`:更新视频参数
- `setXorKey(hexKey)`:设置 RTMP payload XOR key可选)
- `setXorKey(hexKey)`:设置推流 XOR keyRTMP payload / RTC-WHIP frame可选)
- `setAutoFramingEnabled(enabled)` / `getAutoFramingCapability()` / `getAutoFramingState()`:自动取景控制与状态查询
- `startLiveWithStreamId(streamId)`:使用 streamId 推流
- `startLiveWithUrl(url)`:使用完整 URL 推流
- `stopLive()` / `stopLive(callback)`:停止推流
@@ -283,6 +504,7 @@ pusher.stopLive { error ->
- `setBeautyEngine(engine)`:设置美颜引擎
- `setBeautyEnabled(true/false)`:启用 / 关闭美颜
- `setBeautyLevel(level)`:设置美颜强度
- `onAutoFramingStateChanged(state)`:自动取景状态回调
- `setStreamOrientation(orientation)`:设置推流方向
- `changeResolution(width, height)`:动态调整分辨率
- `setBitmapAsVideoSource(bitmap)` / `restoreCameraVideoSource()`:背景图推流
@@ -293,16 +515,16 @@ pusher.stopLive { error ->
---
## 6. 拉流接入详解
## 7. 拉流接入详解
### 6.1 创建播放器
### 7.1 创建播放器
```kotlin
val player = SellyLiveVideoPlayer.initWithStreamId(
context = this,
streamId = streamId,
liveMode = SellyLiveMode.RTC,
xorKeyHex = "" // RTC 场景可留空
xorKeyHex = "" // 加密流传入同一 key明文流可留空
)
// 或直接使用完整 URL
// val player = SellyLiveVideoPlayer.initWithUrl(this, playUrl, xorKeyHex = "A1B2...")
@@ -321,23 +543,49 @@ val player = SellyLiveVideoPlayer.initWithStreamId(
)
```
> 使用 RTMP 加密流时,请在创建播放器时传入 `xorKeyHex`;后续如需换 key请重建播放器实例。
> 使用 RTMP 或 RTC/WHEP 加密流时,请在创建播放器时传入 `xorKeyHex`;后续如需换 key请重建播放器实例。
### 6.2 设置拉流 Token使用 streamId 时)
### 7.2 设置拉流 Token使用 streamId 时)
```kotlin
player.token = playToken
```
> 直接使用 RTMP 地址拉流不需要 Token可不设置。
### 6.3 播放流程
### 7.3 播放流程
```kotlin
player.attachRenderView(renderContainer)
player.attachRenderView(renderContainer, com.sellycloud.sellycloudsdk.render.RenderBackend.SURFACE_VIEW)
player.prepareToPlay()
player.play()
```
### 7.3.1 播放渲染后端选择
直播播放器支持以下渲染接入方式:
- `attachRenderView(container, RenderBackend.SURFACE_VIEW)`:默认旧路径
- `attachRenderView(container, RenderBackend.TEXTURE_VIEW)`:使用 `TextureView`
- `setRenderView(view)`:手动传入 `SurfaceView`、`SurfaceViewRenderer` 或 `TextureView`
- `setRenderSurfaceTexture(surfaceTexture, width, height)`:高级场景下直接绑定 `SurfaceTexture`(调用方负责 SurfaceTexture 生命周期)
示例:
```kotlin
val backend = com.sellycloud.sellycloudsdk.render.RenderBackend.TEXTURE_VIEW
player.attachRenderView(renderContainer, backend)
player.prepareToPlay()
player.play()
```
说明:
- `RTMP` 播放支持 `SurfaceView`、`TextureView` 与 `SurfaceTexture`
- `RTC/WHEP` 播放支持 `SurfaceViewRenderer` 与 `TextureView`,以及高级场景下的 `SurfaceTexture`
- `RTMP/VOD``TextureView / SurfaceTexture` 默认走 **direct output**,优先保证首帧和低延迟
- 当前版本建议在 **开始播放前** 选定渲染后端;运行中如需变更目标,请走 `clearRenderTarget()` + `setRenderView(...)` / `setRenderSurfaceTexture(...)` 的显式重绑流程
- Flutter 场景优先使用 `setRenderSurfaceTexture(...)`,配合 Flutter `Texture` widget 使用;如 UI 层级正确性优先,不建议继续依赖 `Hybrid Composition + SurfaceView`
控制接口:
- `pause()`
@@ -349,9 +597,34 @@ player.play()
补充接口Demo 未覆盖):
- `setRenderView(view)`:手动指定渲染 View
- `setRenderSurfaceTexture(surfaceTexture, width, height)`:直接绑定 `SurfaceTexture`(调用方负责 SurfaceTexture 生命周期)
- `clearRenderTarget()`:解绑当前渲染面,播放会话可继续存活
- `seekBy(deltaMs)`:播放进度跳转(仅在流支持快进/回放时有效)
### 6.4 播放回调
### 7.3.2 Flutter / SurfaceTexture 接入建议
如果业务侧需要把视频放到 Flutter UI 层下面,并正常叠加按钮、封面、弹层、动画,推荐使用:
- Flutter 侧创建 `TextureRegistry.SurfaceTextureEntry`
- Android 插件层取出 `SurfaceTexture`
- 调用 `setRenderSurfaceTexture(surfaceTexture, width, height)`
- Flutter 页面使用 `Texture(textureId)` 显示视频
示意:
```kotlin
player.setRenderSurfaceTexture(surfaceTexture, width, height)
player.prepareToPlay()
player.play()
```
说明:
- `SurfaceTexture` 生命周期由调用方负责
- 销毁前建议先调用 `clearRenderTarget()` 或直接 `release()`
- 如果页面重建、Texture 重新申请或 Flutter 侧切换 textureId需要重新绑定新的 `SurfaceTexture`
### 7.4 播放回调
```kotlin
player.delegate = object : SellyLiveVideoPlayerDelegate {
@@ -374,14 +647,74 @@ player.delegate = object : SellyLiveVideoPlayerDelegate {
- `Reconnecting`
- `Failed`
### 6.5 播放 API 速览(含 Demo 未覆盖)
首帧语义说明:
- 默认 `DIRECT` 模式下,`onFirstVideoFrameRendered()` 对应 decoder 首帧可用时机
- 在 `TextureView / SurfaceTexture` 且启用了 playback processing 的场景下SDK 会等待目标渲染面确认首帧已真正呈现后,再触发 `onFirstVideoFrameRendered()`
- `onFirstAudioFrameRendered()` 仍表示音频首帧可播放时机;在 texture-backed processing 场景中,音频与视频首帧不一定完全同一时刻
### 7.4.1 播放侧帧回调与二次处理
播放器支持一组独立于采集/推流链路的播放侧高级能力:
- `setPlaybackFrameObserver(observer)`:播放侧只读帧回调
- `setPlaybackVideoProcessor(processor)`:播放侧可写纹理处理
当前能力边界:
- 当前仅支持 **texture-backed** 播放目标:`TextureView` / `SurfaceTexture`
- 当前仅支持 `preferredFormat = TEXTURE_2D`
- 当前仅支持 `stage = RENDER_PRE_DISPLAY`
- 当前默认渲染模式为 `DIRECT`
- 只有设置了有效的 observer / processor才会切到 `PROCESSING`
- 如果当前 render target 已经绑定,新增或移除 observer / processor 后,需要 **重绑一次 texture render target** 才会生效
- `RTC/WHEP` 播放当前不支持这套 playback processing当前主要用于 `RTMP/VOD` 播放链
只读 observer 示例:
```kotlin
player.setPlaybackFrameObserver(object : PlaybackFrameObserver {
override val config = PlaybackFrameObserverConfig(
preferredFormat = VideoProcessFormat.TEXTURE_2D,
stage = VideoStage.RENDER_PRE_DISPLAY
)
override fun onTextureFrame(frame: VideoTextureFrame) {
// 读取播放侧纹理帧信息
}
})
```
可写 processor 示例:
```kotlin
player.setPlaybackVideoProcessor(object : PlaybackVideoProcessor {
override val config = PlaybackVideoProcessorConfig(
preferredFormat = VideoProcessFormat.TEXTURE_2D,
mode = VideoProcessMode.READ_WRITE,
stage = VideoStage.RENDER_PRE_DISPLAY
)
override fun processTexture(input: VideoTextureFrame, outputTextureId: Int) {
// 将后处理结果写入 outputTextureId
}
})
```
### 7.5 播放 API 速览(含 Demo 未覆盖)
创建与渲染:
- `initWithStreamId(context, streamId, liveMode, vhost, appName, xorKeyHex)`:使用 streamId 创建播放器
- `initWithUrl(context, url, xorKeyHex)`:使用完整 URL 创建播放器
- `attachRenderView(container)` / `setRenderView(view)`:设置渲染 View
- `attachRenderView(container)`:创建默认 `SurfaceView` 渲染 View
- `attachRenderView(container, backend)`:创建指定 backend 的渲染 View
- `setRenderView(view)`:手动设置渲染 View
- `setRenderSurfaceTexture(surfaceTexture, width, height)`:绑定 `SurfaceTexture`(调用方负责 SurfaceTexture 生命周期)
- `clearRenderTarget()`:解绑当前渲染面
- `getRenderView()`:获取当前渲染 View
- `setPlaybackFrameObserver(observer)`:设置播放侧只读 observertexture 路径)
- `setPlaybackVideoProcessor(processor)`:设置播放侧 processortexture 路径)
播放控制:
@@ -395,9 +728,26 @@ player.delegate = object : SellyLiveVideoPlayerDelegate {
- `setStatsListener { snapshot -> }`:播放统计回调
- `release()`:释放播放器资源
### 7.6 点播播放器渲染说明
`SellyVodPlayer` 与直播播放器在渲染后端模型上保持一致:
- `attachRenderView(container, backend)`:支持 `SURFACE_VIEW` / `TEXTURE_VIEW`
- `setRenderView(surfaceView)` / `setRenderView(textureView)`:手动绑定现有 View
- `setRenderSurfaceTexture(surfaceTexture, width, height)`:高级场景使用 `SurfaceTexture`(调用方负责 SurfaceTexture 生命周期)
- `clearRenderTarget()`:解绑当前渲染面但不一定立即销毁播放实例
- `setPlaybackFrameObserver(observer)` / `setPlaybackVideoProcessor(processor)`:点播同样支持 texture-backed playback processing
补充说明:
- 点播在重绑 `TextureView / SurfaceTexture` 后,会自动复用最近一次视频宽高信息,保持正确显示比例
- 如在已有 texture 目标上新增或移除 observer / processor也需要重绑一次 texture render target 才会应用新的渲染模式
因此 Demo 中点播页的 `SurfaceView / TextureView` 选择,也与直播播放页保持一致,均在首页设置中统一生效。
---
## 7. 错误处理与重试建议
## 8. 错误处理与重试建议
### Token 错误
@@ -414,24 +764,33 @@ player.delegate = object : SellyLiveVideoPlayerDelegate {
---
## 8. 最佳实践
## 9. 最佳实践
- 推流前先完成采集预览
- `SurfaceView / TextureView` backend 建议在开始推流或播放前选定
- Flutter 场景优先使用 `setRenderSurfaceTexture(...)`,不要把 `Hybrid Composition + SurfaceView` 当成默认方案
- 普通播放默认保持 `DIRECT`;只有确实需要播放侧帧观察或纹理后处理时,再启用 playback processing
- playback processing 当前仅建议用于 `TextureView / SurfaceTexture + TEXTURE_2D + RENDER_PRE_DISPLAY`
- 变更 texture 路径的 observer / processor 后,显式重绑一次 render target
- `RTC/WHIP` 的美颜、滤镜、水印、观测优先使用 `TEXTURE_2D`
- `I420 / RGBA` 仅在算法必须访问 CPU 像素时再使用
- 完整重写输出的 GPU 处理器设置 `fullRewrite = true`;叠加类处理保留默认值
- Token 即将过期前提前刷新
- 使用统计回调做质量监控
- 拉流失败避免无限重试
- 使用代理时,确保在推拉流开始前代理地址已设置完毕
---
## 9. 常见问题FAQ
## 10. 常见问题FAQ
### Q1Token 可以拼接到 URL 吗?
**A** 不可以。
**A** 不可以。
SDK 不解析 URL 中的鉴权信息,所有鉴权均通过 `token` 属性完成。
### Q2运行中修改 Token 是否生效?
**A**
运行中修改 Token **不会影响当前已建立的连接**
运行中修改 Token **不会影响当前已建立的连接**
**下次重连或重新启动推流 / 拉流时会使用新的 Token**
### Q3播放器出现黑屏怎么办
@@ -447,5 +806,39 @@ SDK 不解析 URL 中的鉴权信息,所有鉴权均通过 `token` 属性完
- 推流端与播放端 `xorKeyHex` 是否完全一致
- key 格式是否为合法 hex偶数长度支持 `0x` 前缀)
- 当前是否为 RTMP + H264 + AAC
- 当前是 `RTMP` 还是 `RTC/WHEP`,两端是否都走了对应的加密流配置
- 变更 key 后是否已重启推流 / 重建播放器
### Q5什么时候选择 `SurfaceView`,什么时候选择 `TextureView`
**A**
- 普通原生 Android 页面,优先使用默认 `SurfaceView`,性能最优
- 需要与按钮、封面、弹层等普通 View 正常混排时,优先使用 `TextureView`
- Flutter 场景通过 `setRenderSurfaceTexture()` 接入,配合 Flutter `Texture` widget 使用
- 当前版本建议在开始推流/播放前选定 backend当前 Demo 在首页设置中统一选择,进入页面后不支持切换
### Q5.1`TextureView` 模式下VOD/RTMP 播放的 `BufferQueueProducer timeout` 日志是什么?
**A**
当前 `RTMP/VOD``TextureView / SurfaceTexture` 默认走 direct output以缩短首帧和减少黑屏。极端机型或系统版本下仍可能偶现 `BufferQueueProducer timeout` / `BufferQueue has been abandoned` 之类系统日志;如果不伴随黑屏、花屏、卡死,通常可视为 Android BufferQueue 机制噪声。开启 playback processing 时texture 路径内部会启用额外的处理中转链,日志形态也可能与 direct 模式不同。
### Q5.2`attach` 和 `set` 两套 API 的区别?
**A**
| API | 谁创建 View | 谁释放 |
|---|---|---|
| `attachRenderView()` / `attachPreview()` | SDK 创建 | SDK 在 `release()` 时自动释放 |
| `setRenderView()` / `setPreviewView()` | 调用方创建并传入 | 调用方负责释放SDK 只做绑定/解绑 |
| `setRenderSurfaceTexture()` | 调用方传入 SurfaceTexture | 调用方负责 SurfaceTexture 生命周期 |
### Q6如何接入代理/加速服务(如洋葱盾)?
**A**
SDK 本身不集成任何第三方代理 SDK。业务方需在 SDK 外部完成代理初始化与地址获取,然后通过 `SellyCloudManager.setProxyAddress(proxyUrl)` 注入。SDK 内部会自动通过代理地址解析真实服务器 IP。
示例流程:
1. 在 Application 或 Activity 中初始化代理 SDK
2. 获取本地代理地址(如 `http://127.0.0.1:12345`
3. 调用 `SellyCloudManager.setProxyAddress("http://127.0.0.1:12345")`
4. 正常进行推流 / 拉流
> Demo 中的 `KiwiHelper` 展示了洋葱盾的完整接入流程,可作为参考。

View File

@@ -4,20 +4,27 @@
SDK 核心以 `InteractiveRtcEngine` 为中心,通过 `InteractiveRtcEngineEventHandler` 回调通话状态、用户事件、音视频状态及异常。
当前版本的互动渲染模型已经从“仅 `SurfaceViewRenderer`”扩展为“`RtcRenderTarget` 抽象 + 多种后端实现”:
- `SurfaceViewRenderer` 旧路径仍可用
- `TextureView` 已可用于本地/远端视频渲染
- 推荐在 **加入频道前** 选定本地渲染后端
---
## 目录
1. 准备工作
2. 快速开始
3. 基础通话流程
4. 常用功能
5. 屏幕分享
6. 视频帧前后处理
7. 事件回调EventHandler
8. 通话统计
9. Token 机制
10. 代理地址配置
11. 常见问题FAQ
---
@@ -38,19 +45,40 @@ SDK 核心以 `InteractiveRtcEngine` 为中心,通过 `InteractiveRtcEngineEve
## 快速开始
### 1. 创建引擎
### 1. SDK 初始化
在使用音视频通话功能前,需先初始化 SDK
```kotlin
SellyCloudManager.initialize(
context = applicationContext,
appId = "your-app-id"
)
```
> `initialize` 的 `appId` 参数为权威值。可选传入 `SellyCloudConfig` 配置 `vhost`、`logEnabled` 等,详见推拉流文档。
### 2. 代理地址设置(可选)
若需通过代理(如洋葱盾)连接信令服务器,在创建引擎前设置:
```kotlin
SellyCloudManager.setProxyAddress("http://127.0.0.1:12345")
```
> SDK 内部通过代理地址解析真实信令服务器 IP。不设置则使用直连。详见「代理地址配置」章节。
### 3. 创建引擎
```kotlin
val appId = getString(R.string.signaling_app_id)
val token = getString(R.string.signaling_token).takeIf { it.isNotBlank() }
val kiwiRsName = getString(R.string.signaling_kiwi_rsname).trim()
val rtcEngine = InteractiveRtcEngine.create(
InteractiveRtcEngineConfig(
context = applicationContext,
appId = appId,
defaultToken = token,
kiwiRsName = kiwiRsName
defaultToken = token
)
).apply {
setEventHandler(eventHandler)
@@ -68,21 +96,76 @@ val rtcEngine = InteractiveRtcEngine.create(
}
```
> `InteractiveRtcEngineConfig` 与默认 token 配置见 `example/src/main/java/com/demo/SellyCloudSDK/interactive/InteractiveLiveActivity.kt`。
`InteractiveRtcEngineConfig` 参数说明:
### 2. 设置本地/远端画布
| 参数 | 类型 | 说明 |
| ---- | ---- | ---- |
| `context` | Context | 应用上下文 |
| `appId` | String | 应用 ID |
| `defaultCallType` | CallType | 默认通话类型,默认 ONE_TO_ONE |
| `defaultToken` | String? | 默认 Token |
| `signalingUrlPrefix` | String | 信令 URL 前缀,默认 `ws://` |
| `signalingUrlSuffix` | String | 信令 URL 后缀,默认 `/ws/signaling` |
> 完整 Demo 见 `example/src/main/java/com/demo/SellyCloudSDK/interactive/InteractiveLiveActivity.kt`。
### 4. 设置本地/远端画布
推荐使用 `InteractiveVideoCanvas(renderTarget, userId)` 新接口。
#### 4.1 SurfaceViewRenderer 旧路径
```kotlin
val localRenderer = SurfaceViewRenderer(this)
rtcEngine.setupLocalVideo(InteractiveVideoCanvas(localRenderer, userId))
val localCanvas = InteractiveVideoCanvas(
com.sellycloud.sellycloudsdk.render.SurfaceViewRtcTarget(localRenderer),
userId
)
rtcEngine.setupLocalVideo(localCanvas)
```
```kotlin
val remoteRenderer = SurfaceViewRenderer(this)
rtcEngine.setupRemoteVideo(InteractiveVideoCanvas(remoteRenderer, remoteUserId))
val remoteCanvas = InteractiveVideoCanvas(
com.sellycloud.sellycloudsdk.render.SurfaceViewRtcTarget(remoteRenderer),
remoteUserId
)
rtcEngine.setupRemoteVideo(remoteCanvas)
```
### 3. 加入通话
#### 4.2 TextureView 路径
```kotlin
val localTextureView = com.sellycloud.sellycloudsdk.widget.AspectRatioTextureView(this)
val localCanvas = InteractiveVideoCanvas(
com.sellycloud.sellycloudsdk.render.TextureViewRtcTarget(localTextureView),
userId
)
rtcEngine.setupLocalVideo(localCanvas)
```
```kotlin
val remoteTextureView = com.sellycloud.sellycloudsdk.widget.AspectRatioTextureView(this)
val remoteCanvas = InteractiveVideoCanvas(
com.sellycloud.sellycloudsdk.render.TextureViewRtcTarget(remoteTextureView),
remoteUserId
)
rtcEngine.setupRemoteVideo(remoteCanvas)
```
兼容说明:
- `InteractiveVideoCanvas(view: SurfaceViewRenderer, userId)` 旧构造仍可用deprecated
- 推荐新接入统一走 `RtcRenderTarget`
- 当前高层互动 API 还没有直接暴露 `SurfaceTexture` 入口Android 场景推荐 `SurfaceViewRenderer``TextureView`
所有权说明:
- 调用方自己创建的 `SurfaceViewRenderer` / `TextureView`,由调用方负责释放
- SDK 只在 `setupLocalVideo` / `setupRemoteVideo` 中绑定 target`leaveChannel` 时解绑
- 调用方应在 `leaveChannel` 之后、Activity 销毁前释放自己创建的 View
### 5. 加入通话
```kotlin
val options = InteractiveChannelMediaOptions(callType = CallType.ONE_TO_ONE)
@@ -109,23 +192,9 @@ rtcEngine.leaveChannel()
InteractiveRtcEngine.destroy(rtcEngine)
```
### 4. 进阶配置Demo 未覆盖)
### 6. 进阶配置Demo 未覆盖)
#### 4.1 InteractiveRtcEngineConfig 高级字段
```kotlin
val config = InteractiveRtcEngineConfig(
context = applicationContext,
appId = appId,
defaultCallType = CallType.ONE_TO_ONE,
defaultToken = token,
kiwiRsName = kiwiRsName,
signalingUrlPrefix = "https://",
signalingUrlSuffix = "/signaling"
)
```
#### 4.2 InteractiveChannelMediaOptions 订阅控制
#### 6.1 InteractiveChannelMediaOptions 订阅控制
```kotlin
val options = InteractiveChannelMediaOptions(
@@ -135,7 +204,7 @@ val options = InteractiveChannelMediaOptions(
)
```
#### 4.3 InteractiveVideoEncoderConfig 更多参数
#### 6.2 InteractiveVideoEncoderConfig 更多参数
可选项(按需设置):
@@ -150,14 +219,16 @@ val options = InteractiveChannelMediaOptions(
## 基础通话流程
1. 创建 `InteractiveRtcEngine`
2. 设置 `EventHandler`
3. 配置 `InteractiveVideoEncoderConfig`
4. 设置本地画布 `setupLocalVideo`
5. `joinChannel` 加入频道
6. `onUserJoined` 后设置远端画布
7. 通话中进行音视频控制
8. `leaveChannel` 并释放资源
1. 初始化 SDK`SellyCloudManager.initialize`
2. 设置代理地址(可选,`SellyCloudManager.setProxyAddress`
3. 创建 `InteractiveRtcEngine`
4. 设置 `EventHandler`
5. 配置 `InteractiveVideoEncoderConfig`
6. 设置本地画布 `setupLocalVideo`(建议在 `joinChannel` 前完成,并在此阶段确定 backend
7. `joinChannel` 加入频道
8. `onUserJoined` 后设置远端画布;也可以提前为某个 `userId` 调用 `setupRemoteVideo`SDK 会在用户真正上线后自动 attach
9. 通话中进行音视频控制
10. `leaveChannel` 并释放资源
---
@@ -247,21 +318,45 @@ val isSharing = rtcEngine.isScreenSharing()
## 视频帧前后处理
```kotlin
rtcEngine.setCaptureVideoFrameInterceptor { frame ->
// 在此处理美颜/滤镜,返回新的 frame
frame
}
rtcEngine.setCaptureVideoProcessor(object : VideoProcessor {
override val config = VideoProcessorConfig(
preferredFormat = VideoProcessFormat.TEXTURE_2D,
mode = VideoProcessMode.READ_WRITE,
fullRewrite = true
)
override fun processTexture(input: VideoTextureFrame, outputTextureId: Int) {
// 推荐在 GPU texture 上处理采集前帧,美颜/滤镜直接写入 outputTextureId
}
})
```
```kotlin
rtcEngine.setRenderVideoFrameInterceptor { frame, userId ->
// 远端渲染前处理,返回 true 表示继续渲染
true
}
val renderObserver = rtcEngine.addRenderVideoFrameObserver(object : VideoFrameObserver {
override val config = VideoFrameObserverConfig(
preferredFormat = VideoProcessFormat.TEXTURE_2D,
stage = VideoStage.RENDER_PRE_DISPLAY
)
override fun onTextureFrame(frame: VideoTextureFrame) {
// 远端渲染前只读观测
val userId = frame.sourceId
}
})
```
> Demo 中的美颜示例见:
> `example/src/main/java/com/demo/SellyCloudSDK/beauty/FuVideoFrameInterceptor.kt`
> 推荐优先使用 `TEXTURE_2D`
> - `TEXTURE_2D` 适合美颜、滤镜、AR、水印等 GPU 处理链路。
> - `I420` / `RGBA` 仅在算法必须访问 CPU 像素时再使用。
> - 对 RTC / WHIP 的 texture-backed 帧,走 CPU observer / processor 会触发额外的 texture-to-CPU 转换。
> - `VideoFrameObserverConfig` 默认仍为 `I420` 以兼容旧接入;新 RTC / WHIP 接入建议显式写 `preferredFormat = TEXTURE_2D`。
> - 完整重写输出的处理器建议设置 `fullRewrite = true`;水印/叠加类处理保留默认值即可。
>
> Demo 中的采集前美颜示例见:
> `example/src/main/java/com/demo/SellyCloudSDK/beauty/FaceUnityBeautyEngine.kt`
>
> 当前 Demo 的互动页接入见:
> `example/src/main/java/com/demo/SellyCloudSDK/interactive/InteractiveLiveActivity.kt`
---
@@ -321,6 +416,61 @@ rtcEngine.renewToken(newToken, expiresAtSec)
---
## 代理地址配置
SDK 支持通过外部代理(如洋葱盾等安全加速服务)连接信令服务器。代理地址由业务方在 SDK 外部获取,然后注入 SDK。
### 设置方式
```kotlin
// 设置代理地址(在 joinChannel 之前)
SellyCloudManager.setProxyAddress("http://127.0.0.1:12345")
// 清除代理(恢复直连)
SellyCloudManager.setProxyAddress(null)
// 查询当前代理地址
val proxy = SellyCloudManager.getProxyAddress() // null 表示未设置
```
### 格式要求
- 必须以 `http://``https://` 开头
-`null` 或空字符串表示清除代理
- 格式不合法时抛出 `IllegalArgumentException`
### 生效范围
设置后SDK 内部通过代理地址解析真实信令服务器 IP对上层接口透明。
### 时机要求
- 必须在 `joinChannel()` **之前** 设置
- 通话过程中修改代理地址,需 `leaveChannel` 后重新 `joinChannel` 才能生效
### Demo 中的接入示例
Demo 使用 `KiwiHelper` 封装洋葱盾的初始化与代理获取,采用三阶段模式:
```kotlin
// 阶段 1Application.onCreate() 异步初始化
KiwiHelper.initializeAsync()
// 阶段 2Activity 初始化时启动代理获取(非阻塞)
KiwiHelper.startProxySetup(enableKiwi = true, rsName = "your-rs-name")
// 阶段 3joinChannel 前确保代理已就绪
lifecycleScope.launch {
KiwiHelper.awaitProxyReady()
rtcEngine.joinChannel(...)
}
```
> `KiwiHelper` 内部通过 `SellyCloudManager.setProxyAddress()` 将代理地址传给 SDK。
> 详见 `example/src/main/java/com/demo/SellyCloudSDK/KiwiHelper.kt`。
---
## 更多 API 速览(含 Demo 未覆盖)
引擎创建与销毁:
@@ -328,6 +478,12 @@ rtcEngine.renewToken(newToken, expiresAtSec)
- `InteractiveRtcEngine.create(config)`:创建引擎
- `InteractiveRtcEngine.destroy(engine)` / `engine.destroy()`:释放引擎
SDK 初始化与代理:
- `SellyCloudManager.initialize(context, appId, config)`:初始化 SDK
- `SellyCloudManager.setProxyAddress(address)`:设置代理地址
- `SellyCloudManager.getProxyAddress()`:获取当前代理地址
通话控制:
- `setEventHandler(handler)`:设置事件回调
@@ -339,6 +495,7 @@ rtcEngine.renewToken(newToken, expiresAtSec)
本地与远端控制:
- `setupLocalVideo(canvas)` / `setupRemoteVideo(canvas)`:设置画布
- `InteractiveVideoCanvas(renderTarget, userId, renderMode)`:推荐画布模型
- `clearRemoteVideo(userId)`:清理远端画面
- `enableLocalVideo(true/false)` / `enableLocalAudio(true/false)`:开关本地音视频
- `muteRemoteAudioStream(userId, true/false)` / `muteRemoteVideoStream(userId, true/false)`:按用户静音
@@ -347,8 +504,9 @@ rtcEngine.renewToken(newToken, expiresAtSec)
帧处理与屏幕共享:
- `setCaptureVideoFrameInterceptor(...)`:采集前处理
- `setRenderVideoFrameInterceptor(...)`渲染前帧处理
- `setCaptureVideoProcessor(...)`:采集前可写处理
- `addCaptureVideoFrameObserver(...)`采集前只读观测
- `addRenderVideoFrameObserver(...)`:远端渲染前只读观测
- `startScreenShare(...)` / `stopScreenShare()` / `isScreenSharing()`:屏幕共享
消息与 Token
@@ -365,11 +523,35 @@ rtcEngine.renewToken(newToken, expiresAtSec)
2. 是否在 `onUserJoined` 后调用 `setupRemoteVideo`
3. 远端是否关闭了视频
### Q互动直播可以用 `TextureView` 吗?
可以。
推荐用法是:
- 本地:`InteractiveVideoCanvas(TextureViewRtcTarget(textureView), userId)`
- 远端:`InteractiveVideoCanvas(TextureViewRtcTarget(textureView), remoteUserId)`
注意:
- 建议在 `joinChannel` 前确定本地 backend
- 当前 Demo 在首页设置中统一选择本地 backend进入互动页面后不再暴露切换入口
- 高层互动 API 当前未直接暴露 `SurfaceTexture` 入口
### Q加入频道失败
1. 检查 `signaling_app_id` 是否正确
2. Token 是否为空或已过期
3. 网络是否受限
4. 若使用代理,检查代理地址是否已正确设置
### Q屏幕分享失败
1. 是否已获取 `MediaProjection` 授权
2. Android 14+ 是否启动前台服务
### Q互动通话支持 XOR 吗?
当前高层互动 API 还没有暴露 `xorKeyHex` 一类的配置入口。
- 目前已支持 XOR 的 WebRTC 路径,是直播 RTC 的 `WHIP / WHEP` 推拉流
- 互动通话如需接入 XOR需要后续在互动链路单独暴露配置并挂载 FrameCrypto
### Q如何接入代理/加速服务?
SDK 本身不集成任何第三方代理 SDK。业务方需在外部完成代理初始化获取本地代理地址后通过 `SellyCloudManager.setProxyAddress()` 注入。详见「代理地址配置」章节。

View File

@@ -3,7 +3,7 @@ plugins {
id 'org.jetbrains.kotlin.android'
}
def sdkAarPath = "libs/${findProperty("sellySdkArtifactId") ?: "sellycloudsdk"}-${findProperty("sellySdkVersion") ?: "1.0.0"}.aar"
def sdkAarPath = "libs/${findProperty("sellySdkArtifactId") ?: "sellycloudsdk"}-${findProperty("sellySdkVersion") ?: "1.0.1"}.aar"
def releaseStorePath = project.rootProject.file(findProperty("MY_STORE_FILE") ?: "release.keystore")
def hasReleaseKeystore = releaseStorePath.exists()
@@ -64,8 +64,9 @@ android {
}
dependencies {
implementation files(sdkAarPath)
implementation files(
sdkAarPath,
"libs/Kiwi.aar",
"libs/fu_core_all_feature_release.aar",
"libs/fu_model_all_feature_release.aar"
)

BIN
example/libs/Kiwi.aar Normal file

Binary file not shown.

View File

@@ -15,9 +15,12 @@
<uses-permission android:name="android.permission.FOREGROUND_SERVICE_MEDIA_PLAYBACK"/>
<uses-permission android:name="android.permission.WAKE_LOCK" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" android:maxSdkVersion="28" />
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" android:maxSdkVersion="32" />
<uses-permission android:name="android.permission.READ_MEDIA_VIDEO" />
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
<application
android:name=".DemoApplication"
android:allowBackup="true"
android:label="SellyCloudRTC Demo"
android:icon="@mipmap/ic_launcher"

Binary file not shown.

View File

@@ -0,0 +1,11 @@
package com.demo.SellyCloudSDK
import android.app.Application
/**
 * Demo application entry point.
 *
 * Starts the Kiwi shield SDK initialization as early as possible so the
 * proxy address can be ready before any streaming screen needs it
 * (see KiwiHelper's three-phase flow).
 */
class DemoApplication : Application() {
    override fun onCreate() {
        super.onCreate()
        // Asynchronous Kiwi SDK initialization (does not block app startup).
        KiwiHelper.initializeAsync()
    }
}

View File

@@ -3,11 +3,19 @@ package com.demo.SellyCloudSDK
import android.app.Dialog
import android.content.Intent
import android.graphics.Rect
import android.graphics.Typeface
import android.net.Uri
import android.os.Bundle
import android.util.TypedValue
import android.view.Gravity
import android.view.View
import android.view.ViewGroup
import android.widget.GridLayout
import android.widget.TextView
import android.widget.Toast
import androidx.core.content.ContextCompat
import androidx.core.view.isVisible
import androidx.activity.result.contract.ActivityResultContracts
import androidx.appcompat.app.AppCompatActivity
import androidx.recyclerview.widget.GridLayoutManager
import androidx.recyclerview.widget.RecyclerView
@@ -33,6 +41,8 @@ import com.demo.SellyCloudSDK.live.square.AliveStreamItem
import com.demo.SellyCloudSDK.live.square.isPkStream
import com.demo.SellyCloudSDK.login.DemoLoginStore
import com.demo.SellyCloudSDK.login.LoginActivity
import com.demo.SellyCloudSDK.vod.VodListRepository
import com.demo.SellyCloudSDK.vod.VodListResult
import com.demo.SellyCloudSDK.vod.VodPlayActivity
import com.sellycloud.sellycloudsdk.SellyLiveMode
import kotlinx.coroutines.CoroutineScope
@@ -56,6 +66,16 @@ class FeatureHubActivity : AppCompatActivity() {
private val uiScope = CoroutineScope(SupervisorJob() + Dispatchers.Main.immediate)
/** 当前打开的 VOD 配置对话框,便于文件选择完成后直接进入播放页。 */
private var pendingVodDialog: Dialog? = null
private val vodFilePicker = registerForActivityResult(
ActivityResultContracts.GetContent()
) { uri ->
if (uri == null) return@registerForActivityResult
pendingVodDialog?.dismiss()
startVodPlayback(uri.toString(), uri)
}
private lateinit var aliveAdapter: AliveStreamAdapter
private val allAliveItems: MutableList<AliveStreamItem> = mutableListOf()
private var currentPage = 0
@@ -151,7 +171,7 @@ class FeatureHubActivity : AppCompatActivity() {
if (dy <= 0) return
val lastVisible = layoutManager.findLastVisibleItemPosition()
if (lastVisible >= aliveAdapter.itemCount - 2) {
appendNextPage()
recyclerView.post { appendNextPage() }
}
}
})
@@ -223,12 +243,14 @@ class FeatureHubActivity : AppCompatActivity() {
}
val url = item.url?.trim().orEmpty()
val xorKey = item.xorKey.orEmpty()
val intent = if (url.isNotEmpty()) {
LivePlayActivity.createIntent(
this,
resolvePlayModeFromUrl(url),
url,
autoStart = true
autoStart = true,
xorKeyHex = xorKey
)
} else {
val liveMode = resolvePlayMode(item.playProtocol)
@@ -243,7 +265,8 @@ class FeatureHubActivity : AppCompatActivity() {
params.vhost,
params.appName,
params.streamName,
autoStart = true
autoStart = true,
xorKeyHex = xorKey
)
}.apply {
item.previewImage?.let { putExtra(LivePlayActivity.EXTRA_PREVIEW_IMAGE_URL, it) }
@@ -469,6 +492,13 @@ class FeatureHubActivity : AppCompatActivity() {
dialog.window?.setLayout(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT)
dialog.setCancelable(true)
pendingVodDialog = dialog
dialog.setOnDismissListener {
if (pendingVodDialog === dialog) {
pendingVodDialog = null
}
}
dialogBinding.btnClose.setOnClickListener { dialog.dismiss() }
dialogBinding.btnStartVod.setOnClickListener {
val input = dialogBinding.etVodUrl.text?.toString()?.trim().orEmpty()
@@ -477,12 +507,110 @@ class FeatureHubActivity : AppCompatActivity() {
return@setOnClickListener
}
dialog.dismiss()
startActivity(VodPlayActivity.createIntent(this, input))
startVodPlayback(input)
}
dialogBinding.btnPickLocalFile.setOnClickListener {
vodFilePicker.launch("video/*")
}
dialogBinding.btnPlayAssetSample.setOnClickListener {
dialog.dismiss()
startVodPlayback("asset:///vod/sample.mp4")
}
loadVodList(dialogBinding)
dialog.show()
}
/**
 * Opens the VOD playback screen for [input]; [sourceUri] optionally carries
 * the content Uri of a locally picked file.
 */
private fun startVodPlayback(input: String, sourceUri: Uri? = null) =
    startActivity(VodPlayActivity.createIntent(this, input, sourceUri))
/**
 * Fetches the server-side VOD format list and renders it as chips.
 *
 * Shows both progress indicators while the request is in flight; on failure
 * the error label becomes visible and doubles as a tap-to-retry control.
 */
private fun loadVodList(dialogBinding: DialogVodInputBinding) {
    with(dialogBinding) {
        pbVodListFull.isVisible = true
        pbVodList.isVisible = true
        tvVodListError.isVisible = false
        gridVodFormats.isVisible = false
    }
    uiScope.launch {
        val result = VodListRepository.fetchVodList()
        dialogBinding.pbVodListFull.isVisible = false
        dialogBinding.pbVodList.isVisible = false
        if (result is VodListResult.Success) {
            populateVodChips(dialogBinding, result.formats)
        } else if (result is VodListResult.Error) {
            dialogBinding.tvVodListError.text = result.message
            dialogBinding.tvVodListError.isVisible = true
            // Tapping the error text retries the whole load.
            dialogBinding.tvVodListError.setOnClickListener {
                loadVodList(dialogBinding)
            }
        }
    }
}
/**
 * Fills the format grid with one selectable chip per (format, url) entry.
 *
 * Chips are laid out 4 per row with equal weights; selecting a chip
 * deselects the previous one and copies its URL into the input field.
 */
private fun populateVodChips(
    dialogBinding: DialogVodInputBinding,
    formats: Map<String, String>
) {
    val grid = dialogBinding.gridVodFormats
    grid.removeAllViews()
    grid.isVisible = true

    // Local dp→px helper keeps the two dimension conversions in one place.
    fun dp(value: Float): Int = TypedValue.applyDimension(
        TypedValue.COMPLEX_UNIT_DIP, value, resources.displayMetrics
    ).toInt()

    val gapPx = dp(3f)
    val chipHeightPx = dp(34f)
    var activeChip: TextView? = null

    formats.entries.forEachIndexed { position, (format, url) ->
        val chip = TextView(this)
        chip.text = format
        chip.gravity = Gravity.CENTER
        chip.setTextSize(TypedValue.COMPLEX_UNIT_SP, 13f)
        chip.typeface = Typeface.DEFAULT_BOLD
        chip.setTextColor(ContextCompat.getColor(chip.context, R.color.av_text_primary))
        chip.setBackgroundResource(R.drawable.selector_av_vod_chip)
        chip.isSelected = false

        val rowIndex = position / 4
        val colIndex = position % 4
        val layoutParams = GridLayout.LayoutParams(
            GridLayout.spec(rowIndex, 1f),
            GridLayout.spec(colIndex, 1f)
        ).apply {
            width = 0
            height = chipHeightPx
            // Inner gaps only: no margin on the first row / first column.
            setMargins(
                if (colIndex > 0) gapPx else 0,
                if (rowIndex > 0) gapPx else 0,
                0, 0
            )
        }

        chip.setOnClickListener {
            if (activeChip == chip) return@setOnClickListener
            val previous = activeChip
            if (previous != null) {
                previous.isSelected = false
                previous.setTextColor(
                    ContextCompat.getColor(previous.context, R.color.av_text_primary)
                )
            }
            chip.isSelected = true
            chip.setTextColor(ContextCompat.getColor(chip.context, R.color.brand_primary_text_on))
            activeChip = chip
            dialogBinding.etVodUrl.setText(url)
        }

        grid.addView(chip, layoutParams)
    }
}
private fun setupSettingsSave() {
binding.btnSaveSettings.setOnClickListener {
val settings = uiToSettingsOrNull() ?: return@setOnClickListener
@@ -517,6 +645,12 @@ class FeatureHubActivity : AppCompatActivity() {
AvDemoSettings.Resolution.P540 -> binding.rgSettingsResolution.check(R.id.rbSettingsRes540p)
AvDemoSettings.Resolution.P720 -> binding.rgSettingsResolution.check(R.id.rbSettingsRes720p)
}
binding.rgSettingsRenderBackend.check(
when (settings.renderBackendPreference) {
AvDemoSettings.RenderBackendPreference.SURFACE_VIEW -> R.id.rbSettingsRenderSurface
AvDemoSettings.RenderBackendPreference.TEXTURE_VIEW -> R.id.rbSettingsRenderTexture
}
)
restoreEnvSettingsToUi()
}
@@ -556,13 +690,18 @@ class FeatureHubActivity : AppCompatActivity() {
R.id.rbSettingsRes540p -> AvDemoSettings.Resolution.P540
else -> AvDemoSettings.Resolution.P720
}
val renderBackendPreference = when (binding.rgSettingsRenderBackend.checkedRadioButtonId) {
R.id.rbSettingsRenderTexture -> AvDemoSettings.RenderBackendPreference.TEXTURE_VIEW
else -> AvDemoSettings.RenderBackendPreference.SURFACE_VIEW
}
val current = settingsStore.read()
return current.copy(
streamId = streamId,
resolution = res,
fps = fps,
maxBitrateKbps = maxKbps,
minBitrateKbps = minKbps
minBitrateKbps = minKbps,
renderBackendPreference = renderBackendPreference
)
}

View File

@@ -0,0 +1,188 @@
package com.demo.SellyCloudSDK
import android.util.Log
import com.kiwi.sdk.Kiwi
import com.sellycloud.sellycloudsdk.SellyCloudManager
import kotlinx.coroutines.CancellationException
import kotlinx.coroutines.CompletableDeferred
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.Job
import kotlinx.coroutines.SupervisorJob
import kotlinx.coroutines.ensureActive
import kotlinx.coroutines.launch
import kotlinx.coroutines.withContext
import kotlinx.coroutines.withTimeoutOrNull
import java.util.concurrent.Executors
import java.util.concurrent.TimeUnit
import java.util.concurrent.atomic.AtomicBoolean
import java.util.concurrent.atomic.AtomicInteger
/**
 * Demo-side wrapper around the Kiwi shield SDK.
 *
 * Three-phase usage pattern:
 * 1. Application.onCreate() → [initializeAsync] initializes the Kiwi SDK asynchronously.
 * 2. Activity setup → [startProxySetup] resolves the proxy address in the background.
 * 3. Before going live / joining a call → [awaitProxyReady] waits until the proxy is set.
 */
object KiwiHelper {
    private const val TAG = "KiwiHelper"
    private const val DEFAULT_APP_KEY = "5XTXUZ/aqOwfjA4zQkY7VpjcNBucWxmNGY4vFNhwSMKWkn2WK383dbNgI+96Y+ttSPMFzqhu8fxP5SiCK5+/6cGrBQQt8pDQAOi3EN4Z6lzkC2cJ5mfjBVi4ZpFASG9e3divF5UqLG6sTmFI3eCuJxy9/kHXPSSkKWJe1MnBMQETpf4FRDVuR9d/LzXKQgA9PsjRbPRLx4f3h0TU2P4GEfv1c70FvkdwpqirQt9ik2hAhKuj0vJY60g+yYhGY19a07vBTW4MprN53RnSH8bCs79NNbWyzsg2++t+sKdZP1WPGeOho/xpsQRP8yWCXIOOdvdjiE3YXVltBgmPnA6gOjFS97WVlBAQ1mJE7rQi+/5hhfTuJlWoBH6000SRe7dc5EA0WGQX9U1Aj96ahBQhyHTrHJySmJ/hRMYMudqByF6K4PtrwZ8zugTjtx1dyLPOonZDlTu7hPAIcUfuaQ9xS3Phbq8lP67EYDsr3pkWuwL6AjrPjFwNmi0P1g+hV1ZQUmDQVGhNHmF3cE9Pd5ZOS10/fwaXYGRhcq9PlUSmcbU3scLtrBlzpOslyjlQ6W57EudCrvvJU3mimfs1A2y7cjpnLlJN1CWh6dQAaGcwSG2QA8+88qmlMH1t627fItTgHYrP1DkExpAr2dqgYDvsICJnHaRSBMe608GrPbFaECutRz5y3BEtQKcVKdgA1e6W4TFnxs5HqGrzc8iHPOOKGf8zHWEXkITPBKEiA86Nz46pDrqM9FKx4upPijn4Dahj8pd7yWTUIdHBT8X39Vm3/TSV5xT/lTinmv8rhBieb/2SQamTjVQ22VFq3nQ1h4TxUYTEc0nSjqcz54fWf1cyBy7uh82q1weKXUAJ8vG9W05vmt3/aDZ9+C8cWm53AQ90xgDvW7M1mZveuyfof2qrPsXTpj+jhpDkJgm6qJsvV5ClmGth8gvCM0rHjSIwxhYDZaIDK5TkFWjwLltt+YhhYLKketwuTHdlO/hCxrsFzlXHhXGVRC+kgXusfQUrHIm1WjW9o9EqasHg9ufUgg7cMO/9FRZhJ+Xdw9erprYDvu84Da9jL6NUUOSNIGTCJ/s29Lz4SIwCVG2lzm2UhD6E9ipGfG9gc6e/2vt1emOsP3/ipHVJf16r/9S4+dGKIjPX6QcHIIL2AMu2Je07nPmEoz7KaeOShox4bG3puMQdkdQo6kRIFpUzwUty+4EWqHmyPHGkGGGfI8gj0EreiZwgVJmBQ/8S5wlK+iUp+TVeoXo="
    private const val INIT_TIMEOUT_SECONDS = 3L
    private const val CONVERT_TIMEOUT_SECONDS = 1L
    private const val AWAIT_INIT_TIMEOUT_MS = 4000L

    /** Result of Kiwi.Init, completed exactly once by [initializeAsync]. */
    private val initDeferred = CompletableDeferred<Boolean>()

    /** Internal scope, independent of any external lifecycle. */
    private val helperScope = CoroutineScope(SupervisorJob() + Dispatchers.Main)

    /** Guards [initializeAsync]: repeated calls must not spawn extra init threads. */
    private val initStarted = AtomicBoolean(false)

    /** Currently running proxy-resolution job. */
    @Volatile private var currentSetupJob: Job? = null

    /**
     * Monotonically increasing version that resolves the race where a
     * cancelled older job could overwrite the result of a newer one.
     * Each startProxySetup call bumps it; resolveAndSetProxyAddress checks
     * the version is still current before writing.
     */
    private val setupVersion = AtomicInteger(0)

    // ──────────────── Phase 1: initialization ────────────────

    /**
     * Asynchronously initializes the Kiwi SDK (call from Application.onCreate).
     * Idempotent: only the first call starts the init work; later calls return
     * immediately (previously every call leaked a fresh executor + thread).
     */
    fun initializeAsync() {
        if (!initStarted.compareAndSet(false, true)) return
        val executor = Executors.newSingleThreadExecutor()
        val future = executor.submit<Int> { Kiwi.Init(DEFAULT_APP_KEY) }
        Thread {
            try {
                val result = future.get(INIT_TIMEOUT_SECONDS, TimeUnit.SECONDS)
                val success = result == 0
                Log.d(TAG, if (success) "Kiwi 初始化成功" else "Kiwi 初始化失败, code=$result")
                initDeferred.complete(success)
            } catch (e: Exception) {
                Log.e(TAG, "Kiwi 初始化异常: ${e.message}")
                future.cancel(true)
                initDeferred.complete(false)
            } finally {
                executor.shutdown()
            }
        }.start()
    }

    // ──────────────── Phase 2: start proxy resolution ────────────────

    /**
     * Starts proxy resolution (not suspending; safe to call on the main thread).
     * - Bumps the version and cancels the previous job so the *last* call wins.
     * - The internal coroutine awaits initialization and runs the blocking
     *   ServerToLocal on IO, never blocking the caller.
     * - With [enableKiwi] false or a blank [rsName], the proxy is cleared.
     */
    fun startProxySetup(enableKiwi: Boolean, rsName: String) {
        val version = setupVersion.incrementAndGet()
        currentSetupJob?.cancel()
        if (!enableKiwi || rsName.isBlank()) {
            SellyCloudManager.setProxyAddress(null)
            currentSetupJob = null
            return
        }
        currentSetupJob = helperScope.launch {
            resolveAndSetProxyAddress(rsName, version)
        }
    }

    // ──────────────── Phase 3: await readiness ────────────────

    /**
     * Suspends until the current proxy-resolution job finishes.
     * Returns immediately if startProxySetup was never called or already completed.
     */
    suspend fun awaitProxyReady() {
        currentSetupJob?.join()
    }

    // ──────────────── internals ────────────────

    /** Awaits Kiwi initialization with a hard timeout; false on timeout or failure. */
    private suspend fun awaitInitialization(): Boolean {
        return withTimeoutOrNull(AWAIT_INIT_TIMEOUT_MS) {
            initDeferred.await()
        } ?: run {
            Log.w(TAG, "等待 Kiwi 初始化超时 (${AWAIT_INIT_TIMEOUT_MS}ms)")
            false
        }
    }

    /**
     * Resolves [rsName] into a local proxy URL and writes it to SellyCloudManager,
     * but only while [version] is still the current setup version.
     * Returns true if a proxy address was successfully set.
     */
    private suspend fun resolveAndSetProxyAddress(rsName: String, version: Int): Boolean {
        // Wait for initialization first.
        if (!awaitInitialization()) {
            Log.w(TAG, "Kiwi 初始化失败/超时,清除代理")
            setProxyIfCurrent(version, null)
            return false
        }
        // Run the blocking ServerToLocal on the IO dispatcher.
        return withContext(Dispatchers.IO) {
            try {
                val proxyUrl = convertRsToLocalUrl(rsName)
                // The blocking call has returned; bail out if we were cancelled meanwhile.
                ensureActive()
                // Version check: only the current job may write, so a stale
                // job cannot overwrite a newer result.
                if (proxyUrl != null) {
                    Log.d(TAG, "Kiwi 代理地址: $proxyUrl")
                    setProxyIfCurrent(version, proxyUrl)
                    true
                } else {
                    Log.w(TAG, "Kiwi ServerToLocal 失败,清除代理")
                    setProxyIfCurrent(version, null)
                    false
                }
            } catch (e: CancellationException) {
                // Bug fix: the broad Exception catch below used to swallow the
                // CancellationException thrown by ensureActive(), breaking
                // cooperative cancellation. Rethrow so cancellation propagates.
                throw e
            } catch (e: Exception) {
                Log.e(TAG, "代理解析异常: ${e.message}", e)
                setProxyIfCurrent(version, null)
                false
            }
        }
    }

    /**
     * Writes the proxy address only if [version] still matches the current
     * setupVersion, preventing an expired job from overwriting the latest result.
     */
    private fun setProxyIfCurrent(version: Int, address: String?) {
        if (setupVersion.get() == version) {
            SellyCloudManager.setProxyAddress(address)
        } else {
            Log.d(TAG, "跳过过期的代理写入 (version=$version, current=${setupVersion.get()})")
        }
    }

    /**
     * Runs Kiwi.ServerToLocal with a timeout and return-code validation.
     * Returns "http://ip:port" on success, null on any failure.
     */
    private fun convertRsToLocalUrl(rsName: String): String? {
        val executor = Executors.newSingleThreadExecutor()
        val future = executor.submit<String?> {
            val ip = StringBuffer()
            val port = StringBuffer()
            val ret = Kiwi.ServerToLocal(rsName, ip, port)
            if (ret != 0) {
                Log.w(TAG, "ServerToLocal 返回错误码: $ret, rsName=$rsName")
                return@submit null
            }
            val ipStr = ip.toString().trim()
            val portStr = port.toString().trim()
            if (ipStr.isNotEmpty() && portStr.isNotEmpty()) {
                "http://$ipStr:$portStr"
            } else {
                Log.w(TAG, "ServerToLocal 返回空 ip/port")
                null
            }
        }
        return try {
            future.get(CONVERT_TIMEOUT_SECONDS, TimeUnit.SECONDS)
        } catch (e: Exception) {
            // Bug fix: interrupt the worker on timeout/failure — shutdown()
            // alone lets the blocked ServerToLocal call keep running.
            future.cancel(true)
            Log.e(TAG, "ServerToLocal 异常: ${e.message}")
            null
        } finally {
            executor.shutdown()
        }
    }
}

View File

@@ -2,6 +2,7 @@ package com.demo.SellyCloudSDK.avdemo
import android.content.Context
import androidx.core.content.edit
import com.sellycloud.sellycloudsdk.render.RenderBackend
data class AvDemoSettings(
val streamId: String,
@@ -12,9 +13,20 @@ data class AvDemoSettings(
val xorKeyHex: String = "",
val useUrlMode: Boolean = false,
val pushUrl: String = "",
val renderBackendPreference: RenderBackendPreference = RenderBackendPreference.SURFACE_VIEW,
) {
enum class Resolution { P360, P480, P540, P720 }
enum class RenderBackendPreference {
SURFACE_VIEW,
TEXTURE_VIEW;
fun isTextureView(): Boolean = this == TEXTURE_VIEW
fun toSdkBackend(): RenderBackend =
if (this == TEXTURE_VIEW) RenderBackend.TEXTURE_VIEW else RenderBackend.SURFACE_VIEW
}
fun resolutionSize(): Pair<Int, Int> = when (resolution) {
Resolution.P360 -> 640 to 360
Resolution.P480 -> 854 to 480
@@ -34,6 +46,13 @@ class AvDemoSettingsStore(context: Context) {
AvDemoSettings.Resolution.P540.name -> AvDemoSettings.Resolution.P540
else -> AvDemoSettings.Resolution.P720
}
val renderBackendPreference = when (
prefs.getString(KEY_RENDER_BACKEND, AvDemoSettings.RenderBackendPreference.SURFACE_VIEW.name)
) {
AvDemoSettings.RenderBackendPreference.TEXTURE_VIEW.name ->
AvDemoSettings.RenderBackendPreference.TEXTURE_VIEW
else -> AvDemoSettings.RenderBackendPreference.SURFACE_VIEW
}
return AvDemoSettings(
streamId = prefs.getString(KEY_STREAM_ID, DEFAULT_STREAM_ID).orEmpty(),
resolution = resolution,
@@ -42,7 +61,8 @@ class AvDemoSettingsStore(context: Context) {
minBitrateKbps = prefs.getInt(KEY_MIN_KBPS, DEFAULT_MIN_KBPS),
xorKeyHex = prefs.getString(KEY_XOR_KEY_HEX, "").orEmpty(),
useUrlMode = prefs.getBoolean(KEY_USE_URL_MODE, false),
pushUrl = prefs.getString(KEY_PUSH_URL, "").orEmpty()
pushUrl = prefs.getString(KEY_PUSH_URL, "").orEmpty(),
renderBackendPreference = renderBackendPreference
)
}
@@ -56,6 +76,7 @@ class AvDemoSettingsStore(context: Context) {
putString(KEY_XOR_KEY_HEX, settings.xorKeyHex)
putBoolean(KEY_USE_URL_MODE, settings.useUrlMode)
putString(KEY_PUSH_URL, settings.pushUrl)
putString(KEY_RENDER_BACKEND, settings.renderBackendPreference.name)
}
}
@@ -74,5 +95,6 @@ class AvDemoSettingsStore(context: Context) {
private const val KEY_XOR_KEY_HEX = "xor_key_hex"
private const val KEY_USE_URL_MODE = "use_url_mode"
private const val KEY_PUSH_URL = "push_url"
private const val KEY_RENDER_BACKEND = "render_backend"
}
}

View File

@@ -1,185 +0,0 @@
package com.demo.SellyCloudSDK.beauty
//
//import android.app.Dialog
//import android.content.Context
//import android.os.Bundle
//import android.widget.SeekBar
//import android.widget.TextView
//import android.widget.Switch
//import android.widget.Button
//import android.view.Window
//
///**
// * 美颜参数控制对话框
// */
//class BeautyControlDialog(
// context: Context,
//) : Dialog(context) {
//
// private lateinit var switchBeautyEnable: Switch
// private lateinit var seekBarBeautyIntensity: SeekBar
// private lateinit var seekBarFilterIntensity: SeekBar
// private lateinit var seekBarColorIntensity: SeekBar
// private lateinit var seekBarRedIntensity: SeekBar
// private lateinit var seekBarEyeBrightIntensity: SeekBar
// private lateinit var seekBarToothIntensity: SeekBar
//
// private lateinit var tvBeautyValue: TextView
// private lateinit var tvFilterValue: TextView
// private lateinit var tvColorValue: TextView
// private lateinit var tvRedValue: TextView
// private lateinit var tvEyeBrightValue: TextView
// private lateinit var tvToothValue: TextView
// private lateinit var btnClose: Button
//
// override fun onCreate(savedInstanceState: Bundle?) {
// super.onCreate(savedInstanceState)
// requestWindowFeature(Window.FEATURE_NO_TITLE)
// setContentView(R.layout.dialog_beauty_control)
//
// initViews()
// setupListeners()
// updateUI()
// }
//
// private fun initViews() {
// switchBeautyEnable = findViewById(R.id.switchBeautyEnable)
// seekBarBeautyIntensity = findViewById(R.id.seekBarBeautyIntensity)
// seekBarFilterIntensity = findViewById(R.id.seekBarFilterIntensity)
// seekBarColorIntensity = findViewById(R.id.seekBarColorIntensity)
// seekBarRedIntensity = findViewById(R.id.seekBarRedIntensity)
// seekBarEyeBrightIntensity = findViewById(R.id.seekBarEyeBrightIntensity)
// seekBarToothIntensity = findViewById(R.id.seekBarToothIntensity)
//
// tvBeautyValue = findViewById(R.id.tvBeautyValue)
// tvFilterValue = findViewById(R.id.tvFilterValue)
// tvColorValue = findViewById(R.id.tvColorValue)
// tvRedValue = findViewById(R.id.tvRedValue)
// tvEyeBrightValue = findViewById(R.id.tvEyeBrightValue)
// tvToothValue = findViewById(R.id.tvToothValue)
// btnClose = findViewById(R.id.btnClose)
// }
//
// private fun setupListeners() {
// // 美颜开关
// switchBeautyEnable.setOnCheckedChangeListener { _, isChecked ->
// streamingService?.enableBeauty(isChecked)
// // 根据开关状态启用/禁用参数调节
// updateSeekBarsEnabled(isChecked)
// }
//
// // 美颜强度调节 (0-100, 转换为0.0-10.0)
// seekBarBeautyIntensity.setOnSeekBarChangeListener(object : SeekBar.OnSeekBarChangeListener {
// override fun onProgressChanged(seekBar: SeekBar?, progress: Int, fromUser: Boolean) {
// val intensity = progress / 10.0
// tvBeautyValue.text = String.format("%.1f", intensity)
// streamingService?.setBeautyIntensity(intensity)
// }
// override fun onStartTrackingTouch(seekBar: SeekBar?) {}
// override fun onStopTrackingTouch(seekBar: SeekBar?) {}
// })
//
// // 滤镜强度调节 (0-10, 转换为0.0-1.0)
// seekBarFilterIntensity.setOnSeekBarChangeListener(object : SeekBar.OnSeekBarChangeListener {
// override fun onProgressChanged(seekBar: SeekBar?, progress: Int, fromUser: Boolean) {
// val intensity = progress / 10.0
// tvFilterValue.text = String.format("%.1f", intensity)
// streamingService?.setFilterIntensity(intensity)
// }
// override fun onStartTrackingTouch(seekBar: SeekBar?) {}
// override fun onStopTrackingTouch(seekBar: SeekBar?) {}
// })
//
// // 美白强度调节
// seekBarColorIntensity.setOnSeekBarChangeListener(object : SeekBar.OnSeekBarChangeListener {
// override fun onProgressChanged(seekBar: SeekBar?, progress: Int, fromUser: Boolean) {
// val intensity = progress / 10.0
// tvColorValue.text = String.format("%.1f", intensity)
// streamingService?.setColorIntensity(intensity)
// }
// override fun onStartTrackingTouch(seekBar: SeekBar?) {}
// override fun onStopTrackingTouch(seekBar: SeekBar?) {}
// })
//
// // 红润强度调节
// seekBarRedIntensity.setOnSeekBarChangeListener(object : SeekBar.OnSeekBarChangeListener {
// override fun onProgressChanged(seekBar: SeekBar?, progress: Int, fromUser: Boolean) {
// val intensity = progress / 10.0
// tvRedValue.text = String.format("%.1f", intensity)
// streamingService?.setRedIntensity(intensity)
// }
// override fun onStartTrackingTouch(seekBar: SeekBar?) {}
// override fun onStopTrackingTouch(seekBar: SeekBar?) {}
// })
//
// // 亮眼强度调节
// seekBarEyeBrightIntensity.setOnSeekBarChangeListener(object : SeekBar.OnSeekBarChangeListener {
// override fun onProgressChanged(seekBar: SeekBar?, progress: Int, fromUser: Boolean) {
// val intensity = progress / 10.0
// tvEyeBrightValue.text = String.format("%.1f", intensity)
// streamingService?.setEyeBrightIntensity(intensity)
// }
// override fun onStartTrackingTouch(seekBar: SeekBar?) {}
// override fun onStopTrackingTouch(seekBar: SeekBar?) {}
// })
//
// // 美牙强度调节
// seekBarToothIntensity.setOnSeekBarChangeListener(object : SeekBar.OnSeekBarChangeListener {
// override fun onProgressChanged(seekBar: SeekBar?, progress: Int, fromUser: Boolean) {
// val intensity = progress / 10.0
// tvToothValue.text = String.format("%.1f", intensity)
// streamingService?.setToothIntensity(intensity)
// }
// override fun onStartTrackingTouch(seekBar: SeekBar?) {}
// override fun onStopTrackingTouch(seekBar: SeekBar?) {}
// })
//
// // 关闭按钮
// btnClose.setOnClickListener {
// dismiss()
// }
// }
//
// private fun updateUI() {
// // 获取当前美颜状态并更新UI
// val isBeautyEnabled = streamingService?.isBeautyEnabled() ?: true
// switchBeautyEnable.isChecked = isBeautyEnabled
//
// // 获取当前美颜参数
// val params = streamingService?.getCurrentBeautyParams() ?: mapOf()
//
// // 设置各项参数的当前值
// val blurIntensity = params["blurIntensity"] as? Double ?: 6.0
// val filterIntensity = params["filterIntensity"] as? Double ?: 0.7
// val colorIntensity = params["colorIntensity"] as? Double ?: 0.5
// val redIntensity = params["redIntensity"] as? Double ?: 0.5
// val eyeBrightIntensity = params["eyeBrightIntensity"] as? Double ?: 1.0
// val toothIntensity = params["toothIntensity"] as? Double ?: 1.0
//
// seekBarBeautyIntensity.progress = (blurIntensity * 10).toInt()
// seekBarFilterIntensity.progress = (filterIntensity * 10).toInt()
// seekBarColorIntensity.progress = (colorIntensity * 10).toInt()
// seekBarRedIntensity.progress = (redIntensity * 10).toInt()
// seekBarEyeBrightIntensity.progress = (eyeBrightIntensity * 10).toInt()
// seekBarToothIntensity.progress = (toothIntensity * 10).toInt()
//
// tvBeautyValue.text = String.format("%.1f", blurIntensity)
// tvFilterValue.text = String.format("%.1f", filterIntensity)
// tvColorValue.text = String.format("%.1f", colorIntensity)
// tvRedValue.text = String.format("%.1f", redIntensity)
// tvEyeBrightValue.text = String.format("%.1f", eyeBrightIntensity)
// tvToothValue.text = String.format("%.1f", toothIntensity)
//
// // 根据开关状态启用/禁用参数调节
// updateSeekBarsEnabled(isBeautyEnabled)
// }
//
// private fun updateSeekBarsEnabled(enabled: Boolean) {
// seekBarBeautyIntensity.isEnabled = enabled
// seekBarFilterIntensity.isEnabled = enabled
// seekBarColorIntensity.isEnabled = enabled
// seekBarRedIntensity.isEnabled = enabled
// seekBarEyeBrightIntensity.isEnabled = enabled
// seekBarToothIntensity.isEnabled = enabled
// }
//}

View File

@@ -1,264 +0,0 @@
package com.demo.SellyCloudSDK.beauty
import android.content.Context
import android.opengl.GLES20
import android.opengl.Matrix
import android.util.Log
import com.demo.SellyCloudSDK.R
import com.pedro.encoder.input.gl.render.filters.BaseFilterRender
import com.pedro.encoder.utils.gl.GlUtil
import java.nio.ByteBuffer
import java.nio.ByteOrder
/**
 * FaceUnity beauty filter that plugs into RootEncoder's GL filter chain.
 *
 * Holds only the ApplicationContext (never an Activity) so the filter keeps
 * working when the app is backgrounded and the GL context is rebuilt.
 */
class FUBeautyFilterRender(
    private val fuRenderer: FURenderer
) : BaseFilterRender() {

    private val TAG = "FUBeautyFilterRender"

    // Beauty on/off switch; when off, frames are rendered through unmodified.
    private var isBeautyEnabled = true

    // Current camera facing, forwarded to FaceUnity so mirroring stays correct.
    private var currentCameraFacing: com.pedro.encoder.input.video.CameraHelper.Facing =
        com.pedro.encoder.input.video.CameraHelper.Facing.BACK

    // Standard vertex data following pedro's pattern (X, Y, Z, U, V).
    private val squareVertexDataFilter = floatArrayOf(
        // X, Y, Z, U, V
        -1f, -1f, 0f, 0f, 0f, // bottom left
        1f, -1f, 0f, 1f, 0f, // bottom right
        -1f, 1f, 0f, 0f, 1f, // top left
        1f, 1f, 0f, 1f, 1f // top right
    )

    private var frameW = 0
    private var frameH = 0

    // ApplicationContext captured in initGl; initGlFilter falls back to it when
    // its own context argument is null. Assumes initGl always runs before
    // initGlFilter needs the fallback — TODO confirm in BaseFilterRender.
    private lateinit var appContext: Context

    // GLSL program and attribute/uniform handles (-1 until initGlFilter succeeds).
    private var program = -1
    private var aPositionHandle = -1
    private var aTextureHandle = -1
    private var uMVPMatrixHandle = -1
    private var uSTMatrixHandle = -1
    private var uSamplerHandle = -1

    // Guards drawFilter against running before the shader program is ready.
    private var isInitialized = false

    init {
        squareVertex = ByteBuffer.allocateDirect(squareVertexDataFilter.size * FLOAT_SIZE_BYTES)
            .order(ByteOrder.nativeOrder())
            .asFloatBuffer()
        squareVertex.put(squareVertexDataFilter).position(0)
        Matrix.setIdentityM(MVPMatrix, 0)
        Matrix.setIdentityM(STMatrix, 0)
    }

    override fun initGl(
        width: Int,
        height: Int,
        context: Context,
        previewWidth: Int,
        previewHeight: Int
    ) {
        // The GL context may have been recreated: force filter and FaceUnity re-init.
        isInitialized = false
        program = -1
        // Save the ApplicationContext first, because super.initGl can trigger
        // initGlFilter, which may need the fallback context.
        this.appContext = context.applicationContext
        super.initGl(width, height, context, previewWidth, previewHeight)
        frameW = width
        frameH = height
        // Rebind FaceUnity GL resources to the new context (blocking, to avoid
        // a window of un-beautified frames).
        fuRenderer.reinitializeGlContextBlocking()
        Log.d(TAG, "initGl: width=$width, height=$height, context=${context.javaClass.simpleName}")
    }

    override fun initGlFilter(context: Context?) {
        if (isInitialized) {
            Log.d(TAG, "Filter already initialized. Skipping initGlFilter.")
            return
        }
        try {
            // Use the ApplicationContext to avoid retaining an Activity.
            val safeContext = context?.applicationContext ?: appContext
            val vertexShader = GlUtil.getStringFromRaw(safeContext, R.raw.simple_vertex)
            val fragmentShader = GlUtil.getStringFromRaw(safeContext, R.raw.fu_base_fragment)
            program = GlUtil.createProgram(vertexShader, fragmentShader)
            aPositionHandle = GLES20.glGetAttribLocation(program, "aPosition")
            aTextureHandle = GLES20.glGetAttribLocation(program, "aTextureCoord")
            uMVPMatrixHandle = GLES20.glGetUniformLocation(program, "uMVPMatrix")
            uSTMatrixHandle = GLES20.glGetUniformLocation(program, "uSTMatrix")
            uSamplerHandle = GLES20.glGetUniformLocation(program, "uSampler")
            isInitialized = true
            Log.d(TAG, "initGlFilter completed - program: $program")
        } catch (e: Exception) {
            Log.e(TAG, "initGlFilter failed", e)
            isInitialized = false
        }
    }

    /**
     * Updates the camera facing (called from outside when the camera is switched).
     */
    fun setCameraFacing(facing: com.pedro.encoder.input.video.CameraHelper.Facing) {
        currentCameraFacing = facing
        fuRenderer.setCameraFacing(facing)
        Log.d(TAG, "Camera facing updated: $facing")
    }

    /**
     * Core render step called by BaseFilterRender every frame.
     * Falls back to a plain pass-through whenever FaceUnity is unavailable,
     * beauty is disabled, or processing throws.
     */
    override fun drawFilter() {
        if (!isInitialized) {
            Log.w(TAG, "Filter not initialized, skipping draw")
            return
        }
        // Beauty disabled: render the original texture untouched.
        if (!isBeautyEnabled) {
            drawPassThrough()
            return
        }
        if (!fuRenderer.isAuthSuccess || fuRenderer.fuRenderKit == null) {
            // Fallback: pass through instead of dropping the frame.
            drawPassThrough()
            return
        }
        if (previousTexId <= 0 || frameW <= 0 || frameH <= 0) {
            return
        }
        try {
            // Save the current FBO and viewport — the FaceUnity library may clobber them.
            val prevFbo = IntArray(1)
            val prevViewport = IntArray(4)
            GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, prevFbo, 0)
            GLES20.glGetIntegerv(GLES20.GL_VIEWPORT, prevViewport, 0)
            // Render with the current camera facing applied.
            val processedTexId = fuRenderer.onDrawFrame(previousTexId, frameW, frameH, currentCameraFacing)
            // Restore FBO and viewport to avoid a black screen.
            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, prevFbo[0])
            GLES20.glViewport(prevViewport[0], prevViewport[1], prevViewport[2], prevViewport[3])
            // Use processed texture if available, otherwise fallback to original.
            val textureIdToDraw = if (processedTexId > 0) processedTexId else previousTexId
            // Now draw using our own shader program.
            GLES20.glUseProgram(program)
            // Set vertex position.
            squareVertex.position(SQUARE_VERTEX_DATA_POS_OFFSET)
            GLES20.glVertexAttribPointer(aPositionHandle, 3, GLES20.GL_FLOAT, false,
                SQUARE_VERTEX_DATA_STRIDE_BYTES, squareVertex)
            GLES20.glEnableVertexAttribArray(aPositionHandle)
            // Set texture coordinates.
            squareVertex.position(SQUARE_VERTEX_DATA_UV_OFFSET)
            GLES20.glVertexAttribPointer(aTextureHandle, 2, GLES20.GL_FLOAT, false,
                SQUARE_VERTEX_DATA_STRIDE_BYTES, squareVertex)
            GLES20.glEnableVertexAttribArray(aTextureHandle)
            // Set transformation matrices.
            GLES20.glUniformMatrix4fv(uMVPMatrixHandle, 1, false, MVPMatrix, 0)
            GLES20.glUniformMatrix4fv(uSTMatrixHandle, 1, false, STMatrix, 0)
            // Bind texture and draw.
            GLES20.glUniform1i(uSamplerHandle, 0)
            GLES20.glActiveTexture(GLES20.GL_TEXTURE0)
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureIdToDraw)
            // Draw the rectangle.
            GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4)
        } catch (e: Exception) {
            Log.e(TAG, "Error in beauty processing", e)
            // Fallback: pass-through rendering.
            drawPassThrough()
        }
    }

    /**
     * Pass-through rendering: draws the original texture without beauty processing.
     */
    private fun drawPassThrough() {
        if (previousTexId <= 0 || !isInitialized) {
            return
        }
        try {
            GLES20.glUseProgram(program)
            // Set vertex position.
            squareVertex.position(SQUARE_VERTEX_DATA_POS_OFFSET)
            GLES20.glVertexAttribPointer(aPositionHandle, 3, GLES20.GL_FLOAT, false,
                SQUARE_VERTEX_DATA_STRIDE_BYTES, squareVertex)
            GLES20.glEnableVertexAttribArray(aPositionHandle)
            // Set texture coordinates.
            squareVertex.position(SQUARE_VERTEX_DATA_UV_OFFSET)
            GLES20.glVertexAttribPointer(aTextureHandle, 2, GLES20.GL_FLOAT, false,
                SQUARE_VERTEX_DATA_STRIDE_BYTES, squareVertex)
            GLES20.glEnableVertexAttribArray(aTextureHandle)
            // Set transformation matrices.
            GLES20.glUniformMatrix4fv(uMVPMatrixHandle, 1, false, MVPMatrix, 0)
            GLES20.glUniformMatrix4fv(uSTMatrixHandle, 1, false, STMatrix, 0)
            // Bind original texture and draw.
            GLES20.glUniform1i(uSamplerHandle, 0)
            GLES20.glActiveTexture(GLES20.GL_TEXTURE0)
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, previousTexId)
            // Draw the rectangle.
            GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4)
        } catch (e: Exception) {
            Log.e(TAG, "Error in pass-through rendering", e)
        }
    }

    override fun disableResources() {
        GlUtil.disableResources(aTextureHandle, aPositionHandle)
    }

    override fun release() {
        // Fix: the original assigned isInitialized = false twice; once is enough.
        isInitialized = false
        if (program != -1) {
            GLES20.glDeleteProgram(program)
            program = -1
        }
        Log.d(TAG, "FUBeautyFilterRender released")
    }

    /**
     * Enables or disables beauty processing.
     */
    fun setBeautyEnabled(enabled: Boolean) {
        isBeautyEnabled = enabled
        Log.d(TAG, "Beauty enabled: $enabled")
    }

    /**
     * Returns whether beauty processing is currently enabled.
     */
    fun isBeautyEnabled(): Boolean = isBeautyEnabled
}

View File

@@ -1,6 +1,7 @@
package com.demo.SellyCloudSDK.beauty
import android.content.Context
import android.opengl.GLES20
import android.util.Log
import com.faceunity.core.callback.OperateCallback
import com.faceunity.core.entity.FUBundleData
@@ -19,9 +20,10 @@ import com.faceunity.wrapper.faceunity
import com.pedro.encoder.input.video.CameraHelper
import java.io.File
import java.io.IOException
import java.util.concurrent.CountDownLatch
import java.nio.ByteBuffer
import java.nio.ByteOrder
import java.nio.FloatBuffer
import java.util.concurrent.Executors
import java.util.concurrent.TimeUnit
/**
@@ -52,14 +54,18 @@ class FURenderer(private val context: Context) {
private val BUNDLE_AI_HUMAN = "model" + File.separator + "ai_human_processor.bundle"
private val BUNDLE_FACE_BEAUTY = "graphics" + File.separator + "face_beautification.bundle"
@Volatile
private var workerThreadRef: Thread? = null
private val workerThread = Executors.newSingleThreadExecutor { task ->
Thread(task, "FURenderer-Worker").also { workerThreadRef = it }
Thread(task, "FURenderer-Worker")
}
// 添加摄像头朝向管理
private var currentCameraFacing: CameraHelper.Facing = CameraHelper.Facing.BACK
private var blitProgram = 0
private var blitFramebuffer = 0
private var blitPositionLoc = 0
private var blitTexCoordLoc = 0
private var blitTextureLoc = 0
private var blitQuadBuffer: FloatBuffer? = null
/**
* 初始化美颜SDK
@@ -80,7 +86,7 @@ class FURenderer(private val context: Context) {
// 初始化成功后,在后台线程加载所需资源
workerThread.submit {
try {
faceunity.fuSetUseTexAsync(1)
applyTextureOutputMode()
// 获取 FURenderKit 实例
fuRenderKit = FURenderKit.getInstance()
@@ -142,8 +148,7 @@ class FURenderer(private val context: Context) {
// 重新应用美颜参数与道具
if (faceBeauty == null) loadBeautyBundle()
fuRenderKit?.faceBeauty = faceBeauty
// 再次开启异步纹理模式(稳妥起见)
try { faceunity.fuSetUseTexAsync(1) } catch (_: Throwable) {}
applyTextureOutputMode()
Log.d(TAG, "onGlContextRecreated: done")
} catch (e: Exception) {
Log.e(TAG, "onGlContextRecreated error", e)
@@ -206,6 +211,53 @@ class FURenderer(private val context: Context) {
}
}
/**
 * Runs one frame through FaceUnity and blits the result into [outputTextureId].
 *
 * Must be called on the thread that owns the GL context. Saves and restores the
 * caller's framebuffer binding and viewport around the blit. Falls back to
 * copying [inputTex] when FaceUnity returns no texture; returns early when the
 * processed texture already IS the output texture (nothing to copy).
 */
fun renderProcessedTextureToOutput(
    inputTex: Int,
    outputTextureId: Int,
    width: Int,
    height: Int,
    facing: CameraHelper.Facing
) {
    if (outputTextureId <= 0) return
    val renderedTextureId = onDrawFrame(inputTex, width, height, facing)
    val sourceTextureId = when {
        // FaceUnity rendered straight into the output — no blit needed.
        renderedTextureId == outputTextureId -> return
        renderedTextureId > 0 -> renderedTextureId
        // Processing failed or returned nothing: pass the input through.
        else -> inputTex
    }
    ensureBlitResources()
    if (blitProgram <= 0 || blitFramebuffer <= 0) return
    // Save the caller's FBO binding and viewport so we can restore them after the blit.
    val previousFramebuffer = IntArray(1)
    val previousViewport = IntArray(4)
    GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, previousFramebuffer, 0)
    GLES20.glGetIntegerv(GLES20.GL_VIEWPORT, previousViewport, 0)
    // Attach the output texture to our scratch framebuffer and draw into it.
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, blitFramebuffer)
    GLES20.glFramebufferTexture2D(
        GLES20.GL_FRAMEBUFFER,
        GLES20.GL_COLOR_ATTACHMENT0,
        GLES20.GL_TEXTURE_2D,
        outputTextureId,
        0
    )
    GLES20.glViewport(0, 0, width, height)
    drawRgbTexture(sourceTextureId)
    // Detach the output texture again so later FBO users can't scribble on it.
    GLES20.glFramebufferTexture2D(
        GLES20.GL_FRAMEBUFFER,
        GLES20.GL_COLOR_ATTACHMENT0,
        GLES20.GL_TEXTURE_2D,
        0,
        0
    )
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, previousFramebuffer[0])
    GLES20.glViewport(
        previousViewport[0],
        previousViewport[1],
        previousViewport[2],
        previousViewport[3]
    )
}
/**
* 加载美颜道具并设置默认参数
*/
@@ -235,19 +287,7 @@ class FURenderer(private val context: Context) {
if (!isAuthSuccess) return
workerThread.execute {
try {
Log.d(TAG, "Releasing GL context resources for protocol switch")
isGlInitialized = false
// 释放渲染器的 GL 资源
fuRenderKit?.release()
fuRenderKit = null
// 注意:不清空 faceBeauty保留美颜参数配置
Log.d(TAG, "GL context resources released successfully")
} catch (e: Exception) {
Log.e(TAG, "Error releasing GL context", e)
}
releaseGlContextOnCurrentThread()
}
}
@@ -256,33 +296,36 @@ class FURenderer(private val context: Context) {
*/
fun reinitializeGlContext() {
if (!isAuthSuccess) return
workerThread.execute { doReinitializeGlContext() }
workerThread.execute { reinitializeGlContextOnCurrentThread() }
}
/**
* 重新初始化 GL 上下文(同步等待完成,用于避免美颜空窗)
* 供 RTC texture processor 使用:必须在当前持有 GL context 的线程上执行。
*/
fun reinitializeGlContextBlocking(timeoutMs: Long = 2000L) {
fun reinitializeGlContextOnCurrentThread() {
if (!isAuthSuccess) return
if (Thread.currentThread() === workerThreadRef) {
doReinitializeGlContext()
return
}
val latch = CountDownLatch(1)
workerThread.execute {
try {
doReinitializeGlContext()
} finally {
latch.countDown()
}
}
try {
if (!latch.await(timeoutMs, TimeUnit.MILLISECONDS)) {
Log.w(TAG, "GL context reinit timeout: ${timeoutMs}ms")
}
} catch (_: InterruptedException) {
Thread.currentThread().interrupt()
Log.w(TAG, "GL context reinit interrupted")
doReinitializeGlContext()
} catch (e: Exception) {
Log.e(TAG, "Error reinitializing GL context on current thread", e)
isGlInitialized = false
}
}
/**
 * Releases FaceUnity GL resources on the calling thread.
 * For RTC texture processors: MUST run on the thread that currently owns the GL context.
 */
fun releaseGlContextOnCurrentThread() {
    if (!isAuthSuccess) return
    try {
        Log.d(TAG, "Releasing GL context resources on current thread")
        // Flip the flag first so concurrent render calls bail out early.
        isGlInitialized = false
        releaseBlitResources()
        fuRenderKit?.release()
        fuRenderKit = null
        Log.d(TAG, "GL context resources released successfully")
    } catch (e: Exception) {
        Log.e(TAG, "Error releasing GL context on current thread", e)
    }
}
@@ -293,8 +336,7 @@ class FURenderer(private val context: Context) {
// 重新获取 FURenderKit 实例(绑定到新的 GL 上下文)
fuRenderKit = FURenderKit.getInstance()
// 重新设置异步纹理模式
faceunity.fuSetUseTexAsync(1)
applyTextureOutputMode()
// 如果之前有美颜配置,重新应用
if (faceBeauty != null) {
@@ -316,6 +358,9 @@ class FURenderer(private val context: Context) {
fun release() {
Log.d(TAG, "Releasing FURenderer resources")
isGlInitialized = false
try {
releaseBlitResources()
} catch (_: Exception) {}
try {
fuRenderKit?.release()
} catch (_: Exception) {}
@@ -327,4 +372,132 @@ class FURenderer(private val context: Context) {
workerThread.shutdown()
} catch (_: Exception) {}
}
/**
 * Lazily creates the GL resources used by the blit path (shader program,
 * full-screen quad buffer, scratch framebuffer). Safe to call every frame.
 *
 * Fix: the original re-created the program unconditionally whenever any of the
 * three resources was missing, leaking the previously linked program object
 * after a partial initialization failure. Each resource is now created only
 * when it is actually absent.
 */
private fun ensureBlitResources() {
    if (blitProgram > 0 && blitFramebuffer > 0 && blitQuadBuffer != null) return
    if (blitProgram <= 0) {
        blitProgram = createProgram(BLIT_VERTEX_SHADER, BLIT_FRAGMENT_SHADER)
        if (blitProgram <= 0) return
        blitPositionLoc = GLES20.glGetAttribLocation(blitProgram, "aPosition")
        blitTexCoordLoc = GLES20.glGetAttribLocation(blitProgram, "aTextureCoord")
        blitTextureLoc = GLES20.glGetUniformLocation(blitProgram, "uTexture")
    }
    if (blitQuadBuffer == null) {
        blitQuadBuffer = ByteBuffer.allocateDirect(BLIT_QUAD.size * 4)
            .order(ByteOrder.nativeOrder())
            .asFloatBuffer()
            .put(BLIT_QUAD)
            .also { it.position(0) }
    }
    if (blitFramebuffer <= 0) {
        val framebuffers = IntArray(1)
        GLES20.glGenFramebuffers(1, framebuffers, 0)
        blitFramebuffer = framebuffers[0]
    }
}
/**
 * Draws [textureId] as a full-screen quad using the blit program.
 * Caller is responsible for binding the target framebuffer and viewport.
 * No-ops if the quad buffer has not been created yet.
 */
private fun drawRgbTexture(textureId: Int) {
    val quad = blitQuadBuffer ?: return
    GLES20.glUseProgram(blitProgram)
    // Interleaved quad layout: (x, y, u, v) per vertex, stride = 4 floats = 16 bytes.
    quad.position(0)
    GLES20.glVertexAttribPointer(blitPositionLoc, 2, GLES20.GL_FLOAT, false, 16, quad)
    GLES20.glEnableVertexAttribArray(blitPositionLoc)
    quad.position(2)
    GLES20.glVertexAttribPointer(blitTexCoordLoc, 2, GLES20.GL_FLOAT, false, 16, quad)
    GLES20.glEnableVertexAttribArray(blitTexCoordLoc)
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0)
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId)
    GLES20.glUniform1i(blitTextureLoc, 0)
    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4)
    // Unwind GL state so the caller's program/texture bindings are not disturbed.
    GLES20.glDisableVertexAttribArray(blitPositionLoc)
    GLES20.glDisableVertexAttribArray(blitTexCoordLoc)
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0)
    GLES20.glUseProgram(0)
}
/**
 * Frees the blit shader program, scratch framebuffer, and quad buffer.
 * Must be called on the thread that owns the GL context.
 */
private fun releaseBlitResources() {
    // Delete the linked blit program, if any.
    blitProgram.takeIf { it > 0 }?.let { handle ->
        GLES20.glDeleteProgram(handle)
        blitProgram = 0
    }
    // Delete the scratch framebuffer, if any.
    blitFramebuffer.takeIf { it > 0 }?.let { handle ->
        GLES20.glDeleteFramebuffers(1, intArrayOf(handle), 0)
        blitFramebuffer = 0
    }
    // Drop the vertex buffer; it will be rebuilt lazily on next use.
    blitQuadBuffer = null
}
/**
 * Compiles and links a GL program from vertex/fragment shader sources.
 *
 * @return the program handle, or 0 on any compile/link failure.
 *
 * Fix: the original returned early when glCreateProgram failed without
 * deleting the two already-compiled shader objects, leaking them.
 * Shader objects are always deleted once linking has been attempted
 * (they stay referenced by the program while it lives).
 */
private fun createProgram(vertexSource: String, fragmentSource: String): Int {
    val vertexShader = compileShader(GLES20.GL_VERTEX_SHADER, vertexSource)
    val fragmentShader = compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource)
    if (vertexShader <= 0 || fragmentShader <= 0) {
        if (vertexShader > 0) GLES20.glDeleteShader(vertexShader)
        if (fragmentShader > 0) GLES20.glDeleteShader(fragmentShader)
        return 0
    }
    val program = GLES20.glCreateProgram()
    if (program <= 0) {
        // Don't leak the compiled shaders when program allocation fails.
        GLES20.glDeleteShader(vertexShader)
        GLES20.glDeleteShader(fragmentShader)
        return 0
    }
    GLES20.glAttachShader(program, vertexShader)
    GLES20.glAttachShader(program, fragmentShader)
    GLES20.glLinkProgram(program)
    val status = IntArray(1)
    GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, status, 0)
    // Shaders are no longer needed as standalone objects after the link attempt.
    GLES20.glDeleteShader(vertexShader)
    GLES20.glDeleteShader(fragmentShader)
    if (status[0] != GLES20.GL_TRUE) {
        Log.w(TAG, "Failed to link blit program: ${GLES20.glGetProgramInfoLog(program)}")
        GLES20.glDeleteProgram(program)
        return 0
    }
    return program
}
/**
 * Compiles a single shader of the given [type] from GLSL [source].
 *
 * @return the shader handle on success, 0 on failure (failure is logged).
 */
private fun compileShader(type: Int, source: String): Int {
    val handle = GLES20.glCreateShader(type)
    if (handle <= 0) return 0
    GLES20.glShaderSource(handle, source)
    GLES20.glCompileShader(handle)
    val compileStatus = IntArray(1)
    GLES20.glGetShaderiv(handle, GLES20.GL_COMPILE_STATUS, compileStatus, 0)
    if (compileStatus[0] == GLES20.GL_TRUE) return handle
    // Compile failed: log the driver's info log and free the shader object.
    Log.w(TAG, "Failed to compile shader: ${GLES20.glGetShaderInfoLog(handle)}")
    GLES20.glDeleteShader(handle)
    return 0
}
/**
 * Enables FaceUnity's asynchronous texture output mode; failures are logged
 * and swallowed (best-effort configuration).
 */
private fun applyTextureOutputMode() {
    runCatching { faceunity.fuSetUseTexAsync(1) }
        .onFailure { Log.w(TAG, "Failed to configure texture output mode", it) }
}
companion object {
    // Full-screen quad, interleaved (x, y, u, v) per vertex, triangle-strip order.
    private val BLIT_QUAD = floatArrayOf(
        -1f, -1f, 0f, 0f,
        1f, -1f, 1f, 0f,
        -1f, 1f, 0f, 1f,
        1f, 1f, 1f, 1f,
    )
    // Pass-through vertex shader: position straight to clip space, UV forwarded.
    private const val BLIT_VERTEX_SHADER = """
attribute vec4 aPosition;
attribute vec2 aTextureCoord;
varying vec2 vTextureCoord;
void main() {
gl_Position = aPosition;
vTextureCoord = aTextureCoord;
}
"""
    // Pass-through fragment shader: sample the bound 2D texture unmodified.
    private const val BLIT_FRAGMENT_SHADER = """
precision mediump float;
uniform sampler2D uTexture;
varying vec2 vTextureCoord;
void main() {
gl_FragColor = texture2D(uTexture, vTextureCoord);
}
"""
}
}

View File

@@ -2,9 +2,12 @@ package com.demo.SellyCloudSDK.beauty
import android.content.Context
import android.util.Log
import com.pedro.encoder.input.gl.render.filters.BaseFilterRender
import com.pedro.encoder.input.video.CameraHelper
import com.sellycloud.sellycloudsdk.VideoFrameInterceptor
import com.sellycloud.sellycloudsdk.VideoProcessFormat
import com.sellycloud.sellycloudsdk.VideoProcessMode
import com.sellycloud.sellycloudsdk.VideoProcessor
import com.sellycloud.sellycloudsdk.VideoProcessorConfig
import com.sellycloud.sellycloudsdk.VideoTextureFrame
import com.sellycloud.sellycloudsdk.beauty.BeautyEngine
/**
@@ -16,8 +19,6 @@ class FaceUnityBeautyEngine : BeautyEngine {
private val tag = "FaceUnityBeautyEng"
private var renderer: FURenderer? = null
private var filter: FUBeautyFilterRender? = null
private var whipInterceptor: FuVideoFrameInterceptor? = null
private var initialized = false
private var enabled = true
@@ -31,15 +32,6 @@ class FaceUnityBeautyEngine : BeautyEngine {
val fuRenderer = FURenderer(appCtx).also { it.setup() }
renderer = fuRenderer
filter = FUBeautyFilterRender(fuRenderer).apply {
setBeautyEnabled(enabled)
setCameraFacing(currentFacing)
}
whipInterceptor = FuVideoFrameInterceptor(fuRenderer).apply {
setFrontCamera(currentFacing == CameraHelper.Facing.FRONT)
}
applyIntensity()
initialized = true
Log.d(tag, "FaceUnity beauty engine initialized")
@@ -49,19 +41,40 @@ class FaceUnityBeautyEngine : BeautyEngine {
}
}
override fun obtainFilter(): BaseFilterRender? {
override fun createProcessor(): VideoProcessor? {
applyIntensity()
return filter
}
val textureRenderer = renderer ?: return null
return object : VideoProcessor {
override val config: VideoProcessorConfig = VideoProcessorConfig(
preferredFormat = VideoProcessFormat.TEXTURE_2D,
mode = VideoProcessMode.READ_WRITE,
fullRewrite = true
)
override fun obtainWhipInterceptor(): VideoFrameInterceptor? {
applyIntensity()
return whipInterceptor
override fun onGlContextCreated() {
textureRenderer.reinitializeGlContextOnCurrentThread()
applyIntensity()
}
override fun onGlContextDestroyed() {
textureRenderer.releaseGlContextOnCurrentThread()
}
override fun processTexture(input: VideoTextureFrame, outputTextureId: Int) {
if (!enabled || outputTextureId <= 0) return
textureRenderer.renderProcessedTextureToOutput(
inputTex = input.textureId,
outputTextureId = outputTextureId,
width = input.width,
height = input.height,
facing = currentFacing
)
}
}
}
override fun setEnabled(enabled: Boolean) {
this.enabled = enabled
filter?.setBeautyEnabled(enabled)
}
override fun setIntensity(intensity: Double) {
@@ -71,8 +84,6 @@ class FaceUnityBeautyEngine : BeautyEngine {
override fun onCameraFacingChanged(facing: CameraHelper.Facing) {
currentFacing = facing
filter?.setCameraFacing(facing)
whipInterceptor?.setFrontCamera(facing == CameraHelper.Facing.FRONT)
}
override fun onBeforeGlContextRelease() {
@@ -90,11 +101,8 @@ class FaceUnityBeautyEngine : BeautyEngine {
}
override fun release() {
kotlin.runCatching { filter?.release() }
kotlin.runCatching { renderer?.release() }
filter = null
renderer = null
whipInterceptor = null
initialized = false
}

View File

@@ -6,157 +6,215 @@ import com.faceunity.core.enumeration.CameraFacingEnum
import com.faceunity.core.enumeration.FUExternalInputEnum
import com.faceunity.core.enumeration.FUInputBufferEnum
import com.faceunity.core.enumeration.FUTransformMatrixEnum
import com.sellycloud.sellycloudsdk.VideoFrameInterceptor
import com.sellycloud.sellycloudsdk.SellyVideoFrame
import org.webrtc.JavaI420Buffer
import org.webrtc.VideoFrame
import java.nio.ByteBuffer
/**
* 将 WebRTC 采集的 I420 帧交给 FaceUnity 进行美颜,返回处理后的 NV21 帧
* 最小化侵入:当 SDK 未就绪或出错时,返回 null 让上游透传原始帧
*
* 重要:此拦截器不管理传入帧的生命周期,只负责创建新的处理后帧。
* 将 I420 帧交给 FaceUnity 进行美颜处理
* live 推流走 SDK 的 [SellyVideoFrame];互动 RTC 仍保留 WebRTC [VideoFrame] 的便捷重载
*/
class FuVideoFrameInterceptor(
private val fuRenderer: FURenderer
) : VideoFrameInterceptor {
) {
private val tag = "FuVideoFrameInt"
@Volatile private var isFrontCamera: Boolean = true
@Volatile private var enabled: Boolean = true
fun setFrontCamera(front: Boolean) { isFrontCamera = front }
fun setEnabled(enable: Boolean) { enabled = enable }
override fun process(frame: VideoFrame): VideoFrame? {
fun process(frame: SellyVideoFrame): SellyVideoFrame? {
if (!enabled) return null
val kit = fuRenderer.fuRenderKit
if (!fuRenderer.isAuthSuccess || kit == null) return null
val src = frame.buffer
// 兼容部分 webrtc 版本中 toI420 可能标注为可空的情况
val i420Maybe = try { src.toI420() } catch (_: Throwable) { null }
val i420 = i420Maybe ?: return null
val i420 = frame.buffer as? SellyVideoFrame.I420Buffer ?: return null
val width = i420.width
val height = i420.height
if (width == 0 || height == 0) return null
return try {
val i420Bytes = toI420Bytes(
width = width,
height = height,
dataY = i420.dataY,
strideY = i420.strideY,
dataU = i420.dataU,
strideU = i420.strideU,
dataV = i420.dataV,
strideV = i420.strideV
)
val outI420 = renderI420(width, height, i420Bytes) ?: return null
SellyVideoFrame(fromI420BytesToSellyI420(outI420, width, height), frame.rotation, frame.timestampNs)
} catch (t: Throwable) {
Log.w(tag, "beauty failed: ${t.message}")
null
}
}
fun process(frame: VideoFrame): VideoFrame? {
if (!enabled) return null
val kit = fuRenderer.fuRenderKit
if (!fuRenderer.isAuthSuccess || kit == null) return null
val i420 = try { frame.buffer.toI420() } catch (_: Throwable) { null } ?: return null
return try {
val width = i420.width
val height = i420.height
if (width == 0 || height == 0) return null
val i420Bytes = toI420Bytes(i420)
val inputData = FURenderInputData(width, height).apply {
imageBuffer = FURenderInputData.FUImageBuffer(
FUInputBufferEnum.FU_FORMAT_I420_BUFFER,
i420Bytes
)
renderConfig.apply {
externalInputType = FUExternalInputEnum.EXTERNAL_INPUT_TYPE_IMAGE
if (isFrontCamera) {
cameraFacing = CameraFacingEnum.CAMERA_FRONT
inputTextureMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
inputBufferMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
outputMatrix = FUTransformMatrixEnum.CCROT0
} else {
cameraFacing = CameraFacingEnum.CAMERA_BACK
inputTextureMatrix = FUTransformMatrixEnum.CCROT0
inputBufferMatrix = FUTransformMatrixEnum.CCROT0
outputMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
}
isNeedBufferReturn = true
}
}
val output = kit.renderWithInput(inputData)
val outImage = output.image ?: return null
val outI420 = outImage.buffer ?: return null
if (outI420.isEmpty()) return null
// 安全:将 I420 字节填充到 JavaI420Buffer避免手写 NV21 转换越界
val jbuf = fromI420BytesToJavaI420(outI420, width, height)
VideoFrame(jbuf, frame.rotation, frame.timestampNs)
val i420Bytes = toI420Bytes(
width = width,
height = height,
dataY = i420.dataY,
strideY = i420.strideY,
dataU = i420.dataU,
strideU = i420.strideU,
dataV = i420.dataV,
strideV = i420.strideV
)
val outI420 = renderI420(width, height, i420Bytes) ?: return null
VideoFrame(fromI420BytesToJavaI420(outI420, width, height), frame.rotation, frame.timestampNs)
} catch (t: Throwable) {
Log.w(tag, "beauty failed: ${t.message}")
null
} finally {
// 只释放我们创建的 I420Buffer不释放原始 frame
try { i420.release() } catch (_: Throwable) {}
}
}
private fun toI420Bytes(i420: VideoFrame.I420Buffer): ByteArray {
val w = i420.width
val h = i420.height
val ySize = w * h
val uvW = (w + 1) / 2
val uvH = (h + 1) / 2
val uSize = uvW * uvH
val vSize = uSize
val out = ByteArray(ySize + uSize + vSize)
val yBuf = i420.dataY
val uBuf = i420.dataU
val vBuf = i420.dataV
val yStride = i420.strideY
val uStride = i420.strideU
val vStride = i420.strideV
// copy Y
/**
 * Runs one packed-I420 frame through the FaceUnity render kit and returns the
 * processed frame as packed I420 bytes, or null when the kit is unavailable or
 * returns an empty buffer (callers treat null as "pass the original through").
 */
private fun renderI420(width: Int, height: Int, i420Bytes: ByteArray): ByteArray? {
    val inputData = FURenderInputData(width, height).apply {
        imageBuffer = FURenderInputData.FUImageBuffer(
            FUInputBufferEnum.FU_FORMAT_I420_BUFFER,
            i420Bytes
        )
        renderConfig.apply {
            externalInputType = FUExternalInputEnum.EXTERNAL_INPUT_TYPE_IMAGE
            // NOTE(review): the matrices assume front-camera frames need a
            // vertical flip on input (and back-camera on output) — confirm
            // against the capture pipeline's frame orientation.
            if (isFrontCamera) {
                cameraFacing = CameraFacingEnum.CAMERA_FRONT
                inputTextureMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
                inputBufferMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
                outputMatrix = FUTransformMatrixEnum.CCROT0
            } else {
                cameraFacing = CameraFacingEnum.CAMERA_BACK
                inputTextureMatrix = FUTransformMatrixEnum.CCROT0
                inputBufferMatrix = FUTransformMatrixEnum.CCROT0
                outputMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
            }
            // Ask FaceUnity to hand back a CPU buffer, not only a GL texture.
            isNeedBufferReturn = true
        }
    }
    val output = fuRenderer.fuRenderKit?.renderWithInput(inputData) ?: return null
    val outImage = output.image ?: return null
    val outI420 = outImage.buffer ?: return null
    return outI420.takeIf { it.isNotEmpty() }
}
private fun toI420Bytes(
width: Int,
height: Int,
dataY: ByteBuffer,
strideY: Int,
dataU: ByteBuffer,
strideU: Int,
dataV: ByteBuffer,
strideV: Int
): ByteArray {
val ySize = width * height
val uvWidth = (width + 1) / 2
val uvHeight = (height + 1) / 2
val uSize = uvWidth * uvHeight
val out = ByteArray(ySize + uSize * 2)
var dst = 0
for (j in 0 until h) {
val srcPos = j * yStride
yBuf.position(srcPos)
yBuf.get(out, dst, w)
dst += w
for (row in 0 until height) {
val srcBase = row * strideY
for (col in 0 until width) {
out[dst++] = dataY.get(srcBase + col)
}
}
// copy U
for (j in 0 until uvH) {
val srcPos = j * uStride
uBuf.position(srcPos)
uBuf.get(out, ySize + j * uvW, uvW)
for (row in 0 until uvHeight) {
val srcBase = row * strideU
for (col in 0 until uvWidth) {
out[dst++] = dataU.get(srcBase + col)
}
}
// copy V
for (j in 0 until uvH) {
val srcPos = j * vStride
vBuf.position(srcPos)
vBuf.get(out, ySize + uSize + j * uvW, uvW)
for (row in 0 until uvHeight) {
val srcBase = row * strideV
for (col in 0 until uvWidth) {
out[dst++] = dataV.get(srcBase + col)
}
}
return out
}
// 将连续 I420 字节拷贝到 JavaI420Buffer
private fun fromI420BytesToJavaI420(i420: ByteArray, width: Int, height: Int): JavaI420Buffer {
private fun fromI420BytesToSellyI420(i420: ByteArray, width: Int, height: Int): SellyVideoFrame.I420Buffer {
val ySize = width * height
val uvW = (width + 1) / 2
val uvH = (height + 1) / 2
val uSize = uvW * uvH
val uvWidth = (width + 1) / 2
val uvHeight = (height + 1) / 2
val uSize = uvWidth * uvHeight
val vSize = uSize
require(i420.size >= ySize + uSize + vSize) { "I420 buffer too small: ${i420.size}" }
val buf = JavaI420Buffer.allocate(width, height)
val y = buf.dataY
val u = buf.dataU
val v = buf.dataV
val yStride = buf.strideY
val uStride = buf.strideU
val vStride = buf.strideV
// 拷贝 Y
val buffer = SellyVideoFrame.allocateI420Buffer(width, height)
val y = buffer.dataY
val u = buffer.dataU
val v = buffer.dataV
var src = 0
for (j in 0 until height) {
y.position(j * yStride)
for (row in 0 until height) {
y.position(row * buffer.strideY)
y.put(i420, src, width)
src += width
}
// 拷贝 U
var uSrc = ySize
for (j in 0 until uvH) {
u.position(j * uStride)
u.put(i420, uSrc, uvW)
uSrc += uvW
for (row in 0 until uvHeight) {
u.position(row * buffer.strideU)
u.put(i420, src, uvWidth)
src += uvWidth
}
// 拷贝 V
var vSrc = ySize + uSize
for (j in 0 until uvH) {
v.position(j * vStride)
v.put(i420, vSrc, uvW)
vSrc += uvW
for (row in 0 until uvHeight) {
v.position(row * buffer.strideV)
v.put(i420, src, uvWidth)
src += uvWidth
}
return buf
return buffer
}
/**
 * Copies a packed I420 byte array (Y plane, then U, then V; tightly packed)
 * into a freshly allocated [JavaI420Buffer], honoring each plane's stride.
 *
 * @throws IllegalArgumentException if [i420] is smaller than the three planes require.
 */
private fun fromI420BytesToJavaI420(i420: ByteArray, width: Int, height: Int): JavaI420Buffer {
    val lumaSize = width * height
    val chromaWidth = (width + 1) / 2
    val chromaHeight = (height + 1) / 2
    val chromaSize = chromaWidth * chromaHeight
    require(i420.size >= lumaSize + 2 * chromaSize) { "I420 buffer too small: ${i420.size}" }
    val target = JavaI420Buffer.allocate(width, height)
    var offset = 0
    // Copy one plane row by row, advancing through the packed source while
    // respecting the destination buffer's row stride.
    fun copyPlane(dst: ByteBuffer, stride: Int, rows: Int, cols: Int) {
        repeat(rows) { row ->
            dst.position(row * stride)
            dst.put(i420, offset, cols)
            offset += cols
        }
    }
    copyPlane(target.dataY, target.strideY, height, width)
    copyPlane(target.dataU, target.strideU, chromaHeight, chromaWidth)
    copyPlane(target.dataV, target.strideV, chromaHeight, chromaWidth)
    return target
}
}

View File

@@ -65,17 +65,15 @@ class InteractiveForegroundService : Service() {
}
private fun ensureChannel() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
val manager = getSystemService(NotificationManager::class.java) ?: return
val existing = manager.getNotificationChannel(CHANNEL_ID)
if (existing == null) {
val channel = NotificationChannel(
CHANNEL_ID,
"Interactive Call",
NotificationManager.IMPORTANCE_LOW
)
manager.createNotificationChannel(channel)
}
val manager = getSystemService(NotificationManager::class.java) ?: return
val existing = manager.getNotificationChannel(CHANNEL_ID)
if (existing == null) {
val channel = NotificationChannel(
CHANNEL_ID,
"Interactive Call",
NotificationManager.IMPORTANCE_LOW
)
manager.createNotificationChannel(channel)
}
}

View File

@@ -14,10 +14,13 @@ import androidx.activity.result.contract.ActivityResultContracts
import androidx.appcompat.app.AppCompatActivity
import androidx.core.content.ContextCompat
import androidx.core.view.isVisible
import androidx.lifecycle.lifecycleScope
import com.demo.SellyCloudSDK.KiwiHelper
import com.demo.SellyCloudSDK.R
import com.demo.SellyCloudSDK.beauty.FURenderer
import com.demo.SellyCloudSDK.beauty.FuVideoFrameInterceptor
import com.demo.SellyCloudSDK.avdemo.AvDemoSettingsStore
import com.demo.SellyCloudSDK.beauty.FaceUnityBeautyEngine
import com.demo.SellyCloudSDK.databinding.ActivityInteractiveLiveBinding
import com.pedro.encoder.input.video.CameraHelper
import com.sellycloud.sellycloudsdk.interactive.CallType
import com.sellycloud.sellycloudsdk.interactive.InteractiveCallConfig
import com.sellycloud.sellycloudsdk.interactive.InteractiveChannelMediaOptions
@@ -30,18 +33,28 @@ import com.sellycloud.sellycloudsdk.interactive.InteractiveStreamStats
import com.sellycloud.sellycloudsdk.interactive.InteractiveVideoCanvas
import com.sellycloud.sellycloudsdk.interactive.InteractiveVideoEncoderConfig
import com.sellycloud.sellycloudsdk.interactive.RemoteState
import com.sellycloud.sellycloudsdk.render.RtcRenderTarget
import com.sellycloud.sellycloudsdk.render.SurfaceViewRtcTarget
import com.sellycloud.sellycloudsdk.render.TextureViewRtcTarget
import android.view.TextureView
import android.view.View
import kotlinx.coroutines.launch
import org.webrtc.SurfaceViewRenderer
import java.util.Locale
class InteractiveLiveActivity : AppCompatActivity() {
private lateinit var binding: ActivityInteractiveLiveBinding
private lateinit var settingsStore: AvDemoSettingsStore
private var useTextureView: Boolean = false
private var rtcEngine: InteractiveRtcEngine? = null
private var lockedCallType: CallType? = null
private var localRenderer: SurfaceViewRenderer? = null
private var localRenderTarget: RtcRenderTarget? = null
private var localRenderView: View? = null
private lateinit var localSlot: VideoSlot
private lateinit var remoteSlots: List<VideoSlot>
private val remoteRendererMap = mutableMapOf<String, SurfaceViewRenderer>()
private val remoteRenderMap = mutableMapOf<String, Pair<View, RtcRenderTarget>>()
private var isLocalPreviewEnabled = true
private var isLocalAudioEnabled = true
private var isSpeakerOn = true
@@ -52,8 +65,14 @@ class InteractiveLiveActivity : AppCompatActivity() {
private var currentConnectionState: InteractiveConnectionState = InteractiveConnectionState.Disconnected
private var callDurationSeconds: Long = 0
private var lastMessage: String? = null
private var beautyRenderer: FURenderer? = null
private var fuFrameInterceptor: FuVideoFrameInterceptor? = null
private var beautyEngine: FaceUnityBeautyEngine? = null
private val defaultCameraVideoConfig = InteractiveVideoEncoderConfig(
640,
480,
fps = 20,
minBitrateKbps = 150,
maxBitrateKbps = 850
)
@Volatile private var isFrontCamera = true
@Volatile private var beautyEnabled: Boolean = true
@Volatile private var isLocalVideoEnabled: Boolean = true
@@ -68,6 +87,7 @@ class InteractiveLiveActivity : AppCompatActivity() {
private var currentCallId: String? = null
@Volatile private var selfUserId: String? = null
private var isScreenSharing: Boolean = false
@Volatile private var leaveInProgress: Boolean = false
private val permissionLauncher = registerForActivityResult(
ActivityResultContracts.RequestMultiplePermissions()
@@ -104,6 +124,8 @@ class InteractiveLiveActivity : AppCompatActivity() {
setDisplayHomeAsUpEnabled(true)
}
settingsStore = AvDemoSettingsStore(this)
useTextureView = settingsStore.read().renderBackendPreference.isTextureView()
setupVideoSlots()
initRtcEngine()
setupUiDefaults()
@@ -120,12 +142,12 @@ class InteractiveLiveActivity : AppCompatActivity() {
binding.btnSwitchCamera.setOnClickListener {
isFrontCamera = !isFrontCamera
fuFrameInterceptor?.setFrontCamera(isFrontCamera)
beautyEngine?.onCameraFacingChanged(currentCameraFacing())
rtcEngine?.switchCamera()
}
binding.btnToggleBeauty.setOnClickListener {
beautyEnabled = !beautyEnabled
fuFrameInterceptor?.setEnabled(beautyEnabled)
ensureBeautySessionReady()
updateControlButtons()
}
}
@@ -150,26 +172,26 @@ class InteractiveLiveActivity : AppCompatActivity() {
override fun onDestroy() {
super.onDestroy()
rtcEngine?.setCaptureVideoFrameInterceptor(null)
fuFrameInterceptor = null
rtcEngine?.setCaptureVideoProcessor(null)
remoteMediaState.clear()
// 捕获需要释放的引用,避免主线程阻塞导致 ANR
val engine = rtcEngine
val local = localRenderer
val remotes = remoteRendererMap.values.toList()
val beauty = beautyRenderer
val localTarget = localRenderTarget
val remoteTargets = remoteRenderMap.values.map { it.second }
val beauty = beautyEngine
rtcEngine = null
localRenderer = null
remoteRendererMap.clear()
beautyRenderer = null
localRenderTarget = null
localRenderView = null
remoteRenderMap.clear()
beautyEngine = null
// 重量级资源释放移到后台线程
Thread {
try { engine?.leaveChannel() } catch (_: Exception) {}
try { InteractiveRtcEngine.destroy(engine) } catch (_: Exception) {}
try { local?.release() } catch (_: Exception) {}
remotes.forEach { try { it.release() } catch (_: Exception) {} }
try { localTarget?.release() } catch (_: Exception) {}
remoteTargets.forEach { try { it.release() } catch (_: Exception) {} }
try { beauty?.release() } catch (_: Exception) {}
}.start()
}
@@ -180,32 +202,36 @@ class InteractiveLiveActivity : AppCompatActivity() {
}
private fun initRtcEngine() {
rtcEngine?.setCaptureVideoProcessor(null)
rtcEngine?.destroy()
rtcEngine = null
beautyEngine?.release()
beautyEngine = null
val appId = getString(R.string.signaling_app_id)
val token = getString(R.string.signaling_token).takeIf { it.isNotBlank() }
// Kiwi 代理后台获取rsName 为空时清除残留
val kiwiRsName = getString(R.string.signaling_kiwi_rsname).trim()
beautyRenderer = FURenderer(this).also { it.setup() }
fuFrameInterceptor = beautyRenderer?.let { FuVideoFrameInterceptor(it).apply {
setFrontCamera(isFrontCamera)
setEnabled(beautyEnabled)
} }
KiwiHelper.startProxySetup(kiwiRsName.isNotBlank(), kiwiRsName)
beautyEngine = FaceUnityBeautyEngine().also {
it.initialize(this)
it.setEnabled(beautyEnabled)
it.onCameraFacingChanged(currentCameraFacing())
}
rtcEngine = InteractiveRtcEngine.create(
InteractiveRtcEngineConfig(
context = applicationContext,
appId = appId,
defaultToken = token,
kiwiRsName = kiwiRsName
defaultToken = token
)
).apply {
setEventHandler(rtcEventHandler)
setClientRole(InteractiveRtcEngine.ClientRole.BROADCASTER)
// setVideoEncoderConfiguration(InteractiveVideoEncoderConfig()) 使用默认值
setVideoEncoderConfiguration(InteractiveVideoEncoderConfig(640, 480 , fps = 20, minBitrateKbps = 150, maxBitrateKbps = 850))
setVideoEncoderConfiguration(defaultCameraVideoConfig)
setDefaultAudioRoutetoSpeakerphone(true)
setCaptureVideoFrameInterceptor { frame ->
if (!beautyEnabled) return@setCaptureVideoFrameInterceptor frame
fuFrameInterceptor?.process(frame) ?: frame
}
}
ensureBeautySessionReady()
}
private val rtcEventHandler = object : InteractiveRtcEngineEventHandler {
@@ -227,6 +253,8 @@ class InteractiveLiveActivity : AppCompatActivity() {
override fun onLeaveChannel(durationSeconds: Int) {
Log.d(TAG, "回调onLeaveChannel duration=${durationSeconds}s")
runOnUiThread {
leaveInProgress = false
releaseLocalRenderTargetAsync()
resetUiAfterLeave()
}
}
@@ -320,12 +348,12 @@ class InteractiveLiveActivity : AppCompatActivity() {
runOnUiThread { handleRemoteAudioState(enabled, userId) }
}
override fun onStreamStateChanged(peerId: String, state: RemoteState, code: Int, message: String?) {
override fun onStreamStateChanged(userId: String, state: RemoteState, code: Int, message: String?) {
runOnUiThread {
val tip = "onStreamStateChanged[$peerId] state=$state code=$code ${message ?: ""}"
val tip = "onStreamStateChanged[$userId] state=$state code=$code ${message ?: ""}"
Log.d(TAG, tip)
Toast.makeText(this@InteractiveLiveActivity, tip, Toast.LENGTH_SHORT).show()
if (peerId == currentUserId && message?.contains("screen_share_stopped") == true) {
if (userId == currentUserId && message?.contains("screen_share_stopped") == true) {
isScreenSharing = false
updateControlButtons()
}
@@ -340,11 +368,13 @@ class InteractiveLiveActivity : AppCompatActivity() {
VideoSlot(binding.flRemote2, TileType.REMOTE),
VideoSlot(binding.flRemote3, TileType.REMOTE)
)
if (localRenderer == null) {
localRenderer = createRenderer()
if (localRenderView == null) {
val (view, target) = createRenderTarget()
localRenderView = view
localRenderTarget = target
}
localRenderer?.let { renderer ->
localSlot.layout.attachRenderer(renderer)
localRenderView?.let { view ->
localSlot.layout.attachRenderer(view)
}
resetVideoSlots(releaseRemotes = false)
binding.videoContainer.isVisible = false
@@ -476,9 +506,9 @@ class InteractiveLiveActivity : AppCompatActivity() {
}
private fun applyLocalPreviewVisibility() {
val renderer = localRenderer ?: createRenderer().also { localRenderer = it }
if (isLocalPreviewEnabled) {
localSlot.layout.attachRenderer(renderer)
val view = localRenderView ?: return
localSlot.layout.attachRenderer(view)
} else {
localSlot.layout.detachRenderer()
}
@@ -508,7 +538,15 @@ class InteractiveLiveActivity : AppCompatActivity() {
if (stopped) {
isScreenSharing = false
ensureBeautySessionReady()
fuFrameInterceptor?.setEnabled(beautyEnabled)
binding.root.post {
// The active call keeps the local preview target inside the SDK.
// During a live session we must not swap/release that target from the demo side.
applyDefaultCameraVideoConfig()
if (!isLocalVideoEnabled) {
rtcEngine?.enableLocalVideo(false)
}
applyLocalPreviewVisibility()
}
} else if (showToast) {
Toast.makeText(this, "停止屏幕共享失败", Toast.LENGTH_SHORT).show()
}
@@ -596,11 +634,24 @@ class InteractiveLiveActivity : AppCompatActivity() {
private fun executeJoin(request: JoinRequest) {
pendingJoinRequest = null
InteractiveForegroundService.start(this)
val renderer = localRenderer ?: createRenderer().also {
localRenderer = it
// 立即禁用按钮,防止 await 期间重复点击
setJoinButtonEnabled(false)
lifecycleScope.launch {
KiwiHelper.awaitProxyReady()
executeJoinInternal(request)
}
}
private fun executeJoinInternal(request: JoinRequest) {
applyDefaultCameraVideoConfig()
val target = localRenderTarget ?: run {
val (view, t) = createRenderTarget()
localRenderView = view
localRenderTarget = t
t
}
currentUserId = request.userId
rtcEngine?.setupLocalVideo(InteractiveVideoCanvas(renderer, request.userId))
rtcEngine?.setupLocalVideo(InteractiveVideoCanvas(target, request.userId))
ensureBeautySessionReady()
rtcEngine?.joinChannel(
request.token,
@@ -620,10 +671,13 @@ class InteractiveLiveActivity : AppCompatActivity() {
private fun ensureBeautySessionReady() {
try {
beautyRenderer?.releaseGlContext()
beautyRenderer?.reinitializeGlContext()
fuFrameInterceptor?.setEnabled(beautyEnabled)
fuFrameInterceptor?.setFrontCamera(isFrontCamera)
val engine = rtcEngine
val beauty = beautyEngine
beauty?.setEnabled(beautyEnabled)
beauty?.onCameraFacingChanged(currentCameraFacing())
engine?.setCaptureVideoProcessor(
if (beautyEnabled) beauty?.createProcessor() else null
)
} catch (_: Exception) {
}
}
@@ -658,8 +712,8 @@ class InteractiveLiveActivity : AppCompatActivity() {
private fun addRemoteTile(userId: String) {
remoteSlots.firstOrNull { it.userId == userId }?.let { existing ->
val renderer = ensureRemoteRenderer(userId)
existing.layout.attachRenderer(renderer)
val view = ensureRemoteRenderView(userId)
existing.layout.attachRenderer(view)
remoteSlots.filter { it.userId == userId && it !== existing }.forEach { extra ->
extra.userId = null
extra.layout.detachRenderer()
@@ -676,17 +730,19 @@ class InteractiveLiveActivity : AppCompatActivity() {
return
}
slot.userId = userId
val renderer = ensureRemoteRenderer(userId)
slot.layout.attachRenderer(renderer)
val view = ensureRemoteRenderView(userId)
slot.layout.attachRenderer(view)
updateSlotOverlay(slot)
binding.videoContainer.isVisible = true
}
private fun ensureRemoteRenderer(userId: String): SurfaceViewRenderer {
return remoteRendererMap[userId] ?: createRenderer().also { renderer ->
remoteRendererMap[userId] = renderer
rtcEngine?.setupRemoteVideo(InteractiveVideoCanvas(renderer, userId))
}
private fun ensureRemoteRenderView(userId: String): View {
val existing = remoteRenderMap[userId]
if (existing != null) return existing.first
val (view, target) = createRenderTarget()
remoteRenderMap[userId] = view to target
rtcEngine?.setupRemoteVideo(InteractiveVideoCanvas(target, userId))
return view
}
private fun removeRemoteTile(userId: String) {
@@ -697,27 +753,27 @@ class InteractiveLiveActivity : AppCompatActivity() {
updateSlotOverlay(slot)
}
val engine = rtcEngine
val renderer = remoteRendererMap.remove(userId)
val removed = remoteRenderMap.remove(userId)
remoteStats.remove(userId)
// SurfaceViewRenderer.release() 会死锁主线程,移到后台
// RtcRenderTarget.release() may block the main thread, move to background
Thread {
try { engine?.clearRemoteVideo(userId) } catch (_: Exception) {}
try { renderer?.release() } catch (_: Exception) {}
try { removed?.second?.release() } catch (_: Exception) {}
}.start()
}
private fun resetVideoSlots(releaseRemotes: Boolean = true) {
private fun resetVideoSlots(releaseRemotes: Boolean = true, reattachLocal: Boolean = true) {
if (releaseRemotes) {
val engine = rtcEngine
val remoteIds = remoteRendererMap.keys.toList()
val renderersToRelease = remoteIds.mapNotNull { remoteRendererMap.remove(it) }
val remoteIds = remoteRenderMap.keys.toList()
val targetsToRelease = remoteIds.mapNotNull { remoteRenderMap.remove(it)?.second }
remoteStats.clear()
// SurfaceViewRenderer.release() 会死锁主线程,移到后台
// RtcRenderTarget.release() may block the main thread, move to background
Thread {
remoteIds.forEach { userId ->
try { engine?.clearRemoteVideo(userId) } catch (_: Exception) {}
}
renderersToRelease.forEach { try { it.release() } catch (_: Exception) {} }
targetsToRelease.forEach { try { it.release() } catch (_: Exception) {} }
}.start()
}
remoteSlots.forEach { slot ->
@@ -726,9 +782,19 @@ class InteractiveLiveActivity : AppCompatActivity() {
updateSlotOverlay(slot)
}
localSlot.userId = currentUserId
val renderer = localRenderer ?: createRenderer().also { localRenderer = it }
if (!reattachLocal) {
localSlot.layout.detachRenderer()
updateSlotOverlay(localSlot)
return
}
val view = localRenderView ?: run {
val (v, t) = createRenderTarget()
localRenderView = v
localRenderTarget = t
v
}
if (isLocalPreviewEnabled) {
localSlot.layout.attachRenderer(renderer)
localSlot.layout.attachRenderer(view)
} else {
localSlot.layout.detachRenderer()
}
@@ -743,23 +809,29 @@ class InteractiveLiveActivity : AppCompatActivity() {
private fun displayId(userId: String): String = userId
private fun leaveChannel() {
// SDK 的 leaveChannel() 会同步停止 Whip/Whep 客户端,阻塞主线程
if (leaveInProgress) return
leaveInProgress = true
val engine = rtcEngine
Thread { try { engine?.leaveChannel() } catch (_: Exception) {} }.start()
resetUiAfterLeave()
currentConnectionState = InteractiveConnectionState.Disconnected
updateCallInfo()
setJoinButtonEnabled(false)
Thread {
try {
engine?.leaveChannel()
} catch (_: Exception) {
} finally {
runOnUiThread {
if (!leaveInProgress) return@runOnUiThread
leaveInProgress = false
releaseLocalRenderTargetAsync()
resetUiAfterLeave()
}
}
}.start()
}
private fun resetUiAfterLeave() {
currentCallId = null
resetVideoSlots()
binding.videoContainer.isVisible = false
binding.btnJoin.text = getString(R.string.join)
setJoinButtonEnabled(true)
isLocalPreviewEnabled = true
isLocalAudioEnabled = true
isSpeakerOn = true
beautyEnabled = true
fuFrameInterceptor?.setEnabled(true)
selfUserId = null
localStats = null
remoteStats.clear()
@@ -767,6 +839,16 @@ class InteractiveLiveActivity : AppCompatActivity() {
currentConnectionState = InteractiveConnectionState.Disconnected
callDurationSeconds = 0
lastMessage = null
resetVideoSlots(reattachLocal = false)
binding.videoContainer.isVisible = false
binding.btnJoin.text = getString(R.string.join)
setJoinButtonEnabled(!leaveInProgress)
isLocalPreviewEnabled = true
isLocalAudioEnabled = true
isSpeakerOn = true
isFrontCamera = true
isLocalVideoEnabled = true
beautyEnabled = true
binding.tvMessageLog.text = getString(R.string.message_none)
isScreenSharing = false
updateControlButtons()
@@ -774,16 +856,33 @@ class InteractiveLiveActivity : AppCompatActivity() {
updateCallInfo()
setJoinInputsVisible(true)
InteractiveForegroundService.stop(this)
initRtcEngine()
}
private fun createRenderer(): SurfaceViewRenderer = SurfaceViewRenderer(this).apply {
setZOrderMediaOverlay(false)
private fun currentCameraFacing(): CameraHelper.Facing {
return if (isFrontCamera) CameraHelper.Facing.FRONT else CameraHelper.Facing.BACK
}
private fun releaseRenderer(renderer: SurfaceViewRenderer) {
try {
renderer.release()
} catch (_: Exception) {}
private fun createRenderTarget(): Pair<View, RtcRenderTarget> {
return if (useTextureView) {
// Interactive demo owns these targets and releases them in onDestroy().
val tv = com.sellycloud.sellycloudsdk.widget.AspectRatioTextureView(this)
tv to TextureViewRtcTarget(tv, ownedBySdk = false)
} else {
val svr = SurfaceViewRenderer(this).apply { setZOrderMediaOverlay(false) }
svr to SurfaceViewRtcTarget(svr, ownedBySdk = false)
}
}
private fun releaseLocalRenderTargetAsync() {
val target = localRenderTarget ?: return
localRenderTarget = null
localRenderView = null
Thread { try { target.release() } catch (_: Exception) {} }.start()
}
private fun applyDefaultCameraVideoConfig() {
rtcEngine?.setVideoEncoderConfiguration(defaultCameraVideoConfig)
}
private fun hideKeyboard() {
@@ -838,7 +937,7 @@ class InteractiveLiveActivity : AppCompatActivity() {
val duration = if (callDurationSeconds > 0) {
val minutes = callDurationSeconds / 60
val seconds = callDurationSeconds % 60
String.format(" | 时长 %02d:%02d", minutes, seconds)
String.format(Locale.getDefault(), " | 时长 %02d:%02d", minutes, seconds)
} else {
""
}
@@ -849,7 +948,9 @@ class InteractiveLiveActivity : AppCompatActivity() {
val lines = mutableListOf(header)
val width = stats?.width?.takeIf { it > 0 }?.toString() ?: "--"
val height = stats?.height?.takeIf { it > 0 }?.toString() ?: "--"
val fpsText = stats?.fps?.takeIf { it > 0 }?.let { String.format("%.1f fps", it.toDouble()) } ?: "-- fps"
val fpsText = stats?.fps?.takeIf { it > 0 }?.let {
String.format(Locale.getDefault(), "%.1f fps", it.toDouble())
} ?: "-- fps"
lines += "Res:${width}x${height} $fpsText"
val videoCodec = stats?.videoCodec?.takeIf { it.isNotBlank() }
val audioCodec = stats?.audioCodec?.takeIf { it.isNotBlank() }
@@ -860,10 +961,16 @@ class InteractiveLiveActivity : AppCompatActivity() {
else -> null
}
codecLine?.let { lines += it }
val videoBitrate = stats?.videoBitrateKbps?.takeIf { it > 0 }?.let { String.format("%.0f", it.toDouble()) } ?: "--"
val audioBitrate = stats?.audioBitrateKbps?.takeIf { it > 0 }?.let { String.format("%.0f", it.toDouble()) } ?: "--"
val videoBitrate = stats?.videoBitrateKbps?.takeIf { it > 0 }?.let {
String.format(Locale.getDefault(), "%.0f", it.toDouble())
} ?: "--"
val audioBitrate = stats?.audioBitrateKbps?.takeIf { it > 0 }?.let {
String.format(Locale.getDefault(), "%.0f", it.toDouble())
} ?: "--"
lines += "Video:${videoBitrate}kbps Audio:${audioBitrate}kbps"
val rtt = stats?.rttMs?.takeIf { it > 0 }?.let { String.format("%.0fms", it.toDouble()) } ?: "--"
val rtt = stats?.rttMs?.takeIf { it > 0 }?.let {
String.format(Locale.getDefault(), "%.0fms", it.toDouble())
} ?: "--"
lines += "RTT:$rtt"
return lines.joinToString("\n")
}

View File

@@ -8,6 +8,7 @@ import android.content.Intent
import android.content.pm.PackageManager
import android.content.res.Configuration
import android.graphics.Bitmap
import com.sellycloud.sellycloudsdk.render.RenderBackend
import android.graphics.Color
import android.graphics.Typeface
import android.graphics.drawable.GradientDrawable
@@ -27,10 +28,13 @@ import android.widget.Toast
import androidx.activity.result.contract.ActivityResultContracts
import androidx.appcompat.app.AlertDialog
import androidx.appcompat.app.AppCompatActivity
import androidx.lifecycle.lifecycleScope
import androidx.appcompat.widget.AppCompatTextView
import androidx.core.content.ContextCompat
import coil.load
import com.demo.SellyCloudSDK.KiwiHelper
import com.demo.SellyCloudSDK.R
import com.demo.SellyCloudSDK.avdemo.AvDemoSettingsStore
import com.demo.SellyCloudSDK.databinding.ActivityLivePlayBinding
import com.demo.SellyCloudSDK.live.auth.LiveAuthHelper
import com.demo.SellyCloudSDK.live.auth.LiveTokenSigner
@@ -41,6 +45,9 @@ import com.demo.SellyCloudSDK.live.env.normalizedAppName
import com.demo.SellyCloudSDK.live.env.normalizedVhost
import com.demo.SellyCloudSDK.live.env.toLiveMode
import com.demo.SellyCloudSDK.live.util.GalleryImageSaver
import com.demo.SellyCloudSDK.playback.PlaybackProcessingPreset
import com.demo.SellyCloudSDK.playback.PlaybackTextureObserverDemo
import com.demo.SellyCloudSDK.playback.PlaybackTexturePatchProcessor
import com.sellycloud.sellycloudsdk.SellyLatencyChasingUpdate
import com.sellycloud.sellycloudsdk.SellyLiveMode
import com.sellycloud.sellycloudsdk.SellyLiveVideoPlayer
@@ -69,6 +76,7 @@ class LivePlayActivity : AppCompatActivity() {
private lateinit var playerClient: SellyLiveVideoPlayer
private lateinit var pipController: SellyPipController
private var useTextureView: Boolean = false
private var isPlaying: Boolean = false
private var isMuted: Boolean = false
private var previewImageUrl: String? = null
@@ -83,6 +91,10 @@ class LivePlayActivity : AppCompatActivity() {
private var lastLatencyChasingSpeed: Float? = null
private var lastLatencyChasingUpdate: SellyLatencyChasingUpdate? = null
private var hasReleasedPlayer: Boolean = false
private var logEnabled: Boolean = true
private var processingPreset: PlaybackProcessingPreset = PlaybackProcessingPreset.DIRECT
private var renderTargetRebindCount: Int = 0
private var lastRenderTargetRebindCostMs: Long? = null
private val logLines: ArrayDeque<String> = ArrayDeque()
private val logTimeFormat = SimpleDateFormat("HH:mm:ss.SSS", Locale.getDefault())
@@ -90,6 +102,14 @@ class LivePlayActivity : AppCompatActivity() {
private var logSummaryView: TextView? = null
private var logContentView: TextView? = null
private var logFloatingButton: View? = null
private var toolsFloatingButton: View? = null
private val playbackObserverDemo by lazy(LazyThreadSafetyMode.NONE) {
PlaybackTextureObserverDemo(::logEvent)
}
private val playbackPatchProcessor by lazy(LazyThreadSafetyMode.NONE) {
PlaybackTexturePatchProcessor(::logEvent)
}
private val storagePermissionLauncher = registerForActivityResult(
ActivityResultContracts.RequestPermission()
@@ -105,12 +125,15 @@ class LivePlayActivity : AppCompatActivity() {
setContentView(binding.root)
supportActionBar?.hide()
addLogFloatingButton()
addToolsFloatingButton()
envStore = LiveEnvSettingsStore(this)
useTextureView = AvDemoSettingsStore(this).read().renderBackendPreference.isTextureView()
pipController = SellyPipController(this)
val env = envStore.read().also { it.applyToSdkRuntimeConfig(this) }
logEnabled = env.logEnabled
args = Args.from(intent, env)
Log.d(TAG, "init liveMode=${args.liveMode} input=${args.streamIdOrUrl} autoStart=${args.autoStart}")
debugLog("init liveMode=${args.liveMode} input=${args.streamIdOrUrl} autoStart=${args.autoStart}")
setupPreview(args.previewImageUrl)
playerClient = createPlayerForArgs(args).also { client ->
@@ -190,6 +213,13 @@ class LivePlayActivity : AppCompatActivity() {
}
}
override fun onReconnectStateChanged(isReconnecting: Boolean, detail: String?) {
runOnUiThread {
val suffix = detail?.takeIf { it.isNotBlank() }?.let { ": $it" }.orEmpty()
logEvent(if (isReconnecting) "重连开始$suffix" else "重连结束$suffix")
}
}
override fun onError(error: com.sellycloud.sellycloudsdk.SellyLiveError) {
runOnUiThread {
logEvent("错误: ${error.message}")
@@ -206,10 +236,15 @@ class LivePlayActivity : AppCompatActivity() {
binding.actionScreenshot.setOnClickListener { captureCurrentFrame() }
binding.actionPip.setOnClickListener { enterPipMode() }
playerClient.attachRenderView(binding.renderContainer)
val backend = currentRenderBackend()
playerClient.attachRenderView(binding.renderContainer, backend)
logEvent("渲染目标已绑定: backend=${currentRenderBackendLabel()}, processing=${processingPreset.label}")
if (args.autoStart) {
startPlayback()
lifecycleScope.launch {
KiwiHelper.awaitProxyReady()
startPlayback()
}
}
}
@@ -258,7 +293,10 @@ class LivePlayActivity : AppCompatActivity() {
if (currentState == SellyPlayerState.Paused) {
playerClient.play()
} else {
startPlayback()
lifecycleScope.launch {
KiwiHelper.awaitProxyReady()
startPlayback()
}
}
}
@@ -270,7 +308,6 @@ class LivePlayActivity : AppCompatActivity() {
}
private fun enterPipMode() {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.O) return
if (!isPlaying) return
val renderView = playerClient.getRenderView() ?: binding.renderContainer
pipController.enterPictureInPictureMode(renderView)
@@ -314,7 +351,7 @@ class LivePlayActivity : AppCompatActivity() {
Toast.makeText(this, "生成 token 失败", Toast.LENGTH_SHORT).show()
return
}
Log.d(TAG, "startPlayback params liveMode=${args.liveMode} streamId=$channelId tokenPreview=${auth.tokenResult.tokenPreview}")
debugLog("startPlayback params liveMode=${args.liveMode} streamId=$channelId tokenPreview=${auth.tokenResult.tokenPreview}")
playerClient.token = auth.tokenResult.token
beginPlayback()
return
@@ -326,7 +363,7 @@ class LivePlayActivity : AppCompatActivity() {
return
}
if (input.contains("://")) {
Log.d(TAG, "startPlayback directUrl=$input")
debugLog("startPlayback directUrl=$input")
playerClient.token = null
beginPlayback()
return
@@ -346,7 +383,7 @@ class LivePlayActivity : AppCompatActivity() {
Toast.makeText(this, "生成 token 失败", Toast.LENGTH_SHORT).show()
return
}
Log.d(TAG, "startPlayback liveMode=${args.liveMode} streamId=$channelId tokenPreview=${auth.tokenResult.tokenPreview}")
debugLog("startPlayback liveMode=${args.liveMode} streamId=$channelId tokenPreview=${auth.tokenResult.tokenPreview}")
playerClient.token = auth.tokenResult.token
beginPlayback()
}
@@ -408,6 +445,7 @@ class LivePlayActivity : AppCompatActivity() {
binding.controlBar.visibility = controlsVisibility
binding.btnClose.visibility = controlsVisibility
logFloatingButton?.visibility = controlsVisibility
toolsFloatingButton?.visibility = controlsVisibility
if (isInPip) {
binding.ivPreview.visibility = View.GONE
} else {
@@ -435,8 +473,22 @@ class LivePlayActivity : AppCompatActivity() {
Toast.makeText(this, "视图尚未布局完成,稍后再试", Toast.LENGTH_SHORT).show()
return
}
if (view is android.view.TextureView) {
val bmp = view.getBitmap()
if (bmp == null) {
Toast.makeText(this, "TextureView 尚未渲染画面", Toast.LENGTH_SHORT).show()
return
}
uiScope.launch(Dispatchers.IO) {
val ok = saveBitmapToGallery(bmp, prefix)
launch(Dispatchers.Main) {
Toast.makeText(this@LivePlayActivity, if (ok) "截图已保存到相册" else "保存失败", Toast.LENGTH_SHORT).show()
}
}
return
}
if (view !is android.view.SurfaceView) {
Toast.makeText(this, "当前视图不支持截图", Toast.LENGTH_SHORT).show()
Toast.makeText(this, "当前视图类型不支持截图", Toast.LENGTH_SHORT).show()
return
}
val bmp = Bitmap.createBitmap(view.width, view.height, Bitmap.Config.ARGB_8888)
@@ -512,6 +564,99 @@ class LivePlayActivity : AppCompatActivity() {
logFloatingButton = button
}
private fun addToolsFloatingButton() {
val sizePx = dpToPx(44)
val marginEndPx = dpToPx(72)
val controlBarHeight = resources.getDimensionPixelSize(R.dimen.av_control_bar_height)
val marginBottomPx = controlBarHeight + dpToPx(16)
val bgDrawable = GradientDrawable(GradientDrawable.Orientation.TOP_BOTTOM, intArrayOf(
Color.parseColor("#B33B0764"),
Color.parseColor("#803B0764")
)).apply {
shape = GradientDrawable.OVAL
setStroke(dpToPx(1), Color.parseColor("#55FFFFFF"))
}
val button = AppCompatTextView(this).apply {
text = ""
setTextColor(Color.parseColor("#F8FAFC"))
textSize = 11f
gravity = Gravity.CENTER
background = bgDrawable
elevation = dpToPx(4).toFloat()
setShadowLayer(2f, 0f, 1f, Color.parseColor("#66000000"))
isClickable = true
isFocusable = true
contentDescription = "播放处理与回归工具"
setOnClickListener { showPlaybackToolsDialog() }
}
val params = FrameLayout.LayoutParams(sizePx, sizePx).apply {
gravity = Gravity.END or Gravity.BOTTOM
marginEnd = marginEndPx
bottomMargin = marginBottomPx
}
addContentView(button, params)
toolsFloatingButton = button
}
private fun showPlaybackToolsDialog() {
val container = LinearLayout(this).apply {
orientation = LinearLayout.VERTICAL
setPadding(dpToPx(20), dpToPx(16), dpToPx(20), dpToPx(8))
}
val summary = TextView(this).apply {
text = "当前后端: ${currentRenderBackendLabel()}\n" +
"当前协议: ${args.liveMode.name}\n" +
"当前模式: ${processingPreset.label}\n" +
"说明: processing 仅支持 RTMP + TextureView。"
setTextColor(Color.parseColor("#E5E7EB"))
textSize = 13f
}
container.addView(summary)
container.addView(spaceView(dpToPx(12)))
container.addView(createToolActionButton("切换 DIRECT 直出") {
applyPlaybackProcessingPreset(PlaybackProcessingPreset.DIRECT, trigger = "工具面板")
})
container.addView(createToolActionButton("切换 PROCESSING Observer") {
applyPlaybackProcessingPreset(PlaybackProcessingPreset.OBSERVER, trigger = "工具面板")
})
container.addView(createToolActionButton("切换 PROCESSING 红块 Processor") {
applyPlaybackProcessingPreset(PlaybackProcessingPreset.PROCESSOR, trigger = "工具面板")
})
container.addView(createToolActionButton("仅重绑当前目标") {
rebindRenderTarget("手动回归")
})
AlertDialog.Builder(this)
.setTitle("播放处理 / 目标重绑")
.setView(container)
.setNegativeButton("关闭", null)
.show()
}
private fun createToolActionButton(label: String, onClick: () -> Unit): View {
return AppCompatTextView(this).apply {
text = label
gravity = Gravity.CENTER
textSize = 14f
setTextColor(Color.parseColor("#F8FAFC"))
background = GradientDrawable().apply {
cornerRadius = dpToPx(10).toFloat()
setColor(Color.parseColor("#334155"))
setStroke(dpToPx(1), Color.parseColor("#475569"))
}
setPadding(dpToPx(12), dpToPx(12), dpToPx(12), dpToPx(12))
isClickable = true
isFocusable = true
setOnClickListener { onClick() }
layoutParams = LinearLayout.LayoutParams(
ViewGroup.LayoutParams.MATCH_PARENT,
ViewGroup.LayoutParams.WRAP_CONTENT
).apply {
bottomMargin = dpToPx(10)
}
}
}
private fun showLogDialog() {
if (logDialog?.isShowing == true) {
refreshLogDialogContent()
@@ -645,10 +790,20 @@ class LivePlayActivity : AppCompatActivity() {
builder.append("streamName: ").append(params.streamName).append('\n')
}
builder.append("当前状态: ").append(formatState(currentState)).append('\n')
builder.append("渲染后端: ").append(currentRenderBackendLabel()).append('\n')
builder.append("播放处理: ").append(processingPreset.label).append('\n')
builder.append("是否播放中: ").append(if (isPlaying) "" else "").append('\n')
builder.append("是否静音: ").append(if (isMuted) "" else "").append('\n')
builder.append("首帧视频耗时(ms): ").append(firstVideoFrameCostMs ?: "未统计").append('\n')
builder.append("首帧音频耗时(ms): ").append(firstAudioFrameCostMs ?: "未统计").append('\n')
builder.append("目标重绑次数: ").append(renderTargetRebindCount).append('\n')
builder.append("最近重绑耗时(ms): ").append(lastRenderTargetRebindCostMs ?: "未统计").append('\n')
val processingDetail = when (processingPreset) {
PlaybackProcessingPreset.DIRECT -> "processing: 关闭"
PlaybackProcessingPreset.OBSERVER -> playbackObserverDemo.summary()
PlaybackProcessingPreset.PROCESSOR -> playbackPatchProcessor.summary()
}
builder.append(processingDetail).append('\n')
val attemptElapsed = playAttemptStartElapsedMs?.let { SystemClock.elapsedRealtime() - it }
if (attemptElapsed == null) {
builder.append("本次播放已耗时(ms): 未开始").append('\n')
@@ -717,7 +872,81 @@ class LivePlayActivity : AppCompatActivity() {
isLatencyChasingActive = false
lastLatencyChasingSpeed = null
lastLatencyChasingUpdate = null
logEvent("播放尝试开始")
logEvent("播放尝试开始: backend=${currentRenderBackendLabel()}, processing=${processingPreset.label}")
}
/**
 * Switches the playback processing preset, rejecting unsupported
 * combinations (non-RTMP live mode, SurfaceView backend) before applying
 * the new mode and rebinding the render target so it takes effect.
 */
private fun applyPlaybackProcessingPreset(preset: PlaybackProcessingPreset, trigger: String) {
    if (preset == processingPreset) {
        // Already active — report and bail out.
        logEvent("播放处理保持不变: ${preset.label}, trigger=$trigger")
        Toast.makeText(this, "当前已是 ${preset.label}", Toast.LENGTH_SHORT).show()
        return
    }
    val wantsProcessing = preset != PlaybackProcessingPreset.DIRECT
    if (wantsProcessing && args.liveMode != SellyLiveMode.RTMP) {
        logEvent("播放处理切换被拒绝: liveMode=${args.liveMode.name} 当前仅支持 RTMP")
        Toast.makeText(this, "当前 demo 仅支持 RTMP 播放 processing", Toast.LENGTH_SHORT).show()
        return
    }
    if (wantsProcessing && !useTextureView) {
        logEvent("播放处理切换被拒绝: backend=${currentRenderBackendLabel()} 不支持 ${preset.label}")
        Toast.makeText(this, "播放 processing 仅支持 TextureView 后端", Toast.LENGTH_SHORT).show()
        return
    }
    processingPreset = preset
    configurePlaybackProcessing()
    logEvent("播放处理切换: mode=${preset.label}, trigger=$trigger")
    rebindRenderTarget("processing_${preset.name.lowercase(Locale.US)}")
    Toast.makeText(this, "已切到 ${preset.label}", Toast.LENGTH_SHORT).show()
}
/**
 * Installs the hook matching [processingPreset] on the player client.
 * At most one of the frame-observer / video-processor hooks is active:
 * the unused hook is cleared before the active one is set, so both are
 * never registered at the same time.
 */
private fun configurePlaybackProcessing() {
    when (processingPreset) {
        PlaybackProcessingPreset.DIRECT -> {
            // Processing disabled: clear both hooks.
            playerClient.setPlaybackFrameObserver(null)
            playerClient.setPlaybackVideoProcessor(null)
        }
        PlaybackProcessingPreset.OBSERVER -> {
            // Clear the processor first, then attach the read-only observer.
            playerClient.setPlaybackVideoProcessor(null)
            playerClient.setPlaybackFrameObserver(playbackObserverDemo)
        }
        PlaybackProcessingPreset.PROCESSOR -> {
            // Clear the observer first, then attach the read-write processor.
            playerClient.setPlaybackFrameObserver(null)
            playerClient.setPlaybackVideoProcessor(playbackPatchProcessor)
        }
    }
}
/**
 * Detaches and re-attaches the player's render view on
 * [binding.renderContainer], resuming playback if a play attempt was in
 * flight. Tracks the rebind count and wall-clock cost for the stats panel.
 *
 * Call order matters: the old target must be cleared before attaching the
 * new view, and prepareToPlay() must precede play().
 */
private fun rebindRenderTarget(reason: String) {
    // No-op once the player has been released (activity teardown).
    if (hasReleasedPlayer) return
    // Playing/Connecting/Reconnecting all represent a live attempt to resume.
    val shouldResumePlayback = currentState == SellyPlayerState.Playing ||
        currentState == SellyPlayerState.Connecting ||
        currentState == SellyPlayerState.Reconnecting
    val startedAtMs = SystemClock.elapsedRealtime()
    val backend = currentRenderBackend()
    logEvent("目标重绑开始: reason=$reason, backend=${currentRenderBackendLabel()}, processing=${processingPreset.label}")
    playerClient.clearRenderTarget()
    playerClient.attachRenderView(binding.renderContainer, backend)
    if (shouldResumePlayback) {
        logEvent("目标重绑后恢复播放: previousState=${formatState(currentState)}")
        startPlayAttempt()
        resetPreviewForPlayback()
        playerClient.prepareToPlay()
        playerClient.play()
    } else if (currentState == SellyPlayerState.Paused) {
        // Paused: the new target is picked up when playback next starts.
        logEvent("目标重绑完成: 当前处于暂停态,变更将在下次播放时生效")
    }
    // Cost covers detach + attach (+ resume when triggered above).
    val costMs = SystemClock.elapsedRealtime() - startedAtMs
    renderTargetRebindCount += 1
    lastRenderTargetRebindCostMs = costMs
    logEvent("目标重绑完成: count=$renderTargetRebindCount, cost=${costMs}ms")
}
/** Render backend chosen from the TextureView preference. */
private fun currentRenderBackend(): RenderBackend = when {
    useTextureView -> RenderBackend.TEXTURE_VIEW
    else -> RenderBackend.SURFACE_VIEW
}

/** Human-readable name of the active render backend (for logs/UI). */
private fun currentRenderBackendLabel(): String = when {
    useTextureView -> "TextureView"
    else -> "SurfaceView"
}
private fun formatLatencyChasingSpeed(speed: Float): String {
@@ -753,6 +982,10 @@ class LivePlayActivity : AppCompatActivity() {
}
}
/** Logs [message] at debug level, gated by the user's log setting. */
private fun debugLog(message: String) {
    if (!logEnabled) return
    Log.d(TAG, message)
}

/** Converts density-independent pixels to device pixels, rounding half up. */
private fun dpToPx(dp: Int): Int {
    val density = resources.displayMetrics.density
    return (dp * density + 0.5f).toInt()
}
@@ -801,7 +1034,8 @@ class LivePlayActivity : AppCompatActivity() {
vhost: String,
appName: String,
streamName: String,
autoStart: Boolean = true
autoStart: Boolean = true,
xorKeyHex: String = ""
): Intent {
return Intent(context, LivePlayActivity::class.java)
.putExtra(EXTRA_PLAY_PROTOCOL, liveMode.name)
@@ -809,6 +1043,7 @@ class LivePlayActivity : AppCompatActivity() {
.putExtra(EXTRA_PLAY_APP_NAME, appName)
.putExtra(EXTRA_PLAY_STREAM_NAME, streamName)
.putExtra(EXTRA_AUTO_START, autoStart)
.putExtra(EXTRA_XOR_KEY_HEX, xorKeyHex)
}
fun closePipIfAny(): Boolean {
@@ -869,7 +1104,8 @@ class LivePlayActivity : AppCompatActivity() {
val input = intent.getStringExtra(EXTRA_STREAM_ID_OR_URL).orEmpty()
.ifBlank { playParams?.streamName ?: env.defaultStreamId }
val autoStart = intent.getBooleanExtra(EXTRA_AUTO_START, true)
val xorKeyHex = intent.getStringExtra(EXTRA_XOR_KEY_HEX).orEmpty().trim()
val rawXorKey = intent.getStringExtra(EXTRA_XOR_KEY_HEX).orEmpty().trim()
val xorKeyHex = sanitizeXorKeyHex(rawXorKey)
val mode = resolveLiveMode(rawProtocol, input, env)
return Args(
liveMode = mode,
@@ -881,6 +1117,22 @@ class LivePlayActivity : AppCompatActivity() {
)
}
// Matches a non-empty run of hexadecimal digits (case-insensitive).
private val HEX_REGEX = Regex("^[0-9a-fA-F]+$")

/**
 * Validates and normalizes the XOR key. A leading "0x"/"0X" prefix is
 * stripped; the remainder must be non-empty, even-length hex. Returns the
 * empty string for anything invalid so a malformed key never reaches the
 * native layer (where it could crash).
 */
private fun sanitizeXorKeyHex(raw: String): String {
    if (raw.isBlank()) return ""
    val withoutPrefix = when {
        raw.startsWith("0x", ignoreCase = true) -> raw.substring(2)
        else -> raw
    }
    val looksValid = withoutPrefix.isNotEmpty() &&
        withoutPrefix.length % 2 == 0 &&
        HEX_REGEX.matches(withoutPrefix)
    if (!looksValid) {
        android.util.Log.w("LivePlayActivity", "Invalid xorKeyHex '$raw', ignoring to prevent crash")
        return ""
    }
    return withoutPrefix
}
private fun resolveLiveMode(raw: String?, input: String, env: LiveEnvSettings): SellyLiveMode {
val normalized = raw?.trim()?.uppercase()
val modeFromExtra = when (normalized) {

View File

@@ -67,17 +67,15 @@ class LivePlayForegroundService : Service() {
}
private fun ensureChannel() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
val manager = getSystemService(NotificationManager::class.java) ?: return
val existing = manager.getNotificationChannel(CHANNEL_ID)
if (existing == null) {
val channel = NotificationChannel(
CHANNEL_ID,
"Live Playback",
NotificationManager.IMPORTANCE_LOW
)
manager.createNotificationChannel(channel)
}
val manager = getSystemService(NotificationManager::class.java) ?: return
val existing = manager.getNotificationChannel(CHANNEL_ID)
if (existing == null) {
val channel = NotificationChannel(
CHANNEL_ID,
"Live Playback",
NotificationManager.IMPORTANCE_LOW
)
manager.createNotificationChannel(channel)
}
}
@@ -88,14 +86,10 @@ class LivePlayForegroundService : Service() {
fun start(context: Context) {
val appContext = context.applicationContext
val intent = Intent(appContext, LivePlayForegroundService::class.java)
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
try {
appContext.startService(intent)
} catch (_: IllegalStateException) {
ContextCompat.startForegroundService(appContext, intent)
}
} else {
try {
appContext.startService(intent)
} catch (_: IllegalStateException) {
ContextCompat.startForegroundService(appContext, intent)
}
}

View File

@@ -22,7 +22,10 @@ import android.widget.Toast
import androidx.appcompat.app.AlertDialog
import androidx.appcompat.app.AppCompatActivity
import androidx.appcompat.widget.AppCompatTextView
import androidx.lifecycle.lifecycleScope
import com.demo.SellyCloudSDK.KiwiHelper
import com.demo.SellyCloudSDK.R
import kotlinx.coroutines.launch
import com.demo.SellyCloudSDK.databinding.ActivityPkPlayBinding
import com.demo.SellyCloudSDK.live.auth.LiveAuthHelper
import com.demo.SellyCloudSDK.live.auth.LiveTokenSigner
@@ -79,6 +82,7 @@ class PkPlayActivity : AppCompatActivity() {
// Shared state
private var isMuted: Boolean = false
private var hasReleasedPlayers: Boolean = false
private var logEnabled: Boolean = true
// Log system
private val logLines: ArrayDeque<String> = ArrayDeque()
@@ -97,6 +101,7 @@ class PkPlayActivity : AppCompatActivity() {
envStore = LiveEnvSettingsStore(this)
val env = envStore.read().also { it.applyToSdkRuntimeConfig(this) }
logEnabled = env.logEnabled
args = Args.from(intent, env) ?: run {
Toast.makeText(this, "缺少 PK 播放参数", Toast.LENGTH_SHORT).show()
finish()
@@ -106,7 +111,7 @@ class PkPlayActivity : AppCompatActivity() {
binding.tvMainStreamName.text = args.mainStreamName
binding.tvPkStreamName.text = args.pkStreamName
Log.d(TAG, "初始化主播放器streamId=${args.mainStreamName}, 协议: RTC")
debugLog("初始化主播放器streamId=${args.mainStreamName}, 协议: RTC")
mainPlayer = SellyLiveVideoPlayer.initWithStreamId(
this,
args.mainStreamName,
@@ -141,7 +146,7 @@ class PkPlayActivity : AppCompatActivity() {
)
mainPlayer.setMuted(isMuted)
Log.d(TAG, "初始化 PK 播放器streamId=${args.pkStreamName}")
debugLog("初始化 PK 播放器streamId=${args.pkStreamName}")
pkPlayer = SellyLiveVideoPlayer.initWithStreamId(
this,
args.pkStreamName,
@@ -184,7 +189,10 @@ class PkPlayActivity : AppCompatActivity() {
binding.actionMute.setOnClickListener { toggleMute() }
if (args.autoStart) {
startPlayback()
lifecycleScope.launch {
KiwiHelper.awaitProxyReady()
startPlayback()
}
}
}
@@ -274,6 +282,13 @@ class PkPlayActivity : AppCompatActivity() {
}
}
override fun onReconnectStateChanged(isReconnecting: Boolean, detail: String?) {
runOnUiThread {
val suffix = detail?.takeIf { it.isNotBlank() }?.let { ": $it" }.orEmpty()
logEvent(if (isReconnecting) "$prefix: 重连开始$suffix" else "$prefix: 重连结束$suffix")
}
}
override fun onError(error: com.sellycloud.sellycloudsdk.SellyLiveError) {
runOnUiThread {
logEvent("$prefix: 错误: ${error.message}")
@@ -371,7 +386,10 @@ class PkPlayActivity : AppCompatActivity() {
if (mainPaused) mainPlayer.play()
if (pkPaused) pkPlayer.play()
} else {
startPlayback()
lifecycleScope.launch {
KiwiHelper.awaitProxyReady()
startPlayback()
}
}
}
}
@@ -651,6 +669,10 @@ class PkPlayActivity : AppCompatActivity() {
}
}
private fun debugLog(message: String) {
if (logEnabled) Log.d(TAG, message)
}
private fun dpToPx(dp: Int): Int {
return (dp * resources.displayMetrics.density + 0.5f).toInt()
}

View File

@@ -1,11 +1,13 @@
package com.demo.SellyCloudSDK.live.env
import android.content.Context
import com.demo.SellyCloudSDK.KiwiHelper
import com.sellycloud.sellycloudsdk.SellyCloudConfig
import com.sellycloud.sellycloudsdk.SellyCloudManager
import com.sellycloud.sellycloudsdk.SellyLiveMode
fun LiveEnvSettings.applyToSdkRuntimeConfig(context: Context) {
// 1. SDK 初始化(同步,轻量)
SellyCloudManager.initialize(
context = context,
appId = appId,
@@ -15,12 +17,13 @@ fun LiveEnvSettings.applyToSdkRuntimeConfig(context: Context) {
vhost = normalizedVhost(),
vhostKey = vhostKey,
defaultStreamId = defaultStreamId,
enableKiwi = enableKiwi,
kiwiRsName = kiwiRsName,
logEnabled = logEnabled,
defaultLiveMode = protocol.toLiveMode()
)
)
// 2. 启动代理获取:内部受控 scope、cancel 旧 Job
// 不阻塞主线程,关键 start 点通过 awaitProxyReady() 保证就绪
KiwiHelper.startProxySetup(enableKiwi, kiwiRsName)
}
fun LiveEnvSettings.normalizedAppName(): String = normalizedAppId()

View File

@@ -28,7 +28,8 @@ data class AliveStreamItem(
val previewImage: String?,
val durationSeconds: Long?,
val playProtocol: String?,
val streamPk: String?
val streamPk: String?,
val xorKey: String? = null
)
val AliveStreamItem.isPkStream: Boolean
@@ -101,6 +102,8 @@ private fun JSONObject.toAliveItem(): AliveStreamItem {
val streamPk = optString("stream_pk")
.ifBlank { optString("streamPk") }
.takeIf { it.isNotBlank() }
val xorKey = optString("xor_key")
.takeIf { it.isNotBlank() }
return AliveStreamItem(
vhost = vhost,
@@ -110,6 +113,7 @@ private fun JSONObject.toAliveItem(): AliveStreamItem {
previewImage = previewImage,
durationSeconds = durationSeconds,
playProtocol = playProtocol,
streamPk = streamPk
streamPk = streamPk,
xorKey = xorKey
)
}

View File

@@ -53,6 +53,9 @@ class AliveStreamAdapter(
binding.tvStreamName.text = title
binding.tvPkBadge.visibility = if (item.isPkStream) View.VISIBLE else View.GONE
val hasXor = !item.xorKey.isNullOrBlank()
binding.tvXorBadge.visibility = if (hasXor) View.VISIBLE else View.GONE
if (hasXor) binding.tvXorBadge.text = "\uD83D\uDD12"
val protocol = item.playProtocol
?.trim()

View File

@@ -0,0 +1,57 @@
package com.demo.SellyCloudSDK.live.square
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.withContext
import okhttp3.MediaType.Companion.toMediaType
import okhttp3.OkHttpClient
import okhttp3.Request
import okhttp3.RequestBody.Companion.toRequestBody
import org.json.JSONObject
private const val STREAM_XOR_URL = "http://rtmp.sellycloud.io:8089/live/sdk/demo/stream-xor"
private val JSON_MEDIA_TYPE = "application/json; charset=utf-8".toMediaType()
/**
 * Reports the XOR encryption state for a push stream.
 *
 * - `xorKey` non-empty: tells the server this stream uses XOR encryption with this key.
 * - `xorKey` empty/null: clears the cached key on the server.
 *
 * The server caches the key in memory. When `GET /live/sdk/alive-list` returns, matching
 * streams will include the `xor_key` field so viewers can auto-decrypt.
 */
object StreamXorRepository {
    private val client = OkHttpClient()

    /**
     * Posts the stream's XOR key state to the demo server.
     *
     * @param stream Stream name (required).
     * @param app Application name (required, e.g. "live").
     * @param vhost Virtual host (optional).
     * @param xorKey XOR hex key. Empty string or null means "disable & clear".
     * @return `true` if the server accepted the request; `false` on any
     *         network/serialization failure (best-effort, never throws).
     */
    suspend fun reportXorKey(
        stream: String,
        app: String,
        vhost: String? = null,
        xorKey: String?
    ): Boolean = withContext(Dispatchers.IO) {
        try {
            // Build the JSON payload; vhost is omitted when blank.
            val payload = JSONObject()
            payload.put("stream", stream)
            payload.put("app", app)
            if (!vhost.isNullOrBlank()) payload.put("vhost", vhost)
            payload.put("xor_key", xorKey.orEmpty())
            val request = Request.Builder()
                .url(STREAM_XOR_URL)
                .post(payload.toString().toRequestBody(JSON_MEDIA_TYPE))
                .build()
            client.newCall(request).execute().use { response -> response.isSuccessful }
        } catch (_: Exception) {
            // Best-effort reporting: swallow failures and signal via the result.
            false
        }
    }
}

View File

@@ -0,0 +1,192 @@
package com.demo.SellyCloudSDK.playback
import android.opengl.GLES20
import android.os.SystemClock
import com.sellycloud.sellycloudsdk.PlaybackFrameObserver
import com.sellycloud.sellycloudsdk.PlaybackFrameObserverConfig
import com.sellycloud.sellycloudsdk.PlaybackVideoProcessor
import com.sellycloud.sellycloudsdk.PlaybackVideoProcessorConfig
import com.sellycloud.sellycloudsdk.VideoProcessFormat
import com.sellycloud.sellycloudsdk.VideoProcessMode
import com.sellycloud.sellycloudsdk.VideoStage
import com.sellycloud.sellycloudsdk.VideoTextureFrame
import java.util.Locale
import java.util.concurrent.atomic.AtomicInteger
import java.util.concurrent.atomic.AtomicLong
/**
 * Playback-processing mode for the demo UI and logs.
 *
 * - [DIRECT]: frames go straight to display, no processing hook installed.
 * - [OBSERVER]: a read-only [PlaybackFrameObserver] taps frames pre-display.
 * - [PROCESSOR]: a read-write [PlaybackVideoProcessor] rewrites frames pre-display.
 *
 * @property label human-readable name shown in the tools panel and logs.
 */
enum class PlaybackProcessingPreset(val label: String) {
    DIRECT("DIRECT"),
    OBSERVER("PROCESSING_OBSERVER"),
    PROCESSOR("PROCESSING_PROCESSOR")
}
/**
 * Read-only playback frame observer that records the latest texture frame's
 * geometry and logs an FPS estimate roughly once per second.
 *
 * NOTE(review): fields are @Volatile and the counters are atomics — this
 * presumably means onTextureFrame runs on a render thread while summary()
 * is read elsewhere (e.g. UI thread); confirm the SDK's threading contract.
 *
 * @param log sink for human-readable status lines.
 */
class PlaybackTextureObserverDemo(
    private val log: (String) -> Unit
) : PlaybackFrameObserver {
    // Request 2D textures at the pre-display render stage.
    override val config: PlaybackFrameObserverConfig = PlaybackFrameObserverConfig(
        preferredFormat = VideoProcessFormat.TEXTURE_2D,
        stage = VideoStage.RENDER_PRE_DISPLAY
    )
    // Geometry of the most recent frame, surfaced via summary().
    @Volatile
    private var lastFrameWidth: Int = 0
    @Volatile
    private var lastFrameHeight: Int = 0
    @Volatile
    private var lastFrameRotation: Int = 0
    // FPS computed over the last completed ~1 s window.
    @Volatile
    private var lastFps: Float = 0f
    // Frames counted in the current window; windowStartMs == 0 means "window not started".
    private val frameCounter = AtomicInteger(0)
    private val windowStartMs = AtomicLong(0L)
    override fun onGlContextCreated() {
        log("processing observer: GL context created")
    }
    override fun onGlContextDestroyed() {
        log("processing observer: GL context destroyed")
    }
    override fun onTextureFrame(frame: VideoTextureFrame) {
        lastFrameWidth = frame.width
        lastFrameHeight = frame.height
        lastFrameRotation = frame.rotation
        val nowMs = SystemClock.elapsedRealtime()
        // Lazily open the measurement window on the first frame (0L sentinel).
        val startedAtMs = windowStartMs.updateAndGet { existing -> if (existing == 0L) nowMs else existing }
        val count = frameCounter.incrementAndGet()
        val elapsedMs = nowMs - startedAtMs
        // Report at most once per second.
        if (elapsedMs < 1_000L) return
        lastFps = count * 1000f / elapsedMs
        // Reset the window. Counter and window-start are reset separately
        // (not atomic as a pair) — acceptable for a single producer thread.
        frameCounter.set(0)
        windowStartMs.set(nowMs)
        log(
            "processing observer: texture fps=${String.format(Locale.US, "%.1f", lastFps)}, " +
            "size=${frame.width}x${frame.height}, rotation=${frame.rotation}"
        )
    }
    /** One-line status for the stats panel; placeholder text until a frame arrives. */
    fun summary(): String {
        if (lastFrameWidth <= 0 || lastFrameHeight <= 0) return "observer: 等待纹理帧"
        return "observer: ${String.format(Locale.US, "%.1f", lastFps)}fps, ${lastFrameWidth}x${lastFrameHeight}, rot=$lastFrameRotation"
    }
}
/**
 * Read-write playback processor that clears a small red rectangle into one
 * corner of every output frame — a visible marker that the processing path
 * is live — and logs the patch rate about once per second.
 *
 * With fullRewrite = false the processor only overwrites the patch region;
 * presumably the SDK pre-fills the output texture with the input frame —
 * TODO confirm against the PlaybackVideoProcessorConfig docs.
 *
 * All GL work happens in processTexture and must leave GL state as found:
 * the previous FBO binding, viewport, scissor-enable flag, and clear color
 * are captured up front and restored before returning.
 *
 * @param log sink for human-readable status lines.
 */
class PlaybackTexturePatchProcessor(
    private val log: (String) -> Unit
) : PlaybackVideoProcessor {
    override val config: PlaybackVideoProcessorConfig = PlaybackVideoProcessorConfig(
        preferredFormat = VideoProcessFormat.TEXTURE_2D,
        mode = VideoProcessMode.READ_WRITE,
        stage = VideoStage.RENDER_PRE_DISPLAY,
        fullRewrite = false
    )
    // Geometry of the most recent frame, surfaced via summary().
    @Volatile
    private var lastFrameWidth: Int = 0
    @Volatile
    private var lastFrameHeight: Int = 0
    @Volatile
    private var lastFrameRotation: Int = 0
    // Patch rate over the last completed ~1 s window.
    @Volatile
    private var lastPatchFps: Float = 0f
    private val patchCounter = AtomicInteger(0)
    private val windowStartMs = AtomicLong(0L)
    // Lazily created FBO used to render into the output texture; 0 = none yet.
    private var framebuffer = 0
    override fun onGlContextCreated() {
        log("processing processor: GL context created")
    }
    override fun onGlContextDestroyed() {
        // The FBO belongs to the dying context — free it and forget the handle.
        if (framebuffer != 0) {
            GLES20.glDeleteFramebuffers(1, intArrayOf(framebuffer), 0)
            framebuffer = 0
        }
        log("processing processor: GL context destroyed")
    }
    override fun processTexture(input: VideoTextureFrame, outputTextureId: Int) {
        if (outputTextureId <= 0) return
        lastFrameWidth = input.width
        lastFrameHeight = input.height
        lastFrameRotation = input.rotation
        // Patch is ~18% x 10% of the frame, with a floor so it stays visible.
        val patchWidth = (input.width * 0.18f).toInt().coerceAtLeast(48)
        val patchHeight = (input.height * 0.10f).toInt().coerceAtLeast(32)
        ensureFramebuffer()
        if (framebuffer == 0) return
        // Snapshot GL state we are about to clobber, so it can be restored.
        val previousFramebuffer = IntArray(1)
        val previousViewport = IntArray(4)
        val scissorWasEnabled = GLES20.glIsEnabled(GLES20.GL_SCISSOR_TEST)
        val previousClearColor = FloatArray(4)
        GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, previousFramebuffer, 0)
        GLES20.glGetIntegerv(GLES20.GL_VIEWPORT, previousViewport, 0)
        GLES20.glGetFloatv(GLES20.GL_COLOR_CLEAR_VALUE, previousClearColor, 0)
        // Render into the output texture via our own FBO.
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, framebuffer)
        GLES20.glFramebufferTexture2D(
            GLES20.GL_FRAMEBUFFER,
            GLES20.GL_COLOR_ATTACHMENT0,
            GLES20.GL_TEXTURE_2D,
            outputTextureId,
            0
        )
        GLES20.glViewport(0, 0, input.width, input.height)
        // Scissor-clear the patch rectangle to red. Patch origin (0,0) is the
        // GL lower-left; on-screen position depends on the SDK's texture
        // orientation — NOTE(review): confirm which corner this lands in.
        GLES20.glEnable(GLES20.GL_SCISSOR_TEST)
        GLES20.glScissor(0, 0, patchWidth, patchHeight)
        GLES20.glClearColor(0.98f, 0.20f, 0.24f, 1.0f)
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT)
        // Detach the texture before unbinding, then restore captured state.
        GLES20.glFramebufferTexture2D(
            GLES20.GL_FRAMEBUFFER,
            GLES20.GL_COLOR_ATTACHMENT0,
            GLES20.GL_TEXTURE_2D,
            0,
            0
        )
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, previousFramebuffer[0])
        GLES20.glViewport(previousViewport[0], previousViewport[1], previousViewport[2], previousViewport[3])
        GLES20.glClearColor(
            previousClearColor[0],
            previousClearColor[1],
            previousClearColor[2],
            previousClearColor[3]
        )
        if (!scissorWasEnabled) {
            GLES20.glDisable(GLES20.GL_SCISSOR_TEST)
        }
        // FPS bookkeeping: lazily open the window, report at most once per second.
        val nowMs = SystemClock.elapsedRealtime()
        val startedAtMs = windowStartMs.updateAndGet { existing -> if (existing == 0L) nowMs else existing }
        val count = patchCounter.incrementAndGet()
        val elapsedMs = nowMs - startedAtMs
        if (elapsedMs < 1_000L) return
        lastPatchFps = count * 1000f / elapsedMs
        patchCounter.set(0)
        windowStartMs.set(nowMs)
        log(
            "processing processor: red patch fps=${String.format(Locale.US, "%.1f", lastPatchFps)}, " +
            "size=${input.width}x${input.height}, rotation=${input.rotation}"
        )
    }
    /** One-line status for the stats panel; placeholder text until a frame arrives. */
    fun summary(): String {
        if (lastFrameWidth <= 0 || lastFrameHeight <= 0) return "processor: 等待纹理帧"
        return "processor: ${String.format(Locale.US, "%.1f", lastPatchFps)}fps, ${lastFrameWidth}x${lastFrameHeight}, rot=$lastFrameRotation"
    }
    // Creates the FBO on first use; leaves framebuffer == 0 if glGen fails.
    private fun ensureFramebuffer() {
        if (framebuffer != 0) return
        val framebuffers = IntArray(1)
        GLES20.glGenFramebuffers(1, framebuffers, 0)
        framebuffer = framebuffers[0]
    }
}

View File

@@ -0,0 +1,51 @@
package com.demo.SellyCloudSDK.vod
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.withContext
import okhttp3.OkHttpClient
import okhttp3.Request
import org.json.JSONObject
private const val VOD_LIST_URL = "http://rtmp.sellycloud.io:8089/live/sdk/demo/vodlist"
/**
 * Result of fetching the demo VOD catalog: either a format-name -> URL map
 * or a human-readable error message. Closed hierarchy — match exhaustively.
 */
sealed class VodListResult {
    data class Success(val formats: Map<String, String>) : VodListResult()
    data class Error(val message: String) : VodListResult()
}
/**
 * Fetches the demo VOD catalog from the demo server and exposes it as a
 * map of format name (uppercased) to playback URL.
 */
object VodListRepository {
    private val client = OkHttpClient()

    /**
     * Downloads and parses the VOD list on the IO dispatcher.
     *
     * @return [VodListResult.Success] with an insertion-ordered format map,
     *         or [VodListResult.Error] with a human-readable message on any
     *         HTTP, parse, or empty-payload failure (never throws).
     */
    suspend fun fetchVodList(): VodListResult = withContext(Dispatchers.IO) {
        val request = Request.Builder()
            .url(VOD_LIST_URL)
            .get()
            .build()
        try {
            client.newCall(request).execute().use { response ->
                val payload = response.body?.string().orEmpty()
                when {
                    !response.isSuccessful -> VodListResult.Error("网络错误: ${response.code}")
                    payload.isBlank() -> VodListResult.Error("服务返回为空")
                    else -> parseFormats(payload)
                }
            }
        } catch (e: Exception) {
            VodListResult.Error(e.message ?: "网络请求失败")
        }
    }

    // Parses {"flv": "http://...", ...} into an uppercased-key ordered map.
    private fun parseFormats(payload: String): VodListResult {
        val json = JSONObject(payload)
        val formats = linkedMapOf<String, String>()
        for (key in json.keys()) {
            val url = json.optString(key)
            if (url.isNotBlank()) {
                formats[key.uppercase()] = url
            }
        }
        return if (formats.isEmpty()) {
            VodListResult.Error("暂无在线资源")
        } else {
            VodListResult.Success(formats)
        }
    }
}

View File

@@ -10,6 +10,7 @@ import android.graphics.Bitmap
import android.graphics.Color
import android.graphics.Typeface
import android.graphics.drawable.GradientDrawable
import com.sellycloud.sellycloudsdk.render.RenderBackend
import android.os.Build
import android.os.Bundle
import android.os.Looper
@@ -28,8 +29,13 @@ import androidx.appcompat.app.AppCompatActivity
import androidx.appcompat.widget.AppCompatTextView
import androidx.core.content.ContextCompat
import com.demo.SellyCloudSDK.R
import com.demo.SellyCloudSDK.avdemo.AvDemoSettingsStore
import com.demo.SellyCloudSDK.databinding.ActivityVodPlayBinding
import com.demo.SellyCloudSDK.live.util.GalleryImageSaver
import com.demo.SellyCloudSDK.playback.PlaybackProcessingPreset
import com.demo.SellyCloudSDK.playback.PlaybackTextureObserverDemo
import com.demo.SellyCloudSDK.playback.PlaybackTexturePatchProcessor
import com.sellycloud.sellycloudsdk.SellyCloudManager
import com.sellycloud.sellycloudsdk.SellyLiveError
import com.sellycloud.sellycloudsdk.SellyPlayerState
import com.sellycloud.sellycloudsdk.SellyVodPlayer
@@ -54,6 +60,7 @@ class VodPlayActivity : AppCompatActivity() {
private var player: SellyVodPlayer? = null
private var renderView: View? = null
private var useTextureView = false
private var isPlaying = false
private var isMuted = false
private var currentState: SellyPlayerState = SellyPlayerState.Idle
@@ -66,6 +73,9 @@ class VodPlayActivity : AppCompatActivity() {
private var firstAudioFrameElapsedMs: Long? = null
private var firstAudioFrameCostMs: Long? = null
private var bufferingActive = false
private var processingPreset: PlaybackProcessingPreset = PlaybackProcessingPreset.DIRECT
private var renderTargetRebindCount = 0
private var lastRenderTargetRebindCostMs: Long? = null
private var progressJob: Job? = null
@@ -75,6 +85,14 @@ class VodPlayActivity : AppCompatActivity() {
private var logSummaryView: TextView? = null
private var logContentView: TextView? = null
private var logFloatingButton: View? = null
private var toolsFloatingButton: View? = null
private val playbackObserverDemo by lazy(LazyThreadSafetyMode.NONE) {
PlaybackTextureObserverDemo(::logEvent)
}
private val playbackPatchProcessor by lazy(LazyThreadSafetyMode.NONE) {
PlaybackTexturePatchProcessor(::logEvent)
}
private val storagePermissionLauncher = registerForActivityResult(
ActivityResultContracts.RequestPermission()
@@ -89,7 +107,9 @@ class VodPlayActivity : AppCompatActivity() {
binding = ActivityVodPlayBinding.inflate(layoutInflater)
setContentView(binding.root)
supportActionBar?.hide()
useTextureView = AvDemoSettingsStore(this).read().renderBackendPreference.isTextureView()
addLogFloatingButton()
addToolsFloatingButton()
binding.btnClose.setOnClickListener { finish() }
binding.actionPlay.setOnClickListener { togglePlay() }
@@ -118,11 +138,14 @@ class VodPlayActivity : AppCompatActivity() {
})
val url = intent.getStringExtra(EXTRA_VOD_URL)?.trim().orEmpty()
.ifEmpty { intent.dataString?.trim().orEmpty() }
if (url.isEmpty()) {
Toast.makeText(this, "请输入有效的播放地址", Toast.LENGTH_SHORT).show()
finish()
return
}
// VOD 模式始终走直连,不使用 Kiwi 代理替换路径。
SellyCloudManager.setProxyAddress(null)
initPlayer(url)
}
@@ -143,7 +166,21 @@ class VodPlayActivity : AppCompatActivity() {
}
}
/** Classifies a playback source by URL scheme, for log output only. */
private fun classifySource(url: String): String {
    // Ordered prefix table; first match wins ("/" last so schemes win).
    val prefixKinds = listOf(
        "content://" to "Content URI",
        "android.resource://" to "Raw Resource",
        "asset:///" to "Asset",
        "file://" to "File URI",
        "http://" to "Network",
        "https://" to "Network",
        "/" to "Local Path",
    )
    val match = prefixKinds.firstOrNull { (prefix, _) ->
        url.startsWith(prefix, ignoreCase = true)
    }
    return match?.second ?: "Unknown"
}
private fun initPlayer(url: String) {
logEvent("播放源类型: ${classifySource(url)}")
logEvent("播放地址: $url")
val vodPlayer = SellyVodPlayer.initWithUrl(this, url).also { client ->
client.autoPlay = true
client.delegate = object : SellyVodPlayerDelegate {
@@ -231,7 +268,9 @@ class VodPlayActivity : AppCompatActivity() {
client.setMuted(isMuted)
}
renderView = vodPlayer.attachRenderView(binding.renderContainer)
val backend = currentRenderBackend()
renderView = vodPlayer.attachRenderView(binding.renderContainer, backend)
logEvent("渲染目标已绑定: backend=${currentRenderBackendLabel()}, processing=${processingPreset.label}")
player = vodPlayer
startPlayAttempt()
vodPlayer.prepareAsync()
@@ -313,8 +352,22 @@ class VodPlayActivity : AppCompatActivity() {
Toast.makeText(this, "视图尚未布局完成,稍后再试", Toast.LENGTH_SHORT).show()
return
}
if (view is android.view.TextureView) {
val bmp = view.getBitmap()
if (bmp == null) {
Toast.makeText(this, "TextureView 尚未渲染画面", Toast.LENGTH_SHORT).show()
return
}
uiScope.launch(Dispatchers.IO) {
val ok = saveBitmapToGallery(bmp, prefix)
launch(Dispatchers.Main) {
Toast.makeText(this@VodPlayActivity, if (ok) "截图已保存到相册" else "保存失败", Toast.LENGTH_SHORT).show()
}
}
return
}
if (view !is android.view.SurfaceView) {
Toast.makeText(this, "当前视图不支持截图", Toast.LENGTH_SHORT).show()
Toast.makeText(this, "当前视图类型不支持截图", Toast.LENGTH_SHORT).show()
return
}
val bmp = Bitmap.createBitmap(view.width, view.height, Bitmap.Config.ARGB_8888)
@@ -390,6 +443,98 @@ class VodPlayActivity : AppCompatActivity() {
logFloatingButton = button
}
/**
 * Adds a round floating button (above the control bar, left of the log
 * button) that opens the playback tools dialog.
 */
private fun addToolsFloatingButton() {
    val sizePx = dpToPx(44)
    val marginEndPx = dpToPx(72)
    val controlBarHeight = resources.getDimensionPixelSize(R.dimen.av_control_bar_height)
    val marginBottomPx = controlBarHeight + dpToPx(80)
    // Translucent purple gradient circle with a faint white border.
    val bgDrawable = GradientDrawable(
        GradientDrawable.Orientation.TOP_BOTTOM,
        intArrayOf(
            Color.parseColor("#B33B0764"),
            Color.parseColor("#803B0764")
        )
    )
    bgDrawable.shape = GradientDrawable.OVAL
    bgDrawable.setStroke(dpToPx(1), Color.parseColor("#55FFFFFF"))
    val button = AppCompatTextView(this)
    button.text = ""
    button.setTextColor(Color.parseColor("#F8FAFC"))
    button.textSize = 11f
    button.gravity = Gravity.CENTER
    button.background = bgDrawable
    button.elevation = dpToPx(4).toFloat()
    button.setShadowLayer(2f, 0f, 1f, Color.parseColor("#66000000"))
    button.isClickable = true
    button.isFocusable = true
    button.contentDescription = "播放处理与回归工具"
    button.setOnClickListener { showPlaybackToolsDialog() }
    val params = FrameLayout.LayoutParams(sizePx, sizePx)
    params.gravity = Gravity.END or Gravity.BOTTOM
    params.marginEnd = marginEndPx
    params.bottomMargin = marginBottomPx
    addContentView(button, params)
    toolsFloatingButton = button
}
/**
 * Shows the playback tools dialog: a status summary plus buttons for the
 * three processing presets and a manual render-target rebind.
 */
private fun showPlaybackToolsDialog() {
    val container = LinearLayout(this)
    container.orientation = LinearLayout.VERTICAL
    container.setPadding(dpToPx(20), dpToPx(16), dpToPx(20), dpToPx(8))
    val summary = TextView(this)
    summary.text = "当前后端: ${currentRenderBackendLabel()}\n" +
        "当前模式: ${processingPreset.label}\n" +
        "说明: processing 仅支持 TextureView。"
    summary.setTextColor(Color.parseColor("#E5E7EB"))
    summary.textSize = 13f
    container.addView(summary)
    container.addView(spaceView(dpToPx(12)))
    // Label -> action pairs, rendered as a column of buttons.
    val actions = listOf<Pair<String, () -> Unit>>(
        "切换 DIRECT 直出" to {
            applyPlaybackProcessingPreset(PlaybackProcessingPreset.DIRECT, trigger = "工具面板")
        },
        "切换 PROCESSING Observer" to {
            applyPlaybackProcessingPreset(PlaybackProcessingPreset.OBSERVER, trigger = "工具面板")
        },
        "切换 PROCESSING 红块 Processor" to {
            applyPlaybackProcessingPreset(PlaybackProcessingPreset.PROCESSOR, trigger = "工具面板")
        },
        "仅重绑当前目标" to {
            rebindRenderTarget("手动回归")
        },
    )
    for ((label, action) in actions) {
        container.addView(createToolActionButton(label, action))
    }
    AlertDialog.Builder(this)
        .setTitle("播放处理 / 目标重绑")
        .setView(container)
        .setNegativeButton("关闭", null)
        .show()
}
/**
 * Builds a full-width rounded action button for the tools dialog.
 *
 * @param label   button caption.
 * @param onClick invoked when the button is tapped.
 */
private fun createToolActionButton(label: String, onClick: () -> Unit): View {
    val button = AppCompatTextView(this)
    button.text = label
    button.gravity = Gravity.CENTER
    button.textSize = 14f
    button.setTextColor(Color.parseColor("#F8FAFC"))
    // Rounded slate background with a subtle border.
    val rounded = GradientDrawable()
    rounded.cornerRadius = dpToPx(10).toFloat()
    rounded.setColor(Color.parseColor("#334155"))
    rounded.setStroke(dpToPx(1), Color.parseColor("#475569"))
    button.background = rounded
    val pad = dpToPx(12)
    button.setPadding(pad, pad, pad, pad)
    button.isClickable = true
    button.isFocusable = true
    button.setOnClickListener { onClick() }
    val lp = LinearLayout.LayoutParams(
        ViewGroup.LayoutParams.MATCH_PARENT,
        ViewGroup.LayoutParams.WRAP_CONTENT,
    )
    lp.bottomMargin = dpToPx(10)
    button.layoutParams = lp
    return button
}
private fun showLogDialog() {
if (logDialog?.isShowing == true) {
refreshLogDialogContent()
@@ -515,12 +660,22 @@ class VodPlayActivity : AppCompatActivity() {
private fun buildLogSummary(): String {
val builder = StringBuilder()
builder.append("状态: ").append(formatState(currentState)).append('\n')
builder.append("渲染后端: ").append(currentRenderBackendLabel()).append('\n')
builder.append("播放处理: ").append(processingPreset.label).append('\n')
builder.append("是否播放中: ").append(if (isPlaying) "" else "").append('\n')
builder.append("是否静音: ").append(if (isMuted) "" else "").append('\n')
builder.append("总时长: ").append(if (durationMs > 0) formatTime(durationMs) else "--").append('\n')
builder.append("当前进度: ").append(formatTime(player?.getCurrentPositionMs() ?: 0L)).append('\n')
builder.append("首帧视频耗时(ms): ").append(firstVideoFrameCostMs ?: "未统计").append('\n')
builder.append("首帧音频耗时(ms): ").append(firstAudioFrameCostMs ?: "未统计").append('\n')
builder.append("目标重绑次数: ").append(renderTargetRebindCount).append('\n')
builder.append("最近重绑耗时(ms): ").append(lastRenderTargetRebindCostMs ?: "未统计").append('\n')
val processingDetail = when (processingPreset) {
PlaybackProcessingPreset.DIRECT -> "processing: 关闭"
PlaybackProcessingPreset.OBSERVER -> playbackObserverDemo.summary()
PlaybackProcessingPreset.PROCESSOR -> playbackPatchProcessor.summary()
}
builder.append(processingDetail).append('\n')
val attemptElapsed = playAttemptStartElapsedMs?.let { SystemClock.elapsedRealtime() - it }
if (attemptElapsed == null) {
builder.append("本次播放已耗时(ms): 未开始").append('\n')
@@ -587,7 +742,65 @@ class VodPlayActivity : AppCompatActivity() {
firstAudioFrameElapsedMs = null
firstAudioFrameCostMs = null
bufferingActive = false
logEvent("播放尝试开始")
logEvent("播放尝试开始: backend=${currentRenderBackendLabel()}, processing=${processingPreset.label}")
}
/**
 * Switches the playback processing preset. PROCESSING modes require the
 * TextureView backend; a rejected request keeps the current preset.
 */
private fun applyPlaybackProcessingPreset(preset: PlaybackProcessingPreset, trigger: String) {
    if (preset == processingPreset) {
        // Already active — report and bail out.
        logEvent("播放处理保持不变: ${preset.label}, trigger=$trigger")
        Toast.makeText(this, "当前已是 ${preset.label}", Toast.LENGTH_SHORT).show()
        return
    }
    val needsTexture = preset != PlaybackProcessingPreset.DIRECT
    if (needsTexture && !useTextureView) {
        logEvent("播放处理切换被拒绝: backend=${currentRenderBackendLabel()} 不支持 ${preset.label}")
        Toast.makeText(this, "播放 processing 仅支持 TextureView 后端", Toast.LENGTH_SHORT).show()
        return
    }
    processingPreset = preset
    configurePlaybackProcessing()
    logEvent("播放处理切换: mode=${preset.label}, trigger=$trigger")
    rebindRenderTarget("processing_${preset.name.lowercase(Locale.US)}")
    Toast.makeText(this, "已切到 ${preset.label}", Toast.LENGTH_SHORT).show()
}
/**
 * Installs the hook matching [processingPreset] on the current player.
 * At most one of the frame-observer / video-processor hooks is active:
 * the unused hook is cleared before the active one is set, so both are
 * never registered at the same time.
 */
private fun configurePlaybackProcessing() {
    // No player yet — nothing to configure.
    val currentPlayer = player ?: return
    when (processingPreset) {
        PlaybackProcessingPreset.DIRECT -> {
            // Processing disabled: clear both hooks.
            currentPlayer.setPlaybackFrameObserver(null)
            currentPlayer.setPlaybackVideoProcessor(null)
        }
        PlaybackProcessingPreset.OBSERVER -> {
            // Clear the processor first, then attach the read-only observer.
            currentPlayer.setPlaybackVideoProcessor(null)
            currentPlayer.setPlaybackFrameObserver(playbackObserverDemo)
        }
        PlaybackProcessingPreset.PROCESSOR -> {
            // Clear the observer first, then attach the read-write processor.
            currentPlayer.setPlaybackFrameObserver(null)
            currentPlayer.setPlaybackVideoProcessor(playbackPatchProcessor)
        }
    }
}
/**
 * Detaches and re-attaches the player's render view on the current backend,
 * timing the operation and updating the rebind statistics.
 *
 * No-op when no player exists. [renderTargetRebindCount] and
 * [lastRenderTargetRebindCostMs] are updated for the stats overlay.
 *
 * @param reason free-form tag recorded in the event log (e.g. why the rebind happened)
 */
private fun rebindRenderTarget(reason: String) {
    val activePlayer = player ?: return
    val rebindStartMs = SystemClock.elapsedRealtime()
    val selectedBackend = currentRenderBackend()
    logEvent("目标重绑开始: reason=$reason, backend=${currentRenderBackendLabel()}, processing=${processingPreset.label}")
    activePlayer.clearRenderTarget()
    renderView = activePlayer.attachRenderView(binding.renderContainer, selectedBackend)
    val rebindCostMs = SystemClock.elapsedRealtime() - rebindStartMs
    renderTargetRebindCount += 1
    lastRenderTargetRebindCostMs = rebindCostMs
    logEvent("目标重绑完成: count=$renderTargetRebindCount, cost=${rebindCostMs}ms")
}
/** Maps the [useTextureView] flag to the corresponding [RenderBackend] value. */
private fun currentRenderBackend(): RenderBackend = when {
    useTextureView -> RenderBackend.TEXTURE_VIEW
    else -> RenderBackend.SURFACE_VIEW
}
/** Human-readable name of the active render backend, used in logs and toasts. */
private fun currentRenderBackendLabel(): String = when {
    useTextureView -> "TextureView"
    else -> "SurfaceView"
}
private fun formatState(state: SellyPlayerState): String {
@@ -636,9 +849,13 @@ class VodPlayActivity : AppCompatActivity() {
private const val MAX_LOG_LINES = 200
private const val SEEK_FORWARD_MS = 10_000L
fun createIntent(context: Context, url: String): Intent {
fun createIntent(context: Context, url: String, sourceUri: android.net.Uri? = null): Intent {
return Intent(context, VodPlayActivity::class.java).apply {
putExtra(EXTRA_VOD_URL, url)
if (sourceUri != null) {
data = sourceUri
addFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION)
}
}
}
}

View File

@@ -1,9 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Card background with only the top corners rounded (18dp), filled with
     the shared card color — intended for sheets/cards anchored to the bottom
     edge, where the bottom corners stay square. -->
<shape xmlns:android="http://schemas.android.com/apk/res/android" android:shape="rectangle">
<solid android:color="@color/av_card_fill" />
<corners
android:topLeftRadius="18dp"
android:topRightRadius="18dp"
android:bottomLeftRadius="0dp"
android:bottomRightRadius="0dp" />
</shape>

View File

@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<shape xmlns:android="http://schemas.android.com/apk/res/android" android:shape="rectangle">
<solid android:color="@color/brand_primary" />
<corners android:radius="@dimen/av_corner_large" />
<corners android:radius="@dimen/av_corner_small" />
</shape>

View File

@@ -0,0 +1,14 @@
<!-- 24dp white icon: a person silhouette inside viewfinder frame corners.
     Used for the auto-framing toolbar toggle; tinted at the usage site. -->
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="24dp"
android:height="24dp"
android:viewportWidth="24"
android:viewportHeight="24">
<!-- Frame corners -->
<path
android:fillColor="#FFFFFF"
android:pathData="M3,3h4V5H5v2H3V3zM17,3h4v4h-2V5h-2V3zM3,17h2v2h2v2H3v-4zM19,19h-2v2h4v-4h-2v2z" />
<!-- Person silhouette -->
<path
android:fillColor="#FFFFFF"
android:pathData="M12,8a2.5,2.5 0,1 0,0 -5,2.5 2.5,0 0,0 0,5zM12,9.5c-2.33,0 -7,1.17 -7,3.5v1.5h14V13c0,-2.33 -4.67,-3.5 -7,-3.5z" />
</vector>

View File

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Background selector for VOD format chips: highlighted while selected or
     pressed, otherwise the plain input-field background. Order matters —
     state items are matched top-down, the last item is the default. -->
<selector xmlns:android="http://schemas.android.com/apk/res/android">
<item android:drawable="@drawable/bg_av_vod_chip_selected" android:state_selected="true" />
<item android:drawable="@drawable/bg_av_vod_chip_selected" android:state_pressed="true" />
<item android:drawable="@drawable/bg_av_input_field" />
</selector>

View File

@@ -476,6 +476,57 @@
android:textColorHint="@color/av_text_hint"
android:textSize="14sp" />
<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="16dp"
android:text="@string/settings_render_backend"
android:textColor="@color/av_text_primary"
android:textSize="14sp"
android:textStyle="bold" />
<RadioGroup
android:id="@+id/rgSettingsRenderBackend"
android:layout_width="match_parent"
android:layout_height="@dimen/av_field_height"
android:layout_marginTop="8dp"
android:background="@drawable/bg_av_segment_container"
android:checkedButton="@+id/rbSettingsRenderSurface"
android:orientation="horizontal">
<RadioButton
android:id="@+id/rbSettingsRenderSurface"
android:layout_width="0dp"
android:layout_height="match_parent"
android:layout_weight="1"
android:background="@drawable/selector_av_segment_left"
android:button="@null"
android:gravity="center"
android:text="@string/settings_render_backend_surface"
android:textColor="@color/av_segment_text"
android:textSize="14sp" />
<RadioButton
android:id="@+id/rbSettingsRenderTexture"
android:layout_width="0dp"
android:layout_height="match_parent"
android:layout_weight="1"
android:background="@drawable/selector_av_segment_right"
android:button="@null"
android:gravity="center"
android:text="@string/settings_render_backend_texture"
android:textColor="@color/av_segment_text"
android:textSize="14sp" />
</RadioGroup>
<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="8dp"
android:text="@string/settings_render_backend_hint"
android:textColor="@color/av_text_hint"
android:textSize="12sp" />
<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"

View File

@@ -37,7 +37,8 @@
android:src="@drawable/ic_av_close"
app:tint="@color/av_text_primary"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintTop_toTopOf="parent" />
app:layout_constraintTop_toTopOf="parent"
app:layout_constraintStart_toStartOf="parent" />
<LinearLayout
android:id="@+id/controlBar"

View File

@@ -105,6 +105,17 @@
android:textSize="12sp" />
</LinearLayout>
<TextView
android:id="@+id/tvStatsDetail"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginTop="6dp"
android:maxLines="3"
android:text="状态详情"
android:textColor="@color/brand_primary_text_sub"
android:textSize="12sp"
android:visibility="gone" />
<LinearLayout
android:layout_width="wrap_content"
android:layout_height="wrap_content"
@@ -201,6 +212,30 @@
android:textSize="12sp" />
</LinearLayout>
<LinearLayout
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="6dp"
android:gravity="center_vertical"
android:orientation="horizontal">
<ImageView
android:layout_width="16dp"
android:layout_height="16dp"
android:contentDescription="@null"
android:src="@drawable/ic_live_stats_fps"
app:tint="@color/brand_primary_text_sub" />
<TextView
android:id="@+id/tvStatsFrameCallback"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginStart="8dp"
android:text="@string/live_stats_frame_callback_off"
android:textColor="@color/brand_primary_text_sub"
android:textSize="12sp" />
</LinearLayout>
<LinearLayout
android:layout_width="wrap_content"
android:layout_height="wrap_content"
@@ -404,7 +439,7 @@
</LinearLayout>
<LinearLayout
android:id="@+id/actionScreenshot"
android:id="@+id/actionAutoFraming"
android:layout_width="0dp"
android:layout_height="match_parent"
android:layout_weight="1"
@@ -417,17 +452,81 @@
android:paddingBottom="8dp">
<ImageView
android:id="@+id/ivToolAutoFraming"
android:layout_width="24dp"
android:layout_height="24dp"
android:contentDescription="@string/push_tool_screenshot"
android:src="@drawable/ic_av_camera"
android:contentDescription="@string/push_tool_autoframing_off"
android:src="@drawable/ic_live_auto_framing"
app:tint="@color/brand_primary_text_on" />
<TextView
android:id="@+id/tvToolAutoFramingLabel"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="4dp"
android:text="@string/push_tool_screenshot"
android:text="@string/push_tool_autoframing_off"
android:textColor="@color/brand_primary_text_on"
android:textSize="12sp" />
</LinearLayout>
<LinearLayout
android:id="@+id/actionFrameCallback"
android:layout_width="0dp"
android:layout_height="match_parent"
android:layout_weight="1"
android:clickable="true"
android:focusable="true"
android:foreground="?attr/selectableItemBackground"
android:gravity="center"
android:orientation="vertical"
android:paddingTop="8dp"
android:paddingBottom="8dp">
<ImageView
android:id="@+id/ivToolFrameCallback"
android:layout_width="24dp"
android:layout_height="24dp"
android:contentDescription="@string/push_tool_frame_callback_off"
android:src="@drawable/ic_live_stats_fps"
app:tint="@color/brand_primary_text_on" />
<TextView
android:id="@+id/tvToolFrameCallbackLabel"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="4dp"
android:text="@string/push_tool_frame_callback_off"
android:textColor="@color/brand_primary_text_on"
android:textSize="12sp" />
</LinearLayout>
<LinearLayout
android:id="@+id/actionFrameModify"
android:layout_width="0dp"
android:layout_height="match_parent"
android:layout_weight="1"
android:clickable="true"
android:focusable="true"
android:foreground="?attr/selectableItemBackground"
android:gravity="center"
android:orientation="vertical"
android:paddingTop="8dp"
android:paddingBottom="8dp">
<ImageView
android:id="@+id/ivToolFrameModify"
android:layout_width="24dp"
android:layout_height="24dp"
android:contentDescription="@string/push_tool_frame_modify_off"
android:src="@drawable/ic_live_stats_fps"
app:tint="@color/brand_primary_text_on" />
<TextView
android:id="@+id/tvToolFrameModifyLabel"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="4dp"
android:text="@string/push_tool_frame_modify_off"
android:textColor="@color/brand_primary_text_on"
android:textSize="12sp" />
</LinearLayout>

View File

@@ -25,7 +25,8 @@
android:src="@drawable/ic_av_close"
app:tint="@color/av_text_primary"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintTop_toTopOf="parent" />
app:layout_constraintTop_toTopOf="parent"
app:layout_constraintStart_toStartOf="parent" />
<LinearLayout
android:id="@+id/progressRow"

View File

@@ -1,267 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<ScrollView xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:fillViewport="true">
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="vertical"
android:padding="20dp"
android:background="@android:color/white">
<TextView
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="美颜设置"
android:textSize="18sp"
android:textStyle="bold"
android:textColor="@android:color/black"
android:gravity="center"
android:layout_marginBottom="20dp" />
<!-- 美颜开关 -->
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal"
android:layout_marginBottom="15dp"
android:gravity="center_vertical">
<TextView
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
android:text="启用美颜"
android:textSize="16sp"
android:textColor="@android:color/black" />
<Switch
android:id="@+id/switchBeautyEnable"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:checked="true" />
</LinearLayout>
<!-- 磨皮强度 -->
<TextView
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="磨皮强度"
android:textSize="14sp"
android:textColor="@android:color/black"
android:layout_marginBottom="8dp" />
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal"
android:layout_marginBottom="15dp"
android:gravity="center_vertical">
<SeekBar
android:id="@+id/seekBarBeautyIntensity"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
android:max="100"
android:progress="60" />
<TextView
android:id="@+id/tvBeautyValue"
android:layout_width="50dp"
android:layout_height="wrap_content"
android:text="6.0"
android:textSize="14sp"
android:textColor="@android:color/black"
android:gravity="center"
android:layout_marginStart="8dp" />
</LinearLayout>
<!-- 滤镜强度 -->
<TextView
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="滤镜强度"
android:textSize="14sp"
android:textColor="@android:color/black"
android:layout_marginBottom="8dp" />
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal"
android:layout_marginBottom="15dp"
android:gravity="center_vertical">
<SeekBar
android:id="@+id/seekBarFilterIntensity"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
android:max="10"
android:progress="7" />
<TextView
android:id="@+id/tvFilterValue"
android:layout_width="50dp"
android:layout_height="wrap_content"
android:text="0.7"
android:textSize="14sp"
android:textColor="@android:color/black"
android:gravity="center"
android:layout_marginStart="8dp" />
</LinearLayout>
<!-- 美白强度 -->
<TextView
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="美白强度"
android:textSize="14sp"
android:textColor="@android:color/black"
android:layout_marginBottom="8dp" />
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal"
android:layout_marginBottom="15dp"
android:gravity="center_vertical">
<SeekBar
android:id="@+id/seekBarColorIntensity"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
android:max="10"
android:progress="5" />
<TextView
android:id="@+id/tvColorValue"
android:layout_width="50dp"
android:layout_height="wrap_content"
android:text="0.5"
android:textSize="14sp"
android:textColor="@android:color/black"
android:gravity="center"
android:layout_marginStart="8dp" />
</LinearLayout>
<!-- 红润强度 -->
<TextView
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="红润强度"
android:textSize="14sp"
android:textColor="@android:color/black"
android:layout_marginBottom="8dp" />
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal"
android:layout_marginBottom="15dp"
android:gravity="center_vertical">
<SeekBar
android:id="@+id/seekBarRedIntensity"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
android:max="10"
android:progress="5" />
<TextView
android:id="@+id/tvRedValue"
android:layout_width="50dp"
android:layout_height="wrap_content"
android:text="0.5"
android:textSize="14sp"
android:textColor="@android:color/black"
android:gravity="center"
android:layout_marginStart="8dp" />
</LinearLayout>
<!-- 亮眼强度 -->
<TextView
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="亮眼强度"
android:textSize="14sp"
android:textColor="@android:color/black"
android:layout_marginBottom="8dp" />
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal"
android:layout_marginBottom="15dp"
android:gravity="center_vertical">
<SeekBar
android:id="@+id/seekBarEyeBrightIntensity"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
android:max="10"
android:progress="10" />
<TextView
android:id="@+id/tvEyeBrightValue"
android:layout_width="50dp"
android:layout_height="wrap_content"
android:text="1.0"
android:textSize="14sp"
android:textColor="@android:color/black"
android:gravity="center"
android:layout_marginStart="8dp" />
</LinearLayout>
<!-- 美牙强度 -->
<TextView
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="美牙强度"
android:textSize="14sp"
android:textColor="@android:color/black"
android:layout_marginBottom="8dp" />
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal"
android:layout_marginBottom="20dp"
android:gravity="center_vertical">
<SeekBar
android:id="@+id/seekBarToothIntensity"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
android:max="10"
android:progress="10" />
<TextView
android:id="@+id/tvToothValue"
android:layout_width="50dp"
android:layout_height="wrap_content"
android:text="1.0"
android:textSize="14sp"
android:textColor="@android:color/black"
android:gravity="center"
android:layout_marginStart="8dp" />
</LinearLayout>
<!-- 关闭按钮 -->
<Button
android:id="@+id/btnClose"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="关闭"
android:backgroundTint="#607D8B"
android:textColor="@android:color/white" />
</LinearLayout>
</ScrollView>

View File

@@ -17,58 +17,142 @@
android:src="@drawable/ic_av_close"
app:tint="@color/av_text_primary" />
<LinearLayout
android:id="@+id/card"
<ScrollView
android:layout_width="320dp"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:background="@drawable/bg_av_dialog_card_gray"
android:orientation="vertical"
android:paddingStart="18dp"
android:paddingTop="16dp"
android:paddingEnd="18dp"
android:paddingBottom="18dp">
android:scrollbars="none">
<TextView
<LinearLayout
android:id="@+id/card"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:gravity="center"
android:text="@string/vod_config_title"
android:textColor="@color/av_text_primary"
android:textSize="18sp"
android:textStyle="bold" />
android:background="@drawable/bg_av_dialog_card_gray"
android:orientation="vertical"
android:paddingStart="18dp"
android:paddingTop="16dp"
android:paddingEnd="18dp"
android:paddingBottom="18dp">
<TextView
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginTop="14dp"
android:text="@string/vod_config_hint"
android:textColor="@color/av_text_secondary"
android:textSize="12sp" />
<TextView
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:gravity="center"
android:text="@string/vod_config_title"
android:textColor="@color/av_text_primary"
android:textSize="18sp"
android:textStyle="bold" />
<EditText
android:id="@+id/etVodUrl"
android:layout_width="match_parent"
android:layout_height="@dimen/av_field_height"
android:layout_marginTop="8dp"
android:background="@drawable/bg_av_input_field"
android:importantForAutofill="no"
android:inputType="textUri"
android:paddingStart="12dp"
android:paddingEnd="12dp"
android:textColor="@color/av_text_primary"
android:textColorHint="@color/av_text_hint"
android:textSize="14sp" />
<TextView
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginTop="14dp"
android:text="@string/vod_config_hint"
android:textColor="@color/av_text_secondary"
android:textSize="12sp" />
<Button
android:id="@+id/btnStartVod"
android:layout_width="match_parent"
android:layout_height="@dimen/av_primary_button_height"
android:layout_marginTop="18dp"
android:background="@drawable/selector_av_primary_button"
android:text="@string/play_start"
android:textColor="@color/brand_primary_text_on"
android:textSize="16sp"
android:textStyle="bold" />
</LinearLayout>
<EditText
android:id="@+id/etVodUrl"
android:layout_width="match_parent"
android:layout_height="@dimen/av_field_height"
android:layout_marginTop="8dp"
android:background="@drawable/bg_av_input_field"
android:importantForAutofill="no"
android:inputType="textUri"
android:paddingStart="12dp"
android:paddingEnd="12dp"
android:textColor="@color/av_text_primary"
android:textColorHint="@color/av_text_hint"
android:textSize="14sp" />
<!-- VOD Online Resources Section -->
<LinearLayout
android:id="@+id/vodListSection"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginTop="12dp"
android:orientation="vertical">
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:gravity="center_vertical"
android:orientation="horizontal">
<TextView
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
android:text="@string/vod_online_resources"
android:textColor="@color/av_text_secondary"
android:textSize="12sp" />
<ProgressBar
android:id="@+id/pbVodList"
style="?android:attr/progressBarStyleSmall"
android:layout_width="16dp"
android:layout_height="16dp"
android:visibility="gone" />
</LinearLayout>
<ProgressBar
android:id="@+id/pbVodListFull"
style="?android:attr/progressBarStyle"
android:layout_width="24dp"
android:layout_height="24dp"
android:layout_gravity="center"
android:layout_marginTop="8dp"
android:visibility="gone" />
<TextView
android:id="@+id/tvVodListError"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginTop="8dp"
android:gravity="center"
android:textColor="@color/av_stats_red"
android:textSize="12sp"
android:visibility="gone" />
<GridLayout
android:id="@+id/gridVodFormats"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginTop="8dp"
android:columnCount="4"
android:visibility="gone" />
</LinearLayout>
<Button
android:id="@+id/btnStartVod"
android:layout_width="match_parent"
android:layout_height="@dimen/av_primary_button_height"
android:layout_marginTop="18dp"
android:background="@drawable/selector_av_primary_button"
android:text="@string/play_start"
android:textColor="@color/brand_primary_text_on"
android:textSize="16sp"
android:textStyle="bold" />
<Button
android:id="@+id/btnPickLocalFile"
android:layout_width="match_parent"
android:layout_height="@dimen/av_primary_button_height"
android:layout_marginTop="8dp"
android:background="@drawable/bg_av_input_field"
android:text="@string/vod_pick_local_file"
android:textColor="@color/av_text_primary"
android:textSize="14sp" />
<Button
android:id="@+id/btnPlayAssetSample"
android:layout_width="match_parent"
android:layout_height="@dimen/av_primary_button_height"
android:layout_marginTop="8dp"
android:background="@drawable/bg_av_input_field"
android:text="@string/vod_play_asset_sample"
android:textColor="@color/av_text_primary"
android:textSize="14sp" />
</LinearLayout>
</ScrollView>
</FrameLayout>

View File

@@ -130,6 +130,14 @@
android:text="RTMP"
android:textColor="@color/brand_primary_text_on"
android:textSize="11sp" />
<TextView
android:id="@+id/tvXorBadge"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginStart="6dp"
android:textSize="13sp"
android:visibility="gone" />
</LinearLayout>
</LinearLayout>
</androidx.constraintlayout.widget.ConstraintLayout>

View File

@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background"/>
<foreground android:drawable="@mipmap/ic_launcher_foreground"/>
</adaptive-icon>
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@mipmap/ic_launcher_foreground" />
</adaptive-icon>

View File

@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background"/>
<foreground android:drawable="@mipmap/ic_launcher_foreground"/>
</adaptive-icon>
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@mipmap/ic_launcher_foreground" />
</adaptive-icon>

View File

@@ -1,7 +0,0 @@
// Pass-through fragment shader: samples the bound 2D texture at the
// interpolated coordinate and writes it out unmodified (no color processing).
precision mediump float;
varying vec2 vTextureCoord;   // texture coordinate interpolated from the vertex stage
uniform sampler2D uSampler;   // source frame texture
void main() {
gl_FragColor = texture2D(uSampler, vTextureCoord);
}

View File

@@ -1,12 +0,0 @@
// Vertex shader for textured quad rendering: transforms the vertex position by
// the model-view-projection matrix and the texture coordinate by the texture
// (surface) transform matrix, passing the result to the fragment stage.
attribute vec4 aPosition;      // vertex position in model space
attribute vec4 aTextureCoord;  // raw texture coordinate
uniform mat4 uMVPMatrix;       // model-view-projection transform
uniform mat4 uSTMatrix;        // texture-coordinate transform (e.g. from SurfaceTexture)
varying vec2 vTextureCoord;    // interpolated coordinate consumed by the fragment shader
void main() {
gl_Position = uMVPMatrix * aPosition;
vTextureCoord = (uSTMatrix * aTextureCoord).xy;
}

View File

@@ -16,7 +16,6 @@
<color name="av_tab_inactive">#8E8E93</color>
<color name="av_overlay_dim">#B3000000</color>
<color name="av_card_shadow">#26000000</color>
<color name="av_stats_bg">#B0000000</color>
<color name="av_stats_green">#33C759</color>

View File

@@ -22,7 +22,6 @@
<color name="av_tab_inactive">#8E8E93</color>
<color name="av_overlay_dim">#99000000</color>
<color name="av_card_shadow">#14000000</color>
<color name="av_stats_bg">#B0000000</color>
<color name="av_stats_green">#33C759</color>

View File

@@ -7,7 +7,6 @@
<dimen name="av_corner_small">8dp</dimen>
<dimen name="av_home_button_height">96dp</dimen>
<dimen name="av_home_button_spacing">16dp</dimen>
<dimen name="av_field_height">44dp</dimen>
<dimen name="av_primary_button_height">50dp</dimen>

View File

@@ -1,7 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">RTMPDEMO</string>
<!-- Login -->
<string name="login_welcome">欢迎使用</string>
<string name="login_username_hint">请输入用户名</string>
@@ -17,14 +15,9 @@
<string name="logout_action">退出登录</string>
<!-- Feature Hub -->
<string name="hub_subtitle_basic">选择你要体验的场景</string>
<string name="live_streaming_title">直播推拉流</string>
<string name="live_streaming_subtitle">RTMP / WHIP 推流、拉流示例</string>
<string name="interactive_live_title">VideoCall</string>
<string name="interactive_live_subtitle">多人语音、视频互动体验</string>
<!-- AVDemo (iOS aligned) -->
<string name="avdemo_title">首页</string>
<string name="tab_home">首页</string>
<string name="tab_call">通话</string>
<string name="tab_settings">设置</string>
@@ -34,7 +27,6 @@
<string name="home_vod">点播播放</string>
<string name="home_single_chat">音视频单聊</string>
<string name="home_conference">音视频会议</string>
<string name="multi_play">多路播放</string>
<string name="home_live_square">直播广场</string>
<string name="home_live_square_title">直播广场</string>
<string name="home_live_square_empty">暂无直播</string>
@@ -47,6 +39,10 @@
<string name="settings_fps">帧率 (FPS)</string>
<string name="settings_max_bitrate">最大码率 (kbps)</string>
<string name="settings_min_bitrate">最小码率 (kbps)</string>
<string name="settings_render_backend">渲染后端</string>
<string name="settings_render_backend_surface">SurfaceView</string>
<string name="settings_render_backend_texture">TextureView</string>
<string name="settings_render_backend_hint">进入播放、推流或通话页面前选定,页面内不支持切换。</string>
<string name="settings_env_title">直播环境设置</string>
<string name="settings_vhost">VHost</string>
<string name="settings_vhost_key">VHost Key</string>
@@ -64,13 +60,20 @@
<string name="push_tool_camera_on">开启摄像头</string>
<string name="push_tool_beauty_on">美颜开启</string>
<string name="push_tool_beauty_off">美颜关闭</string>
<string name="push_tool_screenshot">截图</string>
<string name="push_tool_background">背景图</string>
<string name="push_tool_frame_callback_off">帧回调关</string>
<string name="push_tool_frame_callback_texture">帧回调纹理</string>
<string name="push_tool_frame_callback_cpu_empty">帧回调空CPU</string>
<string name="push_tool_frame_callback_cpu_single">帧回调单CPU</string>
<string name="push_tool_frame_callback_cpu_double">帧回调双CPU</string>
<string name="push_tool_frame_modify_on">改帧开</string>
<string name="push_tool_frame_modify_off">改帧关</string>
<string name="push_tool_autoframing_on">取景开</string>
<string name="push_tool_autoframing_off">取景关</string>
<string name="push_stream_orientation">推流方向</string>
<string name="push_stream_portrait">竖屏推流</string>
<string name="push_stream_landscape">横屏推流</string>
<string name="push_tool_not_supported">暂不支持</string>
<string name="push_tool_mute_not_supported">推流静音暂不支持</string>
<string name="push_tool_screenshot_saved">截图已保存到相册</string>
<string name="push_tool_screenshot_failed">保存失败</string>
<string name="push_settings_apply">应用</string>
@@ -87,8 +90,10 @@
<string name="play_start">开始播放</string>
<string name="close">关闭</string>
<string name="vod_config_title">点播播放</string>
<string name="vod_config_hint">请输入 MP4 / HLS URL</string>
<string name="vod_config_hint">请输入播放地址URL、本地路径、asset:///...</string>
<string name="vod_pick_local_file">选择本地文件</string>
<string name="vod_play_asset_sample">播放包内示例</string>
<string name="vod_online_resources">在线资源</string>
<string name="protocol_rtmp">RTMP</string>
<string name="protocol_rtc">RTC</string>
@@ -109,6 +114,9 @@
<string name="live_play_foreground_text">直播播放保持中</string>
<string name="live_stats_title">直播数据</string>
<string name="live_stats_frame_callback_off">FrameCB: off</string>
<string name="live_stats_frame_callback_modify">FrameCB[modify]: %1$d fps · %2$s</string>
<string name="live_stats_frame_callback_generic">FrameCB[%1$s]: %2$d fps · %3$s</string>
<!-- Interactive Live -->
<string name="switch_camera">切换摄像头</string>
@@ -131,9 +139,6 @@
<string name="ctrl_local_preview_off">关闭预览</string>
<string name="ctrl_local_preview_on">开启预览</string>
<string name="ctrl_local_publish_off">停止推送</string>
<string name="ctrl_local_publish_on">恢复推送</string>
<string name="ctrl_remote_off">静音远端</string>
<string name="ctrl_remote_on">开启远端</string>
<string name="ctrl_audio_speaker">扬声器</string>
<string name="ctrl_audio_earpiece">听筒</string>
<string name="ctrl_mic_off">关闭麦克风</string>

View File

@@ -8,7 +8,7 @@ org.gradle.jvmargs=-Xmx4g -XX:MaxMetaspaceSize=1g -Dkotlin.daemon.jvm.options="-
# SellyCloud SDK publishing metadata
sellySdkGroupId=com.sellycloud
sellySdkArtifactId=sellycloudsdk
sellySdkVersion=1.0.0
sellySdkVersion=1.0.1
# Optional: local folder repository for sharing the built AAR (relative to project root)
sellySdkPublishRepo=build/maven-repo