sdk更新,文档更新,统一渲染后端配置,优化美颜帧处理逻辑,补充推流状态能力Auto-Framing,webrtc支持xor加解密

This commit is contained in:
2026-03-30 15:18:11 +08:00
parent 84050ab7ff
commit ae8a341fb4
35 changed files with 2289 additions and 1140 deletions

View File

@@ -3,7 +3,7 @@ plugins {
id 'org.jetbrains.kotlin.android'
}
def sdkAarPath = "libs/${findProperty("sellySdkArtifactId") ?: "sellycloudsdk"}-${findProperty("sellySdkVersion") ?: "1.0.0"}.aar"
def sdkAarPath = "libs/${findProperty("sellySdkArtifactId") ?: "sellycloudsdk"}-${findProperty("sellySdkVersion") ?: "1.0.1"}.aar"
def releaseStorePath = project.rootProject.file(findProperty("MY_STORE_FILE") ?: "release.keystore")
def hasReleaseKeystore = releaseStorePath.exists()
@@ -64,11 +64,11 @@ android {
}
dependencies {
implementation files(sdkAarPath)
implementation files(
sdkAarPath,
"libs/Kiwi.aar",
"libs/fu_core_all_feature_release.aar",
"libs/fu_model_all_feature_release.aar",
"libs/Kiwi.aar"
"libs/fu_model_all_feature_release.aar"
)
implementation fileTree(dir: "libs", include: ["*.jar"])
implementation 'androidx.appcompat:appcompat:1.7.0-alpha03'

View File

@@ -642,6 +642,12 @@ class FeatureHubActivity : AppCompatActivity() {
AvDemoSettings.Resolution.P540 -> binding.rgSettingsResolution.check(R.id.rbSettingsRes540p)
AvDemoSettings.Resolution.P720 -> binding.rgSettingsResolution.check(R.id.rbSettingsRes720p)
}
binding.rgSettingsRenderBackend.check(
when (settings.renderBackendPreference) {
AvDemoSettings.RenderBackendPreference.SURFACE_VIEW -> R.id.rbSettingsRenderSurface
AvDemoSettings.RenderBackendPreference.TEXTURE_VIEW -> R.id.rbSettingsRenderTexture
}
)
restoreEnvSettingsToUi()
}
@@ -681,13 +687,18 @@ class FeatureHubActivity : AppCompatActivity() {
R.id.rbSettingsRes540p -> AvDemoSettings.Resolution.P540
else -> AvDemoSettings.Resolution.P720
}
val renderBackendPreference = when (binding.rgSettingsRenderBackend.checkedRadioButtonId) {
R.id.rbSettingsRenderTexture -> AvDemoSettings.RenderBackendPreference.TEXTURE_VIEW
else -> AvDemoSettings.RenderBackendPreference.SURFACE_VIEW
}
val current = settingsStore.read()
return current.copy(
streamId = streamId,
resolution = res,
fps = fps,
maxBitrateKbps = maxKbps,
minBitrateKbps = minKbps
minBitrateKbps = minKbps,
renderBackendPreference = renderBackendPreference
)
}

View File

@@ -2,6 +2,7 @@ package com.demo.SellyCloudSDK.avdemo
import android.content.Context
import androidx.core.content.edit
import com.sellycloud.sellycloudsdk.render.RenderBackend
data class AvDemoSettings(
val streamId: String,
@@ -12,9 +13,20 @@ data class AvDemoSettings(
val xorKeyHex: String = "",
val useUrlMode: Boolean = false,
val pushUrl: String = "",
val renderBackendPreference: RenderBackendPreference = RenderBackendPreference.SURFACE_VIEW,
) {
enum class Resolution { P360, P480, P540, P720 }
/**
 * UI-level preference for which Android view type backs video rendering.
 * Constant order (SURFACE_VIEW first) is preserved so persisted names and
 * ordinals keep their meaning.
 */
enum class RenderBackendPreference {
    SURFACE_VIEW,
    TEXTURE_VIEW;

    /** True when this preference selects the TextureView backend. */
    fun isTextureView(): Boolean = when (this) {
        TEXTURE_VIEW -> true
        SURFACE_VIEW -> false
    }

    /** Maps this demo-side preference onto the SDK's [RenderBackend] value. */
    fun toSdkBackend(): RenderBackend = when (this) {
        TEXTURE_VIEW -> RenderBackend.TEXTURE_VIEW
        SURFACE_VIEW -> RenderBackend.SURFACE_VIEW
    }
}
fun resolutionSize(): Pair<Int, Int> = when (resolution) {
Resolution.P360 -> 640 to 360
Resolution.P480 -> 854 to 480
@@ -34,6 +46,13 @@ class AvDemoSettingsStore(context: Context) {
AvDemoSettings.Resolution.P540.name -> AvDemoSettings.Resolution.P540
else -> AvDemoSettings.Resolution.P720
}
val renderBackendPreference = when (
prefs.getString(KEY_RENDER_BACKEND, AvDemoSettings.RenderBackendPreference.SURFACE_VIEW.name)
) {
AvDemoSettings.RenderBackendPreference.TEXTURE_VIEW.name ->
AvDemoSettings.RenderBackendPreference.TEXTURE_VIEW
else -> AvDemoSettings.RenderBackendPreference.SURFACE_VIEW
}
return AvDemoSettings(
streamId = prefs.getString(KEY_STREAM_ID, DEFAULT_STREAM_ID).orEmpty(),
resolution = resolution,
@@ -42,7 +61,8 @@ class AvDemoSettingsStore(context: Context) {
minBitrateKbps = prefs.getInt(KEY_MIN_KBPS, DEFAULT_MIN_KBPS),
xorKeyHex = prefs.getString(KEY_XOR_KEY_HEX, "").orEmpty(),
useUrlMode = prefs.getBoolean(KEY_USE_URL_MODE, false),
pushUrl = prefs.getString(KEY_PUSH_URL, "").orEmpty()
pushUrl = prefs.getString(KEY_PUSH_URL, "").orEmpty(),
renderBackendPreference = renderBackendPreference
)
}
@@ -56,6 +76,7 @@ class AvDemoSettingsStore(context: Context) {
putString(KEY_XOR_KEY_HEX, settings.xorKeyHex)
putBoolean(KEY_USE_URL_MODE, settings.useUrlMode)
putString(KEY_PUSH_URL, settings.pushUrl)
putString(KEY_RENDER_BACKEND, settings.renderBackendPreference.name)
}
}
@@ -74,5 +95,6 @@ class AvDemoSettingsStore(context: Context) {
private const val KEY_XOR_KEY_HEX = "xor_key_hex"
private const val KEY_USE_URL_MODE = "use_url_mode"
private const val KEY_PUSH_URL = "push_url"
private const val KEY_RENDER_BACKEND = "render_backend"
}
}

View File

@@ -1,185 +0,0 @@
package com.demo.SellyCloudSDK.beauty
//
//import android.app.Dialog
//import android.content.Context
//import android.os.Bundle
//import android.widget.SeekBar
//import android.widget.TextView
//import android.widget.Switch
//import android.widget.Button
//import android.view.Window
//
///**
// * 美颜参数控制对话框
// */
//class BeautyControlDialog(
// context: Context,
//) : Dialog(context) {
//
// private lateinit var switchBeautyEnable: Switch
// private lateinit var seekBarBeautyIntensity: SeekBar
// private lateinit var seekBarFilterIntensity: SeekBar
// private lateinit var seekBarColorIntensity: SeekBar
// private lateinit var seekBarRedIntensity: SeekBar
// private lateinit var seekBarEyeBrightIntensity: SeekBar
// private lateinit var seekBarToothIntensity: SeekBar
//
// private lateinit var tvBeautyValue: TextView
// private lateinit var tvFilterValue: TextView
// private lateinit var tvColorValue: TextView
// private lateinit var tvRedValue: TextView
// private lateinit var tvEyeBrightValue: TextView
// private lateinit var tvToothValue: TextView
// private lateinit var btnClose: Button
//
// override fun onCreate(savedInstanceState: Bundle?) {
// super.onCreate(savedInstanceState)
// requestWindowFeature(Window.FEATURE_NO_TITLE)
// setContentView(R.layout.dialog_beauty_control)
//
// initViews()
// setupListeners()
// updateUI()
// }
//
// private fun initViews() {
// switchBeautyEnable = findViewById(R.id.switchBeautyEnable)
// seekBarBeautyIntensity = findViewById(R.id.seekBarBeautyIntensity)
// seekBarFilterIntensity = findViewById(R.id.seekBarFilterIntensity)
// seekBarColorIntensity = findViewById(R.id.seekBarColorIntensity)
// seekBarRedIntensity = findViewById(R.id.seekBarRedIntensity)
// seekBarEyeBrightIntensity = findViewById(R.id.seekBarEyeBrightIntensity)
// seekBarToothIntensity = findViewById(R.id.seekBarToothIntensity)
//
// tvBeautyValue = findViewById(R.id.tvBeautyValue)
// tvFilterValue = findViewById(R.id.tvFilterValue)
// tvColorValue = findViewById(R.id.tvColorValue)
// tvRedValue = findViewById(R.id.tvRedValue)
// tvEyeBrightValue = findViewById(R.id.tvEyeBrightValue)
// tvToothValue = findViewById(R.id.tvToothValue)
// btnClose = findViewById(R.id.btnClose)
// }
//
// private fun setupListeners() {
// // 美颜开关
// switchBeautyEnable.setOnCheckedChangeListener { _, isChecked ->
// streamingService?.enableBeauty(isChecked)
// // 根据开关状态启用/禁用参数调节
// updateSeekBarsEnabled(isChecked)
// }
//
// // 美颜强度调节 (0-100, 转换为0.0-10.0)
// seekBarBeautyIntensity.setOnSeekBarChangeListener(object : SeekBar.OnSeekBarChangeListener {
// override fun onProgressChanged(seekBar: SeekBar?, progress: Int, fromUser: Boolean) {
// val intensity = progress / 10.0
// tvBeautyValue.text = String.format("%.1f", intensity)
// streamingService?.setBeautyIntensity(intensity)
// }
// override fun onStartTrackingTouch(seekBar: SeekBar?) {}
// override fun onStopTrackingTouch(seekBar: SeekBar?) {}
// })
//
// // 滤镜强度调节 (0-10, 转换为0.0-1.0)
// seekBarFilterIntensity.setOnSeekBarChangeListener(object : SeekBar.OnSeekBarChangeListener {
// override fun onProgressChanged(seekBar: SeekBar?, progress: Int, fromUser: Boolean) {
// val intensity = progress / 10.0
// tvFilterValue.text = String.format("%.1f", intensity)
// streamingService?.setFilterIntensity(intensity)
// }
// override fun onStartTrackingTouch(seekBar: SeekBar?) {}
// override fun onStopTrackingTouch(seekBar: SeekBar?) {}
// })
//
// // 美白强度调节
// seekBarColorIntensity.setOnSeekBarChangeListener(object : SeekBar.OnSeekBarChangeListener {
// override fun onProgressChanged(seekBar: SeekBar?, progress: Int, fromUser: Boolean) {
// val intensity = progress / 10.0
// tvColorValue.text = String.format("%.1f", intensity)
// streamingService?.setColorIntensity(intensity)
// }
// override fun onStartTrackingTouch(seekBar: SeekBar?) {}
// override fun onStopTrackingTouch(seekBar: SeekBar?) {}
// })
//
// // 红润强度调节
// seekBarRedIntensity.setOnSeekBarChangeListener(object : SeekBar.OnSeekBarChangeListener {
// override fun onProgressChanged(seekBar: SeekBar?, progress: Int, fromUser: Boolean) {
// val intensity = progress / 10.0
// tvRedValue.text = String.format("%.1f", intensity)
// streamingService?.setRedIntensity(intensity)
// }
// override fun onStartTrackingTouch(seekBar: SeekBar?) {}
// override fun onStopTrackingTouch(seekBar: SeekBar?) {}
// })
//
// // 亮眼强度调节
// seekBarEyeBrightIntensity.setOnSeekBarChangeListener(object : SeekBar.OnSeekBarChangeListener {
// override fun onProgressChanged(seekBar: SeekBar?, progress: Int, fromUser: Boolean) {
// val intensity = progress / 10.0
// tvEyeBrightValue.text = String.format("%.1f", intensity)
// streamingService?.setEyeBrightIntensity(intensity)
// }
// override fun onStartTrackingTouch(seekBar: SeekBar?) {}
// override fun onStopTrackingTouch(seekBar: SeekBar?) {}
// })
//
// // 美牙强度调节
// seekBarToothIntensity.setOnSeekBarChangeListener(object : SeekBar.OnSeekBarChangeListener {
// override fun onProgressChanged(seekBar: SeekBar?, progress: Int, fromUser: Boolean) {
// val intensity = progress / 10.0
// tvToothValue.text = String.format("%.1f", intensity)
// streamingService?.setToothIntensity(intensity)
// }
// override fun onStartTrackingTouch(seekBar: SeekBar?) {}
// override fun onStopTrackingTouch(seekBar: SeekBar?) {}
// })
//
// // 关闭按钮
// btnClose.setOnClickListener {
// dismiss()
// }
// }
//
// private fun updateUI() {
// // 获取当前美颜状态并更新UI
// val isBeautyEnabled = streamingService?.isBeautyEnabled() ?: true
// switchBeautyEnable.isChecked = isBeautyEnabled
//
// // 获取当前美颜参数
// val params = streamingService?.getCurrentBeautyParams() ?: mapOf()
//
// // 设置各项参数的当前值
// val blurIntensity = params["blurIntensity"] as? Double ?: 6.0
// val filterIntensity = params["filterIntensity"] as? Double ?: 0.7
// val colorIntensity = params["colorIntensity"] as? Double ?: 0.5
// val redIntensity = params["redIntensity"] as? Double ?: 0.5
// val eyeBrightIntensity = params["eyeBrightIntensity"] as? Double ?: 1.0
// val toothIntensity = params["toothIntensity"] as? Double ?: 1.0
//
// seekBarBeautyIntensity.progress = (blurIntensity * 10).toInt()
// seekBarFilterIntensity.progress = (filterIntensity * 10).toInt()
// seekBarColorIntensity.progress = (colorIntensity * 10).toInt()
// seekBarRedIntensity.progress = (redIntensity * 10).toInt()
// seekBarEyeBrightIntensity.progress = (eyeBrightIntensity * 10).toInt()
// seekBarToothIntensity.progress = (toothIntensity * 10).toInt()
//
// tvBeautyValue.text = String.format("%.1f", blurIntensity)
// tvFilterValue.text = String.format("%.1f", filterIntensity)
// tvColorValue.text = String.format("%.1f", colorIntensity)
// tvRedValue.text = String.format("%.1f", redIntensity)
// tvEyeBrightValue.text = String.format("%.1f", eyeBrightIntensity)
// tvToothValue.text = String.format("%.1f", toothIntensity)
//
// // 根据开关状态启用/禁用参数调节
// updateSeekBarsEnabled(isBeautyEnabled)
// }
//
// private fun updateSeekBarsEnabled(enabled: Boolean) {
// seekBarBeautyIntensity.isEnabled = enabled
// seekBarFilterIntensity.isEnabled = enabled
// seekBarColorIntensity.isEnabled = enabled
// seekBarRedIntensity.isEnabled = enabled
// seekBarEyeBrightIntensity.isEnabled = enabled
// seekBarToothIntensity.isEnabled = enabled
// }
//}

View File

@@ -1,264 +0,0 @@
package com.demo.SellyCloudSDK.beauty
import android.content.Context
import android.opengl.GLES20
import android.opengl.Matrix
import android.util.Log
import com.demo.SellyCloudSDK.R
import com.pedro.encoder.input.gl.render.filters.BaseFilterRender
import com.pedro.encoder.utils.gl.GlUtil
import java.nio.ByteBuffer
import java.nio.ByteOrder
/**
 * FaceUnity beauty filter that plugs into RootEncoder's GL filter chain.
 * Holds only the ApplicationContext (never an Activity) so it keeps working
 * when the app is backgrounded.
 */
class FUBeautyFilterRender(
    private val fuRenderer: FURenderer
) : BaseFilterRender() {

    private val TAG = "FUBeautyFilterRender"

    // Whether beauty processing is applied; when false frames are passed through untouched.
    private var isBeautyEnabled = true

    // Tracks the active camera so FaceUnity receives the correct orientation hint.
    private var currentCameraFacing: com.pedro.encoder.input.video.CameraHelper.Facing =
        com.pedro.encoder.input.video.CameraHelper.Facing.BACK

    // Standard vertex data following pedro's pattern (X, Y, Z, U, V)
    private val squareVertexDataFilter = floatArrayOf(
        // X, Y, Z, U, V
        -1f, -1f, 0f, 0f, 0f, // bottom left
        1f, -1f, 0f, 1f, 0f, // bottom right
        -1f, 1f, 0f, 0f, 1f, // top left
        1f, 1f, 0f, 1f, 1f // top right
    )

    // Current frame dimensions, captured in initGl().
    private var frameW = 0
    private var frameH = 0
    // ApplicationContext saved before super.initGl() so initGlFilter() never sees it unset.
    private lateinit var appContext: Context

    // GLSL program and attribute/uniform handles (-1 until initGlFilter succeeds).
    private var program = -1
    private var aPositionHandle = -1
    private var aTextureHandle = -1
    private var uMVPMatrixHandle = -1
    private var uSTMatrixHandle = -1
    private var uSamplerHandle = -1

    // Guards against drawing before the shader program is ready.
    private var isInitialized = false

    init {
        squareVertex = ByteBuffer.allocateDirect(squareVertexDataFilter.size * FLOAT_SIZE_BYTES)
            .order(ByteOrder.nativeOrder())
            .asFloatBuffer()
        squareVertex.put(squareVertexDataFilter).position(0)
        Matrix.setIdentityM(MVPMatrix, 0)
        Matrix.setIdentityM(STMatrix, 0)
    }

    override fun initGl(
        width: Int,
        height: Int,
        context: Context,
        previewWidth: Int,
        previewHeight: Int
    ) {
        // The GL context may have been recreated: force the filter and FaceUnity
        // resources to be reinitialized.
        isInitialized = false
        program = -1
        // Save the ApplicationContext first, so appContext is set when super.initGl
        // triggers initGlFilter internally.
        this.appContext = context.applicationContext
        super.initGl(width, height, context, previewWidth, previewHeight)
        frameW = width
        frameH = height
        // Rebind FaceUnity's GL resources to the new context.
        fuRenderer.reinitializeGlContextBlocking()
        Log.d(TAG, "initGl: width=$width, height=$height, context=${context.javaClass.simpleName}")
    }

    override fun initGlFilter(context: Context?) {
        if (isInitialized) {
            Log.d(TAG, "Filter already initialized. Skipping initGlFilter.")
            return
        }
        try {
            // Use the ApplicationContext to avoid holding an Activity reference.
            val safeContext = context?.applicationContext ?: appContext
            val vertexShader = GlUtil.getStringFromRaw(safeContext, R.raw.simple_vertex)
            val fragmentShader = GlUtil.getStringFromRaw(safeContext, R.raw.fu_base_fragment)
            program = GlUtil.createProgram(vertexShader, fragmentShader)
            aPositionHandle = GLES20.glGetAttribLocation(program, "aPosition")
            aTextureHandle = GLES20.glGetAttribLocation(program, "aTextureCoord")
            uMVPMatrixHandle = GLES20.glGetUniformLocation(program, "uMVPMatrix")
            uSTMatrixHandle = GLES20.glGetUniformLocation(program, "uSTMatrix")
            uSamplerHandle = GLES20.glGetUniformLocation(program, "uSampler")
            isInitialized = true
            Log.d(TAG, "initGlFilter completed - program: $program")
        } catch (e: Exception) {
            Log.e(TAG, "initGlFilter failed", e)
            isInitialized = false
        }
    }

    /**
     * Sets the camera facing (called externally when the camera is switched).
     */
    fun setCameraFacing(facing: com.pedro.encoder.input.video.CameraHelper.Facing) {
        currentCameraFacing = facing
        fuRenderer.setCameraFacing(facing)
        Log.d(TAG, "Camera facing updated: $facing")
    }

    /**
     * Core render step called by BaseFilterRender every frame.
     */
    override fun drawFilter() {
        // Skip drawing entirely until the shader program is ready.
        if (!isInitialized) {
            Log.w(TAG, "Filter not initialized, skipping draw")
            return
        }
        // Beauty disabled: render the raw texture with a simple pass-through.
        if (!isBeautyEnabled) {
            drawPassThrough()
            return
        }
        if (!fuRenderer.isAuthSuccess || fuRenderer.fuRenderKit == null) {
            // Fallback: pass through instead of returning with nothing drawn.
            drawPassThrough()
            return
        }
        if (previousTexId <= 0 || frameW <= 0 || frameH <= 0) {
            return
        }
        try {
            // Save the current FBO and viewport — the external library may rewrite them.
            val prevFbo = IntArray(1)
            val prevViewport = IntArray(4)
            GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, prevFbo, 0)
            GLES20.glGetIntegerv(GLES20.GL_VIEWPORT, prevViewport, 0)
            // Render with the camera-facing-aware method.
            val processedTexId = fuRenderer.onDrawFrame(previousTexId, frameW, frameH, currentCameraFacing)
            // Restore the FBO and viewport to avoid a black screen.
            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, prevFbo[0])
            GLES20.glViewport(prevViewport[0], prevViewport[1], prevViewport[2], prevViewport[3])
            // Use processed texture if available, otherwise fallback to original
            val textureIdToDraw = if (processedTexId > 0) processedTexId else previousTexId
            // Now draw using our own shader program
            GLES20.glUseProgram(program)
            // Set vertex position
            squareVertex.position(SQUARE_VERTEX_DATA_POS_OFFSET)
            GLES20.glVertexAttribPointer(aPositionHandle, 3, GLES20.GL_FLOAT, false,
                SQUARE_VERTEX_DATA_STRIDE_BYTES, squareVertex)
            GLES20.glEnableVertexAttribArray(aPositionHandle)
            // Set texture coordinates
            squareVertex.position(SQUARE_VERTEX_DATA_UV_OFFSET)
            GLES20.glVertexAttribPointer(aTextureHandle, 2, GLES20.GL_FLOAT, false,
                SQUARE_VERTEX_DATA_STRIDE_BYTES, squareVertex)
            GLES20.glEnableVertexAttribArray(aTextureHandle)
            // Set transformation matrices
            GLES20.glUniformMatrix4fv(uMVPMatrixHandle, 1, false, MVPMatrix, 0)
            GLES20.glUniformMatrix4fv(uSTMatrixHandle, 1, false, STMatrix, 0)
            // Bind texture and draw
            GLES20.glUniform1i(uSamplerHandle, 0)
            GLES20.glActiveTexture(GLES20.GL_TEXTURE0)
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureIdToDraw)
            // Draw the rectangle
            GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4)
        } catch (e: Exception) {
            Log.e(TAG, "Error in beauty processing", e)
            // Fallback: draw the unprocessed frame.
            drawPassThrough()
        }
    }

    /**
     * Pass-through rendering: draws the original texture with no beauty processing.
     */
    private fun drawPassThrough() {
        if (previousTexId <= 0 || !isInitialized) {
            return
        }
        try {
            // Render using the original (unprocessed) texture.
            GLES20.glUseProgram(program)
            // Set vertex position
            squareVertex.position(SQUARE_VERTEX_DATA_POS_OFFSET)
            GLES20.glVertexAttribPointer(aPositionHandle, 3, GLES20.GL_FLOAT, false,
                SQUARE_VERTEX_DATA_STRIDE_BYTES, squareVertex)
            GLES20.glEnableVertexAttribArray(aPositionHandle)
            // Set texture coordinates
            squareVertex.position(SQUARE_VERTEX_DATA_UV_OFFSET)
            GLES20.glVertexAttribPointer(aTextureHandle, 2, GLES20.GL_FLOAT, false,
                SQUARE_VERTEX_DATA_STRIDE_BYTES, squareVertex)
            GLES20.glEnableVertexAttribArray(aTextureHandle)
            // Set transformation matrices
            GLES20.glUniformMatrix4fv(uMVPMatrixHandle, 1, false, MVPMatrix, 0)
            GLES20.glUniformMatrix4fv(uSTMatrixHandle, 1, false, STMatrix, 0)
            // Bind original texture and draw
            GLES20.glUniform1i(uSamplerHandle, 0)
            GLES20.glActiveTexture(GLES20.GL_TEXTURE0)
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, previousTexId)
            // Draw the rectangle
            GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4)
        } catch (e: Exception) {
            Log.e(TAG, "Error in pass-through rendering", e)
        }
    }

    override fun disableResources() {
        GlUtil.disableResources(aTextureHandle, aPositionHandle)
    }

    override fun release() {
        isInitialized = false
        if (program != -1) {
            GLES20.glDeleteProgram(program)
            program = -1
        }
        isInitialized = false // NOTE(review): redundant — already cleared at the top of release()
        Log.d(TAG, "FUBeautyFilterRender released")
    }

    /**
     * Enables or disables beauty processing for subsequent frames.
     */
    fun setBeautyEnabled(enabled: Boolean) {
        isBeautyEnabled = enabled
        Log.d(TAG, "Beauty enabled: $enabled")
    }

    /**
     * Returns whether beauty processing is currently enabled.
     */
    fun isBeautyEnabled(): Boolean = isBeautyEnabled
}

View File

@@ -1,6 +1,7 @@
package com.demo.SellyCloudSDK.beauty
import android.content.Context
import android.opengl.GLES20
import android.util.Log
import com.faceunity.core.callback.OperateCallback
import com.faceunity.core.entity.FUBundleData
@@ -19,9 +20,10 @@ import com.faceunity.wrapper.faceunity
import com.pedro.encoder.input.video.CameraHelper
import java.io.File
import java.io.IOException
import java.util.concurrent.CountDownLatch
import java.nio.ByteBuffer
import java.nio.ByteOrder
import java.nio.FloatBuffer
import java.util.concurrent.Executors
import java.util.concurrent.TimeUnit
/**
@@ -52,14 +54,18 @@ class FURenderer(private val context: Context) {
private val BUNDLE_AI_HUMAN = "model" + File.separator + "ai_human_processor.bundle"
private val BUNDLE_FACE_BEAUTY = "graphics" + File.separator + "face_beautification.bundle"
@Volatile
private var workerThreadRef: Thread? = null
private val workerThread = Executors.newSingleThreadExecutor { task ->
Thread(task, "FURenderer-Worker").also { workerThreadRef = it }
Thread(task, "FURenderer-Worker")
}
// 添加摄像头朝向管理
private var currentCameraFacing: CameraHelper.Facing = CameraHelper.Facing.BACK
private var blitProgram = 0
private var blitFramebuffer = 0
private var blitPositionLoc = 0
private var blitTexCoordLoc = 0
private var blitTextureLoc = 0
private var blitQuadBuffer: FloatBuffer? = null
/**
* 初始化美颜SDK
@@ -80,7 +86,7 @@ class FURenderer(private val context: Context) {
// 初始化成功后,在后台线程加载所需资源
workerThread.submit {
try {
faceunity.fuSetUseTexAsync(1)
applyTextureOutputMode()
// 获取 FURenderKit 实例
fuRenderKit = FURenderKit.getInstance()
@@ -142,8 +148,7 @@ class FURenderer(private val context: Context) {
// 重新应用美颜参数与道具
if (faceBeauty == null) loadBeautyBundle()
fuRenderKit?.faceBeauty = faceBeauty
// 再次开启异步纹理模式(稳妥起见)
try { faceunity.fuSetUseTexAsync(1) } catch (_: Throwable) {}
applyTextureOutputMode()
Log.d(TAG, "onGlContextRecreated: done")
} catch (e: Exception) {
Log.e(TAG, "onGlContextRecreated error", e)
@@ -206,6 +211,53 @@ class FURenderer(private val context: Context) {
}
}
/**
 * Runs FaceUnity beauty processing on [inputTex] and copies the result into
 * [outputTextureId] via an internal blit framebuffer.
 *
 * Must run on the thread that currently owns the GL context. No-ops when
 * [outputTextureId] is invalid; returns early when FaceUnity already rendered
 * directly into the output texture. The caller's framebuffer binding and
 * viewport are saved and restored around the blit.
 */
fun renderProcessedTextureToOutput(
    inputTex: Int,
    outputTextureId: Int,
    width: Int,
    height: Int,
    facing: CameraHelper.Facing
) {
    if (outputTextureId <= 0) return
    val renderedTextureId = onDrawFrame(inputTex, width, height, facing)
    // Pick the texture to copy: FaceUnity's output when valid, otherwise the raw input.
    val sourceTextureId = when {
        renderedTextureId == outputTextureId -> return // already rendered in place
        renderedTextureId > 0 -> renderedTextureId
        else -> inputTex
    }
    ensureBlitResources()
    if (blitProgram <= 0 || blitFramebuffer <= 0) return
    // Save the caller's framebuffer binding and viewport so they can be restored below.
    val previousFramebuffer = IntArray(1)
    val previousViewport = IntArray(4)
    GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, previousFramebuffer, 0)
    GLES20.glGetIntegerv(GLES20.GL_VIEWPORT, previousViewport, 0)
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, blitFramebuffer)
    GLES20.glFramebufferTexture2D(
        GLES20.GL_FRAMEBUFFER,
        GLES20.GL_COLOR_ATTACHMENT0,
        GLES20.GL_TEXTURE_2D,
        outputTextureId,
        0
    )
    GLES20.glViewport(0, 0, width, height)
    drawRgbTexture(sourceTextureId)
    // Detach the output texture before handing the framebuffer back.
    GLES20.glFramebufferTexture2D(
        GLES20.GL_FRAMEBUFFER,
        GLES20.GL_COLOR_ATTACHMENT0,
        GLES20.GL_TEXTURE_2D,
        0,
        0
    )
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, previousFramebuffer[0])
    GLES20.glViewport(
        previousViewport[0],
        previousViewport[1],
        previousViewport[2],
        previousViewport[3]
    )
}
/**
* 加载美颜道具并设置默认参数
*/
@@ -235,19 +287,7 @@ class FURenderer(private val context: Context) {
if (!isAuthSuccess) return
workerThread.execute {
try {
Log.d(TAG, "Releasing GL context resources for protocol switch")
isGlInitialized = false
// 释放渲染器的 GL 资源
fuRenderKit?.release()
fuRenderKit = null
// 注意:不清空 faceBeauty保留美颜参数配置
Log.d(TAG, "GL context resources released successfully")
} catch (e: Exception) {
Log.e(TAG, "Error releasing GL context", e)
}
releaseGlContextOnCurrentThread()
}
}
@@ -256,33 +296,36 @@ class FURenderer(private val context: Context) {
*/
fun reinitializeGlContext() {
if (!isAuthSuccess) return
workerThread.execute { doReinitializeGlContext() }
workerThread.execute { reinitializeGlContextOnCurrentThread() }
}
/**
* 重新初始化 GL 上下文(同步等待完成,用于避免美颜空窗)
* 供 RTC texture processor 使用:必须在当前持有 GL context 的线程上执行。
*/
fun reinitializeGlContextBlocking(timeoutMs: Long = 2000L) {
fun reinitializeGlContextOnCurrentThread() {
if (!isAuthSuccess) return
if (Thread.currentThread() === workerThreadRef) {
doReinitializeGlContext()
return
}
val latch = CountDownLatch(1)
workerThread.execute {
try {
doReinitializeGlContext()
} finally {
latch.countDown()
}
}
try {
if (!latch.await(timeoutMs, TimeUnit.MILLISECONDS)) {
Log.w(TAG, "GL context reinit timeout: ${timeoutMs}ms")
}
} catch (_: InterruptedException) {
Thread.currentThread().interrupt()
Log.w(TAG, "GL context reinit interrupted")
doReinitializeGlContext()
} catch (e: Exception) {
Log.e(TAG, "Error reinitializing GL context on current thread", e)
isGlInitialized = false
}
}
/**
 * For use by the RTC texture processor: must be executed on the thread that
 * currently owns the GL context.
 *
 * Releases the blit resources and the FURenderKit instance; beauty parameter
 * configuration (faceBeauty) is intentionally kept so it can be re-applied
 * after the GL context is recreated. No-op before successful authentication.
 */
fun releaseGlContextOnCurrentThread() {
    if (!isAuthSuccess) return
    try {
        Log.d(TAG, "Releasing GL context resources on current thread")
        isGlInitialized = false
        releaseBlitResources()
        fuRenderKit?.release()
        fuRenderKit = null
        Log.d(TAG, "GL context resources released successfully")
    } catch (e: Exception) {
        Log.e(TAG, "Error releasing GL context on current thread", e)
    }
}
@@ -293,8 +336,7 @@ class FURenderer(private val context: Context) {
// 重新获取 FURenderKit 实例(绑定到新的 GL 上下文)
fuRenderKit = FURenderKit.getInstance()
// 重新设置异步纹理模式
faceunity.fuSetUseTexAsync(1)
applyTextureOutputMode()
// 如果之前有美颜配置,重新应用
if (faceBeauty != null) {
@@ -316,6 +358,9 @@ class FURenderer(private val context: Context) {
fun release() {
Log.d(TAG, "Releasing FURenderer resources")
isGlInitialized = false
try {
releaseBlitResources()
} catch (_: Exception) {}
try {
fuRenderKit?.release()
} catch (_: Exception) {}
@@ -327,4 +372,132 @@ class FURenderer(private val context: Context) {
workerThread.shutdown()
} catch (_: Exception) {}
}
/**
 * Lazily (re)creates the GL program, quad vertex buffer, and framebuffer used
 * to blit the processed texture into the caller-supplied output texture.
 * Must be called on the thread that owns the GL context.
 *
 * Fix: the program is now only created when it is actually missing. The old
 * code recreated it unconditionally whenever any resource was missing, which
 * leaked the previous GL program object if e.g. only the framebuffer was gone.
 */
private fun ensureBlitResources() {
    if (blitProgram > 0 && blitFramebuffer > 0 && blitQuadBuffer != null) return
    if (blitProgram <= 0) {
        blitProgram = createProgram(BLIT_VERTEX_SHADER, BLIT_FRAGMENT_SHADER)
        if (blitProgram <= 0) return
        // Attribute/uniform locations are tied to the program, so refresh them together.
        blitPositionLoc = GLES20.glGetAttribLocation(blitProgram, "aPosition")
        blitTexCoordLoc = GLES20.glGetAttribLocation(blitProgram, "aTextureCoord")
        blitTextureLoc = GLES20.glGetUniformLocation(blitProgram, "uTexture")
    }
    if (blitQuadBuffer == null) {
        // Interleaved (x, y, u, v) fullscreen quad; 4 bytes per float.
        blitQuadBuffer = ByteBuffer.allocateDirect(BLIT_QUAD.size * 4)
            .order(ByteOrder.nativeOrder())
            .asFloatBuffer()
            .put(BLIT_QUAD)
            .also { it.position(0) }
    }
    if (blitFramebuffer <= 0) {
        val framebuffers = IntArray(1)
        GLES20.glGenFramebuffers(1, framebuffers, 0)
        blitFramebuffer = framebuffers[0]
    }
}
/**
 * Draws [textureId] as a fullscreen quad into the currently bound framebuffer
 * using the blit program. The caller must have bound the target framebuffer
 * and set the viewport. Stride 16 = 4 floats per vertex (x, y, u, v); texture
 * coordinates start at float offset 2.
 */
private fun drawRgbTexture(textureId: Int) {
    val quad = blitQuadBuffer ?: return // blit resources not initialized yet
    GLES20.glUseProgram(blitProgram)
    quad.position(0)
    GLES20.glVertexAttribPointer(blitPositionLoc, 2, GLES20.GL_FLOAT, false, 16, quad)
    GLES20.glEnableVertexAttribArray(blitPositionLoc)
    quad.position(2)
    GLES20.glVertexAttribPointer(blitTexCoordLoc, 2, GLES20.GL_FLOAT, false, 16, quad)
    GLES20.glEnableVertexAttribArray(blitTexCoordLoc)
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0)
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId)
    GLES20.glUniform1i(blitTextureLoc, 0)
    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4)
    // Unbind everything so subsequent draw calls start from a clean state.
    GLES20.glDisableVertexAttribArray(blitPositionLoc)
    GLES20.glDisableVertexAttribArray(blitTexCoordLoc)
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0)
    GLES20.glUseProgram(0)
}
/**
 * Frees the blit program and framebuffer and drops the quad buffer.
 * Safe to call repeatedly; ids are zeroed so a later ensureBlitResources()
 * recreates everything.
 */
private fun releaseBlitResources() {
    val programId = blitProgram
    if (programId > 0) {
        GLES20.glDeleteProgram(programId)
        blitProgram = 0
    }
    val framebufferId = blitFramebuffer
    if (framebufferId > 0) {
        GLES20.glDeleteFramebuffers(1, intArrayOf(framebufferId), 0)
        blitFramebuffer = 0
    }
    blitQuadBuffer = null
}
/**
 * Compiles [vertexSource] and [fragmentSource] and links them into a GL
 * program. Returns the program id, or 0 on any compile/link failure (failures
 * are logged). Shaders are deleted after linking so they do not leak — GL
 * defers their destruction while attached.
 */
private fun createProgram(vertexSource: String, fragmentSource: String): Int {
    val vertexShader = compileShader(GLES20.GL_VERTEX_SHADER, vertexSource)
    val fragmentShader = compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource)
    if (vertexShader <= 0 || fragmentShader <= 0) {
        // Clean up whichever shader did compile before bailing out.
        if (vertexShader > 0) GLES20.glDeleteShader(vertexShader)
        if (fragmentShader > 0) GLES20.glDeleteShader(fragmentShader)
        return 0
    }
    val program = GLES20.glCreateProgram()
    if (program <= 0) return 0
    GLES20.glAttachShader(program, vertexShader)
    GLES20.glAttachShader(program, fragmentShader)
    GLES20.glLinkProgram(program)
    val status = IntArray(1)
    GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, status, 0)
    // Shaders are no longer needed once the program is linked (or failed to link).
    GLES20.glDeleteShader(vertexShader)
    GLES20.glDeleteShader(fragmentShader)
    if (status[0] != GLES20.GL_TRUE) {
        Log.w(TAG, "Failed to link blit program: ${GLES20.glGetProgramInfoLog(program)}")
        GLES20.glDeleteProgram(program)
        return 0
    }
    return program
}
/**
 * Compiles a single shader of the given [type] from [source].
 * Returns the shader handle on success, 0 on failure (the failed shader is
 * deleted and the info log is emitted as a warning).
 */
private fun compileShader(type: Int, source: String): Int {
    val handle = GLES20.glCreateShader(type)
    if (handle <= 0) return 0
    GLES20.glShaderSource(handle, source)
    GLES20.glCompileShader(handle)
    val compileStatus = IntArray(1)
    GLES20.glGetShaderiv(handle, GLES20.GL_COMPILE_STATUS, compileStatus, 0)
    if (compileStatus[0] == GLES20.GL_TRUE) {
        return handle
    }
    Log.w(TAG, "Failed to compile shader: ${GLES20.glGetShaderInfoLog(handle)}")
    GLES20.glDeleteShader(handle)
    return 0
}
/**
 * Enables FaceUnity's asynchronous texture output mode.
 * Any failure (including native-level Throwables) is logged and swallowed so
 * rendering setup can continue without it.
 */
private fun applyTextureOutputMode() {
    runCatching { faceunity.fuSetUseTexAsync(1) }
        .onFailure { Log.w(TAG, "Failed to configure texture output mode", it) }
}
companion object {
    // Fullscreen quad, interleaved (x, y, u, v) per vertex, drawn as a TRIANGLE_STRIP.
    private val BLIT_QUAD = floatArrayOf(
        -1f, -1f, 0f, 0f,
        1f, -1f, 1f, 0f,
        -1f, 1f, 0f, 1f,
        1f, 1f, 1f, 1f,
    )

    // Minimal pass-through vertex shader for the blit step.
    private const val BLIT_VERTEX_SHADER = """
attribute vec4 aPosition;
attribute vec2 aTextureCoord;
varying vec2 vTextureCoord;
void main() {
gl_Position = aPosition;
vTextureCoord = aTextureCoord;
}
"""

    // Samples the RGB texture unchanged into the target framebuffer.
    private const val BLIT_FRAGMENT_SHADER = """
precision mediump float;
uniform sampler2D uTexture;
varying vec2 vTextureCoord;
void main() {
gl_FragColor = texture2D(uTexture, vTextureCoord);
}
"""
}
}

View File

@@ -2,9 +2,12 @@ package com.demo.SellyCloudSDK.beauty
import android.content.Context
import android.util.Log
import com.pedro.encoder.input.gl.render.filters.BaseFilterRender
import com.pedro.encoder.input.video.CameraHelper
import com.sellycloud.sellycloudsdk.VideoFrameInterceptor
import com.sellycloud.sellycloudsdk.VideoProcessFormat
import com.sellycloud.sellycloudsdk.VideoProcessMode
import com.sellycloud.sellycloudsdk.VideoProcessor
import com.sellycloud.sellycloudsdk.VideoProcessorConfig
import com.sellycloud.sellycloudsdk.VideoTextureFrame
import com.sellycloud.sellycloudsdk.beauty.BeautyEngine
/**
@@ -16,8 +19,6 @@ class FaceUnityBeautyEngine : BeautyEngine {
private val tag = "FaceUnityBeautyEng"
private var renderer: FURenderer? = null
private var filter: FUBeautyFilterRender? = null
private var whipInterceptor: FuVideoFrameInterceptor? = null
private var initialized = false
private var enabled = true
@@ -31,15 +32,6 @@ class FaceUnityBeautyEngine : BeautyEngine {
val fuRenderer = FURenderer(appCtx).also { it.setup() }
renderer = fuRenderer
filter = FUBeautyFilterRender(fuRenderer).apply {
setBeautyEnabled(enabled)
setCameraFacing(currentFacing)
}
whipInterceptor = FuVideoFrameInterceptor(fuRenderer).apply {
setFrontCamera(currentFacing == CameraHelper.Facing.FRONT)
}
applyIntensity()
initialized = true
Log.d(tag, "FaceUnity beauty engine initialized")
@@ -49,19 +41,40 @@ class FaceUnityBeautyEngine : BeautyEngine {
}
}
override fun obtainFilter(): BaseFilterRender? {
override fun createProcessor(): VideoProcessor? {
applyIntensity()
return filter
}
val textureRenderer = renderer ?: return null
return object : VideoProcessor {
override val config: VideoProcessorConfig = VideoProcessorConfig(
preferredFormat = VideoProcessFormat.TEXTURE_2D,
mode = VideoProcessMode.READ_WRITE,
fullRewrite = true
)
override fun obtainWhipInterceptor(): VideoFrameInterceptor? {
applyIntensity()
return whipInterceptor
override fun onGlContextCreated() {
textureRenderer.reinitializeGlContextOnCurrentThread()
applyIntensity()
}
override fun onGlContextDestroyed() {
textureRenderer.releaseGlContextOnCurrentThread()
}
override fun processTexture(input: VideoTextureFrame, outputTextureId: Int) {
if (!enabled || outputTextureId <= 0) return
textureRenderer.renderProcessedTextureToOutput(
inputTex = input.textureId,
outputTextureId = outputTextureId,
width = input.width,
height = input.height,
facing = currentFacing
)
}
}
}
override fun setEnabled(enabled: Boolean) {
this.enabled = enabled
filter?.setBeautyEnabled(enabled)
}
override fun setIntensity(intensity: Double) {
@@ -71,8 +84,6 @@ class FaceUnityBeautyEngine : BeautyEngine {
override fun onCameraFacingChanged(facing: CameraHelper.Facing) {
currentFacing = facing
filter?.setCameraFacing(facing)
whipInterceptor?.setFrontCamera(facing == CameraHelper.Facing.FRONT)
}
override fun onBeforeGlContextRelease() {
@@ -90,11 +101,8 @@ class FaceUnityBeautyEngine : BeautyEngine {
}
override fun release() {
kotlin.runCatching { filter?.release() }
kotlin.runCatching { renderer?.release() }
filter = null
renderer = null
whipInterceptor = null
initialized = false
}

View File

@@ -6,157 +6,215 @@ import com.faceunity.core.enumeration.CameraFacingEnum
import com.faceunity.core.enumeration.FUExternalInputEnum
import com.faceunity.core.enumeration.FUInputBufferEnum
import com.faceunity.core.enumeration.FUTransformMatrixEnum
import com.sellycloud.sellycloudsdk.VideoFrameInterceptor
import com.sellycloud.sellycloudsdk.SellyVideoFrame
import org.webrtc.JavaI420Buffer
import org.webrtc.VideoFrame
import java.nio.ByteBuffer
/**
* 将 WebRTC 采集的 I420 帧交给 FaceUnity 进行美颜,返回处理后的 NV21 帧
* 最小化侵入:当 SDK 未就绪或出错时,返回 null 让上游透传原始帧
*
* 重要:此拦截器不管理传入帧的生命周期,只负责创建新的处理后帧。
* 将 I420 帧交给 FaceUnity 进行美颜处理
* live 推流走 SDK 的 [SellyVideoFrame];互动 RTC 仍保留 WebRTC [VideoFrame] 的便捷重载
*/
class FuVideoFrameInterceptor(
private val fuRenderer: FURenderer
) : VideoFrameInterceptor {
) {
private val tag = "FuVideoFrameInt"
@Volatile private var isFrontCamera: Boolean = true
@Volatile private var enabled: Boolean = true
fun setFrontCamera(front: Boolean) { isFrontCamera = front }
fun setEnabled(enable: Boolean) { enabled = enable }
override fun process(frame: VideoFrame): VideoFrame? {
fun process(frame: SellyVideoFrame): SellyVideoFrame? {
if (!enabled) return null
val kit = fuRenderer.fuRenderKit
if (!fuRenderer.isAuthSuccess || kit == null) return null
val src = frame.buffer
// 兼容部分 webrtc 版本中 toI420 可能标注为可空的情况
val i420Maybe = try { src.toI420() } catch (_: Throwable) { null }
val i420 = i420Maybe ?: return null
val i420 = frame.buffer as? SellyVideoFrame.I420Buffer ?: return null
val width = i420.width
val height = i420.height
if (width == 0 || height == 0) return null
return try {
val i420Bytes = toI420Bytes(
width = width,
height = height,
dataY = i420.dataY,
strideY = i420.strideY,
dataU = i420.dataU,
strideU = i420.strideU,
dataV = i420.dataV,
strideV = i420.strideV
)
val outI420 = renderI420(width, height, i420Bytes) ?: return null
SellyVideoFrame(fromI420BytesToSellyI420(outI420, width, height), frame.rotation, frame.timestampNs)
} catch (t: Throwable) {
Log.w(tag, "beauty failed: ${t.message}")
null
}
}
fun process(frame: VideoFrame): VideoFrame? {
if (!enabled) return null
val kit = fuRenderer.fuRenderKit
if (!fuRenderer.isAuthSuccess || kit == null) return null
val i420 = try { frame.buffer.toI420() } catch (_: Throwable) { null } ?: return null
return try {
val width = i420.width
val height = i420.height
if (width == 0 || height == 0) return null
val i420Bytes = toI420Bytes(i420)
val inputData = FURenderInputData(width, height).apply {
imageBuffer = FURenderInputData.FUImageBuffer(
FUInputBufferEnum.FU_FORMAT_I420_BUFFER,
i420Bytes
)
renderConfig.apply {
externalInputType = FUExternalInputEnum.EXTERNAL_INPUT_TYPE_IMAGE
if (isFrontCamera) {
cameraFacing = CameraFacingEnum.CAMERA_FRONT
inputTextureMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
inputBufferMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
outputMatrix = FUTransformMatrixEnum.CCROT0
} else {
cameraFacing = CameraFacingEnum.CAMERA_BACK
inputTextureMatrix = FUTransformMatrixEnum.CCROT0
inputBufferMatrix = FUTransformMatrixEnum.CCROT0
outputMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
}
isNeedBufferReturn = true
}
}
val output = kit.renderWithInput(inputData)
val outImage = output.image ?: return null
val outI420 = outImage.buffer ?: return null
if (outI420.isEmpty()) return null
// 安全:将 I420 字节填充到 JavaI420Buffer避免手写 NV21 转换越界
val jbuf = fromI420BytesToJavaI420(outI420, width, height)
VideoFrame(jbuf, frame.rotation, frame.timestampNs)
val i420Bytes = toI420Bytes(
width = width,
height = height,
dataY = i420.dataY,
strideY = i420.strideY,
dataU = i420.dataU,
strideU = i420.strideU,
dataV = i420.dataV,
strideV = i420.strideV
)
val outI420 = renderI420(width, height, i420Bytes) ?: return null
VideoFrame(fromI420BytesToJavaI420(outI420, width, height), frame.rotation, frame.timestampNs)
} catch (t: Throwable) {
Log.w(tag, "beauty failed: ${t.message}")
null
} finally {
// 只释放我们创建的 I420Buffer不释放原始 frame
try { i420.release() } catch (_: Throwable) {}
}
}
private fun toI420Bytes(i420: VideoFrame.I420Buffer): ByteArray {
val w = i420.width
val h = i420.height
val ySize = w * h
val uvW = (w + 1) / 2
val uvH = (h + 1) / 2
val uSize = uvW * uvH
val vSize = uSize
val out = ByteArray(ySize + uSize + vSize)
val yBuf = i420.dataY
val uBuf = i420.dataU
val vBuf = i420.dataV
val yStride = i420.strideY
val uStride = i420.strideU
val vStride = i420.strideV
// copy Y
private fun renderI420(width: Int, height: Int, i420Bytes: ByteArray): ByteArray? {
val inputData = FURenderInputData(width, height).apply {
imageBuffer = FURenderInputData.FUImageBuffer(
FUInputBufferEnum.FU_FORMAT_I420_BUFFER,
i420Bytes
)
renderConfig.apply {
externalInputType = FUExternalInputEnum.EXTERNAL_INPUT_TYPE_IMAGE
if (isFrontCamera) {
cameraFacing = CameraFacingEnum.CAMERA_FRONT
inputTextureMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
inputBufferMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
outputMatrix = FUTransformMatrixEnum.CCROT0
} else {
cameraFacing = CameraFacingEnum.CAMERA_BACK
inputTextureMatrix = FUTransformMatrixEnum.CCROT0
inputBufferMatrix = FUTransformMatrixEnum.CCROT0
outputMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
}
isNeedBufferReturn = true
}
}
val output = fuRenderer.fuRenderKit?.renderWithInput(inputData) ?: return null
val outImage = output.image ?: return null
val outI420 = outImage.buffer ?: return null
return outI420.takeIf { it.isNotEmpty() }
}
private fun toI420Bytes(
width: Int,
height: Int,
dataY: ByteBuffer,
strideY: Int,
dataU: ByteBuffer,
strideU: Int,
dataV: ByteBuffer,
strideV: Int
): ByteArray {
val ySize = width * height
val uvWidth = (width + 1) / 2
val uvHeight = (height + 1) / 2
val uSize = uvWidth * uvHeight
val out = ByteArray(ySize + uSize * 2)
var dst = 0
for (j in 0 until h) {
val srcPos = j * yStride
yBuf.position(srcPos)
yBuf.get(out, dst, w)
dst += w
for (row in 0 until height) {
val srcBase = row * strideY
for (col in 0 until width) {
out[dst++] = dataY.get(srcBase + col)
}
}
// copy U
for (j in 0 until uvH) {
val srcPos = j * uStride
uBuf.position(srcPos)
uBuf.get(out, ySize + j * uvW, uvW)
for (row in 0 until uvHeight) {
val srcBase = row * strideU
for (col in 0 until uvWidth) {
out[dst++] = dataU.get(srcBase + col)
}
}
// copy V
for (j in 0 until uvH) {
val srcPos = j * vStride
vBuf.position(srcPos)
vBuf.get(out, ySize + uSize + j * uvW, uvW)
for (row in 0 until uvHeight) {
val srcBase = row * strideV
for (col in 0 until uvWidth) {
out[dst++] = dataV.get(srcBase + col)
}
}
return out
}
// 将连续 I420 字节拷贝到 JavaI420Buffer
private fun fromI420BytesToJavaI420(i420: ByteArray, width: Int, height: Int): JavaI420Buffer {
private fun fromI420BytesToSellyI420(i420: ByteArray, width: Int, height: Int): SellyVideoFrame.I420Buffer {
val ySize = width * height
val uvW = (width + 1) / 2
val uvH = (height + 1) / 2
val uSize = uvW * uvH
val uvWidth = (width + 1) / 2
val uvHeight = (height + 1) / 2
val uSize = uvWidth * uvHeight
val vSize = uSize
require(i420.size >= ySize + uSize + vSize) { "I420 buffer too small: ${i420.size}" }
val buf = JavaI420Buffer.allocate(width, height)
val y = buf.dataY
val u = buf.dataU
val v = buf.dataV
val yStride = buf.strideY
val uStride = buf.strideU
val vStride = buf.strideV
// 拷贝 Y
val buffer = SellyVideoFrame.allocateI420Buffer(width, height)
val y = buffer.dataY
val u = buffer.dataU
val v = buffer.dataV
var src = 0
for (j in 0 until height) {
y.position(j * yStride)
for (row in 0 until height) {
y.position(row * buffer.strideY)
y.put(i420, src, width)
src += width
}
// 拷贝 U
var uSrc = ySize
for (j in 0 until uvH) {
u.position(j * uStride)
u.put(i420, uSrc, uvW)
uSrc += uvW
for (row in 0 until uvHeight) {
u.position(row * buffer.strideU)
u.put(i420, src, uvWidth)
src += uvWidth
}
// 拷贝 V
var vSrc = ySize + uSize
for (j in 0 until uvH) {
v.position(j * vStride)
v.put(i420, vSrc, uvW)
vSrc += uvW
for (row in 0 until uvHeight) {
v.position(row * buffer.strideV)
v.put(i420, src, uvWidth)
src += uvWidth
}
return buf
return buffer
}
private fun fromI420BytesToJavaI420(i420: ByteArray, width: Int, height: Int): JavaI420Buffer {
val ySize = width * height
val uvWidth = (width + 1) / 2
val uvHeight = (height + 1) / 2
val uSize = uvWidth * uvHeight
val vSize = uSize
require(i420.size >= ySize + uSize + vSize) { "I420 buffer too small: ${i420.size}" }
val buffer = JavaI420Buffer.allocate(width, height)
val y = buffer.dataY
val u = buffer.dataU
val v = buffer.dataV
var src = 0
for (row in 0 until height) {
y.position(row * buffer.strideY)
y.put(i420, src, width)
src += width
}
for (row in 0 until uvHeight) {
u.position(row * buffer.strideU)
u.put(i420, src, uvWidth)
src += uvWidth
}
for (row in 0 until uvHeight) {
v.position(row * buffer.strideV)
v.put(i420, src, uvWidth)
src += uvWidth
}
return buffer
}
}

View File

@@ -65,17 +65,15 @@ class InteractiveForegroundService : Service() {
}
private fun ensureChannel() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
val manager = getSystemService(NotificationManager::class.java) ?: return
val existing = manager.getNotificationChannel(CHANNEL_ID)
if (existing == null) {
val channel = NotificationChannel(
CHANNEL_ID,
"Interactive Call",
NotificationManager.IMPORTANCE_LOW
)
manager.createNotificationChannel(channel)
}
val manager = getSystemService(NotificationManager::class.java) ?: return
val existing = manager.getNotificationChannel(CHANNEL_ID)
if (existing == null) {
val channel = NotificationChannel(
CHANNEL_ID,
"Interactive Call",
NotificationManager.IMPORTANCE_LOW
)
manager.createNotificationChannel(channel)
}
}

View File

@@ -17,10 +17,10 @@ import androidx.core.view.isVisible
import androidx.lifecycle.lifecycleScope
import com.demo.SellyCloudSDK.KiwiHelper
import com.demo.SellyCloudSDK.R
import kotlinx.coroutines.launch
import com.demo.SellyCloudSDK.beauty.FURenderer
import com.demo.SellyCloudSDK.beauty.FuVideoFrameInterceptor
import com.demo.SellyCloudSDK.avdemo.AvDemoSettingsStore
import com.demo.SellyCloudSDK.beauty.FaceUnityBeautyEngine
import com.demo.SellyCloudSDK.databinding.ActivityInteractiveLiveBinding
import com.pedro.encoder.input.video.CameraHelper
import com.sellycloud.sellycloudsdk.interactive.CallType
import com.sellycloud.sellycloudsdk.interactive.InteractiveCallConfig
import com.sellycloud.sellycloudsdk.interactive.InteractiveChannelMediaOptions
@@ -33,18 +33,28 @@ import com.sellycloud.sellycloudsdk.interactive.InteractiveStreamStats
import com.sellycloud.sellycloudsdk.interactive.InteractiveVideoCanvas
import com.sellycloud.sellycloudsdk.interactive.InteractiveVideoEncoderConfig
import com.sellycloud.sellycloudsdk.interactive.RemoteState
import com.sellycloud.sellycloudsdk.render.RtcRenderTarget
import com.sellycloud.sellycloudsdk.render.SurfaceViewRtcTarget
import com.sellycloud.sellycloudsdk.render.TextureViewRtcTarget
import android.view.TextureView
import android.view.View
import kotlinx.coroutines.launch
import org.webrtc.SurfaceViewRenderer
import java.util.Locale
class InteractiveLiveActivity : AppCompatActivity() {
private lateinit var binding: ActivityInteractiveLiveBinding
private lateinit var settingsStore: AvDemoSettingsStore
private var useTextureView: Boolean = false
private var rtcEngine: InteractiveRtcEngine? = null
private var lockedCallType: CallType? = null
private var localRenderer: SurfaceViewRenderer? = null
private var localRenderTarget: RtcRenderTarget? = null
private var localRenderView: View? = null
private lateinit var localSlot: VideoSlot
private lateinit var remoteSlots: List<VideoSlot>
private val remoteRendererMap = mutableMapOf<String, SurfaceViewRenderer>()
private val remoteRenderMap = mutableMapOf<String, Pair<View, RtcRenderTarget>>()
private var isLocalPreviewEnabled = true
private var isLocalAudioEnabled = true
private var isSpeakerOn = true
@@ -55,8 +65,14 @@ class InteractiveLiveActivity : AppCompatActivity() {
private var currentConnectionState: InteractiveConnectionState = InteractiveConnectionState.Disconnected
private var callDurationSeconds: Long = 0
private var lastMessage: String? = null
private var beautyRenderer: FURenderer? = null
private var fuFrameInterceptor: FuVideoFrameInterceptor? = null
private var beautyEngine: FaceUnityBeautyEngine? = null
private val defaultCameraVideoConfig = InteractiveVideoEncoderConfig(
640,
480,
fps = 20,
minBitrateKbps = 150,
maxBitrateKbps = 850
)
@Volatile private var isFrontCamera = true
@Volatile private var beautyEnabled: Boolean = true
@Volatile private var isLocalVideoEnabled: Boolean = true
@@ -71,6 +87,7 @@ class InteractiveLiveActivity : AppCompatActivity() {
private var currentCallId: String? = null
@Volatile private var selfUserId: String? = null
private var isScreenSharing: Boolean = false
@Volatile private var leaveInProgress: Boolean = false
private val permissionLauncher = registerForActivityResult(
ActivityResultContracts.RequestMultiplePermissions()
@@ -107,6 +124,8 @@ class InteractiveLiveActivity : AppCompatActivity() {
setDisplayHomeAsUpEnabled(true)
}
settingsStore = AvDemoSettingsStore(this)
useTextureView = settingsStore.read().renderBackendPreference.isTextureView()
setupVideoSlots()
initRtcEngine()
setupUiDefaults()
@@ -123,12 +142,12 @@ class InteractiveLiveActivity : AppCompatActivity() {
binding.btnSwitchCamera.setOnClickListener {
isFrontCamera = !isFrontCamera
fuFrameInterceptor?.setFrontCamera(isFrontCamera)
beautyEngine?.onCameraFacingChanged(currentCameraFacing())
rtcEngine?.switchCamera()
}
binding.btnToggleBeauty.setOnClickListener {
beautyEnabled = !beautyEnabled
fuFrameInterceptor?.setEnabled(beautyEnabled)
ensureBeautySessionReady()
updateControlButtons()
}
}
@@ -153,26 +172,26 @@ class InteractiveLiveActivity : AppCompatActivity() {
override fun onDestroy() {
super.onDestroy()
rtcEngine?.setCaptureVideoFrameInterceptor(null)
fuFrameInterceptor = null
rtcEngine?.setCaptureVideoProcessor(null)
remoteMediaState.clear()
// 捕获需要释放的引用,避免主线程阻塞导致 ANR
val engine = rtcEngine
val local = localRenderer
val remotes = remoteRendererMap.values.toList()
val beauty = beautyRenderer
val localTarget = localRenderTarget
val remoteTargets = remoteRenderMap.values.map { it.second }
val beauty = beautyEngine
rtcEngine = null
localRenderer = null
remoteRendererMap.clear()
beautyRenderer = null
localRenderTarget = null
localRenderView = null
remoteRenderMap.clear()
beautyEngine = null
// 重量级资源释放移到后台线程
Thread {
try { engine?.leaveChannel() } catch (_: Exception) {}
try { InteractiveRtcEngine.destroy(engine) } catch (_: Exception) {}
try { local?.release() } catch (_: Exception) {}
remotes.forEach { try { it.release() } catch (_: Exception) {} }
try { localTarget?.release() } catch (_: Exception) {}
remoteTargets.forEach { try { it.release() } catch (_: Exception) {} }
try { beauty?.release() } catch (_: Exception) {}
}.start()
}
@@ -183,17 +202,23 @@ class InteractiveLiveActivity : AppCompatActivity() {
}
private fun initRtcEngine() {
rtcEngine?.setCaptureVideoProcessor(null)
rtcEngine?.destroy()
rtcEngine = null
beautyEngine?.release()
beautyEngine = null
val appId = getString(R.string.signaling_app_id)
val token = getString(R.string.signaling_token).takeIf { it.isNotBlank() }
// Kiwi 代理后台获取rsName 为空时清除残留
val kiwiRsName = getString(R.string.signaling_kiwi_rsname).trim()
KiwiHelper.startProxySetup(kiwiRsName.isNotBlank(), kiwiRsName)
beautyRenderer = FURenderer(this).also { it.setup() }
fuFrameInterceptor = beautyRenderer?.let { FuVideoFrameInterceptor(it).apply {
setFrontCamera(isFrontCamera)
setEnabled(beautyEnabled)
} }
beautyEngine = FaceUnityBeautyEngine().also {
it.initialize(this)
it.setEnabled(beautyEnabled)
it.onCameraFacingChanged(currentCameraFacing())
}
rtcEngine = InteractiveRtcEngine.create(
InteractiveRtcEngineConfig(
context = applicationContext,
@@ -203,14 +228,10 @@ class InteractiveLiveActivity : AppCompatActivity() {
).apply {
setEventHandler(rtcEventHandler)
setClientRole(InteractiveRtcEngine.ClientRole.BROADCASTER)
// setVideoEncoderConfiguration(InteractiveVideoEncoderConfig()) 使用默认值
setVideoEncoderConfiguration(InteractiveVideoEncoderConfig(640, 480 , fps = 20, minBitrateKbps = 150, maxBitrateKbps = 850))
setVideoEncoderConfiguration(defaultCameraVideoConfig)
setDefaultAudioRoutetoSpeakerphone(true)
setCaptureVideoFrameInterceptor { frame ->
if (!beautyEnabled) return@setCaptureVideoFrameInterceptor frame
fuFrameInterceptor?.process(frame) ?: frame
}
}
ensureBeautySessionReady()
}
private val rtcEventHandler = object : InteractiveRtcEngineEventHandler {
@@ -232,6 +253,8 @@ class InteractiveLiveActivity : AppCompatActivity() {
override fun onLeaveChannel(durationSeconds: Int) {
Log.d(TAG, "回调onLeaveChannel duration=${durationSeconds}s")
runOnUiThread {
leaveInProgress = false
releaseLocalRenderTargetAsync()
resetUiAfterLeave()
}
}
@@ -325,12 +348,12 @@ class InteractiveLiveActivity : AppCompatActivity() {
runOnUiThread { handleRemoteAudioState(enabled, userId) }
}
override fun onStreamStateChanged(peerId: String, state: RemoteState, code: Int, message: String?) {
override fun onStreamStateChanged(userId: String, state: RemoteState, code: Int, message: String?) {
runOnUiThread {
val tip = "onStreamStateChanged[$peerId] state=$state code=$code ${message ?: ""}"
val tip = "onStreamStateChanged[$userId] state=$state code=$code ${message ?: ""}"
Log.d(TAG, tip)
Toast.makeText(this@InteractiveLiveActivity, tip, Toast.LENGTH_SHORT).show()
if (peerId == currentUserId && message?.contains("screen_share_stopped") == true) {
if (userId == currentUserId && message?.contains("screen_share_stopped") == true) {
isScreenSharing = false
updateControlButtons()
}
@@ -345,11 +368,13 @@ class InteractiveLiveActivity : AppCompatActivity() {
VideoSlot(binding.flRemote2, TileType.REMOTE),
VideoSlot(binding.flRemote3, TileType.REMOTE)
)
if (localRenderer == null) {
localRenderer = createRenderer()
if (localRenderView == null) {
val (view, target) = createRenderTarget()
localRenderView = view
localRenderTarget = target
}
localRenderer?.let { renderer ->
localSlot.layout.attachRenderer(renderer)
localRenderView?.let { view ->
localSlot.layout.attachRenderer(view)
}
resetVideoSlots(releaseRemotes = false)
binding.videoContainer.isVisible = false
@@ -481,9 +506,9 @@ class InteractiveLiveActivity : AppCompatActivity() {
}
private fun applyLocalPreviewVisibility() {
val renderer = localRenderer ?: createRenderer().also { localRenderer = it }
if (isLocalPreviewEnabled) {
localSlot.layout.attachRenderer(renderer)
val view = localRenderView ?: return
localSlot.layout.attachRenderer(view)
} else {
localSlot.layout.detachRenderer()
}
@@ -513,7 +538,15 @@ class InteractiveLiveActivity : AppCompatActivity() {
if (stopped) {
isScreenSharing = false
ensureBeautySessionReady()
fuFrameInterceptor?.setEnabled(beautyEnabled)
binding.root.post {
// The active call keeps the local preview target inside the SDK.
// During a live session we must not swap/release that target from the demo side.
applyDefaultCameraVideoConfig()
if (!isLocalVideoEnabled) {
rtcEngine?.enableLocalVideo(false)
}
applyLocalPreviewVisibility()
}
} else if (showToast) {
Toast.makeText(this, "停止屏幕共享失败", Toast.LENGTH_SHORT).show()
}
@@ -610,11 +643,15 @@ class InteractiveLiveActivity : AppCompatActivity() {
}
private fun executeJoinInternal(request: JoinRequest) {
val renderer = localRenderer ?: createRenderer().also {
localRenderer = it
applyDefaultCameraVideoConfig()
val target = localRenderTarget ?: run {
val (view, t) = createRenderTarget()
localRenderView = view
localRenderTarget = t
t
}
currentUserId = request.userId
rtcEngine?.setupLocalVideo(InteractiveVideoCanvas(renderer, request.userId))
rtcEngine?.setupLocalVideo(InteractiveVideoCanvas(target, request.userId))
ensureBeautySessionReady()
rtcEngine?.joinChannel(
request.token,
@@ -634,10 +671,13 @@ class InteractiveLiveActivity : AppCompatActivity() {
private fun ensureBeautySessionReady() {
try {
beautyRenderer?.releaseGlContext()
beautyRenderer?.reinitializeGlContext()
fuFrameInterceptor?.setEnabled(beautyEnabled)
fuFrameInterceptor?.setFrontCamera(isFrontCamera)
val engine = rtcEngine
val beauty = beautyEngine
beauty?.setEnabled(beautyEnabled)
beauty?.onCameraFacingChanged(currentCameraFacing())
engine?.setCaptureVideoProcessor(
if (beautyEnabled) beauty?.createProcessor() else null
)
} catch (_: Exception) {
}
}
@@ -672,8 +712,8 @@ class InteractiveLiveActivity : AppCompatActivity() {
private fun addRemoteTile(userId: String) {
remoteSlots.firstOrNull { it.userId == userId }?.let { existing ->
val renderer = ensureRemoteRenderer(userId)
existing.layout.attachRenderer(renderer)
val view = ensureRemoteRenderView(userId)
existing.layout.attachRenderer(view)
remoteSlots.filter { it.userId == userId && it !== existing }.forEach { extra ->
extra.userId = null
extra.layout.detachRenderer()
@@ -690,17 +730,19 @@ class InteractiveLiveActivity : AppCompatActivity() {
return
}
slot.userId = userId
val renderer = ensureRemoteRenderer(userId)
slot.layout.attachRenderer(renderer)
val view = ensureRemoteRenderView(userId)
slot.layout.attachRenderer(view)
updateSlotOverlay(slot)
binding.videoContainer.isVisible = true
}
private fun ensureRemoteRenderer(userId: String): SurfaceViewRenderer {
return remoteRendererMap[userId] ?: createRenderer().also { renderer ->
remoteRendererMap[userId] = renderer
rtcEngine?.setupRemoteVideo(InteractiveVideoCanvas(renderer, userId))
}
private fun ensureRemoteRenderView(userId: String): View {
val existing = remoteRenderMap[userId]
if (existing != null) return existing.first
val (view, target) = createRenderTarget()
remoteRenderMap[userId] = view to target
rtcEngine?.setupRemoteVideo(InteractiveVideoCanvas(target, userId))
return view
}
private fun removeRemoteTile(userId: String) {
@@ -711,27 +753,27 @@ class InteractiveLiveActivity : AppCompatActivity() {
updateSlotOverlay(slot)
}
val engine = rtcEngine
val renderer = remoteRendererMap.remove(userId)
val removed = remoteRenderMap.remove(userId)
remoteStats.remove(userId)
// SurfaceViewRenderer.release() 会死锁主线程,移到后台
// RtcRenderTarget.release() may block the main thread, move to background
Thread {
try { engine?.clearRemoteVideo(userId) } catch (_: Exception) {}
try { renderer?.release() } catch (_: Exception) {}
try { removed?.second?.release() } catch (_: Exception) {}
}.start()
}
private fun resetVideoSlots(releaseRemotes: Boolean = true) {
private fun resetVideoSlots(releaseRemotes: Boolean = true, reattachLocal: Boolean = true) {
if (releaseRemotes) {
val engine = rtcEngine
val remoteIds = remoteRendererMap.keys.toList()
val renderersToRelease = remoteIds.mapNotNull { remoteRendererMap.remove(it) }
val remoteIds = remoteRenderMap.keys.toList()
val targetsToRelease = remoteIds.mapNotNull { remoteRenderMap.remove(it)?.second }
remoteStats.clear()
// SurfaceViewRenderer.release() 会死锁主线程,移到后台
// RtcRenderTarget.release() may block the main thread, move to background
Thread {
remoteIds.forEach { userId ->
try { engine?.clearRemoteVideo(userId) } catch (_: Exception) {}
}
renderersToRelease.forEach { try { it.release() } catch (_: Exception) {} }
targetsToRelease.forEach { try { it.release() } catch (_: Exception) {} }
}.start()
}
remoteSlots.forEach { slot ->
@@ -740,9 +782,19 @@ class InteractiveLiveActivity : AppCompatActivity() {
updateSlotOverlay(slot)
}
localSlot.userId = currentUserId
val renderer = localRenderer ?: createRenderer().also { localRenderer = it }
if (!reattachLocal) {
localSlot.layout.detachRenderer()
updateSlotOverlay(localSlot)
return
}
val view = localRenderView ?: run {
val (v, t) = createRenderTarget()
localRenderView = v
localRenderTarget = t
v
}
if (isLocalPreviewEnabled) {
localSlot.layout.attachRenderer(renderer)
localSlot.layout.attachRenderer(view)
} else {
localSlot.layout.detachRenderer()
}
@@ -757,23 +809,29 @@ class InteractiveLiveActivity : AppCompatActivity() {
private fun displayId(userId: String): String = userId
private fun leaveChannel() {
// SDK 的 leaveChannel() 会同步停止 Whip/Whep 客户端,阻塞主线程
if (leaveInProgress) return
leaveInProgress = true
val engine = rtcEngine
Thread { try { engine?.leaveChannel() } catch (_: Exception) {} }.start()
resetUiAfterLeave()
currentConnectionState = InteractiveConnectionState.Disconnected
updateCallInfo()
setJoinButtonEnabled(false)
Thread {
try {
engine?.leaveChannel()
} catch (_: Exception) {
} finally {
runOnUiThread {
if (!leaveInProgress) return@runOnUiThread
leaveInProgress = false
releaseLocalRenderTargetAsync()
resetUiAfterLeave()
}
}
}.start()
}
private fun resetUiAfterLeave() {
currentCallId = null
resetVideoSlots()
binding.videoContainer.isVisible = false
binding.btnJoin.text = getString(R.string.join)
setJoinButtonEnabled(true)
isLocalPreviewEnabled = true
isLocalAudioEnabled = true
isSpeakerOn = true
beautyEnabled = true
fuFrameInterceptor?.setEnabled(true)
selfUserId = null
localStats = null
remoteStats.clear()
@@ -781,6 +839,16 @@ class InteractiveLiveActivity : AppCompatActivity() {
currentConnectionState = InteractiveConnectionState.Disconnected
callDurationSeconds = 0
lastMessage = null
resetVideoSlots(reattachLocal = false)
binding.videoContainer.isVisible = false
binding.btnJoin.text = getString(R.string.join)
setJoinButtonEnabled(!leaveInProgress)
isLocalPreviewEnabled = true
isLocalAudioEnabled = true
isSpeakerOn = true
isFrontCamera = true
isLocalVideoEnabled = true
beautyEnabled = true
binding.tvMessageLog.text = getString(R.string.message_none)
isScreenSharing = false
updateControlButtons()
@@ -788,16 +856,33 @@ class InteractiveLiveActivity : AppCompatActivity() {
updateCallInfo()
setJoinInputsVisible(true)
InteractiveForegroundService.stop(this)
initRtcEngine()
}
private fun createRenderer(): SurfaceViewRenderer = SurfaceViewRenderer(this).apply {
setZOrderMediaOverlay(false)
private fun currentCameraFacing(): CameraHelper.Facing {
return if (isFrontCamera) CameraHelper.Facing.FRONT else CameraHelper.Facing.BACK
}
private fun releaseRenderer(renderer: SurfaceViewRenderer) {
try {
renderer.release()
} catch (_: Exception) {}
private fun createRenderTarget(): Pair<View, RtcRenderTarget> {
return if (useTextureView) {
// Interactive demo owns these targets and releases them in onDestroy().
val tv = com.sellycloud.sellycloudsdk.widget.AspectRatioTextureView(this)
tv to TextureViewRtcTarget(tv, ownedBySdk = false)
} else {
val svr = SurfaceViewRenderer(this).apply { setZOrderMediaOverlay(false) }
svr to SurfaceViewRtcTarget(svr, ownedBySdk = false)
}
}
private fun releaseLocalRenderTargetAsync() {
val target = localRenderTarget ?: return
localRenderTarget = null
localRenderView = null
Thread { try { target.release() } catch (_: Exception) {} }.start()
}
private fun applyDefaultCameraVideoConfig() {
rtcEngine?.setVideoEncoderConfiguration(defaultCameraVideoConfig)
}
private fun hideKeyboard() {
@@ -852,7 +937,7 @@ class InteractiveLiveActivity : AppCompatActivity() {
val duration = if (callDurationSeconds > 0) {
val minutes = callDurationSeconds / 60
val seconds = callDurationSeconds % 60
String.format(" | 时长 %02d:%02d", minutes, seconds)
String.format(Locale.getDefault(), " | 时长 %02d:%02d", minutes, seconds)
} else {
""
}
@@ -863,7 +948,9 @@ class InteractiveLiveActivity : AppCompatActivity() {
val lines = mutableListOf(header)
val width = stats?.width?.takeIf { it > 0 }?.toString() ?: "--"
val height = stats?.height?.takeIf { it > 0 }?.toString() ?: "--"
val fpsText = stats?.fps?.takeIf { it > 0 }?.let { String.format("%.1f fps", it.toDouble()) } ?: "-- fps"
val fpsText = stats?.fps?.takeIf { it > 0 }?.let {
String.format(Locale.getDefault(), "%.1f fps", it.toDouble())
} ?: "-- fps"
lines += "Res:${width}x${height} $fpsText"
val videoCodec = stats?.videoCodec?.takeIf { it.isNotBlank() }
val audioCodec = stats?.audioCodec?.takeIf { it.isNotBlank() }
@@ -874,10 +961,16 @@ class InteractiveLiveActivity : AppCompatActivity() {
else -> null
}
codecLine?.let { lines += it }
val videoBitrate = stats?.videoBitrateKbps?.takeIf { it > 0 }?.let { String.format("%.0f", it.toDouble()) } ?: "--"
val audioBitrate = stats?.audioBitrateKbps?.takeIf { it > 0 }?.let { String.format("%.0f", it.toDouble()) } ?: "--"
val videoBitrate = stats?.videoBitrateKbps?.takeIf { it > 0 }?.let {
String.format(Locale.getDefault(), "%.0f", it.toDouble())
} ?: "--"
val audioBitrate = stats?.audioBitrateKbps?.takeIf { it > 0 }?.let {
String.format(Locale.getDefault(), "%.0f", it.toDouble())
} ?: "--"
lines += "Video:${videoBitrate}kbps Audio:${audioBitrate}kbps"
val rtt = stats?.rttMs?.takeIf { it > 0 }?.let { String.format("%.0fms", it.toDouble()) } ?: "--"
val rtt = stats?.rttMs?.takeIf { it > 0 }?.let {
String.format(Locale.getDefault(), "%.0fms", it.toDouble())
} ?: "--"
lines += "RTT:$rtt"
return lines.joinToString("\n")
}

View File

@@ -8,6 +8,7 @@ import android.content.Intent
import android.content.pm.PackageManager
import android.content.res.Configuration
import android.graphics.Bitmap
import com.sellycloud.sellycloudsdk.render.RenderBackend
import android.graphics.Color
import android.graphics.Typeface
import android.graphics.drawable.GradientDrawable
@@ -33,6 +34,7 @@ import androidx.core.content.ContextCompat
import coil.load
import com.demo.SellyCloudSDK.KiwiHelper
import com.demo.SellyCloudSDK.R
import com.demo.SellyCloudSDK.avdemo.AvDemoSettingsStore
import com.demo.SellyCloudSDK.databinding.ActivityLivePlayBinding
import com.demo.SellyCloudSDK.live.auth.LiveAuthHelper
import com.demo.SellyCloudSDK.live.auth.LiveTokenSigner
@@ -71,6 +73,7 @@ class LivePlayActivity : AppCompatActivity() {
private lateinit var playerClient: SellyLiveVideoPlayer
private lateinit var pipController: SellyPipController
private var useTextureView: Boolean = false
private var isPlaying: Boolean = false
private var isMuted: Boolean = false
private var previewImageUrl: String? = null
@@ -85,6 +88,7 @@ class LivePlayActivity : AppCompatActivity() {
private var lastLatencyChasingSpeed: Float? = null
private var lastLatencyChasingUpdate: SellyLatencyChasingUpdate? = null
private var hasReleasedPlayer: Boolean = false
private var logEnabled: Boolean = true
private val logLines: ArrayDeque<String> = ArrayDeque()
private val logTimeFormat = SimpleDateFormat("HH:mm:ss.SSS", Locale.getDefault())
@@ -109,10 +113,12 @@ class LivePlayActivity : AppCompatActivity() {
addLogFloatingButton()
envStore = LiveEnvSettingsStore(this)
useTextureView = AvDemoSettingsStore(this).read().renderBackendPreference.isTextureView()
pipController = SellyPipController(this)
val env = envStore.read().also { it.applyToSdkRuntimeConfig(this) }
logEnabled = env.logEnabled
args = Args.from(intent, env)
Log.d(TAG, "init liveMode=${args.liveMode} input=${args.streamIdOrUrl} autoStart=${args.autoStart}")
debugLog("init liveMode=${args.liveMode} input=${args.streamIdOrUrl} autoStart=${args.autoStart}")
setupPreview(args.previewImageUrl)
playerClient = createPlayerForArgs(args).also { client ->
@@ -192,6 +198,13 @@ class LivePlayActivity : AppCompatActivity() {
}
}
override fun onReconnectStateChanged(isReconnecting: Boolean, detail: String?) {
runOnUiThread {
val suffix = detail?.takeIf { it.isNotBlank() }?.let { ": $it" }.orEmpty()
logEvent(if (isReconnecting) "重连开始$suffix" else "重连结束$suffix")
}
}
override fun onError(error: com.sellycloud.sellycloudsdk.SellyLiveError) {
runOnUiThread {
logEvent("错误: ${error.message}")
@@ -208,7 +221,8 @@ class LivePlayActivity : AppCompatActivity() {
binding.actionScreenshot.setOnClickListener { captureCurrentFrame() }
binding.actionPip.setOnClickListener { enterPipMode() }
playerClient.attachRenderView(binding.renderContainer)
val backend = if (useTextureView) RenderBackend.TEXTURE_VIEW else RenderBackend.SURFACE_VIEW
playerClient.attachRenderView(binding.renderContainer, backend)
if (args.autoStart) {
lifecycleScope.launch {
@@ -278,7 +292,6 @@ class LivePlayActivity : AppCompatActivity() {
}
private fun enterPipMode() {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.O) return
if (!isPlaying) return
val renderView = playerClient.getRenderView() ?: binding.renderContainer
pipController.enterPictureInPictureMode(renderView)
@@ -322,7 +335,7 @@ class LivePlayActivity : AppCompatActivity() {
Toast.makeText(this, "生成 token 失败", Toast.LENGTH_SHORT).show()
return
}
Log.d(TAG, "startPlayback params liveMode=${args.liveMode} streamId=$channelId tokenPreview=${auth.tokenResult.tokenPreview}")
debugLog("startPlayback params liveMode=${args.liveMode} streamId=$channelId tokenPreview=${auth.tokenResult.tokenPreview}")
playerClient.token = auth.tokenResult.token
beginPlayback()
return
@@ -334,7 +347,7 @@ class LivePlayActivity : AppCompatActivity() {
return
}
if (input.contains("://")) {
Log.d(TAG, "startPlayback directUrl=$input")
debugLog("startPlayback directUrl=$input")
playerClient.token = null
beginPlayback()
return
@@ -354,7 +367,7 @@ class LivePlayActivity : AppCompatActivity() {
Toast.makeText(this, "生成 token 失败", Toast.LENGTH_SHORT).show()
return
}
Log.d(TAG, "startPlayback liveMode=${args.liveMode} streamId=$channelId tokenPreview=${auth.tokenResult.tokenPreview}")
debugLog("startPlayback liveMode=${args.liveMode} streamId=$channelId tokenPreview=${auth.tokenResult.tokenPreview}")
playerClient.token = auth.tokenResult.token
beginPlayback()
}
@@ -443,8 +456,22 @@ class LivePlayActivity : AppCompatActivity() {
Toast.makeText(this, "视图尚未布局完成,稍后再试", Toast.LENGTH_SHORT).show()
return
}
if (view is android.view.TextureView) {
val bmp = view.getBitmap()
if (bmp == null) {
Toast.makeText(this, "TextureView 尚未渲染画面", Toast.LENGTH_SHORT).show()
return
}
uiScope.launch(Dispatchers.IO) {
val ok = saveBitmapToGallery(bmp, prefix)
launch(Dispatchers.Main) {
Toast.makeText(this@LivePlayActivity, if (ok) "截图已保存到相册" else "保存失败", Toast.LENGTH_SHORT).show()
}
}
return
}
if (view !is android.view.SurfaceView) {
Toast.makeText(this, "当前视图不支持截图", Toast.LENGTH_SHORT).show()
Toast.makeText(this, "当前视图类型不支持截图", Toast.LENGTH_SHORT).show()
return
}
val bmp = Bitmap.createBitmap(view.width, view.height, Bitmap.Config.ARGB_8888)
@@ -761,6 +788,10 @@ class LivePlayActivity : AppCompatActivity() {
}
}
private fun debugLog(message: String) {
if (logEnabled) Log.d(TAG, message)
}
private fun dpToPx(dp: Int): Int {
return (dp * resources.displayMetrics.density + 0.5f).toInt()
}

View File

@@ -67,17 +67,15 @@ class LivePlayForegroundService : Service() {
}
private fun ensureChannel() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
val manager = getSystemService(NotificationManager::class.java) ?: return
val existing = manager.getNotificationChannel(CHANNEL_ID)
if (existing == null) {
val channel = NotificationChannel(
CHANNEL_ID,
"Live Playback",
NotificationManager.IMPORTANCE_LOW
)
manager.createNotificationChannel(channel)
}
val manager = getSystemService(NotificationManager::class.java) ?: return
val existing = manager.getNotificationChannel(CHANNEL_ID)
if (existing == null) {
val channel = NotificationChannel(
CHANNEL_ID,
"Live Playback",
NotificationManager.IMPORTANCE_LOW
)
manager.createNotificationChannel(channel)
}
}
@@ -88,14 +86,10 @@ class LivePlayForegroundService : Service() {
fun start(context: Context) {
val appContext = context.applicationContext
val intent = Intent(appContext, LivePlayForegroundService::class.java)
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
try {
appContext.startService(intent)
} catch (_: IllegalStateException) {
ContextCompat.startForegroundService(appContext, intent)
}
} else {
try {
appContext.startService(intent)
} catch (_: IllegalStateException) {
ContextCompat.startForegroundService(appContext, intent)
}
}

View File

@@ -82,6 +82,7 @@ class PkPlayActivity : AppCompatActivity() {
// Shared state
private var isMuted: Boolean = false
private var hasReleasedPlayers: Boolean = false
private var logEnabled: Boolean = true
// Log system
private val logLines: ArrayDeque<String> = ArrayDeque()
@@ -100,6 +101,7 @@ class PkPlayActivity : AppCompatActivity() {
envStore = LiveEnvSettingsStore(this)
val env = envStore.read().also { it.applyToSdkRuntimeConfig(this) }
logEnabled = env.logEnabled
args = Args.from(intent, env) ?: run {
Toast.makeText(this, "缺少 PK 播放参数", Toast.LENGTH_SHORT).show()
finish()
@@ -109,7 +111,7 @@ class PkPlayActivity : AppCompatActivity() {
binding.tvMainStreamName.text = args.mainStreamName
binding.tvPkStreamName.text = args.pkStreamName
Log.d(TAG, "初始化主播放器streamId=${args.mainStreamName}, 协议: RTC")
debugLog("初始化主播放器streamId=${args.mainStreamName}, 协议: RTC")
mainPlayer = SellyLiveVideoPlayer.initWithStreamId(
this,
args.mainStreamName,
@@ -144,7 +146,7 @@ class PkPlayActivity : AppCompatActivity() {
)
mainPlayer.setMuted(isMuted)
Log.d(TAG, "初始化 PK 播放器streamId=${args.pkStreamName}")
debugLog("初始化 PK 播放器streamId=${args.pkStreamName}")
pkPlayer = SellyLiveVideoPlayer.initWithStreamId(
this,
args.pkStreamName,
@@ -280,6 +282,13 @@ class PkPlayActivity : AppCompatActivity() {
}
}
override fun onReconnectStateChanged(isReconnecting: Boolean, detail: String?) {
runOnUiThread {
val suffix = detail?.takeIf { it.isNotBlank() }?.let { ": $it" }.orEmpty()
logEvent(if (isReconnecting) "$prefix: 重连开始$suffix" else "$prefix: 重连结束$suffix")
}
}
override fun onError(error: com.sellycloud.sellycloudsdk.SellyLiveError) {
runOnUiThread {
logEvent("$prefix: 错误: ${error.message}")
@@ -660,6 +669,10 @@ class PkPlayActivity : AppCompatActivity() {
}
}
private fun debugLog(message: String) {
if (logEnabled) Log.d(TAG, message)
}
private fun dpToPx(dp: Int): Int {
return (dp * resources.displayMetrics.density + 0.5f).toInt()
}

View File

@@ -10,6 +10,7 @@ import android.graphics.Bitmap
import android.graphics.Color
import android.graphics.Typeface
import android.graphics.drawable.GradientDrawable
import com.sellycloud.sellycloudsdk.render.RenderBackend
import android.os.Build
import android.os.Bundle
import android.os.Looper
@@ -28,6 +29,7 @@ import androidx.appcompat.app.AppCompatActivity
import androidx.appcompat.widget.AppCompatTextView
import androidx.core.content.ContextCompat
import com.demo.SellyCloudSDK.R
import com.demo.SellyCloudSDK.avdemo.AvDemoSettingsStore
import com.demo.SellyCloudSDK.databinding.ActivityVodPlayBinding
import com.demo.SellyCloudSDK.live.util.GalleryImageSaver
import com.sellycloud.sellycloudsdk.SellyCloudManager
@@ -55,6 +57,7 @@ class VodPlayActivity : AppCompatActivity() {
private var player: SellyVodPlayer? = null
private var renderView: View? = null
private var useTextureView = false
private var isPlaying = false
private var isMuted = false
private var currentState: SellyPlayerState = SellyPlayerState.Idle
@@ -90,6 +93,7 @@ class VodPlayActivity : AppCompatActivity() {
binding = ActivityVodPlayBinding.inflate(layoutInflater)
setContentView(binding.root)
supportActionBar?.hide()
useTextureView = AvDemoSettingsStore(this).read().renderBackendPreference.isTextureView()
addLogFloatingButton()
binding.btnClose.setOnClickListener { finish() }
@@ -249,7 +253,8 @@ class VodPlayActivity : AppCompatActivity() {
client.setMuted(isMuted)
}
renderView = vodPlayer.attachRenderView(binding.renderContainer)
val backend = if (useTextureView) RenderBackend.TEXTURE_VIEW else RenderBackend.SURFACE_VIEW
renderView = vodPlayer.attachRenderView(binding.renderContainer, backend)
player = vodPlayer
startPlayAttempt()
vodPlayer.prepareAsync()
@@ -331,8 +336,22 @@ class VodPlayActivity : AppCompatActivity() {
Toast.makeText(this, "视图尚未布局完成,稍后再试", Toast.LENGTH_SHORT).show()
return
}
if (view is android.view.TextureView) {
val bmp = view.getBitmap()
if (bmp == null) {
Toast.makeText(this, "TextureView 尚未渲染画面", Toast.LENGTH_SHORT).show()
return
}
uiScope.launch(Dispatchers.IO) {
val ok = saveBitmapToGallery(bmp, prefix)
launch(Dispatchers.Main) {
Toast.makeText(this@VodPlayActivity, if (ok) "截图已保存到相册" else "保存失败", Toast.LENGTH_SHORT).show()
}
}
return
}
if (view !is android.view.SurfaceView) {
Toast.makeText(this, "当前视图不支持截图", Toast.LENGTH_SHORT).show()
Toast.makeText(this, "当前视图类型不支持截图", Toast.LENGTH_SHORT).show()
return
}
val bmp = Bitmap.createBitmap(view.width, view.height, Bitmap.Config.ARGB_8888)

View File

@@ -1,9 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<shape xmlns:android="http://schemas.android.com/apk/res/android" android:shape="rectangle">
<solid android:color="@color/av_card_fill" />
<corners
android:topLeftRadius="18dp"
android:topRightRadius="18dp"
android:bottomLeftRadius="0dp"
android:bottomRightRadius="0dp" />
</shape>

View File

@@ -1,5 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<shape xmlns:android="http://schemas.android.com/apk/res/android" android:shape="rectangle">
<solid android:color="@color/brand_primary" />
<corners android:radius="@dimen/av_corner_large" />
</shape>

View File

@@ -0,0 +1,14 @@
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="24dp"
android:height="24dp"
android:viewportWidth="24"
android:viewportHeight="24">
<!-- Frame corners -->
<path
android:fillColor="#FFFFFF"
android:pathData="M3,3h4V5H5v2H3V3zM17,3h4v4h-2V5h-2V3zM3,17h2v2h2v2H3v-4zM19,19h-2v2h4v-4h-2v2z" />
<!-- Person silhouette -->
<path
android:fillColor="#FFFFFF"
android:pathData="M12,8a2.5,2.5 0,1 0,0 -5,2.5 2.5,0 0,0 0,5zM12,9.5c-2.33,0 -7,1.17 -7,3.5v1.5h14V13c0,-2.33 -4.67,-3.5 -7,-3.5z" />
</vector>

View File

@@ -476,6 +476,57 @@
android:textColorHint="@color/av_text_hint"
android:textSize="14sp" />
<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="16dp"
android:text="@string/settings_render_backend"
android:textColor="@color/av_text_primary"
android:textSize="14sp"
android:textStyle="bold" />
<RadioGroup
android:id="@+id/rgSettingsRenderBackend"
android:layout_width="match_parent"
android:layout_height="@dimen/av_field_height"
android:layout_marginTop="8dp"
android:background="@drawable/bg_av_segment_container"
android:checkedButton="@+id/rbSettingsRenderSurface"
android:orientation="horizontal">
<RadioButton
android:id="@+id/rbSettingsRenderSurface"
android:layout_width="0dp"
android:layout_height="match_parent"
android:layout_weight="1"
android:background="@drawable/selector_av_segment_left"
android:button="@null"
android:gravity="center"
android:text="@string/settings_render_backend_surface"
android:textColor="@color/av_segment_text"
android:textSize="14sp" />
<RadioButton
android:id="@+id/rbSettingsRenderTexture"
android:layout_width="0dp"
android:layout_height="match_parent"
android:layout_weight="1"
android:background="@drawable/selector_av_segment_right"
android:button="@null"
android:gravity="center"
android:text="@string/settings_render_backend_texture"
android:textColor="@color/av_segment_text"
android:textSize="14sp" />
</RadioGroup>
<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="8dp"
android:text="@string/settings_render_backend_hint"
android:textColor="@color/av_text_hint"
android:textSize="12sp" />
<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"

View File

@@ -37,7 +37,8 @@
android:src="@drawable/ic_av_close"
app:tint="@color/av_text_primary"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintTop_toTopOf="parent" />
app:layout_constraintTop_toTopOf="parent"
app:layout_constraintStart_toStartOf="parent" />
<LinearLayout
android:id="@+id/controlBar"

View File

@@ -105,6 +105,17 @@
android:textSize="12sp" />
</LinearLayout>
<TextView
android:id="@+id/tvStatsDetail"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginTop="6dp"
android:maxLines="3"
android:text="状态详情"
android:textColor="@color/brand_primary_text_sub"
android:textSize="12sp"
android:visibility="gone" />
<LinearLayout
android:layout_width="wrap_content"
android:layout_height="wrap_content"
@@ -201,6 +212,30 @@
android:textSize="12sp" />
</LinearLayout>
<LinearLayout
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="6dp"
android:gravity="center_vertical"
android:orientation="horizontal">
<ImageView
android:layout_width="16dp"
android:layout_height="16dp"
android:contentDescription="@null"
android:src="@drawable/ic_live_stats_fps"
app:tint="@color/brand_primary_text_sub" />
<TextView
android:id="@+id/tvStatsFrameCallback"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginStart="8dp"
android:text="@string/live_stats_frame_callback_off"
android:textColor="@color/brand_primary_text_sub"
android:textSize="12sp" />
</LinearLayout>
<LinearLayout
android:layout_width="wrap_content"
android:layout_height="wrap_content"
@@ -404,7 +439,7 @@
</LinearLayout>
<LinearLayout
android:id="@+id/actionScreenshot"
android:id="@+id/actionAutoFraming"
android:layout_width="0dp"
android:layout_height="match_parent"
android:layout_weight="1"
@@ -417,17 +452,81 @@
android:paddingBottom="8dp">
<ImageView
android:id="@+id/ivToolAutoFraming"
android:layout_width="24dp"
android:layout_height="24dp"
android:contentDescription="@string/push_tool_screenshot"
android:src="@drawable/ic_av_camera"
android:contentDescription="@string/push_tool_autoframing_off"
android:src="@drawable/ic_live_auto_framing"
app:tint="@color/brand_primary_text_on" />
<TextView
android:id="@+id/tvToolAutoFramingLabel"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="4dp"
android:text="@string/push_tool_screenshot"
android:text="@string/push_tool_autoframing_off"
android:textColor="@color/brand_primary_text_on"
android:textSize="12sp" />
</LinearLayout>
<LinearLayout
android:id="@+id/actionFrameCallback"
android:layout_width="0dp"
android:layout_height="match_parent"
android:layout_weight="1"
android:clickable="true"
android:focusable="true"
android:foreground="?attr/selectableItemBackground"
android:gravity="center"
android:orientation="vertical"
android:paddingTop="8dp"
android:paddingBottom="8dp">
<ImageView
android:id="@+id/ivToolFrameCallback"
android:layout_width="24dp"
android:layout_height="24dp"
android:contentDescription="@string/push_tool_frame_callback_off"
android:src="@drawable/ic_live_stats_fps"
app:tint="@color/brand_primary_text_on" />
<TextView
android:id="@+id/tvToolFrameCallbackLabel"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="4dp"
android:text="@string/push_tool_frame_callback_off"
android:textColor="@color/brand_primary_text_on"
android:textSize="12sp" />
</LinearLayout>
<LinearLayout
android:id="@+id/actionFrameModify"
android:layout_width="0dp"
android:layout_height="match_parent"
android:layout_weight="1"
android:clickable="true"
android:focusable="true"
android:foreground="?attr/selectableItemBackground"
android:gravity="center"
android:orientation="vertical"
android:paddingTop="8dp"
android:paddingBottom="8dp">
<ImageView
android:id="@+id/ivToolFrameModify"
android:layout_width="24dp"
android:layout_height="24dp"
android:contentDescription="@string/push_tool_frame_modify_off"
android:src="@drawable/ic_live_stats_fps"
app:tint="@color/brand_primary_text_on" />
<TextView
android:id="@+id/tvToolFrameModifyLabel"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="4dp"
android:text="@string/push_tool_frame_modify_off"
android:textColor="@color/brand_primary_text_on"
android:textSize="12sp" />
</LinearLayout>

View File

@@ -25,7 +25,8 @@
android:src="@drawable/ic_av_close"
app:tint="@color/av_text_primary"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintTop_toTopOf="parent" />
app:layout_constraintTop_toTopOf="parent"
app:layout_constraintStart_toStartOf="parent" />
<LinearLayout
android:id="@+id/progressRow"

View File

@@ -1,267 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<ScrollView xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:fillViewport="true">
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="vertical"
android:padding="20dp"
android:background="@android:color/white">
<TextView
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="美颜设置"
android:textSize="18sp"
android:textStyle="bold"
android:textColor="@android:color/black"
android:gravity="center"
android:layout_marginBottom="20dp" />
<!-- 美颜开关 -->
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal"
android:layout_marginBottom="15dp"
android:gravity="center_vertical">
<TextView
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
android:text="启用美颜"
android:textSize="16sp"
android:textColor="@android:color/black" />
<Switch
android:id="@+id/switchBeautyEnable"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:checked="true" />
</LinearLayout>
<!-- 磨皮强度 -->
<TextView
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="磨皮强度"
android:textSize="14sp"
android:textColor="@android:color/black"
android:layout_marginBottom="8dp" />
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal"
android:layout_marginBottom="15dp"
android:gravity="center_vertical">
<SeekBar
android:id="@+id/seekBarBeautyIntensity"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
android:max="100"
android:progress="60" />
<TextView
android:id="@+id/tvBeautyValue"
android:layout_width="50dp"
android:layout_height="wrap_content"
android:text="6.0"
android:textSize="14sp"
android:textColor="@android:color/black"
android:gravity="center"
android:layout_marginStart="8dp" />
</LinearLayout>
<!-- 滤镜强度 -->
<TextView
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="滤镜强度"
android:textSize="14sp"
android:textColor="@android:color/black"
android:layout_marginBottom="8dp" />
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal"
android:layout_marginBottom="15dp"
android:gravity="center_vertical">
<SeekBar
android:id="@+id/seekBarFilterIntensity"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
android:max="10"
android:progress="7" />
<TextView
android:id="@+id/tvFilterValue"
android:layout_width="50dp"
android:layout_height="wrap_content"
android:text="0.7"
android:textSize="14sp"
android:textColor="@android:color/black"
android:gravity="center"
android:layout_marginStart="8dp" />
</LinearLayout>
<!-- 美白强度 -->
<TextView
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="美白强度"
android:textSize="14sp"
android:textColor="@android:color/black"
android:layout_marginBottom="8dp" />
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal"
android:layout_marginBottom="15dp"
android:gravity="center_vertical">
<SeekBar
android:id="@+id/seekBarColorIntensity"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
android:max="10"
android:progress="5" />
<TextView
android:id="@+id/tvColorValue"
android:layout_width="50dp"
android:layout_height="wrap_content"
android:text="0.5"
android:textSize="14sp"
android:textColor="@android:color/black"
android:gravity="center"
android:layout_marginStart="8dp" />
</LinearLayout>
<!-- 红润强度 -->
<TextView
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="红润强度"
android:textSize="14sp"
android:textColor="@android:color/black"
android:layout_marginBottom="8dp" />
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal"
android:layout_marginBottom="15dp"
android:gravity="center_vertical">
<SeekBar
android:id="@+id/seekBarRedIntensity"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
android:max="10"
android:progress="5" />
<TextView
android:id="@+id/tvRedValue"
android:layout_width="50dp"
android:layout_height="wrap_content"
android:text="0.5"
android:textSize="14sp"
android:textColor="@android:color/black"
android:gravity="center"
android:layout_marginStart="8dp" />
</LinearLayout>
<!-- 亮眼强度 -->
<TextView
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="亮眼强度"
android:textSize="14sp"
android:textColor="@android:color/black"
android:layout_marginBottom="8dp" />
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal"
android:layout_marginBottom="15dp"
android:gravity="center_vertical">
<SeekBar
android:id="@+id/seekBarEyeBrightIntensity"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
android:max="10"
android:progress="10" />
<TextView
android:id="@+id/tvEyeBrightValue"
android:layout_width="50dp"
android:layout_height="wrap_content"
android:text="1.0"
android:textSize="14sp"
android:textColor="@android:color/black"
android:gravity="center"
android:layout_marginStart="8dp" />
</LinearLayout>
<!-- 美牙强度 -->
<TextView
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="美牙强度"
android:textSize="14sp"
android:textColor="@android:color/black"
android:layout_marginBottom="8dp" />
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal"
android:layout_marginBottom="20dp"
android:gravity="center_vertical">
<SeekBar
android:id="@+id/seekBarToothIntensity"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
android:max="10"
android:progress="10" />
<TextView
android:id="@+id/tvToothValue"
android:layout_width="50dp"
android:layout_height="wrap_content"
android:text="1.0"
android:textSize="14sp"
android:textColor="@android:color/black"
android:gravity="center"
android:layout_marginStart="8dp" />
</LinearLayout>
<!-- 关闭按钮 -->
<Button
android:id="@+id/btnClose"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="关闭"
android:backgroundTint="#607D8B"
android:textColor="@android:color/white" />
</LinearLayout>
</ScrollView>

View File

@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background"/>
<foreground android:drawable="@mipmap/ic_launcher_foreground"/>
</adaptive-icon>
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@mipmap/ic_launcher_foreground" />
</adaptive-icon>

View File

@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background"/>
<foreground android:drawable="@mipmap/ic_launcher_foreground"/>
</adaptive-icon>
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@mipmap/ic_launcher_foreground" />
</adaptive-icon>

View File

@@ -1,7 +0,0 @@
precision mediump float;
varying vec2 vTextureCoord;
uniform sampler2D uSampler;
void main() {
gl_FragColor = texture2D(uSampler, vTextureCoord);
}

View File

@@ -1,12 +0,0 @@
attribute vec4 aPosition;
attribute vec4 aTextureCoord;
uniform mat4 uMVPMatrix;
uniform mat4 uSTMatrix;
varying vec2 vTextureCoord;
void main() {
gl_Position = uMVPMatrix * aPosition;
vTextureCoord = (uSTMatrix * aTextureCoord).xy;
}

View File

@@ -16,7 +16,6 @@
<color name="av_tab_inactive">#8E8E93</color>
<color name="av_overlay_dim">#B3000000</color>
<color name="av_card_shadow">#26000000</color>
<color name="av_stats_bg">#B0000000</color>
<color name="av_stats_green">#33C759</color>

View File

@@ -22,7 +22,6 @@
<color name="av_tab_inactive">#8E8E93</color>
<color name="av_overlay_dim">#99000000</color>
<color name="av_card_shadow">#14000000</color>
<color name="av_stats_bg">#B0000000</color>
<color name="av_stats_green">#33C759</color>

View File

@@ -7,7 +7,6 @@
<dimen name="av_corner_small">8dp</dimen>
<dimen name="av_home_button_height">96dp</dimen>
<dimen name="av_home_button_spacing">16dp</dimen>
<dimen name="av_field_height">44dp</dimen>
<dimen name="av_primary_button_height">50dp</dimen>

View File

@@ -1,7 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">RTMPDEMO</string>
<!-- Login -->
<string name="login_welcome">欢迎使用</string>
<string name="login_username_hint">请输入用户名</string>
@@ -17,14 +15,9 @@
<string name="logout_action">退出登录</string>
<!-- Feature Hub -->
<string name="hub_subtitle_basic">选择你要体验的场景</string>
<string name="live_streaming_title">直播推拉流</string>
<string name="live_streaming_subtitle">RTMP / WHIP 推流、拉流示例</string>
<string name="interactive_live_title">VideoCall</string>
<string name="interactive_live_subtitle">多人语音、视频互动体验</string>
<!-- AVDemo (iOS aligned) -->
<string name="avdemo_title">首页</string>
<string name="tab_home">首页</string>
<string name="tab_call">通话</string>
<string name="tab_settings">设置</string>
@@ -34,7 +27,6 @@
<string name="home_vod">点播播放</string>
<string name="home_single_chat">音视频单聊</string>
<string name="home_conference">音视频会议</string>
<string name="multi_play">多路播放</string>
<string name="home_live_square">直播广场</string>
<string name="home_live_square_title">直播广场</string>
<string name="home_live_square_empty">暂无直播</string>
@@ -47,6 +39,10 @@
<string name="settings_fps">帧率 (FPS)</string>
<string name="settings_max_bitrate">最大码率 (kbps)</string>
<string name="settings_min_bitrate">最小码率 (kbps)</string>
<string name="settings_render_backend">渲染后端</string>
<string name="settings_render_backend_surface">SurfaceView</string>
<string name="settings_render_backend_texture">TextureView</string>
<string name="settings_render_backend_hint">进入播放、推流或通话页面前选定,页面内不支持切换。</string>
<string name="settings_env_title">直播环境设置</string>
<string name="settings_vhost">VHost</string>
<string name="settings_vhost_key">VHost Key</string>
@@ -64,13 +60,20 @@
<string name="push_tool_camera_on">开启摄像头</string>
<string name="push_tool_beauty_on">美颜开启</string>
<string name="push_tool_beauty_off">美颜关闭</string>
<string name="push_tool_screenshot">截图</string>
<string name="push_tool_background">背景图</string>
<string name="push_tool_frame_callback_off">帧回调关</string>
<string name="push_tool_frame_callback_texture">帧回调纹理</string>
<string name="push_tool_frame_callback_cpu_empty">帧回调空CPU</string>
<string name="push_tool_frame_callback_cpu_single">帧回调单CPU</string>
<string name="push_tool_frame_callback_cpu_double">帧回调双CPU</string>
<string name="push_tool_frame_modify_on">改帧开</string>
<string name="push_tool_frame_modify_off">改帧关</string>
<string name="push_tool_autoframing_on">取景开</string>
<string name="push_tool_autoframing_off">取景关</string>
<string name="push_stream_orientation">推流方向</string>
<string name="push_stream_portrait">竖屏推流</string>
<string name="push_stream_landscape">横屏推流</string>
<string name="push_tool_not_supported">暂不支持</string>
<string name="push_tool_mute_not_supported">推流静音暂不支持</string>
<string name="push_tool_screenshot_saved">截图已保存到相册</string>
<string name="push_tool_screenshot_failed">保存失败</string>
<string name="push_settings_apply">应用</string>
@@ -91,10 +94,6 @@
<string name="vod_pick_local_file">选择本地文件</string>
<string name="vod_play_asset_sample">播放包内示例</string>
<string name="vod_online_resources">在线资源</string>
<string name="vod_list_loading">加载中…</string>
<string name="vod_list_error">加载失败,点击重试</string>
<string name="vod_list_empty">暂无在线资源</string>
<string name="protocol_rtmp">RTMP</string>
<string name="protocol_rtc">RTC</string>
@@ -115,6 +114,9 @@
<string name="live_play_foreground_text">直播播放保持中</string>
<string name="live_stats_title">直播数据</string>
<string name="live_stats_frame_callback_off">FrameCB: off</string>
<string name="live_stats_frame_callback_modify">FrameCB[modify]: %1$d fps · %2$s</string>
<string name="live_stats_frame_callback_generic">FrameCB[%1$s]: %2$d fps · %3$s</string>
<!-- Interactive Live -->
<string name="switch_camera">切换摄像头</string>
@@ -137,9 +139,6 @@
<string name="ctrl_local_preview_off">关闭预览</string>
<string name="ctrl_local_preview_on">开启预览</string>
<string name="ctrl_local_publish_off">停止推送</string>
<string name="ctrl_local_publish_on">恢复推送</string>
<string name="ctrl_remote_off">静音远端</string>
<string name="ctrl_remote_on">开启远端</string>
<string name="ctrl_audio_speaker">扬声器</string>
<string name="ctrl_audio_earpiece">听筒</string>
<string name="ctrl_mic_off">关闭麦克风</string>